1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
25 #include "langhooks.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
37 #include "diagnostic.h"
38 #include "lto-streamer.h"
40 /* Vector where the parameter infos are actually stored. */
41 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
42 /* Vector where the edge argument infos (per-call-site jump functions) are
   actually stored.  Note this vector is GC-allocated, unlike the
   heap-allocated node params vector above.  */
43 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
45 /* Bitmap with all UIDs of call graph edges that have been already processed
46 by indirect inlining. */
47 static bitmap iinlining_processed_edges;
49 /* Holders of ipa cgraph hooks: */
50 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
51 static struct cgraph_node_hook_list *node_removal_hook_holder;
52 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
53 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
55 /* Add cgraph NODE described by INFO to the worklist WL regardless of whether
56 it is in one or not. It should almost never be used directly, as opposed to
57 ipa_push_func_to_list. */
60 ipa_push_func_to_list_1 (struct ipa_func_list **wl,
61 struct cgraph_node *node,
62 struct ipa_node_params *info)
64 struct ipa_func_list *temp;
/* Mark the node as enqueued up front; ipa_pop_func_from_list clears it.  */
66 info->node_enqueued = 1;
67 temp = XCNEW (struct ipa_func_list);
/* NOTE(review): this excerpt is truncated -- the statements that store NODE
   into TEMP and link TEMP at the head of *WL are not visible here.  */
73 /* Initialize worklist to contain all functions. */
75 struct ipa_func_list *
76 ipa_init_func_list (void)
78 struct cgraph_node *node;
79 struct ipa_func_list * wl;
/* Walk the whole call graph and push each node onto WL.  */
82 for (node = cgraph_nodes; node; node = node->next)
85 struct ipa_node_params *info = IPA_NODE_REF (node);
86 /* Unreachable nodes should have been eliminated before ipcp and
   inlining run (comment continuation elided in this excerpt).  */
88 gcc_assert (node->needed || node->reachable);
89 ipa_push_func_to_list_1 (&wl, node, info);
95 /* Remove a function from the worklist WL and return it. */
98 ipa_pop_func_from_list (struct ipa_func_list **wl)
100 struct ipa_node_params *info;
101 struct ipa_func_list *first;
102 struct cgraph_node *node;
/* NOTE(review): excerpt truncated -- unlinking FIRST from *WL, freeing it
   and loading NODE from it are not visible here.  */
109 info = IPA_NODE_REF (node);
/* Clear the flag set by ipa_push_func_to_list_1 so NODE may be
   re-enqueued later.  */
110 info->node_enqueued = 0;
114 /* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  Presumably returns -1 when PTREE is not a formal of that
   function -- callers test for INDEX >= 0; the return statements are elided
   in this excerpt.  */
118 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
122 count = ipa_get_param_count (info);
/* Linear scan over the parameter descriptors comparing decls by pointer.  */
123 for (i = 0; i < count; i++)
124 if (ipa_get_param(info, i) == ptree)
130 /* Populate the param_decl field in parameter descriptors of INFO that
131 corresponds to NODE. */
134 ipa_populate_param_decls (struct cgraph_node *node,
135 struct ipa_node_params *info)
143 fnargs = DECL_ARGUMENTS (fndecl);
/* Record each PARM_DECL in declaration order, so descriptor indices match
   formal parameter positions.  */
145 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
147 info->params[param_num].decl = parm;
152 /* Return how many formal parameters FNDECL has. */
155 count_formal_params_1 (tree fndecl)
/* Walk the DECL_ARGUMENTS chain; the counter increment and return are
   elided in this excerpt.  */
160 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = TREE_CHAIN (parm))
166 /* Count number of formal parameters in NODE.  Store the result to the
167 appropriate field of INFO. */
170 ipa_count_formal_params (struct cgraph_node *node,
171 struct ipa_node_params *info)
175 param_num = count_formal_params_1 (node->decl);
176 ipa_set_param_count (info, param_num);
179 /* Initialize the ipa_node_params structure associated with NODE by counting
180 the function parameters, creating the descriptors and populating their
   decls (comment continuation elided in this excerpt).  */
184 ipa_initialize_node_params (struct cgraph_node *node)
186 struct ipa_node_params *info = IPA_NODE_REF (node);
190 ipa_count_formal_params (node, info);
/* Allocate one zero-initialized descriptor per formal parameter.  */
191 info->params = XCNEWVEC (struct ipa_param_descriptor,
192 ipa_get_param_count (info));
193 ipa_populate_param_decls (node, info);
197 /* Callback of walk_stmt_load_store_addr_ops for the visit_store and visit_addr
198 parameters. If OP is a parameter declaration, mark it as modified in the
199 info structure passed in DATA. */
202 visit_store_addr_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
205 struct ipa_node_params *info = (struct ipa_node_params *) data;
/* Strip component/array refs down to the underlying base declaration.  */
207 op = get_base_address (op);
209 && TREE_CODE (op) == PARM_DECL)
211 int index = ipa_get_param_decl_index (info, op);
/* Every PARM_DECL of this function must be present in the descriptors.  */
212 gcc_assert (index >= 0);
213 info->params[index].modified = true;
219 /* Compute which formal parameters of function associated with NODE are locally
220 modified or their address is taken. Note that this does not apply on
221 parameters with SSA names but those can and should be analyzed
   separately (comment continuation elided in this excerpt).  */
225 ipa_detect_param_modifications (struct cgraph_node *node)
227 tree decl = node->decl;
229 struct function *func;
230 gimple_stmt_iterator gsi;
231 struct ipa_node_params *info = IPA_NODE_REF (node);
/* Nothing to do for parameterless functions or if already analyzed.  */
233 if (ipa_get_param_count (info) == 0 || info->modification_analysis_done)
236 func = DECL_STRUCT_FUNCTION (decl);
/* Scan every statement of the body.  Stores and address-takings mark the
   descriptor via the callback; loads are ignored (NULL visit_load).  */
237 FOR_EACH_BB_FN (bb, func)
238 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
239 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info, NULL,
240 visit_store_addr_for_mod_analysis,
241 visit_store_addr_for_mod_analysis);
243 info->modification_analysis_done = 1;
246 /* Count number of arguments callsite CS has and store it in
247 ipa_edge_args structure corresponding to this callsite. */
250 ipa_count_arguments (struct cgraph_edge *cs)
255 stmt = cs->call_stmt;
256 gcc_assert (is_gimple_call (stmt));
257 arg_num = gimple_call_num_args (stmt);
/* Grow the per-edge vector on demand so the IPA_EDGE_REF lookup below
   (indexed by edge UID) is in range.  */
258 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
259 <= (unsigned) cgraph_edge_max_uid)
260 VEC_safe_grow_cleared (ipa_edge_args_t, gc,
261 ipa_edge_args_vector, cgraph_edge_max_uid + 1);
262 ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
265 /* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F (comment continuation elided in this excerpt).  */
269 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
272 struct cgraph_edge *cs;
273 struct ipa_jump_func *jump_func;
274 enum jump_func_type type;
276 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
277 for (cs = node->callees; cs; cs = cs->next_callee)
/* Skip edges for which no ipa_edge_args entry has been computed.  */
279 if (!ipa_edge_args_info_available_for_edge_p (cs))
282 fprintf (f, " callsite %s ", cgraph_node_name (node));
283 fprintf (f, "-> %s :: \n", cgraph_node_name (cs->callee));
285 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
286 for (i = 0; i < count; i++)
288 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
289 type = jump_func->type;
291 fprintf (f, " param %d: ", i);
292 if (type == IPA_JF_UNKNOWN)
293 fprintf (f, "UNKNOWN\n");
294 else if (type == IPA_JF_KNOWN_TYPE)
296 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
297 fprintf (f, "KNOWN TYPE, type in binfo is: ");
298 print_generic_expr (f, binfo_type, 0);
299 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
301 else if (type == IPA_JF_CONST)
303 tree val = jump_func->value.constant;
304 fprintf (f, "CONST: ");
305 print_generic_expr (f, val, 0);
/* For addresses of CONST_DECLs also show the constant's initializer.  */
306 if (TREE_CODE (val) == ADDR_EXPR
307 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
310 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
315 else if (type == IPA_JF_CONST_MEMBER_PTR)
317 fprintf (f, "CONST MEMBER PTR: ");
318 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
320 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
323 else if (type == IPA_JF_PASS_THROUGH)
325 fprintf (f, "PASS THROUGH: ");
326 fprintf (f, "%d, op %s ",
327 jump_func->value.pass_through.formal_id,
329 jump_func->value.pass_through.operation]);
330 if (jump_func->value.pass_through.operation != NOP_EXPR)
/* NOTE(review): the prints below go to DUMP_FILE while the rest of this
   function prints to F -- output is lost or misplaced whenever F is not
   dump_file.  Looks like a bug; should be F.  */
331 print_generic_expr (dump_file,
332 jump_func->value.pass_through.operand, 0);
333 fprintf (dump_file, "\n");
335 else if (type == IPA_JF_ANCESTOR)
337 fprintf (f, "ANCESTOR: ");
338 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
339 jump_func->value.ancestor.formal_id,
340 jump_func->value.ancestor.offset);
341 print_generic_expr (f, jump_func->value.ancestor.type, 0);
/* NOTE(review): same DUMP_FILE-vs-F inconsistency as above.  */
342 fprintf (dump_file, "\n");
348 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
351 ipa_print_all_jump_functions (FILE *f)
353 struct cgraph_node *node;
355 fprintf (f, "\nJump functions:\n");
/* Delegate per-node printing to ipa_print_node_jump_functions.  */
356 for (node = cgraph_nodes; node; node = node->next)
358 ipa_print_node_jump_functions (f, node);
362 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
363 of an assignment statement STMT, try to find out whether NAME can be
364 described by a (possibly polynomial) pass-through jump-function or an
365 ancestor jump function and if so, write the appropriate function into
   JFUNC (comment continuation elided in this excerpt).  */
369 compute_complex_assign_jump_func (struct ipa_node_params *info,
370 struct ipa_jump_func *jfunc,
371 gimple stmt, tree name)
373 HOST_WIDE_INT offset, size, max_size;
377 op1 = gimple_assign_rhs1 (stmt);
378 op2 = gimple_assign_rhs2 (stmt);
/* Case 1: RHS1 is directly an unmodified formal parameter (default def).  */
380 if (TREE_CODE (op1) == SSA_NAME
381 && SSA_NAME_IS_DEFAULT_DEF (op1))
383 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
/* A binary operation: the second operand must be interprocedurally
   invariant, and except for comparisons the result type must match NAME.  */
389 if (!is_gimple_ip_invariant (op2)
390 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
391 && !useless_type_conversion_p (TREE_TYPE (name),
395 jfunc->type = IPA_JF_PASS_THROUGH;
396 jfunc->value.pass_through.formal_id = index;
397 jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
398 jfunc->value.pass_through.operand = op2;
/* A plain copy or no-op conversion: simple pass-through.  */
400 else if (gimple_assign_unary_nop_p (stmt))
402 jfunc->type = IPA_JF_PASS_THROUGH;
403 jfunc->value.pass_through.formal_id = index;
404 jfunc->value.pass_through.operation = NOP_EXPR;
/* Case 2: RHS is &parm->field -- try to build an ancestor jump function.  */
409 if (TREE_CODE (op1) != ADDR_EXPR)
412 op1 = TREE_OPERAND (op1, 0);
413 type = TREE_TYPE (op1);
414 if (TREE_CODE (type) != RECORD_TYPE)
416 op1 = get_ref_base_and_extent (op1, &offset, &size, &max_size);
417 if (TREE_CODE (op1) != INDIRECT_REF
418 /* If this is a varying address, punt. */
422 op1 = TREE_OPERAND (op1, 0);
/* The dereferenced pointer must itself be an unmodified formal.  */
423 if (TREE_CODE (op1) != SSA_NAME
424 || !SSA_NAME_IS_DEFAULT_DEF (op1))
427 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
430 jfunc->type = IPA_JF_ANCESTOR;
431 jfunc->value.ancestor.formal_id = index;
432 jfunc->value.ancestor.offset = offset;
433 jfunc->value.ancestor.type = type;
438 /* Given that an actual argument is an SSA_NAME that is a result of a phi
439 statement PHI, try to find out whether NAME is in fact a
440 multiple-inheritance typecast from a descendant into an ancestor of a formal
441 parameter and thus can be described by an ancestor jump function and if so,
442 write the appropriate function into JFUNC.
444 Essentially we want to match the following pattern:
452 iftmp.1_3 = &obj_2(D)->D.1762;
455 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
456 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   (comment continuation elided in this excerpt)  */
460 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
461 struct ipa_jump_func *jfunc,
464 HOST_WIDE_INT offset, size, max_size;
466 basic_block phi_bb, assign_bb, cond_bb;
467 tree tmp, parm, expr;
/* The phi must merge exactly two values, the second being a literal 0
   (the NULL branch of the guarded cast).  */
470 if (gimple_phi_num_args (phi) != 2
471 || !integer_zerop (PHI_ARG_DEF (phi, 1)))
474 tmp = PHI_ARG_DEF (phi, 0)
475 if (TREE_CODE (tmp) != SSA_NAME
476 || SSA_NAME_IS_DEFAULT_DEF (tmp)
477 || !POINTER_TYPE_P (TREE_TYPE (tmp))
478 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
/* The non-zero value must come from a single-predecessor block containing
   a simple assignment of the form tmp = &obj->field.  */
481 assign = SSA_NAME_DEF_STMT (tmp);
482 assign_bb = gimple_bb (assign);
483 if (!single_pred_p (assign_bb)
484 || !gimple_assign_single_p (assign))
486 expr = gimple_assign_rhs1 (assign);
488 if (TREE_CODE (expr) != ADDR_EXPR)
490 expr = TREE_OPERAND (expr, 0);
491 expr = get_ref_base_and_extent (expr, &offset, &size, &max_size);
493 if (TREE_CODE (expr) != INDIRECT_REF
494 /* If this is a varying address, punt. */
498 parm = TREE_OPERAND (expr, 0);
/* The dereferenced pointer must be an unmodified formal parameter.  */
499 if (TREE_CODE (parm) != SSA_NAME
500 || !SSA_NAME_IS_DEFAULT_DEF (parm))
503 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
/* The dominating condition must be exactly "parm != 0".  */
507 cond_bb = single_pred (assign_bb);
508 cond = last_stmt (cond_bb);
509 if (gimple_code (cond) != GIMPLE_COND
510 || gimple_cond_code (cond) != NE_EXPR
511 || gimple_cond_lhs (cond) != parm
512 || !integer_zerop (gimple_cond_rhs (cond)))
/* Both phi predecessors must be the assign block and the condition block,
   ensuring no other path reaches the merge.  */
516 phi_bb = gimple_bb (phi);
517 for (i = 0; i < 2; i++)
519 basic_block pred = EDGE_PRED (phi_bb, i)->src;
520 if (pred != assign_bb && pred != cond_bb)
524 jfunc->type = IPA_JF_ANCESTOR;
525 jfunc->value.ancestor.formal_id = index;
526 jfunc->value.ancestor.offset = offset;
527 jfunc->value.ancestor.type = TREE_TYPE (TREE_TYPE (tmp));
530 /* Given OP whch is passed as an actual argument to a called function,
531 determine if it is possible to construct a KNOWN_TYPE jump function for it
532 and if so, create one and store it to JFUNC. */
535 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc)
/* Only addresses of objects can carry a statically known type.  */
539 if (TREE_CODE (op) != ADDR_EXPR)
542 op = TREE_OPERAND (op, 0);
543 binfo = gimple_get_relevant_ref_binfo (op, NULL_TREE);
546 jfunc->type = IPA_JF_KNOWN_TYPE;
547 jfunc->value.base_binfo = binfo;
552 /* Determine the jump functions of scalar arguments. Scalar means SSA names
553 and constants of a number of selected types. INFO is the ipa_node_params
554 structure associated with the caller, FUNCTIONS is a pointer to an array of
555 jump function structures associated with CALL which is the call statement
   being analyzed (comment continuation elided in this excerpt).  */
559 compute_scalar_jump_functions (struct ipa_node_params *info,
560 struct ipa_jump_func *functions,
566 for (num = 0; num < gimple_call_num_args (call); num++)
568 arg = gimple_call_arg (call, num);
/* Interprocedural invariants become constant jump functions.  */
570 if (is_gimple_ip_invariant (arg))
572 functions[num].type = IPA_JF_CONST;
573 functions[num].value.constant = arg;
575 else if (TREE_CODE (arg) == SSA_NAME)
/* A default-def SSA name is the incoming value of a formal parameter:
   a simple pass-through.  (The modified-parameter guard between these
   lines is elided in this excerpt.)  */
577 if (SSA_NAME_IS_DEFAULT_DEF (arg))
579 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
583 functions[num].type = IPA_JF_PASS_THROUGH;
584 functions[num].value.pass_through.formal_id = index;
585 functions[num].value.pass_through.operation = NOP_EXPR;
/* Otherwise look at the defining statement: assignments may yield complex
   pass-through/ancestor functions, phis may yield ancestor functions.  */
590 gimple stmt = SSA_NAME_DEF_STMT (arg);
591 if (is_gimple_assign (stmt))
592 compute_complex_assign_jump_func (info, &functions[num],
594 else if (gimple_code (stmt) == GIMPLE_PHI)
595 compute_complex_ancestor_jump_func (info, &functions[num],
/* Non-SSA, non-invariant arguments may still have a known type.  */
600 compute_known_type_jump_func (arg, &functions[num]);
604 /* Inspect the given TYPE and return true iff it has the same structure (the
605 same number of fields of the same types) as a C++ member pointer. If
606 METHOD_PTR and DELTA are non-NULL, store the trees representing the
607 corresponding fields there. */
610 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
614 if (TREE_CODE (type) != RECORD_TYPE)
/* First field must be a pointer to a METHOD_TYPE (the __pfn field).  */
617 fld = TYPE_FIELDS (type);
618 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
619 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
/* Second field should be the integral __delta field.  */
625 fld = TREE_CHAIN (fld);
/* NOTE(review): INTEGRAL_TYPE_P is applied to the FIELD_DECL itself rather
   than TREE_TYPE (fld); a FIELD_DECL is never an integral type, so this
   clause can never trigger.  Likely intended: INTEGRAL_TYPE_P applied to
   the field's type -- confirm against upstream.  */
626 if (!fld || INTEGRAL_TYPE_P (fld))
/* Any third field disqualifies the type.  */
631 if (TREE_CHAIN (fld))
637 /* Go through arguments of the CALL and for every one that looks like a member
638 pointer, check whether it can be safely declared pass-through and if so,
639 mark that to the corresponding item of jump FUNCTIONS. Return true iff
640 there are non-pass-through member pointers within the arguments. INFO
641 describes formal parameters of the caller. */
644 compute_pass_through_member_ptrs (struct ipa_node_params *info,
645 struct ipa_jump_func *functions,
648 bool undecided_members = false;
652 for (num = 0; num < gimple_call_num_args (call); num++)
654 arg = gimple_call_arg (call, num);
/* Only arguments whose type is structurally a member pointer matter.  */
656 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
658 if (TREE_CODE (arg) == PARM_DECL)
660 int index = ipa_get_param_decl_index (info, arg);
662 gcc_assert (index >=0);
/* Unmodified parameter member pointers pass through unchanged.  */
663 if (!ipa_is_param_modified (info, index))
665 functions[num].type = IPA_JF_PASS_THROUGH;
666 functions[num].value.pass_through.formal_id = index;
667 functions[num].value.pass_through.operation = NOP_EXPR;
/* Modified parameters and non-parameter member pointers remain undecided
   and must be examined by the constant-member-pointer analysis.  */
670 undecided_members = true;
673 undecided_members = true;
677 return undecided_members;
680 /* Simple function filling in a member pointer constant jump function (with PFN
681 and DELTA as the constant value) into JFUNC. */
684 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
685 tree pfn, tree delta)
687 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
688 jfunc->value.member_cst.pfn = pfn;
689 jfunc->value.member_cst.delta = delta;
692 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
693 return the rhs of its defining statement. */
696 get_ssa_def_if_simple_copy (tree rhs)
/* Follow chains of single-rhs copies until a default def or a non-copy.  */
698 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
700 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
702 if (gimple_assign_single_p (def_stmt))
703 rhs = gimple_assign_rhs1 (def_stmt);
710 /* Traverse statements from CALL backwards, scanning whether the argument ARG
711 which is a member pointer is filled in with constant values. If it is, fill
712 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
713 fields of the record type of the member pointer. To give an example, we
714 look for a pattern looking like the following:
716 D.2515.__pfn ={v} printStuff;
717 D.2515.__delta ={v} 0;
718 i_1 = doprinting (D.2515); */
721 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
722 tree delta_field, struct ipa_jump_func *jfunc)
724 gimple_stmt_iterator gsi;
725 tree method = NULL_TREE;
726 tree delta = NULL_TREE;
728 gsi = gsi_for_stmt (call);
/* Walk backwards from the call looking for the two field stores.  */
731 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
733 gimple stmt = gsi_stmt (gsi);
736 if (!gimple_assign_single_p (stmt))
739 lhs = gimple_assign_lhs (stmt);
740 rhs = gimple_assign_rhs1 (stmt);
/* Only stores into fields of ARG itself are of interest.  */
742 if (TREE_CODE (lhs) != COMPONENT_REF
743 || TREE_OPERAND (lhs, 0) != arg)
746 fld = TREE_OPERAND (lhs, 1);
747 if (!method && fld == method_field)
749 rhs = get_ssa_def_if_simple_copy (rhs);
/* The __pfn store must be the address of an actual method.  */
750 if (TREE_CODE (rhs) == ADDR_EXPR
751 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
752 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
754 method = TREE_OPERAND (rhs, 0);
/* Presumably this fill happens only once DELTA is already known; the
   guard lines are elided in this excerpt -- confirm against upstream.  */
757 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
765 if (!delta && fld == delta_field)
767 rhs = get_ssa_def_if_simple_copy (rhs);
/* The __delta store must be an integer constant.  */
768 if (TREE_CODE (rhs) == INTEGER_CST)
773 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
785 /* Go through the arguments of the CALL and for every member pointer within
786 tries determine whether it is a constant. If it is, create a corresponding
787 constant jump function in FUNCTIONS which is an array of jump functions
788 associated with the call. */
791 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
795 tree arg, method_field, delta_field;
797 for (num = 0; num < gimple_call_num_args (call); num++)
799 arg = gimple_call_arg (call, num);
/* Only arguments still undetermined and shaped like member pointers are
   analyzed; the field decls are produced by type_like_member_ptr_p.  */
801 if (functions[num].type == IPA_JF_UNKNOWN
802 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
804 determine_cst_member_ptr (call, arg, method_field, delta_field,
809 /* Compute jump function for all arguments of callsite CS and insert the
810 information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite (comment continuation elided in this excerpt).  */
814 ipa_compute_jump_functions (struct cgraph_edge *cs)
816 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
817 struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
/* Skip argumentless calls and edges already processed.  */
820 if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
822 arguments->jump_functions = GGC_CNEWVEC (struct ipa_jump_func,
823 ipa_get_cs_argument_count (arguments));
825 call = cs->call_stmt;
826 gcc_assert (is_gimple_call (call));
828 /* We will deal with constants and SSA scalars first: */
829 compute_scalar_jump_functions (info, arguments->jump_functions, call);
831 /* Let's check whether there are any potential member pointers and if so,
832 whether we can determine their functions as pass_through. */
833 if (!compute_pass_through_member_ptrs (info, arguments->jump_functions, call))
836 /* Finally, let's check whether we actually pass a new constant member
   pointer here (comment continuation elided in this excerpt).  */
838 compute_cst_member_ptr_arguments (arguments->jump_functions, call);
841 /* If RHS looks like a rhs of a statement loading pfn from a member
842 pointer formal parameter, return the parameter, otherwise return
843 NULL. If USE_DELTA, then we look for a use of the delta field
844 rather than the pfn. */
847 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
/* Expect a COMPONENT_REF whose base is directly a PARM_DECL of
   member-pointer shape.  */
853 if (TREE_CODE (rhs) != COMPONENT_REF)
856 rec = TREE_OPERAND (rhs, 0);
857 if (TREE_CODE (rec) != PARM_DECL
858 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
/* Check the accessed field is the one the caller asked about.  */
861 fld = TREE_OPERAND (rhs, 1);
862 if (use_delta ? (fld == delta_field) : (fld == ptr_field))
868 /* If STMT looks like a statement loading a value from a member pointer formal
869 parameter, this function returns that parameter. */
872 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
/* Only simple single-rhs assignments can be such loads.  */
876 if (!gimple_assign_single_p (stmt))
879 rhs = gimple_assign_rhs1 (stmt);
880 return ipa_get_member_ptr_load_param (rhs, use_delta);
883 /* Returns true iff T is an SSA_NAME defined by a statement. */
886 ipa_is_ssa_with_stmt_def (tree t)
/* Default definitions have no defining statement, hence the exclusion.  */
888 if (TREE_CODE (t) == SSA_NAME
889 && !SSA_NAME_IS_DEFAULT_DEF (t))
895 /* Find the indirect call graph edge corresponding to STMT and add to it all
896 information necessary to describe a call to a parameter number PARAM_INDEX.
897 NODE is the caller. POLYMORPHIC should be set to true iff the call is a
   virtual one (comment continuation elided in this excerpt).  */
901 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt,
904 struct cgraph_edge *cs;
906 cs = cgraph_edge (node, stmt);
907 cs->indirect_info->param_index = param_index;
908 cs->indirect_info->anc_offset = 0;
909 cs->indirect_info->polymorphic = polymorphic;
/* For virtual calls, also record the OBJ_TYPE_REF token and type so the
   target can be resolved later from a known binfo.  */
912 tree otr = gimple_call_fn (stmt);
913 tree type, token = OBJ_TYPE_REF_TOKEN (otr);
914 cs->indirect_info->otr_token = tree_low_cst (token, 1);
915 type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (otr)));
916 cs->indirect_info->otr_type = type;
920 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
921 (described by INFO). Currently it checks whether the call calls a pointer
922 that is a formal parameter and if so, the parameter is marked with the
923 called flag and an indirect call graph edge describing the call is created.
924 This is very simple for ordinary pointers represented in SSA but not-so-nice
925 when it comes to member pointers. The ugly part of this function does
926 nothing more than trying to match the pattern of such a call. An example of
927 such a pattern is the gimple dump below, the call is on the last line:
930 f$__delta_5 = f.__delta;
931 f$__pfn_24 = f.__pfn;
932 D.2496_3 = (int) f$__pfn_24;
933 D.2497_4 = D.2496_3 & 1;
940 D.2500_7 = (unsigned int) f$__delta_5;
941 D.2501_8 = &S + D.2500_7;
942 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
943 D.2503_10 = *D.2502_9;
944 D.2504_12 = f$__pfn_24 + -1;
945 D.2505_13 = (unsigned int) D.2504_12;
946 D.2506_14 = D.2503_10 + D.2505_13;
947 D.2507_15 = *D.2506_14;
948 iftmp.11_16 = (String:: *) D.2507_15;
951 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
952 D.2500_19 = (unsigned int) f$__delta_5;
953 D.2508_20 = &S + D.2500_19;
954 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
956 Such patterns are results of simple calls to a member pointer:
958 int doprinting (int (MyString::* f)(int) const)
960 MyString S ("somestring");
   (comment continuation elided in this excerpt)  */
967 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
968 struct ipa_node_params *info,
969 gimple call, tree target)
974 tree rec, rec2, cond;
977 basic_block bb, virt_bb, join;
/* Easy case: the called value is directly the incoming value of a formal
   parameter -- record an indirect-call note and we are done.  */
979 if (SSA_NAME_IS_DEFAULT_DEF (target))
981 tree var = SSA_NAME_VAR (target);
982 index = ipa_get_param_decl_index (info, var);
984 ipa_note_param_call (node, index, call, false);
988 /* Now we need to try to match the complex pattern of calling a member
   pointer (comment continuation elided in this excerpt).  */
991 if (!POINTER_TYPE_P (TREE_TYPE (target))
992 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
995 def = SSA_NAME_DEF_STMT (target);
996 if (gimple_code (def) != GIMPLE_PHI)
999 if (gimple_phi_num_args (def) != 2)
1002 /* First, we need to check whether one of these is a load from a member
1003 pointer that is a parameter to this function. */
1004 n1 = PHI_ARG_DEF (def, 0);
1005 n2 = PHI_ARG_DEF (def, 1);
1006 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1008 d1 = SSA_NAME_DEF_STMT (n1);
1009 d2 = SSA_NAME_DEF_STMT (n2);
/* Determine which phi operand is the direct pfn load (non-virtual branch)
   and which is the vtable-lookup result (virtual branch).  */
1011 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
1013 if (ipa_get_stmt_member_ptr_load_param (d2, false))
1016 bb = gimple_bb (d1);
1017 virt_bb = gimple_bb (d2);
1019 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
1021 bb = gimple_bb (d2);
1022 virt_bb = gimple_bb (d1);
1027 /* Second, we need to check that the basic blocks are laid out in the way
1028 corresponding to the pattern. */
1030 join = gimple_bb (def);
1031 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1032 || single_pred (virt_bb) != bb
1033 || single_succ (virt_bb) != join)
1036 /* Third, let's see that the branching is done depending on the least
1037 significant bit of the pfn. */
1039 branch = last_stmt (bb);
1040 if (gimple_code (branch) != GIMPLE_COND)
1043 if (gimple_cond_code (branch) != NE_EXPR
1044 || !integer_zerop (gimple_cond_rhs (branch)))
1047 cond = gimple_cond_lhs (branch);
1048 if (!ipa_is_ssa_with_stmt_def (cond))
1051 def = SSA_NAME_DEF_STMT (cond);
/* The condition must compute (something & 1).  */
1052 if (!is_gimple_assign (def)
1053 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1054 || !integer_onep (gimple_assign_rhs2 (def)))
1057 cond = gimple_assign_rhs1 (def);
1058 if (!ipa_is_ssa_with_stmt_def (cond))
1061 def = SSA_NAME_DEF_STMT (cond);
/* Look through an optional integer conversion before the bit test.  */
1063 if (is_gimple_assign (def)
1064 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1066 cond = gimple_assign_rhs1 (def);
1067 if (!ipa_is_ssa_with_stmt_def (cond))
1069 def = SSA_NAME_DEF_STMT (cond);
/* The tested value must be a load from the same member pointer; which
   field depends on the target's ptrmemfunc vbit convention.  */
1072 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1073 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1074 == ptrmemfunc_vbit_in_delta));
/* Only unmodified parameters can be safely assumed still to hold the
   member pointer at the call.  */
1079 index = ipa_get_param_decl_index (info, rec);
1080 if (index >= 0 && !ipa_is_param_modified (info, index))
1081 ipa_note_param_call (node, index, call, false);
1086 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1087 object referenced in the expression is a formal parameter of the caller
1088 (described by INFO), create a call note for the statement. */
1091 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1092 struct ipa_node_params *info, gimple call,
1095 tree obj = OBJ_TYPE_REF_OBJECT (target);
/* Strip an address-of and any chain of COMPONENT_REFs down to the
   underlying dereference.  */
1099 if (TREE_CODE (obj) == ADDR_EXPR)
1103 obj = TREE_OPERAND (obj, 0);
1105 while (TREE_CODE (obj) == COMPONENT_REF);
1106 if (TREE_CODE (obj) != INDIRECT_REF)
1108 obj = TREE_OPERAND (obj, 0);
/* The object pointer must be the unmodified incoming value of a formal.  */
1111 if (TREE_CODE (obj) != SSA_NAME
1112 || !SSA_NAME_IS_DEFAULT_DEF (obj))
1115 var = SSA_NAME_VAR (obj);
1116 index = ipa_get_param_decl_index (info, var);
/* Record a polymorphic indirect-call note for this statement.  */
1119 ipa_note_param_call (node, index, call, true);
1122 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1123 of the caller (described by INFO). */
1126 ipa_analyze_call_uses (struct cgraph_node *node,
1127 struct ipa_node_params *info, gimple call)
1129 tree target = gimple_call_fn (call);
/* Dispatch on the shape of the callee expression: plain pointer calls
   vs. C++ virtual calls through an OBJ_TYPE_REF.  */
1131 if (TREE_CODE (target) == SSA_NAME)
1132 ipa_analyze_indirect_call_uses (node, info, call, target);
1133 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1134 ipa_analyze_virtual_call_uses (node, info, call, target);
1138 /* Analyze the call statement STMT with respect to formal parameters (described
1139 in INFO) of caller given by NODE. Currently it only checks whether formal
1140 parameters are called. */
1143 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
/* Only call statements are of interest at the moment.  */
1146 if (is_gimple_call (stmt))
1147 ipa_analyze_call_uses (node, info, stmt);
1150 /* Scan the function body of NODE and inspect the uses of formal parameters.
1151 Store the findings in various structures of the associated ipa_node_params
1152 structure, such as parameter flags, notes etc. */
1155 ipa_analyze_params_uses (struct cgraph_node *node)
1157 tree decl = node->decl;
1159 struct function *func;
1160 gimple_stmt_iterator gsi;
1161 struct ipa_node_params *info = IPA_NODE_REF (node);
/* Nothing to do without parameters or when the analysis already ran.  */
1163 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1166 func = DECL_STRUCT_FUNCTION (decl);
/* Examine every statement of every basic block of the body.  */
1167 FOR_EACH_BB_FN (bb, func)
1169 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1171 gimple stmt = gsi_stmt (gsi);
1172 ipa_analyze_stmt_uses (node, info, stmt);
1176 info->uses_analysis_done = 1;
1179 /* Update the jump function DST when the call graph edge correspondng to SRC is
1180 is being inlined, knowing that DST is of type ancestor and src of known
   type (comment continuation elided in this excerpt).  */
1184 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1185 struct ipa_jump_func *dst)
/* Walk SRC's binfo to the base at DST's ancestor offset/type.  */
1189 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1190 dst->value.ancestor.offset,
1191 dst->value.ancestor.type);
/* If a matching base was found DST becomes a known-type function,
   otherwise degrade to unknown.  */
1194 dst->type = IPA_JF_KNOWN_TYPE;
1195 dst->value.base_binfo = new_binfo;
1198 dst->type = IPA_JF_UNKNOWN;
1201 /* Update the jump functions associated with call graph edge E when the call
1202 graph edge CS is being inlined, assuming that E->caller is already (possibly
1203 indirectly) inlined into CS->callee and that E has not been inlined. */
1206 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1207 struct cgraph_edge *e)
1209 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1210 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1211 int count = ipa_get_cs_argument_count (args);
/* Compose each of E's jump functions with the corresponding one of CS.  */
1214 for (i = 0; i < count; i++)
1216 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1218 if (dst->type == IPA_JF_ANCESTOR)
1220 struct ipa_jump_func *src;
1222 /* Variable number of arguments can cause havoc if we try to access
1223 one that does not exist in the inlined edge. So make sure we
1225 do not (comment continuation elided in this excerpt).  */
if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
1227 dst->type = IPA_JF_UNKNOWN;
1231 src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
/* known_type o ancestor: walk the binfo to the ancestor base.  */
1232 if (src->type == IPA_JF_KNOWN_TYPE)
1233 combine_known_type_and_ancestor_jfs (src, dst);
/* const o ancestor: try to derive a known type from the constant and
   then combine as above.  */
1234 else if (src->type == IPA_JF_CONST)
1236 struct ipa_jump_func kt_func;
1238 kt_func.type = IPA_JF_UNKNOWN;
1239 compute_known_type_jump_func (src->value.constant, &kt_func);
1240 if (kt_func.type == IPA_JF_KNOWN_TYPE)
1241 combine_known_type_and_ancestor_jfs (&kt_func, dst);
1243 dst->type = IPA_JF_UNKNOWN;
/* simple pass_through o ancestor: just retarget the formal id.  */
1245 else if (src->type == IPA_JF_PASS_THROUGH
1246 && src->value.pass_through.operation == NOP_EXPR)
1247 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
/* ancestor o ancestor: offsets accumulate.  */
1248 else if (src->type == IPA_JF_ANCESTOR)
1250 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
1251 dst->value.ancestor.offset += src->value.ancestor.offset;
1254 dst->type = IPA_JF_UNKNOWN;
1256 else if (dst->type == IPA_JF_PASS_THROUGH)
1258 struct ipa_jump_func *src;
1259 /* We must check range due to calls with variable number of arguments
1260 and we cannot combine jump functions with operations. */
1261 if (dst->value.pass_through.operation == NOP_EXPR
1262 && (dst->value.pass_through.formal_id
1263 < ipa_get_cs_argument_count (top)))
1265 src = ipa_get_ith_jump_func (top,
1266 dst->value.pass_through.formal_id);
1270 dst->type = IPA_JF_UNKNOWN;
1275 /* If TARGET is an addr_expr of a function declaration, make it the destination
1276 of an indirect edge IE and return the edge. Otherwise, return NULL. */
1278 static struct cgraph_edge *
1279 make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
1281 struct cgraph_node *callee;
/* Only &function_decl targets can be resolved to a concrete callee.  */
1283 if (TREE_CODE (target) != ADDR_EXPR)
1285 target = TREE_OPERAND (target, 0);
1286 if (TREE_CODE (target) != FUNCTION_DECL)
1288 callee = cgraph_node (target);
1292 cgraph_make_edge_direct (ie, callee);
/* Dump the discovery for -fdump-ipa-* diagnostics.  */
1295 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1296 "(%s/%i -> %s/%i) for stmt ",
1297 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1298 cgraph_node_name (ie->caller), ie->caller->uid,
1299 cgraph_node_name (ie->callee), ie->callee->uid);
1302 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1304 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
1309 /* Try to find a destination for indirect edge IE that corresponds to a simple
1310 call or a call of a member function pointer and where the destination is a
1311 pointer formal parameter described by jump function JFUNC. If it can be
1312 determined, return the newly direct edge, otherwise return NULL. */
1314 static struct cgraph_edge *
1315 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1316 struct ipa_jump_func *jfunc)
/* The target is either a constant function address or the PFN field of a
   constant member pointer; any other jump function type yields no target
   (the bail-out path is elided in this listing).  */
1320 if (jfunc->type == IPA_JF_CONST)
1321 target = jfunc->value.constant;
1322 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1323 target = jfunc->value.member_cst.pfn;
1327 return make_edge_direct_to_target (ie, target);
1330 /* Try to find a destination for indirect edge IE that corresponds to a
1331 virtual call based on a formal parameter which is described by jump
1332 function JFUNC and if it can be determined, make it direct and return the
1333 direct edge. Otherwise, return NULL. */
1335 static struct cgraph_edge *
1336 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1337 struct ipa_jump_func *jfunc)
1339 tree binfo, type, target;
1340 HOST_WIDE_INT token;
/* Determine the BINFO describing the dynamic type of the object, either
   directly from a known-type jump function or from a constant address.  */
1342 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1343 binfo = jfunc->value.base_binfo;
1344 else if (jfunc->type == IPA_JF_CONST)
1346 tree cst = jfunc->value.constant;
1347 if (TREE_CODE (cst) == ADDR_EXPR)
1348 binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (cst, 0),
/* Use the OBJ_TYPE_REF token and type recorded on the edge to look up the
   called method within the BINFO at the ancestor offset.  */
1359 token = ie->indirect_info->otr_token;
1360 type = ie->indirect_info->otr_type;
1361 binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
1363 target = gimple_fold_obj_type_ref_known_binfo (token, binfo);
1368 return make_edge_direct_to_target (ie, target);
1373 /* Update the param called notes associated with NODE when CS is being inlined,
1374 assuming NODE is (potentially indirectly) inlined into CS->callee.
1375 Moreover, if the callee is discovered to be constant, create a new cgraph
1376 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1377 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1380 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
1381 struct cgraph_node *node,
1382 VEC (cgraph_edge_p, heap) **new_edges)
1384 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1385 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
1388 ipa_check_create_edge_args ();
/* Walk all indirect call sites of NODE; NEXT_IE is cached because the
   current edge may be turned direct below.  */
1390 for (ie = node->indirect_calls; ie; ie = next_ie)
1392 struct cgraph_indirect_call_info *ici = ie->indirect_info;
1393 struct ipa_jump_func *jfunc;
1395 next_ie = ie->next_callee;
/* Skip edges already handled in an earlier round of indirect inlining.  */
1396 if (bitmap_bit_p (iinlining_processed_edges, ie->uid))
1399 /* If we ever use indirect edges for anything other than indirect
1400 inlining, we will need to skip those with negative param_indices. */
1401 if (ici->param_index == -1)
1404 /* We must check range due to calls with variable number of arguments: */
1405 if (ici->param_index >= ipa_get_cs_argument_count (top))
1407 bitmap_set_bit (iinlining_processed_edges, ie->uid);
/* Translate the parameter index through the jump function of the call
   being inlined: pass-throughs and ancestors re-map it to a formal of
   the caller.  */
1411 jfunc = ipa_get_ith_jump_func (top, ici->param_index);
1412 if (jfunc->type == IPA_JF_PASS_THROUGH
1413 && jfunc->value.pass_through.operation == NOP_EXPR)
1414 ici->param_index = jfunc->value.pass_through.formal_id;
1415 else if (jfunc->type == IPA_JF_ANCESTOR)
1417 ici->param_index = jfunc->value.ancestor.formal_id;
1418 ici->anc_offset += jfunc->value.ancestor.offset;
1421 /* Either we can find a destination for this edge now or never. */
1422 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1424 if (ici->polymorphic)
1425 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
1427 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
1429 if (new_direct_edge)
1431 new_direct_edge->indirect_inlining_edge = 1;
1434 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
/* NOTE(review): TOP is re-read here, presumably because creating the new
   edge can reallocate the underlying edge-args vector — confirm.  */
1436 top = IPA_EDGE_REF (cs);
1445 /* Recursively traverse subtree of NODE (including node) made of inlined
1446 cgraph_edges when CS has been inlined and invoke
1447 update_indirect_edges_after_inlining on all nodes and
1448 update_jump_functions_after_inlining on all non-inlined edges that lead out
1449 of this subtree. Newly discovered indirect edges will be added to
1450 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1454 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1455 struct cgraph_node *node,
1456 VEC (cgraph_edge_p, heap) **new_edges)
1458 struct cgraph_edge *e;
1461 res = update_indirect_edges_after_inlining (cs, node, new_edges);
/* Recurse into callees that were themselves inlined; for edges leaving the
   inlined subtree, update their jump functions instead.  */
1463 for (e = node->callees; e; e = e->next_callee)
1464 if (!e->inline_failed)
1465 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges)
1467 update_jump_functions_after_inlining (cs, e);
1472 /* Update jump functions and call note functions on inlining the call site CS.
1473 CS is expected to lead to a node already cloned by
1474 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1475 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1479 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1480 VEC (cgraph_edge_p, heap) **new_edges)
1482 /* FIXME lto: We do not stream out indirect call information. */
1486 /* Do nothing if the preparation phase has not been carried out yet
1487 (i.e. during early inlining). */
1488 if (!ipa_node_params_vector)
1490 gcc_assert (ipa_edge_args_vector);
1492 return propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1495 /* Frees all dynamically allocated structures that the argument info points
to, and clears ARGS itself.  */
1499 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
1501 if (args->jump_functions)
1502 ggc_free (args->jump_functions);
/* Clear the whole structure so stale pointers cannot be reused.  */
1504 memset (args, 0, sizeof (*args));
1507 /* Free all ipa_edge structures. */
1510 ipa_free_all_edge_args (void)
1513 struct ipa_edge_args *args;
/* Release the per-edge substructures, then the vector itself.  */
1516 VEC_iterate (ipa_edge_args_t, ipa_edge_args_vector, i, args);
1518 ipa_free_edge_args_substructures (args);
1520 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
1521 ipa_edge_args_vector = NULL;
1524 /* Frees all dynamically allocated structures that the param info points
to, and clears INFO itself.  */
1528 ipa_free_node_params_substructures (struct ipa_node_params *info)
1531 free (info->params);
/* Clear the whole structure so stale pointers cannot be reused.  */
1533 memset (info, 0, sizeof (*info));
1536 /* Free all ipa_node_params structures. */
1539 ipa_free_all_node_params (void)
1542 struct ipa_node_params *info;
/* Release the per-node substructures, then the vector itself.  */
1545 VEC_iterate (ipa_node_params_t, ipa_node_params_vector, i, info);
1547 ipa_free_node_params_substructures (info);
1549 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
1550 ipa_node_params_vector = NULL;
1553 /* Hook that is called by cgraph.c when an edge is removed. */
1556 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
1558 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
1559 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
1560 <= (unsigned)cs->uid)
1562 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1565 /* Hook that is called by cgraph.c when a node is removed. */
1568 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1570 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
1571 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
1572 <= (unsigned)node->uid)
1574 ipa_free_node_params_substructures (IPA_NODE_REF (node));
1577 /* Helper function to duplicate an array of size N that is at SRC and store a
1578 pointer to it to DST. Nothing is done if SRC is NULL. */
1581 duplicate_array (void *src, size_t n)
/* NOTE(review): the body is elided in this listing; per the comment above it
   heap-duplicates N bytes from SRC and does nothing for NULL SRC.  The
   comment's mention of DST appears stale relative to this signature, which
   presumably returns the copy — confirm against the full source.  */
1593 /* Like duplicate_array but in GGC (garbage-collected) memory. */
1596 duplicate_ggc_array (void *src, size_t n)
/* NOTE(review): body elided in this listing.  */
1608 /* Hook that is called by cgraph.c when an edge is duplicated. */
1611 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
1612 __attribute__((unused)) void *data)
1614 struct ipa_edge_args *old_args, *new_args;
1617 ipa_check_create_edge_args ();
1619 old_args = IPA_EDGE_REF (src);
1620 new_args = IPA_EDGE_REF (dst);
/* Copy the argument count and deep-copy the jump functions into GGC
   memory for the duplicated edge.  */
1622 arg_count = ipa_get_cs_argument_count (old_args);
1623 ipa_set_cs_argument_count (new_args, arg_count);
1624 new_args->jump_functions = (struct ipa_jump_func *)
1625 duplicate_ggc_array (old_args->jump_functions,
1626 sizeof (struct ipa_jump_func) * arg_count);
/* Preserve the "already processed by indirect inlining" mark.  */
1628 if (iinlining_processed_edges
1629 && bitmap_bit_p (iinlining_processed_edges, src->uid))
1630 bitmap_set_bit (iinlining_processed_edges, dst->uid);
1633 /* Hook that is called by cgraph.c when a node is duplicated. */
1636 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
1637 __attribute__((unused)) void *data)
1639 struct ipa_node_params *old_info, *new_info;
1642 ipa_check_create_node_params ();
1643 old_info = IPA_NODE_REF (src);
1644 new_info = IPA_NODE_REF (dst);
/* Deep-copy the parameter descriptors and carry over the IPA-CP fields.  */
1645 param_count = ipa_get_param_count (old_info);
1647 ipa_set_param_count (new_info, param_count);
1648 new_info->params = (struct ipa_param_descriptor *)
1649 duplicate_array (old_info->params,
1650 sizeof (struct ipa_param_descriptor) * param_count);
1651 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
1652 new_info->count_scale = old_info->count_scale;
1655 /* Register our cgraph hooks if they are not already there. */
1658 ipa_register_cgraph_hooks (void)
/* Each holder doubles as an "already registered" flag, so registration
   is idempotent.  */
1660 if (!edge_removal_hook_holder)
1661 edge_removal_hook_holder =
1662 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
1663 if (!node_removal_hook_holder)
1664 node_removal_hook_holder =
1665 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
1666 if (!edge_duplication_hook_holder)
1667 edge_duplication_hook_holder =
1668 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
1669 if (!node_duplication_hook_holder)
1670 node_duplication_hook_holder =
1671 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
1674 /* Unregister all our cgraph hooks and clear the holders. */
1677 ipa_unregister_cgraph_hooks (void)
1679 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
1680 edge_removal_hook_holder = NULL;
1681 cgraph_remove_node_removal_hook (node_removal_hook_holder);
1682 node_removal_hook_holder = NULL;
1683 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
1684 edge_duplication_hook_holder = NULL;
1685 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
1686 node_duplication_hook_holder = NULL;
1689 /* Allocate all data structures necessary for indirect inlining. */
1692 ipa_create_all_structures_for_iinln (void)
1694 iinlining_processed_edges = BITMAP_ALLOC (NULL);
1697 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1698 longer needed after ipa-cp. */
1701 ipa_free_all_structures_after_ipa_cp (void)
/* When indirect inlining is enabled the structures are still needed and
   are freed later by ipa_free_all_structures_after_iinln.  */
1703 if (!flag_indirect_inlining)
1705 ipa_free_all_edge_args ();
1706 ipa_free_all_node_params ();
1707 ipa_unregister_cgraph_hooks ();
1711 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1712 longer needed after indirect inlining. */
1715 ipa_free_all_structures_after_iinln (void)
1717 BITMAP_FREE (iinlining_processed_edges);
1719 ipa_free_all_edge_args ();
1720 ipa_free_all_node_params ();
1721 ipa_unregister_cgraph_hooks ();
1724 /* Print ipa_tree_map data structures of all functions in the
call graph to F.  */
1728 ipa_print_node_params (FILE * f, struct cgraph_node *node)
1732 struct ipa_node_params *info;
/* Nothing to print for nodes that have not been analyzed.  */
1734 if (!node->analyzed)
1736 info = IPA_NODE_REF (node);
1737 fprintf (f, " function %s parameter descriptors:\n",
1738 cgraph_node_name (node));
1739 count = ipa_get_param_count (info);
1740 for (i = 0; i < count; i++)
1742 temp = ipa_get_param (info, i);
1743 if (TREE_CODE (temp) == PARM_DECL)
1744 fprintf (f, " param %d : %s", i,
1746 ? (*lang_hooks.decl_printable_name) (temp, 2)
1748 if (ipa_is_param_modified (info, i))
1749 fprintf (f, " modified");
1754 /* Print ipa_tree_map data structures of all functions in the
call graph to F.  */
1758 ipa_print_all_params (FILE * f)
1760 struct cgraph_node *node;
1762 fprintf (f, "\nFunction parameters:\n");
1763 for (node = cgraph_nodes; node; node = node->next)
1764 ipa_print_node_params (f, node);
1767 /* Return a heap allocated vector containing formal parameters of FNDECL. */
1770 ipa_get_vector_of_formal_parms (tree fndecl)
1772 VEC(tree, heap) *args;
/* Pre-size the vector, then push each PARM_DECL in declaration order.  */
1776 count = count_formal_params_1 (fndecl);
1777 args = VEC_alloc (tree, heap, count);
1778 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = TREE_CHAIN (parm))
1779 VEC_quick_push (tree, args, parm);
1784 /* Return a heap allocated vector containing types of formal parameters of
1785 function type FNTYPE. */
1787 static inline VEC(tree, heap) *
1788 get_vector_of_formal_parm_types (tree fntype)
1790 VEC(tree, heap) *types;
/* First pass counts the TYPE_ARG_TYPES entries, second pass copies them.  */
1794 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
1797 types = VEC_alloc (tree, heap, count);
1798 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
1799 VEC_quick_push (tree, types, TREE_VALUE (t));
1804 /* Modify the function declaration FNDECL and its type according to the plan in
1805 ADJUSTMENTS. It also sets base fields of individual adjustments structures
1806 to reflect the actual parameters being modified which are determined by the
1807 base_index field. */
1810 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
1811 const char *synth_parm_prefix)
1813 VEC(tree, heap) *oparms, *otypes;
1814 tree orig_type, new_type = NULL;
1815 tree old_arg_types, t, new_arg_types = NULL;
1816 tree parm, *link = &DECL_ARGUMENTS (fndecl);
1817 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
1818 tree new_reversed = NULL;
1819 bool care_for_types, last_parm_void;
1821 if (!synth_parm_prefix)
1822 synth_parm_prefix = "SYNTH";
1824 oparms = ipa_get_vector_of_formal_parms (fndecl);
1825 orig_type = TREE_TYPE (fndecl);
1826 old_arg_types = TYPE_ARG_TYPES (orig_type);
1828 /* The following test is an ugly hack, some functions simply don't have any
1829 arguments in their type. This is probably a bug but well... */
1830 care_for_types = (old_arg_types != NULL_TREE);
1833 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
1835 otypes = get_vector_of_formal_parm_types (orig_type);
1837 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
1839 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
1843 last_parm_void = false;
/* Build the new DECL_ARGUMENTS chain and argument-type list according to
   each adjustment: kept parameters are re-linked, reductions get a freshly
   synthesized PARM_DECL, removed ones are simply skipped.  */
1847 for (i = 0; i < len; i++)
1849 struct ipa_parm_adjustment *adj;
1852 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
1853 parm = VEC_index (tree, oparms, adj->base_index);
1856 if (adj->copy_param)
1859 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
1863 link = &TREE_CHAIN (parm);
1865 else if (!adj->remove_param)
1871 ptype = build_pointer_type (adj->type);
1876 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
/* Synthesize an artificial replacement parameter for the reduction.  */
1878 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
1880 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
1882 DECL_ARTIFICIAL (new_parm) = 1;
1883 DECL_ARG_TYPE (new_parm) = ptype;
1884 DECL_CONTEXT (new_parm) = fndecl;
1885 TREE_USED (new_parm) = 1;
1886 DECL_IGNORED_P (new_parm) = 1;
1887 layout_decl (new_parm, 0);
1889 add_referenced_var (new_parm);
1890 mark_sym_for_renaming (new_parm);
1892 adj->reduction = new_parm;
1896 link = &TREE_CHAIN (new_parm);
/* The list was built in reverse; restore order and re-terminate with
   void if the original list was void-terminated.  */
1904 new_reversed = nreverse (new_arg_types);
1908 TREE_CHAIN (new_arg_types) = void_list_node;
1910 new_reversed = void_list_node;
1914 /* Use copy_node to preserve as much as possible from original type
1915 (debug info, attribute lists etc.)
1916 Exception is METHOD_TYPEs must have THIS argument.
1917 When we are asked to remove it, we need to build new FUNCTION_TYPE
instead.  */
1919 if (TREE_CODE (orig_type) != METHOD_TYPE
1920 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
1921 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
1923 new_type = copy_node (orig_type);
1924 TYPE_ARG_TYPES (new_type) = new_reversed;
1929 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
1931 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
1932 DECL_VINDEX (fndecl) = NULL_TREE;
1935 /* This is a new type, not a copy of an old type. Need to reassociate
1936 variants. We can handle everything except the main variant lazily. */
1937 t = TYPE_MAIN_VARIANT (orig_type);
1940 TYPE_MAIN_VARIANT (new_type) = t;
1941 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
1942 TYPE_NEXT_VARIANT (t) = new_type;
1946 TYPE_MAIN_VARIANT (new_type) = new_type;
1947 TYPE_NEXT_VARIANT (new_type) = NULL;
1950 TREE_TYPE (fndecl) = new_type;
1952 VEC_free (tree, heap, otypes);
1953 VEC_free (tree, heap, oparms);
1956 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
1957 If this is a directly recursive call, CS must be NULL. Otherwise it must
1958 contain the corresponding call graph edge. */
1961 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
1962 ipa_parm_adjustment_vec adjustments)
1964 VEC(tree, heap) *vargs;
1966 gimple_stmt_iterator gsi;
1970 len = VEC_length (ipa_parm_adjustment_t, adjustments);
1971 vargs = VEC_alloc (tree, heap, len);
1973 gsi = gsi_for_stmt (stmt);
/* Build the new argument vector: copied parameters keep their original
   argument, reductions get an expression for the piece of the original
   aggregate, removed parameters contribute nothing.  */
1974 for (i = 0; i < len; i++)
1976 struct ipa_parm_adjustment *adj;
1978 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
1980 if (adj->copy_param)
1982 tree arg = gimple_call_arg (stmt, adj->base_index);
1984 VEC_quick_push (tree, vargs, arg);
1986 else if (!adj->remove_param)
1988 tree expr, orig_expr;
1989 bool allow_ptr, repl_found;
1991 orig_expr = expr = gimple_call_arg (stmt, adj->base_index);
1992 if (TREE_CODE (expr) == ADDR_EXPR)
1995 expr = TREE_OPERAND (expr, 0);
/* Try to express the reduced piece as a direct reference at the
   requested offset/type.  */
2000 repl_found = build_ref_for_offset (&expr, TREE_TYPE (expr),
2001 adj->offset, adj->type,
2006 expr = build_fold_addr_expr (expr);
/* Fallback: compute the address arithmetically and dereference.  */
2010 tree ptrtype = build_pointer_type (adj->type);
2012 if (!POINTER_TYPE_P (TREE_TYPE (expr)))
2013 expr = build_fold_addr_expr (expr);
2014 if (!useless_type_conversion_p (ptrtype, TREE_TYPE (expr)))
2015 expr = fold_convert (ptrtype, expr);
2016 expr = fold_build2 (POINTER_PLUS_EXPR, ptrtype, expr,
2017 build_int_cst (sizetype,
2018 adj->offset / BITS_PER_UNIT));
2020 expr = fold_build1 (INDIRECT_REF, adj->type, expr);
2022 expr = force_gimple_operand_gsi (&gsi, expr,
2024 || is_gimple_reg_type (adj->type),
2025 NULL, true, GSI_SAME_STMT);
2026 VEC_quick_push (tree, vargs, expr);
2030 if (dump_file && (dump_flags & TDF_DETAILS))
2032 fprintf (dump_file, "replacing stmt:");
2033 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
/* Build the replacement call, carrying over LHS, location, block,
   flags and the static chain from the old statement.  */
2036 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
2037 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2038 VEC_free (tree, heap, vargs);
2039 if (gimple_call_lhs (stmt))
2040 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2042 gimple_set_block (new_stmt, gimple_block (stmt));
2043 if (gimple_has_location (stmt))
2044 gimple_set_location (new_stmt, gimple_location (stmt));
2045 gimple_call_copy_flags (new_stmt, stmt);
2046 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2048 if (dump_file && (dump_flags & TDF_DETAILS))
2050 fprintf (dump_file, "with stmt:");
2051 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2052 fprintf (dump_file, "\n");
2054 gsi_replace (&gsi, new_stmt, true);
2056 cgraph_set_call_stmt (cs, new_stmt);
2057 update_ssa (TODO_update_ssa);
2058 free_dominance_info (CDI_DOMINATORS);
2061 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2064 index_in_adjustments_multiple_times_p (int base_index,
2065 ipa_parm_adjustment_vec adjustments)
2067 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
/* Linear scan counting occurrences of BASE_INDEX.  */
2070 for (i = 0; i < len; i++)
2072 struct ipa_parm_adjustment *adj;
2073 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2075 if (adj->base_index == base_index)
2087 /* Return adjustments that should have the same effect on function parameters
2088 and call arguments as if they were first changed according to adjustments in
2089 INNER and then by adjustments in OUTER. */
2091 ipa_parm_adjustment_vec
2092 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
2093 ipa_parm_adjustment_vec outer)
2095 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
2096 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
2098 ipa_parm_adjustment_vec adjustments, tmp;
/* TMP holds the non-removal inner adjustments so that OUTER's base
   indices can be looked up against the surviving parameters.  */
2100 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
2101 for (i = 0; i < inlen; i++)
2103 struct ipa_parm_adjustment *n;
2104 n = VEC_index (ipa_parm_adjustment_t, inner, i);
2106 if (n->remove_param)
2109 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
2112 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
/* Compose each outer adjustment with the inner adjustment it refers to.  */
2113 for (i = 0; i < outlen; i++)
2115 struct ipa_parm_adjustment *r;
2116 struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
2118 struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
2121 gcc_assert (!in->remove_param);
2122 if (out->remove_param)
2124 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
2126 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2127 memset (r, 0, sizeof (*r));
2128 r->remove_param = true;
2133 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2134 memset (r, 0, sizeof (*r));
2135 r->base_index = in->base_index;
2136 r->type = out->type;
2138 /* FIXME: Create nonlocal value too. */
/* Offsets compose additively; a plain copy on either side contributes
   no offset of its own.  */
2140 if (in->copy_param && out->copy_param)
2141 r->copy_param = true;
2142 else if (in->copy_param)
2143 r->offset = out->offset;
2144 else if (out->copy_param)
2145 r->offset = in->offset;
2147 r->offset = in->offset + out->offset;
/* Finally re-emit the inner removals, which outer could not refer to.  */
2150 for (i = 0; i < inlen; i++)
2152 struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
2155 if (n->remove_param)
2156 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
2159 VEC_free (ipa_parm_adjustment_t, heap, tmp);
2163 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
2164 friendly way, assuming they are meant to be applied to FNDECL. */
2167 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2170 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2172 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2174 fprintf (file, "IPA param adjustments: ");
2175 for (i = 0; i < len; i++)
2177 struct ipa_parm_adjustment *adj;
2178 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2181 fprintf (file, " ");
/* One line per adjustment: index, original parameter, then whichever
   optional fields are set.  */
2185 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2186 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2189 fprintf (file, ", base: ");
2190 print_generic_expr (file, adj->base, 0);
2194 fprintf (file, ", reduction: ");
2195 print_generic_expr (file, adj->reduction, 0);
2197 if (adj->new_ssa_base)
2199 fprintf (file, ", new_ssa_base: ");
2200 print_generic_expr (file, adj->new_ssa_base, 0);
2203 if (adj->copy_param)
2204 fprintf (file, ", copy_param");
2205 else if (adj->remove_param)
2206 fprintf (file, ", remove_param");
2208 fprintf (file, ", offset %li", (long) adj->offset);
2210 fprintf (file, ", by_ref");
2211 print_node_brief (file, ", type: ", adj->type, 0);
2212 fprintf (file, "\n");
2214 VEC_free (tree, heap, parms);
2217 /* Stream out jump function JUMP_FUNC to OB. */
2220 ipa_write_jump_function (struct output_block *ob,
2221 struct ipa_jump_func *jump_func)
/* The type tag goes first; the payload written per case below must stay
   in sync with ipa_read_jump_function.  */
2223 lto_output_uleb128_stream (ob->main_stream,
2226 switch (jump_func->type)
2228 case IPA_JF_UNKNOWN:
2230 case IPA_JF_KNOWN_TYPE:
2231 lto_output_tree (ob, jump_func->value.base_binfo, true);
2234 lto_output_tree (ob, jump_func->value.constant, true);
2236 case IPA_JF_PASS_THROUGH:
2237 lto_output_tree (ob, jump_func->value.pass_through.operand, true);
2238 lto_output_uleb128_stream (ob->main_stream,
2239 jump_func->value.pass_through.formal_id);
2240 lto_output_uleb128_stream (ob->main_stream,
2241 jump_func->value.pass_through.operation);
2243 case IPA_JF_ANCESTOR:
2244 lto_output_uleb128_stream (ob->main_stream,
2245 jump_func->value.ancestor.offset);
2246 lto_output_tree (ob, jump_func->value.ancestor.type, true);
2247 lto_output_uleb128_stream (ob->main_stream,
2248 jump_func->value.ancestor.formal_id);
2250 case IPA_JF_CONST_MEMBER_PTR:
2251 lto_output_tree (ob, jump_func->value.member_cst.pfn, true);
2252 lto_output_tree (ob, jump_func->value.member_cst.delta, false);
2257 /* Read in jump function JUMP_FUNC from IB. */
2260 ipa_read_jump_function (struct lto_input_block *ib,
2261 struct ipa_jump_func *jump_func,
2262 struct data_in *data_in)
/* Must mirror ipa_write_jump_function exactly, field for field.  */
2264 jump_func->type = (enum jump_func_type) lto_input_uleb128 (ib);
2266 switch (jump_func->type)
2268 case IPA_JF_UNKNOWN:
2270 case IPA_JF_KNOWN_TYPE:
2271 jump_func->value.base_binfo = lto_input_tree (ib, data_in);
2274 jump_func->value.constant = lto_input_tree (ib, data_in);
2276 case IPA_JF_PASS_THROUGH:
2277 jump_func->value.pass_through.operand = lto_input_tree (ib, data_in);
2278 jump_func->value.pass_through.formal_id = lto_input_uleb128 (ib);
2279 jump_func->value.pass_through.operation = (enum tree_code) lto_input_uleb128 (ib);
2281 case IPA_JF_ANCESTOR:
2282 jump_func->value.ancestor.offset = lto_input_uleb128 (ib);
2283 jump_func->value.ancestor.type = lto_input_tree (ib, data_in);
2284 jump_func->value.ancestor.formal_id = lto_input_uleb128 (ib);
2286 case IPA_JF_CONST_MEMBER_PTR:
2287 jump_func->value.member_cst.pfn = lto_input_tree (ib, data_in);
2288 jump_func->value.member_cst.delta = lto_input_tree (ib, data_in);
2293 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
2294 relevant to indirect inlining to OB. */
2297 ipa_write_indirect_edge_info (struct output_block *ob,
2298 struct cgraph_edge *cs)
2300 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2301 struct bitpack_d *bp;
2303 lto_output_sleb128_stream (ob->main_stream, ii->param_index);
2304 lto_output_sleb128_stream (ob->main_stream, ii->anc_offset);
/* The polymorphic flag travels in a one-bit bitpack.  */
2305 bp = bitpack_create ();
2306 bp_pack_value (bp, ii->polymorphic, 1);
2307 lto_output_bitpack (ob->main_stream, bp);
2308 bitpack_delete (bp);
/* Virtual calls additionally carry the OBJ_TYPE_REF token and type.  */
2310 if (ii->polymorphic)
2312 lto_output_sleb128_stream (ob->main_stream, ii->otr_token);
2313 lto_output_tree (ob, ii->otr_type, true);
2317 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2318 relevant to indirect inlining from IB. */
2321 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2322 struct data_in *data_in ATTRIBUTE_UNUSED,
2323 struct cgraph_edge *cs)
2325 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2326 struct bitpack_d *bp;
/* Must mirror ipa_write_indirect_edge_info exactly.  */
2328 ii->param_index = (int) lto_input_sleb128 (ib);
2329 ii->anc_offset = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2330 bp = lto_input_bitpack (ib);
2331 ii->polymorphic = bp_unpack_value (bp, 1);
2332 bitpack_delete (bp);
2333 if (ii->polymorphic)
2335 ii->otr_token = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2336 ii->otr_type = lto_input_tree (ib, data_in);
2340 /* Stream out NODE info to OB. */
2343 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
2346 lto_cgraph_encoder_t encoder;
2347 struct ipa_node_params *info = IPA_NODE_REF (node);
2349 struct cgraph_edge *e;
2350 struct bitpack_d *bp;
2352 encoder = ob->decl_state->cgraph_node_encoder;
2353 node_ref = lto_cgraph_encoder_encode (encoder, node);
2354 lto_output_uleb128_stream (ob->main_stream, node_ref);
/* Node-level flags and per-parameter modified bits go into one bitpack.  */
2356 bp = bitpack_create ();
2357 bp_pack_value (bp, info->called_with_var_arguments, 1);
2358 bp_pack_value (bp, info->uses_analysis_done, 1);
2359 gcc_assert (info->modification_analysis_done
2360 || ipa_get_param_count (info) == 0);
2361 gcc_assert (!info->node_enqueued);
2362 gcc_assert (!info->ipcp_orig_node);
2363 for (j = 0; j < ipa_get_param_count (info); j++)
2364 bp_pack_value (bp, info->params[j].modified, 1);
2365 lto_output_bitpack (ob->main_stream, bp);
2366 bitpack_delete (bp);
/* For each direct callee, the argument count followed by its jump
   functions; indirect edges get their own record.  */
2367 for (e = node->callees; e; e = e->next_callee)
2369 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2371 lto_output_uleb128_stream (ob->main_stream,
2372 ipa_get_cs_argument_count (args));
2373 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2374 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2376 for (e = node->indirect_calls; e; e = e->next_callee)
2377 ipa_write_indirect_edge_info (ob, e);
2380 /* Stream in NODE info from IB. */
2383 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
2384 struct data_in *data_in)
2386 struct ipa_node_params *info = IPA_NODE_REF (node);
2388 struct cgraph_edge *e;
2389 struct bitpack_d *bp;
2391 ipa_initialize_node_params (node);
/* Must mirror ipa_write_node_info: flags first, then per-parameter
   modified bits, then per-edge jump functions.  */
2393 bp = lto_input_bitpack (ib);
2394 info->called_with_var_arguments = bp_unpack_value (bp, 1);
2395 info->uses_analysis_done = bp_unpack_value (bp, 1);
2396 if (ipa_get_param_count (info) != 0)
2398 info->modification_analysis_done = true;
2399 info->uses_analysis_done = true;
2401 info->node_enqueued = false;
2402 for (k = 0; k < ipa_get_param_count (info); k++)
2403 info->params[k].modified = bp_unpack_value (bp, 1);
2404 bitpack_delete (bp);
2405 for (e = node->callees; e; e = e->next_callee)
2407 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2408 int count = lto_input_uleb128 (ib);
2410 ipa_set_cs_argument_count (args, count);
2414 args->jump_functions = GGC_CNEWVEC (struct ipa_jump_func,
2415 ipa_get_cs_argument_count (args));
2416 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2417 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
2419 for (e = node->indirect_calls; e; e = e->next_callee)
2420 ipa_read_indirect_edge_info (ib, data_in, e);
2423 /* Write jump functions for nodes in SET. */
2426 ipa_prop_write_jump_functions (cgraph_node_set set)
2428 struct cgraph_node *node;
2429 struct output_block *ob = create_output_block (LTO_section_jump_functions);
2430 unsigned int count = 0;
2431 cgraph_node_set_iterator csi;
2433 ob->cgraph_node = NULL;
/* First pass counts the analyzed nodes with IPA info so the reader knows
   how many records follow.  */
2435 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2437 node = csi_node (csi);
2438 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2442 lto_output_uleb128_stream (ob->main_stream, count);
2444 /* Process all of the functions. */
2445 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2447 node = csi_node (csi);
2448 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2449 ipa_write_node_info (ob, node);
2451 lto_output_1_stream (ob->main_stream, 0);
2452 produce_asm (ob, NULL);
2453 destroy_output_block (ob);
2456 /* Read section in file FILE_DATA of length LEN with data DATA. */
2459 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
/* Compute the offsets of the sub-streams within the section from the
   function header.  */
2462 const struct lto_function_header *header =
2463 (const struct lto_function_header *) data;
2464 const int32_t cfg_offset = sizeof (struct lto_function_header);
2465 const int32_t main_offset = cfg_offset + header->cfg_size;
2466 const int32_t string_offset = main_offset + header->main_size;
2467 struct data_in *data_in;
2468 struct lto_input_block ib_main;
2472 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2476 lto_data_in_create (file_data, (const char *) data + string_offset,
2477 header->string_size, NULL);
2478 count = lto_input_uleb128 (&ib_main);
/* Read back exactly COUNT node records, resolving each node reference
   through the file's cgraph encoder.  */
2480 for (i = 0; i < count; i++)
2483 struct cgraph_node *node;
2484 lto_cgraph_encoder_t encoder;
2486 index = lto_input_uleb128 (&ib_main);
2487 encoder = file_data->cgraph_node_encoder;
2488 node = lto_cgraph_encoder_deref (encoder, index);
2489 gcc_assert (node->analyzed);
2490 ipa_read_node_info (&ib_main, node, data_in);
2492 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
2494 lto_data_in_delete (data_in);
2497 /* Read ipcp jump functions. */
2500 ipa_prop_read_jump_functions (void)
2502 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2503 struct lto_file_decl_data *file_data;
2506 ipa_check_create_node_params ();
2507 ipa_check_create_edge_args ();
2508 ipa_register_cgraph_hooks ();
/* Read the jump-function section of every LTO input file.  */
2510 while ((file_data = file_data_vec[j++]))
2513 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
2516 ipa_prop_read_section (file_data, data, len);
2520 /* After merging units, we can get mismatch in argument counts.
2521 Also decl merging might have rendered parameter lists obsolete.
2522 Also compute called_with_variable_arg info. */
2525 ipa_update_after_lto_read (void)
2527 struct cgraph_node *node;
2528 struct cgraph_edge *cs;
2530 ipa_check_create_node_params ();
2531 ipa_check_create_edge_args ();
2533 for (node = cgraph_nodes; node; node = node->next)
2535 ipa_initialize_node_params (node);
2537 for (node = cgraph_nodes; node; node = node->next)
2539 for (cs = node->callees; cs; cs = cs->next_callee)
2541 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
2542 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
2543 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));