1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
25 #include "langhooks.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
37 #include "diagnostic.h"
38 #include "lto-streamer.h"
40 /* Vector where the parameter infos are actually stored. */
41 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
42 /* Vector where the call-site argument infos (ipa_edge_args) are actually
   stored, indexed by call graph edge uid.  */
43 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
45 /* Bitmap with all UIDs of call graph edges that have been already processed
46 by indirect inlining. */
47 static bitmap iinlining_processed_edges;
49 /* Holders of ipa cgraph hooks: */
/* Registered callbacks that keep the IPA summaries in sync with call graph
   edge/node removal and duplication.  */
50 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
51 static struct cgraph_node_hook_list *node_removal_hook_holder;
52 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
53 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
55 /* Add cgraph NODE described by INFO to the worklist WL regardless of whether
56 it is in one or not. It should almost never be used directly, as opposed to
57 ipa_push_func_to_list. */
/* NOTE(review): this dump is missing lines (return type, braces, the list
   linking of TEMP into *WL) -- verify against the complete source.  */
60 ipa_push_func_to_list_1 (struct ipa_func_list **wl,
61 struct cgraph_node *node,
62 struct ipa_node_params *info)
64 struct ipa_func_list *temp;
/* Mark the node as enqueued so ipa_push_func_to_list can skip it later.  */
66 info->node_enqueued = 1;
67 temp = XCNEW (struct ipa_func_list);
73 /* Initialize worklist to contain all functions. */
/* Returns a freshly built worklist holding every node currently in the
   call graph; each is pushed via ipa_push_func_to_list_1.  */
75 struct ipa_func_list *
76 ipa_init_func_list (void)
78 struct cgraph_node *node;
79 struct ipa_func_list * wl;
82 for (node = cgraph_nodes; node; node = node->next)
85 struct ipa_node_params *info = IPA_NODE_REF (node);
86 /* Unreachable nodes should have been eliminated before ipcp and
   inlining run (comment truncated in this dump).  */
88 gcc_assert (node->needed || node->reachable);
89 ipa_push_func_to_list_1 (&wl, node, info);
95 /* Remove a function from the worklist WL and return it. */
/* NOTE(review): the lines popping FIRST off *WL and freeing it are missing
   from this dump; only the dequeue bookkeeping is visible.  */
98 ipa_pop_func_from_list (struct ipa_func_list **wl)
100 struct ipa_node_params *info;
101 struct ipa_func_list *first;
102 struct cgraph_node *node;
109 info = IPA_NODE_REF (node);
/* Clear the flag set by ipa_push_func_to_list_1 so the node can be
   re-enqueued later.  */
110 info->node_enqueued = 0;
114 /* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO; presumably returns -1 when not found (return lines missing from
   this dump -- confirm).  */
118 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
122 count = ipa_get_param_count (info);
/* Linear scan over the parameter descriptors comparing decls by pointer.  */
123 for (i = 0; i < count; i++)
124 if (ipa_get_param(info, i) == ptree)
130 /* Populate the param_decl field in parameter descriptors of INFO that
131 corresponds to NODE. */
/* Walks NODE's DECL_ARGUMENTS chain and records each PARM_DECL into the
   descriptor array; PARAM_NUM increment lines missing from this dump.  */
134 ipa_populate_param_decls (struct cgraph_node *node,
135 struct ipa_node_params *info)
143 fnargs = DECL_ARGUMENTS (fndecl);
145 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
147 info->params[param_num].decl = parm;
152 /* Return how many formal parameters FNDECL has. */
/* Counts the length of the DECL_ARGUMENTS chain (counter/return lines
   missing from this dump).  */
155 count_formal_params_1 (tree fndecl)
160 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = TREE_CHAIN (parm))
166 /* Count number of formal parameters in NODE.  Store the result to the
167 appropriate field of INFO. */
170 ipa_count_formal_params (struct cgraph_node *node,
171 struct ipa_node_params *info)
175 param_num = count_formal_params_1 (node->decl);
176 ipa_set_param_count (info, param_num);
179 /* Initialize the ipa_node_params structure associated with NODE by counting
180 the function parameters, creating the descriptors and populating their
   param_decl fields (comment truncated in this dump).  */
184 ipa_initialize_node_params (struct cgraph_node *node)
186 struct ipa_node_params *info = IPA_NODE_REF (node);
190 ipa_count_formal_params (node, info);
/* Allocate one zeroed descriptor per formal parameter.  */
191 info->params = XCNEWVEC (struct ipa_param_descriptor,
192 ipa_get_param_count (info));
193 ipa_populate_param_decls (node, info);
197 /* Callback of walk_stmt_load_store_addr_ops for the visit_store and visit_addr
198 parameters. If OP is a parameter declaration, mark it as modified in the
199 info structure passed in DATA. */
202 visit_store_addr_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
205 struct ipa_node_params *info = (struct ipa_node_params *) data;
/* Strip component refs etc. to get at the underlying declaration.  */
207 op = get_base_address (op);
209 && TREE_CODE (op) == PARM_DECL)
211 int index = ipa_get_param_decl_index (info, op);
212 gcc_assert (index >= 0);
/* A stored-to or address-taken parameter is both modified and used.  */
213 info->params[index].modified = true;
214 info->params[index].used = true;
220 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
221 If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA (comment truncated in this dump).  */
225 visit_load_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
228 struct ipa_node_params *info = (struct ipa_node_params *) data;
230 op = get_base_address (op);
232 && TREE_CODE (op) == PARM_DECL)
234 int index = ipa_get_param_decl_index (info, op);
235 gcc_assert (index >= 0);
/* Loads only mark the parameter used, not modified.  */
236 info->params[index].used = true;
242 /* Compute which formal parameters of function associated with NODE are locally
243 modified or their address is taken. Note that this does not apply on
244 parameters with SSA names but those can and should be analyzed
   separately (comment truncated in this dump).  */
248 ipa_detect_param_modifications (struct cgraph_node *node)
250 tree decl = node->decl;
252 struct function *func;
253 gimple_stmt_iterator gsi;
254 struct ipa_node_params *info = IPA_NODE_REF (node);
/* Nothing to do for parameterless functions or when already computed.  */
257 if (ipa_get_param_count (info) == 0 || info->modification_analysis_done)
260 for (i = 0; i < ipa_get_param_count (info); i++)
262 tree parm = ipa_get_param (info, i);
263 /* For SSA regs see if parameter is used. For non-SSA we compute
264 the flag during modification analysis. */
265 if (is_gimple_reg (parm)
266 && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
267 info->params[i].used = true;
/* Walk every statement and PHI, letting the visit_* callbacks set the
   modified/used flags in INFO.  */
270 func = DECL_STRUCT_FUNCTION (decl);
271 FOR_EACH_BB_FN (bb, func)
273 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
274 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
275 visit_load_for_mod_analysis,
276 visit_store_addr_for_mod_analysis,
277 visit_store_addr_for_mod_analysis)
278 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
279 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
280 visit_load_for_mod_analysis,
281 visit_store_addr_for_mod_analysis,
282 visit_store_addr_for_mod_analysis);
285 info->modification_analysis_done = 1;
288 /* Count number of arguments callsite CS has and store it in
289 ipa_edge_args structure corresponding to this callsite. */
292 ipa_count_arguments (struct cgraph_edge *cs)
297 stmt = cs->call_stmt;
298 gcc_assert (is_gimple_call (stmt));
299 arg_num = gimple_call_num_args (stmt);
/* Make sure the edge-args vector is large enough to be indexed by any
   current edge uid before storing into it.  */
300 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
301 <= (unsigned) cgraph_edge_max_uid)
302 VEC_safe_grow_cleared (ipa_edge_args_t, gc,
303 ipa_edge_args_vector, cgraph_edge_max_uid + 1);
304 ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
307 /* Print the jump functions of all arguments on all call graph edges going from
311 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
314 struct cgraph_edge *cs;
315 struct ipa_jump_func *jump_func;
316 enum jump_func_type type;
318 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
319 for (cs = node->callees; cs; cs = cs->next_callee)
321 if (!ipa_edge_args_info_available_for_edge_p (cs))
324 fprintf (f, " callsite %s ", cgraph_node_name (node));
325 fprintf (f, "-> %s :: \n", cgraph_node_name (cs->callee));
327 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
328 for (i = 0; i < count; i++)
330 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
331 type = jump_func->type;
333 fprintf (f, " param %d: ", i);
334 if (type == IPA_JF_UNKNOWN)
335 fprintf (f, "UNKNOWN\n");
336 else if (type == IPA_JF_KNOWN_TYPE)
338 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
339 fprintf (f, "KNOWN TYPE, type in binfo is: ");
340 print_generic_expr (f, binfo_type, 0);
341 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
343 else if (type == IPA_JF_CONST)
345 tree val = jump_func->value.constant;
346 fprintf (f, "CONST: ");
347 print_generic_expr (f, val, 0);
348 if (TREE_CODE (val) == ADDR_EXPR
349 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
352 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
357 else if (type == IPA_JF_CONST_MEMBER_PTR)
359 fprintf (f, "CONST MEMBER PTR: ");
360 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
362 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
365 else if (type == IPA_JF_PASS_THROUGH)
367 fprintf (f, "PASS THROUGH: ");
368 fprintf (f, "%d, op %s ",
369 jump_func->value.pass_through.formal_id,
371 jump_func->value.pass_through.operation]);
372 if (jump_func->value.pass_through.operation != NOP_EXPR)
373 print_generic_expr (dump_file,
374 jump_func->value.pass_through.operand, 0);
375 fprintf (dump_file, "\n");
377 else if (type == IPA_JF_ANCESTOR)
379 fprintf (f, "ANCESTOR: ");
380 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
381 jump_func->value.ancestor.formal_id,
382 jump_func->value.ancestor.offset);
383 print_generic_expr (f, jump_func->value.ancestor.type, 0);
384 fprintf (dump_file, "\n");
390 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
393 ipa_print_all_jump_functions (FILE *f)
395 struct cgraph_node *node;
397 fprintf (f, "\nJump functions:\n")
/* Iterate over every call graph node and dump its outgoing edges.  */
398 for (node = cgraph_nodes; node; node = node->next)
400 ipa_print_node_jump_functions (f, node);
404 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
405 of an assignment statement STMT, try to find out whether NAME can be
406 described by a (possibly polynomial) pass-through jump-function or an
407 ancestor jump function and if so, write the appropriate function into
   JFUNC (comment truncated in this dump).  */
411 compute_complex_assign_jump_func (struct ipa_node_params *info,
412 struct ipa_jump_func *jfunc,
413 gimple stmt, tree name)
415 HOST_WIDE_INT offset, size, max_size;
419 op1 = gimple_assign_rhs1 (stmt);
420 op2 = gimple_assign_rhs2 (stmt);
/* Case 1: the RHS is directly based on a default-def SSA name, i.e. an
   unmodified formal parameter.  */
422 if (TREE_CODE (op1) == SSA_NAME
423 && SSA_NAME_IS_DEFAULT_DEF (op1))
425 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
/* An arithmetic pass-through requires an interprocedurally invariant
   second operand and (except for comparisons) compatible types.  */
431 if (!is_gimple_ip_invariant (op2)
432 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
433 && !useless_type_conversion_p (TREE_TYPE (name),
437 jfunc->type = IPA_JF_PASS_THROUGH;
438 jfunc->value.pass_through.formal_id = index;
439 jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
440 jfunc->value.pass_through.operand = op2;
/* A plain copy/conversion is a simple NOP pass-through.  */
442 else if (gimple_assign_unary_nop_p (stmt))
444 jfunc->type = IPA_JF_PASS_THROUGH;
445 jfunc->value.pass_through.formal_id = index;
446 jfunc->value.pass_through.operation = NOP_EXPR;
/* Case 2: try to match &param->field, yielding an ancestor jump
   function with a constant offset into a RECORD_TYPE.  */
451 if (TREE_CODE (op1) != ADDR_EXPR)
454 op1 = TREE_OPERAND (op1, 0);
455 type = TREE_TYPE (op1);
456 if (TREE_CODE (type) != RECORD_TYPE)
458 op1 = get_ref_base_and_extent (op1, &offset, &size, &max_size);
459 if (TREE_CODE (op1) != INDIRECT_REF
460 /* If this is a varying address, punt. */
464 op1 = TREE_OPERAND (op1, 0);
465 if (TREE_CODE (op1) != SSA_NAME
466 || !SSA_NAME_IS_DEFAULT_DEF (op1))
469 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
472 jfunc->type = IPA_JF_ANCESTOR;
473 jfunc->value.ancestor.formal_id = index;
474 jfunc->value.ancestor.offset = offset;
475 jfunc->value.ancestor.type = type;
480 /* Given that an actual argument is an SSA_NAME that is a result of a phi
481 statement PHI, try to find out whether NAME is in fact a
482 multiple-inheritance typecast from a descendant into an ancestor of a formal
483 parameter and thus can be described by an ancestor jump function and if so,
484 write the appropriate function into JFUNC.
486 Essentially we want to match the following pattern:
494 iftmp.1_3 = &obj_2(D)->D.1762;
497 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
498 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
502 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
503 struct ipa_jump_func *jfunc,
506 HOST_WIDE_INT offset, size, max_size;
508 basic_block phi_bb, assign_bb, cond_bb;
509 tree tmp, parm, expr;
/* The PHI must merge exactly one computed value with a literal zero
   (the NULL branch of the checked cast).  */
512 if (gimple_phi_num_args (phi) != 2
513 || !integer_zerop (PHI_ARG_DEF (phi, 1)))
516 tmp = PHI_ARG_DEF (phi, 0);
517 if (TREE_CODE (tmp) != SSA_NAME
518 || SSA_NAME_IS_DEFAULT_DEF (tmp)
519 || !POINTER_TYPE_P (TREE_TYPE (tmp))
520 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
/* The non-zero arm must be a single assignment in a single-predecessor
   block taking the address of a field within *parm.  */
523 assign = SSA_NAME_DEF_STMT (tmp);
524 assign_bb = gimple_bb (assign);
525 if (!single_pred_p (assign_bb)
526 || !gimple_assign_single_p (assign))
528 expr = gimple_assign_rhs1 (assign);
530 if (TREE_CODE (expr) != ADDR_EXPR)
532 expr = TREE_OPERAND (expr, 0);
533 expr = get_ref_base_and_extent (expr, &offset, &size, &max_size);
535 if (TREE_CODE (expr) != INDIRECT_REF
536 /* If this is a varying address, punt. */
540 parm = TREE_OPERAND (expr, 0);
541 if (TREE_CODE (parm) != SSA_NAME
542 || !SSA_NAME_IS_DEFAULT_DEF (parm))
545 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
/* The guarding condition must be exactly "parm != 0".  */
549 cond_bb = single_pred (assign_bb);
550 cond = last_stmt (cond_bb);
551 if (gimple_code (cond) != GIMPLE_COND
552 || gimple_cond_code (cond) != NE_EXPR
553 || gimple_cond_lhs (cond) != parm
554 || !integer_zerop (gimple_cond_rhs (cond)))
/* Both PHI predecessors must be the assign block or the condition
   block, i.e. the CFG diamond is exactly the expected shape.  */
558 phi_bb = gimple_bb (phi);
559 for (i = 0; i < 2; i++)
561 basic_block pred = EDGE_PRED (phi_bb, i)->src;
562 if (pred != assign_bb && pred != cond_bb)
566 jfunc->type = IPA_JF_ANCESTOR;
567 jfunc->value.ancestor.formal_id = index;
568 jfunc->value.ancestor.offset = offset;
569 jfunc->value.ancestor.type = TREE_TYPE (TREE_TYPE (tmp));
572 /* Given OP which is passed as an actual argument to a called function,
573 determine if it is possible to construct a KNOWN_TYPE jump function for it
574 and if so, create one and store it to JFUNC. */
577 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc)
/* Only &object expressions can yield a statically known type.  */
581 if (TREE_CODE (op) != ADDR_EXPR)
584 op = TREE_OPERAND (op, 0);
585 binfo = gimple_get_relevant_ref_binfo (op, NULL_TREE);
588 jfunc->type = IPA_JF_KNOWN_TYPE;
589 jfunc->value.base_binfo = binfo;
594 /* Determine the jump functions of scalar arguments. Scalar means SSA names
595 and constants of a number of selected types. INFO is the ipa_node_params
596 structure associated with the caller, FUNCTIONS is a pointer to an array of
597 jump function structures associated with CALL which is the call statement
   being examined (comment truncated in this dump).  */
601 compute_scalar_jump_functions (struct ipa_node_params *info,
602 struct ipa_jump_func *functions,
608 for (num = 0; num < gimple_call_num_args (call); num++)
610 arg = gimple_call_arg (call, num);
/* Interprocedural invariants become constant jump functions.  */
612 if (is_gimple_ip_invariant (arg))
614 functions[num].type = IPA_JF_CONST;
615 functions[num].value.constant = arg;
617 else if (TREE_CODE (arg) == SSA_NAME)
/* A default-def SSA name is the unmodified formal itself: a plain
   NOP pass-through.  */
619 if (SSA_NAME_IS_DEFAULT_DEF (arg))
621 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
625 functions[num].type = IPA_JF_PASS_THROUGH;
626 functions[num].value.pass_through.formal_id = index;
627 functions[num].value.pass_through.operation = NOP_EXPR;
/* Otherwise inspect the defining statement for the complex assign /
   ancestor patterns.  */
632 gimple stmt = SSA_NAME_DEF_STMT (arg);
633 if (is_gimple_assign (stmt))
634 compute_complex_assign_jump_func (info, &functions[num],
636 else if (gimple_code (stmt) == GIMPLE_PHI)
637 compute_complex_ancestor_jump_func (info, &functions[num],
/* Fall back to a known-type jump function for &object arguments.  */
642 compute_known_type_jump_func (arg, &functions[num]);
646 /* Inspect the given TYPE and return true iff it has the same structure (the
647 same number of fields of the same types) as a C++ member pointer. If
648 METHOD_PTR and DELTA are non-NULL, store the trees representing the
649 corresponding fields there.
   Fix: the second-field check used INTEGRAL_TYPE_P (fld), but FLD is a
   FIELD_DECL and never an integral *type*, so the test was vacuously false
   and records with a non-integral second field were wrongly accepted.  The
   intended check is that the field's TYPE is integral (the delta).  */
652 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
656 if (TREE_CODE (type) != RECORD_TYPE)
/* First field must be a pointer to a METHOD_TYPE (the pfn).  */
659 fld = TYPE_FIELDS (type);
660 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
661 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
/* Second field must exist and have an integral type (the delta).  */
667 fld = TREE_CHAIN (fld);
668 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld)))
/* Exactly two fields -- any third field disqualifies the record.  */
673 if (TREE_CHAIN (fld))
679 /* Go through arguments of the CALL and for every one that looks like a member
680 pointer, check whether it can be safely declared pass-through and if so,
681 mark that to the corresponding item of jump FUNCTIONS. Return true iff
682 there are non-pass-through member pointers within the arguments. INFO
683 describes formal parameters of the caller. */
686 compute_pass_through_member_ptrs (struct ipa_node_params *info,
687 struct ipa_jump_func *functions,
690 bool undecided_members = false;
694 for (num = 0; num < gimple_call_num_args (call); num++)
696 arg = gimple_call_arg (call, num);
698 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
700 if (TREE_CODE (arg) == PARM_DECL)
702 int index = ipa_get_param_decl_index (info, arg);
704 gcc_assert (index >=0);
/* Only an unmodified formal may be forwarded as pass-through.  */
705 if (!ipa_is_param_modified (info, index))
707 functions[num].type = IPA_JF_PASS_THROUGH;
708 functions[num].value.pass_through.formal_id = index;
709 functions[num].value.pass_through.operation = NOP_EXPR;
/* Modified formals and non-PARM_DECL member pointers need the
   constant-member-pointer analysis later.  */
712 undecided_members = true;
715 undecided_members = true;
719 return undecided_members;
722 /* Simple function filling in a member pointer constant jump function (with PFN
723 and DELTA as the constant value) into JFUNC. */
726 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
727 tree pfn, tree delta)
729 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
730 jfunc->value.member_cst.pfn = pfn;
731 jfunc->value.member_cst.delta = delta;
734 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
735 return the rhs of its defining statement. */
/* Follows chains of single copies; stops at default defs or non-copy
   definitions (loop exit lines missing from this dump).  */
738 get_ssa_def_if_simple_copy (tree rhs)
740 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
742 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
744 if (gimple_assign_single_p (def_stmt))
745 rhs = gimple_assign_rhs1 (def_stmt);
752 /* Traverse statements from CALL backwards, scanning whether the argument ARG
753 which is a member pointer is filled in with constant values. If it is, fill
754 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
755 fields of the record type of the member pointer. To give an example, we
756 look for a pattern looking like the following:
758 D.2515.__pfn ={v} printStuff;
759 D.2515.__delta ={v} 0;
760 i_1 = doprinting (D.2515); */
763 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
764 tree delta_field, struct ipa_jump_func *jfunc)
766 gimple_stmt_iterator gsi;
767 tree method = NULL_TREE;
768 tree delta = NULL_TREE;
770 gsi = gsi_for_stmt (call);
/* Scan backwards from the call for stores into ARG's fields.  */
773 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
775 gimple stmt = gsi_stmt (gsi);
778 if (!gimple_assign_single_p (stmt))
781 lhs = gimple_assign_lhs (stmt);
782 rhs = gimple_assign_rhs1 (stmt);
/* Only component stores into ARG itself are of interest.  */
784 if (TREE_CODE (lhs) != COMPONENT_REF
785 || TREE_OPERAND (lhs, 0) != arg)
788 fld = TREE_OPERAND (lhs, 1);
/* A store of &method into the pfn field; once both pfn and delta are
   known the jump function can be filled in.  */
789 if (!method && fld == method_field)
791 rhs = get_ssa_def_if_simple_copy (rhs);
792 if (TREE_CODE (rhs) == ADDR_EXPR
793 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
794 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
796 method = TREE_OPERAND (rhs, 0);
799 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
/* A store of an integer constant into the delta field.  */
807 if (!delta && fld == delta_field)
809 rhs = get_ssa_def_if_simple_copy (rhs);
810 if (TREE_CODE (rhs) == INTEGER_CST)
815 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
827 /* Go through the arguments of the CALL and for every member pointer within,
828 try to determine whether it is a constant. If it is, create a corresponding
829 constant jump function in FUNCTIONS which is an array of jump functions
830 associated with the call. */
833 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
837 tree arg, method_field, delta_field;
839 for (num = 0; num < gimple_call_num_args (call); num++)
841 arg = gimple_call_arg (call, num);
/* Only arguments still undetermined and shaped like a C++ member
   pointer are analyzed further.  */
843 if (functions[num].type == IPA_JF_UNKNOWN
844 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
846 determine_cst_member_ptr (call, arg, method_field, delta_field,
851 /* Compute jump function for all arguments of callsite CS and insert the
852 information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite (comment truncated in this dump).  */
856 ipa_compute_jump_functions (struct cgraph_edge *cs)
858 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
859 struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
/* Nothing to do for argument-less calls or when already computed.  */
862 if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
864 arguments->jump_functions = GGC_CNEWVEC (struct ipa_jump_func,
865 ipa_get_cs_argument_count (arguments));
867 call = cs->call_stmt;
868 gcc_assert (is_gimple_call (call));
870 /* We will deal with constants and SSA scalars first: */
871 compute_scalar_jump_functions (info, arguments->jump_functions, call);
873 /* Let's check whether there are any potential member pointers and if so,
874 whether we can determine their functions as pass_through. */
875 if (!compute_pass_through_member_ptrs (info, arguments->jump_functions, call))
878 /* Finally, let's check whether we actually pass a new constant member
   pointer here (comment truncated in this dump).  */
880 compute_cst_member_ptr_arguments (arguments->jump_functions, call);
883 /* If RHS looks like a rhs of a statement loading pfn from a member
884 pointer formal parameter, return the parameter, otherwise return
885 NULL. If USE_DELTA, then we look for a use of the delta field
886 rather than the pfn. */
889 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
895 if (TREE_CODE (rhs) != COMPONENT_REF)
/* The base must be a PARM_DECL whose type matches the C++ member
   pointer layout.  */
898 rec = TREE_OPERAND (rhs, 0);
899 if (TREE_CODE (rec) != PARM_DECL
900 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
903 fld = TREE_OPERAND (rhs, 1);
904 if (use_delta ? (fld == delta_field) : (fld == ptr_field))
910 /* If STMT looks like a statement loading a value from a member pointer formal
911 parameter, this function returns that parameter. */
914 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
918 if (!gimple_assign_single_p (stmt))
921 rhs = gimple_assign_rhs1 (stmt);
922 return ipa_get_member_ptr_load_param (rhs, use_delta);
925 /* Returns true iff T is an SSA_NAME defined by a statement. */
928 ipa_is_ssa_with_stmt_def (tree t)
930 if (TREE_CODE (t) == SSA_NAME
931 && !SSA_NAME_IS_DEFAULT_DEF (t))
937 /* Find the indirect call graph edge corresponding to STMT and add to it all
938 information necessary to describe a call to a parameter number PARAM_INDEX.
939 NODE is the caller. POLYMORPHIC should be set to true iff the call is a
   virtual one (comment truncated in this dump).  */
943 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt,
946 struct cgraph_edge *cs;
948 cs = cgraph_edge (node, stmt);
949 cs->indirect_info->param_index = param_index;
950 cs->indirect_info->anc_offset = 0;
951 cs->indirect_info->polymorphic = polymorphic;
/* For virtual calls also record the OBJ_TYPE_REF token and the static
   type of the object the call is made on.  */
954 tree otr = gimple_call_fn (stmt);
955 tree type, token = OBJ_TYPE_REF_TOKEN (otr);
956 cs->indirect_info->otr_token = tree_low_cst (token, 1);
957 type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (otr)));
958 cs->indirect_info->otr_type = type;
962 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
963 (described by INFO). Currently it checks whether the call calls a pointer
964 that is a formal parameter and if so, the parameter is marked with the
965 called flag and an indirect call graph edge describing the call is created.
966 This is very simple for ordinary pointers represented in SSA but not-so-nice
967 when it comes to member pointers. The ugly part of this function does
968 nothing more than trying to match the pattern of such a call. An example of
969 such a pattern is the gimple dump below, the call is on the last line:
972 f$__delta_5 = f.__delta;
973 f$__pfn_24 = f.__pfn;
974 D.2496_3 = (int) f$__pfn_24;
975 D.2497_4 = D.2496_3 & 1;
982 D.2500_7 = (unsigned int) f$__delta_5;
983 D.2501_8 = &S + D.2500_7;
984 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
985 D.2503_10 = *D.2502_9;
986 D.2504_12 = f$__pfn_24 + -1;
987 D.2505_13 = (unsigned int) D.2504_12;
988 D.2506_14 = D.2503_10 + D.2505_13;
989 D.2507_15 = *D.2506_14;
990 iftmp.11_16 = (String:: *) D.2507_15;
993 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
994 D.2500_19 = (unsigned int) f$__delta_5;
995 D.2508_20 = &S + D.2500_19;
996 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
998 Such patterns are results of simple calls to a member pointer:
1000 int doprinting (int (MyString::* f)(int) const)
1002 MyString S ("somestring");
1009 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1010 struct ipa_node_params *info,
1011 gimple call, tree target)
1016 tree rec, rec2, cond;
1019 basic_block bb, virt_bb, join;
/* Easy case: the call target is an unmodified pointer formal.  */
1021 if (SSA_NAME_IS_DEFAULT_DEF (target))
1023 tree var = SSA_NAME_VAR (target);
1024 index = ipa_get_param_decl_index (info, var);
1026 ipa_note_param_call (node, index, call, false);
1030 /* Now we need to try to match the complex pattern of calling a member
   pointer (comment truncated in this dump).  */
1033 if (!POINTER_TYPE_P (TREE_TYPE (target))
1034 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1037 def = SSA_NAME_DEF_STMT (target);
1038 if (gimple_code (def) != GIMPLE_PHI)
1041 if (gimple_phi_num_args (def) != 2)
1044 /* First, we need to check whether one of these is a load from a member
1045 pointer that is a parameter to this function. */
1046 n1 = PHI_ARG_DEF (def, 0);
1047 n2 = PHI_ARG_DEF (def, 1);
1048 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1050 d1 = SSA_NAME_DEF_STMT (n1);
1051 d2 = SSA_NAME_DEF_STMT (n2);
/* Identify which PHI arm is the direct pfn load (BB) and which is the
   virtual-dispatch computation (VIRT_BB).  */
1053 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
1055 if (ipa_get_stmt_member_ptr_load_param (d2, false))
1058 bb = gimple_bb (d1);
1059 virt_bb = gimple_bb (d2);
1061 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
1063 bb = gimple_bb (d2);
1064 virt_bb = gimple_bb (d1);
1069 /* Second, we need to check that the basic blocks are laid out in the way
1070 corresponding to the pattern. */
1072 join = gimple_bb (def);
1073 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1074 || single_pred (virt_bb) != bb
1075 || single_succ (virt_bb) != join)
1078 /* Third, let's see that the branching is done depending on the least
1079 significant bit of the pfn. */
1081 branch = last_stmt (bb);
1082 if (gimple_code (branch) != GIMPLE_COND)
1085 if (gimple_cond_code (branch) != NE_EXPR
1086 || !integer_zerop (gimple_cond_rhs (branch)))
1089 cond = gimple_cond_lhs (branch);
1090 if (!ipa_is_ssa_with_stmt_def (cond))
1093 def = SSA_NAME_DEF_STMT (cond);
1094 if (!is_gimple_assign (def)
1095 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1096 || !integer_onep (gimple_assign_rhs2 (def)))
1099 cond = gimple_assign_rhs1 (def);
1100 if (!ipa_is_ssa_with_stmt_def (cond))
1103 def = SSA_NAME_DEF_STMT (cond);
/* Skip an optional conversion between the pfn load and the & 1 test.  */
1105 if (is_gimple_assign (def)
1106 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1108 cond = gimple_assign_rhs1 (def);
1109 if (!ipa_is_ssa_with_stmt_def (cond))
1111 def = SSA_NAME_DEF_STMT (cond);
/* The tested value must load from the same member pointer parameter;
   which field depends on where the target ABI stores the vbit.  */
1114 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1115 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1116 == ptrmemfunc_vbit_in_delta));
1121 index = ipa_get_param_decl_index (info, rec);
1122 if (index >= 0 && !ipa_is_param_modified (info, index))
1123 ipa_note_param_call (node, index, call, false);
1128 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1129 object referenced in the expression is a formal parameter of the caller
1130 (described by INFO), create a call note for the statement. */
1133 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1134 struct ipa_node_params *info, gimple call,
1137 tree obj = OBJ_TYPE_REF_OBJECT (target);
/* Strip &obj.field...field chains down to the underlying reference.  */
1141 if (TREE_CODE (obj) == ADDR_EXPR)
1145 obj = TREE_OPERAND (obj, 0);
1147 while (TREE_CODE (obj) == COMPONENT_REF);
1148 if (TREE_CODE (obj) != INDIRECT_REF)
1150 obj = TREE_OPERAND (obj, 0);
/* Only a default-def SSA name, i.e. an unmodified formal, qualifies.  */
1153 if (TREE_CODE (obj) != SSA_NAME
1154 || !SSA_NAME_IS_DEFAULT_DEF (obj))
1157 var = SSA_NAME_VAR (obj);
1158 index = ipa_get_param_decl_index (info, var);
1161 ipa_note_param_call (node, index, call, true);
1164 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1165 of the caller (described by INFO). */
1168 ipa_analyze_call_uses (struct cgraph_node *node,
1169 struct ipa_node_params *info, gimple call)
1171 tree target = gimple_call_fn (call);
/* Dispatch on the form of the call target: plain indirect call through
   an SSA name versus a C++ virtual call.  */
1173 if (TREE_CODE (target) == SSA_NAME)
1174 ipa_analyze_indirect_call_uses (node, info, call, target);
1175 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1176 ipa_analyze_virtual_call_uses (node, info, call, target);
1180 /* Analyze the call statement STMT with respect to formal parameters (described
1181 in INFO) of caller given by NODE. Currently it only checks whether formal
1182 parameters are called. */
1185 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1188 if (is_gimple_call (stmt))
1189 ipa_analyze_call_uses (node, info, stmt);
1192 /* Scan the function body of NODE and inspect the uses of formal parameters.
1193 Store the findings in various structures of the associated ipa_node_params
1194 structure, such as parameter flags, notes etc. */
1197 ipa_analyze_params_uses (struct cgraph_node *node)
1199 tree decl = node->decl;
1201 struct function *func;
1202 gimple_stmt_iterator gsi;
1203 struct ipa_node_params *info = IPA_NODE_REF (node);
/* Nothing to do for parameterless functions or when already computed.  */
1205 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1208 func = DECL_STRUCT_FUNCTION (decl);
1209 FOR_EACH_BB_FN (bb, func)
1211 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1213 gimple stmt = gsi_stmt (gsi);
1214 ipa_analyze_stmt_uses (node, info, stmt);
1218 info->uses_analysis_done = 1;
1221 /* Update the jump function DST when the call graph edge corresponding to SRC is
1222 is being inlined, knowing that DST is of type ancestor and src of known
   type (comment truncated in this dump).  */
1226 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1227 struct ipa_jump_func *dst)
/* Look up the sub-binfo at DST's ancestor offset within SRC's known
   binfo; if found, DST collapses to a known-type jump function.  */
1231 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1232 dst->value.ancestor.offset,
1233 dst->value.ancestor.type);
1236 dst->type = IPA_JF_KNOWN_TYPE;
1237 dst->value.base_binfo = new_binfo;
/* Otherwise nothing useful is known about the combined value.  */
1240 dst->type = IPA_JF_UNKNOWN;
1243 /* Update the jump functions associated with call graph edge E when the call
1244 graph edge CS is being inlined, assuming that E->caller is already (possibly
1245 indirectly) inlined into CS->callee and that E has not been inlined. */
1248 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1249 struct cgraph_edge *e)
1251 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1252 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1253 int count = ipa_get_cs_argument_count (args);
1256 for (i = 0; i < count; i++)
1258 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1260 if (dst->type == IPA_JF_ANCESTOR)
1262 struct ipa_jump_func *src;
1264 /* Variable number of arguments can cause havoc if we try to access
1265 one that does not exist in the inlined edge. So make sure we
   stay within bounds (comment truncated in this dump).  */
1267 if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
1269 dst->type = IPA_JF_UNKNOWN;
/* Compose DST (ancestor) with the jump function SRC computed for the
   corresponding argument of the inlined edge.  */
1273 src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
1274 if (src->type == IPA_JF_KNOWN_TYPE)
1275 combine_known_type_and_ancestor_jfs (src, dst);
1276 else if (src->type == IPA_JF_CONST)
/* A constant may still encode a known type (&object); try to recover
   it before combining.  */
1278 struct ipa_jump_func kt_func;
1280 kt_func.type = IPA_JF_UNKNOWN;
1281 compute_known_type_jump_func (src->value.constant, &kt_func);
1282 if (kt_func.type == IPA_JF_KNOWN_TYPE)
1283 combine_known_type_and_ancestor_jfs (&kt_func, dst);
1285 dst->type = IPA_JF_UNKNOWN;
/* Ancestor of a NOP pass-through just retargets the formal id;
   ancestor of ancestor composes by adding offsets.  */
1287 else if (src->type == IPA_JF_PASS_THROUGH
1288 && src->value.pass_through.operation == NOP_EXPR)
1289 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
1290 else if (src->type == IPA_JF_ANCESTOR)
1292 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
1293 dst->value.ancestor.offset += src->value.ancestor.offset;
1296 dst->type = IPA_JF_UNKNOWN;
1298 else if (dst->type == IPA_JF_PASS_THROUGH)
1300 struct ipa_jump_func *src;
1301 /* We must check range due to calls with variable number of arguments
1302 and we cannot combine jump functions with operations. */
1303 if (dst->value.pass_through.operation == NOP_EXPR
1304 && (dst->value.pass_through.formal_id
1305 < ipa_get_cs_argument_count (top)))
1307 src = ipa_get_ith_jump_func (top,
1308 dst->value.pass_through.formal_id);
1312 dst->type = IPA_JF_UNKNOWN;
1317 /* If TARGET is an addr_expr of a function declaration, make it the destination
1318 of an indirect edge IE and return the edge.  Otherwise, return NULL.  */
1320 static struct cgraph_edge *
1321 make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
1323 struct cgraph_node *callee;
/* Only an ADDR_EXPR wrapping a FUNCTION_DECL identifies a concrete
callee we can redirect to.  */
1325 if (TREE_CODE (target) != ADDR_EXPR)
1327 target = TREE_OPERAND (target, 0);
1328 if (TREE_CODE (target) != FUNCTION_DECL)
1330 callee = cgraph_node (target);
1334 cgraph_make_edge_direct (ie, callee);
/* Report the discovery in the dump file, if enabled.  */
1337 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1338 "(%s/%i -> %s/%i) for stmt ",
1339 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1340 cgraph_node_name (ie->caller), ie->caller->uid,
1341 cgraph_node_name (ie->callee), ie->callee->uid);
1344 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1346 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
1351 /* Try to find a destination for indirect edge IE that corresponds to a simple
1352 call or a call of a member function pointer and where the destination is a
1353 pointer formal parameter described by jump function JFUNC.  If it can be
1354 determined, return the newly direct edge, otherwise return NULL.  */
1356 static struct cgraph_edge *
1357 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1358 struct ipa_jump_func *jfunc)
/* A constant jump function gives the callee address directly; a constant
member-pointer jump function carries it in the pfn field.  */
1362 if (jfunc->type == IPA_JF_CONST)
1363 target = jfunc->value.constant;
1364 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1365 target = jfunc->value.member_cst.pfn;
1369 return make_edge_direct_to_target (ie, target);
1372 /* Try to find a destination for indirect edge IE that corresponds to a
1373 virtual call based on a formal parameter which is described by jump
1374 function JFUNC and if it can be determined, make it direct and return the
1375 direct edge.  Otherwise, return NULL.  */
1377 static struct cgraph_edge *
1378 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1379 struct ipa_jump_func *jfunc)
1381 tree binfo, type, target;
1382 HOST_WIDE_INT token;
/* Obtain the BINFO describing the dynamic type of the object the call
is made on, either directly from a known-type jump function or from a
constant ADDR_EXPR.  */
1384 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1385 binfo = jfunc->value.base_binfo;
1386 else if (jfunc->type == IPA_JF_CONST)
1388 tree cst = jfunc->value.constant;
1389 if (TREE_CODE (cst) == ADDR_EXPR)
1390 binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (cst, 0),
/* Look up the particular virtual method using the OBJ_TYPE_REF token
recorded on the indirect edge.  */
1401 token = ie->indirect_info->otr_token;
1402 type = ie->indirect_info->otr_type;
1403 binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
1405 target = gimple_fold_obj_type_ref_known_binfo (token, binfo);
1410 return make_edge_direct_to_target (ie, target);
1415 /* Update the param called notes associated with NODE when CS is being inlined,
1416 assuming NODE is (potentially indirectly) inlined into CS->callee.
1417 Moreover, if the callee is discovered to be constant, create a new cgraph
1418 edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
1419 unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */
1422 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
1423 struct cgraph_node *node,
1424 VEC (cgraph_edge_p, heap) **new_edges)
1426 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1427 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
1430 ipa_check_create_edge_args ();
/* NEXT_IE is cached up front because IE may be turned into a direct edge
and unlinked from the indirect-call list during the iteration.  */
1432 for (ie = node->indirect_calls; ie; ie = next_ie)
1434 struct cgraph_indirect_call_info *ici = ie->indirect_info;
1435 struct ipa_jump_func *jfunc;
1437 next_ie = ie->next_callee;
1438 if (bitmap_bit_p (iinlining_processed_edges, ie->uid))
1441 /* If we ever use indirect edges for anything other than indirect
1442 inlining, we will need to skip those with negative param_indices.  */
1443 if (ici->param_index == -1)
1446 /* We must check range due to calls with variable number of arguments: */
1447 if (ici->param_index >= ipa_get_cs_argument_count (top))
1449 bitmap_set_bit (iinlining_processed_edges, ie->uid);
/* Translate the parameter index through the jump function of the
inlined edge so it refers to a parameter of the new caller.  */
1453 jfunc = ipa_get_ith_jump_func (top, ici->param_index);
1454 if (jfunc->type == IPA_JF_PASS_THROUGH
1455 && jfunc->value.pass_through.operation == NOP_EXPR)
1456 ici->param_index = jfunc->value.pass_through.formal_id;
1457 else if (jfunc->type == IPA_JF_ANCESTOR)
1459 ici->param_index = jfunc->value.ancestor.formal_id;
1460 ici->anc_offset += jfunc->value.ancestor.offset;
1463 /* Either we can find a destination for this edge now or never.  */
1464 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1466 if (ici->polymorphic)
1467 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
1469 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
1471 if (new_direct_edge)
1473 new_direct_edge->indirect_inlining_edge = 1;
1476 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
/* Re-fetch TOP: pushing to the edge-args vector may have
reallocated it, invalidating the old pointer.  */
1478 top = IPA_EDGE_REF (cs);
1487 /* Recursively traverse subtree of NODE (including node) made of inlined
1488 cgraph_edges when CS has been inlined and invoke
1489 update_indirect_edges_after_inlining on all nodes and
1490 update_jump_functions_after_inlining on all non-inlined edges that lead out
1491 of this subtree.  Newly discovered indirect edges will be added to
1492 *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
created.  */
1496 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1497 struct cgraph_node *node,
1498 VEC (cgraph_edge_p, heap) **new_edges)
1500 struct cgraph_edge *e;
1503 res = update_indirect_edges_after_inlining (cs, node, new_edges);
/* Recurse into edges that have themselves been inlined; for the others
just update their jump functions.  */
1505 for (e = node->callees; e; e = e->next_callee)
1506 if (!e->inline_failed)
1507 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1509 update_jump_functions_after_inlining (cs, e);
1514 /* Update jump functions and call note functions on inlining the call site CS.
1515 CS is expected to lead to a node already cloned by
1516 cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
1517 *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
created.  */
1521 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1522 VEC (cgraph_edge_p, heap) **new_edges)
1524 /* FIXME lto: We do not stream out indirect call information.  */
1528 /* Do nothing if the preparation phase has not been carried out yet
1529 (i.e. during early inlining).  */
1530 if (!ipa_node_params_vector)
1532 gcc_assert (ipa_edge_args_vector);
1534 return propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1537 /* Frees all dynamically allocated structures that the argument info points
to and clears ARGS itself.  */
1541 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
1543 if (args->jump_functions)
1544 ggc_free (args->jump_functions);
1546 memset (args, 0, sizeof (*args));
1549 /* Free all ipa_edge structures.  Releases every element's substructures
and then the (GC-allocated) vector itself.  */
1552 ipa_free_all_edge_args (void)
1555 struct ipa_edge_args *args;
1558 VEC_iterate (ipa_edge_args_t, ipa_edge_args_vector, i, args);
1560 ipa_free_edge_args_substructures (args);
1562 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
1563 ipa_edge_args_vector = NULL;
1566 /* Frees all dynamically allocated structures that the param info points
to and clears INFO itself.  */
1570 ipa_free_node_params_substructures (struct ipa_node_params *info)
1573 free (info->params);
1575 memset (info, 0, sizeof (*info));
1578 /* Free all ipa_node_params structures.  Releases every element's
substructures and then the heap-allocated vector itself.  */
1581 ipa_free_all_node_params (void)
1584 struct ipa_node_params *info;
1587 VEC_iterate (ipa_node_params_t, ipa_node_params_vector, i, info);
1589 ipa_free_node_params_substructures (info);
1591 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
1592 ipa_node_params_vector = NULL;
1595 /* Hook that is called by cgraph.c when an edge is removed.  Frees the
edge's argument info, if any has been allocated for it.  */
1598 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
1600 /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
1601 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
1602 <= (unsigned)cs->uid)
1604 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1607 /* Hook that is called by cgraph.c when a node is removed.  Frees the
node's parameter info, if any has been allocated for it.  */
1610 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1612 /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
1613 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
1614 <= (unsigned)node->uid)
1616 ipa_free_node_params_substructures (IPA_NODE_REF (node));
1619 /* Helper function to duplicate an array of size N that is at SRC and store a
1620 pointer to it to DST.  Nothing is done if SRC is NULL.  */
1623 duplicate_array (void *src, size_t n)
1635 /* Like duplicate_array but in GGC (garbage-collected) memory.  */
1638 duplicate_ggc_array (void *src, size_t n)
1650 /* Hook that is called by cgraph.c when an edge is duplicated.  Copies the
jump functions of SRC to DST and carries over the indirect-inlining
processed flag.  */
1653 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
1654 __attribute__((unused)) void *data)
1656 struct ipa_edge_args *old_args, *new_args;
1659 ipa_check_create_edge_args ();
1661 old_args = IPA_EDGE_REF (src);
1662 new_args = IPA_EDGE_REF (dst);
1664 arg_count = ipa_get_cs_argument_count (old_args);
1665 ipa_set_cs_argument_count (new_args, arg_count);
/* Jump functions live in GC memory, so duplicate them there too.  */
1666 new_args->jump_functions = (struct ipa_jump_func *)
1667 duplicate_ggc_array (old_args->jump_functions,
1668 sizeof (struct ipa_jump_func) * arg_count);
1670 if (iinlining_processed_edges
1671 && bitmap_bit_p (iinlining_processed_edges, src->uid))
1672 bitmap_set_bit (iinlining_processed_edges, dst->uid);
1675 /* Hook that is called by cgraph.c when a node is duplicated.  Copies the
parameter descriptors and related IPA-CP bookkeeping of SRC to DST.  */
1678 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
1679 __attribute__((unused)) void *data)
1681 struct ipa_node_params *old_info, *new_info;
1684 ipa_check_create_node_params ();
1685 old_info = IPA_NODE_REF (src);
1686 new_info = IPA_NODE_REF (dst);
1687 param_count = ipa_get_param_count (old_info);
1689 ipa_set_param_count (new_info, param_count);
1690 new_info->params = (struct ipa_param_descriptor *)
1691 duplicate_array (old_info->params,
1692 sizeof (struct ipa_param_descriptor) * param_count);
1693 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
1694 new_info->count_scale = old_info->count_scale;
1697 /* Register our cgraph hooks if they are not already there.  Each holder
doubles as the "already registered" flag.  */
1700 ipa_register_cgraph_hooks (void)
1702 if (!edge_removal_hook_holder)
1703 edge_removal_hook_holder =
1704 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
1705 if (!node_removal_hook_holder)
1706 node_removal_hook_holder =
1707 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
1708 if (!edge_duplication_hook_holder)
1709 edge_duplication_hook_holder =
1710 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
1711 if (!node_duplication_hook_holder)
1712 node_duplication_hook_holder =
1713 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
1716 /* Unregister our cgraph hooks and clear the holders so a later
ipa_register_cgraph_hooks can re-register them.  */
1719 ipa_unregister_cgraph_hooks (void)
1721 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
1722 edge_removal_hook_holder = NULL;
1723 cgraph_remove_node_removal_hook (node_removal_hook_holder);
1724 node_removal_hook_holder = NULL;
1725 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
1726 edge_duplication_hook_holder = NULL;
1727 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
1728 node_duplication_hook_holder = NULL;
1731 /* Allocate all data structures necessary for indirect inlining.  */
1734 ipa_create_all_structures_for_iinln (void)
1736 iinlining_processed_edges = BITMAP_ALLOC (NULL);
1739 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1740 longer needed after ipa-cp.  Kept alive when indirect inlining still
needs them.  */
1743 ipa_free_all_structures_after_ipa_cp (void)
1745 if (!flag_indirect_inlining)
1747 ipa_free_all_edge_args ();
1748 ipa_free_all_node_params ();
1749 ipa_unregister_cgraph_hooks ();
1753 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1754 longer needed after indirect inlining.  Also releases the bitmap of
processed edges.  */
1757 ipa_free_all_structures_after_iinln (void)
1759 BITMAP_FREE (iinlining_processed_edges);
1761 ipa_free_all_edge_args ();
1762 ipa_free_all_node_params ();
1763 ipa_unregister_cgraph_hooks ();
1766 /* Print ipa_tree_map data structures of function NODE to file F.  */
1770 ipa_print_node_params (FILE * f, struct cgraph_node *node)
1774 struct ipa_node_params *info;
1776 if (!node->analyzed)
1778 info = IPA_NODE_REF (node);
1779 fprintf (f, "  function  %s parameter descriptors:\n",
1780 cgraph_node_name (node));
1781 count = ipa_get_param_count (info);
1782 for (i = 0; i < count; i++)
1784 temp = ipa_get_param (info, i);
/* Print the parameter's name when available, plus its modified/used
flags from the analysis.  */
1785 if (TREE_CODE (temp) == PARM_DECL)
1786 fprintf (f, "    param %d : %s", i,
1788 ? (*lang_hooks.decl_printable_name) (temp, 2)
1790 if (ipa_is_param_modified (info, i))
1791 fprintf (f, " modified");
1792 if (ipa_is_param_used (info, i))
1793 fprintf (f, " used");
1798 /* Print ipa_tree_map data structures of all functions in the
callgraph to file F.  */
1802 ipa_print_all_params (FILE * f)
1804 struct cgraph_node *node;
1806 fprintf (f, "\nFunction parameters:\n");
1807 for (node = cgraph_nodes; node; node = node->next)
1808 ipa_print_node_params (f, node);
1811 /* Return a heap allocated vector containing formal parameters of FNDECL.
The caller is responsible for freeing the vector.  */
1814 ipa_get_vector_of_formal_parms (tree fndecl)
1816 VEC(tree, heap) *args;
1820 count = count_formal_params_1 (fndecl);
1821 args = VEC_alloc (tree, heap, count);
1822 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = TREE_CHAIN (parm))
1823 VEC_quick_push (tree, args, parm);
1828 /* Return a heap allocated vector containing types of formal parameters of
1829 function type FNTYPE.  The caller is responsible for freeing it.  */
1831 static inline VEC(tree, heap) *
1832 get_vector_of_formal_parm_types (tree fntype)
1834 VEC(tree, heap) *types;
/* First pass counts the parameters so the vector can be sized exactly;
the second pass fills it in.  */
1838 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
1841 types = VEC_alloc (tree, heap, count);
1842 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
1843 VEC_quick_push (tree, types, TREE_VALUE (t));
1848 /* Modify the function declaration FNDECL and its type according to the plan in
1849 ADJUSTMENTS.  It also sets base fields of individual adjustments structures
1850 to reflect the actual parameters being modified which are determined by the
1851 base_index field.  SYNTH_PARM_PREFIX (or "SYNTH" if NULL) is used to name
newly created replacement parameters.  */
1854 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
1855 const char *synth_parm_prefix)
1857 VEC(tree, heap) *oparms, *otypes;
1858 tree orig_type, new_type = NULL;
1859 tree old_arg_types, t, new_arg_types = NULL;
1860 tree parm, *link = &DECL_ARGUMENTS (fndecl);
1861 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
1862 tree new_reversed = NULL;
1863 bool care_for_types, last_parm_void;
1865 if (!synth_parm_prefix)
1866 synth_parm_prefix = "SYNTH";
1868 oparms = ipa_get_vector_of_formal_parms (fndecl);
1869 orig_type = TREE_TYPE (fndecl);
1870 old_arg_types = TYPE_ARG_TYPES (orig_type);
1872 /* The following test is an ugly hack, some functions simply don't have any
1873 arguments in their type.  This is probably a bug but well...  */
1874 care_for_types = (old_arg_types != NULL_TREE);
1877 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
1879 otypes = get_vector_of_formal_parm_types (orig_type);
/* A void-terminated type list has one more entry than there are
actual parameter decls.  */
1881 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
1883 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
1887 last_parm_void = false;
/* Walk the adjustment plan, building the new DECL_ARGUMENTS chain via
LINK and accumulating the new TYPE_ARG_TYPES in reverse order.  */
1891 for (i = 0; i < len; i++)
1893 struct ipa_parm_adjustment *adj;
1896 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
1897 parm = VEC_index (tree, oparms, adj->base_index);
/* An unchanged parameter is kept as-is...  */
1900 if (adj->copy_param)
1903 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
1907 link = &TREE_CHAIN (parm);
/* ...otherwise, unless removed, a synthetic replacement parameter
is created (a pointer when the adjustment is by reference).  */
1909 else if (!adj->remove_param)
1915 ptype = build_pointer_type (adj->type);
1920 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
1922 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
1924 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
1926 DECL_ARTIFICIAL (new_parm) = 1;
1927 DECL_ARG_TYPE (new_parm) = ptype;
1928 DECL_CONTEXT (new_parm) = fndecl;
1929 TREE_USED (new_parm) = 1;
1930 DECL_IGNORED_P (new_parm) = 1;
1931 layout_decl (new_parm, 0);
1933 add_referenced_var (new_parm);
1934 mark_sym_for_renaming (new_parm);
1936 adj->reduction = new_parm;
1940 link = &TREE_CHAIN (new_parm);
1948 new_reversed = nreverse (new_arg_types);
1952 TREE_CHAIN (new_arg_types) = void_list_node;
1954 new_reversed = void_list_node;
1958 /* Use copy_node to preserve as much as possible from original type
1959 (debug info, attribute lists etc.)
1960 Exception is METHOD_TYPEs must have THIS argument.
1961 When we are asked to remove it, we need to build new FUNCTION_TYPE
instead.  */
1963 if (TREE_CODE (orig_type) != METHOD_TYPE
1964 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
1965 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
1967 new_type = copy_node (orig_type);
1968 TYPE_ARG_TYPES (new_type) = new_reversed;
1973 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
1975 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
1976 DECL_VINDEX (fndecl) = NULL_TREE;
1979 /* This is a new type, not a copy of an old type.  Need to reassociate
1980 variants.  We can handle everything except the main variant lazily.  */
1981 t = TYPE_MAIN_VARIANT (orig_type);
1984 TYPE_MAIN_VARIANT (new_type) = t;
1985 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
1986 TYPE_NEXT_VARIANT (t) = new_type;
1990 TYPE_MAIN_VARIANT (new_type) = new_type;
1991 TYPE_NEXT_VARIANT (new_type) = NULL;
1994 TREE_TYPE (fndecl) = new_type;
1996 VEC_free (tree, heap, otypes);
1997 VEC_free (tree, heap, oparms);
2000 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2001 If this is a directly recursive call, CS must be NULL.  Otherwise it must
2002 contain the corresponding call graph edge.  STMT is the call statement
being rewritten; it is replaced by a newly built call.  */
2005 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2006 ipa_parm_adjustment_vec adjustments)
2008 VEC(tree, heap) *vargs;
2010 gimple_stmt_iterator gsi;
2014 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2015 vargs = VEC_alloc (tree, heap, len);
2017 gsi = gsi_for_stmt (stmt);
/* Build the new argument vector, one entry per adjustment.  */
2018 for (i = 0; i < len; i++)
2020 struct ipa_parm_adjustment *adj;
2022 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
/* Copied parameters pass the original argument through unchanged.  */
2024 if (adj->copy_param)
2026 tree arg = gimple_call_arg (stmt, adj->base_index);
2028 VEC_quick_push (tree, vargs, arg);
/* Non-removed, non-copied parameters need a reference into the
original argument at the adjustment's offset and type.  */
2030 else if (!adj->remove_param)
2032 tree expr, orig_expr;
2033 bool allow_ptr, repl_found;
2035 orig_expr = expr = gimple_call_arg (stmt, adj->base_index);
2036 if (TREE_CODE (expr) == ADDR_EXPR)
2039 expr = TREE_OPERAND (expr, 0);
2044 repl_found = build_ref_for_offset (&expr, TREE_TYPE (expr),
2045 adj->offset, adj->type,
2050 expr = build_fold_addr_expr (expr);
/* Fall-back path: no matching component reference was built, so
compute the address arithmetically via POINTER_PLUS_EXPR.  */
2054 tree ptrtype = build_pointer_type (adj->type);
2056 if (!POINTER_TYPE_P (TREE_TYPE (expr)))
2057 expr = build_fold_addr_expr (expr);
2058 if (!useless_type_conversion_p (ptrtype, TREE_TYPE (expr)))
2059 expr = fold_convert (ptrtype, expr);
2060 expr = fold_build2 (POINTER_PLUS_EXPR, ptrtype, expr,
2061 build_int_cst (sizetype,
2062 adj->offset / BITS_PER_UNIT));
2064 expr = fold_build1 (INDIRECT_REF, adj->type, expr);
2066 expr = force_gimple_operand_gsi (&gsi, expr,
2068 || is_gimple_reg_type (adj->type),
2069 NULL, true, GSI_SAME_STMT);
2070 VEC_quick_push (tree, vargs, expr);
2074 if (dump_file && (dump_flags & TDF_DETAILS))
2076 fprintf (dump_file, "replacing stmt:");
2077 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
/* Build the replacement call, preserving lhs, location, flags and
static chain of the original statement.  */
2080 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
2081 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2082 VEC_free (tree, heap, vargs);
2083 if (gimple_call_lhs (stmt))
2084 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2086 gimple_set_block (new_stmt, gimple_block (stmt));
2087 if (gimple_has_location (stmt))
2088 gimple_set_location (new_stmt, gimple_location (stmt));
2089 gimple_call_copy_flags (new_stmt, stmt);
2090 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2092 if (dump_file && (dump_flags & TDF_DETAILS))
2094 fprintf (dump_file, "with stmt:");
2095 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2096 fprintf (dump_file, "\n");
2098 gsi_replace (&gsi, new_stmt, true);
2100 cgraph_set_call_stmt (cs, new_stmt);
2101 update_ssa (TODO_update_ssa);
2102 free_dominance_info (CDI_DOMINATORS);
2105 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */
2108 index_in_adjustments_multiple_times_p (int base_index,
2109 ipa_parm_adjustment_vec adjustments)
2111 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2114 for (i = 0; i < len; i++)
2116 struct ipa_parm_adjustment *adj;
2117 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2119 if (adj->base_index == base_index)
2131 /* Return adjustments that should have the same effect on function parameters
2132 and call arguments as if they were first changed according to adjustments in
2133 INNER and then by adjustments in OUTER.  */
2135 ipa_parm_adjustment_vec
2136 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
2137 ipa_parm_adjustment_vec outer)
2139 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
2140 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
2142 ipa_parm_adjustment_vec adjustments, tmp;
/* TMP collects the surviving (non-removed) inner adjustments; OUTER's
indices refer into this filtered set.  */
2144 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
2145 for (i = 0; i < inlen; i++)
2147 struct ipa_parm_adjustment *n;
2148 n = VEC_index (ipa_parm_adjustment_t, inner, i);
2150 if (n->remove_param)
2153 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
2156 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
2157 for (i = 0; i < outlen; i++)
2159 struct ipa_parm_adjustment *r;
2160 struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
2162 struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
2165 gcc_assert (!in->remove_param);
/* A removal in OUTER turns into a removal of the underlying original
parameter, unless the same base is referenced elsewhere too.  */
2166 if (out->remove_param)
2168 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
2170 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2171 memset (r, 0, sizeof (*r));
2172 r->remove_param = true;
2177 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2178 memset (r, 0, sizeof (*r));
2179 r->base_index = in->base_index;
2180 r->type = out->type;
2182 /* FIXME:  Create nonlocal value too.  */
/* Offsets compose additively; a plain copy contributes nothing.  */
2184 if (in->copy_param && out->copy_param)
2185 r->copy_param = true;
2186 else if (in->copy_param)
2187 r->offset = out->offset;
2188 else if (out->copy_param)
2189 r->offset = in->offset;
2191 r->offset = in->offset + out->offset;
/* Re-append the removals from INNER that were filtered out above.  */
2194 for (i = 0; i < inlen; i++)
2196 struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
2199 if (n->remove_param)
2200 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
2203 VEC_free (ipa_parm_adjustment_t, heap, tmp);
2207 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
2208 friendly way, assuming they are meant to be applied to FNDECL.  */
2211 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2214 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2216 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2218 fprintf (file, "IPA param adjustments: ");
2219 for (i = 0; i < len; i++)
2221 struct ipa_parm_adjustment *adj;
2222 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2225 fprintf (file, "                 ");
2229 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2230 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2233 fprintf (file, ", base: ");
2234 print_generic_expr (file, adj->base, 0);
2238 fprintf (file, ", reduction: ");
2239 print_generic_expr (file, adj->reduction, 0);
2241 if (adj->new_ssa_base)
2243 fprintf (file, ", new_ssa_base: ");
2244 print_generic_expr (file, adj->new_ssa_base, 0);
2247 if (adj->copy_param)
2248 fprintf (file, ", copy_param");
2249 else if (adj->remove_param)
2250 fprintf (file, ", remove_param");
2252 fprintf (file, ", offset %li", (long) adj->offset);
2254 fprintf (file, ", by_ref");
2255 print_node_brief (file, ", type: ", adj->type, 0);
2256 fprintf (file, "\n");
2258 VEC_free (tree, heap, parms);
2261 /* Stream out jump function JUMP_FUNC to OB.  The payload written per type
must mirror what ipa_read_jump_function expects.  */
2264 ipa_write_jump_function (struct output_block *ob,
2265 struct ipa_jump_func *jump_func)
2267 lto_output_uleb128_stream (ob->main_stream,
2270 switch (jump_func->type)
2272 case IPA_JF_UNKNOWN:
2274 case IPA_JF_KNOWN_TYPE:
2275 lto_output_tree (ob, jump_func->value.base_binfo, true);
2278 lto_output_tree (ob, jump_func->value.constant, true);
2280 case IPA_JF_PASS_THROUGH:
2281 lto_output_tree (ob, jump_func->value.pass_through.operand, true);
2282 lto_output_uleb128_stream (ob->main_stream,
2283 jump_func->value.pass_through.formal_id);
2284 lto_output_uleb128_stream (ob->main_stream,
2285 jump_func->value.pass_through.operation);
2287 case IPA_JF_ANCESTOR:
2288 lto_output_uleb128_stream (ob->main_stream,
2289 jump_func->value.ancestor.offset);
2290 lto_output_tree (ob, jump_func->value.ancestor.type, true);
2291 lto_output_uleb128_stream (ob->main_stream,
2292 jump_func->value.ancestor.formal_id);
2294 case IPA_JF_CONST_MEMBER_PTR:
2295 lto_output_tree (ob, jump_func->value.member_cst.pfn, true);
2296 lto_output_tree (ob, jump_func->value.member_cst.delta, false);
2301 /* Read in jump function JUMP_FUNC from IB.  Must mirror the layout
written by ipa_write_jump_function.  */
2304 ipa_read_jump_function (struct lto_input_block *ib,
2305 struct ipa_jump_func *jump_func,
2306 struct data_in *data_in)
2308 jump_func->type = (enum jump_func_type) lto_input_uleb128 (ib);
2310 switch (jump_func->type)
2312 case IPA_JF_UNKNOWN:
2314 case IPA_JF_KNOWN_TYPE:
2315 jump_func->value.base_binfo = lto_input_tree (ib, data_in);
2318 jump_func->value.constant = lto_input_tree (ib, data_in);
2320 case IPA_JF_PASS_THROUGH:
2321 jump_func->value.pass_through.operand = lto_input_tree (ib, data_in);
2322 jump_func->value.pass_through.formal_id = lto_input_uleb128 (ib);
2323 jump_func->value.pass_through.operation = (enum tree_code) lto_input_uleb128 (ib);
2325 case IPA_JF_ANCESTOR:
2326 jump_func->value.ancestor.offset = lto_input_uleb128 (ib);
2327 jump_func->value.ancestor.type = lto_input_tree (ib, data_in);
2328 jump_func->value.ancestor.formal_id = lto_input_uleb128 (ib);
2330 case IPA_JF_CONST_MEMBER_PTR:
2331 jump_func->value.member_cst.pfn = lto_input_tree (ib, data_in);
2332 jump_func->value.member_cst.delta = lto_input_tree (ib, data_in);
2337 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
2338 relevant to indirect inlining to OB.  */
2341 ipa_write_indirect_edge_info (struct output_block *ob,
2342 struct cgraph_edge *cs)
2344 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2345 struct bitpack_d *bp;
2347 lto_output_sleb128_stream (ob->main_stream, ii->param_index);
2348 lto_output_sleb128_stream (ob->main_stream, ii->anc_offset);
2349 bp = bitpack_create ();
2350 bp_pack_value (bp, ii->polymorphic, 1);
2351 lto_output_bitpack (ob->main_stream, bp);
2352 bitpack_delete (bp);
/* OBJ_TYPE_REF token and type are only meaningful for virtual calls.  */
2354 if (ii->polymorphic)
2356 lto_output_sleb128_stream (ob->main_stream, ii->otr_token);
2357 lto_output_tree (ob, ii->otr_type, true);
2361 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2362 relevant to indirect inlining from IB.  Mirrors
ipa_write_indirect_edge_info.  */
2365 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2366 struct data_in *data_in ATTRIBUTE_UNUSED,
2367 struct cgraph_edge *cs)
2369 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2370 struct bitpack_d *bp;
2372 ii->param_index = (int) lto_input_sleb128 (ib);
2373 ii->anc_offset = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2374 bp = lto_input_bitpack (ib);
2375 ii->polymorphic = bp_unpack_value (bp, 1);
2376 bitpack_delete (bp);
2377 if (ii->polymorphic)
2379 ii->otr_token = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2380 ii->otr_type = lto_input_tree (ib, data_in);
2384 /* Stream out NODE info to OB: per-parameter flags packed as bits, then
jump functions of all outgoing edges and indirect-edge info.  */
2387 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
2390 lto_cgraph_encoder_t encoder;
2391 struct ipa_node_params *info = IPA_NODE_REF (node);
2393 struct cgraph_edge *e;
2394 struct bitpack_d *bp;
2396 encoder = ob->decl_state->cgraph_node_encoder;
2397 node_ref = lto_cgraph_encoder_encode (encoder, node);
2398 lto_output_uleb128_stream (ob->main_stream, node_ref);
2400 bp = bitpack_create ();
2401 bp_pack_value (bp, info->called_with_var_arguments, 1);
2402 bp_pack_value (bp, info->uses_analysis_done, 1);
2403 gcc_assert (info->modification_analysis_done
2404 || ipa_get_param_count (info) == 0);
2405 gcc_assert (!info->node_enqueued);
2406 gcc_assert (!info->ipcp_orig_node);
2407 for (j = 0; j < ipa_get_param_count (info); j++)
2409 bp_pack_value (bp, info->params[j].modified, 1);
2410 bp_pack_value (bp, info->params[j].used, 1);
2412 lto_output_bitpack (ob->main_stream, bp);
2413 bitpack_delete (bp);
2414 for (e = node->callees; e; e = e->next_callee)
2416 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2418 lto_output_uleb128_stream (ob->main_stream,
2419 ipa_get_cs_argument_count (args));
2420 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2421 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2423 for (e = node->indirect_calls; e; e = e->next_callee)
2424 ipa_write_indirect_edge_info (ob, e);
2427 /* Stream in NODE info from IB.  Mirrors ipa_write_node_info.  */
2430 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
2431 struct data_in *data_in)
2433 struct ipa_node_params *info = IPA_NODE_REF (node);
2435 struct cgraph_edge *e;
2436 struct bitpack_d *bp;
2438 ipa_initialize_node_params (node);
2440 bp = lto_input_bitpack (ib);
2441 info->called_with_var_arguments = bp_unpack_value (bp, 1);
2442 info->uses_analysis_done = bp_unpack_value (bp, 1);
/* The writer asserted these analyses were done whenever there were any
parameters, so restore the flags accordingly.  */
2443 if (ipa_get_param_count (info) != 0)
2445 info->modification_analysis_done = true;
2446 info->uses_analysis_done = true;
2448 info->node_enqueued = false;
2449 for (k = 0; k < ipa_get_param_count (info); k++)
2451 info->params[k].modified = bp_unpack_value (bp, 1);
2452 info->params[k].used = bp_unpack_value (bp, 1);
2454 bitpack_delete (bp);
2455 for (e = node->callees; e; e = e->next_callee)
2457 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2458 int count = lto_input_uleb128 (ib);
2460 ipa_set_cs_argument_count (args, count);
2464 args->jump_functions = GGC_CNEWVEC (struct ipa_jump_func,
2465 ipa_get_cs_argument_count (args));
2466 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2467 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
2469 for (e = node->indirect_calls; e; e = e->next_callee)
2470 ipa_read_indirect_edge_info (ib, data_in, e);
2473 /* Write jump functions for nodes in SET into the LTO jump-functions
section: a node count followed by one record per analyzed node.  */
2476 ipa_prop_write_jump_functions (cgraph_node_set set)
2478 struct cgraph_node *node;
2479 struct output_block *ob = create_output_block (LTO_section_jump_functions);
2480 unsigned int count = 0;
2481 cgraph_node_set_iterator csi;
2483 ob->cgraph_node = NULL;
/* First pass merely counts the nodes that will be emitted.  */
2485 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2487 node = csi_node (csi);
2488 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2492 lto_output_uleb128_stream (ob->main_stream, count);
2494 /* Process all of the functions.  */
2495 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2497 node = csi_node (csi);
2498 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2499 ipa_write_node_info (ob, node);
2501 lto_output_1_stream (ob->main_stream, 0);
2502 produce_asm (ob, NULL);
2503 destroy_output_block (ob);
2506 /* Read section in file FILE_DATA of length LEN with data DATA.  Decodes
the node count and then one node-info record per node.  */
2509 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
2512 const struct lto_function_header *header =
2513 (const struct lto_function_header *) data;
2514 const int32_t cfg_offset = sizeof (struct lto_function_header);
2515 const int32_t main_offset = cfg_offset + header->cfg_size;
2516 const int32_t string_offset = main_offset + header->main_size;
2517 struct data_in *data_in;
2518 struct lto_input_block ib_main;
2522 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2526 lto_data_in_create (file_data, (const char *) data + string_offset,
2527 header->string_size, NULL);
2528 count = lto_input_uleb128 (&ib_main);
2530 for (i = 0; i < count; i++)
2533 struct cgraph_node *node;
2534 lto_cgraph_encoder_t encoder;
2536 index = lto_input_uleb128 (&ib_main);
2537 encoder = file_data->cgraph_node_encoder;
2538 node = lto_cgraph_encoder_deref (encoder, index);
2539 gcc_assert (node->analyzed);
2540 ipa_read_node_info (&ib_main, node, data_in);
2542 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
2544 lto_data_in_delete (data_in);
2547 /* Read ipcp jump functions. */
/* Top-level LTO reader for IPA jump functions: ensures the node-params and
   edge-args vectors exist, registers the cgraph maintenance hooks, then
   walks every input file's decl data and hands any
   LTO_section_jump_functions payload to ipa_prop_read_section.
   NOTE(review): extraction dropped the declarations of J and LEN and the
   brace/NULL-check lines around the section read — code left
   byte-identical; presumably DATA is checked for NULL before use.  */
2550 ipa_prop_read_jump_functions (void)
2552 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2553 struct lto_file_decl_data *file_data;
2556 ipa_check_create_node_params ();
2557 ipa_check_create_edge_args ();
2558 ipa_register_cgraph_hooks ();
/* file_data_vec is a NULL-terminated array; iterate all input files.  */
2560 while ((file_data = file_data_vec[j++]))
2563 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
2566 ipa_prop_read_section (file_data, data, len);
2570 /* After merging units, we can get mismatch in argument counts.
2571 Also decl merging might have rendered parameter lists obsolete.
2572 Also compute called_with_variable_arg info. */
/* Fix up IPA summaries after LTO units have been merged.  Re-initializes
   parameter descriptors for every cgraph node, then scans all call edges:
   if an edge's recorded argument count no longer matches the callee's
   parameter count (decl merging can change parameter lists), the callee
   is marked as called with variable arguments so later propagation
   treats its summary as unusable.
   NOTE(review): brace lines were stripped by extraction — code left
   byte-identical.  */
2575 ipa_update_after_lto_read (void)
2577 struct cgraph_node *node;
2578 struct cgraph_edge *cs;
/* Make sure the summary vectors exist before touching them.  */
2580 ipa_check_create_node_params ();
2581 ipa_check_create_edge_args ();
2583 for (node = cgraph_nodes; node; node = node->next)
2585 ipa_initialize_node_params (node);
/* Second walk: detect argument-count mismatches introduced by merging.  */
2587 for (node = cgraph_nodes; node; node = node->next)
2589 for (cs = node->callees; cs; cs = cs->next_callee)
2591 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
2592 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
2593 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));