1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
25 #include "langhooks.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
46 struct param_analysis_info
/* NOTE(review): the struct body is truncated in this extraction.  A `modified'
   flag is read and written elsewhere (see is_parm_modified_before_call), so at
   least one member besides the bitmap is missing here — confirm against the
   full source. */
/* Statements already walked when checking for modifications of the parameter;
   lazily allocated by walk_aliased_vdefs and freed in ipa_analyze_node.  */
49 bitmap visited_statements;
52 /* Vector where the parameter infos are actually stored. */
53 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
54 /* Vector where the edge argument infos are actually stored. */
55 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
57 /* Bitmap with all UIDs of call graph edges that have been already processed
58 by indirect inlining. */
59 static bitmap iinlining_processed_edges;
61 /* Holders of ipa cgraph hooks: */
/* Registered so per-node/per-edge IPA data can be kept in sync with call graph
   mutations (removal and duplication of nodes and edges).  */
62 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
63 static struct cgraph_node_hook_list *node_removal_hook_holder;
64 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
65 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
67 /* Add cgraph NODE described by INFO to the worklist WL regardless of whether
68 it is in one or not. It should almost never be used directly, as opposed to
69 ipa_push_func_to_list. */
72 ipa_push_func_to_list_1 (struct ipa_func_list **wl,
73 struct cgraph_node *node,
74 struct ipa_node_params *info)
76 struct ipa_func_list *temp;
/* Mark as enqueued first so callers checking node_enqueued see it.  */
78 info->node_enqueued = 1;
79 temp = XCNEW (struct ipa_func_list);
/* NOTE(review): extraction is truncated here — the code storing NODE into TEMP
   and linking TEMP onto the head of *WL is not visible in this copy.  */
85 /* Initialize worklist to contain all functions. */
87 struct ipa_func_list *
88 ipa_init_func_list (void)
90 struct cgraph_node *node;
91 struct ipa_func_list * wl;
/* Walk every call graph node and push each one onto the fresh worklist.
   NOTE(review): the initialization of WL to NULL and the analyzed-node filter
   are truncated from this copy — confirm against the full source.  */
94 for (node = cgraph_nodes; node; node = node->next)
97 struct ipa_node_params *info = IPA_NODE_REF (node);
98 /* Unreachable nodes should have been eliminated before ipcp and
100 gcc_assert (node->needed || node->reachable);
101 ipa_push_func_to_list_1 (&wl, node, info);
107 /* Remove a function from the worklist WL and return it. */
110 ipa_pop_func_from_list (struct ipa_func_list **wl)
112 struct ipa_node_params *info;
113 struct ipa_func_list *first;
114 struct cgraph_node *node;
/* NOTE(review): the lines unlinking FIRST from *WL, extracting NODE and
   freeing FIRST are truncated from this copy.  */
121 info = IPA_NODE_REF (node);
/* Clear the flag set by ipa_push_func_to_list_1 so the node can be
   re-enqueued later.  */
122 info->node_enqueued = 0;
126 /* Return index of the formal whose tree is PTREE in function which corresponds
130 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
134 count = ipa_get_param_count (info);
/* Linear scan over the parameter descriptors; presumably returns I on a match
   and -1 when PTREE is not a formal of this function (the return statements
   are truncated from this copy — confirm).  */
135 for (i = 0; i < count; i++)
136 if (ipa_get_param(info, i) == ptree)
142 /* Populate the param_decl field in parameter descriptors of INFO that
143 corresponds to NODE. */
146 ipa_populate_param_decls (struct cgraph_node *node,
147 struct ipa_node_params *info)
155 fnargs = DECL_ARGUMENTS (fndecl);
/* Walk the PARM_DECL chain in declaration order; param_num indexes the
   descriptor array allocated by ipa_initialize_node_params.  NOTE(review):
   the declarations of fndecl/parm/param_num and the param_num increment are
   truncated from this copy.  */
157 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
159 info->params[param_num].decl = parm;
164 /* Return how many formal parameters FNDECL has. */
167 count_formal_params_1 (tree fndecl)
/* Counts links of the DECL_ARGUMENTS chain; the counter declaration,
   increment and return are truncated from this copy.  */
172 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
178 /* Count number of formal parameters in NODE. Store the result to the
179 appropriate field of INFO. */
182 ipa_count_formal_params (struct cgraph_node *node,
183 struct ipa_node_params *info)
187 param_num = count_formal_params_1 (node->decl);
188 ipa_set_param_count (info, param_num);
191 /* Initialize the ipa_node_params structure associated with NODE by counting
192 the function parameters, creating the descriptors and populating their
196 ipa_initialize_node_params (struct cgraph_node *node)
198 struct ipa_node_params *info = IPA_NODE_REF (node);
/* NOTE(review): an early-return guard (already-initialized check) appears to
   be truncated from this copy; without it the descriptor array would be
   reallocated on every call — confirm against the full source.  */
202 ipa_count_formal_params (node, info);
/* Zero-initialized descriptor per formal; decls filled in next.  */
203 info->params = XCNEWVEC (struct ipa_param_descriptor,
204 ipa_get_param_count (info));
205 ipa_populate_param_decls (node, info);
209 /* Count number of arguments callsite CS has and store it in
210 ipa_edge_args structure corresponding to this callsite. */
213 ipa_count_arguments (struct cgraph_edge *cs)
218 stmt = cs->call_stmt;
219 gcc_assert (is_gimple_call (stmt));
220 arg_num = gimple_call_num_args (stmt);
/* Grow the edge-args vector on demand so IPA_EDGE_REF (indexed by edge uid)
   is valid for this edge.  */
221 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
222 <= (unsigned) cgraph_edge_max_uid)
223 VEC_safe_grow_cleared (ipa_edge_args_t, gc,
224 ipa_edge_args_vector, cgraph_edge_max_uid + 1);
225 ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
228 /* Print the jump functions associated with call graph edge CS to file F. */
231 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
235 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
/* One dump line (or small group of lines) per actual argument, keyed by the
   jump function type recorded for it.  */
236 for (i = 0; i < count; i++)
238 struct ipa_jump_func *jump_func;
239 enum jump_func_type type;
241 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
242 type = jump_func->type;
244 fprintf (f, " param %d: ", i);
245 if (type == IPA_JF_UNKNOWN)
246 fprintf (f, "UNKNOWN\n");
247 else if (type == IPA_JF_KNOWN_TYPE)
249 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
250 fprintf (f, "KNOWN TYPE, type in binfo is: ");
251 print_generic_expr (f, binfo_type, 0);
252 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
254 else if (type == IPA_JF_CONST)
256 tree val = jump_func->value.constant;
257 fprintf (f, "CONST: ");
258 print_generic_expr (f, val, 0);
/* For &CONST_DECL also dump the constant's initializer, which is the value
   actually passed.  */
259 if (TREE_CODE (val) == ADDR_EXPR
260 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
263 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
268 else if (type == IPA_JF_CONST_MEMBER_PTR)
270 fprintf (f, "CONST MEMBER PTR: ");
271 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
273 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
276 else if (type == IPA_JF_PASS_THROUGH)
278 fprintf (f, "PASS THROUGH: ");
279 fprintf (f, "%d, op %s ",
280 jump_func->value.pass_through.formal_id,
282 jump_func->value.pass_through.operation]);
283 if (jump_func->value.pass_through.operation != NOP_EXPR)
/* NOTE(review): these print to dump_file rather than the F parameter, unlike
   the rest of this function — looks like a latent bug; every other branch
   uses F.  Confirm and unify when the full source is available.  */
284 print_generic_expr (dump_file,
285 jump_func->value.pass_through.operand, 0);
286 fprintf (dump_file, "\n");
288 else if (type == IPA_JF_ANCESTOR)
290 fprintf (f, "ANCESTOR: ");
291 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
292 jump_func->value.ancestor.formal_id,
293 jump_func->value.ancestor.offset);
294 print_generic_expr (f, jump_func->value.ancestor.type, 0);
/* NOTE(review): dump_file used instead of F here too — same concern as
   above.  */
295 fprintf (dump_file, "\n");
301 /* Print the jump functions of all arguments on all call graph edges going from
305 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
307 struct cgraph_edge *cs;
310 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
/* Direct call sites first...  */
311 for (cs = node->callees; cs; cs = cs->next_callee)
313 if (!ipa_edge_args_info_available_for_edge_p (cs))
316 fprintf (f, " callsite %s/%i -> %s/%i : \n",
317 cgraph_node_name (node), node->uid,
318 cgraph_node_name (cs->callee), cs->callee->uid);
319 ipa_print_node_jump_functions_for_edge (f, cs);
/* ...then indirect ones, identified only by an ordinal and, when available,
   the call statement itself.  */
322 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
324 if (!ipa_edge_args_info_available_for_edge_p (cs))
329 fprintf (f, " indirect callsite %d for stmt ", i)
330 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
333 fprintf (f, " indirect callsite %d :\n", i);
334 ipa_print_node_jump_functions_for_edge (f, cs);
339 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
342 ipa_print_all_jump_functions (FILE *f)
344 struct cgraph_node *node;
346 fprintf (f, "\nJump functions:\n")
/* Simple delegation: one per-node dump for every call graph node.  */
347 for (node = cgraph_nodes; node; node = node->next)
349 ipa_print_node_jump_functions (f, node);
353 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
354 of an assignment statement STMT, try to find out whether NAME can be
355 described by a (possibly polynomial) pass-through jump-function or an
356 ancestor jump function and if so, write the appropriate function into
360 compute_complex_assign_jump_func (struct ipa_node_params *info,
361 struct ipa_jump_func *jfunc,
362 gimple stmt, tree name)
364 HOST_WIDE_INT offset, size, max_size;
368 op1 = gimple_assign_rhs1 (stmt);
369 op2 = gimple_assign_rhs2 (stmt);
/* Case 1: RHS1 is the default def of a formal parameter -> candidate for a
   (possibly arithmetic) pass-through jump function.  */
371 if (TREE_CODE (op1) == SSA_NAME
372 && SSA_NAME_IS_DEFAULT_DEF (op1))
374 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
/* An arithmetic pass-through needs an IP-invariant second operand, and
   (except for comparisons) a result type compatible with NAME.  */
380 if (!is_gimple_ip_invariant (op2)
381 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
382 && !useless_type_conversion_p (TREE_TYPE (name),
386 jfunc->type = IPA_JF_PASS_THROUGH;
387 jfunc->value.pass_through.formal_id = index;
388 jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
389 jfunc->value.pass_through.operand = op2;
/* A plain copy/nop conversion is a simple (NOP_EXPR) pass-through.  */
391 else if (gimple_assign_unary_nop_p (stmt))
393 jfunc->type = IPA_JF_PASS_THROUGH;
394 jfunc->value.pass_through.formal_id = index;
395 jfunc->value.pass_through.operation = NOP_EXPR;
/* Case 2: taking the address of a field of a RECORD_TYPE object pointed to by
   a formal parameter -> ancestor jump function.  */
400 if (TREE_CODE (op1) != ADDR_EXPR)
403 op1 = TREE_OPERAND (op1, 0);
404 type = TREE_TYPE (op1);
405 if (TREE_CODE (type) != RECORD_TYPE)
407 op1 = get_ref_base_and_extent (op1, &offset, &size, &max_size);
408 if (TREE_CODE (op1) != MEM_REF
409 /* If this is a varying address, punt. */
/* Fold the MEM_REF's constant byte offset into the bit offset computed by
   get_ref_base_and_extent.  */
413 offset += mem_ref_offset (op1).low * BITS_PER_UNIT;
414 op1 = TREE_OPERAND (op1, 0);
/* The base must itself be the default def of a formal parameter.  */
415 if (TREE_CODE (op1) != SSA_NAME
416 || !SSA_NAME_IS_DEFAULT_DEF (op1)
420 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
423 jfunc->type = IPA_JF_ANCESTOR;
424 jfunc->value.ancestor.formal_id = index;
425 jfunc->value.ancestor.offset = offset;
426 jfunc->value.ancestor.type = type;
431 /* Given that an actual argument is an SSA_NAME that is a result of a phi
432 statement PHI, try to find out whether NAME is in fact a
433 multiple-inheritance typecast from a descendant into an ancestor of a formal
434 parameter and thus can be described by an ancestor jump function and if so,
435 write the appropriate function into JFUNC.
437 Essentially we want to match the following pattern:
445 iftmp.1_3 = &obj_2(D)->D.1762;
448 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
449 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
453 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
454 struct ipa_jump_func *jfunc,
457 HOST_WIDE_INT offset, size, max_size;
459 basic_block phi_bb, assign_bb, cond_bb;
460 tree tmp, parm, expr;
/* The PHI must merge exactly two values: a NULL constant (the "pointer was
   NULL" path) and the adjusted ancestor pointer.  */
463 if (gimple_phi_num_args (phi) != 2)
466 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
467 tmp = PHI_ARG_DEF (phi, 0);
468 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
469 tmp = PHI_ARG_DEF (phi, 1);
/* The non-NULL value must be an SSA name defined by a statement (not a
   default def) and point to a RECORD_TYPE.  */
472 if (TREE_CODE (tmp) != SSA_NAME
473 || SSA_NAME_IS_DEFAULT_DEF (tmp)
474 || !POINTER_TYPE_P (TREE_TYPE (tmp))
475 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
478 assign = SSA_NAME_DEF_STMT (tmp);
479 assign_bb = gimple_bb (assign);
480 if (!single_pred_p (assign_bb)
481 || !gimple_assign_single_p (assign))
483 expr = gimple_assign_rhs1 (assign);
485 if (TREE_CODE (expr) != ADDR_EXPR)
487 expr = TREE_OPERAND (expr, 0);
488 expr = get_ref_base_and_extent (expr, &offset, &size, &max_size);
490 if (TREE_CODE (expr) != MEM_REF
491 /* If this is a varying address, punt. */
/* Fold the MEM_REF's constant byte offset into the bit offset.  */
495 offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
496 parm = TREE_OPERAND (expr, 0);
/* The adjusted pointer must be based directly on a formal parameter.  */
497 if (TREE_CODE (parm) != SSA_NAME
498 || !SSA_NAME_IS_DEFAULT_DEF (parm)
502 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
/* The assign block's single predecessor must end in `if (parm != 0)' — the
   guard of the multiple-inheritance NULL-preserving cast.  */
506 cond_bb = single_pred (assign_bb);
507 cond = last_stmt (cond_bb);
509 || gimple_code (cond) != GIMPLE_COND
510 || gimple_cond_code (cond) != NE_EXPR
511 || gimple_cond_lhs (cond) != parm
512 || !integer_zerop (gimple_cond_rhs (cond)))
/* Both PHI predecessors must be exactly the guard block and the adjust
   block, i.e. no other control flow can reach the merge.  */
516 phi_bb = gimple_bb (phi);
517 for (i = 0; i < 2; i++)
519 basic_block pred = EDGE_PRED (phi_bb, i)->src;
520 if (pred != assign_bb && pred != cond_bb)
524 jfunc->type = IPA_JF_ANCESTOR;
525 jfunc->value.ancestor.formal_id = index;
526 jfunc->value.ancestor.offset = offset;
527 jfunc->value.ancestor.type = TREE_TYPE (TREE_TYPE (tmp));
530 /* Given OP whch is passed as an actual argument to a called function,
531 determine if it is possible to construct a KNOWN_TYPE jump function for it
532 and if so, create one and store it to JFUNC. */
535 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc)
/* Only &object expressions can pin down the dynamic type.  */
539 if (TREE_CODE (op) != ADDR_EXPR)
542 op = TREE_OPERAND (op, 0);
543 binfo = gimple_get_relevant_ref_binfo (op, NULL_TREE);
/* NOTE(review): the NULL-binfo early return appears to be truncated from this
   copy — confirm binfo is checked before use in the full source.  */
546 jfunc->type = IPA_JF_KNOWN_TYPE;
547 jfunc->value.base_binfo = binfo;
552 /* Determine the jump functions of scalar arguments. Scalar means SSA names
553 and constants of a number of selected types. INFO is the ipa_node_params
554 structure associated with the caller, FUNCTIONS is a pointer to an array of
555 jump function structures associated with CALL which is the call statement
559 compute_scalar_jump_functions (struct ipa_node_params *info,
560 struct ipa_jump_func *functions,
566 for (num = 0; num < gimple_call_num_args (call); num++)
568 arg = gimple_call_arg (call, num);
/* Interprocedural invariants become constant jump functions directly.  */
570 if (is_gimple_ip_invariant (arg))
572 functions[num].type = IPA_JF_CONST;
573 functions[num].value.constant = arg;
575 else if (TREE_CODE (arg) == SSA_NAME)
/* A default def SSA name is the incoming value of a formal parameter,
   i.e. a simple pass-through.  */
577 if (SSA_NAME_IS_DEFAULT_DEF (arg))
579 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
583 functions[num].type = IPA_JF_PASS_THROUGH;
584 functions[num].value.pass_through.formal_id = index;
585 functions[num].value.pass_through.operation = NOP_EXPR;
/* Otherwise inspect the defining statement: assignments may yield complex
   pass-through/ancestor functions, PHIs may match the multiple-inheritance
   cast pattern.  */
590 gimple stmt = SSA_NAME_DEF_STMT (arg);
591 if (is_gimple_assign (stmt))
592 compute_complex_assign_jump_func (info, &functions[num],
594 else if (gimple_code (stmt) == GIMPLE_PHI)
595 compute_complex_ancestor_jump_func (info, &functions[num],
/* Fallback for non-SSA, non-invariant arguments: try a KNOWN_TYPE jump
   function based on the static type of the referenced object.  */
600 compute_known_type_jump_func (arg, &functions[num]);
604 /* Inspect the given TYPE and return true iff it has the same structure (the
605 same number of fields of the same types) as a C++ member pointer. If
606 METHOD_PTR and DELTA are non-NULL, store the trees representing the
607 corresponding fields there. */
610 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
614 if (TREE_CODE (type) != RECORD_TYPE)
/* First field: pointer to METHOD_TYPE (the __pfn member).  */
617 fld = TYPE_FIELDS (type);
618 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
619 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
/* Second field: integral delta.  NOTE(review): `INTEGRAL_TYPE_P (fld)' checks
   the FIELD_DECL itself, not TREE_TYPE (fld), and the polarity looks odd —
   likely a transcription artifact of `!INTEGRAL_TYPE_P (TREE_TYPE (fld))';
   confirm against the full source.  */
625 fld = DECL_CHAIN (fld);
626 if (!fld || INTEGRAL_TYPE_P (fld))
/* Exactly two fields: anything after the delta disqualifies the type.  */
631 if (DECL_CHAIN (fld))
637 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
638 boolean variable pointed to by DATA. */
641 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
644 bool *b = (bool *) data;
/* NOTE(review): setting *b = true and the return value are truncated from
   this copy.  */
649 /* Return true if the formal parameter PARM might have been modified in this
650 function before reaching the statement CALL. PARM_INFO is a pointer to a
651 structure containing intermediate information about PARM. */
654 is_parm_modified_before_call (struct param_analysis_info *parm_info,
655 gimple call, tree parm)
657 bool modified = false;
/* Cached positive answer from a previous walk — no need to re-scan.  */
660 if (parm_info->modified)
663 ao_ref_init (&refd, parm);
/* Walk virtual definitions reaching CALL; visited_statements memoizes the
   walk across queries for the same parameter.  */
664 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
665 &modified, &parm_info->visited_statements);
/* Cache the positive result for subsequent calls.  */
668 parm_info->modified = true;
674 /* Go through arguments of the CALL and for every one that looks like a member
675 pointer, check whether it can be safely declared pass-through and if so,
676 mark that to the corresponding item of jump FUNCTIONS. Return true iff
677 there are non-pass-through member pointers within the arguments. INFO
678 describes formal parameters of the caller. PARMS_INFO is a pointer to a
679 vector containing intermediate information about each formal parameter. */
682 compute_pass_through_member_ptrs (struct ipa_node_params *info,
683 struct param_analysis_info *parms_info,
684 struct ipa_jump_func *functions,
687 bool undecided_members = false;
691 for (num = 0; num < gimple_call_num_args (call); num++)
693 arg = gimple_call_arg (call, num);
695 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
/* Only a formal parameter passed straight through, unmodified before the
   call, can be declared pass-through.  */
697 if (TREE_CODE (arg) == PARM_DECL)
699 int index = ipa_get_param_decl_index (info, arg);
701 gcc_assert (index >=0);
702 if (!is_parm_modified_before_call (&parms_info[index], call, arg))
704 functions[num].type = IPA_JF_PASS_THROUGH;
705 functions[num].value.pass_through.formal_id = index;
706 functions[num].value.pass_through.operation = NOP_EXPR;
/* Modified parameter: its value at the call is unknown.  */
709 undecided_members = true;
/* Non-PARM_DECL member pointer: leave for the constant-tracking pass.  */
712 undecided_members = true;
716 return undecided_members;
719 /* Simple function filling in a member pointer constant jump function (with PFN
720 and DELTA as the constant value) into JFUNC. */
723 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
724 tree pfn, tree delta)
726 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
727 jfunc->value.member_cst.pfn = pfn;
728 jfunc->value.member_cst.delta = delta;
731 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
732 return the rhs of its defining statement. */
735 get_ssa_def_if_simple_copy (tree rhs)
/* Follow chains of single-rhs copies until hitting a default def or a
   non-copy definition; loop terminates because SSA def chains are acyclic.  */
737 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
739 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
741 if (gimple_assign_single_p (def_stmt))
742 rhs = gimple_assign_rhs1 (def_stmt);
749 /* Traverse statements from CALL backwards, scanning whether the argument ARG
750 which is a member pointer is filled in with constant values. If it is, fill
751 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
752 fields of the record type of the member pointer. To give an example, we
753 look for a pattern looking like the following:
755 D.2515.__pfn ={v} printStuff;
756 D.2515.__delta ={v} 0;
757 i_1 = doprinting (D.2515); */
760 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
761 tree delta_field, struct ipa_jump_func *jfunc)
763 gimple_stmt_iterator gsi;
764 tree method = NULL_TREE;
765 tree delta = NULL_TREE;
767 gsi = gsi_for_stmt (call);
/* Scan backwards from the call; stop when something may clobber ARG that we
   cannot account for, or once both fields are known constant.  */
770 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
772 gimple stmt = gsi_stmt (gsi);
/* Statements that cannot touch ARG are irrelevant — skip them.  */
775 if (!stmt_may_clobber_ref_p (stmt, arg))
777 if (!gimple_assign_single_p (stmt))
780 lhs = gimple_assign_lhs (stmt);
781 rhs = gimple_assign_rhs1 (stmt);
/* Only stores of the form ARG.field = ... are acceptable clobbers.  */
783 if (TREE_CODE (lhs) != COMPONENT_REF
784 || TREE_OPERAND (lhs, 0) != arg)
787 fld = TREE_OPERAND (lhs, 1);
/* Store to the __pfn field: must be the address of a method.  */
788 if (!method && fld == method_field)
790 rhs = get_ssa_def_if_simple_copy (rhs);
791 if (TREE_CODE (rhs) == ADDR_EXPR
792 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
793 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
795 method = TREE_OPERAND (rhs, 0);
/* If the delta was already seen, both halves are known — done.  */
798 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
/* Store to the __delta field: must be an integer constant.  */
806 if (!delta && fld == delta_field)
808 rhs = get_ssa_def_if_simple_copy (rhs);
809 if (TREE_CODE (rhs) == INTEGER_CST)
/* If the method was already seen, both halves are known — done.
   NOTE(review): `rhs' here is the delta store's rhs; the pfn argument
   presumably comes from the earlier `method' — the assignments between
   these lines are truncated, confirm against the full source.  */
814 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
826 /* Go through the arguments of the CALL and for every member pointer within
827 tries determine whether it is a constant. If it is, create a corresponding
828 constant jump function in FUNCTIONS which is an array of jump functions
829 associated with the call. */
832 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
836 tree arg, method_field, delta_field;
838 for (num = 0; num < gimple_call_num_args (call); num++)
840 arg = gimple_call_arg (call, num);
/* Only arguments still undetermined (IPA_JF_UNKNOWN) whose type matches the
   two-field member-pointer layout are worth the backward scan.  */
842 if (functions[num].type == IPA_JF_UNKNOWN
843 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
845 determine_cst_member_ptr (call, arg, method_field, delta_field,
850 /* Compute jump function for all arguments of callsite CS and insert the
851 information in the jump_functions array in the ipa_edge_args corresponding
855 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_info,
856 struct cgraph_edge *cs)
858 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
859 struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
/* Nothing to do for argument-less calls or already-computed edges.  */
862 if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
864 arguments->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
865 (ipa_get_cs_argument_count (arguments));
867 call = cs->call_stmt;
868 gcc_assert (is_gimple_call (call));
870 /* We will deal with constants and SSA scalars first: */
871 compute_scalar_jump_functions (info, arguments->jump_functions, call);
873 /* Let's check whether there are any potential member pointers and if so,
874 whether we can determine their functions as pass_through. */
875 if (!compute_pass_through_member_ptrs (info, parms_info,
876 arguments->jump_functions, call))
879 /* Finally, let's check whether we actually pass a new constant member
881 compute_cst_member_ptr_arguments (arguments->jump_functions, call);
884 /* Compute jump functions for all edges - both direct and indirect - outgoing
885 from NODE. Also count the actual arguments in the process. */
888 ipa_compute_jump_functions (struct cgraph_node *node,
889 struct param_analysis_info *parms_info)
891 struct cgraph_edge *cs;
893 for (cs = node->callees; cs; cs = cs->next_callee)
895 /* We do not need to bother analyzing calls to unknown
896 functions unless they may become known during lto/whopr. */
897 if (!cs->callee->analyzed && !flag_lto)
899 ipa_count_arguments (cs);
900 /* If the descriptor of the callee is not initialized yet, we have to do
902 if (cs->callee->analyzed)
903 ipa_initialize_node_params (cs->callee);
/* Mismatched counts mean a varargs/K&R-style call: flag the callee so later
   propagation is conservative about its parameters.  */
904 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
905 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
906 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
907 ipa_compute_jump_functions_for_edge (parms_info, cs);
/* Indirect edges have no callee descriptor to reconcile — just count and
   compute.  */
910 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
912 ipa_count_arguments (cs);
913 ipa_compute_jump_functions_for_edge (parms_info, cs);
917 /* If RHS looks like a rhs of a statement loading pfn from a member
918 pointer formal parameter, return the parameter, otherwise return
919 NULL. If USE_DELTA, then we look for a use of the delta field
920 rather than the pfn. */
923 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
925 tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;
/* The load can be either REC.field (COMPONENT_REF) or MEM[&REC + offset];
   remember which field, if any, was named explicitly.  */
927 if (TREE_CODE (rhs) == COMPONENT_REF)
929 ref_field = TREE_OPERAND (rhs, 1);
930 rhs = TREE_OPERAND (rhs, 0);
933 ref_field = NULL_TREE;
934 if (TREE_CODE (rhs) != MEM_REF)
936 rec = TREE_OPERAND (rhs, 0);
937 if (TREE_CODE (rec) != ADDR_EXPR)
939 rec = TREE_OPERAND (rec, 0);
/* The base must be a formal parameter whose type matches the member-pointer
   layout; this also yields the pfn/delta FIELD_DECLs for comparison.  */
940 if (TREE_CODE (rec) != PARM_DECL
941 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
944 ref_offset = TREE_OPERAND (rhs, 1);
/* NOTE(review): the handling around a nonzero MEM_REF offset combined with a
   named field is partially truncated here; the control flow between lines 948
   and 956 cannot be fully reconstructed from this copy.  */
948 if (integer_nonzerop (ref_offset))
/* COMPONENT_REF case: the named field must be the requested one.  */
956 return ref_field == fld ? rec : NULL_TREE;
/* MEM_REF case: compare the constant offset against the byte position of the
   requested field.  */
960 fld_offset = byte_position (delta_field);
962 fld_offset = byte_position (ptr_field);
964 return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
967 /* If STMT looks like a statement loading a value from a member pointer formal
968 parameter, this function returns that parameter. */
971 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
/* Thin wrapper: only single-rhs assignments can be such loads.  */
975 if (!gimple_assign_single_p (stmt))
978 rhs = gimple_assign_rhs1 (stmt);
979 return ipa_get_member_ptr_load_param (rhs, use_delta);
982 /* Returns true iff T is an SSA_NAME defined by a statement. */
985 ipa_is_ssa_with_stmt_def (tree t)
/* Default defs represent incoming parameter values and have no defining
   statement, hence the exclusion.  */
987 if (TREE_CODE (t) == SSA_NAME
988 && !SSA_NAME_IS_DEFAULT_DEF (t))
994 /* Find the indirect call graph edge corresponding to STMT and add to it all
995 information necessary to describe a call to a parameter number PARAM_INDEX.
996 NODE is the caller. POLYMORPHIC should be set to true iff the call is a
1000 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt,
1003 struct cgraph_edge *cs;
1005 cs = cgraph_edge (node, stmt);
1006 cs->indirect_info->param_index = param_index;
1007 cs->indirect_info->anc_offset = 0;
1008 cs->indirect_info->polymorphic = polymorphic;
/* For virtual calls, also record the OBJ_TYPE_REF token and the static type
   of the object so devirtualization can look up the target later.  */
1011 tree otr = gimple_call_fn (stmt);
1012 tree type, token = OBJ_TYPE_REF_TOKEN (otr);
1013 cs->indirect_info->otr_token = tree_low_cst (token, 1);
1014 type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (otr)));
1015 cs->indirect_info->otr_type = type;
1019 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1020 (described by INFO). PARMS_INFO is a pointer to a vector containing
1021 intermediate information about each formal parameter. Currently it checks
1022 whether the call calls a pointer that is a formal parameter and if so, the
1023 parameter is marked with the called flag and an indirect call graph edge
1024 describing the call is created. This is very simple for ordinary pointers
1025 represented in SSA but not-so-nice when it comes to member pointers. The
1026 ugly part of this function does nothing more than trying to match the
1027 pattern of such a call. An example of such a pattern is the gimple dump
1028 below, the call is on the last line:
1031 f$__delta_5 = f.__delta;
1032 f$__pfn_24 = f.__pfn;
1036 f$__delta_5 = MEM[(struct *)&f];
1037 f$__pfn_24 = MEM[(struct *)&f + 4B];
1039 and a few lines below:
1042 D.2496_3 = (int) f$__pfn_24;
1043 D.2497_4 = D.2496_3 & 1;
1050 D.2500_7 = (unsigned int) f$__delta_5;
1051 D.2501_8 = &S + D.2500_7;
1052 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1053 D.2503_10 = *D.2502_9;
1054 D.2504_12 = f$__pfn_24 + -1;
1055 D.2505_13 = (unsigned int) D.2504_12;
1056 D.2506_14 = D.2503_10 + D.2505_13;
1057 D.2507_15 = *D.2506_14;
1058 iftmp.11_16 = (String:: *) D.2507_15;
1061 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1062 D.2500_19 = (unsigned int) f$__delta_5;
1063 D.2508_20 = &S + D.2500_19;
1064 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1066 Such patterns are results of simple calls to a member pointer:
1068 int doprinting (int (MyString::* f)(int) const)
1070 MyString S ("somestring");
1077 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1078 struct ipa_node_params *info,
1079 struct param_analysis_info *parms_info,
1080 gimple call, tree target)
1085 tree rec, rec2, cond;
1088 basic_block bb, virt_bb, join;
/* Easy case: calling a function-pointer parameter directly.  */
1090 if (SSA_NAME_IS_DEFAULT_DEF (target))
1092 tree var = SSA_NAME_VAR (target);
1093 index = ipa_get_param_decl_index (info, var);
1095 ipa_note_param_call (node, index, call, false);
1099 /* Now we need to try to match the complex pattern of calling a member
1102 if (!POINTER_TYPE_P (TREE_TYPE (target))
1103 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
/* The called value must be merged by a two-argument PHI (virtual vs.
   non-virtual member function paths).  */
1106 def = SSA_NAME_DEF_STMT (target);
1107 if (gimple_code (def) != GIMPLE_PHI)
1110 if (gimple_phi_num_args (def) != 2)
1113 /* First, we need to check whether one of these is a load from a member
1114 pointer that is a parameter to this function. */
1115 n1 = PHI_ARG_DEF (def, 0);
1116 n2 = PHI_ARG_DEF (def, 1);
1117 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1119 d1 = SSA_NAME_DEF_STMT (n1);
1120 d2 = SSA_NAME_DEF_STMT (n2);
/* Exactly one PHI argument must be the direct pfn load; the other branch is
   the vtable-lookup (virtual) path.  */
1122 join = gimple_bb (def);
1123 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
1125 if (ipa_get_stmt_member_ptr_load_param (d2, false))
1128 bb = EDGE_PRED (join, 0)->src;
1129 virt_bb = gimple_bb (d2);
1131 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
1133 bb = EDGE_PRED (join, 1)->src;
1134 virt_bb = gimple_bb (d1);
1139 /* Second, we need to check that the basic blocks are laid out in the way
1140 corresponding to the pattern. */
1142 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1143 || single_pred (virt_bb) != bb
1144 || single_succ (virt_bb) != join)
1147 /* Third, let's see that the branching is done depending on the least
1148 significant bit of the pfn. */
1150 branch = last_stmt (bb);
1151 if (!branch || gimple_code (branch) != GIMPLE_COND)
1154 if (gimple_cond_code (branch) != NE_EXPR
1155 || !integer_zerop (gimple_cond_rhs (branch)))
1158 cond = gimple_cond_lhs (branch);
1159 if (!ipa_is_ssa_with_stmt_def (cond))
/* The condition must be (pfn & 1), possibly through an intermediate integer
   conversion, i.e. the "is this a virtual method" vbit test.  */
1162 def = SSA_NAME_DEF_STMT (cond);
1163 if (!is_gimple_assign (def)
1164 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1165 || !integer_onep (gimple_assign_rhs2 (def)))
1168 cond = gimple_assign_rhs1 (def);
1169 if (!ipa_is_ssa_with_stmt_def (cond))
1172 def = SSA_NAME_DEF_STMT (cond);
/* Skip over an optional conversion before the field load.  */
1174 if (is_gimple_assign (def)
1175 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1177 cond = gimple_assign_rhs1 (def);
1178 if (!ipa_is_ssa_with_stmt_def (cond))
1180 def = SSA_NAME_DEF_STMT (cond);
/* The tested value must load from the same member-pointer parameter; which
   field depends on where the target ABI keeps the vbit.  */
1183 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1184 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1185 == ptrmemfunc_vbit_in_delta));
/* Everything matched: record the indirect call through parameter REC,
   provided the parameter is not modified before the call.
   NOTE(review): the `rec != rec2' check appears truncated from this copy.  */
1190 index = ipa_get_param_decl_index (info, rec);
1191 if (index >= 0 && !is_parm_modified_before_call (&parms_info[index],
1193 ipa_note_param_call (node, index, call, false);
1198 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1199 object referenced in the expression is a formal parameter of the caller
1200 (described by INFO), create a call note for the statement. */
1203 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1204 struct ipa_node_params *info, gimple call,
1207 tree obj = OBJ_TYPE_REF_OBJECT (target);
/* Strip an &obj.field...field chain down to the underlying base object.  */
1211 if (TREE_CODE (obj) == ADDR_EXPR)
1215 obj = TREE_OPERAND (obj, 0);
1217 while (TREE_CODE (obj) == COMPONENT_REF);
1218 if (TREE_CODE (obj) != MEM_REF)
1220 obj = TREE_OPERAND (obj, 0);
/* The receiver must be the unmodified incoming value of a formal (its SSA
   default def) for the call note to be meaningful.  */
1223 if (TREE_CODE (obj) != SSA_NAME
1224 || !SSA_NAME_IS_DEFAULT_DEF (obj))
1227 var = SSA_NAME_VAR (obj);
1228 index = ipa_get_param_decl_index (info, var);
/* Polymorphic call through parameter INDEX.  */
1231 ipa_note_param_call (node, index, call, true);
1234 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1235 of the caller (described by INFO). PARMS_INFO is a pointer to a vector
1236 containing intermediate information about each formal parameter. */
1239 ipa_analyze_call_uses (struct cgraph_node *node,
1240 struct ipa_node_params *info,
1241 struct param_analysis_info *parms_info, gimple call)
1243 tree target = gimple_call_fn (call);
/* Dispatch on the form of the callee expression: SSA names are (possibly
   member-pointer) indirect calls, OBJ_TYPE_REFs are virtual calls.  Direct
   calls need no parameter-use analysis here.  */
1245 if (TREE_CODE (target) == SSA_NAME)
1246 ipa_analyze_indirect_call_uses (node, info, parms_info, call, target);
1247 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1248 ipa_analyze_virtual_call_uses (node, info, call, target);
1252 /* Analyze the call statement STMT with respect to formal parameters (described
1253 in INFO) of caller given by NODE. Currently it only checks whether formal
1254 parameters are called. PARMS_INFO is a pointer to a vector containing
1255 intermediate information about each formal parameter. */
1258 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1259 struct param_analysis_info *parms_info, gimple stmt)
/* Only call statements are interesting; loads/stores are handled separately
   by walk_stmt_load_store_addr_ops in ipa_analyze_params_uses.  */
1261 if (is_gimple_call (stmt))
1262 ipa_analyze_call_uses (node, info, parms_info, stmt);
1265 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1266 If OP is a parameter declaration, mark it as used in the info structure
1270 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1271 tree op, void *data)
1273 struct ipa_node_params *info = (struct ipa_node_params *) data;
/* Reduce the reference to its base; only direct PARM_DECL bases count as a
   use of the parameter.  */
1275 op = get_base_address (op);
1277 && TREE_CODE (op) == PARM_DECL)
1279 int index = ipa_get_param_decl_index (info, op);
1280 gcc_assert (index >= 0);
1281 info->params[index].used = true;
1287 /* Scan the function body of NODE and inspect the uses of formal parameters.
1288 Store the findings in various structures of the associated ipa_node_params
1289 structure, such as parameter flags, notes etc. PARMS_INFO is a pointer to a
1290 vector containing intermediate information about each formal parameter. */
1293 ipa_analyze_params_uses (struct cgraph_node *node,
1294 struct param_analysis_info *parms_info)
1296 tree decl = node->decl;
1298 struct function *func;
1299 gimple_stmt_iterator gsi;
1300 struct ipa_node_params *info = IPA_NODE_REF (node);
/* Nothing to scan for parameter-less functions; don't redo finished work.  */
1303 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1306 for (i = 0; i < ipa_get_param_count (info); i++)
1308 tree parm = ipa_get_param (info, i);
1309 /* For SSA regs see if parameter is used. For non-SSA we compute
1310 the flag during modification analysis. */
1311 if (is_gimple_reg (parm)
1312 && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
1313 info->params[i].used = true;
/* Walk every real statement: record parameter calls, and mark parameters
   referenced by loads, stores or address-takes.  */
1316 func = DECL_STRUCT_FUNCTION (decl);
1317 FOR_EACH_BB_FN (bb, func)
1319 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1321 gimple stmt = gsi_stmt (gsi);
/* Debug statements must not affect the analysis result.  */
1323 if (is_gimple_debug (stmt))
1326 ipa_analyze_stmt_uses (node, info, parms_info, stmt);
1327 walk_stmt_load_store_addr_ops (stmt, info,
1328 visit_ref_for_mod_analysis,
1329 visit_ref_for_mod_analysis,
1330 visit_ref_for_mod_analysis);
/* PHI nodes can also reference parameters (e.g. addresses).  */
1332 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
1333 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1334 visit_ref_for_mod_analysis,
1335 visit_ref_for_mod_analysis,
1336 visit_ref_for_mod_analysis);
1339 info->uses_analysis_done = 1;
1342 /* Initialize the array describing properties of formal parameters of NODE,
1343 analyze their uses and compute jump functions associated with actual
1344 arguments of calls from within NODE. */
1347 ipa_analyze_node (struct cgraph_node *node)
1349 struct ipa_node_params *info = IPA_NODE_REF (node);
1350 struct param_analysis_info *parms_info;
1353 ipa_initialize_node_params (node);
1355 param_count = ipa_get_param_count (info);
/* PARMS_INFO is alloca-backed scratch data that lives only for the
   duration of the analysis of this node.  */
1356 parms_info = XALLOCAVEC (struct param_analysis_info, param_count);
1357 memset (parms_info, 0, sizeof (struct param_analysis_info) * param_count);
1359 ipa_analyze_params_uses (node, parms_info);
1360 ipa_compute_jump_functions (node, parms_info);
/* Release the per-parameter visited-statements bitmaps.  */
1362 for (i = 0; i < param_count; i++)
1363 if (parms_info[i].visited_statements)
1364 BITMAP_FREE (parms_info[i].visited_statements);
1368 /* Update the jump function DST when the call graph edge corresponding to SRC
1369 is being inlined, knowing that DST is of type ancestor and src of known
   type.  */
1373 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1374 struct ipa_jump_func *dst)
1378 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1379 dst->value.ancestor.offset,
1380 dst->value.ancestor.type);
/* If the ancestor offset/type could be resolved within the known type's
   binfo, DST itself becomes a known-type jump function; otherwise nothing
   useful is known about it.  */
1383 dst->type = IPA_JF_KNOWN_TYPE;
1384 dst->value.base_binfo = new_binfo;
1387 dst->type = IPA_JF_UNKNOWN;
1390 /* Update the jump functions associated with call graph edge E when the call
1391 graph edge CS is being inlined, assuming that E->caller is already (possibly
1392 indirectly) inlined into CS->callee and that E has not been inlined. */
1395 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1396 struct cgraph_edge *e)
1398 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1399 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1400 int count = ipa_get_cs_argument_count (args);
/* Compose each jump function of E with the corresponding jump function
   of the inlined edge CS.  */
1403 for (i = 0; i < count; i++)
1405 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1407 if (dst->type == IPA_JF_ANCESTOR)
1409 struct ipa_jump_func *src;
1411 /* Variable number of arguments can cause havoc if we try to access
1412 one that does not exist in the inlined edge. So make sure we
   do not.  */
1414 if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
1416 dst->type = IPA_JF_UNKNOWN;
1420 src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id)
1421 if (src->type == IPA_JF_KNOWN_TYPE)
1422 combine_known_type_and_ancestor_jfs (src, dst);
1423 else if (src->type == IPA_JF_CONST)
1425 struct ipa_jump_func kt_func;
/* Try to derive a known type from the constant; only then can it
   be combined with the ancestor function.  */
1427 kt_func.type = IPA_JF_UNKNOWN;
1428 compute_known_type_jump_func (src->value.constant, &kt_func);
1429 if (kt_func.type == IPA_JF_KNOWN_TYPE)
1430 combine_known_type_and_ancestor_jfs (&kt_func, dst);
1432 dst->type = IPA_JF_UNKNOWN;
1434 else if (src->type == IPA_JF_PASS_THROUGH
1435 && src->value.pass_through.operation == NOP_EXPR)
1436 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
1437 else if (src->type == IPA_JF_ANCESTOR)
/* Two ancestor functions compose by accumulating offsets.  */
1439 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
1440 dst->value.ancestor.offset += src->value.ancestor.offset;
1443 dst->type = IPA_JF_UNKNOWN;
1445 else if (dst->type == IPA_JF_PASS_THROUGH)
1447 struct ipa_jump_func *src;
1448 /* We must check range due to calls with variable number of arguments
1449 and we cannot combine jump functions with operations. */
1450 if (dst->value.pass_through.operation == NOP_EXPR
1451 && (dst->value.pass_through.formal_id
1452 < ipa_get_cs_argument_count (top)))
1454 src = ipa_get_ith_jump_func (top,
1455 dst->value.pass_through.formal_id);
1459 dst->type = IPA_JF_UNKNOWN;
1464 /* If TARGET is an addr_expr of a function declaration, make it the destination
1465 of an indirect edge IE and return the edge. Otherwise, return NULL. */
1467 struct cgraph_edge *
1468 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
1470 struct cgraph_node *callee;
1472 if (TREE_CODE (target) != ADDR_EXPR)
1474 target = TREE_OPERAND (target, 0);
1475 if (TREE_CODE (target) != FUNCTION_DECL)
1477 callee = cgraph_node (target);
1480 ipa_check_create_node_params ();
1481 cgraph_make_edge_direct (ie, callee);
1484 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1485 "(%s/%i -> %s/%i) for stmt ",
1486 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1487 cgraph_node_name (ie->caller), ie->caller->uid,
1488 cgraph_node_name (ie->callee), ie->callee->uid);
1491 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1493 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
/* A mismatch between actual argument and formal parameter counts means
   the callee is effectively called with variable arguments.  */
1496 if (ipa_get_cs_argument_count (IPA_EDGE_REF (ie))
1497 != ipa_get_param_count (IPA_NODE_REF (callee)))
1498 ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
1503 /* Try to find a destination for indirect edge IE that corresponds to a simple
1504 call or a call of a member function pointer and where the destination is a
1505 pointer formal parameter described by jump function JFUNC. If it can be
1506 determined, return the newly direct edge, otherwise return NULL. */
1508 static struct cgraph_edge *
1509 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1510 struct ipa_jump_func *jfunc)
/* A plain constant or the PFN member of a constant member pointer can
   serve as the call target.  */
1514 if (jfunc->type == IPA_JF_CONST)
1515 target = jfunc->value.constant;
1516 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1517 target = jfunc->value.member_cst.pfn;
1521 return ipa_make_edge_direct_to_target (ie, target);
1524 /* Try to find a destination for indirect edge IE that corresponds to a
1525 virtual call based on a formal parameter which is described by jump
1526 function JFUNC and if it can be determined, make it direct and return the
1527 direct edge. Otherwise, return NULL. */
1529 static struct cgraph_edge *
1530 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1531 struct ipa_jump_func *jfunc)
1533 tree binfo, type, target;
1534 HOST_WIDE_INT token;
/* Obtain the BINFO describing the dynamic type of the object, either
   directly from a known-type jump function or from a constant.  */
1536 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1537 binfo = jfunc->value.base_binfo;
1538 else if (jfunc->type == IPA_JF_CONST)
1540 tree cst = jfunc->value.constant;
1541 if (TREE_CODE (cst) == ADDR_EXPR)
1542 binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (cst, 0),
/* Walk to the BINFO of the ancestor the call actually dispatches on and
   look the OBJ_TYPE_REF token up in its virtual table.  */
1553 token = ie->indirect_info->otr_token;
1554 type = ie->indirect_info->otr_type;
1555 binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
1557 target = gimple_fold_obj_type_ref_known_binfo (token, binfo);
1562 return ipa_make_edge_direct_to_target (ie, target);
1567 /* Update the param called notes associated with NODE when CS is being inlined,
1568 assuming NODE is (potentially indirectly) inlined into CS->callee.
1569 Moreover, if the callee is discovered to be constant, create a new cgraph
1570 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1571 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1574 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
1575 struct cgraph_node *node,
1576 VEC (cgraph_edge_p, heap) **new_edges)
1578 struct ipa_edge_args *top;
1579 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
1582 ipa_check_create_edge_args ();
1583 top = IPA_EDGE_REF (cs);
1585 for (ie = node->indirect_calls; ie; ie = next_ie)
1587 struct cgraph_indirect_call_info *ici = ie->indirect_info;
1588 struct ipa_jump_func *jfunc;
/* Remember the successor now — IE may be turned into a direct edge
   below.  */
1590 next_ie = ie->next_callee;
1591 if (bitmap_bit_p (iinlining_processed_edges, ie->uid))
1594 /* If we ever use indirect edges for anything other than indirect
1595 inlining, we will need to skip those with negative param_indices. */
1596 if (ici->param_index == -1)
1599 /* We must check range due to calls with variable number of arguments: */
1600 if (ici->param_index >= ipa_get_cs_argument_count (top))
1602 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1606 jfunc = ipa_get_ith_jump_func (top, ici->param_index);
1607 if (jfunc->type == IPA_JF_PASS_THROUGH
1608 && jfunc->value.pass_through.operation == NOP_EXPR)
1609 ici->param_index = jfunc->value.pass_through.formal_id;
1610 else if (jfunc->type == IPA_JF_ANCESTOR)
1612 ici->param_index = jfunc->value.ancestor.formal_id;
1613 ici->anc_offset += jfunc->value.ancestor.offset;
1616 /* Either we can find a destination for this edge now or never. */
1617 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1619 if (ici->polymorphic)
1620 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
1622 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
1624 if (new_direct_edge)
1626 new_direct_edge->indirect_inlining_edge = 1;
1629 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
/* Re-fetch TOP — creating the direct edge may have grown the edge args
   vector that IPA_EDGE_REF indexes, invalidating the old pointer.  */
1631 top = IPA_EDGE_REF (cs);
1640 /* Recursively traverse subtree of NODE (including node) made of inlined
1641 cgraph_edges when CS has been inlined and invoke
1642 update_indirect_edges_after_inlining on all nodes and
1643 update_jump_functions_after_inlining on all non-inlined edges that lead out
1644 of this subtree. Newly discovered indirect edges will be added to
1645 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
   created.  */
1649 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1650 struct cgraph_node *node,
1651 VEC (cgraph_edge_p, heap) **new_edges)
1653 struct cgraph_edge *e;
1656 res = update_indirect_edges_after_inlining (cs, node, new_edges);
1658 for (e = node->callees; e; e = e->next_callee)
1659 if (!e->inline_failed)
/* E is itself inlined — recurse into its callee's subtree.  */
1660 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1662 update_jump_functions_after_inlining (cs, e);
1667 /* Update jump functions and call note functions on inlining the call site CS.
1668 CS is expected to lead to a node already cloned by
1669 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1670 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
   created.  */
1674 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1675 VEC (cgraph_edge_p, heap) **new_edges)
1677 /* FIXME lto: We do not stream out indirect call information. */
1681 /* Do nothing if the preparation phase has not been carried out yet
1682 (i.e. during early inlining). */
1683 if (!ipa_node_params_vector)
/* Node params imply edge args have been created too.  */
1685 gcc_assert (ipa_edge_args_vector);
1687 return propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1690 /* Frees all dynamically allocated structures that the argument info points
   to.  */
1694 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
1696 if (args->jump_functions)
1697 ggc_free (args->jump_functions);
/* Clear the whole structure so stale pointers cannot be reused.  */
1699 memset (args, 0, sizeof (*args));
1702 /* Free all ipa_edge_args structures and release the vector holding them. */
1705 ipa_free_all_edge_args (void)
1708 struct ipa_edge_args *args;
1710 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
1711 ipa_free_edge_args_substructures (args);
1713 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
1714 ipa_edge_args_vector = NULL;
1717 /* Frees all dynamically allocated structures that the param info points
   to.  */
1721 ipa_free_node_params_substructures (struct ipa_node_params *info)
1724 free (info->params);
/* Clear the whole structure so stale pointers cannot be reused.  */
1726 memset (info, 0, sizeof (*info));
1729 /* Free all ipa_node_params structures and release the vector holding them. */
1732 ipa_free_all_node_params (void)
1735 struct ipa_node_params *info;
1737 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
1738 ipa_free_node_params_substructures (info);
1740 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
1741 ipa_node_params_vector = NULL;
1744 /* Hook that is called by cgraph.c when an edge is removed. */
1747 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
1749 /* During IPA-CP updating we can be called on not-yet analyzed clones;
   those have no entry in the vector, so there is nothing to free. */
1750 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
1751 <= (unsigned)cs->uid)
1753 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1756 /* Hook that is called by cgraph.c when a node is removed. */
1759 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1761 /* During IPA-CP updating we can be called on not-yet analyzed clones;
   those have no entry in the vector, so there is nothing to free. */
1762 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
1763 <= (unsigned)node->uid)
1765 ipa_free_node_params_substructures (IPA_NODE_REF (node));
1768 /* Helper function to duplicate an array of size N that is at SRC and return a
1769 pointer to the copy. Nothing is done if SRC is NULL. */
1772 duplicate_array (void *src, size_t n)
/* Duplicate the N jump functions at SRC into freshly GGC-allocated
   memory and return a pointer to the copy.  */
1784 static struct ipa_jump_func *
1785 duplicate_ipa_jump_func_array (const struct ipa_jump_func * src, size_t n)
1787 struct ipa_jump_func *p;
1792 p = ggc_alloc_vec_ipa_jump_func (n);
1793 memcpy (p, src, n * sizeof (struct ipa_jump_func));
1797 /* Hook that is called by cgraph.c when an edge is duplicated. */
1800 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
1801 __attribute__((unused)) void *data)
1803 struct ipa_edge_args *old_args, *new_args;
1806 ipa_check_create_edge_args ();
1808 old_args = IPA_EDGE_REF (src);
1809 new_args = IPA_EDGE_REF (dst);
1811 arg_count = ipa_get_cs_argument_count (old_args);
1812 ipa_set_cs_argument_count (new_args, arg_count);
/* Deep-copy the jump functions of the original edge.  */
1813 new_args->jump_functions =
1814 duplicate_ipa_jump_func_array (old_args->jump_functions, arg_count);
/* A clone of an already-processed edge needs no reprocessing.  */
1816 if (iinlining_processed_edges
1817 && bitmap_bit_p (iinlining_processed_edges, src->uid))
1818 bitmap_set_bit (iinlining_processed_edges, dst->uid);
1821 /* Hook that is called by cgraph.c when a node is duplicated. */
1824 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
1825 __attribute__((unused)) void *data)
1827 struct ipa_node_params *old_info, *new_info;
1830 ipa_check_create_node_params ();
1831 old_info = IPA_NODE_REF (src);
1832 new_info = IPA_NODE_REF (dst);
1833 param_count = ipa_get_param_count (old_info);
1835 ipa_set_param_count (new_info, param_count);
1836 new_info->params = (struct ipa_param_descriptor *)
1837 duplicate_array (old_info->params,
1838 sizeof (struct ipa_param_descriptor) * param_count);
/* The per-parameter types vectors are heap-allocated and must be
   deep-copied separately.  */
1839 for (i = 0; i < param_count; i++)
1840 new_info->params[i].types = VEC_copy (tree, heap,
1841 old_info->params[i].types);
1842 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
1843 new_info->count_scale = old_info->count_scale;
1845 new_info->called_with_var_arguments = old_info->called_with_var_arguments;
1846 new_info->uses_analysis_done = old_info->uses_analysis_done;
1847 new_info->node_enqueued = old_info->node_enqueued;
1850 /* Register our cgraph hooks if they are not already there. */
1853 ipa_register_cgraph_hooks (void)
1855 if (!edge_removal_hook_holder)
1856 edge_removal_hook_holder =
1857 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
1858 if (!node_removal_hook_holder)
1859 node_removal_hook_holder =
1860 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
1861 if (!edge_duplication_hook_holder)
1862 edge_duplication_hook_holder =
1863 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
1864 if (!node_duplication_hook_holder)
1865 node_duplication_hook_holder =
1866 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
1869 /* Unregister all our cgraph hooks and clear the holders. */
1872 ipa_unregister_cgraph_hooks (void)
1874 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
1875 edge_removal_hook_holder = NULL;
1876 cgraph_remove_node_removal_hook (node_removal_hook_holder);
1877 node_removal_hook_holder = NULL;
1878 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
1879 edge_duplication_hook_holder = NULL;
1880 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
1881 node_duplication_hook_holder = NULL;
1884 /* Allocate all data structures necessary for indirect inlining. */
1887 ipa_create_all_structures_for_iinln (void)
1889 iinlining_processed_edges = BITMAP_ALLOC (NULL);
1892 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1893 longer needed after ipa-cp. */
1896 ipa_free_all_structures_after_ipa_cp (void)
/* If indirect inlining still runs, it needs these structures — keep
   them until ipa_free_all_structures_after_iinln.  */
1898 if (!flag_indirect_inlining)
1900 ipa_free_all_edge_args ();
1901 ipa_free_all_node_params ();
1902 ipa_unregister_cgraph_hooks ();
1906 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1907 longer needed after indirect inlining. */
1910 ipa_free_all_structures_after_iinln (void)
1912 BITMAP_FREE (iinlining_processed_edges);
1914 ipa_free_all_edge_args ();
1915 ipa_free_all_node_params ();
1916 ipa_unregister_cgraph_hooks ();
1919 /* Print the parameter descriptors of function NODE to file F.  */
1923 ipa_print_node_params (FILE * f, struct cgraph_node *node)
1927 struct ipa_node_params *info;
/* Nothing to print for nodes whose bodies have not been analyzed.  */
1929 if (!node->analyzed)
1931 info = IPA_NODE_REF (node);
1932 fprintf (f, " function %s parameter descriptors:\n",
1933 cgraph_node_name (node));
1934 count = ipa_get_param_count (info);
1935 for (i = 0; i < count; i++)
1937 temp = ipa_get_param (info, i);
1938 if (TREE_CODE (temp) == PARM_DECL)
1939 fprintf (f, " param %d : %s", i,
1941 ? (*lang_hooks.decl_printable_name) (temp, 2)
1943 if (ipa_is_param_used (info, i))
1944 fprintf (f, " used");
1949 /* Print ipa_tree_map data structures of all functions in the call graph
   to file F.  */
1953 ipa_print_all_params (FILE * f)
1955 struct cgraph_node *node;
1957 fprintf (f, "\nFunction parameters:\n");
1958 for (node = cgraph_nodes; node; node = node->next)
1959 ipa_print_node_params (f, node);
1962 /* Return a heap allocated vector containing formal parameters of FNDECL. */
1965 ipa_get_vector_of_formal_parms (tree fndecl)
1967 VEC(tree, heap) *args;
/* Pre-size the vector so the pushes below cannot reallocate.  */
1971 count = count_formal_params_1 (fndecl);
1972 args = VEC_alloc (tree, heap, count);
1973 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
1974 VEC_quick_push (tree, args, parm);
1979 /* Return a heap allocated vector containing types of formal parameters of
1980 function type FNTYPE. */
1982 static inline VEC(tree, heap) *
1983 get_vector_of_formal_parm_types (tree fntype)
1985 VEC(tree, heap) *types;
/* First pass counts the argument types, second pass copies them.  */
1989 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
1992 types = VEC_alloc (tree, heap, count);
1993 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
1994 VEC_quick_push (tree, types, TREE_VALUE (t));
1999 /* Modify the function declaration FNDECL and its type according to the plan in
2000 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2001 to reflect the actual parameters being modified which are determined by the
2002 base_index field. SYNTH_PARM_PREFIX, if non-NULL, names newly synthesized
   parameters; otherwise "SYNTH" is used. */
2005 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2006 const char *synth_parm_prefix)
2008 VEC(tree, heap) *oparms, *otypes;
2009 tree orig_type, new_type = NULL;
2010 tree old_arg_types, t, new_arg_types = NULL;
2011 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2012 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2013 tree new_reversed = NULL;
2014 bool care_for_types, last_parm_void;
2016 if (!synth_parm_prefix)
2017 synth_parm_prefix = "SYNTH";
2019 oparms = ipa_get_vector_of_formal_parms (fndecl);
2020 orig_type = TREE_TYPE (fndecl);
2021 old_arg_types = TYPE_ARG_TYPES (orig_type);
2023 /* The following test is an ugly hack, some functions simply don't have any
2024 arguments in their type. This is probably a bug but well... */
2025 care_for_types = (old_arg_types != NULL_TREE);
2028 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2030 otypes = get_vector_of_formal_parm_types (orig_type);
2032 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2034 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2038 last_parm_void = false;
/* Build the new DECL_ARGUMENTS chain and argument type list according
   to the adjustments.  */
2042 for (i = 0; i < len; i++)
2044 struct ipa_parm_adjustment *adj;
2047 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2048 parm = VEC_index (tree, oparms, adj->base_index);
2051 if (adj->copy_param)
2054 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2058 link = &DECL_CHAIN (parm);
2060 else if (!adj->remove_param)
/* Synthesize a replacement parameter for the reduced one.  */
2066 ptype = build_pointer_type (adj->type);
2071 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2073 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2075 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2077 DECL_ARTIFICIAL (new_parm) = 1;
2078 DECL_ARG_TYPE (new_parm) = ptype;
2079 DECL_CONTEXT (new_parm) = fndecl;
2080 TREE_USED (new_parm) = 1;
2081 DECL_IGNORED_P (new_parm) = 1;
2082 layout_decl (new_parm, 0);
2084 add_referenced_var (new_parm);
2085 mark_sym_for_renaming (new_parm);
2087 adj->reduction = new_parm;
2091 link = &DECL_CHAIN (new_parm);
2099 new_reversed = nreverse (new_arg_types);
2103 TREE_CHAIN (new_arg_types) = void_list_node;
2105 new_reversed = void_list_node;
2109 /* Use copy_node to preserve as much as possible from original type
2110 (debug info, attribute lists etc.)
2111 Exception is METHOD_TYPEs must have THIS argument.
2112 When we are asked to remove it, we need to build new FUNCTION_TYPE
   instead.  */
2114 if (TREE_CODE (orig_type) != METHOD_TYPE
2115 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
2116 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
2118 new_type = build_distinct_type_copy (orig_type);
2119 TYPE_ARG_TYPES (new_type) = new_reversed;
2124 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2126 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2127 DECL_VINDEX (fndecl) = NULL_TREE;
2130 /* When signature changes, we need to clear builtin info. */
2131 if (DECL_BUILT_IN (fndecl))
2133 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2134 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2137 /* This is a new type, not a copy of an old type. Need to reassociate
2138 variants. We can handle everything except the main variant lazily. */
2139 t = TYPE_MAIN_VARIANT (orig_type);
2142 TYPE_MAIN_VARIANT (new_type) = t;
2143 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2144 TYPE_NEXT_VARIANT (t) = new_type;
2148 TYPE_MAIN_VARIANT (new_type) = new_type;
2149 TYPE_NEXT_VARIANT (new_type) = NULL;
2152 TREE_TYPE (fndecl) = new_type;
2153 DECL_VIRTUAL_P (fndecl) = 0;
2155 VEC_free (tree, heap, otypes);
2156 VEC_free (tree, heap, oparms);
2159 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2160 If this is a directly recursive call, CS must be NULL. Otherwise it must
2161 contain the corresponding call graph edge. */
2164 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2165 ipa_parm_adjustment_vec adjustments)
2167 VEC(tree, heap) *vargs;
2169 gimple_stmt_iterator gsi;
2173 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2174 vargs = VEC_alloc (tree, heap, len);
2176 gsi = gsi_for_stmt (stmt);
2177 for (i = 0; i < len; i++)
2179 struct ipa_parm_adjustment *adj;
2181 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2183 if (adj->copy_param)
/* The argument is passed on unchanged.  */
2185 tree arg = gimple_call_arg (stmt, adj->base_index);
2187 VEC_quick_push (tree, vargs, arg);
2189 else if (!adj->remove_param)
2191 tree expr, base, off;
2194 /* We create a new parameter out of the value of the old one, we can
2195 do the following kind of transformations:
2197 - A scalar passed by reference is converted to a scalar passed by
2198 value. (adj->by_ref is false and the type of the original
2199 actual argument is a pointer to a scalar).
2201 - A part of an aggregate is passed instead of the whole aggregate.
2202 The part can be passed either by value or by reference, this is
2203 determined by value of adj->by_ref. Moreover, the code below
2204 handles both situations when the original aggregate is passed by
2205 value (its type is not a pointer) and when it is passed by
2206 reference (it is a pointer to an aggregate).
2208 When the new argument is passed by reference (adj->by_ref is true)
2209 it must be a part of an aggregate and therefore we form it by
2210 simply taking the address of a reference inside the original
   aggregate.  */
2213 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2214 base = gimple_call_arg (stmt, adj->base_index);
2215 loc = EXPR_LOCATION (base);
2217 if (TREE_CODE (base) != ADDR_EXPR
2218 && POINTER_TYPE_P (TREE_TYPE (base)))
2219 off = build_int_cst (adj->alias_ptr_type,
2220 adj->offset / BITS_PER_UNIT);
2223 HOST_WIDE_INT base_offset;
2226 if (TREE_CODE (base) == ADDR_EXPR)
2227 base = TREE_OPERAND (base, 0);
2229 base = get_addr_base_and_unit_offset (base, &base_offset);
2230 /* Aggregate arguments can have non-invariant addresses. */
2233 base = build_fold_addr_expr (prev_base);
2234 off = build_int_cst (adj->alias_ptr_type,
2235 adj->offset / BITS_PER_UNIT);
2237 else if (TREE_CODE (base) == MEM_REF)
/* Fold the adjustment offset into the existing MEM_REF offset.  */
2239 off = build_int_cst (adj->alias_ptr_type,
2241 + adj->offset / BITS_PER_UNIT);
2242 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2244 base = TREE_OPERAND (base, 0);
2248 off = build_int_cst (adj->alias_ptr_type,
2250 + adj->offset / BITS_PER_UNIT);
2251 base = build_fold_addr_expr (base);
2255 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2257 expr = build_fold_addr_expr (expr);
2259 expr = force_gimple_operand_gsi (&gsi, expr,
2261 || is_gimple_reg_type (adj->type),
2262 NULL, true, GSI_SAME_STMT);
2263 VEC_quick_push (tree, vargs, expr);
2267 if (dump_file && (dump_flags & TDF_DETAILS))
2269 fprintf (dump_file, "replacing stmt:");
2270 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
/* Build the replacement call and carry over LHS, block, location,
   flags and static chain from the original statement.  */
2273 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
2274 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2275 VEC_free (tree, heap, vargs);
2276 if (gimple_call_lhs (stmt))
2277 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2279 gimple_set_block (new_stmt, gimple_block (stmt));
2280 if (gimple_has_location (stmt))
2281 gimple_set_location (new_stmt, gimple_location (stmt));
2282 gimple_call_copy_flags (new_stmt, stmt);
2283 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2285 if (dump_file && (dump_flags & TDF_DETAILS))
2287 fprintf (dump_file, "with stmt:");
2288 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2289 fprintf (dump_file, "\n");
2291 gsi_replace (&gsi, new_stmt, true);
2293 cgraph_set_call_stmt (cs, new_stmt);
2294 update_ssa (TODO_update_ssa);
2295 free_dominance_info (CDI_DOMINATORS);
2298 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2301 index_in_adjustments_multiple_times_p (int base_index,
2302 ipa_parm_adjustment_vec adjustments)
2304 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2307 for (i = 0; i < len; i++)
2309 struct ipa_parm_adjustment *adj;
2310 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2312 if (adj->base_index == base_index)
2324 /* Return adjustments that should have the same effect on function parameters
2325 and call arguments as if they were first changed according to adjustments in
2326 INNER and then by adjustments in OUTER. */
2328 ipa_parm_adjustment_vec
2329 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
2330 ipa_parm_adjustment_vec outer)
2332 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
2333 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
2335 ipa_parm_adjustment_vec adjustments, tmp;
/* TMP holds the surviving (non-removed) inner adjustments; OUTER's
   base_index values refer into this reduced set.  */
2337 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
2338 for (i = 0; i < inlen; i++)
2340 struct ipa_parm_adjustment *n;
2341 n = VEC_index (ipa_parm_adjustment_t, inner, i);
2343 if (n->remove_param)
2346 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
2349 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
2350 for (i = 0; i < outlen; i++)
2352 struct ipa_parm_adjustment *r;
2353 struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
2355 struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
2358 gcc_assert (!in->remove_param);
2359 if (out->remove_param)
/* Only emit a removal if no other outer adjustment still refers
   to the same original parameter.  */
2361 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
2363 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2364 memset (r, 0, sizeof (*r));
2365 r->remove_param = true;
2370 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2371 memset (r, 0, sizeof (*r));
2372 r->base_index = in->base_index;
2373 r->type = out->type;
2375 /* FIXME: Create nonlocal value too. */
2377 if (in->copy_param && out->copy_param)
2378 r->copy_param = true;
2379 else if (in->copy_param)
2380 r->offset = out->offset;
2381 else if (out->copy_param)
2382 r->offset = in->offset;
2384 r->offset = in->offset + out->offset;
/* Finally propagate the removals of the original inner adjustments.  */
2387 for (i = 0; i < inlen; i++)
2389 struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
2392 if (n->remove_param)
2393 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
2396 VEC_free (ipa_parm_adjustment_t, heap, tmp);
2400 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human
2401 friendly way, assuming they are meant to be applied to FNDECL. */
2404 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2407 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2409 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2411 fprintf (file, "IPA param adjustments: ");
2412 for (i = 0; i < len; i++)
2414 struct ipa_parm_adjustment *adj;
2415 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2418 fprintf (file, " ");
2422 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2423 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2426 fprintf (file, ", base: ");
2427 print_generic_expr (file, adj->base, 0);
2431 fprintf (file, ", reduction: ");
2432 print_generic_expr (file, adj->reduction, 0);
2434 if (adj->new_ssa_base)
2436 fprintf (file, ", new_ssa_base: ");
2437 print_generic_expr (file, adj->new_ssa_base, 0);
2440 if (adj->copy_param)
2441 fprintf (file, ", copy_param");
2442 else if (adj->remove_param)
2443 fprintf (file, ", remove_param");
2445 fprintf (file, ", offset %li", (long) adj->offset);
2447 fprintf (file, ", by_ref");
2448 print_node_brief (file, ", type: ", adj->type, 0);
2449 fprintf (file, "\n");
2451 VEC_free (tree, heap, parms);
2454 /* Stream out jump function JUMP_FUNC to OB. The layout must be kept in
   sync with ipa_read_jump_function. */
2457 ipa_write_jump_function (struct output_block *ob,
2458 struct ipa_jump_func *jump_func)
2460 lto_output_uleb128_stream (ob->main_stream,
2463 switch (jump_func->type)
2465 case IPA_JF_UNKNOWN:
2467 case IPA_JF_KNOWN_TYPE:
2468 lto_output_tree (ob, jump_func->value.base_binfo, true);
2471 lto_output_tree (ob, jump_func->value.constant, true);
2473 case IPA_JF_PASS_THROUGH:
2474 lto_output_tree (ob, jump_func->value.pass_through.operand, true);
2475 lto_output_uleb128_stream (ob->main_stream,
2476 jump_func->value.pass_through.formal_id);
2477 lto_output_uleb128_stream (ob->main_stream,
2478 jump_func->value.pass_through.operation);
2480 case IPA_JF_ANCESTOR:
2481 lto_output_uleb128_stream (ob->main_stream,
2482 jump_func->value.ancestor.offset);
2483 lto_output_tree (ob, jump_func->value.ancestor.type, true);
2484 lto_output_uleb128_stream (ob->main_stream,
2485 jump_func->value.ancestor.formal_id);
2487 case IPA_JF_CONST_MEMBER_PTR:
2488 lto_output_tree (ob, jump_func->value.member_cst.pfn, true);
2489 lto_output_tree (ob, jump_func->value.member_cst.delta, false);
2494 /* Read in jump function JUMP_FUNC from IB. The layout must mirror
   ipa_write_jump_function exactly. */
2497 ipa_read_jump_function (struct lto_input_block *ib,
2498 struct ipa_jump_func *jump_func,
2499 struct data_in *data_in)
2501 jump_func->type = (enum jump_func_type) lto_input_uleb128 (ib);
2503 switch (jump_func->type)
2505 case IPA_JF_UNKNOWN:
2507 case IPA_JF_KNOWN_TYPE:
2508 jump_func->value.base_binfo = lto_input_tree (ib, data_in);
2511 jump_func->value.constant = lto_input_tree (ib, data_in);
2513 case IPA_JF_PASS_THROUGH:
2514 jump_func->value.pass_through.operand = lto_input_tree (ib, data_in);
2515 jump_func->value.pass_through.formal_id = lto_input_uleb128 (ib);
2516 jump_func->value.pass_through.operation = (enum tree_code) lto_input_uleb128 (ib);
2518 case IPA_JF_ANCESTOR:
2519 jump_func->value.ancestor.offset = lto_input_uleb128 (ib);
2520 jump_func->value.ancestor.type = lto_input_tree (ib, data_in);
2521 jump_func->value.ancestor.formal_id = lto_input_uleb128 (ib);
2523 case IPA_JF_CONST_MEMBER_PTR:
2524 jump_func->value.member_cst.pfn = lto_input_tree (ib, data_in);
2525 jump_func->value.member_cst.delta = lto_input_tree (ib, data_in);
2530 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
2531 relevant to indirect inlining to OB.  The order of fields written here
must match ipa_read_indirect_edge_info exactly.  NOTE(review): elided
listing -- the return type line and braces are not visible.  */
2534 ipa_write_indirect_edge_info (struct output_block *ob,
2535 struct cgraph_edge *cs)
2537 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2538 struct bitpack_d bp;
/* Signed LEB128 for both: param_index may be negative (no known
   parameter) and anc_offset is a HOST_WIDE_INT.  */
2540 lto_output_sleb128_stream (ob->main_stream, ii->param_index);
2541 lto_output_sleb128_stream (ob->main_stream, ii->anc_offset);
/* A one-bit flag packed into its own bitpack.  */
2542 bp = bitpack_create (ob->main_stream);
2543 bp_pack_value (&bp, ii->polymorphic, 1);
2544 lto_output_bitpack (&bp);
/* OBJ_TYPE_REF token and type are only meaningful (and only streamed)
   for polymorphic calls.  */
2546 if (ii->polymorphic)
2548 lto_output_sleb128_stream (ob->main_stream, ii->otr_token);
2549 lto_output_tree (ob, ii->otr_type, true);
2553 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2554 relevant to indirect inlining from IB.  Must read fields in exactly the
order ipa_write_indirect_edge_info wrote them.  NOTE(review): elided
listing -- the return type line and braces are not visible.  */
2557 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2558 struct data_in *data_in,
2559 struct cgraph_edge *cs)
2561 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2562 struct bitpack_d bp;
/* Signed LEB128s, mirroring the writer.  */
2564 ii->param_index = (int) lto_input_sleb128 (ib);
2565 ii->anc_offset = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2566 bp = lto_input_bitpack (ib);
2567 ii->polymorphic = bp_unpack_value (&bp, 1);
/* Token and type were only streamed for polymorphic calls.  */
2568 if (ii->polymorphic)
2570 ii->otr_token = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2571 ii->otr_type = lto_input_tree (ib, data_in);
2575 /* Stream out NODE info to OB. */
/* NOTE(review): elided listing -- the return type line, some local
   declarations (e.g. node_ref and j) and braces are not visible.  Layout
   written: node reference, a bitpack of flags plus one "used" bit per
   parameter, then per-callee argument counts and jump functions, then the
   indirect-edge info.  ipa_read_node_info must mirror this.  */
2578 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
2581 lto_cgraph_encoder_t encoder;
2582 struct ipa_node_params *info = IPA_NODE_REF (node);
2584 struct cgraph_edge *e;
2585 struct bitpack_d bp;
/* Encode NODE into the cgraph encoder's reference space and stream the
   reference.  */
2587 encoder = ob->decl_state->cgraph_node_encoder;
2588 node_ref = lto_cgraph_encoder_encode (encoder, node);
2589 lto_output_uleb128_stream (ob->main_stream, node_ref);
2591 bp = bitpack_create (ob->main_stream);
2592 bp_pack_value (&bp, info->called_with_var_arguments, 1);
/* Streaming is only valid for fully analyzed, non-cloned, dequeued
   nodes.  */
2593 gcc_assert (info->uses_analysis_done
2594 || ipa_get_param_count (info) == 0);
2595 gcc_assert (!info->node_enqueued);
2596 gcc_assert (!info->ipcp_orig_node);
/* One "used" bit per formal parameter, in the same bitpack.  */
2597 for (j = 0; j < ipa_get_param_count (info); j++)
2598 bp_pack_value (&bp, info->params[j].used, 1);
2599 lto_output_bitpack (&bp);
/* For every direct call edge: argument count followed by that many jump
   functions.  */
2600 for (e = node->callees; e; e = e->next_callee)
2602 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2604 lto_output_uleb128_stream (ob->main_stream,
2605 ipa_get_cs_argument_count (args));
2606 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2607 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
/* Indirect edges carry their own cgraph_indirect_call_info payload.  */
2609 for (e = node->indirect_calls; e; e = e->next_callee)
2610 ipa_write_indirect_edge_info (ob, e);
2613 /* Stream in NODE info from IB. */
/* NOTE(review): elided listing -- the return type line, the declaration of
   k and braces are not visible.  Reads the payload written by
   ipa_write_node_info (the node reference itself is consumed by the caller,
   ipa_prop_read_section).  */
2616 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
2617 struct data_in *data_in)
2619 struct ipa_node_params *info = IPA_NODE_REF (node);
2621 struct cgraph_edge *e;
2622 struct bitpack_d bp;
/* Make sure param descriptors exist before unpacking per-param bits.  */
2624 ipa_initialize_node_params (node);
2626 bp = lto_input_bitpack (ib);
2627 info->called_with_var_arguments = bp_unpack_value (&bp, 1);
/* The writer asserted uses_analysis_done (or zero params); restore that
   state here rather than streaming it.  */
2628 if (ipa_get_param_count (info) != 0)
2629 info->uses_analysis_done = true;
2630 info->node_enqueued = false;
/* One "used" bit per parameter, same order as the writer packed them.  */
2631 for (k = 0; k < ipa_get_param_count (info); k++)
2632 info->params[k].used = bp_unpack_value (&bp, 1);
/* Per direct call edge: argument count, then that many jump functions
   into a freshly GC-allocated, zero-cleared vector.  (Original lines
   2639-2641 are elided here -- presumably a zero-count early-out; verify
   against the full source.)  */
2633 for (e = node->callees; e; e = e->next_callee)
2635 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2636 int count = lto_input_uleb128 (ib);
2638 ipa_set_cs_argument_count (args, count);
2642 args->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
2643 (ipa_get_cs_argument_count (args));
2644 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2645 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
/* Indirect edges mirror ipa_write_indirect_edge_info.  */
2647 for (e = node->indirect_calls; e; e = e->next_callee)
2648 ipa_read_indirect_edge_info (ib, data_in, e);
2651 /* Write jump functions for nodes in SET. */
/* NOTE(review): elided listing -- return type line, braces and the body of
   the first loop (presumably a count++ at original line 2667) are not
   visible.  Section layout: node count as ULEB128, then one
   ipa_write_node_info record per analyzed node with IPA info, then a
   terminating zero byte.  */
2654 ipa_prop_write_jump_functions (cgraph_node_set set)
2656 struct cgraph_node *node;
2657 struct output_block *ob = create_output_block (LTO_section_jump_functions);
2658 unsigned int count = 0;
2659 cgraph_node_set_iterator csi;
2661 ob->cgraph_node = NULL;
/* First pass: count the nodes that will actually be streamed, so the
   reader knows how many records to expect.  */
2663 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2665 node = csi_node (csi);
2666 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2670 lto_output_uleb128_stream (ob->main_stream, count);
2672 /* Process all of the functions. */
/* Second pass must use the same predicate as the counting pass above or
   the count in the stream will be wrong.  */
2673 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2675 node = csi_node (csi);
2676 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2677 ipa_write_node_info (ob, node);
/* Terminator byte, then emit the section and release the block.  */
2679 lto_output_1_stream (ob->main_stream, 0);
2680 produce_asm (ob, NULL);
2681 destroy_output_block (ob);
2684 /* Read section in file FILE_DATA of length LEN with data DATA. */
/* NOTE(review): elided listing -- return type line, braces and some local
   declarations (count, i, index, the LEN parameter line) are not visible.
   Parses one LTO_section_jump_functions section: locates the main and
   string sub-streams via the lto_function_header offsets, then reads the
   node count and one ipa_read_node_info record per node.  */
2687 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
/* The section starts with a standard function header; the main stream
   follows the (empty here) cfg area and the string table follows main.  */
2690 const struct lto_function_header *header =
2691 (const struct lto_function_header *) data;
2692 const int32_t cfg_offset = sizeof (struct lto_function_header);
2693 const int32_t main_offset = cfg_offset + header->cfg_size;
2694 const int32_t string_offset = main_offset + header->main_size;
2695 struct data_in *data_in;
2696 struct lto_input_block ib_main;
2700 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2704 lto_data_in_create (file_data, (const char *) data + string_offset,
2705 header->string_size, NULL);
/* Node count written by ipa_prop_write_jump_functions.  */
2706 count = lto_input_uleb128 (&ib_main);
2708 for (i = 0; i < count; i++)
2711 struct cgraph_node *node;
2712 lto_cgraph_encoder_t encoder;
/* Each record begins with the encoded node reference; map it back to the
   cgraph node through this file's encoder.  */
2714 index = lto_input_uleb128 (&ib_main);
2715 encoder = file_data->cgraph_node_encoder;
2716 node = lto_cgraph_encoder_deref (encoder, index);
2717 gcc_assert (node->analyzed);
2718 ipa_read_node_info (&ib_main, node, data_in);
/* Release the raw section data and the data_in helper.  */
2720 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
2722 lto_data_in_delete (data_in);
2725 /* Read ipcp jump functions. */
/* NOTE(review): elided listing -- return type line, braces, the declaration
   of j/len and the presumable "if (data)" guard before the per-file read
   are not visible.  Walks every LTO file's jump-function section and feeds
   it to ipa_prop_read_section, after making sure the IPA vectors and
   cgraph hooks exist.  */
2728 ipa_prop_read_jump_functions (void)
2730 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2731 struct lto_file_decl_data *file_data;
/* Lazily create the node-params / edge-args vectors and register the
   cgraph maintenance hooks before touching IPA_NODE_REF/IPA_EDGE_REF.  */
2734 ipa_check_create_node_params ();
2735 ipa_check_create_edge_args ();
2736 ipa_register_cgraph_hooks ();
2738 while ((file_data = file_data_vec[j++]))
2741 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
2744 ipa_prop_read_section (file_data, data, len);
2748 /* After merging units, we can get mismatch in argument counts.
2749 Also decl merging might have rendered parameter lists obsolete.
2750 Also compute called_with_variable_arg info. */
/* NOTE(review): elided listing -- return type line and braces (including
   the closing ones past this excerpt) are not visible.  */
2753 ipa_update_after_lto_read (void)
2755 struct cgraph_node *node;
2756 struct cgraph_edge *cs;
/* The vectors may not exist yet in this WPA/LTRANS stage.  */
2758 ipa_check_create_node_params ();
2759 ipa_check_create_edge_args ();
/* (Re)build parameter descriptors for every node first, since the pass
   below compares per-callee counts against them.  */
2761 for (node = cgraph_nodes; node; node = node->next)
2763 ipa_initialize_node_params (node);
/* Any edge whose streamed argument count no longer matches the merged
   callee's parameter count marks that callee as called with variable
   arguments, disabling IPA-CP assumptions about it.  */
2765 for (node = cgraph_nodes; node; node = node->next)
2767 for (cs = node->callees; cs; cs = cs->next_callee)
2769 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
2770 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
2771 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));