1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
25 #include "langhooks.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
46 struct param_analysis_info
/* NOTE(review): this extract is truncated -- the struct's braces and its
   "modified" flag (read/written by is_parm_modified_before_call below) are
   missing from view.  */
49 bitmap visited_statements;
52 /* Vector where the parameter infos are actually stored. */
53 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
54 /* Vector where the edge argument (jump function) infos are actually
   stored, indexed by call graph edge UID. */
55 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
57 /* Bitmap with all UIDs of call graph edges that have been already processed
58 by indirect inlining. */
59 static bitmap iinlining_processed_edges;
61 /* Holders of ipa cgraph hooks: */
62 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
63 static struct cgraph_node_hook_list *node_removal_hook_holder;
64 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
65 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
67 /* Add cgraph NODE described by INFO to the worklist WL regardless of whether
68 it is in one or not. It should almost never be used directly, as opposed to
69 ipa_push_func_to_list. */
72 ipa_push_func_to_list_1 (struct ipa_func_list **wl,
73 struct cgraph_node *node,
74 struct ipa_node_params *info)
76 struct ipa_func_list *temp;
/* Mark the node as enqueued so a later push can skip it.  */
78 info->node_enqueued = 1;
79 temp = XCNEW (struct ipa_func_list);
/* NOTE(review): extract is truncated here -- the lines that fill TEMP and
   link it at the head of *WL are missing from this view.  */
85 /* Initialize worklist to contain all functions. */
87 struct ipa_func_list *
88 ipa_init_func_list (void)
90 struct cgraph_node *node;
91 struct ipa_func_list * wl;
/* Walk every node in the call graph and push it onto the new list.  */
94 for (node = cgraph_nodes; node; node = node->next)
97 struct ipa_node_params *info = IPA_NODE_REF (node);
98 /* Unreachable nodes should have been eliminated before ipcp and
   inlining.  (Truncated comment in this extract.)  */
100 gcc_assert (node->needed || node->reachable);
101 ipa_push_func_to_list_1 (&wl, node, info);
/* NOTE(review): the initialization of WL to NULL and the final return are
   missing from this truncated view -- confirm against the full file.  */
107 /* Remove a function from the worklist WL and return it. */
110 ipa_pop_func_from_list (struct ipa_func_list **wl)
112 struct ipa_node_params *info;
113 struct ipa_func_list *first;
114 struct cgraph_node *node;
/* NOTE(review): truncated -- the unlinking of FIRST from *WL and the
   assignment of NODE are not visible here.  */
121 info = IPA_NODE_REF (node);
/* Clear the flag set by ipa_push_func_to_list_1 so the node can be
   re-enqueued later.  */
122 info->node_enqueued = 0;
126 /* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO; returns -1 if not found (per the usual convention -- the return
   statements are truncated in this extract).  */
130 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
134 count = ipa_get_param_count (info);
/* Linear scan over the formal parameter descriptors.  */
135 for (i = 0; i < count; i++)
136 if (ipa_get_param(info, i) == ptree)
142 /* Populate the param_decl field in parameter descriptors of INFO that
143 corresponds to NODE. */
146 ipa_populate_param_decls (struct cgraph_node *node,
147 struct ipa_node_params *info)
/* Walk the DECL_ARGUMENTS chain of the function decl and record each
   PARM_DECL into the corresponding descriptor slot.  */
155 fnargs = DECL_ARGUMENTS (fndecl);
157 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
159 info->params[param_num].decl = parm;
164 /* Return how many formal parameters FNDECL has. */
167 count_formal_params_1 (tree fndecl)
/* Count the links of the DECL_ARGUMENTS chain (increment statement is
   truncated in this extract).  */
172 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
178 /* Count number of formal parameters in NODE. Store the result to the
179 appropriate field of INFO. */
182 ipa_count_formal_params (struct cgraph_node *node,
183 struct ipa_node_params *info)
187 param_num = count_formal_params_1 (node->decl);
188 ipa_set_param_count (info, param_num);
191 /* Initialize the ipa_node_params structure associated with NODE by counting
192 the function parameters, creating the descriptors and populating their
   decls (comment truncated in this extract).  */
196 ipa_initialize_node_params (struct cgraph_node *node)
198 struct ipa_node_params *info = IPA_NODE_REF (node);
202 ipa_count_formal_params (node, info);
/* Allocate zero-initialized descriptors, one per formal parameter.  */
203 info->params = XCNEWVEC (struct ipa_param_descriptor,
204 ipa_get_param_count (info));
205 ipa_populate_param_decls (node, info);
209 /* Count number of arguments callsite CS has and store it in
210 ipa_edge_args structure corresponding to this callsite. */
213 ipa_count_arguments (struct cgraph_edge *cs)
218 stmt = cs->call_stmt;
219 gcc_assert (is_gimple_call (stmt));
220 arg_num = gimple_call_num_args (stmt);
/* Grow the edge-args vector so it can be indexed by any edge UID before
   storing the argument count for this edge.  */
221 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
222 <= (unsigned) cgraph_edge_max_uid)
223 VEC_safe_grow_cleared (ipa_edge_args_t, gc,
224 ipa_edge_args_vector, cgraph_edge_max_uid + 1);
225 ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
228 /* Print the jump functions associated with call graph edge CS to file F. */
231 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
235 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
/* Dump one line (or small group of lines) per actual argument, keyed by
   the kind of jump function computed for it.  */
236 for (i = 0; i < count; i++)
238 struct ipa_jump_func *jump_func;
239 enum jump_func_type type;
241 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
242 type = jump_func->type;
244 fprintf (f, " param %d: ", i);
245 if (type == IPA_JF_UNKNOWN)
246 fprintf (f, "UNKNOWN\n");
247 else if (type == IPA_JF_KNOWN_TYPE)
249 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
250 fprintf (f, "KNOWN TYPE, type in binfo is: ");
251 print_generic_expr (f, binfo_type, 0);
252 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
254 else if (type == IPA_JF_CONST)
256 tree val = jump_func->value.constant;
257 fprintf (f, "CONST: ");
258 print_generic_expr (f, val, 0);
/* For addresses of CONST_DECLs also dump the constant's initializer,
   which is usually more informative than the decl name.  */
259 if (TREE_CODE (val) == ADDR_EXPR
260 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
263 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
268 else if (type == IPA_JF_CONST_MEMBER_PTR)
270 fprintf (f, "CONST MEMBER PTR: ");
271 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
273 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
276 else if (type == IPA_JF_PASS_THROUGH)
278 fprintf (f, "PASS THROUGH: ");
279 fprintf (f, "%d, op %s ",
280 jump_func->value.pass_through.formal_id,
282 jump_func->value.pass_through.operation]);
283 if (jump_func->value.pass_through.operation != NOP_EXPR)
/* NOTE(review): BUG -- these three calls print to DUMP_FILE instead of
   the parameter F, so output goes to the wrong stream when F is not
   dump_file.  Should be print_generic_expr (f, ...) / fprintf (f, ...);
   the same mistake occurs again in the ANCESTOR branch below.  */
284 print_generic_expr (dump_file,
285 jump_func->value.pass_through.operand, 0);
286 fprintf (dump_file, "\n");
288 else if (type == IPA_JF_ANCESTOR)
290 fprintf (f, "ANCESTOR: ");
291 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
292 jump_func->value.ancestor.formal_id,
293 jump_func->value.ancestor.offset);
294 print_generic_expr (f, jump_func->value.ancestor.type, 0);
/* NOTE(review): same dump_file-vs-f bug as above.  */
295 fprintf (dump_file, "\n");
301 /* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F (comment truncated in this extract).  */
305 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
307 struct cgraph_edge *cs;
310 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
/* First the direct call edges...  */
311 for (cs = node->callees; cs; cs = cs->next_callee)
313 if (!ipa_edge_args_info_available_for_edge_p (cs))
316 fprintf (f, " callsite %s/%i -> %s/%i : \n",
317 cgraph_node_name (node), node->uid,
318 cgraph_node_name (cs->callee), cs->callee->uid);
319 ipa_print_node_jump_functions_for_edge (f, cs);
/* ...then the indirect ones, numbered in order of traversal.  */
322 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
324 if (!ipa_edge_args_info_available_for_edge_p (cs))
329 fprintf (f, " indirect callsite %d for stmt ", i);
330 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
333 fprintf (f, " indirect callsite %d :\n", i);
334 ipa_print_node_jump_functions_for_edge (f, cs);
339 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
342 ipa_print_all_jump_functions (FILE *f)
344 struct cgraph_node *node;
346 fprintf (f, "\nJump functions:\n");
347 for (node = cgraph_nodes; node; node = node->next)
349 ipa_print_node_jump_functions (f, node);
353 /* Structure to be passed in between detect_type_change and
354 check_stmt_for_type_change. */
356 struct type_change_info
358 /* Set to true if dynamic type change has been detected. */
359 bool type_maybe_changed;
362 /* Return true if STMT can modify a virtual method table pointer.
364 This function makes special assumptions about both constructors and
365 destructors which are all the functions that are allowed to alter the VMT
366 pointers. It assumes that destructors begin with assignment into all VMT
367 pointers and that constructors essentially look in the following way:
369 1) The very first thing they do is that they call constructors of ancestor
370 sub-objects that have them.
372 2) Then VMT pointers of this and all its ancestors is set to new values
373 corresponding to the type corresponding to the constructor.
375 3) Only afterwards, other stuff such as constructor of member sub-objects
376 and the code written by the user is run. Only this may include calling
377 virtual functions, directly or indirectly.
379 There is no way to call a constructor of an ancestor sub-object in any
   other way (comment truncated in this extract).
382 This means that we do not have to care whether constructors get the correct
383 type information because they will always change it (in fact, if we define
384 the type to be given by the VMT pointer, it is undefined).
386 The most important fact to derive from the above is that if, for some
387 statement in the section 3, we try to detect whether the dynamic type has
388 changed, we can safely ignore all calls as we examine the function body
389 backwards until we reach statements in section 2 because these calls cannot
390 be ancestor constructors or destructors (if the input is not bogus) and so
391 do not change the dynamic type (this holds true only for automatically
392 allocated objects but at the moment we devirtualize only these). We then
393 must detect that statements in section 2 change the dynamic type and can try
394 to derive the new type. That is enough and we can stop, we will never see
395 the calls into constructors of sub-objects in this code. Therefore we can
396 safely ignore all call statements that we traverse.
398 */
400 stmt_may_be_vtbl_ptr_store (gimple stmt)
/* Calls are ignored per the reasoning above.  */
402 if (is_gimple_call (stmt))
404 else if (is_gimple_assign (stmt))
406 tree lhs = gimple_assign_lhs (stmt);
/* A scalar store to a non-virtual field cannot change the VMT pointer.  */
408 if (TREE_CODE (lhs) == COMPONENT_REF
409 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
410 && !AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
412 /* In the future we might want to use get_base_ref_and_offset to find
413 if there is a field corresponding to the offset and if so, proceed
414 almost like if it was a component ref. */
419 /* Callback of walk_aliased_vdefs and a helper function for
420 detect_type_change to check whether a particular statement may modify
421 the virtual table pointer, and if possible also determine the new type of
422 the (sub-)object. It stores its result into DATA, which points to a
423 type_change_info structure. */
426 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
428 gimple stmt = SSA_NAME_DEF_STMT (vdef);
429 struct type_change_info *tci = (struct type_change_info *) data;
/* Conservatively record a possible change; the walk can then stop early
   (the return statements are truncated in this extract).  */
431 if (stmt_may_be_vtbl_ptr_store (stmt))
433 tci->type_maybe_changed = true;
440 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
441 looking for assignments to its virtual table pointer. If it is, return true
442 and fill in the jump function JFUNC with relevant type information or set it
443 to unknown. ARG is the object itself (not a pointer to it, unless
444 dereferenced). BASE is the base of the memory access as returned by
445 get_ref_base_and_extent, as is the offset. */
448 detect_type_change (tree arg, tree base, gimple call,
449 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
451 struct type_change_info tci;
454 gcc_checking_assert (DECL_P (arg)
455 || TREE_CODE (arg) == MEM_REF
456 || handled_component_p (arg));
457 /* Const calls cannot call virtual methods through VMT and so type changes do
   not matter -- a call without a VUSE reads no memory.  */
459 if (!gimple_vuse (call))
462 tci.type_maybe_changed = false;
/* Build an ao_ref describing the VMT-pointer-sized access at OFFSET.
   NOTE(review): the lines initializing ao.ref/ao.base/ao.offset are missing
   from this truncated extract.  */
467 ao.size = POINTER_SIZE;
468 ao.max_size = ao.size;
469 ao.ref_alias_set = -1;
470 ao.base_alias_set = -1;
/* Walk virtual defs backwards from CALL looking for a possible VMT store.  */
472 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
474 if (!tci.type_maybe_changed)
/* A change was (possibly) detected; we cannot tell the new type, so mark
   the jump function unknown.  */
477 jfunc->type = IPA_JF_UNKNOWN;
481 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
482 SSA name (its dereference will become the base and the offset is assumed to
   be zero -- comment truncated in this extract).  */
486 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
488 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
/* Only pointers to RECORD_TYPEs can carry a VMT pointer.  */
489 if (!POINTER_TYPE_P (TREE_TYPE (arg))
490 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
/* Synthesize a zero-offset dereference of ARG to use as both the reference
   and its base.  */
493 arg = build2 (MEM_REF, ptr_type_node, arg,
494 build_int_cst (ptr_type_node, 0));
496 return detect_type_change (arg, arg, call, jfunc, 0);
500 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
501 of an assignment statement STMT, try to find out whether NAME can be
502 described by a (possibly polynomial) pass-through jump-function or an
503 ancestor jump function and if so, write the appropriate function into
   JFUNC.  INFO describes the caller's formal parameters.  */
507 compute_complex_assign_jump_func (struct ipa_node_params *info,
508 struct ipa_jump_func *jfunc,
509 gimple call, gimple stmt, tree name)
511 HOST_WIDE_INT offset, size, max_size;
512 tree op1, op2, base, ssa;
515 op1 = gimple_assign_rhs1 (stmt);
516 op2 = gimple_assign_rhs2 (stmt);
/* Case 1: RHS1 is directly a formal parameter (default-def SSA name).  */
518 if (TREE_CODE (op1) == SSA_NAME
519 && SSA_NAME_IS_DEFAULT_DEF (op1))
521 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
/* A binary operation: only IP-invariant second operands qualify, and for
   non-comparisons the result type must match NAME's type.  */
527 if (!is_gimple_ip_invariant (op2)
528 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
529 && !useless_type_conversion_p (TREE_TYPE (name),
533 jfunc->type = IPA_JF_PASS_THROUGH;
534 jfunc->value.pass_through.formal_id = index;
535 jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
536 jfunc->value.pass_through.operand = op2;
/* A plain copy/nop conversion: a simple pass-through, provided the dynamic
   type of the parameter has not changed before CALL.  */
538 else if (gimple_assign_unary_nop_p (stmt)
539 && !detect_type_change_ssa (op1, call, jfunc))
541 jfunc->type = IPA_JF_PASS_THROUGH;
542 jfunc->value.pass_through.formal_id = index;
543 jfunc->value.pass_through.operation = NOP_EXPR;
/* Case 2: RHS1 is &parm->field -- try to build an ancestor jump function
   describing a cast to an ancestor sub-object.  */
548 if (TREE_CODE (op1) != ADDR_EXPR)
550 op1 = TREE_OPERAND (op1, 0);
551 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
553 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
554 if (TREE_CODE (base) != MEM_REF
555 /* If this is a varying address, punt. */
/* Fold the MEM_REF's constant offset into the bit offset.  */
559 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
560 ssa = TREE_OPERAND (base, 0);
561 if (TREE_CODE (ssa) != SSA_NAME
562 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
566 /* Dynamic types are changed only in constructors and destructors and */
567 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
569 && !detect_type_change (op1, base, call, jfunc, offset))
571 jfunc->type = IPA_JF_ANCESTOR;
572 jfunc->value.ancestor.formal_id = index;
573 jfunc->value.ancestor.offset = offset;
574 jfunc->value.ancestor.type = TREE_TYPE (op1);
579 /* Given that an actual argument is an SSA_NAME that is a result of a phi
580 statement PHI, try to find out whether NAME is in fact a
581 multiple-inheritance typecast from a descendant into an ancestor of a formal
582 parameter and thus can be described by an ancestor jump function and if so,
583 write the appropriate function into JFUNC.
585 Essentially we want to match the following pattern:
593 iftmp.1_3 = &obj_2(D)->D.1762;
596 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
597 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
601 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
602 struct ipa_jump_func *jfunc,
603 gimple call, gimple phi)
605 HOST_WIDE_INT offset, size, max_size;
607 basic_block phi_bb, assign_bb, cond_bb;
608 tree tmp, parm, expr, obj;
/* Only a two-argument PHI where exactly one argument is a literal zero
   (the NULL branch of the pattern) can match.  */
611 if (gimple_phi_num_args (phi) != 2)
614 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
615 tmp = PHI_ARG_DEF (phi, 0);
616 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
617 tmp = PHI_ARG_DEF (phi, 1);
620 if (TREE_CODE (tmp) != SSA_NAME
621 || SSA_NAME_IS_DEFAULT_DEF (tmp)
622 || !POINTER_TYPE_P (TREE_TYPE (tmp))
623 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
/* The non-NULL PHI argument must come from a single-predecessor block
   containing a simple assignment of the form tmp = &parm->field.  */
626 assign = SSA_NAME_DEF_STMT (tmp);
627 assign_bb = gimple_bb (assign);
628 if (!single_pred_p (assign_bb)
629 || !gimple_assign_single_p (assign))
631 expr = gimple_assign_rhs1 (assign);
633 if (TREE_CODE (expr) != ADDR_EXPR)
635 expr = TREE_OPERAND (expr, 0);
637 expr = get_ref_base_and_extent (expr, &offset, &size, &max_size);
639 if (TREE_CODE (expr) != MEM_REF
640 /* If this is a varying address, punt. */
644 offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
645 parm = TREE_OPERAND (expr, 0);
646 if (TREE_CODE (parm) != SSA_NAME
647 || !SSA_NAME_IS_DEFAULT_DEF (parm)
651 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
/* The predecessor block must end in "if (parm != 0)" -- the NULL check
   guarding the cast.  */
655 cond_bb = single_pred (assign_bb);
656 cond = last_stmt (cond_bb);
658 || gimple_code (cond) != GIMPLE_COND
659 || gimple_cond_code (cond) != NE_EXPR
660 || gimple_cond_lhs (cond) != parm
661 || !integer_zerop (gimple_cond_rhs (cond)))
/* Both PHI predecessors must be the assignment block and the condition
   block -- no other control flow may reach the PHI.  */
664 phi_bb = gimple_bb (phi);
665 for (i = 0; i < 2; i++)
667 basic_block pred = EDGE_PRED (phi_bb, i)->src;
668 if (pred != assign_bb && pred != cond_bb)
672 if (!detect_type_change (obj, expr, call, jfunc, offset))
674 jfunc->type = IPA_JF_ANCESTOR;
675 jfunc->value.ancestor.formal_id = index;
676 jfunc->value.ancestor.offset = offset;
/* NOTE(review): stray double semicolon below -- harmless empty statement,
   but worth cleaning up in the full file.  */
677 jfunc->value.ancestor.type = TREE_TYPE (obj);;
681 /* Given OP which is passed as an actual argument to a called function,
682 determine if it is possible to construct a KNOWN_TYPE jump function for it
683 and if so, create one and store it to JFUNC. */
686 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
689 HOST_WIDE_INT offset, size, max_size;
/* Only the address of an automatic object of RECORD_TYPE qualifies.  */
692 if (TREE_CODE (op) != ADDR_EXPR
693 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
696 op = TREE_OPERAND (op, 0);
697 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
701 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
702 || is_global_var (base))
/* Punt if the dynamic type may have changed before the call.  */
705 if (detect_type_change (op, base, call, jfunc, offset))
/* Look up the BINFO describing the sub-object at OFFSET.  */
708 binfo = TYPE_BINFO (TREE_TYPE (base));
711 binfo = get_binfo_at_offset (binfo, offset, TREE_TYPE (op));
714 jfunc->type = IPA_JF_KNOWN_TYPE;
715 jfunc->value.base_binfo = binfo;
720 /* Determine the jump functions of scalar arguments. Scalar means SSA names
721 and constants of a number of selected types. INFO is the ipa_node_params
722 structure associated with the caller, FUNCTIONS is a pointer to an array of
723 jump function structures associated with CALL which is the call statement
   being examined.  */
727 compute_scalar_jump_functions (struct ipa_node_params *info,
728 struct ipa_jump_func *functions,
734 for (num = 0; num < gimple_call_num_args (call); num++)
736 arg = gimple_call_arg (call, num);
/* Interprocedural invariants become CONST jump functions directly.  */
738 if (is_gimple_ip_invariant (arg))
740 functions[num].type = IPA_JF_CONST;
741 functions[num].value.constant = arg;
743 else if (TREE_CODE (arg) == SSA_NAME)
/* A default-def SSA name is the formal parameter itself: a simple
   pass-through, unless its dynamic type may have changed.  */
745 if (SSA_NAME_IS_DEFAULT_DEF (arg))
747 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
750 && !detect_type_change_ssa (arg, call, &functions[num]))
752 functions[num].type = IPA_JF_PASS_THROUGH;
753 functions[num].value.pass_through.formal_id = index;
754 functions[num].value.pass_through.operation = NOP_EXPR;
/* Otherwise dispatch on the defining statement: assignments may yield
   pass-through/ancestor functions, PHIs the multiple-inheritance cast.  */
759 gimple stmt = SSA_NAME_DEF_STMT (arg);
760 if (is_gimple_assign (stmt))
761 compute_complex_assign_jump_func (info, &functions[num],
763 else if (gimple_code (stmt) == GIMPLE_PHI)
764 compute_complex_ancestor_jump_func (info, &functions[num],
/* Non-SSA, non-invariant arguments: try a KNOWN_TYPE function.  */
769 compute_known_type_jump_func (arg, &functions[num], call);
773 /* Inspect the given TYPE and return true iff it has the same structure (the
774 same number of fields of the same types) as a C++ member pointer. If
775 METHOD_PTR and DELTA are non-NULL, store the trees representing the
776 corresponding fields there. */
779 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
783 if (TREE_CODE (type) != RECORD_TYPE)
/* First field must be a pointer to a method.  */
786 fld = TYPE_FIELDS (type);
787 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
788 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
/* Second field is the delta.  NOTE(review): INTEGRAL_TYPE_P is normally
   applied to a type, not a FIELD_DECL -- verify against the full file
   whether this should be INTEGRAL_TYPE_P (TREE_TYPE (fld)) and whether the
   condition's polarity is right.  */
794 fld = DECL_CHAIN (fld);
795 if (!fld || INTEGRAL_TYPE_P (fld))
/* There must be exactly two fields.  */
800 if (DECL_CHAIN (fld))
806 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
807 boolean variable pointed to by DATA. */
810 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
813 bool *b = (bool *) data;
818 /* Return true if the formal parameter PARM might have been modified in this
819 function before reaching the statement CALL. PARM_INFO is a pointer to a
820 structure containing intermediate information about PARM. */
823 is_parm_modified_before_call (struct param_analysis_info *parm_info,
824 gimple call, tree parm)
826 bool modified = false;
/* Cached positive answer from an earlier query.  */
829 if (parm_info->modified)
/* Walk virtual defs before CALL looking for any statement that may clobber
   PARM; visited statements are shared across queries via PARM_INFO.  */
832 ao_ref_init (&refd, parm);
833 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
834 &modified, &parm_info->visited_statements);
/* Cache a positive result for subsequent calls.  */
837 parm_info->modified = true;
843 /* Go through arguments of the CALL and for every one that looks like a member
844 pointer, check whether it can be safely declared pass-through and if so,
845 mark that to the corresponding item of jump FUNCTIONS. Return true iff
846 there are non-pass-through member pointers within the arguments. INFO
847 describes formal parameters of the caller. PARMS_INFO is a pointer to a
848 vector containing intermediate information about each formal parameter. */
851 compute_pass_through_member_ptrs (struct ipa_node_params *info,
852 struct param_analysis_info *parms_info,
853 struct ipa_jump_func *functions,
856 bool undecided_members = false;
860 for (num = 0; num < gimple_call_num_args (call); num++)
862 arg = gimple_call_arg (call, num);
864 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
/* Only unmodified PARM_DECLs passed straight through qualify.  */
866 if (TREE_CODE (arg) == PARM_DECL)
868 int index = ipa_get_param_decl_index (info, arg);
870 gcc_assert (index >=0);
871 if (!is_parm_modified_before_call (&parms_info[index], call, arg))
873 functions[num].type = IPA_JF_PASS_THROUGH;
874 functions[num].value.pass_through.formal_id = index;
875 functions[num].value.pass_through.operation = NOP_EXPR;
/* A modified or non-PARM_DECL member pointer remains undecided; the
   caller will then try to prove it constant.  */
878 undecided_members = true;
881 undecided_members = true;
885 return undecided_members;
888 /* Simple function filling in a member pointer constant jump function (with PFN
889 and DELTA as the constant value) into JFUNC. */
892 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
893 tree pfn, tree delta)
895 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
896 jfunc->value.member_cst.pfn = pfn;
897 jfunc->value.member_cst.delta = delta;
900 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
901 return the rhs of its defining statement. */
904 get_ssa_def_if_simple_copy (tree rhs)
/* Follow chains of single-rhs copies until a non-copy or default def.  */
906 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
908 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
910 if (gimple_assign_single_p (def_stmt))
911 rhs = gimple_assign_rhs1 (def_stmt);
918 /* Traverse statements from CALL backwards, scanning whether the argument ARG
919 which is a member pointer is filled in with constant values. If it is, fill
920 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
921 fields of the record type of the member pointer. To give an example, we
922 look for a pattern looking like the following:
924 D.2515.__pfn ={v} printStuff;
925 D.2515.__delta ={v} 0;
926 i_1 = doprinting (D.2515); */
929 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
930 tree delta_field, struct ipa_jump_func *jfunc)
932 gimple_stmt_iterator gsi;
933 tree method = NULL_TREE;
934 tree delta = NULL_TREE;
936 gsi = gsi_for_stmt (call);
/* Scan backwards from CALL; any clobber of ARG that is not one of the two
   expected field stores makes the result unknown.  */
939 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
941 gimple stmt = gsi_stmt (gsi);
/* Statements that cannot touch ARG are irrelevant -- skip them.  */
944 if (!stmt_may_clobber_ref_p (stmt, arg))
946 if (!gimple_assign_single_p (stmt))
949 lhs = gimple_assign_lhs (stmt);
950 rhs = gimple_assign_rhs1 (stmt);
952 if (TREE_CODE (lhs) != COMPONENT_REF
953 || TREE_OPERAND (lhs, 0) != arg)
956 fld = TREE_OPERAND (lhs, 1);
/* Store into the __pfn field: must be the address of a method.  */
957 if (!method && fld == method_field)
959 rhs = get_ssa_def_if_simple_copy (rhs);
960 if (TREE_CODE (rhs) == ADDR_EXPR
961 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
962 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
964 method = TREE_OPERAND (rhs, 0);
/* If the delta was already found, both constants are known.  */
967 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
/* Store into the __delta field: must be an integer constant.  */
975 if (!delta && fld == delta_field)
977 rhs = get_ssa_def_if_simple_copy (rhs);
978 if (TREE_CODE (rhs) == INTEGER_CST)
983 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
995 /* Go through the arguments of the CALL and for every member pointer within,
996 try to determine whether it is a constant. If it is, create a corresponding
997 constant jump function in FUNCTIONS which is an array of jump functions
998 associated with the call. */
1001 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
1005 tree arg, method_field, delta_field;
1007 for (num = 0; num < gimple_call_num_args (call); num++)
1009 arg = gimple_call_arg (call, num);
/* Only arguments still marked UNKNOWN that look like member pointers
   need the backward scan.  */
1011 if (functions[num].type == IPA_JF_UNKNOWN
1012 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
1014 determine_cst_member_ptr (call, arg, method_field, delta_field,
1019 /* Compute jump function for all arguments of callsite CS and insert the
1020 information in the jump_functions array in the ipa_edge_args corresponding
1021 to this callsite. */
1024 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_info,
1025 struct cgraph_edge *cs)
1027 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1028 struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
/* Nothing to do for argument-less calls or already-computed edges.  */
1031 if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
1033 arguments->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
1034 (ipa_get_cs_argument_count (arguments));
1036 call = cs->call_stmt;
1037 gcc_assert (is_gimple_call (call));
1039 /* We will deal with constants and SSA scalars first: */
1040 compute_scalar_jump_functions (info, arguments->jump_functions, call);
1042 /* Let's check whether there are any potential member pointers and if so,
1043 whether we can determine their functions as pass_through. */
1044 if (!compute_pass_through_member_ptrs (info, parms_info,
1045 arguments->jump_functions, call))
1048 /* Finally, let's check whether we actually pass a new constant member
   pointer here (only reached when some member pointers were undecided). */
1050 compute_cst_member_ptr_arguments (arguments->jump_functions, call);
1053 /* Compute jump functions for all edges - both direct and indirect - outgoing
1054 from NODE. Also count the actual arguments in the process. */
1057 ipa_compute_jump_functions (struct cgraph_node *node,
1058 struct param_analysis_info *parms_info)
1060 struct cgraph_edge *cs;
1062 for (cs = node->callees; cs; cs = cs->next_callee)
1064 /* We do not need to bother analyzing calls to unknown
1065 functions unless they may become known during lto/whopr. */
1066 if (!cs->callee->analyzed && !flag_lto)
1068 ipa_count_arguments (cs);
1069 /* If the descriptor of the callee is not initialized yet, we have to do
   it now (only possible for analyzed callees). */
1071 if (cs->callee->analyzed)
1072 ipa_initialize_node_params (cs->callee);
/* A mismatch between actual and formal counts marks the callee as called
   with variable arguments.  */
1073 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
1074 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
1075 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
1076 ipa_compute_jump_functions_for_edge (parms_info, cs);
/* Indirect edges have no known callee to validate against.  */
1079 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1081 ipa_count_arguments (cs);
1082 ipa_compute_jump_functions_for_edge (parms_info, cs);
1086 /* If RHS looks like a rhs of a statement loading pfn from a member
1087 pointer formal parameter, return the parameter, otherwise return
1088 NULL. If USE_DELTA, then we look for a use of the delta field
1089 rather than the pfn. */
1092 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
1094 tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;
/* Form 1: a COMPONENT_REF -- remember the referenced field.  */
1096 if (TREE_CODE (rhs) == COMPONENT_REF)
1098 ref_field = TREE_OPERAND (rhs, 1);
1099 rhs = TREE_OPERAND (rhs, 0);
/* Form 2: a MEM_REF with a constant offset.  */
1102 ref_field = NULL_TREE;
1103 if (TREE_CODE (rhs) != MEM_REF)
1105 rec = TREE_OPERAND (rhs, 0);
1106 if (TREE_CODE (rec) != ADDR_EXPR)
1108 rec = TREE_OPERAND (rec, 0);
/* The base must be a PARM_DECL whose type has member-pointer shape.  */
1109 if (TREE_CODE (rec) != PARM_DECL
1110 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1113 ref_offset = TREE_OPERAND (rhs, 1);
/* With an explicit field reference the MEM_REF offset must be zero and the
   field must be the requested one.  */
1117 if (integer_nonzerop (ref_offset))
1125 return ref_field == fld ? rec : NULL_TREE;
/* Otherwise match the MEM_REF offset against the byte position of the
   requested field.  */
1129 fld_offset = byte_position (delta_field);
1131 fld_offset = byte_position (ptr_field);
1133 return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
1136 /* If STMT looks like a statement loading a value from a member pointer formal
1137 parameter, this function returns that parameter. */
1140 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
1144 if (!gimple_assign_single_p (stmt))
1147 rhs = gimple_assign_rhs1 (stmt);
1148 return ipa_get_member_ptr_load_param (rhs, use_delta);
1151 /* Returns true iff T is an SSA_NAME defined by a statement. */
1154 ipa_is_ssa_with_stmt_def (tree t)
1156 if (TREE_CODE (t) == SSA_NAME
1157 && !SSA_NAME_IS_DEFAULT_DEF (t))
1163 /* Find the indirect call graph edge corresponding to STMT and add to it all
1164 information necessary to describe a call to a parameter number PARAM_INDEX.
1165 NODE is the caller. POLYMORPHIC should be set to true iff the call is a
   virtual (OBJ_TYPE_REF) call.  */
1169 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt,
1172 struct cgraph_edge *cs;
1174 cs = cgraph_edge (node, stmt);
1175 cs->indirect_info->param_index = param_index;
1176 cs->indirect_info->anc_offset = 0;
1177 cs->indirect_info->polymorphic = polymorphic;
/* For polymorphic calls also record the OBJ_TYPE_REF token and the static
   type of the object, needed later for devirtualization.  */
1180 tree otr = gimple_call_fn (stmt);
1181 tree type, token = OBJ_TYPE_REF_TOKEN (otr);
1182 cs->indirect_info->otr_token = tree_low_cst (token, 1);
1183 type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (otr)));
1184 cs->indirect_info->otr_type = type;
1188 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1189 (described by INFO). PARMS_INFO is a pointer to a vector containing
1190 intermediate information about each formal parameter. Currently it checks
1191 whether the call calls a pointer that is a formal parameter and if so, the
1192 parameter is marked with the called flag and an indirect call graph edge
1193 describing the call is created. This is very simple for ordinary pointers
1194 represented in SSA but not-so-nice when it comes to member pointers. The
1195 ugly part of this function does nothing more than trying to match the
1196 pattern of such a call. An example of such a pattern is the gimple dump
1197 below, the call is on the last line:
1200 f$__delta_5 = f.__delta;
1201 f$__pfn_24 = f.__pfn;
1205 f$__delta_5 = MEM[(struct *)&f];
1206 f$__pfn_24 = MEM[(struct *)&f + 4B];
1208 and a few lines below:
1211 D.2496_3 = (int) f$__pfn_24;
1212 D.2497_4 = D.2496_3 & 1;
1219 D.2500_7 = (unsigned int) f$__delta_5;
1220 D.2501_8 = &S + D.2500_7;
1221 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1222 D.2503_10 = *D.2502_9;
1223 D.2504_12 = f$__pfn_24 + -1;
1224 D.2505_13 = (unsigned int) D.2504_12;
1225 D.2506_14 = D.2503_10 + D.2505_13;
1226 D.2507_15 = *D.2506_14;
1227 iftmp.11_16 = (String:: *) D.2507_15;
1230 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1231 D.2500_19 = (unsigned int) f$__delta_5;
1232 D.2508_20 = &S + D.2500_19;
1233 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1235 Such patterns are results of simple calls to a member pointer:
1237 int doprinting (int (MyString::* f)(int) const)
1239 MyString S ("somestring");
1246 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1247 struct ipa_node_params *info,
1248 struct param_analysis_info *parms_info,
1249 gimple call, tree target)
1254 tree rec, rec2, cond;
1257 basic_block bb, virt_bb, join;
/* The easy case: TARGET is the default-definition SSA name of a formal
   parameter, i.e. a pointer parameter called without any prior
   modification.  Just note the call on that parameter.  */
1259 if (SSA_NAME_IS_DEFAULT_DEF (target))
1261 tree var = SSA_NAME_VAR (target);
1262 index = ipa_get_param_decl_index (info, var);
1264 ipa_note_param_call (node, index, call, false);
1268 /* Now we need to try to match the complex pattern of calling a member
/* Member-pointer calls must go through a pointer to METHOD_TYPE;
   anything else cannot match the pattern.  */
1271 if (!POINTER_TYPE_P (TREE_TYPE (target))
1272 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1275 def = SSA_NAME_DEF_STMT (target);
1276 if (gimple_code (def) != GIMPLE_PHI)
1279 if (gimple_phi_num_args (def) != 2)
1282 /* First, we need to check whether one of these is a load from a member
1283 pointer that is a parameter to this function. */
1284 n1 = PHI_ARG_DEF (def, 0);
1285 n2 = PHI_ARG_DEF (def, 1);
1286 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1288 d1 = SSA_NAME_DEF_STMT (n1);
1289 d2 = SSA_NAME_DEF_STMT (n2);
1291 join = gimple_bb (def);
/* Exactly one PHI argument must be a direct load of the __pfn field from
   the parameter; the other one comes out of the virtual-dispatch block.  */
1292 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
1294 if (ipa_get_stmt_member_ptr_load_param (d2, false))
1297 bb = EDGE_PRED (join, 0)->src;
1298 virt_bb = gimple_bb (d2);
1300 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
1302 bb = EDGE_PRED (join, 1)->src;
1303 virt_bb = gimple_bb (d1);
1308 /* Second, we need to check that the basic blocks are laid out in the way
1309 corresponding to the pattern. */
1311 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1312 || single_pred (virt_bb) != bb
1313 || single_succ (virt_bb) != join)
1316 /* Third, let's see that the branching is done depending on the least
1317 significant bit of the pfn. */
1319 branch = last_stmt (bb)
1320 if (!branch || gimple_code (branch) != GIMPLE_COND)
1323 if (gimple_cond_code (branch) != NE_EXPR
1324 || !integer_zerop (gimple_cond_rhs (branch)))
/* Walk the condition back to a (possibly converted) "pfn & 1" test.  */
1327 cond = gimple_cond_lhs (branch);
1328 if (!ipa_is_ssa_with_stmt_def (cond))
1331 def = SSA_NAME_DEF_STMT (cond);
1332 if (!is_gimple_assign (def)
1333 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1334 || !integer_onep (gimple_assign_rhs2 (def)))
1337 cond = gimple_assign_rhs1 (def);
1338 if (!ipa_is_ssa_with_stmt_def (cond))
1341 def = SSA_NAME_DEF_STMT (cond);
/* Skip over an optional integral conversion of the pfn value.  */
1343 if (is_gimple_assign (def)
1344 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1346 cond = gimple_assign_rhs1 (def);
1347 if (!ipa_is_ssa_with_stmt_def (cond))
1349 def = SSA_NAME_DEF_STMT (cond);
/* The tested value must itself be a load from the same member-pointer
   parameter; which field depends on where the target stores the virtual
   bit (__pfn or __delta).  */
1352 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1353 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1354 == ptrmemfunc_vbit_in_delta));
/* Finally, record the call only if the member-pointer parameter has not
   been modified before the call statement.  */
1359 index = ipa_get_param_decl_index (info, rec);
1360 if (index >= 0 && !is_parm_modified_before_call (&parms_info[index],
1362 ipa_note_param_call (node, index, call, false);
1367 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1368 object referenced in the expression is a formal parameter of the caller
1369 (described by INFO), create a call note for the statement. */
1372 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1373 struct ipa_node_params *info, gimple call,
1376 struct ipa_jump_func jfunc;
1377 tree obj = OBJ_TYPE_REF_OBJECT (target);
/* Strip an ADDR_EXPR and any COMPONENT_REFs wrapping the object so that
   we reach the underlying base reference.  */
1381 if (TREE_CODE (obj) == ADDR_EXPR)
1385 obj = TREE_OPERAND (obj, 0);
1387 while (TREE_CODE (obj) == COMPONENT_REF);
1388 if (TREE_CODE (obj) != MEM_REF)
1390 obj = TREE_OPERAND (obj, 0);
/* Only an unmodified parameter (its default-definition SSA name) can be
   tracked as the source of the virtual call.  */
1393 if (TREE_CODE (obj) != SSA_NAME
1394 || !SSA_NAME_IS_DEFAULT_DEF (obj))
1397 var = SSA_NAME_VAR (obj);
1398 index = ipa_get_param_decl_index (info, var);
/* Record the call only when no dynamic type change of the object could be
   detected between its creation and this call.  */
1401 && !detect_type_change_ssa (obj, call, &jfunc))
1402 ipa_note_param_call (node, index, call, true);
1405 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1406 of the caller (described by INFO). PARMS_INFO is a pointer to a vector
1407 containing intermediate information about each formal parameter. */
1410 ipa_analyze_call_uses (struct cgraph_node *node,
1411 struct ipa_node_params *info,
1412 struct param_analysis_info *parms_info, gimple call)
1414 tree target = gimple_call_fn (call);
/* Dispatch on the form of the callee expression: an SSA name means an
   ordinary indirect call, an OBJ_TYPE_REF means a C++ virtual call.  */
1416 if (TREE_CODE (target) == SSA_NAME)
1417 ipa_analyze_indirect_call_uses (node, info, parms_info, call, target);
1418 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1419 ipa_analyze_virtual_call_uses (node, info, call, target);
1423 /* Analyze the call statement STMT with respect to formal parameters (described
1424 in INFO) of caller given by NODE. Currently it only checks whether formal
1425 parameters are called. PARMS_INFO is a pointer to a vector containing
1426 intermediate information about each formal parameter. */
1429 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1430 struct param_analysis_info *parms_info, gimple stmt)
/* Only call statements are interesting here; everything else is handled
   by the load/store walk in the caller.  */
1432 if (is_gimple_call (stmt))
1433 ipa_analyze_call_uses (node, info, parms_info, stmt);
1436 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1437 If OP is a parameter declaration, mark it as used in the info structure
1441 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1442 tree op, void *data)
1444 struct ipa_node_params *info = (struct ipa_node_params *) data;
/* Look through component/array references to the underlying base; only a
   bare PARM_DECL base marks the parameter as used.  */
1446 op = get_base_address (op);
1448 && TREE_CODE (op) == PARM_DECL)
1450 int index = ipa_get_param_decl_index (info, op);
/* Every PARM_DECL of this function must have a descriptor slot.  */
1451 gcc_assert (index >= 0);
1452 info->params[index].used = true;
1458 /* Scan the function body of NODE and inspect the uses of formal parameters.
1459 Store the findings in various structures of the associated ipa_node_params
1460 structure, such as parameter flags, notes etc. PARMS_INFO is a pointer to a
1461 vector containing intermediate information about each formal parameter. */
1464 ipa_analyze_params_uses (struct cgraph_node *node,
1465 struct param_analysis_info *parms_info)
1467 tree decl = node->decl;
1469 struct function *func;
1470 gimple_stmt_iterator gsi;
1471 struct ipa_node_params *info = IPA_NODE_REF (node);
/* Nothing to do for parameterless functions or when this analysis has
   already been performed for NODE.  */
1474 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1477 for (i = 0; i < ipa_get_param_count (info); i++)
1479 tree parm = ipa_get_param (info, i);
1480 /* For SSA regs see if parameter is used. For non-SSA we compute
1481 the flag during modification analysis. */
1482 if (is_gimple_reg (parm)
1483 && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
1484 info->params[i].used = true;
/* Walk every non-debug statement: note calls through parameters and mark
   parameters referenced by loads, stores or address-takings.  */
1487 func = DECL_STRUCT_FUNCTION (decl);
1488 FOR_EACH_BB_FN (bb, func)
1490 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1492 gimple stmt = gsi_stmt (gsi);
1494 if (is_gimple_debug (stmt))
1497 ipa_analyze_stmt_uses (node, info, parms_info, stmt);
1498 walk_stmt_load_store_addr_ops (stmt, info,
1499 visit_ref_for_mod_analysis,
1500 visit_ref_for_mod_analysis,
1501 visit_ref_for_mod_analysis);
/* PHI nodes can also reference parameters; scan them the same way.  */
1503 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
1504 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1505 visit_ref_for_mod_analysis,
1506 visit_ref_for_mod_analysis,
1507 visit_ref_for_mod_analysis);
/* Remember that the analysis is done so we never redo it for NODE.  */
1510 info->uses_analysis_done = 1;
1513 /* Initialize the array describing properties of formal parameters of NODE,
1514 analyze their uses and compute jump functions associated with actual
1515 arguments of calls from within NODE. */
1518 ipa_analyze_node (struct cgraph_node *node)
1520 struct ipa_node_params *info = IPA_NODE_REF (node);
1521 struct param_analysis_info *parms_info;
/* Analysis needs NODE's body as the current function.  */
1524 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1525 current_function_decl = node->decl;
1526 ipa_initialize_node_params (node);
/* Per-parameter scratch data lives on the stack for the duration of the
   analysis; zero it so the visited bitmaps start out unallocated.  */
1528 param_count = ipa_get_param_count (info);
1529 parms_info = XALLOCAVEC (struct param_analysis_info, param_count);
1530 memset (parms_info, 0, sizeof (struct param_analysis_info) * param_count);
1532 ipa_analyze_params_uses (node, parms_info);
1533 ipa_compute_jump_functions (node, parms_info);
/* Free the lazily allocated visited-statements bitmaps.  */
1535 for (i = 0; i < param_count; i++)
1536 if (parms_info[i].visited_statements)
1537 BITMAP_FREE (parms_info[i].visited_statements);
1539 current_function_decl = NULL;
1544 /* Update the jump function DST when the call graph edge corresponding to SRC
1545 is being inlined, knowing that DST is of type ancestor and src of known
1549 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1550 struct ipa_jump_func *dst)
/* Try to locate the BINFO of the ancestor type at the given offset within
   the known type; success turns DST into a known-type jump function,
   failure degrades it to unknown.  */
1554 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1555 dst->value.ancestor.offset,
1556 dst->value.ancestor.type);
1559 dst->type = IPA_JF_KNOWN_TYPE;
1560 dst->value.base_binfo = new_binfo;
1563 dst->type = IPA_JF_UNKNOWN;
1566 /* Update the jump functions associated with call graph edge E when the call
1567 graph edge CS is being inlined, assuming that E->caller is already (possibly
1568 indirectly) inlined into CS->callee and that E has not been inlined. */
1571 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1572 struct cgraph_edge *e)
1574 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1575 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1576 int count = ipa_get_cs_argument_count (args);
1579 for (i = 0; i < count; i++)
1581 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1583 if (dst->type == IPA_JF_ANCESTOR)
1585 struct ipa_jump_func *src;
1587 /* Variable number of arguments can cause havoc if we try to access
1588 one that does not exist in the inlined edge. So make sure we
1590 if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
1592 dst->type = IPA_JF_UNKNOWN;
/* Compose the ancestor function with whatever describes the
   corresponding argument of the inlined edge.  */
1596 src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
1597 if (src->type == IPA_JF_KNOWN_TYPE)
1598 combine_known_type_and_ancestor_jfs (src, dst);
1599 else if (src->type == IPA_JF_PASS_THROUGH
1600 && src->value.pass_through.operation == NOP_EXPR)
1601 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
1602 else if (src->type == IPA_JF_ANCESTOR)
/* Two stacked ancestor functions: refer to the outer formal and
   accumulate the offsets.  */
1604 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
1605 dst->value.ancestor.offset += src->value.ancestor.offset;
1608 dst->type = IPA_JF_UNKNOWN;
1610 else if (dst->type == IPA_JF_PASS_THROUGH)
1612 struct ipa_jump_func *src;
1613 /* We must check range due to calls with variable number of arguments
1614 and we cannot combine jump functions with operations. */
1615 if (dst->value.pass_through.operation == NOP_EXPR
1616 && (dst->value.pass_through.formal_id
1617 < ipa_get_cs_argument_count (top)))
1619 src = ipa_get_ith_jump_func (top,
1620 dst->value.pass_through.formal_id);
1624 dst->type = IPA_JF_UNKNOWN;
1629 /* If TARGET is an addr_expr of a function declaration, make it the destination
1630 of an indirect edge IE and return the edge. Otherwise, return NULL. Delta,
1631 if non-NULL, is an integer constant that must be added to this pointer
1632 (first parameter). */
1634 struct cgraph_edge *
1635 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target, tree delta)
1637 struct cgraph_node *callee;
/* Strip a possible ADDR_EXPR; only a plain FUNCTION_DECL can become a
   direct call destination.  */
1639 if (TREE_CODE (target) == ADDR_EXPR)
1640 target = TREE_OPERAND (target, 0);
1641 if (TREE_CODE (target) != FUNCTION_DECL)
1643 callee = cgraph_node (target);
1646 ipa_check_create_node_params ();
1648 /* We can not make edges to inline clones. It is a bug that someone removed
1650 gcc_assert (!callee->global.inlined_to);
1652 cgraph_make_edge_direct (ie, callee, delta ? tree_low_cst (delta, 0) : 0);
1655 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1656 "(%s/%i -> %s/%i), for stmt ",
1657 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1658 cgraph_node_name (ie->caller), ie->caller->uid,
1659 cgraph_node_name (ie->callee), ie->callee->uid);
1661 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1663 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
1667 fprintf (dump_file, " Thunk delta is ");
1668 print_generic_expr (dump_file, delta, 0);
1669 fprintf (dump_file, "\n");
/* If argument and parameter counts disagree, remember that the callee is
   called with a variable number of arguments.  */
1673 if (ipa_get_cs_argument_count (IPA_EDGE_REF (ie))
1674 != ipa_get_param_count (IPA_NODE_REF (callee)))
1675 ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
1680 /* Try to find a destination for indirect edge IE that corresponds to a simple
1681 call or a call of a member function pointer and where the destination is a
1682 pointer formal parameter described by jump function JFUNC. If it can be
1683 determined, return the newly direct edge, otherwise return NULL. */
1685 static struct cgraph_edge *
1686 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1687 struct ipa_jump_func *jfunc)
/* Only constant jump functions pin the target: either a plain constant or
   the pfn field of a constant member pointer.  */
1691 if (jfunc->type == IPA_JF_CONST)
1692 target = jfunc->value.constant;
1693 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1694 target = jfunc->value.member_cst.pfn;
1698 return ipa_make_edge_direct_to_target (ie, target, NULL_TREE);
1701 /* Try to find a destination for indirect edge IE that corresponds to a
1702 virtual call based on a formal parameter which is described by jump
1703 function JFUNC and if it can be determined, make it direct and return the
1704 direct edge. Otherwise, return NULL. */
1706 static struct cgraph_edge *
1707 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1708 struct ipa_jump_func *jfunc)
1710 tree binfo, type, target, delta;
1711 HOST_WIDE_INT token;
1713 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1714 binfo = jfunc->value.base_binfo;
/* Narrow the known BINFO down to the ancestor at the recorded offset and
   look the OBJ_TYPE_REF token up in its virtual table.  */
1721 token = ie->indirect_info->otr_token;
1722 type = ie->indirect_info->otr_type;
1723 binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
/* NOTE(review): "mehtod" looks like it matches the callee's declared
   (misspelled) name in this source tree -- verify before "fixing".  */
1725 target = gimple_get_virt_mehtod_for_binfo (token, binfo, &delta, true);
1730 return ipa_make_edge_direct_to_target (ie, target, delta);
1735 /* Update the param called notes associated with NODE when CS is being inlined,
1736 assuming NODE is (potentially indirectly) inlined into CS->callee.
1737 Moreover, if the callee is discovered to be constant, create a new cgraph
1738 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1739 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1742 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
1743 struct cgraph_node *node,
1744 VEC (cgraph_edge_p, heap) **new_edges)
1746 struct ipa_edge_args *top;
1747 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
1750 ipa_check_create_edge_args ();
1751 top = IPA_EDGE_REF (cs);
/* NEXT_IE is fetched up front because the current edge may be turned into
   a direct one (and unlinked from this list) below.  */
1753 for (ie = node->indirect_calls; ie; ie = next_ie)
1755 struct cgraph_indirect_call_info *ici = ie->indirect_info;
1756 struct ipa_jump_func *jfunc;
1758 next_ie = ie->next_callee;
1759 if (bitmap_bit_p (iinlining_processed_edges, ie->uid))
1762 /* If we ever use indirect edges for anything other than indirect
1763 inlining, we will need to skip those with negative param_indices. */
1764 if (ici->param_index == -1)
1767 /* We must check range due to calls with variable number of arguments: */
1768 if (ici->param_index >= ipa_get_cs_argument_count (top))
1770 bitmap_set_bit (iinlining_processed_edges, ie->uid);
/* Translate the parameter index through the jump function of the
   inlined edge, accumulating ancestor offsets if any.  */
1774 jfunc = ipa_get_ith_jump_func (top, ici->param_index);
1775 if (jfunc->type == IPA_JF_PASS_THROUGH
1776 && jfunc->value.pass_through.operation == NOP_EXPR)
1777 ici->param_index = jfunc->value.pass_through.formal_id;
1778 else if (jfunc->type == IPA_JF_ANCESTOR)
1780 ici->param_index = jfunc->value.ancestor.formal_id;
1781 ici->anc_offset += jfunc->value.ancestor.offset;
1784 /* Either we can find a destination for this edge now or never. */
1785 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1787 if (ici->polymorphic)
1788 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
1790 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
1792 if (new_direct_edge)
1794 new_direct_edge->indirect_inlining_edge = 1;
1797 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
/* Pushing to the vector may have reallocated edge-args storage;
   refresh the cached pointer.  */
1799 top = IPA_EDGE_REF (cs);
1808 /* Recursively traverse subtree of NODE (including node) made of inlined
1809 cgraph_edges when CS has been inlined and invoke
1810 update_indirect_edges_after_inlining on all nodes and
1811 update_jump_functions_after_inlining on all non-inlined edges that lead out
1812 of this subtree. Newly discovered indirect edges will be added to
1813 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1817 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1818 struct cgraph_node *node,
1819 VEC (cgraph_edge_p, heap) **new_edges)
1821 struct cgraph_edge *e;
1824 res = update_indirect_edges_after_inlining (cs, node, new_edges);
/* Recurse into callees that were themselves inlined; for edges that were
   not inlined, just refresh their jump functions.  */
1826 for (e = node->callees; e; e = e->next_callee)
1827 if (!e->inline_failed)
1828 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1830 update_jump_functions_after_inlining (cs, e);
1835 /* Update jump functions and call note functions on inlining the call site CS.
1836 CS is expected to lead to a node already cloned by
1837 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1838 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1842 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1843 VEC (cgraph_edge_p, heap) **new_edges)
1845 /* FIXME lto: We do not stream out indirect call information. */
1849 /* Do nothing if the preparation phase has not been carried out yet
1850 (i.e. during early inlining). */
1851 if (!ipa_node_params_vector)
/* Node params imply edge args have been created too.  */
1853 gcc_assert (ipa_edge_args_vector);
1855 return propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1858 /* Frees all dynamically allocated structures that the argument info points
1862 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
1864 if (args->jump_functions)
1865 ggc_free (args->jump_functions);
/* Clear the whole structure so stale pointers cannot be reused.  */
1867 memset (args, 0, sizeof (*args));
1870 /* Free all ipa_edge structures. */
1873 ipa_free_all_edge_args (void)
1876 struct ipa_edge_args *args;
/* Release each element's substructures, then the vector itself.  */
1878 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
1879 ipa_free_edge_args_substructures (args);
1881 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
1882 ipa_edge_args_vector = NULL;
1885 /* Frees all dynamically allocated structures that the param info points
1889 ipa_free_node_params_substructures (struct ipa_node_params *info)
1892 free (info->params);
/* Clear the whole structure so stale pointers cannot be reused.  */
1894 memset (info, 0, sizeof (*info));
1897 /* Free all ipa_node_params structures. */
1900 ipa_free_all_node_params (void)
1903 struct ipa_node_params *info;
/* Release each element's substructures, then the vector itself.  */
1905 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
1906 ipa_free_node_params_substructures (info);
1908 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
1909 ipa_node_params_vector = NULL;
1912 /* Hook that is called by cgraph.c when an edge is removed. */
1915 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
1917 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
1918 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
1919 <= (unsigned)cs->uid)
1921 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1924 /* Hook that is called by cgraph.c when a node is removed. */
1927 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1929 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
1930 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
1931 <= (unsigned)node->uid)
1933 ipa_free_node_params_substructures (IPA_NODE_REF (node));
1936 /* Helper function to duplicate an array of size N that is at SRC and store a
1937 pointer to it to DST. Nothing is done if SRC is NULL. */
1940 duplicate_array (void *src, size_t n)
/* Return a GC-allocated copy of the N-element jump function array SRC.  */
1952 static struct ipa_jump_func *
1953 duplicate_ipa_jump_func_array (const struct ipa_jump_func * src, size_t n)
1955 struct ipa_jump_func *p;
1960 p = ggc_alloc_vec_ipa_jump_func (n);
1961 memcpy (p, src, n * sizeof (struct ipa_jump_func));
1965 /* Hook that is called by cgraph.c when an edge is duplicated. */
1968 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
1969 __attribute__((unused)) void *data)
1971 struct ipa_edge_args *old_args, *new_args;
1974 ipa_check_create_edge_args ();
1976 old_args = IPA_EDGE_REF (src);
1977 new_args = IPA_EDGE_REF (dst);
/* Copy the argument count and a deep copy of the jump function array
   from the source edge to the new one.  */
1979 arg_count = ipa_get_cs_argument_count (old_args);
1980 ipa_set_cs_argument_count (new_args, arg_count);
1981 new_args->jump_functions =
1982 duplicate_ipa_jump_func_array (old_args->jump_functions, arg_count);
/* The processed-by-indirect-inlining mark travels with the copy.  */
1984 if (iinlining_processed_edges
1985 && bitmap_bit_p (iinlining_processed_edges, src->uid))
1986 bitmap_set_bit (iinlining_processed_edges, dst->uid);
1989 /* Hook that is called by cgraph.c when a node is duplicated. */
1992 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
1993 __attribute__((unused)) void *data)
1995 struct ipa_node_params *old_info, *new_info;
1998 ipa_check_create_node_params ();
1999 old_info = IPA_NODE_REF (src);
2000 new_info = IPA_NODE_REF (dst);
2001 param_count = ipa_get_param_count (old_info);
/* Deep-copy the parameter descriptors, including each descriptor's types
   vector, so the clone owns independent storage.  */
2003 ipa_set_param_count (new_info, param_count);
2004 new_info->params = (struct ipa_param_descriptor *)
2005 duplicate_array (old_info->params,
2006 sizeof (struct ipa_param_descriptor) * param_count);
2007 for (i = 0; i < param_count; i++)
2008 new_info->params[i].types = VEC_copy (tree, heap,
2009 old_info->params[i].types);
2010 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2011 new_info->count_scale = old_info->count_scale;
/* Scalar flags are copied verbatim.  */
2013 new_info->called_with_var_arguments = old_info->called_with_var_arguments;
2014 new_info->uses_analysis_done = old_info->uses_analysis_done;
2015 new_info->node_enqueued = old_info->node_enqueued;
2018 /* Register our cgraph hooks if they are not already there. */
2021 ipa_register_cgraph_hooks (void)
/* Each holder doubles as the "already registered" flag.  */
2023 if (!edge_removal_hook_holder)
2024 edge_removal_hook_holder =
2025 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2026 if (!node_removal_hook_holder)
2027 node_removal_hook_holder =
2028 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2029 if (!edge_duplication_hook_holder)
2030 edge_duplication_hook_holder =
2031 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2032 if (!node_duplication_hook_holder)
2033 node_duplication_hook_holder =
2034 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2037 /* Unregister our cgraph hooks and clear the holders. */
2040 ipa_unregister_cgraph_hooks (void)
2042 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2043 edge_removal_hook_holder = NULL;
2044 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2045 node_removal_hook_holder = NULL;
2046 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2047 edge_duplication_hook_holder = NULL;
2048 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2049 node_duplication_hook_holder = NULL;
2052 /* Allocate all data structures necessary for indirect inlining. */
2055 ipa_create_all_structures_for_iinln (void)
2057 iinlining_processed_edges = BITMAP_ALLOC (NULL);
2060 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2061 longer needed after ipa-cp. */
2064 ipa_free_all_structures_after_ipa_cp (void)
/* With indirect inlining enabled the structures are still needed, so keep
   them; otherwise tear everything down.  */
2066 if (!flag_indirect_inlining)
2068 ipa_free_all_edge_args ();
2069 ipa_free_all_node_params ();
2070 ipa_unregister_cgraph_hooks ();
2074 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2075 longer needed after indirect inlining. */
2078 ipa_free_all_structures_after_iinln (void)
2080 BITMAP_FREE (iinlining_processed_edges);
2082 ipa_free_all_edge_args ();
2083 ipa_free_all_node_params ();
2084 ipa_unregister_cgraph_hooks ();
2087 /* Print ipa_tree_map data structures of the single function NODE to file
2091 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2095 struct ipa_node_params *info;
/* Parameter descriptors only exist for analyzed nodes.  */
2097 if (!node->analyzed)
2099 info = IPA_NODE_REF (node);
2100 fprintf (f, " function %s parameter descriptors:\n",
2101 cgraph_node_name (node));
2102 count = ipa_get_param_count (info);
2103 for (i = 0; i < count; i++)
2105 temp = ipa_get_param (info, i);
2106 if (TREE_CODE (temp) == PARM_DECL)
2107 fprintf (f, " param %d : %s", i,
2109 ? (*lang_hooks.decl_printable_name) (temp, 2)
2111 if (ipa_is_param_used (info, i))
2112 fprintf (f, " used");
2117 /* Print ipa_tree_map data structures of all functions in the
2121 ipa_print_all_params (FILE * f)
2123 struct cgraph_node *node;
2125 fprintf (f, "\nFunction parameters:\n");
2126 for (node = cgraph_nodes; node; node = node->next)
2127 ipa_print_node_params (f, node);
2130 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2133 ipa_get_vector_of_formal_parms (tree fndecl)
2135 VEC(tree, heap) *args;
/* Pre-size the vector by counting the parameters first, then push each
   PARM_DECL from the declaration chain.  */
2139 count = count_formal_params_1 (fndecl);
2140 args = VEC_alloc (tree, heap, count);
2141 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2142 VEC_quick_push (tree, args, parm);
2147 /* Return a heap allocated vector containing types of formal parameters of
2148 function type FNTYPE. */
2150 static inline VEC(tree, heap) *
2151 get_vector_of_formal_parm_types (tree fntype)
2153 VEC(tree, heap) *types;
/* First pass counts TYPE_ARG_TYPES entries, second pass collects the
   TREE_VALUE (the type) of each.  */
2157 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2160 types = VEC_alloc (tree, heap, count);
2161 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2162 VEC_quick_push (tree, types, TREE_VALUE (t));
2167 /* Modify the function declaration FNDECL and its type according to the plan in
2168 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2169 to reflect the actual parameters being modified which are determined by the
2170 base_index field. */
2173 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2174 const char *synth_parm_prefix)
2176 VEC(tree, heap) *oparms, *otypes;
2177 tree orig_type, new_type = NULL;
2178 tree old_arg_types, t, new_arg_types = NULL;
2179 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2180 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2181 tree new_reversed = NULL;
2182 bool care_for_types, last_parm_void;
2184 if (!synth_parm_prefix)
2185 synth_parm_prefix = "SYNTH";
2187 oparms = ipa_get_vector_of_formal_parms (fndecl);
2188 orig_type = TREE_TYPE (fndecl);
2189 old_arg_types = TYPE_ARG_TYPES (orig_type);
2191 /* The following test is an ugly hack, some functions simply don't have any
2192 arguments in their type. This is probably a bug but well... */
2193 care_for_types = (old_arg_types != NULL_TREE);
/* Remember whether the old argument list was terminated by a void entry
   (i.e. the function is not variadic) and sanity-check that parameter
   and type counts line up.  */
2196 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2198 otypes = get_vector_of_formal_parm_types (orig_type);
2200 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2202 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2206 last_parm_void = false;
/* Build the new DECL_ARGUMENTS chain and (reversed) argument type list
   according to each adjustment: keep a copied param, synthesize a new
   one, or drop a removed one.  */
2210 for (i = 0; i < len; i++)
2212 struct ipa_parm_adjustment *adj;
2215 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2216 parm = VEC_index (tree, oparms, adj->base_index);
2219 if (adj->copy_param)
2222 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2226 link = &DECL_CHAIN (parm);
2228 else if (!adj->remove_param)
/* A synthesized replacement parameter; passed by reference when
   adj->by_ref, hence the pointer type here.  */
2234 ptype = build_pointer_type (adj->type);
2239 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2241 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2243 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2245 DECL_ARTIFICIAL (new_parm) = 1;
2246 DECL_ARG_TYPE (new_parm) = ptype;
2247 DECL_CONTEXT (new_parm) = fndecl;
2248 TREE_USED (new_parm) = 1;
2249 DECL_IGNORED_P (new_parm) = 1;
2250 layout_decl (new_parm, 0);
2252 add_referenced_var (new_parm);
2253 mark_sym_for_renaming (new_parm);
2255 adj->reduction = new_parm;
2259 link = &DECL_CHAIN (new_parm);
/* Restore the proper order of the type list and re-terminate it with a
   void entry when the original list had one.  */
2267 new_reversed = nreverse (new_arg_types);
2271 TREE_CHAIN (new_arg_types) = void_list_node;
2273 new_reversed = void_list_node;
2277 /* Use copy_node to preserve as much as possible from original type
2278 (debug info, attribute lists etc.)
2279 Exception is METHOD_TYPEs must have THIS argument.
2280 When we are asked to remove it, we need to build new FUNCTION_TYPE
2282 if (TREE_CODE (orig_type) != METHOD_TYPE
2283 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
2284 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
2286 new_type = build_distinct_type_copy (orig_type);
2287 TYPE_ARG_TYPES (new_type) = new_reversed;
/* THIS was removed: demote the METHOD_TYPE to a plain FUNCTION_TYPE
   and drop the virtual-method linkage of the decl.  */
2292 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2294 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2295 DECL_VINDEX (fndecl) = NULL_TREE;
2298 /* When signature changes, we need to clear builtin info. */
2299 if (DECL_BUILT_IN (fndecl))
2301 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2302 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2305 /* This is a new type, not a copy of an old type. Need to reassociate
2306 variants. We can handle everything except the main variant lazily. */
2307 t = TYPE_MAIN_VARIANT (orig_type);
2310 TYPE_MAIN_VARIANT (new_type) = t;
2311 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2312 TYPE_NEXT_VARIANT (t) = new_type;
2316 TYPE_MAIN_VARIANT (new_type) = new_type;
2317 TYPE_NEXT_VARIANT (new_type) = NULL;
2320 TREE_TYPE (fndecl) = new_type;
2321 DECL_VIRTUAL_P (fndecl) = 0;
2323 VEC_free (tree, heap, otypes);
2324 VEC_free (tree, heap, oparms);
2327 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2328 If this is a directly recursive call, CS must be NULL. Otherwise it must
2329 contain the corresponding call graph edge. */
2332 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2333 ipa_parm_adjustment_vec adjustments)
2335 VEC(tree, heap) *vargs;
2337 gimple_stmt_iterator gsi;
2341 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2342 vargs = VEC_alloc (tree, heap, len);
2344 gsi = gsi_for_stmt (stmt);
/* Build the new argument vector: one entry per adjustment, skipping
   removed parameters.  */
2345 for (i = 0; i < len; i++)
2347 struct ipa_parm_adjustment *adj;
2349 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2351 if (adj->copy_param)
2353 tree arg = gimple_call_arg (stmt, adj->base_index);
2355 VEC_quick_push (tree, vargs, arg);
2357 else if (!adj->remove_param)
2359 tree expr, base, off;
2362 /* We create a new parameter out of the value of the old one, we can
2363 do the following kind of transformations:
2365 - A scalar passed by reference is converted to a scalar passed by
2366 value. (adj->by_ref is false and the type of the original
2367 actual argument is a pointer to a scalar).
2369 - A part of an aggregate is passed instead of the whole aggregate.
2370 The part can be passed either by value or by reference, this is
2371 determined by value of adj->by_ref. Moreover, the code below
2372 handles both situations when the original aggregate is passed by
2373 value (its type is not a pointer) and when it is passed by
2374 reference (it is a pointer to an aggregate).
2376 When the new argument is passed by reference (adj->by_ref is true)
2377 it must be a part of an aggregate and therefore we form it by
2378 simply taking the address of a reference inside the original
2381 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2382 base = gimple_call_arg (stmt, adj->base_index);
2383 loc = EXPR_LOCATION (base);
/* Compute BASE and byte offset OFF for the MEM_REF that extracts
   the piece, depending on the form of the original argument.  */
2385 if (TREE_CODE (base) != ADDR_EXPR
2386 && POINTER_TYPE_P (TREE_TYPE (base)))
2387 off = build_int_cst (adj->alias_ptr_type,
2388 adj->offset / BITS_PER_UNIT);
2391 HOST_WIDE_INT base_offset;
2394 if (TREE_CODE (base) == ADDR_EXPR)
2395 base = TREE_OPERAND (base, 0);
2397 base = get_addr_base_and_unit_offset (base, &base_offset);
2398 /* Aggregate arguments can have non-invariant addresses. */
2401 base = build_fold_addr_expr (prev_base);
2402 off = build_int_cst (adj->alias_ptr_type,
2403 adj->offset / BITS_PER_UNIT);
2405 else if (TREE_CODE (base) == MEM_REF)
/* Fold the new offset into the existing MEM_REF offset.  */
2407 off = build_int_cst (adj->alias_ptr_type,
2409 + adj->offset / BITS_PER_UNIT);
2410 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2412 base = TREE_OPERAND (base, 0);
2416 off = build_int_cst (adj->alias_ptr_type,
2418 + adj->offset / BITS_PER_UNIT);
2419 base = build_fold_addr_expr (base);
2423 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
/* By-reference pieces are passed as the address of the MEM_REF.  */
2425 expr = build_fold_addr_expr (expr);
2427 expr = force_gimple_operand_gsi (&gsi, expr,
2429 || is_gimple_reg_type (adj->type),
2430 NULL, true, GSI_SAME_STMT);
2431 VEC_quick_push (tree, vargs, expr);
2435 if (dump_file && (dump_flags & TDF_DETAILS))
2437 fprintf (dump_file, "replacing stmt:");
2438 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
/* Build the replacement call, preserving lhs, block, location, flags and
   static chain of the original statement.  */
2441 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
2442 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2443 VEC_free (tree, heap, vargs);
2444 if (gimple_call_lhs (stmt))
2445 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2447 gimple_set_block (new_stmt, gimple_block (stmt));
2448 if (gimple_has_location (stmt))
2449 gimple_set_location (new_stmt, gimple_location (stmt));
2450 gimple_call_copy_flags (new_stmt, stmt);
2451 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2453 if (dump_file && (dump_flags & TDF_DETAILS))
2455 fprintf (dump_file, "with stmt:");
2456 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2457 fprintf (dump_file, "\n");
2459 gsi_replace (&gsi, new_stmt, true);
/* Keep the call graph in sync with the replaced statement.  */
2461 cgraph_set_call_stmt (cs, new_stmt);
2462 update_ssa (TODO_update_ssa);
2463 free_dominance_info (CDI_DOMINATORS);
2466 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
/* NOTE(review): this excerpt is truncated -- the return-type line, braces
   and the return statements of this predicate fall outside the visible
   lines; only the scan loop is shown here.  */
2469 index_in_adjustments_multiple_times_p (int base_index,
2470 ipa_parm_adjustment_vec adjustments)
/* Linear scan over every recorded parameter adjustment.  */
2472 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2475 for (i = 0; i < len; i++)
2477 struct ipa_parm_adjustment *adj;
2478 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
/* A repeated hit on BASE_INDEX is what makes the predicate true;
   the counting/return logic is in the elided lines.  */
2480 if (adj->base_index == base_index)
2492 /* Return adjustments that should have the same effect on function parameters
2493 and call arguments as if they were first changed according to adjustments in
2494 INNER and then by adjustments in OUTER. */
2496 ipa_parm_adjustment_vec
2497 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
2498 ipa_parm_adjustment_vec outer)
2500 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
2501 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
/* NOTE(review): a declaration of REMOVALS (used below when sizing
   ADJUSTMENTS) falls in an elided line of this excerpt.  */
2503 ipa_parm_adjustment_vec adjustments, tmp;
/* Pass 1: copy into TMP the inner adjustments that survive (those that
   are not parameter removals), so OUTER indices can be mapped onto them.  */
2505 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
2506 for (i = 0; i < inlen; i++)
2508 struct ipa_parm_adjustment *n;
2509 n = VEC_index (ipa_parm_adjustment_t, inner, i);
2511 if (n->remove_param)
2514 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
/* Pass 2: for each OUTER adjustment, compose it with the surviving INNER
   adjustment it refers to and emit the combined record.  */
2517 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
2518 for (i = 0; i < outlen; i++)
2520 struct ipa_parm_adjustment *r;
2521 struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
2523 struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
/* Removals were filtered out of TMP in pass 1, so IN can never be one.  */
2526 gcc_assert (!in->remove_param);
2527 if (out->remove_param)
/* Emit a removal only when IN's base index is unique in TMP; a
   multiply-referenced base must not be removed on one reference.  */
2529 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
2531 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2532 memset (r, 0, sizeof (*r));
2533 r->remove_param = true;
/* Non-removal case: combined record takes IN's base and OUT's type.  */
2538 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2539 memset (r, 0, sizeof (*r));
2540 r->base_index = in->base_index;
2541 r->type = out->type;
2543 /* FIXME: Create nonlocal value too. */
/* Offsets compose: a plain copy on either side contributes zero, so the
   combined offset is whichever side adjusted -- or their sum.  */
2545 if (in->copy_param && out->copy_param)
2546 r->copy_param = true;
2547 else if (in->copy_param)
2548 r->offset = out->offset;
2549 else if (out->copy_param)
2550 r->offset = in->offset;
2552 r->offset = in->offset + out->offset;
/* Pass 3: re-append the INNER removals so they still take effect.  */
2555 for (i = 0; i < inlen; i++)
2557 struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
2560 if (n->remove_param)
2561 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
2564 VEC_free (ipa_parm_adjustment_t, heap, tmp)
2568 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
2569 friendly way, assuming they are meant to be applied to FNDECL. */
2572 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2575 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
/* Formal parameters of FNDECL, indexed by adj->base_index below.  */
2577 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2579 fprintf (file, "IPA param adjustments: ");
2580 for (i = 0; i < len; i++)
2582 struct ipa_parm_adjustment *adj;
2583 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2586 fprintf (file, " ");
/* Each entry: index, the base parameter it refers to, then whichever of
   the optional fields (base, reduction, new_ssa_base, ...) is set.
   NOTE(review): the guarding conditionals for several of the prints
   below fall in elided lines of this excerpt.  */
2590 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2591 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2594 fprintf (file, ", base: ");
2595 print_generic_expr (file, adj->base, 0);
2599 fprintf (file, ", reduction: ");
2600 print_generic_expr (file, adj->reduction, 0);
2602 if (adj->new_ssa_base)
2604 fprintf (file, ", new_ssa_base: ");
2605 print_generic_expr (file, adj->new_ssa_base, 0);
2608 if (adj->copy_param)
2609 fprintf (file, ", copy_param");
2610 else if (adj->remove_param)
2611 fprintf (file, ", remove_param");
2613 fprintf (file, ", offset %li", (long) adj->offset);
2615 fprintf (file, ", by_ref");
2616 print_node_brief (file, ", type: ", adj->type, 0);
2617 fprintf (file, "\n");
/* PARMS was heap-allocated by ipa_get_vector_of_formal_parms.  */
2619 VEC_free (tree, heap, parms)
2622 /* Stream out jump function JUMP_FUNC to OB. */
/* The per-type payload written here must stay in exact field order with
   what ipa_read_jump_function reads back.  */
2625 ipa_write_jump_function (struct output_block *ob,
2626 struct ipa_jump_func *jump_func)
/* Type tag first, then the type-specific payload.  */
2628 lto_output_uleb128_stream (ob->main_stream,
2631 switch (jump_func->type)
2633 case IPA_JF_UNKNOWN:
2635 case IPA_JF_KNOWN_TYPE:
2636 lto_output_tree (ob, jump_func->value.base_binfo, true);
/* NOTE(review): the IPA_JF_CONST case label for the next line falls in
   an elided line of this excerpt.  */
2639 lto_output_tree (ob, jump_func->value.constant, true);
2641 case IPA_JF_PASS_THROUGH:
2642 lto_output_tree (ob, jump_func->value.pass_through.operand, true);
2643 lto_output_uleb128_stream (ob->main_stream,
2644 jump_func->value.pass_through.formal_id);
2645 lto_output_uleb128_stream (ob->main_stream,
2646 jump_func->value.pass_through.operation);
2648 case IPA_JF_ANCESTOR:
2649 lto_output_uleb128_stream (ob->main_stream,
2650 jump_func->value.ancestor.offset);
2651 lto_output_tree (ob, jump_func->value.ancestor.type, true);
2652 lto_output_uleb128_stream (ob->main_stream,
2653 jump_func->value.ancestor.formal_id);
2655 case IPA_JF_CONST_MEMBER_PTR:
2656 lto_output_tree (ob, jump_func->value.member_cst.pfn, true);
2657 lto_output_tree (ob, jump_func->value.member_cst.delta, false)
2662 /* Read in jump function JUMP_FUNC from IB. */
/* Mirror image of ipa_write_jump_function: reads the type tag, then the
   type-specific payload in the same field order it was written.  */
2665 ipa_read_jump_function (struct lto_input_block *ib,
2666 struct ipa_jump_func *jump_func,
2667 struct data_in *data_in)
2669 jump_func->type = (enum jump_func_type) lto_input_uleb128 (ib);
2671 switch (jump_func->type)
2673 case IPA_JF_UNKNOWN:
2675 case IPA_JF_KNOWN_TYPE:
2676 jump_func->value.base_binfo = lto_input_tree (ib, data_in);
/* NOTE(review): the IPA_JF_CONST case label for the next line falls in
   an elided line of this excerpt.  */
2679 jump_func->value.constant = lto_input_tree (ib, data_in);
2681 case IPA_JF_PASS_THROUGH:
2682 jump_func->value.pass_through.operand = lto_input_tree (ib, data_in);
2683 jump_func->value.pass_through.formal_id = lto_input_uleb128 (ib);
2684 jump_func->value.pass_through.operation = (enum tree_code) lto_input_uleb128 (ib);
2686 case IPA_JF_ANCESTOR:
2687 jump_func->value.ancestor.offset = lto_input_uleb128 (ib);
2688 jump_func->value.ancestor.type = lto_input_tree (ib, data_in);
2689 jump_func->value.ancestor.formal_id = lto_input_uleb128 (ib);
2691 case IPA_JF_CONST_MEMBER_PTR:
2692 jump_func->value.member_cst.pfn = lto_input_tree (ib, data_in);
2693 jump_func->value.member_cst.delta = lto_input_tree (ib, data_in)
2698 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
2699 relevant to indirect inlining to OB. */
/* Field order here must match ipa_read_indirect_edge_info exactly.  */
2702 ipa_write_indirect_edge_info (struct output_block *ob,
2703 struct cgraph_edge *cs)
2705 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2706 struct bitpack_d bp;
2708 lto_output_sleb128_stream (ob->main_stream, ii->param_index);
2709 lto_output_sleb128_stream (ob->main_stream, ii->anc_offset);
/* Only a single flag goes into the bitpack at present.  */
2710 bp = bitpack_create (ob->main_stream);
2711 bp_pack_value (&bp, ii->polymorphic, 1);
2712 lto_output_bitpack (&bp);
/* OBJ_TYPE_REF token and type are streamed only for polymorphic calls.  */
2714 if (ii->polymorphic)
2716 lto_output_sleb128_stream (ob->main_stream, ii->otr_token);
2717 lto_output_tree (ob, ii->otr_type, true)
2721 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2722 relevant to indirect inlining from IB. */
2725 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2726 struct data_in *data_in ATTRIBUTE_UNUSED,
2727 struct cgraph_edge *cs)
2729 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2730 struct bitpack_d bp;
2732 ii->param_index = (int) lto_input_sleb128 (ib);
2733 ii->anc_offset = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2734 bp = lto_input_bitpack (ib);
2735 ii->polymorphic = bp_unpack_value (&bp, 1);
2736 if (ii->polymorphic)
2738 ii->otr_token = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2739 ii->otr_type = lto_input_tree (ib, data_in);
2743 /* Stream out NODE info to OB. */
2746 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
2749 lto_cgraph_encoder_t encoder;
2750 struct ipa_node_params *info = IPA_NODE_REF (node);
2752 struct cgraph_edge *e;
2753 struct bitpack_d bp;
/* Identify the node by its encoder reference, not by pointer.  */
2755 encoder = ob->decl_state->cgraph_node_encoder;
2756 node_ref = lto_cgraph_encoder_encode (encoder, node);
2757 lto_output_uleb128_stream (ob->main_stream, node_ref);
/* Bitpack: the varargs flag followed by one 'used' bit per parameter.  */
2759 bp = bitpack_create (ob->main_stream);
2760 bp_pack_value (&bp, info->called_with_var_arguments, 1);
/* Only fully analyzed, not-yet-transformed nodes may be streamed.  */
2761 gcc_assert (info->uses_analysis_done
2762 || ipa_get_param_count (info) == 0);
2763 gcc_assert (!info->node_enqueued);
2764 gcc_assert (!info->ipcp_orig_node);
2765 for (j = 0; j < ipa_get_param_count (info); j++)
2766 bp_pack_value (&bp, info->params[j].used, 1);
2767 lto_output_bitpack (&bp);
/* Per call edge: the argument count, then one jump function per argument;
   read back in the same order by ipa_read_node_info.  */
2768 for (e = node->callees; e; e = e->next_callee)
2770 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2772 lto_output_uleb128_stream (ob->main_stream,
2773 ipa_get_cs_argument_count (args));
2774 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2775 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2777 for (e = node->indirect_calls; e; e = e->next_callee)
2778 ipa_write_indirect_edge_info (ob, e)
2781 /* Stream in NODE info from IB. */
2784 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
2785 struct data_in *data_in)
2787 struct ipa_node_params *info = IPA_NODE_REF (node);
2789 struct cgraph_edge *e;
2790 struct bitpack_d bp;
/* Make sure the params array exists before the bits below fill it in.  */
2792 ipa_initialize_node_params (node);
/* Mirror of ipa_write_node_info: varargs flag, then one 'used' bit per
   parameter.  */
2794 bp = lto_input_bitpack (ib);
2795 info->called_with_var_arguments = bp_unpack_value (&bp, 1);
/* The writer asserted uses_analysis_done (or zero params); restore it.  */
2796 if (ipa_get_param_count (info) != 0)
2797 info->uses_analysis_done = true;
2798 info->node_enqueued = false;
2799 for (k = 0; k < ipa_get_param_count (info); k++)
2800 info->params[k].used = bp_unpack_value (&bp, 1);
/* Per call edge: argument count, then the jump functions themselves.  */
2801 for (e = node->callees; e; e = e->next_callee)
2803 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2804 int count = lto_input_uleb128 (ib);
2806 ipa_set_cs_argument_count (args, count);
/* Jump functions live in GC memory, hence the ggc allocation.  */
2810 args->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
2811 (ipa_get_cs_argument_count (args));
2812 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2813 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
2815 for (e = node->indirect_calls; e; e = e->next_callee)
2816 ipa_read_indirect_edge_info (ib, data_in, e)
2819 /* Write jump functions for nodes in SET. */
2822 ipa_prop_write_jump_functions (cgraph_node_set set)
2824 struct cgraph_node *node;
2825 struct output_block *ob = create_output_block (LTO_section_jump_functions);
2826 unsigned int count = 0;
2827 cgraph_node_set_iterator csi;
2829 ob->cgraph_node = NULL;
/* First pass: count the analyzed nodes with IPA info so the reader knows
   how many records follow.  */
2831 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2833 node = csi_node (csi);
2834 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2838 lto_output_uleb128_stream (ob->main_stream, count);
2840 /* Process all of the functions. */
/* Second pass must use the same filter as the counting pass above.  */
2841 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2843 node = csi_node (csi);
2844 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2845 ipa_write_node_info (ob, node);
/* Zero terminator, then emit the section and release the block.  */
2847 lto_output_1_stream (ob->main_stream, 0);
2848 produce_asm (ob, NULL);
2849 destroy_output_block (ob)
2852 /* Read section in file FILE_DATA of length LEN with data DATA. */
2855 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
/* Section layout: LTO function header, then the main stream, then the
   string table; offsets are computed from the header's size fields.  */
2858 const struct lto_function_header *header =
2859 (const struct lto_function_header *) data;
2860 const int32_t cfg_offset = sizeof (struct lto_function_header);
2861 const int32_t main_offset = cfg_offset + header->cfg_size;
2862 const int32_t string_offset = main_offset + header->main_size;
2863 struct data_in *data_in;
2864 struct lto_input_block ib_main;
/* NOTE(review): declarations of COUNT, I and INDEX used below fall in
   elided lines of this excerpt.  */
2868 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2872 lto_data_in_create (file_data, (const char *) data + string_offset,
2873 header->string_size, NULL);
/* Node count written by ipa_prop_write_jump_functions.  */
2874 count = lto_input_uleb128 (&ib_main);
2876 for (i = 0; i < count; i++)
2879 struct cgraph_node *node;
2880 lto_cgraph_encoder_t encoder;
/* Map the streamed encoder reference back to the cgraph node.  */
2882 index = lto_input_uleb128 (&ib_main);
2883 encoder = file_data->cgraph_node_encoder;
2884 node = lto_cgraph_encoder_deref (encoder, index);
2885 gcc_assert (node->analyzed);
2886 ipa_read_node_info (&ib_main, node, data_in);
2888 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
2890 lto_data_in_delete (data_in)
2893 /* Read ipcp jump functions. */
/* Iterates over all LTO input files and replays any jump-function
   section each of them carries.  */
2896 ipa_prop_read_jump_functions (void)
2898 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2899 struct lto_file_decl_data *file_data;
/* The vectors and hooks must exist before any node info is streamed in.  */
2902 ipa_check_create_node_params ();
2903 ipa_check_create_edge_args ();
2904 ipa_register_cgraph_hooks ();
/* FILE_DATA_VEC is NULL-terminated; J's declaration is in an elided line
   of this excerpt.  */
2906 while ((file_data = file_data_vec[j++]))
2909 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
/* Files without a jump-function section yield NULL and are skipped
   (guard falls in an elided line).  */
2912 ipa_prop_read_section (file_data, data, len)
2916 /* After merging units, we can get mismatch in argument counts.
2917 Also decl merging might have rendered parameter lists obsolete.
2918 Also compute called_with_variable_arg info. */
2921 ipa_update_after_lto_read (void)
2923 struct cgraph_node *node;
2924 struct cgraph_edge *cs;
2926 ipa_check_create_node_params ();
2927 ipa_check_create_edge_args ();
/* First make sure every node has its parameter descriptors set up.  */
2929 for (node = cgraph_nodes; node; node = node->next)
2931 ipa_initialize_node_params (node);
/* Then mark callees whose recorded argument count no longer matches
   their parameter count -- the result of cross-unit decl merging.  */
2933 for (node = cgraph_nodes; node; node = node->next)
2935 for (cs = node->callees; cs; cs = cs->next_callee)
2937 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
2938 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
2939 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee))