1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
25 #include "langhooks.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
46 struct param_analysis_info
49 bitmap visited_statements;
52 /* Vector where the parameter infos are actually stored. */
53 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
54 /* Vector where the parameter infos are actually stored. */
55 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
57 /* Bitmap with all UIDs of call graph edges that have been already processed
58 by indirect inlining. */
59 static bitmap iinlining_processed_edges;
61 /* Holders of ipa cgraph hooks: */
62 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
63 static struct cgraph_node_hook_list *node_removal_hook_holder;
64 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
65 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
66 static struct cgraph_node_hook_list *function_insertion_hook_holder;
68 /* Return index of the formal whose tree is PTREE in function which corresponds
72 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
76 count = ipa_get_param_count (info);
77 for (i = 0; i < count; i++)
78 if (ipa_get_param (info, i) == ptree)
84 /* Populate the param_decl field in parameter descriptors of INFO that
85 corresponds to NODE. */
88 ipa_populate_param_decls (struct cgraph_node *node,
89 struct ipa_node_params *info)
97 fnargs = DECL_ARGUMENTS (fndecl);
99 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
101 VEC_index (ipa_param_descriptor_t,
102 info->descriptors, param_num)->decl = parm;
107 /* Return how many formal parameters FNDECL has. */
110 count_formal_params (tree fndecl)
115 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
121 /* Initialize the ipa_node_params structure associated with NODE by counting
122 the function parameters, creating the descriptors and populating their
126 ipa_initialize_node_params (struct cgraph_node *node)
128 struct ipa_node_params *info = IPA_NODE_REF (node);
130 if (!info->descriptors)
134 param_count = count_formal_params (node->decl);
137 VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
138 info->descriptors, param_count);
139 ipa_populate_param_decls (node, info);
144 /* Count number of arguments callsite CS has and store it in
145 ipa_edge_args structure corresponding to this callsite. */
148 ipa_count_arguments (struct cgraph_edge *cs)
153 stmt = cs->call_stmt;
154 gcc_assert (is_gimple_call (stmt));
155 arg_num = gimple_call_num_args (stmt);
156 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
157 <= (unsigned) cgraph_edge_max_uid)
158 VEC_safe_grow_cleared (ipa_edge_args_t, gc,
159 ipa_edge_args_vector, cgraph_edge_max_uid + 1);
160 ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
163 /* Print the jump functions associated with call graph edge CS to file F. */
166 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
170 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
171 for (i = 0; i < count; i++)
173 struct ipa_jump_func *jump_func;
174 enum jump_func_type type;
176 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
177 type = jump_func->type;
179 fprintf (f, " param %d: ", i);
180 if (type == IPA_JF_UNKNOWN)
181 fprintf (f, "UNKNOWN\n");
182 else if (type == IPA_JF_KNOWN_TYPE)
184 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
185 fprintf (f, "KNOWN TYPE, type in binfo is: ");
186 print_generic_expr (f, binfo_type, 0);
187 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
189 else if (type == IPA_JF_CONST)
191 tree val = jump_func->value.constant;
192 fprintf (f, "CONST: ");
193 print_generic_expr (f, val, 0);
194 if (TREE_CODE (val) == ADDR_EXPR
195 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
198 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
203 else if (type == IPA_JF_CONST_MEMBER_PTR)
205 fprintf (f, "CONST MEMBER PTR: ");
206 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
208 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
211 else if (type == IPA_JF_PASS_THROUGH)
213 fprintf (f, "PASS THROUGH: ");
214 fprintf (f, "%d, op %s ",
215 jump_func->value.pass_through.formal_id,
217 jump_func->value.pass_through.operation]);
218 if (jump_func->value.pass_through.operation != NOP_EXPR)
219 print_generic_expr (dump_file,
220 jump_func->value.pass_through.operand, 0);
221 fprintf (dump_file, "\n");
223 else if (type == IPA_JF_ANCESTOR)
225 fprintf (f, "ANCESTOR: ");
226 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
227 jump_func->value.ancestor.formal_id,
228 jump_func->value.ancestor.offset);
229 print_generic_expr (f, jump_func->value.ancestor.type, 0);
230 fprintf (dump_file, "\n");
236 /* Print the jump functions of all arguments on all call graph edges going from
240 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
242 struct cgraph_edge *cs;
245 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
246 for (cs = node->callees; cs; cs = cs->next_callee)
248 if (!ipa_edge_args_info_available_for_edge_p (cs))
251 fprintf (f, " callsite %s/%i -> %s/%i : \n",
252 cgraph_node_name (node), node->uid,
253 cgraph_node_name (cs->callee), cs->callee->uid);
254 ipa_print_node_jump_functions_for_edge (f, cs);
257 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
259 if (!ipa_edge_args_info_available_for_edge_p (cs))
264 fprintf (f, " indirect callsite %d for stmt ", i);
265 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
268 fprintf (f, " indirect callsite %d :\n", i);
269 ipa_print_node_jump_functions_for_edge (f, cs);
274 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
277 ipa_print_all_jump_functions (FILE *f)
279 struct cgraph_node *node;
281 fprintf (f, "\nJump functions:\n");
282 for (node = cgraph_nodes; node; node = node->next)
284 ipa_print_node_jump_functions (f, node);
288 /* Structure to be passed in between detect_type_change and
289 check_stmt_for_type_change. */
291 struct type_change_info
293 /* Set to true if dynamic type change has been detected. */
294 bool type_maybe_changed;
297 /* Return true if STMT can modify a virtual method table pointer.
299 This function makes special assumptions about both constructors and
300 destructors which are all the functions that are allowed to alter the VMT
301 pointers. It assumes that destructors begin with assignment into all VMT
302 pointers and that constructors essentially look in the following way:
304 1) The very first thing they do is that they call constructors of ancestor
305 sub-objects that have them.
307 2) Then VMT pointers of this and all its ancestors is set to new values
308 corresponding to the type corresponding to the constructor.
310 3) Only afterwards, other stuff such as constructor of member sub-objects
311 and the code written by the user is run. Only this may include calling
312 virtual functions, directly or indirectly.
314 There is no way to call a constructor of an ancestor sub-object in any
317 This means that we do not have to care whether constructors get the correct
318 type information because they will always change it (in fact, if we define
319 the type to be given by the VMT pointer, it is undefined).
321 The most important fact to derive from the above is that if, for some
322 statement in the section 3, we try to detect whether the dynamic type has
323 changed, we can safely ignore all calls as we examine the function body
324 backwards until we reach statements in section 2 because these calls cannot
325 be ancestor constructors or destructors (if the input is not bogus) and so
326 do not change the dynamic type (this holds true only for automatically
327 allocated objects but at the moment we devirtualize only these). We then
328 must detect that statements in section 2 change the dynamic type and can try
329 to derive the new type. That is enough and we can stop, we will never see
330 the calls into constructors of sub-objects in this code. Therefore we can
331 safely ignore all call statements that we traverse.
335 stmt_may_be_vtbl_ptr_store (gimple stmt)
337 if (is_gimple_call (stmt))
339 else if (is_gimple_assign (stmt))
341 tree lhs = gimple_assign_lhs (stmt);
343 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
345 if (flag_strict_aliasing
346 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
349 if (TREE_CODE (lhs) == COMPONENT_REF
350 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
352 /* In the future we might want to use get_base_ref_and_offset to find
353 if there is a field corresponding to the offset and if so, proceed
354 almost like if it was a component ref. */
360 /* Callback of walk_aliased_vdefs and a helper function for
361 detect_type_change to check whether a particular statement may modify
362 the virtual table pointer, and if possible also determine the new type of
363 the (sub-)object. It stores its result into DATA, which points to a
364 type_change_info structure. */
367 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
369 gimple stmt = SSA_NAME_DEF_STMT (vdef);
370 struct type_change_info *tci = (struct type_change_info *) data;
372 if (stmt_may_be_vtbl_ptr_store (stmt))
374 tci->type_maybe_changed = true;
381 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
382 looking for assignments to its virtual table pointer. If it is, return true
383 and fill in the jump function JFUNC with relevant type information or set it
384 to unknown. ARG is the object itself (not a pointer to it, unless
385 dereferenced). BASE is the base of the memory access as returned by
386 get_ref_base_and_extent, as is the offset. */
389 detect_type_change (tree arg, tree base, gimple call,
390 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
392 struct type_change_info tci;
395 gcc_checking_assert (DECL_P (arg)
396 || TREE_CODE (arg) == MEM_REF
397 || handled_component_p (arg));
398 /* Const calls cannot call virtual methods through VMT and so type changes do
400 if (!flag_devirtualize || !gimple_vuse (call))
403 tci.type_maybe_changed = false;
408 ao.size = POINTER_SIZE;
409 ao.max_size = ao.size;
410 ao.ref_alias_set = -1;
411 ao.base_alias_set = -1;
413 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
415 if (!tci.type_maybe_changed)
418 jfunc->type = IPA_JF_UNKNOWN;
422 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
423 SSA name (its dereference will become the base and the offset is assumed to
427 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
429 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
430 if (!flag_devirtualize
431 || !POINTER_TYPE_P (TREE_TYPE (arg))
432 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
435 arg = build2 (MEM_REF, ptr_type_node, arg,
436 build_int_cst (ptr_type_node, 0));
438 return detect_type_change (arg, arg, call, jfunc, 0);
442 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
443 of an assignment statement STMT, try to find out whether NAME can be
444 described by a (possibly polynomial) pass-through jump-function or an
445 ancestor jump function and if so, write the appropriate function into
449 compute_complex_assign_jump_func (struct ipa_node_params *info,
450 struct ipa_jump_func *jfunc,
451 gimple call, gimple stmt, tree name)
453 HOST_WIDE_INT offset, size, max_size;
454 tree op1, op2, base, ssa;
457 op1 = gimple_assign_rhs1 (stmt);
458 op2 = gimple_assign_rhs2 (stmt);
460 if (TREE_CODE (op1) == SSA_NAME
461 && SSA_NAME_IS_DEFAULT_DEF (op1))
463 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
469 if (!is_gimple_ip_invariant (op2)
470 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
471 && !useless_type_conversion_p (TREE_TYPE (name),
475 jfunc->type = IPA_JF_PASS_THROUGH;
476 jfunc->value.pass_through.formal_id = index;
477 jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
478 jfunc->value.pass_through.operand = op2;
480 else if (gimple_assign_unary_nop_p (stmt)
481 && !detect_type_change_ssa (op1, call, jfunc))
483 jfunc->type = IPA_JF_PASS_THROUGH;
484 jfunc->value.pass_through.formal_id = index;
485 jfunc->value.pass_through.operation = NOP_EXPR;
490 if (TREE_CODE (op1) != ADDR_EXPR)
492 op1 = TREE_OPERAND (op1, 0);
493 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
495 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
496 if (TREE_CODE (base) != MEM_REF
497 /* If this is a varying address, punt. */
501 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
502 ssa = TREE_OPERAND (base, 0);
503 if (TREE_CODE (ssa) != SSA_NAME
504 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
508 /* Dynamic types are changed only in constructors and destructors and */
509 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
511 && !detect_type_change (op1, base, call, jfunc, offset))
513 jfunc->type = IPA_JF_ANCESTOR;
514 jfunc->value.ancestor.formal_id = index;
515 jfunc->value.ancestor.offset = offset;
516 jfunc->value.ancestor.type = TREE_TYPE (op1);
520 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
523 iftmp.1_3 = &obj_2(D)->D.1762;
525 The base of the MEM_REF must be a default definition SSA NAME of a
526 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
527 whole MEM_REF expression is returned and the offset calculated from any
528 handled components and the MEM_REF itself is stored into *OFFSET. The whole
529 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
532 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
534 HOST_WIDE_INT size, max_size;
535 tree expr, parm, obj;
537 if (!gimple_assign_single_p (assign))
539 expr = gimple_assign_rhs1 (assign);
541 if (TREE_CODE (expr) != ADDR_EXPR)
543 expr = TREE_OPERAND (expr, 0);
545 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
547 if (TREE_CODE (expr) != MEM_REF
548 /* If this is a varying address, punt. */
553 parm = TREE_OPERAND (expr, 0);
554 if (TREE_CODE (parm) != SSA_NAME
555 || !SSA_NAME_IS_DEFAULT_DEF (parm)
556 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
559 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
565 /* Given that an actual argument is an SSA_NAME that is a result of a phi
566 statement PHI, try to find out whether NAME is in fact a
567 multiple-inheritance typecast from a descendant into an ancestor of a formal
568 parameter and thus can be described by an ancestor jump function and if so,
569 write the appropriate function into JFUNC.
571 Essentially we want to match the following pattern:
579 iftmp.1_3 = &obj_2(D)->D.1762;
582 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
583 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
587 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
588 struct ipa_jump_func *jfunc,
589 gimple call, gimple phi)
591 HOST_WIDE_INT offset;
593 basic_block phi_bb, assign_bb, cond_bb;
594 tree tmp, parm, expr, obj;
597 if (gimple_phi_num_args (phi) != 2)
600 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
601 tmp = PHI_ARG_DEF (phi, 0);
602 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
603 tmp = PHI_ARG_DEF (phi, 1);
606 if (TREE_CODE (tmp) != SSA_NAME
607 || SSA_NAME_IS_DEFAULT_DEF (tmp)
608 || !POINTER_TYPE_P (TREE_TYPE (tmp))
609 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
612 assign = SSA_NAME_DEF_STMT (tmp);
613 assign_bb = gimple_bb (assign);
614 if (!single_pred_p (assign_bb))
616 expr = get_ancestor_addr_info (assign, &obj, &offset);
619 parm = TREE_OPERAND (expr, 0);
620 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
621 gcc_assert (index >= 0);
623 cond_bb = single_pred (assign_bb);
624 cond = last_stmt (cond_bb);
626 || gimple_code (cond) != GIMPLE_COND
627 || gimple_cond_code (cond) != NE_EXPR
628 || gimple_cond_lhs (cond) != parm
629 || !integer_zerop (gimple_cond_rhs (cond)))
632 phi_bb = gimple_bb (phi);
633 for (i = 0; i < 2; i++)
635 basic_block pred = EDGE_PRED (phi_bb, i)->src;
636 if (pred != assign_bb && pred != cond_bb)
640 if (!detect_type_change (obj, expr, call, jfunc, offset))
642 jfunc->type = IPA_JF_ANCESTOR;
643 jfunc->value.ancestor.formal_id = index;
644 jfunc->value.ancestor.offset = offset;
645 jfunc->value.ancestor.type = TREE_TYPE (obj);
649 /* Given OP which is passed as an actual argument to a called function,
650 determine if it is possible to construct a KNOWN_TYPE jump function for it
651 and if so, create one and store it to JFUNC. */
654 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
657 HOST_WIDE_INT offset, size, max_size;
660 if (!flag_devirtualize
661 || TREE_CODE (op) != ADDR_EXPR
662 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
665 op = TREE_OPERAND (op, 0);
666 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
670 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
671 || is_global_var (base))
674 if (detect_type_change (op, base, call, jfunc, offset))
677 binfo = TYPE_BINFO (TREE_TYPE (base));
680 binfo = get_binfo_at_offset (binfo, offset, TREE_TYPE (op));
683 jfunc->type = IPA_JF_KNOWN_TYPE;
684 jfunc->value.base_binfo = binfo;
689 /* Determine the jump functions of scalar arguments. Scalar means SSA names
690 and constants of a number of selected types. INFO is the ipa_node_params
691 structure associated with the caller, FUNCTIONS is a pointer to an array of
692 jump function structures associated with CALL which is the call statement
696 compute_scalar_jump_functions (struct ipa_node_params *info,
697 struct ipa_jump_func *functions,
703 for (num = 0; num < gimple_call_num_args (call); num++)
705 arg = gimple_call_arg (call, num);
707 if (is_gimple_ip_invariant (arg))
709 functions[num].type = IPA_JF_CONST;
710 functions[num].value.constant = arg;
712 else if (TREE_CODE (arg) == SSA_NAME)
714 if (SSA_NAME_IS_DEFAULT_DEF (arg))
716 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
719 && !detect_type_change_ssa (arg, call, &functions[num]))
721 functions[num].type = IPA_JF_PASS_THROUGH;
722 functions[num].value.pass_through.formal_id = index;
723 functions[num].value.pass_through.operation = NOP_EXPR;
728 gimple stmt = SSA_NAME_DEF_STMT (arg);
729 if (is_gimple_assign (stmt))
730 compute_complex_assign_jump_func (info, &functions[num],
732 else if (gimple_code (stmt) == GIMPLE_PHI)
733 compute_complex_ancestor_jump_func (info, &functions[num],
738 compute_known_type_jump_func (arg, &functions[num], call);
742 /* Inspect the given TYPE and return true iff it has the same structure (the
743 same number of fields of the same types) as a C++ member pointer. If
744 METHOD_PTR and DELTA are non-NULL, store the trees representing the
745 corresponding fields there. */
748 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
752 if (TREE_CODE (type) != RECORD_TYPE)
755 fld = TYPE_FIELDS (type);
756 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
757 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
763 fld = DECL_CHAIN (fld);
764 if (!fld || INTEGRAL_TYPE_P (fld))
769 if (DECL_CHAIN (fld))
775 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
776 boolean variable pointed to by DATA. */
779 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
782 bool *b = (bool *) data;
787 /* Return true if the formal parameter PARM might have been modified in this
788 function before reaching the statement CALL. PARM_INFO is a pointer to a
789 structure containing intermediate information about PARM. */
792 is_parm_modified_before_call (struct param_analysis_info *parm_info,
793 gimple call, tree parm)
795 bool modified = false;
798 if (parm_info->modified)
801 ao_ref_init (&refd, parm);
802 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
803 &modified, &parm_info->visited_statements);
806 parm_info->modified = true;
812 /* Go through arguments of the CALL and for every one that looks like a member
813 pointer, check whether it can be safely declared pass-through and if so,
814 mark that to the corresponding item of jump FUNCTIONS. Return true iff
815 there are non-pass-through member pointers within the arguments. INFO
816 describes formal parameters of the caller. PARMS_INFO is a pointer to a
817 vector containing intermediate information about each formal parameter. */
820 compute_pass_through_member_ptrs (struct ipa_node_params *info,
821 struct param_analysis_info *parms_info,
822 struct ipa_jump_func *functions,
825 bool undecided_members = false;
829 for (num = 0; num < gimple_call_num_args (call); num++)
831 arg = gimple_call_arg (call, num);
833 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
835 if (TREE_CODE (arg) == PARM_DECL)
837 int index = ipa_get_param_decl_index (info, arg);
839 gcc_assert (index >=0);
840 if (!is_parm_modified_before_call (&parms_info[index], call, arg))
842 functions[num].type = IPA_JF_PASS_THROUGH;
843 functions[num].value.pass_through.formal_id = index;
844 functions[num].value.pass_through.operation = NOP_EXPR;
847 undecided_members = true;
850 undecided_members = true;
854 return undecided_members;
857 /* Simple function filling in a member pointer constant jump function (with PFN
858 and DELTA as the constant value) into JFUNC. */
861 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
862 tree pfn, tree delta)
864 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
865 jfunc->value.member_cst.pfn = pfn;
866 jfunc->value.member_cst.delta = delta;
869 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
870 return the rhs of its defining statement. */
873 get_ssa_def_if_simple_copy (tree rhs)
875 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
877 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
879 if (gimple_assign_single_p (def_stmt))
880 rhs = gimple_assign_rhs1 (def_stmt);
887 /* Traverse statements from CALL backwards, scanning whether the argument ARG
888 which is a member pointer is filled in with constant values. If it is, fill
889 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
890 fields of the record type of the member pointer. To give an example, we
891 look for a pattern looking like the following:
893 D.2515.__pfn ={v} printStuff;
894 D.2515.__delta ={v} 0;
895 i_1 = doprinting (D.2515); */
898 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
899 tree delta_field, struct ipa_jump_func *jfunc)
901 gimple_stmt_iterator gsi;
902 tree method = NULL_TREE;
903 tree delta = NULL_TREE;
905 gsi = gsi_for_stmt (call);
908 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
910 gimple stmt = gsi_stmt (gsi);
913 if (!stmt_may_clobber_ref_p (stmt, arg))
915 if (!gimple_assign_single_p (stmt))
918 lhs = gimple_assign_lhs (stmt);
919 rhs = gimple_assign_rhs1 (stmt);
921 if (TREE_CODE (lhs) != COMPONENT_REF
922 || TREE_OPERAND (lhs, 0) != arg)
925 fld = TREE_OPERAND (lhs, 1);
926 if (!method && fld == method_field)
928 rhs = get_ssa_def_if_simple_copy (rhs);
929 if (TREE_CODE (rhs) == ADDR_EXPR
930 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
931 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
933 method = TREE_OPERAND (rhs, 0);
936 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
944 if (!delta && fld == delta_field)
946 rhs = get_ssa_def_if_simple_copy (rhs);
947 if (TREE_CODE (rhs) == INTEGER_CST)
952 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
964 /* Go through the arguments of the CALL and for every member pointer within
965 tries determine whether it is a constant. If it is, create a corresponding
966 constant jump function in FUNCTIONS which is an array of jump functions
967 associated with the call. */
970 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
974 tree arg, method_field, delta_field;
976 for (num = 0; num < gimple_call_num_args (call); num++)
978 arg = gimple_call_arg (call, num);
980 if (functions[num].type == IPA_JF_UNKNOWN
981 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
983 determine_cst_member_ptr (call, arg, method_field, delta_field,
988 /* Compute jump function for all arguments of callsite CS and insert the
989 information in the jump_functions array in the ipa_edge_args corresponding
993 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_info,
994 struct cgraph_edge *cs)
996 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
997 struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
1000 if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
1002 arguments->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
1003 (ipa_get_cs_argument_count (arguments));
1005 call = cs->call_stmt;
1006 gcc_assert (is_gimple_call (call));
1008 /* We will deal with constants and SSA scalars first: */
1009 compute_scalar_jump_functions (info, arguments->jump_functions, call);
1011 /* Let's check whether there are any potential member pointers and if so,
1012 whether we can determine their functions as pass_through. */
1013 if (!compute_pass_through_member_ptrs (info, parms_info,
1014 arguments->jump_functions, call))
1017 /* Finally, let's check whether we actually pass a new constant member
1019 compute_cst_member_ptr_arguments (arguments->jump_functions, call);
1022 /* Compute jump functions for all edges - both direct and indirect - outgoing
1023 from NODE. Also count the actual arguments in the process. */
1026 ipa_compute_jump_functions (struct cgraph_node *node,
1027 struct param_analysis_info *parms_info)
1029 struct cgraph_edge *cs;
1031 for (cs = node->callees; cs; cs = cs->next_callee)
1033 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee, NULL);
1034 /* We do not need to bother analyzing calls to unknown
1035 functions unless they may become known during lto/whopr. */
1036 if (!cs->callee->analyzed && !flag_lto)
1038 ipa_count_arguments (cs);
1039 /* If the descriptor of the callee is not initialized yet, we have to do
1041 if (callee->analyzed)
1042 ipa_initialize_node_params (callee);
1043 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
1044 != ipa_get_param_count (IPA_NODE_REF (callee)))
1045 ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
1046 ipa_compute_jump_functions_for_edge (parms_info, cs);
1049 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1051 ipa_count_arguments (cs);
1052 ipa_compute_jump_functions_for_edge (parms_info, cs);
1056 /* If RHS looks like a rhs of a statement loading pfn from a member
1057 pointer formal parameter, return the parameter, otherwise return
1058 NULL. If USE_DELTA, then we look for a use of the delta field
1059 rather than the pfn. */
1062 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
1064 tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;
1066 if (TREE_CODE (rhs) == COMPONENT_REF)
1068 ref_field = TREE_OPERAND (rhs, 1);
1069 rhs = TREE_OPERAND (rhs, 0);
1072 ref_field = NULL_TREE;
1073 if (TREE_CODE (rhs) != MEM_REF)
1075 rec = TREE_OPERAND (rhs, 0);
1076 if (TREE_CODE (rec) != ADDR_EXPR)
1078 rec = TREE_OPERAND (rec, 0);
1079 if (TREE_CODE (rec) != PARM_DECL
1080 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1083 ref_offset = TREE_OPERAND (rhs, 1);
1087 if (integer_nonzerop (ref_offset))
1095 return ref_field == fld ? rec : NULL_TREE;
1099 fld_offset = byte_position (delta_field);
1101 fld_offset = byte_position (ptr_field);
1103 return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
1106 /* If STMT looks like a statement loading a value from a member pointer formal
1107 parameter, this function returns that parameter. */
1110 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
1114 if (!gimple_assign_single_p (stmt))
1117 rhs = gimple_assign_rhs1 (stmt);
1118 return ipa_get_member_ptr_load_param (rhs, use_delta);
1121 /* Returns true iff T is an SSA_NAME defined by a statement. */
1124 ipa_is_ssa_with_stmt_def (tree t)
1126 if (TREE_CODE (t) == SSA_NAME
1127 && !SSA_NAME_IS_DEFAULT_DEF (t))
1133 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1134 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1135 indirect call graph edge. */
1137 static struct cgraph_edge *
1138 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1140 struct cgraph_edge *cs;
1142 cs = cgraph_edge (node, stmt);
1143 cs->indirect_info->param_index = param_index;
1144 cs->indirect_info->anc_offset = 0;
1145 cs->indirect_info->polymorphic = 0;
1149 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1150 (described by INFO). PARMS_INFO is a pointer to a vector containing
1151 intermediate information about each formal parameter. Currently it checks
1152 whether the call calls a pointer that is a formal parameter and if so, the
1153 parameter is marked with the called flag and an indirect call graph edge
1154 describing the call is created. This is very simple for ordinary pointers
1155 represented in SSA but not-so-nice when it comes to member pointers. The
1156 ugly part of this function does nothing more than trying to match the
1157 pattern of such a call. An example of such a pattern is the gimple dump
1158 below, the call is on the last line:
1161 f$__delta_5 = f.__delta;
1162 f$__pfn_24 = f.__pfn;
1166 f$__delta_5 = MEM[(struct *)&f];
1167 f$__pfn_24 = MEM[(struct *)&f + 4B];
1169 and a few lines below:
1172 D.2496_3 = (int) f$__pfn_24;
1173 D.2497_4 = D.2496_3 & 1;
1180 D.2500_7 = (unsigned int) f$__delta_5;
1181 D.2501_8 = &S + D.2500_7;
1182 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1183 D.2503_10 = *D.2502_9;
1184 D.2504_12 = f$__pfn_24 + -1;
1185 D.2505_13 = (unsigned int) D.2504_12;
1186 D.2506_14 = D.2503_10 + D.2505_13;
1187 D.2507_15 = *D.2506_14;
1188 iftmp.11_16 = (String:: *) D.2507_15;
1191 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1192 D.2500_19 = (unsigned int) f$__delta_5;
1193 D.2508_20 = &S + D.2500_19;
1194 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1196 Such patterns are results of simple calls to a member pointer:
1198 int doprinting (int (MyString::* f)(int) const)
1200 MyString S ("somestring");
1207 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1208 struct ipa_node_params *info,
1209 struct param_analysis_info *parms_info,
1210 gimple call, tree target)
1215 tree rec, rec2, cond;
1218 basic_block bb, virt_bb, join;
/* Easy case: TARGET is the default definition of a formal parameter, so
   the call goes directly through that parameter -- just record it.  */
1220 if (SSA_NAME_IS_DEFAULT_DEF (target))
1222 tree var = SSA_NAME_VAR (target);
1223 index = ipa_get_param_decl_index (info, var);
1225 ipa_note_param_call (node, index, call);
1229 /* Now we need to try to match the complex pattern of calling a member
/* Only a pointer to METHOD_TYPE can be a member-pointer call; bail out
   for anything else.  */
1232 if (!POINTER_TYPE_P (TREE_TYPE (target))
1233 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
/* The called value must be merged by a two-argument PHI (the virtual
   and the non-virtual branch of the member-pointer lowering).  */
1236 def = SSA_NAME_DEF_STMT (target);
1237 if (gimple_code (def) != GIMPLE_PHI)
1240 if (gimple_phi_num_args (def) != 2)
1243 /* First, we need to check whether one of these is a load from a member
1244 pointer that is a parameter to this function. */
1245 n1 = PHI_ARG_DEF (def, 0);
1246 n2 = PHI_ARG_DEF (def, 1);
1247 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1249 d1 = SSA_NAME_DEF_STMT (n1);
1250 d2 = SSA_NAME_DEF_STMT (n2);
1252 join = gimple_bb (def);
/* Exactly one PHI operand may be a direct load of the member-pointer
   parameter; the other one comes from the virtual-dispatch block.  */
1253 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
1255 if (ipa_get_stmt_member_ptr_load_param (d2, false))
1258 bb = EDGE_PRED (join, 0)->src;
1259 virt_bb = gimple_bb (d2);
1261 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
1263 bb = EDGE_PRED (join, 1)->src;
1264 virt_bb = gimple_bb (d1);
1269 /* Second, we need to check that the basic blocks are laid out in the way
1270 corresponding to the pattern. */
1272 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1273 || single_pred (virt_bb) != bb
1274 || single_succ (virt_bb) != join)
1277 /* Third, let's see that the branching is done depending on the least
1278 significant bit of the pfn. */
1280 branch = last_stmt (bb);
1281 if (!branch || gimple_code (branch) != GIMPLE_COND)
1284 if ((gimple_cond_code (branch) != NE_EXPR
1285 && gimple_cond_code (branch) != EQ_EXPR)
1286 || !integer_zerop (gimple_cond_rhs (branch)))
/* The condition must be (pfn & 1) compared against zero, possibly with
   an intervening conversion of the pfn value.  */
1289 cond = gimple_cond_lhs (branch);
1290 if (!ipa_is_ssa_with_stmt_def (cond))
1293 def = SSA_NAME_DEF_STMT (cond);
1294 if (!is_gimple_assign (def)
1295 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1296 || !integer_onep (gimple_assign_rhs2 (def)))
1299 cond = gimple_assign_rhs1 (def);
1300 if (!ipa_is_ssa_with_stmt_def (cond))
1303 def = SSA_NAME_DEF_STMT (cond);
/* Skip over an optional conversion between the load and the AND.  */
1305 if (is_gimple_assign (def)
1306 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1308 cond = gimple_assign_rhs1 (def);
1309 if (!ipa_is_ssa_with_stmt_def (cond))
1311 def = SSA_NAME_DEF_STMT (cond);
/* The tested value must itself be loaded from the member pointer; which
   field carries the virtual bit depends on the target ABI.  */
1314 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1315 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1316 == ptrmemfunc_vbit_in_delta))
/* Pattern matched: note the indirect call through parameter REC unless
   the parameter may have been modified before CALL.  */
1321 index = ipa_get_param_decl_index (info, rec);
1322 if (index >= 0 && !is_parm_modified_before_call (&parms_info[index],
1324 ipa_note_param_call (node, index, call);
1329 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1330 object referenced in the expression is a formal parameter of the caller
1331 (described by INFO), create a call note for the statement. */
1334 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1335 struct ipa_node_params *info, gimple call,
1338 struct cgraph_edge *cs;
1339 struct cgraph_indirect_call_info *ii;
1340 struct ipa_jump_func jfunc;
1341 tree obj = OBJ_TYPE_REF_OBJECT (target);
1343 HOST_WIDE_INT anc_offset;
/* Devirtualization analysis is only useful when enabled.  */
1345 if (!flag_devirtualize)
1348 if (TREE_CODE (obj) != SSA_NAME)
/* Case 1: OBJ is the incoming value of a PARM_DECL itself.  */
1351 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1353 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1357 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1358 gcc_assert (index >= 0);
/* Give up if the dynamic type may have changed between function entry
   and the call.  */
1359 if (detect_type_change_ssa (obj, call, &jfunc))
/* Case 2: OBJ is an ancestor (base-object address) of a parameter;
   ANC_OFFSET records the offset of the base within it.  */
1364 gimple stmt = SSA_NAME_DEF_STMT (obj);
1367 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1370 index = ipa_get_param_decl_index (info,
1371 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1372 gcc_assert (index >= 0);
1373 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
/* Record the polymorphic indirect call: token, OTR type and the
   ancestor offset are needed later for devirtualization.  */
1377 cs = ipa_note_param_call (node, index, call);
1378 ii = cs->indirect_info;
1379 ii->anc_offset = anc_offset;
1380 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1381 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1382 ii->polymorphic = 1;
1385 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1386 of the caller (described by INFO). PARMS_INFO is a pointer to a vector
1387 containing intermediate information about each formal parameter. */
1390 ipa_analyze_call_uses (struct cgraph_node *node,
1391 struct ipa_node_params *info,
1392 struct param_analysis_info *parms_info, gimple call)
1394 tree target = gimple_call_fn (call);
/* Dispatch on the form of the callee: an SSA name means a plain
   indirect call, an OBJ_TYPE_REF means a virtual call.  Direct calls
   need no parameter-use analysis here.  */
1398 if (TREE_CODE (target) == SSA_NAME)
1399 ipa_analyze_indirect_call_uses (node, info, parms_info, call, target);
1400 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1401 ipa_analyze_virtual_call_uses (node, info, call, target);
1405 /* Analyze the call statement STMT with respect to formal parameters (described
1406 in INFO) of caller given by NODE. Currently it only checks whether formal
1407 parameters are called. PARMS_INFO is a pointer to a vector containing
1408 intermediate information about each formal parameter. */
1411 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1412 struct param_analysis_info *parms_info, gimple stmt)
/* Only call statements can currently use formal parameters as callees.  */
1414 if (is_gimple_call (stmt))
1415 ipa_analyze_call_uses (node, info, parms_info, stmt);
1418 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1419 If OP is a parameter declaration, mark it as used in the info structure
1423 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1424 tree op, void *data)
/* DATA is the ipa_node_params of the function being scanned.  */
1426 struct ipa_node_params *info = (struct ipa_node_params *) data;
/* Strip component/array refs down to the base object; only a direct
   PARM_DECL base counts as a use of the parameter.  */
1428 op = get_base_address (op);
1430 && TREE_CODE (op) == PARM_DECL)
1432 int index = ipa_get_param_decl_index (info, op);
1433 gcc_assert (index >= 0);
1434 ipa_set_param_used (info, index, true);
1440 /* Scan the function body of NODE and inspect the uses of formal parameters.
1441 Store the findings in various structures of the associated ipa_node_params
1442 structure, such as parameter flags, notes etc. PARMS_INFO is a pointer to a
1443 vector containing intermediate information about each formal parameter. */
1446 ipa_analyze_params_uses (struct cgraph_node *node,
1447 struct param_analysis_info *parms_info)
1449 tree decl = node->decl;
1451 struct function *func;
1452 gimple_stmt_iterator gsi;
1453 struct ipa_node_params *info = IPA_NODE_REF (node);
/* Nothing to do when there are no parameters or the analysis already
   ran for this node.  */
1456 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1459 for (i = 0; i < ipa_get_param_count (info); i++)
1461 tree parm = ipa_get_param (info, i);
1462 /* For SSA regs see if parameter is used. For non-SSA we compute
1463 the flag during modification analysis. */
1464 if (is_gimple_reg (parm)
1465 && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
1466 ipa_set_param_used (info, i, true);
/* Walk every statement (skipping debug stmts) and every PHI node,
   recording parameter-based calls and any load/store/address use of a
   parameter.  */
1469 func = DECL_STRUCT_FUNCTION (decl);
1470 FOR_EACH_BB_FN (bb, func)
1472 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1474 gimple stmt = gsi_stmt (gsi);
1476 if (is_gimple_debug (stmt))
1479 ipa_analyze_stmt_uses (node, info, parms_info, stmt);
1480 walk_stmt_load_store_addr_ops (stmt, info,
1481 visit_ref_for_mod_analysis,
1482 visit_ref_for_mod_analysis,
1483 visit_ref_for_mod_analysis);
1485 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
1486 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1487 visit_ref_for_mod_analysis,
1488 visit_ref_for_mod_analysis,
1489 visit_ref_for_mod_analysis);
/* Remember the work is done so re-invocation becomes a no-op.  */
1492 info->uses_analysis_done = 1;
1495 /* Initialize the array describing properties of formal parameters
1496 of NODE, analyze their uses and compute jump functions associated
1497 with actual arguments of calls from within NODE. */
1500 ipa_analyze_node (struct cgraph_node *node)
1502 struct ipa_node_params *info;
1503 struct param_analysis_info *parms_info;
/* Make sure the per-node and per-edge info vectors exist before use.  */
1506 ipa_check_create_node_params ();
1507 ipa_check_create_edge_args ();
1508 info = IPA_NODE_REF (node);
1509 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1510 current_function_decl = node->decl;
1511 ipa_initialize_node_params (node);
/* Scratch per-parameter state lives on the stack for the duration of
   this analysis only (see struct param_analysis_info).  */
1513 param_count = ipa_get_param_count (info);
1514 parms_info = XALLOCAVEC (struct param_analysis_info, param_count);
1515 memset (parms_info, 0, sizeof (struct param_analysis_info) * param_count);
1517 ipa_analyze_params_uses (node, parms_info);
1518 ipa_compute_jump_functions (node, parms_info);
/* Release the visited-statement bitmaps accumulated during analysis.  */
1520 for (i = 0; i < param_count; i++)
1521 if (parms_info[i].visited_statements)
1522 BITMAP_FREE (parms_info[i].visited_statements);
1524 current_function_decl = NULL;
1529 /* Update the jump function DST when the call graph edge corresponding to SRC is
1530 is being inlined, knowing that DST is of type ancestor and src of known
1534 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1535 struct ipa_jump_func *dst)
/* Look the ancestor's type up at the recorded offset within the known
   binfo; on success DST becomes a known-type jump function itself,
   otherwise it degrades to unknown.  */
1539 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1540 dst->value.ancestor.offset,
1541 dst->value.ancestor.type);
1544 dst->type = IPA_JF_KNOWN_TYPE;
1545 dst->value.base_binfo = new_binfo;
1548 dst->type = IPA_JF_UNKNOWN;
1551 /* Update the jump functions associated with call graph edge E when the call
1552 graph edge CS is being inlined, assuming that E->caller is already (possibly
1553 indirectly) inlined into CS->callee and that E has not been inlined. */
1556 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1557 struct cgraph_edge *e)
1559 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1560 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1561 int count = ipa_get_cs_argument_count (args);
/* Re-express each jump function of E in terms of the arguments of the
   inlined call CS.  */
1564 for (i = 0; i < count; i++)
1566 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1568 if (dst->type == IPA_JF_ANCESTOR)
1570 struct ipa_jump_func *src;
1572 /* Variable number of arguments can cause havoc if we try to access
1573 one that does not exist in the inlined edge. So make sure we
1575 if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
1577 dst->type = IPA_JF_UNKNOWN;
/* Compose the ancestor function with whatever described the formal
   in the outer call: known type, simple pass-through, or another
   ancestor (offsets accumulate); anything else loses the info.  */
1581 src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
1582 if (src->type == IPA_JF_KNOWN_TYPE)
1583 combine_known_type_and_ancestor_jfs (src, dst);
1584 else if (src->type == IPA_JF_PASS_THROUGH
1585 && src->value.pass_through.operation == NOP_EXPR)
1586 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
1587 else if (src->type == IPA_JF_ANCESTOR)
1589 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
1590 dst->value.ancestor.offset += src->value.ancestor.offset;
1593 dst->type = IPA_JF_UNKNOWN;
1595 else if (dst->type == IPA_JF_PASS_THROUGH)
1597 struct ipa_jump_func *src;
1598 /* We must check range due to calls with variable number of arguments
1599 and we cannot combine jump functions with operations. */
1600 if (dst->value.pass_through.operation == NOP_EXPR
1601 && (dst->value.pass_through.formal_id
1602 < ipa_get_cs_argument_count (top)))
1604 src = ipa_get_ith_jump_func (top,
1605 dst->value.pass_through.formal_id);
1609 dst->type = IPA_JF_UNKNOWN;
1614 /* If TARGET is an addr_expr of a function declaration, make it the destination
1615 of an indirect edge IE and return the edge. Otherwise, return NULL. Delta,
1616 if non-NULL, is an integer constant that must be added to this pointer
1617 (first parameter). */
struct cgraph_edge *
1620 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target, tree delta)
1622 struct cgraph_node *callee;
/* Strip a possible ADDR_EXPR wrapper; only a FUNCTION_DECL target can
   be turned into a direct edge.  */
1624 if (TREE_CODE (target) == ADDR_EXPR)
1625 target = TREE_OPERAND (target, 0);
1626 if (TREE_CODE (target) != FUNCTION_DECL)
1628 callee = cgraph_get_node (target);
1631 ipa_check_create_node_params ();
1633 /* We can not make edges to inline clones.  It is a bug that someone removed
1635 gcc_assert (!callee->global.inlined_to);
/* DELTA, when present, is the this-pointer adjustment for thunks.  */
1637 cgraph_make_edge_direct (ie, callee, delta ? tree_low_cst (delta, 0) : 0);
1640 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1641 "(%s/%i -> %s/%i), for stmt ",
1642 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1643 cgraph_node_name (ie->caller), ie->caller->uid,
1644 cgraph_node_name (ie->callee), ie->callee->uid);
1646 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1648 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
1652 fprintf (dump_file, " Thunk delta is ");
1653 print_generic_expr (dump_file, delta, 0);
1654 fprintf (dump_file, "\n");
/* Mark the callee as called with a mismatching number of arguments when
   edge and callee parameter counts disagree.  */
1657 callee = cgraph_function_or_thunk_node (callee, NULL);
1659 if (ipa_get_cs_argument_count (IPA_EDGE_REF (ie))
1660 != ipa_get_param_count (IPA_NODE_REF (callee)))
1661 ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
1666 /* Try to find a destination for indirect edge IE that corresponds to a simple
1667 call or a call of a member function pointer and where the destination is a
1668 pointer formal parameter described by jump function JFUNC. If it can be
1669 determined, return the newly direct edge, otherwise return NULL. */
static struct cgraph_edge *
1672 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1673 struct ipa_jump_func *jfunc)
/* Only constant jump functions (plain or member-pointer) yield a known
   target; a member pointer contributes its pfn field.  */
1677 if (jfunc->type == IPA_JF_CONST)
1678 target = jfunc->value.constant;
1679 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1680 target = jfunc->value.member_cst.pfn;
/* No thunk delta for simple calls.  */
1684 return ipa_make_edge_direct_to_target (ie, target, NULL_TREE);
1687 /* Try to find a destination for indirect edge IE that corresponds to a
1688 virtual call based on a formal parameter which is described by jump
1689 function JFUNC and if it can be determined, make it direct and return the
1690 direct edge. Otherwise, return NULL. */
static struct cgraph_edge *
1693 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1694 struct ipa_jump_func *jfunc)
1696 tree binfo, type, target, delta;
1697 HOST_WIDE_INT token;
/* Devirtualization needs a known dynamic type (binfo) for the object.  */
1699 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1700 binfo = jfunc->value.base_binfo;
/* Adjust the binfo by the ancestor offset, then look up the method for
   the OBJ_TYPE_REF token in that binfo's virtual table.  */
1707 token = ie->indirect_info->otr_token;
1708 type = ie->indirect_info->otr_type;
1709 binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
1711 target = gimple_get_virt_method_for_binfo (token, binfo, &delta);
1716 return ipa_make_edge_direct_to_target (ie, target, delta);
1721 /* Update the param called notes associated with NODE when CS is being inlined,
1722 assuming NODE is (potentially indirectly) inlined into CS->callee.
1723 Moreover, if the callee is discovered to be constant, create a new cgraph
1724 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1725 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1728 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
1729 struct cgraph_node *node,
1730 VEC (cgraph_edge_p, heap) **new_edges)
1732 struct ipa_edge_args *top;
1733 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
1736 ipa_check_create_edge_args ();
1737 top = IPA_EDGE_REF (cs);
/* NEXT_IE is fetched up front because making an edge direct removes it
   from the indirect-call list we are walking.  */
1739 for (ie = node->indirect_calls; ie; ie = next_ie)
1741 struct cgraph_indirect_call_info *ici = ie->indirect_info;
1742 struct ipa_jump_func *jfunc;
1744 next_ie = ie->next_callee;
1745 if (bitmap_bit_p (iinlining_processed_edges, ie->uid))
1748 /* If we ever use indirect edges for anything other than indirect
1749 inlining, we will need to skip those with negative param_indices. */
1750 if (ici->param_index == -1)
1753 /* We must check range due to calls with variable number of arguments: */
1754 if (ici->param_index >= ipa_get_cs_argument_count (top))
1756 bitmap_set_bit (iinlining_processed_edges, ie->uid);
/* Translate the parameter index through the jump function of the
   inlined call: pass-throughs and ancestors retarget the index
   (ancestors also shift the offset).  */
1760 jfunc = ipa_get_ith_jump_func (top, ici->param_index);
1761 if (jfunc->type == IPA_JF_PASS_THROUGH
1762 && jfunc->value.pass_through.operation == NOP_EXPR)
1763 ici->param_index = jfunc->value.pass_through.formal_id;
1764 else if (jfunc->type == IPA_JF_ANCESTOR)
1766 ici->param_index = jfunc->value.ancestor.formal_id;
1767 ici->anc_offset += jfunc->value.ancestor.offset;
1770 /* Either we can find a destination for this edge now or never. */
1771 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1773 if (ici->polymorphic)
1774 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
1776 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
1778 if (new_direct_edge)
1780 new_direct_edge->indirect_inlining_edge = 1;
/* Pushing to the vector may reallocate; TOP is re-fetched below in
   case the edge-args vector moved.  */
1783 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
1785 top = IPA_EDGE_REF (cs);
1794 /* Recursively traverse subtree of NODE (including node) made of inlined
1795 cgraph_edges when CS has been inlined and invoke
1796 update_indirect_edges_after_inlining on all nodes and
1797 update_jump_functions_after_inlining on all non-inlined edges that lead out
1798 of this subtree. Newly discovered indirect edges will be added to
1799 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1803 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1804 struct cgraph_node *node,
1805 VEC (cgraph_edge_p, heap) **new_edges)
1807 struct cgraph_edge *e;
1810 res = update_indirect_edges_after_inlining (cs, node, new_edges);
/* Recurse into edges that were themselves inlined; for edges leaving
   the inlined subtree just retranslate their jump functions.  */
1812 for (e = node->callees; e; e = e->next_callee)
1813 if (!e->inline_failed)
1814 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1816 update_jump_functions_after_inlining (cs, e);
1821 /* Update jump functions and call note functions on inlining the call site CS.
1822 CS is expected to lead to a node already cloned by
1823 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1824 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1828 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1829 VEC (cgraph_edge_p, heap) **new_edges)
1831 /* Do nothing if the preparation phase has not been carried out yet
1832 (i.e. during early inlining). */
1833 if (!ipa_node_params_vector)
/* Node and edge info vectors are created together; assert that.  */
1835 gcc_assert (ipa_edge_args_vector);
1837 return propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1840 /* Frees all dynamically allocated structures that the argument info points
1844 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
/* Jump functions are GC-allocated; free them explicitly, then clear the
   whole structure so it is safe to reuse.  */
1846 if (args->jump_functions)
1847 ggc_free (args->jump_functions);
1849 memset (args, 0, sizeof (*args));
1852 /* Free all ipa_edge structures. */
1855 ipa_free_all_edge_args (void)
1858 struct ipa_edge_args *args;
/* Release each element's substructures before freeing the vector
   itself.  */
1860 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
1861 ipa_free_edge_args_substructures (args);
1863 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
1864 ipa_edge_args_vector = NULL;
1867 /* Frees all dynamically allocated structures that the param info points
1871 ipa_free_node_params_substructures (struct ipa_node_params *info)
1873 VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
1874 free (info->lattices);
1875 /* Lattice values and their sources are deallocated with their allocation
1877 VEC_free (tree, heap, info->known_vals);
/* Clear everything so the slot can be reused safely.  */
1878 memset (info, 0, sizeof (*info));
1881 /* Free all ipa_node_params structures. */
1884 ipa_free_all_node_params (void)
1887 struct ipa_node_params *info;
/* Release each element's substructures before freeing the vector
   itself.  */
1889 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
1890 ipa_free_node_params_substructures (info)
1892 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
1893 ipa_node_params_vector = NULL;
1896 /* Hook that is called by cgraph.c when an edge is removed. */
1899 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
1901 /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
1902 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
1903 <= (unsigned)cs->uid)
1905 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1908 /* Hook that is called by cgraph.c when a node is removed. */
1911 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1913 /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
1914 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
1915 <= (unsigned)node->uid)
1917 ipa_free_node_params_substructures (IPA_NODE_REF (node));
static struct ipa_jump_func *
1921 duplicate_ipa_jump_func_array (const struct ipa_jump_func * src, size_t n)
/* Return a GC-allocated copy of the N-element jump function array SRC.  */
1923 struct ipa_jump_func *p;
1928 p = ggc_alloc_vec_ipa_jump_func (n);
1929 memcpy (p, src, n * sizeof (struct ipa_jump_func));
1933 /* Hook that is called by cgraph.c when an edge is duplicated. */
1936 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
1937 __attribute__((unused)) void *data)
1939 struct ipa_edge_args *old_args, *new_args;
1942 ipa_check_create_edge_args ();
1944 old_args = IPA_EDGE_REF (src);
1945 new_args = IPA_EDGE_REF (dst);
/* The duplicate edge gets a deep copy of the jump function array.  */
1947 arg_count = ipa_get_cs_argument_count (old_args);
1948 ipa_set_cs_argument_count (new_args, arg_count);
1949 new_args->jump_functions =
1950 duplicate_ipa_jump_func_array (old_args->jump_functions, arg_count);
/* Preserve the "already processed by indirect inlining" flag for the
   duplicate too, if that bitmap exists.  */
1952 if (iinlining_processed_edges
1953 && bitmap_bit_p (iinlining_processed_edges, src->uid))
1954 bitmap_set_bit (iinlining_processed_edges, dst->uid);
1957 /* Hook that is called by cgraph.c when a node is duplicated. */
1960 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
1961 ATTRIBUTE_UNUSED void *data)
1963 struct ipa_node_params *old_info, *new_info;
1965 ipa_check_create_node_params ();
1966 old_info = IPA_NODE_REF (src);
1967 new_info = IPA_NODE_REF (dst);
/* Copy the parameter descriptors; lattices are per-run scratch data and
   are deliberately not carried over.  */
1969 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
1970 old_info->descriptors);
1971 new_info->lattices = NULL;
1972 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
1974 new_info->called_with_var_arguments = old_info->called_with_var_arguments;
1975 new_info->uses_analysis_done = old_info->uses_analysis_done;
1976 new_info->node_enqueued = old_info->node_enqueued;
1980 /* Analyze newly added function into callgraph. */
1983 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
/* Run the full per-node IPA analysis on the newly inserted function.  */
1985 ipa_analyze_node (node);
1988 /* Register our cgraph hooks if they are not already there. */
1991 ipa_register_cgraph_hooks (void)
/* Each hook is registered only once; the holders double as
   "already registered" flags.  */
1993 if (!edge_removal_hook_holder)
1994 edge_removal_hook_holder =
1995 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
1996 if (!node_removal_hook_holder)
1997 node_removal_hook_holder =
1998 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
1999 if (!edge_duplication_hook_holder)
2000 edge_duplication_hook_holder =
2001 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2002 if (!node_duplication_hook_holder)
2003 node_duplication_hook_holder =
2004 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
/* NOTE(review): unlike the others, the insertion hook is added without
   a NULL check on its holder -- confirm repeated registration is not
   possible here.  */
2005 function_insertion_hook_holder =
2006 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2009 /* Unregister all of our cgraph hooks and clear their holders. */
2012 ipa_unregister_cgraph_hooks (void)
/* Remove every hook and clear its holder so a later
   ipa_register_cgraph_hooks can re-register them.  */
2014 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2015 edge_removal_hook_holder = NULL;
2016 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2017 node_removal_hook_holder = NULL;
2018 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2019 edge_duplication_hook_holder = NULL;
2020 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2021 node_duplication_hook_holder = NULL;
2022 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2023 function_insertion_hook_holder = NULL;
2026 /* Allocate all data structures necessary for indirect inlining. */
2029 ipa_create_all_structures_for_iinln (void)
/* The bitmap tracks call graph edges already processed by indirect
   inlining (see iinlining_processed_edges).  */
2031 iinlining_processed_edges = BITMAP_ALLOC (NULL);
2034 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2035 longer needed after ipa-cp. */
2038 ipa_free_all_structures_after_ipa_cp (void)
/* Keep everything alive when indirect inlining will still need it;
   ipa_free_all_structures_after_iinln frees it later in that case.  */
2040 if (!flag_indirect_inlining)
2042 ipa_free_all_edge_args ();
2043 ipa_free_all_node_params ();
2044 free_alloc_pool (ipcp_sources_pool);
2045 free_alloc_pool (ipcp_values_pool);
2046 ipa_unregister_cgraph_hooks ();
2050 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2051 longer needed after indirect inlining. */
2054 ipa_free_all_structures_after_iinln (void)
2056 BITMAP_FREE (iinlining_processed_edges);
2058 ipa_free_all_edge_args ();
2059 ipa_free_all_node_params ();
2060 ipa_unregister_cgraph_hooks ();
/* Unlike the post-IPA-CP variant, the pools may not exist here (IPA-CP
   may not have run), hence the NULL checks.  */
2061 if (ipcp_sources_pool)
2062 free_alloc_pool (ipcp_sources_pool);
2063 if (ipcp_values_pool)
2064 free_alloc_pool (ipcp_values_pool);
2067 /* Print ipa_tree_map data structures of all functions in the
2071 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2075 struct ipa_node_params *info;
/* Only analyzed nodes have parameter descriptors to print.  */
2077 if (!node->analyzed)
2079 info = IPA_NODE_REF (node);
2080 fprintf (f, " function %s parameter descriptors:\n",
2081 cgraph_node_name (node));
2082 count = ipa_get_param_count (info);
2083 for (i = 0; i < count; i++)
2085 temp = ipa_get_param (info, i);
2086 if (TREE_CODE (temp) == PARM_DECL)
2087 fprintf (f, " param %d : %s", i,
2089 ? (*lang_hooks.decl_printable_name) (temp, 2)
2091 if (ipa_is_param_used (info, i))
2092 fprintf (f, " used");
2097 /* Print ipa_tree_map data structures of all functions in the
2101 ipa_print_all_params (FILE * f)
2103 struct cgraph_node *node;
/* Walk the whole call graph and dump each node's descriptors.  */
2105 fprintf (f, "\nFunction parameters:\n");
2106 for (node = cgraph_nodes; node; node = node->next)
2107 ipa_print_node_params (f, node);
2110 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2113 ipa_get_vector_of_formal_parms (tree fndecl)
2115 VEC(tree, heap) *args;
/* Pre-size the vector from the parameter count, then push each
   PARM_DECL in declaration order.  Caller owns (and frees) the
   vector.  */
2119 count = count_formal_params (fndecl);
2120 args = VEC_alloc (tree, heap, count);
2121 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2122 VEC_quick_push (tree, args, parm);
2127 /* Return a heap allocated vector containing types of formal parameters of
2128 function type FNTYPE. */
static inline VEC(tree, heap) *
2131 get_vector_of_formal_parm_types (tree fntype)
2133 VEC(tree, heap) *types;
/* First pass counts the TYPE_ARG_TYPES entries, second pass copies the
   type of each into the freshly allocated vector.  */
2137 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2140 types = VEC_alloc (tree, heap, count);
2141 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2142 VEC_quick_push (tree, types, TREE_VALUE (t));
2147 /* Modify the function declaration FNDECL and its type according to the plan in
2148 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2149 to reflect the actual parameters being modified which are determined by the
2150 base_index field. */
2153 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2154 const char *synth_parm_prefix)
2156 VEC(tree, heap) *oparms, *otypes;
2157 tree orig_type, new_type = NULL;
2158 tree old_arg_types, t, new_arg_types = NULL;
2159 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2160 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2161 tree new_reversed = NULL;
2162 bool care_for_types, last_parm_void;
2164 if (!synth_parm_prefix)
2165 synth_parm_prefix = "SYNTH";
2167 oparms = ipa_get_vector_of_formal_parms (fndecl);
2168 orig_type = TREE_TYPE (fndecl);
2169 old_arg_types = TYPE_ARG_TYPES (orig_type);
2171 /* The following test is an ugly hack, some functions simply don't have any
2172 arguments in their type. This is probably a bug but well... */
2173 care_for_types = (old_arg_types != NULL_TREE);
/* LAST_PARM_VOID reflects whether the original arg-type list was
   void-terminated (i.e. not a varargs function).  */
2176 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2178 otypes = get_vector_of_formal_parm_types (orig_type);
2180 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2182 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2186 last_parm_void = false;
/* Build the new DECL_ARGUMENTS chain and arg-type list according to the
   adjustments: copied params are relinked as-is, removed params are
   skipped, and the rest get a synthesized replacement PARM_DECL.  */
2190 for (i = 0; i < len; i++)
2192 struct ipa_parm_adjustment *adj;
2195 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2196 parm = VEC_index (tree, oparms, adj->base_index);
2199 if (adj->copy_param)
2202 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2206 link = &DECL_CHAIN (parm);
2208 else if (!adj->remove_param)
2214 ptype = build_pointer_type (adj->type);
2219 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2221 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2223 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2225 DECL_ARTIFICIAL (new_parm) = 1;
2226 DECL_ARG_TYPE (new_parm) = ptype;
2227 DECL_CONTEXT (new_parm) = fndecl;
2228 TREE_USED (new_parm) = 1;
2229 DECL_IGNORED_P (new_parm) = 1;
2230 layout_decl (new_parm, 0);
2232 add_referenced_var (new_parm);
2233 mark_sym_for_renaming (new_parm);
/* Remember which synthesized decl stands for this adjustment.  */
2235 adj->reduction = new_parm;
2239 link = &DECL_CHAIN (new_parm);
/* Arg types were consed in reverse; restore order and re-terminate with
   void when the original list was void-terminated.  */
2247 new_reversed = nreverse (new_arg_types);
2251 TREE_CHAIN (new_arg_types) = void_list_node;
2253 new_reversed = void_list_node;
2257 /* Use copy_node to preserve as much as possible from original type
2258 (debug info, attribute lists etc.)
2259 Exception is METHOD_TYPEs must have THIS argument.
2260 When we are asked to remove it, we need to build new FUNCTION_TYPE
2262 if (TREE_CODE (orig_type) != METHOD_TYPE
2263 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
2264 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0)
2266 new_type = build_distinct_type_copy (orig_type);
2267 TYPE_ARG_TYPES (new_type) = new_reversed;
2272 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2274 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2275 DECL_VINDEX (fndecl) = NULL_TREE;
2278 /* When signature changes, we need to clear builtin info. */
2279 if (DECL_BUILT_IN (fndecl))
2281 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2282 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2285 /* This is a new type, not a copy of an old type. Need to reassociate
2286 variants. We can handle everything except the main variant lazily. */
2287 t = TYPE_MAIN_VARIANT (orig_type);
2290 TYPE_MAIN_VARIANT (new_type) = t;
2291 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2292 TYPE_NEXT_VARIANT (t) = new_type;
2296 TYPE_MAIN_VARIANT (new_type) = new_type;
2297 TYPE_NEXT_VARIANT (new_type) = NULL;
2300 TREE_TYPE (fndecl) = new_type;
2301 DECL_VIRTUAL_P (fndecl) = 0;
2303 VEC_free (tree, heap, otypes);
2304 VEC_free (tree, heap, oparms);
2307 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2308 If this is a directly recursive call, CS must be NULL. Otherwise it must
2309 contain the corresponding call graph edge. */
2312 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2313 ipa_parm_adjustment_vec adjustments)
2315 VEC(tree, heap) *vargs;
2316 VEC(tree, gc) **debug_args = NULL;
2318 gimple_stmt_iterator gsi;
2322 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2323 vargs = VEC_alloc (tree, heap, len);
2324 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
2326 gsi = gsi_for_stmt (stmt);
2327 for (i = 0; i < len; i++)
2329 struct ipa_parm_adjustment *adj;
2331 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2333 if (adj->copy_param)
2335 tree arg = gimple_call_arg (stmt, adj->base_index);
2337 VEC_quick_push (tree, vargs, arg);
2339 else if (!adj->remove_param)
2341 tree expr, base, off;
2344 /* We create a new parameter out of the value of the old one, we can
2345 do the following kind of transformations:
2347 - A scalar passed by reference is converted to a scalar passed by
2348 value. (adj->by_ref is false and the type of the original
2349 actual argument is a pointer to a scalar).
2351 - A part of an aggregate is passed instead of the whole aggregate.
2352 The part can be passed either by value or by reference, this is
2353 determined by value of adj->by_ref. Moreover, the code below
2354 handles both situations when the original aggregate is passed by
2355 value (its type is not a pointer) and when it is passed by
2356 reference (it is a pointer to an aggregate).
2358 When the new argument is passed by reference (adj->by_ref is true)
2359 it must be a part of an aggregate and therefore we form it by
2360 simply taking the address of a reference inside the original
2363 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2364 base = gimple_call_arg (stmt, adj->base_index);
2365 loc = EXPR_LOCATION (base);
2367 if (TREE_CODE (base) != ADDR_EXPR
2368 && POINTER_TYPE_P (TREE_TYPE (base)))
2369 off = build_int_cst (adj->alias_ptr_type,
2370 adj->offset / BITS_PER_UNIT);
2373 HOST_WIDE_INT base_offset;
2376 if (TREE_CODE (base) == ADDR_EXPR)
2377 base = TREE_OPERAND (base, 0);
2379 base = get_addr_base_and_unit_offset (base, &base_offset);
2380 /* Aggregate arguments can have non-invariant addresses. */
2383 base = build_fold_addr_expr (prev_base);
2384 off = build_int_cst (adj->alias_ptr_type,
2385 adj->offset / BITS_PER_UNIT);
2387 else if (TREE_CODE (base) == MEM_REF)
2389 off = build_int_cst (adj->alias_ptr_type,
2391 + adj->offset / BITS_PER_UNIT);
2392 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2394 base = TREE_OPERAND (base, 0);
2398 off = build_int_cst (adj->alias_ptr_type,
2400 + adj->offset / BITS_PER_UNIT);
2401 base = build_fold_addr_expr (base);
2405 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2407 expr = build_fold_addr_expr (expr);
2409 expr = force_gimple_operand_gsi (&gsi, expr,
2411 || is_gimple_reg_type (adj->type),
2412 NULL, true, GSI_SAME_STMT);
2413 VEC_quick_push (tree, vargs, expr);
2415 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
2418 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
2421 arg = gimple_call_arg (stmt, adj->base_index);
2422 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
2424 if (!fold_convertible_p (TREE_TYPE (origin), arg))
2426 arg = fold_convert_loc (gimple_location (stmt),
2427 TREE_TYPE (origin), arg);
2429 if (debug_args == NULL)
2430 debug_args = decl_debug_args_insert (callee_decl);
2431 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
2432 if (ddecl == origin)
2434 ddecl = VEC_index (tree, *debug_args, ix + 1);
2439 ddecl = make_node (DEBUG_EXPR_DECL);
2440 DECL_ARTIFICIAL (ddecl) = 1;
2441 TREE_TYPE (ddecl) = TREE_TYPE (origin);
2442 DECL_MODE (ddecl) = DECL_MODE (origin);
2444 VEC_safe_push (tree, gc, *debug_args, origin);
2445 VEC_safe_push (tree, gc, *debug_args, ddecl);
2447 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
2449 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
2453 if (dump_file && (dump_flags & TDF_DETAILS))
2455 fprintf (dump_file, "replacing stmt:");
2456 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2459 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2460 VEC_free (tree, heap, vargs);
2461 if (gimple_call_lhs (stmt))
2462 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2464 gimple_set_block (new_stmt, gimple_block (stmt));
2465 if (gimple_has_location (stmt))
2466 gimple_set_location (new_stmt, gimple_location (stmt));
2467 gimple_call_copy_flags (new_stmt, stmt);
2468 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2470 if (dump_file && (dump_flags & TDF_DETAILS))
2472 fprintf (dump_file, "with stmt:");
2473 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2474 fprintf (dump_file, "\n");
2476 gsi_replace (&gsi, new_stmt, true);
2478 cgraph_set_call_stmt (cs, new_stmt);
2479 update_ssa (TODO_update_ssa);
2480 free_dominance_info (CDI_DOMINATORS);
2483 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
/* NOTE(review): this listing elides interior lines (the original line
   numbering jumps), so the function's return type, the duplicate-tracking
   state updated after the comparison below, and the final return statements
   are not visible here.  The visible code only sets up a linear scan.  */
2486 index_in_adjustments_multiple_times_p (int base_index,
2487 ipa_parm_adjustment_vec adjustments)
/* Number of adjustment entries to examine.  */
2489 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2492 for (i = 0; i < len; i++)
2494 struct ipa_parm_adjustment *adj;
2495 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
/* Each match of BASE_INDEX beyond the first means it occurs multiple
   times; the counting/early-return logic is in the elided lines.  */
2497 if (adj->base_index == base_index)
2509 /* Return adjustments that should have the same effect on function parameters
2510 and call arguments as if they were first changed according to adjustments in
2511 INNER and then by adjustments in OUTER. */
/* NOTE(review): interior lines are elided in this listing (e.g. the
   declaration/initialization of `removals', several braces and else-arms),
   so only the overall phase structure is documented here.  */
2513 ipa_parm_adjustment_vec
2514 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
2515 ipa_parm_adjustment_vec outer)
2517 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
2518 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
2520 ipa_parm_adjustment_vec adjustments, tmp;
/* Phase 1: copy the INNER adjustments that survive (are not removals)
   into TMP; OUTER indices refer into this filtered vector.  */
2522 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
2523 for (i = 0; i < inlen; i++)
2525 struct ipa_parm_adjustment *n;
2526 n = VEC_index (ipa_parm_adjustment_t, inner, i);
2528 if (n->remove_param)
2531 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
/* Phase 2: compose each OUTER adjustment with the inner adjustment it
   refers to.  `removals' (declared in an elided line) reserves room for
   the removed-parameter entries re-appended in phase 3.  */
2534 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
2535 for (i = 0; i < outlen; i++)
2537 struct ipa_parm_adjustment *r;
2538 struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
2540 struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
/* Removals were filtered out of TMP in phase 1, so IN cannot be one.  */
2543 gcc_assert (!in->remove_param);
2544 if (out->remove_param)
/* A removal in OUTER becomes a removal of the underlying original
   parameter, but only once per base index.  */
2546 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
2548 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2549 memset (r, 0, sizeof (*r));
2550 r->remove_param = true;
/* Otherwise build the composed (non-removal) adjustment: base index
   comes from the inner one, the new type from the outer one.  */
2555 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2556 memset (r, 0, sizeof (*r));
2557 r->base_index = in->base_index;
2558 r->type = out->type;
2560 /* FIXME: Create nonlocal value too. */
/* Offsets compose additively; a plain copy on either side contributes
   no offset of its own.  */
2562 if (in->copy_param && out->copy_param)
2563 r->copy_param = true;
2564 else if (in->copy_param)
2565 r->offset = out->offset;
2566 else if (out->copy_param)
2567 r->offset = in->offset;
2569 r->offset = in->offset + out->offset;
/* Phase 3: re-append the INNER removal entries so the result still
   records every removed original parameter.  */
2572 for (i = 0; i < inlen; i++)
2574 struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
2577 if (n->remove_param)
2578 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
/* TMP was only a working copy; the result vector owns its entries.  */
2581 VEC_free (ipa_parm_adjustment_t, heap, tmp);
2585 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
2586 friendly way, assuming they are meant to be applied to FNDECL. */
/* NOTE(review): the return type line and several conditional guards are
   elided from this listing; the visible code prints one line per
   adjustment entry to FILE.  */
2589 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2592 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
/* Formal parameters of FNDECL, indexed by adj->base_index below.  */
2594 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2596 fprintf (file, "IPA param adjustments: ");
2597 for (i = 0; i < len; i++)
2599 struct ipa_parm_adjustment *adj;
2600 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2603 fprintf (file, " ");
/* Identify the original parameter this adjustment is based on.  */
2607 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2608 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2611 fprintf (file, ", base: ");
2612 print_generic_expr (file, adj->base, 0);
2616 fprintf (file, ", reduction: ");
2617 print_generic_expr (file, adj->reduction, 0);
2619 if (adj->new_ssa_base)
2621 fprintf (file, ", new_ssa_base: ");
2622 print_generic_expr (file, adj->new_ssa_base, 0);
/* The adjustment kind: straight copy, removal, or (in the elided else
   branch) an offset-based replacement.  */
2625 if (adj->copy_param)
2626 fprintf (file, ", copy_param");
2627 else if (adj->remove_param)
2628 fprintf (file, ", remove_param");
2630 fprintf (file, ", offset %li", (long) adj->offset);
2632 fprintf (file, ", by_ref");
2633 print_node_brief (file, ", type: ", adj->type, 0);
2634 fprintf (file, "\n");
/* Release the temporary formal-parameter vector.  */
2636 VEC_free (tree, heap, parms);
2639 /* Stream out jump function JUMP_FUNC to OB. */
/* Counterpart of ipa_read_jump_function below: the payload written for
   each jump-function kind must match what that reader consumes, in the
   same order.  NOTE(review): the `break;' statements between cases are
   elided from this listing.  */
2642 ipa_write_jump_function (struct output_block *ob,
2643 struct ipa_jump_func *jump_func)
/* First the discriminating tag (jump_func->type, written in an elided
   argument line), then the kind-specific payload.  */
2645 lto_output_uleb128_stream (ob->main_stream,
2648 switch (jump_func->type)
2650 case IPA_JF_UNKNOWN:
2652 case IPA_JF_KNOWN_TYPE:
2653 lto_output_tree (ob, jump_func->value.base_binfo, true);
2656 lto_output_tree (ob, jump_func->value.constant, true);
2658 case IPA_JF_PASS_THROUGH:
2659 lto_output_tree (ob, jump_func->value.pass_through.operand, true);
2660 lto_output_uleb128_stream (ob->main_stream,
2661 jump_func->value.pass_through.formal_id);
2662 lto_output_uleb128_stream (ob->main_stream,
2663 jump_func->value.pass_through.operation);
2665 case IPA_JF_ANCESTOR:
2666 lto_output_uleb128_stream (ob->main_stream,
2667 jump_func->value.ancestor.offset);
2668 lto_output_tree (ob, jump_func->value.ancestor.type, true);
2669 lto_output_uleb128_stream (ob->main_stream,
2670 jump_func->value.ancestor.formal_id);
2672 case IPA_JF_CONST_MEMBER_PTR:
2673 lto_output_tree (ob, jump_func->value.member_cst.pfn, true);
2674 lto_output_tree (ob, jump_func->value.member_cst.delta, false);
2679 /* Read in jump function JUMP_FUNC from IB. */
/* Mirror of ipa_write_jump_function above: reads the tag, then the
   kind-specific payload in exactly the order it was written.
   NOTE(review): case `break;'s and the IPA_JF_CONST case label are
   elided from this listing.  */
2682 ipa_read_jump_function (struct lto_input_block *ib,
2683 struct ipa_jump_func *jump_func,
2684 struct data_in *data_in)
/* The discriminating tag selects which union member to populate.  */
2686 jump_func->type = (enum jump_func_type) lto_input_uleb128 (ib);
2688 switch (jump_func->type)
2690 case IPA_JF_UNKNOWN:
2692 case IPA_JF_KNOWN_TYPE:
2693 jump_func->value.base_binfo = lto_input_tree (ib, data_in);
2696 jump_func->value.constant = lto_input_tree (ib, data_in);
2698 case IPA_JF_PASS_THROUGH:
2699 jump_func->value.pass_through.operand = lto_input_tree (ib, data_in);
2700 jump_func->value.pass_through.formal_id = lto_input_uleb128 (ib);
2701 jump_func->value.pass_through.operation = (enum tree_code) lto_input_uleb128 (ib);
2703 case IPA_JF_ANCESTOR:
2704 jump_func->value.ancestor.offset = lto_input_uleb128 (ib);
2705 jump_func->value.ancestor.type = lto_input_tree (ib, data_in);
2706 jump_func->value.ancestor.formal_id = lto_input_uleb128 (ib);
2708 case IPA_JF_CONST_MEMBER_PTR:
2709 jump_func->value.member_cst.pfn = lto_input_tree (ib, data_in);
2710 jump_func->value.member_cst.delta = lto_input_tree (ib, data_in);
2715 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
2716 relevant to indirect inlining to OB. */
/* Counterpart of ipa_read_indirect_edge_info below; field order must
   match.  The polymorphic-only fields are written conditionally.  */
2719 ipa_write_indirect_edge_info (struct output_block *ob,
2720 struct cgraph_edge *cs)
2722 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2723 struct bitpack_d bp;
2725 lto_output_sleb128_stream (ob->main_stream, ii->param_index);
2726 lto_output_sleb128_stream (ob->main_stream, ii->anc_offset);
/* Pack the boolean flags into a single bitpack word.  */
2727 bp = bitpack_create (ob->main_stream);
2728 bp_pack_value (&bp, ii->polymorphic, 1);
2729 lto_output_bitpack (&bp);
/* OBJ_TYPE_REF token and type are only meaningful for virtual calls.  */
2731 if (ii->polymorphic)
2733 lto_output_sleb128_stream (ob->main_stream, ii->otr_token);
2734 lto_output_tree (ob, ii->otr_type, true);
2738 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2739 relevant to indirect inlining from IB. */
/* Mirror of ipa_write_indirect_edge_info above; reads fields in the
   exact order they were streamed out.  */
2742 ipa_read_indirect_edge_info (struct lto_input_block *ib,
/* NOTE(review): DATA_IN is marked ATTRIBUTE_UNUSED yet is passed to
   lto_input_tree below when the edge is polymorphic — the attribute
   looks stale; confirm against upstream.  */
2743 struct data_in *data_in ATTRIBUTE_UNUSED,
2744 struct cgraph_edge *cs)
2746 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2747 struct bitpack_d bp;
2749 ii->param_index = (int) lto_input_sleb128 (ib);
2750 ii->anc_offset = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2751 bp = lto_input_bitpack (ib);
2752 ii->polymorphic = bp_unpack_value (&bp, 1);
/* Polymorphic-only payload, present iff the writer emitted it.  */
2753 if (ii->polymorphic)
2755 ii->otr_token = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2756 ii->otr_type = lto_input_tree (ib, data_in);
2760 /* Stream out NODE info to OB. */
/* Emits, for one cgraph node: its encoder reference, a bitpack of
   per-parameter "used" flags, and the jump functions of every direct
   and indirect call edge (indirect edges additionally carry their
   indirect-call info).  Mirrored by ipa_read_node_info below.  */
2763 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
2766 lto_cgraph_encoder_t encoder;
2767 struct ipa_node_params *info = IPA_NODE_REF (node);
2769 struct cgraph_edge *e;
2770 struct bitpack_d bp;
/* Reference the node through the cgraph encoder so the reader can map
   it back to the right cgraph_node.  */
2772 encoder = ob->decl_state->cgraph_node_encoder;
2773 node_ref = lto_cgraph_encoder_encode (encoder, node);
2774 lto_output_uleb128_stream (ob->main_stream, node_ref);
2776 bp = bitpack_create (ob->main_stream);
/* Only fully analyzed, non-clone nodes are expected here.  */
2777 gcc_assert (info->uses_analysis_done
2778 || ipa_get_param_count (info) == 0);
2779 gcc_assert (!info->node_enqueued);
2780 gcc_assert (!info->ipcp_orig_node);
2781 for (j = 0; j < ipa_get_param_count (info); j++)
2782 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
2783 lto_output_bitpack (&bp);
/* Direct call edges: argument count, then one jump function each.  */
2784 for (e = node->callees; e; e = e->next_callee)
2786 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2788 lto_output_uleb128_stream (ob->main_stream,
2789 ipa_get_cs_argument_count (args));
2790 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2791 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
/* Indirect call edges: same payload plus the indirect-call info.  */
2793 for (e = node->indirect_calls; e; e = e->next_callee)
2795 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2797 lto_output_uleb128_stream (ob->main_stream,
2798 ipa_get_cs_argument_count (args));
2799 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2800 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2801 ipa_write_indirect_edge_info (ob, e);
2805 /* Stream in NODE info from IB. */
/* Mirror of ipa_write_node_info: reads the per-parameter "used" bitpack
   and the jump functions for every direct and indirect edge of NODE,
   in the order the writer emitted them.  NOTE(review): the guard lines
   before the jump-function allocations (presumably `if (count)') are
   elided from this listing.  */
2808 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
2809 struct data_in *data_in)
2811 struct ipa_node_params *info = IPA_NODE_REF (node);
2813 struct cgraph_edge *e;
2814 struct bitpack_d bp;
/* Make sure the parameter descriptors exist before filling them.  */
2816 ipa_initialize_node_params (node);
2818 bp = lto_input_bitpack (ib);
/* The writer asserts uses_analysis_done (or zero params); restore the
   equivalent state on this side.  */
2819 if (ipa_get_param_count (info) != 0)
2820 info->uses_analysis_done = true;
2821 info->node_enqueued = false;
2822 for (k = 0; k < ipa_get_param_count (info); k++)
2823 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
/* Direct call edges: argument count, then the jump functions.  */
2824 for (e = node->callees; e; e = e->next_callee)
2826 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2827 int count = lto_input_uleb128 (ib);
2829 ipa_set_cs_argument_count (args, count);
2833 args->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
2834 (ipa_get_cs_argument_count (args));
2835 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2836 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
/* Indirect call edges: same payload plus the indirect-call info.  */
2838 for (e = node->indirect_calls; e; e = e->next_callee)
2840 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2841 int count = lto_input_uleb128 (ib);
2843 ipa_set_cs_argument_count (args, count);
2846 args->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
2847 (ipa_get_cs_argument_count (args));
2848 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2849 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
2851 ipa_read_indirect_edge_info (ib, data_in, e);
2855 /* Write jump functions for nodes in SET. */
/* Produces the LTO_section_jump_functions section: a count of emitted
   nodes followed by one ipa_write_node_info record per analyzed node
   with a gimple body.  Two passes over SET: first to count, then to
   emit.  NOTE(review): the count++ line inside the first loop is
   elided from this listing.  */
2858 ipa_prop_write_jump_functions (cgraph_node_set set)
2860 struct cgraph_node *node;
2861 struct output_block *ob;
2862 unsigned int count = 0;
2863 cgraph_node_set_iterator csi;
/* Nothing to stream if IPA analysis never ran.  */
2865 if (!ipa_node_params_vector)
2868 ob = create_output_block (LTO_section_jump_functions);
2869 ob->cgraph_node = NULL;
/* Pass 1: count the nodes that will be written.  */
2870 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2872 node = csi_node (csi);
2873 if (cgraph_function_with_gimple_body_p (node)
2874 && IPA_NODE_REF (node) != NULL)
2878 lto_output_uleb128_stream (ob->main_stream, count);
2880 /* Process all of the functions. */
2881 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2883 node = csi_node (csi);
2884 if (cgraph_function_with_gimple_body_p (node)
2885 && IPA_NODE_REF (node) != NULL)
2886 ipa_write_node_info (ob, node);
/* Terminating zero byte, then assemble and tear down the section.  */
2888 lto_output_1_stream (ob->main_stream, 0);
2889 produce_asm (ob, NULL);
2890 destroy_output_block (ob);
2893 /* Read section in file FILE_DATA of length LEN with data DATA. */
/* Parses one LTO jump-functions section: locates the main and string
   streams via the section header offsets, then reads the node count
   and one ipa_read_node_info record per node.  */
2896 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
2899 const struct lto_function_header *header =
2900 (const struct lto_function_header *) data;
/* Sub-stream offsets are laid out header | cfg | main | strings.  */
2901 const int32_t cfg_offset = sizeof (struct lto_function_header);
2902 const int32_t main_offset = cfg_offset + header->cfg_size;
2903 const int32_t string_offset = main_offset + header->main_size;
2904 struct data_in *data_in;
2905 struct lto_input_block ib_main;
2909 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2913 lto_data_in_create (file_data, (const char *) data + string_offset,
2914 header->string_size, NULL);
/* Leading node count written by ipa_prop_write_jump_functions.  */
2915 count = lto_input_uleb128 (&ib_main);
2917 for (i = 0; i < count; i++)
2920 struct cgraph_node *node;
2921 lto_cgraph_encoder_t encoder;
/* Map the streamed encoder reference back to a cgraph node.  */
2923 index = lto_input_uleb128 (&ib_main);
2924 encoder = file_data->cgraph_node_encoder;
2925 node = lto_cgraph_encoder_deref (encoder, index);
2926 gcc_assert (node->analyzed);
2927 ipa_read_node_info (&ib_main, node, data_in);
/* Release the section buffer and the reader state.  */
2929 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
2931 lto_data_in_delete (data_in);
2934 /* Read ipcp jump functions. */
/* Top-level reader: ensures IPA per-node and per-edge vectors exist,
   registers the cgraph hooks, then reads the jump-functions section
   from every LTO file that has one.  NOTE(review): the declarations of
   j and len and the NULL-data guard are elided from this listing.  */
2937 ipa_prop_read_jump_functions (void)
2939 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2940 struct lto_file_decl_data *file_data;
2943 ipa_check_create_node_params ();
2944 ipa_check_create_edge_args ();
2945 ipa_register_cgraph_hooks ();
/* file_data_vec is NULL-terminated.  */
2947 while ((file_data = file_data_vec[j++]))
2950 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
2953 ipa_prop_read_section (file_data, data, len);
2957 /* After merging units, we can get mismatch in argument counts.
2958 Also decl merging might've rendered parameter lists obsolete.
2959 Also compute called_with_variable_arg info. */
2962 ipa_update_after_lto_read (void)
2964 struct cgraph_node *node;
2965 struct cgraph_edge *cs;
2967 ipa_check_create_node_params ();
2968 ipa_check_create_edge_args ();
2970 for (node = cgraph_nodes; node; node = node->next)
2972 ipa_initialize_node_params (node);
2974 for (node = cgraph_nodes; node; node = node->next)
2976 for (cs = node->callees; cs; cs = cs->next_callee)
2978 struct cgraph_node *callee;
2980 callee = cgraph_function_or_thunk_node (cs->callee, NULL);
2981 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
2982 != ipa_get_param_count (IPA_NODE_REF (callee)))
2983 ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));