1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
25 #include "langhooks.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
46 struct param_analysis_info
49 bitmap visited_statements;
52 /* Vector where the parameter infos are actually stored. */
53 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
54 /* Vector where the parameter infos are actually stored. */
55 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
57 /* Bitmap with all UIDs of call graph edges that have been already processed
58 by indirect inlining. */
59 static bitmap iinlining_processed_edges;
61 /* Holders of ipa cgraph hooks: */
62 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
63 static struct cgraph_node_hook_list *node_removal_hook_holder;
64 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
65 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
67 /* Add cgraph NODE described by INFO to the worklist WL regardless of whether
68 it is in one or not. It should almost never be used directly, as opposed to
69 ipa_push_func_to_list. */
72 ipa_push_func_to_list_1 (struct ipa_func_list **wl,
73 struct cgraph_node *node,
74 struct ipa_node_params *info)
76 struct ipa_func_list *temp;
78 info->node_enqueued = 1;
79 temp = XCNEW (struct ipa_func_list);
85 /* Initialize worklist to contain all functions. */
87 struct ipa_func_list *
88 ipa_init_func_list (void)
90 struct cgraph_node *node;
91 struct ipa_func_list * wl;
94 for (node = cgraph_nodes; node; node = node->next)
97 struct ipa_node_params *info = IPA_NODE_REF (node);
98 /* Unreachable nodes should have been eliminated before ipcp and
100 gcc_assert (node->needed || node->reachable);
101 ipa_push_func_to_list_1 (&wl, node, info);
107 /* Remove a function from the worklist WL and return it. */
110 ipa_pop_func_from_list (struct ipa_func_list **wl)
112 struct ipa_node_params *info;
113 struct ipa_func_list *first;
114 struct cgraph_node *node;
121 info = IPA_NODE_REF (node);
122 info->node_enqueued = 0;
126 /* Return index of the formal whose tree is PTREE in function which corresponds
130 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
134 count = ipa_get_param_count (info);
135 for (i = 0; i < count; i++)
136 if (ipa_get_param(info, i) == ptree)
142 /* Populate the param_decl field in parameter descriptors of INFO that
143 corresponds to NODE. */
146 ipa_populate_param_decls (struct cgraph_node *node,
147 struct ipa_node_params *info)
155 fnargs = DECL_ARGUMENTS (fndecl);
157 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
159 info->params[param_num].decl = parm;
164 /* Return how many formal parameters FNDECL has. */
167 count_formal_params_1 (tree fndecl)
172 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
178 /* Count number of formal parameters in NOTE. Store the result to the
179 appropriate field of INFO. */
182 ipa_count_formal_params (struct cgraph_node *node,
183 struct ipa_node_params *info)
187 param_num = count_formal_params_1 (node->decl);
188 ipa_set_param_count (info, param_num);
191 /* Initialize the ipa_node_params structure associated with NODE by counting
192 the function parameters, creating the descriptors and populating their
196 ipa_initialize_node_params (struct cgraph_node *node)
198 struct ipa_node_params *info = IPA_NODE_REF (node);
202 ipa_count_formal_params (node, info);
203 info->params = XCNEWVEC (struct ipa_param_descriptor,
204 ipa_get_param_count (info));
205 ipa_populate_param_decls (node, info);
209 /* Count number of arguments callsite CS has and store it in
210 ipa_edge_args structure corresponding to this callsite. */
213 ipa_count_arguments (struct cgraph_edge *cs)
218 stmt = cs->call_stmt;
219 gcc_assert (is_gimple_call (stmt));
220 arg_num = gimple_call_num_args (stmt);
221 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
222 <= (unsigned) cgraph_edge_max_uid)
223 VEC_safe_grow_cleared (ipa_edge_args_t, gc,
224 ipa_edge_args_vector, cgraph_edge_max_uid + 1);
225 ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
228 /* Print the jump functions associated with call graph edge CS to file F. */
231 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
235 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
236 for (i = 0; i < count; i++)
238 struct ipa_jump_func *jump_func;
239 enum jump_func_type type;
241 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
242 type = jump_func->type;
244 fprintf (f, " param %d: ", i);
245 if (type == IPA_JF_UNKNOWN)
246 fprintf (f, "UNKNOWN\n");
247 else if (type == IPA_JF_KNOWN_TYPE)
249 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
250 fprintf (f, "KNOWN TYPE, type in binfo is: ");
251 print_generic_expr (f, binfo_type, 0);
252 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
254 else if (type == IPA_JF_CONST)
256 tree val = jump_func->value.constant;
257 fprintf (f, "CONST: ");
258 print_generic_expr (f, val, 0);
259 if (TREE_CODE (val) == ADDR_EXPR
260 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
263 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
268 else if (type == IPA_JF_CONST_MEMBER_PTR)
270 fprintf (f, "CONST MEMBER PTR: ");
271 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
273 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
276 else if (type == IPA_JF_PASS_THROUGH)
278 fprintf (f, "PASS THROUGH: ");
279 fprintf (f, "%d, op %s ",
280 jump_func->value.pass_through.formal_id,
282 jump_func->value.pass_through.operation]);
283 if (jump_func->value.pass_through.operation != NOP_EXPR)
284 print_generic_expr (dump_file,
285 jump_func->value.pass_through.operand, 0);
286 fprintf (dump_file, "\n");
288 else if (type == IPA_JF_ANCESTOR)
290 fprintf (f, "ANCESTOR: ");
291 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
292 jump_func->value.ancestor.formal_id,
293 jump_func->value.ancestor.offset);
294 print_generic_expr (f, jump_func->value.ancestor.type, 0);
295 fprintf (dump_file, "\n");
301 /* Print the jump functions of all arguments on all call graph edges going from
305 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
307 struct cgraph_edge *cs;
310 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
311 for (cs = node->callees; cs; cs = cs->next_callee)
313 if (!ipa_edge_args_info_available_for_edge_p (cs))
316 fprintf (f, " callsite %s/%i -> %s/%i : \n",
317 cgraph_node_name (node), node->uid,
318 cgraph_node_name (cs->callee), cs->callee->uid);
319 ipa_print_node_jump_functions_for_edge (f, cs);
322 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
324 if (!ipa_edge_args_info_available_for_edge_p (cs))
329 fprintf (f, " indirect callsite %d for stmt ", i);
330 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
333 fprintf (f, " indirect callsite %d :\n", i);
334 ipa_print_node_jump_functions_for_edge (f, cs);
339 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
342 ipa_print_all_jump_functions (FILE *f)
344 struct cgraph_node *node;
346 fprintf (f, "\nJump functions:\n");
347 for (node = cgraph_nodes; node; node = node->next)
349 ipa_print_node_jump_functions (f, node);
353 /* Structure to be passed in between detect_type_change and
354 check_stmt_for_type_change. */
356 struct type_change_info
358 /* Set to true if dynamic type change has been detected. */
359 bool type_maybe_changed;
362 /* Return true if STMT can modify a virtual method table pointer.
364 This function makes special assumptions about both constructors and
365 destructors which are all the functions that are allowed to alter the VMT
366 pointers. It assumes that destructors begin with assignment into all VMT
367 pointers and that constructors essentially look in the following way:
369 1) The very first thing they do is that they call constructors of ancestor
370 sub-objects that have them.
372 2) Then VMT pointers of this and all its ancestors is set to new values
373 corresponding to the type corresponding to the constructor.
375 3) Only afterwards, other stuff such as constructor of member sub-objects
376 and the code written by the user is run. Only this may include calling
377 virtual functions, directly or indirectly.
379 There is no way to call a constructor of an ancestor sub-object in any
382 This means that we do not have to care whether constructors get the correct
383 type information because they will always change it (in fact, if we define
384 the type to be given by the VMT pointer, it is undefined).
386 The most important fact to derive from the above is that if, for some
387 statement in the section 3, we try to detect whether the dynamic type has
388 changed, we can safely ignore all calls as we examine the function body
389 backwards until we reach statements in section 2 because these calls cannot
390 be ancestor constructors or destructors (if the input is not bogus) and so
391 do not change the dynamic type (this holds true only for automatically
392 allocated objects but at the moment we devirtualize only these). We then
393 must detect that statements in section 2 change the dynamic type and can try
394 to derive the new type. That is enough and we can stop, we will never see
395 the calls into constructors of sub-objects in this code. Therefore we can
396 safely ignore all call statements that we traverse.
400 stmt_may_be_vtbl_ptr_store (gimple stmt)
402 if (is_gimple_call (stmt))
404 else if (is_gimple_assign (stmt))
406 tree lhs = gimple_assign_lhs (stmt);
408 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
410 if (flag_strict_aliasing
411 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
414 if (TREE_CODE (lhs) == COMPONENT_REF
415 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
417 /* In the future we might want to use get_base_ref_and_offset to find
418 if there is a field corresponding to the offset and if so, proceed
419 almost like if it was a component ref. */
425 /* Callback of walk_aliased_vdefs and a helper function for
426 detect_type_change to check whether a particular statement may modify
427 the virtual table pointer, and if possible also determine the new type of
428 the (sub-)object. It stores its result into DATA, which points to a
429 type_change_info structure. */
432 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
434 gimple stmt = SSA_NAME_DEF_STMT (vdef);
435 struct type_change_info *tci = (struct type_change_info *) data;
437 if (stmt_may_be_vtbl_ptr_store (stmt))
439 tci->type_maybe_changed = true;
446 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
447 looking for assignments to its virtual table pointer. If it is, return true
448 and fill in the jump function JFUNC with relevant type information or set it
449 to unknown. ARG is the object itself (not a pointer to it, unless
450 dereferenced). BASE is the base of the memory access as returned by
451 get_ref_base_and_extent, as is the offset. */
454 detect_type_change (tree arg, tree base, gimple call,
455 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
457 struct type_change_info tci;
460 gcc_checking_assert (DECL_P (arg)
461 || TREE_CODE (arg) == MEM_REF
462 || handled_component_p (arg));
463 /* Const calls cannot call virtual methods through VMT and so type changes do
465 if (!flag_devirtualize || !gimple_vuse (call))
468 tci.type_maybe_changed = false;
473 ao.size = POINTER_SIZE;
474 ao.max_size = ao.size;
475 ao.ref_alias_set = -1;
476 ao.base_alias_set = -1;
478 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
480 if (!tci.type_maybe_changed)
483 jfunc->type = IPA_JF_UNKNOWN;
487 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
488 SSA name (its dereference will become the base and the offset is assumed to
492 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
494 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
495 if (!flag_devirtualize
496 || !POINTER_TYPE_P (TREE_TYPE (arg))
497 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
500 arg = build2 (MEM_REF, ptr_type_node, arg,
501 build_int_cst (ptr_type_node, 0));
503 return detect_type_change (arg, arg, call, jfunc, 0);
507 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
508 of an assignment statement STMT, try to find out whether NAME can be
509 described by a (possibly polynomial) pass-through jump-function or an
510 ancestor jump function and if so, write the appropriate function into
514 compute_complex_assign_jump_func (struct ipa_node_params *info,
515 struct ipa_jump_func *jfunc,
516 gimple call, gimple stmt, tree name)
518 HOST_WIDE_INT offset, size, max_size;
519 tree op1, op2, base, ssa;
522 op1 = gimple_assign_rhs1 (stmt);
523 op2 = gimple_assign_rhs2 (stmt);
525 if (TREE_CODE (op1) == SSA_NAME
526 && SSA_NAME_IS_DEFAULT_DEF (op1))
528 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
534 if (!is_gimple_ip_invariant (op2)
535 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
536 && !useless_type_conversion_p (TREE_TYPE (name),
540 jfunc->type = IPA_JF_PASS_THROUGH;
541 jfunc->value.pass_through.formal_id = index;
542 jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
543 jfunc->value.pass_through.operand = op2;
545 else if (gimple_assign_unary_nop_p (stmt)
546 && !detect_type_change_ssa (op1, call, jfunc))
548 jfunc->type = IPA_JF_PASS_THROUGH;
549 jfunc->value.pass_through.formal_id = index;
550 jfunc->value.pass_through.operation = NOP_EXPR;
555 if (TREE_CODE (op1) != ADDR_EXPR)
557 op1 = TREE_OPERAND (op1, 0);
558 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
560 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
561 if (TREE_CODE (base) != MEM_REF
562 /* If this is a varying address, punt. */
566 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
567 ssa = TREE_OPERAND (base, 0);
568 if (TREE_CODE (ssa) != SSA_NAME
569 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
573 /* Dynamic types are changed only in constructors and destructors and */
574 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
576 && !detect_type_change (op1, base, call, jfunc, offset))
578 jfunc->type = IPA_JF_ANCESTOR;
579 jfunc->value.ancestor.formal_id = index;
580 jfunc->value.ancestor.offset = offset;
581 jfunc->value.ancestor.type = TREE_TYPE (op1);
585 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
588 iftmp.1_3 = &obj_2(D)->D.1762;
590 The base of the MEM_REF must be a default definition SSA NAME of a
591 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
592 whole MEM_REF expression is returned and the offset calculated from any
593 handled components and the MEM_REF itself is stored into *OFFSET. The whole
594 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
597 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
599 HOST_WIDE_INT size, max_size;
600 tree expr, parm, obj;
602 if (!gimple_assign_single_p (assign))
604 expr = gimple_assign_rhs1 (assign);
606 if (TREE_CODE (expr) != ADDR_EXPR)
608 expr = TREE_OPERAND (expr, 0);
610 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
612 if (TREE_CODE (expr) != MEM_REF
613 /* If this is a varying address, punt. */
618 parm = TREE_OPERAND (expr, 0);
619 if (TREE_CODE (parm) != SSA_NAME
620 || !SSA_NAME_IS_DEFAULT_DEF (parm)
621 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
624 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
630 /* Given that an actual argument is an SSA_NAME that is a result of a phi
631 statement PHI, try to find out whether NAME is in fact a
632 multiple-inheritance typecast from a descendant into an ancestor of a formal
633 parameter and thus can be described by an ancestor jump function and if so,
634 write the appropriate function into JFUNC.
636 Essentially we want to match the following pattern:
644 iftmp.1_3 = &obj_2(D)->D.1762;
647 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
648 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
652 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
653 struct ipa_jump_func *jfunc,
654 gimple call, gimple phi)
656 HOST_WIDE_INT offset;
658 basic_block phi_bb, assign_bb, cond_bb;
659 tree tmp, parm, expr, obj;
662 if (gimple_phi_num_args (phi) != 2)
665 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
666 tmp = PHI_ARG_DEF (phi, 0);
667 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
668 tmp = PHI_ARG_DEF (phi, 1);
671 if (TREE_CODE (tmp) != SSA_NAME
672 || SSA_NAME_IS_DEFAULT_DEF (tmp)
673 || !POINTER_TYPE_P (TREE_TYPE (tmp))
674 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
677 assign = SSA_NAME_DEF_STMT (tmp);
678 assign_bb = gimple_bb (assign);
679 if (!single_pred_p (assign_bb))
681 expr = get_ancestor_addr_info (assign, &obj, &offset);
684 parm = TREE_OPERAND (expr, 0);
685 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
686 gcc_assert (index >= 0);
688 cond_bb = single_pred (assign_bb);
689 cond = last_stmt (cond_bb);
691 || gimple_code (cond) != GIMPLE_COND
692 || gimple_cond_code (cond) != NE_EXPR
693 || gimple_cond_lhs (cond) != parm
694 || !integer_zerop (gimple_cond_rhs (cond)))
697 phi_bb = gimple_bb (phi);
698 for (i = 0; i < 2; i++)
700 basic_block pred = EDGE_PRED (phi_bb, i)->src;
701 if (pred != assign_bb && pred != cond_bb)
705 if (!detect_type_change (obj, expr, call, jfunc, offset))
707 jfunc->type = IPA_JF_ANCESTOR;
708 jfunc->value.ancestor.formal_id = index;
709 jfunc->value.ancestor.offset = offset;
710 jfunc->value.ancestor.type = TREE_TYPE (obj);
714 /* Given OP which is passed as an actual argument to a called function,
715 determine if it is possible to construct a KNOWN_TYPE jump function for it
716 and if so, create one and store it to JFUNC. */
719 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
722 HOST_WIDE_INT offset, size, max_size;
725 if (!flag_devirtualize
726 || TREE_CODE (op) != ADDR_EXPR
727 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
730 op = TREE_OPERAND (op, 0);
731 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
735 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
736 || is_global_var (base))
739 if (detect_type_change (op, base, call, jfunc, offset))
742 binfo = TYPE_BINFO (TREE_TYPE (base));
745 binfo = get_binfo_at_offset (binfo, offset, TREE_TYPE (op));
748 jfunc->type = IPA_JF_KNOWN_TYPE;
749 jfunc->value.base_binfo = binfo;
754 /* Determine the jump functions of scalar arguments. Scalar means SSA names
755 and constants of a number of selected types. INFO is the ipa_node_params
756 structure associated with the caller, FUNCTIONS is a pointer to an array of
757 jump function structures associated with CALL which is the call statement
761 compute_scalar_jump_functions (struct ipa_node_params *info,
762 struct ipa_jump_func *functions,
768 for (num = 0; num < gimple_call_num_args (call); num++)
770 arg = gimple_call_arg (call, num);
772 if (is_gimple_ip_invariant (arg))
774 functions[num].type = IPA_JF_CONST;
775 functions[num].value.constant = arg;
777 else if (TREE_CODE (arg) == SSA_NAME)
779 if (SSA_NAME_IS_DEFAULT_DEF (arg))
781 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
784 && !detect_type_change_ssa (arg, call, &functions[num]))
786 functions[num].type = IPA_JF_PASS_THROUGH;
787 functions[num].value.pass_through.formal_id = index;
788 functions[num].value.pass_through.operation = NOP_EXPR;
793 gimple stmt = SSA_NAME_DEF_STMT (arg);
794 if (is_gimple_assign (stmt))
795 compute_complex_assign_jump_func (info, &functions[num],
797 else if (gimple_code (stmt) == GIMPLE_PHI)
798 compute_complex_ancestor_jump_func (info, &functions[num],
803 compute_known_type_jump_func (arg, &functions[num], call);
807 /* Inspect the given TYPE and return true iff it has the same structure (the
808 same number of fields of the same types) as a C++ member pointer. If
809 METHOD_PTR and DELTA are non-NULL, store the trees representing the
810 corresponding fields there. */
813 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
817 if (TREE_CODE (type) != RECORD_TYPE)
820 fld = TYPE_FIELDS (type);
821 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
822 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
828 fld = DECL_CHAIN (fld);
829 if (!fld || INTEGRAL_TYPE_P (fld))
834 if (DECL_CHAIN (fld))
840 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
841 boolean variable pointed to by DATA. */
844 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
847 bool *b = (bool *) data;
852 /* Return true if the formal parameter PARM might have been modified in this
853 function before reaching the statement CALL. PARM_INFO is a pointer to a
854 structure containing intermediate information about PARM. */
857 is_parm_modified_before_call (struct param_analysis_info *parm_info,
858 gimple call, tree parm)
860 bool modified = false;
863 if (parm_info->modified)
866 ao_ref_init (&refd, parm);
867 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
868 &modified, &parm_info->visited_statements);
871 parm_info->modified = true;
877 /* Go through arguments of the CALL and for every one that looks like a member
878 pointer, check whether it can be safely declared pass-through and if so,
879 mark that to the corresponding item of jump FUNCTIONS. Return true iff
880 there are non-pass-through member pointers within the arguments. INFO
881 describes formal parameters of the caller. PARMS_INFO is a pointer to a
882 vector containing intermediate information about each formal parameter. */
885 compute_pass_through_member_ptrs (struct ipa_node_params *info,
886 struct param_analysis_info *parms_info,
887 struct ipa_jump_func *functions,
890 bool undecided_members = false;
894 for (num = 0; num < gimple_call_num_args (call); num++)
896 arg = gimple_call_arg (call, num);
898 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
900 if (TREE_CODE (arg) == PARM_DECL)
902 int index = ipa_get_param_decl_index (info, arg);
904 gcc_assert (index >=0);
905 if (!is_parm_modified_before_call (&parms_info[index], call, arg))
907 functions[num].type = IPA_JF_PASS_THROUGH;
908 functions[num].value.pass_through.formal_id = index;
909 functions[num].value.pass_through.operation = NOP_EXPR;
912 undecided_members = true;
915 undecided_members = true;
919 return undecided_members;
922 /* Simple function filling in a member pointer constant jump function (with PFN
923 and DELTA as the constant value) into JFUNC. */
926 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
927 tree pfn, tree delta)
929 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
930 jfunc->value.member_cst.pfn = pfn;
931 jfunc->value.member_cst.delta = delta;
934 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
935 return the rhs of its defining statement. */
938 get_ssa_def_if_simple_copy (tree rhs)
940 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
942 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
944 if (gimple_assign_single_p (def_stmt))
945 rhs = gimple_assign_rhs1 (def_stmt);
952 /* Traverse statements from CALL backwards, scanning whether the argument ARG
953 which is a member pointer is filled in with constant values. If it is, fill
954 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
955 fields of the record type of the member pointer. To give an example, we
956 look for a pattern looking like the following:
958 D.2515.__pfn ={v} printStuff;
959 D.2515.__delta ={v} 0;
960 i_1 = doprinting (D.2515); */
963 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
964 tree delta_field, struct ipa_jump_func *jfunc)
966 gimple_stmt_iterator gsi;
967 tree method = NULL_TREE;
968 tree delta = NULL_TREE;
970 gsi = gsi_for_stmt (call);
973 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
975 gimple stmt = gsi_stmt (gsi);
978 if (!stmt_may_clobber_ref_p (stmt, arg))
980 if (!gimple_assign_single_p (stmt))
983 lhs = gimple_assign_lhs (stmt);
984 rhs = gimple_assign_rhs1 (stmt);
986 if (TREE_CODE (lhs) != COMPONENT_REF
987 || TREE_OPERAND (lhs, 0) != arg)
990 fld = TREE_OPERAND (lhs, 1);
991 if (!method && fld == method_field)
993 rhs = get_ssa_def_if_simple_copy (rhs);
994 if (TREE_CODE (rhs) == ADDR_EXPR
995 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
996 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
998 method = TREE_OPERAND (rhs, 0);
1001 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
1009 if (!delta && fld == delta_field)
1011 rhs = get_ssa_def_if_simple_copy (rhs);
1012 if (TREE_CODE (rhs) == INTEGER_CST)
1017 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
1029 /* Go through the arguments of the CALL and for every member pointer within
1030 tries determine whether it is a constant. If it is, create a corresponding
1031 constant jump function in FUNCTIONS which is an array of jump functions
1032 associated with the call. */
1035 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
1039 tree arg, method_field, delta_field;
1041 for (num = 0; num < gimple_call_num_args (call); num++)
1043 arg = gimple_call_arg (call, num);
1045 if (functions[num].type == IPA_JF_UNKNOWN
1046 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
1048 determine_cst_member_ptr (call, arg, method_field, delta_field,
1053 /* Compute jump function for all arguments of callsite CS and insert the
1054 information in the jump_functions array in the ipa_edge_args corresponding
1055 to this callsite. */
1058 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_info,
1059 struct cgraph_edge *cs)
1061 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1062 struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
1065 if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
1067 arguments->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
1068 (ipa_get_cs_argument_count (arguments));
1070 call = cs->call_stmt;
1071 gcc_assert (is_gimple_call (call));
1073 /* We will deal with constants and SSA scalars first: */
1074 compute_scalar_jump_functions (info, arguments->jump_functions, call);
1076 /* Let's check whether there are any potential member pointers and if so,
1077 whether we can determine their functions as pass_through. */
1078 if (!compute_pass_through_member_ptrs (info, parms_info,
1079 arguments->jump_functions, call))
1082 /* Finally, let's check whether we actually pass a new constant member
1084 compute_cst_member_ptr_arguments (arguments->jump_functions, call);
1087 /* Compute jump functions for all edges - both direct and indirect - outgoing
1088 from NODE. Also count the actual arguments in the process. */
1091 ipa_compute_jump_functions (struct cgraph_node *node,
1092 struct param_analysis_info *parms_info)
1094 struct cgraph_edge *cs;
1096 for (cs = node->callees; cs; cs = cs->next_callee)
1098 /* We do not need to bother analyzing calls to unknown
1099 functions unless they may become known during lto/whopr. */
1100 if (!cs->callee->analyzed && !flag_lto)
1102 ipa_count_arguments (cs);
1103 /* If the descriptor of the callee is not initialized yet, we have to do
1105 if (cs->callee->analyzed)
1106 ipa_initialize_node_params (cs->callee);
1107 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
1108 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
1109 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
1110 ipa_compute_jump_functions_for_edge (parms_info, cs);
1113 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1115 ipa_count_arguments (cs);
1116 ipa_compute_jump_functions_for_edge (parms_info, cs);
1120 /* If RHS looks like a rhs of a statement loading pfn from a member
1121 pointer formal parameter, return the parameter, otherwise return
1122 NULL. If USE_DELTA, then we look for a use of the delta field
1123 rather than the pfn. */
1126 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
1128 tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;
1130 if (TREE_CODE (rhs) == COMPONENT_REF)
1132 ref_field = TREE_OPERAND (rhs, 1);
1133 rhs = TREE_OPERAND (rhs, 0);
1136 ref_field = NULL_TREE;
1137 if (TREE_CODE (rhs) != MEM_REF)
1139 rec = TREE_OPERAND (rhs, 0);
1140 if (TREE_CODE (rec) != ADDR_EXPR)
1142 rec = TREE_OPERAND (rec, 0);
1143 if (TREE_CODE (rec) != PARM_DECL
1144 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1147 ref_offset = TREE_OPERAND (rhs, 1);
1151 if (integer_nonzerop (ref_offset))
1159 return ref_field == fld ? rec : NULL_TREE;
1163 fld_offset = byte_position (delta_field);
1165 fld_offset = byte_position (ptr_field);
1167 return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
1170 /* If STMT looks like a statement loading a value from a member pointer formal
1171 parameter, this function returns that parameter. */
1174 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
1178 if (!gimple_assign_single_p (stmt))
1181 rhs = gimple_assign_rhs1 (stmt);
1182 return ipa_get_member_ptr_load_param (rhs, use_delta);
1185 /* Returns true iff T is an SSA_NAME defined by a statement. */
1188 ipa_is_ssa_with_stmt_def (tree t)
1190 if (TREE_CODE (t) == SSA_NAME
1191 && !SSA_NAME_IS_DEFAULT_DEF (t))
1197 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1198 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1199 indirect call graph edge. */
1201 static struct cgraph_edge *
1202 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1204 struct cgraph_edge *cs;
1206 cs = cgraph_edge (node, stmt);
1207 cs->indirect_info->param_index = param_index;
1208 cs->indirect_info->anc_offset = 0;
1209 cs->indirect_info->polymorphic = 0;
1213 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1214 (described by INFO). PARMS_INFO is a pointer to a vector containing
1215 intermediate information about each formal parameter. Currently it checks
1216 whether the call calls a pointer that is a formal parameter and if so, the
1217 parameter is marked with the called flag and an indirect call graph edge
1218 describing the call is created. This is very simple for ordinary pointers
1219 represented in SSA but not-so-nice when it comes to member pointers. The
1220 ugly part of this function does nothing more than trying to match the
1221 pattern of such a call. An example of such a pattern is the gimple dump
1222 below, the call is on the last line:
1225 f$__delta_5 = f.__delta;
1226 f$__pfn_24 = f.__pfn;
1230 f$__delta_5 = MEM[(struct *)&f];
1231 f$__pfn_24 = MEM[(struct *)&f + 4B];
1233 and a few lines below:
1236 D.2496_3 = (int) f$__pfn_24;
1237 D.2497_4 = D.2496_3 & 1;
1244 D.2500_7 = (unsigned int) f$__delta_5;
1245 D.2501_8 = &S + D.2500_7;
1246 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1247 D.2503_10 = *D.2502_9;
1248 D.2504_12 = f$__pfn_24 + -1;
1249 D.2505_13 = (unsigned int) D.2504_12;
1250 D.2506_14 = D.2503_10 + D.2505_13;
1251 D.2507_15 = *D.2506_14;
1252 iftmp.11_16 = (String:: *) D.2507_15;
1255 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1256 D.2500_19 = (unsigned int) f$__delta_5;
1257 D.2508_20 = &S + D.2500_19;
1258 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1260 Such patterns are results of simple calls to a member pointer:
1262 int doprinting (int (MyString::* f)(int) const)
1264 MyString S ("somestring");
1271 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1272 struct ipa_node_params *info,
1273 struct param_analysis_info *parms_info,
1274 gimple call, tree target)
1279 tree rec, rec2, cond;
1282 basic_block bb, virt_bb, join;
/* Easy case first: the call target is directly the default-def SSA name of
   a formal parameter, i.e. an ordinary call through a pointer parameter.  */
1284 if (SSA_NAME_IS_DEFAULT_DEF (target))
1286 tree var = SSA_NAME_VAR (target);
1287 index = ipa_get_param_decl_index (info, var);
1289 ipa_note_param_call (node, index, call);
1293 /* Now we need to try to match the complex pattern of calling a member
/* Member-pointer calls go through a pointer to METHOD_TYPE; anything else
   cannot be the pattern described in the function comment above.  */
1296 if (!POINTER_TYPE_P (TREE_TYPE (target))
1297 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
/* The call target must be a two-argument PHI merging the virtual and the
   non-virtual branch of the member-pointer dispatch.  */
1300 def = SSA_NAME_DEF_STMT (target);
1301 if (gimple_code (def) != GIMPLE_PHI)
1304 if (gimple_phi_num_args (def) != 2)
1307 /* First, we need to check whether one of these is a load from a member
1308 pointer that is a parameter to this function. */
1309 n1 = PHI_ARG_DEF (def, 0);
1310 n2 = PHI_ARG_DEF (def, 1);
1311 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1313 d1 = SSA_NAME_DEF_STMT (n1);
1314 d2 = SSA_NAME_DEF_STMT (n2);
/* Exactly one PHI argument may load the pfn from a parameter; if both do,
   the pattern is ambiguous and is rejected.  BB is the block with the
   parameter load, VIRT_BB the one computing the virtual-call target.  */
1316 join = gimple_bb (def);
1317 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
1319 if (ipa_get_stmt_member_ptr_load_param (d2, false))
1322 bb = EDGE_PRED (join, 0)->src;
1323 virt_bb = gimple_bb (d2);
1325 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
1327 bb = EDGE_PRED (join, 1)->src;
1328 virt_bb = gimple_bb (d1);
1333 /* Second, we need to check that the basic blocks are laid out in the way
1334 corresponding to the pattern. */
1336 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1337 || single_pred (virt_bb) != bb
1338 || single_succ (virt_bb) != join)
1341 /* Third, let's see that the branching is done depending on the least
1342 significant bit of the pfn. */
1344 branch = last_stmt (bb);
1345 if (!branch || gimple_code (branch) != GIMPLE_COND)
1348 if (gimple_cond_code (branch) != NE_EXPR
1349 || !integer_zerop (gimple_cond_rhs (branch)))
/* The condition must be (pfn & 1) != 0, possibly through a conversion.  */
1352 cond = gimple_cond_lhs (branch);
1353 if (!ipa_is_ssa_with_stmt_def (cond))
1356 def = SSA_NAME_DEF_STMT (cond);
1357 if (!is_gimple_assign (def)
1358 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1359 || !integer_onep (gimple_assign_rhs2 (def)))
1362 cond = gimple_assign_rhs1 (def);
1363 if (!ipa_is_ssa_with_stmt_def (cond))
1366 def = SSA_NAME_DEF_STMT (cond);
/* Skip over an optional conversion before the tested value.  */
1368 if (is_gimple_assign (def)
1369 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1371 cond = gimple_assign_rhs1 (def);
1372 if (!ipa_is_ssa_with_stmt_def (cond))
1374 def = SSA_NAME_DEF_STMT (cond);
/* Whether the tested bit lives in the pfn or the delta field is a target
   property (TARGET_PTRMEMFUNC_VBIT_LOCATION).  */
1377 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1378 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1379 == ptrmemfunc_vbit_in_delta));
/* Pattern matched: note the call, but only if the member-pointer parameter
   is not modified before the call statement.  */
1384 index = ipa_get_param_decl_index (info, rec);
1385 if (index >= 0 && !is_parm_modified_before_call (&parms_info[index],
1387 ipa_note_param_call (node, index, call);
1392 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1393 object referenced in the expression is a formal parameter of the caller
1394 (described by INFO), create a call note for the statement. */
1397 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1398 struct ipa_node_params *info, gimple call,
1401 struct cgraph_edge *cs;
1402 struct cgraph_indirect_call_info *ii;
1403 struct ipa_jump_func jfunc;
1404 tree obj = OBJ_TYPE_REF_OBJECT (target);
1406 HOST_WIDE_INT anc_offset;
/* Virtual-call analysis is only useful when devirtualization is on.  */
1408 if (!flag_devirtualize)
1411 if (TREE_CODE (obj) != SSA_NAME)
/* Case 1: OBJ is the incoming value of a PARM_DECL itself.  */
1414 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1416 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1420 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1421 gcc_assert (index >= 0);
/* Bail out if the dynamic type of the object may have changed between
   function entry and the call.  */
1422 if (detect_type_change_ssa (obj, call, &jfunc))
/* Case 2: OBJ is derived from a parameter through an ancestor (base class)
   access; GET_ANCESTOR_ADDR_INFO recovers the parameter and the offset.  */
1427 gimple stmt = SSA_NAME_DEF_STMT (obj);
1430 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1433 index = ipa_get_param_decl_index (info,
1434 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1435 gcc_assert (index >= 0);
1436 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
/* Record the polymorphic indirect edge together with the OBJ_TYPE_REF
   token and type needed for later devirtualization.  */
1440 cs = ipa_note_param_call (node, index, call);
1441 ii = cs->indirect_info;
1442 ii->anc_offset = anc_offset;
1443 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1444 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1445 ii->polymorphic = 1;
1448 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1449 of the caller (described by INFO). PARMS_INFO is a pointer to a vector
1450 containing intermediate information about each formal parameter. */
1453 ipa_analyze_call_uses (struct cgraph_node *node,
1454 struct ipa_node_params *info,
1455 struct param_analysis_info *parms_info, gimple call)
1457 tree target = gimple_call_fn (call);
/* Dispatch: an SSA_NAME target is an ordinary indirect call (possibly a
   member-pointer call), an OBJ_TYPE_REF target is a C++ virtual call.
   Direct calls (FUNCTION_DECL targets) need no analysis here.  */
1461 if (TREE_CODE (target) == SSA_NAME)
1462 ipa_analyze_indirect_call_uses (node, info, parms_info, call, target);
1463 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1464 ipa_analyze_virtual_call_uses (node, info, call, target);
1468 /* Analyze the call statement STMT with respect to formal parameters (described
1469 in INFO) of caller given by NODE. Currently it only checks whether formal
1470 parameters are called. PARMS_INFO is a pointer to a vector containing
1471 intermediate information about each formal parameter. */
1474 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1475 struct param_analysis_info *parms_info, gimple stmt)
/* Only call statements can use a formal parameter as a call target.  */
1477 if (is_gimple_call (stmt))
1478 ipa_analyze_call_uses (node, info, parms_info, stmt);
1481 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1482 If OP is a parameter declaration, mark it as used in the info structure
1486 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1487 tree op, void *data)
1489 struct ipa_node_params *info = (struct ipa_node_params *) data;
/* Any load, store or address-taken reference whose base is a PARM_DECL
   marks that parameter as used.  DATA is the node's ipa_node_params.  */
1491 op = get_base_address (op);
1493 && TREE_CODE (op) == PARM_DECL)
1495 int index = ipa_get_param_decl_index (info, op);
1496 gcc_assert (index >= 0);
1497 info->params[index].used = true;
1503 /* Scan the function body of NODE and inspect the uses of formal parameters.
1504 Store the findings in various structures of the associated ipa_node_params
1505 structure, such as parameter flags, notes etc. PARMS_INFO is a pointer to a
1506 vector containing intermediate information about each formal parameter. */
1509 ipa_analyze_params_uses (struct cgraph_node *node,
1510 struct param_analysis_info *parms_info)
1512 tree decl = node->decl;
1514 struct function *func;
1515 gimple_stmt_iterator gsi;
1516 struct ipa_node_params *info = IPA_NODE_REF (node);
/* Nothing to do for parameterless functions or when this analysis has
   already run for NODE.  */
1519 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1522 for (i = 0; i < ipa_get_param_count (info); i++)
1524 tree parm = ipa_get_param (info, i);
1525 /* For SSA regs see if parameter is used. For non-SSA we compute
1526 the flag during modification analysis. */
1527 if (is_gimple_reg (parm)
1528 && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
1529 info->params[i].used = true;
/* Walk every statement (skipping debug stmts) looking both for indirect
   calls through parameters and for any reference to a parameter.  */
1532 func = DECL_STRUCT_FUNCTION (decl);
1533 FOR_EACH_BB_FN (bb, func)
1535 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1537 gimple stmt = gsi_stmt (gsi);
1539 if (is_gimple_debug (stmt))
1542 ipa_analyze_stmt_uses (node, info, parms_info, stmt);
1543 walk_stmt_load_store_addr_ops (stmt, info,
1544 visit_ref_for_mod_analysis,
1545 visit_ref_for_mod_analysis,
1546 visit_ref_for_mod_analysis);
/* PHI nodes can also reference parameters, so walk them too.  */
1548 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
1549 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1550 visit_ref_for_mod_analysis,
1551 visit_ref_for_mod_analysis,
1552 visit_ref_for_mod_analysis);
1555 info->uses_analysis_done = 1;
1558 /* Initialize the array describing properties of formal parameters
1559 of NODE, analyze their uses and compute jump functions associated
1560 with actual arguments of calls from within NODE. */
1563 ipa_analyze_node (struct cgraph_node *node)
1565 struct ipa_node_params *info;
1566 struct param_analysis_info *parms_info;
/* Make sure the per-node and per-edge IPA vectors exist, then switch
   cfun/current_function_decl to NODE for the duration of the analysis.  */
1569 ipa_check_create_node_params ();
1570 ipa_check_create_edge_args ();
1571 info = IPA_NODE_REF (node);
1572 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1573 current_function_decl = node->decl;
1574 ipa_initialize_node_params (node);
/* PARMS_INFO is scratch, zero-initialized, alloca'd per-parameter state
   shared by the uses analysis and jump-function computation below.  */
1576 param_count = ipa_get_param_count (info);
1577 parms_info = XALLOCAVEC (struct param_analysis_info, param_count);
1578 memset (parms_info, 0, sizeof (struct param_analysis_info) * param_count);
1580 ipa_analyze_params_uses (node, parms_info);
1581 ipa_compute_jump_functions (node, parms_info);
/* Release the visited-statement bitmaps accumulated in the scratch
   parameter infos.  */
1583 for (i = 0; i < param_count; i++)
1584 if (parms_info[i].visited_statements)
1585 BITMAP_FREE (parms_info[i].visited_statements);
1587 current_function_decl = NULL;
1592 /* Update the jump function DST when the call graph edge corresponding to SRC is
1593 is being inlined, knowing that DST is of type ancestor and src of known
1597 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1598 struct ipa_jump_func *dst)
/* SRC knows the exact type (a binfo); DST asks for an ancestor at a given
   offset.  If the corresponding base binfo can be found, DST collapses to
   a known-type jump function, otherwise it degrades to unknown.  */
1602 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1603 dst->value.ancestor.offset,
1604 dst->value.ancestor.type);
1607 dst->type = IPA_JF_KNOWN_TYPE;
1608 dst->value.base_binfo = new_binfo;
1611 dst->type = IPA_JF_UNKNOWN;
1614 /* Update the jump functions associated with call graph edge E when the call
1615 graph edge CS is being inlined, assuming that E->caller is already (possibly
1616 indirectly) inlined into CS->callee and that E has not been inlined. */
1619 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1620 struct cgraph_edge *e)
1622 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1623 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1624 int count = ipa_get_cs_argument_count (args);
/* Compose each jump function of E with the jump functions of the inlined
   edge CS: E's formal-parameter references now refer to CS's caller.  */
1627 for (i = 0; i < count; i++)
1629 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1631 if (dst->type == IPA_JF_ANCESTOR)
1633 struct ipa_jump_func *src;
1635 /* Variable number of arguments can cause havoc if we try to access
1636 one that does not exist in the inlined edge. So make sure we
1638 if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
1640 dst->type = IPA_JF_UNKNOWN;
/* ancestor o (known_type t)  -> known_type (base of t at o)
   ancestor o (pass_through i) -> ancestor o of caller's parameter i
   ancestor o1 (ancestor o2 i) -> ancestor (o1+o2) i
   anything else               -> unknown.  */
1644 src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
1645 if (src->type == IPA_JF_KNOWN_TYPE)
1646 combine_known_type_and_ancestor_jfs (src, dst);
1647 else if (src->type == IPA_JF_PASS_THROUGH
1648 && src->value.pass_through.operation == NOP_EXPR)
1649 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
1650 else if (src->type == IPA_JF_ANCESTOR)
1652 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
1653 dst->value.ancestor.offset += src->value.ancestor.offset;
1656 dst->type = IPA_JF_UNKNOWN;
1658 else if (dst->type == IPA_JF_PASS_THROUGH)
1660 struct ipa_jump_func *src;
1661 /* We must check range due to calls with variable number of arguments
1662 and we cannot combine jump functions with operations. */
1663 if (dst->value.pass_through.operation == NOP_EXPR
1664 && (dst->value.pass_through.formal_id
1665 < ipa_get_cs_argument_count (top)))
1667 src = ipa_get_ith_jump_func (top,
1668 dst->value.pass_through.formal_id);
/* NOTE(review): the listing elides the lines that copy *SRC into *DST
   for the in-range pass-through case.  */
1672 dst->type = IPA_JF_UNKNOWN;
1677 /* If TARGET is an addr_expr of a function declaration, make it the destination
1678 of an indirect edge IE and return the edge. Otherwise, return NULL. Delta,
1679 if non-NULL, is an integer constant that must be added to this pointer
1680 (first parameter). */
1682 struct cgraph_edge *
1683 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target, tree delta)
1685 struct cgraph_node *callee;
/* Peel an ADDR_EXPR wrapper; only FUNCTION_DECL targets can be turned
   into a direct edge.  */
1687 if (TREE_CODE (target) == ADDR_EXPR)
1688 target = TREE_OPERAND (target, 0);
1689 if (TREE_CODE (target) != FUNCTION_DECL)
1691 callee = cgraph_get_node (target);
1694 ipa_check_create_node_params ();
1696 /* We can not make edges to inline clones. It is bug that someone removed the cgraph
1698 gcc_assert (!callee->global.inlined_to);
/* DELTA, when present, is the this-pointer adjustment to apply on the
   newly direct call.  */
1700 cgraph_make_edge_direct (ie, callee, delta ? tree_low_cst (delta, 0) : 0);
1703 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1704 "(%s/%i -> %s/%i), for stmt ",
1705 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1706 cgraph_node_name (ie->caller), ie->caller->uid,
1707 cgraph_node_name (ie->callee), ie->callee->uid);
1709 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1711 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
1715 fprintf (dump_file, " Thunk delta is ");
1716 print_generic_expr (dump_file, delta, 0);
1717 fprintf (dump_file, "\n");
/* An argument-count mismatch means the callee is called with variable
   arguments through this edge; record that on the callee.  */
1721 if (ipa_get_cs_argument_count (IPA_EDGE_REF (ie))
1722 != ipa_get_param_count (IPA_NODE_REF (callee)))
1723 ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
1728 /* Try to find a destination for indirect edge IE that corresponds to a simple
1729 call or a call of a member function pointer and where the destination is a
1730 pointer formal parameter described by jump function JFUNC. If it can be
1731 determined, return the newly direct edge, otherwise return NULL. */
1733 static struct cgraph_edge *
1734 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1735 struct ipa_jump_func *jfunc)
/* The target is known when the jump function is a plain constant or the
   pfn part of a constant member pointer; anything else stays indirect.  */
1739 if (jfunc->type == IPA_JF_CONST)
1740 target = jfunc->value.constant;
1741 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1742 target = jfunc->value.member_cst.pfn;
1746 return ipa_make_edge_direct_to_target (ie, target, NULL_TREE);
1749 /* Try to find a destination for indirect edge IE that corresponds to a
1750 virtual call based on a formal parameter which is described by jump
1751 function JFUNC and if it can be determined, make it direct and return the
1752 direct edge. Otherwise, return NULL. */
1754 static struct cgraph_edge *
1755 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1756 struct ipa_jump_func *jfunc)
1758 tree binfo, type, target, delta;
1759 HOST_WIDE_INT token;
/* Devirtualization needs the exact type of the object; only a known-type
   jump function provides a binfo to look the method up in.  */
1761 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1762 binfo = jfunc->value.base_binfo;
/* Find the vtable entry: walk down to the ancestor binfo recorded on the
   edge, then resolve OTR token TOKEN within it.  DELTA receives any
   this-adjustment required by a thunk.  */
1769 token = ie->indirect_info->otr_token;
1770 type = ie->indirect_info->otr_type;
1771 binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
1773 target = gimple_get_virt_method_for_binfo (token, binfo, &delta, true);
1778 return ipa_make_edge_direct_to_target (ie, target, delta);
1783 /* Update the param called notes associated with NODE when CS is being inlined,
1784 assuming NODE is (potentially indirectly) inlined into CS->callee.
1785 Moreover, if the callee is discovered to be constant, create a new cgraph
1786 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1787 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1790 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
1791 struct cgraph_node *node,
1792 VEC (cgraph_edge_p, heap) **new_edges)
1794 struct ipa_edge_args *top;
1795 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
1798 ipa_check_create_edge_args ();
1799 top = IPA_EDGE_REF (cs);
/* NEXT_IE is cached because a successful devirtualization below can
   remove IE from the indirect-call list.  */
1801 for (ie = node->indirect_calls; ie; ie = next_ie)
1803 struct cgraph_indirect_call_info *ici = ie->indirect_info;
1804 struct ipa_jump_func *jfunc;
1806 next_ie = ie->next_callee;
1807 if (bitmap_bit_p (iinlining_processed_edges, ie->uid))
1810 /* If we ever use indirect edges for anything other than indirect
1811 inlining, we will need to skip those with negative param_indices. */
1812 if (ici->param_index == -1)
1815 /* We must check range due to calls with variable number of arguments: */
1816 if (ici->param_index >= ipa_get_cs_argument_count (top))
1818 bitmap_set_bit (iinlining_processed_edges, ie->uid);
/* Re-express the edge's parameter index in terms of CS's caller, via the
   jump function CS computed for that argument.  */
1822 jfunc = ipa_get_ith_jump_func (top, ici->param_index);
1823 if (jfunc->type == IPA_JF_PASS_THROUGH
1824 && jfunc->value.pass_through.operation == NOP_EXPR)
1825 ici->param_index = jfunc->value.pass_through.formal_id;
1826 else if (jfunc->type == IPA_JF_ANCESTOR)
1828 ici->param_index = jfunc->value.ancestor.formal_id;
1829 ici->anc_offset += jfunc->value.ancestor.offset;
1832 /* Either we can find a destination for this edge now or never. */
1833 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1835 if (ici->polymorphic)
1836 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
1838 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
1840 if (new_direct_edge)
1842 new_direct_edge->indirect_inlining_edge = 1;
/* Making an edge direct may reallocate the edge-args vector, so TOP is
   refreshed after pushing the new edge.  */
1845 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
1847 top = IPA_EDGE_REF (cs);
1856 /* Recursively traverse subtree of NODE (including node) made of inlined
1857 cgraph_edges when CS has been inlined and invoke
1858 update_indirect_edges_after_inlining on all nodes and
1859 update_jump_functions_after_inlining on all non-inlined edges that lead out
1860 of this subtree. Newly discovered indirect edges will be added to
1861 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1865 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1866 struct cgraph_node *node,
1867 VEC (cgraph_edge_p, heap) **new_edges)
1869 struct cgraph_edge *e;
/* Update NODE's own indirect edges first, then recurse into edges that
   were inlined (inline_failed clear) and re-point jump functions on the
   edges that lead out of the inlined subtree.  */
1872 res = update_indirect_edges_after_inlining (cs, node, new_edges);
1874 for (e = node->callees; e; e = e->next_callee)
1875 if (!e->inline_failed)
1876 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1878 update_jump_functions_after_inlining (cs, e);
1883 /* Update jump functions and call note functions on inlining the call site CS.
1884 CS is expected to lead to a node already cloned by
1885 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1886 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
1890 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1891 VEC (cgraph_edge_p, heap) **new_edges)
1893 /* FIXME lto: We do not stream out indirect call information. */
1897 /* Do nothing if the preparation phase has not been carried out yet
1898 (i.e. during early inlining). */
1899 if (!ipa_node_params_vector)
/* Node params without edge args would be an inconsistent state.  */
1901 gcc_assert (ipa_edge_args_vector);
1903 return propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1906 /* Frees all dynamically allocated structures that the argument info points
1910 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
/* The jump-function array is GC-allocated, hence ggc_free; the struct
   itself is then zeroed so a stale pointer is never seen again.  */
1912 if (args->jump_functions)
1913 ggc_free (args->jump_functions);
1915 memset (args, 0, sizeof (*args));
1918 /* Free all ipa_edge structures. */
1921 ipa_free_all_edge_args (void)
1924 struct ipa_edge_args *args;
/* Free each element's substructures, then the vector itself, and clear
   the global so a later ipa_check_create_edge_args starts fresh.  */
1926 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
1927 ipa_free_edge_args_substructures (args);
1929 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
1930 ipa_edge_args_vector = NULL;
1933 /* Frees all dynamically allocated structures that the param info points
1937 ipa_free_node_params_substructures (struct ipa_node_params *info)
/* The params array is heap-allocated (see duplicate_array users); zero the
   whole struct afterwards to invalidate stale pointers and flags.  */
1939 free (info->params);
1941 memset (info, 0, sizeof (*info));
1944 /* Free all ipa_node_params structures. */
1947 ipa_free_all_node_params (void)
1950 struct ipa_node_params *info;
/* Mirror of ipa_free_all_edge_args for the per-node vector (heap, not gc).  */
1952 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
1953 ipa_free_node_params_substructures (info)
1955 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
1956 ipa_node_params_vector = NULL;
1959 /* Hook that is called by cgraph.c when an edge is removed. */
1962 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
1964 /* During IPA-CP updating we can be called on not-yet analyze clones. */
/* If the vector has no slot for this edge's uid there is nothing to free.  */
1965 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
1966 <= (unsigned)cs->uid)
1968 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1971 /* Hook that is called by cgraph.c when a node is removed. */
1974 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1976 /* During IPA-CP updating we can be called on not-yet analyze clones. */
/* Same guard as in ipa_edge_removal_hook, keyed on the node uid.  */
1977 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
1978 <= (unsigned)node->uid)
1980 ipa_free_node_params_substructures (IPA_NODE_REF (node));
1983 /* Helper function to duplicate an array of size N that is at SRC and store a
1984 pointer to it to DST. Nothing is done if SRC is NULL. */
1987 duplicate_array (void *src, size_t n)
1999 static struct ipa_jump_func *
2000 duplicate_ipa_jump_func_array (const struct ipa_jump_func * src, size_t n)
2002 struct ipa_jump_func *p;
2007 p = ggc_alloc_vec_ipa_jump_func (n);
2008 memcpy (p, src, n * sizeof (struct ipa_jump_func));
2012 /* Hook that is called by cgraph.c when a node is duplicated. */
2015 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2016 __attribute__((unused)) void *data)
2018 struct ipa_edge_args *old_args, *new_args;
2021 ipa_check_create_edge_args ();
/* Clone SRC's argument count and jump-function array onto DST.  */
2023 old_args = IPA_EDGE_REF (src);
2024 new_args = IPA_EDGE_REF (dst);
2026 arg_count = ipa_get_cs_argument_count (old_args);
2027 ipa_set_cs_argument_count (new_args, arg_count);
2028 new_args->jump_functions =
2029 duplicate_ipa_jump_func_array (old_args->jump_functions, arg_count);
/* If indirect inlining already processed SRC, the clone must not be
   processed again either.  */
2031 if (iinlining_processed_edges
2032 && bitmap_bit_p (iinlining_processed_edges, src->uid))
2033 bitmap_set_bit (iinlining_processed_edges, dst->uid);
2036 /* Hook that is called by cgraph.c when a node is duplicated. */
2039 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2040 ATTRIBUTE_UNUSED void *data)
2042 struct ipa_node_params *old_info, *new_info;
2045 ipa_check_create_node_params ();
2046 old_info = IPA_NODE_REF (src);
2047 new_info = IPA_NODE_REF (dst);
2048 param_count = ipa_get_param_count (old_info);
/* Deep-copy the parameter descriptor array; the per-parameter type
   vectors inside it need their own VEC_copy since duplicate_array only
   clones the flat memory.  */
2050 ipa_set_param_count (new_info, param_count);
2051 new_info->params = (struct ipa_param_descriptor *)
2052 duplicate_array (old_info->params,
2053 sizeof (struct ipa_param_descriptor) * param_count);
2054 for (i = 0; i < param_count; i++)
2055 new_info->params[i].types = VEC_copy (tree, heap,
2056 old_info->params[i].types);
2057 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2058 new_info->count_scale = old_info->count_scale;
2060 new_info->called_with_var_arguments = old_info->called_with_var_arguments;
2061 new_info->uses_analysis_done = old_info->uses_analysis_done;
2062 new_info->node_enqueued = old_info->node_enqueued;
2065 /* Register our cgraph hooks if they are not already there. */
2068 ipa_register_cgraph_hooks (void)
/* Each holder doubles as an "already registered" flag, making the
   function idempotent.  */
2070 if (!edge_removal_hook_holder)
2071 edge_removal_hook_holder =
2072 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2073 if (!node_removal_hook_holder)
2074 node_removal_hook_holder =
2075 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2076 if (!edge_duplication_hook_holder)
2077 edge_duplication_hook_holder =
2078 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2079 if (!node_duplication_hook_holder)
2080 node_duplication_hook_holder =
2081 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2084 /* Unregister our cgraph hooks if they are not already there. */
2087 ipa_unregister_cgraph_hooks (void)
/* Remove every hook and null the holders so a later
   ipa_register_cgraph_hooks re-registers them.  */
2089 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2090 edge_removal_hook_holder = NULL;
2091 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2092 node_removal_hook_holder = NULL;
2093 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2094 edge_duplication_hook_holder = NULL;
2095 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2096 node_duplication_hook_holder = NULL;
2099 /* Allocate all necessary data structures necessary for indirect inlining. */
2102 ipa_create_all_structures_for_iinln (void)
/* Allocate the bitmap of indirect edges already handled by indirect
   inlining (freed in ipa_free_all_structures_after_iinln).  */
2104 iinlining_processed_edges = BITMAP_ALLOC (NULL);
2107 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2108 longer needed after ipa-cp. */
2111 ipa_free_all_structures_after_ipa_cp (void)
/* When indirect inlining will still run, it needs these structures, so
   only tear down when that pass is disabled.  */
2113 if (!flag_indirect_inlining)
2115 ipa_free_all_edge_args ();
2116 ipa_free_all_node_params ();
2117 ipa_unregister_cgraph_hooks ();
2121 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2122 longer needed after indirect inlining. */
2125 ipa_free_all_structures_after_iinln (void)
/* Unconditional teardown: indirect inlining is the last consumer.  */
2127 BITMAP_FREE (iinlining_processed_edges);
2129 ipa_free_all_edge_args ();
2130 ipa_free_all_node_params ();
2131 ipa_unregister_cgraph_hooks ();
2134 /* Print ipa_tree_map data structures of all functions in the
2138 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2142 struct ipa_node_params *info;
/* Only analyzed nodes have parameter descriptors to print.  */
2144 if (!node->analyzed)
2146 info = IPA_NODE_REF (node);
2147 fprintf (f, " function %s parameter descriptors:\n",
2148 cgraph_node_name (node));
2149 count = ipa_get_param_count (info);
2150 for (i = 0; i < count; i++)
2152 temp = ipa_get_param (info, i);
2153 if (TREE_CODE (temp) == PARM_DECL)
2154 fprintf (f, " param %d : %s", i,
2156 ? (*lang_hooks.decl_printable_name) (temp, 2)
2158 if (ipa_is_param_used (info, i))
2159 fprintf (f, " used");
2164 /* Print ipa_tree_map data structures of all functions in the
2168 ipa_print_all_params (FILE * f)
2170 struct cgraph_node *node;
/* Dump parameter descriptors of every node in the call graph.  */
2172 fprintf (f, "\nFunction parameters:\n");
2173 for (node = cgraph_nodes; node; node = node->next)
2174 ipa_print_node_params (f, node);
2177 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2180 ipa_get_vector_of_formal_parms (tree fndecl)
2182 VEC(tree, heap) *args;
/* Pre-size the vector by counting first, then push each PARM_DECL in
   declaration order.  Caller owns (and frees) the returned vector.  */
2186 count = count_formal_params_1 (fndecl);
2187 args = VEC_alloc (tree, heap, count);
2188 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2189 VEC_quick_push (tree, args, parm);
2194 /* Return a heap allocated vector containing types of formal parameters of
2195 function type FNTYPE. */
2197 static inline VEC(tree, heap) *
2198 get_vector_of_formal_parm_types (tree fntype)
2200 VEC(tree, heap) *types;
/* Two passes over TYPE_ARG_TYPES: one to count, one to fill the vector.
   Caller owns the returned heap vector.  */
2204 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2207 types = VEC_alloc (tree, heap, count);
2208 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2209 VEC_quick_push (tree, types, TREE_VALUE (t))
2214 /* Modify the function declaration FNDECL and its type according to the plan in
2215 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2216 to reflect the actual parameters being modified which are determined by the
2217 base_index field. */
2220 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2221 const char *synth_parm_prefix)
2223 VEC(tree, heap) *oparms, *otypes;
2224 tree orig_type, new_type = NULL;
2225 tree old_arg_types, t, new_arg_types = NULL;
2226 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2227 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2228 tree new_reversed = NULL;
2229 bool care_for_types, last_parm_void;
2231 if (!synth_parm_prefix)
2232 synth_parm_prefix = "SYNTH";
2234 oparms = ipa_get_vector_of_formal_parms (fndecl);
2235 orig_type = TREE_TYPE (fndecl);
2236 old_arg_types = TYPE_ARG_TYPES (orig_type);
2238 /* The following test is an ugly hack, some functions simply don't have any
2239 arguments in their type. This is probably a bug but well... */
2240 care_for_types = (old_arg_types != NULL_TREE);
2243 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2245 otypes = get_vector_of_formal_parm_types (orig_type);
/* With a trailing void the types vector is one longer than the parms
   vector; otherwise they match exactly.  */
2247 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2249 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2253 last_parm_void = false;
/* Build the new DECL_ARGUMENTS chain (via LINK) and the new argument
   type list according to each adjustment: copy, remove, or synthesize.  */
2257 for (i = 0; i < len; i++)
2259 struct ipa_parm_adjustment *adj;
2262 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2263 parm = VEC_index (tree, oparms, adj->base_index);
2266 if (adj->copy_param)
2269 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2273 link = &DECL_CHAIN (parm);
2275 else if (!adj->remove_param)
/* Synthesize a brand-new PARM_DECL (named with SYNTH_PARM_PREFIX) of
   ADJ->type, optionally passed by reference.  */
2281 ptype = build_pointer_type (adj->type);
2286 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2288 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2290 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2292 DECL_ARTIFICIAL (new_parm) = 1;
2293 DECL_ARG_TYPE (new_parm) = ptype;
2294 DECL_CONTEXT (new_parm) = fndecl;
2295 TREE_USED (new_parm) = 1;
2296 DECL_IGNORED_P (new_parm) = 1;
2297 layout_decl (new_parm, 0);
2299 add_referenced_var (new_parm);
2300 mark_sym_for_renaming (new_parm);
2302 adj->reduction = new_parm;
2306 link = &DECL_CHAIN (new_parm);
/* The type list was built in reverse; restore order and re-append the
   terminating void entry if the original had one.  */
2314 new_reversed = nreverse (new_arg_types);
2318 TREE_CHAIN (new_arg_types) = void_list_node;
2320 new_reversed = void_list_node;
2324 /* Use copy_node to preserve as much as possible from original type
2325 (debug info, attribute lists etc.)
2326 Exception is METHOD_TYPEs must have THIS argument.
2327 When we are asked to remove it, we need to build new FUNCTION_TYPE
2329 if (TREE_CODE (orig_type) != METHOD_TYPE
2330 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
2331 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
2333 new_type = build_distinct_type_copy (orig_type);
2334 TYPE_ARG_TYPES (new_type) = new_reversed;
2339 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2341 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2342 DECL_VINDEX (fndecl) = NULL_TREE;
2345 /* When signature changes, we need to clear builtin info. */
2346 if (DECL_BUILT_IN (fndecl))
2348 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2349 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2352 /* This is a new type, not a copy of an old type. Need to reassociate
2353 variants. We can handle everything except the main variant lazily. */
2354 t = TYPE_MAIN_VARIANT (orig_type);
2357 TYPE_MAIN_VARIANT (new_type) = t;
2358 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2359 TYPE_NEXT_VARIANT (t) = new_type;
2363 TYPE_MAIN_VARIANT (new_type) = new_type;
2364 TYPE_NEXT_VARIANT (new_type) = NULL;
/* Install the rebuilt type; the decl can no longer be virtual.  */
2367 TREE_TYPE (fndecl) = new_type;
2368 DECL_VIRTUAL_P (fndecl) = 0;
2370 VEC_free (tree, heap, otypes);
2371 VEC_free (tree, heap, oparms);
2374 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2375 If this is a directly recursive call, CS must be NULL. Otherwise it must
2376 contain the corresponding call graph edge. */
/* NOTE(review): lines are elided in this extract (numeric gaps); braces and
   some declarations (len, i, loc, callee_decl, new_stmt, prev_base) are not
   visible but are referenced below.  */
2379 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2380 ipa_parm_adjustment_vec adjustments)
2382 VEC(tree, heap) *vargs;
2384 gimple_stmt_iterator gsi;
/* Pre-size the new argument vector to the number of adjustments.  */
2388 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2389 vargs = VEC_alloc (tree, heap, len);
2391 gsi = gsi_for_stmt (stmt);
/* Build the new argument list, one entry per adjustment.  */
2392 for (i = 0; i < len; i++)
2394 struct ipa_parm_adjustment *adj;
2396 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
/* An unchanged parameter: pass the original actual argument through.  */
2398 if (adj->copy_param)
2400 tree arg = gimple_call_arg (stmt, adj->base_index);
2402 VEC_quick_push (tree, vargs, arg);
/* Not removed either, so a new argument must be synthesized from the old
   one according to offset/type/by_ref in ADJ.  */
2404 else if (!adj->remove_param)
2406 tree expr, base, off;
2409 /* We create a new parameter out of the value of the old one, we can
2410 do the following kind of transformations:
2412 - A scalar passed by reference is converted to a scalar passed by
2413 value. (adj->by_ref is false and the type of the original
2414 actual argument is a pointer to a scalar).
2416 - A part of an aggregate is passed instead of the whole aggregate.
2417 The part can be passed either by value or by reference, this is
2418 determined by value of adj->by_ref. Moreover, the code below
2419 handles both situations when the original aggregate is passed by
2420 value (its type is not a pointer) and when it is passed by
2421 reference (it is a pointer to an aggregate).
2423 When the new argument is passed by reference (adj->by_ref is true)
2424 it must be a part of an aggregate and therefore we form it by
2425 simply taking the address of a reference inside the original
/* Offsets are kept in bits; they must be byte-aligned here.  */
2428 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2429 base = gimple_call_arg (stmt, adj->base_index);
2430 loc = EXPR_LOCATION (base);
/* Case 1: the original argument is already a pointer (not a decl whose
   address is taken) -- the offset becomes a simple MEM_REF offset.  */
2432 if (TREE_CODE (base) != ADDR_EXPR
2433 && POINTER_TYPE_P (TREE_TYPE (base)))
2434 off = build_int_cst (adj->alias_ptr_type,
2435 adj->offset / BITS_PER_UNIT);
2438 HOST_WIDE_INT base_offset;
/* Strip the address-of so we can decompose the underlying object.  */
2441 if (TREE_CODE (base) == ADDR_EXPR)
2442 base = TREE_OPERAND (base, 0);
2444 base = get_addr_base_and_unit_offset (base, &base_offset);
2445 /* Aggregate arguments can have non-invariant addresses. */
/* Decomposition failed: fall back to taking the address of the
   original base (prev_base is set on an elided line).  */
2448 base = build_fold_addr_expr (prev_base);
2449 off = build_int_cst (adj->alias_ptr_type,
2450 adj->offset / BITS_PER_UNIT);
/* Base is itself a MEM_REF: fold our offset into its existing one.  */
2452 else if (TREE_CODE (base) == MEM_REF)
2454 off = build_int_cst (adj->alias_ptr_type,
2456 + adj->offset / BITS_PER_UNIT);
2457 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2459 base = TREE_OPERAND (base, 0);
/* Plain decl: combine base_offset with the adjustment offset and take
   the decl's address.  */
2463 off = build_int_cst (adj->alias_ptr_type,
2465 + adj->offset / BITS_PER_UNIT);
2466 base = build_fold_addr_expr (base);
/* Form the replacement argument; if passed by reference, pass its
   address instead of the value.  */
2470 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2472 expr = build_fold_addr_expr (expr);
/* Gimplify the expression right before the call statement.  */
2474 expr = force_gimple_operand_gsi (&gsi, expr,
2476 || is_gimple_reg_type (adj->type),
2477 NULL, true, GSI_SAME_STMT);
2478 VEC_quick_push (tree, vargs, expr);
2482 if (dump_file && (dump_flags & TDF_DETAILS))
2484 fprintf (dump_file, "replacing stmt:");
2485 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
/* For a directly recursive call (CS == NULL) keep the callee from the
   statement itself; otherwise trust the call graph edge.  */
2488 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
2489 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2490 VEC_free (tree, heap, vargs);
/* Carry over everything callers and debuggers rely on: lhs, block,
   location, call flags and the static chain.  */
2491 if (gimple_call_lhs (stmt))
2492 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2494 gimple_set_block (new_stmt, gimple_block (stmt));
2495 if (gimple_has_location (stmt))
2496 gimple_set_location (new_stmt, gimple_location (stmt));
2497 gimple_call_copy_flags (new_stmt, stmt);
2498 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2500 if (dump_file && (dump_flags & TDF_DETAILS))
2502 fprintf (dump_file, "with stmt:");
2503 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2504 fprintf (dump_file, "\n");
2506 gsi_replace (&gsi, new_stmt, true);
/* Keep the call graph edge pointing at the new statement.  */
2508 cgraph_set_call_stmt (cs, new_stmt);
2509 update_ssa (TODO_update_ssa);
2510 free_dominance_info (CDI_DOMINATORS);
2513 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
/* NOTE(review): the `return` statements of this predicate are on elided
   lines (numeric gap after 2527); only the scan loop is visible.  */
2516 index_in_adjustments_multiple_times_p (int base_index,
2517 ipa_parm_adjustment_vec adjustments)
2519 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
/* Linear scan counting occurrences of BASE_INDEX.  */
2522 for (i = 0; i < len; i++)
2524 struct ipa_parm_adjustment *adj;
2525 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2527 if (adj->base_index == base_index)
2539 /* Return adjustments that should have the same effect on function parameters
2540 and call arguments as if they were first changed according to adjustments in
2541 INNER and then by adjustments in OUTER. */
2543 ipa_parm_adjustment_vec
2544 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
2545 ipa_parm_adjustment_vec outer)
2547 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
2548 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
/* `removals` is declared on an elided line; it counts INNER removals.  */
2550 ipa_parm_adjustment_vec adjustments, tmp;
/* Pass 1: copy the surviving (non-removed) INNER entries into TMP so that
   OUTER indices can be mapped through them.  */
2552 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
2553 for (i = 0; i < inlen; i++)
2555 struct ipa_parm_adjustment *n;
2556 n = VEC_index (ipa_parm_adjustment_t, inner, i);
2558 if (n->remove_param)
2561 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
/* Pass 2: compose each OUTER adjustment with the matching TMP entry.  */
2564 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
2565 for (i = 0; i < outlen; i++)
2567 struct ipa_parm_adjustment *r;
2568 struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
2570 struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
2573 gcc_assert (!in->remove_param);
/* OUTER removes the parameter: emit a removal, but only once per
   original base index.  */
2574 if (out->remove_param)
2576 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
2578 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2579 memset (r, 0, sizeof (*r));
2580 r->remove_param = true;
/* Otherwise build the combined adjustment: base index from INNER,
   type from OUTER, offsets composed below.  */
2585 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2586 memset (r, 0, sizeof (*r));
2587 r->base_index = in->base_index;
2588 r->type = out->type;
2590 /* FIXME: Create nonlocal value too. */
/* Offset composition: a copy contributes no offset; two non-copies add.  */
2592 if (in->copy_param && out->copy_param)
2593 r->copy_param = true;
2594 else if (in->copy_param)
2595 r->offset = out->offset;
2596 else if (out->copy_param)
2597 r->offset = in->offset;
2599 r->offset = in->offset + out->offset;
/* Pass 3: re-append INNER's removals so they are not lost.  */
2602 for (i = 0; i < inlen; i++)
2604 struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
2607 if (n->remove_param)
2608 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n)
2611 VEC_free (ipa_parm_adjustment_t, heap, tmp);
2615 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
2616 friendly way, assuming they are meant to be applied to FNDECL. */
2619 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2622 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
/* Fetch the formal parameter decls so base_index can be printed by name.  */
2624 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2626 fprintf (file, "IPA param adjustments: ");
2628 for (i = 0; i < len; i++)
2629 struct ipa_parm_adjustment *adj;
2630 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2633 fprintf (file, " ");
2637 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2638 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
/* Optional fields: printed only when set (guards are on elided lines).  */
2641 fprintf (file, ", base: ");
2642 print_generic_expr (file, adj->base, 0);
2646 fprintf (file, ", reduction: ");
2647 print_generic_expr (file, adj->reduction, 0);
2649 if (adj->new_ssa_base)
2651 fprintf (file, ", new_ssa_base: ");
2652 print_generic_expr (file, adj->new_ssa_base, 0);
2655 if (adj->copy_param)
2656 fprintf (file, ", copy_param");
2657 else if (adj->remove_param)
2658 fprintf (file, ", remove_param");
2660 fprintf (file, ", offset %li", (long) adj->offset);
2662 fprintf (file, ", by_ref");
2663 print_node_brief (file, ", type: ", adj->type, 0);
2664 fprintf (file, "\n");
2666 VEC_free (tree, heap, parms);
2669 /* Stream out jump function JUMP_FUNC to OB. */
/* Writes the type tag first, then a type-specific payload.  Must stay in
   exact sync with ipa_read_jump_function below.  NOTE(review): some `case`
   and `break` lines are elided in this extract (e.g. the IPA_JF_CONST
   label before line 2686).  */
2672 ipa_write_jump_function (struct output_block *ob,
2673 struct ipa_jump_func *jump_func)
2675 lto_output_uleb128_stream (ob->main_stream,
2678 switch (jump_func->type)
2680 case IPA_JF_UNKNOWN:
2682 case IPA_JF_KNOWN_TYPE:
2683 lto_output_tree (ob, jump_func->value.base_binfo, true);
/* IPA_JF_CONST payload: the constant tree.  */
2686 lto_output_tree (ob, jump_func->value.constant, true);
2688 case IPA_JF_PASS_THROUGH:
2689 lto_output_tree (ob, jump_func->value.pass_through.operand, true);
2690 lto_output_uleb128_stream (ob->main_stream,
2691 jump_func->value.pass_through.formal_id);
2692 lto_output_uleb128_stream (ob->main_stream,
2693 jump_func->value.pass_through.operation);
2695 case IPA_JF_ANCESTOR:
2696 lto_output_uleb128_stream (ob->main_stream,
2697 jump_func->value.ancestor.offset);
2698 lto_output_tree (ob, jump_func->value.ancestor.type, true);
2699 lto_output_uleb128_stream (ob->main_stream,
2700 jump_func->value.ancestor.formal_id);
2702 case IPA_JF_CONST_MEMBER_PTR:
2703 lto_output_tree (ob, jump_func->value.member_cst.pfn, true);
2704 lto_output_tree (ob, jump_func->value.member_cst.delta, false);
2709 /* Read in jump function JUMP_FUNC from IB. */
/* Mirror of ipa_write_jump_function: reads the type tag, then the
   type-specific payload in the same order it was written.  */
2712 ipa_read_jump_function (struct lto_input_block *ib,
2713 struct ipa_jump_func *jump_func,
2714 struct data_in *data_in)
2716 jump_func->type = (enum jump_func_type) lto_input_uleb128 (ib);
2718 switch (jump_func->type)
2720 case IPA_JF_UNKNOWN:
2722 case IPA_JF_KNOWN_TYPE:
2723 jump_func->value.base_binfo = lto_input_tree (ib, data_in);
/* IPA_JF_CONST payload (case label on an elided line).  */
2726 jump_func->value.constant = lto_input_tree (ib, data_in);
2728 case IPA_JF_PASS_THROUGH:
2729 jump_func->value.pass_through.operand = lto_input_tree (ib, data_in);
2730 jump_func->value.pass_through.formal_id = lto_input_uleb128 (ib);
2731 jump_func->value.pass_through.operation = (enum tree_code) lto_input_uleb128 (ib);
2733 case IPA_JF_ANCESTOR:
2734 jump_func->value.ancestor.offset = lto_input_uleb128 (ib);
2735 jump_func->value.ancestor.type = lto_input_tree (ib, data_in);
2736 jump_func->value.ancestor.formal_id = lto_input_uleb128 (ib);
2738 case IPA_JF_CONST_MEMBER_PTR:
2739 jump_func->value.member_cst.pfn = lto_input_tree (ib, data_in);
2740 jump_func->value.member_cst.delta = lto_input_tree (ib, data_in);
2745 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
2746 relevant to indirect inlining to OB. */
/* Layout: param_index, anc_offset, a 1-bit `polymorphic` flag, then --
   only for polymorphic calls -- the OBJ_TYPE_REF token and type.  Must
   match ipa_read_indirect_edge_info.  */
2749 ipa_write_indirect_edge_info (struct output_block *ob,
2750 struct cgraph_edge *cs)
2752 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2753 struct bitpack_d bp;
2755 lto_output_sleb128_stream (ob->main_stream, ii->param_index);
2756 lto_output_sleb128_stream (ob->main_stream, ii->anc_offset);
2757 bp = bitpack_create (ob->main_stream);
2758 bp_pack_value (&bp, ii->polymorphic, 1);
2759 lto_output_bitpack (&bp);
2761 if (ii->polymorphic)
2763 lto_output_sleb128_stream (ob->main_stream, ii->otr_token);
2764 lto_output_tree (ob, ii->otr_type, true);
2768 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2769 relevant to indirect inlining from IB. */
2772 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2773 struct data_in *data_in ATTRIBUTE_UNUSED,
2774 struct cgraph_edge *cs)
2776 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2777 struct bitpack_d bp;
2779 ii->param_index = (int) lto_input_sleb128 (ib);
2780 ii->anc_offset = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2781 bp = lto_input_bitpack (ib);
2782 ii->polymorphic = bp_unpack_value (&bp, 1);
2783 if (ii->polymorphic)
2785 ii->otr_token = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2786 ii->otr_type = lto_input_tree (ib, data_in);
2790 /* Stream out NODE info to OB. */
/* Emits, in order: the encoded node reference, a bitpack (var-args flag
   plus one `used` bit per parameter), then per-callee jump functions and
   per-indirect-call edge info.  Mirror of ipa_read_node_info.  */
2793 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
2796 lto_cgraph_encoder_t encoder;
2797 struct ipa_node_params *info = IPA_NODE_REF (node);
2799 struct cgraph_edge *e;
2800 struct bitpack_d bp;
/* Translate NODE into its index in this output unit's encoder.  */
2802 encoder = ob->decl_state->cgraph_node_encoder;
2803 node_ref = lto_cgraph_encoder_encode (encoder, node);
2804 lto_output_uleb128_stream (ob->main_stream, node_ref);
2806 bp = bitpack_create (ob->main_stream);
2807 bp_pack_value (&bp, info->called_with_var_arguments, 1);
/* Only fully analyzed, non-clone nodes may be streamed.  */
2808 gcc_assert (info->uses_analysis_done
2809 || ipa_get_param_count (info) == 0);
2810 gcc_assert (!info->node_enqueued);
2811 gcc_assert (!info->ipcp_orig_node);
2812 for (j = 0; j < ipa_get_param_count (info); j++)
2813 bp_pack_value (&bp, info->params[j].used, 1);
2814 lto_output_bitpack (&bp);
/* Per call-site argument count followed by each jump function.  */
2815 for (e = node->callees; e; e = e->next_callee)
2817 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2819 lto_output_uleb128_stream (ob->main_stream,
2820 ipa_get_cs_argument_count (args));
2821 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2822 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2824 for (e = node->indirect_calls; e; e = e->next_callee)
2825 ipa_write_indirect_edge_info (ob, e);
2828 /* Stream in NODE info from IB. */
/* Mirror of ipa_write_node_info: restores the per-parameter bitpack, then
   the jump functions of every callee edge and the indirect-edge info.
   (The node reference itself is consumed by the caller.)  */
2831 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
2832 struct data_in *data_in)
2834 struct ipa_node_params *info = IPA_NODE_REF (node);
2836 struct cgraph_edge *e;
2837 struct bitpack_d bp;
/* Make sure the params descriptors exist before filling them in.  */
2839 ipa_initialize_node_params (node);
2841 bp = lto_input_bitpack (ib);
2842 info->called_with_var_arguments = bp_unpack_value (&bp, 1);
/* Writer only streams nodes with analysis done (see the asserts there),
   so mark it done whenever there are parameters at all.  */
2843 if (ipa_get_param_count (info) != 0)
2844 info->uses_analysis_done = true;
2845 info->node_enqueued = false;
2846 for (k = 0; k < ipa_get_param_count (info); k++)
2847 info->params[k].used = bp_unpack_value (&bp, 1);
2848 for (e = node->callees; e; e = e->next_callee)
2850 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2851 int count = lto_input_uleb128 (ib);
2853 ipa_set_cs_argument_count (args, count);
/* Allocate and read COUNT jump functions for this call site.  */
2857 args->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
2858 (ipa_get_cs_argument_count (args));
2859 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2860 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
2862 for (e = node->indirect_calls; e; e = e->next_callee)
2863 ipa_read_indirect_edge_info (ib, data_in, e);
2866 /* Write jump functions for nodes in SET. */
/* Two passes over SET: first count the analyzed nodes that have IPA info
   (the count prefixes the section), then stream each of them.  */
2869 ipa_prop_write_jump_functions (cgraph_node_set set)
2871 struct cgraph_node *node;
2872 struct output_block *ob = create_output_block (LTO_section_jump_functions);
2873 unsigned int count = 0;
2874 cgraph_node_set_iterator csi;
2876 ob->cgraph_node = NULL;
/* Pass 1: count eligible nodes.  */
2878 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2880 node = csi_node (csi);
2881 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2885 lto_output_uleb128_stream (ob->main_stream, count);
2887 /* Process all of the functions. */
/* Pass 2: stream the same set of nodes, in the same order.  */
2888 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2890 node = csi_node (csi)
2891 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2892 ipa_write_node_info (ob, node);
/* Terminating zero byte, then emit the section.  */
2894 lto_output_1_stream (ob->main_stream, 0);
2895 produce_asm (ob, NULL);
2896 destroy_output_block (ob);
2899 /* Read section in file FILE_DATA of length LEN with data DATA. */
/* Parses a jump-functions section: LTO function header, then the node
   count, then one ipa_read_node_info record per node.  */
2902 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
/* Compute the offsets of the main and string streams from the header.  */
2905 const struct lto_function_header *header =
2906 (const struct lto_function_header *) data;
2907 const int32_t cfg_offset = sizeof (struct lto_function_header);
2908 const int32_t main_offset = cfg_offset + header->cfg_size;
2909 const int32_t string_offset = main_offset + header->main_size;
2910 struct data_in *data_in;
2911 struct lto_input_block ib_main;
/* `count`, `i` and `index` are declared on elided lines.  */
2915 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2919 lto_data_in_create (file_data, (const char *) data + string_offset,
2920 header->string_size, NULL);
2921 count = lto_input_uleb128 (&ib_main);
2923 for (i = 0; i < count; i++)
2926 struct cgraph_node *node;
2927 lto_cgraph_encoder_t encoder;
/* Map the streamed index back to a call graph node via the per-file
   encoder; it must already be analyzed.  */
2929 index = lto_input_uleb128 (&ib_main);
2930 encoder = file_data->cgraph_node_encoder;
2931 node = lto_cgraph_encoder_deref (encoder, index);
2932 gcc_assert (node->analyzed);
2933 ipa_read_node_info (&ib_main, node, data_in);
2935 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
2937 lto_data_in_delete (data_in);
2940 /* Read ipcp jump functions. */
/* Top-level reader: walks every LTO input file and, when it contains a
   jump-functions section, hands it to ipa_prop_read_section.  */
2943 ipa_prop_read_jump_functions (void)
2945 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2946 struct lto_file_decl_data *file_data;
/* `j` and `len` are declared/initialized on elided lines.  */
2949 ipa_check_create_node_params ();
2950 ipa_check_create_edge_args ();
2951 ipa_register_cgraph_hooks ();
2953 while ((file_data = file_data_vec[j++]))
2956 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
/* NULL data means the file has no such section (guard on elided line).  */
2959 ipa_prop_read_section (file_data, data, len);
2963 /* After merging units, we can get mismatch in argument counts.
2964 Also decl merging might've rendered parameter lists obsolete.
2965 Also compute called_with_variable_arg info. */
2968 ipa_update_after_lto_read (void)
2970 struct cgraph_node *node;
2971 struct cgraph_edge *cs;
2973 ipa_check_create_node_params ();
2974 ipa_check_create_edge_args ();
/* Re-derive parameter descriptors for every node from the merged decls.  */
2976 for (node = cgraph_nodes; node; node = node->next)
2978 ipa_initialize_node_params (node);
/* Any edge whose streamed argument count disagrees with the callee's
   parameter count is treated as a variable-argument call.  */
2980 for (node = cgraph_nodes; node; node = node->next)
2982 for (cs = node->callees; cs; cs = cs->next_callee)
2984 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
2985 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
2986 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
2990 /* Given the jump function JFUNC, compute the lattice LAT that describes the
2991 value coming down the callsite. INFO describes the caller node so that
2992 pass-through jump functions can be evaluated. */
2994 ipa_lattice_from_jfunc (struct ipa_node_params *info, struct ipcp_lattice *lat,
2995 struct ipa_jump_func *jfunc)
2997 if (jfunc->type == IPA_JF_CONST)
2999 lat->type = IPA_CONST_VALUE;
3000 lat->constant = jfunc->value.constant;
3002 else if (jfunc->type == IPA_JF_PASS_THROUGH)
3004 struct ipcp_lattice *caller_lat;
3007 caller_lat = ipa_get_lattice (info, jfunc->value.pass_through.formal_id);
3008 lat->type = caller_lat->type;
3009 if (caller_lat->type != IPA_CONST_VALUE)
3011 cst = caller_lat->constant;
3013 if (jfunc->value.pass_through.operation != NOP_EXPR)
3016 if (TREE_CODE_CLASS (jfunc->value.pass_through.operation)
3018 restype = boolean_type_node;
3020 restype = TREE_TYPE (cst);
3021 cst = fold_binary (jfunc->value.pass_through.operation,
3022 restype, cst, jfunc->value.pass_through.operand);
3024 if (!cst || !is_gimple_ip_invariant (cst))
3025 lat->type = IPA_BOTTOM;
3026 lat->constant = cst;
3028 else if (jfunc->type == IPA_JF_ANCESTOR)
3030 struct ipcp_lattice *caller_lat;
3033 caller_lat = ipa_get_lattice (info, jfunc->value.ancestor.formal_id);
3034 lat->type = caller_lat->type;
3035 if (caller_lat->type != IPA_CONST_VALUE)
3037 if (TREE_CODE (caller_lat->constant) != ADDR_EXPR)
3039 /* This can happen when the constant is a NULL pointer. */
3040 lat->type = IPA_BOTTOM;
3043 t = TREE_OPERAND (caller_lat->constant, 0);
3044 t = build_ref_for_offset (EXPR_LOCATION (t), t,
3045 jfunc->value.ancestor.offset,
3046 jfunc->value.ancestor.type, NULL, false);
3047 lat->constant = build_fold_addr_expr (t);
3050 lat->type = IPA_BOTTOM;