1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
25 #include "langhooks.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
46 struct param_analysis_info
/* Statements already visited when walking virtual defs for this parameter,
   so repeated queries do not rescan them.  */
49 bitmap visited_statements;
/* NOTE(review): is_parm_modified_before_call below also reads and writes a
   'modified' member of this struct; its declaration is not visible in this
   view -- confirm against the full source.  */
52 /* Vector where the parameter infos are actually stored. */
53 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
54 /* Vector where the call-site argument infos (ipa_edge_args) are actually
   stored, indexed by call graph edge UID. */
55 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
57 /* Bitmap with all UIDs of call graph edges that have been already processed
58 by indirect inlining. */
59 static bitmap iinlining_processed_edges;
61 /* Holders of ipa cgraph hooks: */
62 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
63 static struct cgraph_node_hook_list *node_removal_hook_holder;
64 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
65 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
67 /* Add cgraph NODE described by INFO to the worklist WL regardless of whether
68 it is in one or not. It should almost never be used directly, as opposed to
69 ipa_push_func_to_list. */
72 ipa_push_func_to_list_1 (struct ipa_func_list **wl,
73 struct cgraph_node *node,
74 struct ipa_node_params *info)
76 struct ipa_func_list *temp;
/* Mark NODE as enqueued so the checked wrapper (ipa_push_func_to_list) can
   avoid inserting duplicates.  */
78 info->node_enqueued = 1;
/* Prepend a freshly allocated, zeroed list cell to *WL.  */
79 temp = XCNEW (struct ipa_func_list);
85 /* Initialize worklist to contain all functions. */
87 struct ipa_func_list *
88 ipa_init_func_list (void)
90 struct cgraph_node *node;
91 struct ipa_func_list * wl;
/* NOTE(review): WL must be initialized (presumably to NULL) before the loop;
   the initialization is not visible in this view -- confirm.  */
94 for (node = cgraph_nodes; node; node = node->next)
97 struct ipa_node_params *info = IPA_NODE_REF (node);
98 /* Unreachable nodes should have been eliminated before ipcp and
100 gcc_assert (node->needed || node->reachable);
101 ipa_push_func_to_list_1 (&wl, node, info);
107 /* Remove a function from the worklist WL and return it. */
110 ipa_pop_func_from_list (struct ipa_func_list **wl)
112 struct ipa_node_params *info;
113 struct ipa_func_list *first;
114 struct cgraph_node *node;
121 info = IPA_NODE_REF (node);
/* Clear the flag set by ipa_push_func_to_list_1 so the node may be enqueued
   again later.  */
122 info->node_enqueued = 0;
126 /* Return index of the formal whose tree is PTREE in function which corresponds
/* Simple linear scan over the formal-parameter descriptors of INFO; the
   not-found return value (presumably -1) is on a line not visible here --
   confirm against the full source.  */
130 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
134 count = ipa_get_param_count (info);
135 for (i = 0; i < count; i++)
136 if (ipa_get_param(info, i) == ptree)
142 /* Populate the param_decl field in parameter descriptors of INFO that
143 corresponds to NODE. */
146 ipa_populate_param_decls (struct cgraph_node *node,
147 struct ipa_node_params *info)
155 fnargs = DECL_ARGUMENTS (fndecl);
/* Walk the PARM_DECL chain and record each declaration in the matching
   descriptor slot.  */
157 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
159 info->params[param_num].decl = parm;
164 /* Return how many formal parameters FNDECL has. */
167 count_formal_params_1 (tree fndecl)
/* Count entries of the PARM_DECL chain.  */
172 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
178 /* Count number of formal parameters in NODE. Store the result to the
179 appropriate field of INFO. */
182 ipa_count_formal_params (struct cgraph_node *node,
183 struct ipa_node_params *info)
187 param_num = count_formal_params_1 (node->decl);
188 ipa_set_param_count (info, param_num);
191 /* Initialize the ipa_node_params structure associated with NODE by counting
192 the function parameters, creating the descriptors and populating their
196 ipa_initialize_node_params (struct cgraph_node *node)
198 struct ipa_node_params *info = IPA_NODE_REF (node);
202 ipa_count_formal_params (node, info);
/* Allocate one zeroed descriptor per formal parameter and fill in the
   decls.  */
203 info->params = XCNEWVEC (struct ipa_param_descriptor,
204 ipa_get_param_count (info));
205 ipa_populate_param_decls (node, info);
209 /* Count number of arguments callsite CS has and store it in
210 ipa_edge_args structure corresponding to this callsite. */
213 ipa_count_arguments (struct cgraph_edge *cs)
218 stmt = cs->call_stmt;
219 gcc_assert (is_gimple_call (stmt));
220 arg_num = gimple_call_num_args (stmt);
/* Grow the edge-args vector lazily so it can be indexed by edge UID.  */
221 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
222 <= (unsigned) cgraph_edge_max_uid)
223 VEC_safe_grow_cleared (ipa_edge_args_t, gc,
224 ipa_edge_args_vector, cgraph_edge_max_uid + 1);
225 ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
228 /* Print the jump functions associated with call graph edge CS to file F. */
231 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
235 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
/* Dump one line (or a few) per actual argument, keyed by jump function
   kind.  */
236 for (i = 0; i < count; i++)
238 struct ipa_jump_func *jump_func;
239 enum jump_func_type type;
241 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
242 type = jump_func->type;
244 fprintf (f, " param %d: ", i);
245 if (type == IPA_JF_UNKNOWN)
246 fprintf (f, "UNKNOWN\n");
247 else if (type == IPA_JF_KNOWN_TYPE)
249 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
250 fprintf (f, "KNOWN TYPE, type in binfo is: ");
251 print_generic_expr (f, binfo_type, 0);
252 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
254 else if (type == IPA_JF_CONST)
256 tree val = jump_func->value.constant;
257 fprintf (f, "CONST: ");
258 print_generic_expr (f, val, 0);
/* For addresses of CONST_DECLs, also dump the constant's initializer, which
   is the interesting value.  */
259 if (TREE_CODE (val) == ADDR_EXPR
260 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
263 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
268 else if (type == IPA_JF_CONST_MEMBER_PTR)
270 fprintf (f, "CONST MEMBER PTR: ");
271 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
273 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
276 else if (type == IPA_JF_PASS_THROUGH)
278 fprintf (f, "PASS THROUGH: ");
279 fprintf (f, "%d, op %s ",
280 jump_func->value.pass_through.formal_id,
282 jump_func->value.pass_through.operation]);
283 if (jump_func->value.pass_through.operation != NOP_EXPR)
/* NOTE(review): the two statements below write to dump_file while the rest
   of this function honors the F parameter; callers passing a different
   stream get a mixed dump.  They should presumably use F -- flag for fix.  */
284 print_generic_expr (dump_file,
285 jump_func->value.pass_through.operand, 0);
286 fprintf (dump_file, "\n");
288 else if (type == IPA_JF_ANCESTOR)
290 fprintf (f, "ANCESTOR: ");
291 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
292 jump_func->value.ancestor.formal_id,
293 jump_func->value.ancestor.offset)
294 print_generic_expr (f, jump_func->value.ancestor.type, 0);
/* NOTE(review): same dump_file-vs-F inconsistency as above.  */
295 fprintf (dump_file, "\n");
301 /* Print the jump functions of all arguments on all call graph edges going from
305 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
307 struct cgraph_edge *cs;
310 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
/* Direct call sites first...  */
311 for (cs = node->callees; cs; cs = cs->next_callee)
313 if (!ipa_edge_args_info_available_for_edge_p (cs))
316 fprintf (f, " callsite %s/%i -> %s/%i : \n",
317 cgraph_node_name (node), node->uid,
318 cgraph_node_name (cs->callee), cs->callee->uid);
319 ipa_print_node_jump_functions_for_edge (f, cs);
/* ...then indirect ones, numbered in traversal order.  */
322 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
324 if (!ipa_edge_args_info_available_for_edge_p (cs))
329 fprintf (f, " indirect callsite %d for stmt ", i);
330 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
333 fprintf (f, " indirect callsite %d :\n", i);
334 ipa_print_node_jump_functions_for_edge (f, cs);
339 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
342 ipa_print_all_jump_functions (FILE *f)
344 struct cgraph_node *node;
346 fprintf (f, "\nJump functions:\n")
347 for (node = cgraph_nodes; node; node = node->next)
349 ipa_print_node_jump_functions (f, node);
353 /* Structure to be passed in between detect_type_change and
354 check_stmt_for_type_change. */
356 struct type_change_info
358 /* Set to true if dynamic type change has been detected. */
359 bool type_maybe_changed;
362 /* Return true if STMT can modify a virtual method table pointer.
364 This function makes special assumptions about both constructors and
365 destructors which are all the functions that are allowed to alter the VMT
366 pointers. It assumes that destructors begin with assignment into all VMT
367 pointers and that constructors essentially look in the following way:
369 1) The very first thing they do is that they call constructors of ancestor
370 sub-objects that have them.
372 2) Then VMT pointers of this and all its ancestors is set to new values
373 corresponding to the type corresponding to the constructor.
375 3) Only afterwards, other stuff such as constructor of member sub-objects
376 and the code written by the user is run. Only this may include calling
377 virtual functions, directly or indirectly.
379 There is no way to call a constructor of an ancestor sub-object in any
382 This means that we do not have to care whether constructors get the correct
383 type information because they will always change it (in fact, if we define
384 the type to be given by the VMT pointer, it is undefined).
386 The most important fact to derive from the above is that if, for some
387 statement in the section 3, we try to detect whether the dynamic type has
388 changed, we can safely ignore all calls as we examine the function body
389 backwards until we reach statements in section 2 because these calls cannot
390 be ancestor constructors or destructors (if the input is not bogus) and so
391 do not change the dynamic type (this holds true only for automatically
392 allocated objects but at the moment we devirtualize only these). We then
393 must detect that statements in section 2 change the dynamic type and can try
394 to derive the new type. That is enough and we can stop, we will never see
395 the calls into constructors of sub-objects in this code. Therefore we can
396 safely ignore all call statements that we traverse.
400 stmt_may_be_vtbl_ptr_store (gimple stmt)
/* Per the rationale above, calls are ignored during the backward walk.  */
402 if (is_gimple_call (stmt))
404 else if (is_gimple_assign (stmt))
406 tree lhs = gimple_assign_lhs (stmt);
/* Only aggregate stores (or, with strict aliasing, pointer-typed stores)
   can possibly overwrite a vtable pointer.  */
408 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
410 if (flag_strict_aliasing
411 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
/* A store to a non-virtual field cannot clobber the VMT pointer.  */
414 if (TREE_CODE (lhs) == COMPONENT_REF
415 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
417 /* In the future we might want to use get_base_ref_and_offset to find
418 if there is a field corresponding to the offset and if so, proceed
419 almost like if it was a component ref. */
425 /* Callback of walk_aliased_vdefs and a helper function for
426 detect_type_change to check whether a particular statement may modify
427 the virtual table pointer, and if possible also determine the new type of
428 the (sub-)object. It stores its result into DATA, which points to a
429 type_change_info structure. */
432 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
434 gimple stmt = SSA_NAME_DEF_STMT (vdef);
435 struct type_change_info *tci = (struct type_change_info *) data;
/* Conservatively record any statement that might store to a vtable
   pointer.  */
437 if (stmt_may_be_vtbl_ptr_store (stmt))
439 tci->type_maybe_changed = true;
446 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
447 looking for assignments to its virtual table pointer. If it is, return true
448 and fill in the jump function JFUNC with relevant type information or set it
449 to unknown. ARG is the object itself (not a pointer to it, unless
450 dereferenced). BASE is the base of the memory access as returned by
451 get_ref_base_and_extent, as is the offset. */
454 detect_type_change (tree arg, tree base, gimple call,
455 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
457 struct type_change_info tci;
460 gcc_checking_assert (DECL_P (arg)
461 || TREE_CODE (arg) == MEM_REF
462 || handled_component_p (arg));
463 /* Const calls cannot call virtual methods through VMT and so type changes do
465 if (!flag_devirtualize || !gimple_vuse (call))
468 tci.type_maybe_changed = false;
/* Build an ao_ref describing just the pointer-sized VMT slot so the alias
   walk only considers stores that can alias it.  */
473 ao.size = POINTER_SIZE;
474 ao.max_size = ao.size;
475 ao.ref_alias_set = -1;
476 ao.base_alias_set = -1;
/* Walk virtual definitions backwards from CALL looking for a possible
   vtable-pointer store.  */
478 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
480 if (!tci.type_maybe_changed)
/* A change was (possibly) detected: invalidate any type knowledge.  */
483 jfunc->type = IPA_JF_UNKNOWN;
487 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
488 SSA name (its dereference will become the base and the offset is assumed to
492 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
494 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
/* Only pointers to RECORD_TYPEs can carry a vtable; anything else cannot
   change dynamic type.  */
495 if (!flag_devirtualize
496 || !POINTER_TYPE_P (TREE_TYPE (arg))
497 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
/* Synthesize *ARG as a MEM_REF with zero offset and delegate.  */
500 arg = build2 (MEM_REF, ptr_type_node, arg,
501 build_int_cst (ptr_type_node, 0));
503 return detect_type_change (arg, arg, call, jfunc, 0);
507 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
508 of an assignment statement STMT, try to find out whether NAME can be
509 described by a (possibly polynomial) pass-through jump-function or an
510 ancestor jump function and if so, write the appropriate function into
514 compute_complex_assign_jump_func (struct ipa_node_params *info,
515 struct ipa_jump_func *jfunc,
516 gimple call, gimple stmt, tree name)
518 HOST_WIDE_INT offset, size, max_size;
519 tree op1, op2, base, ssa;
522 op1 = gimple_assign_rhs1 (stmt);
523 op2 = gimple_assign_rhs2 (stmt);
/* Case 1: the RHS is (an operation on) a formal parameter's default
   definition -> a pass-through jump function.  */
525 if (TREE_CODE (op1) == SSA_NAME
526 && SSA_NAME_IS_DEFAULT_DEF (op1))
528 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
/* A binary operation qualifies only when the second operand is an
   interprocedural invariant and the result type is compatible (comparisons
   excepted).  */
534 if (!is_gimple_ip_invariant (op2)
535 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
536 && !useless_type_conversion_p (TREE_TYPE (name),
540 jfunc->type = IPA_JF_PASS_THROUGH;
541 jfunc->value.pass_through.formal_id = index;
542 jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
543 jfunc->value.pass_through.operand = op2;
/* A plain copy/nop conversion is a simple pass-through, provided the
   dynamic type cannot have changed.  */
545 else if (gimple_assign_unary_nop_p (stmt)
546 && !detect_type_change_ssa (op1, call, jfunc))
548 jfunc->type = IPA_JF_PASS_THROUGH;
549 jfunc->value.pass_through.formal_id = index;
550 jfunc->value.pass_through.operation = NOP_EXPR;
/* Case 2: the RHS takes the address of a component of a parameter -> an
   ancestor jump function.  */
555 if (TREE_CODE (op1) != ADDR_EXPR)
557 op1 = TREE_OPERAND (op1, 0);
558 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
560 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
561 if (TREE_CODE (base) != MEM_REF
562 /* If this is a varying address, punt. */
/* Fold the MEM_REF's own byte offset into the bit offset.  */
566 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
567 ssa = TREE_OPERAND (base, 0);
568 if (TREE_CODE (ssa) != SSA_NAME
569 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
573 /* Dynamic types are changed only in constructors and destructors and */
574 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
576 && !detect_type_change (op1, base, call, jfunc, offset))
578 jfunc->type = IPA_JF_ANCESTOR;
579 jfunc->value.ancestor.formal_id = index;
580 jfunc->value.ancestor.offset = offset;
581 jfunc->value.ancestor.type = TREE_TYPE (op1);
585 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
588 iftmp.1_3 = &obj_2(D)->D.1762;
590 The base of the MEM_REF must be a default definition SSA NAME of a
591 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
592 whole MEM_REF expression is returned and the offset calculated from any
593 handled components and the MEM_REF itself is stored into *OFFSET. The whole
594 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
597 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
599 HOST_WIDE_INT size, max_size;
600 tree expr, parm, obj;
602 if (!gimple_assign_single_p (assign))
604 expr = gimple_assign_rhs1 (assign);
606 if (TREE_CODE (expr) != ADDR_EXPR)
/* Strip the ADDR_EXPR and decompose what it points to.  */
608 expr = TREE_OPERAND (expr, 0);
610 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
612 if (TREE_CODE (expr) != MEM_REF
613 /* If this is a varying address, punt. */
/* The base must be the incoming (default-def) SSA name of a PARM_DECL.  */
618 parm = TREE_OPERAND (expr, 0);
619 if (TREE_CODE (parm) != SSA_NAME
620 || !SSA_NAME_IS_DEFAULT_DEF (parm)
621 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
/* Add the MEM_REF's constant byte offset, in bits.  */
624 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
630 /* Given that an actual argument is an SSA_NAME that is a result of a phi
631 statement PHI, try to find out whether NAME is in fact a
632 multiple-inheritance typecast from a descendant into an ancestor of a formal
633 parameter and thus can be described by an ancestor jump function and if so,
634 write the appropriate function into JFUNC.
636 Essentially we want to match the following pattern:
644 iftmp.1_3 = &obj_2(D)->D.1762;
647 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
648 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
652 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
653 struct ipa_jump_func *jfunc,
654 gimple call, gimple phi)
656 HOST_WIDE_INT offset;
658 basic_block phi_bb, assign_bb, cond_bb;
659 tree tmp, parm, expr, obj;
/* The PHI must merge exactly two values: a NULL constant and the adjusted
   pointer.  */
662 if (gimple_phi_num_args (phi) != 2)
665 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
666 tmp = PHI_ARG_DEF (phi, 0);
667 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
668 tmp = PHI_ARG_DEF (phi, 1);
/* The non-null value must be a defined pointer SSA name into a record.  */
671 if (TREE_CODE (tmp) != SSA_NAME
672 || SSA_NAME_IS_DEFAULT_DEF (tmp)
673 || !POINTER_TYPE_P (TREE_TYPE (tmp))
674 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
677 assign = SSA_NAME_DEF_STMT (tmp);
678 assign_bb = gimple_bb (assign);
679 if (!single_pred_p (assign_bb))
681 expr = get_ancestor_addr_info (assign, &obj, &offset);
684 parm = TREE_OPERAND (expr, 0);
685 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
686 gcc_assert (index >= 0);
/* The assign block's single predecessor must end with the null test
   "parm != 0" guarding the adjustment.  */
688 cond_bb = single_pred (assign_bb);
689 cond = last_stmt (cond_bb);
691 || gimple_code (cond) != GIMPLE_COND
692 || gimple_cond_code (cond) != NE_EXPR
693 || gimple_cond_lhs (cond) != parm
694 || !integer_zerop (gimple_cond_rhs (cond)))
/* Both PHI predecessors must come from the matched diamond.  */
697 phi_bb = gimple_bb (phi);
698 for (i = 0; i < 2; i++)
700 basic_block pred = EDGE_PRED (phi_bb, i)->src;
701 if (pred != assign_bb && pred != cond_bb)
705 if (!detect_type_change (obj, expr, call, jfunc, offset))
707 jfunc->type = IPA_JF_ANCESTOR;
708 jfunc->value.ancestor.formal_id = index;
709 jfunc->value.ancestor.offset = offset;
710 jfunc->value.ancestor.type = TREE_TYPE (obj);
714 /* Given OP which is passed as an actual argument to a called function,
715 determine if it is possible to construct a KNOWN_TYPE jump function for it
716 and if so, create one and store it to JFUNC. */
719 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
722 HOST_WIDE_INT offset, size, max_size;
/* Only &<record object> candidates are interesting for devirtualization.  */
725 if (!flag_devirtualize
726 || TREE_CODE (op) != ADDR_EXPR
727 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
730 op = TREE_OPERAND (op, 0);
731 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
/* Globals may be overwritten by other translation units, so their dynamic
   type is not reliably known.  */
735 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
736 || is_global_var (base))
/* Bail out if the dynamic type may have changed before the call.  */
739 if (detect_type_change (op, base, call, jfunc, offset))
742 binfo = TYPE_BINFO (TREE_TYPE (base));
745 binfo = get_binfo_at_offset (binfo, offset, TREE_TYPE (op));
748 jfunc->type = IPA_JF_KNOWN_TYPE;
749 jfunc->value.base_binfo = binfo;
754 /* Determine the jump functions of scalar arguments. Scalar means SSA names
755 and constants of a number of selected types. INFO is the ipa_node_params
756 structure associated with the caller, FUNCTIONS is a pointer to an array of
757 jump function structures associated with CALL which is the call statement
761 compute_scalar_jump_functions (struct ipa_node_params *info,
762 struct ipa_jump_func *functions,
768 for (num = 0; num < gimple_call_num_args (call); num++)
770 arg = gimple_call_arg (call, num);
/* Interprocedural invariants become constant jump functions.  */
772 if (is_gimple_ip_invariant (arg))
774 functions[num].type = IPA_JF_CONST;
775 functions[num].value.constant = arg;
777 else if (TREE_CODE (arg) == SSA_NAME)
/* An untouched formal parameter is a simple pass-through.  */
779 if (SSA_NAME_IS_DEFAULT_DEF (arg))
781 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
784 && !detect_type_change_ssa (arg, call, &functions[num]))
786 functions[num].type = IPA_JF_PASS_THROUGH;
787 functions[num].value.pass_through.formal_id = index;
788 functions[num].value.pass_through.operation = NOP_EXPR;
/* Otherwise inspect the defining statement for the more complex
   pass-through / ancestor patterns.  */
793 gimple stmt = SSA_NAME_DEF_STMT (arg);
794 if (is_gimple_assign (stmt))
795 compute_complex_assign_jump_func (info, &functions[num],
797 else if (gimple_code (stmt) == GIMPLE_PHI)
798 compute_complex_ancestor_jump_func (info, &functions[num],
/* Non-SSA, non-invariant arguments may still yield a KNOWN_TYPE.  */
803 compute_known_type_jump_func (arg, &functions[num], call);
807 /* Inspect the given TYPE and return true iff it has the same structure (the
808 same number of fields of the same types) as a C++ member pointer. If
809 METHOD_PTR and DELTA are non-NULL, store the trees representing the
810 corresponding fields there. */
813 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
817 if (TREE_CODE (type) != RECORD_TYPE)
/* First field must be a pointer to a METHOD_TYPE (the __pfn slot).  */
820 fld = TYPE_FIELDS (type);
821 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
822 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
/* Second field is the __delta adjustment.  */
828 fld = DECL_CHAIN (fld);
/* NOTE(review): INTEGRAL_TYPE_P is applied to the FIELD_DECL itself rather
   than to TREE_TYPE (fld); a FIELD_DECL never satisfies it, so this test
   looks inverted/misapplied -- confirm intent against the full source.  */
829 if (!fld || INTEGRAL_TYPE_P (fld))
/* Exactly two fields are allowed.  */
834 if (DECL_CHAIN (fld))
840 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
841 boolean variable pointed to by DATA. */
844 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
847 bool *b = (bool *) data;
852 /* Return true if the formal parameter PARM might have been modified in this
853 function before reaching the statement CALL. PARM_INFO is a pointer to a
854 structure containing intermediate information about PARM. */
857 is_parm_modified_before_call (struct param_analysis_info *parm_info,
858 gimple call, tree parm)
860 bool modified = false;
/* Cached positive answer from a previous query.  */
863 if (parm_info->modified)
/* Walk virtual definitions before CALL; visited_statements caches the
   statements already examined across queries for this parameter.  */
866 ao_ref_init (&refd, parm);
867 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
868 &modified, &parm_info->visited_statements);
871 parm_info->modified = true;
877 /* Go through arguments of the CALL and for every one that looks like a member
878 pointer, check whether it can be safely declared pass-through and if so,
879 mark that to the corresponding item of jump FUNCTIONS. Return true iff
880 there are non-pass-through member pointers within the arguments. INFO
881 describes formal parameters of the caller. PARMS_INFO is a pointer to a
882 vector containing intermediate information about each formal parameter. */
885 compute_pass_through_member_ptrs (struct ipa_node_params *info,
886 struct param_analysis_info *parms_info,
887 struct ipa_jump_func *functions,
890 bool undecided_members = false;
894 for (num = 0; num < gimple_call_num_args (call); num++)
896 arg = gimple_call_arg (call, num);
898 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
/* A caller parameter passed straight through, unmodified up to the call,
   is a pass-through member pointer.  */
900 if (TREE_CODE (arg) == PARM_DECL)
902 int index = ipa_get_param_decl_index (info, arg);
904 gcc_assert (index >=0);
905 if (!is_parm_modified_before_call (&parms_info[index], call, arg))
907 functions[num].type = IPA_JF_PASS_THROUGH;
908 functions[num].value.pass_through.formal_id = index;
909 functions[num].value.pass_through.operation = NOP_EXPR;
/* Otherwise remember there is at least one member pointer we could not
   classify, so constant analysis is still needed.  */
912 undecided_members = true;
915 undecided_members = true;
919 return undecided_members;
922 /* Simple function filling in a member pointer constant jump function (with PFN
923 and DELTA as the constant value) into JFUNC. */
926 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
927 tree pfn, tree delta)
929 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
930 jfunc->value.member_cst.pfn = pfn;
931 jfunc->value.member_cst.delta = delta;
934 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
935 return the rhs of its defining statement. */
938 get_ssa_def_if_simple_copy (tree rhs)
/* Follow chains of single-rhs copies until a non-SSA value or a default
   definition is reached.  */
940 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
942 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
944 if (gimple_assign_single_p (def_stmt))
945 rhs = gimple_assign_rhs1 (def_stmt);
952 /* Traverse statements from CALL backwards, scanning whether the argument ARG
953 which is a member pointer is filled in with constant values. If it is, fill
954 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
955 fields of the record type of the member pointer. To give an example, we
956 look for a pattern looking like the following:
958 D.2515.__pfn ={v} printStuff;
959 D.2515.__delta ={v} 0;
960 i_1 = doprinting (D.2515); */
963 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
964 tree delta_field, struct ipa_jump_func *jfunc)
966 gimple_stmt_iterator gsi;
967 tree method = NULL_TREE;
968 tree delta = NULL_TREE;
970 gsi = gsi_for_stmt (call);
/* Scan backwards from the call, looking for the stores that initialize the
   __pfn and __delta fields of ARG.  */
973 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
975 gimple stmt = gsi_stmt (gsi);
/* Statements that cannot clobber ARG are irrelevant and skipped.  */
978 if (!stmt_may_clobber_ref_p (stmt, arg))
980 if (!gimple_assign_single_p (stmt))
983 lhs = gimple_assign_lhs (stmt);
984 rhs = gimple_assign_rhs1 (stmt);
986 if (TREE_CODE (lhs) != COMPONENT_REF
987 || TREE_OPERAND (lhs, 0) != arg)
990 fld = TREE_OPERAND (lhs, 1);
/* A store of a constant function address into the __pfn field.  */
991 if (!method && fld == method_field)
993 rhs = get_ssa_def_if_simple_copy (rhs);
994 if (TREE_CODE (rhs) == ADDR_EXPR
995 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
996 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
998 method = TREE_OPERAND (rhs, 0);
/* Both halves found: record the constant member pointer.  */
1001 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
/* A store of an integer constant into the __delta field.  */
1009 if (!delta && fld == delta_field)
1011 rhs = get_ssa_def_if_simple_copy (rhs);
1012 if (TREE_CODE (rhs) == INTEGER_CST)
1017 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
1029 /* Go through the arguments of the CALL and for every member pointer within
1030 tries determine whether it is a constant. If it is, create a corresponding
1031 constant jump function in FUNCTIONS which is an array of jump functions
1032 associated with the call. */
1035 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
1039 tree arg, method_field, delta_field;
1041 for (num = 0; num < gimple_call_num_args (call); num++)
1043 arg = gimple_call_arg (call, num);
/* Only arguments still unclassified (IPA_JF_UNKNOWN) with member-pointer
   shape are analyzed.  */
1045 if (functions[num].type == IPA_JF_UNKNOWN
1046 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
1048 determine_cst_member_ptr (call, arg, method_field, delta_field,
1053 /* Compute jump function for all arguments of callsite CS and insert the
1054 information in the jump_functions array in the ipa_edge_args corresponding
1055 to this callsite. */
1058 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_info,
1059 struct cgraph_edge *cs)
1061 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1062 struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
/* Nothing to do for argument-less calls or already-computed edges.  */
1065 if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
1067 arguments->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
1068 (ipa_get_cs_argument_count (arguments));
1070 call = cs->call_stmt;
1071 gcc_assert (is_gimple_call (call));
1073 /* We will deal with constants and SSA scalars first: */
1074 compute_scalar_jump_functions (info, arguments->jump_functions, call);
1076 /* Let's check whether there are any potential member pointers and if so,
1077 whether we can determine their functions as pass_through. */
1078 if (!compute_pass_through_member_ptrs (info, parms_info,
1079 arguments->jump_functions, call))
1082 /* Finally, let's check whether we actually pass a new constant member
1084 compute_cst_member_ptr_arguments (arguments->jump_functions, call);
1087 /* Compute jump functions for all edges - both direct and indirect - outgoing
1088 from NODE. Also count the actual arguments in the process. */
1091 ipa_compute_jump_functions (struct cgraph_node *node,
1092 struct param_analysis_info *parms_info)
1094 struct cgraph_edge *cs;
1096 for (cs = node->callees; cs; cs = cs->next_callee)
1098 /* We do not need to bother analyzing calls to unknown
1099 functions unless they may become known during lto/whopr. */
1100 if (!cs->callee->analyzed && !flag_lto)
1102 ipa_count_arguments (cs);
1103 /* If the descriptor of the callee is not initialized yet, we have to do
1105 if (cs->callee->analyzed)
1106 ipa_initialize_node_params (cs->callee);
/* Mark callees reached with an argument count that does not match their
   formal count (e.g. variadic or mismatched calls).  */
1107 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
1108 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
1109 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
1110 ipa_compute_jump_functions_for_edge (parms_info, cs);
/* Indirect edges have no callee descriptor to cross-check; just compute
   their jump functions.  */
1113 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1115 ipa_count_arguments (cs);
1116 ipa_compute_jump_functions_for_edge (parms_info, cs);
1120 /* If RHS looks like a rhs of a statement loading pfn from a member
1121 pointer formal parameter, return the parameter, otherwise return
1122 NULL. If USE_DELTA, then we look for a use of the delta field
1123 rather than the pfn. */
1126 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
1128 tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;
/* Accept either a COMPONENT_REF into the record or a raw MEM_REF access
   identified by byte offset.  */
1130 if (TREE_CODE (rhs) == COMPONENT_REF)
1132 ref_field = TREE_OPERAND (rhs, 1);
1133 rhs = TREE_OPERAND (rhs, 0);
1136 ref_field = NULL_TREE;
1137 if (TREE_CODE (rhs) != MEM_REF)
1139 rec = TREE_OPERAND (rhs, 0);
1140 if (TREE_CODE (rec) != ADDR_EXPR)
1142 rec = TREE_OPERAND (rec, 0);
/* The base must be a formal parameter whose type is member-pointer-like.  */
1143 if (TREE_CODE (rec) != PARM_DECL
1144 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1147 ref_offset = TREE_OPERAND (rhs, 1);
1151 if (integer_nonzerop (ref_offset))
/* COMPONENT_REF path: the referenced field must be the requested one.  */
1159 return ref_field == fld ? rec : NULL_TREE;
/* MEM_REF path: compare the access offset against the requested field's
   byte position.  */
1163 fld_offset = byte_position (delta_field);
1165 fld_offset = byte_position (ptr_field);
1167 return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
1170 /* If STMT looks like a statement loading a value from a member pointer formal
1171 parameter, this function returns that parameter. */
1174 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
1178 if (!gimple_assign_single_p (stmt))
/* Delegate the RHS pattern matching to ipa_get_member_ptr_load_param.  */
1181 rhs = gimple_assign_rhs1 (stmt);
1182 return ipa_get_member_ptr_load_param (rhs, use_delta);
1185 /* Returns true iff T is an SSA_NAME defined by a statement. */
1188 ipa_is_ssa_with_stmt_def (tree t)
1190 if (TREE_CODE (t) == SSA_NAME
1191 && !SSA_NAME_IS_DEFAULT_DEF (t))
1197 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1198 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1199 indirect call graph edge. */
1201 static struct cgraph_edge *
1202 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1204 struct cgraph_edge *cs;
1206 cs = cgraph_edge (node, stmt);
/* Record that the call target comes directly from formal PARAM_INDEX, with
   no ancestor adjustment and no polymorphic dispatch.  */
1207 cs->indirect_info->param_index = param_index;
1208 cs->indirect_info->anc_offset = 0;
1209 cs->indirect_info->polymorphic = 0;
1213 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1214 (described by INFO). PARMS_INFO is a pointer to a vector containing
1215 intermediate information about each formal parameter. Currently it checks
1216 whether the call calls a pointer that is a formal parameter and if so, the
1217 parameter is marked with the called flag and an indirect call graph edge
1218 describing the call is created. This is very simple for ordinary pointers
1219 represented in SSA but not-so-nice when it comes to member pointers. The
1220 ugly part of this function does nothing more than trying to match the
1221 pattern of such a call. An example of such a pattern is the gimple dump
1222 below, the call is on the last line:
1225 f$__delta_5 = f.__delta;
1226 f$__pfn_24 = f.__pfn;
1230 f$__delta_5 = MEM[(struct *)&f];
1231 f$__pfn_24 = MEM[(struct *)&f + 4B];
1233 and a few lines below:
1236 D.2496_3 = (int) f$__pfn_24;
1237 D.2497_4 = D.2496_3 & 1;
1244 D.2500_7 = (unsigned int) f$__delta_5;
1245 D.2501_8 = &S + D.2500_7;
1246 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1247 D.2503_10 = *D.2502_9;
1248 D.2504_12 = f$__pfn_24 + -1;
1249 D.2505_13 = (unsigned int) D.2504_12;
1250 D.2506_14 = D.2503_10 + D.2505_13;
1251 D.2507_15 = *D.2506_14;
1252 iftmp.11_16 = (String:: *) D.2507_15;
1255 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1256 D.2500_19 = (unsigned int) f$__delta_5;
1257 D.2508_20 = &S + D.2500_19;
1258 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1260 Such patterns are results of simple calls to a member pointer:
1262 int doprinting (int (MyString::* f)(int) const)
1264 MyString S ("somestring");
1271 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1272 struct ipa_node_params *info,
1273 struct param_analysis_info *parms_info,
1274 gimple call, tree target)
1279 tree rec, rec2, cond;
1282 basic_block bb, virt_bb, join;
/* Fast path: a default-def SSA target is the incoming value of a formal
   parameter itself -- note the call directly.  */
1284 if (SSA_NAME_IS_DEFAULT_DEF (target))
1286 tree var = SSA_NAME_VAR (target);
1287 index = ipa_get_param_decl_index (info, var);
1289 ipa_note_param_call (node, index, call);
1293 /* Now we need to try to match the complex pattern of calling a member
/* Member-pointer calls always target a pointer to METHOD_TYPE; anything
   else cannot match the pattern below.  */
1296 if (!POINTER_TYPE_P (TREE_TYPE (target))
1297 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1300 def = SSA_NAME_DEF_STMT (target);
1301 if (gimple_code (def) != GIMPLE_PHI)
1304 if (gimple_phi_num_args (def) != 2)
1307 /* First, we need to check whether one of these is a load from a member
1308 pointer that is a parameter to this function. */
1309 n1 = PHI_ARG_DEF (def, 0);
1310 n2 = PHI_ARG_DEF (def, 1);
1311 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1313 d1 = SSA_NAME_DEF_STMT (n1);
1314 d2 = SSA_NAME_DEF_STMT (n2);
1316 join = gimple_bb (def);
/* Exactly one PHI argument may be a __pfn load from a parameter; the
   other arm is the virtual-call thunk block.  */
1317 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
1319 if (ipa_get_stmt_member_ptr_load_param (d2, false))
1322 bb = EDGE_PRED (join, 0)->src;
1323 virt_bb = gimple_bb (d2);
1325 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
1327 bb = EDGE_PRED (join, 1)->src;
1328 virt_bb = gimple_bb (d1);
1333 /* Second, we need to check that the basic blocks are laid out in the way
1334 corresponding to the pattern. */
1336 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1337 || single_pred (virt_bb) != bb
1338 || single_succ (virt_bb) != join)
1341 /* Third, let's see that the branching is done depending on the least
1342 significant bit of the pfn. */
1344 branch = last_stmt (bb);
1345 if (!branch || gimple_code (branch) != GIMPLE_COND)
1348 if (gimple_cond_code (branch) != NE_EXPR
1349 || !integer_zerop (gimple_cond_rhs (branch)))
1352 cond = gimple_cond_lhs (branch);
1353 if (!ipa_is_ssa_with_stmt_def (cond))
1356 def = SSA_NAME_DEF_STMT (cond);
/* The condition must be (pfn & 1) -- the vbit that distinguishes a plain
   function pointer from a vtable index.  */
1357 if (!is_gimple_assign (def)
1358 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1359 || !integer_onep (gimple_assign_rhs2 (def)))
1362 cond = gimple_assign_rhs1 (def);
1363 if (!ipa_is_ssa_with_stmt_def (cond))
1366 def = SSA_NAME_DEF_STMT (cond);
/* Skip over an optional integral conversion of the pfn value.  */
1368 if (is_gimple_assign (def)
1369 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1371 cond = gimple_assign_rhs1 (def);
1372 if (!ipa_is_ssa_with_stmt_def (cond))
1374 def = SSA_NAME_DEF_STMT (cond);
/* Depending on the target's ptrmemfunc representation the vbit lives in
   the delta field; check the load accordingly.  */
1377 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1378 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1379 == ptrmemfunc_vbit_in_delta));
1384 index = ipa_get_param_decl_index (info, rec);
1385 if (index >= 0 && !is_parm_modified_before_call (&parms_info[index],
1387 ipa_note_param_call (node, index, call);
1392 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1393 object referenced in the expression is a formal parameter of the caller
1394 (described by INFO), create a call note for the statement. */
1397 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1398 struct ipa_node_params *info, gimple call,
1401 struct cgraph_edge *cs;
1402 struct cgraph_indirect_call_info *ii;
1403 struct ipa_jump_func jfunc;
1404 tree obj = OBJ_TYPE_REF_OBJECT (target);
1406 HOST_WIDE_INT anc_offset;
/* Only worth doing when devirtualization is enabled.  */
1408 if (!flag_devirtualize)
1411 if (TREE_CODE (obj) != SSA_NAME)
/* Default-def case: the object is the parameter value itself.  */
1414 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1416 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1420 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1421 gcc_assert (index >= 0);
/* Bail out if the dynamic type may have changed between function entry
   and the call.  */
1422 if (detect_type_change_ssa (obj, call, &jfunc))
/* Otherwise the object may be an ancestor (base subobject) of a
   parameter; recover the parameter and the offset.  */
1427 gimple stmt = SSA_NAME_DEF_STMT (obj);
1430 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1433 index = ipa_get_param_decl_index (info,
1434 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1435 gcc_assert (index >= 0);
1436 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
/* Record the polymorphic indirect call: token + OTR type identify the
   virtual method being called.  */
1440 cs = ipa_note_param_call (node, index, call);
1441 ii = cs->indirect_info;
1442 ii->anc_offset = anc_offset;
1443 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1444 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1445 ii->polymorphic = 1;
1448 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1449 of the caller (described by INFO). PARMS_INFO is a pointer to a vector
1450 containing intermediate information about each formal parameter. */
1453 ipa_analyze_call_uses (struct cgraph_node *node,
1454 struct ipa_node_params *info,
1455 struct param_analysis_info *parms_info, gimple call)
1457 tree target = gimple_call_fn (call);
/* Dispatch on the form of the callee: SSA name => plain indirect call,
   OBJ_TYPE_REF => virtual call; direct calls need no analysis here.  */
1459 if (TREE_CODE (target) == SSA_NAME)
1460 ipa_analyze_indirect_call_uses (node, info, parms_info, call, target);
1461 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1462 ipa_analyze_virtual_call_uses (node, info, call, target);
1466 /* Analyze the call statement STMT with respect to formal parameters (described
1467 in INFO) of caller given by NODE. Currently it only checks whether formal
1468 parameters are called. PARMS_INFO is a pointer to a vector containing
1469 intermediate information about each formal parameter. */
1472 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1473 struct param_analysis_info *parms_info, gimple stmt)
/* Only call statements are interesting for parameter-use analysis.  */
1475 if (is_gimple_call (stmt))
1476 ipa_analyze_call_uses (node, info, parms_info, stmt);
1479 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1480 If OP is a parameter declaration, mark it as used in the info structure
1484 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1485 tree op, void *data)
1487 struct ipa_node_params *info = (struct ipa_node_params *) data;
/* Strip component/array refs down to the base object; only direct
   references to a PARM_DECL mark the parameter as used.  */
1489 op = get_base_address (op);
1491 && TREE_CODE (op) == PARM_DECL)
1493 int index = ipa_get_param_decl_index (info, op);
1494 gcc_assert (index >= 0);
1495 info->params[index].used = true;
1501 /* Scan the function body of NODE and inspect the uses of formal parameters.
1502 Store the findings in various structures of the associated ipa_node_params
1503 structure, such as parameter flags, notes etc. PARMS_INFO is a pointer to a
1504 vector containing intermediate information about each formal parameter. */
1507 ipa_analyze_params_uses (struct cgraph_node *node,
1508 struct param_analysis_info *parms_info)
1510 tree decl = node->decl;
1512 struct function *func;
1513 gimple_stmt_iterator gsi;
1514 struct ipa_node_params *info = IPA_NODE_REF (node);
/* Nothing to do for parameterless functions or if this was already run.  */
1517 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1520 for (i = 0; i < ipa_get_param_count (info); i++)
1522 tree parm = ipa_get_param (info, i);
1523 /* For SSA regs see if parameter is used. For non-SSA we compute
1524 the flag during modification analysis. */
1525 if (is_gimple_reg (parm)
1526 && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
1527 info->params[i].used = true;
/* Walk every non-debug statement and every PHI, marking parameters that
   are loaded, stored or have their address taken.  */
1530 func = DECL_STRUCT_FUNCTION (decl);
1531 FOR_EACH_BB_FN (bb, func)
1533 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1535 gimple stmt = gsi_stmt (gsi);
1537 if (is_gimple_debug (stmt))
1540 ipa_analyze_stmt_uses (node, info, parms_info, stmt);
1541 walk_stmt_load_store_addr_ops (stmt, info,
1542 visit_ref_for_mod_analysis,
1543 visit_ref_for_mod_analysis,
1544 visit_ref_for_mod_analysis);
1546 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
1547 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1548 visit_ref_for_mod_analysis,
1549 visit_ref_for_mod_analysis,
1550 visit_ref_for_mod_analysis);
1553 info->uses_analysis_done = 1;
1556 /* Initialize the array describing properties of of formal parameters
1557 of NODE, analyze their uses and compute jump functions associated
1558 with actual arguments of calls from within NODE. */
1561 ipa_analyze_node (struct cgraph_node *node)
1563 struct ipa_node_params *info;
1564 struct param_analysis_info *parms_info;
1567 ipa_check_create_node_params ();
1568 ipa_check_create_edge_args ();
1569 info = IPA_NODE_REF (node);
/* Switch to NODE's function context for the duration of the analysis.  */
1570 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1571 current_function_decl = node->decl;
1572 ipa_initialize_node_params (node);
/* Scratch per-parameter state lives on the stack (alloca); only the
   visited_statements bitmaps need explicit freeing below.  */
1574 param_count = ipa_get_param_count (info);
1575 parms_info = XALLOCAVEC (struct param_analysis_info, param_count);
1576 memset (parms_info, 0, sizeof (struct param_analysis_info) * param_count);
1578 ipa_analyze_params_uses (node, parms_info);
1579 ipa_compute_jump_functions (node, parms_info);
1581 for (i = 0; i < param_count; i++)
1582 if (parms_info[i].visited_statements)
1583 BITMAP_FREE (parms_info[i].visited_statements);
/* NOTE(review): the matching pop_cfun () call is not visible in this
   fragmentary listing.  */
1585 current_function_decl = NULL;
1590 /* Update the jump function DST when the call graph edge corresponding to SRC is
1591 is being inlined, knowing that DST is of type ancestor and src of known
1595 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1596 struct ipa_jump_func *dst)
/* Walk from SRC's known binfo down to the ancestor offset/type DST asks
   for; on success DST becomes a known-type jump function itself.  */
1600 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1601 dst->value.ancestor.offset,
1602 dst->value.ancestor.type);
1605 dst->type = IPA_JF_KNOWN_TYPE;
1606 dst->value.base_binfo = new_binfo;
/* Lookup failed -- give up on this jump function.  */
1609 dst->type = IPA_JF_UNKNOWN;
1612 /* Update the jump functions associated with call graph edge E when the call
1613 graph edge CS is being inlined, assuming that E->caller is already (possibly
1614 indirectly) inlined into CS->callee and that E has not been inlined. */
1617 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1618 struct cgraph_edge *e)
1620 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1621 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1622 int count = ipa_get_cs_argument_count (args);
1625 for (i = 0; i < count; i++)
1627 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1629 if (dst->type == IPA_JF_ANCESTOR)
1631 struct ipa_jump_func *src;
1633 /* Variable number of arguments can cause havoc if we try to access
1634 one that does not exist in the inlined edge. So make sure we
1636 if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
1638 dst->type = IPA_JF_UNKNOWN;
/* Compose the ancestor jump function with whatever CS passes in the
   referenced formal.  */
1642 src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
1643 if (src->type == IPA_JF_KNOWN_TYPE)
1644 combine_known_type_and_ancestor_jfs (src, dst);
1645 else if (src->type == IPA_JF_PASS_THROUGH
1646 && src->value.pass_through.operation == NOP_EXPR)
1647 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
1648 else if (src->type == IPA_JF_ANCESTOR)
/* ancestor o ancestor: offsets add, formal id is inherited.  */
1650 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
1651 dst->value.ancestor.offset += src->value.ancestor.offset;
1654 dst->type = IPA_JF_UNKNOWN;
1656 else if (dst->type == IPA_JF_PASS_THROUGH)
1658 struct ipa_jump_func *src;
1659 /* We must check range due to calls with variable number of arguments
1660 and we cannot combine jump functions with operations. */
1661 if (dst->value.pass_through.operation == NOP_EXPR
1662 && (dst->value.pass_through.formal_id
1663 < ipa_get_cs_argument_count (top)))
1665 src = ipa_get_ith_jump_func (top,
1666 dst->value.pass_through.formal_id);
1670 dst->type = IPA_JF_UNKNOWN;
1675 /* If TARGET is an addr_expr of a function declaration, make it the destination
1676 of an indirect edge IE and return the edge. Otherwise, return NULL. Delta,
1677 if non-NULL, is an integer constant that must be added to this pointer
1678 (first parameter). */
1680 struct cgraph_edge *
1681 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target, tree delta)
1683 struct cgraph_node *callee;
/* Peel an ADDR_EXPR wrapper; anything but a FUNCTION_DECL underneath
   cannot be made direct.  */
1685 if (TREE_CODE (target) == ADDR_EXPR)
1686 target = TREE_OPERAND (target, 0);
1687 if (TREE_CODE (target) != FUNCTION_DECL)
1689 callee = cgraph_get_node (target);
1692 ipa_check_create_node_params ();
1694 /* We can not make edges to inline clones. It is bug that someone removed the cgraph
1696 gcc_assert (!callee->global.inlined_to);
1698 cgraph_make_edge_direct (ie, callee, delta ? tree_low_cst (delta, 0) : 0);
1701 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1702 "(%s/%i -> %s/%i), for stmt ",
1703 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1704 cgraph_node_name (ie->caller), ie->caller->uid,
1705 cgraph_node_name (ie->callee), ie->callee->uid);
1707 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1709 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
1713 fprintf (dump_file, " Thunk delta is ");
1714 print_generic_expr (dump_file, delta, 0);
1715 fprintf (dump_file, "\n");
/* Flag the callee if the new direct call passes a different number of
   arguments than the callee's formal parameter count.  */
1719 if (ipa_get_cs_argument_count (IPA_EDGE_REF (ie))
1720 != ipa_get_param_count (IPA_NODE_REF (callee)))
1721 ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
1726 /* Try to find a destination for indirect edge IE that corresponds to a simple
1727 call or a call of a member function pointer and where the destination is a
1728 pointer formal parameter described by jump function JFUNC. If it can be
1729 determined, return the newly direct edge, otherwise return NULL. */
1731 static struct cgraph_edge *
1732 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1733 struct ipa_jump_func *jfunc)
/* The target is known only for constant and constant-member-pointer
   jump functions; either way, no this-pointer delta is needed.  */
1737 if (jfunc->type == IPA_JF_CONST)
1738 target = jfunc->value.constant;
1739 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1740 target = jfunc->value.member_cst.pfn;
1744 return ipa_make_edge_direct_to_target (ie, target, NULL_TREE);
1747 /* Try to find a destination for indirect edge IE that corresponds to a
1748 virtual call based on a formal parameter which is described by jump
1749 function JFUNC and if it can be determined, make it direct and return the
1750 direct edge. Otherwise, return NULL. */
1752 static struct cgraph_edge *
1753 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1754 struct ipa_jump_func *jfunc)
1756 tree binfo, type, target, delta;
1757 HOST_WIDE_INT token;
/* Devirtualization needs the dynamic type of the object; only a
   known-type jump function provides it.  */
1759 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1760 binfo = jfunc->value.base_binfo;
/* Translate the binfo to the ancestor at the recorded offset, then look
   up the virtual method for the OBJ_TYPE_REF token.  */
1767 token = ie->indirect_info->otr_token;
1768 type = ie->indirect_info->otr_type;
1769 binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
1771 target = gimple_get_virt_method_for_binfo (token, binfo, &delta, true);
1776 return ipa_make_edge_direct_to_target (ie, target, delta);
1781 /* Update the param called notes associated with NODE when CS is being inlined,
1782 assuming NODE is (potentially indirectly) inlined into CS->callee.
1783 Moreover, if the callee is discovered to be constant, create a new cgraph
1784 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1785 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1788 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
1789 struct cgraph_node *node,
1790 VEC (cgraph_edge_p, heap) **new_edges)
1792 struct ipa_edge_args *top;
1793 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
1796 ipa_check_create_edge_args ();
1797 top = IPA_EDGE_REF (cs);
/* NEXT_IE is cached up front because edges may be removed/redirected
   while we iterate.  */
1799 for (ie = node->indirect_calls; ie; ie = next_ie)
1801 struct cgraph_indirect_call_info *ici = ie->indirect_info;
1802 struct ipa_jump_func *jfunc;
1804 next_ie = ie->next_callee;
1805 if (bitmap_bit_p (iinlining_processed_edges, ie->uid))
1808 /* If we ever use indirect edges for anything other than indirect
1809 inlining, we will need to skip those with negative param_indices. */
1810 if (ici->param_index == -1)
1813 /* We must check range due to calls with variable number of arguments: */
1814 if (ici->param_index >= ipa_get_cs_argument_count (top))
1816 bitmap_set_bit (iinlining_processed_edges, ie->uid);
/* Re-home the parameter index through CS's jump function for it.  */
1820 jfunc = ipa_get_ith_jump_func (top, ici->param_index);
1821 if (jfunc->type == IPA_JF_PASS_THROUGH
1822 && jfunc->value.pass_through.operation == NOP_EXPR)
1823 ici->param_index = jfunc->value.pass_through.formal_id;
1824 else if (jfunc->type == IPA_JF_ANCESTOR)
1826 ici->param_index = jfunc->value.ancestor.formal_id;
1827 ici->anc_offset += jfunc->value.ancestor.offset;
1830 /* Either we can find a destination for this edge now or never. */
1831 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1833 if (ici->polymorphic)
1834 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
1836 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
1838 if (new_direct_edge)
1840 new_direct_edge->indirect_inlining_edge = 1;
1843 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
/* Reload TOP: edge-args vector may have been reallocated by pushes.  */
1845 top = IPA_EDGE_REF (cs);
1854 /* Recursively traverse subtree of NODE (including node) made of inlined
1855 cgraph_edges when CS has been inlined and invoke
1856 update_indirect_edges_after_inlining on all nodes and
1857 update_jump_functions_after_inlining on all non-inlined edges that lead out
1858 of this subtree. Newly discovered indirect edges will be added to
1859 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1863 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1864 struct cgraph_node *node,
1865 VEC (cgraph_edge_p, heap) **new_edges)
1867 struct cgraph_edge *e;
1870 res = update_indirect_edges_after_inlining (cs, node, new_edges);
/* Recurse into already-inlined callees; edges that stay non-inlined
   only have their jump functions recomposed.  */
1872 for (e = node->callees; e; e = e->next_callee)
1873 if (!e->inline_failed)
1874 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1876 update_jump_functions_after_inlining (cs, e);
1881 /* Update jump functions and call note functions on inlining the call site CS.
1882 CS is expected to lead to a node already cloned by
1883 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1884 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
1888 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1889 VEC (cgraph_edge_p, heap) **new_edges)
1891 /* FIXME lto: We do not stream out indirect call information. */
1895 /* Do nothing if the preparation phase has not been carried out yet
1896 (i.e. during early inlining). */
1897 if (!ipa_node_params_vector)
1899 gcc_assert (ipa_edge_args_vector);
1901 return propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1904 /* Frees all dynamically allocated structures that the argument info points
1908 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
1910 if (args->jump_functions)
1911 ggc_free (args->jump_functions);
/* Zero the whole structure so stale pointers cannot be reused.  */
1913 memset (args, 0, sizeof (*args));
1916 /* Free all ipa_edge structures. */
1919 ipa_free_all_edge_args (void)
1922 struct ipa_edge_args *args;
/* Free the per-edge substructures, then drop the vector itself.  */
1924 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
1925 ipa_free_edge_args_substructures (args);
1927 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
1928 ipa_edge_args_vector = NULL;
1931 /* Frees all dynamically allocated structures that the param info points
1935 ipa_free_node_params_substructures (struct ipa_node_params *info)
1938 free (info->params);
/* Zero the whole structure so stale pointers cannot be reused.  */
1940 memset (info, 0, sizeof (*info));
1943 /* Free all ipa_node_params structures. */
1946 ipa_free_all_node_params (void)
1949 struct ipa_node_params *info;
/* Free the per-node substructures, then drop the vector itself.  */
1951 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
1952 ipa_free_node_params_substructures (info);
1954 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
1955 ipa_node_params_vector = NULL;
1958 /* Hook that is called by cgraph.c when an edge is removed. */
1961 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
1963 /* During IPA-CP updating we can be called on not-yet analyze clones. */
/* An edge uid beyond the vector length has no args entry to free.  */
1964 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
1965 <= (unsigned)cs->uid)
1967 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1970 /* Hook that is called by cgraph.c when a node is removed. */
1973 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1975 /* During IPA-CP updating we can be called on not-yet analyze clones. */
/* A node uid beyond the vector length has no params entry to free.  */
1976 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
1977 <= (unsigned)node->uid)
1979 ipa_free_node_params_substructures (IPA_NODE_REF (node));
1982 /* Helper function to duplicate an array of size N that is at SRC and store a
1983 pointer to it to DST. Nothing is done if SRC is NULL. */
/* NOTE(review): the body of this helper is not visible in this
   fragmentary listing -- only the signature line below survives.  */
1986 duplicate_array (void *src, size_t n)
/* Return a GC-allocated copy of the N-element jump function array SRC.  */
1998 static struct ipa_jump_func *
1999 duplicate_ipa_jump_func_array (const struct ipa_jump_func * src, size_t n)
2001 struct ipa_jump_func *p;
2006 p = ggc_alloc_vec_ipa_jump_func (n);
2007 memcpy (p, src, n * sizeof (struct ipa_jump_func));
2011 /* Hook that is called by cgraph.c when a node is duplicated. */
2014 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2015 __attribute__((unused)) void *data)
2017 struct ipa_edge_args *old_args, *new_args;
2020 ipa_check_create_edge_args ();
2022 old_args = IPA_EDGE_REF (src);
2023 new_args = IPA_EDGE_REF (dst);
/* Clone the jump function array so the two edges evolve independently.  */
2025 arg_count = ipa_get_cs_argument_count (old_args);
2026 ipa_set_cs_argument_count (new_args, arg_count);
2027 new_args->jump_functions =
2028 duplicate_ipa_jump_func_array (old_args->jump_functions, arg_count);
/* Also carry over the "already processed by indirect inlining" mark.  */
2030 if (iinlining_processed_edges
2031 && bitmap_bit_p (iinlining_processed_edges, src->uid))
2032 bitmap_set_bit (iinlining_processed_edges, dst->uid);
2035 /* Hook that is called by cgraph.c when a node is duplicated. */
2038 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2039 ATTRIBUTE_UNUSED void *data)
2041 struct ipa_node_params *old_info, *new_info;
2044 ipa_check_create_node_params ();
2045 old_info = IPA_NODE_REF (src);
2046 new_info = IPA_NODE_REF (dst);
2047 param_count = ipa_get_param_count (old_info);
/* Deep-copy the parameter descriptors, including each per-parameter
   type vector, so the clone owns its own storage.  */
2049 ipa_set_param_count (new_info, param_count);
2050 new_info->params = (struct ipa_param_descriptor *)
2051 duplicate_array (old_info->params,
2052 sizeof (struct ipa_param_descriptor) * param_count);
2053 for (i = 0; i < param_count; i++)
2054 new_info->params[i].types = VEC_copy (tree, heap,
2055 old_info->params[i].types);
2056 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2057 new_info->count_scale = old_info->count_scale;
2059 new_info->called_with_var_arguments = old_info->called_with_var_arguments;
2060 new_info->uses_analysis_done = old_info->uses_analysis_done;
2061 new_info->node_enqueued = old_info->node_enqueued;
2064 /* Register our cgraph hooks if they are not already there. */
2067 ipa_register_cgraph_hooks (void)
/* Each registration is guarded by its holder, so calling this function
   repeatedly is harmless (idempotent).  */
2069 if (!edge_removal_hook_holder)
2070 edge_removal_hook_holder =
2071 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2072 if (!node_removal_hook_holder)
2073 node_removal_hook_holder =
2074 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2075 if (!edge_duplication_hook_holder)
2076 edge_duplication_hook_holder =
2077 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2078 if (!node_duplication_hook_holder)
2079 node_duplication_hook_holder =
2080 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2083 /* Unregister our cgraph hooks if they are not already there. */
2086 ipa_unregister_cgraph_hooks (void)
/* Remove every hook and clear its holder so a later
   ipa_register_cgraph_hooks can re-register them.  */
2088 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2089 edge_removal_hook_holder = NULL;
2090 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2091 node_removal_hook_holder = NULL;
2092 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2093 edge_duplication_hook_holder = NULL;
2094 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2095 node_duplication_hook_holder = NULL;
2098 /* Allocate all necessary data structures necessary for indirect inlining. */
2101 ipa_create_all_structures_for_iinln (void)
/* The bitmap tracks which edge uids have already been processed by
   indirect inlining; freed in ipa_free_all_structures_after_iinln.  */
2103 iinlining_processed_edges = BITMAP_ALLOC (NULL);
2106 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2107 longer needed after ipa-cp. */
2110 ipa_free_all_structures_after_ipa_cp (void)
/* Keep the structures alive when indirect inlining still needs them.  */
2112 if (!flag_indirect_inlining)
2114 ipa_free_all_edge_args ();
2115 ipa_free_all_node_params ();
2116 ipa_unregister_cgraph_hooks ();
2120 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2121 longer needed after indirect inlining. */
2124 ipa_free_all_structures_after_iinln (void)
/* Unconditional teardown: indirect inlining was the last consumer.  */
2126 BITMAP_FREE (iinlining_processed_edges);
2128 ipa_free_all_edge_args ();
2129 ipa_free_all_node_params ();
2130 ipa_unregister_cgraph_hooks ();
2133 /* Print ipa_tree_map data structures of all functions in the
2137 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2141 struct ipa_node_params *info;
/* Unanalyzed nodes have no parameter info to print.  */
2143 if (!node->analyzed)
2145 info = IPA_NODE_REF (node);
2146 fprintf (f, " function %s parameter descriptors:\n",
2147 cgraph_node_name (node));
2148 count = ipa_get_param_count (info);
2149 for (i = 0; i < count; i++)
2151 temp = ipa_get_param (info, i);
2152 if (TREE_CODE (temp) == PARM_DECL)
2153 fprintf (f, " param %d : %s", i,
2155 ? (*lang_hooks.decl_printable_name) (temp, 2)
2157 if (ipa_is_param_used (info, i))
2158 fprintf (f, " used");
2163 /* Print ipa_tree_map data structures of all functions in the
2167 ipa_print_all_params (FILE * f)
2169 struct cgraph_node *node;
/* Dump parameter descriptors for every node in the call graph.  */
2171 fprintf (f, "\nFunction parameters:\n");
2172 for (node = cgraph_nodes; node; node = node->next)
2173 ipa_print_node_params (f, node);
2176 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2179 ipa_get_vector_of_formal_parms (tree fndecl)
2181 VEC(tree, heap) *args;
/* Pre-size the vector from the parameter count, then push each
   PARM_DECL in declaration order.  */
2185 count = count_formal_params_1 (fndecl);
2186 args = VEC_alloc (tree, heap, count);
2187 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2188 VEC_quick_push (tree, args, parm);
2193 /* Return a heap allocated vector containing types of formal parameters of
2194 function type FNTYPE. */
2196 static inline VEC(tree, heap) *
2197 get_vector_of_formal_parm_types (tree fntype)
2199 VEC(tree, heap) *types;
/* Two passes over TYPE_ARG_TYPES: first to count, then to fill.  */
2203 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2206 types = VEC_alloc (tree, heap, count);
2207 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2208 VEC_quick_push (tree, types, TREE_VALUE (t));
2213 /* Modify the function declaration FNDECL and its type according to the plan in
2214 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2215 to reflect the actual parameters being modified which are determined by the
2216 base_index field. */
2219 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2220 const char *synth_parm_prefix)
2222 VEC(tree, heap) *oparms, *otypes;
2223 tree orig_type, new_type = NULL;
2224 tree old_arg_types, t, new_arg_types = NULL;
2225 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2226 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2227 tree new_reversed = NULL;
2228 bool care_for_types, last_parm_void;
2230 if (!synth_parm_prefix)
2231 synth_parm_prefix = "SYNTH"
2233 oparms = ipa_get_vector_of_formal_parms (fndecl);
2234 orig_type = TREE_TYPE (fndecl);
2235 old_arg_types = TYPE_ARG_TYPES (orig_type);
2237 /* The following test is an ugly hack, some functions simply don't have any
2238 arguments in their type. This is probably a bug but well... */
2239 care_for_types = (old_arg_types != NULL_TREE);
2242 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2244 otypes = get_vector_of_formal_parm_types (orig_type);
2246 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2248 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2252 last_parm_void = false;
/* Walk the adjustment plan: kept parameters are re-chained as-is,
   removed ones are skipped, and reductions get a fresh synthetic
   PARM_DECL built from adj->type.  */
2256 for (i = 0; i < len; i++)
2258 struct ipa_parm_adjustment *adj;
2261 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2262 parm = VEC_index (tree, oparms, adj->base_index);
2265 if (adj->copy_param)
2268 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2272 link = &DECL_CHAIN (parm);
2274 else if (!adj->remove_param)
2280 ptype = build_pointer_type (adj->type);
2285 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2287 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2289 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2291 DECL_ARTIFICIAL (new_parm) = 1;
2292 DECL_ARG_TYPE (new_parm) = ptype;
2293 DECL_CONTEXT (new_parm) = fndecl;
2294 TREE_USED (new_parm) = 1;
2295 DECL_IGNORED_P (new_parm) = 1;
2296 layout_decl (new_parm, 0);
2298 add_referenced_var (new_parm);
2299 mark_sym_for_renaming (new_parm);
2301 adj->reduction = new_parm;
2305 link = &DECL_CHAIN (new_parm);
/* The arg-type list was built in reverse; restore order and close it
   with void_list_node when the original type was varargs-terminated.  */
2313 new_reversed = nreverse (new_arg_types);
2317 TREE_CHAIN (new_arg_types) = void_list_node;
2319 new_reversed = void_list_node;
2323 /* Use copy_node to preserve as much as possible from original type
2324 (debug info, attribute lists etc.)
2325 Exception is METHOD_TYPEs must have THIS argument.
2326 When we are asked to remove it, we need to build new FUNCTION_TYPE
2328 if (TREE_CODE (orig_type) != METHOD_TYPE
2329 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
2330 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
2332 new_type = build_distinct_type_copy (orig_type);
2333 TYPE_ARG_TYPES (new_type) = new_reversed;
2338 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2340 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2341 DECL_VINDEX (fndecl) = NULL_TREE;
2344 /* When signature changes, we need to clear builtin info. */
2345 if (DECL_BUILT_IN (fndecl))
2347 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2348 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2351 /* This is a new type, not a copy of an old type. Need to reassociate
2352 variants. We can handle everything except the main variant lazily. */
2353 t = TYPE_MAIN_VARIANT (orig_type);
2356 TYPE_MAIN_VARIANT (new_type) = t;
2357 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2358 TYPE_NEXT_VARIANT (t) = new_type;
2362 TYPE_MAIN_VARIANT (new_type) = new_type;
2363 TYPE_NEXT_VARIANT (new_type) = NULL;
2366 TREE_TYPE (fndecl) = new_type;
2367 DECL_VIRTUAL_P (fndecl) = 0;
2369 VEC_free (tree, heap, otypes);
2370 VEC_free (tree, heap, oparms);
2373 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2374 If this is a directly recursive call, CS must be NULL. Otherwise it must
2375 contain the corresponding call graph edge. */
/* NOTE(review): the embedded original line numbers are not contiguous in this
   extract; declarations (len, i, loc, prev_base, callee_decl, new_stmt, ...)
   and several braces fall in the invisible gaps.  Comments below describe only
   what the visible statements establish.  */
2378 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2379 ipa_parm_adjustment_vec adjustments)
2381 VEC(tree, heap) *vargs;
2383 gimple_stmt_iterator gsi;
/* Build the new argument vector, one entry per surviving adjustment.  */
2387 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2388 vargs = VEC_alloc (tree, heap, len);
2390 gsi = gsi_for_stmt (stmt);
2391 for (i = 0; i < len; i++)
2393 struct ipa_parm_adjustment *adj;
2395 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
/* A plain copy: reuse the original actual argument unchanged.  */
2397 if (adj->copy_param)
2399 tree arg = gimple_call_arg (stmt, adj->base_index);
2401 VEC_quick_push (tree, vargs, arg);
/* Not copied and not removed: synthesize a new argument expression.  */
2403 else if (!adj->remove_param)
2405 tree expr, base, off;
2408 /* We create a new parameter out of the value of the old one, we can
2409 do the following kind of transformations:
2411 - A scalar passed by reference is converted to a scalar passed by
2412 value. (adj->by_ref is false and the type of the original
2413 actual argument is a pointer to a scalar).
2415 - A part of an aggregate is passed instead of the whole aggregate.
2416 The part can be passed either by value or by reference, this is
2417 determined by value of adj->by_ref. Moreover, the code below
2418 handles both situations when the original aggregate is passed by
2419 value (its type is not a pointer) and when it is passed by
2420 reference (it is a pointer to an aggregate).
2422 When the new argument is passed by reference (adj->by_ref is true)
2423 it must be a part of an aggregate and therefore we form it by
2424 simply taking the address of a reference inside the original
2427 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2428 base = gimple_call_arg (stmt, adj->base_index);
2429 loc = EXPR_LOCATION (base);
/* Case 1: original argument is already a pointer (not an ADDR_EXPR):
   the offset becomes the MEM_REF offset operand directly.  */
2431 if (TREE_CODE (base) != ADDR_EXPR
2432 && POINTER_TYPE_P (TREE_TYPE (base)))
2433 off = build_int_cst (adj->alias_ptr_type,
2434 adj->offset / BITS_PER_UNIT);
2437 HOST_WIDE_INT base_offset;
/* Strip a visible address-of so we can decompose the object itself.  */
2440 if (TREE_CODE (base) == ADDR_EXPR)
2441 base = TREE_OPERAND (base, 0);
2443 base = get_addr_base_and_unit_offset (base, &base_offset);
2444 /* Aggregate arguments can have non-invariant addresses. */
2447 base = build_fold_addr_expr (prev_base);
2448 off = build_int_cst (adj->alias_ptr_type,
2449 adj->offset / BITS_PER_UNIT);
/* Case: decomposed base is itself a MEM_REF — fold our offset into
   its existing constant offset operand.  */
2451 else if (TREE_CODE (base) == MEM_REF)
2453 off = build_int_cst (adj->alias_ptr_type,
2455 + adj->offset / BITS_PER_UNIT);
2456 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2458 base = TREE_OPERAND (base, 0);
2462 off = build_int_cst (adj->alias_ptr_type,
2464 + adj->offset / BITS_PER_UNIT);
2465 base = build_fold_addr_expr (base);
/* Form the actual access; when passing by reference, take its
   address instead of its value.  */
2469 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2471 expr = build_fold_addr_expr (expr);
/* Gimplify the new argument in place, before the call statement.  */
2473 expr = force_gimple_operand_gsi (&gsi, expr,
2475 || is_gimple_reg_type (adj->type),
2476 NULL, true, GSI_SAME_STMT);
2477 VEC_quick_push (tree, vargs, expr);
2481 if (dump_file && (dump_flags & TDF_DETAILS))
2483 fprintf (dump_file, "replacing stmt:");
2484 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
/* CS is NULL for directly recursive calls (see header comment): then the
   callee decl comes from the statement itself.  */
2487 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
2488 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2489 VEC_free (tree, heap, vargs);
/* Carry over LHS, block, location, flags and static chain so the new call
   is observationally a drop-in replacement.  */
2490 if (gimple_call_lhs (stmt))
2491 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2493 gimple_set_block (new_stmt, gimple_block (stmt));
2494 if (gimple_has_location (stmt))
2495 gimple_set_location (new_stmt, gimple_location (stmt));
2496 gimple_call_copy_flags (new_stmt, stmt);
2497 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2499 if (dump_file && (dump_flags & TDF_DETAILS))
2501 fprintf (dump_file, "with stmt:");
2502 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2503 fprintf (dump_file, "\n");
2505 gsi_replace (&gsi, new_stmt, true)
2507 cgraph_set_call_stmt (cs, new_stmt);
/* The rewrite may have created new SSA uses/defs and invalidated
   dominance information.  */
2508 update_ssa (TODO_update_ssa);
2509 free_dominance_info (CDI_DOMINATORS);
2512 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
/* NOTE(review): lines are missing from this extract (embedded numbering
   jumps); presumably a "one_found" flag and the true/false returns live in
   the gaps — verify against the full source.  */
2515 index_in_adjustments_multiple_times_p (int base_index,
2516 ipa_parm_adjustment_vec adjustments)
2518 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
/* Linear scan; ADJUSTMENTS vectors are small (one entry per parameter).  */
2521 for (i = 0; i < len; i++)
2523 struct ipa_parm_adjustment *adj;
2524 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2526 if (adj->base_index == base_index)
2538 /* Return adjustments that should have the same effect on function parameters
2539 and call arguments as if they were first changed according to adjustments in
2540 INNER and then by adjustments in OUTER. */
/* NOTE(review): this extract is missing lines (embedded numbering jumps):
   the "removals" counter, several braces and the final return are not
   visible here.  */
2542 ipa_parm_adjustment_vec
2543 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
2544 ipa_parm_adjustment_vec outer)
2546 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
2547 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
2549 ipa_parm_adjustment_vec adjustments, tmp;
/* First pass: copy the non-removing INNER adjustments into TMP so that
   OUTER indices (which refer to the post-INNER parameter list) can be
   mapped back through it.  */
2551 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
2552 for (i = 0; i < inlen; i++)
2554 struct ipa_parm_adjustment *n;
2555 n = VEC_index (ipa_parm_adjustment_t, inner, i);
2557 if (n->remove_param)
2560 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
/* Second pass: compose each OUTER adjustment with the INNER one it
   refers to.  */
2563 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
2564 for (i = 0; i < outlen; i++)
2566 struct ipa_parm_adjustment *r;
2567 struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
2569 struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
/* TMP was filtered above, so IN can never itself be a removal.  */
2572 gcc_assert (!in->remove_param);
2573 if (out->remove_param)
/* Only emit one removal per original parameter index.  */
2575 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
2577 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2578 memset (r, 0, sizeof (*r));
2579 r->remove_param = true;
2584 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2585 memset (r, 0, sizeof (*r));
2586 r->base_index = in->base_index;
2587 r->type = out->type;
2589 /* FIXME: Create nonlocal value too. */
/* Offset composition: a copy on either side contributes no offset;
   otherwise the offsets accumulate.  */
2591 if (in->copy_param && out->copy_param)
2592 r->copy_param = true;
2593 else if (in->copy_param)
2594 r->offset = out->offset;
2595 else if (out->copy_param)
2596 r->offset = in->offset;
2598 r->offset = in->offset + out->offset;
/* Finally re-emit the removals from INNER, which OUTER could not have
   referenced.  */
2601 for (i = 0; i < inlen; i++)
2603 struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
2606 if (n->remove_param)
2607 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
2610 VEC_free (ipa_parm_adjustment_t, heap, tmp);
2614 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
2615 friendly way, assuming they are meant to be applied to FNDECL. */
/* NOTE(review): lines are missing in this extract (numbering jumps); the
   conditions guarding the base/reduction/offset prints are not all
   visible.  */
2618 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2621 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
/* Fetch the formal parameter trees so base_index can be printed as the
   actual PARM_DECL.  */
2623 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2625 fprintf (file, "IPA param adjustments: ");
2626 for (i = 0; i < len; i++)
2628 struct ipa_parm_adjustment *adj;
2629 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2632 fprintf (file, " ");
2636 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2637 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2640 fprintf (file, ", base: ");
2641 print_generic_expr (file, adj->base, 0);
2645 fprintf (file, ", reduction: ");
2646 print_generic_expr (file, adj->reduction, 0);
2648 if (adj->new_ssa_base)
2650 fprintf (file, ", new_ssa_base: ");
2651 print_generic_expr (file, adj->new_ssa_base, 0);
2654 if (adj->copy_param)
2655 fprintf (file, ", copy_param");
2656 else if (adj->remove_param)
2657 fprintf (file, ", remove_param");
2659 fprintf (file, ", offset %li", (long) adj->offset);
2661 fprintf (file, ", by_ref");
2662 print_node_brief (file, ", type: ", adj->type, 0);
2663 fprintf (file, "\n");
2665 VEC_free (tree, heap, parms);
2668 /* Stream out jump function JUMP_FUNC to OB. */
/* Serialization must mirror ipa_read_jump_function exactly: first the
   type tag as a ULEB128, then the per-type payload.  (break statements
   between cases fall in this extract's missing lines.)  */
2671 ipa_write_jump_function (struct output_block *ob,
2672 struct ipa_jump_func *jump_func)
2674 lto_output_uleb128_stream (ob->main_stream,
2677 switch (jump_func->type)
2679 case IPA_JF_UNKNOWN:
2681 case IPA_JF_KNOWN_TYPE:
2682 lto_output_tree (ob, jump_func->value.base_binfo, true);
2685 lto_output_tree (ob, jump_func->value.constant, true);
2687 case IPA_JF_PASS_THROUGH:
2688 lto_output_tree (ob, jump_func->value.pass_through.operand, true);
2689 lto_output_uleb128_stream (ob->main_stream,
2690 jump_func->value.pass_through.formal_id);
2691 lto_output_uleb128_stream (ob->main_stream,
2692 jump_func->value.pass_through.operation);
2694 case IPA_JF_ANCESTOR:
2695 lto_output_uleb128_stream (ob->main_stream,
2696 jump_func->value.ancestor.offset);
2697 lto_output_tree (ob, jump_func->value.ancestor.type, true);
2698 lto_output_uleb128_stream (ob->main_stream,
2699 jump_func->value.ancestor.formal_id);
2701 case IPA_JF_CONST_MEMBER_PTR:
2702 lto_output_tree (ob, jump_func->value.member_cst.pfn, true);
2703 lto_output_tree (ob, jump_func->value.member_cst.delta, false);
2708 /* Read in jump function JUMP_FUNC from IB. */
/* Exact mirror of ipa_write_jump_function: read the ULEB128 type tag,
   then the per-type payload in the same order it was written.  */
2711 ipa_read_jump_function (struct lto_input_block *ib,
2712 struct ipa_jump_func *jump_func,
2713 struct data_in *data_in)
2715 jump_func->type = (enum jump_func_type) lto_input_uleb128 (ib);
2717 switch (jump_func->type)
2719 case IPA_JF_UNKNOWN:
2721 case IPA_JF_KNOWN_TYPE:
2722 jump_func->value.base_binfo = lto_input_tree (ib, data_in);
2725 jump_func->value.constant = lto_input_tree (ib, data_in);
2727 case IPA_JF_PASS_THROUGH:
2728 jump_func->value.pass_through.operand = lto_input_tree (ib, data_in);
2729 jump_func->value.pass_through.formal_id = lto_input_uleb128 (ib);
2730 jump_func->value.pass_through.operation = (enum tree_code) lto_input_uleb128 (ib);
2732 case IPA_JF_ANCESTOR:
2733 jump_func->value.ancestor.offset = lto_input_uleb128 (ib);
2734 jump_func->value.ancestor.type = lto_input_tree (ib, data_in);
2735 jump_func->value.ancestor.formal_id = lto_input_uleb128 (ib);
2737 case IPA_JF_CONST_MEMBER_PTR:
2738 jump_func->value.member_cst.pfn = lto_input_tree (ib, data_in);
2739 jump_func->value.member_cst.delta = lto_input_tree (ib, data_in);
2744 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
2745 relevant to indirect inlining to OB. */
/* Must stay in sync with ipa_read_indirect_edge_info: param_index and
   anc_offset as SLEB128s, then a one-bit bitpack for "polymorphic", then
   the OTR token/type only when that bit is set.  */
2748 ipa_write_indirect_edge_info (struct output_block *ob,
2749 struct cgraph_edge *cs)
2751 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2752 struct bitpack_d bp;
2754 lto_output_sleb128_stream (ob->main_stream, ii->param_index);
2755 lto_output_sleb128_stream (ob->main_stream, ii->anc_offset);
2756 bp = bitpack_create (ob->main_stream);
2757 bp_pack_value (&bp, ii->polymorphic, 1);
2758 lto_output_bitpack (&bp);
2760 if (ii->polymorphic)
2762 lto_output_sleb128_stream (ob->main_stream, ii->otr_token);
2763 lto_output_tree (ob, ii->otr_type, true);
2767 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2768 relevant to indirect inlining from IB. */
/* Exact mirror of ipa_write_indirect_edge_info.  DATA_IN is marked unused
   in the signature yet is passed to lto_input_tree below — presumably the
   attribute is stale; verify against the full source.  */
2771 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2772 struct data_in *data_in ATTRIBUTE_UNUSED,
2773 struct cgraph_edge *cs)
2775 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2776 struct bitpack_d bp;
2778 ii->param_index = (int) lto_input_sleb128 (ib);
2779 ii->anc_offset = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2780 bp = lto_input_bitpack (ib);
2781 ii->polymorphic = bp_unpack_value (&bp, 1);
2782 if (ii->polymorphic)
2784 ii->otr_token = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2785 ii->otr_type = lto_input_tree (ib, data_in);
2789 /* Stream out NODE info to OB. */
/* Layout (mirrored by ipa_read_node_info): encoded node reference,
   a bitpack of called_with_var_arguments plus one "used" bit per
   parameter, then per-callee-edge argument counts and jump functions,
   then indirect-edge info.  */
2792 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
2795 lto_cgraph_encoder_t encoder;
2796 struct ipa_node_params *info = IPA_NODE_REF (node);
2798 struct cgraph_edge *e;
2799 struct bitpack_d bp;
2801 encoder = ob->decl_state->cgraph_node_encoder;
2802 node_ref = lto_cgraph_encoder_encode (encoder, node);
2803 lto_output_uleb128_stream (ob->main_stream, node_ref);
2805 bp = bitpack_create (ob->main_stream);
2806 bp_pack_value (&bp, info->called_with_var_arguments, 1);
/* Writing is only valid for analyzed, non-cloned, not-enqueued nodes.  */
2807 gcc_assert (info->uses_analysis_done
2808 || ipa_get_param_count (info) == 0);
2809 gcc_assert (!info->node_enqueued);
2810 gcc_assert (!info->ipcp_orig_node);
2811 for (j = 0; j < ipa_get_param_count (info); j++)
2812 bp_pack_value (&bp, info->params[j].used, 1);
2813 lto_output_bitpack (&bp);
2814 for (e = node->callees; e; e = e->next_callee)
2816 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2818 lto_output_uleb128_stream (ob->main_stream,
2819 ipa_get_cs_argument_count (args));
2820 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2821 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2823 for (e = node->indirect_calls; e; e = e->next_callee)
2824 ipa_write_indirect_edge_info (ob, e);
2827 /* Stream in NODE info from IB. */
/* Exact mirror of ipa_write_node_info; the node reference itself is
   consumed by the caller (ipa_prop_read_section) before this is called.  */
2830 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
2831 struct data_in *data_in)
2833 struct ipa_node_params *info = IPA_NODE_REF (node);
2835 struct cgraph_edge *e;
2836 struct bitpack_d bp;
/* Make sure the params descriptors exist before unpacking into them.  */
2838 ipa_initialize_node_params (node);
2840 bp = lto_input_bitpack (ib);
2841 info->called_with_var_arguments = bp_unpack_value (&bp, 1);
2842 if (ipa_get_param_count (info) != 0)
2843 info->uses_analysis_done = true;
2844 info->node_enqueued = false;
2845 for (k = 0; k < ipa_get_param_count (info); k++)
2846 info->params[k].used = bp_unpack_value (&bp, 1);
2847 for (e = node->callees; e; e = e->next_callee)
2849 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2850 int count = lto_input_uleb128 (ib);
2852 ipa_set_cs_argument_count (args, count);
/* Jump functions live in GC memory, hence the ggc allocator.  */
2856 args->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
2857 (ipa_get_cs_argument_count (args));
2858 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2859 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
2861 for (e = node->indirect_calls; e; e = e->next_callee)
2862 ipa_read_indirect_edge_info (ib, data_in, e);
2865 /* Write jump functions for nodes in SET. */
2868 ipa_prop_write_jump_functions (cgraph_node_set set)
2870 struct cgraph_node *node;
2871 struct output_block *ob = create_output_block (LTO_section_jump_functions);
2872 unsigned int count = 0;
2873 cgraph_node_set_iterator csi;
2875 ob->cgraph_node = NULL;
/* First pass: count the analyzed nodes with IPA info, since the count
   prefixes the section and must be written before any node record.  */
2877 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2879 node = csi_node (csi);
2880 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2884 lto_output_uleb128_stream (ob->main_stream, count);
2886 /* Process all of the functions. */
2887 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2889 node = csi_node (csi);
/* Same filter as the counting pass — the two must stay in sync.  */
2890 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2891 ipa_write_node_info (ob, node);
/* Terminating zero byte, then emit the section and release the block.  */
2893 lto_output_1_stream (ob->main_stream, 0);
2894 produce_asm (ob, NULL);
2895 destroy_output_block (ob);
2898 /* Read section in file FILE_DATA of length LEN with data DATA. */
/* NOTE(review): the declarations of data_in's initializer target, count,
   i and index fall in this extract's missing lines.  */
2901 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
/* The section starts with a standard LTO function header; compute the
   offsets of the main stream and the string table from its sizes.  */
2904 const struct lto_function_header *header =
2905 (const struct lto_function_header *) data;
2906 const int32_t cfg_offset = sizeof (struct lto_function_header);
2907 const int32_t main_offset = cfg_offset + header->cfg_size;
2908 const int32_t string_offset = main_offset + header->main_size;
2909 struct data_in *data_in;
2910 struct lto_input_block ib_main;
2914 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2918 lto_data_in_create (file_data, (const char *) data + string_offset,
2919 header->string_size, NULL);
/* Mirrors the count written by ipa_prop_write_jump_functions.  */
2920 count = lto_input_uleb128 (&ib_main);
2922 for (i = 0; i < count; i++)
2925 struct cgraph_node *node;
2926 lto_cgraph_encoder_t encoder;
/* Decode the node reference and read its full IPA record.  */
2928 index = lto_input_uleb128 (&ib_main);
2929 encoder = file_data->cgraph_node_encoder;
2930 node = lto_cgraph_encoder_deref (encoder, index);
2931 gcc_assert (node->analyzed);
2932 ipa_read_node_info (&ib_main, node, data_in);
2934 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
2936 lto_data_in_delete (data_in);
2939 /* Read ipcp jump functions. */
/* Iterates over all LTO input files and reads the jump-function section of
   each one that has it.  (The declaration of j and the NULL check on the
   section data fall in this extract's missing lines.)  */
2942 ipa_prop_read_jump_functions (void)
2944 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2945 struct lto_file_decl_data *file_data;
/* IPA structures and cgraph hooks must exist before any node info is
   streamed in.  */
2948 ipa_check_create_node_params ();
2949 ipa_check_create_edge_args ();
2950 ipa_register_cgraph_hooks ();
2952 while ((file_data = file_data_vec[j++]))
2955 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
2958 ipa_prop_read_section (file_data, data, len);
2962 /* After merging units, we can get mismatch in argument counts.
2963 Also decl merging might've rendered parameter lists obsolete.
2964 Also compute called_with_variable_arg info. */
2967 ipa_update_after_lto_read (void)
2969 struct cgraph_node *node;
2970 struct cgraph_edge *cs;
2972 ipa_check_create_node_params ();
2973 ipa_check_create_edge_args ();
2975 for (node = cgraph_nodes; node; node = node->next)
2977 ipa_initialize_node_params (node);
2979 for (node = cgraph_nodes; node; node = node->next)
2981 for (cs = node->callees; cs; cs = cs->next_callee)
2983 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
2984 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
2985 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));