1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file marks functions as being either const (TREE_READONLY) or
23 pure (DECL_PURE_P). It can also set a variant of these that
24 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
26 This must be run after inlining decisions have been made since
27 otherwise, the local sets will not contain information that is
28 consistent with post inlined state. The global sets are not prone
29 to this problem since they are by definition transitive. */
31 /* The code in this module is called by the ipa pass manager. It
32 should be one of the later passes since it's information is used by
33 the rest of the compilation. */
37 #include "coretypes.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "langhooks.h"
44 #include "pointer-set.h"
46 #include "ipa-utils.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "langhooks.h"
57 #include "lto-streamer.h"
59 #include "tree-scalar-evolution.h"
/* Set of shared nodes (e.g. static-declaration initializers) already
   scanned during local analysis, so each is visited only once.  */
63 static struct pointer_set_t *visited_nodes;
65 /* Lattice values for const and pure functions. Everything starts out
66 being const, then may drop to pure and then neither depending on
68 enum pure_const_state_e
/* Printable names indexed by pure_const_state_e lattice value.  */
75 const char *pure_const_names[3] = {"const", "pure", "neither"};
77 /* Holder for the const_state. There is one of these per function
/* Computed lattice state for the function body.  */
82 enum pure_const_state_e pure_const_state;
83 /* What user set here; we can be always sure about this. */
84 enum pure_const_state_e state_previously_known;
85 bool looping_previously_known;
87 /* True if the function could possibly infinite loop. There are a
88 lot of ways that this could be determined. We are pretty
89 conservative here. While it is possible to cse pure and const
90 calls, it is not legal to have dce get rid of the call if there
91 is a possibility that the call could infinite loop since this is
92 a behavioral change. */
98 typedef struct funct_state_d * funct_state;
100 /* The storage of the funct_state is abstracted because there is the
101 possibility that it may be desirable to move this to the cgraph
104 /* Array, indexed by cgraph node uid, of function states. */
106 DEF_VEC_P (funct_state);
107 DEF_VEC_ALLOC_P (funct_state, heap);
108 static VEC (funct_state, heap) *funct_state_vec;
110 /* Holders of ipa cgraph hooks: */
111 static struct cgraph_node_hook_list *function_insertion_hook_holder;
112 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
113 static struct cgraph_node_hook_list *node_removal_hook_holder;
115 /* Try to guess if function body will always be visible to compiler
116 when compiling the call and whether compiler will be able
117 to propagate the information by itself. */
/* Returns true when DECL is non-public or declared inline, i.e. its
   body is visible wherever it is called in this unit.  */
120 function_always_visible_to_compiler_p (tree decl)
122 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
125 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
126 is true if the function is known to be finite. The diagnostic is
127 controlled by OPTION. WARNED_ABOUT is a pointer_set unique for
128 OPTION, this function may initialize it and it is always returned
131 static struct pointer_set_t *
132 suggest_attribute (int option, tree decl, bool known_finite,
133 struct pointer_set_t *warned_about,
134 const char * attrib_name)
/* Skip when the diagnostic option is off.  */
136 if (!option_enabled (option))
/* Volatile functions, and finite functions whose body is always
   visible, gain nothing from the attribute suggestion.  */
138 if (TREE_THIS_VOLATILE (decl)
139 || (known_finite && function_always_visible_to_compiler_p (decl)))
/* Lazily create the dedup set; warn at most once per decl.  */
143 warned_about = pointer_set_create ();
144 if (pointer_set_contains (warned_about, decl))
146 pointer_set_insert (warned_about, decl);
147 warning_at (DECL_SOURCE_LOCATION (decl),
150 ? _("function might be candidate for attribute %<%s%>")
151 : _("function might be candidate for attribute %<%s%>"
152 " if it is known to return normally"), attrib_name);
156 /* Emit suggestion about __attribute__((pure)) for DECL. KNOWN_FINITE
157 is true if the function is known to be finite. */
160 warn_function_pure (tree decl, bool known_finite)
/* Function-local dedup set, carried across calls.  */
162 static struct pointer_set_t *warned_about;
165 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
166 known_finite, warned_about, "pure");
169 /* Emit suggestion about __attribute__((const)) for DECL. KNOWN_FINITE
170 is true if the function is known to be finite. */
173 warn_function_const (tree decl, bool known_finite)
/* Function-local dedup set, carried across calls.  */
175 static struct pointer_set_t *warned_about;
177 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
178 known_finite, warned_about, "const");
180 /* Init the function state. */
/* NOTE(review): fragment — this free of the per-uid state vector looks
   like it belongs to the pass teardown, not the init; verify against
   the full source.  */
185 free (funct_state_vec);
189 /* Return true if we have a function state for NODE. */
192 has_function_state (struct cgraph_node *node)
/* Out-of-range uid means the state was never recorded.  */
195 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
197 return VEC_index (funct_state, funct_state_vec, node->uid) != NULL;
200 /* Return the function state from NODE. */
202 static inline funct_state
203 get_function_state (struct cgraph_node *node)
/* Shared fallback used when NODE has no recorded state: the most
   conservative lattice values.  */
205 static struct funct_state_d varying
206 = { IPA_NEITHER, IPA_NEITHER, true, true, true };
208 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
209 /* We might want to put correct previously_known state into varying. */
211 return VEC_index (funct_state, funct_state_vec, node->uid);
214 /* Set the function state S for NODE. */
217 set_function_state (struct cgraph_node *node, funct_state s)
/* Grow the per-uid vector on demand so the index below is valid.  */
220 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
221 VEC_safe_grow_cleared (funct_state, heap, funct_state_vec, node->uid + 1);
222 VEC_replace (funct_state, funct_state_vec, node->uid, s);
225 /* Check to see if the use (or definition when CHECKING_WRITE is true)
226 variable T is legal in a function that is either pure or const. */
229 check_decl (funct_state local,
230 tree t, bool checking_write, bool ipa)
232 /* Do not want to do anything with volatile except mark any
233 function that uses one to be not const or pure. */
234 if (TREE_THIS_VOLATILE (t))
236 local->pure_const_state = IPA_NEITHER;
238 fprintf (dump_file, " Volatile operand is not const/pure");
242 /* Do not care about a local automatic that is not static. */
243 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
246 /* If the variable has the "used" attribute, treat it as if it had
247 been touched by the devil. */
248 if (DECL_PRESERVE_P (t))
250 local->pure_const_state = IPA_NEITHER;
252 fprintf (dump_file, " Used static/global variable is not const/pure\n");
256 /* In IPA mode we are not interested in checking actual loads and stores;
257 they will be processed at propagation time using ipa_ref. */
261 /* Since we have dealt with the locals and params cases above, if we
262 are CHECKING_WRITE, this cannot be a pure or constant
266 local->pure_const_state = IPA_NEITHER;
268 fprintf (dump_file, " static/global memory write is not const/pure\n");
272 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
274 /* Readonly reads are safe. */
275 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
276 return; /* Read of a constant, do not change the function state. */
280 fprintf (dump_file, " global memory read is not const\n");
281 /* Just a regular read. */
282 if (local->pure_const_state == IPA_CONST)
283 local->pure_const_state = IPA_PURE;
288 /* Compilation level statics can be read if they are readonly
290 if (TREE_READONLY (t))
294 fprintf (dump_file, " static memory read is not const\n");
295 /* Just a regular read. */
296 if (local->pure_const_state == IPA_CONST)
297 local->pure_const_state = IPA_PURE;
302 /* Check to see if the use (or definition when CHECKING_WRITE is true)
303 variable T is legal in a function that is either pure or const. */
306 check_op (funct_state local, tree t, bool checking_write)
/* Work on the base object of the reference.  */
308 t = get_base_address (t);
309 if (t && TREE_THIS_VOLATILE (t))
311 local->pure_const_state = IPA_NEITHER;
313 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
/* A dereference of an SSA pointer that provably does not alias
   global memory is purely local and harmless.  */
317 && INDIRECT_REF_P (t)
318 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
319 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
322 fprintf (dump_file, " Indirect ref to local memory is OK\n");
325 else if (checking_write)
327 local->pure_const_state = IPA_NEITHER;
329 fprintf (dump_file, " Indirect ref write is not const/pure\n");
/* An indirect read may touch global memory: const drops to pure.  */
335 fprintf (dump_file, " Indirect ref read is not const\n");
336 if (local->pure_const_state == IPA_CONST)
337 local->pure_const_state = IPA_PURE;
341 /* compute state based on ECF FLAGS and store to STATE and LOOPING.
   CANNOT_LEAD_TO_RETURN: when true, side effects are downgraded to
   "pure but looping" since the call never returns normally.  */
344 state_from_flags (enum pure_const_state_e *state, bool *looping,
345 int flags, bool cannot_lead_to_return)
348 if (flags & ECF_LOOPING_CONST_OR_PURE)
351 if (dump_file && (dump_flags & TDF_DETAILS))
352 fprintf (dump_file, " looping");
354 if (flags & ECF_CONST)
357 if (dump_file && (dump_flags & TDF_DETAILS))
358 fprintf (dump_file, " const\n");
360 else if (flags & ECF_PURE)
363 if (dump_file && (dump_flags & TDF_DETAILS))
364 fprintf (dump_file, " pure\n");
366 else if (cannot_lead_to_return)
370 if (dump_file && (dump_flags & TDF_DETAILS))
371 fprintf (dump_file, " ignoring side effects->pure looping\n")
/* Fixed dump-string typo: was " neihter\n".  */
375 if (dump_file && (dump_flags & TDF_DETAILS))
376 fprintf (dump_file, " neither\n");
377 *state = IPA_NEITHER;
382 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
383 into STATE and LOOPING better of the two variants.
384 Be sure to merge looping correctly. IPA_NEITHER functions
385 have looping 0 even if they don't have to return. */
388 better_state (enum pure_const_state_e *state, bool *looping,
389 enum pure_const_state_e state2, bool looping2)
/* When the current state is IPA_NEITHER only looping can improve.  */
393 if (*state == IPA_NEITHER)
396 *looping = MIN (*looping, looping2);
/* Otherwise take the smaller (better) of each component.  */
398 else if (state2 != IPA_NEITHER)
399 *looping = MIN (*looping, looping2);
402 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
403 into STATE and LOOPING worse of the two variants. */
406 worse_state (enum pure_const_state_e *state, bool *looping,
407 enum pure_const_state_e state2, bool looping2)
/* MAX over the lattice (IPA_CONST < IPA_PURE < IPA_NEITHER) and over
   the looping flag picks the more conservative value.  */
409 *state = MAX (*state, state2);
410 *looping = MAX (*looping, looping2);
413 /* Check the parameters of a function call to CALL_EXPR to see if
414 there are any references in the parameters that are not allowed for
415 pure or const functions. Also check to see if this is either an
416 indirect call, a call outside the compilation unit, or has special
417 attributes that may also affect the purity. The CALL_EXPR node for
418 the entire call expression. */
421 check_call (funct_state local, gimple call, bool ipa)
423 int flags = gimple_call_flags (call);
424 tree callee_t = gimple_call_fndecl (call);
425 bool possibly_throws = stmt_could_throw_p (call);
426 bool possibly_throws_externally = (possibly_throws
427 && stmt_can_throw_external (call));
/* Check each operand for possible throwing when non-call exceptions
   are enabled.  */
432 for (i = 0; i < gimple_num_ops (call); i++)
433 if (gimple_op (call, i)
434 && tree_could_throw_p (gimple_op (call, i)))
436 if (possibly_throws && cfun->can_throw_non_call_exceptions)
439 fprintf (dump_file, " operand can throw; looping\n");
440 local->looping = true;
442 if (possibly_throws_externally)
445 fprintf (dump_file, " operand can throw externally\n");
446 local->can_throw = true;
451 /* The const and pure flags are set by a variety of places in the
452 compiler (including here). If someone has already set the flags
453 for the callee, (such as for some of the builtins) we will use
454 them, otherwise we will compute our own information.
456 Const and pure functions have less clobber effects than other
457 functions so we process these first. Otherwise if it is a call
458 outside the compilation unit or an indirect call we punt. This
459 leaves local calls which will be processed by following the call
463 /* built_in_return is really just a return statement. */
464 if (gimple_call_builtin_p (call, BUILT_IN_RETURN))
466 /* When bad things happen to bad functions, they cannot be const
468 if (setjmp_call_p (callee_t))
471 fprintf (dump_file, " setjmp is not const/pure\n");
472 local->looping = true;
473 local->pure_const_state = IPA_NEITHER;
476 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
477 switch (DECL_FUNCTION_CODE (callee_t))
479 case BUILT_IN_LONGJMP:
480 case BUILT_IN_NONLOCAL_GOTO:
482 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
483 local->pure_const_state = IPA_NEITHER;
484 local->looping = true;
491 /* When not in IPA mode, we can still handle self recursion. */
492 if (!ipa && callee_t == current_function_decl)
495 fprintf (dump_file, " Recursive call can loop.\n");
496 local->looping = true;
498 /* Either callee is unknown or we are doing local analysis.
499 Look to see if there are any bits available for the callee (such as by
500 declaration or because it is builtin) and process solely on the basis of
504 enum pure_const_state_e call_state;
506 if (possibly_throws && cfun->can_throw_non_call_exceptions)
509 fprintf (dump_file, " can throw; looping\n");
510 local->looping = true;
512 if (possibly_throws_externally)
516 fprintf (dump_file, " can throw externally to lp %i\n",
517 lookup_stmt_eh_lp (call));
519 fprintf (dump_file, " callee:%s\n",
520 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
522 local->can_throw = true;
524 if (dump_file && (dump_flags & TDF_DETAILS))
525 fprintf (dump_file, " checking flags for call:");
/* A noreturn+nothrow call (or noreturn with -fno-exceptions) can
   never lead back to a return, so its side effects are ignorable.  */
526 state_from_flags (&call_state, &call_looping, flags,
527 ((flags & (ECF_NORETURN | ECF_NOTHROW))
528 == (ECF_NORETURN | ECF_NOTHROW))
529 || (!flag_exceptions && (flags & ECF_NORETURN)));
530 worse_state (&local->pure_const_state, &local->looping,
531 call_state, call_looping);
533 /* Direct functions calls are handled by IPA propagation. */
536 /* Wrapper around check_decl for loads in local mode. */
539 check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
/* Decls go through check_decl; other refs through check_op.  */
542 check_decl ((funct_state)data, op, false, false);
544 check_op ((funct_state)data, op, false);
548 /* Wrapper around check_decl for stores in local mode. */
551 check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
/* Decls go through check_decl; other refs through check_op.  */
554 check_decl ((funct_state)data, op, true, false);
556 check_op ((funct_state)data, op, true);
560 /* Wrapper around check_decl for loads in ipa mode. */
563 check_ipa_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
/* Same as check_load but with ipa == true.  */
566 check_decl ((funct_state)data, op, false, true);
568 check_op ((funct_state)data, op, false);
572 /* Wrapper around check_decl for stores in ipa mode. */
575 check_ipa_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
/* Same as check_store but with ipa == true.  */
578 check_decl ((funct_state)data, op, true, true);
580 check_op ((funct_state)data, op, true);
584 /* Look into pointer pointed to by GSIP and figure out what interesting side
587 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
589 gimple stmt = gsi_stmt (*gsip);
/* Debug statements carry no side effects.  */
592 if (is_gimple_debug (stmt))
597 fprintf (dump_file, " scanning: ");
598 print_gimple_stmt (dump_file, stmt, 0, 0);
601 /* Look for loads and stores. */
602 walk_stmt_load_store_ops (stmt, local,
603 ipa ? check_ipa_load : check_load,
604 ipa ? check_ipa_store : check_store);
/* Non-call statements that may throw affect looping/can_throw.  */
606 if (gimple_code (stmt) != GIMPLE_CALL
607 && stmt_could_throw_p (stmt))
609 if (cfun->can_throw_non_call_exceptions)
612 fprintf (dump_file, " can throw; looping");
613 local->looping = true;
615 if (stmt_can_throw_external (stmt))
618 fprintf (dump_file, " can throw externally");
619 local->can_throw = true;
622 switch (gimple_code (stmt))
625 check_call (local, stmt, ipa);
628 if (DECL_NONLOCAL (gimple_label_label (stmt)))
629 /* Target of long jump. */
632 fprintf (dump_file, " nonlocal label is not const/pure");
633 local->pure_const_state = IPA_NEITHER;
/* Inspect asm clobbers and volatility.  */
637 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
639 tree op = gimple_asm_clobber_op (stmt, i);
640 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
643 fprintf (dump_file, " memory asm clobber is not const/pure");
644 /* Abandon all hope, ye who enter here. */
645 local->pure_const_state = IPA_NEITHER;
648 if (gimple_asm_volatile_p (stmt))
651 fprintf (dump_file, " volatile is not const/pure");
652 /* Abandon all hope, ye who enter here. */
653 local->pure_const_state = IPA_NEITHER;
654 local->looping = true;
663 /* This is the main routine for finding the reference patterns for
664 global variables within a function FN. */
667 analyze_function (struct cgraph_node *fn, bool ipa)
669 tree decl = fn->decl;
670 tree old_decl = current_function_decl;
672 basic_block this_block;
/* Start optimistic: const, non-looping; previously-known state is
   filled in from decl flags below.  */
674 l = XCNEW (struct funct_state_d);
675 l->pure_const_state = IPA_CONST;
676 l->state_previously_known = IPA_NEITHER;
677 l->looping_previously_known = true;
679 l->can_throw = false;
683 fprintf (dump_file, "\n\n local analysis of %s\n ",
684 cgraph_node_name (fn));
687 push_cfun (DECL_STRUCT_FUNCTION (decl));
688 current_function_decl = decl;
/* Scan every statement of every basic block.  */
690 FOR_EACH_BB (this_block)
692 gimple_stmt_iterator gsi;
693 struct walk_stmt_info wi;
695 memset (&wi, 0, sizeof(wi));
696 for (gsi = gsi_start_bb (this_block);
700 check_stmt (&gsi, l, ipa);
/* Once everything is as bad as it can be, stop scanning.  */
701 if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
707 if (l->pure_const_state != IPA_NEITHER)
709 /* Const functions cannot have back edges (an
710 indication of possible infinite loop side
712 if (mark_dfs_back_edges ())
714 /* Preheaders are needed for SCEV to work.
715 Simple latches and recorded exits improve chances that loop will be
716 proved to be finite in testcases such as in loop-15.c and loop-24.c */
717 loop_optimizer_init (LOOPS_NORMAL
718 | LOOPS_HAVE_RECORDED_EXITS);
719 if (dump_file && (dump_flags & TDF_DETAILS))
720 flow_loops_dump (dump_file, NULL, 0);
721 if (mark_irreducible_loops ())
724 fprintf (dump_file, " has irreducible loops\n");
/* Every natural loop must be provably finite for a const/pure
   function to be non-looping.  */
732 FOR_EACH_LOOP (li, loop, 0)
733 if (!finite_loop_p (loop))
736 fprintf (dump_file, " can not prove finiteness of loop %i\n", loop->num);
742 loop_optimizer_finalize ();
746 if (dump_file && (dump_flags & TDF_DETAILS))
747 fprintf (dump_file, " checking previously known:");
/* Merge in what the declaration flags already guarantee.  */
748 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
749 flags_from_decl_or_type (fn->decl),
750 cgraph_node_cannot_return (fn));
752 better_state (&l->pure_const_state, &l->looping,
753 l->state_previously_known,
754 l->looping_previously_known);
755 if (TREE_NOTHROW (decl))
756 l->can_throw = false;
759 current_function_decl = old_decl;
763 fprintf (dump_file, "Function is locally looping.\n");
765 fprintf (dump_file, "Function is locally throwing.\n");
766 if (l->pure_const_state == IPA_CONST)
767 fprintf (dump_file, "Function is locally const.\n");
768 if (l->pure_const_state == IPA_PURE)
769 fprintf (dump_file, "Function is locally pure.\n");
774 /* Called when new function is inserted to callgraph late. */
776 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
/* Nothing to analyze when the body is not even overwritable.  */
778 if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
780 /* There are some shared nodes, in particular the initializers on
781 static declarations. We do not need to scan them more than once
782 since all we would be interested in are the addressof
784 visited_nodes = pointer_set_create ();
785 if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
786 set_function_state (node, analyze_function (node, true));
787 pointer_set_destroy (visited_nodes);
788 visited_nodes = NULL;
791 /* Called when new clone is inserted to callgraph late. */
794 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
795 void *data ATTRIBUTE_UNUSED)
796 if (has_function_state (src))
806 /* Called when a node is removed from the callgraph late.
   (Original comment was a copy-paste of the clone hook's.)  */
809 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
811 if (has_function_state (node))
/* Free the state and clear the vec slot so has_function_state sees
   no stale pointer for a reused uid.  */
813 free (get_function_state (node));
814 set_function_state (node, NULL);
/* Register the cgraph hooks used by this pass; idempotent via init_p.  */
820 register_hooks (void)
822 static bool init_p = false;
829 node_removal_hook_holder =
830 cgraph_add_node_removal_hook (&remove_node_data, NULL);
831 node_duplication_hook_holder =
832 cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
833 function_insertion_hook_holder =
834 cgraph_add_function_insertion_hook (&add_new_function, NULL);
838 /* Analyze each function in the cgraph to see if it is locally PURE or
842 generate_summary (void)
844 struct cgraph_node *node;
848 /* There are some shared nodes, in particular the initializers on
849 static declarations. We do not need to scan them more than once
850 since all we would be interested in are the addressof
852 visited_nodes = pointer_set_create ();
854 /* Process all of the functions.
856 We process AVAIL_OVERWRITABLE functions. We cannot use the results
857 by default, but the info can be used at LTO with -fwhole-program or
858 when function got cloned and the clone is AVAILABLE. */
860 for (node = cgraph_nodes; node; node = node->next)
861 if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
862 set_function_state (node, analyze_function (node, true));
864 pointer_set_destroy (visited_nodes);
865 visited_nodes = NULL;
869 /* Serialize the ipa info for lto. */
872 pure_const_write_summary (cgraph_node_set set,
873 varpool_node_set vset ATTRIBUTE_UNUSED)
875 struct cgraph_node *node;
876 struct lto_simple_output_block *ob
877 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
878 unsigned int count = 0;
879 cgraph_node_set_iterator csi;
/* First pass: count nodes with recorded state, then emit the count.  */
881 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
883 node = csi_node (csi);
884 if (node->analyzed && has_function_state (node))
888 lto_output_uleb128_stream (ob->main_stream, count);
890 /* Process all of the functions. */
891 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
893 node = csi_node (csi);
894 if (node->analyzed && has_function_state (node))
896 struct bitpack_d *bp;
899 lto_cgraph_encoder_t encoder;
901 fs = get_function_state (node);
903 encoder = ob->decl_state->cgraph_node_encoder;
904 node_ref = lto_cgraph_encoder_encode (encoder, node);
905 lto_output_uleb128_stream (ob->main_stream, node_ref);
907 /* Note that flags will need to be read in the opposite
908 order as we are pushing the bitflags into FLAGS. */
909 bp = bitpack_create ();
910 bp_pack_value (bp, fs->pure_const_state, 2);
911 bp_pack_value (bp, fs->state_previously_known, 2);
912 bp_pack_value (bp, fs->looping_previously_known, 1);
913 bp_pack_value (bp, fs->looping, 1);
914 bp_pack_value (bp, fs->can_throw, 1);
915 lto_output_bitpack (ob->main_stream, bp);
920 lto_destroy_simple_output_block (ob);
924 /* Deserialize the ipa info for lto. */
927 pure_const_read_summary (void)
929 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
930 struct lto_file_decl_data *file_data;
934 while ((file_data = file_data_vec[j++]))
938 struct lto_input_block *ib
939 = lto_create_simple_input_block (file_data,
940 LTO_section_ipa_pure_const,
945 unsigned int count = lto_input_uleb128 (ib);
947 for (i = 0; i < count; i++)
950 struct cgraph_node *node;
951 struct bitpack_d *bp;
953 lto_cgraph_encoder_t encoder;
955 fs = XCNEW (struct funct_state_d);
956 index = lto_input_uleb128 (ib);
957 encoder = file_data->cgraph_node_encoder;
958 node = lto_cgraph_encoder_deref (encoder, index);
959 set_function_state (node, fs);
961 /* Note that the flags must be read in the opposite
962 order in which they were written (the bitflags were
963 pushed into FLAGS). */
964 bp = lto_input_bitpack (ib);
966 = (enum pure_const_state_e) bp_unpack_value (bp, 2);
967 fs->state_previously_known
968 = (enum pure_const_state_e) bp_unpack_value (bp, 2);
969 fs->looping_previously_known = bp_unpack_value (bp, 1);
970 fs->looping = bp_unpack_value (bp, 1);
971 fs->can_throw = bp_unpack_value (bp, 1);
974 int flags = flags_from_decl_or_type (node->decl);
975 fprintf (dump_file, "Read info for %s/%i ",
976 cgraph_node_name (node),
978 if (flags & ECF_CONST)
979 fprintf (dump_file, " const");
980 if (flags & ECF_PURE)
981 fprintf (dump_file, " pure");
982 if (flags & ECF_NOTHROW)
983 fprintf (dump_file, " nothrow");
984 fprintf (dump_file, "\n pure const state: %s\n",
985 pure_const_names[fs->pure_const_state]);
986 fprintf (dump_file, " previously known state: %s\n",
/* Bug fix: was indexed by the bool fs->looping_previously_known,
   which printed "const"/"pure" regardless of the actual
   previously-known state.  */
987 pure_const_names[fs->state_previously_known]);
989 fprintf (dump_file," function is locally looping\n");
990 if (fs->looping_previously_known)
991 fprintf (dump_file," function is previously known looping\n");
993 fprintf (dump_file," function is locally throwing\n");
998 lto_destroy_simple_input_block (file_data,
999 LTO_section_ipa_pure_const,
/* Edge filter for the nothrow propagation order: ignore edges that
   cannot throw externally.  */
1007 ignore_edge (struct cgraph_edge *e)
1009 return (!e->can_throw_external);
1012 /* Return true if NODE is self recursive function. */
1015 self_recursive_p (struct cgraph_node *node)
1017 struct cgraph_edge *e;
/* Scan direct callee edges for an edge back to NODE itself.  */
1018 for (e = node->callees; e; e = e->next_callee)
1019 if (e->callee == node)
1025 /* Produce the global information by performing a transitive closure
1026 on the local information that was produced by generate_summary.
1027 Note that there is no function_transform pass since this only
1028 updates the function_decl. */
1033 struct cgraph_node *node;
1034 struct cgraph_node *w;
1035 struct cgraph_node **order =
1036 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1039 struct ipa_dfs_info * w_info;
/* Local analysis is done; drop the cgraph hooks.  */
1041 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
1042 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
1043 cgraph_remove_node_removal_hook (node_removal_hook_holder);
/* Topological order with strongly connected regions collapsed.  */
1044 order_pos = ipa_utils_reduced_inorder (order, true, false, NULL);
1047 dump_cgraph (dump_file);
1048 ipa_utils_print_order(dump_file, "reduced", order, order_pos);
1051 /* Propagate the local information thru the call graph to produce
1052 the global information. All the nodes within a cycle will have
1053 the same info so we collapse cycles first. Then we can do the
1054 propagation in one pass from the leaves to the roots. */
1055 for (i = 0; i < order_pos; i++ )
1057 enum pure_const_state_e pure_const_state = IPA_CONST;
1058 bool looping = false;
1062 if (dump_file && (dump_flags & TDF_DETAILS))
1063 fprintf (dump_file, "Starting cycle\n");
1065 /* Find the worst state for any node in the cycle. */
1067 while (w && pure_const_state != IPA_NEITHER)
1069 struct cgraph_edge *e;
1070 struct cgraph_edge *ie;
1072 struct ipa_ref *ref;
1074 funct_state w_l = get_function_state (w);
1075 if (dump_file && (dump_flags & TDF_DETAILS))
1076 fprintf (dump_file, " Visiting %s/%i state:%s looping %i\n",
1077 cgraph_node_name (w),
1079 pure_const_names[w_l->pure_const_state],
1082 /* First merge in function body properties. */
1083 worse_state (&pure_const_state, &looping,
1084 w_l->pure_const_state, w_l->looping)
1085 if (pure_const_state == IPA_NEITHER)
1088 /* For overwritable nodes we can not assume anything. */
1089 if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
1091 worse_state (&pure_const_state, &looping,
1092 w_l->state_previously_known,
1093 w_l->looping_previously_known);
1094 if (dump_file && (dump_flags & TDF_DETAILS))
1097 " Overwritable. state %s looping %i\n",
1098 pure_const_names[w_l->state_previously_known],
1099 w_l->looping_previously_known);
1106 /* We consider recursive cycles as possibly infinite.
1107 This might be relaxed since infinite recursion leads to stack
1112 /* Now walk the edges and merge in callee properties. */
1113 for (e = w->callees; e; e = e->next_callee)
1115 struct cgraph_node *y = e->callee;
1116 enum pure_const_state_e edge_state = IPA_CONST;
1117 bool edge_looping = false;
1119 if (dump_file && (dump_flags & TDF_DETAILS))
1123 cgraph_node_name (e->callee),
1126 if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
1128 funct_state y_l = get_function_state (y);
1129 if (dump_file && (dump_flags & TDF_DETAILS))
1132 " state:%s looping:%i\n",
1133 pure_const_names[y_l->pure_const_state],
1136 if (y_l->pure_const_state > IPA_PURE
1137 && cgraph_edge_cannot_lead_to_return (e))
1139 if (dump_file && (dump_flags & TDF_DETAILS))
1141 " Ignoring side effects"
1142 " -> pure, looping\n");
1143 edge_state = IPA_PURE;
1144 edge_looping = true;
1148 edge_state = y_l->pure_const_state;
1149 edge_looping = y_l->looping;
1153 state_from_flags (&edge_state, &edge_looping,
1154 flags_from_decl_or_type (y->decl),
1155 cgraph_edge_cannot_lead_to_return (e));
1157 /* Merge the results with what we already know. */
1158 better_state (&edge_state, &edge_looping,
1159 w_l->state_previously_known,
1160 w_l->looping_previously_known);
1161 worse_state (&pure_const_state, &looping,
1162 edge_state, edge_looping);
1163 if (pure_const_state == IPA_NEITHER)
1166 if (pure_const_state == IPA_NEITHER)
1169 /* Now process the indirect call. */
/* NOTE(review): this iterates NODE's (the region head's) indirect
   calls while visiting cycle member W; it looks like it should be
   w->indirect_calls — verify against the full source.  */
1170 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1172 enum pure_const_state_e edge_state = IPA_CONST;
1173 bool edge_looping = false;
1175 if (dump_file && (dump_flags & TDF_DETAILS))
1176 fprintf (dump_file, " Indirect call");
1177 state_from_flags (&edge_state, &edge_looping,
1178 ie->indirect_info->ecf_flags,
1179 cgraph_edge_cannot_lead_to_return (ie));
1180 /* Merge the results with what we already know. */
1181 better_state (&edge_state, &edge_looping,
1182 w_l->state_previously_known,
1183 w_l->looping_previously_known);
1184 worse_state (&pure_const_state, &looping,
1185 edge_state, edge_looping);
1186 if (pure_const_state == IPA_NEITHER)
1189 if (pure_const_state == IPA_NEITHER)
1192 /* And finally all loads and stores. */
1193 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
1195 enum pure_const_state_e ref_state = IPA_CONST;
1196 bool ref_looping = false;
1200 /* readonly reads are safe. */
1201 if (TREE_READONLY (ipa_ref_varpool_node (ref)->decl))
1203 if (dump_file && (dump_flags & TDF_DETAILS))
1204 fprintf (dump_file, " nonreadonly global var read\n");
1205 ref_state = IPA_PURE;
1208 if (ipa_ref_cannot_lead_to_return (ref))
1210 ref_state = IPA_NEITHER;
1211 if (dump_file && (dump_flags & TDF_DETAILS))
1212 fprintf (dump_file, " global var write\n");
1217 better_state (&ref_state, &ref_looping,
1218 w_l->state_previously_known,
1219 w_l->looping_previously_known);
1220 worse_state (&pure_const_state, &looping,
1221 ref_state, ref_looping);
1222 if (pure_const_state == IPA_NEITHER)
/* Advance to the next member of the strongly connected region.  */
1225 w_info = (struct ipa_dfs_info *) w->aux;
1226 w = w_info->next_cycle;
1228 if (dump_file && (dump_flags & TDF_DETAILS))
1229 fprintf (dump_file, "Result %s looping %i\n",
1230 pure_const_names [pure_const_state],
1233 /* Copy back the region's pure_const_state which is shared by
1234 all nodes in the region. */
1238 funct_state w_l = get_function_state (w);
1239 enum pure_const_state_e this_state = pure_const_state;
1240 bool this_looping = looping;
1242 if (w_l->state_previously_known != IPA_NEITHER
1243 && this_state > w_l->state_previously_known)
1245 this_state = w_l->state_previously_known;
1246 this_looping |= w_l->looping_previously_known;
1248 if (!this_looping && self_recursive_p (w))
1249 this_looping = true;
1250 if (!w_l->looping_previously_known)
1251 this_looping = false;
1253 /* All nodes within a cycle share the same info. */
1254 w_l->pure_const_state = this_state;
1255 w_l->looping = this_looping;
/* Apply the computed state to the decl and warn when the user could
   add the attribute themselves.  */
1260 if (!TREE_READONLY (w->decl))
1262 warn_function_const (w->decl, !this_looping);
1264 fprintf (dump_file, "Function found to be %sconst: %s\n",
1265 this_looping ? "looping " : "",
1266 cgraph_node_name (w));
1268 cgraph_set_readonly_flag (w, true);
1269 cgraph_set_looping_const_or_pure_flag (w, this_looping);
1273 if (!DECL_PURE_P (w->decl))
1275 warn_function_pure (w->decl, !this_looping);
1277 fprintf (dump_file, "Function found to be %spure: %s\n",
1278 this_looping ? "looping " : "",
1279 cgraph_node_name (w));
1281 cgraph_set_pure_flag (w, true);
1282 cgraph_set_looping_const_or_pure_flag (w, this_looping);
1288 w_info = (struct ipa_dfs_info *) w->aux;
1289 w = w_info->next_cycle;
1294 for (node = cgraph_nodes; node; node = node->next)
1296 /* Get rid of the aux information. */
1299 w_info = (struct ipa_dfs_info *) node->aux;
/* Second phase: propagate nothrow, using an order that ignores
   edges which cannot throw externally.  */
1304 order_pos = ipa_utils_reduced_inorder (order, true, false, ignore_edge);
1307 dump_cgraph (dump_file);
1308 ipa_utils_print_order(dump_file, "reduced for nothrow", order, order_pos);
1310 /* Propagate the local information thru the call graph to produce
1311 the global information. All the nodes within a cycle will have
1312 the same info so we collapse cycles first. Then we can do the
1313 propagation in one pass from the leaves to the roots. */
1314 for (i = 0; i < order_pos; i++ )
1316 bool can_throw = false;
1319 /* Find the worst state for any node in the cycle. */
1323 struct cgraph_edge *e, *ie;
1324 funct_state w_l = get_function_state (w);
1327 || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
1333 for (e = w->callees; e; e = e->next_callee)
1335 struct cgraph_node *y = e->callee;
1337 if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
1339 funct_state y_l = get_function_state (y);
1343 if (y_l->can_throw && !TREE_NOTHROW (w->decl)
1344 && e->can_throw_external)
1347 else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
/* NOTE(review): as above, this walks NODE's indirect calls while
   visiting W — likely should be w->indirect_calls; verify.  */
1350 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1351 if (ie->can_throw_external)
1353 w_info = (struct ipa_dfs_info *) w->aux;
1354 w = w_info->next_cycle;
1357 /* Copy back the region's pure_const_state which is shared by
1358 all nodes in the region. */
1362 funct_state w_l = get_function_state (w);
1363 if (!can_throw && !TREE_NOTHROW (w->decl))
1365 struct cgraph_edge *e;
1366 cgraph_set_nothrow_flag (w, true);
1367 for (e = w->callers; e; e = e->next_caller)
1368 e->can_throw_external = false;
1370 fprintf (dump_file, "Function found to be nothrow: %s\n",
1371 cgraph_node_name (w));
1373 else if (can_throw && !TREE_NOTHROW (w->decl))
1374 w_l->can_throw = true;
1375 w_info = (struct ipa_dfs_info *) w->aux;
1376 w = w_info->next_cycle;
1381 for (node = cgraph_nodes; node; node = node->next)
1383 /* Get rid of the aux information. */
1386 w_info = (struct ipa_dfs_info *) node->aux;
1390 if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE
1391 && has_function_state (node))
1392 free (get_function_state (node));
1396 VEC_free (funct_state, heap, funct_state_vec);
/* Gate for both the IPA and the local pure/const passes: run only when
   -fipa-pure-const is enabled.  NOTE(review): the remainder of the
   condition (per the comment below, an error check -- presumably
   !seen_error ()) lies outside this view.  */
1402 gate_pure_const (void)
1404 return (flag_ipa_pure_const
1405 /* Don't bother doing anything if the program has errors. */
/* Pass descriptor for the interprocedural pure/const analysis.  Wires
   gate_pure_const/propagate as gate/execute and the summary streaming
   hooks for LTO.  NOTE(review): several initializer lines (pass kind,
   sub/next pointers, function_generate_summary, etc.) fall outside
   this view.  */
1409 struct ipa_opt_pass_d pass_ipa_pure_const =
1413 "pure-const", /* name */
1414 gate_pure_const, /* gate */
1415 propagate, /* execute */
1418 0, /* static_pass_number */
1419 TV_IPA_PURE_CONST, /* tv_id */
1420 0, /* properties_required */
1421 0, /* properties_provided */
1422 0, /* properties_destroyed */
1423 0, /* todo_flags_start */
1424 0 /* todo_flags_finish */
1426 generate_summary, /* generate_summary */
1427 pure_const_write_summary, /* write_summary */
1428 pure_const_read_summary, /* read_summary */
1429 NULL, /* write_optimization_summary */
1430 NULL, /* read_optimization_summary */
1431 NULL, /* stmt_fixup */
1433 NULL, /* function_transform */
1434 NULL /* variable_transform */
1437 /* Return true if function should be skipped for local pure const analysis. */
/* NOTE(review): the return statements and closing brace of this
   function are outside this view; the two tests below presumably each
   return true (skip) when they fire -- confirm against the full file.  */
1440 skip_function_for_local_pure_const (struct cgraph_node *node)
1442 /* Because we do not schedule pass_fixup_cfg over whole program after early optimizations
1443 we must not promote functions that are called by already processed functions. */
/* Skip if a caller has already been processed (recursive cycle).  */
1445 if (function_called_by_processed_nodes_p ())
1448 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
/* Skip if the body is unavailable or may be overridden at link time,
   in which case local promotion would be unsound.  */
1451 if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
1454 fprintf (dump_file, "Function is not available or overwrittable; not analyzing.\n");
1460 /* Simple local pass for pure const discovery reusing the analysis from
1461 ipa_pure_const. This pass is effective when executed together with
1462 other optimization passes in early optimization pass queue. */
/* NOTE(review): many lines of this function (braces, `changed = true'
   assignments, the final return path) are elided from this view; only
   comments are added, code is untouched.  */
1465 local_pure_const (void)
1467 bool changed = false;
1470 struct cgraph_node *node;
1472 node = cgraph_node (current_function_decl);
1473 skip = skip_function_for_local_pure_const (node);
/* If neither promotion nor any -Wsuggest-attribute warning can result,
   there is nothing to do -- presumably an early return in the elided
   lines; TODO confirm.  */
1474 if (!warn_suggest_attribute_const
1475 && !warn_suggest_attribute_pure
1479 /* First do NORETURN discovery. */
/* The exit block having no predecessors means no path returns, so the
   function is a noreturn candidate.  */
1480 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
1481 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
1483 if (warn_missing_noreturn
1484 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
1485 warning_at (DECL_SOURCE_LOCATION (cfun->decl), OPT_Wmissing_noreturn,
1486 "function might be possible candidate "
1487 "for attribute %<noreturn%>");
1489 fprintf (dump_file, "Function found to be noreturn: %s\n",
1490 lang_hooks.decl_printable_name (current_function_decl, 2));
1492 /* Update declaration and reduce profile to executed once. */
/* TREE_THIS_VOLATILE on a FUNCTION_DECL encodes `noreturn'.  */
1493 TREE_THIS_VOLATILE (current_function_decl) = 1;
1494 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
1495 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
/* Reuse the IPA local analysis on just this function, then promote
   the decl according to the computed state.  */
1499 l = analyze_function (node, false);
1501 switch (l->pure_const_state)
/* IPA_CONST case: set TREE_READONLY, or just clear the looping bit if
   the function was already const but no longer loops.  */
1504 if (!TREE_READONLY (current_function_decl))
1506 warn_function_const (current_function_decl, !l->looping);
1509 cgraph_set_readonly_flag (node, true);
1510 cgraph_set_looping_const_or_pure_flag (node, l->looping);
1514 fprintf (dump_file, "Function found to be %sconst: %s\n",
1515 l->looping ? "looping " : "",
1516 lang_hooks.decl_printable_name (current_function_decl,
1519 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1524 cgraph_set_looping_const_or_pure_flag (node, false);
1528 fprintf (dump_file, "Function found to be non-looping: %s\n",
1529 lang_hooks.decl_printable_name (current_function_decl,
/* IPA_PURE case: mirror of the const case using DECL_PURE_P.  */
1535 if (!DECL_PURE_P (current_function_decl))
1539 cgraph_set_pure_flag (node, true);
1540 cgraph_set_looping_const_or_pure_flag (node, l->looping);
1543 warn_function_pure (current_function_decl, !l->looping);
1545 fprintf (dump_file, "Function found to be %spure: %s\n",
1546 l->looping ? "looping " : "",
1547 lang_hooks.decl_printable_name (current_function_decl,
1550 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1555 cgraph_set_looping_const_or_pure_flag (node, false);
1559 fprintf (dump_file, "Function found to be non-looping: %s\n",
1560 lang_hooks.decl_printable_name (current_function_decl,
/* Finally, nothrow discovery: mark the node and clear the external
   throw flag on all incoming call edges.  */
1568 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1570 struct cgraph_edge *e;
1572 cgraph_set_nothrow_flag (node, true);
1573 for (e = node->callers; e; e = e->next_caller)
1574 e->can_throw_external = false;
1577 fprintf (dump_file, "Function found to be nothrow: %s\n",
1578 lang_hooks.decl_printable_name (current_function_decl,
/* Presumably reached only when `changed' -- re-run CFG fixup so the
   promoted flags are reflected in the CFG; TODO confirm guard.  */
1584 return execute_fixup_cfg ();
/* Pass descriptor for the per-function (early) pure/const pass; shares
   gate_pure_const with the IPA pass and runs local_pure_const.
   NOTE(review): the initializer continues past this view (pass kind,
   sub/next pointers, closing brace are not visible).  */
1589 struct gimple_opt_pass pass_local_pure_const =
1593 "local-pure-const", /* name */
1594 gate_pure_const, /* gate */
1595 local_pure_const, /* execute */
1598 0, /* static_pass_number */
1599 TV_IPA_PURE_CONST, /* tv_id */
1600 0, /* properties_required */
1601 0, /* properties_provided */
1602 0, /* properties_destroyed */
1603 0, /* todo_flags_start */
1604 0 /* todo_flags_finish */