1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file marks functions as being either const (TREE_READONLY) or
23 pure (DECL_PURE_P). It can also set a variant of these that
24 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
26 This must be run after inlining decisions have been made since
27 otherwise, the local sets will not contain information that is
28 consistent with post inlined state. The global sets are not prone
29 to this problem since they are by definition transitive. */
31 /* The code in this module is called by the ipa pass manager. It
32 should be one of the later passes since it's information is used by
33 the rest of the compilation. */
37 #include "coretypes.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "langhooks.h"
44 #include "pointer-set.h"
46 #include "ipa-utils.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "langhooks.h"
57 #include "lto-streamer.h"
59 #include "tree-scalar-evolution.h"
/* NOTE(review): this listing carries baked-in line numbers and is missing
   many source lines (gaps in the embedded numbering): the enum body, the
   struct funct_state_d header/closing, and several fields are absent.
   Restore from upstream GCC ipa-pure-const.c before compiling.  */
63 static struct pointer_set_t *visited_nodes;
65 /* Lattice values for const and pure functions. Everything starts out
66 being const, then may drop to pure and then neither depending on
68 enum pure_const_state_e
/* NOTE(review): enumerator list (IPA_CONST, IPA_PURE, IPA_NEITHER) is
   missing from this listing; the names array below implies that order.  */
75 const char *pure_const_names[3] = {"const", "pure", "neither"};
77 /* Holder for the const_state. There is one of these per function
82 enum pure_const_state_e pure_const_state;
83 /* What user set here; we can be always sure about this. */
84 enum pure_const_state_e state_previously_known;
85 bool looping_previously_known;
87 /* True if the function could possibly infinite loop. There are a
88 lot of ways that this could be determined. We are pretty
89 conservative here. While it is possible to cse pure and const
90 calls, it is not legal to have dce get rid of the call if there
91 is a possibility that the call could infinite loop since this is
92 a behavioral change. */
/* NOTE(review): the `bool looping;` and `bool can_throw;` members and the
   closing of struct funct_state_d are missing here — code below reads
   l->looping and l->can_throw, so they must exist.  */
98 typedef struct funct_state_d * funct_state;
100 /* The storage of the funct_state is abstracted because there is the
101 possibility that it may be desirable to move this to the cgraph
104 /* Array, indexed by cgraph node uid, of function states. */
106 DEF_VEC_P (funct_state);
107 DEF_VEC_ALLOC_P (funct_state, heap);
108 static VEC (funct_state, heap) *funct_state_vec;
110 /* Holders of ipa cgraph hooks: */
111 static struct cgraph_node_hook_list *function_insertion_hook_holder;
112 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
113 static struct cgraph_node_hook_list *node_removal_hook_holder;
115 /* Try to guess if function body will always be visible to compiler
116 when compiling the call and whether compiler will be able
117 to propagate the information by itself. */
120 function_always_visible_to_compiler_p (tree decl)
122 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
125 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
126 is true if the function is known to be finite. The diagnostic is
127 controlled by OPTION. WARNED_ABOUT is a pointer_set unique for
128 OPTION, this function may initialize it and it is always returned
131 static struct pointer_set_t *
132 suggest_attribute (int option, tree decl, bool known_finite,
133 struct pointer_set_t *warned_about,
134 const char * attrib_name)
136 if (!option_enabled (option))
138 if (TREE_THIS_VOLATILE (decl)
139 || (known_finite && function_always_visible_to_compiler_p (decl)))
143 warned_about = pointer_set_create ();
144 if (pointer_set_contains (warned_about, decl))
146 pointer_set_insert (warned_about, decl);
147 warning_at (DECL_SOURCE_LOCATION (decl),
150 ? _("function might be candidate for attribute %<%s%>")
151 : _("function might be candidate for attribute %<%s%>"
152 " if it is known to return normally"), attrib_name);
156 /* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
157 is true if the function is known to be finite. */
160 warn_function_pure (tree decl, bool known_finite)
162 static struct pointer_set_t *warned_about;
165 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
166 known_finite, warned_about, "pure");
169 /* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
170 is true if the function is known to be finite. */
173 warn_function_const (tree decl, bool known_finite)
175 static struct pointer_set_t *warned_about;
177 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
178 known_finite, warned_about, "const");
180 /* Init the function state. */
/* NOTE(review): the surrounding function definition is missing from this
   listing — only this free of funct_state_vec survives.  Presumably this
   is the state-teardown helper (finish_state) paired with an init helper;
   restore from upstream GCC ipa-pure-const.c.  */
185 free (funct_state_vec);
189 /* Return true if we have a function state for NODE. */
192 has_function_state (struct cgraph_node *node)
195 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
197 return VEC_index (funct_state, funct_state_vec, node->uid) != NULL;
200 /* Return the function state from NODE. */
202 static inline funct_state
203 get_function_state (struct cgraph_node *node)
205 static struct funct_state_d varying
206 = { IPA_NEITHER, IPA_NEITHER, true, true, true };
208 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
209 /* We might want to put correct previously_known state into varying. */
211 return VEC_index (funct_state, funct_state_vec, node->uid);
214 /* Set the function state S for NODE. */
217 set_function_state (struct cgraph_node *node, funct_state s)
220 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
221 VEC_safe_grow_cleared (funct_state, heap, funct_state_vec, node->uid + 1);
222 VEC_replace (funct_state, funct_state_vec, node->uid, s);
225 /* Check to see if the use (or definition when CHECKING_WRITE is true)
226 variable T is legal in a function that is either pure or const. */
229 check_decl (funct_state local,
230 tree t, bool checking_write)
232 /* Do not want to do anything with volatile except mark any
233 function that uses one to be not const or pure. */
234 if (TREE_THIS_VOLATILE (t))
236 local->pure_const_state = IPA_NEITHER;
238 fprintf (dump_file, " Volatile operand is not const/pure");
242 /* Do not care about a local automatic that is not static. */
243 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
246 /* If the variable has the "used" attribute, treat it as if it had a
247 been touched by the devil. */
248 if (DECL_PRESERVE_P (t))
250 local->pure_const_state = IPA_NEITHER;
252 fprintf (dump_file, " Used static/global variable is not const/pure\n");
256 /* Since we have dealt with the locals and params cases above, if we
257 are CHECKING_WRITE, this cannot be a pure or constant
261 local->pure_const_state = IPA_NEITHER;
263 fprintf (dump_file, " static/global memory write is not const/pure\n");
267 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
269 /* Readonly reads are safe. */
270 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
271 return; /* Read of a constant, do not change the function state. */
275 fprintf (dump_file, " global memory read is not const\n");
276 /* Just a regular read. */
277 if (local->pure_const_state == IPA_CONST)
278 local->pure_const_state = IPA_PURE;
283 /* Compilation level statics can be read if they are readonly
285 if (TREE_READONLY (t))
289 fprintf (dump_file, " static memory read is not const\n");
290 /* Just a regular read. */
291 if (local->pure_const_state == IPA_CONST)
292 local->pure_const_state = IPA_PURE;
297 /* Check to see if the use (or definition when CHECKING_WRITE is true)
298 variable T is legal in a function that is either pure or const. */
301 check_op (funct_state local, tree t, bool checking_write)
303 t = get_base_address (t);
304 if (t && TREE_THIS_VOLATILE (t))
306 local->pure_const_state = IPA_NEITHER;
308 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
312 && INDIRECT_REF_P (t)
313 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
314 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
317 fprintf (dump_file, " Indirect ref to local memory is OK\n");
320 else if (checking_write)
322 local->pure_const_state = IPA_NEITHER;
324 fprintf (dump_file, " Indirect ref write is not const/pure\n");
330 fprintf (dump_file, " Indirect ref read is not const\n");
331 if (local->pure_const_state == IPA_CONST)
332 local->pure_const_state = IPA_PURE;
/* NOTE(review): this listing is missing many lines of check_call (gaps in
   the embedded numbering: braces, dump-guard `if (dump_file...)` lines,
   returns, and whole branches).  Restore from upstream GCC
   ipa-pure-const.c; the surviving lines are kept verbatim below.  */
336 /* Check the parameters of a function call to CALL_EXPR to see if
337 there are any references in the parameters that are not allowed for
338 pure or const functions. Also check to see if this is either an
339 indirect call, a call outside the compilation unit, or has special
340 attributes that may also effect the purity. The CALL_EXPR node for
341 the entire call expression. */
344 check_call (funct_state local, gimple call, bool ipa)
346 int flags = gimple_call_flags (call);
347 tree callee_t = gimple_call_fndecl (call);
348 bool possibly_throws = stmt_could_throw_p (call);
349 bool possibly_throws_externally = (possibly_throws
350 && stmt_can_throw_external (call));
/* Scan call operands: an operand that may throw makes the function
   looping (internal EH) and/or externally throwing.  */
355 for (i = 0; i < gimple_num_ops (call); i++)
356 if (gimple_op (call, i)
357 && tree_could_throw_p (gimple_op (call, i)))
359 if (possibly_throws && cfun->can_throw_non_call_exceptions)
362 fprintf (dump_file, " operand can throw; looping\n");
363 local->looping = true;
365 if (possibly_throws_externally)
368 fprintf (dump_file, " operand can throw externally\n");
369 local->can_throw = true;
374 /* The const and pure flags are set by a variety of places in the
375 compiler (including here). If someone has already set the flags
376 for the callee, (such as for some of the builtins) we will use
377 them, otherwise we will compute our own information.
379 Const and pure functions have less clobber effects than other
380 functions so we process these first. Otherwise if it is a call
381 outside the compilation unit or an indirect call we punt. This
382 leaves local calls which will be processed by following the call
386 /* built_in_return is really just a return statement. */
387 if (gimple_call_builtin_p (call, BUILT_IN_RETURN))
389 /* When bad things happen to bad functions, they cannot be const
391 if (setjmp_call_p (callee_t))
394 fprintf (dump_file, " setjmp is not const/pure\n");
395 local->looping = true;
396 local->pure_const_state = IPA_NEITHER;
399 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
400 switch (DECL_FUNCTION_CODE (callee_t))
402 case BUILT_IN_LONGJMP:
403 case BUILT_IN_NONLOCAL_GOTO:
405 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
406 local->pure_const_state = IPA_NEITHER;
407 local->looping = true;
414 /* When not in IPA mode, we can still handle self recursion. */
415 if (!ipa && callee_t == current_function_decl)
418 fprintf (dump_file, " Recursive call can loop.\n");
419 local->looping = true;
421 /* Either callee is unknown or we are doing local analysis.
422 Look to see if there are any bits available for the callee (such as by
423 declaration or because it is builtin) and process solely on the basis of
425 else if (!ipa || !callee_t)
427 if (possibly_throws && cfun->can_throw_non_call_exceptions)
430 fprintf (dump_file, " can throw; looping\n");
431 local->looping = true;
433 if (possibly_throws_externally)
437 fprintf (dump_file, " can throw externally to lp %i\n",
438 lookup_stmt_eh_lp (call));
440 fprintf (dump_file, " callee:%s\n",
441 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
443 local->can_throw = true;
445 if (flags & ECF_CONST)
447 if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
/* NOTE(review): this dump text says "pure" under the ECF_CONST branch
   while the ECF_PURE branch below says "const" — looks swapped; verify
   against upstream before touching the strings.  */
450 fprintf (dump_file, " calls looping pure.\n")
451 local->looping = true;
454 else if (flags & ECF_PURE)
456 if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
459 fprintf (dump_file, " calls looping const.\n");
460 local->looping = true;
463 fprintf (dump_file, " pure function call in not const\n");
464 if (local->pure_const_state == IPA_CONST)
465 local->pure_const_state = IPA_PURE;
467 else if ((flags & (ECF_NORETURN | ECF_NOTHROW))
468 == (ECF_NORETURN | ECF_NOTHROW)
469 || (!flag_exceptions && (flags & ECF_NORETURN)))
472 fprintf (dump_file, " noreturn nothrow call is looping pure\n");
473 if (local->pure_const_state == IPA_CONST)
474 local->pure_const_state = IPA_PURE;
475 local->looping = true;
/* NOTE(review): "uknown" below is a typo for "unknown" in the dump
   string; it is runtime output, so fix it in a code change, not here.  */
480 fprintf (dump_file, " uknown function call is not const/pure\n");
481 local->pure_const_state = IPA_NEITHER;
482 local->looping = true;
485 /* Direct functions calls are handled by IPA propagation. */
488 /* Wrapper around check_decl for loads. */
491 check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
494 check_decl ((funct_state)data, op, false);
496 check_op ((funct_state)data, op, false);
500 /* Wrapper around check_decl for stores. */
503 check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
506 check_decl ((funct_state)data, op, true);
508 check_op ((funct_state)data, op, true);
/* NOTE(review): many lines of check_stmt are missing from this listing
   (braces, case labels, dump guards, `break`s).  Restore from upstream
   GCC ipa-pure-const.c; surviving lines kept verbatim below.  */
512 /* Look into pointer pointed to by GSIP and figure out what interesting side
515 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
517 gimple stmt = gsi_stmt (*gsip);
/* Debug stmts carry no side effects — skipped entirely.  */
520 if (is_gimple_debug (stmt))
525 fprintf (dump_file, " scanning: ");
526 print_gimple_stmt (dump_file, stmt, 0, 0);
529 /* Look for loads and stores. */
530 walk_stmt_load_store_ops (stmt, local, check_load, check_store);
/* Non-call statements can still throw under -fnon-call-exceptions.  */
532 if (gimple_code (stmt) != GIMPLE_CALL
533 && stmt_could_throw_p (stmt))
535 if (cfun->can_throw_non_call_exceptions)
538 fprintf (dump_file, " can throw; looping");
539 local->looping = true;
541 if (stmt_can_throw_external (stmt))
544 fprintf (dump_file, " can throw externally");
545 local->can_throw = true;
/* Statement-kind specific handling: calls, nonlocal labels, asms.  */
548 switch (gimple_code (stmt))
551 check_call (local, stmt, ipa);
554 if (DECL_NONLOCAL (gimple_label_label (stmt)))
555 /* Target of long jump. */
558 fprintf (dump_file, " nonlocal label is not const/pure");
559 local->pure_const_state = IPA_NEITHER;
563 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
565 tree op = gimple_asm_clobber_op (stmt, i);
566 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
569 fprintf (dump_file, " memory asm clobber is not const/pure");
570 /* Abandon all hope, ye who enter here. */
571 local->pure_const_state = IPA_NEITHER;
574 if (gimple_asm_volatile_p (stmt))
577 fprintf (dump_file, " volatile is not const/pure");
578 /* Abandon all hope, ye who enter here. */
579 local->pure_const_state = IPA_NEITHER;
580 local->looping = true;
/* NOTE(review): analyze_function is missing many lines here (the `l`
   declaration's type line, braces, the loop-finiteness analysis interior,
   the final `return l;`).  Restore from upstream GCC ipa-pure-const.c;
   surviving lines kept verbatim below.  */
589 /* This is the main routine for finding the reference patterns for
590 global variables within a function FN. */
593 analyze_function (struct cgraph_node *fn, bool ipa)
595 tree decl = fn->decl;
596 tree old_decl = current_function_decl;
598 basic_block this_block;
/* Start from the optimistic lattice top (const) and let the scan
   knock the state down.  */
600 l = XCNEW (struct funct_state_d);
601 l->pure_const_state = IPA_CONST;
602 l->state_previously_known = IPA_NEITHER;
603 l->looping_previously_known = true;
605 l->can_throw = false;
609 fprintf (dump_file, "\n\n local analysis of %s\n ",
610 cgraph_node_name (fn));
613 push_cfun (DECL_STRUCT_FUNCTION (decl));
614 current_function_decl = decl;
/* Scan every statement in every basic block.  */
616 FOR_EACH_BB (this_block)
618 gimple_stmt_iterator gsi;
619 struct walk_stmt_info wi;
621 memset (&wi, 0, sizeof(wi));
622 for (gsi = gsi_start_bb (this_block);
626 check_stmt (&gsi, l, ipa);
/* Once the state is the lattice bottom in every dimension there is
   nothing left to learn — bail out early.  */
627 if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
633 if (l->pure_const_state != IPA_NEITHER)
635 /* Const functions cannot have back edges (an
636 indication of possible infinite loop side
638 if (mark_dfs_back_edges ())
640 /* Preheaders are needed for SCEV to work.
641 Simple latches and recorded exits improve chances that loop will
642 proved to be finite in testcases such as in loop-15.c and loop-24.c */
643 loop_optimizer_init (LOOPS_NORMAL
644 | LOOPS_HAVE_RECORDED_EXITS);
645 if (dump_file && (dump_flags & TDF_DETAILS))
646 flow_loops_dump (dump_file, NULL, 0);
647 if (mark_irreducible_loops ())
650 fprintf (dump_file, " has irreducible loops\n");
658 FOR_EACH_LOOP (li, loop, 0)
659 if (!finite_loop_p (loop))
662 fprintf (dump_file, " can not prove finiteness of loop %i\n", loop->num);
668 loop_optimizer_finalize ();
/* User-declared attributes override/refine what the scan found.  */
672 if (TREE_READONLY (decl))
674 l->pure_const_state = IPA_CONST;
675 l->state_previously_known = IPA_CONST;
676 if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
677 l->looping = false, l->looping_previously_known = false;
679 if (DECL_PURE_P (decl))
681 if (l->pure_const_state != IPA_CONST)
682 l->pure_const_state = IPA_PURE;
683 l->state_previously_known = IPA_PURE;
684 if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
685 l->looping = false, l->looping_previously_known = false;
687 if (TREE_NOTHROW (decl))
688 l->can_throw = false;
691 current_function_decl = old_decl;
695 fprintf (dump_file, "Function is locally looping.\n");
697 fprintf (dump_file, "Function is locally throwing.\n");
698 if (l->pure_const_state == IPA_CONST)
699 fprintf (dump_file, "Function is locally const.\n");
700 if (l->pure_const_state == IPA_PURE)
701 fprintf (dump_file, "Function is locally pure.\n");
706 /* Called when new function is inserted to callgraph late. */
708 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
710 if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
712 /* There are some shared nodes, in particular the initializers on
713 static declarations. We do not need to scan them more than once
714 since all we would be interested in are the addressof
716 visited_nodes = pointer_set_create ();
717 if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
718 set_function_state (node, analyze_function (node, true));
719 pointer_set_destroy (visited_nodes);
720 visited_nodes = NULL;
723 /* Called when new clone is inserted to callgraph late. */
726 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
727 void *data ATTRIBUTE_UNUSED)
729 if (has_function_state (src))
731 funct_state l = XNEW (struct funct_state_d);
732 gcc_assert (!has_function_state (dst));
733 memcpy (l, get_function_state (src), sizeof (*l));
734 set_function_state (dst, l);
738 /* Called when new clone is inserted to callgraph late. */
741 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
743 if (has_function_state (node))
745 free (get_function_state (node));
746 set_function_state (node, NULL);
752 register_hooks (void)
754 static bool init_p = false;
761 node_removal_hook_holder =
762 cgraph_add_node_removal_hook (&remove_node_data, NULL);
763 node_duplication_hook_holder =
764 cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
765 function_insertion_hook_holder =
766 cgraph_add_function_insertion_hook (&add_new_function, NULL);
770 /* Analyze each function in the cgraph to see if it is locally PURE or
774 generate_summary (void)
776 struct cgraph_node *node;
780 /* There are some shared nodes, in particular the initializers on
781 static declarations. We do not need to scan them more than once
782 since all we would be interested in are the addressof
784 visited_nodes = pointer_set_create ();
786 /* Process all of the functions.
788 We process AVAIL_OVERWRITABLE functions. We can not use the results
789 by default, but the info can be used at LTO with -fwhole-program or
790 when function got clonned and the clone is AVAILABLE. */
792 for (node = cgraph_nodes; node; node = node->next)
793 if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
794 set_function_state (node, analyze_function (node, true));
796 pointer_set_destroy (visited_nodes);
797 visited_nodes = NULL;
/* NOTE(review): several lines of pure_const_write_summary are missing
   from this listing (braces, `count++`, variable declarations).  Restore
   from upstream GCC ipa-pure-const.c; surviving lines kept verbatim.  */
801 /* Serialize the ipa info for lto. */
804 pure_const_write_summary (cgraph_node_set set,
805 varpool_node_set vset ATTRIBUTE_UNUSED)
807 struct cgraph_node *node;
808 struct lto_simple_output_block *ob
809 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
810 unsigned int count = 0;
811 cgraph_node_set_iterator csi;
/* First pass: count the nodes with state so the reader knows how many
   records follow.  */
813 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
815 node = csi_node (csi);
816 if (node->analyzed && has_function_state (node))
820 lto_output_uleb128_stream (ob->main_stream, count);
822 /* Process all of the functions. */
823 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
825 node = csi_node (csi);
826 if (node->analyzed && has_function_state (node))
828 struct bitpack_d *bp;
831 lto_cgraph_encoder_t encoder;
833 fs = get_function_state (node);
835 encoder = ob->decl_state->cgraph_node_encoder;
836 node_ref = lto_cgraph_encoder_encode (encoder, node);
837 lto_output_uleb128_stream (ob->main_stream, node_ref);
839 /* Note that flags will need to be read in the opposite
840 order as we are pushing the bitflags into FLAGS. */
841 bp = bitpack_create ();
842 bp_pack_value (bp, fs->pure_const_state, 2);
843 bp_pack_value (bp, fs->state_previously_known, 2);
844 bp_pack_value (bp, fs->looping_previously_known, 1);
845 bp_pack_value (bp, fs->looping, 1);
846 bp_pack_value (bp, fs->can_throw, 1);
847 lto_output_bitpack (ob->main_stream, bp);
852 lto_destroy_simple_output_block (ob);
/* NOTE(review): several lines of pure_const_read_summary are missing
   from this listing (declarations of j/i/data/len, braces, the non-NULL
   ib guard).  Restore from upstream GCC ipa-pure-const.c.  */
856 /* Deserialize the ipa info for lto. */
859 pure_const_read_summary (void)
861 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
862 struct lto_file_decl_data *file_data;
866 while ((file_data = file_data_vec[j++]))
870 struct lto_input_block *ib
871 = lto_create_simple_input_block (file_data,
872 LTO_section_ipa_pure_const,
/* Each record: node reference followed by a bitpack of the five
   state fields, unpacked in reverse push order.  */
877 unsigned int count = lto_input_uleb128 (ib);
879 for (i = 0; i < count; i++)
882 struct cgraph_node *node;
883 struct bitpack_d *bp;
885 lto_cgraph_encoder_t encoder;
887 fs = XCNEW (struct funct_state_d);
888 index = lto_input_uleb128 (ib);
889 encoder = file_data->cgraph_node_encoder;
890 node = lto_cgraph_encoder_deref (encoder, index);
891 set_function_state (node, fs);
893 /* Note that the flags must be read in the opposite
894 order in which they were written (the bitflags were
895 pushed into FLAGS). */
896 bp = lto_input_bitpack (ib);
898 = (enum pure_const_state_e) bp_unpack_value (bp, 2);
899 fs->state_previously_known
900 = (enum pure_const_state_e) bp_unpack_value (bp, 2);
901 fs->looping_previously_known = bp_unpack_value (bp, 1);
902 fs->looping = bp_unpack_value (bp, 1);
903 fs->can_throw = bp_unpack_value (bp, 1);
906 int flags = flags_from_decl_or_type (node->decl);
907 fprintf (dump_file, "Read info for %s/%i ",
908 cgraph_node_name (node),
910 if (flags & ECF_CONST)
911 fprintf (dump_file, " const");
912 if (flags & ECF_PURE)
913 fprintf (dump_file, " pure");
914 if (flags & ECF_NOTHROW)
915 fprintf (dump_file, " nothrow");
916 fprintf (dump_file, "\n pure const state: %s\n",
917 pure_const_names[fs->pure_const_state]);
918 fprintf (dump_file, " previously known state: %s\n",
/* NOTE(review): BUG — this indexes pure_const_names with the boolean
   looping_previously_known; the label says "previously known state",
   so it should almost certainly be fs->state_previously_known (fixed
   upstream).  Confirm against current GCC before changing.  */
919 pure_const_names[fs->looping_previously_known]);
921 fprintf (dump_file," function is locally looping\n");
922 if (fs->looping_previously_known)
923 fprintf (dump_file," function is previously known looping\n");
925 fprintf (dump_file," function is locally throwing\n");
930 lto_destroy_simple_input_block (file_data,
931 LTO_section_ipa_pure_const,
939 ignore_edge (struct cgraph_edge *e)
941 return (!e->can_throw_external);
944 /* Return true if NODE is self recursive function. */
947 self_recursive_p (struct cgraph_node *node)
949 struct cgraph_edge *e;
950 for (e = node->callees; e; e = e->next_callee)
951 if (e->callee == node)
/* NOTE(review): propagate is the largest function in this pass and this
   listing drops a large number of its lines (braces, `while (w)` loop
   headers, declarations of i/order_pos/fs/loop/li, the final free of
   ORDER, clearing of node->aux, etc.).  Restore from upstream GCC
   ipa-pure-const.c; surviving lines kept verbatim below.  */
956 /* Produce the global information by performing a transitive closure
957 on the local information that was produced by generate_summary.
958 Note that there is no function_transform pass since this only
959 updates the function_decl. */
964 struct cgraph_node *node;
965 struct cgraph_node *w;
966 struct cgraph_node **order =
967 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
970 struct ipa_dfs_info * w_info;
/* Summary-time hooks are no longer needed once propagation starts.  */
972 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
973 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
974 cgraph_remove_node_removal_hook (node_removal_hook_holder);
975 order_pos = ipa_utils_reduced_inorder (order, true, false, NULL);
978 dump_cgraph (dump_file);
979 ipa_utils_print_order(dump_file, "reduced", order, order_pos);
982 /* Propagate the local information thru the call graph to produce
983 the global information. All the nodes within a cycle will have
984 the same info so we collapse cycles first. Then we can do the
985 propagation in one pass from the leaves to the roots. */
986 for (i = 0; i < order_pos; i++ )
988 enum pure_const_state_e pure_const_state = IPA_CONST;
989 bool looping = false;
993 if (dump_file && (dump_flags & TDF_DETAILS))
994 fprintf (dump_file, "Starting cycle\n");
996 /* Find the worst state for any node in the cycle. */
1000 struct cgraph_edge *e;
1001 funct_state w_l = get_function_state (w);
1002 if (dump_file && (dump_flags & TDF_DETAILS))
1003 fprintf (dump_file, " Visiting %s/%i state:%s looping %i\n",
1004 cgraph_node_name (w),
1006 pure_const_names[w_l->pure_const_state],
1008 if (pure_const_state < w_l->pure_const_state)
1009 pure_const_state = w_l->pure_const_state;
/* Overwritable bodies cannot be trusted: fall back to what the
   user declared.  */
1013 if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
1015 looping |= w_l->looping_previously_known;
1016 if (pure_const_state < w_l->state_previously_known)
1017 pure_const_state = w_l->state_previously_known;
1018 if (dump_file && (dump_flags & TDF_DETAILS))
1021 " Overwritable. state %s looping %i\n",
1022 pure_const_names[w_l->state_previously_known],
1023 w_l->looping_previously_known);
1027 if (pure_const_state == IPA_NEITHER)
/* Merge in the state of every callee of W.  */
1035 for (e = w->callees; e; e = e->next_callee)
1037 struct cgraph_node *y = e->callee;
1038 enum pure_const_state_e edge_state = IPA_CONST;
1039 bool edge_looping = false;
1041 if (dump_file && (dump_flags & TDF_DETAILS))
1045 cgraph_node_name (e->callee),
1048 if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
1050 funct_state y_l = get_function_state (y);
1051 if (dump_file && (dump_flags & TDF_DETAILS))
1054 " state:%s looping:%i\n",
1055 pure_const_names[y_l->pure_const_state],
1058 if (y_l->pure_const_state > IPA_PURE
1059 && cgraph_edge_cannot_lead_to_return (e))
1061 if (dump_file && (dump_flags & TDF_DETAILS))
1064 " Ignoring side effects -> pure, looping\n");
1066 edge_state = IPA_PURE;
1067 edge_looping = true;
1071 edge_state = y_l->pure_const_state;
1072 edge_looping = y_l->looping;
/* Callee body unavailable: rely on declaration flags only.  */
1077 int flags = flags_from_decl_or_type (y->decl);
1079 if (flags & ECF_LOOPING_CONST_OR_PURE)
1081 edge_looping = true;
1082 if (dump_file && (dump_flags & TDF_DETAILS))
1083 fprintf (dump_file, " unavailable looping");
1085 if (flags & ECF_CONST)
1087 if (dump_file && (dump_flags & TDF_DETAILS))
1088 fprintf (dump_file, " const\n");
1090 else if (flags & ECF_PURE)
1092 edge_state = IPA_PURE;
1093 if (dump_file && (dump_flags & TDF_DETAILS))
1094 fprintf (dump_file, " pure\n");
1096 else if (cgraph_edge_cannot_lead_to_return (e))
1098 edge_state = IPA_PURE;
1099 edge_looping = true;
1100 if (dump_file && (dump_flags & TDF_DETAILS))
1101 fprintf (dump_file, " ignoring side effects->pure looping\n");
1105 if (dump_file && (dump_flags & TDF_DETAILS))
/* NOTE(review): "neihter" in the dump string below is a typo for
   "neither"; runtime output, fix in a code change, not here.  */
1106 fprintf (dump_file, " neihter\n");
1107 edge_state = IPA_NEITHER;
1108 edge_looping = true;
1112 /* Merge the results with what we already know.
1113 When we found function to be NEITHER, but we know
1114 it is looping pure const, be sure to set the looping flag. */
1115 pure_const_state = MAX (pure_const_state, MIN (edge_state,
1116 w_l->state_previously_known));
1117 if (edge_state > w_l->state_previously_known)
1118 looping = MAX (looping, w_l->looping_previously_known);
1120 looping = MAX (looping, MIN (edge_looping,
1121 w_l->looping_previously_known));
1122 if (pure_const_state == IPA_NEITHER)
/* Advance to the next node of the collapsed SCC.  */
1125 w_info = (struct ipa_dfs_info *) w->aux;
1126 w = w_info->next_cycle;
1128 if (dump_file && (dump_flags & TDF_DETAILS))
1129 fprintf (dump_file, "Result %s looping %i\n",
1130 pure_const_names [pure_const_state],
1133 /* Copy back the region's pure_const_state which is shared by
1134 all nodes in the region. */
1138 funct_state w_l = get_function_state (w);
1139 enum pure_const_state_e this_state = pure_const_state;
1140 bool this_looping = looping;
1142 if (w_l->state_previously_known != IPA_NEITHER
1143 && this_state > w_l->state_previously_known)
1145 this_state = w_l->state_previously_known;
1146 this_looping |= w_l->looping_previously_known;
1148 if (!this_looping && self_recursive_p (w))
1149 this_looping = true;
1150 if (!w_l->looping_previously_known)
1151 this_looping = false;
1153 /* All nodes within a cycle share the same info. */
1154 w_l->pure_const_state = this_state;
1155 w_l->looping = this_looping;
1160 if (!TREE_READONLY (w->decl))
1162 warn_function_const (w->decl, !this_looping);
1164 fprintf (dump_file, "Function found to be %sconst: %s\n",
1165 this_looping ? "looping " : "",
1166 cgraph_node_name (w));
1168 cgraph_set_readonly_flag (w, true);
1169 cgraph_set_looping_const_or_pure_flag (w, this_looping);
1173 if (!DECL_PURE_P (w->decl))
1175 warn_function_pure (w->decl, !this_looping);
1177 fprintf (dump_file, "Function found to be %spure: %s\n",
1178 this_looping ? "looping " : "",
1179 cgraph_node_name (w));
1181 cgraph_set_pure_flag (w, true);
1182 cgraph_set_looping_const_or_pure_flag (w, this_looping);
1188 w_info = (struct ipa_dfs_info *) w->aux;
1189 w = w_info->next_cycle;
1194 for (node = cgraph_nodes; node; node = node->next)
1196 /* Get rid of the aux information. */
1199 w_info = (struct ipa_dfs_info *) node->aux;
/* Second phase: nothrow discovery, over the order reduced by
   ignore_edge (edges that cannot throw externally are dropped).  */
1204 order_pos = ipa_utils_reduced_inorder (order, true, false, ignore_edge);
1207 dump_cgraph (dump_file);
1208 ipa_utils_print_order(dump_file, "reduced for nothrow", order, order_pos);
1210 /* Propagate the local information thru the call graph to produce
1211 the global information. All the nodes within a cycle will have
1212 the same info so we collapse cycles first. Then we can do the
1213 propagation in one pass from the leaves to the roots. */
1214 for (i = 0; i < order_pos; i++ )
1216 bool can_throw = false;
1219 /* Find the worst state for any node in the cycle. */
1223 struct cgraph_edge *e;
1224 funct_state w_l = get_function_state (w);
1227 || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
1233 for (e = w->callees; e; e = e->next_callee)
1235 struct cgraph_node *y = e->callee;
1237 if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
1239 funct_state y_l = get_function_state (y);
1243 if (y_l->can_throw && !TREE_NOTHROW (w->decl)
1244 && e->can_throw_external)
1247 else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
1250 w_info = (struct ipa_dfs_info *) w->aux;
1251 w = w_info->next_cycle;
1254 /* Copy back the region's pure_const_state which is shared by
1255 all nodes in the region. */
1259 funct_state w_l = get_function_state (w);
1260 if (!can_throw && !TREE_NOTHROW (w->decl))
1262 struct cgraph_edge *e;
1263 cgraph_set_nothrow_flag (w, true);
/* Caller edges no longer see external throws from W.  */
1264 for (e = w->callers; e; e = e->next_caller)
1265 e->can_throw_external = false;
1267 fprintf (dump_file, "Function found to be nothrow: %s\n",
1268 cgraph_node_name (w));
1270 else if (can_throw && !TREE_NOTHROW (w->decl))
1271 w_l->can_throw = true;
1272 w_info = (struct ipa_dfs_info *) w->aux;
1273 w = w_info->next_cycle;
1278 for (node = cgraph_nodes; node; node = node->next)
1280 /* Get rid of the aux information. */
1283 w_info = (struct ipa_dfs_info *) node->aux;
/* Tear down the per-node state now that flags are on the decls.  */
1287 if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE
1288 && has_function_state (node))
1289 free (get_function_state (node));
1293 VEC_free (funct_state, heap, funct_state_vec);
/* Pass gate: run the pure/const machinery only when -fipa-pure-const is
   enabled.  NOTE(review): the return expression is truncated in this
   view; per the comment below it presumably also bails out when the
   program already has errors (e.g. !seen_error ()) -- confirm against
   the full source.  */
1299 gate_pure_const (void)
1301 return (flag_ipa_pure_const
1302 /* Don't bother doing anything if the program has errors. */
/* Pass descriptor for the whole-program (IPA) pure/const pass.  The
   summary hooks let the analysis results be streamed for LTO; there is
   no per-function transform -- `propagate' applies everything itself.
   NOTE(review): several initializer lines (braces, sub_pass fields) are
   elided from this view.  */
1306 struct ipa_opt_pass_d pass_ipa_pure_const =
1310 "pure-const", /* name */
1311 gate_pure_const, /* gate */
1312 propagate, /* execute */
1315 0, /* static_pass_number */
1316 TV_IPA_PURE_CONST, /* tv_id */
1317 0, /* properties_required */
1318 0, /* properties_provided */
1319 0, /* properties_destroyed */
1320 0, /* todo_flags_start */
1321 0 /* todo_flags_finish */
/* IPA-specific hooks: only summary generation and (LTO) summary
   read/write are provided; optimization-summary and fixup hooks are
   unused by this pass.  */
1323 generate_summary, /* generate_summary */
1324 pure_const_write_summary, /* write_summary */
1325 pure_const_read_summary, /* read_summary */
1326 NULL, /* write_optimization_summary */
1327 NULL, /* read_optimization_summary */
1328 NULL, /* stmt_fixup */
1330 NULL, /* function_transform */
1331 NULL /* variable_transform */
1334 /* Return true if function should be skipped for local pure const analysis. */
/* Two reasons to skip NODE: (1) a caller has already been processed, so
   promoting NODE now could invalidate decisions made without rescheduling
   pass_fixup_cfg; (2) NODE's body is not reliably available (it may be
   overridden at link time), so local analysis results cannot be trusted.
   NOTE(review): the `return true;' bodies of both ifs are elided from
   this view; only the dump messages are visible.  */
1337 skip_function_for_local_pure_const (struct cgraph_node *node)
1339 /* Because we do not schedule pass_fixup_cfg over whole program after early optimizations
1340 we must not promote functions that are called by already processed functions. */
1342 if (function_called_by_processed_nodes_p ())
1345 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1348 if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
1351 fprintf (dump_file, "Function is not available or overwrittable; not analyzing.\n");
1357 /* Simple local pass for pure const discovery reusing the analysis from
1358 ipa_pure_const. This pass is effective when executed together with
1359 other optimization passes in early optimization pass queue. */
/* Per-function driver: runs analyze_function on the current function and,
   unless skip_function_for_local_pure_const vetoed it, promotes the decl
   to noreturn / const / pure / nothrow as the computed state allows.
   Warnings (-Wsuggest-attribute=*, -Wmissing-noreturn) are issued even
   when promotion itself is skipped.  NOTE(review): many interior lines
   (braces, case labels, `skip' declaration, TODO return values) are
   elided from this view.  */
1362 local_pure_const (void)
1364 bool changed = false;
1367 struct cgraph_node *node;
1369 node = cgraph_node (current_function_decl);
1370 skip = skip_function_for_local_pure_const (node);
/* If promotion is skipped and no attribute-suggestion warnings are
   requested, there is nothing useful to compute.  (Condition tail is
   elided here.)  */
1371 if (!warn_suggest_attribute_const
1372 && !warn_suggest_attribute_pure
1376 /* First do NORETURN discovery. */
/* A function whose exit block has no predecessors can never return;
   mark it volatile (noreturn) and assume it executes at most once.  */
1377 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
1378 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
1380 if (warn_missing_noreturn
1381 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
1382 warning_at (DECL_SOURCE_LOCATION (cfun->decl), OPT_Wmissing_noreturn,
1383 "function might be possible candidate "
1384 "for attribute %<noreturn%>");
1386 fprintf (dump_file, "Function found to be noreturn: %s\n",
1387 lang_hooks.decl_printable_name (current_function_decl, 2));
1389 /* Update declaration and reduce profile to executed once. */
1390 TREE_THIS_VOLATILE (current_function_decl) = 1;
1391 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
1392 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
/* Reuse the IPA analysis in local (non-IPA) mode; L is the computed
   funct_state for this function.  */
1396 l = analyze_function (node, false);
1398 switch (l->pure_const_state)
/* IPA_CONST case: warn, and unless skipped, set TREE_READONLY plus the
   looping flag.  */
1401 if (!TREE_READONLY (current_function_decl))
1403 warn_function_const (current_function_decl, !l->looping);
1406 cgraph_set_readonly_flag (node, true);
1407 cgraph_set_looping_const_or_pure_flag (node, l->looping);
1411 fprintf (dump_file, "Function found to be %sconst: %s\n",
1412 l->looping ? "looping " : "",
1413 lang_hooks.decl_printable_name (current_function_decl,
/* Already const: we may still be able to drop the "looping" bit when
   this analysis proved the function terminates.  */
1416 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1421 cgraph_set_looping_const_or_pure_flag (node, false);
1425 fprintf (dump_file, "Function found to be non-looping: %s\n",
1426 lang_hooks.decl_printable_name (current_function_decl,
/* IPA_PURE case: mirror of the const case using DECL_PURE_P.  */
1432 if (!DECL_PURE_P (current_function_decl))
1436 cgraph_set_pure_flag (node, true);
1437 cgraph_set_looping_const_or_pure_flag (node, l->looping);
1440 warn_function_pure (current_function_decl, !l->looping);
1442 fprintf (dump_file, "Function found to be %spure: %s\n",
1443 l->looping ? "looping " : "",
1444 lang_hooks.decl_printable_name (current_function_decl,
1447 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1452 cgraph_set_looping_const_or_pure_flag (node, false);
1456 fprintf (dump_file, "Function found to be non-looping: %s\n",
1457 lang_hooks.decl_printable_name (current_function_decl,
/* Nothrow discovery: mark the decl and clear can_throw_external on all
   incoming edges so callers can drop their EH regions.  */
1465 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1467 struct cgraph_edge *e;
1469 cgraph_set_nothrow_flag (node, true);
1470 for (e = node->callers; e; e = e->next_caller)
1471 e->can_throw_external = false;
1474 fprintf (dump_file, "Function found to be nothrow: %s\n",
1475 lang_hooks.decl_printable_name (current_function_decl,
/* Something changed: rerun CFG fixup so the IL is consistent with the
   new attributes (e.g. noreturn).  Otherwise return 0 (elided here).  */
1481 return execute_fixup_cfg ();
/* Pass descriptor for the per-function (early, local) variant of the
   pure/const pass.  It shares gate_pure_const with the IPA pass but
   executes local_pure_const per function.  NOTE(review): the descriptor
   continues past the end of this view (trailing fields and closing
   brace are not visible).  */
1486 struct gimple_opt_pass pass_local_pure_const =
1490 "local-pure-const", /* name */
1491 gate_pure_const, /* gate */
1492 local_pure_const, /* execute */
1495 0, /* static_pass_number */
1496 TV_IPA_PURE_CONST, /* tv_id */
1497 0, /* properties_required */
1498 0, /* properties_provided */
1499 0, /* properties_destroyed */
1500 0, /* todo_flags_start */
1501 0 /* todo_flags_finish */