1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since its information is used by
32 the rest of the compilation. */
36 #include "coretypes.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "tree-pass.h"
42 #include "langhooks.h"
43 #include "pointer-set.h"
45 #include "ipa-utils.h"
51 #include "diagnostic.h"
52 #include "langhooks.h"
54 #include "lto-streamer.h"
56 #include "tree-scalar-evolution.h"
58 static struct pointer_set_t *visited_nodes;
/* visited_nodes is created/destroyed around each whole-function scan (see
   generate_summary and add_new_function) so that shared nodes, such as the
   initializers on static declarations, are visited only once.  */
60 /* Lattice values for const and pure functions. Everything starts out
61 being const, then may drop to pure and then neither depending on
/* NOTE(review): the enumerator list is elided in this listing; the code
   below uses IPA_CONST, IPA_PURE and IPA_NEITHER, and compares states with
   "<" such that larger values are weaker (IPA_NEITHER is the worst).  */
63 enum pure_const_state_e
70 /* Holder for the const_state. There is one of these per function
75 enum pure_const_state_e pure_const_state;
76 /* What user set here; we can be always sure about this. */
77 enum pure_const_state_e state_previously_known;
78 bool looping_previously_known;
80 /* True if the function could possibly infinite loop. There are a
81 lot of ways that this could be determined. We are pretty
82 conservative here. While it is possible to cse pure and const
83 calls, it is not legal to have dce get rid of the call if there
84 is a possibility that the call could infinite loop since this is
85 a behavioral change. */
91 typedef struct funct_state_d * funct_state;
93 /* The storage of the funct_state is abstracted because there is the
94 possibility that it may be desirable to move this to the cgraph
97 /* Array, indexed by cgraph node uid, of function states. */
99 DEF_VEC_P (funct_state);
100 DEF_VEC_ALLOC_P (funct_state, heap);
101 static VEC (funct_state, heap) *funct_state_vec;
103 /* Holders of ipa cgraph hooks: */
104 static struct cgraph_node_hook_list *function_insertion_hook_holder;
105 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
106 static struct cgraph_node_hook_list *node_removal_hook_holder;
108 /* Init the function state. */
/* NOTE(review): the function bodies around these lines are elided in this
   listing.  The free below releases the per-function state vector, which
   looks like teardown rather than the init described by the comment above
   -- confirm against the full source.  */
113 free (funct_state_vec);
117 /* Return the function state from NODE. */
119 static inline funct_state
120 get_function_state (struct cgraph_node *node)
/* NOTE(review): the first operand of the guard below (and the body of the
   out-of-range branch) is elided; the visible test bounds-checks NODE's
   uid against the state vector before indexing it.  */
123 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
125 return VEC_index (funct_state, funct_state_vec, node->uid);
128 /* Set the function state S for NODE. */
131 set_function_state (struct cgraph_node *node, funct_state s)
/* Grow the vector (zero-filled) if NODE's uid is beyond its current
   length, then store S at slot uid.  */
134 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
135 VEC_safe_grow_cleared (funct_state, heap, funct_state_vec, node->uid + 1);
136 VEC_replace (funct_state, funct_state_vec, node->uid, s);
139 /* Check to see if the use (or definition when CHECKING_WRITE is true)
140 variable T is legal in a function that is either pure or const.  The
   result is recorded by downgrading LOCAL->pure_const_state.  */
143 check_decl (funct_state local,
144 tree t, bool checking_write)
146 /* Do not want to do anything with volatile except mark any
147 function that uses one to be not const or pure. */
148 if (TREE_THIS_VOLATILE (t))
150 local->pure_const_state = IPA_NEITHER;
152 fprintf (dump_file, " Volatile operand is not const/pure");
156 /* Do not care about a local automatic that is not static. */
157 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
160 /* If the variable has the "used" attribute, treat it as if it had
161 been touched by the devil. */
162 if (lookup_attribute ("used", DECL_ATTRIBUTES (t)))
164 local->pure_const_state = IPA_NEITHER;
166 fprintf (dump_file, " Used static/global variable is not const/pure\n");
170 /* Since we have dealt with the locals and params cases above, if we
171 are CHECKING_WRITE, this cannot be a pure or constant
175 local->pure_const_state = IPA_NEITHER;
177 fprintf (dump_file, " static/global memory write is not const/pure\n");
181 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
183 /* Readonly reads are safe. */
184 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
185 return; /* Read of a constant, do not change the function state. */
189 fprintf (dump_file, " global memory read is not const\n");
190 /* Just a regular read: a const function may drop to pure, but a pure
   one stays pure. */
191 if (local->pure_const_state == IPA_CONST)
192 local->pure_const_state = IPA_PURE;
197 /* Compilation level statics can be read if they are readonly
199 if (TREE_READONLY (t))
203 fprintf (dump_file, " static memory read is not const\n")
204 /* Just a regular read. */
205 if (local->pure_const_state == IPA_CONST)
206 local->pure_const_state = IPA_PURE;
211 /* Check to see if the use (or definition when CHECKING_WRITE is true)
212 variable T is legal in a function that is either pure or const.  Unlike
   check_decl, this handles memory references: T is reduced to its base
   address first.  */
215 check_op (funct_state local, tree t, bool checking_write)
217 t = get_base_address (t);
218 if (t && TREE_THIS_VOLATILE (t))
220 local->pure_const_state = IPA_NEITHER;
222 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
/* NOTE(review): the head of the condition below is elided; the visible
   operands accept an indirect ref through an SSA pointer proven not to
   alias global memory (purely local memory is harmless).  */
226 && INDIRECT_REF_P (t)
227 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
228 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
231 fprintf (dump_file, " Indirect ref to local memory is OK\n");
234 else if (checking_write)
236 local->pure_const_state = IPA_NEITHER;
238 fprintf (dump_file, " Indirect ref write is not const/pure\n");
244 fprintf (dump_file, " Indirect ref read is not const\n");
/* A read through a pointer demotes const to pure.  */
245 if (local->pure_const_state == IPA_CONST)
246 local->pure_const_state = IPA_PURE;
250 /* Check the parameters of a function call to CALL_EXPR to see if
251 there are any references in the parameters that are not allowed for
252 pure or const functions. Also check to see if this is either an
253 indirect call, a call outside the compilation unit, or has special
254 attributes that may also affect the purity. The CALL_EXPR node for
255 the entire call expression. */
258 check_call (funct_state local, gimple call, bool ipa)
260 int flags = gimple_call_flags (call);
261 tree callee_t = gimple_call_fndecl (call);
262 struct cgraph_node* callee;
263 enum availability avail = AVAIL_NOT_AVAILABLE;
264 bool possibly_throws = stmt_could_throw_p (call);
265 bool possibly_throws_externally = (possibly_throws
266 && stmt_can_throw_external (call));
/* If any call operand can itself throw (with -fnon-call-exceptions),
   the function may loop via the EH path and may throw externally.  */
271 for (i = 0; i < gimple_num_ops (call); i++)
272 if (gimple_op (call, i)
273 && tree_could_throw_p (gimple_op (call, i)))
275 if (possibly_throws && flag_non_call_exceptions)
278 fprintf (dump_file, " operand can throw; looping\n");
279 local->looping = true;
281 if (possibly_throws_externally)
284 fprintf (dump_file, " operand can throw externally\n");
285 local->can_throw = true;
290 /* The const and pure flags are set by a variety of places in the
291 compiler (including here). If someone has already set the flags
292 for the callee, (such as for some of the builtins) we will use
293 them, otherwise we will compute our own information.
295 Const and pure functions have less clobber effects than other
296 functions so we process these first. Otherwise if it is a call
297 outside the compilation unit or an indirect call we punt. This
298 leaves local calls which will be processed by following the call
302 callee = cgraph_node(callee_t);
303 avail = cgraph_function_body_availability (callee);
305 /* When bad things happen to bad functions, they cannot be const
307 if (setjmp_call_p (callee_t))
310 fprintf (dump_file, " setjmp is not const/pure\n");
311 local->looping = true;
312 local->pure_const_state = IPA_NEITHER;
315 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
316 switch (DECL_FUNCTION_CODE (callee_t))
318 case BUILT_IN_LONGJMP:
319 case BUILT_IN_NONLOCAL_GOTO:
321 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
322 local->pure_const_state = IPA_NEITHER;
323 local->looping = true;
330 /* When not in IPA mode, we can still handle self recursion. */
331 if (!ipa && callee_t == current_function_decl)
332 local->looping = true;
333 /* The callee is either unknown (indirect call) or there is just no
334 scannable code for it (external call). We look to see if there
335 are any bits available for the callee (such as by declaration or
336 because it is builtin) and process solely on the basis of those
338 else if (avail <= AVAIL_OVERWRITABLE || !ipa)
340 if (possibly_throws && flag_non_call_exceptions)
343 fprintf (dump_file, " can throw; looping\n");
344 local->looping = true;
346 if (possibly_throws_externally)
350 fprintf (dump_file, " can throw externally to lp %i\n",
351 lookup_stmt_eh_lp (call));
353 fprintf (dump_file, " callee:%s\n",
354 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
356 local->can_throw = true;
/* Trust the ECF flags the callee already carries: const callees keep us
   const; pure callees demote const to pure; a looping const/pure callee
   makes us looping too.  */
358 if (flags & ECF_CONST)
360 if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
361 local->looping = true;
363 else if (flags & ECF_PURE)
365 if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
366 local->looping = true;
368 fprintf (dump_file, " pure function call in not const\n");
369 if (local->pure_const_state == IPA_CONST)
370 local->pure_const_state = IPA_PURE;
/* NOTE(review): "uknown" below is a typo in a runtime dump string; a
   doc-only change must leave it byte-identical.  */
375 fprintf (dump_file, " uknown function call is not const/pure\n");
376 local->pure_const_state = IPA_NEITHER;
377 local->looping = true;
380 /* Direct functions calls are handled by IPA propagation. */
383 /* Wrapper around check_decl for loads, usable as a
   walk_stmt_load_store_ops callback (see check_stmt). */
386 check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
/* NOTE(review): the condition between the two calls is elided; presumably
   DECL operands go to check_decl and other references to check_op --
   confirm against the full source.  */
389 check_decl ((funct_state)data, op, false);
391 check_op ((funct_state)data, op, false);
395 /* Wrapper around check_decl for stores, usable as a
   walk_stmt_load_store_ops callback (see check_stmt); CHECKING_WRITE is
   true here. */
398 check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
/* NOTE(review): the condition between the two calls is elided; presumably
   DECL operands go to check_decl and other references to check_op --
   confirm against the full source.  */
401 check_decl ((funct_state)data, op, true);
403 check_op ((funct_state)data, op, true);
407 /* Look into pointer pointed to by GSIP and figure out what interesting side
   effects it has, accumulating the result in LOCAL.  IPA selects whole-
   program mode (see check_call). */
410 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
412 gimple stmt = gsi_stmt (*gsip);
/* Debug statements carry no side effects; skip them.  */
415 if (is_gimple_debug (stmt))
420 fprintf (dump_file, " scanning: ");
421 print_gimple_stmt (dump_file, stmt, 0, 0);
424 /* Look for loads and stores. */
425 walk_stmt_load_store_ops (stmt, local, check_load, check_store);
/* Non-call statements that can throw also affect looping/can_throw.  */
427 if (gimple_code (stmt) != GIMPLE_CALL
428 && stmt_could_throw_p (stmt))
430 if (flag_non_call_exceptions)
433 fprintf (dump_file, " can throw; looping");
434 local->looping = true;
436 if (stmt_can_throw_external (stmt))
439 fprintf (dump_file, " can throw externally");
440 local->can_throw = true;
443 switch (gimple_code (stmt))
446 check_call (local, stmt, ipa);
449 if (DECL_NONLOCAL (gimple_label_label (stmt)))
450 /* Target of long jump. */
453 fprintf (dump_file, " nonlocal label is not const/pure");
454 local->pure_const_state = IPA_NEITHER;
/* Asm statements: a "memory" clobber or a volatile asm forfeits
   const/pure.  */
458 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
460 tree op = gimple_asm_clobber_op (stmt, i);
461 if (simple_cst_equal(TREE_VALUE (op), memory_identifier_string) == 1)
464 fprintf (dump_file, " memory asm clobber is not const/pure");
465 /* Abandon all hope, ye who enter here. */
466 local->pure_const_state = IPA_NEITHER;
469 if (gimple_asm_volatile_p (stmt))
472 fprintf (dump_file, " volatile is not const/pure");
473 /* Abandon all hope, ye who enter here. */
474 local->pure_const_state = IPA_NEITHER;
475 local->looping = true;
484 /* This is the main routine for finding the reference patterns for
485 global variables within a function FN.  Returns a freshly allocated
   funct_state summarizing FN; IPA selects whole-program mode. */
488 analyze_function (struct cgraph_node *fn, bool ipa)
490 tree decl = fn->decl;
491 tree old_decl = current_function_decl;
493 basic_block this_block;
495 if (cgraph_function_body_availability (fn) <= AVAIL_OVERWRITABLE)
498 fprintf (dump_file, "Function is not available or overwrittable; not analyzing.\n");
/* Start from the optimistic lattice top (const); scanning only ever
   demotes the state.  */
502 l = XCNEW (struct funct_state_d);
503 l->pure_const_state = IPA_CONST;
504 l->state_previously_known = IPA_NEITHER;
505 l->looping_previously_known = true;
507 l->can_throw = false;
511 fprintf (dump_file, "\n\n local analysis of %s\n ",
512 cgraph_node_name (fn));
515 push_cfun (DECL_STRUCT_FUNCTION (decl));
516 current_function_decl = decl;
/* Scan every statement of every basic block; bail out early once the
   state is already as bad as it can get.  */
518 FOR_EACH_BB (this_block)
520 gimple_stmt_iterator gsi;
521 struct walk_stmt_info wi;
523 memset (&wi, 0, sizeof(wi));
524 for (gsi = gsi_start_bb (this_block);
528 check_stmt (&gsi, l, ipa);
529 if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
535 if (l->pure_const_state != IPA_NEITHER)
537 /* Const functions cannot have back edges (an
538 indication of possible infinite loop side
540 if (mark_dfs_back_edges ())
542 /* Preheaders are needed for SCEV to work.
543 Simple latches and recorded exits improve chances that the loop will
544 be proved finite in testcases such as in loop-15.c and loop-24.c */
545 loop_optimizer_init (LOOPS_NORMAL
546 | LOOPS_HAVE_RECORDED_EXITS);
547 if (dump_file && (dump_flags & TDF_DETAILS))
548 flow_loops_dump (dump_file, NULL, 0);
549 if (mark_irreducible_loops ())
552 fprintf (dump_file, " has irreducible loops\n");
560 FOR_EACH_LOOP (li, loop, 0)
561 if (!finite_loop_p (loop))
564 fprintf (dump_file, " can not prove finiteness of loop %i\n", loop->num);
570 loop_optimizer_finalize ();
/* Honor flags the user (or front end) already put on the decl; they
   override what we computed, and record what was previously known.  */
574 if (TREE_READONLY (decl))
576 l->pure_const_state = IPA_CONST;
577 l->state_previously_known = IPA_CONST;
578 if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
579 l->looping = false, l->looping_previously_known = false;
581 if (DECL_PURE_P (decl))
583 if (l->pure_const_state != IPA_CONST)
584 l->pure_const_state = IPA_PURE;
585 l->state_previously_known = IPA_PURE;
586 if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
587 l->looping = false, l->looping_previously_known = false;
589 if (TREE_NOTHROW (decl))
590 l->can_throw = false;
593 current_function_decl = old_decl;
597 fprintf (dump_file, "Function is locally looping.\n");
599 fprintf (dump_file, "Function is locally throwing.\n");
600 if (l->pure_const_state == IPA_CONST)
601 fprintf (dump_file, "Function is locally const.\n");
602 if (l->pure_const_state == IPA_PURE)
603 fprintf (dump_file, "Function is locally pure.\n");
608 /* Called when new function is inserted to callgraph late.  Runs the
   local analysis immediately so the new NODE has a summary. */
610 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
612 if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
614 /* There are some shared nodes, in particular the initializers on
615 static declarations. We do not need to scan them more than once
616 since all we would be interested in are the addressof
618 visited_nodes = pointer_set_create ();
619 set_function_state (node, analyze_function (node, true));
620 pointer_set_destroy (visited_nodes);
621 visited_nodes = NULL;
624 /* Called when new clone is inserted to callgraph late.  Gives DST a
   deep copy of SRC's funct_state, if SRC has one. */
627 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
628 void *data ATTRIBUTE_UNUSED)
630 if (get_function_state (src))
632 funct_state l = XNEW (struct funct_state_d);
/* The clone must not already own a state; copy by value so the two
   nodes can diverge independently.  */
633 gcc_assert (!get_function_state (dst));
634 memcpy (l, get_function_state (src), sizeof (*l));
635 set_function_state (dst, l);
639 /* Called when a node is removed from the callgraph (registered via
   cgraph_add_node_removal_hook); frees NODE's funct_state. */
642 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
644 if (get_function_state (node))
646 free (get_function_state (node));
647 set_function_state (node, NULL);
/* Register the cgraph insertion/duplication/removal hooks exactly once
   (the init_p early-return is elided in this listing), saving the holders
   so propagate can unregister them later. */
653 register_hooks (void)
655 static bool init_p = false;
662 node_removal_hook_holder =
663 cgraph_add_node_removal_hook (&remove_node_data, NULL);
664 node_duplication_hook_holder =
665 cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
666 function_insertion_hook_holder =
667 cgraph_add_function_insertion_hook (&add_new_function, NULL);
671 /* Analyze each function in the cgraph to see if it is locally PURE or
675 generate_summary (void)
677 struct cgraph_node *node;
681 /* There are some shared nodes, in particular the initializers on
682 static declarations. We do not need to scan them more than once
683 since all we would be interested in are the addressof
685 visited_nodes = pointer_set_create ();
687 /* Process all of the functions.
689 We do NOT process any AVAIL_OVERWRITABLE functions, we cannot
690 guarantee that what we learn about the one we see will be true
691 for the one that overrides it.
693 for (node = cgraph_nodes; node; node = node->next)
694 if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
695 set_function_state (node, analyze_function (node, true))
697 pointer_set_destroy (visited_nodes);
698 visited_nodes = NULL;
702 /* Serialize the ipa info for lto.  Writes, for each analyzed node in
   SET that has a funct_state, its node reference and a bitpack of the
   five state fields. */
705 pure_const_write_summary (cgraph_node_set set)
707 struct cgraph_node *node;
708 struct lto_simple_output_block *ob
709 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
710 unsigned int count = 0;
711 cgraph_node_set_iterator csi;
/* First pass: count the nodes that will be emitted, so the reader knows
   how many records to expect.  */
713 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
715 node = csi_node (csi);
716 if (node->analyzed && get_function_state (node) != NULL)
720 lto_output_uleb128_stream (ob->main_stream, count);
722 /* Process all of the functions. */
723 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
725 node = csi_node (csi);
726 if (node->analyzed && get_function_state (node) != NULL)
728 struct bitpack_d *bp;
731 lto_cgraph_encoder_t encoder;
733 fs = get_function_state (node);
735 encoder = ob->decl_state->cgraph_node_encoder;
736 node_ref = lto_cgraph_encoder_encode (encoder, node);
737 lto_output_uleb128_stream (ob->main_stream, node_ref);
739 /* Note that flags will need to be read in the opposite
740 order as we are pushing the bitflags into FLAGS. */
741 bp = bitpack_create ();
742 bp_pack_value (bp, fs->pure_const_state, 2);
743 bp_pack_value (bp, fs->state_previously_known, 2);
744 bp_pack_value (bp, fs->looping_previously_known, 1);
745 bp_pack_value (bp, fs->looping, 1);
746 bp_pack_value (bp, fs->can_throw, 1);
747 lto_output_bitpack (ob->main_stream, bp);
752 lto_destroy_simple_output_block (ob);
756 /* Deserialize the ipa info for lto.  Mirror of
   pure_const_write_summary: for each record, resolve the node reference
   and rebuild its funct_state from the bitpack. */
759 pure_const_read_summary (void)
761 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
762 struct lto_file_decl_data *file_data;
766 while ((file_data = file_data_vec[j++]))
770 struct lto_input_block *ib
771 = lto_create_simple_input_block (file_data,
772 LTO_section_ipa_pure_const,
777 unsigned int count = lto_input_uleb128 (ib);
779 for (i = 0; i < count; i++)
782 struct cgraph_node *node;
783 struct bitpack_d *bp;
785 lto_cgraph_encoder_t encoder;
787 fs = XCNEW (struct funct_state_d);
788 index = lto_input_uleb128 (ib);
789 encoder = file_data->cgraph_node_encoder;
790 node = lto_cgraph_encoder_deref (encoder, index);
791 set_function_state (node, fs);
793 /* Note that the flags must be read in the opposite
794 order in which they were written (the bitflags were
795 pushed into FLAGS). */
/* NOTE(review): despite the comment above, the fields below are unpacked
   in the same sequence they were packed by pure_const_write_summary; the
   comment appears stale.  Confirm against the bitpack API semantics in
   lto-streamer before relying on it.  */
796 bp = lto_input_bitpack (ib);
798 = (enum pure_const_state_e) bp_unpack_value (bp, 2);
799 fs->state_previously_known
800 = (enum pure_const_state_e) bp_unpack_value (bp, 2);
801 fs->looping_previously_known = bp_unpack_value (bp, 1);
802 fs->looping = bp_unpack_value (bp, 1);
803 fs->can_throw = bp_unpack_value (bp, 1);
807 lto_destroy_simple_input_block (file_data,
808 LTO_section_ipa_pure_const,
/* Edge predicate used when building the reduced order for nothrow
   propagation (see propagate): edges that cannot throw to their caller
   are irrelevant and are ignored. */
816 ignore_edge (struct cgraph_edge *e)
818 return (!e->can_throw_external);
821 /* Return true if NODE is self recursive function, i.e. has a callee
   edge back to itself.  (The return statements are elided in this
   listing.) */
824 self_recursive_p (struct cgraph_node *node)
826 struct cgraph_edge *e;
827 for (e = node->callees; e; e = e->next_callee)
828 if (e->callee == node)
833 /* Produce the global information by performing a transitive closure
834 on the local information that was produced by generate_summary.
835 Note that there is no function_transform pass since this only
836 updates the function_decl. */
841 struct cgraph_node *node;
842 struct cgraph_node *w;
843 struct cgraph_node **order =
844 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
847 struct ipa_dfs_info * w_info;
/* Summaries are final now; stop listening for cgraph changes.  */
849 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
850 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
851 cgraph_remove_node_removal_hook (node_removal_hook_holder);
/* Phase 1: const/pure propagation over the SCC-reduced order.  */
852 order_pos = ipa_utils_reduced_inorder (order, true, false, NULL);
855 dump_cgraph (dump_file);
856 ipa_utils_print_order(dump_file, "reduced", order, order_pos);
859 /* Propagate the local information thru the call graph to produce
860 the global information. All the nodes within a cycle will have
861 the same info so we collapse cycles first. Then we can do the
862 propagation in one pass from the leaves to the roots. */
863 for (i = 0; i < order_pos; i++ )
865 enum pure_const_state_e pure_const_state = IPA_CONST;
866 bool looping = false;
870 /* Find the worst state for any node in the cycle. */
874 struct cgraph_edge *e;
875 funct_state w_l = get_function_state (w);
876 if (pure_const_state < w_l->pure_const_state)
877 pure_const_state = w_l->pure_const_state;
/* Already at lattice bottom; no point scanning callees.  */
882 if (pure_const_state == IPA_NEITHER)
890 for (e = w->callees; e; e = e->next_callee)
892 struct cgraph_node *y = e->callee;
894 if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
896 funct_state y_l = get_function_state (y);
897 if (pure_const_state < y_l->pure_const_state)
898 pure_const_state = y_l->pure_const_state;
899 if (pure_const_state == IPA_NEITHER)
905 w_info = (struct ipa_dfs_info *) w->aux;
906 w = w_info->next_cycle;
909 /* Copy back the region's pure_const_state which is shared by
910 all nodes in the region. */
914 funct_state w_l = get_function_state (w);
915 enum pure_const_state_e this_state = pure_const_state;
916 bool this_looping = looping;
/* A user-declared state that is stronger than what we computed wins.  */
918 if (w_l->state_previously_known != IPA_NEITHER
919 && this_state > w_l->state_previously_known)
920 this_state = w_l->state_previously_known;
921 if (!this_looping && self_recursive_p (w))
923 if (!w_l->looping_previously_known)
924 this_looping = false;
926 /* All nodes within a cycle share the same info. */
927 w_l->pure_const_state = this_state;
928 w_l->looping = this_looping;
933 if (!TREE_READONLY (w->decl) && dump_file)
934 fprintf (dump_file, "Function found to be %sconst: %s\n",
935 this_looping ? "looping " : "",
936 cgraph_node_name (w));
937 TREE_READONLY (w->decl) = 1;
938 DECL_LOOPING_CONST_OR_PURE_P (w->decl) = this_looping;
942 if (!DECL_PURE_P (w->decl) && dump_file)
943 fprintf (dump_file, "Function found to be %spure: %s\n",
944 this_looping ? "looping " : "",
945 cgraph_node_name (w));
946 DECL_PURE_P (w->decl) = 1;
947 DECL_LOOPING_CONST_OR_PURE_P (w->decl) = this_looping;
953 w_info = (struct ipa_dfs_info *) w->aux;
954 w = w_info->next_cycle;
959 for (node = cgraph_nodes; node; node = node->next)
961 /* Get rid of the aux information. */
964 w_info = (struct ipa_dfs_info *) node->aux;
/* Phase 2: nothrow propagation; rebuild the reduced order ignoring
   edges that cannot throw externally (see ignore_edge).  */
969 order_pos = ipa_utils_reduced_inorder (order, true, false, ignore_edge);
972 dump_cgraph (dump_file);
973 ipa_utils_print_order(dump_file, "reduced for nothrow", order, order_pos);
975 /* Propagate the local information thru the call graph to produce
976 the global information. All the nodes within a cycle will have
977 the same info so we collapse cycles first. Then we can do the
978 propagation in one pass from the leaves to the roots. */
979 for (i = 0; i < order_pos; i++ )
981 bool can_throw = false;
984 /* Find the worst state for any node in the cycle. */
988 struct cgraph_edge *e;
989 funct_state w_l = get_function_state (w);
997 for (e = w->callees; e; e = e->next_callee)
999 struct cgraph_node *y = e->callee;
1001 if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
1003 funct_state y_l = get_function_state (y);
1007 if (y_l->can_throw && !TREE_NOTHROW (w->decl)
1008 && e->can_throw_external)
1012 w_info = (struct ipa_dfs_info *) w->aux;
1013 w = w_info->next_cycle;
1016 /* Copy back the region's pure_const_state which is shared by
1017 all nodes in the region. */
1021 funct_state w_l = get_function_state (w);
1022 if (!can_throw && !TREE_NOTHROW (w->decl))
1024 struct cgraph_edge *e;
1025 TREE_NOTHROW (w->decl) = true;
/* Caller edges to a nothrow function can no longer throw.  */
1026 for (e = w->callers; e; e = e->next_caller)
1027 e->can_throw_external = false;
1029 fprintf (dump_file, "Function found to be nothrow: %s\n",
1030 cgraph_node_name (w));
1032 else if (can_throw && !TREE_NOTHROW (w->decl))
1033 w_l->can_throw = true;
1034 w_info = (struct ipa_dfs_info *) w->aux;
1035 w = w_info->next_cycle;
1040 for (node = cgraph_nodes; node; node = node->next)
1042 /* Get rid of the aux information. */
1045 w_info = (struct ipa_dfs_info *) node->aux;
/* Final cleanup: free every per-node state and the vector itself.  */
1049 if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
1050 free (get_function_state (node));
1054 VEC_free (funct_state, heap, funct_state_vec);
/* Gate: run only when -fipa-pure-const is enabled and no errors have
   been reported.  Shared by both the IPA and the local pass. */
1060 gate_pure_const (void)
1062 return (flag_ipa_pure_const
1063 /* Don't bother doing anything if the program has errors. */
1064 && !(errorcount || sorrycount));
/* IPA pass descriptor: summaries are produced by generate_summary,
   streamed by the write/read hooks for LTO, and combined by propagate. */
1067 struct ipa_opt_pass_d pass_ipa_pure_const =
1071 "pure-const", /* name */
1072 gate_pure_const, /* gate */
1073 propagate, /* execute */
1076 0, /* static_pass_number */
1077 TV_IPA_PURE_CONST, /* tv_id */
1078 0, /* properties_required */
1079 0, /* properties_provided */
1080 0, /* properties_destroyed */
1081 0, /* todo_flags_start */
1082 0 /* todo_flags_finish */
1084 generate_summary, /* generate_summary */
1085 pure_const_write_summary, /* write_summary */
1086 pure_const_read_summary, /* read_summary */
1087 NULL, /* function_read_summary */
1089 NULL, /* function_transform */
1090 NULL /* variable_transform */
1093 /* Simple local pass for pure const discovery reusing the analysis from
1094 ipa_pure_const. This pass is effective when executed together with
1095 other optimization passes in early optimization pass queue. */
1098 local_pure_const (void)
1100 bool changed = false;
1103 /* Because we do not schedule pass_fixup_cfg over whole program after early optimizations
1104 we must not promote functions that are called by already processed functions. */
1106 if (function_called_by_processed_nodes_p ())
1109 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
/* Run the local (non-IPA) analysis on the current function only.  */
1113 l = analyze_function (cgraph_node (current_function_decl), false);
1117 fprintf (dump_file, "Function has wrong visibility; ignoring\n");
/* Promote the decl according to the computed state; each arm also clears
   a stale "looping" flag when the analysis shows the function does not
   loop.  */
1121 switch (l->pure_const_state)
1124 if (!TREE_READONLY (current_function_decl))
1126 TREE_READONLY (current_function_decl) = 1;
1127 DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = l->looping;
1130 fprintf (dump_file, "Function found to be %sconst: %s\n",
1131 l->looping ? "looping " : "",
1132 lang_hooks.decl_printable_name (current_function_decl,
1135 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1138 DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = false;
1141 fprintf (dump_file, "Function found to be non-looping: %s\n",
1142 lang_hooks.decl_printable_name (current_function_decl,
1148 if (!TREE_READONLY (current_function_decl))
1150 DECL_PURE_P (current_function_decl) = 1;
1151 DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = l->looping;
1154 fprintf (dump_file, "Function found to be %spure: %s\n",
1155 l->looping ? "looping " : "",
1156 lang_hooks.decl_printable_name (current_function_decl,
1159 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1162 DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = false;
1165 fprintf (dump_file, "Function found to be non-looping: %s\n",
1166 lang_hooks.decl_printable_name (current_function_decl,
/* Nothrow discovery mirrors propagate's phase 2, but only for this
   function and its caller edges.  */
1174 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1176 struct cgraph_edge *e;
1178 TREE_NOTHROW (current_function_decl) = true;
1179 for (e = cgraph_node (current_function_decl)->callers;
1180 e; e = e->next_caller)
1181 e->can_throw_external = false;
1184 fprintf (dump_file, "Function found to be nothrow: %s\n",
1185 lang_hooks.decl_printable_name (current_function_decl,
1191 return execute_fixup_cfg ();
/* Gimple pass descriptor for the per-function variant; shares
   gate_pure_const with the IPA pass and executes local_pure_const. */
1196 struct gimple_opt_pass pass_local_pure_const =
1200 "local-pure-const", /* name */
1201 gate_pure_const, /* gate */
1202 local_pure_const, /* execute */
1205 0, /* static_pass_number */
1206 TV_IPA_PURE_CONST, /* tv_id */
1207 0, /* properties_required */
1208 0, /* properties_provided */
1209 0, /* properties_destroyed */
1210 0, /* todo_flags_start */
1211 0 /* todo_flags_finish */