1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
45 #include "cfglayout.h"
46 #include "tree-ssa-propagate.h"
47 #include "value-prof.h"
48 #include "pointer-set.h"
49 #include "tree-inline.h"
51 /* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
54 /* Local declarations. */
/* NOTE(review): this listing is a sampled excerpt — the embedded original
   line numbers jump, so some declarations and braces are elided.  Verify
   any change against the complete tree-cfg.c.  */
56 /* Initial capacity for the basic block array. */
57 static const int initial_cfg_capacity = 20;
59 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
62 hash table to prevent problems with duplication of SWITCH_EXPRs.
64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
72 static struct pointer_map_t *edge_to_cases;
/* Field of struct cfg_stats_d; the struct's opening lines are elided
   from this listing.  Counts labels merged by cleanup_dead_labels.  */
77 long num_merged_labels;
/* CFG construction statistics, zeroed at the top of build_tree_cfg.  */
80 static struct cfg_stats_d cfg_stats;
82 /* Nonzero if we found a computed goto while building basic blocks. */
83 static bool found_computed_goto;
85 /* Basic blocks and flowgraphs. */
86 static basic_block create_bb (void *, void *, basic_block);
87 static void make_blocks (tree);
88 static void factor_computed_gotos (void);
/* Edge creation helpers, one per control-transfer statement kind.  */
91 static void make_edges (void);
92 static void make_cond_expr_edges (basic_block);
93 static void make_switch_expr_edges (basic_block);
94 static void make_goto_expr_edges (basic_block);
95 static edge tree_redirect_edge_and_branch (edge, basic_block);
96 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
97 static unsigned int split_critical_edges (void);
99 /* Various helpers. */
100 static inline bool stmt_starts_bb_p (const_tree, const_tree);
101 static int tree_verify_flow_info (void);
102 static void tree_make_forwarder_block (edge);
103 static void tree_cfg2vcg (FILE *);
104 static inline void change_bb_for_stmt (tree t, basic_block bb);
106 /* Flowgraph optimization and cleanup. */
107 static void tree_merge_blocks (basic_block, basic_block);
108 static bool tree_can_merge_blocks_p (basic_block, basic_block);
109 static void remove_bb (basic_block);
110 static edge find_taken_edge_computed_goto (basic_block, tree);
111 static edge find_taken_edge_cond_expr (basic_block, tree);
112 static edge find_taken_edge_switch_expr (basic_block, tree);
113 static tree find_case_label_for_value (tree, tree);
/* Reset the CFG of the current function to the minimal valid state:
   just ENTRY and EXIT blocks linked together, with the basic-block and
   label-to-block arrays pre-sized to initial_cfg_capacity.  (The return
   type and opening brace are elided from this listing.)  */
116 init_empty_tree_cfg (void)
118 /* Initialize the basic block array. */
120 profile_status = PROFILE_ABSENT;
121 n_basic_blocks = NUM_FIXED_BLOCKS;
122 last_basic_block = NUM_FIXED_BLOCKS;
123 basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
124 VEC_safe_grow_cleared (basic_block, gc, basic_block_info,
125 initial_cfg_capacity);
127 /* Build a mapping of labels to their associated blocks. */
128 label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
129 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
130 initial_cfg_capacity);
/* Install the two fixed blocks and chain ENTRY directly to EXIT;
   real blocks are linked in between as they are created.  */
132 SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
133 SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
134 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
135 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
138 /*---------------------------------------------------------------------------
140 ---------------------------------------------------------------------------*/
142 /* Entry point to the CFG builder for trees. TP points to the list of
143 statements to be added to the flowgraph. */
146 build_tree_cfg (tree *tp)
148 /* Register specific tree functions. */
149 tree_register_cfg_hooks ();
151 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
153 init_empty_tree_cfg ();
155 found_computed_goto = 0;
/* NOTE(review): the call to make_blocks (*tp), which sets
   found_computed_goto, is elided from this listing between here and the
   test below — confirm against the full file.  */
158 /* Computed gotos are hell to deal with, especially if there are
159 lots of them with a large number of destinations. So we factor
160 them to a common computed goto location before we build the
161 edge list. After we convert back to normal form, we will un-factor
162 the computed gotos since factoring introduces an unwanted jump. */
163 if (found_computed_goto)
164 factor_computed_gotos ();
166 /* Make sure there is always at least one block, even if it's empty. */
167 if (n_basic_blocks == NUM_FIXED_BLOCKS)
168 create_empty_bb (ENTRY_BLOCK_PTR);
170 /* Adjust the size of the array. */
171 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
172 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
174 /* To speed up statement iterator walks, we first purge dead labels. */
175 cleanup_dead_labels ();
177 /* Group case nodes to reduce the number of edges.
178 We do this after cleaning up dead labels because otherwise we miss
179 a lot of obvious case merging opportunities. */
180 group_case_labels ();
182 /* Create the edges of the flowgraph. */
/* Run label cleanup a second time: edge creation (make_edges, elided
   here) removes simple gotos and can leave labels unreferenced.  */
184 cleanup_dead_labels ();
186 /* Debugging dumps. */
188 /* Write the flowgraph to a VCG file. */
190 int local_dump_flags;
191 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
/* vcg_file is non-NULL only when -fdump-tree-vcg was given; the guard
   is elided from this listing.  */
194 tree_cfg2vcg (vcg_file);
195 dump_end (TDI_vcg, vcg_file);
199 #ifdef ENABLE_CHECKING
203 /* Dump a textual representation of the flowgraph. */
205 dump_tree_cfg (dump_file, dump_flags);
/* Pass callback: build the CFG for the current function's saved tree.  */
209 execute_build_cfg (void)
211 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
/* Pass descriptor; several designated fields are elided in this listing.  */
215 struct gimple_opt_pass pass_build_cfg =
221 execute_build_cfg, /* execute */
224 0, /* static_pass_number */
225 TV_TREE_CFG, /* tv_id */
226 PROP_gimple_leh, /* properties_required */
227 PROP_cfg, /* properties_provided */
228 0, /* properties_destroyed */
229 0, /* todo_flags_start */
230 TODO_verify_stmts | TODO_cleanup_cfg /* todo_flags_finish */
234 /* Search the CFG for any computed gotos. If found, factor them to a
235 common computed goto site. Also record the location of that site so
236 that we can un-factor the gotos after we have converted back to
240 factor_computed_gotos (void)
243 tree factored_label_decl = NULL;
/* Shared destination variable ("gotovar") is declared in elided lines.  */
245 tree factored_computed_goto_label = NULL;
246 tree factored_computed_goto = NULL;
248 /* We know there are one or more computed gotos in this function.
249 Examine the last statement in each basic block to see if the block
250 ends with a computed goto. */
254 block_stmt_iterator bsi = bsi_last (bb);
259 last = bsi_stmt (bsi);
261 /* Ignore the computed goto we create when we factor the original
263 if (last == factored_computed_goto)
266 /* If the last statement is a computed goto, factor it. */
267 if (computed_goto_p (last))
271 /* The first time we find a computed goto we need to create
272 the factored goto block and the variable each original
273 computed goto will use for their goto destination. */
274 if (! factored_computed_goto)
276 basic_block new_bb = create_empty_bb (bb);
277 block_stmt_iterator new_bsi = bsi_start (new_bb);
279 /* Create the destination of the factored goto. Each original
280 computed goto will put its desired destination into this
281 variable and jump to the label we create immediately
283 var = create_tmp_var (ptr_type_node, "gotovar");
285 /* Build a label for the new block which will contain the
286 factored computed goto. */
287 factored_label_decl = create_artificial_label ();
288 factored_computed_goto_label
289 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
290 bsi_insert_after (&new_bsi, factored_computed_goto_label,
293 /* Build our new computed goto. */
294 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
295 bsi_insert_after (&new_bsi, factored_computed_goto,
299 /* Copy the original computed goto's destination into VAR. */
300 assignment = build_gimple_modify_stmt (var,
301 GOTO_DESTINATION (last));
302 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
304 /* And re-vector the computed goto to the new destination. */
305 GOTO_DESTINATION (last) = factored_label_decl;
311 /* Build a flowgraph for the statement_list STMT_LIST. */
314 make_blocks (tree stmt_list)
316 tree_stmt_iterator i = tsi_start (stmt_list);
318 bool start_new_block = true;
319 bool first_stmt_of_list = true;
320 basic_block bb = ENTRY_BLOCK_PTR;
322 while (!tsi_end_p (i))
/* Loop body: stmt/prev_stmt bookkeeping lines are elided here.  */
329 /* If the statement starts a new basic block or if we have determined
330 in a previous pass that we need to create a new block for STMT, do
332 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
/* Split the remaining statements off into a new list so the new
   block owns exactly the statements from STMT onward.  */
334 if (!first_stmt_of_list)
335 stmt_list = tsi_split_statement_list_before (&i);
336 bb = create_basic_block (stmt_list, NULL, bb);
337 start_new_block = false;
340 /* Now add STMT to BB and create the subgraphs for special statement
342 set_bb_for_stmt (stmt, bb);
/* Remember computed gotos so build_tree_cfg can factor them later.  */
344 if (computed_goto_p (stmt))
345 found_computed_goto = true;
347 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
349 if (stmt_ends_bb_p (stmt))
350 start_new_block = true;
353 first_stmt_of_list = false;
358 /* Create and return a new empty basic block after bb AFTER. */
/* H, if non-NULL, is the statement list the block should own; E is
   unused for trees (it exists to match the cfg-hooks signature).  */
361 create_bb (void *h, void *e, basic_block after)
367 /* Create and initialize a new basic block. Since alloc_block uses
368 ggc_alloc_cleared to allocate a basic block, we do not have to
369 clear the newly allocated basic block here. */
372 bb->index = last_basic_block;
374 bb->il.tree = GGC_CNEW (struct tree_bb_info);
375 set_bb_stmt_list (bb, h ? (tree) h : alloc_stmt_list ());
377 /* Add the new block to the linked list of blocks. */
378 link_block (bb, after);
380 /* Grow the basic block array if needed. */
381 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
/* Grow by ~25% to amortize reallocation cost.  */
383 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
384 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
387 /* Add the newly created block to the array. */
388 SET_BASIC_BLOCK (last_basic_block, bb);
397 /*---------------------------------------------------------------------------
399 ---------------------------------------------------------------------------*/
401 /* Fold COND_EXPR_COND of each COND_EXPR. */
404 fold_cond_expr_cond (void)
/* For each block ending in a COND_EXPR, try constant-folding the
   condition to boolean_true_node / boolean_false_node.  */
410 tree stmt = last_stmt (bb);
413 && TREE_CODE (stmt) == COND_EXPR)
/* Defer overflow warnings while folding: only emit them if folding
   actually reduced the condition to a constant.  */
418 fold_defer_overflow_warnings ();
419 cond = fold (COND_EXPR_COND (stmt));
420 zerop = integer_zerop (cond);
421 onep = integer_onep (cond);
422 fold_undefer_overflow_warnings (zerop || onep,
424 WARN_STRICT_OVERFLOW_CONDITIONAL);
426 COND_EXPR_COND (stmt) = boolean_false_node;
428 COND_EXPR_COND (stmt) = boolean_true_node;
433 /* Join all the blocks in the flowgraph. */
/* make_edges: the function header and the dispatch switch's case labels
   (GOTO_EXPR, RETURN_EXPR, COND_EXPR, SWITCH_EXPR, RESX_EXPR, CALL_EXPR,
   the OMP_* cases, ...) are partially elided from this listing.  */
439 struct omp_region *cur_region = NULL;
441 /* Create an edge from entry to the first block with executable
443 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
445 /* Traverse the basic block array placing edges. */
448 tree last = last_stmt (bb);
453 enum tree_code code = TREE_CODE (last);
/* GOTO_EXPR: */
457 make_goto_expr_edges (bb);
/* RETURN_EXPR: edge straight to EXIT.  */
461 make_edge (bb, EXIT_BLOCK_PTR, 0);
/* COND_EXPR: */
465 make_cond_expr_edges (bb);
/* SWITCH_EXPR: */
469 make_switch_expr_edges (bb);
/* RESX_EXPR: */
473 make_eh_edges (last);
/* CALL_EXPR: */
478 /* If this function receives a nonlocal goto, then we need to
479 make edges from this call site to all the nonlocal goto
481 if (tree_can_make_abnormal_goto (last))
482 make_abnormal_goto_edges (bb, true);
484 /* If this statement has reachable exception handlers, then
485 create abnormal edges to them. */
486 make_eh_edges (last);
488 /* Some calls are known not to return. */
489 fallthru = !(call_expr_flags (last) & ECF_NORETURN);
495 case GIMPLE_MODIFY_STMT:
496 if (is_ctrl_altering_stmt (last))
498 /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
499 the CALL_EXPR may have an abnormal edge. Search the RHS
500 for this case and create any required edges. */
501 if (tree_can_make_abnormal_goto (last))
502 make_abnormal_goto_edges (bb, true);
504 make_eh_edges (last);
/* OMP directive cases: open a new region nested in cur_region.  */
516 cur_region = new_omp_region (bb, code, cur_region);
521 cur_region = new_omp_region (bb, code, cur_region);
525 case OMP_SECTIONS_SWITCH:
530 case OMP_ATOMIC_LOAD:
531 case OMP_ATOMIC_STORE:
/* OMP_RETURN: close the current region.  */
537 /* In the case of an OMP_SECTION, the edge will go somewhere
538 other than the next block. This will be created later. */
539 cur_region->exit = bb;
540 fallthru = cur_region->type != OMP_SECTION;
541 cur_region = cur_region->outer;
/* OMP_CONTINUE: record the continue block and wire region edges.  */
545 cur_region->cont = bb;
546 switch (cur_region->type)
/* case OMP_FOR: */
549 /* Mark all OMP_FOR and OMP_CONTINUE succs edges as abnormal
550 to prevent splitting them. */
551 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
552 /* Make the loopback edge. */
553 make_edge (bb, single_succ (cur_region->entry),
556 /* Create an edge from OMP_FOR to exit, which corresponds to
557 the case that the body of the loop is not executed at
559 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
560 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
/* case OMP_SECTIONS: */
565 /* Wire up the edges into and out of the nested sections. */
567 basic_block switch_bb = single_succ (cur_region->entry);
569 struct omp_region *i;
570 for (i = cur_region->inner; i ; i = i->next)
572 gcc_assert (i->type == OMP_SECTION);
573 make_edge (switch_bb, i->entry, 0);
574 make_edge (i->exit, bb, EDGE_FALLTHRU);
577 /* Make the loopback edge to the block with
578 OMP_SECTIONS_SWITCH. */
579 make_edge (bb, switch_bb, 0);
581 /* Make the edge from the switch to exit. */
582 make_edge (switch_bb, bb->next_bb, 0);
/* default: ordinary statements never end a block.  */
593 gcc_assert (!stmt_ends_bb_p (last));
/* Fallthrough edge to the textually next block, when applicable.  */
601 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
607 /* Fold COND_EXPR_COND of each COND_EXPR. */
608 fold_cond_expr_cond ();
612 /* Create the edges for a COND_EXPR starting at block BB.
613 At this point, both clauses must contain only simple gotos. */
616 make_cond_expr_edges (basic_block bb)
618 tree entry = last_stmt (bb);
619 basic_block then_bb, else_bb;
620 tree then_label, else_label;
624 gcc_assert (TREE_CODE (entry) == COND_EXPR);
626 /* Entry basic blocks for each component. */
627 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
628 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
629 then_bb = label_to_block (then_label);
630 else_bb = label_to_block (else_label);
/* Record the goto locus on each edge so diagnostics and debug info
   can still point at the original branch.  */
632 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
633 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
634 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
636 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
638 /* We do not need the gotos anymore. */
639 COND_EXPR_THEN (entry) = NULL_TREE;
640 COND_EXPR_ELSE (entry) = NULL_TREE;
644 /* Called for each element in the hash table (P) as we delete the
645 edge to cases hash table.
647 Clear all the TREE_CHAINs to prevent problems with copying of
648 SWITCH_EXPRs and structure sharing rules, then free the hash table
652 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
653 void *data ATTRIBUTE_UNUSED)
/* Walk the CASE_LABEL_EXPR chain, saving the next pointer before
   clearing each TREE_CHAIN.  */
657 for (t = (tree) *value; t; t = next)
659 next = TREE_CHAIN (t);
660 TREE_CHAIN (t) = NULL;
667 /* Start recording information mapping edges to case labels. */
670 start_recording_case_labels (void)
672 gcc_assert (edge_to_cases == NULL);
673 edge_to_cases = pointer_map_create ();
676 /* Return nonzero if we are recording information for case labels. */
679 recording_case_labels_p (void)
681 return (edge_to_cases != NULL);
684 /* Stop recording information mapping edges to case labels and
685 remove any information we have recorded. */
687 end_recording_case_labels (void)
/* Clear all TREE_CHAINs via the cleanup callback before destroying
   the map, then reset the sentinel so recording_case_labels_p is false.  */
689 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
690 pointer_map_destroy (edge_to_cases);
691 edge_to_cases = NULL;
694 /* If we are inside a {start,end}_recording_cases block, then return
695 a chain of CASE_LABEL_EXPRs from T which reference E.
697 Otherwise return NULL. */
700 get_cases_for_edge (edge e, tree t)
706 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
707 chains available. Return NULL so the caller can detect this case. */
708 if (!recording_case_labels_p ())
/* Fast path: E already has an entry in the map (return is elided).  */
711 slot = pointer_map_contains (edge_to_cases, e);
715 /* If we did not find E in the hash table, then this must be the first
716 time we have been queried for information about E & T. Add all the
717 elements from T to the hash table then perform the query again. */
719 vec = SWITCH_LABELS (t);
720 n = TREE_VEC_LENGTH (vec);
721 for (i = 0; i < n; i++)
723 tree elt = TREE_VEC_ELT (vec, i);
724 tree lab = CASE_LABEL (elt);
725 basic_block label_bb = label_to_block (lab);
726 edge this_edge = find_edge (e->src, label_bb);
728 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
/* Chain ELT onto the slot's existing list via TREE_CHAIN.  */
730 slot = pointer_map_insert (edge_to_cases, this_edge);
731 TREE_CHAIN (elt) = (tree) *slot;
735 return (tree) *pointer_map_contains (edge_to_cases, e);
738 /* Create the edges for a SWITCH_EXPR starting at block BB.
739 At this point, the switch body has been lowered and the
740 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
743 make_switch_expr_edges (basic_block bb)
745 tree entry = last_stmt (bb);
749 vec = SWITCH_LABELS (entry);
750 n = TREE_VEC_LENGTH (vec);
/* One plain edge per case label, destination resolved through the
   label-to-block map.  */
752 for (i = 0; i < n; ++i)
754 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
755 basic_block label_bb = label_to_block (lab);
756 make_edge (bb, label_bb, 0);
761 /* Return the basic block holding label DEST. */
764 label_to_block_fn (struct function *ifun, tree dest)
766 int uid = LABEL_DECL_UID (dest)
/* NOTE(review): the line above reads "int uid = LABEL_DECL_UID (dest);"
   in the full file; keep the semicolon when merging.  */;
768 /* We would die hard when faced by an undefined label. Emit a label to
769 the very first basic block. This will hopefully make even the dataflow
770 and undefined variable warnings quite right. */
771 if ((errorcount || sorrycount) && uid < 0)
773 block_stmt_iterator bsi =
774 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
777 stmt = build1 (LABEL_EXPR, void_type_node, dest);
778 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
779 uid = LABEL_DECL_UID (dest);
/* Out-of-range uid means the label has no block yet (return elided).  */
781 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
782 <= (unsigned int) uid)
784 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
787 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
788 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
791 make_abnormal_goto_edges (basic_block bb, bool for_call)
793 basic_block target_bb;
794 block_stmt_iterator bsi;
/* Scan every block's leading labels; labels always appear at the start
   of a block, so the inner loop can stop at the first non-label (the
   break is elided from this listing).  */
796 FOR_EACH_BB (target_bb)
797 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
799 tree target = bsi_stmt (bsi);
801 if (TREE_CODE (target) != LABEL_EXPR)
804 target = LABEL_EXPR_LABEL (target);
806 /* Make an edge to every label block that has been marked as a
807 potential target for a computed goto or a non-local goto. */
808 if ((FORCED_LABEL (target) && !for_call)
809 || (DECL_NONLOCAL (target) && for_call))
811 make_edge (bb, target_bb, EDGE_ABNORMAL);
817 /* Create edges for a goto statement at block BB. */
820 make_goto_expr_edges (basic_block bb)
822 block_stmt_iterator last = bsi_last (bb);
823 tree goto_t = bsi_stmt (last);
825 /* A simple GOTO creates normal edges. */
826 if (simple_goto_p (goto_t))
828 tree dest = GOTO_DESTINATION (goto_t);
829 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
830 e->goto_locus = EXPR_LOCATION (goto_t);
/* The goto is now redundant — the edge carries the control flow —
   so remove the statement (a return follows in the elided lines).  */
831 bsi_remove (&last, true);
835 /* A computed GOTO creates abnormal edges. */
836 make_abnormal_goto_edges (bb, false);
840 /*---------------------------------------------------------------------------
842 ---------------------------------------------------------------------------*/
844 /* Cleanup useless labels in basic blocks. This is something we wish
845 to do early because it allows us to group case labels before creating
846 the edges for the CFG, and it speeds up block statement iterators in
848 We rerun this pass after CFG is created, to get rid of the labels that
849 are no longer referenced. After then we do not run it any more, since
850 (almost) no new labels should be created. */
852 /* A map from basic block index to the leading label of that block. */
853 static struct label_record
/* Field "tree label;" is elided from this listing.  */
858 /* True if the label is referenced from somewhere. */
/* Field "bool used;" and the array pointer "*label_for_bb" follow in
   elided lines.  */
862 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
864 update_eh_label (struct eh_region *region)
866 tree old_label = get_eh_region_tree_label (region);
870 basic_block bb = label_to_block (old_label);
872 /* ??? After optimizing, there may be EH regions with labels
873 that have already been removed from the function body, so
874 there is no basic block for them. */
/* Redirect the region to BB's canonical label and mark it used so
   cleanup_dead_labels will not delete it.  */
878 new_label = label_for_bb[bb->index].label;
879 label_for_bb[bb->index].used = true;
880 set_eh_region_tree_label (region, new_label);
884 /* Given LABEL return the first label in the same basic block. */
886 main_block_label (tree label)
888 basic_block bb = label_to_block (label);
889 tree main_label = label_for_bb[bb->index].label;
891 /* label_to_block possibly inserted undefined label into the chain. */
/* If the block had no recorded label yet, adopt LABEL as the main one.  */
894 label_for_bb[bb->index].label = label;
/* Mark the main label used so the purge phase keeps it (the return of
   main_label is in elided lines).  */
898 label_for_bb[bb->index].used = true;
902 /* Cleanup redundant labels. This is a three-step process:
903 1) Find the leading label for each block.
904 2) Redirect all references to labels to the leading labels.
905 3) Cleanup all useless labels. */
908 cleanup_dead_labels (void)
911 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
913 /* Find a suitable label for each block. We use the first user-defined
914 label if there is one, or otherwise just the first label we see. */
917 block_stmt_iterator i;
919 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
921 tree label, stmt = bsi_stmt (i);
/* Labels are only at the head of a block; stop at first non-label
   (break is in elided lines).  */
923 if (TREE_CODE (stmt) != LABEL_EXPR)
926 label = LABEL_EXPR_LABEL (stmt);
928 /* If we have not yet seen a label for the current block,
929 remember this one and see if there are more labels. */
930 if (!label_for_bb[bb->index].label)
932 label_for_bb[bb->index].label = label;
936 /* If we did see a label for the current block already, but it
937 is an artificially created label, replace it if the current
938 label is a user defined label. */
939 if (!DECL_ARTIFICIAL (label)
940 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
942 label_for_bb[bb->index].label = label;
948 /* Now redirect all jumps/branches to the selected label.
949 First do so for each block ending in a control statement. */
952 tree stmt = last_stmt (bb);
956 switch (TREE_CODE (stmt))
/* case COND_EXPR: rewrite both arm gotos.  */
960 tree true_branch, false_branch;
962 true_branch = COND_EXPR_THEN (stmt);
963 false_branch = COND_EXPR_ELSE (stmt);
966 GOTO_DESTINATION (true_branch)
967 = main_block_label (GOTO_DESTINATION (true_branch));
969 GOTO_DESTINATION (false_branch)
970 = main_block_label (GOTO_DESTINATION (false_branch));
/* case SWITCH_EXPR: rewrite each case's label.  */
978 tree vec = SWITCH_LABELS (stmt);
979 size_t n = TREE_VEC_LENGTH (vec);
981 /* Replace all destination labels. */
982 for (i = 0; i < n; ++i)
984 tree elt = TREE_VEC_ELT (vec, i);
985 tree label = main_block_label (CASE_LABEL (elt));
986 CASE_LABEL (elt) = label;
991 /* We have to handle GOTO_EXPRs until they're removed, and we don't
992 remove them until after we've created the CFG edges. */
994 if (! computed_goto_p (stmt))
996 GOTO_DESTINATION (stmt)
997 = main_block_label (GOTO_DESTINATION (stmt));
/* Redirect EH region labels too.  */
1006 for_each_eh_region (update_eh_label);
1008 /* Finally, purge dead labels. All user-defined labels and labels that
1009 can be the target of non-local gotos and labels which have their
1010 address taken are preserved. */
1013 block_stmt_iterator i;
1014 tree label_for_this_bb = label_for_bb[bb->index].label;
1016 if (!label_for_this_bb)
1019 /* If the main label of the block is unused, we may still remove it. */
1020 if (!label_for_bb[bb->index].used)
1021 label_for_this_bb = NULL;
1023 for (i = bsi_start (bb); !bsi_end_p (i); )
1025 tree label, stmt = bsi_stmt (i);
1027 if (TREE_CODE (stmt) != LABEL_EXPR)
1030 label = LABEL_EXPR_LABEL (stmt);
/* Keep the main label, user labels, non-local labels, and labels
   whose address is taken; everything else is removed.  */
1032 if (label == label_for_this_bb
1033 || ! DECL_ARTIFICIAL (label)
1034 || DECL_NONLOCAL (label)
1035 || FORCED_LABEL (label))
1038 bsi_remove (&i, true);
1042 free (label_for_bb);
1045 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1046 and scan the sorted vector of cases. Combine the ones jumping to the
1048 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1051 group_case_labels (void)
1057 tree stmt = last_stmt (bb);
1058 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1060 tree labels = SWITCH_LABELS (stmt);
1061 int old_size = TREE_VEC_LENGTH (labels);
1062 int i, j, new_size = old_size;
1063 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1066 /* The default label is always the last case in a switch
1067 statement after gimplification. */
1068 default_label = CASE_LABEL (default_case);
1070 /* Look for possible opportunities to merge cases.
1071 Ignore the last element of the label vector because it
1072 must be the default case. */
1074 while (i < old_size - 1)
1076 tree base_case, base_label, base_high;
1077 base_case = TREE_VEC_ELT (labels, i);
1079 gcc_assert (base_case);
1080 base_label = CASE_LABEL (base_case);
1082 /* Discard cases that have the same destination as the
/* Dropping a case that jumps to the default keeps behavior: the
   switch falls back to the default label anyway.  */
1084 if (base_label == default_label)
1086 TREE_VEC_ELT (labels, i) = NULL_TREE;
/* A case with no CASE_HIGH is a single value; treat CASE_LOW as
   its upper bound for the merge test below.  */
1092 base_high = CASE_HIGH (base_case) ?
1093 CASE_HIGH (base_case) : CASE_LOW (base_case);
1095 /* Try to merge case labels. Break out when we reach the end
1096 of the label vector or when we cannot merge the next case
1097 label with the current one. */
1098 while (i < old_size - 1)
1100 tree merge_case = TREE_VEC_ELT (labels, i);
1101 tree merge_label = CASE_LABEL (merge_case);
1102 tree t = int_const_binop (PLUS_EXPR, base_high,
1103 integer_one_node, 1);
1105 /* Merge the cases if they jump to the same place,
1106 and their ranges are consecutive. */
1107 if (merge_label == base_label
1108 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1110 base_high = CASE_HIGH (merge_case) ?
1111 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1112 CASE_HIGH (base_case) = base_high;
1113 TREE_VEC_ELT (labels, i) = NULL_TREE;
1122 /* Compress the case labels in the label vector, and adjust the
1123 length of the vector. */
1124 for (i = 0, j = 0; i < new_size; i++)
/* Skip the NULL_TREE holes left by the merges above.  */
1126 while (! TREE_VEC_ELT (labels, j))
1128 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1130 TREE_VEC_LENGTH (labels) = new_size;
1135 /* Checks whether we can merge block B into block A. */
1138 tree_can_merge_blocks_p (basic_block a, basic_block b)
1141 block_stmt_iterator bsi;
/* A must have exactly one successor, and that edge must be a normal
   (non-abnormal) edge leading to B; B must have A as its only
   predecessor and must not be the exit block.  Each failing check
   returns false in elided lines.  */
1144 if (!single_succ_p (a))
1147 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1150 if (single_succ (a) != b)
1153 if (!single_pred_p (b))
1156 if (b == EXIT_BLOCK_PTR)
1159 /* If A ends by a statement causing exceptions or something similar, we
1160 cannot merge the blocks. */
1161 /* This CONST_CAST is okay because last_stmt doesn't modify its
1162 argument and the return value is assign to a const_tree. */
1163 stmt = last_stmt (CONST_CAST_BB (a));
1164 if (stmt && stmt_ends_bb_p (stmt))
1167 /* Do not allow a block with only a non-local label to be merged. */
1168 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1169 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1172 /* It must be possible to eliminate all phi nodes in B. If ssa form
1173 is not up-to-date, we cannot eliminate any phis; however, if only
1174 some symbols as whole are marked for renaming, this is not a problem,
1175 as phi nodes for those symbols are irrelevant in updating anyway. */
1176 phi = phi_nodes (b);
1179 if (name_mappings_registered_p ())
1182 for (; phi; phi = PHI_CHAIN (phi))
1183 if (!is_gimple_reg (PHI_RESULT (phi))
1184 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1188 /* Do not remove user labels. */
1189 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1191 stmt = bsi_stmt (bsi);
1192 if (TREE_CODE (stmt) != LABEL_EXPR)
1194 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1198 /* Protect the loop latches. */
1200 && b->loop_father->latch == b)
1206 /* Replaces all uses of NAME by VAL. */
1209 replace_uses_by (tree name, tree val)
1211 imm_use_iterator imm_iter;
1216 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
/* PHI nodes are not registered in the statement-change machinery.  */
1218 if (TREE_CODE (stmt) != PHI_NODE)
1219 push_stmt_changes (&stmt);
1221 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1223 replace_exp (use, val);
1225 if (TREE_CODE (stmt) == PHI_NODE)
1227 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1228 if (e->flags & EDGE_ABNORMAL)
1230 /* This can only occur for virtual operands, since
1231 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1232 would prevent replacement. */
1233 gcc_assert (!is_gimple_reg (name));
1234 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
/* For non-PHI statements, fold in place and record the altered block
   for CFG cleanup.  */
1239 if (TREE_CODE (stmt) != PHI_NODE)
1243 fold_stmt_inplace (stmt);
1244 if (cfgcleanup_altered_bbs)
1245 bitmap_set_bit (cfgcleanup_altered_bbs, bb_for_stmt (stmt)->index);
1247 /* FIXME. This should go in pop_stmt_changes. */
1248 rhs = get_rhs (stmt);
1249 if (TREE_CODE (rhs) == ADDR_EXPR)
1250 recompute_tree_invariant_for_addr_expr (rhs);
1252 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1254 pop_stmt_changes (&stmt);
/* All uses must be gone now.  */
1258 gcc_assert (has_zero_uses (name));
1260 /* Also update the trees stored in loop structures. */
1266 FOR_EACH_LOOP (li, loop, 0)
1268 substitute_in_loop_info (loop, name, val);
1273 /* Merge block B into block A. */
1276 tree_merge_blocks (basic_block a, basic_block b)
1278 block_stmt_iterator bsi;
1279 tree_stmt_iterator last;
1283 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1285 /* Remove all single-valued PHI nodes from block B of the form
1286 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
/* bsi is positioned at the end of A in elided lines before this loop.  */
1288 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1290 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1292 bool may_replace_uses = may_propagate_copy (def, use);
1294 /* In case we maintain loop closed ssa form, do not propagate arguments
1295 of loop exit phi nodes. */
1297 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1298 && is_gimple_reg (def)
1299 && TREE_CODE (use) == SSA_NAME
1300 && a->loop_father != b->loop_father)
1301 may_replace_uses = false;
1303 if (!may_replace_uses)
1305 gcc_assert (is_gimple_reg (def));
1307 /* Note that just emitting the copies is fine -- there is no problem
1308 with ordering of phi nodes. This is because A is the single
1309 predecessor of B, therefore results of the phi nodes cannot
1310 appear as arguments of the phi nodes. */
1311 copy = build_gimple_modify_stmt (def, use);
1312 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1313 SSA_NAME_DEF_STMT (def) = copy;
1314 remove_phi_node (phi, NULL, false);
1318 /* If we deal with a PHI for virtual operands, we can simply
1319 propagate these without fussing with folding or updating
1321 if (!is_gimple_reg (def))
1323 imm_use_iterator iter;
1324 use_operand_p use_p;
1327 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1328 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1329 SET_USE (use_p, use);
/* Real operands go through the full fold-and-update path.  */
1332 replace_uses_by (def, use);
1333 remove_phi_node (phi, NULL, true);
1337 /* Ensure that B follows A. */
1338 move_block_after (b, a);
1340 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1341 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1343 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1344 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1346 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1348 tree label = bsi_stmt (bsi);
1350 bsi_remove (&bsi, false);
1351 /* Now that we can thread computed gotos, we might have
1352 a situation where we have a forced label in block B
1353 However, the label at the start of block B might still be
1354 used in other ways (think about the runtime checking for
1355 Fortran assigned gotos). So we can not just delete the
1356 label. Instead we move the label to the start of block A. */
1357 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1359 block_stmt_iterator dest_bsi = bsi_start (a);
1360 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
/* Non-label statements stay put; just repoint their block to A.  */
1365 change_bb_for_stmt (bsi_stmt (bsi), a);
1370 /* Merge the chains. */
1371 last = tsi_last (bb_stmt_list (a));
1372 tsi_link_after (&last, bb_stmt_list (b), TSI_NEW_STMT);
1373 set_bb_stmt_list (b, NULL_TREE);
1375 if (cfgcleanup_altered_bbs)
1376 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1380 /* Return the one of two successors of BB that is not reached by a
1381    complex edge, if there is one.  Else, return BB.  We use
1382    this in optimizations that use post-dominators for their heuristics,
1383    to catch the cases in C++ where function calls are involved. */
1386 single_noncomplex_succ (basic_block bb)
/* Only a block with exactly two successors can have a unique
   non-complex one.  */
1389 if (EDGE_COUNT (bb->succs) != 2)
1392 e0 = EDGE_SUCC (bb, 0);
1393 e1 = EDGE_SUCC (bb, 1);
/* When exactly one of the two edges is complex, the result is the
   destination of the other edge.  */
1394 if (e0->flags & EDGE_COMPLEX)
1396 if (e1->flags & EDGE_COMPLEX)
1403 /* Walk the function tree removing unnecessary statements.
1405 * Empty statement nodes are removed
1407 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1409 * Unnecessary COND_EXPRs are removed
1411 * Some unnecessary BIND_EXPRs are removed
1413 Clearly more work could be done. The trick is doing the analysis
1414 and removal fast enough to be a net improvement in compile times.
1416 Note that when we remove a control structure such as a COND_EXPR
1417 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1418 to ensure we eliminate all the useless code. */
1429 static void remove_useless_stmts_1 (tree *, struct rus_data *);
/* Issue a -Wunreachable-code warning for STMT, recursing into statement
   containers (statement lists, conditionals, TRY blocks, handlers and
   binds) to find a statement with a usable source location.
   NOTE(review): the elided lines appear to make this return nonzero once
   a warning has been emitted, stopping the walk -- confirm.  */
1432 remove_useless_stmts_warn_notreached (tree stmt)
1434 if (EXPR_HAS_LOCATION (stmt))
1436 location_t loc = EXPR_LOCATION (stmt);
/* Only warn when the location carries a real source line.  */
1437 if (LOCATION_LINE (loc) > 0)
1439 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
/* STMT itself had no usable location; look inside containers.  */
1444 switch (TREE_CODE (stmt))
1446 case STATEMENT_LIST:
1448 tree_stmt_iterator i;
1449 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1450 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1456 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1458 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1460 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1464 case TRY_FINALLY_EXPR:
1465 case TRY_CATCH_EXPR:
1466 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1468 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1473 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1474 case EH_FILTER_EXPR:
1475 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1477 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1480 /* Not a live container. */
/* Simplify the COND_EXPR at *STMT_P: recurse into both arms, then try to
   remove the conditional entirely (empty arms, statically-known
   condition) or fold trivially-redundant arms.  DATA accumulates
   has_label/may_branch state and the repeat flag for the driver loop.  */
1488 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1490 tree then_clause, else_clause, cond;
1491 bool save_has_label, then_has_label, else_has_label;
/* Track labels per-arm: a label in an arm blocks removal of that arm,
   since it may be a jump target from elsewhere.  */
1493 save_has_label = data->has_label;
1494 data->has_label = false;
1495 data->last_goto = NULL;
1497 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1499 then_has_label = data->has_label;
1500 data->has_label = false;
1501 data->last_goto = NULL;
1503 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1505 else_has_label = data->has_label;
1506 data->has_label = save_has_label | then_has_label | else_has_label;
/* Re-read the arms: the recursive calls may have rewritten them.  */
1508 then_clause = COND_EXPR_THEN (*stmt_p);
1509 else_clause = COND_EXPR_ELSE (*stmt_p);
1510 cond = fold (COND_EXPR_COND (*stmt_p));
1512 /* If neither arm does anything at all, we can remove the whole IF. */
1513 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1515 *stmt_p = build_empty_stmt ();
1516 data->repeat = true;
1519 /* If there are no reachable statements in an arm, then we can
1520 zap the entire conditional. */
1521 else if (integer_nonzerop (cond) && !else_has_label)
1523 if (warn_notreached)
1524 remove_useless_stmts_warn_notreached (else_clause);
1525 *stmt_p = then_clause;
1526 data->repeat = true;
1528 else if (integer_zerop (cond) && !then_has_label)
1530 if (warn_notreached)
1531 remove_useless_stmts_warn_notreached (then_clause);
1532 *stmt_p = else_clause;
1533 data->repeat = true;
1536 /* Check a couple of simple things on then/else with single stmts. */
1539 tree then_stmt = expr_only (then_clause);
1540 tree else_stmt = expr_only (else_clause);
1542 /* Notice branches to a common destination. */
1543 if (then_stmt && else_stmt
1544 && TREE_CODE (then_stmt) == GOTO_EXPR
1545 && TREE_CODE (else_stmt) == GOTO_EXPR
1546 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
/* Both arms jump to the same label: the condition is irrelevant.  */
1548 *stmt_p = then_stmt;
1549 data->repeat = true;
1552 /* If the THEN/ELSE clause merely assigns a value to a variable or
1553 parameter which is already known to contain that value, then
1554 remove the useless THEN/ELSE clause. */
1555 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1558 && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
1559 && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
1560 && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
1561 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
/* Same idea for "if (x == C) x = C;" / "if (x != C) ... else x = C;".  */
1563 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1564 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1565 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1566 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1568 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1569 ? then_stmt : else_stmt);
1570 tree *location = (TREE_CODE (cond) == EQ_EXPR
1571 ? &COND_EXPR_THEN (*stmt_p)
1572 : &COND_EXPR_ELSE (*stmt_p));
1575 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1576 && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1577 && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1578 *location = alloc_stmt_list ();
1582 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1583 would be re-introduced during lowering. */
1584 data->last_goto = NULL;
/* Simplify the TRY_FINALLY_EXPR at *STMT_P: recurse into body and
   finally clause, then drop the TRY_FINALLY wrapper when the body is
   empty, the finally clause is empty, or the body can neither branch
   nor throw.  */
1589 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1591 bool save_may_branch, save_may_throw;
1592 bool this_may_branch, this_may_throw;
1594 /* Collect may_branch and may_throw information for the body only. */
1595 save_may_branch = data->may_branch;
1596 save_may_throw = data->may_throw;
1597 data->may_branch = false;
1598 data->may_throw = false;
1599 data->last_goto = NULL;
1601 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1603 this_may_branch = data->may_branch;
1604 this_may_throw = data->may_throw;
1605 data->may_branch |= save_may_branch;
1606 data->may_throw |= save_may_throw;
1607 data->last_goto = NULL;
1609 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1611 /* If the body is empty, then we can emit the FINALLY block without
1612 the enclosing TRY_FINALLY_EXPR. */
1613 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1615 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1616 data->repeat = true;
1619 /* If the handler is empty, then we can emit the TRY block without
1620 the enclosing TRY_FINALLY_EXPR. */
1621 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1623 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1624 data->repeat = true;
1627 /* If the body neither throws, nor branches, then we can safely
1628 string the TRY and FINALLY blocks together. */
1629 else if (!this_may_branch && !this_may_throw)
1631 tree stmt = *stmt_p;
1632 *stmt_p = TREE_OPERAND (stmt, 0);
/* Control only leaves the body by falling off the end, so the
   finally code can simply be appended after it.  */
1633 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1634 data->repeat = true;
/* Simplify the TRY_CATCH_EXPR at *STMT_P: drop it entirely when the body
   cannot throw, and otherwise recurse into the handler, noting handlers
   (catch-all, must-not-throw filters) that stop exception propagation.  */
1640 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1642 bool save_may_throw, this_may_throw;
1643 tree_stmt_iterator i;
1646 /* Collect may_throw information for the body only. */
1647 save_may_throw = data->may_throw;
1648 data->may_throw = false;
1649 data->last_goto = NULL;
1651 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1653 this_may_throw = data->may_throw;
1654 data->may_throw = save_may_throw;
1656 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1657 if (!this_may_throw)
1659 if (warn_notreached)
1660 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1))
1661 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1662 data->repeat = true;
1666 /* Process the catch clause specially. We may be able to tell that
1667 no exceptions propagate past this point. */
1669 this_may_throw = true;
1670 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1671 stmt = tsi_stmt (i);
1672 data->last_goto = NULL;
1674 switch (TREE_CODE (stmt))
1677 for (; !tsi_end_p (i); tsi_next (&i))
1679 stmt = tsi_stmt (i);
1680 /* If we catch all exceptions, then the body does not
1681 propagate exceptions past this point. */
1682 if (CATCH_TYPES (stmt) == NULL)
1683 this_may_throw = false;
1684 data->last_goto = NULL;
1685 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1689 case EH_FILTER_EXPR:
/* A must-not-throw or empty-type filter guarantees nothing escapes.  */
1690 if (EH_FILTER_MUST_NOT_THROW (stmt))
1691 this_may_throw = false;
1692 else if (EH_FILTER_TYPES (stmt) == NULL)
1693 this_may_throw = false;
1694 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1698 /* Otherwise this is a cleanup. */
1699 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1701 /* If the cleanup is empty, then we can emit the TRY block without
1702 the enclosing TRY_CATCH_EXPR. */
1703 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1705 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1706 data->repeat = true;
/* Propagate whether exceptions can still escape this construct.  */
1710 data->may_throw |= this_may_throw;
/* Simplify the BIND_EXPR at *STMT_P: recurse into its body, then replace
   the BIND_EXPR with its body when it declares no variables and is not
   the toplevel bind of the function or of an inlined function (those
   must be kept for debug-info/block structure).  */
1715 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1719 /* First remove anything underneath the BIND_EXPR. */
1720 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1722 /* If the BIND_EXPR has no variables, then we can pull everything
1723 up one level and remove the BIND_EXPR, unless this is the toplevel
1724 BIND_EXPR for the current function or an inlined function.
1726 When this situation occurs we will want to apply this
1727 optimization again. */
1728 block = BIND_EXPR_BLOCK (*stmt_p);
1729 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1730 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1732 || ! BLOCK_ABSTRACT_ORIGIN (block)
1733 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1736 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1737 data->repeat = true;
/* Process the GOTO_EXPR at *STMT_P: mark that the surrounding region may
   branch, and remember a goto to a plain label so a following label
   handler can delete it if it turns out to be a jump to the very next
   statement.  */
1743 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1745 tree dest = GOTO_DESTINATION (*stmt_p);
1747 data->may_branch = true;
1748 data->last_goto = NULL;
1750 /* Record the last goto expr, so that we can delete it if unnecessary. */
/* Computed gotos (non-LABEL_DECL destinations) are never recorded.  */
1751 if (TREE_CODE (dest) == LABEL_DECL)
1752 data->last_goto = stmt_p;
/* Process the LABEL_EXPR at *STMT_P: note that a label was seen, and
   delete an immediately preceding goto that targets this very label
   (a jump to the next statement is useless).  */
1757 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1759 tree label = LABEL_EXPR_LABEL (*stmt_p);
1761 data->has_label = true;
1763 /* We do want to jump across non-local label receiver code. */
1764 if (DECL_NONLOCAL (label))
1765 data->last_goto = NULL;
1767 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
/* The recorded goto falls through to its own target; drop it.  */
1769 *data->last_goto = build_empty_stmt ();
1770 data->repeat = true;
1773 /* ??? Add something here to delete unused labels. */
1777 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1778 decl. This allows us to eliminate redundant or useless
1779 calls to "const" functions.
1781 Gimplifier already does the same operation, but we may notice functions
1782 being const and pure once their calls has been gimplified, so we need
1783 to update the flag. */
/* CALL is a CALL_EXPR; also propagate the callee's nothrow-ness onto
   the call.  */
1786 update_call_expr_flags (tree call)
1788 tree decl = get_callee_fndecl (call);
1791 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1792 TREE_SIDE_EFFECTS (call) = 0;
1793 if (TREE_NOTHROW (decl))
1794 TREE_NOTHROW (call) = 1;
1798 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1801 notice_special_calls (tree t)
1803 int flags = call_expr_flags (t);
/* Record calls to alloca and setjmp-like (returns-twice) functions so
   later passes and the backend can compensate.  */
1805 if (flags & ECF_MAY_BE_ALLOCA)
1806 current_function_calls_alloca = true;
1807 if (flags & ECF_RETURNS_TWICE)
1808 current_function_calls_setjmp = true;
1812 /* Clear flags set by notice_special_calls. Used by dead code removal
1813 to update the flags. */
/* The flags are recomputed by re-running notice_special_calls over the
   surviving calls.  */
1816 clear_special_calls (void)
1818 current_function_calls_alloca = false;
1819 current_function_calls_setjmp = false;
/* Worker for remove_useless_stmts: dispatch on the statement at *TP to
   the per-construct helpers, and update the may_branch/may_throw/
   last_goto bookkeeping in DATA.  Recurses through STATEMENT_LISTs,
   removing empty statements and splicing nested lists in place.  */
1824 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1828 switch (TREE_CODE (t))
1831 remove_useless_stmts_cond (tp, data);
1834 case TRY_FINALLY_EXPR:
1835 remove_useless_stmts_tf (tp, data);
1838 case TRY_CATCH_EXPR:
1839 remove_useless_stmts_tc (tp, data);
1843 remove_useless_stmts_bind (tp, data);
1847 remove_useless_stmts_goto (tp, data);
1851 remove_useless_stmts_label (tp, data);
/* RETURN_EXPR and similar: control leaves the region.  */
1856 data->last_goto = NULL;
1857 data->may_branch = true;
/* CALL_EXPR: refresh flags and note a potential throw site.  */
1862 data->last_goto = NULL;
1863 notice_special_calls (t);
1864 update_call_expr_flags (t);
1865 if (tree_could_throw_p (t))
1866 data->may_throw = true;
1872 case GIMPLE_MODIFY_STMT:
1873 data->last_goto = NULL;
/* A call may be buried on the RHS of the assignment.  */
1875 op = get_call_expr_in (t);
1878 update_call_expr_flags (op);
1879 notice_special_calls (op);
1881 if (tree_could_throw_p (t))
1882 data->may_throw = true;
1885 case STATEMENT_LIST:
1887 tree_stmt_iterator i = tsi_start (t);
1888 while (!tsi_end_p (i))
1891 if (IS_EMPTY_STMT (t))
1897 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
/* A helper may have replaced the element with a nested list;
   splice its statements inline rather than nesting.  */
1900 if (TREE_CODE (t) == STATEMENT_LIST)
1902 tsi_link_before (&i, t, TSI_SAME_STMT);
1912 data->last_goto = NULL;
1916 data->last_goto = NULL;
/* Entry point of the pass: repeatedly walk the function body removing
   useless statements until a fixed point is reached (data.repeat stays
   false).  Also recomputes the calls_alloca/calls_setjmp flags.  */
1922 remove_useless_stmts (void)
1924 struct rus_data data;
1926 clear_special_calls ();
/* Each iteration starts from a clean slate of bookkeeping flags.  */
1930 memset (&data, 0, sizeof (data));
1931 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1933 while (data.repeat);
/* Pass descriptor: runs remove_useless_stmts on any GIMPLE function and
   dumps the function when it finishes.  */
1938 struct gimple_opt_pass pass_remove_useless_stmts =
1942 "useless", /* name */
1944 remove_useless_stmts, /* execute */
1947 0, /* static_pass_number */
1949 PROP_gimple_any, /* properties_required */
1950 0, /* properties_provided */
1951 0, /* properties_destroyed */
1952 0, /* todo_flags_start */
1953 TODO_dump_func /* todo_flags_finish */
1957 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1960 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1964 /* Since this block is no longer reachable, we can just delete all
1965 of its PHI nodes. */
1966 phi = phi_nodes (bb);
/* Grab the chain pointer before the node is released.  */
1969 tree next = PHI_CHAIN (phi);
1970 remove_phi_node (phi, NULL_TREE, true);
1974 /* Remove edges to BB's successors. */
/* Deleting EDGE_SUCC (bb, 0) repeatedly empties the successor vector.  */
1975 while (EDGE_COUNT (bb->succs) > 0)
1976 remove_edge (EDGE_SUCC (bb, 0));
1980 /* Remove statements of basic block BB. */
/* Also detaches BB's PHI nodes and outgoing edges, rescues forced and
   nonlocal labels into the previous block, and optionally warns that the
   removed code was unreachable.  */
1983 remove_bb (basic_block bb)
1985 block_stmt_iterator i;
1986 source_location loc = UNKNOWN_LOCATION;
1990 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1991 if (dump_flags & TDF_DETAILS)
1993 dump_bb (bb, dump_file, 0);
1994 fprintf (dump_file, "\n");
2000 struct loop *loop = bb->loop_father;
2002 /* If a loop gets removed, clean up the information associated
/* Removing a loop's latch or header invalidates its iteration-count
   estimates.  */
2004 if (loop->latch == bb
2005 || loop->header == bb)
2006 free_numbers_of_iterations_estimates_loop (loop);
2009 /* Remove all the instructions in the block. */
2010 if (bb_stmt_list (bb) != NULL_TREE)
2012 for (i = bsi_start (bb); !bsi_end_p (i);)
2014 tree stmt = bsi_stmt (i);
2015 if (TREE_CODE (stmt) == LABEL_EXPR
2016 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2017 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2020 block_stmt_iterator new_bsi;
2022 /* A non-reachable non-local label may still be referenced.
2023 But it no longer needs to carry the extra semantics of
2025 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2027 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2028 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
/* Move the label into the previous block instead of deleting it.  */
2031 new_bb = bb->prev_bb;
2032 new_bsi = bsi_start (new_bb);
2033 bsi_remove (&i, false);
2034 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2038 /* Release SSA definitions if we are in SSA. Note that we
2039 may be called when not in SSA. For example,
2040 final_cleanup calls this function via
2041 cleanup_tree_cfg. */
2042 if (gimple_in_ssa_p (cfun))
2043 release_defs (stmt);
2045 bsi_remove (&i, true);
2048 /* Don't warn for removed gotos. Gotos are often removed due to
2049 jump threading, thus resulting in bogus warnings. Not great,
2050 since this way we lose warnings for gotos in the original
2051 program that are indeed unreachable. */
2052 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2054 if (EXPR_HAS_LOCATION (stmt))
2055 loc = EXPR_LOCATION (stmt);
2060 /* If requested, give a warning that the first statement in the
2061 block is unreachable. We walk statements backwards in the
2062 loop above, so the last statement we process is the first statement
2064 if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
2065 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2067 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2072 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2073 predicate VAL, return the edge that will be taken out of the block.
2074 If VAL does not match a unique edge, NULL is returned. */
/* Computed gotos whose VAL is a known label address are handled too.  */
2077 find_taken_edge (basic_block bb, tree val)
2081 stmt = last_stmt (bb);
2084 gcc_assert (is_ctrl_stmt (stmt));
/* Only a constant predicate determines a unique edge.  */
2087 if (! is_gimple_min_invariant (val))
2090 if (TREE_CODE (stmt) == COND_EXPR)
2091 return find_taken_edge_cond_expr (bb, val);
2093 if (TREE_CODE (stmt) == SWITCH_EXPR)
2094 return find_taken_edge_switch_expr (bb, val);
2096 if (computed_goto_p (stmt))
2098 /* Only optimize if the argument is a label, if the argument is
2099 not a label then we can not construct a proper CFG.
2101 It may be the case that we only need to allow the LABEL_REF to
2102 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2103 appear inside a LABEL_EXPR just to be safe. */
2104 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2105 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2106 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2113 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2114 statement, determine which of the outgoing edges will be taken out of the
2115 block. Return NULL if either edge may be taken. */
/* VAL is a LABEL_DECL; the taken edge is the one to the label's block.  */
2118 find_taken_edge_computed_goto (basic_block bb, tree val)
2123 dest = label_to_block (val);
2126 e = find_edge (bb, dest);
/* The CFG must already contain an edge to the target label's block.  */
2127 gcc_assert (e != NULL);
2133 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2134 statement, determine which of the two edges will be taken out of the
2135 block. Return NULL if either edge may be taken. */
2138 find_taken_edge_cond_expr (basic_block bb, tree val)
2140 edge true_edge, false_edge;
2142 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
/* Zero selects the false edge; any nonzero constant the true edge.  */
2144 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2145 return (integer_zerop (val) ? false_edge : true_edge);
2148 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2149 statement, determine which edge will be taken out of the block. Return
2150 NULL if any edge may be taken. */
2153 find_taken_edge_switch_expr (basic_block bb, tree val)
2155 tree switch_expr, taken_case;
2156 basic_block dest_bb;
2159 switch_expr = last_stmt (bb);
/* Map VAL to its case label (falling back to the default case), then to
   the label's block, then to the edge reaching that block.  */
2160 taken_case = find_case_label_for_value (switch_expr, val);
2161 dest_bb = label_to_block (CASE_LABEL (taken_case));
2163 e = find_edge (bb, dest_bb);
2169 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2170 We can make optimal use here of the fact that the case labels are
2171 sorted: We can do a binary search for a case matching VAL. */
2174 find_case_label_for_value (tree switch_expr, tree val)
2176 tree vec = SWITCH_LABELS (switch_expr);
2177 size_t low, high, n = TREE_VEC_LENGTH (vec);
/* By convention the default case is stored last in the label vector.  */
2178 tree default_case = TREE_VEC_ELT (vec, n - 1);
/* LOW starts one below the first candidate; the size_t wraparound of -1
   is intentional and the HIGH - LOW invariant still holds.  */
2180 for (low = -1, high = n - 1; high - low > 1; )
2182 size_t i = (high + low) / 2;
2183 tree t = TREE_VEC_ELT (vec, i);
2186 /* Cache the result of comparing CASE_LOW and val. */
2187 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2194 if (CASE_HIGH (t) == NULL)
2196 /* A single-valued case label. */
2202 /* A case range. We can only handle integer ranges. */
2203 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
/* No case matched VAL; the switch falls to the default label.  */
2208 return default_case;
2214 /*---------------------------------------------------------------------------
				 Debugging and printing functionality
2216 ---------------------------------------------------------------------------*/
2218 /* Dump tree-specific information of block BB to file OUTF. */
2221 tree_dump_bb (basic_block bb, FILE *outf, int indent)
/* Include virtual operands and memory symbols in the dump.  */
2223 dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
2227 /* Dump a basic block on stderr. */
/* Convenience wrapper intended to be called from a debugger.  */
2230 debug_tree_bb (basic_block bb)
2232 dump_bb (bb, stderr, 0);
2236 /* Dump basic block with index N on stderr. */
/* Returns the block so a debugger user can chain further inspection.  */
2239 debug_tree_bb_n (int n)
2241 debug_tree_bb (BASIC_BLOCK (n));
2242 return BASIC_BLOCK (n);
2246 /* Dump the CFG on stderr.
2248 FLAGS are the same used by the tree dumping functions
2249 (see TDF_* in tree-pass.h). */
2252 debug_tree_cfg (int flags)
2254 dump_tree_cfg (stderr, flags);
2258 /* Dump the program showing basic block boundaries on the given FILE.
2260 FLAGS are the same used by the tree dumping functions (see TDF_* in
2264 dump_tree_cfg (FILE *file, int flags)
/* With TDF_DETAILS, print a summary header and a brief CFG sketch
   before the full function dump.  */
2266 if (flags & TDF_DETAILS)
2268 const char *funcname
2269 = lang_hooks.decl_printable_name (current_function_decl, 2);
2272 fprintf (file, ";; Function %s\n\n", funcname);
2273 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2274 n_basic_blocks, n_edges, last_basic_block);
2276 brief_dump_cfg (file);
2277 fprintf (file, "\n");
2280 if (flags & TDF_STATS)
2281 dump_cfg_stats (file);
2283 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2287 /* Dump CFG statistics on FILE. */
/* Prints a table of block/edge counts and memory usage for the current
   function, plus the running maximum of coalesced label blocks.  */
2290 dump_cfg_stats (FILE *file)
/* Persists across calls to track the per-run maximum.  */
2292 static long max_num_merged_labels = 0;
2293 unsigned long size, total = 0;
2296 const char * const fmt_str = "%-30s%-13s%12s\n";
2297 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2298 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2299 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2300 const char *funcname
2301 = lang_hooks.decl_printable_name (current_function_decl, 2);
2304 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2306 fprintf (file, "---------------------------------------------------------\n");
2307 fprintf (file, fmt_str, "", " Number of ", "Memory");
2308 fprintf (file, fmt_str, "", " instances ", "used ");
2309 fprintf (file, "---------------------------------------------------------\n");
2311 size = n_basic_blocks * sizeof (struct basic_block_def);
2313 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2314 SCALE (size), LABEL (size));
/* Edge count is accumulated by walking every block's successors.  */
2318 num_edges += EDGE_COUNT (bb->succs);
2319 size = num_edges * sizeof (struct edge_def);
2321 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2323 fprintf (file, "---------------------------------------------------------\n");
2324 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2326 fprintf (file, "---------------------------------------------------------\n");
2327 fprintf (file, "\n");
2329 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2330 max_num_merged_labels = cfg_stats.num_merged_labels;
2332 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2333 cfg_stats.num_merged_labels, max_num_merged_labels);
2335 fprintf (file, "\n");
2339 /* Dump CFG statistics on stderr. Keep extern so that it's always
2340 linked in the final executable. */
2343 debug_cfg_stats (void)
2345 dump_cfg_stats (stderr);
2349 /* Dump the flowgraph to a .vcg FILE. */
/* Emits the current function's CFG in VCG graph format: one node per
   basic block (labelled with its first/last statement) and one edge per
   CFG edge, with fake edges drawn dotted.  */
2352 tree_cfg2vcg (FILE *file)
2357 const char *funcname
2358 = lang_hooks.decl_printable_name (current_function_decl, 2);
2360 /* Write the file header. */
2361 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2362 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2363 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2365 /* Write blocks and edges. */
2366 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2368 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2371 if (e->flags & EDGE_FAKE)
2372 fprintf (file, " linestyle: dotted priority: 10");
2374 fprintf (file, " linestyle: solid priority: 100");
2376 fprintf (file, " }\n");
2382 enum tree_code head_code, end_code;
2383 const char *head_name, *end_name;
2386 tree first = first_stmt (bb);
2387 tree last = last_stmt (bb);
/* Describe the block by its first and last statements; empty blocks
   are labelled "no-statement".  */
2391 head_code = TREE_CODE (first);
2392 head_name = tree_code_name[head_code];
2393 head_line = get_lineno (first);
2396 head_name = "no-statement";
2400 end_code = TREE_CODE (last);
2401 end_name = tree_code_name[end_code];
2402 end_line = get_lineno (last);
2405 end_name = "no-statement";
2407 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2408 bb->index, bb->index, head_name, head_line, end_name,
2411 FOR_EACH_EDGE (e, ei, bb->succs)
2413 if (e->dest == EXIT_BLOCK_PTR)
2414 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2416 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2418 if (e->flags & EDGE_FAKE)
2419 fprintf (file, " priority: 10 linestyle: dotted");
2421 fprintf (file, " priority: 100 linestyle: solid");
2423 fprintf (file, " }\n");
2426 if (bb->next_bb != EXIT_BLOCK_PTR)
2430 fputs ("}\n\n", file);
2435 /*---------------------------------------------------------------------------
2436 Miscellaneous helpers
2437 ---------------------------------------------------------------------------*/
2439 /* Return true if T represents a stmt that always transfers control. */
/* These five codes are exactly the statements that must terminate a
   basic block unconditionally.  */
2442 is_ctrl_stmt (const_tree t)
2444 return (TREE_CODE (t) == COND_EXPR
2445 || TREE_CODE (t) == SWITCH_EXPR
2446 || TREE_CODE (t) == GOTO_EXPR
2447 || TREE_CODE (t) == RETURN_EXPR
2448 || TREE_CODE (t) == RESX_EXPR);
2452 /* Return true if T is a statement that may alter the flow of control
2453 (e.g., a call to a non-returning function). */
2456 is_ctrl_altering_stmt (const_tree t)
/* Check the call possibly embedded in T (e.g. on a MODIFY's RHS).  */
2461 call = get_call_expr_in (CONST_CAST_TREE (t));
2464 /* A non-pure/const CALL_EXPR alters flow control if the current
2465 function has nonlocal labels. */
2466 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2469 /* A CALL_EXPR also alters control flow if it does not return. */
2470 if (call_expr_flags (call) & ECF_NORETURN)
2474 /* OpenMP directives alter control flow. */
2475 if (OMP_DIRECTIVE_P (t))
2478 /* If a statement can throw, it alters control flow. */
2479 return tree_can_throw_internal (t);
2483 /* Return true if T is a computed goto. */
/* A goto whose destination is anything but a plain LABEL_DECL.  */
2486 computed_goto_p (const_tree t)
2488 return (TREE_CODE (t) == GOTO_EXPR
2489 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2493 /* Return true if T is a simple local goto. */
/* The exact complement of computed_goto_p among GOTO_EXPRs.  */
2496 simple_goto_p (const_tree t)
2498 return (TREE_CODE (t) == GOTO_EXPR
2499 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2503 /* Return true if T can make an abnormal transfer of control flow.
2504 Transfers of control flow associated with EH are excluded. */
2507 tree_can_make_abnormal_goto (const_tree t)
2509 if (computed_goto_p (t))
/* Strip wrappers to reach a possible call on the RHS.  */
2511 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
2512 t = GIMPLE_STMT_OPERAND (t, 1);
2513 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2514 t = TREE_OPERAND (t, 0);
/* A call with side effects may return via a nonlocal label.  */
2515 if (TREE_CODE (t) == CALL_EXPR)
2516 return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
2521 /* Return true if T should start a new basic block. PREV_T is the
2522 statement preceding T. It is used when T is a label or a case label.
2523 Labels should only start a new basic block if their previous statement
2524 wasn't a label. Otherwise, sequence of labels would generate
2525 unnecessary basic blocks that only contain a single label. */
2528 stmt_starts_bb_p (const_tree t, const_tree prev_t)
2533 /* LABEL_EXPRs start a new basic block only if the preceding
2534 statement wasn't a label of the same type. This prevents the
2535 creation of consecutive blocks that have nothing but a single
2537 if (TREE_CODE (t) == LABEL_EXPR)
2539 /* Nonlocal and computed GOTO targets always start a new block. */
2540 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2541 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2544 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
/* ...but a label cannot be merged into a preceding nonlocal label's
   block.  */
2546 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
/* Count label blocks merged away for the CFG statistics dump.  */
2549 cfg_stats.num_merged_labels++;
2560 /* Return true if T should end a basic block. */
/* True for unconditional control transfers and statements that may
   alter control flow (noreturn calls, throwers, OpenMP directives).  */
2563 stmt_ends_bb_p (const_tree t)
2565 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2568 /* Remove block annotations and other datastructures. */
/* Frees the per-statement annotation of every statement and drops the
   label-to-block map.  */
2571 delete_tree_cfg_annotations (void)
2574 block_stmt_iterator bsi;
2576 /* Remove annotations from every tree in the function. */
2578 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2580 tree stmt = bsi_stmt (bsi);
2581 ggc_free (stmt->base.ann);
2582 stmt->base.ann = NULL;
/* GC-allocated; clearing the pointer suffices to release the map.  */
2584 label_to_block_map = NULL;
2588 /* Return the first statement in basic block BB. */
/* Returns NULL_TREE when BB is empty.  */
2591 first_stmt (basic_block bb)
2593 block_stmt_iterator i = bsi_start (bb);
2594 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2597 /* Return the last statement in basic block BB. */
/* Returns NULL_TREE when BB is empty.  */
2600 last_stmt (basic_block bb)
2602 block_stmt_iterator b = bsi_last (bb);
2603 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2606 /* Return the last statement of an otherwise empty block. Return NULL
2607 if the block is totally empty, or if it contains more than one
2611 last_and_only_stmt (basic_block bb)
2613 block_stmt_iterator i = bsi_last (bb);
2619 last = bsi_stmt (i);
2624 /* Empty statements should no longer appear in the instruction stream.
2625 Everything that might have appeared before should be deleted by
2626 remove_useless_stmts, and the optimizers should just bsi_remove
2627 instead of smashing with build_empty_stmt.
2629 Thus the only thing that should appear here in a block containing
2630 one executable statement is a label. */
/* A leading label does not disqualify the block from being "only one
   statement".  */
2631 prev = bsi_stmt (i);
2632 if (TREE_CODE (prev) == LABEL_EXPR)
2639 /* Mark BB as the basic block holding statement T. */
/* Recurses over STATEMENT_LISTs; PHI nodes are handled separately.
   For labels, also maintains the label-to-block map used to speed up
   GOTO_EXPR edge creation.  */
2642 set_bb_for_stmt (tree t, basic_block bb)
2644 if (TREE_CODE (t) == PHI_NODE)
2646 else if (TREE_CODE (t) == STATEMENT_LIST)
2648 tree_stmt_iterator i;
2649 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2650 set_bb_for_stmt (tsi_stmt (i), bb);
2654 stmt_ann_t ann = get_stmt_ann (t);
2657 /* If the statement is a label, add the label to block-to-labels map
2658 so that we can speed up edge creation for GOTO_EXPRs. */
2659 if (TREE_CODE (t) == LABEL_EXPR)
2663 t = LABEL_EXPR_LABEL (t);
2664 uid = LABEL_DECL_UID (t);
/* First time this label is placed: assign it a UID and grow the map
   (by 1.5x to amortize reallocations).  */
2667 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2668 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2669 if (old_len <= (unsigned) uid)
2671 unsigned new_len = 3 * uid / 2;
2673 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2678 /* We're moving an existing label. Make sure that we've
2679 removed it from the old block. */
2681 || !VEC_index (basic_block, label_to_block_map, uid));
2682 VEC_replace (basic_block, label_to_block_map, uid, bb);
2687 /* Faster version of set_bb_for_stmt that assume that statement is being moved
2688 from one basic block to another.
2689 For BB splitting we can run into quadratic case, so performance is quite
2690 important and knowing that the tables are big enough, change_bb_for_stmt
2691 can inline as leaf function. */
/* Unlike set_bb_for_stmt, no recursion, no map growth and no UID
   assignment: the label is assumed to already have a map slot.  */
2693 change_bb_for_stmt (tree t, basic_block bb)
2695 get_stmt_ann (t)->bb = bb;
2696 if (TREE_CODE (t) == LABEL_EXPR)
2697 VEC_replace (basic_block, label_to_block_map,
2698 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2701 /* Finds iterator for STMT. */
/* Linear scan of STMT's block; STMT must be present in it.  */
2703 extern block_stmt_iterator
2704 bsi_for_stmt (tree stmt)
2706 block_stmt_iterator bsi;
2708 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2709 if (bsi_stmt (bsi) == stmt)
2715 /* Mark statement T as modified, and update it. */
2717 update_modified_stmts (tree t)
/* SSA operand caches only exist while operands are active; nothing to
   update otherwise. */
2719 if (!ssa_operands_active ())
/* T may be a whole STATEMENT_LIST; update each contained statement. */
2721 if (TREE_CODE (t) == STATEMENT_LIST)
2723 tree_stmt_iterator i;
2725 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2727 stmt = tsi_stmt (i);
2728 update_stmt_if_modified (stmt);
2732 update_stmt_if_modified (t);
2735 /* Insert statement (or statement list) T before the statement
2736 pointed-to by iterator I. M specifies how to update iterator I
2737 after insertion (see enum bsi_iterator_update). */
2740 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
/* Attach T to the iterator's block, refresh SSA operands, then link it
   into the underlying tree statement list. */
2742 set_bb_for_stmt (t, i->bb);
2743 update_modified_stmts (t);
2744 tsi_link_before (&i->tsi, t, m);
2748 /* Insert statement (or statement list) T after the statement
2749 pointed-to by iterator I. M specifies how to update iterator I
2750 after insertion (see enum bsi_iterator_update). */
2753 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
/* Mirror of bsi_insert_before: set the block, update operands, link after. */
2755 set_bb_for_stmt (t, i->bb);
2756 update_modified_stmts (t);
2757 tsi_link_after (&i->tsi, t, m);
2761 /* Remove the statement pointed to by iterator I. The iterator is updated
2762 to the next statement.
2764 When REMOVE_EH_INFO is true we remove the statement pointed to by
2765 iterator I from the EH tables. Otherwise we do not modify the EH
2768 Generally, REMOVE_EH_INFO should be true when the statement is going to
2769 be removed from the IL and not reinserted elsewhere. */
2772 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2774 tree t = bsi_stmt (*i);
/* Detach from the block, break immediate-use links, unlink from the
   statement list, and mark modified so SSA operands get rescanned if
   the statement is ever reinserted. */
2775 set_bb_for_stmt (t, NULL);
2776 delink_stmt_imm_use (t);
2777 tsi_delink (&i->tsi);
2778 mark_stmt_modified (t);
/* Profile histograms are dropped unconditionally here; EH region info is
   removed under remove_eh_info (guard not visible in this excerpt). */
2781 remove_stmt_from_eh_region (t);
2782 gimple_remove_stmt_histograms (cfun, t);
2787 /* Move the statement at FROM so it comes right after the statement at TO. */
2790 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2792 tree stmt = bsi_stmt (*from);
/* Remove without purging EH info: the statement is reinserted immediately. */
2793 bsi_remove (from, false);
2794 /* We must have BSI_NEW_STMT here, as bsi_move_after is sometimes used to
2795 move statements to an empty block. */
2796 bsi_insert_after (to, stmt, BSI_NEW_STMT);
2800 /* Move the statement at FROM so it comes right before the statement at TO. */
2803 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2805 tree stmt = bsi_stmt (*from);
/* false: keep EH info, the statement is only relocated, not deleted. */
2806 bsi_remove (from, false);
2807 /* For consistency with bsi_move_after, it might be better to have
2808 BSI_NEW_STMT here; however, that breaks several places that expect
2809 that TO does not change. */
2810 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2814 /* Move the statement at FROM to the end of basic block BB. */
2817 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2819 block_stmt_iterator last = bsi_last (bb);
2821 /* Have to check bsi_end_p because it could be an empty block. */
/* A control statement must stay last in its block, so insert before it;
   otherwise append after the current last statement. */
2822 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2823 bsi_move_before (from, &last);
2825 bsi_move_after (from, &last);
2829 /* Replace the contents of the statement pointed to by iterator BSI
2830 with STMT. If UPDATE_EH_INFO is true, the exception handling
2831 information of the original statement is moved to the new statement. */
2834 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2837 tree orig_stmt = bsi_stmt (*bsi);
/* Replacing a statement with itself is a no-op. */
2839 if (stmt == orig_stmt)
/* Preserve source location and block membership on the replacement. */
2841 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2842 set_bb_for_stmt (stmt, bsi->bb);
2844 /* Preserve EH region information from the original statement, if
2845 requested by the caller. */
2848 eh_region = lookup_stmt_eh_region (orig_stmt);
2851 remove_stmt_from_eh_region (orig_stmt);
2852 add_stmt_to_eh_region (stmt, eh_region);
/* Carry value-profile histograms over to the new statement, then drop
   the old statement's copies and its use links before splicing in. */
2856 gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
2857 gimple_remove_stmt_histograms (cfun, orig_stmt);
2858 delink_stmt_imm_use (orig_stmt);
2859 *bsi_stmt_ptr (*bsi) = stmt;
2860 mark_stmt_modified (stmt);
2861 update_modified_stmts (stmt);
2865 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2866 is made to place the statement in an existing basic block, but
2867 sometimes that isn't possible. When it isn't possible, the edge is
2868 split and the statement is added to the new block.
2870 In all cases, the returned *BSI points to the correct location. The
2871 return value is true if insertion should be done after the location,
2872 or false if it should be done before the location. If new basic block
2873 has to be created, it is stored in *NEW_BB. */
2876 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2877 basic_block *new_bb)
2879 basic_block dest, src;
2885 /* If the destination has one predecessor which has no PHI nodes,
2886 insert there. Except for the exit block.
2888 The requirement for no PHI nodes could be relaxed. Basically we
2889 would have to examine the PHIs to prove that none of them used
2890 the value set by the statement we want to insert on E. That
2891 hardly seems worth the effort. */
2892 if (single_pred_p (dest)
2893 && ! phi_nodes (dest)
2894 && dest != EXIT_BLOCK_PTR)
2896 *bsi = bsi_start (dest);
2897 if (bsi_end_p (*bsi))
2900 /* Make sure we insert after any leading labels. */
2901 tmp = bsi_stmt (*bsi);
2902 while (TREE_CODE (tmp) == LABEL_EXPR)
2905 if (bsi_end_p (*bsi))
2907 tmp = bsi_stmt (*bsi);
2910 if (bsi_end_p (*bsi))
2912 *bsi = bsi_last (dest);
2919 /* If the source has one successor, the edge is not abnormal and
2920 the last statement does not end a basic block, insert there.
2921 Except for the entry block. */
2923 if ((e->flags & EDGE_ABNORMAL) == 0
2924 && single_succ_p (src)
2925 && src != ENTRY_BLOCK_PTR)
2927 *bsi = bsi_last (src);
2928 if (bsi_end_p (*bsi))
2931 tmp = bsi_stmt (*bsi);
2932 if (!stmt_ends_bb_p (tmp))
2935 /* Insert code just before returning the value. We may need to decompose
2936 the return in the case it contains non-trivial operand. */
2937 if (TREE_CODE (tmp) == RETURN_EXPR)
2939 tree op = TREE_OPERAND (tmp, 0);
2940 if (op && !is_gimple_val (op))
/* The return value computation is hoisted out as its own statement so
   code can be inserted between it and the RETURN_EXPR. */
2942 gcc_assert (TREE_CODE (op) == GIMPLE_MODIFY_STMT);
2943 bsi_insert_before (bsi, op, BSI_NEW_STMT);
2944 TREE_OPERAND (tmp, 0) = GIMPLE_STMT_OPERAND (op, 0);
2951 /* Otherwise, create a new basic block, and split this edge. */
2952 dest = split_edge (e);
/* After the split, retry on the single edge into the fresh block
   (tail recursion/loop back — exact control flow not visible here). */
2955 e = single_pred_edge (dest);
2960 /* This routine will commit all pending edge insertions, creating any new
2961 basic blocks which are necessary. */
2964 bsi_commit_edge_inserts (void)
/* The entry block's successor edge is handled explicitly, then every
   successor edge of every block is flushed. */
2970 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
2973 FOR_EACH_EDGE (e, ei, bb->succs)
2974 bsi_commit_one_edge_insert (e, NULL);
2978 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
2979 to this block, otherwise set it to NULL. */
2982 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
2986 if (PENDING_STMT (e))
2988 block_stmt_iterator bsi;
2989 tree stmt = PENDING_STMT (e);
/* Clear the pending list first so the insertion routines below do not
   see the statements as still pending on E. */
2991 PENDING_STMT (e) = NULL_TREE;
/* tree_find_edge_insert_loc returns true for "insert after" placement. */
2993 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
2994 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2996 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3001 /* Add STMT to the pending list of edge E. No actual insertion is
3002 made until a call to bsi_commit_edge_inserts () is made. */
3005 bsi_insert_on_edge (edge e, tree stmt)
/* Deferred insertion: just queue on the edge's pending statement list. */
3007 append_to_statement_list (stmt, &PENDING_STMT (e));
3010 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3011 block has to be created, it is returned. */
3014 bsi_insert_on_edge_immediate (edge e, tree stmt)
3016 block_stmt_iterator bsi;
3017 basic_block new_bb = NULL;
/* Immediate insertion must not race with queued insertions on E. */
3019 gcc_assert (!PENDING_STMT (e));
3021 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3022 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3024 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3029 /*---------------------------------------------------------------------------
3030 Tree specific functions for CFG manipulation
3031 ---------------------------------------------------------------------------*/
3033 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3036 reinstall_phi_args (edge new_edge, edge old_edge)
3039 edge_var_map_vector v;
3043 v = redirect_edge_var_map_vector (old_edge);
/* Walk the saved (result, def) pairs in lockstep with the destination's
   PHI chain; ordering is assumed to match (checked by the assert). */
3047 for (i = 0, phi = phi_nodes (new_edge->dest);
3048 VEC_iterate (edge_var_map, v, i, vm) && phi;
3049 i++, phi = PHI_CHAIN (phi))
3051 tree result = redirect_edge_var_map_result (vm);
3052 tree arg = redirect_edge_var_map_def (vm);
3054 gcc_assert (result == PHI_RESULT (phi));
3056 add_phi_arg (phi, arg, new_edge);
/* The saved map is consumed; clear it so OLD_EDGE carries no stale data. */
3059 redirect_edge_var_map_clear (old_edge);
3062 /* Returns the basic block after which the new basic block created
3063 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3064 near its "logical" location. This is of most help to humans looking
3065 at debugging dumps. */
3068 split_edge_bb_loc (edge edge_in)
3070 basic_block dest = edge_in->dest;
/* If the block preceding DEST already falls through/branches to DEST,
   place the new block after the edge's source instead, so the dump
   order follows the flow being split. */
3072 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3073 return edge_in->src;
3075 return dest->prev_bb;
3078 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3079 Abort on abnormal edges. */
3082 tree_split_edge (edge edge_in)
3084 basic_block new_bb, after_bb, dest;
3087 /* Abnormal edges cannot be split. */
3088 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3090 dest = edge_in->dest;
3092 after_bb = split_edge_bb_loc (edge_in);
3094 new_bb = create_empty_bb (after_bb);
/* The new block inherits the split edge's execution frequency/count;
   its single outgoing fallthru edge is therefore taken always
   (probability REG_BR_PROB_BASE). */
3095 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3096 new_bb->count = edge_in->count;
3097 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3098 new_edge->probability = REG_BR_PROB_BASE;
3099 new_edge->count = edge_in->count;
/* Redirect the original edge into the new block, then restore the PHI
   arguments (queued by the redirection) onto the new fallthru edge. */
3101 e = redirect_edge_and_branch (edge_in, new_bb);
3102 gcc_assert (e == edge_in);
3103 reinstall_phi_args (new_edge, e);
3108 /* Callback for walk_tree, check that all elements with address taken are
3109 properly noticed as such. The DATA is an int* that is 1 if TP was seen
3110 inside a PHI node. */
/* NOTE(review): many intermediate lines are missing from this excerpt;
   several cases below appear without their `case` labels or braces. */
3113 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3116 bool in_phi = (data != NULL);
3121 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3122 #define CHECK_OP(N, MSG) \
3123 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3124 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3126 switch (TREE_CODE (t))
3129 if (SSA_NAME_IN_FREE_LIST (t))
3131 error ("SSA name in freelist but still referenced");
/* ASSERT_EXPR: a condition that folds to false can never hold. */
3137 x = fold (ASSERT_EXPR_COND (t));
3138 if (x == boolean_false_node)
3140 error ("ASSERT_EXPR with an always-false condition");
3148 case GIMPLE_MODIFY_STMT:
3149 x = GIMPLE_STMT_OPERAND (t, 0);
3150 if (TREE_CODE (x) == BIT_FIELD_REF
3151 && is_gimple_reg (TREE_OPERAND (x, 0)))
3153 error ("GIMPLE register modified with BIT_FIELD_REF");
3162 bool old_side_effects;
3165 bool new_side_effects;
3167 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3168 dead PHIs that take the address of something. But if the PHI
3169 result is dead, the fact that it takes the address of anything
3170 is irrelevant. Because we can not tell from here if a PHI result
3171 is dead, we just skip this check for PHIs altogether. This means
3172 we may be missing "valid" checks, but what can you do?
3173 This was PR19217. */
3176 if (!is_gimple_min_invariant (t))
3178 error ("non-invariant address expression in PHI argument")
/* Recompute the ADDR_EXPR's invariant/constant/side-effect flags and
   verify the cached values were kept up to date. */
3184 old_invariant = TREE_INVARIANT (t);
3185 old_constant = TREE_CONSTANT (t);
3186 old_side_effects = TREE_SIDE_EFFECTS (t);
3188 recompute_tree_invariant_for_addr_expr (t);
3189 new_invariant = TREE_INVARIANT (t);
3190 new_side_effects = TREE_SIDE_EFFECTS (t);
3191 new_constant = TREE_CONSTANT (t);
3193 if (old_invariant != new_invariant)
3195 error ("invariant not recomputed when ADDR_EXPR changed");
3199 if (old_constant != new_constant)
3201 error ("constant not recomputed when ADDR_EXPR changed");
3204 if (old_side_effects != new_side_effects)
3206 error ("side effects not recomputed when ADDR_EXPR changed");
3210 /* Skip any references (they will be checked when we recurse down the
3211 tree) and ensure that any variable used as a prefix is marked
3213 for (x = TREE_OPERAND (t, 0);
3214 handled_component_p (x);
3215 x = TREE_OPERAND (x, 0))
3218 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3220 if (!TREE_ADDRESSABLE (x))
3222 error ("address taken, but ADDRESSABLE bit not set");
/* COND_EXPR (presumably): condition must be integral and valid GIMPLE. */
3230 x = COND_EXPR_COND (t);
3231 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3233 error ("non-integral used in condition");
3236 if (!is_gimple_condexpr (x))
3238 error ("invalid conditional operand");
3245 case FIX_TRUNC_EXPR:
3250 case NON_LVALUE_EXPR:
3251 case TRUTH_NOT_EXPR:
3252 CHECK_OP (0, "invalid operand to unary operator");
3259 case ARRAY_RANGE_REF:
3261 case VIEW_CONVERT_EXPR:
3262 /* We have a nest of references. Verify that each of the operands
3263 that determine where to reference is either a constant or a variable,
3264 verify that the base is valid, and then show we've already checked
3266 while (handled_component_p (t))
3268 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3269 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3270 else if (TREE_CODE (t) == ARRAY_REF
3271 || TREE_CODE (t) == ARRAY_RANGE_REF)
3273 CHECK_OP (1, "invalid array index");
3274 if (TREE_OPERAND (t, 2))
3275 CHECK_OP (2, "invalid array lower bound");
3276 if (TREE_OPERAND (t, 3))
3277 CHECK_OP (3, "invalid array stride");
3279 else if (TREE_CODE (t) == BIT_FIELD_REF)
/* BIT_FIELD_REF: position/size must be host integers and the result
   type's precision must match the referenced field size. */
3281 if (!host_integerp (TREE_OPERAND (t, 1), 1)
3282 || !host_integerp (TREE_OPERAND (t, 2), 1))
3284 error ("invalid position or size operand to BIT_FIELD_REF");
3287 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3288 && (TYPE_PRECISION (TREE_TYPE (t))
3289 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3291 error ("integral result type precision does not match "
3292 "field size of BIT_FIELD_REF");
3295 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3296 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
3297 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3299 error ("mode precision of non-integral result does not "
3300 "match field size of BIT_FIELD_REF");
3305 t = TREE_OPERAND (t, 0);
3308 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3310 error ("invalid reference prefix");
3317 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3318 POINTER_PLUS_EXPR. */
3319 if (POINTER_TYPE_P (TREE_TYPE (t)))
3321 error ("invalid operand to plus/minus, type is a pointer");
3324 CHECK_OP (0, "invalid operand to binary operator");
3325 CHECK_OP (1, "invalid operand to binary operator");
3328 case POINTER_PLUS_EXPR:
3329 /* Check to make sure the first operand is a pointer or reference type. */
3330 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3332 error ("invalid operand to pointer plus, first operand is not a pointer");
3335 /* Check to make sure the second operand is an integer with type of
3337 if (!useless_type_conversion_p (sizetype,
3338 TREE_TYPE (TREE_OPERAND (t, 1))))
3340 error ("invalid operand to pointer plus, second operand is not an "
3341 "integer with type of sizetype.");
3351 case UNORDERED_EXPR:
3360 case TRUNC_DIV_EXPR:
3362 case FLOOR_DIV_EXPR:
3363 case ROUND_DIV_EXPR:
3364 case TRUNC_MOD_EXPR:
3366 case FLOOR_MOD_EXPR:
3367 case ROUND_MOD_EXPR:
3369 case EXACT_DIV_EXPR:
/* All remaining binary operators share the generic operand validity check. */
3379 CHECK_OP (0, "invalid operand to binary operator");
3380 CHECK_OP (1, "invalid operand to binary operator");
3384 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3396 /* Verifies if EXPR is a valid GIMPLE unary expression. Returns true
3397 if there is an error, otherwise false. */
3400 verify_gimple_unary_expr (const_tree expr)
3402 tree op = TREE_OPERAND (expr, 0);
3403 tree type = TREE_TYPE (expr);
3405 if (!is_gimple_val (op))
3407 error ("invalid operand in unary expression");
3411 /* For general unary expressions we have the operations type
3412 as the effective type the operation is carried out on. So all
3413 we need to require is that the operand is trivially convertible
3415 if (!useless_type_conversion_p (type, TREE_TYPE (op)))
3417 error ("type mismatch in unary expression");
/* Dump both types to aid debugging before signaling failure. */
3418 debug_generic_expr (type);
3419 debug_generic_expr (TREE_TYPE (op));
3426 /* Verifies if EXPR is a valid GIMPLE binary expression. Returns true
3427 if there is an error, otherwise false. */
3430 verify_gimple_binary_expr (const_tree expr)
3432 tree op0 = TREE_OPERAND (expr, 0);
3433 tree op1 = TREE_OPERAND (expr, 1);
3434 tree type = TREE_TYPE (expr);
/* Both operands must be valid GIMPLE values (registers or invariants). */
3436 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3438 error ("invalid operands in binary expression");
3442 /* For general binary expressions we have the operations type
3443 as the effective type the operation is carried out on. So all
3444 we need to require is that both operands are trivially convertible
3446 if (!useless_type_conversion_p (type, TREE_TYPE (op0))
3447 || !useless_type_conversion_p (type, TREE_TYPE (op1)))
3449 error ("type mismatch in binary expression");
3450 debug_generic_stmt (type);
3451 debug_generic_stmt (TREE_TYPE (op0));
3452 debug_generic_stmt (TREE_TYPE (op1));
3459 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3460 Returns true if there is an error, otherwise false. */
3463 verify_gimple_min_lval (tree expr)
/* A plain identifier (decl) is always a valid minimal lvalue. */
3467 if (is_gimple_id (expr))
3470 if (TREE_CODE (expr) != INDIRECT_REF
3471 && TREE_CODE (expr) != ALIGN_INDIRECT_REF
3472 && TREE_CODE (expr) != MISALIGNED_INDIRECT_REF)
3474 error ("invalid expression for min lvalue");
/* For an indirection: the pointer must be a GIMPLE value and the
   dereferenced type must agree with the expression's type. */
3478 op = TREE_OPERAND (expr, 0);
3479 if (!is_gimple_val (op))
3481 error ("invalid operand in indirect reference");
3482 debug_generic_stmt (op);
3485 if (!useless_type_conversion_p (TREE_TYPE (expr),
3486 TREE_TYPE (TREE_TYPE (op))))
3488 error ("type mismatch in indirect reference");
3489 debug_generic_stmt (TREE_TYPE (expr));
3490 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3497 /* Verify if EXPR is a valid GIMPLE reference expression. Returns true
3498 if there is an error, otherwise false. */
3501 verify_gimple_reference (tree expr)
/* Peel the reference chain component by component, checking each level;
   the loop ends at the base, which is validated as a min lvalue. */
3503 while (handled_component_p (expr))
3505 tree op = TREE_OPERAND (expr, 0);
3507 if (TREE_CODE (expr) == ARRAY_REF
3508 || TREE_CODE (expr) == ARRAY_RANGE_REF)
/* Index, lower bound and stride (operands 1-3) must all be GIMPLE values
   when present. */
3510 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3511 || (TREE_OPERAND (expr, 2)
3512 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3513 || (TREE_OPERAND (expr, 3)
3514 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3516 error ("invalid operands to array reference");
3517 debug_generic_stmt (expr);
3522 /* Verify if the reference array element types are compatible. */
3523 if (TREE_CODE (expr) == ARRAY_REF
3524 && !useless_type_conversion_p (TREE_TYPE (expr),
3525 TREE_TYPE (TREE_TYPE (op))))
3527 error ("type mismatch in array reference");
3528 debug_generic_stmt (TREE_TYPE (expr));
3529 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3532 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3533 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3534 TREE_TYPE (TREE_TYPE (op))))
3536 error ("type mismatch in array range reference");
3537 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3538 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3542 if ((TREE_CODE (expr) == REALPART_EXPR
3543 || TREE_CODE (expr) == IMAGPART_EXPR)
3544 && !useless_type_conversion_p (TREE_TYPE (expr),
3545 TREE_TYPE (TREE_TYPE (op))))
3547 error ("type mismatch in real/imagpart reference");
3548 debug_generic_stmt (TREE_TYPE (expr));
3549 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3553 if (TREE_CODE (expr) == COMPONENT_REF
3554 && !useless_type_conversion_p (TREE_TYPE (expr),
3555 TREE_TYPE (TREE_OPERAND (expr, 1))))
3557 error ("type mismatch in component reference");
3558 debug_generic_stmt (TREE_TYPE (expr));
3559 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3563 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3564 is nothing to verify. Gross mismatches at most invoke
3565 undefined behavior. */
3570 return verify_gimple_min_lval (expr);
3573 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3574 list of pointer-to types that is trivially convertible to DEST. */
3577 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
/* No pointer-to types recorded at all: nothing can match. */
3581 if (!TYPE_POINTER_TO (src_obj))
/* Linear scan of the pointer-to variant chain for one compatible type. */
3584 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3585 if (useless_type_conversion_p (dest, src))
3591 /* Verify the GIMPLE expression EXPR. Returns true if there is an
3592 error, otherwise false. */
/* NOTE(review): intermediate lines are missing from this excerpt; some
   `case` labels, braces and returns below are not visible. */
3595 verify_gimple_expr (tree expr)
3597 tree type = TREE_TYPE (expr);
/* A bare GIMPLE value is trivially valid. */
3599 if (is_gimple_val (expr))
3602 /* Special codes we cannot handle via their class. */
3603 switch (TREE_CODE (expr))
/* NOP/CONVERT-style conversions (case labels not visible here). */
3608 tree op = TREE_OPERAND (expr, 0);
3609 if (!is_gimple_val (op))
3611 error ("invalid operand in conversion");
3615 /* Allow conversions between integral types and between
3617 if ((INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
3618 || (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (op))))
3621 /* Allow conversions between integral types and pointers only if
3622 there is no sign or zero extension involved. */
3623 if (((POINTER_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
3624 || (POINTER_TYPE_P (TREE_TYPE (op)) && INTEGRAL_TYPE_P (type)))
3625 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op)))
3628 /* Allow conversion from integer to offset type and vice versa. */
3629 if ((TREE_CODE (type) == OFFSET_TYPE
3630 && TREE_CODE (TREE_TYPE (op)) == INTEGER_TYPE)
3631 || (TREE_CODE (type) == INTEGER_TYPE
3632 && TREE_CODE (TREE_TYPE (op)) == OFFSET_TYPE))
3635 /* Otherwise assert we are converting between types of the
3637 if (TREE_CODE (type) != TREE_CODE (TREE_TYPE (op)))
3639 error ("invalid types in nop conversion");
3640 debug_generic_expr (type);
3641 debug_generic_expr (TREE_TYPE (op));
/* Integer -> float conversion (FLOAT_EXPR; label not visible). */
3650 tree op = TREE_OPERAND (expr, 0);
3651 if (!is_gimple_val (op))
3653 error ("invalid operand in int to float conversion");
3656 if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
3657 || !SCALAR_FLOAT_TYPE_P (type))
3659 error ("invalid types in conversion to floating point");
3660 debug_generic_expr (type);
3661 debug_generic_expr (TREE_TYPE (op));
3667 case FIX_TRUNC_EXPR:
3669 tree op = TREE_OPERAND (expr, 0);
3670 if (!is_gimple_val (op))
3672 error ("invalid operand in float to int conversion");
3675 if (!INTEGRAL_TYPE_P (type)
3676 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (op)))
3678 error ("invalid types in conversion to integer");
3679 debug_generic_expr (type);
3680 debug_generic_expr (TREE_TYPE (op));
/* COMPLEX_EXPR (label not visible): both parts must be GIMPLE values
   whose types match the complex element type. */
3688 tree op0 = TREE_OPERAND (expr, 0);
3689 tree op1 = TREE_OPERAND (expr, 1);
3690 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3692 error ("invalid operands in complex expression");
/* NOTE(review): `!TREE_CODE (type) == COMPLEX_TYPE` compares the negated
   code against COMPLEX_TYPE; likely intended `TREE_CODE (type) !=
   COMPLEX_TYPE` — same issue at the shift check below. Not changed in a
   doc-only edit. */
3695 if (!TREE_CODE (type) == COMPLEX_TYPE
3696 || !(TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
3697 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op0)))
3698 || !(TREE_CODE (TREE_TYPE (op1)) == INTEGER_TYPE
3699 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op1)))
3700 || !useless_type_conversion_p (TREE_TYPE (type),
3702 || !useless_type_conversion_p (TREE_TYPE (type),
3705 error ("type mismatch in complex expression");
3706 debug_generic_stmt (TREE_TYPE (expr));
3707 debug_generic_stmt (TREE_TYPE (op0));
3708 debug_generic_stmt (TREE_TYPE (op1));
3716 /* This is used like COMPLEX_EXPR but for vectors. */
3717 if (TREE_CODE (type) != VECTOR_TYPE)
3719 error ("constructor not allowed for non-vector types");
3720 debug_generic_stmt (type);
3723 /* FIXME: verify constructor arguments. */
/* Shift expressions (labels not visible): shift count must be integer,
   result type must match the shifted operand. */
3732 tree op0 = TREE_OPERAND (expr, 0);
3733 tree op1 = TREE_OPERAND (expr, 1);
3734 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3736 error ("invalid operands in shift expression");
3739 if (!TREE_CODE (TREE_TYPE (op1)) == INTEGER_TYPE
3740 || !useless_type_conversion_p (type, TREE_TYPE (op0)))
3742 error ("type mismatch in shift expression");
3743 debug_generic_stmt (TREE_TYPE (expr));
3744 debug_generic_stmt (TREE_TYPE (op0));
3745 debug_generic_stmt (TREE_TYPE (op1));
/* PLUS/MINUS (labels not visible): pointer arithmetic must use
   POINTER_PLUS_EXPR instead. */
3754 tree op0 = TREE_OPERAND (expr, 0);
3755 tree op1 = TREE_OPERAND (expr, 1);
3756 if (POINTER_TYPE_P (type)
3757 || POINTER_TYPE_P (TREE_TYPE (op0))
3758 || POINTER_TYPE_P (TREE_TYPE (op1)))
3760 error ("invalid (pointer) operands to plus/minus");
3763 /* Continue with generic binary expression handling. */
3767 case POINTER_PLUS_EXPR:
3769 tree op0 = TREE_OPERAND (expr, 0);
3770 tree op1 = TREE_OPERAND (expr, 1);
3771 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3773 error ("invalid operands in pointer plus expression");
3776 if (!POINTER_TYPE_P (TREE_TYPE (op0))
3777 || !useless_type_conversion_p (type, TREE_TYPE (op0))
3778 || !useless_type_conversion_p (sizetype, TREE_TYPE (op1)))
3780 error ("type mismatch in pointer plus expression");
3781 debug_generic_stmt (type);
3782 debug_generic_stmt (TREE_TYPE (op0));
3783 debug_generic_stmt (TREE_TYPE (op1));
/* COND_EXPR (label not visible): arms may be VOID (statement form) or
   must convert to the result type; the condition is verified recursively. */
3791 tree op0 = TREE_OPERAND (expr, 0);
3792 tree op1 = TREE_OPERAND (expr, 1);
3793 tree op2 = TREE_OPERAND (expr, 2);
3794 if ((!is_gimple_val (op1)
3795 && TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE)
3796 || (!is_gimple_val (op2)
3797 && TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE))
3799 error ("invalid operands in conditional expression");
3802 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
3803 || (TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE
3804 && !useless_type_conversion_p (type, TREE_TYPE (op1)))
3805 || (TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE
3806 && !useless_type_conversion_p (type, TREE_TYPE (op2))))
3808 error ("type mismatch in conditional expression");
3809 debug_generic_stmt (type);
3810 debug_generic_stmt (TREE_TYPE (op0));
3811 debug_generic_stmt (TREE_TYPE (op1));
3812 debug_generic_stmt (TREE_TYPE (op2));
3815 return verify_gimple_expr (op0);
/* ADDR_EXPR (label not visible): operand must be addressable and the
   result type a compatible pointer (&a == &a[0] wart tolerated). */
3820 tree op = TREE_OPERAND (expr, 0);
3821 if (!is_gimple_addressable (op))
3823 error ("invalid operand in unary expression");
3826 if (!one_pointer_to_useless_type_conversion_p (type, TREE_TYPE (op))
3827 /* FIXME: a longstanding wart, &a == &a[0]. */
3828 && (TREE_CODE (TREE_TYPE (op)) != ARRAY_TYPE
3829 || !one_pointer_to_useless_type_conversion_p (type,
3830 TREE_TYPE (TREE_TYPE (op)))))
3832 error ("type mismatch in address expression");
3833 debug_generic_stmt (TREE_TYPE (expr));
3834 debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
3838 return verify_gimple_reference (op);
3841 case TRUTH_ANDIF_EXPR:
3842 case TRUTH_ORIF_EXPR:
3843 case TRUTH_AND_EXPR:
3845 case TRUTH_XOR_EXPR:
3847 tree op0 = TREE_OPERAND (expr, 0);
3848 tree op1 = TREE_OPERAND (expr, 1);
3850 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3852 error ("invalid operands in truth expression");
3856 /* We allow any kind of integral typed argument and result. */
3857 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
3858 || !INTEGRAL_TYPE_P (TREE_TYPE (op1))
3859 || !INTEGRAL_TYPE_P (type))
3861 error ("type mismatch in binary truth expression");
3862 debug_generic_stmt (type);
3863 debug_generic_stmt (TREE_TYPE (op0));
3864 debug_generic_stmt (TREE_TYPE (op1));
3871 case TRUTH_NOT_EXPR:
3873 tree op = TREE_OPERAND (expr, 0);
3875 if (!is_gimple_val (op))
3877 error ("invalid operand in unary not");
3881 /* For TRUTH_NOT_EXPR we can have any kind of integral
3882 typed arguments and results. */
3883 if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
3884 || !INTEGRAL_TYPE_P (type))
3886 error ("type mismatch in not expression");
3887 debug_generic_expr (TREE_TYPE (expr));
3888 debug_generic_expr (TREE_TYPE (op));
3896 /* FIXME. The C frontend passes unpromoted arguments in case it
3897 didn't see a function declaration before the call. */
3907 /* Generic handling via classes. */
3908 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3911 return verify_gimple_unary_expr (expr);
3914 return verify_gimple_binary_expr (expr);
3917 return verify_gimple_reference (expr);
3919 case tcc_comparison:
3921 tree op0 = TREE_OPERAND (expr, 0);
3922 tree op1 = TREE_OPERAND (expr, 1);
3923 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3925 error ("invalid operands in comparison expression");
3928 /* For comparisons we do not have the operations type as the
3929 effective type the comparison is carried out in. Instead
3930 we require that either the first operand is trivially
3931 convertible into the second, or the other way around.
3932 The resulting type of a comparison may be any integral type.
3933 Because we special-case pointers to void we allow
3934 comparisons of pointers with the same mode as well. */
3935 if ((!useless_type_conversion_p (TREE_TYPE (op0), TREE_TYPE (op1))
3936 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0))
3937 && (!POINTER_TYPE_P (TREE_TYPE (op0))
3938 || !POINTER_TYPE_P (TREE_TYPE (op1))
3939 || TYPE_MODE (TREE_TYPE (op0)) != TYPE_MODE (TREE_TYPE (op1))))
3940 || !INTEGRAL_TYPE_P (type))
3942 error ("type mismatch in comparison expression");
3943 debug_generic_stmt (TREE_TYPE (expr));
3944 debug_generic_stmt (TREE_TYPE (op0));
3945 debug_generic_stmt (TREE_TYPE (op1));
3958 /* Verify the GIMPLE assignment statement STMT. Returns true if there
3959 is an error, otherwise false. */
3962 verify_gimple_modify_stmt (const_tree stmt)
3964 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3965 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3967 gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
/* LHS and RHS types must agree up to useless conversions. */
3969 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3972 error ("non-trivial conversion at assignment");
3973 debug_generic_expr (TREE_TYPE (lhs));
3974 debug_generic_expr (TREE_TYPE (rhs));
3978 /* Loads/stores from/to a variable are ok. */
3979 if ((is_gimple_val (lhs)
3980 && is_gimple_variable (rhs))
3981 || (is_gimple_val (rhs)
3982 && is_gimple_variable (lhs)))
3985 /* Aggregate copies are ok. */
3986 if (!is_gimple_reg_type (TREE_TYPE (lhs))
3987 && !is_gimple_reg_type (TREE_TYPE (rhs)))
3990 /* We might get 'loads' from a parameter which is not a gimple value. */
3991 if (TREE_CODE (rhs) == PARM_DECL)
3992 return verify_gimple_expr (lhs);
/* Otherwise verify each non-variable side as a GIMPLE expression. */
3994 if (!is_gimple_variable (lhs)
3995 && verify_gimple_expr (lhs))
3998 if (!is_gimple_variable (rhs)
3999 && verify_gimple_expr (rhs))
4005 /* Verify the GIMPLE statement STMT. Returns true if there is an
4006 error, otherwise false. */
4009 verify_gimple_stmt (tree stmt)
4011 if (!is_gimple_stmt (stmt))
4013 error ("is not a valid GIMPLE statement");
4017 if (OMP_DIRECTIVE_P (stmt))
4019 /* OpenMP directives are validated by the FE and never operated
4020 on by the optimizers. Furthermore, OMP_FOR may contain
4021 non-gimple expressions when the main index variable has had
4022 its address taken. This does not affect the loop itself
4023 because the header of an OMP_FOR is merely used to determine
4024 how to setup the parallel iteration. */
4028 switch (TREE_CODE (stmt))
4030 case GIMPLE_MODIFY_STMT:
4031 return verify_gimple_modify_stmt (stmt);
/* SWITCH_EXPR (label not visible): index must be a GIMPLE value. */
4038 if (!is_gimple_val (TREE_OPERAND (stmt, 0)))
4040 error ("invalid operand to switch statement");
4041 debug_generic_expr (TREE_OPERAND (stmt, 0));
/* RETURN_EXPR (label not visible): must have VOID type; a non-trivial
   operand is checked as an embedded GIMPLE_MODIFY_STMT. */
4047 tree op = TREE_OPERAND (stmt, 0);
4049 if (TREE_CODE (TREE_TYPE (stmt)) != VOID_TYPE)
4051 error ("type error in return expression");
4056 || TREE_CODE (op) == RESULT_DECL)
4059 return verify_gimple_modify_stmt (op);
4064 return verify_gimple_expr (stmt);
4067 case CHANGE_DYNAMIC_TYPE_EXPR:
4077 /* Verify the GIMPLE statements inside the statement list STMTS.
4078 Returns true if there were any errors. */
4081 verify_gimple_2 (tree stmts)
4083 tree_stmt_iterator tsi;
4086 for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
4088 tree stmt = tsi_stmt (tsi);
/* Recurse into statement containers (binds, EH constructs); everything
   else is verified as an individual statement. */
4090 switch (TREE_CODE (stmt))
4093 err |= verify_gimple_2 (BIND_EXPR_BODY (stmt));
4096 case TRY_CATCH_EXPR:
4097 case TRY_FINALLY_EXPR:
4098 err |= verify_gimple_2 (TREE_OPERAND (stmt, 0));
4099 err |= verify_gimple_2 (TREE_OPERAND (stmt, 1));
4103 err |= verify_gimple_2 (CATCH_BODY (stmt));
4106 case EH_FILTER_EXPR:
4107 err |= verify_gimple_2 (EH_FILTER_FAILURE (stmt));
4112 bool err2 = verify_gimple_stmt (stmt);
4114 debug_generic_expr (stmt);
4124 /* Verify the GIMPLE statements inside the statement list STMTS. */
4127 verify_gimple_1 (tree stmts)
4129 if (verify_gimple_2 (stmts))
4130 internal_error ("verify_gimple failed");
4133 /* Verify the GIMPLE statements inside the current function. */
4136 verify_gimple (void)
4138 verify_gimple_1 (BIND_EXPR_BODY (DECL_SAVED_TREE (cfun->decl)));
4141 /* Verify STMT, return true if STMT is not in GIMPLE form.
4142 TODO: Implement type checking. */
4145 verify_stmt (tree stmt, bool last_in_block)
4149 if (OMP_DIRECTIVE_P (stmt))
4151 /* OpenMP directives are validated by the FE and never operated
4152 on by the optimizers. Furthermore, OMP_FOR may contain
4153 non-gimple expressions when the main index variable has had
4154 its address taken. This does not affect the loop itself
4155 because the header of an OMP_FOR is merely used to determine
4156 how to setup the parallel iteration. */
4160 if (!is_gimple_stmt (stmt))
4162 error ("is not a valid GIMPLE statement");
4166 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
4169 debug_generic_stmt (addr);
4173 /* If the statement is marked as part of an EH region, then it is
4174 expected that the statement could throw. Verify that when we
4175 have optimizations that simplify statements such that we prove
4176 that they cannot throw, that we update other data structures
4178 if (lookup_stmt_eh_region (stmt) >= 0)
4180 if (!tree_could_throw_p (stmt))
4182 error ("statement marked for throw, but doesn%'t");
4185 if (!last_in_block && tree_can_throw_internal (stmt))
4187 error ("statement marked for throw in middle of block");
4195 debug_generic_stmt (stmt);
4200 /* Return true when the T can be shared. */
4203 tree_node_can_be_shared (tree t)
4205 if (IS_TYPE_OR_DECL_P (t)
4206 || is_gimple_min_invariant (t)
4207 || TREE_CODE (t) == SSA_NAME
4208 || t == error_mark_node
4209 || TREE_CODE (t) == IDENTIFIER_NODE)
4212 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4215 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4216 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4217 || TREE_CODE (t) == COMPONENT_REF
4218 || TREE_CODE (t) == REALPART_EXPR
4219 || TREE_CODE (t) == IMAGPART_EXPR)
4220 t = TREE_OPERAND (t, 0);
4229 /* Called via walk_trees. Verify tree sharing. */
4232 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
4234 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4236 if (tree_node_can_be_shared (*tp))
4238 *walk_subtrees = false;
4242 if (pointer_set_insert (visited, *tp))
4249 /* Helper function for verify_gimple_tuples. */
4252 verify_gimple_tuples_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4253 void *data ATTRIBUTE_UNUSED)
4255 switch (TREE_CODE (*tp))
4258 error ("unexpected non-tuple");
4268 /* Verify that there are no trees that should have been converted to
4269 gimple tuples. Return true if T contains a node that should have
4270 been converted to a gimple tuple, but hasn't. */
4273 verify_gimple_tuples (tree t)
4275 return walk_tree (&t, verify_gimple_tuples_1, NULL, NULL) != NULL;
4278 static bool eh_error_found;
4280 verify_eh_throw_stmt_node (void **slot, void *data)
4282 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4283 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4285 if (!pointer_set_contains (visited, node->stmt))
4287 error ("Dead STMT in EH table");
4288 debug_generic_stmt (node->stmt);
4289 eh_error_found = true;
4294 /* Verify the GIMPLE statement chain. */
4300 block_stmt_iterator bsi;
4302 struct pointer_set_t *visited, *visited_stmts;
4305 timevar_push (TV_TREE_STMT_VERIFY);
4306 visited = pointer_set_create ();
4307 visited_stmts = pointer_set_create ();
4314 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4316 int phi_num_args = PHI_NUM_ARGS (phi);
4318 pointer_set_insert (visited_stmts, phi);
4319 if (bb_for_stmt (phi) != bb)
4321 error ("bb_for_stmt (phi) is set to a wrong basic block");
4325 for (i = 0; i < phi_num_args; i++)
4327 tree t = PHI_ARG_DEF (phi, i);
4332 error ("missing PHI def");
4333 debug_generic_stmt (phi);
4337 /* Addressable variables do have SSA_NAMEs but they
4338 are not considered gimple values. */
4339 else if (TREE_CODE (t) != SSA_NAME
4340 && TREE_CODE (t) != FUNCTION_DECL
4341 && !is_gimple_val (t))
4343 error ("PHI def is not a GIMPLE value");
4344 debug_generic_stmt (phi);
4345 debug_generic_stmt (t);
4349 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
4352 debug_generic_stmt (addr);
4356 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4359 error ("incorrect sharing of tree nodes");
4360 debug_generic_stmt (phi);
4361 debug_generic_stmt (addr);
4367 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
4369 tree stmt = bsi_stmt (bsi);
4371 pointer_set_insert (visited_stmts, stmt);
4372 err |= verify_gimple_tuples (stmt);
4374 if (bb_for_stmt (stmt) != bb)
4376 error ("bb_for_stmt (stmt) is set to a wrong basic block");
4381 err |= verify_stmt (stmt, bsi_end_p (bsi));
4382 addr = walk_tree (&stmt, verify_node_sharing, visited, NULL);
4385 error ("incorrect sharing of tree nodes");
4386 debug_generic_stmt (stmt);
4387 debug_generic_stmt (addr);
4392 eh_error_found = false;
4393 if (get_eh_throw_stmt_table (cfun))
4394 htab_traverse (get_eh_throw_stmt_table (cfun),
4395 verify_eh_throw_stmt_node,
4398 if (err | eh_error_found)
4399 internal_error ("verify_stmts failed");
4401 pointer_set_destroy (visited);
4402 pointer_set_destroy (visited_stmts);
4403 verify_histograms ();
4404 timevar_pop (TV_TREE_STMT_VERIFY);
4408 /* Verifies that the flow information is OK. */
4411 tree_verify_flow_info (void)
4415 block_stmt_iterator bsi;
4420 if (ENTRY_BLOCK_PTR->il.tree)
4422 error ("ENTRY_BLOCK has IL associated with it");
4426 if (EXIT_BLOCK_PTR->il.tree)
4428 error ("EXIT_BLOCK has IL associated with it");
4432 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4433 if (e->flags & EDGE_FALLTHRU)
4435 error ("fallthru to exit from bb %d", e->src->index);
4441 bool found_ctrl_stmt = false;
4445 /* Skip labels on the start of basic block. */
4446 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4448 tree prev_stmt = stmt;
4450 stmt = bsi_stmt (bsi);
4452 if (TREE_CODE (stmt) != LABEL_EXPR)
4455 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
4457 error ("nonlocal label ");
4458 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4459 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4464 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
4467 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4468 fprintf (stderr, " to block does not match in bb %d",
4473 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
4474 != current_function_decl)
4477 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4478 fprintf (stderr, " has incorrect context in bb %d",
4484 /* Verify that body of basic block BB is free of control flow. */
4485 for (; !bsi_end_p (bsi); bsi_next (&bsi))
4487 tree stmt = bsi_stmt (bsi);
4489 if (found_ctrl_stmt)
4491 error ("control flow in the middle of basic block %d",
4496 if (stmt_ends_bb_p (stmt))
4497 found_ctrl_stmt = true;
4499 if (TREE_CODE (stmt) == LABEL_EXPR)
4502 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4503 fprintf (stderr, " in the middle of basic block %d", bb->index);
4508 bsi = bsi_last (bb);
4509 if (bsi_end_p (bsi))
4512 stmt = bsi_stmt (bsi);
4514 err |= verify_eh_edges (stmt);
4516 if (is_ctrl_stmt (stmt))
4518 FOR_EACH_EDGE (e, ei, bb->succs)
4519 if (e->flags & EDGE_FALLTHRU)
4521 error ("fallthru edge after a control statement in bb %d",
4527 if (TREE_CODE (stmt) != COND_EXPR)
4529 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4530 after anything else but if statement. */
4531 FOR_EACH_EDGE (e, ei, bb->succs)
4532 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4534 error ("true/false edge after a non-COND_EXPR in bb %d",
4540 switch (TREE_CODE (stmt))
4547 if (COND_EXPR_THEN (stmt) != NULL_TREE
4548 || COND_EXPR_ELSE (stmt) != NULL_TREE)
4550 error ("COND_EXPR with code in branches at the end of bb %d",
4555 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4557 if (!true_edge || !false_edge
4558 || !(true_edge->flags & EDGE_TRUE_VALUE)
4559 || !(false_edge->flags & EDGE_FALSE_VALUE)
4560 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4561 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4562 || EDGE_COUNT (bb->succs) >= 3)
4564 error ("wrong outgoing edge flags at end of bb %d",
4572 if (simple_goto_p (stmt))
4574 error ("explicit goto at end of bb %d", bb->index);
4579 /* FIXME. We should double check that the labels in the
4580 destination blocks have their address taken. */
4581 FOR_EACH_EDGE (e, ei, bb->succs)
4582 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4583 | EDGE_FALSE_VALUE))
4584 || !(e->flags & EDGE_ABNORMAL))
4586 error ("wrong outgoing edge flags at end of bb %d",
4594 if (!single_succ_p (bb)
4595 || (single_succ_edge (bb)->flags
4596 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4597 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4599 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4602 if (single_succ (bb) != EXIT_BLOCK_PTR)
4604 error ("return edge does not point to exit in bb %d",
4617 vec = SWITCH_LABELS (stmt);
4618 n = TREE_VEC_LENGTH (vec);
4620 /* Mark all the destination basic blocks. */
4621 for (i = 0; i < n; ++i)
4623 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4624 basic_block label_bb = label_to_block (lab);
4626 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4627 label_bb->aux = (void *)1;
4630 /* Verify that the case labels are sorted. */
4631 prev = TREE_VEC_ELT (vec, 0);
4632 for (i = 1; i < n - 1; ++i)
4634 tree c = TREE_VEC_ELT (vec, i);
4637 error ("found default case not at end of case vector");
4641 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4643 error ("case labels not sorted: ");
4644 print_generic_expr (stderr, prev, 0);
4645 fprintf (stderr," is greater than ");
4646 print_generic_expr (stderr, c, 0);
4647 fprintf (stderr," but comes before it.\n");
4652 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
4654 error ("no default case found at end of case vector");
4658 FOR_EACH_EDGE (e, ei, bb->succs)
4662 error ("extra outgoing edge %d->%d",
4663 bb->index, e->dest->index);
4666 e->dest->aux = (void *)2;
4667 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4668 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4670 error ("wrong outgoing edge flags at end of bb %d",
4676 /* Check that we have all of them. */
4677 for (i = 0; i < n; ++i)
4679 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4680 basic_block label_bb = label_to_block (lab);
4682 if (label_bb->aux != (void *)2)
4684 error ("missing edge %i->%i",
4685 bb->index, label_bb->index);
4690 FOR_EACH_EDGE (e, ei, bb->succs)
4691 e->dest->aux = (void *)0;
4698 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4699 verify_dominators (CDI_DOMINATORS);
4705 /* Updates phi nodes after creating a forwarder block joined
4706 by edge FALLTHRU. */
4709 tree_make_forwarder_block (edge fallthru)
4713 basic_block dummy, bb;
4714 tree phi, new_phi, var;
4716 dummy = fallthru->src;
4717 bb = fallthru->dest;
4719 if (single_pred_p (bb))
4722 /* If we redirected a branch we must create new PHI nodes at the
4724 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
4726 var = PHI_RESULT (phi);
4727 new_phi = create_phi_node (var, bb);
4728 SSA_NAME_DEF_STMT (var) = new_phi;
4729 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4730 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
4733 /* Ensure that the PHI node chain is in the same order. */
4734 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
4736 /* Add the arguments we have stored on edges. */
4737 FOR_EACH_EDGE (e, ei, bb->preds)
4742 flush_pending_stmts (e);
4747 /* Return a non-special label in the head of basic block BLOCK.
4748 Create one if it doesn't exist. */
4751 tree_block_label (basic_block bb)
4753 block_stmt_iterator i, s = bsi_start (bb);
4757 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4759 stmt = bsi_stmt (i);
4760 if (TREE_CODE (stmt) != LABEL_EXPR)
4762 label = LABEL_EXPR_LABEL (stmt);
4763 if (!DECL_NONLOCAL (label))
4766 bsi_move_before (&i, &s);
4771 label = create_artificial_label ();
4772 stmt = build1 (LABEL_EXPR, void_type_node, label);
4773 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4778 /* Attempt to perform edge redirection by replacing a possibly complex
4779 jump instruction by a goto or by removing the jump completely.
4780 This can apply only if all edges now point to the same block. The
4781 parameters and return values are equivalent to
4782 redirect_edge_and_branch. */
4785 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4787 basic_block src = e->src;
4788 block_stmt_iterator b;
4791 /* We can replace or remove a complex jump only when we have exactly
4793 if (EDGE_COUNT (src->succs) != 2
4794 /* Verify that all targets will be TARGET. Specifically, the
4795 edge that is not E must also go to TARGET. */
4796 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4802 stmt = bsi_stmt (b);
4804 if (TREE_CODE (stmt) == COND_EXPR
4805 || TREE_CODE (stmt) == SWITCH_EXPR)
4807 bsi_remove (&b, true);
4808 e = ssa_redirect_edge (e, target);
4809 e->flags = EDGE_FALLTHRU;
4817 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4818 edge representing the redirected branch. */
4821 tree_redirect_edge_and_branch (edge e, basic_block dest)
4823 basic_block bb = e->src;
4824 block_stmt_iterator bsi;
4828 if (e->flags & EDGE_ABNORMAL)
4831 if (e->src != ENTRY_BLOCK_PTR
4832 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4835 if (e->dest == dest)
4838 bsi = bsi_last (bb);
4839 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4841 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4844 /* For COND_EXPR, we only need to redirect the edge. */
4848 /* No non-abnormal edges should lead from a non-simple goto, and
4849 simple ones should be represented implicitly. */
4854 tree cases = get_cases_for_edge (e, stmt);
4855 tree label = tree_block_label (dest);
4857 /* If we have a list of cases associated with E, then use it
4858 as it's a lot faster than walking the entire case vector. */
4861 edge e2 = find_edge (e->src, dest);
4868 CASE_LABEL (cases) = label;
4869 cases = TREE_CHAIN (cases);
4872 /* If there was already an edge in the CFG, then we need
4873 to move all the cases associated with E to E2. */
4876 tree cases2 = get_cases_for_edge (e2, stmt);
4878 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4879 TREE_CHAIN (cases2) = first;
4884 tree vec = SWITCH_LABELS (stmt);
4885 size_t i, n = TREE_VEC_LENGTH (vec);
4887 for (i = 0; i < n; i++)
4889 tree elt = TREE_VEC_ELT (vec, i);
4891 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4892 CASE_LABEL (elt) = label;
4900 bsi_remove (&bsi, true);
4901 e->flags |= EDGE_FALLTHRU;
4906 case OMP_SECTIONS_SWITCH:
4908 /* The edges from OMP constructs can be simply redirected. */
4912 /* Otherwise it must be a fallthru edge, and we don't need to
4913 do anything besides redirecting it. */
4914 gcc_assert (e->flags & EDGE_FALLTHRU);
4918 /* Update/insert PHI nodes as necessary. */
4920 /* Now update the edges in the CFG. */
4921 e = ssa_redirect_edge (e, dest);
4926 /* Returns true if it is possible to remove edge E by redirecting
4927 it to the destination of the other edge from E->src. */
4930 tree_can_remove_branch_p (const_edge e)
4932 if (e->flags & EDGE_ABNORMAL)
4938 /* Simple wrapper, as we can always redirect fallthru edges. */
4941 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4943 e = tree_redirect_edge_and_branch (e, dest);
4950 /* Splits basic block BB after statement STMT (but at least after the
4951 labels). If STMT is NULL, BB is split just after the labels. */
4954 tree_split_block (basic_block bb, void *stmt)
4956 block_stmt_iterator bsi;
4957 tree_stmt_iterator tsi_tgt;
4963 new_bb = create_empty_bb (bb);
4965 /* Redirect the outgoing edges. */
4966 new_bb->succs = bb->succs;
4968 FOR_EACH_EDGE (e, ei, new_bb->succs)
4971 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4974 /* Move everything from BSI to the new basic block. */
4975 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4977 act = bsi_stmt (bsi);
4978 if (TREE_CODE (act) == LABEL_EXPR)
4991 if (bsi_end_p (bsi))
4994 /* Split the statement list - avoid re-creating new containers as this
4995 brings ugly quadratic memory consumption in the inliner.
4996 (We are still quadratic since we need to update stmt BB pointers,
4998 list = tsi_split_statement_list_before (&bsi.tsi);
4999 set_bb_stmt_list (new_bb, list);
5000 for (tsi_tgt = tsi_start (list);
5001 !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
5002 change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
5008 /* Moves basic block BB after block AFTER. */
5011 tree_move_block_after (basic_block bb, basic_block after)
5013 if (bb->prev_bb == after)
5017 link_block (bb, after);
5023 /* Return true if basic_block can be duplicated. */
5026 tree_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5032 /* Create a duplicate of the basic block BB. NOTE: This does not
5033 preserve SSA form. */
5036 tree_duplicate_bb (basic_block bb)
5039 block_stmt_iterator bsi, bsi_tgt;
5042 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5044 /* Copy the PHI nodes. We ignore PHI node arguments here because
5045 the incoming edges have not been setup yet. */
5046 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5048 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
5049 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
5052 /* Keep the chain of PHI nodes in the same order so that they can be
5053 updated by ssa_redirect_edge. */
5054 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
5056 bsi_tgt = bsi_start (new_bb);
5057 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5059 def_operand_p def_p;
5060 ssa_op_iter op_iter;
5064 stmt = bsi_stmt (bsi);
5065 if (TREE_CODE (stmt) == LABEL_EXPR)
5068 /* Create a new copy of STMT and duplicate STMT's virtual
5070 copy = unshare_expr (stmt);
5071 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
5072 copy_virtual_operands (copy, stmt);
5073 region = lookup_stmt_eh_region (stmt);
5075 add_stmt_to_eh_region (copy, region);
5076 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5078 /* Create new names for all the definitions created by COPY and
5079 add replacement mappings for each new name. */
5080 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5081 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5087 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5090 add_phi_args_after_copy_edge (edge e_copy)
5092 basic_block bb, bb_copy = e_copy->src, dest;
5095 tree phi, phi_copy, phi_next, def;
5097 if (!phi_nodes (e_copy->dest))
5100 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5102 if (e_copy->dest->flags & BB_DUPLICATED)
5103 dest = get_bb_original (e_copy->dest);
5105 dest = e_copy->dest;
5107 e = find_edge (bb, dest);
5110 /* During loop unrolling the target of the latch edge is copied.
5111 In this case we are not looking for edge to dest, but to
5112 duplicated block whose original was dest. */
5113 FOR_EACH_EDGE (e, ei, bb->succs)
5115 if ((e->dest->flags & BB_DUPLICATED)
5116 && get_bb_original (e->dest) == dest)
5120 gcc_assert (e != NULL);
5123 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
5125 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
5127 phi_next = PHI_CHAIN (phi);
5128 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5129 add_phi_arg (phi_copy, def, e_copy);
5134 /* Basic block BB_COPY was created by code duplication. Add phi node
5135 arguments for edges going out of BB_COPY. The blocks that were
5136 duplicated have BB_DUPLICATED set. */
5139 add_phi_args_after_copy_bb (basic_block bb_copy)
5144 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5146 add_phi_args_after_copy_edge (e_copy);
5150 /* Blocks in REGION_COPY array of length N_REGION were created by
5151 duplication of basic blocks. Add phi node arguments for edges
5152 going from these blocks. If E_COPY is not NULL, also add
5153 phi node arguments for its destination.*/
5156 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5161 for (i = 0; i < n_region; i++)
5162 region_copy[i]->flags |= BB_DUPLICATED;
5164 for (i = 0; i < n_region; i++)
5165 add_phi_args_after_copy_bb (region_copy[i]);
5167 add_phi_args_after_copy_edge (e_copy);
5169 for (i = 0; i < n_region; i++)
5170 region_copy[i]->flags &= ~BB_DUPLICATED;
5173 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5174 important exit edge EXIT. By important we mean that no SSA name defined
5175 inside region is live over the other exit edges of the region. All entry
5176 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5177 to the duplicate of the region. SSA form, dominance and loop information
5178 is updated. The new basic blocks are stored to REGION_COPY in the same
5179 order as they had in REGION, provided that REGION_COPY is not NULL.
5180 The function returns false if it is unable to copy the region,
5184 tree_duplicate_sese_region (edge entry, edge exit,
5185 basic_block *region, unsigned n_region,
5186 basic_block *region_copy)
5189 bool free_region_copy = false, copying_header = false;
5190 struct loop *loop = entry->dest->loop_father;
5192 VEC (basic_block, heap) *doms;
5194 int total_freq = 0, entry_freq = 0;
5195 gcov_type total_count = 0, entry_count = 0;
5197 if (!can_copy_bbs_p (region, n_region))
5200 /* Some sanity checking. Note that we do not check for all possible
5201 missuses of the functions. I.e. if you ask to copy something weird,
5202 it will work, but the state of structures probably will not be
5204 for (i = 0; i < n_region; i++)
5206 /* We do not handle subloops, i.e. all the blocks must belong to the
5208 if (region[i]->loop_father != loop)
5211 if (region[i] != entry->dest
5212 && region[i] == loop->header)
5216 set_loop_copy (loop, loop);
5218 /* In case the function is used for loop header copying (which is the primary
5219 use), ensure that EXIT and its copy will be new latch and entry edges. */
5220 if (loop->header == entry->dest)
5222 copying_header = true;
5223 set_loop_copy (loop, loop_outer (loop));
5225 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5228 for (i = 0; i < n_region; i++)
5229 if (region[i] != exit->src
5230 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5236 region_copy = XNEWVEC (basic_block, n_region);
5237 free_region_copy = true;
5240 gcc_assert (!need_ssa_update_p ());
5242 /* Record blocks outside the region that are dominated by something
5245 initialize_original_copy_tables ();
5247 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5249 if (entry->dest->count)
5251 total_count = entry->dest->count;
5252 entry_count = entry->count;
5253 /* Fix up corner cases, to avoid division by zero or creation of negative
5255 if (entry_count > total_count)
5256 entry_count = total_count;
5260 total_freq = entry->dest->frequency;
5261 entry_freq = EDGE_FREQUENCY (entry);
5262 /* Fix up corner cases, to avoid division by zero or creation of negative
5264 if (total_freq == 0)
5266 else if (entry_freq > total_freq)
5267 entry_freq = total_freq;
5270 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5271 split_edge_bb_loc (entry));
5274 scale_bbs_frequencies_gcov_type (region, n_region,
5275 total_count - entry_count,
5277 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5282 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5284 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5289 loop->header = exit->dest;
5290 loop->latch = exit->src;
5293 /* Redirect the entry and add the phi node arguments. */
5294 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5295 gcc_assert (redirected != NULL);
5296 flush_pending_stmts (entry);
5298 /* Concerning updating of dominators: We must recount dominators
5299 for entry block and its copy. Anything that is outside of the
5300 region, but was dominated by something inside needs recounting as
5302 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5303 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5304 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5305 VEC_free (basic_block, heap, doms);
5307 /* Add the other PHI node arguments. */
5308 add_phi_args_after_copy (region_copy, n_region, NULL);
5310 /* Update the SSA web. */
5311 update_ssa (TODO_update_ssa);
5313 if (free_region_copy)
5316 free_original_copy_tables ();
5320 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5321 are stored to REGION_COPY in the same order in that they appear
5322 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5323 the region, EXIT an exit from it. The condition guarding EXIT
5324 is moved to ENTRY. Returns true if duplication succeeds, false
5350 tree_duplicate_sese_tail (edge entry, edge exit,
5351 basic_block *region, unsigned n_region,
5352 basic_block *region_copy)
5355 bool free_region_copy = false;
5356 struct loop *loop = exit->dest->loop_father;
5357 struct loop *orig_loop = entry->dest->loop_father;
5358 basic_block switch_bb, entry_bb, nentry_bb;
5359 VEC (basic_block, heap) *doms;
5360 int total_freq = 0, exit_freq = 0;
5361 gcov_type total_count = 0, exit_count = 0;
5362 edge exits[2], nexits[2], e;
5363 block_stmt_iterator bsi;
5367 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5369 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5371 if (!can_copy_bbs_p (region, n_region))
5374 /* Some sanity checking. Note that we do not check for all possible
5375 missuses of the functions. I.e. if you ask to copy something weird
5376 (e.g., in the example, if there is a jump from inside to the middle
5377 of some_code, or come_code defines some of the values used in cond)
5378 it will work, but the resulting code will not be correct. */
5379 for (i = 0; i < n_region; i++)
5381 /* We do not handle subloops, i.e. all the blocks must belong to the
5383 if (region[i]->loop_father != orig_loop)
5386 if (region[i] == orig_loop->latch)
5390 initialize_original_copy_tables ();
5391 set_loop_copy (orig_loop, loop);
5395 region_copy = XNEWVEC (basic_block, n_region);
5396 free_region_copy = true;
5399 gcc_assert (!need_ssa_update_p ());
5401 /* Record blocks outside the region that are dominated by something
5403 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5405 if (exit->src->count)
5407 total_count = exit->src->count;
5408 exit_count = exit->count;
5409 /* Fix up corner cases, to avoid division by zero or creation of negative
5411 if (exit_count > total_count)
5412 exit_count = total_count;
5416 total_freq = exit->src->frequency;
5417 exit_freq = EDGE_FREQUENCY (exit);
5418 /* Fix up corner cases, to avoid division by zero or creation of negative
5420 if (total_freq == 0)
5422 if (exit_freq > total_freq)
5423 exit_freq = total_freq;
5426 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5427 split_edge_bb_loc (exit));
5430 scale_bbs_frequencies_gcov_type (region, n_region,
5431 total_count - exit_count,
5433 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5438 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5440 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5443 /* Create the switch block, and put the exit condition to it. */
5444 entry_bb = entry->dest;
5445 nentry_bb = get_bb_copy (entry_bb);
5446 if (!last_stmt (entry->src)
5447 || !stmt_ends_bb_p (last_stmt (entry->src)))
5448 switch_bb = entry->src;
5450 switch_bb = split_edge (entry);
5451 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5453 bsi = bsi_last (switch_bb);
5454 cond = last_stmt (exit->src);
5455 gcc_assert (TREE_CODE (cond) == COND_EXPR);
5456 bsi_insert_after (&bsi, unshare_expr (cond), BSI_NEW_STMT);
5458 sorig = single_succ_edge (switch_bb);
5459 sorig->flags = exits[1]->flags;
5460 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5462 /* Register the new edge from SWITCH_BB in loop exit lists. */
5463 rescan_loop_exit (snew, true, false);
5465 /* Add the PHI node arguments. */
5466 add_phi_args_after_copy (region_copy, n_region, snew);
5468 /* Get rid of now superfluous conditions and associated edges (and phi node
5470 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5471 PENDING_STMT (e) = NULL_TREE;
5472 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5473 PENDING_STMT (e) = NULL_TREE;
5475 /* Anything that is outside of the region, but was dominated by something
5476 inside needs to update dominance info. */
5477 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5478 VEC_free (basic_block, heap, doms);
5480 /* Update the SSA web. */
5481 update_ssa (TODO_update_ssa);
5483 if (free_region_copy)
5486 free_original_copy_tables ();
5491 DEF_VEC_P(basic_block);
5492 DEF_VEC_ALLOC_P(basic_block,heap);
5495 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5496 adding blocks when the dominator traversal reaches EXIT. This
5497 function silently assumes that ENTRY strictly dominates EXIT. */
5500 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5501 VEC(basic_block,heap) **bbs_p)
5505 for (son = first_dom_son (CDI_DOMINATORS, entry);
5507 son = next_dom_son (CDI_DOMINATORS, son))
5509 VEC_safe_push (basic_block, heap, *bbs_p, son);
5511 gather_blocks_in_sese_region (son, exit, bbs_p);
5515 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5516 The duplicates are recorded in VARS_MAP. */
5519 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5522 tree t = *tp, new_t;
5523 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5526 if (DECL_CONTEXT (t) == to_context)
5529 loc = pointer_map_contains (vars_map, t);
5533 loc = pointer_map_insert (vars_map, t);
5537 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5538 f->unexpanded_var_list
5539 = tree_cons (NULL_TREE, new_t, f->unexpanded_var_list);
5543 gcc_assert (TREE_CODE (t) == CONST_DECL);
5544 new_t = copy_node (t);
5546 DECL_CONTEXT (new_t) = to_context;
5556 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5557 VARS_MAP maps old ssa names and var_decls to the new ones. */
5560 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5564 tree new_name, decl = SSA_NAME_VAR (name);
5566 gcc_assert (is_gimple_reg (name));
5568 loc = pointer_map_contains (vars_map, name);
5572 replace_by_duplicate_decl (&decl, vars_map, to_context);
5574 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5575 if (gimple_in_ssa_p (cfun))
5576 add_referenced_var (decl);
5578 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5579 if (SSA_NAME_IS_DEFAULT_DEF (name))
5580 set_default_def (decl, new_name);
5583 loc = pointer_map_insert (vars_map, name);
5597 struct pointer_map_t *vars_map;
5598 htab_t new_label_map;
5602 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5603 contained in *TP and change the DECL_CONTEXT of every local
5604 variable referenced in *TP. */
5607 move_stmt_r (tree *tp, int *walk_subtrees, void *data)
5609 struct move_stmt_d *p = (struct move_stmt_d *) data;
/* Re-home expression/statement nodes into the destination BLOCK.  */
5613 && (EXPR_P (t) || GIMPLE_STMT_P (t)))
5614 TREE_BLOCK (t) = p->block;
5616 if (OMP_DIRECTIVE_P (t)
5617 && TREE_CODE (t) != OMP_RETURN
5618 && TREE_CODE (t) != OMP_CONTINUE)
5620 /* Do not remap variables inside OMP directives. Variables
5621 referenced in clauses and directive header belong to the
5622 parent function and should not be moved into the child
5624 bool save_remap_decls_p = p->remap_decls_p;
5625 p->remap_decls_p = false;
/* Walk only the directive body; restore the remap flag afterwards.  */
5628 walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
5630 p->remap_decls_p = save_remap_decls_p;
5632 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5634 if (TREE_CODE (t) == SSA_NAME)
5635 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5636 else if (TREE_CODE (t) == LABEL_DECL)
/* Labels duplicated for EH regions are looked up in NEW_LABEL_MAP
   by DECL_UID; otherwise the label is simply re-contexted.  */
5638 if (p->new_label_map)
5640 struct tree_map in, *out;
5642 out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5647 DECL_CONTEXT (t) = p->to_context;
5649 else if (p->remap_decls_p)
5651 /* Replace T with its duplicate. T should no longer appear in the
5652 parent function, so this looks wasteful; however, it may appear
5653 in referenced_vars, and more importantly, as virtual operands of
5654 statements, and in alias lists of other variables. It would be
5655 quite difficult to expunge it from all those places. ??? It might
5656 suffice to do this for addressable variables. */
5657 if ((TREE_CODE (t) == VAR_DECL
5658 && !is_global_var (t))
5659 || TREE_CODE (t) == CONST_DECL)
5660 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5663 && gimple_in_ssa_p (cfun))
/* Make the duplicate known to the destination function's SSA web.  */
5665 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5666 add_referenced_var (*tp);
5672 else if (TYPE_P (t))
5678 /* Marks virtual operands of all statements in basic blocks BBS for
5682 mark_virtual_ops_in_bb (basic_block bb)
5685 block_stmt_iterator bsi;
/* Mark the PHI nodes first, then every ordinary statement in BB.  */
5687 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5688 mark_virtual_ops_for_renaming (phi);
5690 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5691 mark_virtual_ops_for_renaming (bsi_stmt (bsi));
5694 /* Marks virtual operands of all statements in basic blocks BBS for
5698 mark_virtual_ops_in_region (VEC (basic_block,heap) *bbs)
/* Delegate to the per-block helper for every block in the vector.  */
5703 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
5704 mark_virtual_ops_in_bb (bb);
5707 /* Move basic block BB from function CFUN to function DEST_FN. The
5708 block is moved out of the original linked list and placed after
5709 block AFTER in the new list. Also, the block is removed from the
5710 original array of blocks and placed in DEST_FN's array of blocks.
5711 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
5712 updated to reflect the moved edges.
5714 The local variables are remapped to new instances, VARS_MAP is used
5715 to record the mapping. */
5718 move_block_to_fn (struct function *dest_cfun, basic_block bb,
5719 basic_block after, bool update_edge_count_p,
5720 struct pointer_map_t *vars_map, htab_t new_label_map,
5723 struct control_flow_graph *cfg;
5726 block_stmt_iterator si;
5727 struct move_stmt_d d;
5728 unsigned old_len, new_len;
5731 /* Remove BB from dominance structures. */
5732 delete_from_dominance_info (CDI_DOMINATORS, bb);
5734 remove_bb_from_loops (bb);
5736 /* Link BB to the new linked list. */
5737 move_block_after (bb, after);
5739 /* Update the edge count in the corresponding flowgraphs. */
5740 if (update_edge_count_p)
5741 FOR_EACH_EDGE (e, ei, bb->succs)
5743 cfun->cfg->x_n_edges--;
5744 dest_cfun->cfg->x_n_edges++;
5747 /* Remove BB from the original basic block array. */
5748 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5749 cfun->cfg->x_n_basic_blocks--;
5751 /* Grow DEST_CFUN's basic block array if needed. */
5752 cfg = dest_cfun->cfg;
5753 cfg->x_n_basic_blocks++;
5754 if (bb->index >= cfg->x_last_basic_block)
5755 cfg->x_last_basic_block = bb->index + 1;
/* Grow by ~25% headroom beyond the needed length (same policy used
   elsewhere for the label-to-block map below).  */
5757 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5758 if ((unsigned) cfg->x_last_basic_block >= old_len)
5760 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
5761 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5765 VEC_replace (basic_block, cfg->x_basic_block_info,
5768 /* Remap the variables in phi nodes. */
5769 for (phi = phi_nodes (bb); phi; phi = next_phi)
5772 tree op = PHI_RESULT (phi);
5775 next_phi = PHI_CHAIN (phi);
5776 if (!is_gimple_reg (op))
5778 /* Remove the phi nodes for virtual operands (alias analysis will be
5779 run for the new function, anyway). */
5780 remove_phi_node (phi, NULL, true);
/* Real operands: rewrite result and each use to the new SSA names.  */
5784 SET_PHI_RESULT (phi, replace_ssa_name (op, vars_map, dest_cfun->decl));
5785 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5787 op = USE_FROM_PTR (use);
5788 if (TREE_CODE (op) == SSA_NAME)
5789 SET_USE (use, replace_ssa_name (op, vars_map, dest_cfun->decl));
5793 /* The statements in BB need to be associated with a new TREE_BLOCK.
5794 Labels need to be associated with a new label-to-block map. */
5795 memset (&d, 0, sizeof (d));
5796 d.vars_map = vars_map;
5797 d.from_context = cfun->decl;
5798 d.to_context = dest_cfun->decl;
5799 d.new_label_map = new_label_map;
5801 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
5803 tree stmt = bsi_stmt (si);
5806 d.remap_decls_p = true;
5807 if (TREE_BLOCK (stmt))
5808 d.block = DECL_INITIAL (dest_cfun->decl);
/* move_stmt_r re-homes blocks, decls, labels and SSA names in STMT.  */
5810 walk_tree (&stmt, move_stmt_r, &d, NULL);
5812 if (TREE_CODE (stmt) == LABEL_EXPR)
5814 tree label = LABEL_EXPR_LABEL (stmt);
5815 int uid = LABEL_DECL_UID (label);
5817 gcc_assert (uid > -1);
5819 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5820 if (old_len <= (unsigned) uid)
5822 new_len = 3 * uid / 2;
5823 VEC_safe_grow_cleared (basic_block, gc,
5824 cfg->x_label_to_block_map, new_len);
/* Install the label in DEST_CFUN's map and drop it from CFUN's.  */
5827 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5828 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5830 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5832 if (uid >= dest_cfun->last_label_uid)
5833 dest_cfun->last_label_uid = uid + 1;
/* RESX operands are EH region numbers; shift them by EH_OFFSET.  */
5835 else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
5836 TREE_OPERAND (stmt, 0) =
5837 build_int_cst (NULL_TREE,
5838 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
/* Re-register the statement's EH region in the destination function
   and move its value-profile histograms along with it.  */
5841 region = lookup_stmt_eh_region (stmt);
5844 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
5845 remove_stmt_from_eh_region (stmt);
5846 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5847 gimple_remove_stmt_histograms (cfun, stmt);
5850 /* We cannot leave any operands allocated from the operand caches of
5851 the current function. */
5852 free_stmt_operands (stmt);
5853 push_cfun (dest_cfun);
5859 /* Examine the statements in BB (which is in SRC_CFUN); find and return
5860 the outermost EH region. Use REGION as the incoming base EH region. */
5863 find_outermost_region_in_block (struct function *src_cfun,
5864 basic_block bb, int region)
5866 block_stmt_iterator si;
5868 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
5870 tree stmt = bsi_stmt (si);
/* RESX carries its target region as an integer operand; other
   statements are looked up in SRC_CFUN's EH tables.  */
5873 if (TREE_CODE (stmt) == RESX_EXPR)
5874 stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
5876 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
5877 if (stmt_region > 0)
5880 region = stmt_region;
/* Widen REGION to the common outermost ancestor when regions differ.  */
5881 else if (stmt_region != region)
5883 region = eh_region_outermost (src_cfun, stmt_region, region);
5884 gcc_assert (region != -1);
/* Callback used while duplicating EH regions: create a fresh artificial
   label for DECL and record the old->new pair in the hash table passed
   via DATA, keyed by DECL's UID.  */
5893 new_label_mapper (tree decl, void *data)
5895 htab_t hash = (htab_t) data;
5899 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
5901 m = xmalloc (sizeof (struct tree_map));
5902 m->hash = DECL_UID (decl);
5903 m->base.from = decl;
5904 m->to = create_artificial_label ();
/* The new label reuses the old label's UID slot index.  */
5905 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
5906 if (LABEL_DECL_UID (m->to) >= cfun->last_label_uid)
5907 cfun->last_label_uid = LABEL_DECL_UID (m->to) + 1;
/* Each label must be mapped exactly once.  */
5909 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
5910 gcc_assert (*slot == NULL);
5917 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
5918 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
5919 single basic block in the original CFG and the new basic block is
5920 returned. DEST_CFUN must not have a CFG yet.
5922 Note that the region need not be a pure SESE region. Blocks inside
5923 the region may contain calls to abort/exit. The only restriction
5924 is that ENTRY_BB should be the only entry point and it must
5927 All local variables referenced in the region are assumed to be in
5928 the corresponding BLOCK_VARS and unexpanded variable lists
5929 associated with DEST_CFUN. */
5932 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
5933 basic_block exit_bb)
5935 VEC(basic_block,heap) *bbs, *dom_bbs;
5936 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
5937 basic_block after, bb, *entry_pred, *exit_succ, abb;
5938 struct function *saved_cfun = cfun;
5939 int *entry_flag, *exit_flag, eh_offset;
5940 unsigned *entry_prob, *exit_prob;
5941 unsigned i, num_entry_edges, num_exit_edges;
5944 htab_t new_label_map;
5945 struct pointer_map_t *vars_map;
5946 struct loop *loop = entry_bb->loop_father;
5948 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
5950 gcc_assert (entry_bb != exit_bb
5952 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
5954 /* Collect all the blocks in the region. Manually add ENTRY_BB
5955 because it won't be added by dfs_enumerate_from. */
5957 VEC_safe_push (basic_block, heap, bbs, entry_bb);
5958 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
5960 /* The blocks that used to be dominated by something in BBS will now be
5961 dominated by the new block. */
5962 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
5963 VEC_address (basic_block, bbs),
5964 VEC_length (basic_block, bbs));
5966 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
5967 the predecessor edges to ENTRY_BB and the successor edges to
5968 EXIT_BB so that we can re-attach them to the new basic block that
5969 will replace the region. */
5970 num_entry_edges = EDGE_COUNT (entry_bb->preds);
5971 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
5972 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
5973 entry_prob = XNEWVEC (unsigned, num_entry_edges);
/* Save source, flags and probability of each incoming edge.  */
5975 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
5977 entry_prob[i] = e->probability;
5978 entry_flag[i] = e->flags;
5979 entry_pred[i++] = e->src;
5985 num_exit_edges = EDGE_COUNT (exit_bb->succs);
5986 exit_succ = (basic_block *) xcalloc (num_exit_edges,
5987 sizeof (basic_block));
5988 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
5989 exit_prob = XNEWVEC (unsigned, num_exit_edges);
/* Likewise for each outgoing edge of EXIT_BB.  */
5991 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
5993 exit_prob[i] = e->probability;
5994 exit_flag[i] = e->flags;
5995 exit_succ[i++] = e->dest;
6007 /* Switch context to the child function to initialize DEST_FN's CFG. */
6008 gcc_assert (dest_cfun->cfg == NULL);
6009 push_cfun (dest_cfun);
6011 init_empty_tree_cfg ();
6013 /* Initialize EH information for the new function. */
6015 new_label_map = NULL;
/* Find the outermost EH region used anywhere in the moved blocks, then
   duplicate the relevant region tree into DEST_CFUN; duplicated labels
   are recorded in NEW_LABEL_MAP and remapped later by move_stmt_r.  */
6020 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6021 region = find_outermost_region_in_block (saved_cfun, bb, region);
6023 init_eh_for_function ();
6026 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6027 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
6028 new_label_map, region, 0);
6034 /* The ssa form for virtual operands in the source function will have to
6035 be repaired. We do not care for the real operands -- the sese region
6036 must be closed with respect to those. */
6037 mark_virtual_ops_in_region (bbs);
6039 /* Move blocks from BBS into DEST_CFUN. */
6040 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6041 after = dest_cfun->cfg->x_entry_block_ptr;
6042 vars_map = pointer_map_create ();
6043 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6045 /* No need to update edge counts on the last block. It has
6046 already been updated earlier when we detached the region from
6047 the original CFG. */
6048 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_map,
6049 new_label_map, eh_offset);
6054 htab_delete (new_label_map);
6055 pointer_map_destroy (vars_map);
6057 /* Rewire the entry and exit blocks. The successor to the entry
6058 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6059 the child function. Similarly, the predecessor of DEST_FN's
6060 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6061 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6062 various CFG manipulation function get to the right CFG.
6064 FIXME, this is silly. The CFG ought to become a parameter to
6066 push_cfun (dest_cfun);
6067 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6069 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6072 /* Back in the original function, the SESE region has disappeared,
6073 create a new basic block in its place. */
6074 bb = create_empty_bb (entry_pred[0]);
6076 add_bb_to_loop (bb, loop);
/* Re-attach the saved entry/exit edges to the replacement block,
   restoring the recorded flags and probabilities.  */
6077 for (i = 0; i < num_entry_edges; i++)
6079 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6080 e->probability = entry_prob[i];
6083 for (i = 0; i < num_exit_edges; i++)
6085 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6086 e->probability = exit_prob[i];
/* Fix dominance: the replacement block inherits ENTRY_BB's idom and
   dominates everything the region used to dominate.  */
6089 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6090 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6091 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6092 VEC_free (basic_block, heap, dom_bbs);
6103 VEC_free (basic_block, heap, bbs);
6109 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
6112 dump_function_to_file (tree fn, FILE *file, int flags)
6114 tree arg, vars, var;
6115 struct function *dsf;
6116 bool ignore_topmost_bind = false, any_var = false;
/* Print "name (arg1, arg2, ...)" header.  */
6120 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6122 arg = DECL_ARGUMENTS (fn);
6125 print_generic_expr (file, arg, dump_flags);
6126 if (TREE_CHAIN (arg))
6127 fprintf (file, ", ");
6128 arg = TREE_CHAIN (arg);
6130 fprintf (file, ")\n");
/* With TDF_DETAILS, also dump the function's EH region tree.  */
6132 dsf = DECL_STRUCT_FUNCTION (fn);
6133 if (dsf && (flags & TDF_DETAILS))
6134 dump_eh_tree (file, dsf);
6136 if (flags & TDF_RAW)
6138 dump_node (fn, TDF_SLIM | flags, file);
6142 /* Switch CFUN to point to FN. */
6143 push_cfun (DECL_STRUCT_FUNCTION (fn));
6145 /* When GIMPLE is lowered, the variables are no longer available in
6146 BIND_EXPRs, so display them separately. */
6147 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
6149 ignore_topmost_bind = true;
6151 fprintf (file, "{\n");
6152 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
6154 var = TREE_VALUE (vars);
6156 print_generic_decl (file, var, flags);
6157 fprintf (file, "\n");
6163 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6165 /* Make a CFG based dump. */
6166 check_bb_profile (ENTRY_BLOCK_PTR, file);
6167 if (!ignore_topmost_bind)
6168 fprintf (file, "{\n");
6170 if (any_var && n_basic_blocks)
6171 fprintf (file, "\n");
6174 dump_generic_bb (file, bb, 2, flags);
6176 fprintf (file, "}\n");
6177 check_bb_profile (EXIT_BLOCK_PTR, file);
6183 /* Make a tree based dump. */
6184 chain = DECL_SAVED_TREE (fn);
/* Skip the outermost BIND_EXPR's braces when variables were already
   printed separately above.  */
6186 if (chain && TREE_CODE (chain) == BIND_EXPR)
6188 if (ignore_topmost_bind)
6190 chain = BIND_EXPR_BODY (chain);
6198 if (!ignore_topmost_bind)
6199 fprintf (file, "{\n");
6204 fprintf (file, "\n");
6206 print_generic_stmt_indented (file, chain, flags, indent);
6207 if (ignore_topmost_bind)
6208 fprintf (file, "}\n");
6211 fprintf (file, "\n\n");
6218 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6221 debug_function (tree fn, int flags)
6223 dump_function_to_file (fn, stderr, flags);
6227 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6230 print_pred_bbs (FILE *file, basic_block bb)
/* Emit "bb_N " for the source of each incoming edge.  */
6235 FOR_EACH_EDGE (e, ei, bb->preds)
6236 fprintf (file, "bb_%d ", e->src->index);
6240 /* Print on FILE the indexes for the successors of basic_block BB. */
6243 print_succ_bbs (FILE *file, basic_block bb)
/* Emit "bb_N " for the destination of each outgoing edge.  */
6248 FOR_EACH_EDGE (e, ei, bb->succs)
6249 fprintf (file, "bb_%d ", e->dest->index);
6252 /* Print to FILE the basic block BB following the VERBOSITY level. */
6255 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6257 char *s_indent = (char *) alloca ((size_t) indent + 1);
6258 memset ((void *) s_indent, ' ', (size_t) indent);
6259 s_indent[indent] = '\0';
6261 /* Print basic_block's header. */
6264 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6265 print_pred_bbs (file, bb);
6266 fprintf (file, "}, succs = {");
6267 print_succ_bbs (file, bb);
6268 fprintf (file, "})\n");
6271 /* Print basic_block's body. */
6274 fprintf (file, "%s {\n", s_indent);
6275 tree_dump_bb (bb, file, indent + 4);
6276 fprintf (file, "%s }\n", s_indent);
6280 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6282 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
6283 VERBOSITY level this outputs the contents of the loop, or just its
6287 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6295 s_indent = (char *) alloca ((size_t) indent + 1);
6296 memset ((void *) s_indent, ' ', (size_t) indent);
6297 s_indent[indent] = '\0';
6299 /* Print loop's header. */
6300 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6301 loop->num, loop->header->index, loop->latch->index);
6302 fprintf (file, ", niter = ");
6303 print_generic_expr (file, loop->nb_iterations, 0);
/* Optional iteration-count bound and estimate, when recorded.  */
6305 if (loop->any_upper_bound)
6307 fprintf (file, ", upper_bound = ");
6308 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6311 if (loop->any_estimate)
6313 fprintf (file, ", estimate = ");
6314 dump_double_int (file, loop->nb_iterations_estimate, true);
6316 fprintf (file, ")\n");
6318 /* Print loop's body. */
6321 fprintf (file, "%s{\n", s_indent);
/* Print only the blocks belonging directly to this loop, then recurse
   into the inner loops.  */
6323 if (bb->loop_father == loop)
6324 print_loops_bb (file, bb, indent, verbosity);
6326 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6327 fprintf (file, "%s}\n", s_indent);
6331 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6332 spaces. Following VERBOSITY level this outputs the contents of the
6333 loop, or just its structure. */
6336 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
/* Print this loop, then recurse along the sibling chain.  */
6341 print_loop (file, loop, indent, verbosity);
6342 print_loop_and_siblings (file, loop->next, indent, verbosity);
6345 /* Follow a CFG edge from the entry point of the program, and on entry
6346 of a loop, pretty print the loop structure on FILE. */
6349 print_loops (FILE *file, int verbosity)
/* Start from the first real block after the fixed entry/exit blocks.  */
6353 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
6354 if (bb && bb->loop_father)
6355 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6359 /* Debugging loops structure at tree level, at some VERBOSITY level. */
6362 debug_loops (int verbosity)
6364 print_loops (stderr, verbosity);
6367 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6370 debug_loop (struct loop *loop, int verbosity)
6372 print_loop (stderr, loop, 0, verbosity);
6375 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6379 debug_loop_num (unsigned num, int verbosity)
6381 debug_loop (get_loop (num), verbosity);
6384 /* Return true if BB ends with a call, possibly followed by some
6385 instructions that must stay with the call. Return false,
6389 tree_block_ends_with_call_p (basic_block bb)
6391 block_stmt_iterator bsi = bsi_last (bb);
/* At the tree level the call, if any, is the last statement itself.  */
6392 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
6396 /* Return true if BB ends with a conditional branch. Return false,
6400 tree_block_ends_with_condjump_p (const_basic_block bb)
6402 /* This CONST_CAST is okay because last_stmt doesn't modify its
6403 argument and the return value is not modified. */
6404 const_tree stmt = last_stmt (CONST_CAST_BB(bb));
6405 return (stmt && TREE_CODE (stmt) == COND_EXPR);
6409 /* Return true if we need to add fake edge to exit at statement T.
6410 Helper function for tree_flow_call_edges_add. */
6413 need_fake_edge_p (tree t)
6417 /* NORETURN and LONGJMP calls already have an edge to exit.
6418 CONST and PURE calls do not need one.
6419 We don't currently check for CONST and PURE here, although
6420 it would be a good idea, because those attributes are
6421 figured out from the RTL in mark_constant_function, and
6422 the counter incrementation code from -fprofile-arcs
6423 leads to different results from -fbranch-probabilities. */
6424 call = get_call_expr_in (t);
6426 && !(call_expr_flags (call) & ECF_NORETURN))
/* Volatile asm statements may perform non-local control transfers.  */
6429 if (TREE_CODE (t) == ASM_EXPR
6430 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
6437 /* Add fake edges to the function exit for any non constant and non
6438 noreturn calls, volatile inline assembly in the bitmap of blocks
6439 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
6440 the number of blocks that were split.
6442 The goal is to expose cases in which entering a basic block does
6443 not imply that all subsequent instructions must be executed. */
6446 tree_flow_call_edges_add (sbitmap blocks)
6449 int blocks_split = 0;
6450 int last_bb = last_basic_block;
6451 bool check_last_block = false;
/* Nothing to do on an empty CFG.  */
6453 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6457 check_last_block = true;
6459 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6461 /* In the last basic block, before epilogue generation, there will be
6462 a fallthru edge to EXIT. Special care is required if the last insn
6463 of the last basic block is a call because make_edge folds duplicate
6464 edges, which would result in the fallthru edge also being marked
6465 fake, which would result in the fallthru edge being removed by
6466 remove_fake_edges, which would result in an invalid CFG.
6468 Moreover, we can't elide the outgoing fake edge, since the block
6469 profiler needs to take this into account in order to solve the minimal
6470 spanning tree in the case that the call doesn't return.
6472 Handle this by adding a dummy instruction in a new last basic block. */
6473 if (check_last_block)
6475 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6476 block_stmt_iterator bsi = bsi_last (bb);
6478 if (!bsi_end_p (bsi))
6481 if (t && need_fake_edge_p (t))
6485 e = find_edge (bb, EXIT_BLOCK_PTR);
/* Insert an empty statement on the fallthru edge so the call is
   no longer the final statement before EXIT.  */
6488 bsi_insert_on_edge (e, build_empty_stmt ());
6489 bsi_commit_edge_inserts ();
6494 /* Now add fake edges to the function exit for any non constant
6495 calls since there is no way that we can determine if they will
6497 for (i = 0; i < last_bb; i++)
6499 basic_block bb = BASIC_BLOCK (i);
6500 block_stmt_iterator bsi;
6501 tree stmt, last_stmt;
6506 if (blocks && !TEST_BIT (blocks, i))
/* Scan backwards from the block's last statement.  */
6509 bsi = bsi_last (bb);
6510 if (!bsi_end_p (bsi))
6512 last_stmt = bsi_stmt (bsi);
6515 stmt = bsi_stmt (bsi);
6516 if (need_fake_edge_p (stmt))
6519 /* The handling above of the final block before the
6520 epilogue should be enough to verify that there is
6521 no edge to the exit block in CFG already.
6522 Calling make_edge in such case would cause us to
6523 mark that edge as fake and remove it later. */
6524 #ifdef ENABLE_CHECKING
6525 if (stmt == last_stmt)
6527 e = find_edge (bb, EXIT_BLOCK_PTR);
6528 gcc_assert (e == NULL);
6532 /* Note that the following may create a new basic block
6533 and renumber the existing basic blocks. */
6534 if (stmt != last_stmt)
6536 e = split_block (bb, stmt);
6540 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6544 while (!bsi_end_p (bsi));
6549 verify_flow_info ();
6551 return blocks_split;
6554 /* Purge dead abnormal call edges from basic block BB. */
6557 tree_purge_dead_abnormal_call_edges (basic_block bb)
/* Start with whatever dead EH edges need purging.  */
6559 bool changed = tree_purge_dead_eh_edges (bb);
6561 if (current_function_has_nonlocal_label)
6563 tree stmt = last_stmt (bb);
/* If BB's last statement can no longer make an abnormal goto, all
   its abnormal outgoing edges are dead.  */
6567 if (!(stmt && tree_can_make_abnormal_goto (stmt)))
6568 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6570 if (e->flags & EDGE_ABNORMAL)
6579 /* See tree_purge_dead_eh_edges below. */
6581 free_dominance_info (CDI_DOMINATORS);
6587 /* Stores all basic blocks dominated by BB to DOM_BBS. */
6590 get_all_dominated_blocks (basic_block bb, VEC (basic_block, heap) **dom_bbs)
/* Push BB itself, then recurse into each child in the dominator tree.  */
6594 VEC_safe_push (basic_block, heap, *dom_bbs, bb);
6595 for (son = first_dom_son (CDI_DOMINATORS, bb);
6597 son = next_dom_son (CDI_DOMINATORS, son))
6598 get_all_dominated_blocks (son, dom_bbs);
6601 /* Removes edge E and all the blocks dominated by it, and updates dominance
6602 information. The IL in E->src needs to be updated separately.
6603 If dominance info is not available, only the edge E is removed.*/
6606 remove_edge_and_dominated_blocks (edge e)
6608 VEC (basic_block, heap) *bbs_to_remove = NULL;
6609 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6613 bool none_removed = false;
6615 basic_block bb, dbb;
/* Without dominator info we can only remove the edge itself.  */
6618 if (!dom_info_available_p (CDI_DOMINATORS))
6624 /* No updating is needed for edges to exit. */
6625 if (e->dest == EXIT_BLOCK_PTR)
6627 if (cfgcleanup_altered_bbs)
6628 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6633 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6634 that is not dominated by E->dest, then this set is empty. Otherwise,
6635 all the basic blocks dominated by E->dest are removed.
6637 Also, to DF_IDOM we store the immediate dominators of the blocks in
6638 the dominance frontier of E (i.e., of the successors of the
6639 removed blocks, if there are any, and of E->dest otherwise). */
6640 FOR_EACH_EDGE (f, ei, e->dest->preds)
6645 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6647 none_removed = true;
6652 df = BITMAP_ALLOC (NULL);
6653 df_idom = BITMAP_ALLOC (NULL);
/* Nothing is removed: only E->dest's idom may need fixing.  */
6656 bitmap_set_bit (df_idom,
6657 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
/* Collect the dominated blocks and compute DF as the set of their
   successors that survive the removal.  */
6660 get_all_dominated_blocks (e->dest, &bbs_to_remove);
6661 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6663 FOR_EACH_EDGE (f, ei, bb->succs)
6665 if (f->dest != EXIT_BLOCK_PTR)
6666 bitmap_set_bit (df, f->dest->index);
6669 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6670 bitmap_clear_bit (df, bb->index);
6672 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6674 bb = BASIC_BLOCK (i);
6675 bitmap_set_bit (df_idom,
6676 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6680 if (cfgcleanup_altered_bbs)
6682 /* Record the set of the altered basic blocks. */
6683 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6684 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6687 /* Remove E and the cancelled blocks. */
6692 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6693 delete_basic_block (bb);
6696 /* Update the dominance information. The immediate dominator may change only
6697 for blocks whose immediate dominator belongs to DF_IDOM:
6699 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6700 removal. Let Z the arbitrary block such that idom(Z) = Y and
6701 Z dominates X after the removal. Before removal, there exists a path P
6702 from Y to X that avoids Z. Let F be the last edge on P that is
6703 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6704 dominates W, and because of P, Z does not dominate W), and W belongs to
6705 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6706 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6708 bb = BASIC_BLOCK (i);
6709 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6711 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6712 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6715 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
6718 BITMAP_FREE (df_idom);
6719 VEC_free (basic_block, heap, bbs_to_remove);
6720 VEC_free (basic_block, heap, bbs_to_fix_dom);
6723 /* Purge dead EH edges from basic block BB. */
6726 tree_purge_dead_eh_edges (basic_block bb)
6728 bool changed = false;
6731 tree stmt = last_stmt (bb);
/* EH edges are live only while the last statement can throw
   internally; the elided branch removes them otherwise.  */
6733 if (stmt && tree_can_throw_internal (stmt))
6736 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6738 if (e->flags & EDGE_EH)
6740 remove_edge_and_dominated_blocks (e);
/* Purge dead EH edges from every basic block whose index is set in
   BLOCKS; return true if any edge was removed.  */
6751 tree_purge_all_dead_eh_edges (const_bitmap blocks)
6753 bool changed = false;
6757 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6759 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
6765 /* This function is called whenever a new edge is created or
6769 tree_execute_on_growing_pred (edge e)
6771 basic_block bb = e->dest;
/* Make room in the destination's PHI nodes for the new edge's args.  */
6774 reserve_phi_args_for_new_edge (bb);
6777 /* This function is called immediately before edge E is removed from
6778 the edge vector E->dest->preds. */
6781 tree_execute_on_shrinking_pred (edge e)
/* Drop E's PHI arguments in the destination, if it has any PHIs.  */
6783 if (phi_nodes (e->dest))
6784 remove_phi_args (e);
6787 /*---------------------------------------------------------------------------
6788 Helper functions for Loop versioning
6789 ---------------------------------------------------------------------------*/
6791 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
6792 of 'first'. Both of them are dominated by 'new_head' basic block. When
6793 'new_head' was created by 'second's incoming edge it received phi arguments
6794 on the edge by split_edge(). Later, additional edge 'e' was created to
6795 connect 'new_head' and 'first'. Now this routine adds phi args on this
6796 additional edge 'e' that new_head to second edge received as part of edge
6801 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
6802 basic_block new_head, edge e)
6805 edge e2 = find_edge (new_head, second);
6807 /* Because NEW_HEAD has been created by splitting SECOND's incoming
6808 edge, we should always have an edge from NEW_HEAD to SECOND. */
6809 gcc_assert (e2 != NULL);
6811 /* Browse all 'second' basic block phi nodes and add phi args to
6812 edge 'e' for 'first' head. PHI args are always in correct order. */
6814 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
6816 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
/* Copy the argument SECOND receives on E2 onto FIRST's edge E.  */
6818 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
6819 add_phi_arg (phi1, def, e);
6823 /* Adds a if else statement to COND_BB with condition COND_EXPR.
6824 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
6825 the destination of the ELSE part. */
6827 tree_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
6828 basic_block second_head ATTRIBUTE_UNUSED,
6829 basic_block cond_bb, void *cond_e)
6831 block_stmt_iterator bsi;
6832 tree new_cond_expr = NULL_TREE;
6833 tree cond_expr = (tree) cond_e;
6836 /* Build new conditional expr */
6837 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr,
6838 NULL_TREE, NULL_TREE);
6840 /* Add new cond in cond_bb. */
6841 bsi = bsi_start (cond_bb);
6842 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
6843 /* Adjust edges appropriately to connect new head with first head
6844 as well as second head. */
/* The former fallthru out of COND_BB becomes the FALSE edge of the
   new condition.  */
6845 e0 = single_succ_edge (cond_bb);
6846 e0->flags &= ~EDGE_FALLTHRU;
6847 e0->flags |= EDGE_FALSE_VALUE;
/* CFG hooks vector for the tree (GIMPLE) IR; see struct cfg_hooks in
   cfghooks.h for the meaning of each slot.  */
6850 struct cfg_hooks tree_cfg_hooks = {
6852 tree_verify_flow_info,
6853 tree_dump_bb, /* dump_bb */
6854 create_bb, /* create_basic_block */
6855 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
6856 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
6857 tree_can_remove_branch_p, /* can_remove_branch_p */
6858 remove_bb, /* delete_basic_block */
6859 tree_split_block, /* split_block */
6860 tree_move_block_after, /* move_block_after */
6861 tree_can_merge_blocks_p, /* can_merge_blocks_p */
6862 tree_merge_blocks, /* merge_blocks */
6863 tree_predict_edge, /* predict_edge */
6864 tree_predicted_by_p, /* predicted_by_p */
6865 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
6866 tree_duplicate_bb, /* duplicate_block */
6867 tree_split_edge, /* split_edge */
6868 tree_make_forwarder_block, /* make_forward_block */
6869 NULL, /* tidy_fallthru_edge */
6870 tree_block_ends_with_call_p, /* block_ends_with_call_p */
6871 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
6872 tree_flow_call_edges_add, /* flow_call_edges_add */
6873 tree_execute_on_growing_pred, /* execute_on_growing_pred */
6874 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
6875 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
6876 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
6877 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
6878 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
6879 flush_pending_stmts /* flush_pending_stmts */
6883 /* Split all critical edges. */
/* NOTE(review): the return type, opening brace, local declarations
   (BB, E, EI) and the enclosing FOR_EACH_BB loop of this function are
   elided in this extract; only comments are added here.  */
6886 split_critical_edges (void)
6892 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
6893 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
6894 mappings around the calls to split_edge. */
6895 start_recording_case_labels ();
/* Abnormal edges (e.g. from nonlocal gotos) cannot be split, so they
   are excluded even when critical.  */
6898 FOR_EACH_EDGE (e, ei, bb->succs)
6899 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
6904 end_recording_case_labels ();
/* Pass descriptor for the critical-edge-splitting pass.  Requires a
   CFG (PROP_cfg) and guarantees no critical edges remain afterwards
   (PROP_no_crit_edges).
   NOTE(review): the opening braces and several descriptor fields
   (gate, sub-pass pointers, properties) are elided in this extract.  */
6908 struct gimple_opt_pass pass_split_crit_edges =
6912 "crited", /* name */
6914 split_critical_edges, /* execute */
6917 0, /* static_pass_number */
6918 TV_TREE_SPLIT_EDGES, /* tv_id */
6919 PROP_cfg, /* properties required */
6920 PROP_no_crit_edges, /* properties_provided */
6921 0, /* properties_destroyed */
6922 0, /* todo_flags_start */
6923 TODO_dump_func /* todo_flags_finish */
6928 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
6929 a temporary, make sure and register it to be renamed if necessary,
6930 and finally return the temporary. Put the statements to compute
6931 EXP before the current statement in BSI. */
/* NOTE(review): the return type, braces, and the "return exp;" /
   "return t;" statements are elided in this extract.  */
6934 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
6936 tree t, new_stmt, orig_stmt;
/* EXP is already a valid GIMPLE value: no temporary needed.  */
6938 if (is_gimple_val (exp))
6941 t = make_rename_temp (type, NULL);
6942 new_stmt = build_gimple_modify_stmt (t, exp);
/* Copy the source location and lexical block from the statement we
   insert before, so debug information stays sensible.  */
6944 orig_stmt = bsi_stmt (*bsi);
6945 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
6946 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
6948 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
/* When in SSA form, register the new statement's symbols with the
   SSA updater for renaming.  */
6949 if (gimple_in_ssa_p (cfun))
6950 mark_symbols_for_renaming (new_stmt);
6955 /* Build a ternary operation and gimplify it. Emit code before BSI.
6956 Return the gimple_val holding the result. */
/* NOTE(review): the return type, braces, and the declaration of RET
   are elided in this extract.  */
6959 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
6960 tree type, tree a, tree b, tree c)
/* Fold at build time: the expression may simplify to an existing
   GIMPLE value and need no new statement at all.  */
6964 ret = fold_build3 (code, type, a, b, c);
6967 return gimplify_val (bsi, type, ret);
6970 /* Build a binary operation and gimplify it. Emit code before BSI.
6971 Return the gimple_val holding the result. */
/* NOTE(review): the return type, braces, and the declaration of RET
   are elided in this extract.  */
6974 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
6975 tree type, tree a, tree b)
/* Fold at build time: the expression may simplify to an existing
   GIMPLE value and need no new statement at all.  */
6979 ret = fold_build2 (code, type, a, b);
6982 return gimplify_val (bsi, type, ret);
6985 /* Build a unary operation and gimplify it. Emit code before BSI.
6986 Return the gimple_val holding the result. */
/* NOTE(review): the return type, the continuation of the parameter
   list (operand A), braces, and the declaration of RET are elided in
   this extract.  */
6989 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
/* Fold at build time: the expression may simplify to an existing
   GIMPLE value and need no new statement at all.  */
6994 ret = fold_build1 (code, type, a);
6997 return gimplify_val (bsi, type, ret);
7002 /* Emit return warnings. */
/* Two diagnostics: (1) a function declared noreturn that can actually
   reach the exit block, and (2) a "return;" with no value in a
   function whose return type is not void.
   NOTE(review): the return type, braces, and the declarations of E,
   EI, and LAST are elided in this extract.  */
7005 execute_warn_function_return (void)
7007 source_location location;
/* On a FUNCTION_DECL, TREE_THIS_VOLATILE marks a noreturn function;
   any predecessor of EXIT means it can return after all.  */
7012 /* If we have a path to EXIT, then we do return. */
7013 if (TREE_THIS_VOLATILE (cfun->decl)
7014 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7016 location = UNKNOWN_LOCATION;
/* Prefer the location of a RETURN_EXPR feeding the exit block.  */
7017 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7019 last = last_stmt (e->src);
7020 if (TREE_CODE (last) == RETURN_EXPR
7021 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
/* Fall back to the end of the function when no return statement
   carried a usable location.  */
7024 if (location == UNKNOWN_LOCATION)
7025 location = cfun->function_end_locus;
7026 warning (0, "%H%<noreturn%> function does return", &location);
7029 /* If we see "return;" in some basic block, then we do reach the end
7030 without returning a value. */
7031 else if (warn_return_type
7032 && !TREE_NO_WARNING (cfun->decl)
7033 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7034 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7036 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7038 tree last = last_stmt (e->src);
/* A value-less "return;" in a non-void function, not already
   suppressed on the statement itself.  */
7039 if (TREE_CODE (last) == RETURN_EXPR
7040 && TREE_OPERAND (last, 0) == NULL
7041 && !TREE_NO_WARNING (last))
7043 location = EXPR_LOCATION (last);
7044 if (location == UNKNOWN_LOCATION)
7045 location = cfun->function_end_locus;
7046 warning (OPT_Wreturn_type, "%Hcontrol reaches end of non-void function", &location);
/* Set TREE_NO_WARNING on the decl so we warn at most once per
   function.  */
7047 TREE_NO_WARNING (cfun->decl) = 1;
7056 /* Given a basic block B which ends with a conditional and has
7057 precisely two successors, determine which of the edges is taken if
7058 the conditional is true and which is taken if the conditional is
7059 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
/* NOTE(review): the continuation of the parameter list (the
   TRUE_EDGE/FALSE_EDGE out-parameters), braces, and the assignments
   storing successor 0 are elided in this extract.  */
7062 extract_true_false_edges_from_block (basic_block b,
7066 edge e = EDGE_SUCC (b, 0);
/* With exactly two successors, if successor 0 carries
   EDGE_TRUE_VALUE then successor 1 is the false edge, and vice
   versa.  */
7068 if (e->flags & EDGE_TRUE_VALUE)
7071 *false_edge = EDGE_SUCC (b, 1);
7076 *true_edge = EDGE_SUCC (b, 1);
/* Pass descriptor for the return-warning pass; requires only a CFG
   (PROP_cfg) and provides/destroys nothing.
   NOTE(review): the opening braces and several descriptor fields
   (name, gate, sub-pass pointers, tv_id) are elided in this
   extract.  */
7080 struct gimple_opt_pass pass_warn_function_return =
7086 execute_warn_function_return, /* execute */
7089 0, /* static_pass_number */
7091 PROP_cfg, /* properties_required */
7092 0, /* properties_provided */
7093 0, /* properties_destroyed */
7094 0, /* todo_flags_start */
7095 0 /* todo_flags_finish */
7099 /* Emit noreturn warnings. */
/* Suggest adding attribute noreturn when -Wmissing-noreturn is on,
   the function is not already marked noreturn (TREE_THIS_VOLATILE on
   the decl), nothing reaches the exit block, and the front end's
   hook does not veto the suggestion.
   NOTE(review): the return type, braces, and the final argument of
   the warning call are elided in this extract.  */
7102 execute_warn_function_noreturn (void)
7104 if (warn_missing_noreturn
7105 && !TREE_THIS_VOLATILE (cfun->decl)
7106 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
7107 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
7108 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
7109 "for attribute %<noreturn%>",
7114 struct gimple_opt_pass pass_warn_function_noreturn =
7120 execute_warn_function_noreturn, /* execute */
7123 0, /* static_pass_number */
7125 PROP_cfg, /* properties_required */
7126 0, /* properties_provided */
7127 0, /* properties_destroyed */
7128 0, /* todo_flags_start */
7129 0 /* todo_flags_finish */