1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
45 #include "cfglayout.h"
46 #include "tree-ssa-propagate.h"
47 #include "value-prof.h"
48 #include "pointer-set.h"
49 #include "tree-inline.h"
51 /* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
54 /* Local declarations. */
56 /* Initial capacity for the basic block array. */
57 static const int initial_cfg_capacity = 20;
59 /* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
62 hash table to prevent problems with duplication of SWITCH_EXPRs.
64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
72 static struct pointer_map_t *edge_to_cases;
75 struct cfg_stats_d
76 {
77 long num_merged_labels;
78 };
80 static struct cfg_stats_d cfg_stats;
82 /* Nonzero if we found a computed goto while building basic blocks. */
83 static bool found_computed_goto;
85 /* Basic blocks and flowgraphs. */
86 static basic_block create_bb (void *, void *, basic_block);
87 static void make_blocks (tree);
88 static void factor_computed_gotos (void);
91 static void make_edges (void);
92 static void make_cond_expr_edges (basic_block);
93 static void make_switch_expr_edges (basic_block);
94 static void make_goto_expr_edges (basic_block);
95 static edge tree_redirect_edge_and_branch (edge, basic_block);
96 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
97 static unsigned int split_critical_edges (void);
99 /* Various helpers. */
100 static inline bool stmt_starts_bb_p (const_tree, const_tree);
101 static int tree_verify_flow_info (void);
102 static void tree_make_forwarder_block (edge);
103 static void tree_cfg2vcg (FILE *);
104 static inline void change_bb_for_stmt (tree t, basic_block bb);
105 static bool computed_goto_p (const_tree);
107 /* Flowgraph optimization and cleanup. */
108 static void tree_merge_blocks (basic_block, basic_block);
109 static bool tree_can_merge_blocks_p (basic_block, basic_block);
110 static void remove_bb (basic_block);
111 static edge find_taken_edge_computed_goto (basic_block, tree);
112 static edge find_taken_edge_cond_expr (basic_block, tree);
113 static edge find_taken_edge_switch_expr (basic_block, tree);
114 static tree find_case_label_for_value (tree, tree);
117 init_empty_tree_cfg_for_function (struct function *fn)
119 /* Initialize the basic block array. */
121 profile_status_for_function (fn) = PROFILE_ABSENT;
122 n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
123 last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
124 basic_block_info_for_function (fn)
125 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
126 VEC_safe_grow_cleared (basic_block, gc,
127 basic_block_info_for_function (fn),
128 initial_cfg_capacity);
130 /* Build a mapping of labels to their associated blocks. */
131 label_to_block_map_for_function (fn)
132 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
133 VEC_safe_grow_cleared (basic_block, gc,
134 label_to_block_map_for_function (fn),
135 initial_cfg_capacity);
137 SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
138 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
139 SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
140 EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
142 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
143 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
144 EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
145 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
149 init_empty_tree_cfg (void)
151 init_empty_tree_cfg_for_function (cfun);
154 /*---------------------------------------------------------------------------
156 ---------------------------------------------------------------------------*/
158 /* Entry point to the CFG builder for trees. TP points to the list of
159 statements to be added to the flowgraph. */
162 build_tree_cfg (tree *tp)
164 /* Register specific tree functions. */
165 tree_register_cfg_hooks ();
167 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
169 init_empty_tree_cfg ();
171 found_computed_goto = 0;
172 make_blocks (*tp);
174 /* Computed gotos are hell to deal with, especially if there are
175 lots of them with a large number of destinations. So we factor
176 them to a common computed goto location before we build the
177 edge list. After we convert back to normal form, we will un-factor
178 the computed gotos since factoring introduces an unwanted jump. */
179 if (found_computed_goto)
180 factor_computed_gotos ();
182 /* Make sure there is always at least one block, even if it's empty. */
183 if (n_basic_blocks == NUM_FIXED_BLOCKS)
184 create_empty_bb (ENTRY_BLOCK_PTR);
186 /* Adjust the size of the array. */
187 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
188 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
190 /* To speed up statement iterator walks, we first purge dead labels. */
191 cleanup_dead_labels ();
193 /* Group case nodes to reduce the number of edges.
194 We do this after cleaning up dead labels because otherwise we miss
195 a lot of obvious case merging opportunities. */
196 group_case_labels ();
198 /* Create the edges of the flowgraph. */
199 make_edges ();
200 cleanup_dead_labels ();
202 /* Debugging dumps. */
204 /* Write the flowgraph to a VCG file. */
206 int local_dump_flags;
207 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
210 tree_cfg2vcg (vcg_file);
211 dump_end (TDI_vcg, vcg_file);
215 #ifdef ENABLE_CHECKING
219 /* Dump a textual representation of the flowgraph. */
221 dump_tree_cfg (dump_file, dump_flags);
225 execute_build_cfg (void)
227 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
231 struct gimple_opt_pass pass_build_cfg =
237 execute_build_cfg, /* execute */
240 0, /* static_pass_number */
241 TV_TREE_CFG, /* tv_id */
242 PROP_gimple_leh, /* properties_required */
243 PROP_cfg, /* properties_provided */
244 0, /* properties_destroyed */
245 0, /* todo_flags_start */
246 TODO_verify_stmts | TODO_cleanup_cfg /* todo_flags_finish */
250 /* Search the CFG for any computed gotos. If found, factor them to a
251 common computed goto site. Also record the location of that site so
252 that we can un-factor the gotos after we have converted back to
253 normal form. */
256 factor_computed_gotos (void)
259 tree factored_label_decl = NULL;
261 tree factored_computed_goto_label = NULL;
262 tree factored_computed_goto = NULL;
264 /* We know there are one or more computed gotos in this function.
265 Examine the last statement in each basic block to see if the block
266 ends with a computed goto. */
270 block_stmt_iterator bsi = bsi_last (bb);
275 last = bsi_stmt (bsi);
277 /* Ignore the computed goto we create when we factor the original
278 computed gotos. */
279 if (last == factored_computed_goto)
282 /* If the last statement is a computed goto, factor it. */
283 if (computed_goto_p (last))
287 /* The first time we find a computed goto we need to create
288 the factored goto block and the variable each original
289 computed goto will use for their goto destination. */
290 if (! factored_computed_goto)
292 basic_block new_bb = create_empty_bb (bb);
293 block_stmt_iterator new_bsi = bsi_start (new_bb);
295 /* Create the destination of the factored goto. Each original
296 computed goto will put its desired destination into this
297 variable and jump to the label we create immediately
298 below. */
299 var = create_tmp_var (ptr_type_node, "gotovar");
301 /* Build a label for the new block which will contain the
302 factored computed goto. */
303 factored_label_decl = create_artificial_label ();
304 factored_computed_goto_label
305 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
306 bsi_insert_after (&new_bsi, factored_computed_goto_label,
309 /* Build our new computed goto. */
310 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
311 bsi_insert_after (&new_bsi, factored_computed_goto,
315 /* Copy the original computed goto's destination into VAR. */
316 assignment = build_gimple_modify_stmt (var,
317 GOTO_DESTINATION (last));
318 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
320 /* And re-vector the computed goto to the new destination. */
321 GOTO_DESTINATION (last) = factored_label_decl;
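/* Illustrative sketch, not from the original sources: with two computed
   gotos "goto *p;" and "goto *q;", the factoring above rewrites each of
   them as

       gotovar = p;   (respectively gotovar = q;)
       goto <factored_label>;

   and appends a single block

     <factored_label>:
       goto *gotovar;

   so that only the one factored goto needs abnormal edges to every
   possible destination, instead of every original goto needing them.
   "gotovar" is the temporary created above; <factored_label> stands for
   the artificial label, whose actual name is compiler-generated. */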
327 /* Build a flowgraph for the statement_list STMT_LIST. */
330 make_blocks (tree stmt_list)
332 tree_stmt_iterator i = tsi_start (stmt_list);
334 bool start_new_block = true;
335 bool first_stmt_of_list = true;
336 basic_block bb = ENTRY_BLOCK_PTR;
338 while (!tsi_end_p (i))
345 /* If the statement starts a new basic block or if we have determined
346 in a previous pass that we need to create a new block for STMT, do
347 so now. */
348 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
350 if (!first_stmt_of_list)
351 stmt_list = tsi_split_statement_list_before (&i);
352 bb = create_basic_block (stmt_list, NULL, bb);
353 start_new_block = false;
356 /* Now add STMT to BB and create the subgraphs for special statement
357 codes. */
358 set_bb_for_stmt (stmt, bb);
360 if (computed_goto_p (stmt))
361 found_computed_goto = true;
363 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
364 next iteration. */
365 if (stmt_ends_bb_p (stmt))
366 start_new_block = true;
369 first_stmt_of_list = false;
374 /* Create and return a new empty basic block after bb AFTER. */
377 create_bb (void *h, void *e, basic_block after)
383 /* Create and initialize a new basic block. Since alloc_block uses
384 ggc_alloc_cleared to allocate a basic block, we do not have to
385 clear the newly allocated basic block here. */
388 bb->index = last_basic_block;
390 bb->il.tree = GGC_CNEW (struct tree_bb_info);
391 set_bb_stmt_list (bb, h ? (tree) h : alloc_stmt_list ());
393 /* Add the new block to the linked list of blocks. */
394 link_block (bb, after);
396 /* Grow the basic block array if needed. */
397 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
399 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
400 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
403 /* Add the newly created block to the array. */
404 SET_BASIC_BLOCK (last_basic_block, bb);
413 /*---------------------------------------------------------------------------
415 ---------------------------------------------------------------------------*/
417 /* Fold COND_EXPR_COND of each COND_EXPR. */
420 fold_cond_expr_cond (void)
426 tree stmt = last_stmt (bb);
429 && TREE_CODE (stmt) == COND_EXPR)
434 fold_defer_overflow_warnings ();
435 cond = fold (COND_EXPR_COND (stmt));
436 zerop = integer_zerop (cond);
437 onep = integer_onep (cond);
438 fold_undefer_overflow_warnings (zerop || onep,
440 WARN_STRICT_OVERFLOW_CONDITIONAL);
442 COND_EXPR_COND (stmt) = boolean_false_node;
444 COND_EXPR_COND (stmt) = boolean_true_node;
449 /* Join all the blocks in the flowgraph. */
455 struct omp_region *cur_region = NULL;
457 /* Create an edge from entry to the first block with executable
458 statements in it. */
459 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
461 /* Traverse the basic block array placing edges. */
464 tree last = last_stmt (bb);
469 enum tree_code code = TREE_CODE (last);
473 make_goto_expr_edges (bb);
477 make_edge (bb, EXIT_BLOCK_PTR, 0);
481 make_cond_expr_edges (bb);
485 make_switch_expr_edges (bb);
489 make_eh_edges (last);
494 /* If this function receives a nonlocal goto, then we need to
495 make edges from this call site to all the nonlocal goto
496 handlers. */
497 if (tree_can_make_abnormal_goto (last))
498 make_abnormal_goto_edges (bb, true);
500 /* If this statement has reachable exception handlers, then
501 create abnormal edges to them. */
502 make_eh_edges (last);
504 /* Some calls are known not to return. */
505 fallthru = !(call_expr_flags (last) & ECF_NORETURN);
511 case GIMPLE_MODIFY_STMT:
512 if (is_ctrl_altering_stmt (last))
514 /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
515 the CALL_EXPR may have an abnormal edge. Search the RHS
516 for this case and create any required edges. */
517 if (tree_can_make_abnormal_goto (last))
518 make_abnormal_goto_edges (bb, true);
520 make_eh_edges (last);
532 cur_region = new_omp_region (bb, code, cur_region);
537 cur_region = new_omp_region (bb, code, cur_region);
541 case OMP_SECTIONS_SWITCH:
546 case OMP_ATOMIC_LOAD:
547 case OMP_ATOMIC_STORE:
553 /* In the case of an OMP_SECTION, the edge will go somewhere
554 other than the next block. This will be created later. */
555 cur_region->exit = bb;
556 fallthru = cur_region->type != OMP_SECTION;
557 cur_region = cur_region->outer;
561 cur_region->cont = bb;
562 switch (cur_region->type)
565 /* Mark all OMP_FOR and OMP_CONTINUE succs edges as abnormal
566 to prevent splitting them. */
567 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
568 /* Make the loopback edge. */
569 make_edge (bb, single_succ (cur_region->entry),
572 /* Create an edge from OMP_FOR to exit, which corresponds to
573 the case that the body of the loop is not executed at
574 all. */
575 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
576 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
581 /* Wire up the edges into and out of the nested sections. */
583 basic_block switch_bb = single_succ (cur_region->entry);
585 struct omp_region *i;
586 for (i = cur_region->inner; i ; i = i->next)
588 gcc_assert (i->type == OMP_SECTION);
589 make_edge (switch_bb, i->entry, 0);
590 make_edge (i->exit, bb, EDGE_FALLTHRU);
593 /* Make the loopback edge to the block with
594 OMP_SECTIONS_SWITCH. */
595 make_edge (bb, switch_bb, 0);
597 /* Make the edge from the switch to exit. */
598 make_edge (switch_bb, bb->next_bb, 0);
609 gcc_assert (!stmt_ends_bb_p (last));
617 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
623 /* Fold COND_EXPR_COND of each COND_EXPR. */
624 fold_cond_expr_cond ();
628 /* Create the edges for a COND_EXPR starting at block BB.
629 At this point, both clauses must contain only simple gotos. */
632 make_cond_expr_edges (basic_block bb)
634 tree entry = last_stmt (bb);
635 basic_block then_bb, else_bb;
636 tree then_label, else_label;
640 gcc_assert (TREE_CODE (entry) == COND_EXPR);
642 /* Entry basic blocks for each component. */
643 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
644 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
645 then_bb = label_to_block (then_label);
646 else_bb = label_to_block (else_label);
648 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
649 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
650 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
652 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
654 /* We do not need the gotos anymore. */
655 COND_EXPR_THEN (entry) = NULL_TREE;
656 COND_EXPR_ELSE (entry) = NULL_TREE;
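/* Example (illustrative): a statement gimplified as
   "if (a) goto L1; else goto L2;" is from here on represented by the bare
   COND_EXPR plus an EDGE_TRUE_VALUE edge to the block holding L1 and an
   EDGE_FALSE_VALUE edge to the block holding L2; the destinations now
   live in the CFG rather than in the statement itself. */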
660 /* Called for each element in the hash table (P) as we delete the
661 edge to cases hash table.
663 Clear all the TREE_CHAINs to prevent problems with copying of
664 SWITCH_EXPRs and structure sharing rules, then free the hash table
665 element. */
668 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
669 void *data ATTRIBUTE_UNUSED)
673 for (t = (tree) *value; t; t = next)
675 next = TREE_CHAIN (t);
676 TREE_CHAIN (t) = NULL;
683 /* Start recording information mapping edges to case labels. */
686 start_recording_case_labels (void)
688 gcc_assert (edge_to_cases == NULL);
689 edge_to_cases = pointer_map_create ();
692 /* Return nonzero if we are recording information for case labels. */
695 recording_case_labels_p (void)
697 return (edge_to_cases != NULL);
700 /* Stop recording information mapping edges to case labels and
701 remove any information we have recorded. */
703 end_recording_case_labels (void)
705 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
706 pointer_map_destroy (edge_to_cases);
707 edge_to_cases = NULL;
710 /* If we are inside a {start,end}_recording_cases block, then return
711 a chain of CASE_LABEL_EXPRs from T which reference E.
713 Otherwise return NULL. */
716 get_cases_for_edge (edge e, tree t)
722 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
723 chains available. Return NULL so the caller can detect this case. */
724 if (!recording_case_labels_p ())
727 slot = pointer_map_contains (edge_to_cases, e);
731 /* If we did not find E in the hash table, then this must be the first
732 time we have been queried for information about E & T. Add all the
733 elements from T to the hash table then perform the query again. */
735 vec = SWITCH_LABELS (t);
736 n = TREE_VEC_LENGTH (vec);
737 for (i = 0; i < n; i++)
739 tree elt = TREE_VEC_ELT (vec, i);
740 tree lab = CASE_LABEL (elt);
741 basic_block label_bb = label_to_block (lab);
742 edge this_edge = find_edge (e->src, label_bb);
744 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
745 a new chain. */
746 slot = pointer_map_insert (edge_to_cases, this_edge);
747 TREE_CHAIN (elt) = (tree) *slot;
751 return (tree) *pointer_map_contains (edge_to_cases, e);
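/* Sketch, not part of the original file: a caller that wants every
   CASE_LABEL_EXPR associated with edge E of the SWITCH_EXPR T can walk
   the TREE_CHAIN links described above.  The helper name below is
   hypothetical and the code is illustrative only.  */
#if 0
static void
walk_cases_for_edge (edge e, tree switch_stmt)
{
  tree elt;

  for (elt = get_cases_for_edge (e, switch_stmt); elt; elt = TREE_CHAIN (elt))
    {
      /* CASE_LABEL (elt) is the destination label; CASE_LOW and CASE_HIGH
         describe the value or range this case handles.  */
    }
}
#endif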
754 /* Create the edges for a SWITCH_EXPR starting at block BB.
755 At this point, the switch body has been lowered and the
756 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
759 make_switch_expr_edges (basic_block bb)
761 tree entry = last_stmt (bb);
765 vec = SWITCH_LABELS (entry);
766 n = TREE_VEC_LENGTH (vec);
768 for (i = 0; i < n; ++i)
770 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
771 basic_block label_bb = label_to_block (lab);
772 make_edge (bb, label_bb, 0);
777 /* Return the basic block holding label DEST. */
780 label_to_block_fn (struct function *ifun, tree dest)
782 int uid = LABEL_DECL_UID (dest);
784 /* We would die hard when faced with an undefined label. Emit a label to
785 the very first basic block. This will hopefully make even the dataflow
786 and undefined variable warnings quite right. */
787 if ((errorcount || sorrycount) && uid < 0)
789 block_stmt_iterator bsi =
790 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
793 stmt = build1 (LABEL_EXPR, void_type_node, dest);
794 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
795 uid = LABEL_DECL_UID (dest);
797 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
798 <= (unsigned int) uid)
800 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
803 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
804 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
807 make_abnormal_goto_edges (basic_block bb, bool for_call)
809 basic_block target_bb;
810 block_stmt_iterator bsi;
812 FOR_EACH_BB (target_bb)
813 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
815 tree target = bsi_stmt (bsi);
817 if (TREE_CODE (target) != LABEL_EXPR)
820 target = LABEL_EXPR_LABEL (target);
822 /* Make an edge to every label block that has been marked as a
823 potential target for a computed goto or a non-local goto. */
824 if ((FORCED_LABEL (target) && !for_call)
825 || (DECL_NONLOCAL (target) && for_call))
827 make_edge (bb, target_bb, EDGE_ABNORMAL);
833 /* Create edges for a goto statement at block BB. */
836 make_goto_expr_edges (basic_block bb)
838 block_stmt_iterator last = bsi_last (bb);
839 tree goto_t = bsi_stmt (last);
841 /* A simple GOTO creates normal edges. */
842 if (simple_goto_p (goto_t))
844 tree dest = GOTO_DESTINATION (goto_t);
845 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
846 e->goto_locus = EXPR_LOCATION (goto_t);
847 bsi_remove (&last, true);
851 /* A computed GOTO creates abnormal edges. */
852 make_abnormal_goto_edges (bb, false);
856 /*---------------------------------------------------------------------------
858 ---------------------------------------------------------------------------*/
860 /* Cleanup useless labels in basic blocks. This is something we wish
861 to do early because it allows us to group case labels before creating
862 the edges for the CFG, and it speeds up block statement iterators in
863 all later passes.
864 We rerun this pass after the CFG is created, to get rid of the labels that
865 are no longer referenced. After that we do not run it any more, since
866 (almost) no new labels should be created. */
868 /* A map from basic block index to the leading label of that block. */
869 static struct label_record
874 /* True if the label is referenced from somewhere. */
878 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
880 update_eh_label (struct eh_region *region)
882 tree old_label = get_eh_region_tree_label (region);
886 basic_block bb = label_to_block (old_label);
888 /* ??? After optimizing, there may be EH regions with labels
889 that have already been removed from the function body, so
890 there is no basic block for them. */
894 new_label = label_for_bb[bb->index].label;
895 label_for_bb[bb->index].used = true;
896 set_eh_region_tree_label (region, new_label);
900 /* Given LABEL return the first label in the same basic block. */
902 main_block_label (tree label)
904 basic_block bb = label_to_block (label);
905 tree main_label = label_for_bb[bb->index].label;
907 /* label_to_block possibly inserted an undefined label into the chain. */
910 label_for_bb[bb->index].label = label;
914 label_for_bb[bb->index].used = true;
918 /* Cleanup redundant labels. This is a three-step process:
919 1) Find the leading label for each block.
920 2) Redirect all references to labels to the leading labels.
921 3) Cleanup all useless labels. */
924 cleanup_dead_labels (void)
927 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
929 /* Find a suitable label for each block. We use the first user-defined
930 label if there is one, or otherwise just the first label we see. */
933 block_stmt_iterator i;
935 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
937 tree label, stmt = bsi_stmt (i);
939 if (TREE_CODE (stmt) != LABEL_EXPR)
942 label = LABEL_EXPR_LABEL (stmt);
944 /* If we have not yet seen a label for the current block,
945 remember this one and see if there are more labels. */
946 if (!label_for_bb[bb->index].label)
948 label_for_bb[bb->index].label = label;
952 /* If we did see a label for the current block already, but it
953 is an artificially created label, replace it if the current
954 label is a user defined label. */
955 if (!DECL_ARTIFICIAL (label)
956 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
958 label_for_bb[bb->index].label = label;
964 /* Now redirect all jumps/branches to the selected label.
965 First do so for each block ending in a control statement. */
968 tree stmt = last_stmt (bb);
972 switch (TREE_CODE (stmt))
976 tree true_branch, false_branch;
978 true_branch = COND_EXPR_THEN (stmt);
979 false_branch = COND_EXPR_ELSE (stmt);
982 GOTO_DESTINATION (true_branch)
983 = main_block_label (GOTO_DESTINATION (true_branch));
985 GOTO_DESTINATION (false_branch)
986 = main_block_label (GOTO_DESTINATION (false_branch));
994 tree vec = SWITCH_LABELS (stmt);
995 size_t n = TREE_VEC_LENGTH (vec);
997 /* Replace all destination labels. */
998 for (i = 0; i < n; ++i)
1000 tree elt = TREE_VEC_ELT (vec, i);
1001 tree label = main_block_label (CASE_LABEL (elt));
1002 CASE_LABEL (elt) = label;
1007 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1008 remove them until after we've created the CFG edges. */
1010 if (! computed_goto_p (stmt))
1012 GOTO_DESTINATION (stmt)
1013 = main_block_label (GOTO_DESTINATION (stmt));
1022 for_each_eh_region (update_eh_label);
1024 /* Finally, purge dead labels. All user-defined labels and labels that
1025 can be the target of non-local gotos and labels which have their
1026 address taken are preserved. */
1029 block_stmt_iterator i;
1030 tree label_for_this_bb = label_for_bb[bb->index].label;
1032 if (!label_for_this_bb)
1035 /* If the main label of the block is unused, we may still remove it. */
1036 if (!label_for_bb[bb->index].used)
1037 label_for_this_bb = NULL;
1039 for (i = bsi_start (bb); !bsi_end_p (i); )
1041 tree label, stmt = bsi_stmt (i);
1043 if (TREE_CODE (stmt) != LABEL_EXPR)
1046 label = LABEL_EXPR_LABEL (stmt);
1048 if (label == label_for_this_bb
1049 || ! DECL_ARTIFICIAL (label)
1050 || DECL_NONLOCAL (label)
1051 || FORCED_LABEL (label))
1054 bsi_remove (&i, true);
1058 free (label_for_bb);
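/* Example (illustrative): if a block starts with an artificial label
   followed by a user-written label L, step 1 above selects L as the
   block's main label, step 2 redirects every goto destination, case label
   and EH region label that referred to the artificial label to L via
   main_block_label, and step 3 then deletes the artificial label, which
   is no longer referenced. */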
1061 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1062 and scan the sorted vector of cases. Combine the ones jumping to the
1063 same label.
1064 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1067 group_case_labels (void)
1073 tree stmt = last_stmt (bb);
1074 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1076 tree labels = SWITCH_LABELS (stmt);
1077 int old_size = TREE_VEC_LENGTH (labels);
1078 int i, j, new_size = old_size;
1079 tree default_case = NULL_TREE;
1080 tree default_label = NULL_TREE;
1082 /* The default label is always the last case in a switch
1083 statement after gimplification if it was not optimized
1084 away. */
1085 if (!CASE_LOW (TREE_VEC_ELT (labels, old_size - 1))
1086 && !CASE_HIGH (TREE_VEC_ELT (labels, old_size - 1)))
1088 default_case = TREE_VEC_ELT (labels, old_size - 1);
1089 default_label = CASE_LABEL (default_case);
1093 /* Look for possible opportunities to merge cases. */
1095 while (i < old_size)
1097 tree base_case, base_label, base_high;
1098 base_case = TREE_VEC_ELT (labels, i);
1100 gcc_assert (base_case);
1101 base_label = CASE_LABEL (base_case);
1103 /* Discard cases that have the same destination as the
1104 default case. */
1105 if (base_label == default_label)
1107 TREE_VEC_ELT (labels, i) = NULL_TREE;
1113 base_high = CASE_HIGH (base_case) ?
1114 CASE_HIGH (base_case) : CASE_LOW (base_case);
1116 /* Try to merge case labels. Break out when we reach the end
1117 of the label vector or when we cannot merge the next case
1118 label with the current one. */
1119 while (i < old_size)
1121 tree merge_case = TREE_VEC_ELT (labels, i);
1122 tree merge_label = CASE_LABEL (merge_case);
1123 tree t = int_const_binop (PLUS_EXPR, base_high,
1124 integer_one_node, 1);
1126 /* Merge the cases if they jump to the same place,
1127 and their ranges are consecutive. */
1128 if (merge_label == base_label
1129 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1131 base_high = CASE_HIGH (merge_case) ?
1132 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1133 CASE_HIGH (base_case) = base_high;
1134 TREE_VEC_ELT (labels, i) = NULL_TREE;
1143 /* Compress the case labels in the label vector, and adjust the
1144 length of the vector. */
1145 for (i = 0, j = 0; i < new_size; i++)
1147 while (! TREE_VEC_ELT (labels, j))
1149 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1151 TREE_VEC_LENGTH (labels) = new_size;
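/* Worked example (illustrative): given the sorted label vector
     1: <L1>, 2: <L1>, 3: <L1>, 7: <L0>, default: <L0>
   the loop above first merges the consecutive cases 1, 2 and 3, which all
   jump to <L1>, into the single range 1 ... 3: <L1>, and then drops
   7: <L0> because it shares its destination with the default case,
   leaving the shorter vector 1 ... 3: <L1>, default: <L0>. */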
1156 /* Checks whether we can merge block B into block A. */
1159 tree_can_merge_blocks_p (basic_block a, basic_block b)
1162 block_stmt_iterator bsi;
1165 if (!single_succ_p (a))
1168 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1171 if (single_succ (a) != b)
1174 if (!single_pred_p (b))
1177 if (b == EXIT_BLOCK_PTR)
1180 /* If A ends with a statement causing exceptions or something similar, we
1181 cannot merge the blocks. */
1182 /* This CONST_CAST is okay because last_stmt doesn't modify its
1183 argument and the return value is assigned to a const_tree. */
1184 stmt = last_stmt (CONST_CAST_BB (a));
1185 if (stmt && stmt_ends_bb_p (stmt))
1188 /* Do not allow a block with only a non-local label to be merged. */
1189 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1190 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1193 /* It must be possible to eliminate all phi nodes in B. If ssa form
1194 is not up-to-date, we cannot eliminate any phis; however, if only
1195 some symbols as a whole are marked for renaming, this is not a problem,
1196 as phi nodes for those symbols are irrelevant in updating anyway. */
1197 phi = phi_nodes (b);
1200 if (name_mappings_registered_p ())
1203 for (; phi; phi = PHI_CHAIN (phi))
1204 if (!is_gimple_reg (PHI_RESULT (phi))
1205 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1209 /* Do not remove user labels. */
1210 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1212 stmt = bsi_stmt (bsi);
1213 if (TREE_CODE (stmt) != LABEL_EXPR)
1215 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1219 /* Protect the loop latches. */
1221 && b->loop_father->latch == b)
1227 /* Replaces all uses of NAME by VAL. */
1230 replace_uses_by (tree name, tree val)
1232 imm_use_iterator imm_iter;
1237 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1239 if (TREE_CODE (stmt) != PHI_NODE)
1240 push_stmt_changes (&stmt);
1242 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1244 replace_exp (use, val);
1246 if (TREE_CODE (stmt) == PHI_NODE)
1248 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1249 if (e->flags & EDGE_ABNORMAL)
1251 /* This can only occur for virtual operands, since
1252 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1253 would prevent replacement. */
1254 gcc_assert (!is_gimple_reg (name));
1255 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1260 if (TREE_CODE (stmt) != PHI_NODE)
1264 fold_stmt_inplace (stmt);
1265 if (cfgcleanup_altered_bbs)
1266 bitmap_set_bit (cfgcleanup_altered_bbs, bb_for_stmt (stmt)->index);
1268 /* FIXME. This should go in pop_stmt_changes. */
1269 rhs = get_rhs (stmt);
1270 if (TREE_CODE (rhs) == ADDR_EXPR)
1271 recompute_tree_invariant_for_addr_expr (rhs);
1273 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1275 pop_stmt_changes (&stmt);
1279 gcc_assert (has_zero_uses (name));
1281 /* Also update the trees stored in loop structures. */
1287 FOR_EACH_LOOP (li, loop, 0)
1289 substitute_in_loop_info (loop, name, val);
1294 /* Merge block B into block A. */
1297 tree_merge_blocks (basic_block a, basic_block b)
1299 block_stmt_iterator bsi;
1300 tree_stmt_iterator last;
1304 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1306 /* Remove all single-valued PHI nodes from block B of the form
1307 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1309 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1311 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1313 bool may_replace_uses = may_propagate_copy (def, use);
1315 /* In case we maintain loop closed ssa form, do not propagate arguments
1316 of loop exit phi nodes. */
1318 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1319 && is_gimple_reg (def)
1320 && TREE_CODE (use) == SSA_NAME
1321 && a->loop_father != b->loop_father)
1322 may_replace_uses = false;
1324 if (!may_replace_uses)
1326 gcc_assert (is_gimple_reg (def));
1328 /* Note that just emitting the copies is fine -- there is no problem
1329 with ordering of phi nodes. This is because A is the single
1330 predecessor of B, therefore results of the phi nodes cannot
1331 appear as arguments of the phi nodes. */
1332 copy = build_gimple_modify_stmt (def, use);
1333 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1334 SSA_NAME_DEF_STMT (def) = copy;
1335 remove_phi_node (phi, NULL, false);
1339 /* If we deal with a PHI for virtual operands, we can simply
1340 propagate these without fussing with folding or updating
1341 the stmt. */
1342 if (!is_gimple_reg (def))
1344 imm_use_iterator iter;
1345 use_operand_p use_p;
1348 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1349 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1350 SET_USE (use_p, use);
1353 replace_uses_by (def, use);
1354 remove_phi_node (phi, NULL, true);
1358 /* Ensure that B follows A. */
1359 move_block_after (b, a);
1361 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1362 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1364 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1365 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1367 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1369 tree label = bsi_stmt (bsi);
1371 bsi_remove (&bsi, false);
1372 /* Now that we can thread computed gotos, we might have
1373 a situation where we have a forced label in block B.
1374 However, the label at the start of block B might still be
1375 used in other ways (think about the runtime checking for
1376 Fortran assigned gotos). So we cannot just delete the
1377 label. Instead we move the label to the start of block A. */
1378 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1380 block_stmt_iterator dest_bsi = bsi_start (a);
1381 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1386 change_bb_for_stmt (bsi_stmt (bsi), a);
1391 /* Merge the chains. */
1392 last = tsi_last (bb_stmt_list (a));
1393 tsi_link_after (&last, bb_stmt_list (b), TSI_NEW_STMT);
1394 set_bb_stmt_list (b, NULL_TREE);
1396 if (cfgcleanup_altered_bbs)
1397 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1401 /* Return the one of the two successors of BB that is not reached by a
1402 complex edge, if there is one. Else, return BB. We use
1403 this in optimizations that use post-dominators for their heuristics,
1404 to catch the cases in C++ where function calls are involved. */
1407 single_noncomplex_succ (basic_block bb)
1410 if (EDGE_COUNT (bb->succs) != 2)
1413 e0 = EDGE_SUCC (bb, 0);
1414 e1 = EDGE_SUCC (bb, 1);
1415 if (e0->flags & EDGE_COMPLEX)
1417 if (e1->flags & EDGE_COMPLEX)
1424 /* Walk the function tree removing unnecessary statements.
1426 * Empty statement nodes are removed
1428 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1430 * Unnecessary COND_EXPRs are removed
1432 * Some unnecessary BIND_EXPRs are removed
1434 Clearly more work could be done. The trick is doing the analysis
1435 and removal fast enough to be a net improvement in compile times.
1437 Note that when we remove a control structure such as a COND_EXPR,
1438 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1439 to ensure we eliminate all the useless code. */
1450 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1453 remove_useless_stmts_warn_notreached (tree stmt)
1455 if (EXPR_HAS_LOCATION (stmt))
1457 location_t loc = EXPR_LOCATION (stmt);
1458 if (LOCATION_LINE (loc) > 0)
1460 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
1465 switch (TREE_CODE (stmt))
1467 case STATEMENT_LIST:
1469 tree_stmt_iterator i;
1470 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1471 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1477 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1479 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1481 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1485 case TRY_FINALLY_EXPR:
1486 case TRY_CATCH_EXPR:
1487 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1489 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1494 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1495 case EH_FILTER_EXPR:
1496 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1498 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1501 /* Not a live container. */
1509 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1511 tree then_clause, else_clause, cond;
1512 bool save_has_label, then_has_label, else_has_label;
1514 save_has_label = data->has_label;
1515 data->has_label = false;
1516 data->last_goto = NULL;
1518 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1520 then_has_label = data->has_label;
1521 data->has_label = false;
1522 data->last_goto = NULL;
1524 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1526 else_has_label = data->has_label;
1527 data->has_label = save_has_label | then_has_label | else_has_label;
1529 then_clause = COND_EXPR_THEN (*stmt_p);
1530 else_clause = COND_EXPR_ELSE (*stmt_p);
1531 cond = fold (COND_EXPR_COND (*stmt_p));
1533 /* If neither arm does anything at all, we can remove the whole IF. */
1534 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1536 *stmt_p = build_empty_stmt ();
1537 data->repeat = true;
1540 /* If there are no reachable statements in an arm, then we can
1541 zap the entire conditional. */
1542 else if (integer_nonzerop (cond) && !else_has_label)
1544 if (warn_notreached)
1545 remove_useless_stmts_warn_notreached (else_clause);
1546 *stmt_p = then_clause;
1547 data->repeat = true;
1549 else if (integer_zerop (cond) && !then_has_label)
1551 if (warn_notreached)
1552 remove_useless_stmts_warn_notreached (then_clause);
1553 *stmt_p = else_clause;
1554 data->repeat = true;
1557 /* Check a couple of simple things on then/else with single stmts. */
1560 tree then_stmt = expr_only (then_clause);
1561 tree else_stmt = expr_only (else_clause);
1563 /* Notice branches to a common destination. */
1564 if (then_stmt && else_stmt
1565 && TREE_CODE (then_stmt) == GOTO_EXPR
1566 && TREE_CODE (else_stmt) == GOTO_EXPR
1567 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1569 *stmt_p = then_stmt;
1570 data->repeat = true;
1573 /* If the THEN/ELSE clause merely assigns a value to a variable or
1574 parameter which is already known to contain that value, then
1575 remove the useless THEN/ELSE clause. */
1576 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1579 && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
1580 && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
1581 && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
1582 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1584 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1585 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1586 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1587 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1589 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1590 ? then_stmt : else_stmt);
1591 tree *location = (TREE_CODE (cond) == EQ_EXPR
1592 ? &COND_EXPR_THEN (*stmt_p)
1593 : &COND_EXPR_ELSE (*stmt_p));
1596 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1597 && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1598 && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1599 *location = alloc_stmt_list ();
1603 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1604 would be re-introduced during lowering. */
1605 data->last_goto = NULL;
1610 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1612 bool save_may_branch, save_may_throw;
1613 bool this_may_branch, this_may_throw;
1615 /* Collect may_branch and may_throw information for the body only. */
1616 save_may_branch = data->may_branch;
1617 save_may_throw = data->may_throw;
1618 data->may_branch = false;
1619 data->may_throw = false;
1620 data->last_goto = NULL;
1622 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1624 this_may_branch = data->may_branch;
1625 this_may_throw = data->may_throw;
1626 data->may_branch |= save_may_branch;
1627 data->may_throw |= save_may_throw;
1628 data->last_goto = NULL;
1630 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1632 /* If the body is empty, then we can emit the FINALLY block without
1633 the enclosing TRY_FINALLY_EXPR. */
1634 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1636 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1637 data->repeat = true;
1640 /* If the handler is empty, then we can emit the TRY block without
1641 the enclosing TRY_FINALLY_EXPR. */
1642 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1644 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1645 data->repeat = true;
1648 /* If the body neither throws, nor branches, then we can safely
1649 string the TRY and FINALLY blocks together. */
1650 else if (!this_may_branch && !this_may_throw)
1652 tree stmt = *stmt_p;
1653 *stmt_p = TREE_OPERAND (stmt, 0);
1654 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1655 data->repeat = true;
1661 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1663 bool save_may_throw, this_may_throw;
1664 tree_stmt_iterator i;
1667 /* Collect may_throw information for the body only. */
1668 save_may_throw = data->may_throw;
1669 data->may_throw = false;
1670 data->last_goto = NULL;
1672 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1674 this_may_throw = data->may_throw;
1675 data->may_throw = save_may_throw;
1677 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1678 if (!this_may_throw)
1680 if (warn_notreached)
1681 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1682 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1683 data->repeat = true;
1687 /* Process the catch clause specially. We may be able to tell that
1688 no exceptions propagate past this point. */
1690 this_may_throw = true;
1691 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1692 stmt = tsi_stmt (i);
1693 data->last_goto = NULL;
1695 switch (TREE_CODE (stmt))
1698 for (; !tsi_end_p (i); tsi_next (&i))
1700 stmt = tsi_stmt (i);
1701 /* If we catch all exceptions, then the body does not
1702 propagate exceptions past this point. */
1703 if (CATCH_TYPES (stmt) == NULL)
1704 this_may_throw = false;
1705 data->last_goto = NULL;
1706 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1710 case EH_FILTER_EXPR:
1711 if (EH_FILTER_MUST_NOT_THROW (stmt))
1712 this_may_throw = false;
1713 else if (EH_FILTER_TYPES (stmt) == NULL)
1714 this_may_throw = false;
1715 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1719 /* Otherwise this is a cleanup. */
1720 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1722 /* If the cleanup is empty, then we can emit the TRY block without
1723 the enclosing TRY_CATCH_EXPR. */
1724 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1726 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1727 data->repeat = true;
1731 data->may_throw |= this_may_throw;
1736 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1740 /* First remove anything underneath the BIND_EXPR. */
1741 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1743 /* If the BIND_EXPR has no variables, then we can pull everything
1744 up one level and remove the BIND_EXPR, unless this is the toplevel
1745 BIND_EXPR for the current function or an inlined function.
1747 When this situation occurs we will want to apply this
1748 optimization again. */
1749 block = BIND_EXPR_BLOCK (*stmt_p);
1750 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1751 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1753 || ! BLOCK_ABSTRACT_ORIGIN (block)
1754 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1757 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1758 data->repeat = true;
1764 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1766 tree dest = GOTO_DESTINATION (*stmt_p);
1768 data->may_branch = true;
1769 data->last_goto = NULL;
1771 /* Record the last goto expr, so that we can delete it if unnecessary. */
1772 if (TREE_CODE (dest) == LABEL_DECL)
1773 data->last_goto = stmt_p;
1778 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1780 tree label = LABEL_EXPR_LABEL (*stmt_p);
1782 data->has_label = true;
1784 /* We do want to jump across non-local label receiver code. */
1785 if (DECL_NONLOCAL (label))
1786 data->last_goto = NULL;
1788 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1790 *data->last_goto = build_empty_stmt ();
1791 data->repeat = true;
1794 /* ??? Add something here to delete unused labels. */
1798 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1799 decl. This allows us to eliminate redundant or useless
1800 calls to "const" functions.
1802 The gimplifier already does the same operation, but we may notice functions
1803 being const and pure once their calls have been gimplified, so we need
1804 to update the flag. */
1807 update_call_expr_flags (tree call)
1809 tree decl = get_callee_fndecl (call);
1813 flags = call_expr_flags (call);
1814 if (flags & (ECF_CONST | ECF_PURE) && !(flags & ECF_LOOPING_CONST_OR_PURE))
1815 TREE_SIDE_EFFECTS (call) = 0;
1816 if (TREE_NOTHROW (decl))
1817 TREE_NOTHROW (call) = 1;
1821 /* T is a CALL_EXPR. Set current_function_calls_* flags. */
1824 notice_special_calls (tree t)
1826 int flags = call_expr_flags (t);
1828 if (flags & ECF_MAY_BE_ALLOCA)
1829 cfun->calls_alloca = true;
1830 if (flags & ECF_RETURNS_TWICE)
1831 cfun->calls_setjmp = true;
1835 /* Clear flags set by notice_special_calls. Used by dead code removal
1836 to update the flags. */
1839 clear_special_calls (void)
1841 cfun->calls_alloca = false;
1842 cfun->calls_setjmp = false;
1847 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1851 switch (TREE_CODE (t))
1854 remove_useless_stmts_cond (tp, data);
1857 case TRY_FINALLY_EXPR:
1858 remove_useless_stmts_tf (tp, data);
1861 case TRY_CATCH_EXPR:
1862 remove_useless_stmts_tc (tp, data);
1866 remove_useless_stmts_bind (tp, data);
1870 remove_useless_stmts_goto (tp, data);
1874 remove_useless_stmts_label (tp, data);
1879 data->last_goto = NULL;
1880 data->may_branch = true;
1885 data->last_goto = NULL;
1886 notice_special_calls (t);
1887 update_call_expr_flags (t);
1888 if (tree_could_throw_p (t))
1889 data->may_throw = true;
1895 case GIMPLE_MODIFY_STMT:
1896 data->last_goto = NULL;
1898 op = get_call_expr_in (t);
1901 update_call_expr_flags (op);
1902 notice_special_calls (op);
1904 if (tree_could_throw_p (t))
1905 data->may_throw = true;
1908 case STATEMENT_LIST:
1910 tree_stmt_iterator i = tsi_start (t);
1911 while (!tsi_end_p (i))
1914 if (IS_EMPTY_STMT (t))
1920 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1923 if (TREE_CODE (t) == STATEMENT_LIST)
1925 tsi_link_before (&i, t, TSI_SAME_STMT);
1935 data->last_goto = NULL;
1939 /* Make sure the outermost BIND_EXPR in OMP_BODY isn't removed
1940 as useless. */
1941 remove_useless_stmts_1 (&BIND_EXPR_BODY (OMP_BODY (*tp)), data);
1942 data->last_goto = NULL;
1951 remove_useless_stmts_1 (&OMP_BODY (*tp), data);
1952 data->last_goto = NULL;
1956 remove_useless_stmts_1 (&OMP_FOR_BODY (*tp), data);
1957 data->last_goto = NULL;
1958 if (OMP_FOR_PRE_BODY (*tp))
1960 remove_useless_stmts_1 (&OMP_FOR_PRE_BODY (*tp), data);
1961 data->last_goto = NULL;
1966 data->last_goto = NULL;
1972 remove_useless_stmts (void)
1974 struct rus_data data;
1976 clear_special_calls ();
1980 memset (&data, 0, sizeof (data));
1981 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1983 while (data.repeat);
1988 struct gimple_opt_pass pass_remove_useless_stmts =
1992 "useless", /* name */
1994 remove_useless_stmts, /* execute */
1997 0, /* static_pass_number */
1999 PROP_gimple_any, /* properties_required */
2000 0, /* properties_provided */
2001 0, /* properties_destroyed */
2002 0, /* todo_flags_start */
2003 TODO_dump_func /* todo_flags_finish */
2007 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2010 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2014 /* Since this block is no longer reachable, we can just delete all
2015 of its PHI nodes. */
2016 phi = phi_nodes (bb);
2019 tree next = PHI_CHAIN (phi);
2020 remove_phi_node (phi, NULL_TREE, true);
2024 /* Remove edges to BB's successors. */
2025 while (EDGE_COUNT (bb->succs) > 0)
2026 remove_edge (EDGE_SUCC (bb, 0));
2030 /* Remove statements of basic block BB. */
2033 remove_bb (basic_block bb)
2035 block_stmt_iterator i;
2036 source_location loc = UNKNOWN_LOCATION;
2040 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2041 if (dump_flags & TDF_DETAILS)
2043 dump_bb (bb, dump_file, 0);
2044 fprintf (dump_file, "\n");
2050 struct loop *loop = bb->loop_father;
2052 /* If a loop gets removed, clean up the information associated
2054 if (loop->latch == bb
2055 || loop->header == bb)
2056 free_numbers_of_iterations_estimates_loop (loop);
2059 /* Remove all the instructions in the block. */
2060 if (bb_stmt_list (bb) != NULL_TREE)
2062 for (i = bsi_start (bb); !bsi_end_p (i);)
2064 tree stmt = bsi_stmt (i);
2065 if (TREE_CODE (stmt) == LABEL_EXPR
2066 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2067 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2070 block_stmt_iterator new_bsi;
2072 /* A non-reachable non-local label may still be referenced.
2073 But it no longer needs to carry the extra semantics of
2074 non-locality. */
2075 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2077 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2078 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2081 new_bb = bb->prev_bb;
2082 new_bsi = bsi_start (new_bb);
2083 bsi_remove (&i, false);
2084 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2088 /* Release SSA definitions if we are in SSA. Note that we
2089 may be called when not in SSA. For example,
2090 final_cleanup calls this function via
2091 cleanup_tree_cfg. */
2092 if (gimple_in_ssa_p (cfun))
2093 release_defs (stmt);
2095 bsi_remove (&i, true);
2098 /* Don't warn for removed gotos. Gotos are often removed due to
2099 jump threading, thus resulting in bogus warnings. Not great,
2100 since this way we lose warnings for gotos in the original
2101 program that are indeed unreachable. */
2102 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2104 if (EXPR_HAS_LOCATION (stmt))
2105 loc = EXPR_LOCATION (stmt);
2110 /* If requested, give a warning that the first statement in the
2111 block is unreachable. We walk statements backwards in the
2112 loop above, so the last statement we process is the first statement
2113 in the block. */
2114 if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
2115 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2117 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2122 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2123 predicate VAL, return the edge that will be taken out of the block.
2124 If VAL does not match a unique edge, NULL is returned. */
2127 find_taken_edge (basic_block bb, tree val)
2131 stmt = last_stmt (bb);
2134 gcc_assert (is_ctrl_stmt (stmt));
2137 if (! is_gimple_min_invariant (val))
2140 if (TREE_CODE (stmt) == COND_EXPR)
2141 return find_taken_edge_cond_expr (bb, val);
2143 if (TREE_CODE (stmt) == SWITCH_EXPR)
2144 return find_taken_edge_switch_expr (bb, val);
2146 if (computed_goto_p (stmt))
2148 /* Only optimize if the argument is a label; if the argument is
2149 not a label then we cannot construct a proper CFG.
2151 It may be the case that we only need to allow the LABEL_REF to
2152 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2153 appear inside a LABEL_EXPR just to be safe. */
2154 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2155 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2156 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
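/* Example (illustrative): if the COND_EXPR guarding BB has been folded to
   boolean_false_node, find_taken_edge (bb, val) returns the
   EDGE_FALSE_VALUE successor; for a SWITCH_EXPR driven by an INTEGER_CST
   the edge of the matching case label is returned; a VAL that is not a
   GIMPLE minimum invariant yields NULL, meaning no unique edge is known. */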
2163 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2164 statement, determine which of the outgoing edges will be taken out of the
2165 block. Return NULL if either edge may be taken. */
2168 find_taken_edge_computed_goto (basic_block bb, tree val)
2173 dest = label_to_block (val);
2176 e = find_edge (bb, dest);
2177 gcc_assert (e != NULL);
2183 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2184 statement, determine which of the two edges will be taken out of the
2185 block. Return NULL if either edge may be taken. */
2188 find_taken_edge_cond_expr (basic_block bb, tree val)
2190 edge true_edge, false_edge;
2192 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2194 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2195 return (integer_zerop (val) ? false_edge : true_edge);
2198 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2199 statement, determine which edge will be taken out of the block. Return
2200 NULL if any edge may be taken. */
2203 find_taken_edge_switch_expr (basic_block bb, tree val)
2205 tree switch_expr, taken_case;
2206 basic_block dest_bb;
2209 switch_expr = last_stmt (bb);
2210 taken_case = find_case_label_for_value (switch_expr, val);
2211 dest_bb = label_to_block (CASE_LABEL (taken_case));
2213 e = find_edge (bb, dest_bb);
2219 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2220 We can make optimal use here of the fact that the case labels are
2221 sorted: We can do a binary search for a case matching VAL. */
2224 find_case_label_for_value (tree switch_expr, tree val)
2226 tree vec = SWITCH_LABELS (switch_expr);
2227 size_t low, high, n = TREE_VEC_LENGTH (vec);
2228 tree default_case = TREE_VEC_ELT (vec, n - 1);
2230 for (low = -1, high = n - 1; high - low > 1; )
2232 size_t i = (high + low) / 2;
2233 tree t = TREE_VEC_ELT (vec, i);
2236 /* Cache the result of comparing CASE_LOW and val. */
2237 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2244 if (CASE_HIGH (t) == NULL)
2246 /* A single-valued case label. */
2252 /* A case range. We can only handle integer ranges. */
2253 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2258 return default_case;
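/* Worked example (illustrative): with the sorted vector
     1:, 3 ... 5:, 9:, default:
   and VAL == 4, the first probe lands on the 3 ... 5 entry; CASE_LOW (3)
   compares <= VAL and CASE_HIGH (5) compares >= VAL, so that range is
   returned immediately. For VAL == 7 the probes at 3 ... 5 and at 9 both
   fail, the search interval closes, and the default case is returned. */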
2264 /*---------------------------------------------------------------------------
2266 ---------------------------------------------------------------------------*/
2268 /* Dump tree-specific information of block BB to file OUTF. */
2271 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2273 dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
2277 /* Dump a basic block on stderr. */
2280 debug_tree_bb (basic_block bb)
2282 dump_bb (bb, stderr, 0);
2286 /* Dump basic block with index N on stderr. */
2289 debug_tree_bb_n (int n)
2291 debug_tree_bb (BASIC_BLOCK (n));
2292 return BASIC_BLOCK (n);
2296 /* Dump the CFG on stderr.
2298 FLAGS are the same as those used by the tree dumping functions
2299 (see TDF_* in tree-pass.h). */
2302 debug_tree_cfg (int flags)
2304 dump_tree_cfg (stderr, flags);
2308 /* Dump the program showing basic block boundaries on the given FILE.
2310 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2311 tree-pass.h). */
2314 dump_tree_cfg (FILE *file, int flags)
2316 if (flags & TDF_DETAILS)
2318 const char *funcname
2319 = lang_hooks.decl_printable_name (current_function_decl, 2);
2322 fprintf (file, ";; Function %s\n\n", funcname);
2323 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2324 n_basic_blocks, n_edges, last_basic_block);
2326 brief_dump_cfg (file);
2327 fprintf (file, "\n");
2330 if (flags & TDF_STATS)
2331 dump_cfg_stats (file);
2333 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2337 /* Dump CFG statistics on FILE. */
2340 dump_cfg_stats (FILE *file)
2342 static long max_num_merged_labels = 0;
2343 unsigned long size, total = 0;
2346 const char * const fmt_str = "%-30s%-13s%12s\n";
2347 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2348 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2349 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2350 const char *funcname
2351 = lang_hooks.decl_printable_name (current_function_decl, 2);
2354 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2356 fprintf (file, "---------------------------------------------------------\n");
2357 fprintf (file, fmt_str, "", " Number of ", "Memory");
2358 fprintf (file, fmt_str, "", " instances ", "used ");
2359 fprintf (file, "---------------------------------------------------------\n");
2361 size = n_basic_blocks * sizeof (struct basic_block_def);
2363 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2364 SCALE (size), LABEL (size));
2368 num_edges += EDGE_COUNT (bb->succs);
2369 size = num_edges * sizeof (struct edge_def);
2371 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2373 fprintf (file, "---------------------------------------------------------\n");
2374 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2376 fprintf (file, "---------------------------------------------------------\n");
2377 fprintf (file, "\n");
2379 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2380 max_num_merged_labels = cfg_stats.num_merged_labels;
2382 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2383 cfg_stats.num_merged_labels, max_num_merged_labels);
2385 fprintf (file, "\n");
2389 /* Dump CFG statistics on stderr. Keep extern so that it's always
2390 linked in the final executable. */
2393 debug_cfg_stats (void)
2395 dump_cfg_stats (stderr);
2399 /* Dump the flowgraph to a .vcg FILE. */
2402 tree_cfg2vcg (FILE *file)
2407 const char *funcname
2408 = lang_hooks.decl_printable_name (current_function_decl, 2);
2410 /* Write the file header. */
2411 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2412 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2413 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2415 /* Write blocks and edges. */
2416 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2418 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2421 if (e->flags & EDGE_FAKE)
2422 fprintf (file, " linestyle: dotted priority: 10");
2424 fprintf (file, " linestyle: solid priority: 100");
2426 fprintf (file, " }\n");
2432 enum tree_code head_code, end_code;
2433 const char *head_name, *end_name;
2436 tree first = first_stmt (bb);
2437 tree last = last_stmt (bb);
2441 head_code = TREE_CODE (first);
2442 head_name = tree_code_name[head_code];
2443 head_line = get_lineno (first);
2446 head_name = "no-statement";
2450 end_code = TREE_CODE (last);
2451 end_name = tree_code_name[end_code];
2452 end_line = get_lineno (last);
2455 end_name = "no-statement";
2457 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2458 bb->index, bb->index, head_name, head_line, end_name,
2461 FOR_EACH_EDGE (e, ei, bb->succs)
2463 if (e->dest == EXIT_BLOCK_PTR)
2464 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2466 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2468 if (e->flags & EDGE_FAKE)
2469 fprintf (file, " priority: 10 linestyle: dotted");
2471 fprintf (file, " priority: 100 linestyle: solid");
2473 fprintf (file, " }\n");
2476 if (bb->next_bb != EXIT_BLOCK_PTR)
2480 fputs ("}\n\n", file);
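/* Illustrative, standalone sketch (not part of GCC): tree_cfg2vcg above
   emits the textual VCG graph format.  The toy program below produces a
   three-node graph in the same syntax; the node names and edge list are
   made up for the example.  */
#if 0
#include <stdio.h>

int
main (void)
{
  static const int edges[][2] = { { 0, 1 }, { 0, 2 }, { 1, 2 } };
  int i;

  printf ("graph: { title: \"toy_cfg\"\n");
  for (i = 0; i < 3; i++)
    printf ("node: { title: \"%d\" label: \"bb %d\" }\n", i, i);
  for (i = 0; i < (int) (sizeof edges / sizeof edges[0]); i++)
    printf ("edge: { sourcename: \"%d\" targetname: \"%d\""
            " linestyle: solid priority: 100 }\n",
            edges[i][0], edges[i][1]);
  printf ("}\n");
  return 0;
}
#endif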
2485 /*---------------------------------------------------------------------------
2486 Miscellaneous helpers
2487 ---------------------------------------------------------------------------*/
2489 /* Return true if T represents a stmt that always transfers control. */
2492 is_ctrl_stmt (const_tree t)
2494 return (TREE_CODE (t) == COND_EXPR
2495 || TREE_CODE (t) == SWITCH_EXPR
2496 || TREE_CODE (t) == GOTO_EXPR
2497 || TREE_CODE (t) == RETURN_EXPR
2498 || TREE_CODE (t) == RESX_EXPR);
2502 /* Return true if T is a statement that may alter the flow of control
2503 (e.g., a call to a non-returning function). */
2506 is_ctrl_altering_stmt (const_tree t)
2511 call = get_call_expr_in (CONST_CAST_TREE (t));
2514 /* A non-pure/const CALL_EXPR alters flow control if the current
2515 function has nonlocal labels. */
2516 if (TREE_SIDE_EFFECTS (call) && cfun->has_nonlocal_label)
2519 /* A CALL_EXPR also alters control flow if it does not return. */
2520 if (call_expr_flags (call) & ECF_NORETURN)
2524 /* OpenMP directives alter control flow. */
2525 if (OMP_DIRECTIVE_P (t))
2528 /* If a statement can throw, it alters control flow. */
2529 return tree_can_throw_internal (t);
2533 /* Return true if T is a computed goto. */
2536 computed_goto_p (const_tree t)
2538 return (TREE_CODE (t) == GOTO_EXPR
2539 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2543 /* Return true if T is a simple local goto. */
2546 simple_goto_p (const_tree t)
2548 return (TREE_CODE (t) == GOTO_EXPR
2549 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2553 /* Return true if T can make an abnormal transfer of control flow.
2554 Transfers of control flow associated with EH are excluded. */
2557 tree_can_make_abnormal_goto (const_tree t)
2559 if (computed_goto_p (t))
2561 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
2562 t = GIMPLE_STMT_OPERAND (t, 1);
2563 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2564 t = TREE_OPERAND (t, 0);
2565 if (TREE_CODE (t) == CALL_EXPR)
2566 return TREE_SIDE_EFFECTS (t) && cfun->has_nonlocal_label;
2571 /* Return true if T should start a new basic block. PREV_T is the
2572 statement preceding T. It is used when T is a label or a case label.
2573 Labels should only start a new basic block if their previous statement
2574 wasn't a label. Otherwise, a sequence of labels would generate
2575 unnecessary basic blocks that only contain a single label. */
2578 stmt_starts_bb_p (const_tree t, const_tree prev_t)
2583 /* LABEL_EXPRs start a new basic block only if the preceding
2584 statement wasn't a label of the same type. This prevents the
2585 creation of consecutive blocks that have nothing but a single label. */
2587 if (TREE_CODE (t) == LABEL_EXPR)
2589 /* Nonlocal and computed GOTO targets always start a new block. */
2590 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2591 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2594 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2596 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2599 cfg_stats.num_merged_labels++;
2610 /* Return true if T should end a basic block. */
2613 stmt_ends_bb_p (const_tree t)
2615 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
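/* Illustrative, standalone sketch (not part of GCC): the predicates above
   (stmt_starts_bb_p, stmt_ends_bb_p) drive the partitioning of a linear
   statement stream into basic blocks.  The toy program below shows that
   shape of decision on a made-up statement kind; toy_stmt and the toy_*
   predicates are not GCC types.  */
#if 0
#include <stdio.h>

enum toy_kind { TOY_LABEL, TOY_ASSIGN, TOY_GOTO, TOY_RETURN };

struct toy_stmt { enum toy_kind kind; };

static int
toy_starts_bb_p (const struct toy_stmt *t, const struct toy_stmt *prev)
{
  /* A label starts a block unless the previous statement was a label too.  */
  return t->kind == TOY_LABEL && (prev == NULL || prev->kind != TOY_LABEL);
}

static int
toy_ends_bb_p (const struct toy_stmt *t)
{
  /* Control transfers end the current block.  */
  return t->kind == TOY_GOTO || t->kind == TOY_RETURN;
}

int
main (void)
{
  struct toy_stmt prog[] = {
    { TOY_LABEL }, { TOY_ASSIGN }, { TOY_GOTO },
    { TOY_LABEL }, { TOY_LABEL }, { TOY_ASSIGN }, { TOY_RETURN }
  };
  size_t n = sizeof prog / sizeof prog[0], i;
  int bb = -1;

  for (i = 0; i < n; i++)
    {
      if (bb < 0
          || toy_starts_bb_p (&prog[i], i ? &prog[i - 1] : NULL)
          || (i > 0 && toy_ends_bb_p (&prog[i - 1])))
        bb++;
      printf ("stmt %d -> bb %d\n", (int) i, bb);
    }
  return 0;
}
#endif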
2618 /* Remove block annotations and other data structures. */
2621 delete_tree_cfg_annotations (void)
2624 block_stmt_iterator bsi;
2626 /* Remove annotations from every tree in the function. */
2628 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2630 tree stmt = bsi_stmt (bsi);
2631 ggc_free (stmt->base.ann);
2632 stmt->base.ann = NULL;
2634 label_to_block_map = NULL;
2638 /* Return the first statement in basic block BB. */
2641 first_stmt (basic_block bb)
2643 block_stmt_iterator i = bsi_start (bb);
2644 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2647 /* Return the last statement in basic block BB. */
2650 last_stmt (basic_block bb)
2652 block_stmt_iterator b = bsi_last (bb);
2653 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2656 /* Return the last statement of an otherwise empty block. Return NULL
2657 if the block is totally empty, or if it contains more than one statement. */
2661 last_and_only_stmt (basic_block bb)
2663 block_stmt_iterator i = bsi_last (bb);
2669 last = bsi_stmt (i);
2674 /* Empty statements should no longer appear in the instruction stream.
2675 Everything that might have appeared before should be deleted by
2676 remove_useless_stmts, and the optimizers should just bsi_remove
2677 instead of smashing with build_empty_stmt.
2679 Thus the only thing that should appear here in a block containing
2680 one executable statement is a label. */
2681 prev = bsi_stmt (i);
2682 if (TREE_CODE (prev) == LABEL_EXPR)
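/* Illustrative, standalone sketch (not part of GCC): the check performed by
   last_and_only_stmt above -- accept a block whose only executable statement
   may be preceded by a single label.  The toy_* names below are made up for
   the example.  */
#if 0
#include <stddef.h>

static int
toy_is_label (const char *stmt)
{
  return stmt[0] == 'L';  /* Toy convention: labels start with 'L'.  */
}

static const char *
toy_last_and_only_stmt (const char **stmts, size_t n)
{
  if (n == 0)
    return NULL;
  if (n == 1)
    return stmts[0];
  /* Two statements are acceptable only if the first one is a label.  */
  if (n == 2 && toy_is_label (stmts[0]))
    return stmts[1];
  return NULL;
}
#endif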
2689 /* Mark BB as the basic block holding statement T. */
2692 set_bb_for_stmt (tree t, basic_block bb)
2694 if (TREE_CODE (t) == PHI_NODE)
2696 else if (TREE_CODE (t) == STATEMENT_LIST)
2698 tree_stmt_iterator i;
2699 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2700 set_bb_for_stmt (tsi_stmt (i), bb);
2704 stmt_ann_t ann = get_stmt_ann (t);
2707 /* If the statement is a label, add the label to the block-to-labels map
2708 so that we can speed up edge creation for GOTO_EXPRs. */
2709 if (TREE_CODE (t) == LABEL_EXPR)
2713 t = LABEL_EXPR_LABEL (t);
2714 uid = LABEL_DECL_UID (t);
2717 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2718 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2719 if (old_len <= (unsigned) uid)
2721 unsigned new_len = 3 * uid / 2;
2723 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2728 /* We're moving an existing label. Make sure that we've
2729 removed it from the old block. */
2731 || !VEC_index (basic_block, label_to_block_map, uid));
2732 VEC_replace (basic_block, label_to_block_map, uid, bb);
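/* Illustrative, standalone sketch (not part of GCC): the growth policy used
   for label_to_block_map above -- when a uid falls past the end of the map,
   grow it to roughly 3 * uid / 2 and zero-clear the new tail so unset slots
   read as NULL.  The toy_map type below is made up for the example; M must
   start out zero-initialized.  */
#if 0
#include <stdlib.h>
#include <string.h>

struct toy_map
{
  void **slot;
  unsigned len;
};

static void
toy_map_set (struct toy_map *m, unsigned uid, void *value)
{
  if (uid >= m->len)
    {
      /* Grow by about 1.5x so repeated insertions stay amortized O(1).  */
      unsigned new_len = 3 * uid / 2 + 1;
      void **p = (void **) realloc (m->slot, new_len * sizeof (void *));
      if (!p)
        abort ();
      memset (p + m->len, 0, (new_len - m->len) * sizeof (void *));
      m->slot = p;
      m->len = new_len;
    }
  m->slot[uid] = value;
}
#endif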
2737 /* Faster version of set_bb_for_stmt that assumes the statement is being moved
2738 from one basic block to another.
2739 For BB splitting we can run into a quadratic case, so performance is quite
2740 important; knowing that the tables are big enough, change_bb_for_stmt
2741 can be inlined as a leaf function. */
2743 change_bb_for_stmt (tree t, basic_block bb)
2745 get_stmt_ann (t)->bb = bb;
2746 if (TREE_CODE (t) == LABEL_EXPR)
2747 VEC_replace (basic_block, label_to_block_map,
2748 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2751 /* Finds the iterator for STMT. */
2753 extern block_stmt_iterator
2754 bsi_for_stmt (tree stmt)
2756 block_stmt_iterator bsi;
2758 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2759 if (bsi_stmt (bsi) == stmt)
2765 /* Mark statement T as modified, and update it. */
2767 update_modified_stmts (tree t)
2769 if (!ssa_operands_active ())
2771 if (TREE_CODE (t) == STATEMENT_LIST)
2773 tree_stmt_iterator i;
2775 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2777 stmt = tsi_stmt (i);
2778 update_stmt_if_modified (stmt);
2782 update_stmt_if_modified (t);
2785 /* Insert statement (or statement list) T before the statement
2786 pointed-to by iterator I. M specifies how to update iterator I
2787 after insertion (see enum bsi_iterator_update). */
2790 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2792 set_bb_for_stmt (t, i->bb);
2793 update_modified_stmts (t);
2794 tsi_link_before (&i->tsi, t, m);
2798 /* Insert statement (or statement list) T after the statement
2799 pointed-to by iterator I. M specifies how to update iterator I
2800 after insertion (see enum bsi_iterator_update). */
2803 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2805 set_bb_for_stmt (t, i->bb);
2806 update_modified_stmts (t);
2807 tsi_link_after (&i->tsi, t, m);
2811 /* Remove the statement pointed to by iterator I. The iterator is updated
2812 to the next statement.
2814 When REMOVE_EH_INFO is true we remove the statement pointed to by
2815 iterator I from the EH tables. Otherwise we do not modify the EH tables.
2818 Generally, REMOVE_EH_INFO should be true when the statement is going to
2819 be removed from the IL and not reinserted elsewhere. */
2822 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2824 tree t = bsi_stmt (*i);
2825 set_bb_for_stmt (t, NULL);
2826 delink_stmt_imm_use (t);
2827 tsi_delink (&i->tsi);
2828 mark_stmt_modified (t);
2831 remove_stmt_from_eh_region (t);
2832 gimple_remove_stmt_histograms (cfun, t);
2837 /* Move the statement at FROM so it comes right after the statement at TO. */
2840 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2842 tree stmt = bsi_stmt (*from);
2843 bsi_remove (from, false);
2844 /* We must have BSI_NEW_STMT here, as bsi_move_after is sometimes used to
2845 move statements to an empty block. */
2846 bsi_insert_after (to, stmt, BSI_NEW_STMT);
2850 /* Move the statement at FROM so it comes right before the statement at TO. */
2853 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2855 tree stmt = bsi_stmt (*from);
2856 bsi_remove (from, false);
2857 /* For consistency with bsi_move_after, it might be better to have
2858 BSI_NEW_STMT here; however, that breaks several places that expect
2859 that TO does not change. */
2860 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2864 /* Move the statement at FROM to the end of basic block BB. */
2867 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2869 block_stmt_iterator last = bsi_last (bb);
2871 /* Have to check bsi_end_p because it could be an empty block. */
2872 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2873 bsi_move_before (from, &last);
2875 bsi_move_after (from, &last);
2879 /* Replace the contents of the statement pointed to by iterator BSI
2880 with STMT. If UPDATE_EH_INFO is true, the exception handling
2881 information of the original statement is moved to the new statement. */
2884 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2887 tree orig_stmt = bsi_stmt (*bsi);
2889 if (stmt == orig_stmt)
2891 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2892 set_bb_for_stmt (stmt, bsi->bb);
2894 /* Preserve EH region information from the original statement, if
2895 requested by the caller. */
2898 eh_region = lookup_stmt_eh_region (orig_stmt);
2901 remove_stmt_from_eh_region (orig_stmt);
2902 add_stmt_to_eh_region (stmt, eh_region);
2906 gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
2907 gimple_remove_stmt_histograms (cfun, orig_stmt);
2908 delink_stmt_imm_use (orig_stmt);
2909 *bsi_stmt_ptr (*bsi) = stmt;
2910 mark_stmt_modified (stmt);
2911 update_modified_stmts (stmt);
2915 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2916 is made to place the statement in an existing basic block, but
2917 sometimes that isn't possible. When it isn't possible, the edge is
2918 split and the statement is added to the new block.
2920 In all cases, the returned *BSI points to the correct location. The
2921 return value is true if insertion should be done after the location,
2922 or false if it should be done before the location. If a new basic block
2923 has to be created, it is stored in *NEW_BB. */
2926 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2927 basic_block *new_bb)
2929 basic_block dest, src;
2935 /* If the destination has one predecessor which has no PHI nodes,
2936 insert there. Except for the exit block.
2938 The requirement for no PHI nodes could be relaxed. Basically we
2939 would have to examine the PHIs to prove that none of them used
2940 the value set by the statement we want to insert on E. That
2941 hardly seems worth the effort. */
2942 if (single_pred_p (dest)
2943 && ! phi_nodes (dest)
2944 && dest != EXIT_BLOCK_PTR)
2946 *bsi = bsi_start (dest);
2947 if (bsi_end_p (*bsi))
2950 /* Make sure we insert after any leading labels. */
2951 tmp = bsi_stmt (*bsi);
2952 while (TREE_CODE (tmp) == LABEL_EXPR)
2955 if (bsi_end_p (*bsi))
2957 tmp = bsi_stmt (*bsi);
2960 if (bsi_end_p (*bsi))
2962 *bsi = bsi_last (dest);
2969 /* If the source has one successor, the edge is not abnormal, and
2970 the last statement does not end a basic block, insert there.
2971 Except for the entry block. */
2973 if ((e->flags & EDGE_ABNORMAL) == 0
2974 && single_succ_p (src)
2975 && src != ENTRY_BLOCK_PTR)
2977 *bsi = bsi_last (src);
2978 if (bsi_end_p (*bsi))
2981 tmp = bsi_stmt (*bsi);
2982 if (!stmt_ends_bb_p (tmp))
2985 /* Insert code just before returning the value. We may need to decompose
2986 the return in case it contains a non-trivial operand. */
2987 if (TREE_CODE (tmp) == RETURN_EXPR)
2989 tree op = TREE_OPERAND (tmp, 0);
2990 if (op && !is_gimple_val (op))
2992 gcc_assert (TREE_CODE (op) == GIMPLE_MODIFY_STMT);
2993 bsi_insert_before (bsi, op, BSI_NEW_STMT);
2994 TREE_OPERAND (tmp, 0) = GIMPLE_STMT_OPERAND (op, 0);
3001 /* Otherwise, create a new basic block, and split this edge. */
3002 dest = split_edge (e);
3005 e = single_pred_edge (dest);
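/* Illustrative, standalone sketch (not part of GCC): the placement decision
   made by tree_find_edge_insert_loc above, reduced to a toy edge description
   -- prefer the start of the destination, then the end of the source, and
   fall back to splitting the edge.  The toy_edge fields below are made up
   for the example.  */
#if 0
enum toy_place { PLACE_DEST_START, PLACE_SRC_END, PLACE_NEW_BLOCK };

struct toy_edge
{
  int dest_pred_count;     /* Number of predecessors of the destination.  */
  int dest_has_phis;       /* Destination starts with PHI nodes.  */
  int dest_is_exit;        /* Destination is the exit block.  */
  int src_succ_count;      /* Number of successors of the source.  */
  int src_is_entry;        /* Source is the entry block.  */
  int src_ends_in_control; /* Source's last statement ends its block.  */
  int is_abnormal;         /* Edge is abnormal.  */
};

static enum toy_place
toy_edge_insert_loc (const struct toy_edge *e)
{
  if (e->dest_pred_count == 1 && !e->dest_has_phis && !e->dest_is_exit)
    return PLACE_DEST_START;
  if (!e->is_abnormal && e->src_succ_count == 1 && !e->src_is_entry
      && !e->src_ends_in_control)
    return PLACE_SRC_END;
  /* Otherwise a new block has to be created by splitting the edge.  */
  return PLACE_NEW_BLOCK;
}
#endif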
3010 /* This routine will commit all pending edge insertions, creating any new
3011 basic blocks which are necessary. */
3014 bsi_commit_edge_inserts (void)
3020 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3023 FOR_EACH_EDGE (e, ei, bb->succs)
3024 bsi_commit_one_edge_insert (e, NULL);
3028 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3029 to this block, otherwise set it to NULL. */
3032 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3036 if (PENDING_STMT (e))
3038 block_stmt_iterator bsi;
3039 tree stmt = PENDING_STMT (e);
3041 PENDING_STMT (e) = NULL_TREE;
3043 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3044 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3046 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3051 /* Add STMT to the pending list of edge E. No actual insertion is
3052 made until a call to bsi_commit_edge_inserts () is made. */
3055 bsi_insert_on_edge (edge e, tree stmt)
3057 append_to_statement_list (stmt, &PENDING_STMT (e));
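/* Illustrative, standalone sketch (not part of GCC): the deferred insertion
   pattern used by bsi_insert_on_edge and bsi_commit_edge_inserts above --
   queue work on each edge, then flush every queue in a single later pass.
   The toy_pending_edge type and helpers below are made up for the
   example.  */
#if 0
#include <stdio.h>

#define TOY_MAX_PENDING 4

struct toy_pending_edge
{
  const char *pending[TOY_MAX_PENDING]; /* Queued "statements".  */
  int n_pending;
};

static void
toy_insert_on_edge (struct toy_pending_edge *e, const char *stmt)
{
  /* No insertion happens yet; just remember the statement.  */
  if (e->n_pending < TOY_MAX_PENDING)
    e->pending[e->n_pending++] = stmt;
}

static void
toy_commit_edge_inserts (struct toy_pending_edge *edges, int n_edges)
{
  int i, j;

  for (i = 0; i < n_edges; i++)
    {
      for (j = 0; j < edges[i].n_pending; j++)
        printf ("edge %d: materialize \"%s\"\n", i, edges[i].pending[j]);
      edges[i].n_pending = 0;
    }
}

int
main (void)
{
  struct toy_pending_edge edges[2] = { { { NULL }, 0 }, { { NULL }, 0 } };

  toy_insert_on_edge (&edges[0], "x = 1");
  toy_insert_on_edge (&edges[1], "y = x + 2");
  toy_commit_edge_inserts (edges, 2);
  return 0;
}
#endif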
3060 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3061 block has to be created, it is returned. */
3064 bsi_insert_on_edge_immediate (edge e, tree stmt)
3066 block_stmt_iterator bsi;
3067 basic_block new_bb = NULL;
3069 gcc_assert (!PENDING_STMT (e));
3071 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3072 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3074 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3079 /*---------------------------------------------------------------------------
3080 Tree specific functions for CFG manipulation
3081 ---------------------------------------------------------------------------*/
3083 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3086 reinstall_phi_args (edge new_edge, edge old_edge)
3089 edge_var_map_vector v;
3093 v = redirect_edge_var_map_vector (old_edge);
3097 for (i = 0, phi = phi_nodes (new_edge->dest);
3098 VEC_iterate (edge_var_map, v, i, vm) && phi;
3099 i++, phi = PHI_CHAIN (phi))
3101 tree result = redirect_edge_var_map_result (vm);
3102 tree arg = redirect_edge_var_map_def (vm);
3104 gcc_assert (result == PHI_RESULT (phi));
3106 add_phi_arg (phi, arg, new_edge);
3109 redirect_edge_var_map_clear (old_edge);
3112 /* Returns the basic block after which the new basic block created
3113 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3114 near its "logical" location. This is of most help to humans looking
3115 at debugging dumps. */
3118 split_edge_bb_loc (edge edge_in)
3120 basic_block dest = edge_in->dest;
3122 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3123 return edge_in->src;
3125 return dest->prev_bb;
3128 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3129 Abort on abnormal edges. */
3132 tree_split_edge (edge edge_in)
3134 basic_block new_bb, after_bb, dest;
3137 /* Abnormal edges cannot be split. */
3138 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3140 dest = edge_in->dest;
3142 after_bb = split_edge_bb_loc (edge_in);
3144 new_bb = create_empty_bb (after_bb);
3145 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3146 new_bb->count = edge_in->count;
3147 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3148 new_edge->probability = REG_BR_PROB_BASE;
3149 new_edge->count = edge_in->count;
3151 e = redirect_edge_and_branch (edge_in, new_bb);
3152 gcc_assert (e == edge_in);
3153 reinstall_phi_args (new_edge, e);
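/* Illustrative, standalone sketch (not part of GCC): splitting an edge A->B
   by introducing a new block N so the path becomes A->N->B, with N
   inheriting the execution count of the edge it replaces, as
   tree_split_edge does above.  The toy_block type below is made up for the
   example and only models a single successor per block.  */
#if 0
struct toy_block
{
  int index;
  int succ;   /* Index of the single successor, -1 if none.  */
  long count; /* Execution count.  */
};

/* Split the edge from BLOCKS[SRC] to its successor.  BLOCKS must have room
   for one more entry; *N_BLOCKS is updated.  */
static int
toy_split_edge (struct toy_block *blocks, int *n_blocks, int src)
{
  int new_index = (*n_blocks)++;
  struct toy_block *n = &blocks[new_index];

  n->index = new_index;
  n->succ = blocks[src].succ;   /* New block falls through to the old dest.  */
  n->count = blocks[src].count; /* Inherit the execution count.  */
  blocks[src].succ = new_index; /* Redirect the original edge.  */
  return new_index;
}
#endif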
3158 /* Callback for walk_tree; checks that all elements with address taken are
3159 properly noticed as such. The DATA is an int* that is 1 if TP was seen
3160 inside a PHI node. */
3163 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3170 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3171 #define CHECK_OP(N, MSG) \
3172 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3173 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3175 switch (TREE_CODE (t))
3178 if (SSA_NAME_IN_FREE_LIST (t))
3180 error ("SSA name in freelist but still referenced");
3186 x = fold (ASSERT_EXPR_COND (t));
3187 if (x == boolean_false_node)
3189 error ("ASSERT_EXPR with an always-false condition");
3197 case GIMPLE_MODIFY_STMT:
3198 x = GIMPLE_STMT_OPERAND (t, 0);
3199 if (TREE_CODE (x) == BIT_FIELD_REF
3200 && is_gimple_reg (TREE_OPERAND (x, 0)))
3202 error ("GIMPLE register modified with BIT_FIELD_REF");
3210 bool old_side_effects;
3212 bool new_side_effects;
3214 gcc_assert (is_gimple_address (t));
3216 old_constant = TREE_CONSTANT (t);
3217 old_side_effects = TREE_SIDE_EFFECTS (t);
3219 recompute_tree_invariant_for_addr_expr (t);
3220 new_side_effects = TREE_SIDE_EFFECTS (t);
3221 new_constant = TREE_CONSTANT (t);
3223 if (old_constant != new_constant)
3225 error ("constant not recomputed when ADDR_EXPR changed");
3228 if (old_side_effects != new_side_effects)
3230 error ("side effects not recomputed when ADDR_EXPR changed");
3234 /* Skip any references (they will be checked when we recurse down the
3235 tree) and ensure that any variable used as a prefix is marked addressable. */
3237 for (x = TREE_OPERAND (t, 0);
3238 handled_component_p (x);
3239 x = TREE_OPERAND (x, 0))
3242 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3244 if (!TREE_ADDRESSABLE (x))
3246 error ("address taken, but ADDRESSABLE bit not set");
3254 x = COND_EXPR_COND (t);
3255 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3257 error ("non-integral used in condition");
3260 if (!is_gimple_condexpr (x))
3262 error ("invalid conditional operand");
3267 case NON_LVALUE_EXPR:
3271 case FIX_TRUNC_EXPR:
3276 case TRUTH_NOT_EXPR:
3277 CHECK_OP (0, "invalid operand to unary operator");
3284 case ARRAY_RANGE_REF:
3286 case VIEW_CONVERT_EXPR:
3287 /* We have a nest of references. Verify that each of the operands
3288 that determine where to reference is either a constant or a variable,
3289 verify that the base is valid, and then show we've already checked the subtrees. */
3291 while (handled_component_p (t))
3293 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3294 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3295 else if (TREE_CODE (t) == ARRAY_REF
3296 || TREE_CODE (t) == ARRAY_RANGE_REF)
3298 CHECK_OP (1, "invalid array index");
3299 if (TREE_OPERAND (t, 2))
3300 CHECK_OP (2, "invalid array lower bound");
3301 if (TREE_OPERAND (t, 3))
3302 CHECK_OP (3, "invalid array stride");
3304 else if (TREE_CODE (t) == BIT_FIELD_REF)
3306 if (!host_integerp (TREE_OPERAND (t, 1), 1)
3307 || !host_integerp (TREE_OPERAND (t, 2), 1))
3309 error ("invalid position or size operand to BIT_FIELD_REF");
3312 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3313 && (TYPE_PRECISION (TREE_TYPE (t))
3314 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3316 error ("integral result type precision does not match "
3317 "field size of BIT_FIELD_REF");
3320 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3321 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
3322 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3324 error ("mode precision of non-integral result does not "
3325 "match field size of BIT_FIELD_REF");
3330 t = TREE_OPERAND (t, 0);
3333 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3335 error ("invalid reference prefix");
3342 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3343 POINTER_PLUS_EXPR. */
3344 if (POINTER_TYPE_P (TREE_TYPE (t)))
3346 error ("invalid operand to plus/minus, type is a pointer");
3349 CHECK_OP (0, "invalid operand to binary operator");
3350 CHECK_OP (1, "invalid operand to binary operator");
3353 case POINTER_PLUS_EXPR:
3354 /* Check to make sure the first operand is a pointer or reference type. */
3355 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3357 error ("invalid operand to pointer plus, first operand is not a pointer");
3360 /* Check to make sure the second operand is an integer with type of sizetype. */
3362 if (!useless_type_conversion_p (sizetype,
3363 TREE_TYPE (TREE_OPERAND (t, 1))))
3365 error ("invalid operand to pointer plus, second operand is not an "
3366 "integer with type of sizetype.");
3376 case UNORDERED_EXPR:
3385 case TRUNC_DIV_EXPR:
3387 case FLOOR_DIV_EXPR:
3388 case ROUND_DIV_EXPR:
3389 case TRUNC_MOD_EXPR:
3391 case FLOOR_MOD_EXPR:
3392 case ROUND_MOD_EXPR:
3394 case EXACT_DIV_EXPR:
3404 CHECK_OP (0, "invalid operand to binary operator");
3405 CHECK_OP (1, "invalid operand to binary operator");
3409 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3421 /* Verifies if EXPR is a valid GIMPLE unary expression. Returns true
3422 if there is an error, otherwise false. */
3425 verify_gimple_unary_expr (const_tree expr)
3427 tree op = TREE_OPERAND (expr, 0);
3428 tree type = TREE_TYPE (expr);
3430 if (!is_gimple_val (op))
3432 error ("invalid operand in unary expression");
3436 /* For general unary expressions we have the operations type
3437 as the effective type the operation is carried out on. So all
3438 we need to require is that the operand is trivially convertible to that type. */
3440 if (!useless_type_conversion_p (type, TREE_TYPE (op)))
3442 error ("type mismatch in unary expression");
3443 debug_generic_expr (type);
3444 debug_generic_expr (TREE_TYPE (op));
3451 /* Verifies if EXPR is a valid GIMPLE binary expression. Returns true
3452 if there is an error, otherwise false. */
3455 verify_gimple_binary_expr (const_tree expr)
3457 tree op0 = TREE_OPERAND (expr, 0);
3458 tree op1 = TREE_OPERAND (expr, 1);
3459 tree type = TREE_TYPE (expr);
3461 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3463 error ("invalid operands in binary expression");
3467 /* For general binary expressions we have the operations type
3468 as the effective type the operation is carried out on. So all
3469 we need to require is that both operands are trivially convertible to that type. */
3471 if (!useless_type_conversion_p (type, TREE_TYPE (op0))
3472 || !useless_type_conversion_p (type, TREE_TYPE (op1)))
3474 error ("type mismatch in binary expression");
3475 debug_generic_stmt (type);
3476 debug_generic_stmt (TREE_TYPE (op0));
3477 debug_generic_stmt (TREE_TYPE (op1));
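/* Illustrative, standalone sketch (not part of GCC): the shape of the check
   done by verify_gimple_binary_expr above -- a binary operation is well
   typed only when both operand types convert trivially to the result type.
   The toy_type enum and helpers below are made up for the example.  */
#if 0
enum toy_type { TOY_INT32, TOY_INT64, TOY_FLOAT };

static int
toy_trivially_convertible_p (enum toy_type to, enum toy_type from)
{
  return to == from;  /* The toy type lattice has no useless conversions.  */
}

/* Return nonzero on error, following the convention of the verifiers
   above.  */
static int
toy_verify_binary (enum toy_type result, enum toy_type op0, enum toy_type op1)
{
  return !toy_trivially_convertible_p (result, op0)
         || !toy_trivially_convertible_p (result, op1);
}
#endif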
3484 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3485 Returns true if there is an error, otherwise false. */
3488 verify_gimple_min_lval (tree expr)
3492 if (is_gimple_id (expr))
3495 if (TREE_CODE (expr) != INDIRECT_REF
3496 && TREE_CODE (expr) != ALIGN_INDIRECT_REF
3497 && TREE_CODE (expr) != MISALIGNED_INDIRECT_REF)
3499 error ("invalid expression for min lvalue");
3503 op = TREE_OPERAND (expr, 0);
3504 if (!is_gimple_val (op))
3506 error ("invalid operand in indirect reference");
3507 debug_generic_stmt (op);
3510 if (!useless_type_conversion_p (TREE_TYPE (expr),
3511 TREE_TYPE (TREE_TYPE (op))))
3513 error ("type mismatch in indirect reference");
3514 debug_generic_stmt (TREE_TYPE (expr));
3515 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3522 /* Verify if EXPR is a valid GIMPLE reference expression. Returns true
3523 if there is an error, otherwise false. */
3526 verify_gimple_reference (tree expr)
3528 while (handled_component_p (expr))
3530 tree op = TREE_OPERAND (expr, 0);
3532 if (TREE_CODE (expr) == ARRAY_REF
3533 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3535 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3536 || (TREE_OPERAND (expr, 2)
3537 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3538 || (TREE_OPERAND (expr, 3)
3539 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3541 error ("invalid operands to array reference");
3542 debug_generic_stmt (expr);
3547 /* Verify if the reference array element types are compatible. */
3548 if (TREE_CODE (expr) == ARRAY_REF
3549 && !useless_type_conversion_p (TREE_TYPE (expr),
3550 TREE_TYPE (TREE_TYPE (op))))
3552 error ("type mismatch in array reference");
3553 debug_generic_stmt (TREE_TYPE (expr));
3554 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3557 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3558 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3559 TREE_TYPE (TREE_TYPE (op))))
3561 error ("type mismatch in array range reference");
3562 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3563 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3567 if ((TREE_CODE (expr) == REALPART_EXPR
3568 || TREE_CODE (expr) == IMAGPART_EXPR)
3569 && !useless_type_conversion_p (TREE_TYPE (expr),
3570 TREE_TYPE (TREE_TYPE (op))))
3572 error ("type mismatch in real/imagpart reference");
3573 debug_generic_stmt (TREE_TYPE (expr));
3574 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3578 if (TREE_CODE (expr) == COMPONENT_REF
3579 && !useless_type_conversion_p (TREE_TYPE (expr),
3580 TREE_TYPE (TREE_OPERAND (expr, 1))))
3582 error ("type mismatch in component reference");
3583 debug_generic_stmt (TREE_TYPE (expr));
3584 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3588 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3589 is nothing to verify. Gross mismatches at most invoke
3590 undefined behavior. */
3595 return verify_gimple_min_lval (expr);
3598 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3599 list of pointer-to types that is trivially convertible to DEST. */
3602 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3606 if (!TYPE_POINTER_TO (src_obj))
3609 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3610 if (useless_type_conversion_p (dest, src))
3616 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3617 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3620 valid_fixed_convert_types_p (tree type1, tree type2)
3622 return (FIXED_POINT_TYPE_P (type1)
3623 && (INTEGRAL_TYPE_P (type2)
3624 || SCALAR_FLOAT_TYPE_P (type2)
3625 || FIXED_POINT_TYPE_P (type2)));
3628 /* Verify the GIMPLE expression EXPR. Returns true if there is an
3629 error, otherwise false. */
3632 verify_gimple_expr (tree expr)
3634 tree type = TREE_TYPE (expr);
3636 if (is_gimple_val (expr))
3639 /* Special codes we cannot handle via their class. */
3640 switch (TREE_CODE (expr))
3644 tree op = TREE_OPERAND (expr, 0);
3645 if (!is_gimple_val (op))
3647 error ("invalid operand in conversion");