1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
45 #include "cfglayout.h"
46 #include "tree-ssa-propagate.h"
47 #include "value-prof.h"
48 #include "pointer-set.h"
49 #include "tree-inline.h"
51 /* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
54 /* Local declarations. */
56 /* Initial capacity for the basic block array. */
57 static const int initial_cfg_capacity = 20;
59 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
62 hash table to prevent problems with duplication of SWITCH_EXPRs.
64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
72 static struct pointer_map_t *edge_to_cases;
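/* A sketch of the intended use (the edge-redirection code, e.g.
   tree_redirect_edge_and_branch, is the expected consumer; E, SWITCH_STMT
   and NEW_LABEL below are placeholders):

       start_recording_case_labels ();
       for (tree t = get_cases_for_edge (e, switch_stmt); t; t = TREE_CHAIN (t))
         CASE_LABEL (t) = new_label;
       end_recording_case_labels ();

   Without the table, every redirection would have to rescan the whole
   case vector of the SWITCH_EXPR.  */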
77 long num_merged_labels;
80 static struct cfg_stats_d cfg_stats;
82 /* Nonzero if we found a computed goto while building basic blocks. */
83 static bool found_computed_goto;
85 /* Basic blocks and flowgraphs. */
86 static basic_block create_bb (void *, void *, basic_block);
87 static void make_blocks (tree);
88 static void factor_computed_gotos (void);
91 static void make_edges (void);
92 static void make_cond_expr_edges (basic_block);
93 static void make_switch_expr_edges (basic_block);
94 static void make_goto_expr_edges (basic_block);
95 static edge tree_redirect_edge_and_branch (edge, basic_block);
96 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
97 static unsigned int split_critical_edges (void);
99 /* Various helpers. */
100 static inline bool stmt_starts_bb_p (const_tree, const_tree);
101 static int tree_verify_flow_info (void);
102 static void tree_make_forwarder_block (edge);
103 static void tree_cfg2vcg (FILE *);
104 static inline void change_bb_for_stmt (tree t, basic_block bb);
106 /* Flowgraph optimization and cleanup. */
107 static void tree_merge_blocks (basic_block, basic_block);
108 static bool tree_can_merge_blocks_p (basic_block, basic_block);
109 static void remove_bb (basic_block);
110 static edge find_taken_edge_computed_goto (basic_block, tree);
111 static edge find_taken_edge_cond_expr (basic_block, tree);
112 static edge find_taken_edge_switch_expr (basic_block, tree);
113 static tree find_case_label_for_value (tree, tree);
116 init_empty_tree_cfg (void)
118 /* Initialize the basic block array. */
120 profile_status = PROFILE_ABSENT;
121 n_basic_blocks = NUM_FIXED_BLOCKS;
122 last_basic_block = NUM_FIXED_BLOCKS;
123 basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
124 VEC_safe_grow_cleared (basic_block, gc, basic_block_info,
125 initial_cfg_capacity);
127 /* Build a mapping of labels to their associated blocks. */
128 label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
129 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
130 initial_cfg_capacity);
132 SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
133 SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
134 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
135 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
138 /*---------------------------------------------------------------------------
140 ---------------------------------------------------------------------------*/
142 /* Entry point to the CFG builder for trees. TP points to the list of
143 statements to be added to the flowgraph. */
146 build_tree_cfg (tree *tp)
148 /* Register specific tree functions. */
149 tree_register_cfg_hooks ();
151 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
153 init_empty_tree_cfg ();
155 found_computed_goto = 0;
158 /* Computed gotos are hell to deal with, especially if there are
159 lots of them with a large number of destinations. So we factor
160 them to a common computed goto location before we build the
161 edge list. After we convert back to normal form, we will un-factor
162 the computed gotos since factoring introduces an unwanted jump. */
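/* For instance (a sketch; GOTOVAR is the temporary that
   factor_computed_gotos creates below), each original

       goto *p;

   becomes

       gotovar = p;
       goto <factored>;

   and a single factored block

     <factored>:
       goto *gotovar;

   is appended, so only that one computed goto needs abnormal edges to
   every addressable label.  */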
163 if (found_computed_goto)
164 factor_computed_gotos ();
166 /* Make sure there is always at least one block, even if it's empty. */
167 if (n_basic_blocks == NUM_FIXED_BLOCKS)
168 create_empty_bb (ENTRY_BLOCK_PTR);
170 /* Adjust the size of the array. */
171 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
172 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
174 /* To speed up statement iterator walks, we first purge dead labels. */
175 cleanup_dead_labels ();
177 /* Group case nodes to reduce the number of edges.
178 We do this after cleaning up dead labels because otherwise we miss
179 a lot of obvious case merging opportunities. */
180 group_case_labels ();
182 /* Create the edges of the flowgraph. */
184 cleanup_dead_labels ();
186 /* Debugging dumps. */
188 /* Write the flowgraph to a VCG file. */
190 int local_dump_flags;
191 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
194 tree_cfg2vcg (vcg_file);
195 dump_end (TDI_vcg, vcg_file);
199 #ifdef ENABLE_CHECKING
203 /* Dump a textual representation of the flowgraph. */
205 dump_tree_cfg (dump_file, dump_flags);
209 execute_build_cfg (void)
211 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
215 struct gimple_opt_pass pass_build_cfg =
221 execute_build_cfg, /* execute */
224 0, /* static_pass_number */
225 TV_TREE_CFG, /* tv_id */
226 PROP_gimple_leh, /* properties_required */
227 PROP_cfg, /* properties_provided */
228 0, /* properties_destroyed */
229 0, /* todo_flags_start */
230 TODO_verify_stmts | TODO_cleanup_cfg /* todo_flags_finish */
234 /* Search the CFG for any computed gotos. If found, factor them to a
235 common computed goto site. Also record the location of that site so
236 that we can un-factor the gotos after we have converted back to normal form. */
240 factor_computed_gotos (void)
243 tree factored_label_decl = NULL;
245 tree factored_computed_goto_label = NULL;
246 tree factored_computed_goto = NULL;
248 /* We know there are one or more computed gotos in this function.
249 Examine the last statement in each basic block to see if the block
250 ends with a computed goto. */
254 block_stmt_iterator bsi = bsi_last (bb);
259 last = bsi_stmt (bsi);
261 /* Ignore the computed goto we create when we factor the original computed goto. */
263 if (last == factored_computed_goto)
266 /* If the last statement is a computed goto, factor it. */
267 if (computed_goto_p (last))
271 /* The first time we find a computed goto we need to create
272 the factored goto block and the variable each original
273 computed goto will use for its goto destination. */
274 if (! factored_computed_goto)
276 basic_block new_bb = create_empty_bb (bb);
277 block_stmt_iterator new_bsi = bsi_start (new_bb);
279 /* Create the destination of the factored goto. Each original
280 computed goto will put its desired destination into this
281 variable and jump to the label we create immediately below. */
283 var = create_tmp_var (ptr_type_node, "gotovar");
285 /* Build a label for the new block which will contain the
286 factored computed goto. */
287 factored_label_decl = create_artificial_label ();
288 factored_computed_goto_label
289 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
290 bsi_insert_after (&new_bsi, factored_computed_goto_label,
293 /* Build our new computed goto. */
294 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
295 bsi_insert_after (&new_bsi, factored_computed_goto,
299 /* Copy the original computed goto's destination into VAR. */
300 assignment = build_gimple_modify_stmt (var,
301 GOTO_DESTINATION (last));
302 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
304 /* And re-vector the computed goto to the new destination. */
305 GOTO_DESTINATION (last) = factored_label_decl;
311 /* Build a flowgraph for the statement_list STMT_LIST. */
314 make_blocks (tree stmt_list)
316 tree_stmt_iterator i = tsi_start (stmt_list);
318 bool start_new_block = true;
319 bool first_stmt_of_list = true;
320 basic_block bb = ENTRY_BLOCK_PTR;
322 while (!tsi_end_p (i))
329 /* If the statement starts a new basic block or if we have determined
330 in a previous pass that we need to create a new block for STMT, do so now. */
332 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
334 if (!first_stmt_of_list)
335 stmt_list = tsi_split_statement_list_before (&i);
336 bb = create_basic_block (stmt_list, NULL, bb);
337 start_new_block = false;
341 /* Now add STMT to BB and create the subgraphs for special statement codes. */
342 set_bb_for_stmt (stmt, bb);
344 if (computed_goto_p (stmt))
345 found_computed_goto = true;
347 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the next iteration. */
349 if (stmt_ends_bb_p (stmt))
350 start_new_block = true;
353 first_stmt_of_list = false;
358 /* Create and return a new empty basic block after bb AFTER. */
361 create_bb (void *h, void *e, basic_block after)
367 /* Create and initialize a new basic block. Since alloc_block uses
368 ggc_alloc_cleared to allocate a basic block, we do not have to
369 clear the newly allocated basic block here. */
372 bb->index = last_basic_block;
374 bb->il.tree = GGC_CNEW (struct tree_bb_info);
375 set_bb_stmt_list (bb, h ? (tree) h : alloc_stmt_list ());
377 /* Add the new block to the linked list of blocks. */
378 link_block (bb, after);
380 /* Grow the basic block array if needed. */
381 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
383 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
384 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
387 /* Add the newly created block to the array. */
388 SET_BASIC_BLOCK (last_basic_block, bb);
397 /*---------------------------------------------------------------------------
399 ---------------------------------------------------------------------------*/
401 /* Fold COND_EXPR_COND of each COND_EXPR. */
404 fold_cond_expr_cond (void)
410 tree stmt = last_stmt (bb);
413 && TREE_CODE (stmt) == COND_EXPR)
418 fold_defer_overflow_warnings ();
419 cond = fold (COND_EXPR_COND (stmt));
420 zerop = integer_zerop (cond);
421 onep = integer_onep (cond);
422 fold_undefer_overflow_warnings (zerop || onep,
424 WARN_STRICT_OVERFLOW_CONDITIONAL);
426 COND_EXPR_COND (stmt) = boolean_false_node;
428 COND_EXPR_COND (stmt) = boolean_true_node;
433 /* Join all the blocks in the flowgraph. */
439 struct omp_region *cur_region = NULL;
441 /* Create an edge from entry to the first block with executable
443 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
445 /* Traverse the basic block array placing edges. */
448 tree last = last_stmt (bb);
453 enum tree_code code = TREE_CODE (last);
457 make_goto_expr_edges (bb);
461 make_edge (bb, EXIT_BLOCK_PTR, 0);
465 make_cond_expr_edges (bb);
469 make_switch_expr_edges (bb);
473 make_eh_edges (last);
478 /* If this function receives a nonlocal goto, then we need to
479 make edges from this call site to all the nonlocal goto handlers. */
481 if (tree_can_make_abnormal_goto (last))
482 make_abnormal_goto_edges (bb, true);
484 /* If this statement has reachable exception handlers, then
485 create abnormal edges to them. */
486 make_eh_edges (last);
488 /* Some calls are known not to return. */
489 fallthru = !(call_expr_flags (last) & ECF_NORETURN);
495 case GIMPLE_MODIFY_STMT:
496 if (is_ctrl_altering_stmt (last))
498 /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
499 the CALL_EXPR may have an abnormal edge. Search the RHS
500 for this case and create any required edges. */
501 if (tree_can_make_abnormal_goto (last))
502 make_abnormal_goto_edges (bb, true);
504 make_eh_edges (last);
516 cur_region = new_omp_region (bb, code, cur_region);
521 cur_region = new_omp_region (bb, code, cur_region);
525 case OMP_SECTIONS_SWITCH:
530 case OMP_ATOMIC_LOAD:
531 case OMP_ATOMIC_STORE:
537 /* In the case of an OMP_SECTION, the edge will go somewhere
538 other than the next block. This will be created later. */
539 cur_region->exit = bb;
540 fallthru = cur_region->type != OMP_SECTION;
541 cur_region = cur_region->outer;
545 cur_region->cont = bb;
546 switch (cur_region->type)
549 /* Mark all OMP_FOR and OMP_CONTINUE succs edges as abnormal
550 to prevent splitting them. */
551 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
552 /* Make the loopback edge. */
553 make_edge (bb, single_succ (cur_region->entry),
556 /* Create an edge from OMP_FOR to exit, which corresponds to
557 the case that the body of the loop is not executed at all. */
559 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
560 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
565 /* Wire up the edges into and out of the nested sections. */
567 basic_block switch_bb = single_succ (cur_region->entry);
569 struct omp_region *i;
570 for (i = cur_region->inner; i ; i = i->next)
572 gcc_assert (i->type == OMP_SECTION);
573 make_edge (switch_bb, i->entry, 0);
574 make_edge (i->exit, bb, EDGE_FALLTHRU);
577 /* Make the loopback edge to the block with
578 OMP_SECTIONS_SWITCH. */
579 make_edge (bb, switch_bb, 0);
581 /* Make the edge from the switch to exit. */
582 make_edge (switch_bb, bb->next_bb, 0);
593 gcc_assert (!stmt_ends_bb_p (last));
601 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
607 /* Fold COND_EXPR_COND of each COND_EXPR. */
608 fold_cond_expr_cond ();
612 /* Create the edges for a COND_EXPR starting at block BB.
613 At this point, both clauses must contain only simple gotos. */
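/* For example, for the lowered form

       if (a) goto L1; else goto L2;

   we create an EDGE_TRUE_VALUE edge to the block starting with L1 and an
   EDGE_FALSE_VALUE edge to the block starting with L2, and then drop both
   gotos, since the edges now carry that information (a sketch; A, L1 and
   L2 are placeholders).  */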
616 make_cond_expr_edges (basic_block bb)
618 tree entry = last_stmt (bb);
619 basic_block then_bb, else_bb;
620 tree then_label, else_label;
624 gcc_assert (TREE_CODE (entry) == COND_EXPR);
626 /* Entry basic blocks for each component. */
627 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
628 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
629 then_bb = label_to_block (then_label);
630 else_bb = label_to_block (else_label);
632 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
633 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
634 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
636 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
638 /* We do not need the gotos anymore. */
639 COND_EXPR_THEN (entry) = NULL_TREE;
640 COND_EXPR_ELSE (entry) = NULL_TREE;
644 /* Called for each element in the hash table (P) as we delete the
645 edge to cases hash table.
647 Clear all the TREE_CHAINs to prevent problems with copying of
648 SWITCH_EXPRs and structure sharing rules, then free the hash table element. */
652 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
653 void *data ATTRIBUTE_UNUSED)
657 for (t = (tree) *value; t; t = next)
659 next = TREE_CHAIN (t);
660 TREE_CHAIN (t) = NULL;
667 /* Start recording information mapping edges to case labels. */
670 start_recording_case_labels (void)
672 gcc_assert (edge_to_cases == NULL);
673 edge_to_cases = pointer_map_create ();
676 /* Return nonzero if we are recording information for case labels. */
679 recording_case_labels_p (void)
681 return (edge_to_cases != NULL);
684 /* Stop recording information mapping edges to case labels and
685 remove any information we have recorded. */
687 end_recording_case_labels (void)
689 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
690 pointer_map_destroy (edge_to_cases);
691 edge_to_cases = NULL;
694 /* If we are inside a {start,end}_recording_cases block, then return
695 a chain of CASE_LABEL_EXPRs from T which reference E.
697 Otherwise return NULL. */
700 get_cases_for_edge (edge e, tree t)
706 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
707 chains available. Return NULL so the caller can detect this case. */
708 if (!recording_case_labels_p ())
711 slot = pointer_map_contains (edge_to_cases, e);
715 /* If we did not find E in the hash table, then this must be the first
716 time we have been queried for information about E & T. Add all the
717 elements from T to the hash table then perform the query again. */
719 vec = SWITCH_LABELS (t);
720 n = TREE_VEC_LENGTH (vec);
721 for (i = 0; i < n; i++)
723 tree elt = TREE_VEC_ELT (vec, i);
724 tree lab = CASE_LABEL (elt);
725 basic_block label_bb = label_to_block (lab);
726 edge this_edge = find_edge (e->src, label_bb);
728 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create a new chain. */
730 slot = pointer_map_insert (edge_to_cases, this_edge);
731 TREE_CHAIN (elt) = (tree) *slot;
735 return (tree) *pointer_map_contains (edge_to_cases, e);
738 /* Create the edges for a SWITCH_EXPR starting at block BB.
739 At this point, the switch body has been lowered and the
740 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
743 make_switch_expr_edges (basic_block bb)
745 tree entry = last_stmt (bb);
749 vec = SWITCH_LABELS (entry);
750 n = TREE_VEC_LENGTH (vec);
752 for (i = 0; i < n; ++i)
754 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
755 basic_block label_bb = label_to_block (lab);
756 make_edge (bb, label_bb, 0);
761 /* Return the basic block holding label DEST. */
764 label_to_block_fn (struct function *ifun, tree dest)
766 int uid = LABEL_DECL_UID (dest);
768 /* We would die hard when faced by an undefined label. Emit a label to
769 the very first basic block. This will hopefully make even the dataflow
770 and undefined variable warnings quite right. */
771 if ((errorcount || sorrycount) && uid < 0)
773 block_stmt_iterator bsi =
774 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
777 stmt = build1 (LABEL_EXPR, void_type_node, dest);
778 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
779 uid = LABEL_DECL_UID (dest);
781 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
782 <= (unsigned int) uid)
784 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
787 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
788 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
791 make_abnormal_goto_edges (basic_block bb, bool for_call)
793 basic_block target_bb;
794 block_stmt_iterator bsi;
796 FOR_EACH_BB (target_bb)
797 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
799 tree target = bsi_stmt (bsi);
801 if (TREE_CODE (target) != LABEL_EXPR)
804 target = LABEL_EXPR_LABEL (target);
806 /* Make an edge to every label block that has been marked as a
807 potential target for a computed goto or a non-local goto. */
808 if ((FORCED_LABEL (target) && !for_call)
809 || (DECL_NONLOCAL (target) && for_call))
811 make_edge (bb, target_bb, EDGE_ABNORMAL);
817 /* Create edges for a goto statement at block BB. */
820 make_goto_expr_edges (basic_block bb)
822 block_stmt_iterator last = bsi_last (bb);
823 tree goto_t = bsi_stmt (last);
825 /* A simple GOTO creates normal edges. */
826 if (simple_goto_p (goto_t))
828 tree dest = GOTO_DESTINATION (goto_t);
829 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
830 e->goto_locus = EXPR_LOCATION (goto_t);
831 bsi_remove (&last, true);
835 /* A computed GOTO creates abnormal edges. */
836 make_abnormal_goto_edges (bb, false);
840 /*---------------------------------------------------------------------------
842 ---------------------------------------------------------------------------*/
844 /* Cleanup useless labels in basic blocks. This is something we wish
845 to do early because it allows us to group case labels before creating
846 the edges for the CFG, and it speeds up block statement iterators in all passes.
848 We rerun this pass after CFG is created, to get rid of the labels that
849 are no longer referenced. After that we do not run it any more, since
850 (almost) no new labels should be created. */
852 /* A map from basic block index to the leading label of that block. */
853 static struct label_record
858 /* True if the label is referenced from somewhere. */
862 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
864 update_eh_label (struct eh_region *region)
866 tree old_label = get_eh_region_tree_label (region);
870 basic_block bb = label_to_block (old_label);
872 /* ??? After optimizing, there may be EH regions with labels
873 that have already been removed from the function body, so
874 there is no basic block for them. */
878 new_label = label_for_bb[bb->index].label;
879 label_for_bb[bb->index].used = true;
880 set_eh_region_tree_label (region, new_label);
884 /* Given LABEL return the first label in the same basic block. */
886 main_block_label (tree label)
888 basic_block bb = label_to_block (label);
889 tree main_label = label_for_bb[bb->index].label;
891 /* label_to_block possibly inserted an undefined label into the chain. */
894 label_for_bb[bb->index].label = label;
898 label_for_bb[bb->index].used = true;
902 /* Cleanup redundant labels. This is a three-step process:
903 1) Find the leading label for each block.
904 2) Redirect all references to labels to the leading labels.
905 3) Cleanup all useless labels. */
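/* A small example (sketch): if a block begins with

       L1:; L2:; L3:;

   and only L2 is user-defined, L2 becomes the leading label, every jump,
   switch case and EH region that referenced L1 or L3 is redirected to L2,
   and the unused artificial labels are deleted. User-defined, forced and
   non-local labels are always preserved.  */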
908 cleanup_dead_labels (void)
911 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
913 /* Find a suitable label for each block. We use the first user-defined
914 label if there is one, or otherwise just the first label we see. */
917 block_stmt_iterator i;
919 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
921 tree label, stmt = bsi_stmt (i);
923 if (TREE_CODE (stmt) != LABEL_EXPR)
926 label = LABEL_EXPR_LABEL (stmt);
928 /* If we have not yet seen a label for the current block,
929 remember this one and see if there are more labels. */
930 if (!label_for_bb[bb->index].label)
932 label_for_bb[bb->index].label = label;
936 /* If we did see a label for the current block already, but it
937 is an artificially created label, replace it if the current
938 label is a user defined label. */
939 if (!DECL_ARTIFICIAL (label)
940 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
942 label_for_bb[bb->index].label = label;
948 /* Now redirect all jumps/branches to the selected label.
949 First do so for each block ending in a control statement. */
952 tree stmt = last_stmt (bb);
956 switch (TREE_CODE (stmt))
960 tree true_branch, false_branch;
962 true_branch = COND_EXPR_THEN (stmt);
963 false_branch = COND_EXPR_ELSE (stmt);
966 GOTO_DESTINATION (true_branch)
967 = main_block_label (GOTO_DESTINATION (true_branch));
969 GOTO_DESTINATION (false_branch)
970 = main_block_label (GOTO_DESTINATION (false_branch));
978 tree vec = SWITCH_LABELS (stmt);
979 size_t n = TREE_VEC_LENGTH (vec);
981 /* Replace all destination labels. */
982 for (i = 0; i < n; ++i)
984 tree elt = TREE_VEC_ELT (vec, i);
985 tree label = main_block_label (CASE_LABEL (elt));
986 CASE_LABEL (elt) = label;
991 /* We have to handle GOTO_EXPRs until they're removed, and we don't
992 remove them until after we've created the CFG edges. */
994 if (! computed_goto_p (stmt))
996 GOTO_DESTINATION (stmt)
997 = main_block_label (GOTO_DESTINATION (stmt));
1006 for_each_eh_region (update_eh_label);
1008 /* Finally, purge dead labels. All user-defined labels and labels that
1009 can be the target of non-local gotos and labels which have their
1010 address taken are preserved. */
1013 block_stmt_iterator i;
1014 tree label_for_this_bb = label_for_bb[bb->index].label;
1016 if (!label_for_this_bb)
1019 /* If the main label of the block is unused, we may still remove it. */
1020 if (!label_for_bb[bb->index].used)
1021 label_for_this_bb = NULL;
1023 for (i = bsi_start (bb); !bsi_end_p (i); )
1025 tree label, stmt = bsi_stmt (i);
1027 if (TREE_CODE (stmt) != LABEL_EXPR)
1030 label = LABEL_EXPR_LABEL (stmt);
1032 if (label == label_for_this_bb
1033 || ! DECL_ARTIFICIAL (label)
1034 || DECL_NONLOCAL (label)
1035 || FORCED_LABEL (label))
1038 bsi_remove (&i, true);
1042 free (label_for_bb);
1045 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1046 and scan the sorted vector of cases. Combine the ones jumping to the same label.
1048 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1051 group_case_labels (void)
1057 tree stmt = last_stmt (bb);
1058 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1060 tree labels = SWITCH_LABELS (stmt);
1061 int old_size = TREE_VEC_LENGTH (labels);
1062 int i, j, new_size = old_size;
1063 tree default_case = NULL_TREE;
1064 tree default_label = NULL_TREE;
1066 /* The default label is always the last case in a switch
1067 statement after gimplification if it was not optimized away. */
1069 if (!CASE_LOW (TREE_VEC_ELT (labels, old_size - 1))
1070 && !CASE_HIGH (TREE_VEC_ELT (labels, old_size - 1)))
1072 default_case = TREE_VEC_ELT (labels, old_size - 1);
1073 default_label = CASE_LABEL (default_case);
1077 /* Look for possible opportunities to merge cases. */
1079 while (i < old_size)
1081 tree base_case, base_label, base_high;
1082 base_case = TREE_VEC_ELT (labels, i);
1084 gcc_assert (base_case);
1085 base_label = CASE_LABEL (base_case);
1087 /* Discard cases that have the same destination as the default case. */
1089 if (base_label == default_label)
1091 TREE_VEC_ELT (labels, i) = NULL_TREE;
1097 base_high = CASE_HIGH (base_case) ?
1098 CASE_HIGH (base_case) : CASE_LOW (base_case);
1100 /* Try to merge case labels. Break out when we reach the end
1101 of the label vector or when we cannot merge the next case
1102 label with the current one. */
1103 while (i < old_size)
1105 tree merge_case = TREE_VEC_ELT (labels, i);
1106 tree merge_label = CASE_LABEL (merge_case);
1107 tree t = int_const_binop (PLUS_EXPR, base_high,
1108 integer_one_node, 1);
1110 /* Merge the cases if they jump to the same place,
1111 and their ranges are consecutive. */
1112 if (merge_label == base_label
1113 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1115 base_high = CASE_HIGH (merge_case) ?
1116 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1117 CASE_HIGH (base_case) = base_high;
1118 TREE_VEC_ELT (labels, i) = NULL_TREE;
1127 /* Compress the case labels in the label vector, and adjust the
1128 length of the vector. */
1129 for (i = 0, j = 0; i < new_size; i++)
1131 while (! TREE_VEC_ELT (labels, j))
1133 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1135 TREE_VEC_LENGTH (labels) = new_size;
1140 /* Checks whether we can merge block B into block A. */
1143 tree_can_merge_blocks_p (basic_block a, basic_block b)
1146 block_stmt_iterator bsi;
1149 if (!single_succ_p (a))
1152 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1155 if (single_succ (a) != b)
1158 if (!single_pred_p (b))
1161 if (b == EXIT_BLOCK_PTR)
1164 /* If A ends by a statement causing exceptions or something similar, we
1165 cannot merge the blocks. */
1166 /* This CONST_CAST is okay because last_stmt doesn't modify its
1167 argument and the return value is assigned to a const_tree. */
1168 stmt = last_stmt (CONST_CAST_BB (a));
1169 if (stmt && stmt_ends_bb_p (stmt))
1172 /* Do not allow a block with only a non-local label to be merged. */
1173 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1174 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1177 /* It must be possible to eliminate all phi nodes in B. If ssa form
1178 is not up-to-date, we cannot eliminate any phis; however, if only
1179 some symbols as a whole are marked for renaming, this is not a problem,
1180 as phi nodes for those symbols are irrelevant in updating anyway. */
1181 phi = phi_nodes (b);
1184 if (name_mappings_registered_p ())
1187 for (; phi; phi = PHI_CHAIN (phi))
1188 if (!is_gimple_reg (PHI_RESULT (phi))
1189 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1193 /* Do not remove user labels. */
1194 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1196 stmt = bsi_stmt (bsi);
1197 if (TREE_CODE (stmt) != LABEL_EXPR)
1199 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1203 /* Protect the loop latches. */
1205 && b->loop_father->latch == b)
1211 /* Replaces all uses of NAME by VAL. */
1214 replace_uses_by (tree name, tree val)
1216 imm_use_iterator imm_iter;
1221 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1223 if (TREE_CODE (stmt) != PHI_NODE)
1224 push_stmt_changes (&stmt);
1226 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1228 replace_exp (use, val);
1230 if (TREE_CODE (stmt) == PHI_NODE)
1232 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1233 if (e->flags & EDGE_ABNORMAL)
1235 /* This can only occur for virtual operands, since
1236 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1237 would prevent replacement. */
1238 gcc_assert (!is_gimple_reg (name));
1239 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1244 if (TREE_CODE (stmt) != PHI_NODE)
1248 fold_stmt_inplace (stmt);
1249 if (cfgcleanup_altered_bbs)
1250 bitmap_set_bit (cfgcleanup_altered_bbs, bb_for_stmt (stmt)->index);
1252 /* FIXME. This should go in pop_stmt_changes. */
1253 rhs = get_rhs (stmt);
1254 if (TREE_CODE (rhs) == ADDR_EXPR)
1255 recompute_tree_invariant_for_addr_expr (rhs);
1257 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1259 pop_stmt_changes (&stmt);
1263 gcc_assert (has_zero_uses (name));
1265 /* Also update the trees stored in loop structures. */
1271 FOR_EACH_LOOP (li, loop, 0)
1273 substitute_in_loop_info (loop, name, val);
1278 /* Merge block B into block A. */
1281 tree_merge_blocks (basic_block a, basic_block b)
1283 block_stmt_iterator bsi;
1284 tree_stmt_iterator last;
1288 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1290 /* Remove all single-valued PHI nodes from block B of the form
1291 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1293 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1295 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1297 bool may_replace_uses = may_propagate_copy (def, use);
1299 /* In case we maintain loop closed ssa form, do not propagate arguments
1300 of loop exit phi nodes. */
1302 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1303 && is_gimple_reg (def)
1304 && TREE_CODE (use) == SSA_NAME
1305 && a->loop_father != b->loop_father)
1306 may_replace_uses = false;
1308 if (!may_replace_uses)
1310 gcc_assert (is_gimple_reg (def));
1312 /* Note that just emitting the copies is fine -- there is no problem
1313 with ordering of phi nodes. This is because A is the single
1314 predecessor of B, therefore results of the phi nodes cannot
1315 appear as arguments of the phi nodes. */
1316 copy = build_gimple_modify_stmt (def, use);
1317 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1318 SSA_NAME_DEF_STMT (def) = copy;
1319 remove_phi_node (phi, NULL, false);
1323 /* If we deal with a PHI for virtual operands, we can simply
1324 propagate these without fussing with folding or updating the statement. */
1326 if (!is_gimple_reg (def))
1328 imm_use_iterator iter;
1329 use_operand_p use_p;
1332 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1333 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1334 SET_USE (use_p, use);
1337 replace_uses_by (def, use);
1338 remove_phi_node (phi, NULL, true);
1342 /* Ensure that B follows A. */
1343 move_block_after (b, a);
1345 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1346 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1348 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1349 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1351 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1353 tree label = bsi_stmt (bsi);
1355 bsi_remove (&bsi, false);
1356 /* Now that we can thread computed gotos, we might have
1357 a situation where we have a forced label in block B.
1358 However, the label at the start of block B might still be
1359 used in other ways (think about the runtime checking for
1360 Fortran assigned gotos). So we cannot just delete the
1361 label. Instead we move the label to the start of block A. */
1362 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1364 block_stmt_iterator dest_bsi = bsi_start (a);
1365 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1370 change_bb_for_stmt (bsi_stmt (bsi), a);
1375 /* Merge the chains. */
1376 last = tsi_last (bb_stmt_list (a));
1377 tsi_link_after (&last, bb_stmt_list (b), TSI_NEW_STMT);
1378 set_bb_stmt_list (b, NULL_TREE);
1380 if (cfgcleanup_altered_bbs)
1381 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1385 /* Return the one of two successors of BB that is not reached by a
1386 complex edge, if there is one. Else, return BB. We use
1387 this in optimizations that use post-dominators for their heuristics,
1388 to catch the cases in C++ where function calls are involved. */
1391 single_noncomplex_succ (basic_block bb)
1394 if (EDGE_COUNT (bb->succs) != 2)
1397 e0 = EDGE_SUCC (bb, 0);
1398 e1 = EDGE_SUCC (bb, 1);
1399 if (e0->flags & EDGE_COMPLEX)
1401 if (e1->flags & EDGE_COMPLEX)
1408 /* Walk the function tree removing unnecessary statements.
1410 * Empty statement nodes are removed
1412 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1414 * Unnecessary COND_EXPRs are removed
1416 * Some unnecessary BIND_EXPRs are removed
1418 Clearly more work could be done. The trick is doing the analysis
1419 and removal fast enough to be a net improvement in compile times.
1421 Note that when we remove a control structure such as a COND_EXPR,
1422 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1423 to ensure we eliminate all the useless code. */
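/* For instance (sketch):

       if (x) { } else { }

   has no side effects in either arm and is replaced by an empty
   statement, and

       try { } finally { f (); }

   reduces to a plain call to f, because a TRY_FINALLY_EXPR with an empty
   body is just its cleanup.  */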
1434 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1437 remove_useless_stmts_warn_notreached (tree stmt)
1439 if (EXPR_HAS_LOCATION (stmt))
1441 location_t loc = EXPR_LOCATION (stmt);
1442 if (LOCATION_LINE (loc) > 0)
1444 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
1449 switch (TREE_CODE (stmt))
1451 case STATEMENT_LIST:
1453 tree_stmt_iterator i;
1454 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1455 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1461 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1463 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1465 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1469 case TRY_FINALLY_EXPR:
1470 case TRY_CATCH_EXPR:
1471 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1473 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1478 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1479 case EH_FILTER_EXPR:
1480 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1482 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1485 /* Not a live container. */
1493 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1495 tree then_clause, else_clause, cond;
1496 bool save_has_label, then_has_label, else_has_label;
1498 save_has_label = data->has_label;
1499 data->has_label = false;
1500 data->last_goto = NULL;
1502 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1504 then_has_label = data->has_label;
1505 data->has_label = false;
1506 data->last_goto = NULL;
1508 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1510 else_has_label = data->has_label;
1511 data->has_label = save_has_label | then_has_label | else_has_label;
1513 then_clause = COND_EXPR_THEN (*stmt_p);
1514 else_clause = COND_EXPR_ELSE (*stmt_p);
1515 cond = fold (COND_EXPR_COND (*stmt_p));
1517 /* If neither arm does anything at all, we can remove the whole IF. */
1518 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1520 *stmt_p = build_empty_stmt ();
1521 data->repeat = true;
1524 /* If there are no reachable statements in an arm, then we can
1525 zap the entire conditional. */
1526 else if (integer_nonzerop (cond) && !else_has_label)
1528 if (warn_notreached)
1529 remove_useless_stmts_warn_notreached (else_clause);
1530 *stmt_p = then_clause;
1531 data->repeat = true;
1533 else if (integer_zerop (cond) && !then_has_label)
1535 if (warn_notreached)
1536 remove_useless_stmts_warn_notreached (then_clause);
1537 *stmt_p = else_clause;
1538 data->repeat = true;
1541 /* Check a couple of simple things on then/else with single stmts. */
1544 tree then_stmt = expr_only (then_clause);
1545 tree else_stmt = expr_only (else_clause);
1547 /* Notice branches to a common destination. */
1548 if (then_stmt && else_stmt
1549 && TREE_CODE (then_stmt) == GOTO_EXPR
1550 && TREE_CODE (else_stmt) == GOTO_EXPR
1551 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1553 *stmt_p = then_stmt;
1554 data->repeat = true;
1557 /* If the THEN/ELSE clause merely assigns a value to a variable or
1558 parameter which is already known to contain that value, then
1559 remove the useless THEN/ELSE clause. */
1560 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1563 && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
1564 && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
1565 && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
1566 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1568 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1569 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1570 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1571 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1573 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1574 ? then_stmt : else_stmt);
1575 tree *location = (TREE_CODE (cond) == EQ_EXPR
1576 ? &COND_EXPR_THEN (*stmt_p)
1577 : &COND_EXPR_ELSE (*stmt_p));
1580 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1581 && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1582 && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1583 *location = alloc_stmt_list ();
1587 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1588 would be re-introduced during lowering. */
1589 data->last_goto = NULL;
1594 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1596 bool save_may_branch, save_may_throw;
1597 bool this_may_branch, this_may_throw;
1599 /* Collect may_branch and may_throw information for the body only. */
1600 save_may_branch = data->may_branch;
1601 save_may_throw = data->may_throw;
1602 data->may_branch = false;
1603 data->may_throw = false;
1604 data->last_goto = NULL;
1606 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1608 this_may_branch = data->may_branch;
1609 this_may_throw = data->may_throw;
1610 data->may_branch |= save_may_branch;
1611 data->may_throw |= save_may_throw;
1612 data->last_goto = NULL;
1614 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1616 /* If the body is empty, then we can emit the FINALLY block without
1617 the enclosing TRY_FINALLY_EXPR. */
1618 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1620 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1621 data->repeat = true;
1624 /* If the handler is empty, then we can emit the TRY block without
1625 the enclosing TRY_FINALLY_EXPR. */
1626 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1628 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1629 data->repeat = true;
1632 /* If the body neither throws, nor branches, then we can safely
1633 string the TRY and FINALLY blocks together. */
1634 else if (!this_may_branch && !this_may_throw)
1636 tree stmt = *stmt_p;
1637 *stmt_p = TREE_OPERAND (stmt, 0);
1638 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1639 data->repeat = true;
1645 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1647 bool save_may_throw, this_may_throw;
1648 tree_stmt_iterator i;
1651 /* Collect may_throw information for the body only. */
1652 save_may_throw = data->may_throw;
1653 data->may_throw = false;
1654 data->last_goto = NULL;
1656 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1658 this_may_throw = data->may_throw;
1659 data->may_throw = save_may_throw;
1661 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1662 if (!this_may_throw)
1664 if (warn_notreached)
1665 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1666 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1667 data->repeat = true;
1671 /* Process the catch clause specially. We may be able to tell that
1672 no exceptions propagate past this point. */
1674 this_may_throw = true;
1675 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1676 stmt = tsi_stmt (i);
1677 data->last_goto = NULL;
1679 switch (TREE_CODE (stmt))
1682 for (; !tsi_end_p (i); tsi_next (&i))
1684 stmt = tsi_stmt (i);
1685 /* If we catch all exceptions, then the body does not
1686 propagate exceptions past this point. */
1687 if (CATCH_TYPES (stmt) == NULL)
1688 this_may_throw = false;
1689 data->last_goto = NULL;
1690 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1694 case EH_FILTER_EXPR:
1695 if (EH_FILTER_MUST_NOT_THROW (stmt))
1696 this_may_throw = false;
1697 else if (EH_FILTER_TYPES (stmt) == NULL)
1698 this_may_throw = false;
1699 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1703 /* Otherwise this is a cleanup. */
1704 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1706 /* If the cleanup is empty, then we can emit the TRY block without
1707 the enclosing TRY_CATCH_EXPR. */
1708 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1710 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1711 data->repeat = true;
1715 data->may_throw |= this_may_throw;
1720 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1724 /* First remove anything underneath the BIND_EXPR. */
1725 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1727 /* If the BIND_EXPR has no variables, then we can pull everything
1728 up one level and remove the BIND_EXPR, unless this is the toplevel
1729 BIND_EXPR for the current function or an inlined function.
1731 When this situation occurs we will want to apply this
1732 optimization again. */
1733 block = BIND_EXPR_BLOCK (*stmt_p);
1734 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1735 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1737 || ! BLOCK_ABSTRACT_ORIGIN (block)
1738 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1741 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1742 data->repeat = true;
1748 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1750 tree dest = GOTO_DESTINATION (*stmt_p);
1752 data->may_branch = true;
1753 data->last_goto = NULL;
1755 /* Record the last goto expr, so that we can delete it if unnecessary. */
1756 if (TREE_CODE (dest) == LABEL_DECL)
1757 data->last_goto = stmt_p;
1762 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1764 tree label = LABEL_EXPR_LABEL (*stmt_p);
1766 data->has_label = true;
1768 /* We do want to jump across non-local label receiver code. */
1769 if (DECL_NONLOCAL (label))
1770 data->last_goto = NULL;
1772 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1774 *data->last_goto = build_empty_stmt ();
1775 data->repeat = true;
1778 /* ??? Add something here to delete unused labels. */
1782 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1783 decl. This allows us to eliminate redundant or useless
1784 calls to "const" functions.
1786 The gimplifier already does the same operation, but we may notice functions
1787 being const and pure once their calls have been gimplified, so we need
1788 to update the flag. */
1791 update_call_expr_flags (tree call)
1793 tree decl = get_callee_fndecl (call);
1796 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1797 TREE_SIDE_EFFECTS (call) = 0;
1798 if (TREE_NOTHROW (decl))
1799 TREE_NOTHROW (call) = 1;
1803 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1806 notice_special_calls (tree t)
1808 int flags = call_expr_flags (t);
1810 if (flags & ECF_MAY_BE_ALLOCA)
1811 current_function_calls_alloca = true;
1812 if (flags & ECF_RETURNS_TWICE)
1813 current_function_calls_setjmp = true;
1817 /* Clear flags set by notice_special_calls. Used by dead code removal
1818 to update the flags. */
1821 clear_special_calls (void)
1823 current_function_calls_alloca = false;
1824 current_function_calls_setjmp = false;
1829 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1833 switch (TREE_CODE (t))
1836 remove_useless_stmts_cond (tp, data);
1839 case TRY_FINALLY_EXPR:
1840 remove_useless_stmts_tf (tp, data);
1843 case TRY_CATCH_EXPR:
1844 remove_useless_stmts_tc (tp, data);
1848 remove_useless_stmts_bind (tp, data);
1852 remove_useless_stmts_goto (tp, data);
1856 remove_useless_stmts_label (tp, data);
1861 data->last_goto = NULL;
1862 data->may_branch = true;
1867 data->last_goto = NULL;
1868 notice_special_calls (t);
1869 update_call_expr_flags (t);
1870 if (tree_could_throw_p (t))
1871 data->may_throw = true;
1877 case GIMPLE_MODIFY_STMT:
1878 data->last_goto = NULL;
1880 op = get_call_expr_in (t);
1883 update_call_expr_flags (op);
1884 notice_special_calls (op);
1886 if (tree_could_throw_p (t))
1887 data->may_throw = true;
1890 case STATEMENT_LIST:
1892 tree_stmt_iterator i = tsi_start (t);
1893 while (!tsi_end_p (i))
1896 if (IS_EMPTY_STMT (t))
1902 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1905 if (TREE_CODE (t) == STATEMENT_LIST)
1907 tsi_link_before (&i, t, TSI_SAME_STMT);
1917 data->last_goto = NULL;
1921 /* Make sure the outermost BIND_EXPR in OMP_BODY isn't removed as useless. */
1923 remove_useless_stmts_1 (&BIND_EXPR_BODY (OMP_BODY (*tp)), data);
1924 data->last_goto = NULL;
1933 remove_useless_stmts_1 (&OMP_BODY (*tp), data);
1934 data->last_goto = NULL;
1938 remove_useless_stmts_1 (&OMP_FOR_BODY (*tp), data);
1939 data->last_goto = NULL;
1940 if (OMP_FOR_PRE_BODY (*tp))
1942 remove_useless_stmts_1 (&OMP_FOR_PRE_BODY (*tp), data);
1943 data->last_goto = NULL;
1948 data->last_goto = NULL;
1954 remove_useless_stmts (void)
1956 struct rus_data data;
1958 clear_special_calls ();
1962 memset (&data, 0, sizeof (data));
1963 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1965 while (data.repeat);
1970 struct gimple_opt_pass pass_remove_useless_stmts =
1974 "useless", /* name */
1976 remove_useless_stmts, /* execute */
1979 0, /* static_pass_number */
1981 PROP_gimple_any, /* properties_required */
1982 0, /* properties_provided */
1983 0, /* properties_destroyed */
1984 0, /* todo_flags_start */
1985 TODO_dump_func /* todo_flags_finish */
1989 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1992 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1996 /* Since this block is no longer reachable, we can just delete all
1997 of its PHI nodes. */
1998 phi = phi_nodes (bb);
2001 tree next = PHI_CHAIN (phi);
2002 remove_phi_node (phi, NULL_TREE, true);
2006 /* Remove edges to BB's successors. */
2007 while (EDGE_COUNT (bb->succs) > 0)
2008 remove_edge (EDGE_SUCC (bb, 0));
2012 /* Remove statements of basic block BB. */
2015 remove_bb (basic_block bb)
2017 block_stmt_iterator i;
2018 source_location loc = UNKNOWN_LOCATION;
2022 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2023 if (dump_flags & TDF_DETAILS)
2025 dump_bb (bb, dump_file, 0);
2026 fprintf (dump_file, "\n");
2032 struct loop *loop = bb->loop_father;
2034 /* If a loop gets removed, clean up the information associated with it. */
2036 if (loop->latch == bb
2037 || loop->header == bb)
2038 free_numbers_of_iterations_estimates_loop (loop);
2041 /* Remove all the instructions in the block. */
2042 if (bb_stmt_list (bb) != NULL_TREE)
2044 for (i = bsi_start (bb); !bsi_end_p (i);)
2046 tree stmt = bsi_stmt (i);
2047 if (TREE_CODE (stmt) == LABEL_EXPR
2048 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2049 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2052 block_stmt_iterator new_bsi;
2054 /* A non-reachable non-local label may still be referenced.
2055 But it no longer needs to carry the extra semantics of non-locality. */
2057 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2059 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2060 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2063 new_bb = bb->prev_bb;
2064 new_bsi = bsi_start (new_bb);
2065 bsi_remove (&i, false);
2066 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2070 /* Release SSA definitions if we are in SSA. Note that we
2071 may be called when not in SSA. For example,
2072 final_cleanup calls this function via
2073 cleanup_tree_cfg. */
2074 if (gimple_in_ssa_p (cfun))
2075 release_defs (stmt);
2077 bsi_remove (&i, true);
2080 /* Don't warn for removed gotos. Gotos are often removed due to
2081 jump threading, thus resulting in bogus warnings. Not great,
2082 since this way we lose warnings for gotos in the original
2083 program that are indeed unreachable. */
2084 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2086 if (EXPR_HAS_LOCATION (stmt))
2087 loc = EXPR_LOCATION (stmt);
2092 /* If requested, give a warning that the first statement in the
2093 block is unreachable. We walk statements backwards in the
2094 loop above, so the last statement we process is the first statement in the block. */
2096 if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
2097 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2099 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2104 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2105 predicate VAL, return the edge that will be taken out of the block.
2106 If VAL does not match a unique edge, NULL is returned. */
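/* For example (sketch): if BB ends in a COND_EXPR and VAL is
   integer_zero_node, the EDGE_FALSE_VALUE successor is returned; if VAL
   is not a gimple minimal invariant, no decision can be made and NULL is
   returned.  */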
2109 find_taken_edge (basic_block bb, tree val)
2113 stmt = last_stmt (bb);
2116 gcc_assert (is_ctrl_stmt (stmt));
2119 if (! is_gimple_min_invariant (val))
2122 if (TREE_CODE (stmt) == COND_EXPR)
2123 return find_taken_edge_cond_expr (bb, val);
2125 if (TREE_CODE (stmt) == SWITCH_EXPR)
2126 return find_taken_edge_switch_expr (bb, val);
2128 if (computed_goto_p (stmt))
2130 /* Only optimize if the argument is a label; if the argument is
2131 not a label then we cannot construct a proper CFG.
2133 It may be the case that we only need to allow the LABEL_REF to
2134 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2135 appear inside a LABEL_EXPR just to be safe. */
2136 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2137 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2138 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2145 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2146 statement, determine which of the outgoing edges will be taken out of the
2147 block. Return NULL if either edge may be taken. */
2150 find_taken_edge_computed_goto (basic_block bb, tree val)
2155 dest = label_to_block (val);
2158 e = find_edge (bb, dest);
2159 gcc_assert (e != NULL);
2165 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2166 statement, determine which of the two edges will be taken out of the
2167 block. Return NULL if either edge may be taken. */
2170 find_taken_edge_cond_expr (basic_block bb, tree val)
2172 edge true_edge, false_edge;
2174 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2176 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2177 return (integer_zerop (val) ? false_edge : true_edge);
2180 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2181 statement, determine which edge will be taken out of the block. Return
2182 NULL if any edge may be taken. */
2185 find_taken_edge_switch_expr (basic_block bb, tree val)
2187 tree switch_expr, taken_case;
2188 basic_block dest_bb;
2191 switch_expr = last_stmt (bb);
2192 taken_case = find_case_label_for_value (switch_expr, val);
2193 dest_bb = label_to_block (CASE_LABEL (taken_case));
2195 e = find_edge (bb, dest_bb);
2201 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2202 We can make optimal use here of the fact that the case labels are
2203 sorted: We can do a binary search for a case matching VAL. */
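/* For example (sketch): given the sorted label vector
   { 1, 2..5, 7, default } and VAL == 4, the search narrows to the 2..5
   range and returns that CASE_LABEL_EXPR; VAL == 6 matches no case and
   the default case (the last element of the vector) is returned.  */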
2206 find_case_label_for_value (tree switch_expr, tree val)
2208 tree vec = SWITCH_LABELS (switch_expr);
2209 size_t low, high, n = TREE_VEC_LENGTH (vec);
2210 tree default_case = TREE_VEC_ELT (vec, n - 1);
2212 for (low = -1, high = n - 1; high - low > 1; )
2214 size_t i = (high + low) / 2;
2215 tree t = TREE_VEC_ELT (vec, i);
2218 /* Cache the result of comparing CASE_LOW and val. */
2219 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2226 if (CASE_HIGH (t) == NULL)
2228 /* A single-valued case label. */
2234 /* A case range. We can only handle integer ranges. */
2235 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2240 return default_case;
2246 /*---------------------------------------------------------------------------
2248 ---------------------------------------------------------------------------*/
2250 /* Dump tree-specific information of block BB to file OUTF. */
2253 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2255 dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
2259 /* Dump a basic block on stderr. */
2262 debug_tree_bb (basic_block bb)
2264 dump_bb (bb, stderr, 0);
2268 /* Dump basic block with index N on stderr. */
2271 debug_tree_bb_n (int n)
2273 debug_tree_bb (BASIC_BLOCK (n));
2274 return BASIC_BLOCK (n);
2278 /* Dump the CFG on stderr.
2280 FLAGS are the same used by the tree dumping functions
2281 (see TDF_* in tree-pass.h). */
2284 debug_tree_cfg (int flags)
2286 dump_tree_cfg (stderr, flags);
2290 /* Dump the program showing basic block boundaries on the given FILE.
2292 FLAGS are the same used by the tree dumping functions (see TDF_* in tree-pass.h). */
2296 dump_tree_cfg (FILE *file, int flags)
2298 if (flags & TDF_DETAILS)
2300 const char *funcname
2301 = lang_hooks.decl_printable_name (current_function_decl, 2);
2304 fprintf (file, ";; Function %s\n\n", funcname);
2305 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2306 n_basic_blocks, n_edges, last_basic_block);
2308 brief_dump_cfg (file);
2309 fprintf (file, "\n");
2312 if (flags & TDF_STATS)
2313 dump_cfg_stats (file);
2315 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2319 /* Dump CFG statistics on FILE. */
2322 dump_cfg_stats (FILE *file)
2324 static long max_num_merged_labels = 0;
2325 unsigned long size, total = 0;
2328 const char * const fmt_str = "%-30s%-13s%12s\n";
2329 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2330 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2331 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2332 const char *funcname
2333 = lang_hooks.decl_printable_name (current_function_decl, 2);
2336 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2338 fprintf (file, "---------------------------------------------------------\n");
2339 fprintf (file, fmt_str, "", " Number of ", "Memory");
2340 fprintf (file, fmt_str, "", " instances ", "used ");
2341 fprintf (file, "---------------------------------------------------------\n");
2343 size = n_basic_blocks * sizeof (struct basic_block_def);
2345 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2346 SCALE (size), LABEL (size));
2350 num_edges += EDGE_COUNT (bb->succs);
2351 size = num_edges * sizeof (struct edge_def);
2353 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2355 fprintf (file, "---------------------------------------------------------\n");
2356 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2358 fprintf (file, "---------------------------------------------------------\n");
2359 fprintf (file, "\n");
2361 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2362 max_num_merged_labels = cfg_stats.num_merged_labels;
2364 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2365 cfg_stats.num_merged_labels, max_num_merged_labels);
2367 fprintf (file, "\n");
2371 /* Dump CFG statistics on stderr. Keep extern so that it's always
2372 linked in the final executable. */
2375 debug_cfg_stats (void)
2377 dump_cfg_stats (stderr);
2381 /* Dump the flowgraph to a .vcg FILE. */
2384 tree_cfg2vcg (FILE *file)
2389 const char *funcname
2390 = lang_hooks.decl_printable_name (current_function_decl, 2);
2392 /* Write the file header. */
2393 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2394 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2395 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2397 /* Write blocks and edges. */
2398 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2400 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2403 if (e->flags & EDGE_FAKE)
2404 fprintf (file, " linestyle: dotted priority: 10");
2406 fprintf (file, " linestyle: solid priority: 100");
2408 fprintf (file, " }\n");
2414 enum tree_code head_code, end_code;
2415 const char *head_name, *end_name;
2418 tree first = first_stmt (bb);
2419 tree last = last_stmt (bb);
2423 head_code = TREE_CODE (first);
2424 head_name = tree_code_name[head_code];
2425 head_line = get_lineno (first);
2428 head_name = "no-statement";
2432 end_code = TREE_CODE (last);
2433 end_name = tree_code_name[end_code];
2434 end_line = get_lineno (last);
2437 end_name = "no-statement";
2439 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2440 bb->index, bb->index, head_name, head_line, end_name,
2443 FOR_EACH_EDGE (e, ei, bb->succs)
2445 if (e->dest == EXIT_BLOCK_PTR)
2446 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2448 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2450 if (e->flags & EDGE_FAKE)
2451 fprintf (file, " priority: 10 linestyle: dotted");
2453 fprintf (file, " priority: 100 linestyle: solid");
2455 fprintf (file, " }\n");
2458 if (bb->next_bb != EXIT_BLOCK_PTR)
2462 fputs ("}\n\n", file);
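/* Illustrative sketch (not from the original file): writing the VCG dump
   to a named file so it can be loaded into a VCG-compatible graph viewer.
   The helper name and the error handling are assumptions.  */

static void
debug_cfg_to_vcg_file (const char *path)
{
  FILE *f = fopen (path, "w");

  if (!f)
    return;

  tree_cfg2vcg (f);
  fclose (f);
}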
2467 /*---------------------------------------------------------------------------
2468 Miscellaneous helpers
2469 ---------------------------------------------------------------------------*/
2471 /* Return true if T represents a stmt that always transfers control. */
2474 is_ctrl_stmt (const_tree t)
2476 return (TREE_CODE (t) == COND_EXPR
2477 || TREE_CODE (t) == SWITCH_EXPR
2478 || TREE_CODE (t) == GOTO_EXPR
2479 || TREE_CODE (t) == RETURN_EXPR
2480 || TREE_CODE (t) == RESX_EXPR);
2484 /* Return true if T is a statement that may alter the flow of control
2485 (e.g., a call to a non-returning function). */
2488 is_ctrl_altering_stmt (const_tree t)
2493 call = get_call_expr_in (CONST_CAST_TREE (t));
2496 /* A non-pure/const CALL_EXPR alters flow control if the current
2497 function has nonlocal labels. */
2498 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2501 /* A CALL_EXPR also alters control flow if it does not return. */
2502 if (call_expr_flags (call) & ECF_NORETURN)
2506 /* OpenMP directives alter control flow. */
2507 if (OMP_DIRECTIVE_P (t))
2510 /* If a statement can throw, it alters control flow. */
2511 return tree_can_throw_internal (t);
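/* Illustrative sketch (not from the original file): the noreturn test
   used above, isolated into a hypothetical predicate.  It returns true
   only for statements containing a call known not to return.  */

static bool
call_is_noreturn_p (tree stmt)
{
  tree call = get_call_expr_in (stmt);

  return call != NULL_TREE && (call_expr_flags (call) & ECF_NORETURN) != 0;
}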
2515 /* Return true if T is a computed goto. */
2518 computed_goto_p (const_tree t)
2520 return (TREE_CODE (t) == GOTO_EXPR
2521 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2525 /* Return true if T is a simple local goto. */
2528 simple_goto_p (const_tree t)
2530 return (TREE_CODE (t) == GOTO_EXPR
2531 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2535 /* Return true if T can make an abnormal transfer of control flow.
2536 Transfers of control flow associated with EH are excluded. */
2539 tree_can_make_abnormal_goto (const_tree t)
2541 if (computed_goto_p (t))
2543 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
2544 t = GIMPLE_STMT_OPERAND (t, 1);
2545 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2546 t = TREE_OPERAND (t, 0);
2547 if (TREE_CODE (t) == CALL_EXPR)
2548 return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
2553 /* Return true if T should start a new basic block. PREV_T is the
2554 statement preceding T. It is used when T is a label or a case label.
2555 Labels should only start a new basic block if their previous statement
2556 wasn't a label. Otherwise, a sequence of labels would generate
2557 unnecessary basic blocks that only contain a single label. */
2560 stmt_starts_bb_p (const_tree t, const_tree prev_t)
2565 /* LABEL_EXPRs start a new basic block only if the preceding
2566 statement wasn't a label of the same type. This prevents the
2567 creation of consecutive blocks that have nothing but a single label. */
2569 if (TREE_CODE (t) == LABEL_EXPR)
2571 /* Nonlocal and computed GOTO targets always start a new block. */
2572 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2573 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2576 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2578 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2581 cfg_stats.num_merged_labels++;
2592 /* Return true if T should end a basic block. */
2595 stmt_ends_bb_p (const_tree t)
2597 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
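/* Illustrative sketch (not from the original file): scanning a block for
   the first statement that stmt_ends_bb_p says must terminate it.  In a
   well-formed CFG such a statement can only be the last one; the helper
   name is hypothetical.  */

static tree
find_block_terminator (basic_block bb)
{
  block_stmt_iterator bsi;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    if (stmt_ends_bb_p (bsi_stmt (bsi)))
      return bsi_stmt (bsi);

  return NULL_TREE;
}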
2600 /* Remove block annotations and other data structures. */
2603 delete_tree_cfg_annotations (void)
2606 block_stmt_iterator bsi;
2608 /* Remove annotations from every tree in the function. */
2610 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2612 tree stmt = bsi_stmt (bsi);
2613 ggc_free (stmt->base.ann);
2614 stmt->base.ann = NULL;
2616 label_to_block_map = NULL;
2620 /* Return the first statement in basic block BB. */
2623 first_stmt (basic_block bb)
2625 block_stmt_iterator i = bsi_start (bb);
2626 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2629 /* Return the last statement in basic block BB. */
2632 last_stmt (basic_block bb)
2634 block_stmt_iterator b = bsi_last (bb);
2635 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2638 /* Return the last statement of an otherwise empty block. Return NULL
2639 if the block is totally empty, or if it contains more than one statement. */
2643 last_and_only_stmt (basic_block bb)
2645 block_stmt_iterator i = bsi_last (bb);
2651 last = bsi_stmt (i);
2656 /* Empty statements should no longer appear in the instruction stream.
2657 Everything that might have appeared before should be deleted by
2658 remove_useless_stmts, and the optimizers should just bsi_remove
2659 instead of smashing with build_empty_stmt.
2661 Thus the only thing that should appear here in a block containing
2662 one executable statement is a label. */
2663 prev = bsi_stmt (i);
2664 if (TREE_CODE (prev) == LABEL_EXPR)
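/* Illustrative sketch (not from the original file): combining
   last_and_only_stmt with simple_goto_p to recognize a block whose only
   executable statement is a plain local goto.  The helper name is
   hypothetical.  */

static bool
block_is_trivial_goto_p (basic_block bb)
{
  tree stmt = last_and_only_stmt (bb);

  return stmt != NULL_TREE && simple_goto_p (stmt);
}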
2671 /* Mark BB as the basic block holding statement T. */
2674 set_bb_for_stmt (tree t, basic_block bb)
2676 if (TREE_CODE (t) == PHI_NODE)
2678 else if (TREE_CODE (t) == STATEMENT_LIST)
2680 tree_stmt_iterator i;
2681 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2682 set_bb_for_stmt (tsi_stmt (i), bb);
2686 stmt_ann_t ann = get_stmt_ann (t);
2689 /* If the statement is a label, add the label to block-to-labels map
2690 so that we can speed up edge creation for GOTO_EXPRs. */
2691 if (TREE_CODE (t) == LABEL_EXPR)
2695 t = LABEL_EXPR_LABEL (t);
2696 uid = LABEL_DECL_UID (t);
2699 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2700 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2701 if (old_len <= (unsigned) uid)
2703 unsigned new_len = 3 * uid / 2;
2705 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2710 /* We're moving an existing label. Make sure that we've
2711 removed it from the old block. */
2713 || !VEC_index (basic_block, label_to_block_map, uid));
2714 VEC_replace (basic_block, label_to_block_map, uid, bb);
2719 /* Faster version of set_bb_for_stmt that assumes the statement is being
2720 moved from one basic block to another.
2721 For BB splitting we can run into a quadratic case, so performance is quite
2722 important; knowing that the tables are big enough, change_bb_for_stmt
2723 can be inlined as a leaf function. */
2725 change_bb_for_stmt (tree t, basic_block bb)
2727 get_stmt_ann (t)->bb = bb;
2728 if (TREE_CODE (t) == LABEL_EXPR)
2729 VEC_replace (basic_block, label_to_block_map,
2730 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2733 /* Finds iterator for STMT. */
2735 extern block_stmt_iterator
2736 bsi_for_stmt (tree stmt)
2738 block_stmt_iterator bsi;
2740 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2741 if (bsi_stmt (bsi) == stmt)
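/* Illustrative sketch (not from the original file): using bsi_for_stmt to
   recover an iterator for a statement we only have a pointer to, and then
   deleting it.  Passing true to bsi_remove also drops the statement from
   the EH tables, which is what callers want when the statement is gone
   for good.  The helper name is hypothetical.  */

static void
delete_stmt (tree stmt)
{
  block_stmt_iterator bsi = bsi_for_stmt (stmt);

  bsi_remove (&bsi, true);
}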
2747 /* Mark statement T as modified, and update it. */
2749 update_modified_stmts (tree t)
2751 if (!ssa_operands_active ())
2753 if (TREE_CODE (t) == STATEMENT_LIST)
2755 tree_stmt_iterator i;
2757 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2759 stmt = tsi_stmt (i);
2760 update_stmt_if_modified (stmt);
2764 update_stmt_if_modified (t);
2767 /* Insert statement (or statement list) T before the statement
2768 pointed-to by iterator I. M specifies how to update iterator I
2769 after insertion (see enum bsi_iterator_update). */
2772 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2774 set_bb_for_stmt (t, i->bb);
2775 update_modified_stmts (t);
2776 tsi_link_before (&i->tsi, t, m);
2780 /* Insert statement (or statement list) T after the statement
2781 pointed-to by iterator I. M specifies how to update iterator I
2782 after insertion (see enum bsi_iterator_update). */
2785 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2787 set_bb_for_stmt (t, i->bb);
2788 update_modified_stmts (t);
2789 tsi_link_after (&i->tsi, t, m);
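/* Illustrative sketch (not from the original file): inserting a new
   statement right after an existing one.  BSI_SAME_STMT leaves the
   iterator on the anchor statement, which is what a caller walking the
   block usually expects.  The helper name is hypothetical.  */

static void
insert_stmt_after (tree new_stmt, tree anchor_stmt)
{
  block_stmt_iterator bsi = bsi_for_stmt (anchor_stmt);

  bsi_insert_after (&bsi, new_stmt, BSI_SAME_STMT);
}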
2793 /* Remove the statement pointed to by iterator I. The iterator is updated
2794 to the next statement.
2796 When REMOVE_EH_INFO is true we remove the statement pointed to by
2797 iterator I from the EH tables. Otherwise we do not modify the EH tables.
2800 Generally, REMOVE_EH_INFO should be true when the statement is going to
2801 be removed from the IL and not reinserted elsewhere. */
2804 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2806 tree t = bsi_stmt (*i);
2807 set_bb_for_stmt (t, NULL);
2808 delink_stmt_imm_use (t);
2809 tsi_delink (&i->tsi);
2810 mark_stmt_modified (t);
2813 remove_stmt_from_eh_region (t);
2814 gimple_remove_stmt_histograms (cfun, t);
2819 /* Move the statement at FROM so it comes right after the statement at TO. */
2822 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2824 tree stmt = bsi_stmt (*from);
2825 bsi_remove (from, false);
2826 /* We must have BSI_NEW_STMT here, as bsi_move_after is sometimes used to
2827 move statements to an empty block. */
2828 bsi_insert_after (to, stmt, BSI_NEW_STMT);
2832 /* Move the statement at FROM so it comes right before the statement at TO. */
2835 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2837 tree stmt = bsi_stmt (*from);
2838 bsi_remove (from, false);
2839 /* For consistency with bsi_move_after, it might be better to have
2840 BSI_NEW_STMT here; however, that breaks several places that expect
2841 that TO does not change. */
2842 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2846 /* Move the statement at FROM to the end of basic block BB. */
2849 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2851 block_stmt_iterator last = bsi_last (bb);
2853 /* Have to check bsi_end_p because it could be an empty block. */
2854 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2855 bsi_move_before (from, &last);
2857 bsi_move_after (from, &last);
2861 /* Replace the contents of the statement pointed to by iterator BSI
2862 with STMT. If UPDATE_EH_INFO is true, the exception handling
2863 information of the original statement is moved to the new statement. */
2866 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2869 tree orig_stmt = bsi_stmt (*bsi);
2871 if (stmt == orig_stmt)
2873 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2874 set_bb_for_stmt (stmt, bsi->bb);
2876 /* Preserve EH region information from the original statement, if
2877 requested by the caller. */
2880 eh_region = lookup_stmt_eh_region (orig_stmt);
2883 remove_stmt_from_eh_region (orig_stmt);
2884 add_stmt_to_eh_region (stmt, eh_region);
2888 gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
2889 gimple_remove_stmt_histograms (cfun, orig_stmt);
2890 delink_stmt_imm_use (orig_stmt);
2891 *bsi_stmt_ptr (*bsi) = stmt;
2892 mark_stmt_modified (stmt);
2893 update_modified_stmts (stmt);
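/* Illustrative sketch (not from the original file): swapping one
   statement for another in place.  Passing true as UPDATE_EH_INFO makes
   bsi_replace carry the original statement's EH region and value
   histograms over to the replacement.  The helper name is hypothetical.  */

static void
replace_stmt_keeping_eh (tree old_stmt, tree new_stmt)
{
  block_stmt_iterator bsi = bsi_for_stmt (old_stmt);

  bsi_replace (&bsi, new_stmt, true);
}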
2897 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2898 is made to place the statement in an existing basic block, but
2899 sometimes that isn't possible. When it isn't possible, the edge is
2900 split and the statement is added to the new block.
2902 In all cases, the returned *BSI points to the correct location. The
2903 return value is true if insertion should be done after the location,
2904 or false if it should be done before the location. If a new basic block
2905 has to be created, it is stored in *NEW_BB. */
2908 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2909 basic_block *new_bb)
2911 basic_block dest, src;
2917 /* If the destination has one predecessor which has no PHI nodes,
2918 insert there. Except for the exit block.
2920 The requirement for no PHI nodes could be relaxed. Basically we
2921 would have to examine the PHIs to prove that none of them used
2922 the value set by the statement we want to insert on E. That
2923 hardly seems worth the effort. */
2924 if (single_pred_p (dest)
2925 && ! phi_nodes (dest)
2926 && dest != EXIT_BLOCK_PTR)
2928 *bsi = bsi_start (dest);
2929 if (bsi_end_p (*bsi))
2932 /* Make sure we insert after any leading labels. */
2933 tmp = bsi_stmt (*bsi);
2934 while (TREE_CODE (tmp) == LABEL_EXPR)
2937 if (bsi_end_p (*bsi))
2939 tmp = bsi_stmt (*bsi);
2942 if (bsi_end_p (*bsi))
2944 *bsi = bsi_last (dest);
2951 /* If the source has one successor, the edge is not abnormal and
2952 the last statement does not end a basic block, insert there.
2953 Except for the entry block. */
2955 if ((e->flags & EDGE_ABNORMAL) == 0
2956 && single_succ_p (src)
2957 && src != ENTRY_BLOCK_PTR)
2959 *bsi = bsi_last (src);
2960 if (bsi_end_p (*bsi))
2963 tmp = bsi_stmt (*bsi);
2964 if (!stmt_ends_bb_p (tmp))
2967 /* Insert code just before returning the value. We may need to decompose
2968 the return in case it contains a non-trivial operand. */
2969 if (TREE_CODE (tmp) == RETURN_EXPR)
2971 tree op = TREE_OPERAND (tmp, 0);
2972 if (op && !is_gimple_val (op))
2974 gcc_assert (TREE_CODE (op) == GIMPLE_MODIFY_STMT);
2975 bsi_insert_before (bsi, op, BSI_NEW_STMT);
2976 TREE_OPERAND (tmp, 0) = GIMPLE_STMT_OPERAND (op, 0);
2983 /* Otherwise, create a new basic block, and split this edge. */
2984 dest = split_edge (e);
2987 e = single_pred_edge (dest);
2992 /* This routine will commit all pending edge insertions, creating any new
2993 basic blocks which are necessary. */
2996 bsi_commit_edge_inserts (void)
3002 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3005 FOR_EACH_EDGE (e, ei, bb->succs)
3006 bsi_commit_one_edge_insert (e, NULL);
3010 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3011 to this block, otherwise set it to NULL. */
3014 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3018 if (PENDING_STMT (e))
3020 block_stmt_iterator bsi;
3021 tree stmt = PENDING_STMT (e);
3023 PENDING_STMT (e) = NULL_TREE;
3025 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3026 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3028 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3033 /* Add STMT to the pending list of edge E. No actual insertion is
3034 made until a call to bsi_commit_edge_inserts () is made. */
3037 bsi_insert_on_edge (edge e, tree stmt)
3039 append_to_statement_list (stmt, &PENDING_STMT (e));
3042 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3043 block has to be created, it is returned. */
3046 bsi_insert_on_edge_immediate (edge e, tree stmt)
3048 block_stmt_iterator bsi;
3049 basic_block new_bb = NULL;
3051 gcc_assert (!PENDING_STMT (e));
3053 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3054 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3056 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
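/* Illustrative sketch (not from the original file): the usual two-step
   pattern for edge insertion.  A copy of STMT is queued on every outgoing
   edge of BB (unshare_expr avoids sharing one tree between several
   insertion points), and bsi_commit_edge_inserts then places the pending
   statements, splitting edges where no suitable block exists.  The helper
   name is hypothetical.  */

static void
insert_stmt_on_succ_edges (basic_block bb, tree stmt)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    bsi_insert_on_edge (e, unshare_expr (stmt));

  bsi_commit_edge_inserts ();
}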
3061 /*---------------------------------------------------------------------------
3062 Tree specific functions for CFG manipulation
3063 ---------------------------------------------------------------------------*/
3065 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3068 reinstall_phi_args (edge new_edge, edge old_edge)
3071 edge_var_map_vector v;
3075 v = redirect_edge_var_map_vector (old_edge);
3079 for (i = 0, phi = phi_nodes (new_edge->dest);
3080 VEC_iterate (edge_var_map, v, i, vm) && phi;
3081 i++, phi = PHI_CHAIN (phi))
3083 tree result = redirect_edge_var_map_result (vm);
3084 tree arg = redirect_edge_var_map_def (vm);
3086 gcc_assert (result == PHI_RESULT (phi));
3088 add_phi_arg (phi, arg, new_edge);
3091 redirect_edge_var_map_clear (old_edge);
3094 /* Returns the basic block after which the new basic block created
3095 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3096 near its "logical" location. This is of most help to humans looking
3097 at debugging dumps. */
3100 split_edge_bb_loc (edge edge_in)
3102 basic_block dest = edge_in->dest;
3104 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3105 return edge_in->src;
3107 return dest->prev_bb;
3110 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3111 Abort on abnormal edges. */
3114 tree_split_edge (edge edge_in)
3116 basic_block new_bb, after_bb, dest;
3119 /* Abnormal edges cannot be split. */
3120 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3122 dest = edge_in->dest;
3124 after_bb = split_edge_bb_loc (edge_in);
3126 new_bb = create_empty_bb (after_bb);
3127 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3128 new_bb->count = edge_in->count;
3129 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3130 new_edge->probability = REG_BR_PROB_BASE;
3131 new_edge->count = edge_in->count;
3133 e = redirect_edge_and_branch (edge_in, new_bb);
3134 gcc_assert (e == edge_in);
3135 reinstall_phi_args (new_edge, e);
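/* Illustrative sketch (not from the original file): splitting every
   critical outgoing edge of a single block, skipping abnormal edges since
   tree_split_edge asserts they are never split.  The file's own
   split_critical_edges pass applies essentially this loop to every block.
   The helper name is hypothetical.  */

static void
split_critical_succ_edges (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
      split_edge (e);
}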
3140 /* Callback for walk_tree, check that all elements with address taken are
3141 properly noticed as such. The DATA is an int* that is 1 if TP was seen
3142 inside a PHI node. */
3145 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3152 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3153 #define CHECK_OP(N, MSG) \
3154 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3155 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3157 switch (TREE_CODE (t))
3160 if (SSA_NAME_IN_FREE_LIST (t))
3162 error ("SSA name in freelist but still referenced");
3168 x = fold (ASSERT_EXPR_COND (t));
3169 if (x == boolean_false_node)
3171 error ("ASSERT_EXPR with an always-false condition");
3179 case GIMPLE_MODIFY_STMT:
3180 x = GIMPLE_STMT_OPERAND (t, 0);
3181 if (TREE_CODE (x) == BIT_FIELD_REF
3182 && is_gimple_reg (TREE_OPERAND (x, 0)))
3184 error ("GIMPLE register modified with BIT_FIELD_REF");
3193 bool old_side_effects;
3196 bool new_side_effects;
3198 old_invariant = TREE_INVARIANT (t);
3199 old_constant = TREE_CONSTANT (t);
3200 old_side_effects = TREE_SIDE_EFFECTS (t);
3202 recompute_tree_invariant_for_addr_expr (t);
3203 new_invariant = TREE_INVARIANT (t);
3204 new_side_effects = TREE_SIDE_EFFECTS (t);
3205 new_constant = TREE_CONSTANT (t);
3207 if (old_invariant != new_invariant)
3209 error ("invariant not recomputed when ADDR_EXPR changed");
3213 if (old_constant != new_constant)
3215 error ("constant not recomputed when ADDR_EXPR changed");
3218 if (old_side_effects != new_side_effects)
3220 error ("side effects not recomputed when ADDR_EXPR changed");
3224 /* Skip any references (they will be checked when we recurse down the
3225 tree) and ensure that any variable used as a prefix is marked addressable. */
3227 for (x = TREE_OPERAND (t, 0);
3228 handled_component_p (x);
3229 x = TREE_OPERAND (x, 0))
3232 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3234 if (!TREE_ADDRESSABLE (x))
3236 error ("address taken, but ADDRESSABLE bit not set");
3244 x = COND_EXPR_COND (t);
3245 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3247 error ("non-integral used in condition");
3250 if (!is_gimple_condexpr (x))
3252 error ("invalid conditional operand");
3259 case FIX_TRUNC_EXPR:
3264 case NON_LVALUE_EXPR:
3265 case TRUTH_NOT_EXPR:
3266 CHECK_OP (0, "invalid operand to unary operator");
3273 case ARRAY_RANGE_REF:
3275 case VIEW_CONVERT_EXPR:
3276 /* We have a nest of references. Verify that each of the operands
3277 that determine where to reference is either a constant or a variable,
3278 verify that the base is valid, and then show we've already checked the subtrees. */
3280 while (handled_component_p (t))
3282 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3283 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3284 else if (TREE_CODE (t) == ARRAY_REF
3285 || TREE_CODE (t) == ARRAY_RANGE_REF)
3287 CHECK_OP (1, "invalid array index");
3288 if (TREE_OPERAND (t, 2))
3289 CHECK_OP (2, "invalid array lower bound");
3290 if (TREE_OPERAND (t, 3))
3291 CHECK_OP (3, "invalid array stride");
3293 else if (TREE_CODE (t) == BIT_FIELD_REF)
3295 if (!host_integerp (TREE_OPERAND (t, 1), 1)
3296 || !host_integerp (TREE_OPERAND (t, 2), 1))
3298 error ("invalid position or size operand to BIT_FIELD_REF");
3301 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3302 && (TYPE_PRECISION (TREE_TYPE (t))
3303 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3305 error ("integral result type precision does not match "
3306 "field size of BIT_FIELD_REF");
3309 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3310 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
3311 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3313 error ("mode precision of non-integral result does not "
3314 "match field size of BIT_FIELD_REF");
3319 t = TREE_OPERAND (t, 0);
3322 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3324 error ("invalid reference prefix");
3331 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; they should be done using
3332 POINTER_PLUS_EXPR. */
3333 if (POINTER_TYPE_P (TREE_TYPE (t)))
3335 error ("invalid operand to plus/minus, type is a pointer");
3338 CHECK_OP (0, "invalid operand to binary operator");
3339 CHECK_OP (1, "invalid operand to binary operator");
3342 case POINTER_PLUS_EXPR:
3343 /* Check to make sure the first operand is a pointer or reference type. */
3344 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3346 error ("invalid operand to pointer plus, first operand is not a pointer");
3349 /* Check to make sure the second operand is an integer with type of sizetype. */
3351 if (!useless_type_conversion_p (sizetype,
3352 TREE_TYPE (TREE_OPERAND (t, 1))))
3354 error ("invalid operand to pointer plus, second operand is not an "
3355 "integer with type of sizetype.");
3365 case UNORDERED_EXPR:
3374 case TRUNC_DIV_EXPR:
3376 case FLOOR_DIV_EXPR:
3377 case ROUND_DIV_EXPR:
3378 case TRUNC_MOD_EXPR:
3380 case FLOOR_MOD_EXPR:
3381 case ROUND_MOD_EXPR:
3383 case EXACT_DIV_EXPR:
3393 CHECK_OP (0, "invalid operand to binary operator");
3394 CHECK_OP (1, "invalid operand to binary operator");
3398 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3410 /* Verifies if EXPR is a valid GIMPLE unary expression. Returns true
3411 if there is an error, otherwise false. */
3414 verify_gimple_unary_expr (const_tree expr)
3416 tree op = TREE_OPERAND (expr, 0);
3417 tree type = TREE_TYPE (expr);
3419 if (!is_gimple_val (op))
3421 error ("invalid operand in unary expression");
3425 /* For general unary expressions we have the operations type
3426 as the effective type the operation is carried out on. So all
3427 we need to require is that the operand is trivially convertible to that type. */
3429 if (!useless_type_conversion_p (type, TREE_TYPE (op)))
3431 error ("type mismatch in unary expression");
3432 debug_generic_expr (type);
3433 debug_generic_expr (TREE_TYPE (op));
3440 /* Verifies if EXPR is a valid GIMPLE binary expression. Returns true
3441 if there is an error, otherwise false. */
3444 verify_gimple_binary_expr (const_tree expr)
3446 tree op0 = TREE_OPERAND (expr, 0);
3447 tree op1 = TREE_OPERAND (expr, 1);
3448 tree type = TREE_TYPE (expr);
3450 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3452 error ("invalid operands in binary expression");
3456 /* For general binary expressions we have the operations type
3457 as the effective type the operation is carried out on. So all
3458 we need to require is that both operands are trivially convertible to that type. */
3460 if (!useless_type_conversion_p (type, TREE_TYPE (op0))
3461 || !useless_type_conversion_p (type, TREE_TYPE (op1)))
3463 error ("type mismatch in binary expression");
3464 debug_generic_stmt (type);
3465 debug_generic_stmt (TREE_TYPE (op0));
3466 debug_generic_stmt (TREE_TYPE (op1));
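/* Illustrative sketch (not from the original file): the same
   useless_type_conversion_p discipline applied to a whole assignment,
   checking that the RHS value is usable where the LHS type is expected
   without an explicit conversion.  The helper name is hypothetical.  */

static bool
assignment_types_compatible_p (tree stmt)
{
  tree lhs, rhs;

  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
    return true;

  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
  rhs = GIMPLE_STMT_OPERAND (stmt, 1);

  return useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs));
}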
3473 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3474 Returns true if there is an error, otherwise false. */
3477 verify_gimple_min_lval (tree expr)
3481 if (is_gimple_id (expr))
3484 if (TREE_CODE (expr) != INDIRECT_REF
3485 && TREE_CODE (expr) != ALIGN_INDIRECT_REF
3486 && TREE_CODE (expr) != MISALIGNED_INDIRECT_REF)
3488 error ("invalid expression for min lvalue");
3492 op = TREE_OPERAND (expr, 0);
3493 if (!is_gimple_val (op))
3495 error ("invalid operand in indirect reference");
3496 debug_generic_stmt (op);
3499 if (!useless_type_conversion_p (TREE_TYPE (expr),
3500 TREE_TYPE (TREE_TYPE (op))))
3502 error ("type mismatch in indirect reference");
3503 debug_generic_stmt (TREE_TYPE (expr));
3504 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3511 /* Verify if EXPR is a valid GIMPLE reference expression. Returns true
3512 if there is an error, otherwise false. */
3515 verify_gimple_reference (tree expr)
3517 while (handled_component_p (expr))
3519 tree op = TREE_OPERAND (expr, 0);
3521 if (TREE_CODE (expr) == ARRAY_REF
3522 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3524 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3525 || (TREE_OPERAND (expr, 2)
3526 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3527 || (TREE_OPERAND (expr, 3)
3528 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3530 error ("invalid operands to array reference");
3531 debug_generic_stmt (expr);
3536 /* Verify if the reference array element types are compatible. */
3537 if (TREE_CODE (expr) == ARRAY_REF
3538 && !useless_type_conversion_p (TREE_TYPE (expr),
3539 TREE_TYPE (TREE_TYPE (op))))
3541 error ("type mismatch in array reference");
3542 debug_generic_stmt (TREE_TYPE (expr));
3543 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3546 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3547 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3548 TREE_TYPE (TREE_TYPE (op))))
3550 error ("type mismatch in array range reference");
3551 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3552 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3556 if ((TREE_CODE (expr) == REALPART_EXPR
3557 || TREE_CODE (expr) == IMAGPART_EXPR)
3558 && !useless_type_conversion_p (TREE_TYPE (expr),
3559 TREE_TYPE (TREE_TYPE (op))))
3561 error ("type mismatch in real/imagpart reference");
3562 debug_generic_stmt (TREE_TYPE (expr));
3563 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3567 if (TREE_CODE (expr) == COMPONENT_REF
3568 && !useless_type_conversion_p (TREE_TYPE (expr),
3569 TREE_TYPE (TREE_OPERAND (expr, 1))))
3571 error ("type mismatch in component reference");
3572 debug_generic_stmt (TREE_TYPE (expr));
3573 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3577 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3578 is nothing to verify. Gross mismatches at most invoke
3579 undefined behavior. */
3584 return verify_gimple_min_lval (expr);
3587 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3588 list of pointer-to types that is trivially convertible to DEST. */
3591 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3595 if (!TYPE_POINTER_TO (src_obj))
3598 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3599 if (useless_type_conversion_p (dest, src))
3605 /* Verify the GIMPLE expression EXPR. Returns true if there is an
3606 error, otherwise false. */
3609 verify_gimple_expr (tree expr)
3611 tree type = TREE_TYPE (expr);
3613 if (is_gimple_val (expr))
3616 /* Special codes we cannot handle via their class. */
3617 switch (TREE_CODE (expr))
3622 tree op = TREE_OPERAND (expr, 0);
3623 if (!is_gimple_val (op))
3625 error ("invalid operand in conversion");
3629 /* Allow conversions between integral types and between pointer types. */
3631 if ((INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
3632 || (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (op))))
3635 /* Allow conversions between integral types and pointers only if
3636 there is no sign or zero extension involved. */
3637 if (((POINTER_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
3638 || (POINTER_TYPE_P (TREE_TYPE (op)) && INTEGRAL_TYPE_P (type)))
3639 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op)))
3642 /* Allow conversion from integer to offset type and vice versa. */
3643 if ((TREE_CODE (type) == OFFSET_TYPE
3644 && TREE_CODE (TREE_TYPE (op)) == INTEGER_TYPE)
3645 || (TREE_CODE (type) == INTEGER_TYPE
3646 && TREE_CODE (TREE_TYPE (op)) == OFFSET_TYPE))