1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
3 2010 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "basic-block.h"
33 #include "langhooks.h"
34 #include "tree-pretty-print.h"
35 #include "gimple-pretty-print.h"
36 #include "tree-flow.h"
38 #include "tree-dump.h"
39 #include "tree-pass.h"
40 #include "diagnostic-core.h"
44 #include "cfglayout.h"
45 #include "tree-ssa-propagate.h"
46 #include "value-prof.h"
47 #include "pointer-set.h"
48 #include "tree-inline.h"
50 /* This file contains functions for building the Control Flow Graph (CFG)
51 for a function tree. */
53 /* Local declarations. */
55 /* Initial capacity for the basic block array. */
56 static const int initial_cfg_capacity = 20;
58 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
59 which use a particular edge. The CASE_LABEL_EXPRs are chained together
60 via their TREE_CHAIN field, which we clear after we're done with the
61 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
63 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
64 update the case vector in response to edge redirections.
66 Right now this table is set up and torn down at key points in the
67 compilation process. It would be nice if we could make the table
68 more persistent. The key is getting notification of changes to
69 the CFG (particularly edge removal, creation and redirection). */
71 static struct pointer_map_t *edge_to_cases;
73 /* If we record edge_to_cases, this bitmap will hold indexes
74 of basic blocks that end in a GIMPLE_SWITCH which we touched
75 due to edge manipulations. */
77 static bitmap touched_switch_bbs;
82 long num_merged_labels;
85 static struct cfg_stats_d cfg_stats;
87 /* Nonzero if we found a computed goto while building basic blocks. */
88 static bool found_computed_goto;
90 /* Hash table to store last discriminator assigned for each locus. */
91 struct locus_discrim_map
96 static htab_t discriminator_per_locus;
98 /* Basic blocks and flowgraphs. */
99 static void make_blocks (gimple_seq);
100 static void factor_computed_gotos (void);
103 static void make_edges (void);
104 static void make_cond_expr_edges (basic_block);
105 static void make_gimple_switch_edges (basic_block);
106 static void make_goto_expr_edges (basic_block);
107 static void make_gimple_asm_edges (basic_block);
108 static unsigned int locus_map_hash (const void *);
109 static int locus_map_eq (const void *, const void *);
110 static void assign_discriminator (location_t, basic_block);
111 static edge gimple_redirect_edge_and_branch (edge, basic_block);
112 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
113 static unsigned int split_critical_edges (void);
115 /* Various helpers. */
116 static inline bool stmt_starts_bb_p (gimple, gimple);
117 static int gimple_verify_flow_info (void);
118 static void gimple_make_forwarder_block (edge);
119 static void gimple_cfg2vcg (FILE *);
120 static gimple first_non_label_stmt (basic_block);
122 /* Flowgraph optimization and cleanup. */
123 static void gimple_merge_blocks (basic_block, basic_block);
124 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
125 static void remove_bb (basic_block);
126 static edge find_taken_edge_computed_goto (basic_block, tree);
127 static edge find_taken_edge_cond_expr (basic_block, tree);
128 static edge find_taken_edge_switch_expr (basic_block, tree);
129 static tree find_case_label_for_value (gimple, tree);
130 static void group_case_labels_stmt (gimple);
133 init_empty_tree_cfg_for_function (struct function *fn)
135 /* Initialize the basic block array. */
137 profile_status_for_function (fn) = PROFILE_ABSENT;
138 n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
139 last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
140 basic_block_info_for_function (fn)
141 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
142 VEC_safe_grow_cleared (basic_block, gc,
143 basic_block_info_for_function (fn),
144 initial_cfg_capacity);
146 /* Build a mapping of labels to their associated blocks. */
147 label_to_block_map_for_function (fn)
148 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
149 VEC_safe_grow_cleared (basic_block, gc,
150 label_to_block_map_for_function (fn),
151 initial_cfg_capacity);
153 SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
154 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
155 SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
156 EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
158 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
159 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
160 EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
161 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
165 init_empty_tree_cfg (void)
167 init_empty_tree_cfg_for_function (cfun);
170 /*---------------------------------------------------------------------------
172 ---------------------------------------------------------------------------*/
174 /* Entry point to the CFG builder for trees. SEQ is the sequence of
175 statements to be added to the flowgraph. */
178 build_gimple_cfg (gimple_seq seq)
180 /* Register specific gimple functions. */
181 gimple_register_cfg_hooks ();
183 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
185 init_empty_tree_cfg ();
187 found_computed_goto = 0;
190 /* Computed gotos are hell to deal with, especially if there are
191 lots of them with a large number of destinations. So we factor
192 them to a common computed goto location before we build the
193 edge list. After we convert back to normal form, we will un-factor
194 the computed gotos since factoring introduces an unwanted jump. */
195 if (found_computed_goto)
196 factor_computed_gotos ();
198 /* Make sure there is always at least one block, even if it's empty. */
199 if (n_basic_blocks == NUM_FIXED_BLOCKS)
200 create_empty_bb (ENTRY_BLOCK_PTR);
202 /* Adjust the size of the array. */
203 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
204 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
206 /* To speed up statement iterator walks, we first purge dead labels. */
207 cleanup_dead_labels ();
209 /* Group case nodes to reduce the number of edges.
210 We do this after cleaning up dead labels because otherwise we miss
211 a lot of obvious case merging opportunities. */
212 group_case_labels ();
214 /* Create the edges of the flowgraph. */
215 discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
218 cleanup_dead_labels ();
219 htab_delete (discriminator_per_locus);
221 /* Debugging dumps. */
223 /* Write the flowgraph to a VCG file. */
225 int local_dump_flags;
226 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
229 gimple_cfg2vcg (vcg_file);
230 dump_end (TDI_vcg, vcg_file);
234 #ifdef ENABLE_CHECKING
240 execute_build_cfg (void)
242 gimple_seq body = gimple_body (current_function_decl);
244 build_gimple_cfg (body);
245 gimple_set_body (current_function_decl, NULL);
246 if (dump_file && (dump_flags & TDF_DETAILS))
248 fprintf (dump_file, "Scope blocks:\n");
249 dump_scope_blocks (dump_file, dump_flags);
254 struct gimple_opt_pass pass_build_cfg =
260 execute_build_cfg, /* execute */
263 0, /* static_pass_number */
264 TV_TREE_CFG, /* tv_id */
265 PROP_gimple_leh, /* properties_required */
266 PROP_cfg, /* properties_provided */
267 0, /* properties_destroyed */
268 0, /* todo_flags_start */
269 TODO_verify_stmts | TODO_cleanup_cfg
270 | TODO_dump_func /* todo_flags_finish */
275 /* Return true if T is a computed goto. */
278 computed_goto_p (gimple t)
280 return (gimple_code (t) == GIMPLE_GOTO
281 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
285 /* Search the CFG for any computed gotos. If found, factor them to a
286 common computed goto site. Also record the location of that site so
287 that we can un-factor the gotos after we have converted back to
291 factor_computed_gotos (void)
294 tree factored_label_decl = NULL;
296 gimple factored_computed_goto_label = NULL;
297 gimple factored_computed_goto = NULL;
299 /* We know there are one or more computed gotos in this function.
300 Examine the last statement in each basic block to see if the block
301 ends with a computed goto. */
305 gimple_stmt_iterator gsi = gsi_last_bb (bb);
311 last = gsi_stmt (gsi);
313 /* Ignore the computed goto we create when we factor the original
315 if (last == factored_computed_goto)
318 /* If the last statement is a computed goto, factor it. */
319 if (computed_goto_p (last))
323 /* The first time we find a computed goto we need to create
324 the factored goto block and the variable each original
325 computed goto will use for their goto destination. */
326 if (!factored_computed_goto)
328 basic_block new_bb = create_empty_bb (bb);
329 gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);
331 /* Create the destination of the factored goto. Each original
332 computed goto will put its desired destination into this
333 variable and jump to the label we create immediately
335 var = create_tmp_var (ptr_type_node, "gotovar");
337 /* Build a label for the new block which will contain the
338 factored computed goto. */
339 factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
340 factored_computed_goto_label
341 = gimple_build_label (factored_label_decl);
342 gsi_insert_after (&new_gsi, factored_computed_goto_label,
345 /* Build our new computed goto. */
346 factored_computed_goto = gimple_build_goto (var);
347 gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
350 /* Copy the original computed goto's destination into VAR. */
351 assignment = gimple_build_assign (var, gimple_goto_dest (last));
352 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
354 /* And re-vector the computed goto to the new destination. */
355 gimple_goto_set_dest (last, factored_label_decl);
361 /* Build a flowgraph for the sequence of stmts SEQ. */
364 make_blocks (gimple_seq seq)
366 gimple_stmt_iterator i = gsi_start (seq);
368 bool start_new_block = true;
369 bool first_stmt_of_seq = true;
370 basic_block bb = ENTRY_BLOCK_PTR;
372 while (!gsi_end_p (i))
379 /* If the statement starts a new basic block or if we have determined
380 in a previous pass that we need to create a new block for STMT, do
382 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
384 if (!first_stmt_of_seq)
385 seq = gsi_split_seq_before (&i);
386 bb = create_basic_block (seq, NULL, bb);
387 start_new_block = false;
390 /* Now add STMT to BB and create the subgraphs for special statement
392 gimple_set_bb (stmt, bb);
394 if (computed_goto_p (stmt))
395 found_computed_goto = true;
397 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
399 if (stmt_ends_bb_p (stmt))
401 /* If the stmt can make abnormal goto use a new temporary
402 for the assignment to the LHS. This makes sure the old value
403 of the LHS is available on the abnormal edge. Otherwise
404 we will end up with overlapping life-ranges for abnormal
406 if (gimple_has_lhs (stmt)
407 && stmt_can_make_abnormal_goto (stmt)
408 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
410 tree lhs = gimple_get_lhs (stmt);
411 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
412 gimple s = gimple_build_assign (lhs, tmp);
413 gimple_set_location (s, gimple_location (stmt));
414 gimple_set_block (s, gimple_block (stmt));
415 gimple_set_lhs (stmt, tmp);
416 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
417 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
418 DECL_GIMPLE_REG_P (tmp) = 1;
419 gsi_insert_after (&i, s, GSI_SAME_STMT);
421 start_new_block = true;
425 first_stmt_of_seq = false;
430 /* Create and return a new empty basic block after bb AFTER. */
433 create_bb (void *h, void *e, basic_block after)
439 /* Create and initialize a new basic block. Since alloc_block uses
440 GC allocation that clears memory to allocate a basic block, we do
441 not have to clear the newly allocated basic block here. */
444 bb->index = last_basic_block;
446 bb->il.gimple = ggc_alloc_cleared_gimple_bb_info ();
447 set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());
449 /* Add the new block to the linked list of blocks. */
450 link_block (bb, after);
452 /* Grow the basic block array if needed. */
453 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
455 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
456 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
459 /* Add the newly created block to the array. */
460 SET_BASIC_BLOCK (last_basic_block, bb);
469 /*---------------------------------------------------------------------------
471 ---------------------------------------------------------------------------*/
473 /* Fold COND_EXPR_COND of each COND_EXPR. */
476 fold_cond_expr_cond (void)
482 gimple stmt = last_stmt (bb);
484 if (stmt && gimple_code (stmt) == GIMPLE_COND)
486 location_t loc = gimple_location (stmt);
490 fold_defer_overflow_warnings ();
491 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
492 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
495 zerop = integer_zerop (cond);
496 onep = integer_onep (cond);
499 zerop = onep = false;
501 fold_undefer_overflow_warnings (zerop || onep,
503 WARN_STRICT_OVERFLOW_CONDITIONAL);
505 gimple_cond_make_false (stmt);
507 gimple_cond_make_true (stmt);
512 /* Join all the blocks in the flowgraph. */
518 struct omp_region *cur_region = NULL;
520 /* Create an edge from entry to the first block with executable
522 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
524 /* Traverse the basic block array placing edges. */
527 gimple last = last_stmt (bb);
532 enum gimple_code code = gimple_code (last);
536 make_goto_expr_edges (bb);
540 make_edge (bb, EXIT_BLOCK_PTR, 0);
544 make_cond_expr_edges (bb);
548 make_gimple_switch_edges (bb);
552 make_eh_edges (last);
555 case GIMPLE_EH_DISPATCH:
556 fallthru = make_eh_dispatch_edges (last);
560 /* If this function receives a nonlocal goto, then we need to
561 make edges from this call site to all the nonlocal goto
563 if (stmt_can_make_abnormal_goto (last))
564 make_abnormal_goto_edges (bb, true);
566 /* If this statement has reachable exception handlers, then
567 create abnormal edges to them. */
568 make_eh_edges (last);
570 /* BUILTIN_RETURN is really a return statement. */
571 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
572 make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
573 /* Some calls are known not to return. */
575 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
579 /* A GIMPLE_ASSIGN may throw internally and thus be considered
581 if (is_ctrl_altering_stmt (last))
582 make_eh_edges (last);
587 make_gimple_asm_edges (bb);
591 case GIMPLE_OMP_PARALLEL:
592 case GIMPLE_OMP_TASK:
594 case GIMPLE_OMP_SINGLE:
595 case GIMPLE_OMP_MASTER:
596 case GIMPLE_OMP_ORDERED:
597 case GIMPLE_OMP_CRITICAL:
598 case GIMPLE_OMP_SECTION:
599 cur_region = new_omp_region (bb, code, cur_region);
603 case GIMPLE_OMP_SECTIONS:
604 cur_region = new_omp_region (bb, code, cur_region);
608 case GIMPLE_OMP_SECTIONS_SWITCH:
612 case GIMPLE_OMP_ATOMIC_LOAD:
613 case GIMPLE_OMP_ATOMIC_STORE:
617 case GIMPLE_OMP_RETURN:
618 /* In the case of a GIMPLE_OMP_SECTION, the edge will go
619 somewhere other than the next block. This will be
621 cur_region->exit = bb;
622 fallthru = cur_region->type != GIMPLE_OMP_SECTION;
623 cur_region = cur_region->outer;
626 case GIMPLE_OMP_CONTINUE:
627 cur_region->cont = bb;
628 switch (cur_region->type)
631 /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
632 succs edges as abnormal to prevent splitting
634 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
635 /* Make the loopback edge. */
636 make_edge (bb, single_succ (cur_region->entry),
639 /* Create an edge from GIMPLE_OMP_FOR to exit, which
640 corresponds to the case that the body of the loop
641 is not executed at all. */
642 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
643 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
647 case GIMPLE_OMP_SECTIONS:
648 /* Wire up the edges into and out of the nested sections. */
650 basic_block switch_bb = single_succ (cur_region->entry);
652 struct omp_region *i;
653 for (i = cur_region->inner; i ; i = i->next)
655 gcc_assert (i->type == GIMPLE_OMP_SECTION);
656 make_edge (switch_bb, i->entry, 0);
657 make_edge (i->exit, bb, EDGE_FALLTHRU);
660 /* Make the loopback edge to the block with
661 GIMPLE_OMP_SECTIONS_SWITCH. */
662 make_edge (bb, switch_bb, 0);
664 /* Make the edge from the switch to exit. */
665 make_edge (switch_bb, bb->next_bb, 0);
676 gcc_assert (!stmt_ends_bb_p (last));
685 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
687 assign_discriminator (gimple_location (last), bb->next_bb);
694 /* Fold COND_EXPR_COND of each COND_EXPR. */
695 fold_cond_expr_cond ();
698 /* Trivial hash function for a location_t. ITEM is a pointer to
699 a hash table entry that maps a location_t to a discriminator. */
702 locus_map_hash (const void *item)
704 return ((const struct locus_discrim_map *) item)->locus;
707 /* Equality function for the locus-to-discriminator map. VA and VB
708 point to the two hash table entries to compare. */
711 locus_map_eq (const void *va, const void *vb)
713 const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
714 const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
715 return a->locus == b->locus;
718 /* Find the next available discriminator value for LOCUS. The
719 discriminator distinguishes among several basic blocks that
720 share a common locus, allowing for more accurate sample-based
724 next_discriminator_for_locus (location_t locus)
726 struct locus_discrim_map item;
727 struct locus_discrim_map **slot;
730 item.discriminator = 0;
731 slot = (struct locus_discrim_map **)
732 htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
733 (hashval_t) locus, INSERT);
735 if (*slot == HTAB_EMPTY_ENTRY)
737 *slot = XNEW (struct locus_discrim_map);
739 (*slot)->locus = locus;
740 (*slot)->discriminator = 0;
742 (*slot)->discriminator++;
743 return (*slot)->discriminator;
746 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
749 same_line_p (location_t locus1, location_t locus2)
751 expanded_location from, to;
753 if (locus1 == locus2)
756 from = expand_location (locus1);
757 to = expand_location (locus2);
759 if (from.line != to.line)
761 if (from.file == to.file)
763 return (from.file != NULL
765 && strcmp (from.file, to.file) == 0);
768 /* Assign a unique discriminator value to block BB if it begins at the same
769 LOCUS as its predecessor block. */
772 assign_discriminator (location_t locus, basic_block bb)
774 gimple first_in_to_bb, last_in_to_bb;
776 if (locus == 0 || bb->discriminator != 0)
779 first_in_to_bb = first_non_label_stmt (bb);
780 last_in_to_bb = last_stmt (bb);
781 if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
782 || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
783 bb->discriminator = next_discriminator_for_locus (locus);
786 /* Create the edges for a GIMPLE_COND starting at block BB. */
789 make_cond_expr_edges (basic_block bb)
791 gimple entry = last_stmt (bb);
792 gimple then_stmt, else_stmt;
793 basic_block then_bb, else_bb;
794 tree then_label, else_label;
796 location_t entry_locus;
799 gcc_assert (gimple_code (entry) == GIMPLE_COND);
801 entry_locus = gimple_location (entry);
803 /* Entry basic blocks for each component. */
804 then_label = gimple_cond_true_label (entry);
805 else_label = gimple_cond_false_label (entry);
806 then_bb = label_to_block (then_label);
807 else_bb = label_to_block (else_label);
808 then_stmt = first_stmt (then_bb);
809 else_stmt = first_stmt (else_bb);
811 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
812 assign_discriminator (entry_locus, then_bb);
813 e->goto_locus = gimple_location (then_stmt);
815 e->goto_block = gimple_block (then_stmt);
816 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
819 assign_discriminator (entry_locus, else_bb);
820 e->goto_locus = gimple_location (else_stmt);
822 e->goto_block = gimple_block (else_stmt);
825 /* We do not need the labels anymore. */
826 gimple_cond_set_true_label (entry, NULL_TREE);
827 gimple_cond_set_false_label (entry, NULL_TREE);
831 /* Called for each element in the hash table (P) as we delete the
832 edge to cases hash table.
834 Clear all the TREE_CHAINs to prevent problems with copying of
835 SWITCH_EXPRs and structure sharing rules, then free the hash table
839 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
840 void *data ATTRIBUTE_UNUSED)
844 for (t = (tree) *value; t; t = next)
846 next = TREE_CHAIN (t);
847 TREE_CHAIN (t) = NULL;
854 /* Start recording information mapping edges to case labels. */
857 start_recording_case_labels (void)
859 gcc_assert (edge_to_cases == NULL);
860 edge_to_cases = pointer_map_create ();
861 touched_switch_bbs = BITMAP_ALLOC (NULL);
864 /* Return nonzero if we are recording information for case labels. */
867 recording_case_labels_p (void)
869 return (edge_to_cases != NULL);
872 /* Stop recording information mapping edges to case labels and
873 remove any information we have recorded. */
875 end_recording_case_labels (void)
879 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
880 pointer_map_destroy (edge_to_cases);
881 edge_to_cases = NULL;
882 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
884 basic_block bb = BASIC_BLOCK (i);
887 gimple stmt = last_stmt (bb);
888 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
889 group_case_labels_stmt (stmt);
892 BITMAP_FREE (touched_switch_bbs);
895 /* If we are inside a {start,end}_recording_cases block, then return
896 a chain of CASE_LABEL_EXPRs from T which reference E.
898 Otherwise return NULL. */
901 get_cases_for_edge (edge e, gimple t)
906 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
907 chains available. Return NULL so the caller can detect this case. */
908 if (!recording_case_labels_p ())
911 slot = pointer_map_contains (edge_to_cases, e);
915 /* If we did not find E in the hash table, then this must be the first
916 time we have been queried for information about E & T. Add all the
917 elements from T to the hash table then perform the query again. */
919 n = gimple_switch_num_labels (t);
920 for (i = 0; i < n; i++)
922 tree elt = gimple_switch_label (t, i);
923 tree lab = CASE_LABEL (elt);
924 basic_block label_bb = label_to_block (lab);
925 edge this_edge = find_edge (e->src, label_bb);
927 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
929 slot = pointer_map_insert (edge_to_cases, this_edge);
930 TREE_CHAIN (elt) = (tree) *slot;
934 return (tree) *pointer_map_contains (edge_to_cases, e);
937 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
940 make_gimple_switch_edges (basic_block bb)
942 gimple entry = last_stmt (bb);
943 location_t entry_locus;
946 entry_locus = gimple_location (entry);
948 n = gimple_switch_num_labels (entry);
950 for (i = 0; i < n; ++i)
952 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
953 basic_block label_bb = label_to_block (lab);
954 make_edge (bb, label_bb, 0);
955 assign_discriminator (entry_locus, label_bb);
960 /* Return the basic block holding label DEST. */
963 label_to_block_fn (struct function *ifun, tree dest)
965 int uid = LABEL_DECL_UID (dest);
967 /* We would die hard when faced by an undefined label. Emit a label to
968 the very first basic block. This will hopefully make even the dataflow
969 and undefined variable warnings quite right. */
970 if (seen_error () && uid < 0)
972 gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
975 stmt = gimple_build_label (dest);
976 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
977 uid = LABEL_DECL_UID (dest);
979 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
980 <= (unsigned int) uid)
982 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
985 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
986 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
989 make_abnormal_goto_edges (basic_block bb, bool for_call)
991 basic_block target_bb;
992 gimple_stmt_iterator gsi;
994 FOR_EACH_BB (target_bb)
995 for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
997 gimple label_stmt = gsi_stmt (gsi);
1000 if (gimple_code (label_stmt) != GIMPLE_LABEL)
1003 target = gimple_label_label (label_stmt);
1005 /* Make an edge to every label block that has been marked as a
1006 potential target for a computed goto or a non-local goto. */
1007 if ((FORCED_LABEL (target) && !for_call)
1008 || (DECL_NONLOCAL (target) && for_call))
1010 make_edge (bb, target_bb, EDGE_ABNORMAL);
1016 /* Create edges for a goto statement at block BB. */
1019 make_goto_expr_edges (basic_block bb)
1021 gimple_stmt_iterator last = gsi_last_bb (bb);
1022 gimple goto_t = gsi_stmt (last);
1024 /* A simple GOTO creates normal edges. */
1025 if (simple_goto_p (goto_t))
1027 tree dest = gimple_goto_dest (goto_t);
1028 basic_block label_bb = label_to_block (dest);
1029 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1030 e->goto_locus = gimple_location (goto_t);
1031 assign_discriminator (e->goto_locus, label_bb);
1033 e->goto_block = gimple_block (goto_t);
1034 gsi_remove (&last, true);
1038 /* A computed GOTO creates abnormal edges. */
1039 make_abnormal_goto_edges (bb, false);
1042 /* Create edges for an asm statement with labels at block BB. */
1045 make_gimple_asm_edges (basic_block bb)
1047 gimple stmt = last_stmt (bb);
1048 location_t stmt_loc = gimple_location (stmt);
1049 int i, n = gimple_asm_nlabels (stmt);
1051 for (i = 0; i < n; ++i)
1053 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1054 basic_block label_bb = label_to_block (label);
1055 make_edge (bb, label_bb, 0);
1056 assign_discriminator (stmt_loc, label_bb);
1060 /*---------------------------------------------------------------------------
1062 ---------------------------------------------------------------------------*/
1064 /* Cleanup useless labels in basic blocks. This is something we wish
1065 to do early because it allows us to group case labels before creating
1066 the edges for the CFG, and it speeds up block statement iterators in
1067 all passes later on.
1068 We rerun this pass after CFG is created, to get rid of the labels that
1069 are no longer referenced. After then we do not run it any more, since
1070 (almost) no new labels should be created. */
1072 /* A map from basic block index to the leading label of that block. */
1073 static struct label_record
1078 /* True if the label is referenced from somewhere. */
1082 /* Given LABEL return the first label in the same basic block. */
1085 main_block_label (tree label)
1087 basic_block bb = label_to_block (label);
1088 tree main_label = label_for_bb[bb->index].label;
1090 /* label_to_block possibly inserted undefined label into the chain. */
1093 label_for_bb[bb->index].label = label;
1097 label_for_bb[bb->index].used = true;
1101 /* Clean up redundant labels within the exception tree. */
1104 cleanup_dead_labels_eh (void)
1111 if (cfun->eh == NULL)
1114 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
1115 if (lp && lp->post_landing_pad)
1117 lab = main_block_label (lp->post_landing_pad);
1118 if (lab != lp->post_landing_pad)
1120 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1121 EH_LANDING_PAD_NR (lab) = lp->index;
1125 FOR_ALL_EH_REGION (r)
1129 case ERT_MUST_NOT_THROW:
1135 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1139 c->label = main_block_label (lab);
1144 case ERT_ALLOWED_EXCEPTIONS:
1145 lab = r->u.allowed.label;
1147 r->u.allowed.label = main_block_label (lab);
1153 /* Cleanup redundant labels. This is a three-step process:
1154 1) Find the leading label for each block.
1155 2) Redirect all references to labels to the leading labels.
1156 3) Cleanup all useless labels. */
1159 cleanup_dead_labels (void)
1162 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
1164 /* Find a suitable label for each block. We use the first user-defined
1165 label if there is one, or otherwise just the first label we see. */
1168 gimple_stmt_iterator i;
1170 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1173 gimple stmt = gsi_stmt (i);
1175 if (gimple_code (stmt) != GIMPLE_LABEL)
1178 label = gimple_label_label (stmt);
1180 /* If we have not yet seen a label for the current block,
1181 remember this one and see if there are more labels. */
1182 if (!label_for_bb[bb->index].label)
1184 label_for_bb[bb->index].label = label;
1188 /* If we did see a label for the current block already, but it
1189 is an artificially created label, replace it if the current
1190 label is a user defined label. */
1191 if (!DECL_ARTIFICIAL (label)
1192 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1194 label_for_bb[bb->index].label = label;
1200 /* Now redirect all jumps/branches to the selected label.
1201 First do so for each block ending in a control statement. */
1204 gimple stmt = last_stmt (bb);
1208 switch (gimple_code (stmt))
1212 tree true_label = gimple_cond_true_label (stmt);
1213 tree false_label = gimple_cond_false_label (stmt);
1216 gimple_cond_set_true_label (stmt, main_block_label (true_label));
1218 gimple_cond_set_false_label (stmt, main_block_label (false_label));
1224 size_t i, n = gimple_switch_num_labels (stmt);
1226 /* Replace all destination labels. */
1227 for (i = 0; i < n; ++i)
1229 tree case_label = gimple_switch_label (stmt, i);
1230 tree label = main_block_label (CASE_LABEL (case_label));
1231 CASE_LABEL (case_label) = label;
1238 int i, n = gimple_asm_nlabels (stmt);
1240 for (i = 0; i < n; ++i)
1242 tree cons = gimple_asm_label_op (stmt, i);
1243 tree label = main_block_label (TREE_VALUE (cons));
1244 TREE_VALUE (cons) = label;
1249 /* We have to handle gotos until they're removed, and we don't
1250 remove them until after we've created the CFG edges. */
1252 if (!computed_goto_p (stmt))
1254 tree new_dest = main_block_label (gimple_goto_dest (stmt));
1255 gimple_goto_set_dest (stmt, new_dest);
1264 /* Do the same for the exception region tree labels. */
1265 cleanup_dead_labels_eh ();
1267 /* Finally, purge dead labels. All user-defined labels and labels that
1268 can be the target of non-local gotos and labels which have their
1269 address taken are preserved. */
1272 gimple_stmt_iterator i;
1273 tree label_for_this_bb = label_for_bb[bb->index].label;
1275 if (!label_for_this_bb)
1278 /* If the main label of the block is unused, we may still remove it. */
1279 if (!label_for_bb[bb->index].used)
1280 label_for_this_bb = NULL;
1282 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1285 gimple stmt = gsi_stmt (i);
1287 if (gimple_code (stmt) != GIMPLE_LABEL)
1290 label = gimple_label_label (stmt);
1292 if (label == label_for_this_bb
1293 || !DECL_ARTIFICIAL (label)
1294 || DECL_NONLOCAL (label)
1295 || FORCED_LABEL (label))
1298 gsi_remove (&i, true);
1302 free (label_for_bb);
1305 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1306 the ones jumping to the same label.
1307 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1310 group_case_labels_stmt (gimple stmt)
1312 int old_size = gimple_switch_num_labels (stmt);
1313 int i, j, new_size = old_size;
1314 tree default_case = NULL_TREE;
1315 tree default_label = NULL_TREE;
/* NOTE(review): this listing omits several original lines (braces, the
   has_default declaration, new_size decrements, loop index updates);
   code below is kept verbatim.  */
1318 /* The default label is always the first case in a switch
1319 statement after gimplification if it was not optimized
1321 if (!CASE_LOW (gimple_switch_default_label (stmt))
1322 && !CASE_HIGH (gimple_switch_default_label (stmt)))
1324 default_case = gimple_switch_default_label (stmt);
1325 default_label = CASE_LABEL (default_case);
1329 has_default = false;
1331 /* Look for possible opportunities to merge cases. */
1336 while (i < old_size)
1338 tree base_case, base_label, base_high;
1339 base_case = gimple_switch_label (stmt, i);
1341 gcc_assert (base_case);
1342 base_label = CASE_LABEL (base_case);
1344 /* Discard cases that have the same destination as the
1346 if (base_label == default_label)
/* Redundant slot: cleared here, squeezed out by the compression loop below.  */
1348 gimple_switch_set_label (stmt, i, NULL_TREE);
1354 base_high = CASE_HIGH (base_case)
1355 ? CASE_HIGH (base_case)
1356 : CASE_LOW (base_case);
1359 /* Try to merge case labels. Break out when we reach the end
1360 of the label vector or when we cannot merge the next case
1361 label with the current one. */
1362 while (i < old_size)
1364 tree merge_case = gimple_switch_label (stmt, i);
1365 tree merge_label = CASE_LABEL (merge_case);
/* t = base_high + 1: the low bound the next case must have for the
   two ranges to be contiguous.  */
1366 tree t = int_const_binop (PLUS_EXPR, base_high,
1367 integer_one_node, 1);
1369 /* Merge the cases if they jump to the same place,
1370 and their ranges are consecutive. */
1371 if (merge_label == base_label
1372 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1374 base_high = CASE_HIGH (merge_case) ?
1375 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1376 CASE_HIGH (base_case) = base_high;
1377 gimple_switch_set_label (stmt, i, NULL_TREE);
1386 /* Compress the case labels in the label vector, and adjust the
1387 length of the vector. */
1388 for (i = 0, j = 0; i < new_size; i++)
1390 while (! gimple_switch_label (stmt, j))
1392 gimple_switch_set_label (stmt, i,
1393 gimple_switch_label (stmt, j++));
1396 gcc_assert (new_size <= old_size);
1397 gimple_switch_set_num_labels (stmt, new_size);
1400 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1401 and scan the sorted vector of cases. Combine the ones jumping to the
1405 group_case_labels (void)
/* Visits each basic block and delegates to group_case_labels_stmt for
   blocks that end in a GIMPLE_SWITCH (the FOR_EACH_BB iteration line is
   omitted from this listing).  */
1411 gimple stmt = last_stmt (bb);
1412 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1413 group_case_labels_stmt (stmt);
1417 /* Checks whether we can merge block B into block A. */
1420 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1423 gimple_stmt_iterator gsi;
/* A must have exactly one successor, the edge must be a normal one
   (not abnormal/EH), and that successor must be B; B in turn must have
   A as its single predecessor and must not be the exit block.  */
1426 if (!single_succ_p (a))
1429 if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
1432 if (single_succ (a) != b)
1435 if (!single_pred_p (b))
1438 if (b == EXIT_BLOCK_PTR)
1441 /* If A ends by a statement causing exceptions or something similar, we
1442 cannot merge the blocks. */
1443 stmt = last_stmt (a)
1444 if (stmt && stmt_ends_bb_p (stmt))
1447 /* Do not allow a block with only a non-local label to be merged. */
1449 && gimple_code (stmt) == GIMPLE_LABEL
1450 && DECL_NONLOCAL (gimple_label_label (stmt)))
1453 /* Examine the labels at the beginning of B. */
1454 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
1457 stmt = gsi_stmt (gsi);
1458 if (gimple_code (stmt) != GIMPLE_LABEL)
1460 lab = gimple_label_label (stmt);
1462 /* Do not remove user labels. */
1463 if (!DECL_ARTIFICIAL (lab))
1467 /* Protect the loop latches. */
1468 if (current_loops && b->loop_father->latch == b)
1471 /* It must be possible to eliminate all phi nodes in B. If ssa form
1472 is not up-to-date and a name-mapping is registered, we cannot eliminate
1473 any phis. Symbols marked for renaming are never a problem though. */
1474 phis = phi_nodes (b);
1475 if (!gimple_seq_empty_p (phis)
1476 && name_mappings_registered_p ())
1479 /* When not optimizing, don't merge if we'd lose goto_locus. */
1481 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1483 location_t goto_locus = single_succ_edge (a)->goto_locus;
1484 gimple_stmt_iterator prev, next;
1485 prev = gsi_last_nondebug_bb (a);
1486 next = gsi_after_labels (b);
1487 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1488 gsi_next_nondebug (&next);
/* Refuse the merge only if neither the last real statement of A nor the
   first real statement of B already carries the edge's goto_locus.  */
1489 if ((gsi_end_p (prev)
1490 || gimple_location (gsi_stmt (prev)) != goto_locus)
1491 && (gsi_end_p (next)
1492 || gimple_location (gsi_stmt (next)) != goto_locus))
1499 /* Return true if the var whose chain of uses starts at PTR has no
1502 has_zero_uses_1 (const ssa_use_operand_t *head)
1504 const ssa_use_operand_t *ptr;
/* Walk the circular immediate-use list; any nondebug use disqualifies
   (the return statements are omitted from this listing).  */
1506 for (ptr = head->next; ptr != head; ptr = ptr->next)
1507 if (!is_gimple_debug (USE_STMT (ptr)))
1513 /* Return true if the var whose chain of uses starts at PTR has a
1514 single nondebug use. Set USE_P and STMT to that single nondebug
1515 use, if so, or to NULL otherwise. */
1517 single_imm_use_1 (const ssa_use_operand_t *head,
1518 use_operand_p *use_p, gimple *stmt)
1520 ssa_use_operand_t *ptr, *single_use = 0;
/* Scan the circular use list; the body that records the first nondebug
   use and bails out on a second one is omitted from this listing.  */
1522 for (ptr = head->next; ptr != head; ptr = ptr->next)
1523 if (!is_gimple_debug (USE_STMT (ptr)))
/* Publish the result through the out-parameters before returning.  */
1534 *use_p = single_use;
1537 *stmt = single_use ? single_use->loc.stmt : NULL;
1539 return !!single_use;
1542 /* Replaces all uses of NAME by VAL. */
1545 replace_uses_by (tree name, tree val)
1547 imm_use_iterator imm_iter;
1552 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1554 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1556 replace_exp (use, val);
1558 if (gimple_code (stmt) == GIMPLE_PHI)
1560 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
1561 if (e->flags & EDGE_ABNORMAL)
1563 /* This can only occur for virtual operands, since
1564 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1565 would prevent replacement. */
1566 gcc_assert (!is_gimple_reg (name));
1567 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
/* For non-PHI statements, refold in place and fix up any EH info and
   ADDR_EXPR invariants invalidated by the substitution.  */
1572 if (gimple_code (stmt) != GIMPLE_PHI)
1576 fold_stmt_inplace (stmt);
1577 if (cfgcleanup_altered_bbs)
1578 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1580 /* FIXME. This should go in update_stmt. */
1581 for (i = 0; i < gimple_num_ops (stmt); i++)
1583 tree op = gimple_op (stmt, i);
1584 /* Operands may be empty here. For example, the labels
1585 of a GIMPLE_COND are nulled out following the creation
1586 of the corresponding CFG edges. */
1587 if (op && TREE_CODE (op) == ADDR_EXPR)
1588 recompute_tree_invariant_for_addr_expr (op);
1591 maybe_clean_or_replace_eh_stmt (stmt, stmt);
/* After the walk above, no uses of NAME may remain.  */
1596 gcc_assert (has_zero_uses (name));
1598 /* Also update the trees stored in loop structures. */
1604 FOR_EACH_LOOP (li, loop, 0)
1606 substitute_in_loop_info (loop, name, val);
1611 /* Merge block B into block A. */
1614 gimple_merge_blocks (basic_block a, basic_block b)
1616 gimple_stmt_iterator last, gsi, psi;
1617 gimple_seq phis = phi_nodes (b);
1620 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1622 /* Remove all single-valued PHI nodes from block B of the form
1623 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1624 gsi = gsi_last_bb (a);
1625 for (psi = gsi_start (phis); !gsi_end_p (psi); )
1627 gimple phi = gsi_stmt (psi);
1628 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1630 bool may_replace_uses = !is_gimple_reg (def)
1631 || may_propagate_copy (def, use);
1633 /* In case we maintain loop closed ssa form, do not propagate arguments
1634 of loop exit phi nodes. */
1636 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1637 && is_gimple_reg (def)
1638 && TREE_CODE (use) == SSA_NAME
1639 && a->loop_father != b->loop_father)
1640 may_replace_uses = false;
1642 if (!may_replace_uses)
1644 gcc_assert (is_gimple_reg (def));
1646 /* Note that just emitting the copies is fine -- there is no problem
1647 with ordering of phi nodes. This is because A is the single
1648 predecessor of B, therefore results of the phi nodes cannot
1649 appear as arguments of the phi nodes. */
1650 copy = gimple_build_assign (def, use);
1651 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1652 remove_phi_node (&psi, false);
1656 /* If we deal with a PHI for virtual operands, we can simply
1657 propagate these without fussing with folding or updating
1659 if (!is_gimple_reg (def))
1661 imm_use_iterator iter;
1662 use_operand_p use_p;
1665 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1666 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1667 SET_USE (use_p, use);
1669 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1670 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
/* Real operands go through the full replace/fold path.  */
1673 replace_uses_by (def, use);
1675 remove_phi_node (&psi, true);
1679 /* Ensure that B follows A. */
1680 move_block_after (b, a);
1682 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1683 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1685 /* Remove labels from B and set gimple_bb to A for other statements. */
1686 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1688 gimple stmt = gsi_stmt (gsi);
1689 if (gimple_code (stmt) == GIMPLE_LABEL)
1691 tree label = gimple_label_label (stmt);
1694 gsi_remove (&gsi, false);
1696 /* Now that we can thread computed gotos, we might have
1697 a situation where we have a forced label in block B
1698 However, the label at the start of block B might still be
1699 used in other ways (think about the runtime checking for
1700 Fortran assigned gotos). So we can not just delete the
1701 label. Instead we move the label to the start of block A. */
1702 if (FORCED_LABEL (label))
1704 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1705 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
/* If the removed label was an EH landing pad, detach it from the
   landing-pad record so the EH tables stay consistent.  */
1708 lp_nr = EH_LANDING_PAD_NR (label);
1711 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1712 lp->post_landing_pad = NULL;
1717 gimple_set_bb (stmt, a);
1722 /* Merge the sequences. */
1723 last = gsi_last_bb (a);
1724 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1725 set_bb_seq (b, NULL);
1727 if (cfgcleanup_altered_bbs)
1728 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1732 /* Return the one of two successors of BB that is not reachable by a
1733 complex edge, if there is one. Else, return BB. We use
1734 this in optimizations that use post-dominators for their heuristics,
1735 to catch the cases in C++ where function calls are involved. */
1738 single_noncomplex_succ (basic_block bb)
1741 if (EDGE_COUNT (bb->succs) != 2)
1744 e0 = EDGE_SUCC (bb, 0);
1745 e1 = EDGE_SUCC (bb, 1);
/* If exactly one edge is complex, the other edge's destination is the
   answer (the return statements are omitted from this listing).  */
1746 if (e0->flags & EDGE_COMPLEX)
1748 if (e1->flags & EDGE_COMPLEX)
1754 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1757 notice_special_calls (gimple call)
1759 int flags = gimple_call_flags (call);
/* Record on cfun whether the function may call alloca or a
   returns-twice function (setjmp and friends).  */
1761 if (flags & ECF_MAY_BE_ALLOCA)
1762 cfun->calls_alloca = true;
1763 if (flags & ECF_RETURNS_TWICE)
1764 cfun->calls_setjmp = true;
1768 /* Clear flags set by notice_special_calls. Used by dead code removal
1769 to update the flags. */
1772 clear_special_calls (void)
1774 cfun->calls_alloca = false;
1775 cfun->calls_setjmp = false;
1778 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1781 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1783 /* Since this block is no longer reachable, we can just delete all
1784 of its PHI nodes. */
1785 remove_phi_nodes (bb);
1787 /* Remove edges to BB's successors. */
1788 while (EDGE_COUNT (bb->succs) > 0)
1789 remove_edge (EDGE_SUCC (bb, 0));
1793 /* Remove statements of basic block BB. */
1796 remove_bb (basic_block bb)
1798 gimple_stmt_iterator i;
1802 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1803 if (dump_flags & TDF_DETAILS)
1805 dump_bb (bb, dump_file, 0);
1806 fprintf (dump_file, "\n");
1812 struct loop *loop = bb->loop_father;
1814 /* If a loop gets removed, clean up the information associated
1816 if (loop->latch == bb
1817 || loop->header == bb)
1818 free_numbers_of_iterations_estimates_loop (loop);
1821 /* Remove all the instructions in the block. */
1822 if (bb_seq (bb) != NULL)
1824 /* Walk backwards so as to get a chance to substitute all
1825 released DEFs into debug stmts. See
1826 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1828 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
1830 gimple stmt = gsi_stmt (i);
1831 if (gimple_code (stmt) == GIMPLE_LABEL
1832 && (FORCED_LABEL (gimple_label_label (stmt))
1833 || DECL_NONLOCAL (gimple_label_label (stmt))))
1836 gimple_stmt_iterator new_gsi;
1838 /* A non-reachable non-local label may still be referenced.
1839 But it no longer needs to carry the extra semantics of
1841 if (DECL_NONLOCAL (gimple_label_label (stmt)))
1843 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
1844 FORCED_LABEL (gimple_label_label (stmt)) = 1;
/* Forced/non-local labels must survive; relocate them to the
   preceding block instead of deleting them.  */
1847 new_bb = bb->prev_bb;
1848 new_gsi = gsi_start_bb (new_bb);
1849 gsi_remove (&i, false);
1850 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
1854 /* Release SSA definitions if we are in SSA. Note that we
1855 may be called when not in SSA. For example,
1856 final_cleanup calls this function via
1857 cleanup_tree_cfg. */
1858 if (gimple_in_ssa_p (cfun))
1859 release_defs (stmt);
1861 gsi_remove (&i, true);
1865 i = gsi_last_bb (bb);
1871 remove_phi_nodes_and_edges_for_unreachable_block (bb);
1872 bb->il.gimple = NULL;
1876 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1877 predicate VAL, return the edge that will be taken out of the block.
1878 If VAL does not match a unique edge, NULL is returned. */
1881 find_taken_edge (basic_block bb, tree val)
1885 stmt = last_stmt (bb);
1888 gcc_assert (is_ctrl_stmt (stmt));
/* Only constant predicates allow the taken edge to be determined.  */
1893 if (!is_gimple_min_invariant (val))
1896 if (gimple_code (stmt) == GIMPLE_COND)
1897 return find_taken_edge_cond_expr (bb, val);
1899 if (gimple_code (stmt) == GIMPLE_SWITCH)
1900 return find_taken_edge_switch_expr (bb, val);
1902 if (computed_goto_p (stmt))
1904 /* Only optimize if the argument is a label, if the argument is
1905 not a label then we can not construct a proper CFG.
1907 It may be the case that we only need to allow the LABEL_REF to
1908 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1909 appear inside a LABEL_EXPR just to be safe. */
1910 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1911 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1912 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1919 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1920 statement, determine which of the outgoing edges will be taken out of the
1921 block. Return NULL if either edge may be taken. */
1924 find_taken_edge_computed_goto (basic_block bb, tree val)
/* VAL is a LABEL_DECL; resolve it to its block and find BB's edge there.  */
1929 dest = label_to_block (val);
1932 e = find_edge (bb, dest);
1933 gcc_assert (e != NULL);
1939 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1940 statement, determine which of the two edges will be taken out of the
1941 block. Return NULL if either edge may be taken. */
1944 find_taken_edge_cond_expr (basic_block bb, tree val)
1946 edge true_edge, false_edge;
1948 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
/* VAL must be a constant here; zero selects the false edge.  */
1950 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1951 return (integer_zerop (val) ? false_edge : true_edge);
1954 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
1955 statement, determine which edge will be taken out of the block. Return
1956 NULL if any edge may be taken. */
1959 find_taken_edge_switch_expr (basic_block bb, tree val)
1961 basic_block dest_bb;
/* Binary-search the case vector for VAL, then map the matching case's
   label to its destination block.  */
1966 switch_stmt = last_stmt (bb);
1967 taken_case = find_case_label_for_value (switch_stmt, val);
1968 dest_bb = label_to_block (CASE_LABEL (taken_case));
1970 e = find_edge (bb, dest_bb);
1976 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
1977 We can make optimal use here of the fact that the case labels are
1978 sorted: We can do a binary search for a case matching VAL. */
1981 find_case_label_for_value (gimple switch_stmt, tree val)
1983 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
1984 tree default_case = gimple_switch_default_label (switch_stmt);
/* Classic binary search over labels [1, n); index 0 is the default.
   (The low/high update statements are omitted from this listing.)  */
1986 for (low = 0, high = n; high - low > 1; )
1988 size_t i = (high + low) / 2;
1989 tree t = gimple_switch_label (switch_stmt, i);
1992 /* Cache the result of comparing CASE_LOW and val. */
1993 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2000 if (CASE_HIGH (t) == NULL)
2002 /* A singe-valued case label. */
2008 /* A case range. We can only handle integer ranges. */
2009 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
/* No case matched; the switch falls back to the default label.  */
2014 return default_case;
2018 /* Dump a basic block on stderr. */
2021 gimple_debug_bb (basic_block bb)
2023 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
2027 /* Dump basic block with index N on stderr. */
2030 gimple_debug_bb_n (int n)
/* Convenience wrapper for use from the debugger; returns the block.  */
2032 gimple_debug_bb (BASIC_BLOCK (n));
2033 return BASIC_BLOCK (n);
2037 /* Dump the CFG on stderr.
2039 FLAGS are the same used by the tree dumping functions
2040 (see TDF_* in tree-pass.h). */
2043 gimple_debug_cfg (int flags)
2045 gimple_dump_cfg (stderr, flags);
2049 /* Dump the program showing basic block boundaries on the given FILE.
2051 FLAGS are the same used by the tree dumping functions (see TDF_* in
2055 gimple_dump_cfg (FILE *file, int flags)
2057 if (flags & TDF_DETAILS)
2059 const char *funcname
2060 = lang_hooks.decl_printable_name (current_function_decl, 2);
2063 fprintf (file, ";; Function %s\n\n", funcname);
2064 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2065 n_basic_blocks, n_edges, last_basic_block);
2067 brief_dump_cfg (file);
2068 fprintf (file, "\n");
/* Optional statistics section, then the full function body with
   block boundaries marked.  */
2071 if (flags & TDF_STATS)
2072 dump_cfg_stats (file);
2074 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2078 /* Dump CFG statistics on FILE. */
2081 dump_cfg_stats (FILE *file)
2083 static long max_num_merged_labels = 0;
2084 unsigned long size, total = 0;
2087 const char * const fmt_str = "%-30s%-13s%12s\n";
2088 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2089 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2090 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2091 const char *funcname
2092 = lang_hooks.decl_printable_name (current_function_decl, 2);
2095 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2097 fprintf (file, "---------------------------------------------------------\n");
2098 fprintf (file, fmt_str, "", " Number of ", "Memory");
2099 fprintf (file, fmt_str, "", " instances ", "used ");
2100 fprintf (file, "---------------------------------------------------------\n");
2102 size = n_basic_blocks * sizeof (struct basic_block_def);
2104 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2105 SCALE (size), LABEL (size));
/* Count all successor edges over every block to size the edge table.  */
2109 num_edges += EDGE_COUNT (bb->succs);
2110 size = num_edges * sizeof (struct edge_def);
2112 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2114 fprintf (file, "---------------------------------------------------------\n");
2115 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2117 fprintf (file, "---------------------------------------------------------\n");
2118 fprintf (file, "\n");
/* Track the high-water mark of merged labels across invocations.  */
2120 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2121 max_num_merged_labels = cfg_stats.num_merged_labels;
2123 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2124 cfg_stats.num_merged_labels, max_num_merged_labels);
2126 fprintf (file, "\n");
2130 /* Dump CFG statistics on stderr. Keep extern so that it's always
2131 linked in the final executable. */
2134 debug_cfg_stats (void)
2136 dump_cfg_stats (stderr);
2140 /* Dump the flowgraph to a .vcg FILE. */
2143 gimple_cfg2vcg (FILE *file)
2148 const char *funcname
2149 = lang_hooks.decl_printable_name (current_function_decl, 2);
2151 /* Write the file header. */
2152 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2153 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2154 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2156 /* Write blocks and edges. */
2157 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2159 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
/* Fake edges are drawn dotted with low priority; real ones solid.  */
2162 if (e->flags & EDGE_FAKE)
2163 fprintf (file, " linestyle: dotted priority: 10");
2165 fprintf (file, " linestyle: solid priority: 100");
2167 fprintf (file, " }\n");
/* One node per basic block, labeled with its first and last statement.  */
2173 enum gimple_code head_code, end_code;
2174 const char *head_name, *end_name;
2177 gimple first = first_stmt (bb);
2178 gimple last = last_stmt (bb);
2182 head_code = gimple_code (first);
2183 head_name = gimple_code_name[head_code];
2184 head_line = get_lineno (first);
2187 head_name = "no-statement";
2191 end_code = gimple_code (last);
2192 end_name = gimple_code_name[end_code];
2193 end_line = get_lineno (last);
2196 end_name = "no-statement";
2198 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2199 bb->index, bb->index, head_name, head_line, end_name,
2202 FOR_EACH_EDGE (e, ei, bb->succs)
2204 if (e->dest == EXIT_BLOCK_PTR)
2205 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2207 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2209 if (e->flags & EDGE_FAKE)
2210 fprintf (file, " priority: 10 linestyle: dotted");
2212 fprintf (file, " priority: 100 linestyle: solid");
2214 fprintf (file, " }\n");
2217 if (bb->next_bb != EXIT_BLOCK_PTR)
2221 fputs ("}\n\n", file);
2226 /*---------------------------------------------------------------------------
2227 Miscellaneous helpers
2228 ---------------------------------------------------------------------------*/
2230 /* Return true if T represents a stmt that always transfers control. */
2233 is_ctrl_stmt (gimple t)
2235 switch (gimple_code (t))
/* NOTE(review): the case labels and return statements of this switch
   (presumably the unconditional-transfer codes such as GIMPLE_COND,
   GIMPLE_SWITCH, GIMPLE_GOTO, GIMPLE_RETURN) are omitted from this
   listing — confirm against the full source.  */
2249 /* Return true if T is a statement that may alter the flow of control
2250 (e.g., a call to a non-returning function). */
2253 is_ctrl_altering_stmt (gimple t)
2257 switch (gimple_code (t))
2261 int flags = gimple_call_flags (t);
2263 /* A non-pure/const call alters flow control if the current
2264 function has nonlocal labels. */
2265 if (!(flags & (ECF_CONST | ECF_PURE)) && cfun->has_nonlocal_label)
2268 /* A call also alters control flow if it does not return. */
2269 if (flags & ECF_NORETURN)
2272 /* BUILT_IN_RETURN call is same as return statement. */
2273 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2278 case GIMPLE_EH_DISPATCH:
2279 /* EH_DISPATCH branches to the individual catch handlers at
2280 this level of a try or allowed-exceptions region. It can
2281 fallthru to the next statement as well. */
/* An asm with labels ("asm goto") may branch to those labels.  */
2285 if (gimple_asm_nlabels (t) > 0)
2290 /* OpenMP directives alter control flow. */
2297 /* If a statement can throw, it alters control flow. */
2298 return stmt_can_throw_internal (t);
2302 /* Return true if T is a simple local goto. */
2305 simple_goto_p (gimple t)
2307 return (gimple_code (t) == GIMPLE_GOTO
2308 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2312 /* Return true if T can make an abnormal transfer of control flow.
2313 Transfers of control flow associated with EH are excluded. */
2316 stmt_can_make_abnormal_goto (gimple t)
2318 if (computed_goto_p (t))
/* Calls with side effects can reach a nonlocal label, if any exist.  */
2320 if (is_gimple_call (t))
2321 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
2326 /* Return true if STMT should start a new basic block. PREV_STMT is
2327 the statement preceding STMT. It is used when STMT is a label or a
2328 case label. Labels should only start a new basic block if their
2329 previous statement wasn't a label. Otherwise, sequence of labels
2330 would generate unnecessary basic blocks that only contain a single
2334 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2339 /* Labels start a new basic block only if the preceding statement
2340 wasn't a label of the same type. This prevents the creation of
2341 consecutive blocks that have nothing but a single label. */
2342 if (gimple_code (stmt) == GIMPLE_LABEL)
2344 /* Nonlocal and computed GOTO targets always start a new block. */
2345 if (DECL_NONLOCAL (gimple_label_label (stmt))
2346 || FORCED_LABEL (gimple_label_label (stmt)))
2349 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2351 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
/* A plain label after another label merges into the same block;
   count it for the statistics dump.  */
2354 cfg_stats.num_merged_labels++;
2365 /* Return true if T should end a basic block. */
2368 stmt_ends_bb_p (gimple t)
2370 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2373 /* Remove block annotations and other data structures. */
2376 delete_tree_cfg_annotations (void)
2378 label_to_block_map = NULL;
2382 /* Return the first statement in basic block BB. */
2385 first_stmt (basic_block bb)
2387 gimple_stmt_iterator i = gsi_start_bb (bb);
/* Skip leading debug statements; the return is omitted from this listing.  */
2390 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2398 /* Return the first non-label statement in basic block BB. */
2401 first_non_label_stmt (basic_block bb)
2403 gimple_stmt_iterator i = gsi_start_bb (bb);
2404 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2406 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2409 /* Return the last statement in basic block BB. */
2412 last_stmt (basic_block bb)
2414 gimple_stmt_iterator i = gsi_last_bb (bb);
/* Skip trailing debug statements; the return is omitted from this listing.  */
2417 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2425 /* Return the last statement of an otherwise empty block. Return NULL
2426 if the block is totally empty, or if it contains more than one
2430 last_and_only_stmt (basic_block bb)
2432 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2438 last = gsi_stmt (i);
2439 gsi_prev_nondebug (&i);
2443 /* Empty statements should no longer appear in the instruction stream.
2444 Everything that might have appeared before should be deleted by
2445 remove_useless_stmts, and the optimizers should just gsi_remove
2446 instead of smashing with build_empty_stmt.
2448 Thus the only thing that should appear here in a block containing
2449 one executable statement is a label. */
2450 prev = gsi_stmt (i);
2451 if (gimple_code (prev) == GIMPLE_LABEL)
2457 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2460 reinstall_phi_args (edge new_edge, edge old_edge)
2462 edge_var_map_vector v;
2465 gimple_stmt_iterator phis;
2467 v = redirect_edge_var_map_vector (old_edge);
/* Walk the queued (result, def) pairs in lockstep with the PHI nodes of
   the new edge's destination, re-adding each argument on NEW_EDGE.  */
2471 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2472 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2473 i++, gsi_next (&phis))
2475 gimple phi = gsi_stmt (phis);
2476 tree result = redirect_edge_var_map_result (vm);
2477 tree arg = redirect_edge_var_map_def (vm);
2479 gcc_assert (result == gimple_phi_result (phi));
2481 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2484 redirect_edge_var_map_clear (old_edge);
2487 /* Returns the basic block after which the new basic block created
2488 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2489 near its "logical" location. This is of most help to humans looking
2490 at debugging dumps. */
2493 split_edge_bb_loc (edge edge_in)
2495 basic_block dest = edge_in->dest;
2496 basic_block dest_prev = dest->prev_bb;
/* If the block before DEST already falls through to it via a normal
   edge, place the new block after the edge's source instead.  */
2500 edge e = find_edge (dest_prev, dest);
2501 if (e && !(e->flags & EDGE_COMPLEX))
2502 return edge_in->src;
2507 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2508 Abort on abnormal edges. */
2511 gimple_split_edge (edge edge_in)
2513 basic_block new_bb, after_bb, dest;
2516 /* Abnormal edges cannot be split. */
2517 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2519 dest = edge_in->dest;
2521 after_bb = split_edge_bb_loc (edge_in);
/* The new block inherits the edge's count/frequency and falls through
   unconditionally to the original destination.  */
2523 new_bb = create_empty_bb (after_bb);
2524 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2525 new_bb->count = edge_in->count;
2526 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2527 new_edge->probability = REG_BR_PROB_BASE;
2528 new_edge->count = edge_in->count;
2530 e = redirect_edge_and_branch (edge_in, new_bb);
2531 gcc_assert (e == edge_in);
2532 reinstall_phi_args (new_edge, e);
2538 /* Verify properties of the address expression T with base object BASE. */
2541 verify_address (tree t, tree base)
2544 bool old_side_effects;
2546 bool new_side_effects;
/* Recompute TREE_CONSTANT / TREE_SIDE_EFFECTS and complain if either
   flag was stale on the ADDR_EXPR.  */
2548 old_constant = TREE_CONSTANT (t);
2549 old_side_effects = TREE_SIDE_EFFECTS (t);
2551 recompute_tree_invariant_for_addr_expr (t);
2552 new_side_effects = TREE_SIDE_EFFECTS (t);
2553 new_constant = TREE_CONSTANT (t);
2555 if (old_constant != new_constant)
2557 error ("constant not recomputed when ADDR_EXPR changed");
2560 if (old_side_effects != new_side_effects)
2562 error ("side effects not recomputed when ADDR_EXPR changed");
/* Only decl bases are checked further; an address-taken decl must not
   be marked as a gimple register.  */
2566 if (!(TREE_CODE (base) == VAR_DECL
2567 || TREE_CODE (base) == PARM_DECL
2568 || TREE_CODE (base) == RESULT_DECL))
2571 if (DECL_GIMPLE_REG_P (base))
2573 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2580 /* Callback for walk_tree, check that all elements with address taken are
2581 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2582 inside a PHI node. */
2585 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2592 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2593 #define CHECK_OP(N, MSG) \
2594 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2595 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2597 switch (TREE_CODE (t))
2600 if (SSA_NAME_IN_FREE_LIST (t))
2602 error ("SSA name in freelist but still referenced");
2608 error ("INDIRECT_REF in gimple IL");
/* MEM_REF: operand 0 must be a valid gimple address of pointer type,
   operand 1 a pointer-typed INTEGER_CST offset.  */
2612 x = TREE_OPERAND (t, 0);
2613 if (!POINTER_TYPE_P (TREE_TYPE (x))
2614 || !is_gimple_mem_ref_addr (x))
2616 error ("Invalid first operand of MEM_REF.");
2619 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2620 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2622 error ("Invalid offset operand of MEM_REF.");
2623 return TREE_OPERAND (t, 1);
2625 if (TREE_CODE (x) == ADDR_EXPR
2626 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2632 x = fold (ASSERT_EXPR_COND (t));
2633 if (x == boolean_false_node)
2635 error ("ASSERT_EXPR with an always-false condition")
2641 error ("MODIFY_EXPR not expected while having tuples.");
/* ADDR_EXPR: peel handled components to the base decl and verify the
   addressability bookkeeping.  */
2648 gcc_assert (is_gimple_address (t));
2650 /* Skip any references (they will be checked when we recurse down the
2651 tree) and ensure that any variable used as a prefix is marked
2653 for (x = TREE_OPERAND (t, 0);
2654 handled_component_p (x);
2655 x = TREE_OPERAND (x, 0))
2658 if ((tem = verify_address (t, x)))
2661 if (!(TREE_CODE (x) == VAR_DECL
2662 || TREE_CODE (x) == PARM_DECL
2663 || TREE_CODE (x) == RESULT_DECL))
2666 if (!TREE_ADDRESSABLE (x))
2668 error ("address taken, but ADDRESSABLE bit not set");
2676 x = COND_EXPR_COND (t);
2677 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2679 error ("non-integral used in condition");
2682 if (!is_gimple_condexpr (x))
2684 error ("invalid conditional operand");
2689 case NON_LVALUE_EXPR:
2693 case FIX_TRUNC_EXPR:
2698 case TRUTH_NOT_EXPR:
2699 CHECK_OP (0, "invalid operand to unary operator");
2706 case ARRAY_RANGE_REF:
2708 case VIEW_CONVERT_EXPR:
2709 /* We have a nest of references. Verify that each of the operands
2710 that determine where to reference is either a constant or a variable,
2711 verify that the base is valid, and then show we've already checked
2713 while (handled_component_p (t))
2715 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2716 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2717 else if (TREE_CODE (t) == ARRAY_REF
2718 || TREE_CODE (t) == ARRAY_RANGE_REF)
2720 CHECK_OP (1, "invalid array index");
2721 if (TREE_OPERAND (t, 2))
2722 CHECK_OP (2, "invalid array lower bound");
2723 if (TREE_OPERAND (t, 3))
2724 CHECK_OP (3, "invalid array stride");
2726 else if (TREE_CODE (t) == BIT_FIELD_REF)
2728 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2729 || !host_integerp (TREE_OPERAND (t, 2), 1))
2731 error ("invalid position or size operand to BIT_FIELD_REF");
2734 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2735 && (TYPE_PRECISION (TREE_TYPE (t))
2736 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2738 error ("integral result type precision does not match "
2739 "field size of BIT_FIELD_REF");
2742 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2743 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2744 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2746 error ("mode precision of non-integral result does not "
2747 "match field size of BIT_FIELD_REF");
2752 t = TREE_OPERAND (t, 0);
2755 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2757 error ("invalid reference prefix");
2764 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2765 POINTER_PLUS_EXPR. */
2766 if (POINTER_TYPE_P (TREE_TYPE (t)))
2768 error ("invalid operand to plus/minus, type is a pointer");
2771 CHECK_OP (0, "invalid operand to binary operator");
2772 CHECK_OP (1, "invalid operand to binary operator");
2775 case POINTER_PLUS_EXPR:
2776 /* Check to make sure the first operand is a pointer or reference type. */
2777 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2779 error ("invalid operand to pointer plus, first operand is not a pointer");
2782 /* Check to make sure the second operand is an integer with type of
2784 if (!useless_type_conversion_p (sizetype,
2785 TREE_TYPE (TREE_OPERAND (t, 1))))
2787 error ("invalid operand to pointer plus, second operand is not an "
2788 "integer with type of sizetype.");
2798 case UNORDERED_EXPR:
2807 case TRUNC_DIV_EXPR:
2809 case FLOOR_DIV_EXPR:
2810 case ROUND_DIV_EXPR:
2811 case TRUNC_MOD_EXPR:
2813 case FLOOR_MOD_EXPR:
2814 case ROUND_MOD_EXPR:
2816 case EXACT_DIV_EXPR:
2826 CHECK_OP (0, "invalid operand to binary operator");
2827 CHECK_OP (1, "invalid operand to binary operator");
2831 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
/* NOTE(review): this listing is subsampled from tree-cfg.c -- the leading
   number on each line is the original file's line number; intermediate
   lines (braces, returns) are absent from this view.  */
2844 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2845 Returns true if there is an error, otherwise false. */
2848 verify_types_in_gimple_min_lval (tree expr)
/* A bare GIMPLE identifier (decl or SSA name) is always a valid
   minimal lvalue.  */
2852 if (is_gimple_id (expr))
/* Otherwise only the indirect-reference forms qualify.  */
2855 if (TREE_CODE (expr) != MISALIGNED_INDIRECT_REF
2856 && TREE_CODE (expr) != TARGET_MEM_REF
2857 && TREE_CODE (expr) != MEM_REF)
2859 error ("invalid expression for min lvalue");
2863 /* TARGET_MEM_REFs are strange beasts. */
2864 if (TREE_CODE (expr) == TARGET_MEM_REF)
/* The base operand of the reference must itself be a GIMPLE value.  */
2867 op = TREE_OPERAND (expr, 0);
2868 if (!is_gimple_val (op))
2870 error ("invalid operand in indirect reference");
2871 debug_generic_stmt (op);
2874 /* Memory references now generally can involve a value conversion. */
2879 /* Verify if EXPR is a valid GIMPLE reference expression. If
2880 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2881 if there is an error, otherwise false. */
2884 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
/* Walk down the chain of handled components (ARRAY_REF, COMPONENT_REF,
   REALPART_EXPR, ...), checking each level against the type of the
   object it references.  */
2886 while (handled_component_p (expr))
2888 tree op = TREE_OPERAND (expr, 0);
/* Array index, lower bound and stride operands must all be GIMPLE
   values when present.  */
2890 if (TREE_CODE (expr) == ARRAY_REF
2891 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2893 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2894 || (TREE_OPERAND (expr, 2)
2895 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2896 || (TREE_OPERAND (expr, 3)
2897 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2899 error ("invalid operands to array reference");
2900 debug_generic_stmt (expr);
2905 /* Verify if the reference array element types are compatible. */
2906 if (TREE_CODE (expr) == ARRAY_REF
2907 && !useless_type_conversion_p (TREE_TYPE (expr),
2908 TREE_TYPE (TREE_TYPE (op))))
2910 error ("type mismatch in array reference");
2911 debug_generic_stmt (TREE_TYPE (expr));
2912 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
/* An ARRAY_RANGE_REF yields an array, so compare its element type
   against the referenced array's element type.  */
2915 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2916 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2917 TREE_TYPE (TREE_TYPE (op))))
2919 error ("type mismatch in array range reference");
2920 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2921 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
/* Real/imaginary part extraction must match the complex type's
   component type.  */
2925 if ((TREE_CODE (expr) == REALPART_EXPR
2926 || TREE_CODE (expr) == IMAGPART_EXPR)
2927 && !useless_type_conversion_p (TREE_TYPE (expr),
2928 TREE_TYPE (TREE_TYPE (op))))
2930 error ("type mismatch in real/imagpart reference");
2931 debug_generic_stmt (TREE_TYPE (expr));
2932 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
/* A COMPONENT_REF's type must match the type of the FIELD_DECL in
   operand 1.  */
2936 if (TREE_CODE (expr) == COMPONENT_REF
2937 && !useless_type_conversion_p (TREE_TYPE (expr),
2938 TREE_TYPE (TREE_OPERAND (expr, 1))))
2940 error ("type mismatch in component reference");
2941 debug_generic_stmt (TREE_TYPE (expr));
2942 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2946 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2948 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2949 that their operand is not an SSA name or an invariant when
2950 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2951 bug). Otherwise there is nothing to verify, gross mismatches at
2952 most invoke undefined behavior. */
2954 && (TREE_CODE (op) == SSA_NAME
2955 || is_gimple_min_invariant (op)))
2957 error ("Conversion of an SSA_NAME on the left hand side.");
2958 debug_generic_stmt (expr);
/* NOTE(review): a size-changing view-convert of a register is not
   representable; presumably the elided lines return true here.  */
2961 else if (TREE_CODE (op) == SSA_NAME
2962 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
2964 error ("Conversion of register to a different size.");
2965 debug_generic_stmt (expr);
2968 else if (!handled_component_p (op))
/* After the component walk, EXPR is the innermost base.  Check the
   memory-reference forms specially.  */
2975 if (TREE_CODE (expr) == MEM_REF)
2977 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
2979 error ("Invalid address operand in MEM_REF.");
2980 debug_generic_stmt (expr);
/* MEM_REF operand 1 is the constant offset; it must be an INTEGER_CST
   of pointer type.  */
2983 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
2984 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
2986 error ("Invalid offset operand in MEM_REF.");
2987 debug_generic_stmt (expr);
2991 else if (TREE_CODE (expr) == TARGET_MEM_REF)
2993 if (TMR_SYMBOL (expr)
2995 && !useless_type_conversion_p (sizetype, TREE_TYPE (TMR_BASE (expr))))
2997 error ("Non-sizetype base in TARGET_MEM_REF with symbol")
3000 if (!TMR_OFFSET (expr)
3001 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3002 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3004 error ("Invalid offset operand in TARGET_MEM_REF.");
3005 debug_generic_stmt (expr);
/* Rvalue bases only need the minimal-lvalue check when they are not
   invariants.  */
3010 return ((require_lvalue || !is_gimple_min_invariant (expr))
3011 && verify_types_in_gimple_min_lval (expr));
3014 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3015 list of pointer-to types that is trivially convertible to DEST. */
3018 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
/* No pointer-to types recorded for SRC_OBJ -> nothing can match.  */
3022 if (!TYPE_POINTER_TO (src_obj))
/* Scan the chained list of pointer types to SRC_OBJ for one that is a
   useless (no-op) conversion to DEST.  */
3025 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3026 if (useless_type_conversion_p (dest, src))
3032 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3033 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3036 valid_fixed_convert_types_p (tree type1, tree type2)
/* FIXED_CONVERT_EXPR can convert a fixed-point type to/from integral,
   scalar floating-point, or another fixed-point type.  */
3038 return (FIXED_POINT_TYPE_P (type1)
3039 && (INTEGRAL_TYPE_P (type2)
3040 || SCALAR_FLOAT_TYPE_P (type2)
3041 || FIXED_POINT_TYPE_P (type2)));
3044 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3045 is a problem, otherwise false. */
3048 verify_gimple_call (gimple stmt)
3050 tree fn = gimple_call_fn (stmt);
/* The callee must be a GIMPLE value (or an OBJ_TYPE_REF, used for
   C++ virtual calls).  */
3054 if (TREE_CODE (fn) != OBJ_TYPE_REF
3055 && !is_gimple_val (fn))
3057 error ("invalid function in gimple call");
3058 debug_generic_stmt (fn);
/* FN must be a pointer to a FUNCTION_TYPE or METHOD_TYPE.  */
3062 if (!POINTER_TYPE_P (TREE_TYPE (fn))
3063 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3064 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
3066 error ("non-function in gimple call");
/* An LHS, when present, must be a valid lvalue reference.  */
3070 if (gimple_call_lhs (stmt)
3071 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3072 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3074 error ("invalid LHS in gimple call");
/* A noreturn call never produces a usable value.  */
3078 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3080 error ("LHS in noreturn call");
/* The LHS type must match the call's return type, modulo the
   pointer-conversion leniency described below.  */
3084 fntype = TREE_TYPE (TREE_TYPE (fn));
3085 if (gimple_call_lhs (stmt)
3086 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3088 /* ??? At least C++ misses conversions at assignments from
3089 void * call results.
3090 ??? Java is completely off. Especially with functions
3091 returning java.lang.Object.
3092 For now simply allow arbitrary pointer type conversions. */
3093 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3094 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3096 error ("invalid conversion in gimple call");
3097 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3098 debug_generic_stmt (TREE_TYPE (fntype));
3102 if (gimple_call_chain (stmt)
3103 && !is_gimple_val (gimple_call_chain (stmt)))
3105 error ("invalid static chain in gimple call");
3106 debug_generic_stmt (gimple_call_chain (stmt));
3110 /* If there is a static chain argument, this should not be an indirect
3111 call, and the decl should have DECL_STATIC_CHAIN set. */
3112 if (gimple_call_chain (stmt))
3114 if (TREE_CODE (fn) != ADDR_EXPR
3115 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
3117 error ("static chain in indirect gimple call");
3120 fn = TREE_OPERAND (fn, 0);
3122 if (!DECL_STATIC_CHAIN (fn))
3124 error ("static chain with function that doesn't use one");
3129 /* ??? The C frontend passes unpromoted arguments in case it
3130 didn't see a function declaration before the call. So for now
3131 leave the call arguments mostly unverified. Once we gimplify
3132 unit-at-a-time we have a chance to fix this. */
/* Register-typed arguments must be GIMPLE values; aggregate-typed
   arguments must at least be lvalues.  */
3134 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3136 tree arg = gimple_call_arg (stmt, i);
3137 if ((is_gimple_reg_type (TREE_TYPE (arg))
3138 && !is_gimple_val (arg))
3139 || (!is_gimple_reg_type (TREE_TYPE (arg))
3140 && !is_gimple_lvalue (arg)))
3142 error ("invalid argument to gimple call");
3143 debug_generic_expr (arg);
3150 /* Verifies the gimple comparison with the result type TYPE and
3151 the operands OP0 and OP1. */
3154 verify_gimple_comparison (tree type, tree op0, tree op1)
3156 tree op0_type = TREE_TYPE (op0);
3157 tree op1_type = TREE_TYPE (op1);
/* Both operands must be GIMPLE values.  */
3159 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3161 error ("invalid operands in gimple comparison");
3165 /* For comparisons we do not have the operations type as the
3166 effective type the comparison is carried out in. Instead
3167 we require that either the first operand is trivially
3168 convertible into the second, or the other way around.
3169 The resulting type of a comparison may be any integral type.
3170 Because we special-case pointers to void we allow
3171 comparisons of pointers with the same mode as well. */
3172 if ((!useless_type_conversion_p (op0_type, op1_type)
3173 && !useless_type_conversion_p (op1_type, op0_type)
3174 && (!POINTER_TYPE_P (op0_type)
3175 || !POINTER_TYPE_P (op1_type)
3176 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3177 || !INTEGRAL_TYPE_P (type))
3179 error ("type mismatch in comparison expression");
3180 debug_generic_expr (type);
3181 debug_generic_expr (op0_type);
3182 debug_generic_expr (op1_type);
3189 /* Verify a gimple assignment statement STMT with an unary rhs.
3190 Returns true if anything is wrong. */
3193 verify_gimple_assign_unary (gimple stmt)
3195 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3196 tree lhs = gimple_assign_lhs (stmt);
3197 tree lhs_type = TREE_TYPE (lhs);
3198 tree rhs1 = gimple_assign_rhs1 (stmt);
3199 tree rhs1_type = TREE_TYPE (rhs1);
/* The LHS of a unary operation must be a register (the elided line
   presumably exempts a COMPLEX_TYPE case -- cf. the matching check in
   the binary/ternary verifiers).  */
3201 if (!is_gimple_reg (lhs)
3203 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3205 error ("non-register as LHS of unary operation");
3209 if (!is_gimple_val (rhs1))
3211 error ("invalid operand in unary operation");
3215 /* First handle conversions. */
3220 /* Allow conversions between integral types and pointers only if
3221 there is no sign or zero extension involved.
3222 For targets were the precision of sizetype doesn't match that
3223 of pointers we need to allow arbitrary conversions from and
3225 if ((POINTER_TYPE_P (lhs_type)
3226 && INTEGRAL_TYPE_P (rhs1_type)
3227 && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3228 || rhs1_type == sizetype))
3229 || (POINTER_TYPE_P (rhs1_type)
3230 && INTEGRAL_TYPE_P (lhs_type)
3231 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3232 || lhs_type == sizetype)))
3235 /* Allow conversion from integer to offset type and vice versa. */
3236 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3237 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3238 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3239 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3242 /* Otherwise assert we are converting between types of the
3244 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3246 error ("invalid types in nop conversion");
3247 debug_generic_expr (lhs_type);
3248 debug_generic_expr (rhs1_type);
/* An address-space conversion requires pointers on both sides and a
   genuinely different address space on each.  */
3255 case ADDR_SPACE_CONVERT_EXPR:
3257 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3258 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3259 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3261 error ("invalid types in address space conversion");
3262 debug_generic_expr (lhs_type);
3263 debug_generic_expr (rhs1_type);
3270 case FIXED_CONVERT_EXPR:
3272 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3273 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3275 error ("invalid types in fixed-point conversion");
3276 debug_generic_expr (lhs_type);
3277 debug_generic_expr (rhs1_type);
/* (FLOAT_EXPR, presumably -- the case label was elided): integer to
   floating-point conversion.  */
3286 if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3288 error ("invalid types in conversion to floating point");
3289 debug_generic_expr (lhs_type);
3290 debug_generic_expr (rhs1_type);
3297 case FIX_TRUNC_EXPR:
3299 if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3301 error ("invalid types in conversion to integer");
3302 debug_generic_expr (lhs_type);
3303 debug_generic_expr (rhs1_type);
/* Vector unpack/reduction codes: not verified here.  */
3310 case VEC_UNPACK_HI_EXPR:
3311 case VEC_UNPACK_LO_EXPR:
3312 case REDUC_MAX_EXPR:
3313 case REDUC_MIN_EXPR:
3314 case REDUC_PLUS_EXPR:
3315 case VEC_UNPACK_FLOAT_HI_EXPR:
3316 case VEC_UNPACK_FLOAT_LO_EXPR:
3320 case TRUTH_NOT_EXPR:
3325 case NON_LVALUE_EXPR:
3333 /* For the remaining codes assert there is no conversion involved. */
3334 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3336 error ("non-trivial conversion in unary operation");
3337 debug_generic_expr (lhs_type);
3338 debug_generic_expr (rhs1_type);
3345 /* Verify a gimple assignment statement STMT with a binary rhs.
3346 Returns true if anything is wrong. */
3349 verify_gimple_assign_binary (gimple stmt)
3351 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3352 tree lhs = gimple_assign_lhs (stmt);
3353 tree lhs_type = TREE_TYPE (lhs);
3354 tree rhs1 = gimple_assign_rhs1 (stmt);
3355 tree rhs1_type = TREE_TYPE (rhs1);
3356 tree rhs2 = gimple_assign_rhs2 (stmt);
3357 tree rhs2_type = TREE_TYPE (rhs2);
/* The LHS must be a register (elided line presumably carves out a
   COMPLEX_TYPE case, matching the unary verifier).  */
3359 if (!is_gimple_reg (lhs)
3361 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3363 error ("non-register as LHS of binary operation");
3367 if (!is_gimple_val (rhs1)
3368 || !is_gimple_val (rhs2))
3370 error ("invalid operands in binary operation");
3374 /* First handle operations that involve different types. */
/* (COMPLEX_EXPR, presumably): builds a complex value from two scalar
   halves, so the operand types differ from the LHS type.  */
3379 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3380 || !(INTEGRAL_TYPE_P (rhs1_type)
3381 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3382 || !(INTEGRAL_TYPE_P (rhs2_type)
3383 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3385 error ("type mismatch in complex expression");
3386 debug_generic_expr (lhs_type);
3387 debug_generic_expr (rhs1_type);
3388 debug_generic_expr (rhs2_type);
3400 /* Shifts and rotates are ok on integral types, fixed point
3401 types and integer vector types. */
3402 if ((!INTEGRAL_TYPE_P (rhs1_type)
3403 && !FIXED_POINT_TYPE_P (rhs1_type)
3404 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3405 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3406 || (!INTEGRAL_TYPE_P (rhs2_type)
3407 /* Vector shifts of vectors are also ok. */
3408 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3409 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3410 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3411 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3412 || !useless_type_conversion_p (lhs_type, rhs1_type))
3414 error ("type mismatch in shift expression");
3415 debug_generic_expr (lhs_type);
3416 debug_generic_expr (rhs1_type);
3417 debug_generic_expr (rhs2_type);
3424 case VEC_LSHIFT_EXPR:
3425 case VEC_RSHIFT_EXPR:
/* Whole-vector shifts: rhs1 is a vector of integral, fixed-point or
   float elements; rhs2 is an integral (or integral-vector) count.  */
3427 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3428 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3429 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3430 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3431 || (!INTEGRAL_TYPE_P (rhs2_type)
3432 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3433 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3434 || !useless_type_conversion_p (lhs_type, rhs1_type))
3436 error ("type mismatch in vector shift expression");
3437 debug_generic_expr (lhs_type);
3438 debug_generic_expr (rhs1_type);
3439 debug_generic_expr (rhs2_type);
3442 /* For shifting a vector of floating point components we
3443 only allow shifting by a constant multiple of the element size. */
3444 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
3445 && (TREE_CODE (rhs2) != INTEGER_CST
3446 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3447 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3449 error ("non-element sized vector shift of floating point vector");
3458 /* We use regular PLUS_EXPR for vectors.
3459 ??? This just makes the checker happy and may not be what is
/* (PLUS_EXPR/MINUS_EXPR handling): vectors of pointers are checked
   element-wise via the pointer-plus path below.  */
3461 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3462 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3464 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3465 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3467 error ("invalid non-vector operands to vector valued plus");
/* Strip the vector layer and check the element types instead.  */
3470 lhs_type = TREE_TYPE (lhs_type);
3471 rhs1_type = TREE_TYPE (rhs1_type);
3472 rhs2_type = TREE_TYPE (rhs2_type);
3473 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3474 the pointer to 2nd place. */
3475 if (POINTER_TYPE_P (rhs2_type))
3477 tree tem = rhs1_type;
3478 rhs1_type = rhs2_type;
3481 goto do_pointer_plus_expr_check;
/* Plain (non-vector) plus/minus must not involve pointers at all --
   pointer arithmetic uses POINTER_PLUS_EXPR.  */
3487 if (POINTER_TYPE_P (lhs_type)
3488 || POINTER_TYPE_P (rhs1_type)
3489 || POINTER_TYPE_P (rhs2_type))
3491 error ("invalid (pointer) operands to plus/minus");
3495 /* Continue with generic binary expression handling. */
3499 case POINTER_PLUS_EXPR:
3501 do_pointer_plus_expr_check:
/* rhs1 is the pointer, rhs2 is a sizetype offset, and the result has
   the pointer's type.  */
3502 if (!POINTER_TYPE_P (rhs1_type)
3503 || !useless_type_conversion_p (lhs_type, rhs1_type)
3504 || !useless_type_conversion_p (sizetype, rhs2_type))
3506 error ("type mismatch in pointer plus expression");
3507 debug_generic_stmt (lhs_type);
3508 debug_generic_stmt (rhs1_type);
3509 debug_generic_stmt (rhs2_type);
3516 case TRUTH_ANDIF_EXPR:
3517 case TRUTH_ORIF_EXPR:
3520 case TRUTH_AND_EXPR:
3522 case TRUTH_XOR_EXPR:
3524 /* We allow any kind of integral typed argument and result. */
3525 if (!INTEGRAL_TYPE_P (rhs1_type)
3526 || !INTEGRAL_TYPE_P (rhs2_type)
3527 || !INTEGRAL_TYPE_P (lhs_type)
3529 error ("type mismatch in binary truth expression");
3530 debug_generic_expr (lhs_type);
3531 debug_generic_expr (rhs1_type);
3532 debug_generic_expr (rhs2_type);
3545 case UNORDERED_EXPR:
3553 /* Comparisons are also binary, but the result type is not
3554 connected to the operand types. */
3555 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3557 case WIDEN_MULT_EXPR:
/* Result must be an integer of exactly twice the operand precision,
   with equal-precision operands.  */
3558 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3560 return ((2 * TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (lhs_type))
3561 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
/* Vector widening/packing codes: not verified here.  */
3563 case WIDEN_SUM_EXPR:
3564 case VEC_WIDEN_MULT_HI_EXPR:
3565 case VEC_WIDEN_MULT_LO_EXPR:
3566 case VEC_PACK_TRUNC_EXPR:
3567 case VEC_PACK_SAT_EXPR:
3568 case VEC_PACK_FIX_TRUNC_EXPR:
3569 case VEC_EXTRACT_EVEN_EXPR:
3570 case VEC_EXTRACT_ODD_EXPR:
3571 case VEC_INTERLEAVE_HIGH_EXPR:
3572 case VEC_INTERLEAVE_LOW_EXPR:
3577 case TRUNC_DIV_EXPR:
3579 case FLOOR_DIV_EXPR:
3580 case ROUND_DIV_EXPR:
3581 case TRUNC_MOD_EXPR:
3583 case FLOOR_MOD_EXPR:
3584 case ROUND_MOD_EXPR:
3586 case EXACT_DIV_EXPR:
3592 /* Continue with generic binary expression handling. */
/* Generic case: both operands convert trivially to the LHS type.  */
3599 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3600 || !useless_type_conversion_p (lhs_type, rhs2_type))
3602 error ("type mismatch in binary expression");
3603 debug_generic_stmt (lhs_type);
3604 debug_generic_stmt (rhs1_type);
3605 debug_generic_stmt (rhs2_type);
3612 /* Verify a gimple assignment statement STMT with a ternary rhs.
3613 Returns true if anything is wrong. */
3616 verify_gimple_assign_ternary (gimple stmt)
3618 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3619 tree lhs = gimple_assign_lhs (stmt);
3620 tree lhs_type = TREE_TYPE (lhs);
3621 tree rhs1 = gimple_assign_rhs1 (stmt);
3622 tree rhs1_type = TREE_TYPE (rhs1);
3623 tree rhs2 = gimple_assign_rhs2 (stmt);
3624 tree rhs2_type = TREE_TYPE (rhs2);
3625 tree rhs3 = gimple_assign_rhs3 (stmt);
3626 tree rhs3_type = TREE_TYPE (rhs3);
/* The LHS must be a register (elided line presumably exempts a
   COMPLEX_TYPE case, as in the unary/binary verifiers).  */
3628 if (!is_gimple_reg (lhs)
3630 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3632 error ("non-register as LHS of ternary operation");
3636 if (!is_gimple_val (rhs1)
3637 || !is_gimple_val (rhs2)
3638 || !is_gimple_val (rhs3))
3640 error ("invalid operands in ternary operation");
3644 /* First handle operations that involve different types. */
3647 case WIDEN_MULT_PLUS_EXPR:
3648 case WIDEN_MULT_MINUS_EXPR:
/* rhs1 * rhs2 (narrow) accumulated into rhs3/lhs (double width):
   operand types must match each other, the accumulator must match the
   LHS, and the LHS has exactly twice the operand precision.  */
3649 if ((!INTEGRAL_TYPE_P (rhs1_type)
3650 && !FIXED_POINT_TYPE_P (rhs1_type))
3651 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3652 || !useless_type_conversion_p (lhs_type, rhs3_type)
3653 || 2 * TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (lhs_type)
3654 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3656 error ("type mismatch in widening multiply-accumulate expression");
3657 debug_generic_expr (lhs_type);
3658 debug_generic_expr (rhs1_type);
3659 debug_generic_expr (rhs2_type);
3660 debug_generic_expr (rhs3_type);
3671 /* Verify a gimple assignment statement STMT with a single rhs.
3672 Returns true if anything is wrong. */
3675 verify_gimple_assign_single (gimple stmt)
3677 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3678 tree lhs = gimple_assign_lhs (stmt);
3679 tree lhs_type = TREE_TYPE (lhs);
3680 tree rhs1 = gimple_assign_rhs1 (stmt);
3681 tree rhs1_type = TREE_TYPE (rhs1);
/* Plain copies must not change the type.  */
3684 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3686 error ("non-trivial conversion at assignment");
3687 debug_generic_expr (lhs_type);
3688 debug_generic_expr (rhs1_type);
3692 if (handled_component_p (lhs))
3693 res |= verify_types_in_gimple_reference (lhs, true);
3695 /* Special codes we cannot handle via their class. */
/* (ADDR_EXPR case, presumably -- the label was elided): the operand
   must be addressable and its type compatible with the pointed-to
   type of the address expression.  */
3700 tree op = TREE_OPERAND (rhs1, 0);
3701 if (!is_gimple_addressable (op))
3703 error ("invalid operand in unary expression");
3707 if (!types_compatible_p (TREE_TYPE (op), TREE_TYPE (TREE_TYPE (rhs1)))
3708 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3711 error ("type mismatch in address expression");
3712 debug_generic_stmt (TREE_TYPE (rhs1));
3713 debug_generic_stmt (TREE_TYPE (op));
3717 return verify_types_in_gimple_reference (op, true);
/* INDIRECT_REF was replaced by MEM_REF when this IL was lowered; its
   appearance in GIMPLE is always an error.  */
3722 error ("INDIRECT_REF in gimple IL");
3727 case MISALIGNED_INDIRECT_REF:
3729 case ARRAY_RANGE_REF:
3730 case VIEW_CONVERT_EXPR:
3733 case TARGET_MEM_REF:
/* A memory load on the RHS may not feed a register-typed non-register
   LHS store... (store verified together with the RHS reference).  */
3735 if (!is_gimple_reg (lhs)
3736 && is_gimple_reg_type (TREE_TYPE (lhs)))
3738 error ("invalid rhs for gimple memory store");
3739 debug_generic_stmt (lhs);
3740 debug_generic_stmt (rhs1);
3743 return res || verify_types_in_gimple_reference (rhs1, false);
3755 /* tcc_declaration */
/* Memory stores of declarations: at most one side may be memory.  */
3760 if (!is_gimple_reg (lhs)
3761 && !is_gimple_reg (rhs1)
3762 && is_gimple_reg_type (TREE_TYPE (lhs)))
3764 error ("invalid rhs for gimple memory store");
3765 debug_generic_stmt (lhs);
3766 debug_generic_stmt (rhs1);
/* (COND_EXPR case, presumably): condition may be a register or a
   comparison; both arms must be registers or invariants.  */
3772 if (!is_gimple_reg (lhs)
3773 || (!is_gimple_reg (TREE_OPERAND (rhs1, 0))
3774 && !COMPARISON_CLASS_P (TREE_OPERAND (rhs1, 0)))
3775 || (!is_gimple_reg (TREE_OPERAND (rhs1, 1))
3776 && !is_gimple_min_invariant (TREE_OPERAND (rhs1, 1)))
3777 || (!is_gimple_reg (TREE_OPERAND (rhs1, 2))
3778 && !is_gimple_min_invariant (TREE_OPERAND (rhs1, 2))))
3780 error ("invalid COND_EXPR in gimple assignment");
3781 debug_generic_stmt (rhs1);
3789 case WITH_SIZE_EXPR:
3790 case POLYNOMIAL_CHREC:
3793 case REALIGN_LOAD_EXPR:
3803 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3804 is a problem, otherwise false. */
3807 verify_gimple_assign (gimple stmt)
/* Dispatch on the RHS class to the matching per-arity verifier.  */
3809 switch (gimple_assign_rhs_class (stmt))
3811 case GIMPLE_SINGLE_RHS:
3812 return verify_gimple_assign_single (stmt);
3814 case GIMPLE_UNARY_RHS:
3815 return verify_gimple_assign_unary (stmt);
3817 case GIMPLE_BINARY_RHS:
3818 return verify_gimple_assign_binary (stmt);
3820 case GIMPLE_TERNARY_RHS:
3821 return verify_gimple_assign_ternary (stmt);
3828 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3829 is a problem, otherwise false. */
3832 verify_gimple_return (gimple stmt)
3834 tree op = gimple_return_retval (stmt);
3835 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3837 /* We cannot test for present return values as we do not fix up missing
3838 return values from the original source. */
3842 if (!is_gimple_val (op)
3843 && TREE_CODE (op) != RESULT_DECL)
3845 error ("invalid operand in return statement");
3846 debug_generic_stmt (op);
3850 if ((TREE_CODE (op) == RESULT_DECL
3851 && DECL_BY_REFERENCE (op))
3852 || (TREE_CODE (op) == SSA_NAME
3853 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
3854 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
/* For a by-reference result the value has pointer type, so compare
   its pointed-to type against the declared return type.  */
3855 op = TREE_TYPE (op);
3857 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
3859 error ("invalid conversion in return statement");
3860 debug_generic_stmt (restype);
3861 debug_generic_stmt (TREE_TYPE (op));
3869 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
3870 is a problem, otherwise false. */
3873 verify_gimple_goto (gimple stmt)
3875 tree dest = gimple_goto_dest (stmt);
3877 /* ??? We have two canonical forms of direct goto destinations, a
3878 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
/* For a computed goto the destination must be a pointer-typed GIMPLE
   value.  */
3879 if (TREE_CODE (dest) != LABEL_DECL
3880 && (!is_gimple_val (dest)
3881 || !POINTER_TYPE_P (TREE_TYPE (dest))))
3883 error ("goto destination is neither a label nor a pointer");
3890 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
3891 is a problem, otherwise false. */
3894 verify_gimple_switch (gimple stmt)
/* The switch index must already be a GIMPLE value.  */
3896 if (!is_gimple_val (gimple_switch_index (stmt)))
3898 error ("invalid operand to switch statement");
3899 debug_generic_stmt (gimple_switch_index (stmt));
3907 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
3908 and false otherwise. */
3911 verify_gimple_phi (gimple stmt)
3913 tree type = TREE_TYPE (gimple_phi_result (stmt));
/* A PHI result is always an SSA name.  */
3916 if (TREE_CODE (gimple_phi_result (stmt)) != SSA_NAME)
3918 error ("Invalid PHI result");
/* Each argument must be compatible with the result: a GIMPLE value
   for register PHIs, at least addressable for virtual/memory PHIs,
   and type-convertible to the result type.  */
3922 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3924 tree arg = gimple_phi_arg_def (stmt, i);
3925 if ((is_gimple_reg (gimple_phi_result (stmt))
3926 && !is_gimple_val (arg))
3927 || (!is_gimple_reg (gimple_phi_result (stmt))
3928 && !is_gimple_addressable (arg)))
3930 error ("Invalid PHI argument");
3931 debug_generic_stmt (arg);
3934 if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
3936 error ("Incompatible types in PHI argument %u", i);
3937 debug_generic_stmt (type);
3938 debug_generic_stmt (TREE_TYPE (arg));
3947 /* Verify a gimple debug statement STMT.
3948 Returns true if anything is wrong. */
3951 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
3953 /* There isn't much that could be wrong in a gimple debug stmt. A
3954 gimple debug bind stmt, for example, maps a tree, that's usually
3955 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
3956 component or member of an aggregate type, to another tree, that
3957 can be an arbitrary expression. These stmts expand into debug
3958 insns, and are converted to debug notes by var-tracking.c. */
3963 /* Verify the GIMPLE statement STMT. Returns true if there is an
3964 error, otherwise false. */
3967 verify_types_in_gimple_stmt (gimple stmt)
/* Dispatch per statement code; case labels for some codes were elided
   in this listing.  */
3969 switch (gimple_code (stmt))
3972 return verify_gimple_assign (stmt);
3975 return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
3978 return verify_gimple_call (stmt);
/* GIMPLE_COND: the code must be a comparison and any explicit labels
   must be LABEL_DECLs; then the comparison itself is type-checked
   with a boolean result type.  */
3981 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
3983 error ("invalid comparison code in gimple cond");
3986 if (!(!gimple_cond_true_label (stmt)
3987 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
3988 || !(!gimple_cond_false_label (stmt)
3989 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
3991 error ("invalid labels in gimple cond");
3995 return verify_gimple_comparison (boolean_type_node,
3996 gimple_cond_lhs (stmt),
3997 gimple_cond_rhs (stmt));
4000 return verify_gimple_goto (stmt);
4003 return verify_gimple_switch (stmt);
4006 return verify_gimple_return (stmt);
4012 return verify_gimple_phi (stmt);
4014 /* Tuples that do not have tree operands. */
4016 case GIMPLE_PREDICT:
4018 case GIMPLE_EH_DISPATCH:
4019 case GIMPLE_EH_MUST_NOT_THROW:
4023 /* OpenMP directives are validated by the FE and never operated
4024 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4025 non-gimple expressions when the main index variable has had
4026 its address taken. This does not affect the loop itself
4027 because the header of an GIMPLE_OMP_FOR is merely used to determine
4028 how to setup the parallel iteration. */
4032 return verify_gimple_debug (stmt);
4039 /* Verify the GIMPLE statements inside the sequence STMTS. */
4042 verify_types_in_gimple_seq_2 (gimple_seq stmts)
4044 gimple_stmt_iterator ittr;
/* Recurse into every nested statement sequence so the whole tree of
   statements is covered; leaf statements go through
   verify_types_in_gimple_stmt.  */
4047 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4049 gimple stmt = gsi_stmt (ittr);
4051 switch (gimple_code (stmt))
4054 err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
4058 err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
4059 err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
4062 case GIMPLE_EH_FILTER:
4063 err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
4067 err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
/* Default: verify the statement itself and dump it on failure.  */
4072 bool err2 = verify_types_in_gimple_stmt (stmt);
4074 debug_gimple_stmt (stmt);
4084 /* Verify the GIMPLE statements inside the statement list STMTS. */
4087 verify_types_in_gimple_seq (gimple_seq stmts)
/* Any verification failure is fatal: report an internal error.  */
4089 if (verify_types_in_gimple_seq_2 (stmts))
4090 internal_error ("verify_gimple failed");
4094 /* Verify STMT, return true if STMT is not in GIMPLE form.
4095 TODO: Implement type checking. */
4098 verify_stmt (gimple_stmt_iterator *gsi)
4101 struct walk_stmt_info wi;
4102 bool last_in_block = gsi_one_before_end_p (*gsi);
4103 gimple stmt = gsi_stmt (*gsi);
4106 if (is_gimple_omp (stmt))
4108 /* OpenMP directives are validated by the FE and never operated
4109 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4110 non-gimple expressions when the main index variable has had
4111 its address taken. This does not affect the loop itself
4112 because the header of an GIMPLE_OMP_FOR is merely used to determine
4113 how to setup the parallel iteration. */
4117 /* FIXME. The C frontend passes unpromoted arguments in case it
4118 didn't see a function declaration before the call. */
4119 if (is_gimple_call (stmt))
4123 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
4125 error ("invalid function in call statement");
/* A function may not be flagged looping-const-or-pure unless it is
   also pure or const.  */
4129 decl = gimple_call_fndecl (stmt);
4131 && TREE_CODE (decl) == FUNCTION_DECL
4132 && DECL_LOOPING_CONST_OR_PURE_P (decl)
4133 && (!DECL_PURE_P (decl))
4134 && (!TREE_READONLY (decl))
4136 error ("invalid pure const state for function");
/* Debug statements are exempt from operand verification.  */
4141 if (is_gimple_debug (stmt))
/* Walk all operands with verify_expr; a non-NULL return is the
   offending sub-tree.  */
4144 memset (&wi, 0, sizeof (wi));
4145 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
4148 debug_generic_expr (addr);
4149 inform (gimple_location (gsi_stmt (*gsi)), "in statement");
4150 debug_gimple_stmt (stmt);
4154 /* If the statement is marked as part of an EH region, then it is
4155 expected that the statement could throw. Verify that when we
4156 have optimizations that simplify statements such that we prove
4157 that they cannot throw, that we update other data structures
4159 lp_nr = lookup_stmt_eh_lp (stmt);
4162 if (!stmt_could_throw_p (stmt))
4164 error ("statement marked for throw, but doesn%'t");
/* An internally-throwing statement must end its basic block.  */
4167 else if (lp_nr > 0 && !last_in_block && stmt_can_throw_internal (stmt))
4169 error ("statement marked for throw in middle of block");
4177 debug_gimple_stmt (stmt);
4182 /* Return true when the T can be shared.
     Types, decls, invariants, SSA names, the error mark and identifiers
     are inherently shareable; CASE_LABEL_EXPRs get special handling;
     otherwise strip outer reference components with invariant indices
     before deciding.  */
4185 tree_node_can_be_shared (tree t)
4187 if (IS_TYPE_OR_DECL_P (t)
4188 || is_gimple_min_invariant (t)
4189 || TREE_CODE (t) == SSA_NAME
4190 || t == error_mark_node
4191 || TREE_CODE (t) == IDENTIFIER_NODE)
4194 if (TREE_CODE (t) == CASE_LABEL_EXPR)
/* Peel off reference nodes whose sharing is decided by their base:
   ARRAY_REF/ARRAY_RANGE_REF with invariant index, COMPONENT_REF,
   REALPART_EXPR and IMAGPART_EXPR.  */
4197 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4198 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4199 || TREE_CODE (t) == COMPONENT_REF
4200 || TREE_CODE (t) == REALPART_EXPR
4201 || TREE_CODE (t) == IMAGPART_EXPR)
4202 t = TREE_OPERAND (t, 0);
4211 /* Called via walk_gimple_stmt. Verify tree sharing.
     WI->info carries the pointer-set of trees already visited; a node
     that is not shareable and is inserted twice indicates invalid
     sharing.  */
4214 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4216 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4217 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
/* Shareable nodes need no further walking.  */
4219 if (tree_node_can_be_shared (*tp))
4221 *walk_subtrees = false;
/* pointer_set_insert returns nonzero when *TP was already present.  */
4225 if (pointer_set_insert (visited, *tp))
/* Set when verify_eh_throw_stmt_node finds a stale EH-table entry.  */
4232 static bool eh_error_found;
/* htab_traverse callback: report statements recorded in the EH throw
   table that were not seen while walking the function (DATA is the
   pointer-set of visited statements).  */
4234 verify_eh_throw_stmt_node (void **slot, void *data)
4236 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4237 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4239 if (!pointer_set_contains (visited, node->stmt))
4241 error ("Dead STMT in EH table");
4242 debug_gimple_stmt (node->stmt);
4243 eh_error_found = true;
4249 /* Verify the GIMPLE statements in every basic block. */
/* NOTE(review): the function signature and several interior lines are
   elided in this excerpt.  The visible body checks PHI nodes and
   statements per block, verifies tree sharing, label/landing-pad
   bookkeeping, and finally cross-checks the EH throw table.  */
4255 gimple_stmt_iterator gsi;
4257 struct pointer_set_t *visited, *visited_stmts;
4259 struct walk_stmt_info wi;
4261 timevar_push (TV_TREE_STMT_VERIFY);
4262 visited = pointer_set_create ();
4263 visited_stmts = pointer_set_create ();
4265 memset (&wi, 0, sizeof (wi));
4266 wi.info = (void *) visited;
/* First pass over each block's PHI nodes.  */
4273 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4275 phi = gsi_stmt (gsi);
4276 pointer_set_insert (visited_stmts, phi);
4277 if (gimple_bb (phi) != bb)
4279 error ("gimple_bb (phi) is set to a wrong basic block");
4283 for (i = 0; i < gimple_phi_num_args (phi); i++)
4285 tree t = gimple_phi_arg_def (phi, i);
4290 error ("missing PHI def");
4291 debug_gimple_stmt (phi);
4295 /* Addressable variables do have SSA_NAMEs but they
4296 are not considered gimple values. */
4297 else if (TREE_CODE (t) != SSA_NAME
4298 && TREE_CODE (t) != FUNCTION_DECL
4299 && !is_gimple_min_invariant (t))
4301 error ("PHI argument is not a GIMPLE value");
4302 debug_gimple_stmt (phi);
4303 debug_generic_expr (t);
/* Check tree sharing within the PHI argument.  */
4307 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4310 error ("incorrect sharing of tree nodes");
4311 debug_gimple_stmt (phi);
4312 debug_generic_expr (addr);
4317 #ifdef ENABLE_TYPES_CHECKING
4318 if (verify_gimple_phi (phi))
4320 debug_gimple_stmt (phi);
/* Second pass over each block's ordinary statements.  */
4326 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
4328 gimple stmt = gsi_stmt (gsi);
4330 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4331 || gimple_code (stmt) == GIMPLE_BIND)
4333 error ("invalid GIMPLE statement");
4334 debug_gimple_stmt (stmt);
4338 pointer_set_insert (visited_stmts, stmt);
4340 if (gimple_bb (stmt) != bb)
4342 error ("gimple_bb (stmt) is set to a wrong basic block");
4343 debug_gimple_stmt (stmt);
/* Labels must be consistent with label_to_block_map and with their
   recorded EH landing pad, if any.  */
4347 if (gimple_code (stmt) == GIMPLE_LABEL)
4349 tree decl = gimple_label_label (stmt);
4350 int uid = LABEL_DECL_UID (decl);
4353 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4355 error ("incorrect entry in label_to_block_map");
4359 uid = EH_LANDING_PAD_NR (decl);
4362 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4363 if (decl != lp->post_landing_pad)
4365 error ("incorrect setting of landing pad number");
4371 err |= verify_stmt (&gsi);
4373 #ifdef ENABLE_TYPES_CHECKING
4374 if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
4376 debug_gimple_stmt (stmt);
4380 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
4383 error ("incorrect sharing of tree nodes");
4384 debug_gimple_stmt (stmt);
4385 debug_generic_expr (addr);
/* Finally make sure no stale statements linger in the EH table.  */
4392 eh_error_found = false;
4393 if (get_eh_throw_stmt_table (cfun))
4394 htab_traverse (get_eh_throw_stmt_table (cfun),
4395 verify_eh_throw_stmt_node,
4398 if (err | eh_error_found)
4399 internal_error ("verify_stmts failed");
4401 pointer_set_destroy (visited);
4402 pointer_set_destroy (visited_stmts);
4403 verify_histograms ();
4404 timevar_pop (TV_TREE_STMT_VERIFY);
4408 /* Verifies that the flow information is OK.
     Checks ENTRY/EXIT blocks, label placement at block heads, absence of
     control flow in block interiors, and per-statement-kind edge flags at
     block ends.  NOTE(review): interior lines are elided in this
     excerpt.  */
4411 gimple_verify_flow_info (void)
4415 gimple_stmt_iterator gsi;
/* ENTRY and EXIT must carry no IL of their own.  */
4420 if (ENTRY_BLOCK_PTR->il.gimple)
4422 error ("ENTRY_BLOCK has IL associated with it");
4426 if (EXIT_BLOCK_PTR->il.gimple)
4428 error ("EXIT_BLOCK has IL associated with it");
4432 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4433 if (e->flags & EDGE_FALLTHRU)
4435 error ("fallthru to exit from bb %d", e->src->index);
4441 bool found_ctrl_stmt = false;
4445 /* Skip labels on the start of basic block. */
4446 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4449 gimple prev_stmt = stmt;
4451 stmt = gsi_stmt (gsi);
4453 if (gimple_code (stmt) != GIMPLE_LABEL)
4456 label = gimple_label_label (stmt);
/* Nonlocal and EH-landing-pad labels must lead the label run.  */
4457 if (prev_stmt && DECL_NONLOCAL (label))
4459 error ("nonlocal label ");
4460 print_generic_expr (stderr, label, 0);
4461 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4466 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4468 error ("EH landing pad label ");
4469 print_generic_expr (stderr, label, 0);
4470 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4475 if (label_to_block (label) != bb)
4478 print_generic_expr (stderr, label, 0);
4479 fprintf (stderr, " to block does not match in bb %d",
4484 if (decl_function_context (label) != current_function_decl)
4487 print_generic_expr (stderr, label, 0);
4488 fprintf (stderr, " has incorrect context in bb %d",
4494 /* Verify that body of basic block BB is free of control flow. */
4495 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4497 gimple stmt = gsi_stmt (gsi);
4499 if (found_ctrl_stmt)
4501 error ("control flow in the middle of basic block %d",
4506 if (stmt_ends_bb_p (stmt))
4507 found_ctrl_stmt = true;
4509 if (gimple_code (stmt) == GIMPLE_LABEL)
4512 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4513 fprintf (stderr, " in the middle of basic block %d", bb->index);
/* Examine the last statement of the block for edge-flag consistency.  */
4518 gsi = gsi_last_bb (bb);
4519 if (gsi_end_p (gsi))
4522 stmt = gsi_stmt (gsi);
4524 if (gimple_code (stmt) == GIMPLE_LABEL)
4527 err |= verify_eh_edges (stmt);
4529 if (is_ctrl_stmt (stmt))
4531 FOR_EACH_EDGE (e, ei, bb->succs)
4532 if (e->flags & EDGE_FALLTHRU)
4534 error ("fallthru edge after a control statement in bb %d",
4540 if (gimple_code (stmt) != GIMPLE_COND)
4542 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4543 after anything else but if statement. */
4544 FOR_EACH_EDGE (e, ei, bb->succs)
4545 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4547 error ("true/false edge after a non-GIMPLE_COND in bb %d",
/* Per-statement-kind checks on the block terminator.  */
4553 switch (gimple_code (stmt))
4560 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4564 || !(true_edge->flags & EDGE_TRUE_VALUE)
4565 || !(false_edge->flags & EDGE_FALSE_VALUE)
4566 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4567 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4568 || EDGE_COUNT (bb->succs) >= 3)
4570 error ("wrong outgoing edge flags at end of bb %d",
4578 if (simple_goto_p (stmt))
4580 error ("explicit goto at end of bb %d", bb->index);
4585 /* FIXME. We should double check that the labels in the
4586 destination blocks have their address taken. */
4587 FOR_EACH_EDGE (e, ei, bb->succs)
4588 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4589 | EDGE_FALSE_VALUE))
4590 || !(e->flags & EDGE_ABNORMAL))
4592 error ("wrong outgoing edge flags at end of bb %d",
4600 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4602 /* ... fallthru ... */
4604 if (!single_succ_p (bb)
4605 || (single_succ_edge (bb)->flags
4606 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4607 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4609 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4612 if (single_succ (bb) != EXIT_BLOCK_PTR)
4614 error ("return edge does not point to exit in bb %d",
4626 n = gimple_switch_num_labels (stmt);
4628 /* Mark all the destination basic blocks. */
4629 for (i = 0; i < n; ++i)
4631 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4632 basic_block label_bb = label_to_block (lab);
4633 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4634 label_bb->aux = (void *)1;
4637 /* Verify that the case labels are sorted. */
4638 prev = gimple_switch_label (stmt, 0);
4639 for (i = 1; i < n; ++i)
4641 tree c = gimple_switch_label (stmt, i);
4644 error ("found default case not at the start of "
4650 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4652 error ("case labels not sorted: ");
4653 print_generic_expr (stderr, prev, 0);
4654 fprintf (stderr," is greater than ");
4655 print_generic_expr (stderr, c, 0);
4656 fprintf (stderr," but comes before it.\n");
4661 /* VRP will remove the default case if it can prove it will
4662 never be executed. So do not verify there always exists
4663 a default case here. */
/* Successor edges must match exactly the marked label blocks.  */
4665 FOR_EACH_EDGE (e, ei, bb->succs)
4669 error ("extra outgoing edge %d->%d",
4670 bb->index, e->dest->index);
4674 e->dest->aux = (void *)2;
4675 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4676 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4678 error ("wrong outgoing edge flags at end of bb %d",
4684 /* Check that we have all of them. */
4685 for (i = 0; i < n; ++i)
4687 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4688 basic_block label_bb = label_to_block (lab);
4690 if (label_bb->aux != (void *)2)
4692 error ("missing edge %i->%i", bb->index, label_bb->index);
/* Reset the aux markers used above.  */
4697 FOR_EACH_EDGE (e, ei, bb->succs)
4698 e->dest->aux = (void *)0;
4702 case GIMPLE_EH_DISPATCH:
4703 err |= verify_eh_dispatch_edge (stmt);
4711 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4712 verify_dominators (CDI_DOMINATORS);
4718 /* Updates phi nodes after creating a forwarder block joined
4719 by edge FALLTHRU.
     FALLTHRU->src is the new forwarder ("dummy") block that kept the
     original PHIs; matching PHIs are recreated in FALLTHRU->dest.  */
4722 gimple_make_forwarder_block (edge fallthru)
4726 basic_block dummy, bb;
4728 gimple_stmt_iterator gsi;
4730 dummy = fallthru->src;
4731 bb = fallthru->dest;
/* Nothing to do when BB has a single predecessor.  */
4733 if (single_pred_p (bb))
4736 /* If we redirected a branch we must create new PHI nodes at the
4738 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4740 gimple phi, new_phi;
4742 phi = gsi_stmt (gsi);
4743 var = gimple_phi_result (phi);
4744 new_phi = create_phi_node (var, bb);
4745 SSA_NAME_DEF_STMT (var) = new_phi;
/* The old PHI gets a fresh SSA name; its result feeds the new PHI
   through the fallthru edge.  */
4746 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4747 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
4751 /* Add the arguments we have stored on edges. */
4752 FOR_EACH_EDGE (e, ei, bb->preds)
4757 flush_pending_stmts (e);
4762 /* Return a non-special label in the head of basic block BLOCK.
4763 Create one if it doesn't exist.
     Scans the leading labels of BB for one that is not DECL_NONLOCAL,
     moving it to the front if found; otherwise builds an artificial
     label and inserts it at the start of BB.  */
4766 gimple_block_label (basic_block bb)
4768 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4773 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4775 stmt = gsi_stmt (i);
4776 if (gimple_code (stmt) != GIMPLE_LABEL)
4778 label = gimple_label_label (stmt);
4779 if (!DECL_NONLOCAL (label))
/* Hoist the usable label to the head of the label run.  */
4782 gsi_move_before (&i, &s);
/* No suitable label found: create an artificial one.  */
4787 label = create_artificial_label (UNKNOWN_LOCATION);
4788 stmt = gimple_build_label (label);
4789 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4794 /* Attempt to perform edge redirection by replacing a possibly complex
4795 jump instruction by a goto or by removing the jump completely.
4796 This can apply only if all edges now point to the same block. The
4797 parameters and return values are equivalent to
4798 redirect_edge_and_branch. */
4801 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4803 basic_block src = e->src;
4804 gimple_stmt_iterator i;
4807 /* We can replace or remove a complex jump only when we have exactly
4809 if (EDGE_COUNT (src->succs) != 2
4810 /* Verify that all targets will be TARGET. Specifically, the
4811 edge that is not E must also go to TARGET. */
4812 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4815 i = gsi_last_bb (src);
4819 stmt = gsi_stmt (i);
/* A trailing GIMPLE_COND or GIMPLE_SWITCH becomes redundant once both
   successors agree: remove it and fall through to TARGET.  */
4821 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4823 gsi_remove (&i, true);
4824 e = ssa_redirect_edge (e, target);
4825 e->flags = EDGE_FALLTHRU;
4833 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4834 edge representing the redirected branch.
     NOTE(review): interior lines are elided in this excerpt; comments
     describe only the visible dispatch.  */
4837 gimple_redirect_edge_and_branch (edge e, basic_block dest)
4839 basic_block bb = e->src;
4840 gimple_stmt_iterator gsi;
/* Abnormal edges cannot be redirected; EH edges get dedicated
   handling.  */
4844 if (e->flags & EDGE_ABNORMAL)
4847 if (e->dest == dest)
4850 if (e->flags & EDGE_EH)
4851 return redirect_eh_edge (e, dest);
4853 if (e->src != ENTRY_BLOCK_PTR)
4855 ret = gimple_try_redirect_by_replacing_jump (e, dest);
/* Dispatch on the kind of statement terminating BB.  */
4860 gsi = gsi_last_bb (bb);
4861 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
4863 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
4866 /* For COND_EXPR, we only need to redirect the edge. */
4870 /* No non-abnormal edges should lead from a non-simple goto, and
4871 simple ones should be represented implicitly. */
4876 tree label = gimple_block_label (dest);
4877 tree cases = get_cases_for_edge (e, stmt);
4879 /* If we have a list of cases associated with E, then use it
4880 as it's a lot faster than walking the entire case vector. */
4883 edge e2 = find_edge (e->src, dest);
4890 CASE_LABEL (cases) = label;
4891 cases = TREE_CHAIN (cases);
4894 /* If there was already an edge in the CFG, then we need
4895 to move all the cases associated with E to E2. */
4898 tree cases2 = get_cases_for_edge (e2, stmt);
4900 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4901 TREE_CHAIN (cases2) = first;
4903 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
/* No case list recorded: walk the whole case vector instead.  */
4907 size_t i, n = gimple_switch_num_labels (stmt);
4909 for (i = 0; i < n; i++)
4911 tree elt = gimple_switch_label (stmt, i);
4912 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4913 CASE_LABEL (elt) = label;
/* GIMPLE_ASM with label operands: retarget the matching label.  */
4921 int i, n = gimple_asm_nlabels (stmt);
4924 for (i = 0; i < n; ++i)
4926 tree cons = gimple_asm_label_op (stmt, i);
4927 if (label_to_block (TREE_VALUE (cons)) == e->dest)
4930 label = gimple_block_label (dest);
4931 TREE_VALUE (cons) = label;
4935 /* If we didn't find any label matching the former edge in the
4936 asm labels, we must be redirecting the fallthrough
4938 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
4943 gsi_remove (&gsi, true);
4944 e->flags |= EDGE_FALLTHRU;
4947 case GIMPLE_OMP_RETURN:
4948 case GIMPLE_OMP_CONTINUE:
4949 case GIMPLE_OMP_SECTIONS_SWITCH:
4950 case GIMPLE_OMP_FOR:
4951 /* The edges from OMP constructs can be simply redirected. */
4954 case GIMPLE_EH_DISPATCH:
4955 if (!(e->flags & EDGE_FALLTHRU))
4956 redirect_eh_dispatch_edge (stmt, e, dest);
4960 /* Otherwise it must be a fallthru edge, and we don't need to
4961 do anything besides redirecting it. */
4962 gcc_assert (e->flags & EDGE_FALLTHRU);
4966 /* Update/insert PHI nodes as necessary. */
4968 /* Now update the edges in the CFG. */
4969 e = ssa_redirect_edge (e, dest);
4974 /* Returns true if it is possible to remove edge E by redirecting
4975 it to the destination of the other edge from E->src.
     Abnormal and EH edges cannot be removed this way.  */
4978 gimple_can_remove_branch_p (const_edge e)
4980 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
4986 /* Simple wrapper, as we can always redirect fallthru edges.
     Delegates to gimple_redirect_edge_and_branch.  */
4989 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
4991 e = gimple_redirect_edge_and_branch (e, dest);
4998 /* Splits basic block BB after statement STMT (but at least after the
4999 labels). If STMT is NULL, BB is split just after the labels.
     The successor edges of BB are moved to the new block, which
     receives the tail of BB's statement sequence.  */
5002 gimple_split_block (basic_block bb, void *stmt)
5004 gimple_stmt_iterator gsi;
5005 gimple_stmt_iterator gsi_tgt;
5012 new_bb = create_empty_bb (bb);
5014 /* Redirect the outgoing edges. */
5015 new_bb->succs = bb->succs;
5017 FOR_EACH_EDGE (e, ei, new_bb->succs)
/* A label split point is treated like NULL: split after the labels.  */
5020 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5023 /* Move everything from GSI to the new basic block. */
5024 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5026 act = gsi_stmt (gsi);
5027 if (gimple_code (act) == GIMPLE_LABEL)
5040 if (gsi_end_p (gsi))
5043 /* Split the statement list - avoid re-creating new containers as this
5044 brings ugly quadratic memory consumption in the inliner.
5045 (We are still quadratic since we need to update stmt BB pointers,
5047 list = gsi_split_seq_before (&gsi);
5048 set_bb_seq (new_bb, list);
/* Re-home every moved statement onto NEW_BB.  */
5049 for (gsi_tgt = gsi_start (list);
5050 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5051 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5057 /* Moves basic block BB after block AFTER.
     No-op when BB already directly follows AFTER.  */
5060 gimple_move_block_after (basic_block bb, basic_block after)
5062 if (bb->prev_bb == after)
5066 link_block (bb, after);
5072 /* Return true if basic_block can be duplicated.
     The GIMPLE CFG hook places no restriction; BB is unused.  */
5075 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5080 /* Create a duplicate of the basic block BB. NOTE: This does not
5081 preserve SSA form.
     PHIs are copied without arguments (edges are not set up yet);
     each copied statement gets new SSA definitions recorded via
     create_new_def_for.  */
5084 gimple_duplicate_bb (basic_block bb)
5087 gimple_stmt_iterator gsi, gsi_tgt;
5088 gimple_seq phis = phi_nodes (bb);
5089 gimple phi, stmt, copy;
5091 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5093 /* Copy the PHI nodes. We ignore PHI node arguments here because
5094 the incoming edges have not been setup yet. */
5095 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5097 phi = gsi_stmt (gsi);
5098 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5099 create_new_def_for (gimple_phi_result (copy), copy,
5100 gimple_phi_result_ptr (copy));
5103 gsi_tgt = gsi_start_bb (new_bb);
5104 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5106 def_operand_p def_p;
5107 ssa_op_iter op_iter;
5109 stmt = gsi_stmt (gsi);
/* Labels are not duplicated into the copy.  */
5110 if (gimple_code (stmt) == GIMPLE_LABEL)
5113 /* Create a new copy of STMT and duplicate STMT's virtual
5115 copy = gimple_copy (stmt);
5116 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5118 maybe_duplicate_eh_stmt (copy, stmt);
5119 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5121 /* Create new names for all the definitions created by COPY and
5122 add replacement mappings for each new name. */
5123 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5124 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5130 /* Adds phi node arguments for edge E_COPY after basic block duplication.
     Locates the original edge E that E_COPY mirrors (mapping duplicated
     blocks back through get_bb_original) and copies each PHI argument
     of E onto the corresponding PHI for E_COPY.  */
5133 add_phi_args_after_copy_edge (edge e_copy)
5135 basic_block bb, bb_copy = e_copy->src, dest;
5138 gimple phi, phi_copy;
5140 gimple_stmt_iterator psi, psi_copy;
/* Nothing to do when the destination has no PHI nodes.  */
5142 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5145 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5147 if (e_copy->dest->flags & BB_DUPLICATED)
5148 dest = get_bb_original (e_copy->dest);
5150 dest = e_copy->dest;
5152 e = find_edge (bb, dest);
5155 /* During loop unrolling the target of the latch edge is copied.
5156 In this case we are not looking for edge to dest, but to
5157 duplicated block whose original was dest. */
5158 FOR_EACH_EDGE (e, ei, bb->succs)
5160 if ((e->dest->flags & BB_DUPLICATED)
5161 && get_bb_original (e->dest) == dest)
5165 gcc_assert (e != NULL);
/* Walk original and copied PHI sequences in lockstep.  */
5168 for (psi = gsi_start_phis (e->dest),
5169 psi_copy = gsi_start_phis (e_copy->dest);
5171 gsi_next (&psi), gsi_next (&psi_copy))
5173 phi = gsi_stmt (psi);
5174 phi_copy = gsi_stmt (psi_copy);
5175 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5176 add_phi_arg (phi_copy, def, e_copy,
5177 gimple_phi_arg_location_from_edge (phi, e));
5182 /* Basic block BB_COPY was created by code duplication. Add phi node
5183 arguments for edges going out of BB_COPY. The blocks that were
5184 duplicated have BB_DUPLICATED set.
     Simply applies add_phi_args_after_copy_edge to each successor.  */
5187 add_phi_args_after_copy_bb (basic_block bb_copy)
5192 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5194 add_phi_args_after_copy_edge (e_copy);
5198 /* Blocks in REGION_COPY array of length N_REGION were created by
5199 duplication of basic blocks. Add phi node arguments for edges
5200 going from these blocks. If E_COPY is not NULL, also add
5201 phi node arguments for its destination.*/
5204 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
/* Temporarily mark the copies with BB_DUPLICATED so the edge-matching
   in add_phi_args_after_copy_edge can map them to their originals;
   the flag is cleared again before returning.  */
5209 for (i = 0; i < n_region; i++)
5210 region_copy[i]->flags |= BB_DUPLICATED;
5212 for (i = 0; i < n_region; i++)
5213 add_phi_args_after_copy_bb (region_copy[i]);
5215 add_phi_args_after_copy_edge (e_copy);
5217 for (i = 0; i < n_region; i++)
5218 region_copy[i]->flags &= ~BB_DUPLICATED;
5221 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5222 important exit edge EXIT. By important we mean that no SSA name defined
5223 inside region is live over the other exit edges of the region. All entry
5224 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5225 to the duplicate of the region. SSA form, dominance and loop information
5226 is updated. The new basic blocks are stored to REGION_COPY in the same
5227 order as they had in REGION, provided that REGION_COPY is not NULL.
5228 The function returns false if it is unable to copy the region,
     NOTE(review): interior lines are elided in this excerpt.  */
5232 gimple_duplicate_sese_region (edge entry, edge exit,
5233 basic_block *region, unsigned n_region,
5234 basic_block *region_copy)
5237 bool free_region_copy = false, copying_header = false;
5238 struct loop *loop = entry->dest->loop_father;
5240 VEC (basic_block, heap) *doms;
5242 int total_freq = 0, entry_freq = 0;
5243 gcov_type total_count = 0, entry_count = 0;
5245 if (!can_copy_bbs_p (region, n_region))
5248 /* Some sanity checking. Note that we do not check for all possible
5249 missuses of the functions. I.e. if you ask to copy something weird,
5250 it will work, but the state of structures probably will not be
5252 for (i = 0; i < n_region; i++)
5254 /* We do not handle subloops, i.e. all the blocks must belong to the
5256 if (region[i]->loop_father != loop)
5259 if (region[i] != entry->dest
5260 && region[i] == loop->header)
5264 set_loop_copy (loop, loop);
5266 /* In case the function is used for loop header copying (which is the primary
5267 use), ensure that EXIT and its copy will be new latch and entry edges. */
5268 if (loop->header == entry->dest)
5270 copying_header = true;
5271 set_loop_copy (loop, loop_outer (loop));
5273 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5276 for (i = 0; i < n_region; i++)
5277 if (region[i] != exit->src
5278 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
/* Allocate the output array ourselves if the caller passed none.  */
5284 region_copy = XNEWVEC (basic_block, n_region);
5285 free_region_copy = true;
5288 gcc_assert (!need_ssa_update_p (cfun));
5290 /* Record blocks outside the region that are dominated by something
5293 initialize_original_copy_tables ();
5295 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Gather profile counts/frequencies so both copies can be rescaled.  */
5297 if (entry->dest->count)
5299 total_count = entry->dest->count;
5300 entry_count = entry->count;
5301 /* Fix up corner cases, to avoid division by zero or creation of negative
5303 if (entry_count > total_count)
5304 entry_count = total_count;
5308 total_freq = entry->dest->frequency;
5309 entry_freq = EDGE_FREQUENCY (entry);
5310 /* Fix up corner cases, to avoid division by zero or creation of negative
5312 if (total_freq == 0)
5314 else if (entry_freq > total_freq)
5315 entry_freq = total_freq;
5318 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5319 split_edge_bb_loc (entry));
5322 scale_bbs_frequencies_gcov_type (region, n_region,
5323 total_count - entry_count,
5325 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5330 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5332 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
/* When copying the header, EXIT becomes the new latch/entry.  */
5337 loop->header = exit->dest;
5338 loop->latch = exit->src;
5341 /* Redirect the entry and add the phi node arguments. */
5342 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5343 gcc_assert (redirected != NULL);
5344 flush_pending_stmts (entry);
5346 /* Concerning updating of dominators: We must recount dominators
5347 for entry block and its copy. Anything that is outside of the
5348 region, but was dominated by something inside needs recounting as
5350 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5351 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5352 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5353 VEC_free (basic_block, heap, doms);
5355 /* Add the other PHI node arguments. */
5356 add_phi_args_after_copy (region_copy, n_region, NULL);
5358 /* Update the SSA web. */
5359 update_ssa (TODO_update_ssa);
5361 if (free_region_copy)
5364 free_original_copy_tables ();
5368 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5369 are stored to REGION_COPY in the same order in that they appear
5370 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5371 the region, EXIT an exit from it. The condition guarding EXIT
5372 is moved to ENTRY. Returns true if duplication succeeds, false
     NOTE(review): many interior lines are elided in this excerpt;
     comments describe only the visible steps.  */
5398 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5399 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5400 basic_block *region_copy ATTRIBUTE_UNUSED)
5403 bool free_region_copy = false;
5404 struct loop *loop = exit->dest->loop_father;
5405 struct loop *orig_loop = entry->dest->loop_father;
5406 basic_block switch_bb, entry_bb, nentry_bb;
5407 VEC (basic_block, heap) *doms;
5408 int total_freq = 0, exit_freq = 0;
5409 gcov_type total_count = 0, exit_count = 0;
5410 edge exits[2], nexits[2], e;
5411 gimple_stmt_iterator gsi,gsi1;
5414 basic_block exit_bb;
5415 basic_block iters_bb;
5417 gimple_stmt_iterator psi;
/* EXIT's source must be a two-successor (conditional) block; exits[1]
   is the other outgoing edge.  */
5421 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5423 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5425 if (!can_copy_bbs_p (region, n_region))
5428 initialize_original_copy_tables ();
5429 set_loop_copy (orig_loop, loop);
5430 duplicate_subloops (orig_loop, loop);
5434 region_copy = XNEWVEC (basic_block, n_region);
5435 free_region_copy = true;
5438 gcc_assert (!need_ssa_update_p (cfun));
5440 /* Record blocks outside the region that are dominated by something
5442 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Gather profile counts/frequencies so both copies can be rescaled.  */
5444 if (exit->src->count)
5446 total_count = exit->src->count;
5447 exit_count = exit->count;
5448 /* Fix up corner cases, to avoid division by zero or creation of negative
5450 if (exit_count > total_count)
5451 exit_count = total_count;
5455 total_freq = exit->src->frequency;
5456 exit_freq = EDGE_FREQUENCY (exit);
5457 /* Fix up corner cases, to avoid division by zero or creation of negative
5459 if (total_freq == 0)
5461 if (exit_freq > total_freq)
5462 exit_freq = total_freq;
5465 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5466 split_edge_bb_loc (exit));
5469 scale_bbs_frequencies_gcov_type (region, n_region,
5470 total_count - exit_count,
5472 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5477 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5479 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5482 /* Create the switch block, and put the exit condition to it. */
5483 entry_bb = entry->dest;
5484 nentry_bb = get_bb_copy (entry_bb);
/* Reuse ENTRY->src as the switch block only if it does not already end
   in a control statement; otherwise split the entry edge.  */
5485 if (!last_stmt (entry->src)
5486 || !stmt_ends_bb_p (last_stmt (entry->src)))
5487 switch_bb = entry->src;
5489 switch_bb = split_edge (entry);
5490 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5492 gsi = gsi_last_bb (switch_bb);
5493 cond_stmt = last_stmt (exit->src);
5494 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5495 cond_stmt = gimple_copy (cond_stmt);
5497 /* If the block consisting of the exit condition has the latch as
5498 successor, then the body of the loop is executed before
5499 the exit condition is tested. In such case, moving the
5500 condition to the entry, causes that the loop will iterate
5501 one less iteration (which is the wanted outcome, since we
5502 peel out the last iteration). If the body is executed after
5503 the condition, moving the condition to the entry requires
5504 decrementing one iteration. */
5505 if (exits[1]->dest == orig_loop->latch)
5506 new_rhs = gimple_cond_rhs (cond_stmt);
5509 new_rhs = fold_build2 (MINUS_EXPR, TREE_TYPE (gimple_cond_rhs (cond_stmt)),
5510 gimple_cond_rhs (cond_stmt),
5511 build_int_cst (TREE_TYPE (gimple_cond_rhs (cond_stmt)), 1));
/* If the iteration count is an SSA name, gimplify the decremented RHS
   right after its defining statement.  */
5513 if (TREE_CODE (gimple_cond_rhs (cond_stmt)) == SSA_NAME)
5515 iters_bb = gimple_bb (SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)));
5516 for (gsi1 = gsi_start_bb (iters_bb); !gsi_end_p (gsi1); gsi_next (&gsi1))
5517 if (gsi_stmt (gsi1) == SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)))
5520 new_rhs = force_gimple_operand_gsi (&gsi1, new_rhs, true,
5521 NULL_TREE,false,GSI_CONTINUE_LINKING);
5524 gimple_cond_set_rhs (cond_stmt, unshare_expr (new_rhs));
5525 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
5526 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
/* Rewire the switch block's outgoing edges: the existing successor
   keeps the non-exit flags, the new edge targets the copied entry.  */
5528 sorig = single_succ_edge (switch_bb);
5529 sorig->flags = exits[1]->flags;
5530 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5532 /* Register the new edge from SWITCH_BB in loop exit lists. */
5533 rescan_loop_exit (snew, true, false);
5535 /* Add the PHI node arguments. */
5536 add_phi_args_after_copy (region_copy, n_region, snew);
5538 /* Get rid of now superfluous conditions and associated edges (and phi node
5540 exit_bb = exit->dest;
5542 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5543 PENDING_STMT (e) = NULL;
5545 /* The latch of ORIG_LOOP was copied, and so was the backedge
5546 to the original header. We redirect this backedge to EXIT_BB. */
5547 for (i = 0; i < n_region; i++)
5548 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5550 gcc_assert (single_succ_edge (region_copy[i]));
5551 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5552 PENDING_STMT (e) = NULL;
5553 for (psi = gsi_start_phis (exit_bb);
5557 phi = gsi_stmt (psi);
5558 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5559 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5562 e = redirect_edge_and_branch (nexits[0], nexits[1]->dest);
5563 PENDING_STMT (e) = NULL;
5565 /* Anything that is outside of the region, but was dominated by something
5566 inside needs to update dominance info. */
5567 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5568 VEC_free (basic_block, heap, doms);
5569 /* Update the SSA web. */
5570 update_ssa (TODO_update_ssa);
5572 if (free_region_copy)
5575 free_original_copy_tables ();
5579 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5580 adding blocks when the dominator traversal reaches EXIT. This
5581 function silently assumes that ENTRY strictly dominates EXIT.
     Recursive walk over the dominator tree rooted at ENTRY.  */
5584 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5585 VEC(basic_block,heap) **bbs_p)
5589 for (son = first_dom_son (CDI_DOMINATORS, entry);
5591 son = next_dom_son (CDI_DOMINATORS, son))
5593 VEC_safe_push (basic_block, heap, *bbs_p, son);
5595 gather_blocks_in_sese_region (son, exit, bbs_p);
5599 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5600 The duplicates are recorded in VARS_MAP.
     A decl already in TO_CONTEXT is left alone; otherwise the map is
     consulted first so each decl is copied only once.  */
5603 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5606 tree t = *tp, new_t;
5607 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5610 if (DECL_CONTEXT (t) == to_context)
5613 loc = pointer_map_contains (vars_map, t);
/* Not seen before: create and record the duplicate.  */
5617 loc = pointer_map_insert (vars_map, t);
5621 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5622 add_local_decl (f, new_t);
/* Only CONST_DECLs take the plain copy_node path here.  */
5626 gcc_assert (TREE_CODE (t) == CONST_DECL);
5627 new_t = copy_node (t);
5629 DECL_CONTEXT (new_t) = to_context;
/* Cached: reuse the previously created duplicate.  */
5634 new_t = (tree) *loc;
5640 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5641 VARS_MAP maps old ssa names and var_decls to the new ones. */
5644 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5648 tree new_name, decl = SSA_NAME_VAR (name);
5650 gcc_assert (is_gimple_reg (name));
5652 loc = pointer_map_contains (vars_map, name);
/* First encounter: duplicate the underlying decl into TO_CONTEXT and
   build the new SSA name inside that function's cfun.  */
5656 replace_by_duplicate_decl (&decl, vars_map, to_context);
5658 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5659 if (gimple_in_ssa_p (cfun))
5660 add_referenced_var (decl);
5662 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
/* Preserve default-def status on the replacement name.  */
5663 if (SSA_NAME_IS_DEFAULT_DEF (name))
5664 set_default_def (decl, new_name);
5667 loc = pointer_map_insert (vars_map, name);
/* Cached: reuse the previously created name.  */
5671 new_name = (tree) *loc;
/* (Fields of struct move_stmt_d; the struct's opening and earlier
   members are elided in this excerpt.)  */
5682 struct pointer_map_t *vars_map;	/* old decl/SSA name -> duplicate.  */
5683 htab_t new_label_map;		/* old LABEL_DECL -> new label (tree_map).  */
5684 struct pointer_map_t *eh_map;	/* old eh_region -> duplicated region.  */
5688 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5689 contained in *TP if it has been ORIG_BLOCK previously and change the
5690 DECL_CONTEXT of every local variable referenced in *TP. */
5693 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5695 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5696 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5700 /* We should never have TREE_BLOCK set on non-statements. */
5701 gcc_assert (!TREE_BLOCK (t));
5703 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
/* SSA names are remapped through the vars_map cache.  */
5705 if (TREE_CODE (t) == SSA_NAME)
5706 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5707 else if (TREE_CODE (t) == LABEL_DECL)
/* Labels: if a label map was built (non-NULL blocks moved across
   functions), substitute the freshly created label; otherwise just
   retarget DECL_CONTEXT.  */
5709 if (p->new_label_map)
5711 struct tree_map in, *out;
5713 out = (struct tree_map *)
5714 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5719 DECL_CONTEXT (t) = p->to_context;
5721 else if (p->remap_decls_p)
5723 /* Replace T with its duplicate. T should no longer appear in the
5724 parent function, so this looks wasteful; however, it may appear
5725 in referenced_vars, and more importantly, as virtual operands of
5726 statements, and in alias lists of other variables. It would be
5727 quite difficult to expunge it from all those places. ??? It might
5728 suffice to do this for addressable variables. */
5729 if ((TREE_CODE (t) == VAR_DECL
5730 && !is_global_var (t))
5731 || TREE_CODE (t) == CONST_DECL)
5732 replace_by_duplicate_decl (tp, p->vars_map, p->to_context)
5735 && gimple_in_ssa_p (cfun))
/* Register the duplicate as referenced in the destination function.  */
5737 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5738 add_referenced_var (*tp);
5744 else if (TYPE_P (t))
5750 /* Helper for move_stmt_r. Given an EH region number for the source
5751 function, map that to the duplicate EH region number in the dest. */
5754 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
5756 eh_region old_r, new_r;
/* Look up the source region by number, then find its duplicate in the
   EH map built by duplicate_eh_regions.  The slot must exist.  */
5759 old_r = get_eh_region_from_number (old_nr);
5760 slot = pointer_map_contains (p->eh_map, old_r);
5761 new_r = (eh_region) *slot;
5763 return new_r->index;
5766 /* Similar, but operate on INTEGER_CSTs. */
5769 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
/* Unwrap the INTEGER_CST, remap the region number, re-wrap it.  */
5773 old_nr = tree_low_cst (old_t_nr, 0);
5774 new_nr = move_stmt_eh_region_nr (old_nr, p);
5776 return build_int_cst (NULL, new_nr);
5779 /* Like move_stmt_op, but for gimple statements.
5781 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5782 contained in the current statement in *GSI_P and change the
5783 DECL_CONTEXT of every local variable referenced in the current
5787 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5788 struct walk_stmt_info *wi)
5790 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5791 gimple stmt = gsi_stmt (*gsi_p);
5792 tree block = gimple_block (stmt);
/* Retarget the statement's BLOCK when it was in ORIG_BLOCK (or when no
   particular block was requested).  */
5794 if (p->orig_block == NULL_TREE
5795 || block == p->orig_block
5796 || block == NULL_TREE)
5797 gimple_set_block (stmt, p->new_block)
5798 #ifdef ENABLE_CHECKING
/* Checking-only: verify BLOCK is nested inside ORIG_BLOCK.  */
5799 else if (block != p->new_block)
5801 while (block && block != p->orig_block)
5802 block = BLOCK_SUPERCONTEXT (block);
5807 switch (gimple_code (stmt))
5810 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
5812 tree r, fndecl = gimple_call_fndecl (stmt);
5813 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5814 switch (DECL_FUNCTION_CODE (fndecl))
5816 case BUILT_IN_EH_COPY_VALUES:
5817 r = gimple_call_arg (stmt, 1);
5818 r = move_stmt_eh_region_tree_nr (r, p);
5819 gimple_call_set_arg (stmt, 1, r);
5822 case BUILT_IN_EH_POINTER:
5823 case BUILT_IN_EH_FILTER:
5824 r = gimple_call_arg (stmt, 0);
5825 r = move_stmt_eh_region_tree_nr (r, p);
5826 gimple_call_set_arg (stmt, 0, r);
/* GIMPLE_RESX carries its EH region number directly.  */
5837 int r = gimple_resx_region (stmt);
5838 r = move_stmt_eh_region_nr (r, p);
5839 gimple_resx_set_region (stmt, r);
5843 case GIMPLE_EH_DISPATCH:
5845 int r = gimple_eh_dispatch_region (stmt);
5846 r = move_stmt_eh_region_nr (r, p);
5847 gimple_eh_dispatch_set_region (stmt, r);
5851 case GIMPLE_OMP_RETURN:
5852 case GIMPLE_OMP_CONTINUE:
5855 if (is_gimple_omp (stmt))
5857 /* Do not remap variables inside OMP directives. Variables
5858 referenced in clauses and directive header belong to the
5859 parent function and should not be moved into the child
5861 bool save_remap_decls_p = p->remap_decls_p;
5862 p->remap_decls_p = false;
5863 *handled_ops_p = true;
5865 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r,
/* Restore the caller's remapping mode after walking the OMP body.  */
5868 p->remap_decls_p = save_remap_decls_p;
5876 /* Move basic block BB from function CFUN to function DEST_FN. The
5877 block is moved out of the original linked list and placed after
5878 block AFTER in the new list. Also, the block is removed from the
5879 original array of blocks and placed in DEST_FN's array of blocks.
5880 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
5881 updated to reflect the moved edges.
5883 The local variables are remapped to new instances, VARS_MAP is used
5884 to record the mapping. */
5887 move_block_to_fn (struct function *dest_cfun, basic_block bb,
5888 basic_block after, bool update_edge_count_p,
5889 struct move_stmt_d *d)
5891 struct control_flow_graph *cfg;
5894 gimple_stmt_iterator si;
5895 unsigned old_len, new_len;
5897 /* Remove BB from dominance structures. */
5898 delete_from_dominance_info (CDI_DOMINATORS, bb);
5900 remove_bb_from_loops (bb);
5902 /* Link BB to the new linked list. */
5903 move_block_after (bb, after);
5905 /* Update the edge count in the corresponding flowgraphs. */
5906 if (update_edge_count_p)
5907 FOR_EACH_EDGE (e, ei, bb->succs)
5909 cfun->cfg->x_n_edges--;
5910 dest_cfun->cfg->x_n_edges++;
5913 /* Remove BB from the original basic block array. */
5914 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5915 cfun->cfg->x_n_basic_blocks--;
5917 /* Grow DEST_CFUN's basic block array if needed. */
5918 cfg = dest_cfun->cfg;
5919 cfg->x_n_basic_blocks++;
5920 if (bb->index >= cfg->x_last_basic_block)
5921 cfg->x_last_basic_block = bb->index + 1;
5923 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5924 if ((unsigned) cfg->x_last_basic_block >= old_len)
/* Grow by roughly 25% headroom beyond the required index.  */
5926 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
5927 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5931 VEC_replace (basic_block, cfg->x_basic_block_info,
5934 /* Remap the variables in phi nodes. */
5935 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
5937 gimple phi = gsi_stmt (si);
5939 tree op = PHI_RESULT (phi);
5942 if (!is_gimple_reg (op))
5944 /* Remove the phi nodes for virtual operands (alias analysis will be
5945 run for the new function, anyway). */
5946 remove_phi_node (&si, true);
/* Real PHI: remap its result and every SSA-name argument into the
   destination function.  */
5950 SET_PHI_RESULT (phi,
5951 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5952 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5954 op = USE_FROM_PTR (use);
5955 if (TREE_CODE (op) == SSA_NAME)
5956 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
/* Remap operands/blocks in all regular statements, and migrate labels,
   EH info and histograms to DEST_CFUN.  */
5962 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5964 gimple stmt = gsi_stmt (si);
5965 struct walk_stmt_info wi;
5967 memset (&wi, 0, sizeof (wi));
5969 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
5971 if (gimple_code (stmt) == GIMPLE_LABEL)
5973 tree label = gimple_label_label (stmt);
5974 int uid = LABEL_DECL_UID (label);
5976 gcc_assert (uid > -1);
/* Move the label's uid->block mapping into the destination CFG.  */
5978 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5979 if (old_len <= (unsigned) uid)
5981 new_len = 3 * uid / 2 + 1;
5982 VEC_safe_grow_cleared (basic_block, gc,
5983 cfg->x_label_to_block_map, new_len);
5986 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5987 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5989 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5991 if (uid >= dest_cfun->cfg->last_label_uid)
5992 dest_cfun->cfg->last_label_uid = uid + 1;
/* Duplicate the EH landing-pad and value-profile info into DEST_CFUN,
   then drop it from the source function.  */
5995 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
5996 remove_stmt_from_eh_lp_fn (cfun, stmt);
5998 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5999 gimple_remove_stmt_histograms (cfun, stmt);
6001 /* We cannot leave any operands allocated from the operand caches of
6002 the current function. */
6003 free_stmt_operands (stmt);
6004 push_cfun (dest_cfun);
/* Retarget goto_block on outgoing edges, mirroring the statement-block
   retargeting done in move_stmt_r.  */
6009 FOR_EACH_EDGE (e, ei, bb->succs)
6012 tree block = e->goto_block;
6013 if (d->orig_block == NULL_TREE
6014 || block == d->orig_block)
6015 e->goto_block = d->new_block;
6016 #ifdef ENABLE_CHECKING
6017 else if (block != d->new_block)
6019 while (block && block != d->orig_block)
6020 block = BLOCK_SUPERCONTEXT (block);
6027 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6028 the outermost EH region. Use REGION as the incoming base EH region. */
6031 find_outermost_region_in_block (struct function *src_cfun,
6032 basic_block bb, eh_region region)
6034 gimple_stmt_iterator si;
6036 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6038 gimple stmt = gsi_stmt (si);
6039 eh_region stmt_region;
/* Map the statement's landing pad number to its EH region.  */
6042 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6043 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
/* First region seen becomes the base; otherwise widen to the common
   outermost enclosing region.  */
6047 region = stmt_region;
6048 else if (stmt_region != region)
6050 region = eh_region_outermost (src_cfun, stmt_region, region);
6051 gcc_assert (region != NULL);
/* Callback for duplicate_eh_regions: create a fresh artificial label
   for DECL in the current function and record the old->new pair in the
   hash table passed through DATA.  */
6060 new_label_mapper (tree decl, void *data)
6062 htab_t hash = (htab_t) data;
6066 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6068 m = XNEW (struct tree_map);
6069 m->hash = DECL_UID (decl);
6070 m->base.from = decl;
6071 m->to = create_artificial_label (UNKNOWN_LOCATION);
6072 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
/* Keep cfun's label uid high-water mark current.  */
6073 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6074 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
/* Each label must be mapped exactly once.  */
6076 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6077 gcc_assert (*slot == NULL);
6084 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6088 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6093 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
/* Only VAR_DECLs and CONST_DECLs are duplicated.  */
6096 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6098 replace_by_duplicate_decl (&t, vars_map, to_context);
/* When a replacement was made, carry over any DECL_VALUE_EXPR and
   splice the duplicate into the chain in place of the original.  */
6101 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6103 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6104 DECL_HAS_VALUE_EXPR_P (t) = 1;
6106 DECL_CHAIN (t) = DECL_CHAIN (*tp);
/* Recurse into nested lexical blocks.  */
6111 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6112 replace_block_vars_by_duplicates (block, vars_map, to_context);
6115 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6116 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6117 single basic block in the original CFG and the new basic block is
6118 returned. DEST_CFUN must not have a CFG yet.
6120 Note that the region need not be a pure SESE region. Blocks inside
6121 the region may contain calls to abort/exit. The only restriction
6122 is that ENTRY_BB should be the only entry point and it must
6125 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6126 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6127 to the new function.
6129 All local variables referenced in the region are assumed to be in
6130 the corresponding BLOCK_VARS and unexpanded variable lists
6131 associated with DEST_CFUN. */
6134 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6135 basic_block exit_bb, tree orig_block)
6137 VEC(basic_block,heap) *bbs, *dom_bbs;
6138 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6139 basic_block after, bb, *entry_pred, *exit_succ, abb;
6140 struct function *saved_cfun = cfun;
6141 int *entry_flag, *exit_flag;
6142 unsigned *entry_prob, *exit_prob;
6143 unsigned i, num_entry_edges, num_exit_edges;
6146 htab_t new_label_map;
6147 struct pointer_map_t *vars_map, *eh_map;
6148 struct loop *loop = entry_bb->loop_father;
6149 struct move_stmt_d d;
6151 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6153 gcc_assert (entry_bb != exit_bb
6155 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6157 /* Collect all the blocks in the region. Manually add ENTRY_BB
6158 because it won't be added by dfs_enumerate_from. */
6160 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6161 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6163 /* The blocks that used to be dominated by something in BBS will now be
6164 dominated by the new block. */
6165 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6166 VEC_address (basic_block, bbs),
6167 VEC_length (basic_block, bbs));
6169 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6170 the predecessor edges to ENTRY_BB and the successor edges to
6171 EXIT_BB so that we can re-attach them to the new basic block that
6172 will replace the region. */
6173 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6174 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6175 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
6176 entry_prob = XNEWVEC (unsigned, num_entry_edges);
/* Record and remove each incoming edge (the removal is elided in this
   excerpt but is implied by the re-attachment below).  */
6178 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6180 entry_prob[i] = e->probability;
6181 entry_flag[i] = e->flags;
6182 entry_pred[i++] = e->src;
/* Same bookkeeping for EXIT_BB's outgoing edges.  */
6188 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6189 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6190 sizeof (basic_block));
6191 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
6192 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6194 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6196 exit_prob[i] = e->probability;
6197 exit_flag[i] = e->flags;
6198 exit_succ[i++] = e->dest;
6210 /* Switch context to the child function to initialize DEST_FN's CFG. */
6211 gcc_assert (dest_cfun->cfg == NULL);
6212 push_cfun (dest_cfun);
6214 init_empty_tree_cfg ();
6216 /* Initialize EH information for the new function. */
6218 new_label_map = NULL;
6221 eh_region region = NULL;
/* Find the outermost EH region used anywhere in the moved blocks, then
   duplicate that region tree into the new function, creating fresh
   labels via new_label_mapper.  */
6223 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6224 region = find_outermost_region_in_block (saved_cfun, bb, region);
6226 init_eh_for_function ();
6229 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6230 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6231 new_label_mapper, new_label_map);
6237 /* Move blocks from BBS into DEST_CFUN. */
6238 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6239 after = dest_cfun->cfg->x_entry_block_ptr;
6240 vars_map = pointer_map_create ();
6242 memset (&d, 0, sizeof (d));
6243 d.orig_block = orig_block;
6244 d.new_block = DECL_INITIAL (dest_cfun->decl);
6245 d.from_context = cfun->decl;
6246 d.to_context = dest_cfun->decl;
6247 d.vars_map = vars_map;
6248 d.new_label_map = new_label_map;
6250 d.remap_decls_p = true;
6252 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6254 /* No need to update edge counts on the last block. It has
6255 already been updated earlier when we detached the region from
6256 the original CFG. */
6257 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6261 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6265 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6267 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6268 = BLOCK_SUBBLOCKS (orig_block);
6269 for (block = BLOCK_SUBBLOCKS (orig_block);
6270 block; block = BLOCK_CHAIN (block))
6271 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6272 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6275 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6276 vars_map, dest_cfun->decl);
/* Release the per-call mapping structures.  */
6279 htab_delete (new_label_map);
6281 pointer_map_destroy (eh_map);
6282 pointer_map_destroy (vars_map);
6284 /* Rewire the entry and exit blocks. The successor to the entry
6285 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6286 the child function. Similarly, the predecessor of DEST_FN's
6287 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6288 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6289 various CFG manipulation function get to the right CFG.
6291 FIXME, this is silly. The CFG ought to become a parameter to
6293 push_cfun (dest_cfun);
6294 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6296 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6299 /* Back in the original function, the SESE region has disappeared,
6300 create a new basic block in its place. */
6301 bb = create_empty_bb (entry_pred[0]);
6303 add_bb_to_loop (bb, loop);
6304 for (i = 0; i < num_entry_edges; i++)
6306 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6307 e->probability = entry_prob[i];
6310 for (i = 0; i < num_exit_edges; i++)
6312 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6313 e->probability = exit_prob[i];
/* The replacement block takes over the dominance role of the region.  */
6316 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6317 FOR_EACH_VEC_ELT (basic_block, dom_bbs, i, abb)
6318 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6319 VEC_free (basic_block, heap, dom_bbs);
6330 VEC_free (basic_block, heap, bbs);
6336 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree-pass.h)
6340 dump_function_to_file (tree fn, FILE *file, int flags)
6343 struct function *dsf;
6344 bool ignore_topmost_bind = false, any_var = false;
/* Print the signature: "name (type arg, type arg, ...)".  */
6348 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6350 arg = DECL_ARGUMENTS (fn);
6353 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6354 fprintf (file, " ");
6355 print_generic_expr (file, arg, dump_flags);
6356 if (flags & TDF_VERBOSE)
6357 print_node (file, "", arg, 4);
6358 if (DECL_CHAIN (arg))
6359 fprintf (file, ", ");
6360 arg = DECL_CHAIN (arg);
6362 fprintf (file, ")\n");
6364 if (flags & TDF_VERBOSE)
6365 print_node (file, "", fn, 2);
6367 dsf = DECL_STRUCT_FUNCTION (fn);
6368 if (dsf && (flags & TDF_EH))
6369 dump_eh_tree (file, dsf);
/* Raw dump of a function with no GIMPLE body: dump the tree and stop.  */
6371 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6373 dump_node (fn, TDF_SLIM | flags, file);
6377 /* Switch CFUN to point to FN. */
6378 push_cfun (DECL_STRUCT_FUNCTION (fn));
6380 /* When GIMPLE is lowered, the variables are no longer available in
6381 BIND_EXPRs, so display them separately. */
6382 if (cfun && cfun->decl == fn && !VEC_empty (tree, cfun->local_decls))
6385 ignore_topmost_bind = true;
6387 fprintf (file, "{\n");
6388 FOR_EACH_LOCAL_DECL (cfun, ix, var)
6390 print_generic_decl (file, var, flags);
6391 if (flags & TDF_VERBOSE)
6392 print_node (file, "", var, 4);
6393 fprintf (file, "\n");
6399 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6401 /* If the CFG has been built, emit a CFG-based dump. */
6402 check_bb_profile (ENTRY_BLOCK_PTR, file);
6403 if (!ignore_topmost_bind)
6404 fprintf (file, "{\n");
6406 if (any_var && n_basic_blocks)
6407 fprintf (file, "\n");
6410 gimple_dump_bb (bb, file, 2, flags);
6412 fprintf (file, "}\n");
6413 check_bb_profile (EXIT_BLOCK_PTR, file);
6415 else if (DECL_SAVED_TREE (fn) == NULL)
6417 /* The function is now in GIMPLE form but the CFG has not been
6418 built yet. Emit the single sequence of GIMPLE statements
6419 that make up its body. */
6420 gimple_seq body = gimple_body (fn);
/* A body that is exactly one GIMPLE_BIND prints without extra braces.  */
6422 if (gimple_seq_first_stmt (body)
6423 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6424 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6425 print_gimple_seq (file, body, 0, flags);
6428 if (!ignore_topmost_bind)
6429 fprintf (file, "{\n");
6432 fprintf (file, "\n");
6434 print_gimple_seq (file, body, 2, flags);
6435 fprintf (file, "}\n");
6442 /* Make a tree based dump. */
6443 chain = DECL_SAVED_TREE (fn);
6445 if (chain && TREE_CODE (chain) == BIND_EXPR)
6447 if (ignore_topmost_bind)
6449 chain = BIND_EXPR_BODY (chain);
6457 if (!ignore_topmost_bind)
6458 fprintf (file, "{\n");
6463 fprintf (file, "\n");
6465 print_generic_stmt_indented (file, chain, flags, indent);
6466 if (ignore_topmost_bind)
6467 fprintf (file, "}\n");
6470 if (flags & TDF_ENUMERATE_LOCALS)
6471 dump_enumerated_decls (file, flags);
6472 fprintf (file, "\n\n");
6479 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6482 debug_function (tree fn, int flags)
6484 dump_function_to_file (fn, stderr, flags);
6488 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6491 print_pred_bbs (FILE *file, basic_block bb)
/* One "bb_N " token per incoming edge's source block.  */
6496 FOR_EACH_EDGE (e, ei, bb->preds)
6497 fprintf (file, "bb_%d ", e->src->index);
6501 /* Print on FILE the indexes for the successors of basic_block BB. */
6504 print_succ_bbs (FILE *file, basic_block bb)
/* One "bb_N " token per outgoing edge's destination block.  */
6509 FOR_EACH_EDGE (e, ei, bb->succs)
6510 fprintf (file, "bb_%d ", e->dest->index);
6513 /* Print to FILE the basic block BB following the VERBOSITY level. */
6516 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
/* Build an INDENT-character indentation prefix on the stack.  */
6518 char *s_indent = (char *) alloca ((size_t) indent + 1);
6519 memset ((void *) s_indent, ' ', (size_t) indent);
6520 s_indent[indent] = '\0';
6522 /* Print basic_block's header. */
6525 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6526 print_pred_bbs (file, bb);
6527 fprintf (file, "}, succs = {");
6528 print_succ_bbs (file, bb);
6529 fprintf (file, "})\n");
6532 /* Print basic_block's body. */
6535 fprintf (file, "%s {\n", s_indent);
6536 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6537 fprintf (file, "%s }\n", s_indent);
6541 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6543 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
6544 VERBOSITY level this outputs the contents of the loop, or just its
6548 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6556 s_indent = (char *) alloca ((size_t) indent + 1);
6557 memset ((void *) s_indent, ' ', (size_t) indent);
6558 s_indent[indent] = '\0';
6560 /* Print loop's header. */
6561 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6562 loop->num, loop->header->index, loop->latch->index);
6563 fprintf (file, ", niter = ");
6564 print_generic_expr (file, loop->nb_iterations, 0);
/* Optional bound/estimate annotations, printed only when known.  */
6566 if (loop->any_upper_bound)
6568 fprintf (file, ", upper_bound = ");
6569 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6572 if (loop->any_estimate)
6574 fprintf (file, ", estimate = ");
6575 dump_double_int (file, loop->nb_iterations_estimate, true);
6577 fprintf (file, ")\n");
6579 /* Print loop's body. */
6582 fprintf (file, "%s{\n", s_indent);
/* Only blocks belonging directly to LOOP are printed here; blocks of
   inner loops are handled by the recursive sibling walk below.  */
6584 if (bb->loop_father == loop)
6585 print_loops_bb (file, bb, indent, verbosity);
6587 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6588 fprintf (file, "%s}\n", s_indent);
6592 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6593 spaces. Following VERBOSITY level this outputs the contents of the
6594 loop, or just its structure. */
6597 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
/* Print LOOP, then recurse along the sibling chain via loop->next.  */
6602 print_loop (file, loop, indent, verbosity);
6603 print_loop_and_siblings (file, loop->next, indent, verbosity);
6606 /* Follow a CFG edge from the entry point of the program, and on entry
6607 of a loop, pretty print the loop structure on FILE. */
6610 print_loops (FILE *file, int verbosity)
/* Start from the entry block's loop_father (the root of the loop tree).  */
6614 bb = ENTRY_BLOCK_PTR;
6615 if (bb && bb->loop_father)
6616 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6620 /* Debugging loops structure at tree level, at some VERBOSITY level. */
6623 debug_loops (int verbosity)
6625 print_loops (stderr, verbosity);
6628 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6631 debug_loop (struct loop *loop, int verbosity)
6633 print_loop (stderr, loop, 0, verbosity);
6636 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6640 debug_loop_num (unsigned num, int verbosity)
6642 debug_loop (get_loop (num), verbosity);
6645 /* Return true if BB ends with a call, possibly followed by some
6646 instructions that must stay with the call. Return false,
6650 gimple_block_ends_with_call_p (basic_block bb)
/* Skip trailing debug statements when locating the last statement.  */
6652 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6653 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
6657 /* Return true if BB ends with a conditional branch. Return false,
6661 gimple_block_ends_with_condjump_p (const_basic_block bb)
6663 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6664 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6668 /* Return true if we need to add fake edge to exit at statement T.
6669 Helper function for gimple_flow_call_edges_add. */
6672 need_fake_edge_p (gimple t)
6674 tree fndecl = NULL_TREE;
6677 /* NORETURN and LONGJMP calls already have an edge to exit.
6678 CONST and PURE calls do not need one.
6679 We don't currently check for CONST and PURE here, although
6680 it would be a good idea, because those attributes are
6681 figured out from the RTL in mark_constant_function, and
6682 the counter incrementation code from -fprofile-arcs
6683 leads to different results from -fbranch-probabilities. */
6684 if (is_gimple_call (t))
6686 fndecl = gimple_call_fndecl (t);
6687 call_flags = gimple_call_flags (t);
/* Nothrow builtins that cannot return twice don't need a fake edge
   (except __builtin_fork, see below).  */
6690 if (is_gimple_call (t)
6692 && DECL_BUILT_IN (fndecl)
6693 && (call_flags & ECF_NOTHROW)
6694 && !(call_flags & ECF_RETURNS_TWICE)
6695 /* fork() doesn't really return twice, but the effect of
6696 wrapping it in __gcov_fork() which calls __gcov_flush()
6697 and clears the counters before forking has the same
6698 effect as returning twice. Force a fake edge. */
6699 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6700 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
/* Any other call that may return needs a fake edge.  */
6703 if (is_gimple_call (t)
6704 && !(call_flags & ECF_NORETURN))
/* Volatile or input asm can also transfer control unexpectedly.  */
6707 if (gimple_code (t) == GIMPLE_ASM
6708 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6715 /* Add fake edges to the function exit for any non constant and non
6716 noreturn calls, volatile inline assembly in the bitmap of blocks
6717 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
6718 the number of blocks that were split.
6720 The goal is to expose cases in which entering a basic block does
6721 not imply that all subsequent instructions must be executed. */
6724 gimple_flow_call_edges_add (sbitmap blocks)
6727 int blocks_split = 0;
6728 int last_bb = last_basic_block;
6729 bool check_last_block = false;
/* Nothing to do for an empty CFG.  */
6731 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6735 check_last_block = true;
6737 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6739 /* In the last basic block, before epilogue generation, there will be
6740 a fallthru edge to EXIT. Special care is required if the last insn
6741 of the last basic block is a call because make_edge folds duplicate
6742 edges, which would result in the fallthru edge also being marked
6743 fake, which would result in the fallthru edge being removed by
6744 remove_fake_edges, which would result in an invalid CFG.
6746 Moreover, we can't elide the outgoing fake edge, since the block
6747 profiler needs to take this into account in order to solve the minimal
6748 spanning tree in the case that the call doesn't return.
6750 Handle this by adding a dummy instruction in a new last basic block. */
6751 if (check_last_block)
6753 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6754 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6757 if (!gsi_end_p (gsi))
6760 if (t && need_fake_edge_p (t))
6764 e = find_edge (bb, EXIT_BLOCK_PTR);
/* Insert a nop on the edge to EXIT so the call is no longer last.  */
6767 gsi_insert_on_edge (e, gimple_build_nop ());
6768 gsi_commit_edge_inserts ();
6773 /* Now add fake edges to the function exit for any non constant
6774 calls since there is no way that we can determine if they will
6776 for (i = 0; i < last_bb; i++)
6778 basic_block bb = BASIC_BLOCK (i);
6779 gimple_stmt_iterator gsi;
6780 gimple stmt, last_stmt;
/* Honor the BLOCKS filter when one was supplied.  */
6785 if (blocks && !TEST_BIT (blocks, i))
6788 gsi = gsi_last_bb (bb);
6789 if (!gsi_end_p (gsi))
6791 last_stmt = gsi_stmt (gsi);
/* Scan the block backwards for statements needing a fake edge.  */
6794 stmt = gsi_stmt (gsi);
6795 if (need_fake_edge_p (stmt))
6799 /* The handling above of the final block before the
6800 epilogue should be enough to verify that there is
6801 no edge to the exit block in CFG already.
6802 Calling make_edge in such case would cause us to
6803 mark that edge as fake and remove it later. */
6804 #ifdef ENABLE_CHECKING
6805 if (stmt == last_stmt)
6807 e = find_edge (bb, EXIT_BLOCK_PTR);
6808 gcc_assert (e == NULL);
6812 /* Note that the following may create a new basic block
6813 and renumber the existing basic blocks. */
6814 if (stmt != last_stmt)
6816 e = split_block (bb, stmt);
6820 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6824 while (!gsi_end_p (gsi));
6829 verify_flow_info ();
6831 return blocks_split;
6834 /* Purge dead abnormal call edges from basic block BB. */
6837 gimple_purge_dead_abnormal_call_edges (basic_block bb)
/* Start by purging dead EH edges; fold that result into CHANGED.  */
6839 bool changed = gimple_purge_dead_eh_edges (bb);
6841 if (cfun->has_nonlocal_label)
6843 gimple stmt = last_stmt (bb);
/* If the last statement can no longer make an abnormal goto, any
   remaining abnormal edges out of BB are dead.  */
6847 if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
6848 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6850 if (e->flags & EDGE_ABNORMAL)
6859 /* See gimple_purge_dead_eh_edges below. */
6861 free_dominance_info (CDI_DOMINATORS);
6867 /* Removes edge E and all the blocks dominated by it, and updates dominance
6868 information. The IL in E->src needs to be updated separately.
6869 If dominance info is not available, only the edge E is removed.*/
6872 remove_edge_and_dominated_blocks (edge e)
6874 VEC (basic_block, heap) *bbs_to_remove = NULL;
6875 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6879 bool none_removed = false;
6881 basic_block bb, dbb;
/* Without dominators, fall back to plain edge removal.  */
6884 if (!dom_info_available_p (CDI_DOMINATORS))
6890 /* No updating is needed for edges to exit. */
6891 if (e->dest == EXIT_BLOCK_PTR)
6893 if (cfgcleanup_altered_bbs)
6894 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6899 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6900 that is not dominated by E->dest, then this set is empty. Otherwise,
6901 all the basic blocks dominated by E->dest are removed.
6903 Also, to DF_IDOM we store the immediate dominators of the blocks in
6904 the dominance frontier of E (i.e., of the successors of the
6905 removed blocks, if there are any, and of E->dest otherwise). */
6906 FOR_EACH_EDGE (f, ei, e->dest->preds)
6911 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6913 none_removed = true;
6918 df = BITMAP_ALLOC (NULL);
6919 df_idom = BITMAP_ALLOC (NULL);
/* Nothing removed: only E->dest's idom can change.  */
6922 bitmap_set_bit (df_idom,
6923 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
/* Otherwise collect everything dominated by E->dest and compute the
   dominance frontier of the removed set into DF.  */
6926 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
6927 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
6929 FOR_EACH_EDGE (f, ei, bb->succs)
6931 if (f->dest != EXIT_BLOCK_PTR)
6932 bitmap_set_bit (df, f->dest->index);
6935 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
6936 bitmap_clear_bit (df, bb->index);
6938 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6940 bb = BASIC_BLOCK (i);
6941 bitmap_set_bit (df_idom,
6942 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6946 if (cfgcleanup_altered_bbs)
6948 /* Record the set of the altered basic blocks. */
6949 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6950 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6953 /* Remove E and the cancelled blocks. */
6958 /* Walk backwards so as to get a chance to substitute all
6959 released DEFs into debug stmts. See
6960 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
6962 for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
6963 delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
6966 /* Update the dominance information. The immediate dominator may change only
6967 for blocks whose immediate dominator belongs to DF_IDOM:
6969 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6970 removal. Let Z the arbitrary block such that idom(Z) = Y and
6971 Z dominates X after the removal. Before removal, there exists a path P
6972 from Y to X that avoids Z. Let F be the last edge on P that is
6973 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6974 dominates W, and because of P, Z does not dominate W), and W belongs to
6975 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6976 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6978 bb = BASIC_BLOCK (i);
6979 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6981 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6982 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6985 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
6988 BITMAP_FREE (df_idom);
6989 VEC_free (basic_block, heap, bbs_to_remove);
6990 VEC_free (basic_block, heap, bbs_to_fix_dom);
6993 /* Purge dead EH edges from basic block BB. */
/* CHANGED below presumably feeds a boolean "did anything change" return
   value.  NOTE(review): the embedded original line numbering skips values
   in this chunk, so the return-type line, declarations of e/ei, braces
   and the return statement are not visible here -- verify against the
   full tree-cfg.c before relying on this text.  */
6996 gimple_purge_dead_eh_edges (basic_block bb)
6998 bool changed = false;
7001 gimple stmt = last_stmt (bb);
/* If BB's last statement can still throw internally, its EH edges are
   live and must not be purged.  */
7003 if (stmt && stmt_can_throw_internal (stmt))
/* No automatic advance in the loop header: removing an edge compacts the
   successor vector, so the iterator is presumably only advanced (on a
   non-visible line) when the current edge is kept.  */
7006 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7008 if (e->flags & EDGE_EH)
/* Removing an EH edge can leave whole blocks unreachable; delete them
   together with everything they dominate.  */
7010 remove_edge_and_dominated_blocks (e);
/* Purge dead EH edges from every basic block whose index is set in
   BLOCKS.  NOTE(review): the embedded numbering skips lines here -- the
   declarations of i/bi, braces, an apparent NULL-check guarding the call
   below, and the final return are not visible in this chunk.  */
7021 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7023 bool changed = false;
7027 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7029 basic_block bb = BASIC_BLOCK (i);
7031 /* Earlier gimple_purge_dead_eh_edges could have removed
7032 this basic block already. */
/* BB may legitimately be NULL only if a previous iteration already
   changed the CFG; otherwise the bitmap entry would be inexplicable.  */
7033 gcc_assert (bb || changed);
7035 changed |= gimple_purge_dead_eh_edges (bb);
7041 /* This function is called whenever a new edge is created or
/* ...(remainder of this comment and the function's return-type line are
   not visible in this chunk).  CFG-hook callback run when E->dest gains
   a predecessor: make room for the new edge's PHI arguments.  */
7045 gimple_execute_on_growing_pred (edge e)
7047 basic_block bb = e->dest;
/* Only blocks that actually carry PHI nodes need new argument slots.  */
7049 if (!gimple_seq_empty_p (phi_nodes (bb)))
7050 reserve_phi_args_for_new_edge (bb);
7053 /* This function is called immediately before edge E is removed from
7054 the edge vector E->dest->preds. */
/* CFG-hook callback: drop the PHI arguments associated with the
   disappearing predecessor edge so the argument vectors stay in sync
   with the edge vector.  */
7057 gimple_execute_on_shrinking_pred (edge e)
7059 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7060 remove_phi_args (e);
7063 /*---------------------------------------------------------------------------
7064 Helper functions for Loop versioning
7065 ---------------------------------------------------------------------------*/
7067 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7068 of 'first'. Both of them are dominated by 'new_head' basic block. When
7069 'new_head' was created by 'second's incoming edge it received phi arguments
7070 on the edge by split_edge(). Later, additional edge 'e' was created to
7071 connect 'new_head' and 'first'. Now this routine adds phi args on this
7072 additional edge 'e' that new_head to second edge received as part of edge
/* (In other words: for each PHI in FIRST, copy onto the new edge E the
   argument that the corresponding PHI in SECOND carries on the
   NEW_HEAD->SECOND edge.  The closing line of the comment above and the
   function's return-type line are not visible in this chunk.)  */
7076 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7077 basic_block new_head, edge e)
7080 gimple_stmt_iterator psi1, psi2;
7082 edge e2 = find_edge (new_head, second);
7084 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7085 edge, we should always have an edge from NEW_HEAD to SECOND. */
7086 gcc_assert (e2 != NULL);
7088 /* Browse all 'second' basic block phi nodes and add phi args to
7089 edge 'e' for 'first' head. PHI args are always in correct order. */
/* Walk both PHI sequences in lockstep; corresponding PHIs sit at the
   same position because SECOND is a copy of FIRST.  NOTE(review): the
   declarations of phi1/phi2/def are on lines not visible here.  */
7091 for (psi2 = gsi_start_phis (second),
7092 psi1 = gsi_start_phis (first);
7093 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7094 gsi_next (&psi2), gsi_next (&psi1))
7096 phi1 = gsi_stmt (psi1);
7097 phi2 = gsi_stmt (psi2);
/* Reuse SECOND's incoming value and its source location for edge E.  */
7098 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7099 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7104 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7105 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7106 the destination of the ELSE part. */
/* Loop-versioning CFG hook.  COND_E is typed void* to match the hook
   signature and is really a tree.  NOTE(review): the embedded numbering
   skips lines; the return-type line, the declaration of e0, braces and
   any handling of the TRUE edge are not visible in this chunk.  */
7109 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7110 basic_block second_head ATTRIBUTE_UNUSED,
7111 basic_block cond_bb, void *cond_e)
7113 gimple_stmt_iterator gsi;
7114 gimple new_cond_expr;
7115 tree cond_expr = (tree) cond_e;
7118 /* Build new conditional expr */
7119 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7120 NULL_TREE, NULL_TREE);
7122 /* Add new cond in cond_bb. */
7123 gsi = gsi_last_bb (cond_bb);
7124 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7126 /* Adjust edges appropriately to connect new head with first head
7127 as well as second head. */
/* The former fallthrough successor becomes the FALSE (else) edge of the
   freshly inserted condition.  */
7128 e0 = single_succ_edge (cond_bb);
7129 e0->flags &= ~EDGE_FALLTHRU;
7130 e0->flags |= EDGE_FALSE_VALUE;
/* The table of CFG hooks used when the IL is GIMPLE.  NOTE(review): the
   embedded numbering skips lines; the leading name field and the closing
   brace/semicolon of this initializer are not visible in this chunk.  */
7133 struct cfg_hooks gimple_cfg_hooks = {
7135 gimple_verify_flow_info, /* verify_flow_info */
7136 gimple_dump_bb, /* dump_bb */
7137 create_bb, /* create_basic_block */
7138 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7139 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7140 gimple_can_remove_branch_p, /* can_remove_branch_p */
7141 remove_bb, /* delete_basic_block */
7142 gimple_split_block, /* split_block */
7143 gimple_move_block_after, /* move_block_after */
7144 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7145 gimple_merge_blocks, /* merge_blocks */
7146 gimple_predict_edge, /* predict_edge */
7147 gimple_predicted_by_p, /* predicted_by_p */
7148 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7149 gimple_duplicate_bb, /* duplicate_block */
7150 gimple_split_edge, /* split_edge */
7151 gimple_make_forwarder_block, /* make_forward_block */
7152 NULL, /* tidy_fallthru_edge */
7153 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7154 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7155 gimple_flow_call_edges_add, /* flow_call_edges_add */
7156 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7157 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7158 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7159 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7160 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7161 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7162 flush_pending_stmts /* flush_pending_stmts */
7166 /* Split all critical edges. */
/* Pass execute function.  NOTE(review): the embedded numbering skips
   lines here -- the return-type line, declarations of bb/e/ei, the
   enclosing FOR_EACH_BB loop, the actual split_edge calls, braces and
   the return value are not visible in this chunk; verify against the
   full tree-cfg.c.  */
7169 split_critical_edges (void)
7175 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7176 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7177 mappings around the calls to split_edge. */
7178 start_recording_case_labels ();
7181 FOR_EACH_EDGE (e, ei, bb->succs)
/* Critical edges (source with multiple successors, destination with
   multiple predecessors) are split -- except abnormal ones, which cannot
   be split.  */
7183 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7185 /* PRE inserts statements to edges and expects that
7186 since split_critical_edges was done beforehand, committing edge
7187 insertions will not split more edges. In addition to critical
7188 edges we must split edges that have multiple successors and
7189 end by control flow statements, such as RESX.
7190 Go ahead and split them too. This matches the logic in
7191 gimple_find_edge_insert_loc. */
7192 else if ((!single_pred_p (e->dest)
7193 || !gimple_seq_empty_p (phi_nodes (e->dest))
7194 || e->dest == EXIT_BLOCK_PTR)
7195 && e->src != ENTRY_BLOCK_PTR
7196 && !(e->flags & EDGE_ABNORMAL))
7198 gimple_stmt_iterator gsi;
7200 gsi = gsi_last_bb (e->src);
7201 if (!gsi_end_p (gsi)
7202 && stmt_ends_bb_p (gsi_stmt (gsi))
/* Returns (and, presumably, __builtin_return calls -- the builtin-check
   argument continues on a non-visible line) do not require a split.  */
7203 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7204 && !gimple_call_builtin_p (gsi_stmt (gsi),
/* Balance the start_recording_case_labels call above.  */
7210 end_recording_case_labels ();
/* Pass descriptor for split_critical_edges.  NOTE(review): several
   initializer fields (pass kind, gate, sub/next pointers, closing brace)
   fall on lines not visible in this chunk.  */
7214 struct gimple_opt_pass pass_split_crit_edges =
7218 "crited", /* name */
7220 split_critical_edges, /* execute */
7223 0, /* static_pass_number */
7224 TV_TREE_SPLIT_EDGES, /* tv_id */
7225 PROP_cfg, /* properties required */
7226 PROP_no_crit_edges, /* properties_provided */
7227 0, /* properties_destroyed */
7228 0, /* todo_flags_start */
7229 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
7234 /* Build a ternary operation and gimplify it. Emit code before GSI.
7235 Return the gimple_val holding the result. */
/* NOTE(review): the return-type line, braces, the declaration of `ret'
   and the trailing argument of force_gimple_operand_gsi are on lines not
   visible in this chunk.  */
7238 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7239 tree type, tree a, tree b, tree c)
7242 location_t loc = gimple_location (gsi_stmt (*gsi));
/* Fold first; any residual non-GIMPLE expression is then forced into
   statements emitted before GSI.  */
7244 ret = fold_build3_loc (loc, code, type, a, b, c);
7247 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7251 /* Build a binary operation and gimplify it. Emit code before GSI.
7252 Return the gimple_val holding the result. */
/* Two-operand sibling of gimplify_build3: fold, then force any residual
   expression into statements before GSI.  NOTE(review): the return-type
   line, braces and the trailing force_gimple_operand_gsi argument are on
   lines not visible in this chunk.  */
7255 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7256 tree type, tree a, tree b)
7260 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7263 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7267 /* Build a unary operation and gimplify it. Emit code before GSI.
7268 Return the gimple_val holding the result. */
/* One-operand sibling of gimplify_build2/3.  NOTE(review): the
   return-type line, the continuation of the parameter list (operand A),
   braces and the trailing force_gimple_operand_gsi argument are on lines
   not visible in this chunk.  */
7271 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7276 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7279 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7285 /* Emit return warnings. */
/* Pass execute function.  Two diagnostics are issued:
   (1) a `noreturn' function (TREE_THIS_VOLATILE on the decl) whose exit
       block is reachable does in fact return;
   (2) under -Wreturn-type, control reaches the end of a non-void
       function via a value-less `return;'.
   NOTE(review): declarations of e/ei/last, braces and the function's
   return statement are on lines not visible in this chunk.  */
7288 execute_warn_function_return (void)
7290 source_location location;
7295 /* If we have a path to EXIT, then we do return. */
7296 if (TREE_THIS_VOLATILE (cfun->decl)
7297 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7299 location = UNKNOWN_LOCATION;
/* Prefer the location of an actual return statement for the warning.  */
7300 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7302 last = last_stmt (e->src);
7303 if ((gimple_code (last) == GIMPLE_RETURN
7304 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
7305 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
/* Fall back to the end of the function when no return had a usable
   location.  */
7308 if (location == UNKNOWN_LOCATION)
7309 location = cfun->function_end_locus;
7310 warning_at (location, 0, "%<noreturn%> function does return");
7313 /* If we see "return;" in some basic block, then we do reach the end
7314 without returning a value. */
7315 else if (warn_return_type
7316 && !TREE_NO_WARNING (cfun->decl)
7317 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7318 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7320 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7322 gimple last = last_stmt (e->src);
7323 if (gimple_code (last) == GIMPLE_RETURN
7324 && gimple_return_retval (last) == NULL
7325 && !gimple_no_warning_p (last))
7327 location = gimple_location (last);
7328 if (location == UNKNOWN_LOCATION)
7329 location = cfun->function_end_locus;
7330 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
/* Mark the decl so this warning is emitted at most once per function.  */
7331 TREE_NO_WARNING (cfun->decl) = 1;
7340 /* Given a basic block B which ends with a conditional and has
7341 precisely two successors, determine which of the edges is taken if
7342 the conditional is true and which is taken if the conditional is
7343 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
/* NOTE(review): the parameter lines declaring TRUE_EDGE/FALSE_EDGE, the
   return-type line, braces and the assignments on the EDGE_TRUE_VALUE
   branch are on lines not visible in this chunk.  */
7346 extract_true_false_edges_from_block (basic_block b,
/* Inspect successor 0: its TRUE/FALSE flag determines which output each
   of the two edges is assigned to.  */
7350 edge e = EDGE_SUCC (b, 0);
7352 if (e->flags & EDGE_TRUE_VALUE)
7355 *false_edge = EDGE_SUCC (b, 1);
7360 *true_edge = EDGE_SUCC (b, 1);
/* Pass descriptor for execute_warn_function_return.  The leading `*' in
   the name suppresses dump-file output for this pass.  NOTE(review):
   several initializer fields (pass kind, gate, sub/next pointers,
   closing brace) fall on lines not visible in this chunk.  */
7364 struct gimple_opt_pass pass_warn_function_return =
7368 "*warn_function_return", /* name */
7370 execute_warn_function_return, /* execute */
7373 0, /* static_pass_number */
7374 TV_NONE, /* tv_id */
7375 PROP_cfg, /* properties_required */
7376 0, /* properties_provided */
7377 0, /* properties_destroyed */
7378 0, /* todo_flags_start */
7379 0 /* todo_flags_finish */
7383 /* Emit noreturn warnings. */
/* Suggest the `noreturn' attribute for functions whose exit block has no
   predecessors (they provably never return) and that are not already
   marked noreturn (TREE_THIS_VOLATILE on the decl).  */
7386 execute_warn_function_noreturn (void)
7388 if (!TREE_THIS_VOLATILE (current_function_decl)
7389 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
7390 warn_function_noreturn (current_function_decl);
/* Gate: run the noreturn-suggestion pass only when the corresponding
   warning flag (-Wsuggest-attribute=noreturn) is enabled.  */
7395 gate_warn_function_noreturn (void)
7397 return warn_suggest_attribute_noreturn;
/* Pass descriptor for execute_warn_function_noreturn.  NOTE(review):
   several initializer fields (pass kind, sub/next pointers, closing
   brace) fall on lines not visible in this chunk.  */
7400 struct gimple_opt_pass pass_warn_function_noreturn =
7404 "*warn_function_noreturn", /* name */
7405 gate_warn_function_noreturn, /* gate */
7406 execute_warn_function_noreturn, /* execute */
7409 0, /* static_pass_number */
7410 TV_NONE, /* tv_id */
7411 PROP_cfg, /* properties_required */
7412 0, /* properties_provided */
7413 0, /* properties_destroyed */
7414 0, /* todo_flags_start */
7415 0 /* todo_flags_finish */
7420 /* Walk a gimplified function and warn for functions whose return value is
7421 ignored and attribute((warn_unused_result)) is set. This is done before
7422 inlining, so we don't have to worry about that. */
/* NOTE(review): the embedded numbering skips lines -- the return-type
   line, braces, several case labels (the containers recursed into
   below), the declarations of fdecl/ftype, the fdecl conditional and the
   final warning arguments are not visible in this chunk.  */
7425 do_warn_unused_result (gimple_seq seq)
7428 gimple_stmt_iterator i;
7430 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7432 gimple g = gsi_stmt (i);
7434 switch (gimple_code (g))
/* Container statements: recurse into every nested sequence.  */
7437 do_warn_unused_result (gimple_bind_body (g));
7440 do_warn_unused_result (gimple_try_eval (g));
7441 do_warn_unused_result (gimple_try_cleanup (g));
7444 do_warn_unused_result (gimple_catch_handler (g));
7446 case GIMPLE_EH_FILTER:
7447 do_warn_unused_result (gimple_eh_filter_failure (g));
/* A call with an LHS uses its value -- nothing to warn about.  */
7451 if (gimple_call_lhs (g))
7454 /* This is a naked call, as opposed to a GIMPLE_CALL with an
7455 LHS. All calls whose value is ignored should be
7456 represented like this. Look for the attribute. */
7457 fdecl = gimple_call_fndecl (g);
7458 ftype = TREE_TYPE (TREE_TYPE (gimple_call_fn (g)));
7460 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7462 location_t loc = gimple_location (g);
/* Mention the callee decl when known; otherwise fall back to the generic
   wording (the fdecl test sits on a non-visible line).  */
7465 warning_at (loc, OPT_Wunused_result,
7466 "ignoring return value of %qD, "
7467 "declared with attribute warn_unused_result",
7470 warning_at (loc, OPT_Wunused_result,
7471 "ignoring return value of function "
7472 "declared with attribute warn_unused_result");
7477 /* Not a container, not a call, or a call whose value is used. */
/* Pass execute function: walk the (pre-CFG) GIMPLE body of the current
   function looking for ignored warn_unused_result calls.  */
7484 run_warn_unused_result (void)
7486 do_warn_unused_result (gimple_body (current_function_decl));
/* Gate: run the unused-result pass only when -Wunused-result is in
   effect.  */
7491 gate_warn_unused_result (void)
7493 return flag_warn_unused_result;
7496 struct gimple_opt_pass pass_warn_unused_result =
7500 "*warn_unused_result", /* name */
7501 gate_warn_unused_result, /* gate */
7502 run_warn_unused_result, /* execute */
7505 0, /* static_pass_number */
7506 TV_NONE, /* tv_id */
7507 PROP_gimple_any, /* properties_required */
7508 0, /* properties_provided */
7509 0, /* properties_destroyed */
7510 0, /* todo_flags_start */
7511 0, /* todo_flags_finish */