static void clear_blocks_annotations (void);
static void make_blocks (tree);
static void factor_computed_gotos (void);
-static tree tree_block_label (basic_block bb);
/* Edges. */
static void make_edges (void);
static void tree_merge_blocks (basic_block, basic_block);
static bool tree_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
-static void cleanup_dead_labels (void);
static bool cleanup_control_flow (void);
static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
static edge find_taken_edge_cond_expr (basic_block, tree);
/* Initialize the basic block array. */
init_flow ();
+ profile_status = PROFILE_ABSENT;
n_basic_blocks = 0;
last_basic_block = 0;
VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
/* Adjust the size of the array. */
VARRAY_GROW (basic_block_info, n_basic_blocks);
+ /* To speed up statement iterator walks, we first purge dead labels. */
+ cleanup_dead_labels ();
+
+ /* Group case nodes to reduce the number of edges.
+ We do this after cleaning up dead labels because otherwise we miss
+ a lot of obvious case merging opportunities. */
+ group_case_labels ();
+
/* Create the edges of the flowgraph. */
make_edges ();
PROP_cfg, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_verify_stmts /* todo_flags_finish */
+ TODO_verify_stmts, /* todo_flags_finish */
+ 0 /* letter */
};
/* Search the CFG for any computed gotos. If found, factor them to a
make_edges (void)
{
basic_block bb;
- edge e;
/* Create an edge from entry to the first block with executable
statements in it. */
make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
}
- /* If there is a fallthru edge to exit out of the last block, transform it
- to a return statement. */
- for (e = EXIT_BLOCK_PTR->prev_bb->succ; e; e = e->succ_next)
- if (e->flags & EDGE_FALLTHRU)
- break;
-
- if (e && e->dest == EXIT_BLOCK_PTR)
- {
- block_stmt_iterator bsi;
- basic_block ret_bb = EXIT_BLOCK_PTR->prev_bb;
- tree x;
-
- /* If E->SRC ends with a call that has an abnormal edge (for EH or
- nonlocal goto), then we will need to split the edge to insert
- an explicit return statement. */
- if (e != ret_bb->succ || e->succ_next)
- {
- ret_bb = split_edge (e);
- e = ret_bb->succ;
- }
- e->flags &= ~EDGE_FALLTHRU;
-
- x = build (RETURN_EXPR, void_type_node, NULL_TREE);
- bsi = bsi_last (ret_bb);
- bsi_insert_after (&bsi, x, BSI_NEW_STMT);
- }
-
/* We do not care about fake edges, so remove any that the CFG
builder inserted for completeness. */
- remove_fake_edges ();
-
- /* To speed up statement iterator walks, we first purge dead labels. */
- cleanup_dead_labels ();
+ remove_fake_exit_edges ();
/* Clean up the graph and warn for unreachable code. */
cleanup_tree_cfg ();
make_ctrl_stmt_edges (basic_block bb)
{
tree last = last_stmt (bb);
- tree first = first_stmt (bb);
#if defined ENABLE_CHECKING
if (last == NULL_TREE)
abort();
#endif
- if (TREE_CODE (first) == LABEL_EXPR
- && DECL_NONLOCAL (LABEL_EXPR_LABEL (first)))
- make_edge (ENTRY_BLOCK_PTR, bb, EDGE_ABNORMAL);
-
switch (TREE_CODE (last))
{
case GOTO_EXPR:
static void
make_exit_edges (basic_block bb)
{
- tree last = last_stmt (bb);
+ tree last = last_stmt (bb), op;
if (last == NULL_TREE)
abort ();
/* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
may have an abnormal edge. Search the RHS for this case and
create any required edges. */
- if (TREE_CODE (TREE_OPERAND (last, 1)) == CALL_EXPR
- && TREE_SIDE_EFFECTS (TREE_OPERAND (last, 1))
+ op = get_call_expr_in (last);
+ if (op && TREE_SIDE_EFFECTS (op)
&& current_function_has_nonlocal_label)
make_goto_expr_edges (bb);
basic_block
label_to_block (tree dest)
{
- return VARRAY_BB (label_to_block_map, LABEL_DECL_UID (dest));
+ int uid = LABEL_DECL_UID (dest);
+
+  /* We would die hard when faced with an undefined label.  Emit the
+     label to the very first basic block.  This will hopefully make even
+     the dataflow and undefined variable warnings quite right.  */
+ if ((errorcount || sorrycount) && uid < 0)
+ {
+ block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
+ tree stmt;
+
+ stmt = build1 (LABEL_EXPR, void_type_node, dest);
+ bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
+ uid = LABEL_DECL_UID (dest);
+ }
+ return VARRAY_BB (label_to_block_map, uid);
}
/* A GOTO to a local label creates normal edges. */
if (simple_goto_p (goto_t))
{
- make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
+ edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
+#ifdef USE_MAPPED_LOCATION
+ e->goto_locus = EXPR_LOCATION (goto_t);
+#else
+ e->goto_locus = EXPR_LOCUS (goto_t);
+#endif
bsi_remove (&last);
return;
}
- /* Nothing more to do for nonlocal gotos. */
+ /* Nothing more to do for nonlocal gotos. */
if (TREE_CODE (dest) == LABEL_DECL)
return;
/* Remove unreachable blocks and other miscellaneous clean up work. */
-void
+bool
cleanup_tree_cfg (void)
{
bool something_changed = true;
+ bool retval = false;
timevar_push (TV_TREE_CLEANUP_CFG);
while (something_changed)
{
something_changed = cleanup_control_flow ();
- something_changed |= thread_jumps ();
something_changed |= delete_unreachable_blocks ();
+ something_changed |= thread_jumps ();
+ retval |= something_changed;
}
/* Merging the blocks creates no new opportunities for the other
verify_flow_info ();
#endif
timevar_pop (TV_TREE_CLEANUP_CFG);
+ return retval;
}
-/* Cleanup useless labels from the flow graph. */
+/* Cleanup useless labels in basic blocks. This is something we wish
+ to do early because it allows us to group case labels before creating
+ the edges for the CFG, and it speeds up block statement iterators in
+ all passes later on.
+   We only run this pass once; running it more than once is probably not
+ profitable. */
+/* A map from basic block index to the leading label of that block. */
+static tree *label_for_bb;
+
+/* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
static void
+update_eh_label (struct eh_region *region)
+{
+ tree old_label = get_eh_region_tree_label (region);
+ if (old_label)
+ {
+ tree new_label;
+ basic_block bb = label_to_block (old_label);
+
+ /* ??? After optimizing, there may be EH regions with labels
+ that have already been removed from the function body, so
+ there is no basic block for them. */
+ if (! bb)
+ return;
+
+ new_label = label_for_bb[bb->index];
+ set_eh_region_tree_label (region, new_label);
+ }
+}
+
+/* Given LABEL return the first label in the same basic block. */
+static tree
+main_block_label (tree label)
+{
+ basic_block bb = label_to_block (label);
+
+  /* label_to_block may have inserted an undefined label into the chain.  */
+ if (!label_for_bb[bb->index])
+ label_for_bb[bb->index] = label;
+ return label_for_bb[bb->index];
+}
+
+/* Cleanup redundant labels. This is a three-step process:
+ 1) Find the leading label for each block.
+ 2) Redirect all references to labels to the leading labels.
+ 3) Cleanup all useless labels. */
+
+void
cleanup_dead_labels (void)
{
basic_block bb;
- tree *label_for_bb = xcalloc (last_basic_block, sizeof (tree));
+ label_for_bb = xcalloc (last_basic_block, sizeof (tree));
/* Find a suitable label for each block. We use the first user-defined
label is there is one, or otherwise just the first label we see. */
}
}
- /* Now redirect all jumps/branches to the selected label for each block. */
+ /* Now redirect all jumps/branches to the selected label.
+ First do so for each block ending in a control statement. */
FOR_EACH_BB (bb)
{
tree stmt = last_stmt (bb);
case COND_EXPR:
{
tree true_branch, false_branch;
- basic_block true_bb, false_bb;
true_branch = COND_EXPR_THEN (stmt);
false_branch = COND_EXPR_ELSE (stmt);
- true_bb = label_to_block (GOTO_DESTINATION (true_branch));
- false_bb = label_to_block (GOTO_DESTINATION (false_branch));
- GOTO_DESTINATION (true_branch) = label_for_bb[true_bb->index];
- GOTO_DESTINATION (false_branch) = label_for_bb[false_bb->index];
+ GOTO_DESTINATION (true_branch)
+ = main_block_label (GOTO_DESTINATION (true_branch));
+ GOTO_DESTINATION (false_branch)
+ = main_block_label (GOTO_DESTINATION (false_branch));
break;
}
/* Replace all destination labels. */
for (i = 0; i < n; ++i)
- {
- tree label = CASE_LABEL (TREE_VEC_ELT (vec, i));
-
- CASE_LABEL (TREE_VEC_ELT (vec, i)) =
- label_for_bb[label_to_block (label)->index];
- }
+ CASE_LABEL (TREE_VEC_ELT (vec, i))
+ = main_block_label (CASE_LABEL (TREE_VEC_ELT (vec, i)));
break;
}
+ /* We have to handle GOTO_EXPRs until they're removed, and we don't
+ remove them until after we've created the CFG edges. */
+ case GOTO_EXPR:
+ if (! computed_goto_p (stmt))
+ {
+ GOTO_DESTINATION (stmt)
+ = main_block_label (GOTO_DESTINATION (stmt));
+ break;
+ }
+
default:
break;
}
}
+ for_each_eh_region (update_eh_label);
+
/* Finally, purge dead labels. All user-defined labels and labels that
can be the target of non-local gotos are preserved. */
FOR_EACH_BB (bb)
free (label_for_bb);
}
+/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
+ and scan the sorted vector of cases. Combine the ones jumping to the
+ same label.
+ Eg. three separate entries 1: 2: 3: become one entry 1..3: */
+
+void
+group_case_labels (void)
+{
+ basic_block bb;
+
+ FOR_EACH_BB (bb)
+ {
+ tree stmt = last_stmt (bb);
+ if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
+ {
+ tree labels = SWITCH_LABELS (stmt);
+ int old_size = TREE_VEC_LENGTH (labels);
+ int i, j, new_size = old_size;
+ tree default_label = TREE_VEC_ELT (labels, old_size - 1);
+
+ /* Look for possible opportunities to merge cases.
+ Ignore the last element of the label vector because it
+ must be the default case. */
+ i = 0;
+ while (i < old_size - 2)
+ {
+ tree base_case, base_label, base_high, type;
+ base_case = TREE_VEC_ELT (labels, i);
+
+ if (! base_case)
+ abort ();
+
+ base_label = CASE_LABEL (base_case);
+
+ /* Discard cases that have the same destination as the
+ default case. */
+ if (base_label == default_label)
+ {
+ TREE_VEC_ELT (labels, i) = NULL_TREE;
+ i++;
+ continue;
+ }
+
+ type = TREE_TYPE (CASE_LOW (base_case));
+ base_high = CASE_HIGH (base_case) ?
+ CASE_HIGH (base_case) : CASE_LOW (base_case);
+
+ /* Try to merge case labels. Break out when we reach the end
+ of the label vector or when we cannot merge the next case
+ label with the current one. */
+ while (i < old_size - 2)
+ {
+ tree merge_case = TREE_VEC_ELT (labels, ++i);
+ tree merge_label = CASE_LABEL (merge_case);
+ tree t = int_const_binop (PLUS_EXPR, base_high,
+ integer_one_node, 1);
+
+ /* Merge the cases if they jump to the same place,
+ and their ranges are consecutive. */
+ if (merge_label == base_label
+ && tree_int_cst_equal (CASE_LOW (merge_case), t))
+ {
+ base_high = CASE_HIGH (merge_case) ?
+ CASE_HIGH (merge_case) : CASE_LOW (merge_case);
+ CASE_HIGH (base_case) = base_high;
+ TREE_VEC_ELT (labels, i) = NULL_TREE;
+ new_size--;
+ }
+ else
+ break;
+ }
+ }
+
+ /* Compress the case labels in the label vector, and adjust the
+ length of the vector. */
+ for (i = 0, j = 0; i < new_size; i++)
+ {
+ while (! TREE_VEC_ELT (labels, j))
+ j++;
+ TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
+ }
+ TREE_VEC_LENGTH (labels) = new_size;
+ }
+ }
+}
/* Checks whether we can merge block B into block A. */
static bool
remove_useless_stmts_warn_notreached (tree stmt)
{
- if (EXPR_LOCUS (stmt))
+ if (EXPR_HAS_LOCATION (stmt))
{
- warning ("%Hwill never be executed", EXPR_LOCUS (stmt));
+ location_t loc = EXPR_LOCATION (stmt);
+ warning ("%Hwill never be executed", &loc);
return true;
}
static void
remove_useless_stmts_1 (tree *tp, struct rus_data *data)
{
- tree t = *tp;
+ tree t = *tp, op;
switch (TREE_CODE (t))
{
case MODIFY_EXPR:
data->last_goto = NULL;
fold_stmt (tp);
- if (TREE_CODE (TREE_OPERAND (t, 1)) == CALL_EXPR)
+ op = get_call_expr_in (t);
+ if (op)
{
- update_call_expr_flags (TREE_OPERAND (t, 1));
- notice_special_calls (TREE_OPERAND (t, 1));
+ update_call_expr_flags (op);
+ notice_special_calls (op);
}
if (tree_could_throw_p (t))
data->may_throw = true;
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func /* todo_flags_finish */
+ TODO_dump_func, /* todo_flags_finish */
+ 0 /* letter */
};
continue;
}
- /* Invalidate the var if we encounter something that could modify it. */
+ /* Invalidate the var if we encounter something that could modify it.
+ Likewise for the value it was previously set to. Note that we only
+ consider values that are either a VAR_DECL or PARM_DECL so we
+ can test for conflict very simply. */
if (TREE_CODE (stmt) == ASM_EXPR
- || TREE_CODE (stmt) == VA_ARG_EXPR
|| (TREE_CODE (stmt) == MODIFY_EXPR
&& (TREE_OPERAND (stmt, 0) == var
- || TREE_OPERAND (stmt, 0) == val
- || TREE_CODE (TREE_OPERAND (stmt, 1)) == VA_ARG_EXPR)))
+ || TREE_OPERAND (stmt, 0) == val)))
return;
bsi_next (&bsi);
phi = phi_nodes (bb);
while (phi)
{
- tree next = TREE_CHAIN (phi);
+ tree next = PHI_CHAIN (phi);
remove_phi_node (phi, NULL_TREE, bb);
phi = next;
}
remove_bb (basic_block bb)
{
block_stmt_iterator i;
- location_t *loc = NULL;
+ source_locus loc = 0;
if (dump_file)
{
jump threading, thus resulting in bogus warnings. Not great,
since this way we lose warnings for gotos in the original
program that are indeed unreachable. */
- if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_LOCUS (stmt) && !loc)
+ if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
+#ifdef USE_MAPPED_LOCATION
+ loc = EXPR_LOCATION (stmt);
+#else
loc = EXPR_LOCUS (stmt);
+#endif
}
/* If requested, give a warning that the first statement in the
loop above, so the last statement we process is the first statement
in the block. */
if (warn_notreached && loc)
+#ifdef USE_MAPPED_LOCATION
+ warning ("%Hwill never be executed", &loc);
+#else
warning ("%Hwill never be executed", loc);
+#endif
remove_phi_nodes_and_edges_for_unreachable_block (bb);
}
}
-/* Given a control block BB and a constant value VAL, return the edge that
- will be taken out of the block. If VAL does not match a unique edge,
- NULL is returned. */
+/* Given a control block BB and a predicate VAL, return the edge that
+ will be taken out of the block. If VAL does not match a unique
+ edge, NULL is returned. */
edge
find_taken_edge (basic_block bb, tree val)
abort ();
#endif
+ /* If VAL is a predicate of the form N RELOP N, where N is an
+ SSA_NAME, we can always determine its truth value (except when
+ doing floating point comparisons that may involve NaNs). */
+ if (val
+ && TREE_CODE_CLASS (TREE_CODE (val)) == '<'
+ && TREE_OPERAND (val, 0) == TREE_OPERAND (val, 1)
+ && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME
+ && (TREE_CODE (TREE_TYPE (TREE_OPERAND (val, 0))) != REAL_TYPE
+ || !HONOR_NANS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (val, 0))))))
+ {
+ enum tree_code code = TREE_CODE (val);
+
+ if (code == EQ_EXPR || code == LE_EXPR || code == GE_EXPR)
+ val = boolean_true_node;
+ else if (code == LT_EXPR || code == GT_EXPR || code == NE_EXPR)
+ val = boolean_false_node;
+ }
+
/* If VAL is not a constant, we can't determine which edge might
be taken. */
if (val == NULL || !really_constant_p (val))
}
-/* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL. */
+/* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
+ We can make optimal use here of the fact that the case labels are
+ sorted: We can do a binary search for a case matching VAL. */
static tree
find_case_label_for_value (tree switch_expr, tree val)
{
tree vec = SWITCH_LABELS (switch_expr);
- size_t i, n = TREE_VEC_LENGTH (vec);
- tree default_case = NULL;
+ size_t low, high, n = TREE_VEC_LENGTH (vec);
+ tree default_case = TREE_VEC_ELT (vec, n - 1);
- for (i = 0; i < n; ++i)
+ for (low = -1, high = n - 1; high - low > 1; )
{
+ size_t i = (high + low) / 2;
tree t = TREE_VEC_ELT (vec, i);
+ int cmp;
+
+ /* Cache the result of comparing CASE_LOW and val. */
+ cmp = tree_int_cst_compare (CASE_LOW (t), val);
- if (CASE_LOW (t) == NULL)
- default_case = t;
- else if (CASE_HIGH (t) == NULL)
+ if (cmp > 0)
+ high = i;
+ else
+ low = i;
+
+ if (CASE_HIGH (t) == NULL)
{
- /* A `normal' case label. */
- if (simple_cst_equal (CASE_LOW (t), val) == 1)
+	      /* A single-valued case label.  */
+ if (cmp == 0)
return t;
}
else
{
/* A case range. We can only handle integer ranges. */
- if (tree_int_cst_compare (CASE_LOW (t), val) <= 0
- && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
+ if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
return t;
}
}
- if (!default_case)
- abort ();
-
return default_case;
}
tree phi, val1, val2;
int n1, n2;
- for (phi = phi_nodes (dest); phi; phi = TREE_CHAIN (phi))
+ for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
{
n1 = phi_arg_from_edge (phi, e1);
n2 = phi_arg_from_edge (phi, e2);
}
-/* Computing the Dominance Frontier:
-
- As described in Morgan, section 3.5, this may be done simply by
- walking the dominator tree bottom-up, computing the frontier for
- the children before the parent. When considering a block B,
- there are two cases:
-
- (1) A flow graph edge leaving B that does not lead to a child
- of B in the dominator tree must be a block that is either equal
- to B or not dominated by B. Such blocks belong in the frontier
- of B.
-
- (2) Consider a block X in the frontier of one of the children C
- of B. If X is not equal to B and is not dominated by B, it
- is in the frontier of B. */
-
-static void
-compute_dominance_frontiers_1 (bitmap *frontiers, basic_block bb, sbitmap done)
-{
- edge e;
- basic_block c;
-
- SET_BIT (done, bb->index);
-
- /* Do the frontier of the children first. Not all children in the
- dominator tree (blocks dominated by this one) are children in the
- CFG, so check all blocks. */
- for (c = first_dom_son (CDI_DOMINATORS, bb);
- c;
- c = next_dom_son (CDI_DOMINATORS, c))
- {
- if (! TEST_BIT (done, c->index))
- compute_dominance_frontiers_1 (frontiers, c, done);
- }
-
- /* Find blocks conforming to rule (1) above. */
- for (e = bb->succ; e; e = e->succ_next)
- {
- if (e->dest == EXIT_BLOCK_PTR)
- continue;
- if (get_immediate_dominator (CDI_DOMINATORS, e->dest) != bb)
- bitmap_set_bit (frontiers[bb->index], e->dest->index);
- }
-
- /* Find blocks conforming to rule (2). */
- for (c = first_dom_son (CDI_DOMINATORS, bb);
- c;
- c = next_dom_son (CDI_DOMINATORS, c))
- {
- int x;
-
- EXECUTE_IF_SET_IN_BITMAP (frontiers[c->index], 0, x,
- {
- if (get_immediate_dominator (CDI_DOMINATORS, BASIC_BLOCK (x)) != bb)
- bitmap_set_bit (frontiers[bb->index], x);
- });
- }
-}
-
-
-void
-compute_dominance_frontiers (bitmap *frontiers)
-{
- sbitmap done = sbitmap_alloc (last_basic_block);
-
- timevar_push (TV_DOM_FRONTIERS);
-
- sbitmap_zero (done);
-
- compute_dominance_frontiers_1 (frontiers, ENTRY_BLOCK_PTR->succ->dest, done);
-
- sbitmap_free (done);
-
- timevar_pop (TV_DOM_FRONTIERS);
-}
-
-
-
/*---------------------------------------------------------------------------
Debugging functions
---------------------------------------------------------------------------*/
if (flags & TDF_DETAILS)
{
const char *funcname
- = (*lang_hooks.decl_printable_name) (current_function_decl, 2);
+ = lang_hooks.decl_printable_name (current_function_decl, 2);
fputc ('\n', file);
fprintf (file, ";; Function %s\n\n", funcname);
{
static long max_num_merged_labels = 0;
unsigned long size, total = 0;
- long n_edges;
+ int n_edges;
basic_block bb;
const char * const fmt_str = "%-30s%-13s%12s\n";
- const char * const fmt_str_1 = "%-30s%13lu%11lu%c\n";
+ const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
const char * const fmt_str_3 = "%-43s%11lu%c\n";
const char *funcname
- = (*lang_hooks.decl_printable_name) (current_function_decl, 2);
+ = lang_hooks.decl_printable_name (current_function_decl, 2);
fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
size = n_basic_blocks * sizeof (struct basic_block_def);
total += size;
- fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks, SCALE (size),
- LABEL (size));
+ fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
+ SCALE (size), LABEL (size));
n_edges = 0;
FOR_EACH_BB (bb)
edge e;
basic_block bb;
const char *funcname
- = (*lang_hooks.decl_printable_name) (current_function_decl, 2);
+ = lang_hooks.decl_printable_name (current_function_decl, 2);
/* Write the file header. */
fprintf (file, "graph: { title: \"%s\"\n", funcname);
bool
is_ctrl_altering_stmt (tree t)
{
- tree call = t;
+ tree call;
#if defined ENABLE_CHECKING
if (t == NULL)
abort ();
#endif
- switch (TREE_CODE (t))
+ call = get_call_expr_in (t);
+ if (call)
{
- case MODIFY_EXPR:
- /* A MODIFY_EXPR with a rhs of a call has the characteristics
- of the call. */
- call = TREE_OPERAND (t, 1);
- if (TREE_CODE (call) != CALL_EXPR)
- break;
- /* FALLTHRU */
-
- case CALL_EXPR:
/* A non-pure/const CALL_EXPR alters flow control if the current
function has nonlocal labels. */
- if (TREE_SIDE_EFFECTS (t)
- && current_function_has_nonlocal_label)
+ if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
return true;
/* A CALL_EXPR also alters control flow if it does not return. */
if (call_expr_flags (call) & (ECF_NORETURN | ECF_LONGJMP))
return true;
- break;
-
- default:
- return false;
}
/* If a statement can throw, it alters control flow. */
bool
simple_goto_p (tree expr)
{
- return (TREE_CODE (expr) == GOTO_EXPR
- && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL
- && (decl_function_context (GOTO_DESTINATION (expr))
- == current_function_decl));
+ return (TREE_CODE (expr) == GOTO_EXPR
+ && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
}
basic_block bb;
block_stmt_iterator last;
edge e;
- tree stmt, label, forward;
+ tree stmt, label;
FOR_EACH_BB (bb)
{
if (e->flags & EDGE_FALLTHRU)
break;
- if (!e
- || e->dest == bb->next_bb)
+ if (!e || e->dest == bb->next_bb)
continue;
if (e->dest == EXIT_BLOCK_PTR)
label = tree_block_label (e->dest);
- /* If this is a goto to a goto, jump to the final destination.
- Handles unfactoring of the computed jumps.
- ??? Why bother putting this back together when rtl is just
- about to take it apart again? */
- forward = last_and_only_stmt (e->dest);
- if (forward
- && TREE_CODE (forward) == GOTO_EXPR)
- label = GOTO_DESTINATION (forward);
-
- bsi_insert_after (&last,
- build1 (GOTO_EXPR, void_type_node, label),
- BSI_NEW_STMT);
+ stmt = build1 (GOTO_EXPR, void_type_node, label);
+#ifdef USE_MAPPED_LOCATION
+ SET_EXPR_LOCATION (stmt, e->goto_locus);
+#else
+ SET_EXPR_LOCUS (stmt, e->goto_locus);
+#endif
+ bsi_insert_after (&last, stmt, BSI_NEW_STMT);
e->flags &= ~EDGE_FALLTHRU;
}
}
-
-/* Remove all the blocks and edges that make up the flowgraph. */
+/* Remove block annotations and other datastructures. */
void
-delete_tree_cfg (void)
+delete_tree_cfg_annotations (void)
{
+ basic_block bb;
if (n_basic_blocks > 0)
free_blocks_annotations ();
- free_basic_block_vars ();
- basic_block_info = NULL;
label_to_block_map = NULL;
free_rbi_pool ();
+ FOR_EACH_BB (bb)
+ bb->rbi = NULL;
}
}
}
+/* Finds iterator for STMT. */
+
+extern block_stmt_iterator
+stmt_for_bsi (tree stmt)
+{
+ block_stmt_iterator bsi;
+
+ for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
+ if (bsi_stmt (bsi) == stmt)
+ return bsi;
+
+ abort ();
+}
/* Insert statement (or statement list) T before the statement
pointed-to by iterator I. M specifies how to update iterator I
bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
set_bb_for_stmt (t, i->bb);
- modify_stmt (t);
tsi_link_before (&i->tsi, t, m);
+ modify_stmt (t);
}
bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
set_bb_for_stmt (t, i->bb);
- modify_stmt (t);
tsi_link_after (&i->tsi, t, m);
+ modify_stmt (t);
}
{
tree t = bsi_stmt (*i);
set_bb_for_stmt (t, NULL);
- modify_stmt (t);
tsi_delink (&i->tsi);
}
In all cases, the returned *BSI points to the correct location. The
return value is true if insertion should be done after the location,
- or false if it should be done before the location. */
+ or false if it should be done before the location. If new basic block
+ has to be created, it is stored in *NEW_BB. */
static bool
-tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi)
+tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
+ basic_block *new_bb)
{
basic_block dest, src;
tree tmp;
tmp = bsi_stmt (*bsi);
if (!stmt_ends_bb_p (tmp))
return true;
+
+ /* Insert code just before returning the value. We may need to decompose
+ the return in the case it contains non-trivial operand. */
+ if (TREE_CODE (tmp) == RETURN_EXPR)
+ {
+ tree op = TREE_OPERAND (tmp, 0);
+ if (!is_gimple_val (op))
+ {
+ if (TREE_CODE (op) != MODIFY_EXPR)
+ abort ();
+ bsi_insert_before (bsi, op, BSI_NEW_STMT);
+ TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
+ }
+ bsi_prev (bsi);
+ return true;
+ }
}
/* Otherwise, create a new basic block, and split this edge. */
dest = split_edge (e);
+ if (new_bb)
+ *new_bb = dest;
e = dest->pred;
goto restart;
}
PENDING_STMT (e) = NULL_TREE;
- if (tree_find_edge_insert_loc (e, &bsi))
+ if (tree_find_edge_insert_loc (e, &bsi, NULL))
bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
else
bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
append_to_statement_list (stmt, &PENDING_STMT (e));
}
+/* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If new block has to
+ be created, it is returned. */
-/* Specialized edge insertion for SSA-PRE. FIXME: This should
- probably disappear. The only reason it's here is because PRE needs
- the call to tree_find_edge_insert_loc(). */
-
-void pre_insert_on_edge (edge e, tree stmt);
-
-void
-pre_insert_on_edge (edge e, tree stmt)
+basic_block
+bsi_insert_on_edge_immediate (edge e, tree stmt)
{
block_stmt_iterator bsi;
+ basic_block new_bb = NULL;
if (PENDING_STMT (e))
abort ();
- if (tree_find_edge_insert_loc (e, &bsi))
+ if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
else
bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
-}
+ return new_bb;
+}
/*---------------------------------------------------------------------------
Tree specific functions for CFG manipulation
/* Find all the PHI arguments on the original edge, and change them to
the new edge. Do it before redirection, so that the argument does not
get removed. */
- for (phi = phi_nodes (dest); phi; phi = TREE_CHAIN (phi))
+ for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
{
num_elem = PHI_NUM_ARGS (phi);
for (i = 0; i < num_elem; i++)
properly noticed as such. */
static tree
-verify_expr (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
- void *data ATTRIBUTE_UNUSED)
+verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
tree t = *tp, x;
if (TYPE_P (t))
*walk_subtrees = 0;
+
+ /* Check operand N for being valid GIMPLE and give error MSG if not.
+ We check for constants explicitly since they are not considered
+ gimple invariants if they overflowed. */
+#define CHECK_OP(N, MSG) \
+ do { if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, N))) != 'c' \
+ && !is_gimple_val (TREE_OPERAND (t, N))) \
+ { error (MSG); return TREE_OPERAND (t, N); }} while (0)
switch (TREE_CODE (t))
{
&& is_gimple_reg (TREE_OPERAND (x, 0)))
{
error ("GIMPLE register modified with BIT_FIELD_REF");
- return *tp;
+ return t;
}
break;
case ADDR_EXPR:
- x = TREE_OPERAND (t, 0);
- while (TREE_CODE (x) == ARRAY_REF
- || TREE_CODE (x) == COMPONENT_REF
- || TREE_CODE (x) == REALPART_EXPR
- || TREE_CODE (x) == IMAGPART_EXPR)
- x = TREE_OPERAND (x, 0);
+ /* Skip any references (they will be checked when we recurse down the
+ tree) and ensure that any variable used as a prefix is marked
+ addressable. */
+ for (x = TREE_OPERAND (t, 0);
+ (handled_component_p (x)
+ || TREE_CODE (x) == REALPART_EXPR
+ || TREE_CODE (x) == IMAGPART_EXPR);
+ x = TREE_OPERAND (x, 0))
+ ;
+
if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
return NULL;
if (!TREE_ADDRESSABLE (x))
case BIT_NOT_EXPR:
case NON_LVALUE_EXPR:
case TRUTH_NOT_EXPR:
- x = TREE_OPERAND (t, 0);
- /* We check for constants explicitly since they are not considered
- gimple invariants if they overflowed. */
- if (TREE_CODE_CLASS (TREE_CODE (x)) != 'c'
- && !is_gimple_val (x))
- {
- error ("Invalid operand to unary operator");
- return x;
- }
+ CHECK_OP (0, "Invalid operand to unary operator");
break;
case REALPART_EXPR:
case IMAGPART_EXPR:
+ case COMPONENT_REF:
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ case BIT_FIELD_REF:
+ case VIEW_CONVERT_EXPR:
+ /* We have a nest of references. Verify that each of the operands
+ that determine where to reference is either a constant or a variable,
+ verify that the base is valid, and then show we've already checked
+ the subtrees. */
+ while (TREE_CODE (t) == REALPART_EXPR || TREE_CODE (t) == IMAGPART_EXPR
+ || handled_component_p (t))
+ {
+ if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
+ CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
+ else if (TREE_CODE (t) == ARRAY_REF
+ || TREE_CODE (t) == ARRAY_RANGE_REF)
+ {
+ CHECK_OP (1, "Invalid array index.");
+ if (TREE_OPERAND (t, 2))
+ CHECK_OP (2, "Invalid array lower bound.");
+ if (TREE_OPERAND (t, 3))
+ CHECK_OP (3, "Invalid array stride.");
+ }
+ else if (TREE_CODE (t) == BIT_FIELD_REF)
+ {
+ CHECK_OP (1, "Invalid operand to BIT_FIELD_REF");
+ CHECK_OP (2, "Invalid operand to BIT_FIELD_REF");
+ }
+
+ t = TREE_OPERAND (t, 0);
+ }
+
+ if (TREE_CODE_CLASS (TREE_CODE (t)) != 'c'
+ && !is_gimple_lvalue (t))
+ {
+ error ("Invalid reference prefix.");
+ return t;
+ }
+ *walk_subtrees = 0;
break;
case LT_EXPR:
case UNGT_EXPR:
case UNGE_EXPR:
case UNEQ_EXPR:
+ case LTGT_EXPR:
case PLUS_EXPR:
case MINUS_EXPR:
case MULT_EXPR:
case BIT_IOR_EXPR:
case BIT_XOR_EXPR:
case BIT_AND_EXPR:
- x = TREE_OPERAND (t, 0);
- /* We check for constants explicitly since they are not considered
- gimple invariants if they overflowed. */
- if (TREE_CODE_CLASS (TREE_CODE (x)) != 'c'
- && !is_gimple_val (x))
- {
- error ("Invalid operand to binary operator");
- return x;
- }
- x = TREE_OPERAND (t, 1);
- /* We check for constants explicitly since they are not considered
- gimple invariants if they overflowed. */
- if (TREE_CODE_CLASS (TREE_CODE (x)) != 'c'
- && !is_gimple_val (x))
- {
- error ("Invalid operand to binary operator");
- return x;
- }
+ CHECK_OP (0, "Invalid operand to binary operator");
+ CHECK_OP (1, "Invalid operand to binary operator");
break;
default:
break;
}
return NULL;
+
+#undef CHECK_OP
}
TODO: Implement type checking. */
static bool
-verify_stmt (tree stmt)
+verify_stmt (tree stmt, bool last_in_block)
{
tree addr;
if (!is_gimple_stmt (stmt))
{
error ("Is not a valid GIMPLE statement.");
- debug_generic_stmt (stmt);
- return true;
+ goto fail;
}
addr = walk_tree (&stmt, verify_expr, NULL, NULL);
return true;
}
+ /* If the statement is marked as part of an EH region, then it is
+ expected that the statement could throw. Verify that when we
+ have optimizations that simplify statements such that we prove
+ that they cannot throw, that we update other data structures
+ to match. */
+ if (lookup_stmt_eh_region (stmt) >= 0)
+ {
+ if (!tree_could_throw_p (stmt))
+ {
+ error ("Statement marked for throw, but doesn't.");
+ goto fail;
+ }
+ if (!last_in_block && tree_can_throw_internal (stmt))
+ {
+ error ("Statement marked for throw in middle of block.");
+ goto fail;
+ }
+ }
+
return false;
+
+ fail:
+ debug_generic_stmt (stmt);
+ return true;
}
|| TREE_CODE (t) == SSA_NAME)
return true;
- while ((TREE_CODE (t) == ARRAY_REF
+ while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
/* We check for constants explicitly since they are not considered
gimple invariants if they overflowed. */
&& (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 1))) == 'c'
tree phi;
int i;
- for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
{
int phi_num_args = PHI_NUM_ARGS (phi);
}
}
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
{
tree stmt = bsi_stmt (bsi);
- err |= verify_stmt (stmt);
+ bsi_next (&bsi);
+ err |= verify_stmt (stmt, bsi_end_p (bsi));
addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
if (addr)
{
case SWITCH_EXPR:
{
+ tree prev;
edge e;
size_t i, n;
tree vec;
label_bb->aux = (void *)1;
}
+ /* Verify that the case labels are sorted. */
+ prev = TREE_VEC_ELT (vec, 0);
+ for (i = 1; i < n - 1; ++i)
+ {
+ tree c = TREE_VEC_ELT (vec, i);
+ if (! CASE_LOW (c))
+ {
+ error ("Found default case not at end of case vector");
+ err = 1;
+ continue;
+ }
+ if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
+ {
+ error ("Case labels not sorted:\n ");
+ print_generic_expr (stderr, prev, 0);
+ fprintf (stderr," is greater than ");
+ print_generic_expr (stderr, c, 0);
+ fprintf (stderr," but comes before it.\n");
+ err = 1;
+ }
+ prev = c;
+ }
+ if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
+ {
+ error ("No default case found at end of case vector");
+ err = 1;
+ }
+
for (e = bb->succ; e; e = e->succ_next)
{
if (!e->dest->aux)
{
edge e;
basic_block dummy, bb;
- tree phi, new_phi, var;
+ tree phi, new_phi, var, prev, next;
dummy = fallthru->src;
bb = fallthru->dest;
/* If we redirected a branch we must create new phi nodes at the
start of BB. */
- for (phi = phi_nodes (dummy); phi; phi = TREE_CHAIN (phi))
+ for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
{
var = PHI_RESULT (phi);
new_phi = create_phi_node (var, bb);
SSA_NAME_DEF_STMT (var) = new_phi;
- PHI_RESULT (phi) = make_ssa_name (SSA_NAME_VAR (var), phi);
+ SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
add_phi_arg (&new_phi, PHI_RESULT (phi), fallthru);
}
- /* Ensure that the PHI node chains are in the same order. */
- set_phi_nodes (bb, nreverse (phi_nodes (bb)));
+ /* Ensure that the PHI node chain is in the same order. */
+ prev = NULL;
+ for (phi = phi_nodes (bb); phi; phi = next)
+ {
+ next = PHI_CHAIN (phi);
+ PHI_CHAIN (phi) = prev;
+ prev = phi;
+ }
+ set_phi_nodes (bb, prev);
/* Add the arguments we have stored on edges. */
for (e = bb->pred; e; e = e->pred_next)
for (phi = phi_nodes (bb), var = PENDING_STMT (e);
phi;
- phi = TREE_CHAIN (phi), var = TREE_CHAIN (var))
+ phi = PHI_CHAIN (phi), var = TREE_CHAIN (var))
add_phi_arg (&phi, TREE_VALUE (var), e);
PENDING_STMT (e) = NULL;
thread_jumps (void)
{
edge e, next, last, old;
- basic_block bb, dest, tmp;
+ basic_block bb, dest, tmp, old_dest, dom;
tree phi;
int arg;
bool retval = false;
dest = dest->succ->dest)
{
/* An infinite loop detected. We redirect the edge anyway, so
- that the loop is shrinked into single basic block. */
+ that the loop is shrunk into single basic block. */
if (!bb_ann (dest)->forwardable)
break;
/* Perform the redirection. */
retval = true;
+ old_dest = e->dest;
e = redirect_edge_and_branch (e, dest);
- /* TODO -- updating dominators in this case is simple. */
- free_dominance_info (CDI_DOMINATORS);
-
if (!old)
{
/* Update PHI nodes. We know that the new argument should
have the same value as the argument associated with LAST.
Otherwise we would have changed our target block above. */
- for (phi = phi_nodes (dest); phi; phi = TREE_CHAIN (phi))
+ for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
{
arg = phi_arg_from_edge (phi, last);
if (arg < 0)
add_phi_arg (&phi, PHI_ARG_DEF (phi, arg), e);
}
}
+
+ /* Update the dominators. */
+ if (dom_computed[CDI_DOMINATORS] >= DOM_CONS_OK)
+ {
+ /* Remove the unreachable blocks (observe that if all blocks
+ were reachable before, only those in the path we threaded
+ over and did not have any predecessor outside of the path
+ become unreachable). */
+ for (; old_dest != dest; old_dest = tmp)
+ {
+ tmp = old_dest->succ->dest;
+
+ if (old_dest->pred)
+ break;
+
+ delete_basic_block (old_dest);
+ }
+ /* If the dominator of the destination was in the path, set its
+ dominator to the start of the redirected edge. */
+ if (get_immediate_dominator (CDI_DOMINATORS, old_dest) == NULL)
+ set_immediate_dominator (CDI_DOMINATORS, old_dest, bb);
+
+ /* Now proceed as if we had forwarded over just one edge at a time.
+ Algorithm for forwarding over edge A --> B then is
+
+ if (idom (B) == A)
+ idom (B) = idom (A);
+ recount_idom (A); */
+
+ for (; old_dest != dest; old_dest = tmp)
+ {
+ tmp = old_dest->succ->dest;
+
+ if (get_immediate_dominator (CDI_DOMINATORS, tmp) == old_dest)
+ {
+ dom = get_immediate_dominator (CDI_DOMINATORS, old_dest);
+ set_immediate_dominator (CDI_DOMINATORS, tmp, dom);
+ }
+
+ dom = recount_dominator (CDI_DOMINATORS, old_dest);
+ set_immediate_dominator (CDI_DOMINATORS, old_dest, dom);
+ }
+ }
}
/* Reset the forwardable bit on our block since it's no longer in
/* Return a non-special label in the head of basic block BLOCK.
Create one if it doesn't exist. */
-static tree
+tree
tree_block_label (basic_block bb)
{
block_stmt_iterator i, s = bsi_start (bb);
{
basic_block new_bb;
block_stmt_iterator bsi, bsi_tgt;
+ tree phi, val;
+ ssa_op_iter op_iter;
new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
+
+ for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
+ {
+ mark_for_rewrite (PHI_RESULT (phi));
+ }
+
bsi_tgt = bsi_start (new_bb);
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
{
tree stmt = bsi_stmt (bsi);
+ tree copy;
if (TREE_CODE (stmt) == LABEL_EXPR)
continue;
- bsi_insert_after (&bsi_tgt, unshare_expr (stmt), BSI_NEW_STMT);
+ /* Record the definitions. */
+ get_stmt_operands (stmt);
+
+ FOR_EACH_SSA_TREE_OPERAND (val, stmt, op_iter, SSA_OP_ALL_DEFS)
+ mark_for_rewrite (val);
+
+ copy = unshare_expr (stmt);
+
+ /* Copy also the virtual operands. */
+ get_stmt_ann (copy);
+ copy_virtual_operands (copy, stmt);
+
+ bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
}
return new_bb;
basic_block bb;
tree chain;
- fprintf (file, "%s (", (*lang_hooks.decl_printable_name) (fn, 2));
+ fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
arg = DECL_ARGUMENTS (fn);
while (arg)
if (basic_block_info)
{
/* Make a CFG based dump. */
+ check_bb_profile (ENTRY_BLOCK_PTR, file);
if (!ignore_topmost_bind)
fprintf (file, "{\n");
dump_generic_bb (file, bb, 2, flags);
fprintf (file, "}\n");
+ check_bb_profile (EXIT_BLOCK_PTR, file);
}
else
{
tree_block_ends_with_call_p (basic_block bb)
{
block_stmt_iterator bsi = bsi_last (bb);
- tree t = tsi_stmt (bsi.tsi);
-
- if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
- t = TREE_OPERAND (t, 0);
-
- if (TREE_CODE (t) == MODIFY_EXPR)
- t = TREE_OPERAND (t, 1);
-
- return TREE_CODE (t) == CALL_EXPR;
+ return get_call_expr_in (bsi_stmt (bsi)) != NULL;
}
static bool
need_fake_edge_p (tree t)
{
- if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
- t = TREE_OPERAND (t, 0);
-
- if (TREE_CODE (t) == MODIFY_EXPR)
- t = TREE_OPERAND (t, 1);
+ tree call;
/* NORETURN and LONGJMP calls already have an edge to exit.
CONST, PURE and ALWAYS_RETURN calls do not need one.
figured out from the RTL in mark_constant_function, and
the counter incrementation code from -fprofile-arcs
leads to different results from -fbranch-probabilities. */
- if (TREE_CODE (t) == CALL_EXPR
- && !(call_expr_flags (t) &
- (ECF_NORETURN | ECF_LONGJMP | ECF_ALWAYS_RETURN)))
+ call = get_call_expr_in (t);
+ if (call
+ && !(call_expr_flags (call) &
+ (ECF_NORETURN | ECF_LONGJMP | ECF_ALWAYS_RETURN)))
return true;
if (TREE_CODE (t) == ASM_EXPR
return blocks_split;
}
+bool
+tree_purge_dead_eh_edges (basic_block bb)
+{
+ bool changed = false;
+ edge e, next;
+ tree stmt = last_stmt (bb);
+
+ if (stmt && tree_can_throw_internal (stmt))
+ return false;
+
+ for (e = bb->succ; e ; e = next)
+ {
+ next = e->succ_next;
+ if (e->flags & EDGE_EH)
+ {
+ ssa_remove_edge (e);
+ changed = true;
+ }
+ }
+
+ return changed;
+}
+
+bool
+tree_purge_all_dead_eh_edges (bitmap blocks)
+{
+ bool changed = false;
+ size_t i;
+
+ EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
+ { changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i)); });
+
+ return changed;
+}
struct cfg_hooks tree_cfg_hooks = {
"tree",
struct tree_opt_pass pass_split_crit_edges =
{
- NULL, /* name */
+ "crited", /* name */
NULL, /* gate */
split_critical_edges, /* execute */
NULL, /* sub */
PROP_no_crit_edges, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- 0, /* todo_flags_finish */
+ TODO_dump_func, /* todo_flags_finish */
+ 0 /* letter */
};
+
+\f
+/* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
+ a temporary, making sure to register it for renaming if necessary,
+ and finally return the temporary. Put the statements to compute
+ EXP before the current statement in BSI. */
+
+tree
+gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
+{
+ tree t, new_stmt, orig_stmt;
+
+ if (is_gimple_val (exp))
+ return exp;
+
+ t = make_rename_temp (type, NULL);
+ new_stmt = build (MODIFY_EXPR, type, t, exp);
+
+ orig_stmt = bsi_stmt (*bsi);
+ SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
+ TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
+
+ bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
+
+ return t;
+}
+
+/* Build a ternary operation and gimplify it. Emit code before BSI.
+ Return the gimple_val holding the result. */
+
+tree
+gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
+ tree type, tree a, tree b, tree c)
+{
+ tree ret;
+
+ ret = fold (build3 (code, type, a, b, c));
+ STRIP_NOPS (ret);
+
+ return gimplify_val (bsi, type, ret);
+}
+
+/* Build a binary operation and gimplify it. Emit code before BSI.
+ Return the gimple_val holding the result. */
+
+tree
+gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
+ tree type, tree a, tree b)
+{
+ tree ret;
+
+ ret = fold (build2 (code, type, a, b));
+ STRIP_NOPS (ret);
+
+ return gimplify_val (bsi, type, ret);
+}
+
+/* Build a unary operation and gimplify it. Emit code before BSI.
+ Return the gimple_val holding the result. */
+
+tree
+gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
+ tree a)
+{
+ tree ret;
+
+ ret = fold (build1 (code, type, a));
+ STRIP_NOPS (ret);
+
+ return gimplify_val (bsi, type, ret);
+}
+
+
\f
/* Emit return warnings. */
static void
execute_warn_function_return (void)
{
+#ifdef USE_MAPPED_LOCATION
+ source_location location;
+#else
location_t *locus;
+#endif
tree last;
edge e;
if (TREE_THIS_VOLATILE (cfun->decl)
&& EXIT_BLOCK_PTR->pred != NULL)
{
+#ifdef USE_MAPPED_LOCATION
+ location = UNKNOWN_LOCATION;
+#else
locus = NULL;
+#endif
for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
{
last = last_stmt (e->src);
if (TREE_CODE (last) == RETURN_EXPR
+#ifdef USE_MAPPED_LOCATION
+ && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
+#else
&& (locus = EXPR_LOCUS (last)) != NULL)
+#endif
break;
}
+#ifdef USE_MAPPED_LOCATION
+ if (location == UNKNOWN_LOCATION)
+ location = cfun->function_end_locus;
+ warning ("%H`noreturn' function does return", &location);
+#else
if (!locus)
locus = &cfun->function_end_locus;
warning ("%H`noreturn' function does return", locus);
+#endif
}
/* If we see "return;" in some basic block, then we do reach the end
if (TREE_CODE (last) == RETURN_EXPR
&& TREE_OPERAND (last, 0) == NULL)
{
+#ifdef USE_MAPPED_LOCATION
+ location = EXPR_LOCATION (last);
+ if (location == UNKNOWN_LOCATION)
+ location = cfun->function_end_locus;
+ warning ("%Hcontrol reaches end of non-void function", &location);
+#else
locus = EXPR_LOCUS (last);
if (!locus)
locus = &cfun->function_end_locus;
warning ("%Hcontrol reaches end of non-void function", locus);
+#endif
break;
}
}
NULL, /* next */
0, /* static_pass_number */
0, /* tv_id */
- PROP_ssa, /* properties_required */
+ PROP_cfg, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- 0 /* todo_flags_finish */
+ 0, /* todo_flags_finish */
+ 0 /* letter */
};
#include "gt-tree-cfg.h"