/* Control flow functions for trees.
- Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
+#include "tree-inline.h"
/* This file contains functions for building the Control Flow Graph (CFG)
for a function tree. */
/* Flowgraph optimization and cleanup. */
static void tree_merge_blocks (basic_block, basic_block);
-static bool tree_can_merge_blocks_p (const_basic_block, const_basic_block);
+static bool tree_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
return 0;
}
-struct tree_opt_pass pass_build_cfg =
+struct gimple_opt_pass pass_build_cfg =
{
+ {
+ GIMPLE_PASS,
"cfg", /* name */
NULL, /* gate */
execute_build_cfg, /* execute */
PROP_cfg, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_verify_stmts | TODO_cleanup_cfg, /* todo_flags_finish */
- 0 /* letter */
+ TODO_verify_stmts | TODO_cleanup_cfg /* todo_flags_finish */
+ }
};
/* Search the CFG for any computed gotos. If found, factor them to a
cond = fold (COND_EXPR_COND (stmt));
zerop = integer_zerop (cond);
onep = integer_onep (cond);
- fold_undefer_overflow_warnings (((zerop || onep)
- && !TREE_NO_WARNING (stmt)),
+ fold_undefer_overflow_warnings (zerop || onep,
stmt,
WARN_STRICT_OVERFLOW_CONDITIONAL);
if (zerop)
fallthru = false;
break;
+
+ case OMP_ATOMIC_LOAD:
+ case OMP_ATOMIC_STORE:
+ fallthru = true;
+ break;
+
+
case OMP_RETURN:
/* In the case of an OMP_SECTION, the edge will go somewhere
other than the next block. This will be created later. */
switch (cur_region->type)
{
case OMP_FOR:
+ /* Mark all OMP_FOR and OMP_CONTINUE succs edges as abnormal
+ to prevent splitting them. */
+ single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
/* Make the loopback edge. */
- make_edge (bb, single_succ (cur_region->entry), 0);
-
+ make_edge (bb, single_succ (cur_region->entry),
+ EDGE_ABNORMAL);
+
/* Create an edge from OMP_FOR to exit, which corresponds to
the case that the body of the loop is not executed at
all. */
- make_edge (cur_region->entry, bb->next_bb, 0);
- fallthru = true;
+ make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
+ make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
+ fallthru = false;
break;
case OMP_SECTIONS:
else_bb = label_to_block (else_label);
e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
-#ifdef USE_MAPPED_LOCATION
e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
-#else
- e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
-#endif
e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
if (e)
- {
-#ifdef USE_MAPPED_LOCATION
- e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
-#else
- e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
-#endif
- }
+ e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
/* We do not need the gotos anymore. */
COND_EXPR_THEN (entry) = NULL_TREE;
{
tree dest = GOTO_DESTINATION (goto_t);
edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
-#ifdef USE_MAPPED_LOCATION
e->goto_locus = EXPR_LOCATION (goto_t);
-#else
- e->goto_locus = EXPR_LOCUS (goto_t);
-#endif
bsi_remove (&last, true);
return;
}
tree labels = SWITCH_LABELS (stmt);
int old_size = TREE_VEC_LENGTH (labels);
int i, j, new_size = old_size;
- tree default_case = TREE_VEC_ELT (labels, old_size - 1);
- tree default_label;
+ tree default_case = NULL_TREE;
+ tree default_label = NULL_TREE;
/* The default label is always the last case in a switch
- statement after gimplification. */
- default_label = CASE_LABEL (default_case);
+ statement after gimplification if it was not optimized
+ away. */
+ if (!CASE_LOW (TREE_VEC_ELT (labels, old_size - 1))
+ && !CASE_HIGH (TREE_VEC_ELT (labels, old_size - 1)))
+ {
+ default_case = TREE_VEC_ELT (labels, old_size - 1);
+ default_label = CASE_LABEL (default_case);
+ old_size--;
+ }
- /* Look for possible opportunities to merge cases.
- Ignore the last element of the label vector because it
- must be the default case. */
+ /* Look for possible opportunities to merge cases. */
i = 0;
- while (i < old_size - 1)
+ while (i < old_size)
{
tree base_case, base_label, base_high;
base_case = TREE_VEC_ELT (labels, i);
/* Try to merge case labels. Break out when we reach the end
of the label vector or when we cannot merge the next case
label with the current one. */
- while (i < old_size - 1)
+ while (i < old_size)
{
tree merge_case = TREE_VEC_ELT (labels, i);
tree merge_label = CASE_LABEL (merge_case);
/* Checks whether we can merge block B into block A. */
static bool
-tree_can_merge_blocks_p (const_basic_block a, const_basic_block b)
+tree_can_merge_blocks_p (basic_block a, basic_block b)
{
const_tree stmt;
- const_block_stmt_iterator bsi;
+ block_stmt_iterator bsi;
tree phi;
if (!single_succ_p (a))
cannot merge the blocks. */
/* This CONST_CAST is okay because last_stmt doesn't modify its
argument and the return value is assigned to a const_tree. */
- stmt = last_stmt (CONST_CAST_BB(a));
+ stmt = last_stmt (CONST_CAST_BB (a));
if (stmt && stmt_ends_bb_p (stmt))
return false;
}
/* Do not remove user labels. */
- for (bsi = cbsi_start (b); !cbsi_end_p (bsi); cbsi_next (&bsi))
+ for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
{
- stmt = cbsi_stmt (bsi);
+ stmt = bsi_stmt (bsi);
if (TREE_CODE (stmt) != LABEL_EXPR)
break;
if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
}
else
{
- replace_uses_by (def, use);
+ /* If we deal with a PHI for virtual operands, we can simply
+ propagate these without fussing with folding or updating
+ the stmt. */
+ if (!is_gimple_reg (def))
+ {
+ imm_use_iterator iter;
+ use_operand_p use_p;
+ tree stmt;
+
+ FOR_EACH_IMM_USE_STMT (stmt, iter, def)
+ FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
+ SET_USE (use_p, use);
+ }
+ else
+ replace_uses_by (def, use);
remove_phi_node (phi, NULL, true);
}
}
location_t loc = EXPR_LOCATION (stmt);
if (LOCATION_LINE (loc) > 0)
{
- warning (0, "%Hwill never be executed", &loc);
+ warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
return true;
}
}
}
-struct tree_opt_pass pass_remove_useless_stmts =
+struct gimple_opt_pass pass_remove_useless_stmts =
{
+ {
+ GIMPLE_PASS,
"useless", /* name */
NULL, /* gate */
remove_useless_stmts, /* execute */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
- 0 /* letter */
+ TODO_dump_func /* todo_flags_finish */
+ }
};
/* Remove PHI nodes associated with basic block BB and all edges out of BB. */
remove_bb (basic_block bb)
{
block_stmt_iterator i;
-#ifdef USE_MAPPED_LOCATION
source_location loc = UNKNOWN_LOCATION;
-#else
- source_locus loc = 0;
-#endif
if (dump_file)
{
program that are indeed unreachable. */
if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
{
-#ifdef USE_MAPPED_LOCATION
if (EXPR_HAS_LOCATION (stmt))
loc = EXPR_LOCATION (stmt);
-#else
- source_locus t;
- t = EXPR_LOCUS (stmt);
- if (t && LOCATION_LINE (*t) > 0)
- loc = t;
-#endif
}
}
}
block is unreachable. We walk statements backwards in the
loop above, so the last statement we process is the first statement
in the block. */
-#ifdef USE_MAPPED_LOCATION
if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
-#else
- if (loc)
- warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
-#endif
remove_phi_nodes_and_edges_for_unreachable_block (bb);
bb->il.tree = NULL;
const_tree call;
gcc_assert (t);
- call = const_get_call_expr_in (t);
+ call = get_call_expr_in (CONST_CAST_TREE (t));
if (call)
{
/* A non-pure/const CALL_EXPR alters flow control if the current
static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
- tree var, phi;
+ tree phi;
+ edge_var_map_vector v;
+ edge_var_map *vm;
+ int i;
- if (!PENDING_STMT (old_edge))
+ v = redirect_edge_var_map_vector (old_edge);
+ if (!v)
return;
- for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
- var && phi;
- var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
+ for (i = 0, phi = phi_nodes (new_edge->dest);
+ VEC_iterate (edge_var_map, v, i, vm) && phi;
+ i++, phi = PHI_CHAIN (phi))
{
- tree result = TREE_PURPOSE (var);
- tree arg = TREE_VALUE (var);
+ tree result = redirect_edge_var_map_result (vm);
+ tree arg = redirect_edge_var_map_def (vm);
gcc_assert (result == PHI_RESULT (phi));
add_phi_arg (phi, arg, new_edge);
}
- PENDING_STMT (old_edge) = NULL;
+ redirect_edge_var_map_clear (old_edge);
}
/* Returns the basic block after which the new basic block created
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
tree t = *tp, x;
- bool in_phi = (data != NULL);
if (TYPE_P (t))
*walk_subtrees = 0;
bool new_constant;
bool new_side_effects;
- /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
- dead PHIs that take the address of something. But if the PHI
- result is dead, the fact that it takes the address of anything
- is irrelevant. Because we can not tell from here if a PHI result
- is dead, we just skip this check for PHIs altogether. This means
- we may be missing "valid" checks, but what can you do?
- This was PR19217. */
- if (in_phi)
- break;
-
old_invariant = TREE_INVARIANT (t);
old_constant = TREE_CONSTANT (t);
old_side_effects = TREE_SIDE_EFFECTS (t);
error ("address taken, but ADDRESSABLE bit not set");
return x;
}
+
break;
}
}
else if (TREE_CODE (t) == BIT_FIELD_REF)
{
- CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
- CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
+ if (!host_integerp (TREE_OPERAND (t, 1), 1)
+ || !host_integerp (TREE_OPERAND (t, 2), 1))
+ {
+ error ("invalid position or size operand to BIT_FIELD_REF");
+ return t;
+ }
+ else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
+ && (TYPE_PRECISION (TREE_TYPE (t))
+ != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
+ {
+ error ("integral result type precision does not match "
+ "field size of BIT_FIELD_REF");
+ return t;
+ }
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
+ && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
+ != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
+ {
+ error ("mode precision of non-integral result does not "
+ "match field size of BIT_FIELD_REF");
+ return t;
+ }
}
t = TREE_OPERAND (t, 0);
}
- if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
+ if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
{
error ("invalid reference prefix");
return t;
return verify_gimple_min_lval (expr);
}
+/* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
+ list of pointer-to types that is trivially convertible to DEST.
+ Also returns true if SRC_OBJ has no pointer-to types recorded at all,
+ since in that case no conversion mismatch can be detected. */
+
+static bool
+one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
+{
+ tree src;
+
+ /* No pointer-to types exist for SRC_OBJ; conservatively accept. */
+ if (!TYPE_POINTER_TO (src_obj))
+ return true;
+
+ /* Walk the chain of pointer types pointing to SRC_OBJ looking for one
+ that converts to DEST without an explicit conversion. */
+ for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
+ if (useless_type_conversion_p (dest, src))
+ return true;
+
+ return false;
+}
+
/* Verify the GIMPLE expression EXPR. Returns true if there is an
error, otherwise false. */
return true;
}
if (!POINTER_TYPE_P (TREE_TYPE (op0))
- || TREE_CODE (TREE_TYPE (op1)) != INTEGER_TYPE
|| !useless_type_conversion_p (type, TREE_TYPE (op0))
|| !useless_type_conversion_p (sizetype, TREE_TYPE (op1)))
{
case ADDR_EXPR:
{
tree op = TREE_OPERAND (expr, 0);
- tree ptr_type;
if (!is_gimple_addressable (op))
{
error ("invalid operand in unary expression");
return true;
}
- ptr_type = build_pointer_type (TREE_TYPE (op));
- if (!useless_type_conversion_p (type, ptr_type)
+ if (!one_pointer_to_useless_type_conversion_p (type, TREE_TYPE (op))
/* FIXME: a longstanding wart, &a == &a[0]. */
&& (TREE_CODE (TREE_TYPE (op)) != ARRAY_TYPE
- || !useless_type_conversion_p (type,
- build_pointer_type (TREE_TYPE (TREE_TYPE (op))))))
+ || !one_pointer_to_useless_type_conversion_p (type,
+ TREE_TYPE (TREE_TYPE (op)))))
{
error ("type mismatch in address expression");
debug_generic_stmt (TREE_TYPE (expr));
- debug_generic_stmt (ptr_type);
+ debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
return true;
}
didn't see a function declaration before the call. */
return false;
+ case OBJ_TYPE_REF:
+ /* FIXME. */
+ return false;
+
default:;
}
case NOP_EXPR:
case CHANGE_DYNAMIC_TYPE_EXPR:
case ASM_EXPR:
+ case PREDICT_EXPR:
return false;
default:
}
}
-/* Verify the GIMPLE statements inside the statement list STMTS. */
+/* Verify the GIMPLE statements inside the statement list STMTS.
+ Returns true if there were any errors. */
-void
-verify_gimple_1 (tree stmts)
+static bool
+verify_gimple_2 (tree stmts)
{
tree_stmt_iterator tsi;
+ bool err = false;
for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
{
switch (TREE_CODE (stmt))
{
case BIND_EXPR:
- verify_gimple_1 (BIND_EXPR_BODY (stmt));
+ err |= verify_gimple_2 (BIND_EXPR_BODY (stmt));
break;
case TRY_CATCH_EXPR:
case TRY_FINALLY_EXPR:
- verify_gimple_1 (TREE_OPERAND (stmt, 0));
- verify_gimple_1 (TREE_OPERAND (stmt, 1));
+ err |= verify_gimple_2 (TREE_OPERAND (stmt, 0));
+ err |= verify_gimple_2 (TREE_OPERAND (stmt, 1));
break;
case CATCH_EXPR:
- verify_gimple_1 (CATCH_BODY (stmt));
+ err |= verify_gimple_2 (CATCH_BODY (stmt));
break;
case EH_FILTER_EXPR:
- verify_gimple_1 (EH_FILTER_FAILURE (stmt));
+ err |= verify_gimple_2 (EH_FILTER_FAILURE (stmt));
break;
default:
- if (verify_gimple_stmt (stmt))
- debug_generic_expr (stmt);
+ {
+ bool err2 = verify_gimple_stmt (stmt);
+ if (err2)
+ debug_generic_expr (stmt);
+ err |= err2;
+ }
}
}
+
+ return err;
+}
+
+
+/* Verify the GIMPLE statements inside the statement list STMTS. */
+
+void
+verify_gimple_1 (tree stmts)
+{
+ if (verify_gimple_2 (stmts))
+ internal_error ("verify_gimple failed");
}
/* Verify the GIMPLE statements inside the current function. */
tree t = PHI_ARG_DEF (phi, i);
tree addr;
+ if (!t)
+ {
+ error ("missing PHI def");
+ debug_generic_stmt (phi);
+ err |= true;
+ continue;
+ }
/* Addressable variables do have SSA_NAMEs but they
are not considered gimple values. */
- if (TREE_CODE (t) != SSA_NAME
- && TREE_CODE (t) != FUNCTION_DECL
- && !is_gimple_val (t))
+ else if (TREE_CODE (t) != SSA_NAME
+ && TREE_CODE (t) != FUNCTION_DECL
+ && !is_gimple_min_invariant (t))
{
error ("PHI def is not a GIMPLE value");
debug_generic_stmt (phi);
err |= true;
}
- addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
- if (addr)
- {
- debug_generic_stmt (addr);
- err |= true;
- }
-
addr = walk_tree (&t, verify_node_sharing, visited, NULL);
if (addr)
{
/* Verify that the case labels are sorted. */
prev = TREE_VEC_ELT (vec, 0);
- for (i = 1; i < n - 1; ++i)
+ for (i = 1; i < n; ++i)
{
tree c = TREE_VEC_ELT (vec, i);
if (! CASE_LOW (c))
{
- error ("found default case not at end of case vector");
- err = 1;
+ if (i != n - 1)
+ {
+ error ("found default case not at end of case vector");
+ err = 1;
+ }
continue;
}
if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
}
prev = c;
}
- if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
- {
- error ("no default case found at end of case vector");
- err = 1;
- }
+ /* VRP will remove the default case if it can prove it will
+ never be executed. So do not verify there always exists
+ a default case here. */
FOR_EACH_EDGE (e, ei, bb->succs)
{
return new_bb;
}
+/* Adds phi node arguments for edge E_COPY after basic block duplication.
+ The argument added to each PHI node in E_COPY's destination is the one
+ the corresponding original PHI node receives along the original edge. */
+
+static void
+add_phi_args_after_copy_edge (edge e_copy)
+{
+ basic_block bb, bb_copy = e_copy->src, dest;
+ edge e;
+ edge_iterator ei;
+ tree phi, phi_copy, phi_next, def;
+
+ /* No PHI nodes at the destination -- nothing to add. */
+ if (!phi_nodes (e_copy->dest))
+ return;
+
+ /* Map the source of E_COPY back to its original block, if it is part
+ of the duplicated region. */
+ bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
+
+ if (e_copy->dest->flags & BB_DUPLICATED)
+ dest = get_bb_original (e_copy->dest);
+ else
+ dest = e_copy->dest;
+
+ /* Find the original edge corresponding to E_COPY. */
+ e = find_edge (bb, dest);
+ if (!e)
+ {
+ /* During loop unrolling the target of the latch edge is copied.
+ In this case we are not looking for edge to dest, but to
+ duplicated block whose original was dest. */
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ {
+ if ((e->dest->flags & BB_DUPLICATED)
+ && get_bb_original (e->dest) == dest)
+ break;
+ }
+
+ gcc_assert (e != NULL);
+ }
+
+ /* Walk the original and copied PHI chains in lockstep, copying the
+ argument flowing along E into each copied PHI node for E_COPY. */
+ for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
+ phi;
+ phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
+ {
+ phi_next = PHI_CHAIN (phi);
+ def = PHI_ARG_DEF_FROM_EDGE (phi, e);
+ add_phi_arg (phi_copy, def, e_copy);
+ }
+}
+
/* Basic block BB_COPY was created by code duplication. Add phi node
arguments for edges going out of BB_COPY. The blocks that were
void
add_phi_args_after_copy_bb (basic_block bb_copy)
{
- basic_block bb, dest;
- edge e, e_copy;
edge_iterator ei;
- tree phi, phi_copy, phi_next, def;
-
- bb = get_bb_original (bb_copy);
+ edge e_copy;
FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
{
- if (!phi_nodes (e_copy->dest))
- continue;
-
- if (e_copy->dest->flags & BB_DUPLICATED)
- dest = get_bb_original (e_copy->dest);
- else
- dest = e_copy->dest;
-
- e = find_edge (bb, dest);
- if (!e)
- {
- /* During loop unrolling the target of the latch edge is copied.
- In this case we are not looking for edge to dest, but to
- duplicated block whose original was dest. */
- FOR_EACH_EDGE (e, ei, bb->succs)
- if ((e->dest->flags & BB_DUPLICATED)
- && get_bb_original (e->dest) == dest)
- break;
-
- gcc_assert (e != NULL);
- }
-
- for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
- phi;
- phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
- {
- phi_next = PHI_CHAIN (phi);
- def = PHI_ARG_DEF_FROM_EDGE (phi, e);
- add_phi_arg (phi_copy, def, e_copy);
- }
+ /* The per-edge work was factored out into
+ add_phi_args_after_copy_edge so it can also be used for a single
+ edge (see add_phi_args_after_copy). */
+ add_phi_args_after_copy_edge (e_copy);
}
}
/* Blocks in REGION_COPY array of length N_REGION were created by
duplication of basic blocks. Add phi node arguments for edges
- going from these blocks. */
+ going from these blocks. If E_COPY is not NULL, also add
+ phi node arguments for its destination. */
void
-add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
+add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
+ edge e_copy)
{
unsigned i;
for (i = 0; i < n_region; i++)
add_phi_args_after_copy_bb (region_copy[i]);
+ if (e_copy)
+ add_phi_args_after_copy_edge (e_copy);
for (i = 0; i < n_region; i++)
region_copy[i]->flags &= ~BB_DUPLICATED;
set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
iterate_fix_dominators (CDI_DOMINATORS, doms, false);
- free (doms);
+ VEC_free (basic_block, heap, doms);
/* Add the other PHI node arguments. */
- add_phi_args_after_copy (region_copy, n_region);
+ add_phi_args_after_copy (region_copy, n_region, NULL);
+
+ /* Update the SSA web. */
+ update_ssa (TODO_update_ssa);
+
+ if (free_region_copy)
+ free (region_copy);
+
+ free_original_copy_tables ();
+ return true;
+}
+
+/* Duplicates REGION consisting of N_REGION blocks. The new blocks
+ are stored to REGION_COPY in the same order in that they appear
+ in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
+ the region, EXIT an exit from it. The condition guarding EXIT
+ is moved to ENTRY. Returns true if duplication succeeds, false
+ otherwise.
+
+ For example,
+
+ some_code;
+ if (cond)
+ A;
+ else
+ B;
+
+ is transformed to
+
+ if (cond)
+ {
+ some_code;
+ A;
+ }
+ else
+ {
+ some_code;
+ B;
+ }
+*/
+
+bool
+tree_duplicate_sese_tail (edge entry, edge exit,
+ basic_block *region, unsigned n_region,
+ basic_block *region_copy)
+{
+ unsigned i;
+ bool free_region_copy = false;
+ struct loop *loop = exit->dest->loop_father;
+ struct loop *orig_loop = entry->dest->loop_father;
+ basic_block switch_bb, entry_bb, nentry_bb;
+ VEC (basic_block, heap) *doms;
+ int total_freq = 0, exit_freq = 0;
+ gcov_type total_count = 0, exit_count = 0;
+ edge exits[2], nexits[2], e;
+ block_stmt_iterator bsi;
+ tree cond;
+ edge sorig, snew;
+
+ gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
+ exits[0] = exit;
+ exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
+
+ if (!can_copy_bbs_p (region, n_region))
+ return false;
+
+ /* Some sanity checking. Note that we do not check for all possible
+ misuses of the functions. I.e. if you ask to copy something weird
+ (e.g., in the example, if there is a jump from inside to the middle
+ of some_code, or some_code defines some of the values used in cond)
+ it will work, but the resulting code will not be correct. */
+ for (i = 0; i < n_region; i++)
+ {
+ /* We do not handle subloops, i.e. all the blocks must belong to the
+ same loop. */
+ if (region[i]->loop_father != orig_loop)
+ return false;
+
+ if (region[i] == orig_loop->latch)
+ return false;
+ }
+
+ initialize_original_copy_tables ();
+ set_loop_copy (orig_loop, loop);
+
+ if (!region_copy)
+ {
+ region_copy = XNEWVEC (basic_block, n_region);
+ free_region_copy = true;
+ }
+
+ gcc_assert (!need_ssa_update_p ());
+
+ /* Record blocks outside the region that are dominated by something
+ inside. */
+ doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
+
+ if (exit->src->count)
+ {
+ total_count = exit->src->count;
+ exit_count = exit->count;
+ /* Fix up corner cases, to avoid division by zero or creation of negative
+ frequencies. */
+ if (exit_count > total_count)
+ exit_count = total_count;
+ }
+ else
+ {
+ total_freq = exit->src->frequency;
+ exit_freq = EDGE_FREQUENCY (exit);
+ /* Fix up corner cases, to avoid division by zero or creation of negative
+ frequencies. */
+ if (total_freq == 0)
+ total_freq = 1;
+ if (exit_freq > total_freq)
+ exit_freq = total_freq;
+ }
+
+ copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
+ split_edge_bb_loc (exit));
+ if (total_count)
+ {
+ scale_bbs_frequencies_gcov_type (region, n_region,
+ total_count - exit_count,
+ total_count);
+ scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
+ total_count);
+ }
+ else
+ {
+ scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
+ total_freq);
+ scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
+ }
+
+ /* Create the switch block, and put the exit condition to it. */
+ entry_bb = entry->dest;
+ nentry_bb = get_bb_copy (entry_bb);
+ if (!last_stmt (entry->src)
+ || !stmt_ends_bb_p (last_stmt (entry->src)))
+ switch_bb = entry->src;
+ else
+ switch_bb = split_edge (entry);
+ set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
+
+ bsi = bsi_last (switch_bb);
+ cond = last_stmt (exit->src);
+ gcc_assert (TREE_CODE (cond) == COND_EXPR);
+ bsi_insert_after (&bsi, unshare_expr (cond), BSI_NEW_STMT);
+
+ sorig = single_succ_edge (switch_bb);
+ sorig->flags = exits[1]->flags;
+ snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
+
+ /* Register the new edge from SWITCH_BB in loop exit lists. */
+ rescan_loop_exit (snew, true, false);
+
+ /* Add the PHI node arguments. */
+ add_phi_args_after_copy (region_copy, n_region, snew);
+
+ /* Get rid of now superfluous conditions and associated edges (and phi node
+ arguments). */
+ e = redirect_edge_and_branch (exits[0], exits[1]->dest);
+ PENDING_STMT (e) = NULL_TREE;
+ e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
+ PENDING_STMT (e) = NULL_TREE;
+
+ /* Anything that is outside of the region, but was dominated by something
+ inside needs to update dominance info. */
+ iterate_fix_dominators (CDI_DOMINATORS, doms, false);
+ VEC_free (basic_block, heap, doms);
/* Update the SSA web. */
update_ssa (TODO_update_ssa);
}
}
+/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
+ The duplicates are recorded in VARS_MAP so that each decl is
+ duplicated at most once. */
+
+static void
+replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
+ tree to_context)
+{
+ tree t = *tp, new_t;
+ struct function *f = DECL_STRUCT_FUNCTION (to_context);
+ void **loc;
+
+ /* The decl already belongs to the destination function; nothing to do. */
+ if (DECL_CONTEXT (t) == to_context)
+ return;
+
+ loc = pointer_map_contains (vars_map, t);
+
+ if (!loc)
+ {
+ /* First time we see this decl: create the duplicate and cache it. */
+ loc = pointer_map_insert (vars_map, t);
+
+ if (SSA_VAR_P (t))
+ {
+ new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
+ f->unexpanded_var_list
+ = tree_cons (NULL_TREE, new_t, f->unexpanded_var_list);
+ }
+ else
+ {
+ /* Besides variables, only CONST_DECLs are expected here. */
+ gcc_assert (TREE_CODE (t) == CONST_DECL);
+ new_t = copy_node (t);
+ }
+ DECL_CONTEXT (new_t) = to_context;
+
+ *loc = new_t;
+ }
+ else
+ new_t = *loc;
+
+ *tp = new_t;
+}
+
+/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
+ VARS_MAP maps old ssa names and var_decls to the new ones, so the
+ same NAME is never duplicated twice. */
+
+static tree
+replace_ssa_name (tree name, struct pointer_map_t *vars_map,
+ tree to_context)
+{
+ void **loc;
+ tree new_name, decl = SSA_NAME_VAR (name);
+
+ /* Only real (GIMPLE register) operands are expected here; virtual
+ operands are handled separately. */
+ gcc_assert (is_gimple_reg (name));
+
+ loc = pointer_map_contains (vars_map, name);
+
+ if (!loc)
+ {
+ /* Duplicate the underlying variable into TO_CONTEXT first. */
+ replace_by_duplicate_decl (&decl, vars_map, to_context);
+
+ /* Create the new SSA name inside the destination function. */
+ push_cfun (DECL_STRUCT_FUNCTION (to_context));
+ if (gimple_in_ssa_p (cfun))
+ add_referenced_var (decl);
+
+ new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
+ if (SSA_NAME_IS_DEFAULT_DEF (name))
+ set_default_def (decl, new_name);
+ pop_cfun ();
+
+ /* Cache the mapping for subsequent uses of NAME. */
+ loc = pointer_map_insert (vars_map, name);
+ *loc = new_name;
+ }
+ else
+ new_name = *loc;
+
+ return new_name;
+}
struct move_stmt_d
{
tree block;
tree from_context;
tree to_context;
- bitmap vars_to_remove;
+ struct pointer_map_t *vars_map;
htab_t new_label_map;
bool remap_decls_p;
};
p->remap_decls_p = save_remap_decls_p;
}
- else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
+ else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
{
- if (TREE_CODE (t) == LABEL_DECL)
+ if (TREE_CODE (t) == SSA_NAME)
+ *tp = replace_ssa_name (t, p->vars_map, p->to_context);
+ else if (TREE_CODE (t) == LABEL_DECL)
{
if (p->new_label_map)
{
}
else if (p->remap_decls_p)
{
- DECL_CONTEXT (t) = p->to_context;
-
- if (TREE_CODE (t) == VAR_DECL)
+ /* Replace T with its duplicate. T should no longer appear in the
+ parent function, so this looks wasteful; however, it may appear
+ in referenced_vars, and more importantly, as virtual operands of
+ statements, and in alias lists of other variables. It would be
+ quite difficult to expunge it from all those places. ??? It might
+ suffice to do this for addressable variables. */
+ if ((TREE_CODE (t) == VAR_DECL
+ && !is_global_var (t))
+ || TREE_CODE (t) == CONST_DECL)
+ replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
+
+ if (SSA_VAR_P (t)
+ && gimple_in_ssa_p (cfun))
{
- struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
- f->unexpanded_var_list
- = tree_cons (0, t, f->unexpanded_var_list);
-
- /* Mark T to be removed from the original function,
- otherwise it will be given a DECL_RTL when the
- original function is expanded. */
- bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
+ push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
+ add_referenced_var (*tp);
+ pop_cfun ();
}
}
+ *walk_subtrees = 0;
}
else if (TYPE_P (t))
*walk_subtrees = 0;
return NULL_TREE;
}
+/* Marks virtual operands of all statements (including PHI nodes) in
+ basic block BB for renaming. */
+
+void
+mark_virtual_ops_in_bb (basic_block bb)
+{
+ tree phi;
+ block_stmt_iterator bsi;
+
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ mark_virtual_ops_for_renaming (phi);
+
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ mark_virtual_ops_for_renaming (bsi_stmt (bsi));
+}
+
+/* Marks virtual operands of all statements in basic blocks BBS for
+ renaming, one block at a time via mark_virtual_ops_in_bb. */
+
+static void
+mark_virtual_ops_in_region (VEC (basic_block,heap) *bbs)
+{
+ basic_block bb;
+ unsigned i;
+
+ for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
+ mark_virtual_ops_in_bb (bb);
+}
/* Move basic block BB from function CFUN to function DEST_FN. The
block is moved out of the original linked list and placed after
If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
updated to reflect the moved edges.
- On exit, local variables that need to be removed from
- CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
+ The local variables are remapped to new instances, VARS_MAP is used
+ to record the mapping. */
static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
basic_block after, bool update_edge_count_p,
- bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
+ struct pointer_map_t *vars_map, htab_t new_label_map,
+ int eh_offset)
{
struct control_flow_graph *cfg;
edge_iterator ei;
block_stmt_iterator si;
struct move_stmt_d d;
unsigned old_len, new_len;
+ tree phi, next_phi;
/* Remove BB from dominance structures. */
delete_from_dominance_info (CDI_DOMINATORS, bb);
+ if (current_loops)
+ remove_bb_from_loops (bb);
/* Link BB to the new linked list. */
move_block_after (bb, after);
VEC_replace (basic_block, cfg->x_basic_block_info,
bb->index, bb);
+ /* Remap the variables in phi nodes. */
+ for (phi = phi_nodes (bb); phi; phi = next_phi)
+ {
+ use_operand_p use;
+ tree op = PHI_RESULT (phi);
+ ssa_op_iter oi;
+
+ next_phi = PHI_CHAIN (phi);
+ if (!is_gimple_reg (op))
+ {
+ /* Remove the phi nodes for virtual operands (alias analysis will be
+ run for the new function, anyway). */
+ remove_phi_node (phi, NULL, true);
+ continue;
+ }
+
+ SET_PHI_RESULT (phi, replace_ssa_name (op, vars_map, dest_cfun->decl));
+ FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
+ {
+ op = USE_FROM_PTR (use);
+ if (TREE_CODE (op) == SSA_NAME)
+ SET_USE (use, replace_ssa_name (op, vars_map, dest_cfun->decl));
+ }
+ }
+
/* The statements in BB need to be associated with a new TREE_BLOCK.
Labels need to be associated with a new label-to-block map. */
memset (&d, 0, sizeof (d));
- d.vars_to_remove = vars_to_remove;
+ d.vars_map = vars_map;
+ d.from_context = cfun->decl;
+ d.to_context = dest_cfun->decl;
+ d.new_label_map = new_label_map;
for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
{
tree stmt = bsi_stmt (si);
int region;
- d.from_context = cfun->decl;
- d.to_context = dest_cfun->decl;
d.remap_decls_p = true;
- d.new_label_map = new_label_map;
if (TREE_BLOCK (stmt))
d.block = DECL_INITIAL (dest_cfun->decl);
gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
gimple_remove_stmt_histograms (cfun, stmt);
}
+
+ /* We cannot leave any operands allocated from the operand caches of
+ the current function. */
+ free_stmt_operands (stmt);
+ push_cfun (dest_cfun);
+ update_stmt (stmt);
+ pop_cfun ();
}
}
m->base.from = decl;
m->to = create_artificial_label ();
LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
+ if (LABEL_DECL_UID (m->to) >= cfun->last_label_uid)
+ cfun->last_label_uid = LABEL_DECL_UID (m->to) + 1;
slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
gcc_assert (*slot == NULL);
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
basic_block exit_bb)
{
- VEC(basic_block,heap) *bbs;
- basic_block after, bb, *entry_pred, *exit_succ;
- struct function *saved_cfun;
+ VEC(basic_block,heap) *bbs, *dom_bbs;
+ basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
+ basic_block after, bb, *entry_pred, *exit_succ, abb;
+ struct function *saved_cfun = cfun;
int *entry_flag, *exit_flag, eh_offset;
+ unsigned *entry_prob, *exit_prob;
unsigned i, num_entry_edges, num_exit_edges;
edge e;
edge_iterator ei;
- bitmap vars_to_remove;
htab_t new_label_map;
-
- saved_cfun = cfun;
-
- /* Collect all the blocks in the region. Manually add ENTRY_BB
- because it won't be added by dfs_enumerate_from. */
- calculate_dominance_info (CDI_DOMINATORS);
+ struct pointer_map_t *vars_map;
+ struct loop *loop = entry_bb->loop_father;
/* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
region. */
&& (!exit_bb
|| dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
+ /* Collect all the blocks in the region. Manually add ENTRY_BB
+ because it won't be added by dfs_enumerate_from. */
bbs = NULL;
VEC_safe_push (basic_block, heap, bbs, entry_bb);
gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
+ /* The blocks that used to be dominated by something in BBS will now be
+ dominated by the new block. */
+ dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
+ VEC_address (basic_block, bbs),
+ VEC_length (basic_block, bbs));
+
/* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
the predecessor edges to ENTRY_BB and the successor edges to
EXIT_BB so that we can re-attach them to the new basic block that
num_entry_edges = EDGE_COUNT (entry_bb->preds);
entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
+ entry_prob = XNEWVEC (unsigned, num_entry_edges);
i = 0;
for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
{
+ entry_prob[i] = e->probability;
entry_flag[i] = e->flags;
entry_pred[i++] = e->src;
remove_edge (e);
exit_succ = (basic_block *) xcalloc (num_exit_edges,
sizeof (basic_block));
exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
+ exit_prob = XNEWVEC (unsigned, num_exit_edges);
i = 0;
for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
{
+ exit_prob[i] = e->probability;
exit_flag[i] = e->flags;
exit_succ[i++] = e->dest;
remove_edge (e);
num_exit_edges = 0;
exit_succ = NULL;
exit_flag = NULL;
+ exit_prob = NULL;
}
/* Switch context to the child function to initialize DEST_FN's CFG. */
gcc_assert (dest_cfun->cfg == NULL);
- set_cfun (dest_cfun);
+ push_cfun (dest_cfun);
init_empty_tree_cfg ();
}
}
- set_cfun (saved_cfun);
+ pop_cfun ();
+
+ /* The ssa form for virtual operands in the source function will have to
+ be repaired. We do not care for the real operands -- the sese region
+ must be closed with respect to those. */
+ mark_virtual_ops_in_region (bbs);
/* Move blocks from BBS into DEST_CFUN. */
gcc_assert (VEC_length (basic_block, bbs) >= 2);
after = dest_cfun->cfg->x_entry_block_ptr;
- vars_to_remove = BITMAP_ALLOC (NULL);
+ vars_map = pointer_map_create ();
for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
{
/* No need to update edge counts on the last block. It has
already been updated earlier when we detached the region from
the original CFG. */
- move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
+ move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_map,
new_label_map, eh_offset);
after = bb;
}
if (new_label_map)
htab_delete (new_label_map);
-
- /* Remove the variables marked in VARS_TO_REMOVE from
- CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
- DECL_RTL in the context of CFUN. */
- if (!bitmap_empty_p (vars_to_remove))
- {
- tree *p;
-
- for (p = &cfun->unexpanded_var_list; *p; )
- {
- tree var = TREE_VALUE (*p);
- if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
- {
- *p = TREE_CHAIN (*p);
- continue;
- }
-
- p = &TREE_CHAIN (*p);
- }
- }
-
- BITMAP_FREE (vars_to_remove);
+ pointer_map_destroy (vars_map);
/* Rewire the entry and exit blocks. The successor to the entry
block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
FIXME, this is silly. The CFG ought to become a parameter to
these helpers. */
- set_cfun (dest_cfun);
+ push_cfun (dest_cfun);
make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
if (exit_bb)
make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
- set_cfun (saved_cfun);
+ pop_cfun ();
/* Back in the original function, the SESE region has disappeared,
create a new basic block in its place. */
bb = create_empty_bb (entry_pred[0]);
+ if (current_loops)
+ add_bb_to_loop (bb, loop);
for (i = 0; i < num_entry_edges; i++)
- make_edge (entry_pred[i], bb, entry_flag[i]);
+ {
+ e = make_edge (entry_pred[i], bb, entry_flag[i]);
+ e->probability = entry_prob[i];
+ }
for (i = 0; i < num_exit_edges; i++)
- make_edge (bb, exit_succ[i], exit_flag[i]);
+ {
+ e = make_edge (bb, exit_succ[i], exit_flag[i]);
+ e->probability = exit_prob[i];
+ }
+
+ set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
+ for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
+ set_immediate_dominator (CDI_DOMINATORS, abb, bb);
+ VEC_free (basic_block, heap, dom_bbs);
if (exit_bb)
{
+ free (exit_prob);
free (exit_flag);
free (exit_succ);
}
+ free (entry_prob);
free (entry_flag);
free (entry_pred);
- free_dominance_info (CDI_DOMINATORS);
- free_dominance_info (CDI_POST_DOMINATORS);
VEC_free (basic_block, heap, bbs);
return bb;
}
-/* Pretty print of the loops intermediate representation. */
-static void print_loop (FILE *, struct loop *, int);
-static void print_pred_bbs (FILE *, basic_block bb);
-static void print_succ_bbs (FILE *, basic_block bb);
-
-
/* Print on FILE the indexes for the predecessors of basic_block BB. */
static void
fprintf (file, "bb_%d ", e->dest->index);
}
+/* Print to FILE the basic block BB according to the VERBOSITY level. */
+
+void
+print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
+{
+ char *s_indent = (char *) alloca ((size_t) indent + 1);
+ memset ((void *) s_indent, ' ', (size_t) indent);
+ s_indent[indent] = '\0';
+
+ /* Print basic_block's header. */
+ if (verbosity >= 2)
+ {
+ fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
+ print_pred_bbs (file, bb);
+ fprintf (file, "}, succs = {");
+ print_succ_bbs (file, bb);
+ fprintf (file, "})\n");
+ }
+
+ /* Print basic_block's body. */
+ if (verbosity >= 3)
+ {
+ fprintf (file, "%s {\n", s_indent);
+ tree_dump_bb (bb, file, indent + 4);
+ fprintf (file, "%s }\n", s_indent);
+ }
+}
+
+static void print_loop_and_siblings (FILE *, struct loop *, int, int);
-/* Pretty print LOOP on FILE, indented INDENT spaces. */
+/* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on
+   the VERBOSITY level, this outputs either the contents of the loop
+   or just its structure.  */
static void
-print_loop (FILE *file, struct loop *loop, int indent)
+print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
{
char *s_indent;
basic_block bb;
memset ((void *) s_indent, ' ', (size_t) indent);
s_indent[indent] = '\0';
- /* Print the loop's header. */
- fprintf (file, "%sloop_%d\n", s_indent, loop->num);
+ /* Print loop's header. */
+ fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
+ loop->num, loop->header->index, loop->latch->index);
+ fprintf (file, ", niter = ");
+ print_generic_expr (file, loop->nb_iterations, 0);
- /* Print the loop's body. */
- fprintf (file, "%s{\n", s_indent);
- FOR_EACH_BB (bb)
- if (bb->loop_father == loop)
- {
- /* Print the basic_block's header. */
- fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
- print_pred_bbs (file, bb);
- fprintf (file, "}, succs = {");
- print_succ_bbs (file, bb);
- fprintf (file, "})\n");
-
- /* Print the basic_block's body. */
- fprintf (file, "%s {\n", s_indent);
- tree_dump_bb (bb, file, indent + 4);
- fprintf (file, "%s }\n", s_indent);
- }
+ if (loop->any_upper_bound)
+ {
+ fprintf (file, ", upper_bound = ");
+ dump_double_int (file, loop->nb_iterations_upper_bound, true);
+ }
+
+ if (loop->any_estimate)
+ {
+ fprintf (file, ", estimate = ");
+ dump_double_int (file, loop->nb_iterations_estimate, true);
+ }
+ fprintf (file, ")\n");
- print_loop (file, loop->inner, indent + 2);
- fprintf (file, "%s}\n", s_indent);
- print_loop (file, loop->next, indent);
+ /* Print loop's body. */
+ if (verbosity >= 1)
+ {
+ fprintf (file, "%s{\n", s_indent);
+ FOR_EACH_BB (bb)
+ if (bb->loop_father == loop)
+ print_loops_bb (file, bb, indent, verbosity);
+
+ print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
+ fprintf (file, "%s}\n", s_indent);
+ }
}
+/* Print the LOOP and its sibling loops on FILE, indented INDENT
+   spaces.  Depending on the VERBOSITY level, this outputs either the
+   contents of each loop or just its structure.  */
+
+static void
+print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
+{
+ if (loop == NULL)
+ return;
+
+ print_loop (file, loop, indent, verbosity);
+ print_loop_and_siblings (file, loop->next, indent, verbosity);
+}
/* Follow a CFG edge from the entry point of the program, and on entry
of a loop, pretty print the loop structure on FILE. */
void
-print_loop_ir (FILE *file)
+print_loops (FILE *file, int verbosity)
{
basic_block bb;
bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
if (bb && bb->loop_father)
- print_loop (file, bb->loop_father, 0);
+ print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
}
-/* Debugging loops structure at tree level. */
+/* Debugging loops structure at tree level, at some VERBOSITY level. */
void
-debug_loop_ir (void)
+debug_loops (int verbosity)
{
- print_loop_ir (stderr);
+ print_loops (stderr, verbosity);
}
+/* Print on stderr the code of LOOP, at some VERBOSITY level. */
+
+void
+debug_loop (struct loop *loop, int verbosity)
+{
+ print_loop (stderr, loop, 0, verbosity);
+}
+
+/* Print on stderr the code of loop number NUM, at some VERBOSITY
+ level. */
+
+void
+debug_loop_num (unsigned num, int verbosity)
+{
+ debug_loop (get_loop (num), verbosity);
+}
/* Return true if BB ends with a call, possibly followed by some
instructions that must stay with the call. Return false,
otherwise. */
static bool
-tree_block_ends_with_call_p (const_basic_block bb)
+tree_block_ends_with_call_p (basic_block bb)
{
- const_block_stmt_iterator bsi = cbsi_last (bb);
- return const_get_call_expr_in (cbsi_stmt (bsi)) != NULL;
+ block_stmt_iterator bsi = bsi_last (bb);
+ return get_call_expr_in (bsi_stmt (bsi)) != NULL;
}
return 0;
}
-struct tree_opt_pass pass_split_crit_edges =
+struct gimple_opt_pass pass_split_crit_edges =
{
+ {
+ GIMPLE_PASS,
"crited", /* name */
NULL, /* gate */
split_critical_edges, /* execute */
PROP_no_crit_edges, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
- 0 /* letter */
+ TODO_dump_func /* todo_flags_finish */
+ }
};
\f
static unsigned int
execute_warn_function_return (void)
{
-#ifdef USE_MAPPED_LOCATION
source_location location;
-#else
- location_t *locus;
-#endif
tree last;
edge e;
edge_iterator ei;
if (TREE_THIS_VOLATILE (cfun->decl)
&& EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
{
-#ifdef USE_MAPPED_LOCATION
location = UNKNOWN_LOCATION;
-#else
- locus = NULL;
-#endif
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
{
last = last_stmt (e->src);
if (TREE_CODE (last) == RETURN_EXPR
-#ifdef USE_MAPPED_LOCATION
&& (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
-#else
- && (locus = EXPR_LOCUS (last)) != NULL)
-#endif
break;
}
-#ifdef USE_MAPPED_LOCATION
if (location == UNKNOWN_LOCATION)
location = cfun->function_end_locus;
warning (0, "%H%<noreturn%> function does return", &location);
-#else
- if (!locus)
- locus = &cfun->function_end_locus;
- warning (0, "%H%<noreturn%> function does return", locus);
-#endif
}
/* If we see "return;" in some basic block, then we do reach the end
&& TREE_OPERAND (last, 0) == NULL
&& !TREE_NO_WARNING (last))
{
-#ifdef USE_MAPPED_LOCATION
location = EXPR_LOCATION (last);
if (location == UNKNOWN_LOCATION)
location = cfun->function_end_locus;
- warning (0, "%Hcontrol reaches end of non-void function", &location);
-#else
- locus = EXPR_LOCUS (last);
- if (!locus)
- locus = &cfun->function_end_locus;
- warning (0, "%Hcontrol reaches end of non-void function", locus);
-#endif
+ warning (OPT_Wreturn_type, "%Hcontrol reaches end of non-void function", &location);
TREE_NO_WARNING (cfun->decl) = 1;
break;
}
}
}
-struct tree_opt_pass pass_warn_function_return =
+struct gimple_opt_pass pass_warn_function_return =
{
+ {
+ GIMPLE_PASS,
NULL, /* name */
NULL, /* gate */
execute_warn_function_return, /* execute */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- 0, /* todo_flags_finish */
- 0 /* letter */
+ 0 /* todo_flags_finish */
+ }
};
/* Emit noreturn warnings. */
return 0;
}
-struct tree_opt_pass pass_warn_function_noreturn =
+struct gimple_opt_pass pass_warn_function_noreturn =
{
+ {
+ GIMPLE_PASS,
NULL, /* name */
NULL, /* gate */
execute_warn_function_noreturn, /* execute */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- 0, /* todo_flags_finish */
- 0 /* letter */
+ 0 /* todo_flags_finish */
+ }
};