#include "cfgloop.h"
#include "cfglayout.h"
#include "hashtab.h"
+#include "tree-ssa-propagate.h"
/* This file contains functions for building the Control Flow Graph (CFG)
for a function tree. */
static bool phi_alternatives_equal (basic_block, edge, edge);
static bool cleanup_forwarder_blocks (void);
+/* Build an empty control flow graph for the current function:
+   zero basic blocks, ENTRY and EXIT chained directly together,
+   and freshly initialized block/label varrays sized to
+   initial_cfg_capacity.  Callers populate blocks afterwards.  */
+void
+init_empty_tree_cfg (void)
+{
+  /* Initialize the basic block array.  */
+  init_flow ();
+  profile_status = PROFILE_ABSENT;
+  n_basic_blocks = 0;
+  last_basic_block = 0;
+  VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
+
+  /* Build a mapping of labels to their associated blocks.  */
+  VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
+		  "label to block map");
+
+  /* With no real blocks yet, ENTRY and EXIT are adjacent.  */
+  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
+  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
+
+  create_block_annotation (ENTRY_BLOCK_PTR);
+  create_block_annotation (EXIT_BLOCK_PTR);
+}
/*---------------------------------------------------------------------------
Create basic blocks
/* Register specific tree functions. */
tree_register_cfg_hooks ();
- /* Initialize the basic block array. */
- init_flow ();
- profile_status = PROFILE_ABSENT;
- n_basic_blocks = 0;
- last_basic_block = 0;
- VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
- /* Build a mapping of labels to their associated blocks. */
- VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
- "label to block map");
-
- ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
- EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
+ init_empty_tree_cfg ();
found_computed_goto = 0;
make_blocks (*tp);
if (n_basic_blocks == 0)
create_empty_bb (ENTRY_BLOCK_PTR);
- create_block_annotation (ENTRY_BLOCK_PTR);
- create_block_annotation (EXIT_BLOCK_PTR);
-
/* Adjust the size of the array. */
VARRAY_GROW (basic_block_info, n_basic_blocks);
}
}
+#ifdef ENABLE_CHECKING
+ verify_stmts ();
+#endif
+
/* Dump a textual representation of the flowgraph. */
if (dump_file)
dump_tree_cfg (dump_file, dump_flags);
/* Fold COND_EXPR_COND of each COND_EXPR. */
-static void
+void
fold_cond_expr_cond (void)
{
basic_block bb;
gcc_assert (last);
switch (TREE_CODE (last))
{
+ case RESX_EXPR:
+ break;
case CALL_EXPR:
/* If this function receives a nonlocal goto, then we need to
make edges from this call site to all the nonlocal goto
bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
uid = LABEL_DECL_UID (dest);
}
+ if (VARRAY_SIZE (ifun->cfg->x_label_to_block_map) <= (unsigned int)uid)
+ return NULL;
return VARRAY_BB (ifun->cfg->x_label_to_block_map, uid);
}
{
tree stmt;
block_stmt_iterator bsi;
+ tree phi;
if (!single_succ_p (a))
return false;
&& DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
return false;
- /* There may be no PHI nodes at the start of B. */
- if (phi_nodes (b))
- return false;
+ /* It must be possible to eliminate all phi nodes in B. If ssa form
+ is not up-to-date, we cannot eliminate any phis. */
+ phi = phi_nodes (b);
+ if (phi)
+ {
+ if (need_ssa_update_p ())
+ return false;
+
+ for (; phi; phi = PHI_CHAIN (phi))
+ if (!is_gimple_reg (PHI_RESULT (phi))
+ && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
+ return false;
+ }
/* Do not remove user labels. */
for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
return true;
}
+/* Replaces all uses of NAME by VAL.  NAME must be an SSA name; after
+   this call every statement and PHI argument that referenced NAME
+   refers to VAL instead, and affected non-PHI statements are folded
+   and rescanned.  */
+
+void
+replace_uses_by (tree name, tree val)
+{
+  imm_use_iterator imm_iter;
+  use_operand_p use;
+  tree stmt;
+  edge e;
+  unsigned i;
+  VEC(tree,heap) *stmts = VEC_alloc (tree, heap, 20);
+
+  /* First pass: rewrite every use in place, deferring statement
+     updates (see the comment before the second loop).  */
+  FOR_EACH_IMM_USE_SAFE (use, imm_iter, name)
+    {
+      stmt = USE_STMT (use);
+
+      SET_USE (use, val);
+
+      if (TREE_CODE (stmt) == PHI_NODE)
+	{
+	  e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
+	  if (e->flags & EDGE_ABNORMAL)
+	    {
+	      /* This can only occur for virtual operands, since
+		 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
+		 would prevent replacement.  */
+	      gcc_assert (!is_gimple_reg (name));
+	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
+	    }
+	}
+      else
+	VEC_safe_push (tree, heap, stmts, stmt);
+    }
+ 
+  /* We do not update the statements in the loop above.  Consider
+     x = w * w;
+
+     If we performed the update in the first loop, the statement
+     would be rescanned after first occurrence of w is replaced,
+     the new uses would be placed to the beginning of the list,
+     and we would never process them.  */
+  for (i = 0; VEC_iterate (tree, stmts, i, stmt); i++)
+    {
+      tree rhs;
+
+      fold_stmt_inplace (stmt);
+
+      /* Address expressions may have changed invariant/constant
+	 status after substitution; recompute those flags.  */
+      rhs = get_rhs (stmt);
+      if (TREE_CODE (rhs) == ADDR_EXPR)
+	recompute_tree_invarant_for_addr_expr (rhs);
+
+      update_stmt (stmt);
+    }
+
+  VEC_free (tree, heap, stmts);
+}
/* Merge block B into block A. */
{
block_stmt_iterator bsi;
tree_stmt_iterator last;
+ tree phi;
if (dump_file)
fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
+ /* Remove the phi nodes. */
+ bsi = bsi_last (a);
+ for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
+ {
+ tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
+ tree copy;
+
+ if (!may_propagate_copy (def, use)
+ /* Propagating pointers might cause the set of vops for statements
+ to be changed, and thus require ssa form update. */
+ || (is_gimple_reg (def)
+ && POINTER_TYPE_P (TREE_TYPE (def))))
+ {
+ gcc_assert (is_gimple_reg (def));
+
+ /* Note that just emitting the copies is fine -- there is no problem
+ with ordering of phi nodes. This is because A is the single
+ predecessor of B, therefore results of the phi nodes cannot
+ appear as arguments of the phi nodes. */
+ copy = build2 (MODIFY_EXPR, void_type_node, def, use);
+ bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
+ SET_PHI_RESULT (phi, NULL_TREE);
+ SSA_NAME_DEF_STMT (def) = copy;
+ }
+ else
+ replace_uses_by (def, use);
+ remove_phi_node (phi, NULL);
+ }
+
/* Ensure that B follows A. */
move_block_after (b, a);
{
release_defs (stmt);
- set_bb_for_stmt (stmt, NULL);
bsi_remove (&i);
}
loop above, so the last statement we process is the first statement
in the block. */
#ifdef USE_MAPPED_LOCATION
- if (warn_notreached && loc > BUILTINS_LOCATION)
- warning (0, "%Hwill never be executed", &loc);
+ if (loc > BUILTINS_LOCATION)
+ warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
#else
- if (warn_notreached && loc)
- warning (0, "%Hwill never be executed", loc);
+ if (loc)
+ warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
#endif
remove_phi_nodes_and_edges_for_unreachable_block (bb);
add_stmt_to_eh_region (stmt, eh_region);
}
+ delink_stmt_imm_use (orig_stmt);
*bsi_stmt_ptr (*bsi) = stmt;
mark_stmt_modified (stmt);
update_modified_stmts (stmt);
break;
case ADDR_EXPR:
- /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
- dead PHIs that take the address of something. But if the PHI
- result is dead, the fact that it takes the address of anything
- is irrelevant. Because we can not tell from here if a PHI result
- is dead, we just skip this check for PHIs altogether. This means
- we may be missing "valid" checks, but what can you do?
- This was PR19217. */
- if (in_phi)
- break;
+ {
+ bool old_invariant;
+ bool old_constant;
+ bool old_side_effects;
+ bool new_invariant;
+ bool new_constant;
+ bool new_side_effects;
+
+ /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
+ dead PHIs that take the address of something. But if the PHI
+ result is dead, the fact that it takes the address of anything
+ is irrelevant. Because we can not tell from here if a PHI result
+ is dead, we just skip this check for PHIs altogether. This means
+ we may be missing "valid" checks, but what can you do?
+ This was PR19217. */
+ if (in_phi)
+ break;
- /* Skip any references (they will be checked when we recurse down the
- tree) and ensure that any variable used as a prefix is marked
- addressable. */
- for (x = TREE_OPERAND (t, 0);
- handled_component_p (x);
- x = TREE_OPERAND (x, 0))
- ;
-
- if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
- return NULL;
- if (!TREE_ADDRESSABLE (x))
- {
- error ("address taken, but ADDRESSABLE bit not set");
- return x;
- }
- break;
+ old_invariant = TREE_INVARIANT (t);
+ old_constant = TREE_CONSTANT (t);
+ old_side_effects = TREE_SIDE_EFFECTS (t);
+
+ recompute_tree_invarant_for_addr_expr (t);
+ new_invariant = TREE_INVARIANT (t);
+ new_side_effects = TREE_SIDE_EFFECTS (t);
+ new_constant = TREE_CONSTANT (t);
+
+ if (old_invariant != new_invariant)
+ {
+ error ("invariant not recomputed when ADDR_EXPR changed");
+ return t;
+ }
+
+ if (old_constant != new_constant)
+ {
+ error ("constant not recomputed when ADDR_EXPR changed");
+ return t;
+ }
+ if (old_side_effects != new_side_effects)
+ {
+ error ("side effects not recomputed when ADDR_EXPR changed");
+ return t;
+ }
+
+ /* Skip any references (they will be checked when we recurse down the
+ tree) and ensure that any variable used as a prefix is marked
+ addressable. */
+ for (x = TREE_OPERAND (t, 0);
+ handled_component_p (x);
+ x = TREE_OPERAND (x, 0))
+ ;
+
+ if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
+ return NULL;
+ if (!TREE_ADDRESSABLE (x))
+ {
+ error ("address taken, but ADDRESSABLE bit not set");
+ return x;
+ }
+ break;
+ }
case COND_EXPR:
x = COND_EXPR_COND (t);
stmt = bsi_stmt (bsi);
+ err |= verify_eh_edges (stmt);
+
if (is_ctrl_stmt (stmt))
{
FOR_EACH_EDGE (e, ei, bb->succs)
/* Now walk through the statements backward. We can ignore labels,
anything else means this is not a forwarder block. */
- for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
{
tree stmt = bsi_stmt (bsi);
def_operand_p def_p;
ssa_op_iter op_iter;
tree stmt, copy;
+ int region;
stmt = bsi_stmt (bsi);
if (TREE_CODE (stmt) == LABEL_EXPR)
copy = unshare_expr (stmt);
bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
copy_virtual_operands (copy, stmt);
+ region = lookup_stmt_eh_region (stmt);
+ if (region >= 0)
+ add_stmt_to_eh_region (copy, region);
/* Create new names for all the definitions created by COPY and
add replacement mappings for each new name. */
edge exit_copy;
basic_block *doms;
edge redirected;
+ int total_freq, entry_freq;
if (!can_copy_bbs_p (region, n_region))
return false;
gcc_assert (!need_ssa_update_p ());
- /* Record blocks outside the region that are duplicated by something
+ /* Record blocks outside the region that are dominated by something
inside. */
doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
+ total_freq = entry->dest->frequency;
+ entry_freq = EDGE_FREQUENCY (entry);
+ /* Fix up corner cases, to avoid division by zero or creation of negative
+ frequencies. */
+ if (total_freq == 0)
+ total_freq = 1;
+ else if (entry_freq > total_freq)
+ entry_freq = total_freq;
+
copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop);
+ scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
+ total_freq);
+ scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
if (copying_header)
{
}
fprintf (file, ")\n");
+ if (flags & TDF_DETAILS)
+ dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
if (flags & TDF_RAW)
{
dump_node (fn, TDF_SLIM | flags, file);
basic_block new_head, edge e)
{
tree phi1, phi2;
+ edge e2 = find_edge (new_head, second);
+
+ /* Because NEW_HEAD has been created by splitting SECOND's incoming
+ edge, we should always have an edge from NEW_HEAD to SECOND. */
+ gcc_assert (e2 != NULL);
/* Browse all 'second' basic block phi nodes and add phi args to
edge 'e' for 'first' head. PHI args are always in correct order. */
phi2 && phi1;
phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
{
- edge e2 = find_edge (new_head, second);
-
- if (e2)
- {
- tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
- add_phi_arg (phi1, def, e);
- }
+ tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
+ add_phi_arg (phi1, def, e);
}
}
edge e;
edge_iterator ei;
- if (warn_missing_noreturn
- && !TREE_THIS_VOLATILE (cfun->decl)
- && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
- && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
- warning (0, "%Jfunction might be possible candidate for "
- "attribute %<noreturn%>",
- cfun->decl);
-
/* If we have a path to EXIT, then we do return. */
if (TREE_THIS_VOLATILE (cfun->decl)
&& EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
0, /* todo_flags_finish */
0 /* letter */
};
+
+/* Emit noreturn warnings.  Warn (under -Wmissing-noreturn) when a
+   function has no path to its exit block, is not already declared
+   noreturn (TREE_THIS_VOLATILE), and the language hook does not
+   exempt it.  */
+
+static void
+execute_warn_function_noreturn (void)
+{
+  if (warn_missing_noreturn
+      && !TREE_THIS_VOLATILE (cfun->decl)
+      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
+      && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
+    warning (0, "%Jfunction might be possible candidate for "
+	     "attribute %<noreturn%>",
+	     cfun->decl);
+}
+
+/* Pass descriptor: runs execute_warn_function_noreturn once the CFG
+   is available (PROP_cfg required).  Anonymous (NULL name), no gate,
+   no timevar, and no TODO flags.  */
+struct tree_opt_pass pass_warn_function_noreturn =
+{
+  NULL,					/* name */
+  NULL,					/* gate */
+  execute_warn_function_noreturn,	/* execute */
+  NULL,					/* sub */
+  NULL,					/* next */
+  0,					/* static_pass_number */
+  0,					/* tv_id */
+  PROP_cfg,				/* properties_required */
+  0,					/* properties_provided */
+  0,					/* properties_destroyed */
+  0,					/* todo_flags_start */
+  0,					/* todo_flags_finish */
+  0					/* letter */
+};