/* Control flow functions for trees.
- Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
static void tree_make_forwarder_block (edge);
static void tree_cfg2vcg (FILE *);
static inline void change_bb_for_stmt (tree t, basic_block bb);
+static bool computed_goto_p (const_tree);
/* Flowgraph optimization and cleanup. */
static void tree_merge_blocks (basic_block, basic_block);
static tree find_case_label_for_value (tree, tree);
void
-init_empty_tree_cfg (void)
+init_empty_tree_cfg_for_function (struct function *fn)
{
/* Initialize the basic block array. */
- init_flow ();
- profile_status = PROFILE_ABSENT;
- n_basic_blocks = NUM_FIXED_BLOCKS;
- last_basic_block = NUM_FIXED_BLOCKS;
- basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
- VEC_safe_grow_cleared (basic_block, gc, basic_block_info,
+ init_flow (fn);
+ profile_status_for_function (fn) = PROFILE_ABSENT;
+ n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
+ last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
+ basic_block_info_for_function (fn)
+ = VEC_alloc (basic_block, gc, initial_cfg_capacity);
+ VEC_safe_grow_cleared (basic_block, gc,
+ basic_block_info_for_function (fn),
initial_cfg_capacity);
/* Build a mapping of labels to their associated blocks. */
- label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
- VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
+ label_to_block_map_for_function (fn)
+ = VEC_alloc (basic_block, gc, initial_cfg_capacity);
+ VEC_safe_grow_cleared (basic_block, gc,
+ label_to_block_map_for_function (fn),
initial_cfg_capacity);
- SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
- SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
- ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
- EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
+ SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
+ ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
+ SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
+ EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
+
+ ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
+ = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
+ EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
+ = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
+}
+
+void
+init_empty_tree_cfg (void)
+{
+ init_empty_tree_cfg_for_function (cfun);
}
/*---------------------------------------------------------------------------
return 0;
}
-struct tree_opt_pass pass_build_cfg =
+struct gimple_opt_pass pass_build_cfg =
{
+ {
+ GIMPLE_PASS,
"cfg", /* name */
NULL, /* gate */
execute_build_cfg, /* execute */
PROP_cfg, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_verify_stmts | TODO_cleanup_cfg, /* todo_flags_finish */
- 0 /* letter */
+ TODO_verify_stmts | TODO_cleanup_cfg /* todo_flags_finish */
+ }
};
/* Search the CFG for any computed gotos. If found, factor them to a
break;
case OMP_PARALLEL:
+ case OMP_TASK:
case OMP_FOR:
case OMP_SINGLE:
case OMP_MASTER:
fallthru = false;
break;
+
+ case OMP_ATOMIC_LOAD:
+ case OMP_ATOMIC_STORE:
+ fallthru = true;
+ break;
+
+
case OMP_RETURN:
/* In the case of an OMP_SECTION, the edge will go somewhere
other than the next block. This will be created later. */
switch (cur_region->type)
{
case OMP_FOR:
+	    /* Mark all OMP_FOR and OMP_CONTINUE successor edges as abnormal
+	       to prevent splitting them.  */
+ single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
/* Make the loopback edge. */
- make_edge (bb, single_succ (cur_region->entry), 0);
-
+ make_edge (bb, single_succ (cur_region->entry),
+ EDGE_ABNORMAL);
+
/* Create an edge from OMP_FOR to exit, which corresponds to
the case that the body of the loop is not executed at
all. */
- make_edge (cur_region->entry, bb->next_bb, 0);
- fallthru = true;
+ make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
+ make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
+ fallthru = false;
break;
case OMP_SECTIONS:
else_bb = label_to_block (else_label);
e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
-#ifdef USE_MAPPED_LOCATION
e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
-#else
- e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
-#endif
e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
if (e)
- {
-#ifdef USE_MAPPED_LOCATION
- e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
-#else
- e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
-#endif
- }
+ e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
/* We do not need the gotos anymore. */
COND_EXPR_THEN (entry) = NULL_TREE;
{
tree dest = GOTO_DESTINATION (goto_t);
edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
-#ifdef USE_MAPPED_LOCATION
e->goto_locus = EXPR_LOCATION (goto_t);
-#else
- e->goto_locus = EXPR_LOCUS (goto_t);
-#endif
bsi_remove (&last, true);
return;
}
tree labels = SWITCH_LABELS (stmt);
int old_size = TREE_VEC_LENGTH (labels);
int i, j, new_size = old_size;
- tree default_case = TREE_VEC_ELT (labels, old_size - 1);
- tree default_label;
+ tree default_case = NULL_TREE;
+ tree default_label = NULL_TREE;
/* The default label is always the last case in a switch
- statement after gimplification. */
- default_label = CASE_LABEL (default_case);
+ statement after gimplification if it was not optimized
+ away. */
+ if (!CASE_LOW (TREE_VEC_ELT (labels, old_size - 1))
+ && !CASE_HIGH (TREE_VEC_ELT (labels, old_size - 1)))
+ {
+ default_case = TREE_VEC_ELT (labels, old_size - 1);
+ default_label = CASE_LABEL (default_case);
+ old_size--;
+ }
- /* Look for possible opportunities to merge cases.
- Ignore the last element of the label vector because it
- must be the default case. */
+ /* Look for possible opportunities to merge cases. */
i = 0;
- while (i < old_size - 1)
+ while (i < old_size)
{
tree base_case, base_label, base_high;
base_case = TREE_VEC_ELT (labels, i);
/* Try to merge case labels. Break out when we reach the end
of the label vector or when we cannot merge the next case
label with the current one. */
- while (i < old_size - 1)
+ while (i < old_size)
{
tree merge_case = TREE_VEC_ELT (labels, i);
tree merge_label = CASE_LABEL (merge_case);
}
else
{
- replace_uses_by (def, use);
+ /* If we deal with a PHI for virtual operands, we can simply
+ propagate these without fussing with folding or updating
+ the stmt. */
+ if (!is_gimple_reg (def))
+ {
+ imm_use_iterator iter;
+ use_operand_p use_p;
+ tree stmt;
+
+ FOR_EACH_IMM_USE_STMT (stmt, iter, def)
+ FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
+ SET_USE (use_p, use);
+ }
+ else
+ replace_uses_by (def, use);
remove_phi_node (phi, NULL, true);
}
}
location_t loc = EXPR_LOCATION (stmt);
if (LOCATION_LINE (loc) > 0)
{
- warning (0, "%Hwill never be executed", &loc);
+ warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
return true;
}
}
update_call_expr_flags (tree call)
{
tree decl = get_callee_fndecl (call);
+ int flags;
if (!decl)
return;
- if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
+ flags = call_expr_flags (call);
+ if (flags & (ECF_CONST | ECF_PURE) && !(flags & ECF_LOOPING_CONST_OR_PURE))
TREE_SIDE_EFFECTS (call) = 0;
if (TREE_NOTHROW (decl))
TREE_NOTHROW (call) = 1;
int flags = call_expr_flags (t);
if (flags & ECF_MAY_BE_ALLOCA)
- current_function_calls_alloca = true;
+ cfun->calls_alloca = true;
if (flags & ECF_RETURNS_TWICE)
- current_function_calls_setjmp = true;
+ cfun->calls_setjmp = true;
}
void
clear_special_calls (void)
{
- current_function_calls_alloca = false;
- current_function_calls_setjmp = false;
+ cfun->calls_alloca = false;
+ cfun->calls_setjmp = false;
}
data->last_goto = NULL;
break;
+ case OMP_PARALLEL:
+ case OMP_TASK:
+      /* Make sure the outermost BIND_EXPR in OMP_TASKREG_BODY isn't removed
+	 as useless.  */
+ remove_useless_stmts_1 (&BIND_EXPR_BODY (OMP_TASKREG_BODY (*tp)), data);
+ data->last_goto = NULL;
+ break;
+
+ case OMP_SECTIONS:
+ case OMP_SINGLE:
+ case OMP_SECTION:
+ case OMP_MASTER:
+ case OMP_ORDERED:
+ case OMP_CRITICAL:
+ remove_useless_stmts_1 (&OMP_BODY (*tp), data);
+ data->last_goto = NULL;
+ break;
+
+ case OMP_FOR:
+ remove_useless_stmts_1 (&OMP_FOR_BODY (*tp), data);
+ data->last_goto = NULL;
+ if (OMP_FOR_PRE_BODY (*tp))
+ {
+ remove_useless_stmts_1 (&OMP_FOR_PRE_BODY (*tp), data);
+ data->last_goto = NULL;
+ }
+ break;
+
default:
data->last_goto = NULL;
break;
}
-struct tree_opt_pass pass_remove_useless_stmts =
+struct gimple_opt_pass pass_remove_useless_stmts =
{
+ {
+ GIMPLE_PASS,
"useless", /* name */
NULL, /* gate */
remove_useless_stmts, /* execute */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
- 0 /* letter */
+ TODO_dump_func /* todo_flags_finish */
+ }
};
/* Remove PHI nodes associated with basic block BB and all edges out of BB. */
remove_bb (basic_block bb)
{
block_stmt_iterator i;
-#ifdef USE_MAPPED_LOCATION
source_location loc = UNKNOWN_LOCATION;
-#else
- source_locus loc = 0;
-#endif
if (dump_file)
{
program that are indeed unreachable. */
if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
{
-#ifdef USE_MAPPED_LOCATION
if (EXPR_HAS_LOCATION (stmt))
loc = EXPR_LOCATION (stmt);
-#else
- source_locus t;
- t = EXPR_LOCUS (stmt);
- if (t && LOCATION_LINE (*t) > 0)
- loc = t;
-#endif
}
}
}
block is unreachable. We walk statements backwards in the
loop above, so the last statement we process is the first statement
in the block. */
-#ifdef USE_MAPPED_LOCATION
if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
-#else
- if (loc)
- warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
-#endif
remove_phi_nodes_and_edges_for_unreachable_block (bb);
bb->il.tree = NULL;
{
/* A non-pure/const CALL_EXPR alters flow control if the current
function has nonlocal labels. */
- if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
+ if (TREE_SIDE_EFFECTS (call) && cfun->has_nonlocal_label)
return true;
/* A CALL_EXPR also alters control flow if it does not return. */
/* Return true if T is a computed goto. */
-bool
+static bool
computed_goto_p (const_tree t)
{
return (TREE_CODE (t) == GOTO_EXPR
if (TREE_CODE (t) == WITH_SIZE_EXPR)
t = TREE_OPERAND (t, 0);
if (TREE_CODE (t) == CALL_EXPR)
- return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
+ return TREE_SIDE_EFFECTS (t) && cfun->has_nonlocal_label;
return false;
}
if (uid == -1)
{
unsigned old_len = VEC_length (basic_block, label_to_block_map);
- LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
+ LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
if (old_len <= (unsigned) uid)
{
unsigned new_len = 3 * uid / 2;
static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
- tree var, phi;
+ tree phi;
+ edge_var_map_vector v;
+ edge_var_map *vm;
+ int i;
- if (!PENDING_STMT (old_edge))
+ v = redirect_edge_var_map_vector (old_edge);
+ if (!v)
return;
- for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
- var && phi;
- var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
+ for (i = 0, phi = phi_nodes (new_edge->dest);
+ VEC_iterate (edge_var_map, v, i, vm) && phi;
+ i++, phi = PHI_CHAIN (phi))
{
- tree result = TREE_PURPOSE (var);
- tree arg = TREE_VALUE (var);
+ tree result = redirect_edge_var_map_result (vm);
+ tree arg = redirect_edge_var_map_def (vm);
gcc_assert (result == PHI_RESULT (phi));
add_phi_arg (phi, arg, new_edge);
}
- PENDING_STMT (old_edge) = NULL;
+ redirect_edge_var_map_clear (old_edge);
}
/* Returns the basic block after which the new basic block created
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
tree t = *tp, x;
- bool in_phi = (data != NULL);
if (TYPE_P (t))
*walk_subtrees = 0;
case ADDR_EXPR:
{
- bool old_invariant;
bool old_constant;
bool old_side_effects;
- bool new_invariant;
bool new_constant;
bool new_side_effects;
- /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
- dead PHIs that take the address of something. But if the PHI
- result is dead, the fact that it takes the address of anything
- is irrelevant. Because we can not tell from here if a PHI result
- is dead, we just skip this check for PHIs altogether. This means
- we may be missing "valid" checks, but what can you do?
- This was PR19217. */
- if (in_phi)
- break;
+ gcc_assert (is_gimple_address (t));
- old_invariant = TREE_INVARIANT (t);
old_constant = TREE_CONSTANT (t);
old_side_effects = TREE_SIDE_EFFECTS (t);
recompute_tree_invariant_for_addr_expr (t);
- new_invariant = TREE_INVARIANT (t);
new_side_effects = TREE_SIDE_EFFECTS (t);
new_constant = TREE_CONSTANT (t);
- if (old_invariant != new_invariant)
- {
- error ("invariant not recomputed when ADDR_EXPR changed");
- return t;
- }
-
if (old_constant != new_constant)
{
error ("constant not recomputed when ADDR_EXPR changed");
error ("address taken, but ADDRESSABLE bit not set");
return x;
}
+
break;
}
}
break;
- case NOP_EXPR:
- case CONVERT_EXPR:
+ case NON_LVALUE_EXPR:
+ gcc_unreachable ();
+
+ CASE_CONVERT:
case FIX_TRUNC_EXPR:
case FLOAT_EXPR:
case NEGATE_EXPR:
case ABS_EXPR:
case BIT_NOT_EXPR:
- case NON_LVALUE_EXPR:
case TRUTH_NOT_EXPR:
CHECK_OP (0, "invalid operand to unary operator");
break;
}
else if (TREE_CODE (t) == BIT_FIELD_REF)
{
- CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
- CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
+ if (!host_integerp (TREE_OPERAND (t, 1), 1)
+ || !host_integerp (TREE_OPERAND (t, 2), 1))
+ {
+ error ("invalid position or size operand to BIT_FIELD_REF");
+ return t;
+ }
+ else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
+ && (TYPE_PRECISION (TREE_TYPE (t))
+ != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
+ {
+ error ("integral result type precision does not match "
+ "field size of BIT_FIELD_REF");
+ return t;
+ }
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
+ && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
+ != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
+ {
+ error ("mode precision of non-integral result does not "
+ "match field size of BIT_FIELD_REF");
+ return t;
+ }
}
t = TREE_OPERAND (t, 0);
}
- if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
+ if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
{
error ("invalid reference prefix");
return t;
return false;
}
+/* Return true if TYPE1 is a fixed-point type and if conversions to and
+ from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
+
+static bool
+valid_fixed_convert_types_p (tree type1, tree type2)
+{
+ return (FIXED_POINT_TYPE_P (type1)
+ && (INTEGRAL_TYPE_P (type2)
+ || SCALAR_FLOAT_TYPE_P (type2)
+ || FIXED_POINT_TYPE_P (type2)));
+}
+
/* Verify the GIMPLE expression EXPR. Returns true if there is an
error, otherwise false. */
/* Special codes we cannot handle via their class. */
switch (TREE_CODE (expr))
{
- case NOP_EXPR:
- case CONVERT_EXPR:
+ CASE_CONVERT:
{
tree op = TREE_OPERAND (expr, 0);
if (!is_gimple_val (op))
there is no sign or zero extension involved. */
if (((POINTER_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
|| (POINTER_TYPE_P (TREE_TYPE (op)) && INTEGRAL_TYPE_P (type)))
- && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op)))
+ && (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op))
+		  /* For targets where the precision of sizetype doesn't
+		     match that of pointers, we need the following.  */
+ || type == sizetype || TREE_TYPE (op) == sizetype))
return false;
/* Allow conversion from integer to offset type and vice versa. */
return false;
}
+ case FIXED_CONVERT_EXPR:
+ {
+ tree op = TREE_OPERAND (expr, 0);
+ if (!is_gimple_val (op))
+ {
+ error ("invalid operand in conversion");
+ return true;
+ }
+
+ if (!valid_fixed_convert_types_p (type, TREE_TYPE (op))
+ && !valid_fixed_convert_types_p (TREE_TYPE (op), type))
+ {
+ error ("invalid types in fixed-point conversion");
+ debug_generic_expr (type);
+ debug_generic_expr (TREE_TYPE (op));
+ return true;
+ }
+
+ return false;
+ }
+
case FLOAT_EXPR:
{
tree op = TREE_OPERAND (expr, 0);
case TRUTH_ANDIF_EXPR:
case TRUTH_ORIF_EXPR:
+ gcc_unreachable ();
+
case TRUTH_AND_EXPR:
case TRUTH_OR_EXPR:
case TRUTH_XOR_EXPR:
case CALL_EXPR:
/* FIXME. The C frontend passes unpromoted arguments in case it
didn't see a function declaration before the call. */
- return false;
+ {
+ tree decl = CALL_EXPR_FN (expr);
+
+ if (TREE_CODE (decl) == FUNCTION_DECL
+ && DECL_LOOPING_CONST_OR_PURE_P (decl)
+ && (!DECL_PURE_P (decl))
+ && (!TREE_READONLY (decl)))
+ {
+ error ("invalid pure const state for function");
+ return true;
+ }
+ return false;
+ }
case OBJ_TYPE_REF:
/* FIXME. */
case NOP_EXPR:
case CHANGE_DYNAMIC_TYPE_EXPR:
case ASM_EXPR:
+ case PREDICT_EXPR:
return false;
default:
if (addr)
{
debug_generic_stmt (addr);
+ if (addr != stmt)
+ {
+ inform ("in statement");
+ debug_generic_stmt (stmt);
+ }
return true;
}
are not considered gimple values. */
else if (TREE_CODE (t) != SSA_NAME
&& TREE_CODE (t) != FUNCTION_DECL
- && !is_gimple_val (t))
+ && !is_gimple_min_invariant (t))
{
error ("PHI def is not a GIMPLE value");
debug_generic_stmt (phi);
err |= true;
}
- addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
- if (addr)
- {
- debug_generic_stmt (addr);
- err |= true;
- }
-
addr = walk_tree (&t, verify_node_sharing, visited, NULL);
if (addr)
{
/* Verify that the case labels are sorted. */
prev = TREE_VEC_ELT (vec, 0);
- for (i = 1; i < n - 1; ++i)
+ for (i = 1; i < n; ++i)
{
tree c = TREE_VEC_ELT (vec, i);
if (! CASE_LOW (c))
{
- error ("found default case not at end of case vector");
- err = 1;
+ if (i != n - 1)
+ {
+ error ("found default case not at end of case vector");
+ err = 1;
+ }
continue;
}
if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
}
prev = c;
}
- if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
- {
- error ("no default case found at end of case vector");
- err = 1;
- }
+ /* VRP will remove the default case if it can prove it will
+ never be executed. So do not verify there always exists
+ a default case here. */
FOR_EACH_EDGE (e, ei, bb->succs)
{
adding blocks when the dominator traversal reaches EXIT. This
function silently assumes that ENTRY strictly dominates EXIT. */
-static void
+void
gather_blocks_in_sese_region (basic_block entry, basic_block exit,
VEC(basic_block,heap) **bbs_p)
{
if (SSA_VAR_P (t))
{
new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
- f->unexpanded_var_list
- = tree_cons (NULL_TREE, new_t, f->unexpanded_var_list);
+ f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
}
else
{
/* Marks virtual operands of all statements in the basic block BB for
   renaming.  */
-static void
-mark_virtual_ops_in_region (VEC (basic_block,heap) *bbs)
+void
+mark_virtual_ops_in_bb (basic_block bb)
{
tree phi;
block_stmt_iterator bsi;
+
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ mark_virtual_ops_for_renaming (phi);
+
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ mark_virtual_ops_for_renaming (bsi_stmt (bsi));
+}
+
+/* Marks virtual operands of all statements in basic blocks BBS for
+ renaming. */
+
+static void
+mark_virtual_ops_in_region (VEC (basic_block,heap) *bbs)
+{
basic_block bb;
unsigned i;
for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
- {
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- mark_virtual_ops_for_renaming (phi);
-
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- mark_virtual_ops_for_renaming (bsi_stmt (bsi));
- }
+ mark_virtual_ops_in_bb (bb);
}
/* Move basic block BB from function CFUN to function DEST_FN. The
gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
- if (uid >= dest_cfun->last_label_uid)
- dest_cfun->last_label_uid = uid + 1;
+ if (uid >= dest_cfun->cfg->last_label_uid)
+ dest_cfun->cfg->last_label_uid = uid + 1;
}
else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
TREE_OPERAND (stmt, 0) =
m->base.from = decl;
m->to = create_artificial_label ();
LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
+ if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
+ cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
gcc_assert (*slot == NULL);
arg = DECL_ARGUMENTS (fn);
while (arg)
{
+ print_generic_expr (file, TREE_TYPE (arg), dump_flags);
+ fprintf (file, " ");
print_generic_expr (file, arg, dump_flags);
+ if (flags & TDF_VERBOSE)
+ print_node (file, "", arg, 4);
if (TREE_CHAIN (arg))
fprintf (file, ", ");
arg = TREE_CHAIN (arg);
}
fprintf (file, ")\n");
+ if (flags & TDF_VERBOSE)
+ print_node (file, "", fn, 2);
+
dsf = DECL_STRUCT_FUNCTION (fn);
if (dsf && (flags & TDF_DETAILS))
dump_eh_tree (file, dsf);
/* When GIMPLE is lowered, the variables are no longer available in
BIND_EXPRs, so display them separately. */
- if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
+ if (cfun && cfun->decl == fn && cfun->local_decls)
{
ignore_topmost_bind = true;
fprintf (file, "{\n");
- for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
+ for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
{
var = TREE_VALUE (vars);
print_generic_decl (file, var, flags);
+ if (flags & TDF_VERBOSE)
+ print_node (file, "", var, 4);
fprintf (file, "\n");
any_var = true;
}
-/* Pretty print of the loops intermediate representation. */
-static void print_loop (FILE *, struct loop *, int);
-static void print_pred_bbs (FILE *, basic_block bb);
-static void print_succ_bbs (FILE *, basic_block bb);
-
-
/* Print on FILE the indexes for the predecessors of basic_block BB. */
static void
fprintf (file, "bb_%d ", e->dest->index);
}
+/* Print to FILE the basic block BB following the VERBOSITY level. */
+
+void
+print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
+{
+ char *s_indent = (char *) alloca ((size_t) indent + 1);
+ memset ((void *) s_indent, ' ', (size_t) indent);
+ s_indent[indent] = '\0';
+
+ /* Print basic_block's header. */
+ if (verbosity >= 2)
+ {
+ fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
+ print_pred_bbs (file, bb);
+ fprintf (file, "}, succs = {");
+ print_succ_bbs (file, bb);
+ fprintf (file, "})\n");
+ }
+
+ /* Print basic_block's body. */
+ if (verbosity >= 3)
+ {
+ fprintf (file, "%s {\n", s_indent);
+ tree_dump_bb (bb, file, indent + 4);
+ fprintf (file, "%s }\n", s_indent);
+ }
+}
+
+static void print_loop_and_siblings (FILE *, struct loop *, int, int);
-/* Pretty print LOOP on FILE, indented INDENT spaces. */
+/* Pretty print LOOP on FILE, indented INDENT spaces. Following
+ VERBOSITY level this outputs the contents of the loop, or just its
+ structure. */
static void
-print_loop (FILE *file, struct loop *loop, int indent)
+print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
{
char *s_indent;
basic_block bb;
memset ((void *) s_indent, ' ', (size_t) indent);
s_indent[indent] = '\0';
- /* Print the loop's header. */
- fprintf (file, "%sloop_%d\n", s_indent, loop->num);
+ /* Print loop's header. */
+ fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
+ loop->num, loop->header->index, loop->latch->index);
+ fprintf (file, ", niter = ");
+ print_generic_expr (file, loop->nb_iterations, 0);
- /* Print the loop's body. */
- fprintf (file, "%s{\n", s_indent);
- FOR_EACH_BB (bb)
- if (bb->loop_father == loop)
- {
- /* Print the basic_block's header. */
- fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
- print_pred_bbs (file, bb);
- fprintf (file, "}, succs = {");
- print_succ_bbs (file, bb);
- fprintf (file, "})\n");
-
- /* Print the basic_block's body. */
- fprintf (file, "%s {\n", s_indent);
- tree_dump_bb (bb, file, indent + 4);
- fprintf (file, "%s }\n", s_indent);
- }
+ if (loop->any_upper_bound)
+ {
+ fprintf (file, ", upper_bound = ");
+ dump_double_int (file, loop->nb_iterations_upper_bound, true);
+ }
- print_loop (file, loop->inner, indent + 2);
- fprintf (file, "%s}\n", s_indent);
- print_loop (file, loop->next, indent);
+ if (loop->any_estimate)
+ {
+ fprintf (file, ", estimate = ");
+ dump_double_int (file, loop->nb_iterations_estimate, true);
+ }
+ fprintf (file, ")\n");
+
+ /* Print loop's body. */
+ if (verbosity >= 1)
+ {
+ fprintf (file, "%s{\n", s_indent);
+ FOR_EACH_BB (bb)
+ if (bb->loop_father == loop)
+ print_loops_bb (file, bb, indent, verbosity);
+
+ print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
+ fprintf (file, "%s}\n", s_indent);
+ }
}
+/* Print the LOOP and its sibling loops on FILE, indented INDENT
+ spaces. Following VERBOSITY level this outputs the contents of the
+ loop, or just its structure. */
+
+static void
+print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
+{
+ if (loop == NULL)
+ return;
+
+ print_loop (file, loop, indent, verbosity);
+ print_loop_and_siblings (file, loop->next, indent, verbosity);
+}
/* Follow a CFG edge from the entry point of the program, and on entry
of a loop, pretty print the loop structure on FILE. */
void
-print_loop_ir (FILE *file)
+print_loops (FILE *file, int verbosity)
{
basic_block bb;
bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
if (bb && bb->loop_father)
- print_loop (file, bb->loop_father, 0);
+ print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
}
-/* Debugging loops structure at tree level. */
+/* Debugging loops structure at tree level, at some VERBOSITY level. */
void
-debug_loop_ir (void)
+debug_loops (int verbosity)
{
- print_loop_ir (stderr);
+ print_loops (stderr, verbosity);
}
+/* Print on stderr the code of LOOP, at some VERBOSITY level. */
+
+void
+debug_loop (struct loop *loop, int verbosity)
+{
+ print_loop (stderr, loop, 0, verbosity);
+}
+
+/* Print on stderr the code of loop number NUM, at some VERBOSITY
+ level. */
+
+void
+debug_loop_num (unsigned num, int verbosity)
+{
+ debug_loop (get_loop (num), verbosity);
+}
/* Return true if BB ends with a call, possibly followed by some
instructions that must stay with the call. Return false,
static bool
need_fake_edge_p (tree t)
{
- tree call;
+ tree call, fndecl = NULL_TREE;
+ int call_flags;
/* NORETURN and LONGJMP calls already have an edge to exit.
CONST and PURE calls do not need one.
the counter incrementation code from -fprofile-arcs
leads to different results from -fbranch-probabilities. */
call = get_call_expr_in (t);
- if (call
- && !(call_expr_flags (call) & ECF_NORETURN))
+ if (call)
+ {
+ fndecl = get_callee_fndecl (call);
+ call_flags = call_expr_flags (call);
+ }
+
+ if (call && fndecl && DECL_BUILT_IN (fndecl)
+ && (call_flags & ECF_NOTHROW)
+ && !(call_flags & ECF_NORETURN)
+ && !(call_flags & ECF_RETURNS_TWICE))
+ return false;
+
+ if (call && !(call_flags & ECF_NORETURN))
return true;
if (TREE_CODE (t) == ASM_EXPR
{
bool changed = tree_purge_dead_eh_edges (bb);
- if (current_function_has_nonlocal_label)
+ if (cfun->has_nonlocal_label)
{
tree stmt = last_stmt (bb);
edge_iterator ei;
return 0;
}
-struct tree_opt_pass pass_split_crit_edges =
+struct gimple_opt_pass pass_split_crit_edges =
{
+ {
+ GIMPLE_PASS,
"crited", /* name */
NULL, /* gate */
split_critical_edges, /* execute */
PROP_no_crit_edges, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
- 0 /* letter */
+ TODO_dump_func /* todo_flags_finish */
+ }
};
\f
static unsigned int
execute_warn_function_return (void)
{
-#ifdef USE_MAPPED_LOCATION
source_location location;
-#else
- location_t *locus;
-#endif
tree last;
edge e;
edge_iterator ei;
if (TREE_THIS_VOLATILE (cfun->decl)
&& EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
{
-#ifdef USE_MAPPED_LOCATION
location = UNKNOWN_LOCATION;
-#else
- locus = NULL;
-#endif
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
{
last = last_stmt (e->src);
if (TREE_CODE (last) == RETURN_EXPR
-#ifdef USE_MAPPED_LOCATION
&& (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
-#else
- && (locus = EXPR_LOCUS (last)) != NULL)
-#endif
break;
}
-#ifdef USE_MAPPED_LOCATION
if (location == UNKNOWN_LOCATION)
location = cfun->function_end_locus;
warning (0, "%H%<noreturn%> function does return", &location);
-#else
- if (!locus)
- locus = &cfun->function_end_locus;
- warning (0, "%H%<noreturn%> function does return", locus);
-#endif
}
/* If we see "return;" in some basic block, then we do reach the end
&& TREE_OPERAND (last, 0) == NULL
&& !TREE_NO_WARNING (last))
{
-#ifdef USE_MAPPED_LOCATION
location = EXPR_LOCATION (last);
if (location == UNKNOWN_LOCATION)
location = cfun->function_end_locus;
- warning (0, "%Hcontrol reaches end of non-void function", &location);
-#else
- locus = EXPR_LOCUS (last);
- if (!locus)
- locus = &cfun->function_end_locus;
- warning (0, "%Hcontrol reaches end of non-void function", locus);
-#endif
+ warning (OPT_Wreturn_type, "%Hcontrol reaches end of non-void function", &location);
TREE_NO_WARNING (cfun->decl) = 1;
break;
}
}
}
-struct tree_opt_pass pass_warn_function_return =
+struct gimple_opt_pass pass_warn_function_return =
{
+ {
+ GIMPLE_PASS,
NULL, /* name */
NULL, /* gate */
execute_warn_function_return, /* execute */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- 0, /* todo_flags_finish */
- 0 /* letter */
+ 0 /* todo_flags_finish */
+ }
};
/* Emit noreturn warnings. */
if (warn_missing_noreturn
&& !TREE_THIS_VOLATILE (cfun->decl)
&& EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
- && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
+ && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
"for attribute %<noreturn%>",
cfun->decl);
return 0;
}
-struct tree_opt_pass pass_warn_function_noreturn =
+struct gimple_opt_pass pass_warn_function_noreturn =
{
+ {
+ GIMPLE_PASS,
NULL, /* name */
NULL, /* gate */
execute_warn_function_noreturn, /* execute */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- 0, /* todo_flags_finish */
- 0 /* letter */
+ 0 /* todo_flags_finish */
+ }
};