/* Miscellaneous SSA utility functions.
- Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
+ Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009
+ Free Software Foundation, Inc.
This file is part of GCC.
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
+#include "target.h"
#include "ggc.h"
#include "langhooks.h"
#include "hard-reg-set.h"
#include "bitmap.h"
#include "pointer-set.h"
#include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
#include "tree-inline.h"
#include "varray.h"
#include "timevar.h"
-/* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
+/* Add a mapping with PHI RESULT and PHI DEF associated with edge E,
+ along with the PHI argument's source location LOCUS. */
void
-redirect_edge_var_map_add (edge e, tree result, tree def)
+redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
{
void **slot;
edge_var_map_vector old_head, head;
edge_var_maps = pointer_map_create ();
slot = pointer_map_insert (edge_var_maps, e);
- old_head = head = *slot;
+ old_head = head = (edge_var_map_vector) *slot;
if (!head)
{
head = VEC_alloc (edge_var_map, heap, 5);
}
new_node.def = def;
new_node.result = result;
+ new_node.locus = locus;
VEC_safe_push (edge_var_map, heap, head, &new_node);
if (old_head != head)
if (slot)
{
- head = *slot;
+ head = (edge_var_map_vector) *slot;
VEC_free (edge_var_map, heap, head);
*slot = NULL;
}
void
redirect_edge_var_map_dup (edge newe, edge olde)
{
- void **new_slot, **old_slot; edge_var_map_vector head;
+ void **new_slot, **old_slot;
+ edge_var_map_vector head;
if (!edge_var_maps)
return;
old_slot = pointer_map_contains (edge_var_maps, olde);
if (!old_slot)
return;
- head = *old_slot;
+ head = (edge_var_map_vector) *old_slot;
if (head)
*new_slot = VEC_copy (edge_var_map, heap, head);
}
-/* Return the varable mappings for a given edge. If there is none, return
+/* Return the variable mappings for a given edge. If there is none, return
NULL. */
edge_var_map_vector
return (edge_var_map_vector) *slot;
}
+/* Used by redirect_edge_var_map_destroy to free all memory. */
+
+static bool
+free_var_map_entry (const void *key ATTRIBUTE_UNUSED,
+ void **value,
+ void *data ATTRIBUTE_UNUSED)
+{
+ edge_var_map_vector head = (edge_var_map_vector) *value;
+ VEC_free (edge_var_map, heap, head);
+ return true;
+}
/* Clear the edge variable mappings. */
{
if (edge_var_maps)
{
+ pointer_map_traverse (edge_var_maps, free_var_map_entry, NULL);
pointer_map_destroy (edge_var_maps);
edge_var_maps = NULL;
}
edge
ssa_redirect_edge (edge e, basic_block dest)
{
- tree phi;
+ gimple_stmt_iterator gsi;
+ gimple phi;
redirect_edge_var_map_clear (e);
/* Remove the appropriate PHI arguments in E's destination block. */
- for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
+ for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
- tree def = PHI_ARG_DEF (phi, e->dest_idx);
+ tree def;
+ source_location locus;
+
+ phi = gsi_stmt (gsi);
+ def = gimple_phi_arg_def (phi, e->dest_idx);
+ locus = gimple_phi_arg_location (phi, e->dest_idx);
if (def == NULL_TREE)
continue;
- redirect_edge_var_map_add (e, PHI_RESULT (phi), def);
+ redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
}
e = redirect_edge_succ_nodup (e, dest);
return e;
}
+
/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
E->dest. */
void
flush_pending_stmts (edge e)
{
- tree phi;
+ gimple phi;
edge_var_map_vector v;
edge_var_map *vm;
int i;
+ gimple_stmt_iterator gsi;
v = redirect_edge_var_map_vector (e);
if (!v)
return;
- for (phi = phi_nodes (e->dest), i = 0;
- phi && VEC_iterate (edge_var_map, v, i, vm);
- phi = PHI_CHAIN (phi), i++)
+ for (gsi = gsi_start_phis (e->dest), i = 0;
+ !gsi_end_p (gsi) && VEC_iterate (edge_var_map, v, i, vm);
+ gsi_next (&gsi), i++)
{
- tree def = redirect_edge_var_map_def (vm);
- add_phi_arg (phi, def, e);
+ tree def;
+
+ phi = gsi_stmt (gsi);
+ def = redirect_edge_var_map_def (vm);
+ add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
}
redirect_edge_var_map_clear (e);
}
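+
+/* A hedged usage sketch (caller code assumed, not part of this
+   patch): the variable maps queued by ssa_redirect_edge above are
+   normally replayed once the CFG edge has been redirected, e.g.
+
+     e = redirect_edge_and_branch (e, dest);
+     flush_pending_stmts (e);
+
+   where DEST is the hypothetical new destination block.  */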
+/* Given a tree for an expression for which we might want to emit
+ locations or values in debug information (generally a variable, but
+ we might deal with other kinds of trees in the future), return the
+ tree that should be used as the variable of a DEBUG_BIND STMT or
+ VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
+
+tree
+target_for_debug_bind (tree var)
+{
+ if (!MAY_HAVE_DEBUG_STMTS)
+ return NULL_TREE;
+
+ if (TREE_CODE (var) != VAR_DECL
+ && TREE_CODE (var) != PARM_DECL)
+ return NULL_TREE;
+
+ if (DECL_HAS_VALUE_EXPR_P (var))
+ return target_for_debug_bind (DECL_VALUE_EXPR (var));
+
+ if (DECL_IGNORED_P (var))
+ return NULL_TREE;
+
+ if (!is_gimple_reg (var))
+ return NULL_TREE;
+
+ return var;
+}
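+
+/* A minimal sketch (assumed caller context; DVAR, VALUE and GSI are
+   illustrative) of how the helper above is meant to be used: map VAR
+   first, and only emit a debug bind when a tracked target comes back.
+
+     tree dvar = target_for_debug_bind (var);
+     if (dvar)
+       gsi_insert_before (&gsi,
+                          gimple_build_debug_bind (dvar, value, stmt),
+                          GSI_SAME_STMT);  */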
+
+/* Called via walk_tree, look for SSA_NAMEs that have already been
+ released. */
+
+static tree
+find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
+{
+ struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
+
+ if (wi->is_lhs)
+ return NULL_TREE;
+
+ if (TREE_CODE (*tp) == SSA_NAME)
+ {
+ if (SSA_NAME_IN_FREE_LIST (*tp))
+ return *tp;
+
+ *walk_subtrees = 0;
+ }
+ else if (IS_TYPE_OR_DECL_P (*tp))
+ *walk_subtrees = 0;
+
+ return NULL_TREE;
+}
+
+/* Given a VAR whose definition STMT is to be moved to the iterator
+ position TOGSIP in the TOBB basic block, verify whether we're
+ moving it across any of the debug statements that use it, and
+ adjust them as needed. If TOBB is NULL, then the definition is
+ understood as being removed, and TOGSIP is unused. */
+
+void
+propagate_var_def_into_debug_stmts (tree var,
+ basic_block tobb,
+ const gimple_stmt_iterator *togsip)
+{
+ imm_use_iterator imm_iter;
+ gimple stmt;
+ use_operand_p use_p;
+ tree value = NULL;
+ bool no_value = false;
+
+ if (!MAY_HAVE_DEBUG_STMTS)
+ return;
+
+ FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
+ {
+ basic_block bb;
+ gimple_stmt_iterator si;
+
+ if (!is_gimple_debug (stmt))
+ continue;
+
+ if (tobb)
+ {
+ bb = gimple_bb (stmt);
+
+ if (bb != tobb)
+ {
+ gcc_assert (dom_info_available_p (CDI_DOMINATORS));
+ if (dominated_by_p (CDI_DOMINATORS, bb, tobb))
+ continue;
+ }
+ else
+ {
+ si = *togsip;
+
+ if (gsi_end_p (si))
+ continue;
+
+ do
+ {
+ gsi_prev (&si);
+ if (gsi_end_p (si))
+ break;
+ }
+ while (gsi_stmt (si) != stmt);
+
+ if (gsi_end_p (si))
+ continue;
+ }
+ }
+
+ /* Here we compute (lazily) the value assigned to VAR, but we
+ remember if we tried before and failed, so that we don't try
+ again. */
+ if (!value && !no_value)
+ {
+ gimple def_stmt = SSA_NAME_DEF_STMT (var);
+
+ if (is_gimple_assign (def_stmt))
+ {
+ if (!dom_info_available_p (CDI_DOMINATORS))
+ {
+ struct walk_stmt_info wi;
+
+ memset (&wi, 0, sizeof (wi));
+
+ /* When removing blocks without following reverse
+ dominance order, we may sometimes encounter SSA_NAMEs
+ that have already been released, referenced in other
+ SSA_DEFs that we're about to release. Consider:
+
+ <bb X>:
+ v_1 = foo;
+
+ <bb Y>:
+ w_2 = v_1 + bar;
+ # DEBUG w => w_2
+
+ If we deleted BB X first, propagating the value of
+ w_2 won't do us any good. It's too late to recover
+ the original definition of v_1: when it was
+ deleted, it was only referenced in other DEFs, it
+ couldn't possibly know it should have been retained,
+ and propagating every single DEF just in case it
+ might have to be propagated into a DEBUG STMT would
+ probably be too wasteful.
+
+ When dominator information is not readily
+ available, we check for and accept some loss of
+ debug information. But if it is available,
+ there's no excuse for us to remove blocks in the
+ wrong order, so we don't even check for dead SSA
+ NAMEs. SSA verification shall catch any
+ errors. */
+ if (!walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
+ no_value = true;
+ }
+
+ if (!no_value)
+ value = gimple_assign_rhs_to_tree (def_stmt);
+ }
+
+ if (!value)
+ no_value = true;
+ }
+
+ if (no_value)
+ gimple_debug_bind_reset_value (stmt);
+ else
+ FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
+ SET_USE (use_p, unshare_expr (value));
+
+ update_stmt (stmt);
+ }
+}
+
+
+/* Given a STMT to be moved to the iterator position TOGSIP in the
+ TOBB basic block, verify whether we're moving it across any of the
+ debug statements that use it. If TOBB is NULL, then the definition
+ is understood as being removed, and TOGSIP is unused. */
+
+void
+propagate_defs_into_debug_stmts (gimple def, basic_block tobb,
+ const gimple_stmt_iterator *togsip)
+{
+ ssa_op_iter op_iter;
+ def_operand_p def_p;
+
+ if (!MAY_HAVE_DEBUG_STMTS)
+ return;
+
+ FOR_EACH_SSA_DEF_OPERAND (def_p, def, op_iter, SSA_OP_DEF)
+ {
+ tree var = DEF_FROM_PTR (def_p);
+
+ if (TREE_CODE (var) != SSA_NAME)
+ continue;
+
+ propagate_var_def_into_debug_stmts (var, tobb, togsip);
+ }
+}
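+
+/* A hedged sketch of the expected calling convention (caller code
+   assumed): when STMT is deleted outright rather than moved, pass
+   NULL/NULL so its DEFs reach any debug stmts before the removal.
+
+     propagate_defs_into_debug_stmts (stmt, NULL, NULL);
+     gsi_remove (&gsi, true);
+     release_defs (stmt);  */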
+
+/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
+ dominated stmts before their dominators, so that release_ssa_defs
+ stands a chance of propagating DEFs into debug bind stmts. */
+
+void
+release_defs_bitset (bitmap toremove)
+{
+ unsigned j;
+ bitmap_iterator bi;
+
+ /* Performing a topological sort is probably overkill; this will
+ most likely run in slightly superlinear time, rather than the
+ pathological quadratic worst case. */
+ while (!bitmap_empty_p (toremove))
+ EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
+ {
+ bool remove_now = true;
+ tree var = ssa_name (j);
+ gimple stmt;
+ imm_use_iterator uit;
+
+ FOR_EACH_IMM_USE_STMT (stmt, uit, var)
+ {
+ ssa_op_iter dit;
+ def_operand_p def_p;
+
+ /* We can't propagate PHI nodes into debug stmts. */
+ if (gimple_code (stmt) == GIMPLE_PHI
+ || is_gimple_debug (stmt))
+ continue;
+
+ /* If we find another definition to remove that uses
+ the one we're looking at, defer the removal of this
+ one, so that it can be propagated into debug stmts
+ after the other is. */
+ FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
+ {
+ tree odef = DEF_FROM_PTR (def_p);
+
+ if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
+ {
+ remove_now = false;
+ break;
+ }
+ }
+
+ if (!remove_now)
+ BREAK_FROM_IMM_USE_STMT (uit);
+ }
+
+ if (remove_now)
+ {
+ gimple def = SSA_NAME_DEF_STMT (var);
+ gimple_stmt_iterator gsi = gsi_for_stmt (def);
+
+ if (gimple_code (def) == GIMPLE_PHI)
+ remove_phi_node (&gsi, true);
+ else
+ {
+ gsi_remove (&gsi, true);
+ release_defs (def);
+ }
+
+ bitmap_clear_bit (toremove, j);
+ }
+ }
+}
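+
+/* An illustrative (assumed) caller of the above: collect the SSA
+   versions to remove in a bitmap and let release_defs_bitset order
+   the removals so debug stmts can still pick up the values.
+
+     bitmap toremove = BITMAP_ALLOC (NULL);
+     bitmap_set_bit (toremove, SSA_NAME_VERSION (name));
+     release_defs_bitset (toremove);
+     BITMAP_FREE (toremove);  */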
+
/* Return true if SSA_NAME is malformed and mark it visited.
IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
return true;
}
+ if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
+ {
+ error ("virtual SSA name for non-VOP decl");
+ return true;
+ }
+
if (!is_virtual && !is_gimple_reg (ssa_name))
{
error ("found a real definition for a non-register");
}
if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
- && !IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name)))
+ && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
{
error ("found a default name with a non-empty defining statement");
return true;
static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
- tree stmt, bool is_virtual)
+ gimple stmt, bool is_virtual)
{
if (verify_ssa_name (ssa_name, is_virtual))
goto err;
{
error ("SSA_NAME_DEF_STMT is wrong");
fprintf (stderr, "Expected definition statement:\n");
- print_generic_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), TDF_VOPS);
+ print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
fprintf (stderr, "\nActual definition statement:\n");
- print_generic_stmt (stderr, stmt, TDF_VOPS);
+ print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
goto err;
}
fprintf (stderr, "while verifying SSA_NAME ");
print_generic_expr (stderr, ssa_name, 0);
fprintf (stderr, " in statement\n");
- print_generic_stmt (stderr, stmt, TDF_VOPS);
+ print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
return true;
}
static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
- tree stmt, bool check_abnormal, bitmap names_defined_in_bb)
+ gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
bool err = false;
tree ssa_name = USE_FROM_PTR (use_p);
TREE_VISITED (ssa_name) = 1;
- if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name))
+ if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
&& SSA_NAME_IS_DEFAULT_DEF (ssa_name))
; /* Default definitions have empty statements. Nothing to do. */
else if (!def_bb)
}
else
{
- tree listvar ;
+ tree listvar;
if (use_p->prev->use == NULL)
- listvar = use_p->prev->stmt;
+ listvar = use_p->prev->loc.ssa_name;
else
listvar = USE_FROM_PTR (use_p->prev);
if (listvar != ssa_name)
fprintf (stderr, "for SSA_NAME: ");
print_generic_expr (stderr, ssa_name, TDF_VOPS);
fprintf (stderr, " in statement:\n");
- print_generic_stmt (stderr, stmt, TDF_VOPS);
+ print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
}
return err;
definition of SSA_NAME. */
static bool
-verify_phi_args (tree phi, basic_block bb, basic_block *definition_block)
+verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
{
edge e;
bool err = false;
- unsigned i, phi_num_args = PHI_NUM_ARGS (phi);
+ size_t i, phi_num_args = gimple_phi_num_args (phi);
if (EDGE_COUNT (bb->preds) != phi_num_args)
{
for (i = 0; i < phi_num_args; i++)
{
- use_operand_p op_p = PHI_ARG_DEF_PTR (phi, i);
+ use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
tree op = USE_FROM_PTR (op_p);
e = EDGE_PRED (bb, i);
if (TREE_CODE (op) == SSA_NAME)
{
- err = verify_ssa_name (op, !is_gimple_reg (PHI_RESULT (phi)));
+ err = verify_ssa_name (op, !is_gimple_reg (gimple_phi_result (phi)));
err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
}
+ if (TREE_CODE (op) == ADDR_EXPR)
+ {
+ tree base = TREE_OPERAND (op, 0);
+ while (handled_component_p (base))
+ base = TREE_OPERAND (base, 0);
+ if ((TREE_CODE (base) == VAR_DECL
+ || TREE_CODE (base) == PARM_DECL
+ || TREE_CODE (base) == RESULT_DECL)
+ && !TREE_ADDRESSABLE (base))
+ {
+ error ("address taken, but ADDRESSABLE bit not set");
+ err = true;
+ }
+ }
+
if (e->dest != bb)
{
error ("wrong edge %d->%d for PHI argument",
if (err)
{
fprintf (stderr, "for PHI node\n");
- print_generic_stmt (stderr, phi, TDF_VOPS|TDF_MEMSYMS);
+ print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
}
}
-static void
-verify_flow_insensitive_alias_info (void)
-{
- tree var;
- referenced_var_iterator rvi;
-
- FOR_EACH_REFERENCED_VAR (var, rvi)
- {
- unsigned int j;
- bitmap aliases;
- tree alias;
- bitmap_iterator bi;
-
- if (!MTAG_P (var) || !MTAG_ALIASES (var))
- continue;
-
- aliases = MTAG_ALIASES (var);
-
- EXECUTE_IF_SET_IN_BITMAP (aliases, 0, j, bi)
- {
- alias = referenced_var (j);
-
- if (TREE_CODE (alias) != MEMORY_PARTITION_TAG
- && !may_be_aliased (alias))
- {
- error ("non-addressable variable inside an alias set");
- debug_variable (alias);
- goto err;
- }
- }
- }
-
- return;
-
-err:
- debug_variable (var);
- internal_error ("verify_flow_insensitive_alias_info failed");
-}
-
-
-static void
-verify_flow_sensitive_alias_info (void)
-{
- size_t i;
- tree ptr;
-
- for (i = 1; i < num_ssa_names; i++)
- {
- tree var;
- var_ann_t ann;
- struct ptr_info_def *pi;
-
-
- ptr = ssa_name (i);
- if (!ptr)
- continue;
-
- /* We only care for pointers that are actually referenced in the
- program. */
- if (!POINTER_TYPE_P (TREE_TYPE (ptr)) || !TREE_VISITED (ptr))
- continue;
-
- /* RESULT_DECL is special. If it's a GIMPLE register, then it
- is only written-to only once in the return statement.
- Otherwise, aggregate RESULT_DECLs may be written-to more than
- once in virtual operands. */
- var = SSA_NAME_VAR (ptr);
- if (TREE_CODE (var) == RESULT_DECL
- && is_gimple_reg (ptr))
- continue;
-
- pi = SSA_NAME_PTR_INFO (ptr);
- if (pi == NULL)
- continue;
-
- ann = var_ann (var);
- if (pi->is_dereferenced && !pi->name_mem_tag && !ann->symbol_mem_tag)
- {
- error ("dereferenced pointers should have a name or a symbol tag");
- goto err;
- }
-
- if (pi->name_mem_tag
- && (pi->pt_vars == NULL || bitmap_empty_p (pi->pt_vars)))
- {
- error ("pointers with a memory tag, should have points-to sets");
- goto err;
- }
-
- if (pi->value_escapes_p && pi->name_mem_tag)
- {
- tree t = memory_partition (pi->name_mem_tag);
- if (t == NULL_TREE)
- t = pi->name_mem_tag;
-
- if (!is_call_clobbered (t))
- {
- error ("pointer escapes but its name tag is not call-clobbered");
- goto err;
- }
- }
- }
-
- return;
-
-err:
- debug_variable (ptr);
- internal_error ("verify_flow_sensitive_alias_info failed");
-}
-
-
-/* Verify the consistency of call clobbering information. */
-
-static void
-verify_call_clobbering (void)
-{
- unsigned int i;
- bitmap_iterator bi;
- tree var;
- referenced_var_iterator rvi;
-
- /* At all times, the result of the call_clobbered flag should
- match the result of the call_clobbered_vars bitmap. Verify both
- that everything in call_clobbered_vars is marked
- call_clobbered, and that everything marked
- call_clobbered is in call_clobbered_vars. */
- EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
- {
- var = referenced_var (i);
-
- if (memory_partition (var))
- var = memory_partition (var);
-
- if (!MTAG_P (var) && !var_ann (var)->call_clobbered)
- {
- error ("variable in call_clobbered_vars but not marked "
- "call_clobbered");
- debug_variable (var);
- goto err;
- }
- }
-
- FOR_EACH_REFERENCED_VAR (var, rvi)
- {
- if (is_gimple_reg (var))
- continue;
-
- if (memory_partition (var))
- var = memory_partition (var);
-
- if (!MTAG_P (var)
- && var_ann (var)->call_clobbered
- && !bitmap_bit_p (gimple_call_clobbered_vars (cfun), DECL_UID (var)))
- {
- error ("variable marked call_clobbered but not in "
- "call_clobbered_vars bitmap.");
- debug_variable (var);
- goto err;
- }
- }
-
- return;
-
- err:
- internal_error ("verify_call_clobbering failed");
-}
-
-
-/* Verify invariants in memory partitions. */
-
-static void
-verify_memory_partitions (void)
-{
- unsigned i;
- tree mpt;
- VEC(tree,heap) *mpt_table = gimple_ssa_operands (cfun)->mpt_table;
- struct pointer_set_t *partitioned_syms = pointer_set_create ();
-
- for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
- {
- unsigned j;
- bitmap_iterator bj;
-
- if (MPT_SYMBOLS (mpt) == NULL)
- {
- error ("Memory partitions should have at least one symbol");
- debug_variable (mpt);
- goto err;
- }
-
- EXECUTE_IF_SET_IN_BITMAP (MPT_SYMBOLS (mpt), 0, j, bj)
- {
- tree var = referenced_var (j);
- if (pointer_set_insert (partitioned_syms, var))
- {
- error ("Partitioned symbols should belong to exactly one "
- "partition");
- debug_variable (var);
- goto err;
- }
- }
- }
-
- pointer_set_destroy (partitioned_syms);
-
- return;
-
-err:
- internal_error ("verify_memory_partitions failed");
-}
-
-
-/* Verify the consistency of aliasing information. */
-
-static void
-verify_alias_info (void)
-{
- verify_flow_sensitive_alias_info ();
- verify_call_clobbering ();
- verify_flow_insensitive_alias_info ();
- verify_memory_partitions ();
-}
-
-
/* Verify common invariants in the SSA web.
TODO: verify the variable annotations. */
enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
- gcc_assert (!need_ssa_update_p ());
+ gcc_assert (!need_ssa_update_p (cfun));
verify_stmts ();
tree name = ssa_name (i);
if (name)
{
- tree stmt;
+ gimple stmt;
TREE_VISITED (name) = 0;
stmt = SSA_NAME_DEF_STMT (name);
- if (!IS_EMPTY_STMT (stmt))
+ if (!gimple_nop_p (stmt))
{
- basic_block bb = bb_for_stmt (stmt);
+ basic_block bb = gimple_bb (stmt);
verify_def (bb, definition_block,
name, stmt, !is_gimple_reg (name));
FOR_EACH_BB (bb)
{
edge e;
- tree phi;
+ gimple phi;
edge_iterator ei;
- block_stmt_iterator bsi;
+ gimple_stmt_iterator gsi;
/* Make sure that all edges have a clear 'aux' field. */
FOR_EACH_EDGE (e, ei, bb->preds)
}
/* Verify the arguments for every PHI node in the block. */
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
+ phi = gsi_stmt (gsi);
if (verify_phi_args (phi, bb, definition_block))
goto err;
bitmap_set_bit (names_defined_in_bb,
- SSA_NAME_VERSION (PHI_RESULT (phi)));
+ SSA_NAME_VERSION (gimple_phi_result (phi)));
}
/* Now verify all the uses and vuses in every statement of the block. */
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- tree stmt = bsi_stmt (bsi);
+ gimple stmt = gsi_stmt (gsi);
use_operand_p use_p;
+ bool has_err;
- if (check_modified_stmt && stmt_modified_p (stmt))
+ if (check_modified_stmt && gimple_modified_p (stmt))
{
error ("stmt (%p) marked modified after optimization pass: ",
(void *)stmt);
- print_generic_stmt (stderr, stmt, TDF_VOPS);
+ print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
goto err;
}
- if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
- && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) != SSA_NAME)
+ if (is_gimple_assign (stmt)
+ && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
{
tree lhs, base_address;
- lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+ lhs = gimple_assign_lhs (stmt);
base_address = get_base_address (lhs);
if (base_address
- && gimple_aliases_computed_p (cfun)
&& SSA_VAR_P (base_address)
- && !stmt_ann (stmt)->has_volatile_ops
- && ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
+ && !gimple_vdef (stmt)
+ && optimize > 0)
{
error ("statement makes a memory store, but has no VDEFS");
- print_generic_stmt (stderr, stmt, TDF_VOPS);
+ print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
goto err;
}
}
+ else if (gimple_debug_bind_p (stmt)
+ && !gimple_debug_bind_has_value_p (stmt))
+ continue;
- FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_VIRTUALS)
+ /* Verify the single virtual operand and its constraints. */
+ has_err = false;
+ if (gimple_vdef (stmt))
{
- if (verify_ssa_name (op, true))
+ if (gimple_vdef_op (stmt) == NULL_DEF_OPERAND_P)
{
- error ("in statement");
- print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
- goto err;
+ error ("statement has VDEF operand not in defs list");
+ has_err = true;
}
+ if (!gimple_vuse (stmt))
+ {
+ error ("statement has VDEF but no VUSE operand");
+ has_err = true;
+ }
+ else if (SSA_NAME_VAR (gimple_vdef (stmt))
+ != SSA_NAME_VAR (gimple_vuse (stmt)))
+ {
+ error ("VDEF and VUSE do not use the same symbol");
+ has_err = true;
+ }
+ has_err |= verify_ssa_name (gimple_vdef (stmt), true);
+ }
+ if (gimple_vuse (stmt))
+ {
+ if (gimple_vuse_op (stmt) == NULL_USE_OPERAND_P)
+ {
+ error ("statement has VUSE operand not in uses list");
+ has_err = true;
+ }
+ has_err |= verify_ssa_name (gimple_vuse (stmt), true);
+ }
+ if (has_err)
+ {
+ error ("in statement");
+ print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
+ goto err;
}
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE|SSA_OP_DEF)
if (verify_ssa_name (op, false))
{
error ("in statement");
- print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
+ print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
goto err;
}
}
}
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
- bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
+ {
+ if (SSA_NAME_DEF_STMT (op) != stmt)
+ {
+ error ("SSA_NAME_DEF_STMT is wrong");
+ fprintf (stderr, "Expected definition statement:\n");
+ print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
+ fprintf (stderr, "\nActual definition statement:\n");
+ print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
+ 4, TDF_VOPS);
+ goto err;
+ }
+ bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
+ }
}
bitmap_clear (names_defined_in_bb);
}
- /* Finally, verify alias information. */
- if (gimple_aliases_computed_p (cfun))
- verify_alias_info ();
-
free (definition_block);
/* Restore the dominance information to its prior known state, so
uid_decl_map_eq, NULL);
fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
uid_ssaname_map_eq, NULL);
- fn->gimple_df->call_clobbered_vars = BITMAP_GGC_ALLOC ();
- fn->gimple_df->addressable_vars = BITMAP_GGC_ALLOC ();
+ pt_solution_reset (&fn->gimple_df->escaped);
+ pt_solution_reset (&fn->gimple_df->callused);
init_ssanames (fn, 0);
init_phinodes ();
}
void
delete_tree_ssa (void)
{
- size_t i;
- basic_block bb;
- block_stmt_iterator bsi;
referenced_var_iterator rvi;
tree var;
- /* Release any ssa_names still in use. */
- for (i = 0; i < num_ssa_names; i++)
- {
- tree var = ssa_name (i);
- if (var && TREE_CODE (var) == SSA_NAME)
- {
- SSA_NAME_IMM_USE_NODE (var).prev = &(SSA_NAME_IMM_USE_NODE (var));
- SSA_NAME_IMM_USE_NODE (var).next = &(SSA_NAME_IMM_USE_NODE (var));
- }
- release_ssa_name (var);
- }
-
- /* Remove annotations from every tree in the function. */
- FOR_EACH_BB (bb)
- {
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- {
- tree stmt = bsi_stmt (bsi);
- stmt_ann_t ann = get_stmt_ann (stmt);
-
- free_ssa_operands (&ann->operands);
- ann->addresses_taken = 0;
- mark_stmt_modified (stmt);
- }
- set_phi_nodes (bb, NULL);
- }
-
/* Remove annotations from every referenced local variable. */
FOR_EACH_REFERENCED_VAR (var, rvi)
{
- if (!MTAG_P (var)
- && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
- {
- var_ann (var)->mpt = NULL_TREE;
- var_ann (var)->symbol_mem_tag = NULL_TREE;
- continue;
- }
+ if (is_global_var (var))
+ continue;
if (var->base.ann)
ggc_free (var->base.ann);
var->base.ann = NULL;
fini_ssanames ();
fini_phinodes ();
- /* we no longer maintain the SSA operand cache at this point. */
+
+ /* We no longer maintain the SSA operand cache at this point. */
if (ssa_operands_active ())
fini_ssa_operands ();
- cfun->gimple_df->global_var = NULL_TREE;
-
+ delete_alias_heapvars ();
+
htab_delete (cfun->gimple_df->default_defs);
cfun->gimple_df->default_defs = NULL;
- cfun->gimple_df->call_clobbered_vars = NULL;
- cfun->gimple_df->addressable_vars = NULL;
+ pt_solution_reset (&cfun->gimple_df->escaped);
+ pt_solution_reset (&cfun->gimple_df->callused);
+ if (cfun->gimple_df->decls_to_pointers != NULL)
+ pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
+ cfun->gimple_df->decls_to_pointers = NULL;
cfun->gimple_df->modified_noreturn_calls = NULL;
- if (gimple_aliases_computed_p (cfun))
- {
- delete_alias_heapvars ();
- gcc_assert (!need_ssa_update_p ());
- }
- cfun->gimple_df->aliases_computed_p = false;
- delete_mem_ref_stats (cfun);
-
cfun->gimple_df = NULL;
/* We no longer need the edge variable maps. */
redirect_edge_var_map_destroy ();
}
-/* Helper function for useless_type_conversion_p. */
+/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
+ useless type conversion, otherwise return false.
-static bool
-useless_type_conversion_p_1 (tree outer_type, tree inner_type)
+ This function implicitly defines the middle-end type system. With
+ the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
+ holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
+ the following invariants shall be fulfilled:
+
+ 1) useless_type_conversion_p is transitive.
+ If a < b and b < c then a < c.
+
+ 2) useless_type_conversion_p is not symmetric.
+ From a < b does not follow a > b.
+
+ 3) Types define the available set of operations applicable to values.
+ A type conversion is useless if the operations for the target type
+ is a subset of the operations for the source type. For example
+ casts to void* are useless, casts from void* are not (void* can't
+ be dereferenced or offsetted, but copied, hence its set of operations
+ is a strict subset of that of all other data pointer types). Casts
+ to const T* are useless (can't be written to), casts from const T*
+ to T* are not. */
+
+bool
+useless_type_conversion_p (tree outer_type, tree inner_type)
{
- /* Qualifiers on value types do not matter. */
+ /* Do the following before stripping toplevel qualifiers. */
+ if (POINTER_TYPE_P (inner_type)
+ && POINTER_TYPE_P (outer_type))
+ {
+ /* If the outer type is (void *) or a pointer to an incomplete
+ record type or a pointer to an unprototyped function,
+ then the conversion is not necessary. */
+ if (VOID_TYPE_P (TREE_TYPE (outer_type))
+ || (AGGREGATE_TYPE_P (TREE_TYPE (outer_type))
+ && TREE_CODE (TREE_TYPE (outer_type)) != ARRAY_TYPE
+ && (TREE_CODE (TREE_TYPE (outer_type))
+ == TREE_CODE (TREE_TYPE (inner_type)))
+ && !COMPLETE_TYPE_P (TREE_TYPE (outer_type)))
+ || ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
+ || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
+ && (TREE_CODE (TREE_TYPE (outer_type))
+ == TREE_CODE (TREE_TYPE (inner_type)))
+ && !TYPE_ARG_TYPES (TREE_TYPE (outer_type))
+ && useless_type_conversion_p (TREE_TYPE (TREE_TYPE (outer_type)),
+ TREE_TYPE (TREE_TYPE (inner_type)))))
+ return true;
+
+ /* Do not lose casts to restrict qualified pointers. */
+ if ((TYPE_RESTRICT (outer_type)
+ != TYPE_RESTRICT (inner_type))
+ && TYPE_RESTRICT (outer_type))
+ return false;
+ }
+
+ /* From now on qualifiers on value types do not matter. */
inner_type = TYPE_MAIN_VARIANT (inner_type);
outer_type = TYPE_MAIN_VARIANT (outer_type);
&& TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
return true;
- /* Changes in machine mode are never useless conversions. */
- if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
+ /* Changes in machine mode are never useless conversions unless we
+ deal with aggregate types in which case we defer to later checks. */
+ if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type)
+ && !AGGREGATE_TYPE_P (inner_type))
return false;
/* If both the inner and outer types are integral types, then the
|| TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
return false;
- /* Conversions from a non-base to a base type are not useless.
- This way we preserve the invariant to do arithmetic in
- base types only. */
- if (TREE_TYPE (inner_type)
- && TREE_TYPE (inner_type) != inner_type
- && (TREE_TYPE (outer_type) == outer_type
- || TREE_TYPE (outer_type) == NULL_TREE))
- return false;
-
/* We don't need to preserve changes in the types minimum or
maximum value in general as these do not generate code
unless the types precisions are different. */
-
return true;
}
&& SCALAR_FLOAT_TYPE_P (outer_type))
return true;
+ /* Fixed point types with the same mode are compatible. */
+ else if (FIXED_POINT_TYPE_P (inner_type)
+ && FIXED_POINT_TYPE_P (outer_type))
+ return true;
+
/* We need to take special care recursing to pointed-to types. */
else if (POINTER_TYPE_P (inner_type)
&& POINTER_TYPE_P (outer_type))
{
/* Don't lose casts between pointers to volatile and non-volatile
qualified types. Doing so would result in changing the semantics
- of later accesses. */
- if ((TYPE_VOLATILE (TREE_TYPE (outer_type))
- != TYPE_VOLATILE (TREE_TYPE (inner_type)))
+ of later accesses. For function types the volatile qualifier
+ is used to indicate noreturn functions. */
+ if (TREE_CODE (TREE_TYPE (outer_type)) != FUNCTION_TYPE
+ && TREE_CODE (TREE_TYPE (outer_type)) != METHOD_TYPE
+ && TREE_CODE (TREE_TYPE (inner_type)) != FUNCTION_TYPE
+ && TREE_CODE (TREE_TYPE (inner_type)) != METHOD_TYPE
+ && (TYPE_VOLATILE (TREE_TYPE (outer_type))
+ != TYPE_VOLATILE (TREE_TYPE (inner_type)))
&& TYPE_VOLATILE (TREE_TYPE (outer_type)))
return false;
- /* Do not lose casts between pointers with different
- TYPE_REF_CAN_ALIAS_ALL setting or alias sets. */
- if ((TYPE_REF_CAN_ALIAS_ALL (inner_type)
- != TYPE_REF_CAN_ALIAS_ALL (outer_type))
- || (get_alias_set (TREE_TYPE (inner_type))
- != get_alias_set (TREE_TYPE (outer_type))))
+ /* We require explicit conversions from incomplete target types. */
+ if (!COMPLETE_TYPE_P (TREE_TYPE (inner_type))
+ && COMPLETE_TYPE_P (TREE_TYPE (outer_type)))
+ return false;
+
+ /* Do not lose casts between pointers that when dereferenced access
+ memory with different alias sets. */
+ if (get_deref_alias_set (inner_type) != get_deref_alias_set (outer_type))
return false;
/* We do not care for const qualification of the pointed-to types
as const qualification has no semantic value to the middle-end. */
- /* Do not lose casts to restrict qualified pointers. */
- if ((TYPE_RESTRICT (outer_type)
- != TYPE_RESTRICT (inner_type))
- && TYPE_RESTRICT (outer_type))
- return false;
-
/* Otherwise pointers/references are equivalent if their pointed
to types are effectively the same. We can strip qualifiers
on pointed-to types for further comparison, which is done in
- the callee. */
- return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
- TREE_TYPE (inner_type));
+ the callee. Note we have to use true compatibility here
+ because addresses are subject to propagation into dereferences
+ and thus might get the original type exposed which is equivalent
+ to a reverse conversion. */
+ return types_compatible_p (TREE_TYPE (outer_type),
+ TREE_TYPE (inner_type));
}
/* Recurse for complex types. */
else if (TREE_CODE (inner_type) == COMPLEX_TYPE
&& TREE_CODE (outer_type) == COMPLEX_TYPE)
- return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
- TREE_TYPE (inner_type));
+ return useless_type_conversion_p (TREE_TYPE (outer_type),
+ TREE_TYPE (inner_type));
/* Recurse for vector types with the same number of subparts. */
else if (TREE_CODE (inner_type) == VECTOR_TYPE
&& TREE_CODE (outer_type) == VECTOR_TYPE
&& TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
- return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
- TREE_TYPE (inner_type));
+ return useless_type_conversion_p (TREE_TYPE (outer_type),
+ TREE_TYPE (inner_type));
- /* For aggregates we may need to fall back to structural equality
- checks. */
- else if (AGGREGATE_TYPE_P (inner_type)
- && AGGREGATE_TYPE_P (outer_type))
+ else if (TREE_CODE (inner_type) == ARRAY_TYPE
+ && TREE_CODE (outer_type) == ARRAY_TYPE)
{
- /* Different types of aggregates are incompatible. */
- if (TREE_CODE (inner_type) != TREE_CODE (outer_type))
+ /* Preserve string attributes. */
+ if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
return false;
- /* ??? Add structural equivalence check. */
+ /* Conversions from array types with unknown extent to
+ array types with known extent are not useless. */
+ if (!TYPE_DOMAIN (inner_type)
+ && TYPE_DOMAIN (outer_type))
+ return false;
- /* ??? This should eventually just return false. */
- return lang_hooks.types_compatible_p (inner_type, outer_type);
- }
+ /* Nor are conversions from array types with non-constant size to
+ array types with constant size or to a different size. */
+ if (TYPE_SIZE (outer_type)
+ && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
+ && (!TYPE_SIZE (inner_type)
+ || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
+ || !tree_int_cst_equal (TYPE_SIZE (outer_type),
+ TYPE_SIZE (inner_type))))
+ return false;
- return false;
-}
+ /* Check conversions between arrays with partially known extents.
+ If the array min/max values are constant they have to match.
+ Otherwise allow conversions to unknown and variable extents.
+ In particular this declares conversions that may change the
+ mode to BLKmode as useless. */
+ if (TYPE_DOMAIN (inner_type)
+ && TYPE_DOMAIN (outer_type)
+ && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
+ {
+ tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
+ tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
+ tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
+ tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));
+
+ /* After gimplification a variable min/max value carries no
+ additional information compared to a NULL value. All that
+ matters has been lowered to be part of the IL. */
+ if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
+ inner_min = NULL_TREE;
+ if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
+ outer_min = NULL_TREE;
+ if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
+ inner_max = NULL_TREE;
+ if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
+ outer_max = NULL_TREE;
+
+ /* Conversions NULL / variable <- cst are useless, but not
+ the other way around. */
+ if (outer_min
+ && (!inner_min
+ || !tree_int_cst_equal (inner_min, outer_min)))
+ return false;
+ if (outer_max
+ && (!inner_max
+ || !tree_int_cst_equal (inner_max, outer_max)))
+ return false;
+ }
-/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
- useless type conversion, otherwise return false.
+ /* Recurse on the element check. */
+ return useless_type_conversion_p (TREE_TYPE (outer_type),
+ TREE_TYPE (inner_type));
+ }
- This function implicitly defines the middle-end type system. With
- the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
- holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
- the following invariants shall be fulfilled:
+ else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
+ || TREE_CODE (inner_type) == METHOD_TYPE)
+ && TREE_CODE (inner_type) == TREE_CODE (outer_type))
+ {
+ tree outer_parm, inner_parm;
- 1) useless_type_conversion_p is transitive.
- If a < b and b < c then a < c.
+ /* If the return types are not compatible bail out. */
+ if (!useless_type_conversion_p (TREE_TYPE (outer_type),
+ TREE_TYPE (inner_type)))
+ return false;
- 2) useless_type_conversion_p is not symmetric.
- From a < b does not follow a > b.
+ /* Method types should belong to a compatible base class. */
+ if (TREE_CODE (inner_type) == METHOD_TYPE
+ && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
+ TYPE_METHOD_BASETYPE (inner_type)))
+ return false;
- 3) Types define the available set of operations applicable to values.
- A type conversion is useless if the operations for the target type
- is a subset of the operations for the source type. For example
- casts to void* are useless, casts from void* are not (void* can't
- be dereferenced or offsetted, but copied, hence its set of operations
- is a strict subset of that of all other data pointer types). Casts
- to const T* are useless (can't be written to), casts from const T*
- to T* are not. */
+ /* A conversion to an unprototyped argument list is ok. */
+ if (!TYPE_ARG_TYPES (outer_type))
+ return true;
+
+ /* If the unqualified argument types are compatible the conversion
+ is useless. */
+ if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
+ return true;
+
+ for (outer_parm = TYPE_ARG_TYPES (outer_type),
+ inner_parm = TYPE_ARG_TYPES (inner_type);
+ outer_parm && inner_parm;
+ outer_parm = TREE_CHAIN (outer_parm),
+ inner_parm = TREE_CHAIN (inner_parm))
+ if (!useless_type_conversion_p
+ (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
+ TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
+ return false;
+
+ /* If there is a mismatch in the number of arguments the functions
+ are not compatible. */
+ if (outer_parm || inner_parm)
+ return false;
-bool
-useless_type_conversion_p (tree outer_type, tree inner_type)
-{
- /* If the outer type is (void *), then the conversion is not
- necessary. We have to make sure to not apply this while
- recursing though. */
- if (POINTER_TYPE_P (inner_type)
- && POINTER_TYPE_P (outer_type)
- && TREE_CODE (TREE_TYPE (outer_type)) == VOID_TYPE)
- return true;
+ /* Defer to the target if necessary. */
+ if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
+ return targetm.comp_type_attributes (outer_type, inner_type) != 0;
+
+ return true;
+ }
- return useless_type_conversion_p_1 (outer_type, inner_type);
+ /* For aggregates we rely on TYPE_CANONICAL exclusively and require
+ explicit conversions for types that would need to be structurally
+ compared. */
+ else if (AGGREGATE_TYPE_P (inner_type)
+ && TREE_CODE (inner_type) == TREE_CODE (outer_type))
+ return false;
+
+ return false;
}
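+
+/* Concrete instances of the invariants documented above
+   (illustrative only; INT_PTR is a local assumption):
+
+     tree int_ptr = build_pointer_type (integer_type_node);
+     useless_type_conversion_p (ptr_type_node, int_ptr);
+       -> true, casts to void * are useless
+     useless_type_conversion_p (int_ptr, ptr_type_node);
+       -> false, void * cannot be dereferenced or offsetted  */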
/* Return true if a conversion from either type of TYPE1 and TYPE2
if (CONVERT_EXPR_P (expr)
|| TREE_CODE (expr) == VIEW_CONVERT_EXPR
|| TREE_CODE (expr) == NON_LVALUE_EXPR)
- /* FIXME: Use of GENERIC_TREE_TYPE here is a temporary measure to work
- around known bugs with GIMPLE_MODIFY_STMTs appearing in places
- they shouldn't. See PR 30391. */
return useless_type_conversion_p
(TREE_TYPE (expr),
- GENERIC_TREE_TYPE (TREE_OPERAND (expr, 0)));
+ TREE_TYPE (TREE_OPERAND (expr, 0)));
return false;
}
+/* Strip conversions from EXP according to
+ tree_ssa_useless_type_conversion and return the resulting
+ expression. */
+
+tree
+tree_ssa_strip_useless_type_conversions (tree exp)
+{
+ while (tree_ssa_useless_type_conversion (exp))
+ exp = TREE_OPERAND (exp, 0);
+ return exp;
+}
+
/* Internal helper for walk_use_def_chains. VAR, FN and DATA are as
described in walk_use_def_chains.
walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
struct pointer_set_t *visited, bool is_dfs)
{
- tree def_stmt;
+ gimple def_stmt;
if (pointer_set_insert (visited, var))
return false;
def_stmt = SSA_NAME_DEF_STMT (var);
- if (TREE_CODE (def_stmt) != PHI_NODE)
+ if (gimple_code (def_stmt) != GIMPLE_PHI)
{
/* If we reached the end of the use-def chain, call FN. */
return fn (var, def_stmt, data);
}
else
{
- int i;
+ size_t i;
/* When doing a breadth-first search, call FN before following the
use-def links for each argument. */
if (!is_dfs)
- for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
- if (fn (PHI_ARG_DEF (def_stmt, i), def_stmt, data))
+ for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
+ if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
return true;
/* Follow use-def links out of each PHI argument. */
- for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
+ for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
{
- tree arg = PHI_ARG_DEF (def_stmt, i);
+ tree arg = gimple_phi_arg_def (def_stmt, i);
/* ARG may be NULL for newly introduced PHI nodes. */
if (arg
/* When doing a depth-first search, call FN after following the
use-def links for each argument. */
if (is_dfs)
- for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
- if (fn (PHI_ARG_DEF (def_stmt, i), def_stmt, data))
+ for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
+ if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
return true;
}
walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
bool is_dfs)
{
- tree def_stmt;
+ gimple def_stmt;
gcc_assert (TREE_CODE (var) == SSA_NAME);
/* We only need to recurse if the reaching definition comes from a PHI
node. */
- if (TREE_CODE (def_stmt) != PHI_NODE)
+ if (gimple_code (def_stmt) != GIMPLE_PHI)
(*fn) (var, def_stmt, data);
else
{
return false;
/* The value is undefined iff its definition statement is empty. */
- return IS_EMPTY_STMT (SSA_NAME_DEF_STMT (t));
+ return gimple_nop_p (SSA_NAME_DEF_STMT (t));
}
/* Emit warnings for uninitialized variables. This is done in two passes.
warn_uninit (tree t, const char *gmsgid, void *data)
{
tree var = SSA_NAME_VAR (t);
- tree context = (tree) data;
- location_t *locus;
+ gimple context = (gimple) data;
+ location_t location;
expanded_location xloc, floc;
if (!ssa_undefined_value_p (t))
if (TREE_NO_WARNING (var))
return;
- locus = (context != NULL && EXPR_HAS_LOCATION (context)
- ? EXPR_LOCUS (context)
- : &DECL_SOURCE_LOCATION (var));
- warning (OPT_Wuninitialized, gmsgid, locus, var);
- xloc = expand_location (*locus);
+ /* Do not warn if it can be initialized outside this module. */
+ if (is_global_var (var))
+ return;
+
+ location = (context != NULL && gimple_has_location (context))
+ ? gimple_location (context)
+ : DECL_SOURCE_LOCATION (var);
+ xloc = expand_location (location);
floc = expand_location (DECL_SOURCE_LOCATION (cfun->decl));
- if (xloc.file != floc.file
- || xloc.line < floc.line
- || xloc.line > LOCATION_LINE (cfun->function_end_locus))
- inform ("%J%qD was declared here", var, var);
+ if (warning_at (location, OPT_Wuninitialized, gmsgid, var))
+ {
+ TREE_NO_WARNING (var) = 1;
- TREE_NO_WARNING (var) = 1;
+ if (xloc.file != floc.file
+ || xloc.line < floc.line
+ || xloc.line > LOCATION_LINE (cfun->function_end_locus))
+ inform (DECL_SOURCE_LOCATION (var), "%qD was declared here", var);
+ }
}
struct walk_data {
- tree stmt;
+ gimple stmt;
bool always_executed;
+ bool warn_possibly_uninitialized;
};
/* Called via walk_tree, look for SSA_NAMEs that have empty definitions
static tree
warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data_)
{
- struct walk_data *data = (struct walk_data *)data_;
+ struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
+ struct walk_data *data = (struct walk_data *) wi->info;
tree t = *tp;
+ /* We do not care about LHS. */
+ if (wi->is_lhs)
+ {
+ /* Except for operands of INDIRECT_REF. */
+ if (!INDIRECT_REF_P (t))
+ return NULL_TREE;
+ t = TREE_OPERAND (t, 0);
+ }
+
switch (TREE_CODE (t))
{
+ case ADDR_EXPR:
+ /* Taking the address of an uninitialized variable does not
+ count as using it. */
+ *walk_subtrees = 0;
+ break;
+
+ case VAR_DECL:
+ {
+ /* A VAR_DECL in the RHS of a gimple statement may mean that
+ this variable is loaded from memory. */
+ use_operand_p vuse;
+ tree op;
+
+ /* If there is no gimple stmt,
+ or alias information has not been computed,
+ then we cannot check VUSE ops. */
+ if (data->stmt == NULL)
+ return NULL_TREE;
+
+ /* If the load happens as part of a call do not warn about it. */
+ if (is_gimple_call (data->stmt))
+ return NULL_TREE;
+
+ vuse = gimple_vuse_op (data->stmt);
+ if (vuse == NULL_USE_OPERAND_P)
+ return NULL_TREE;
+
+ op = USE_FROM_PTR (vuse);
+ if (t != SSA_NAME_VAR (op)
+ || !SSA_NAME_IS_DEFAULT_DEF (op))
+ return NULL_TREE;
+ /* If this is a VUSE of t and it is the default definition,
+ then warn about op. */
+ t = op;
+ /* Fall through into SSA_NAME. */
+ }
+
case SSA_NAME:
/* We only do data flow with SSA_NAMEs, so that's all we
can warn about. */
if (data->always_executed)
- warn_uninit (t, "%H%qD is used uninitialized in this function",
+ warn_uninit (t, "%qD is used uninitialized in this function",
data->stmt);
- else
- warn_uninit (t, "%H%qD may be used uninitialized in this function",
+ else if (data->warn_possibly_uninitialized)
+ warn_uninit (t, "%qD may be used uninitialized in this function",
data->stmt);
*walk_subtrees = 0;
break;
and warn about them. */
static void
-warn_uninitialized_phi (tree phi)
+warn_uninitialized_phi (gimple phi)
{
- int i, n = PHI_NUM_ARGS (phi);
+ size_t i, n = gimple_phi_num_args (phi);
/* Don't look at memory tags. */
- if (!is_gimple_reg (PHI_RESULT (phi)))
+ if (!is_gimple_reg (gimple_phi_result (phi)))
return;
for (i = 0; i < n; ++i)
{
- tree op = PHI_ARG_DEF (phi, i);
+ tree op = gimple_phi_arg_def (phi, i);
if (TREE_CODE (op) == SSA_NAME)
- warn_uninit (op, "%H%qD may be used uninitialized in this function",
+ warn_uninit (op, "%qD may be used uninitialized in this function",
NULL);
}
}
static unsigned int
-execute_early_warn_uninitialized (void)
+warn_uninitialized_vars (bool warn_possibly_uninitialized)
{
- block_stmt_iterator bsi;
+ gimple_stmt_iterator gsi;
basic_block bb;
struct walk_data data;
+ data.warn_possibly_uninitialized = warn_possibly_uninitialized;
+
calculate_dominance_info (CDI_POST_DOMINATORS);
FOR_EACH_BB (bb)
{
data.always_executed = dominated_by_p (CDI_POST_DOMINATORS,
single_succ (ENTRY_BLOCK_PTR), bb);
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- {
- data.stmt = bsi_stmt (bsi);
- walk_tree (bsi_stmt_ptr (bsi), warn_uninitialized_var,
- &data, NULL);
- }
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ struct walk_stmt_info wi;
+ data.stmt = gsi_stmt (gsi);
+ if (is_gimple_debug (data.stmt))
+ continue;
+ memset (&wi, 0, sizeof (wi));
+ wi.info = &data;
+ walk_gimple_op (gsi_stmt (gsi), warn_uninitialized_var, &wi);
+ }
}
+
+ /* Post-dominator information can not be reliably updated. Free it
+ after the use. */
+
+ free_dominance_info (CDI_POST_DOMINATORS);
+ return 0;
+}
+
+static unsigned int
+execute_early_warn_uninitialized (void)
+{
+ /* Currently, this pass always runs, but
+ execute_late_warn_uninitialized only runs with optimization. With
+ optimization we want to warn about possibly uninitialized variables
+ as late as possible, so don't do it here. However, without
+ optimization we need to warn here about "may be uninitialized". */
+ warn_uninitialized_vars (/*warn_possibly_uninitialized=*/!optimize);
return 0;
}
execute_late_warn_uninitialized (void)
{
basic_block bb;
- tree phi;
+ gimple_stmt_iterator gsi;
/* Re-do the plain uninitialized variable check, as optimization may have
straightened control flow. Do this first so that we don't accidentally
get a "may be" warning when we'd have seen an "is" warning later. */
- execute_early_warn_uninitialized ();
+ warn_uninitialized_vars (/*warn_possibly_uninitialized=*/1);
FOR_EACH_BB (bb)
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- warn_uninitialized_phi (phi);
+ for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ warn_uninitialized_phi (gsi_stmt (gsi));
+
return 0;
}
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
- 0, /* tv_id */
+ TV_NONE, /* tv_id */
PROP_ssa, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
- 0, /* tv_id */
+ TV_NONE, /* tv_id */
PROP_ssa, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
}
};
-/* Compute TREE_ADDRESSABLE for local variables. */
+/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
-static unsigned int
-execute_update_addresses_taken (void)
+void
+execute_update_addresses_taken (bool do_optimize)
{
tree var;
referenced_var_iterator rvi;
- block_stmt_iterator bsi;
+ gimple_stmt_iterator gsi;
basic_block bb;
bitmap addresses_taken = BITMAP_ALLOC (NULL);
- bitmap vars_updated = BITMAP_ALLOC (NULL);
+ bitmap not_reg_needs = BITMAP_ALLOC (NULL);
bool update_vops = false;
- tree phi;
/* Collect into ADDRESSES_TAKEN all variables whose address is taken within
the function body. */
FOR_EACH_BB (bb)
{
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- stmt_ann_t s_ann = stmt_ann (bsi_stmt (bsi));
+ gimple stmt = gsi_stmt (gsi);
+ enum gimple_code code = gimple_code (stmt);
+
+ /* Note all addresses taken by the stmt. */
+ gimple_ior_addresses_taken (addresses_taken, stmt);
- if (s_ann->addresses_taken)
- bitmap_ior_into (addresses_taken, s_ann->addresses_taken);
+ /* If we have a call or an assignment, see if the lhs contains
+ a local decl that requires not to be a gimple register. */
+ if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
+ {
+ tree lhs = gimple_get_lhs (stmt);
+
+ /* We may not rewrite TMR_SYMBOL to SSA. */
+ if (lhs && TREE_CODE (lhs) == TARGET_MEM_REF
+ && TMR_SYMBOL (lhs))
+ bitmap_set_bit (not_reg_needs, DECL_UID (TMR_SYMBOL (lhs)));
+
+ /* A plain decl does not need it set. */
+ else if (lhs && handled_component_p (lhs))
+ {
+ var = get_base_address (lhs);
+ if (DECL_P (var))
+ bitmap_set_bit (not_reg_needs, DECL_UID (var));
+ }
+ }
}
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+
+ for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- unsigned i, phi_num_args = PHI_NUM_ARGS (phi);
- for (i = 0; i < phi_num_args; i++)
+ size_t i;
+ gimple phi = gsi_stmt (gsi);
+
+ for (i = 0; i < gimple_phi_num_args (phi); i++)
{
tree op = PHI_ARG_DEF (phi, i), var;
if (TREE_CODE (op) == ADDR_EXPR
- && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL_TREE
+ && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
&& DECL_P (var))
bitmap_set_bit (addresses_taken, DECL_UID (var));
}
}
}
- /* When possible, clear ADDRESSABLE bit and mark variable for conversion into
- SSA. */
- FOR_EACH_REFERENCED_VAR (var, rvi)
- if (!is_global_var (var)
- && TREE_CODE (var) != RESULT_DECL
- && TREE_ADDRESSABLE (var)
- && !bitmap_bit_p (addresses_taken, DECL_UID (var)))
+ /* When possible, clear ADDRESSABLE bit or set the REGISTER bit
+ and mark variable for conversion into SSA. */
+ if (optimize && do_optimize)
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- TREE_ADDRESSABLE (var) = 0;
- if (is_gimple_reg (var))
- mark_sym_for_renaming (var);
- update_vops = true;
- bitmap_set_bit (vars_updated, DECL_UID (var));
- if (dump_file)
+ /* Global variables and result decls cannot be changed. */
+ if (is_global_var (var)
+ || TREE_CODE (var) == RESULT_DECL
+ || bitmap_bit_p (addresses_taken, DECL_UID (var)))
+ continue;
+
+ if (TREE_ADDRESSABLE (var)
+ /* Do not change TREE_ADDRESSABLE if we need to preserve var as
+ a non-register. Otherwise we are confused and forget to
+ add virtual operands for it. */
+ && (!is_gimple_reg_type (TREE_TYPE (var))
+ || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
+ {
+ TREE_ADDRESSABLE (var) = 0;
+ if (is_gimple_reg (var))
+ mark_sym_for_renaming (var);
+ update_vops = true;
+ if (dump_file)
+ {
+ fprintf (dump_file, "No longer having address taken ");
+ print_generic_expr (dump_file, var, 0);
+ fprintf (dump_file, "\n");
+ }
+ }
+ if (!DECL_GIMPLE_REG_P (var)
+ && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
+ && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
+ && !TREE_THIS_VOLATILE (var)
+ && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
{
- fprintf (dump_file, "No longer having address taken ");
- print_generic_expr (dump_file, var, 0);
- fprintf (dump_file, "\n");
+ DECL_GIMPLE_REG_P (var) = 1;
+ mark_sym_for_renaming (var);
+ update_vops = true;
+ if (dump_file)
+ {
+ fprintf (dump_file, "Decl is now a gimple register ");
+ print_generic_expr (dump_file, var, 0);
+ fprintf (dump_file, "\n");
+ }
}
}
/* Operand caches needs to be recomputed for operands referencing the updated
variables. */
if (update_vops)
- FOR_EACH_BB (bb)
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- {
- tree stmt = bsi_stmt (bsi);
+ {
+ FOR_EACH_BB (bb)
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple stmt = gsi_stmt (gsi);
- if ((LOADED_SYMS (stmt)
- && bitmap_intersect_p (LOADED_SYMS (stmt), vars_updated))
- || (STORED_SYMS (stmt)
- && bitmap_intersect_p (STORED_SYMS (stmt), vars_updated)))
- update_stmt (stmt);
- }
+ if (gimple_references_memory_p (stmt)
+ || is_gimple_debug (stmt))
+ update_stmt (stmt);
+ }
+
+ /* Update SSA form here, we are called as non-pass as well. */
+ update_ssa (TODO_update_ssa);
+ }
+
+ BITMAP_FREE (not_reg_needs);
BITMAP_FREE (addresses_taken);
- BITMAP_FREE (vars_updated);
- return 0;
}
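+
+/* Note: the pass below no longer has an execute hook; the function
+   above is expected to be run via the todo machinery instead,
+   roughly (sketch of the assumed caller in passes.c):
+
+     if (flags & TODO_update_address_taken)
+       execute_update_addresses_taken (true);  */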
struct gimple_opt_pass pass_update_address_taken =
GIMPLE_PASS,
"addressables", /* name */
NULL, /* gate */
- execute_update_addresses_taken, /* execute */
+ NULL, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
- 0, /* tv_id */
+ TV_NONE, /* tv_id */
PROP_ssa, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_update_ssa /* todo_flags_finish */
+ TODO_update_address_taken
+ | TODO_dump_func /* todo_flags_finish */
}
};