#include "tree-pass.h"
#include "toplev.h"
+/* Pointer map of variable mappings, keyed by edge. */
+static struct pointer_map_t *edge_var_maps;
+
+
+/* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
+
+void
+redirect_edge_var_map_add (edge e, tree result, tree def)
+{
+ void **slot;
+ edge_var_map_vector old_head, head;
+ edge_var_map new_node;
+
+ if (edge_var_maps == NULL)
+ edge_var_maps = pointer_map_create ();
+
+ slot = pointer_map_insert (edge_var_maps, e);
+ old_head = head = *slot;
+ if (!head)
+ {
+ head = VEC_alloc (edge_var_map, heap, 5);
+ *slot = head;
+ }
+ new_node.def = def;
+ new_node.result = result;
+
+ VEC_safe_push (edge_var_map, heap, head, &new_node);
+ if (old_head != head)
+ {
+ /* The push did some reallocation. Update the pointer map. */
+ *slot = head;
+ }
+}
+
+
+/* Clear the var mappings in edge E. */
+
+void
+redirect_edge_var_map_clear (edge e)
+{
+ void **slot;
+ edge_var_map_vector head;
+
+ if (!edge_var_maps)
+ return;
+
+ slot = pointer_map_contains (edge_var_maps, e);
+
+ if (slot)
+ {
+ head = *slot;
+ VEC_free (edge_var_map, heap, head);
+ *slot = NULL;
+ }
+}
+
+
+/* Duplicate the redirected var mappings of OLDE into NEWE.
+
+ Since we can't remove a mapping, let's just duplicate it. This assumes a
+ pointer_map can have multiple edges mapping to the same var_map (many to
+ one mapping), since we don't remove the previous mappings. */
+
+void
+redirect_edge_var_map_dup (edge newe, edge olde)
+{
+ void **new_slot, **old_slot;
+ edge_var_map_vector head;
+
+ if (!edge_var_maps)
+ return;
+
+ new_slot = pointer_map_insert (edge_var_maps, newe);
+ old_slot = pointer_map_contains (edge_var_maps, olde);
+ if (!old_slot)
+ return;
+ head = *old_slot;
+
+ if (head)
+ *new_slot = VEC_copy (edge_var_map, heap, head);
+ else
+ *new_slot = VEC_alloc (edge_var_map, heap, 5);
+}
+
+
+/* Return the variable mappings for a given edge. If there is none, return
+ NULL. */
+
+edge_var_map_vector
+redirect_edge_var_map_vector (edge e)
+{
+ void **slot;
+
+ /* Callers may legitimately query before any mappings have been added. */
+ if (!edge_var_maps)
+ return NULL;
+
+ slot = pointer_map_contains (edge_var_maps, e);
+ if (!slot)
+ return NULL;
+
+ return (edge_var_map_vector) *slot;
+}
+
+
+/* Destroy the edge variable mappings. */
+
+void
+redirect_edge_var_map_destroy (void)
+{
+ if (edge_var_maps)
+ {
+ pointer_map_destroy (edge_var_maps);
+ edge_var_maps = NULL;
+ }
+}
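
/* Illustrative sketch, not part of the patch: the expected lifecycle of the
   edge var map API above.  The function and argument names are placeholders;
   redirect_edge_var_map_def is the accessor also used by flush_pending_stmts
   below.  */

static void
example_edge_var_map_usage (edge example_e, tree result, tree def)
{
  edge_var_map_vector v;
  edge_var_map *vm;
  int i;

  /* Record a PHI result/argument pair for the edge.  */
  redirect_edge_var_map_add (example_e, result, def);

  /* Walk the recorded mappings, if any.  */
  v = redirect_edge_var_map_vector (example_e);
  if (v)
    for (i = 0; VEC_iterate (edge_var_map, v, i, vm); i++)
      gcc_assert (redirect_edge_var_map_def (vm) == def);

  /* Drop this edge's mappings, then the whole table when finished.  */
  redirect_edge_var_map_clear (example_e);
  redirect_edge_var_map_destroy ();
}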
+
+
/* Remove the corresponding arguments from the PHI nodes in E's
destination block and redirect it to DEST. Return redirected edge.
- The list of removed arguments is stored in PENDING_STMT (e). */
+ The list of removed arguments is stored in a vector accessed
+ through edge_var_maps. */
edge
ssa_redirect_edge (edge e, basic_block dest)
{
tree phi;
- tree list = NULL, *last = &list;
- tree src, dst, node;
+
+ redirect_edge_var_map_clear (e);
/* Remove the appropriate PHI arguments in E's destination block. */
for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
{
- if (PHI_ARG_DEF (phi, e->dest_idx) == NULL_TREE)
+ tree def = PHI_ARG_DEF (phi, e->dest_idx);
+
+ if (def == NULL_TREE)
continue;
- src = PHI_ARG_DEF (phi, e->dest_idx);
- dst = PHI_RESULT (phi);
- node = build_tree_list (dst, src);
- *last = node;
- last = &TREE_CHAIN (node);
+ redirect_edge_var_map_add (e, PHI_RESULT (phi), def);
}
e = redirect_edge_succ_nodup (e, dest);
- PENDING_STMT (e) = list;
return e;
}
void
flush_pending_stmts (edge e)
{
- tree phi, arg;
+ tree phi;
+ edge_var_map_vector v;
+ edge_var_map *vm;
+ int i;
- if (!PENDING_STMT (e))
+ v = redirect_edge_var_map_vector (e);
+ if (!v)
return;
- for (phi = phi_nodes (e->dest), arg = PENDING_STMT (e);
- phi;
- phi = PHI_CHAIN (phi), arg = TREE_CHAIN (arg))
+ for (phi = phi_nodes (e->dest), i = 0;
+ phi && VEC_iterate (edge_var_map, v, i, vm);
+ phi = PHI_CHAIN (phi), i++)
{
- tree def = TREE_VALUE (arg);
+ tree def = redirect_edge_var_map_def (vm);
add_phi_arg (phi, def, e);
}
- PENDING_STMT (e) = NULL;
+ redirect_edge_var_map_clear (e);
}
/* Return true if SSA_NAME is malformed and mark it visited.
return ((const struct int_tree_map *)item)->uid;
}
+/* Return true if the DECL_UIDs of both trees are equal. */
+
+int
+uid_decl_map_eq (const void *va, const void *vb)
+{
+ const_tree a = (const_tree) va;
+ const_tree b = (const_tree) vb;
+ return (a->decl_minimal.uid == b->decl_minimal.uid);
+}
+
+/* Hash a tree in a uid_decl_map. */
+
+unsigned int
+uid_decl_map_hash (const void *item)
+{
+ return ((const_tree)item)->decl_minimal.uid;
+}
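
/* Illustrative sketch, not part of the patch: querying a hash table built
   with the two callbacks above.  Because they look only at the DECL_UID
   field, a dummy tree_decl_minimal node suffices as the lookup key.  The
   function name and EXAMPLE_TABLE are placeholders.  */

static tree
example_lookup_decl_by_uid (htab_t example_table, unsigned int uid)
{
  struct tree_decl_minimal in;
  in.uid = uid;
  return (tree) htab_find_with_hash (example_table, &in, uid);
}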
+
/* Return true if the uid in both int tree maps are equal. */
static int
return ((const struct static_var_ann_d *)item)->uid;
}
+/* Return true if the DECL_UIDs of the variables underlying both
+   SSA_NAMEs are equal. */
+
+static int
+uid_ssaname_map_eq (const void *va, const void *vb)
+{
+ const_tree a = (const_tree) va;
+ const_tree b = (const_tree) vb;
+ return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
+}
+
+/* Hash a tree in a uid_ssaname_map. */
+
+static unsigned int
+uid_ssaname_map_hash (const void *item)
+{
+ return ((const_tree)item)->ssa_name.var->decl_minimal.uid;
+}
+
/* Initialize global DFA and SSA structures. */
init_tree_ssa (void)
{
cfun->gimple_df = GGC_CNEW (struct gimple_df);
- cfun->gimple_df->referenced_vars = htab_create_ggc (20, int_tree_map_hash,
- int_tree_map_eq, NULL);
- cfun->gimple_df->default_defs = htab_create_ggc (20, int_tree_map_hash,
- int_tree_map_eq, NULL);
+ cfun->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
+ uid_decl_map_eq, NULL);
+ cfun->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
+ uid_ssaname_map_eq, NULL);
cfun->gimple_df->var_anns = htab_create_ggc (20, var_ann_hash,
var_ann_eq, NULL);
cfun->gimple_df->call_clobbered_vars = BITMAP_GGC_ALLOC ();
fini_ssanames ();
fini_phinodes ();
 /* We no longer maintain the SSA operand cache at this point. */
- fini_ssa_operands ();
+ if (ssa_operands_active ())
+ fini_ssa_operands ();
cfun->gimple_df->global_var = NULL_TREE;
delete_mem_ref_stats (cfun);
cfun->gimple_df = NULL;
-}
+ /* We no longer need the edge variable maps. */
+ redirect_edge_var_map_destroy ();
+}
-/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
- useless type conversion, otherwise return false.
-
- This function implicitly defines the middle-end type system. With
- the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
- holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
- the following invariants shall be fulfilled:
-
- 1) useless_type_conversion_p is transitive.
- If a < b and b < c then a < c.
-
- 2) useless_type_conversion_p is not symmetric.
- From a < b does not follow a > b.
+/* Helper function for useless_type_conversion_p. */
- 3) Types define the available set of operations applicable to values.
- A type conversion is useless if the operations for the target type
- is a subset of the operations for the source type. For example
- casts to void* are useless, casts from void* are not (void* can't
- be dereferenced or offsetted, but copied, hence its set of operations
- is a strict subset of that of all other data pointer types). Casts
- to const T* are useless (can't be written to), casts from const T*
- to T* are not. */
-
-bool
-useless_type_conversion_p (tree outer_type, tree inner_type)
+static bool
+useless_type_conversion_p_1 (tree outer_type, tree inner_type)
{
/* Qualifiers on value types do not matter. */
inner_type = TYPE_MAIN_VARIANT (inner_type);
else if (POINTER_TYPE_P (inner_type)
&& POINTER_TYPE_P (outer_type))
{
- /* If the outer type is (void *), then the conversion is not
- necessary. */
- if (TREE_CODE (TREE_TYPE (outer_type)) == VOID_TYPE)
- return true;
-
/* Don't lose casts between pointers to volatile and non-volatile
qualified types. Doing so would result in changing the semantics
of later accesses. */
!= get_alias_set (TREE_TYPE (outer_type))))
return false;
- /* Do not lose casts from const qualified to non-const
- qualified. */
- if ((TYPE_READONLY (TREE_TYPE (outer_type))
- != TYPE_READONLY (TREE_TYPE (inner_type)))
- && TYPE_READONLY (TREE_TYPE (inner_type)))
- return false;
+ /* We do not care about const qualification of the pointed-to types
+ as const qualification has no semantic value to the middle-end. */
/* Do not lose casts to restrict qualified pointers. */
if ((TYPE_RESTRICT (outer_type)
to types are effectively the same. We can strip qualifiers
on pointed-to types for further comparison, which is done in
the callee. */
- return useless_type_conversion_p (TREE_TYPE (outer_type),
- TREE_TYPE (inner_type));
+ return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
+ TREE_TYPE (inner_type));
}
/* Recurse for complex types. */
else if (TREE_CODE (inner_type) == COMPLEX_TYPE
&& TREE_CODE (outer_type) == COMPLEX_TYPE)
- return useless_type_conversion_p (TREE_TYPE (outer_type),
- TREE_TYPE (inner_type));
+ return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
+ TREE_TYPE (inner_type));
/* Recurse for vector types with the same number of subparts. */
else if (TREE_CODE (inner_type) == VECTOR_TYPE
&& TREE_CODE (outer_type) == VECTOR_TYPE
&& TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
- return useless_type_conversion_p (TREE_TYPE (outer_type),
- TREE_TYPE (inner_type));
+ return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
+ TREE_TYPE (inner_type));
/* For aggregates we may need to fall back to structural equality
checks. */
return false;
}
+/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
+ useless type conversion, otherwise return false.
+
+ This function implicitly defines the middle-end type system. With
+ the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
+ holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
+ the following invariants shall be fulfilled:
+
+ 1) useless_type_conversion_p is transitive.
+ If a < b and b < c then a < c.
+
+ 2) useless_type_conversion_p is not symmetric.
+ From a < b does not follow a > b.
+
+ 3) Types define the available set of operations applicable to values.
+ A type conversion is useless if the operations for the target type
+ are a subset of the operations for the source type. For example
+ casts to void* are useless, casts from void* are not (void* can't
+ be dereferenced or offsetted, but copied, hence its set of operations
+ is a strict subset of that of all other data pointer types). Casts
+ to const T* are useless (can't be written to), casts from const T*
+ to T* are not. */
+
+bool
+useless_type_conversion_p (tree outer_type, tree inner_type)
+{
+ /* If the outer type is (void *), then the conversion is not
+ necessary. We have to make sure to not apply this while
+ recursing though. */
+ if (POINTER_TYPE_P (inner_type)
+ && POINTER_TYPE_P (outer_type)
+ && TREE_CODE (TREE_TYPE (outer_type)) == VOID_TYPE)
+ return true;
+
+ return useless_type_conversion_p_1 (outer_type, inner_type);
+}
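
/* For illustration, not part of the patch: the top-level handling of
   (void *) matters for nested pointers.  Converting an int * to void *
   is a useless conversion, but converting an int ** to void ** is not,
   because the recursion above compares the pointed-to types int * and
   void *.  If the void * shortcut lived in useless_type_conversion_p_1,
   that inner comparison would wrongly succeed.  */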
+
/* Return true if a conversion from either type of TYPE1 and TYPE2
to the other is not required. Otherwise return false. */
}
\f
+/* Return true if T, an SSA_NAME, has an undefined value. */
+
+bool
+ssa_undefined_value_p (tree t)
+{
+ tree var = SSA_NAME_VAR (t);
+
+ /* Parameters get their initial value from the function entry. */
+ if (TREE_CODE (var) == PARM_DECL)
+ return false;
+
+ /* Hard register variables get their initial value from the ether. */
+ if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
+ return false;
+
+ /* The value is undefined iff its definition statement is empty. */
+ return IS_EMPTY_STMT (SSA_NAME_DEF_STMT (t));
+}
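
/* For illustration, not part of the patch: given

     int example (int p) { int x; return p + x; }

   the SSA name for x has an empty defining statement, so
   ssa_undefined_value_p returns true for it, while the default
   definition of the PARM_DECL p receives its value at function entry
   and the predicate returns false.  */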
+
/* Emit warnings for uninitialized variables. This is done in two passes.
- The first pass notices real uses of SSA names with default definitions.
+ The first pass notices real uses of SSA names with undefined values.
Such uses are unconditionally uninitialized, and we can be certain that
such a use is a mistake. This pass is run before most optimizations,
so that we catch as many as we can.
warn_uninit (tree t, const char *gmsgid, void *data)
{
tree var = SSA_NAME_VAR (t);
- tree def = SSA_NAME_DEF_STMT (t);
tree context = (tree) data;
location_t *locus;
expanded_location xloc, floc;
- /* Default uses (indicated by an empty definition statement),
- are uninitialized. */
- if (!IS_EMPTY_STMT (def))
- return;
-
- /* Except for PARMs of course, which are always initialized. */
- if (TREE_CODE (var) == PARM_DECL)
- return;
-
- /* Hard register variables get their initial value from the ether. */
- if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
+ if (!ssa_undefined_value_p (t))
return;
/* TREE_NO_WARNING either means we already warned, or the front end
TREE_NO_WARNING (var) = 1;
}
-
+
+struct walk_data
+{
+ tree stmt;
+ bool always_executed;
+};
+
/* Called via walk_tree, look for SSA_NAMEs that have empty definitions
and warn about them. */
static tree
-warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data)
+warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data_)
{
+ struct walk_data *data = (struct walk_data *)data_;
tree t = *tp;
switch (TREE_CODE (t))
case SSA_NAME:
/* We only do data flow with SSA_NAMEs, so that's all we
can warn about. */
- warn_uninit (t, "%H%qD is used uninitialized in this function", data);
+ if (data->always_executed)
+ warn_uninit (t, "%H%qD is used uninitialized in this function",
+ data->stmt);
+ else
+ warn_uninit (t, "%H%qD may be used uninitialized in this function",
+ data->stmt);
*walk_subtrees = 0;
break;
{
block_stmt_iterator bsi;
basic_block bb;
+ struct walk_data data;
+
+ calculate_dominance_info (CDI_POST_DOMINATORS);
FOR_EACH_BB (bb)
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- {
- tree context = bsi_stmt (bsi);
- walk_tree (bsi_stmt_ptr (bsi), warn_uninitialized_var,
- context, NULL);
- }
+ {
+ data.always_executed = dominated_by_p (CDI_POST_DOMINATORS,
+ single_succ (ENTRY_BLOCK_PTR), bb);
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ data.stmt = bsi_stmt (bsi);
+ walk_tree (bsi_stmt_ptr (bsi), warn_uninitialized_var,
+ &data, NULL);
+ }
+ }
return 0;
}
return warn_uninitialized != 0;
}
-struct tree_opt_pass pass_early_warn_uninitialized =
+struct gimple_opt_pass pass_early_warn_uninitialized =
{
+ {
+ GIMPLE_PASS,
NULL, /* name */
gate_warn_uninitialized, /* gate */
execute_early_warn_uninitialized, /* execute */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- 0, /* todo_flags_finish */
- 0 /* letter */
+ 0 /* todo_flags_finish */
+ }
};
-struct tree_opt_pass pass_late_warn_uninitialized =
+struct gimple_opt_pass pass_late_warn_uninitialized =
{
+ {
+ GIMPLE_PASS,
NULL, /* name */
gate_warn_uninitialized, /* gate */
execute_late_warn_uninitialized, /* execute */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- 0, /* todo_flags_finish */
- 0 /* letter */
+ 0 /* todo_flags_finish */
+ }
+};
+
+/* Recompute TREE_ADDRESSABLE for local variables, clearing it for
+   variables whose address turns out not to be taken. */
+
+static unsigned int
+execute_update_addresses_taken (void)
+{
+ tree var;
+ referenced_var_iterator rvi;
+ block_stmt_iterator bsi;
+ basic_block bb;
+ bitmap addresses_taken = BITMAP_ALLOC (NULL);
+ bitmap vars_updated = BITMAP_ALLOC (NULL);
+ bool update_vops = false;
+ tree phi;
+
+ /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
+ the function body. */
+ FOR_EACH_BB (bb)
+ {
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ stmt_ann_t s_ann = stmt_ann (bsi_stmt (bsi));
+
+ if (s_ann->addresses_taken)
+ bitmap_ior_into (addresses_taken, s_ann->addresses_taken);
+ }
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ {
+ unsigned i, phi_num_args = PHI_NUM_ARGS (phi);
+ for (i = 0; i < phi_num_args; i++)
+ {
+ tree op = PHI_ARG_DEF (phi, i), var;
+ if (TREE_CODE (op) == ADDR_EXPR
+ && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL_TREE
+ && DECL_P (var))
+ bitmap_set_bit (addresses_taken, DECL_UID (var));
+ }
+ }
+ }
+
+ /* When possible, clear the TREE_ADDRESSABLE bit and mark the variable
+ for conversion into SSA form. */
+ FOR_EACH_REFERENCED_VAR (var, rvi)
+ if (!is_global_var (var)
+ && TREE_CODE (var) != RESULT_DECL
+ && TREE_ADDRESSABLE (var)
+ && !bitmap_bit_p (addresses_taken, DECL_UID (var)))
+ {
+ TREE_ADDRESSABLE (var) = 0;
+ if (is_gimple_reg (var))
+ mark_sym_for_renaming (var);
+ update_vops = true;
+ bitmap_set_bit (vars_updated, DECL_UID (var));
+ if (dump_file)
+ {
+ fprintf (dump_file, "No longer having address taken ");
+ print_generic_expr (dump_file, var, 0);
+ fprintf (dump_file, "\n");
+ }
+ }
+
+ /* Operand caches need to be recomputed for operands referencing the
+ updated variables. */
+ if (update_vops)
+ FOR_EACH_BB (bb)
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ tree stmt = bsi_stmt (bsi);
+
+ if ((LOADED_SYMS (stmt)
+ && bitmap_intersect_p (LOADED_SYMS (stmt), vars_updated))
+ || (STORED_SYMS (stmt)
+ && bitmap_intersect_p (STORED_SYMS (stmt), vars_updated)))
+ update_stmt (stmt);
+ }
+ BITMAP_FREE (addresses_taken);
+ BITMAP_FREE (vars_updated);
+ return 0;
+}
+
+struct gimple_opt_pass pass_update_address_taken =
+{
+ {
+ GIMPLE_PASS,
+ "addressables", /* name */
+ NULL, /* gate */
+ execute_update_addresses_taken, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ PROP_ssa, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_update_ssa /* todo_flags_finish */
+ }
};
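
/* Illustrative sketch, not part of the patch: a gimple pass like the one
   above is enabled by adding it to the pass list in passes.c, for example

     NEXT_PASS (pass_update_address_taken);

   at a point where SSA form is available (PROP_ssa); the TODO_update_ssa
   flag then lets the SSA renamer pick up the symbols marked by
   mark_sym_for_renaming above.  */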