/* Variable tracking routines for the GNU compiler.
- Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009
+ Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
This file is part of GCC.
#include "expr.h"
#include "timevar.h"
#include "tree-pass.h"
+#include "tree-flow.h"
#include "cselib.h"
#include "target.h"
+/* var-tracking.c assumes that any tree code with the same value as the VALUE
+   rtx code has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
+ Currently the value is the same as IDENTIFIER_NODE, which has such
+ a property. If this compile time assertion ever fails, make sure that
+ the new tree code that equals (int) VALUE has the same property. */
+extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
+
/* Type of micro operation. */
enum micro_operation_type
{
} *value_chain;
typedef const struct value_chain_def *const_value_chain;
-/* Hash function for DECL for VARIABLE_HTAB. */
-#define VARIABLE_HASH_VAL(decl) (DECL_UID (decl))
-
/* Pointer to the BB's information specific to variable tracking pass. */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)
static void vt_find_locations (void);
static void dump_attrs_list (attrs);
-static int dump_variable_slot (void **, void *);
-static void dump_variable (variable);
+static int dump_var_slot (void **, void *);
+static void dump_var (variable);
static void dump_vars (htab_t);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);
static inline bool
dv_is_decl_p (decl_or_value dv)
{
- if (!dv)
- return true;
-
- /* Make sure relevant codes don't overlap. */
- switch ((int)TREE_CODE ((tree)dv))
- {
- case (int)VAR_DECL:
- case (int)PARM_DECL:
- case (int)RESULT_DECL:
- case (int)FUNCTION_DECL:
- case (int)DEBUG_EXPR_DECL:
- case (int)COMPONENT_REF:
- return true;
-
- case (int)VALUE:
- return false;
-
- default:
- gcc_unreachable ();
- }
+ return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
}
/* Return true if a decl_or_value is a VALUE rtl. */
static inline tree
dv_as_decl (decl_or_value dv)
{
+#ifdef ENABLE_CHECKING
gcc_assert (dv_is_decl_p (dv));
+#endif
return (tree) dv;
}
static inline rtx
dv_as_value (decl_or_value dv)
{
+#ifdef ENABLE_CHECKING
gcc_assert (dv_is_value_p (dv));
+#endif
return (rtx)dv;
}
{
decl_or_value dv;
dv = decl;
+#ifdef ENABLE_CHECKING
gcc_assert (dv_is_decl_p (dv));
+#endif
return dv;
}
{
decl_or_value dv;
dv = value;
+#ifdef ENABLE_CHECKING
gcc_assert (dv_is_value_p (dv));
+#endif
return dv;
}
-static inline hashval_t
-dv_htab_hash (decl_or_value dv)
+typedef unsigned int dvuid;
+
+/* Return the uid of DV. */
+
+static inline dvuid
+dv_uid (decl_or_value dv)
{
if (dv_is_value_p (dv))
- return -(hashval_t)(CSELIB_VAL_PTR (dv_as_value (dv))->value);
+ return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
else
- return (VARIABLE_HASH_VAL (dv_as_decl (dv)));
+ return DECL_UID (dv_as_decl (dv));
+}
+
+/* Compute the hash from the uid. */
+
+static inline hashval_t
+dv_uid2hash (dvuid uid)
+{
+ return uid;
+}
+
+/* The hash function for a decl_or_value: hash its uid.  */
+
+static inline hashval_t
+dv_htab_hash (decl_or_value dv)
+{
+ return dv_uid2hash (dv_uid (dv));
}
/* The hash function for variable_htab, computes the hash value
const_variable const v = (const_variable) x;
decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
- if (dv_as_opaque (v->dv) == dv_as_opaque (dv))
- return true;
-
-#if ENABLE_CHECKING
- {
- bool visv, dvisv;
-
- visv = dv_is_value_p (v->dv);
- dvisv = dv_is_value_p (dv);
-
- if (visv != dvisv)
- return false;
-
- if (visv)
- gcc_assert (CSELIB_VAL_PTR (dv_as_value (v->dv))
- != CSELIB_VAL_PTR (dv_as_value (dv)));
- else
- gcc_assert (VARIABLE_HASH_VAL (dv_as_decl (v->dv))
- != VARIABLE_HASH_VAL (dv_as_decl (dv)));
- }
-#endif
-
- return false;
+ return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}
/* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}
-/* Determine a total order between two distinct pointers. Compare the
- pointers as integral types if size_t is wide enough, otherwise
- resort to bitwise memory compare. The actual order does not
- matter, we just need to be consistent, so endianness is
- irrelevant. */
-
-static int
-tie_break_pointers (const void *p1, const void *p2)
-{
- gcc_assert (p1 != p2);
-
- if (sizeof (size_t) >= sizeof (void*))
- return (size_t)p1 < (size_t)p2 ? -1 : 1;
- else
- return memcmp (&p1, &p2, sizeof (p1));
-}
-
/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose the VALUE with the lowest uid; uids are unique, so no
   further tie-breaker is needed.  The idea is to arrange them into a star topology,
canon_value_cmp (rtx tval, rtx cval)
{
return !cval
- || CSELIB_VAL_PTR (tval)->value < CSELIB_VAL_PTR (cval)->value
- || (CSELIB_VAL_PTR (tval)->value == CSELIB_VAL_PTR (cval)->value
- && tie_break_pointers (tval, cval) < 0);
+ || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}
static bool dst_can_be_shared;
var_reg_set (set, loc, initialized, set_src);
}
-/* Delete current content of register LOC in dataflow set SET. If
- CLOBBER is true, also delete any other live copies of the same
- variable part. */
+/* Delete the association of register LOC in dataflow set SET with any
+ variables that aren't onepart. If CLOBBER is true, also delete any
+ other live copies of the same variable part, and delete the
+ association with onepart dvs too. */
static void
var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
{
- attrs *reg = &set->regs[REGNO (loc)];
+ attrs *nextp = &set->regs[REGNO (loc)];
attrs node, next;
if (clobber)
clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
}
- for (node = *reg; node; node = next)
+ for (node = *nextp; node; node = next)
{
next = node->next;
- delete_variable_part (set, node->loc, node->dv, node->offset);
- pool_free (attrs_pool, node);
+ if (clobber || !dv_onepart_p (node->dv))
+ {
+ delete_variable_part (set, node->loc, node->dv, node->offset);
+ pool_free (attrs_pool, node);
+ *nextp = next;
+ }
+ else
+ nextp = &node->next;
}
- *reg = NULL;
}
/* Delete content of register with number REGNO in dataflow set SET. */
delete_variable_part (set, loc, dv_from_decl (decl), offset);
}
-/* Map a value to a location it was just stored in. */
+/* Bind a value to a location it was just stored in. If MODIFIED
+ holds, assume the location was modified, detaching it from any
+ values bound to it. */
static void
-val_store (dataflow_set *set, rtx val, rtx loc, rtx insn)
+val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
{
cselib_val *v = CSELIB_VAL_PTR (val);
if (REG_P (loc))
{
- var_regno_delete (set, REGNO (loc));
+ if (modified)
+ var_regno_delete (set, REGNO (loc));
var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
dv_from_value (val), 0, NULL_RTX, INSERT);
}
: DECL_CHANGED (dv_as_decl (dv)));
}
-/* Return a location list node whose loc is rtx_equal to LOC, in the
+/* Vector of VALUEs that should have VALUE_RECURSED_INTO bit cleared
+ at the end of find_loc_in_1pdv. Not a static variable in find_loc_in_1pdv
+ to avoid constant allocation/freeing of it. */
+static VEC(rtx, heap) *values_to_unmark;
+
+/* Helper function for find_loc_in_1pdv.
+ Return a location list node whose loc is rtx_equal to LOC, in the
location list of a one-part variable or value VAR, or in that of
any values recursively mentioned in the location lists. */
static location_chain
-find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
+find_loc_in_1pdv_1 (rtx loc, variable var, htab_t vars)
{
location_chain node;
{
location_chain where;
VALUE_RECURSED_INTO (node->loc) = true;
- if ((where = find_loc_in_1pdv (loc, var, vars)))
- {
- VALUE_RECURSED_INTO (node->loc) = false;
- return where;
- }
- VALUE_RECURSED_INTO (node->loc) = false;
+ VEC_safe_push (rtx, heap, values_to_unmark, node->loc);
+ if ((where = find_loc_in_1pdv_1 (loc, var, vars)))
+ return where;
}
}
return NULL;
}
+/* Return a location list node whose loc is rtx_equal to LOC, in the
+ location list of a one-part variable or value VAR, or in that of
+ any values recursively mentioned in the location lists. */
+
+static location_chain
+find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
+{
+ location_chain ret;
+ unsigned int i;
+ rtx value;
+
+ ret = find_loc_in_1pdv_1 (loc, var, vars);
+ for (i = 0; VEC_iterate (rtx, values_to_unmark, i, value); i++)
+ VALUE_RECURSED_INTO (value) = false;
+ VEC_truncate (rtx, values_to_unmark, 0);
+ return ret;
+}
+
/* Hash table iteration argument passed to variable_merge. */
struct dfset_merge
{
cdv = dv_from_value (cval);
if (dump_file)
fprintf (dump_file,
- "Created new value %i for reg %i\n",
- v->value, REGNO (node->loc));
+ "Created new value %u:%u for reg %i\n",
+ v->uid, v->hash, REGNO (node->loc));
}
var_reg_decl_set (*dfpm->permp, node->loc,
return where;
}
+/* Return TRUE if the value of MEM may vary across a call. */
+
+static bool
+mem_dies_at_call (rtx mem)
+{
+ tree expr = MEM_EXPR (mem);
+ tree decl;
+
+ if (!expr)
+ return true;
+
+ decl = get_base_address (expr);
+
+ if (!decl)
+ return true;
+
+ if (!DECL_P (decl))
+ return true;
+
+ return (may_be_aliased (decl)
+ || (!TREE_READONLY (decl) && is_global_var (decl)));
+}
+
/* Remove all MEMs from the location list of a hash table entry for a
one-part variable, except those whose MEM attributes map back to
- the variable itself, directly or within a VALUE.
-
- ??? We could also preserve MEMs that reference stack slots that are
- annotated as not addressable. This is arguably even more reliable
- than the current heuristic. */
+ the variable itself, directly or within a VALUE. */
static int
dataflow_set_preserve_mem_locs (void **slot, void *data)
{
for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
{
- /* We want to remove a MEM that doesn't refer to DECL. */
+ /* We want to remove dying MEMs that don't refer to
+ DECL. */
if (GET_CODE (loc->loc) == MEM
&& (MEM_EXPR (loc->loc) != decl
- || MEM_OFFSET (loc->loc)))
+ || MEM_OFFSET (loc->loc))
+ && !mem_dies_at_call (loc->loc))
break;
- /* We want to move here a MEM that does refer to DECL. */
+ /* We want to move here MEMs that do refer to DECL. */
else if (GET_CODE (loc->loc) == VALUE
&& find_mem_expr_in_1pdv (decl, loc->loc,
shared_hash_htab (set->vars)))
- break;
+ break;
}
if (!loc)
if (GET_CODE (loc->loc) != MEM
|| (MEM_EXPR (loc->loc) == decl
- && MEM_OFFSET (loc->loc) == 0))
+ && MEM_OFFSET (loc->loc) == 0)
+ || !mem_dies_at_call (loc->loc))
{
if (old_loc != loc->loc && emit_notes)
{
if (var->refcount > 1 || shared_hash_shared (set->vars))
{
for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
- if (GET_CODE (loc->loc) == MEM)
+ if (GET_CODE (loc->loc) == MEM
+ && mem_dies_at_call (loc->loc))
break;
if (!loc)
for (locp = &var->var_part[0].loc_chain, loc = *locp;
loc; loc = *locp)
{
- if (GET_CODE (loc->loc) != MEM)
+ if (GET_CODE (loc->loc) != MEM
+ || !mem_dies_at_call (loc->loc))
{
locp = &loc->next;
continue;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "dataflow difference found: removal of:\n");
- dump_variable (var1);
+ dump_var (var1);
}
/* Stop traversing the hash table. */
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "dataflow difference found: old and new follow:\n");
- dump_variable (var1);
- dump_variable (var2);
+ dump_var (var1);
+ dump_var (var2);
}
/* Stop traversing the hash table. */
MO_CLOBBER if no micro operation is to be generated. */
static enum micro_operation_type
-use_type (rtx *loc, struct count_use_info *cui, enum machine_mode *modep)
+use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
{
tree expr;
- cselib_val *val;
if (cui && cui->sets)
{
- if (GET_CODE (*loc) == VAR_LOCATION)
+ if (GET_CODE (loc) == VAR_LOCATION)
{
- if (track_expr_p (PAT_VAR_LOCATION_DECL (*loc), false))
+ if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
{
- rtx ploc = PAT_VAR_LOCATION_LOC (*loc);
- cselib_val *val = cselib_lookup (ploc, GET_MODE (*loc), 1);
+ rtx ploc = PAT_VAR_LOCATION_LOC (loc);
+ cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
/* ??? flag_float_store and volatile mems are never
given values, but we could in theory use them for
return MO_CLOBBER;
}
- if ((REG_P (*loc) || MEM_P (*loc))
- && (val = find_use_val (*loc, GET_MODE (*loc), cui)))
+ if (REG_P (loc) || MEM_P (loc))
{
if (modep)
- *modep = GET_MODE (*loc);
+ *modep = GET_MODE (loc);
if (cui->store_p)
{
- if (REG_P (*loc)
- || cselib_lookup (XEXP (*loc, 0), GET_MODE (*loc), 0))
+ if (REG_P (loc)
+ || (find_use_val (loc, GET_MODE (loc), cui)
+ && cselib_lookup (XEXP (loc, 0), GET_MODE (loc), 0)))
return MO_VAL_SET;
}
- else if (!cselib_preserved_value_p (val))
- return MO_VAL_USE;
+ else
+ {
+ cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
+
+ if (val && !cselib_preserved_value_p (val))
+ return MO_VAL_USE;
+ }
}
}
- if (REG_P (*loc))
+ if (REG_P (loc))
{
- gcc_assert (REGNO (*loc) < FIRST_PSEUDO_REGISTER);
+ gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
- expr = REG_EXPR (*loc);
+ expr = REG_EXPR (loc);
if (!expr)
return MO_USE_NO_VAR;
else if (target_for_debug_bind (var_debug_decl (expr)))
return MO_CLOBBER;
- else if (track_loc_p (*loc, expr, REG_OFFSET (*loc),
+ else if (track_loc_p (loc, expr, REG_OFFSET (loc),
false, modep, NULL))
return MO_USE;
else
return MO_USE_NO_VAR;
}
- else if (MEM_P (*loc))
+ else if (MEM_P (loc))
{
- expr = MEM_EXPR (*loc);
+ expr = MEM_EXPR (loc);
if (!expr)
return MO_CLOBBER;
else if (target_for_debug_bind (var_debug_decl (expr)))
return MO_CLOBBER;
- else if (track_loc_p (*loc, expr, INT_MEM_OFFSET (*loc),
+ else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
false, modep, NULL))
return MO_USE;
else
INSN is instruction which the LOC is part of. */
static int
-count_uses (rtx *loc, void *cuip)
+count_uses (rtx *ploc, void *cuip)
{
+ rtx loc = *ploc;
struct count_use_info *cui = (struct count_use_info *) cuip;
enum micro_operation_type mopt = use_type (loc, cui, NULL);
if (mopt != MO_CLOBBER)
{
cselib_val *val;
- enum machine_mode mode = GET_MODE (*loc);
-
- VTI (cui->bb)->n_mos++;
-
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (*loc, cui->bb, cui->insn, mopt, dump_file);
+ enum machine_mode mode = GET_MODE (loc);
switch (mopt)
{
case MO_VAL_LOC:
- loc = &PAT_VAR_LOCATION_LOC (*loc);
- if (VAR_LOC_UNKNOWN_P (*loc))
+ loc = PAT_VAR_LOCATION_LOC (loc);
+ if (VAR_LOC_UNKNOWN_P (loc))
break;
/* Fall through. */
case MO_VAL_USE:
case MO_VAL_SET:
- if (MEM_P (*loc)
- && !REG_P (XEXP (*loc, 0)) && !MEM_P (XEXP (*loc, 0)))
+ if (MEM_P (loc)
+ && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
{
enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (*loc));
- val = cselib_lookup (XEXP (*loc, 0), address_mode, false);
+ = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
+ val = cselib_lookup (XEXP (loc, 0), address_mode, 0);
if (val && !cselib_preserved_value_p (val))
{
VTI (cui->bb)->n_mos++;
cselib_preserve_value (val);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ log_op_type (XEXP (loc, 0), cui->bb, cui->insn,
+ MO_VAL_USE, dump_file);
}
}
- val = find_use_val (*loc, mode, cui);
+ val = find_use_val (loc, mode, cui);
if (val)
- cselib_preserve_value (val);
+ {
+ if (mopt == MO_VAL_SET
+ && GET_CODE (PATTERN (cui->insn)) == COND_EXEC
+ && (REG_P (loc)
+ || (MEM_P (loc)
+ && (use_type (loc, NULL, NULL) == MO_USE
+ || cui->sets))))
+ {
+ cselib_val *oval = cselib_lookup (loc, GET_MODE (loc), 0);
+
+ gcc_assert (oval != val);
+ gcc_assert (REG_P (loc) || MEM_P (loc));
+
+ if (!cselib_preserved_value_p (oval))
+ {
+ VTI (cui->bb)->n_mos++;
+ cselib_preserve_value (oval);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ log_op_type (loc, cui->bb, cui->insn,
+ MO_VAL_USE, dump_file);
+ }
+ }
+
+ cselib_preserve_value (val);
+ }
else
- gcc_assert (mopt == MO_VAL_LOC);
+ gcc_assert (mopt == MO_VAL_LOC
+ || (mopt == MO_VAL_SET && cui->store_p));
break;
default:
break;
}
+
+ VTI (cui->bb)->n_mos++;
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ log_op_type (loc, cui->bb, cui->insn, mopt, dump_file);
}
return 0;
to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
static int
-add_uses (rtx *loc, void *data)
+add_uses (rtx *ploc, void *data)
{
+ rtx loc = *ploc;
enum machine_mode mode = VOIDmode;
struct count_use_info *cui = (struct count_use_info *)data;
enum micro_operation_type type = use_type (loc, cui, &mode);
micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
mo->type = type;
- mo->u.loc = type == MO_USE ? var_lowpart (mode, *loc) : *loc;
+ mo->u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
mo->insn = cui->insn;
if (type == MO_VAL_LOC)
{
- rtx oloc = *loc;
+ rtx oloc = loc;
rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
cselib_val *val;
oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
- type2 = use_type (&vloc, 0, &mode2);
+ type2 = use_type (vloc, 0, &mode2);
gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
|| type2 == MO_CLOBBER);
{
enum machine_mode mode2 = VOIDmode;
enum micro_operation_type type2;
- cselib_val *val = find_use_val (*loc, GET_MODE (*loc), cui);
- rtx vloc, oloc = *loc, nloc;
+ cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
+ rtx vloc, oloc = loc, nloc;
gcc_assert (cui->sets);
|| type2 == MO_CLOBBER);
if (type2 == MO_USE)
- vloc = var_lowpart (mode2, *loc);
+ vloc = var_lowpart (mode2, loc);
else
vloc = oloc;
*/
- nloc = replace_expr_with_values (*loc);
+ nloc = replace_expr_with_values (loc);
if (!nloc)
nloc = oloc;
basic_block bb = cui->bb;
micro_operation *mo;
rtx oloc = loc, nloc, src = NULL;
- enum micro_operation_type type = use_type (&loc, cui, &mode);
+ enum micro_operation_type type = use_type (loc, cui, &mode);
bool track_p = false;
cselib_val *v;
bool resolve, preserve;
mo = VTI (bb)->mos + VTI (bb)->n_mos++;
if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
- || !(track_p = use_type (&loc, NULL, &mode2) == MO_USE)
+ || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
|| GET_CODE (expr) == CLOBBER)
{
mo->type = MO_CLOBBER;
mo->insn = cui->insn;
}
else if (MEM_P (loc)
- && ((track_p = use_type (&loc, NULL, &mode2) == MO_USE)
+ && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
|| cui->sets))
{
mo = VTI (bb)->mos + VTI (bb)->n_mos++;
v = find_use_val (oloc, mode, cui);
+ if (!v)
+ goto log_and_return;
+
resolve = preserve = !cselib_preserved_value_p (v);
nloc = replace_expr_with_values (oloc);
if (nloc)
oloc = nloc;
- if (resolve && GET_CODE (mo->u.loc) == SET)
+ if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
+ {
+ cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0);
+
+ gcc_assert (oval != v);
+ gcc_assert (REG_P (oloc) || MEM_P (oloc));
+
+ if (!cselib_preserved_value_p (oval))
+ {
+ micro_operation *nmo = VTI (bb)->mos + VTI (bb)->n_mos++;
+
+ cselib_preserve_value (oval);
+
+ nmo->type = MO_VAL_USE;
+ nmo->u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
+ VAL_NEEDS_RESOLUTION (nmo->u.loc) = 1;
+ nmo->insn = mo->insn;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ log_op_type (nmo->u.loc, cui->bb, cui->insn,
+ nmo->type, dump_file);
+ }
+
+ resolve = false;
+ }
+ else if (resolve && GET_CODE (mo->u.loc) == SET)
{
nloc = replace_expr_with_values (SET_SRC (expr));
if (VAL_NEEDS_RESOLUTION (loc))
val_resolve (out, val, vloc, insn);
+ else
+ val_store (out, val, uloc, insn, false);
if (VAL_HOLDS_TRACK_EXPR (loc))
{
else if (REG_P (uloc))
var_regno_delete (out, REGNO (uloc));
- val_store (out, val, vloc, insn);
+ val_store (out, val, vloc, insn, true);
}
break;
FOR_EACH_BB (bb)
gcc_assert (VTI (bb)->flooded);
+ VEC_free (rtx, heap, values_to_unmark);
free (bb_order);
fibheap_delete (worklist);
fibheap_delete (pending);
/* Print the information about variable *SLOT to dump file. */
static int
-dump_variable_slot (void **slot, void *data ATTRIBUTE_UNUSED)
+dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
{
variable var = (variable) *slot;
- dump_variable (var);
+ dump_var (var);
/* Continue traversing the hash table. */
return 1;
/* Print the information about variable VAR to dump file. */
static void
-dump_variable (variable var)
+dump_var (variable var)
{
int i;
location_chain node;
if (htab_elements (vars) > 0)
{
fprintf (dump_file, "Variables:\n");
- htab_traverse (vars, dump_variable_slot, NULL);
+ htab_traverse (vars, dump_var_slot, NULL);
}
}
if (VAL_NEEDS_RESOLUTION (loc))
val_resolve (set, val, vloc, insn);
+ else
+ val_store (set, val, uloc, insn, false);
if (VAL_HOLDS_TRACK_EXPR (loc))
{
else if (REG_P (uloc))
var_regno_delete (set, REGNO (uloc));
- val_store (set, val, vloc, insn);
+ val_store (set, val, vloc, insn, true);
emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
set->vars);
if (MAY_HAVE_DEBUG_INSNS)
{
cselib_preserve_only_values (true);
- cselib_reset_table_with_next_value (cselib_get_next_unknown_value ());
+ cselib_reset_table (cselib_get_next_uid ());
}
}
rtx insn;
HOST_WIDE_INT pre, post = 0;
int count;
- unsigned int next_value_before = cselib_get_next_unknown_value ();
- unsigned int next_value_after = next_value_before;
+ unsigned int next_uid_before = cselib_get_next_uid ();
+ unsigned int next_uid_after = next_uid_before;
if (MAY_HAVE_DEBUG_INSNS)
{
cselib_record_sets_hook = count_with_sets;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "first value: %i\n",
- cselib_get_next_unknown_value ());
+ cselib_get_next_uid ());
}
/* Count the number of micro operations. */
if (MAY_HAVE_DEBUG_INSNS)
{
cselib_preserve_only_values (false);
- next_value_after = cselib_get_next_unknown_value ();
- cselib_reset_table_with_next_value (next_value_before);
+ next_uid_after = cselib_get_next_uid ();
+ cselib_reset_table (next_uid_before);
cselib_record_sets_hook = add_with_sets;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "first value: %i\n",
- cselib_get_next_unknown_value ());
+ cselib_get_next_uid ());
}
/* Add the micro-operations to the array. */
if (MAY_HAVE_DEBUG_INSNS)
{
cselib_preserve_only_values (true);
- gcc_assert (next_value_after == cselib_get_next_unknown_value ());
- cselib_reset_table_with_next_value (next_value_after);
+ gcc_assert (next_uid_after == cselib_get_next_uid ());
+ cselib_reset_table (next_uid_after);
cselib_record_sets_hook = NULL;
}
}