/* Variable tracking routines for the GNU compiler.
- Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
+ Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
Free Software Foundation, Inc.
This file is part of GCC.
}
}
+/* Bind VAL to LOC in SET.  If MODIFIED, detach LOC from any values
+   bound to it.  Only REG and MEM locations are expected here; any
+   other kind of location indicates a static equivalence and is
+   rejected with gcc_unreachable.  */
+
+static inline void
+val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
+{
+ if (REG_P (loc))
+ {
+ if (modified)
+ var_regno_delete (set, REGNO (loc));
+ var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
+ dv_from_value (val), 0, NULL_RTX, INSERT);
+ }
+ else if (MEM_P (loc))
+ {
+ /* Walk VAL's location list; if its head is itself a VALUE,
+ look through to the canonical value's location list.  */
+ struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
+
+ if (l && GET_CODE (l->loc) == VALUE)
+ l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
+
+ /* If this MEM is a global constant, we don't need it in the
+ dynamic tables.  ??? We should test this before emitting the
+ micro-op in the first place.  Scan for a MEM loc sharing
+ LOC's address; finding one means LOC is already statically
+ known.  */
+ while (l)
+ if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
+ break;
+ else
+ l = l->next;
+
+ /* Only record LOC dynamically when no static MEM loc matched.  */
+ if (!l)
+ var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
+ dv_from_value (val), 0, NULL_RTX, INSERT);
+ }
+ else
+ {
+ /* Other kinds of equivalences are necessarily static, at least
+ so long as we do not perform substitutions while merging
+ expressions.  */
+ gcc_unreachable ();
+ /* NOTE(review): the call below is unreachable; presumably kept
+ as documentation should the assertion above ever be relaxed.  */
+ set_variable_part (set, loc, dv_from_value (val), 0,
+ VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
+ }
+}
+
/* Bind a value to a location it was just stored in. If MODIFIED
holds, assume the location was modified, detaching it from any
values bound to it. */
gcc_checking_assert (!unsuitable_loc (loc));
- if (REG_P (loc))
- {
- if (modified)
- var_regno_delete (set, REGNO (loc));
- var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
- dv_from_value (val), 0, NULL_RTX, INSERT);
- }
- else if (MEM_P (loc))
- var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
- dv_from_value (val), 0, NULL_RTX, INSERT);
- else
- /* ??? Ideally we wouldn't get these, and use them from the static
- cselib loc list. */
- set_variable_part (set, loc, dv_from_value (val), 0,
- VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
+ val_bind (set, val, loc, modified);
}
/* Reset this node, detaching all its equivalences. Return the slot
/* If we didn't find any equivalence, we need to remember that
this value is held in the named register. */
- if (!found)
- var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
- dv_from_value (val), 0, NULL_RTX, INSERT);
+ if (found)
+ return;
}
- else if (MEM_P (loc))
- /* ??? Merge equivalent MEMs. */
- var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
- dv_from_value (val), 0, NULL_RTX, INSERT);
- else
- /* ??? Ideally we wouldn't get these, and use them from the static
- cselib loc list. */
- /* ??? Merge equivalent expressions. */
- set_variable_part (set, loc, dv_from_value (val), 0,
- VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
+ /* ??? Attempt to find and merge equivalent MEMs or other
+ expressions too. */
+
+ val_bind (set, val, loc, false);
}
/* Initialize dataflow set SET to be empty.
if (GET_CODE (y) != ENTRY_VALUE)
return 1;
gcc_assert (GET_MODE (x) == GET_MODE (y));
- return loc_cmp (XEXP (x, 0), XEXP (y, 0));
+ return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
}
if (GET_CODE (y) == ENTRY_VALUE)
MO_CLOBBER as well. */
#define VAL_EXPR_IS_CLOBBERED(x) \
(RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
-/* Whether the location is a CONCAT of the MO_VAL_SET expression and
- a reverse operation that should be handled afterwards. */
-#define VAL_EXPR_HAS_REVERSE(x) \
- (RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val)
/* All preserved VALUEs. */
static VEC (rtx, heap) *preserved_values;
GET_MODE (mloc));
if (val && !cselib_preserved_value_p (val))
- {
- micro_operation moa;
- preserve_value (val);
-
- if (GET_CODE (XEXP (mloc, 0)) != ENTRY_VALUE
- && (GET_CODE (XEXP (mloc, 0)) != PLUS
- || XEXP (XEXP (mloc, 0), 0) != cfa_base_rtx
- || !CONST_INT_P (XEXP (XEXP (mloc, 0), 1))))
- {
- mloc = cselib_subst_to_values (XEXP (mloc, 0),
- GET_MODE (mloc));
- moa.type = MO_VAL_USE;
- moa.insn = cui->insn;
- moa.u.loc = gen_rtx_CONCAT (address_mode,
- val->val_rtx, mloc);
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (moa.u.loc, cui->bb, cui->insn,
- moa.type, dump_file);
- VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
- &moa);
- }
- }
+ preserve_value (val);
}
if (CONSTANT_P (vloc)
{
enum machine_mode mode2;
enum micro_operation_type type2;
- rtx nloc = replace_expr_with_values (vloc);
+ rtx nloc = NULL;
+ bool resolvable = REG_P (vloc) || MEM_P (vloc);
+
+ if (resolvable)
+ nloc = replace_expr_with_values (vloc);
if (nloc)
{
if (type2 == MO_CLOBBER
&& !cselib_preserved_value_p (val))
{
- VAL_NEEDS_RESOLUTION (oloc) = 1;
+ VAL_NEEDS_RESOLUTION (oloc) = resolvable;
preserve_value (val);
}
}
GET_MODE (mloc));
if (val && !cselib_preserved_value_p (val))
- {
- micro_operation moa;
- preserve_value (val);
-
- if (GET_CODE (XEXP (mloc, 0)) != ENTRY_VALUE
- && (GET_CODE (XEXP (mloc, 0)) != PLUS
- || XEXP (XEXP (mloc, 0), 0) != cfa_base_rtx
- || !CONST_INT_P (XEXP (XEXP (mloc, 0), 1))))
- {
- mloc = cselib_subst_to_values (XEXP (mloc, 0),
- GET_MODE (mloc));
- moa.type = MO_VAL_USE;
- moa.insn = cui->insn;
- moa.u.loc = gen_rtx_CONCAT (address_mode,
- val->val_rtx, mloc);
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (moa.u.loc, cui->bb, cui->insn,
- moa.type, dump_file);
- VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
- &moa);
- }
- }
+ preserve_value (val);
}
type2 = use_type (loc, 0, &mode2);
*/
+ gcc_checking_assert (REG_P (loc) || MEM_P (loc));
nloc = replace_expr_with_values (loc);
if (!nloc)
nloc = oloc;
representable anyway. */
#define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
-/* Attempt to reverse the EXPR operation in the debug info. Say for
- reg1 = reg2 + 6 even when reg2 is no longer live we
- can express its value as VAL - 6. */
+/* Attempt to reverse the EXPR operation in the debug info and record
+ it in the cselib table.  Say, for reg1 = reg2 + 6, even when reg2 is
+ no longer live we can express its value as VAL - 6.  */
-static rtx
-reverse_op (rtx val, const_rtx expr)
+static void
+reverse_op (rtx val, const_rtx expr, rtx insn)
{
rtx src, arg, ret;
cselib_val *v;
+ struct elt_loc_list *l;
enum rtx_code code;
if (GET_CODE (expr) != SET)
- return NULL_RTX;
+ return;
if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
- return NULL_RTX;
+ return;
src = SET_SRC (expr);
switch (GET_CODE (src))
case NOT:
case NEG:
if (!REG_P (XEXP (src, 0)))
- return NULL_RTX;
+ return;
break;
case SIGN_EXTEND:
case ZERO_EXTEND:
if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
- return NULL_RTX;
+ return;
break;
default:
- return NULL_RTX;
+ return;
}
if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
- return NULL_RTX;
+ return;
v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
if (!v || !cselib_preserved_value_p (v))
- return NULL_RTX;
+ return;
+
+ /* Use canonical V to avoid creating multiple redundant expressions
+ for different VALUES equivalent to V. */
+ v = canonical_cselib_val (v);
+
+ /* Adding a reverse op isn't useful if V already has an always valid
+ location.  Ignore ENTRY_VALUE: even though it is always constant, we
+ should prefer non-ENTRY_VALUE locations whenever possible.  */
+ for (l = v->locs; l; l = l->next)
+ if (CONSTANT_P (l->loc)
+ && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
+ return;
switch (GET_CODE (src))
{
case NOT:
case NEG:
if (GET_MODE (v->val_rtx) != GET_MODE (val))
- return NULL_RTX;
+ return;
ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
break;
case SIGN_EXTEND:
goto binary;
binary:
if (GET_MODE (v->val_rtx) != GET_MODE (val))
- return NULL_RTX;
+ return;
arg = XEXP (src, 1);
if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
{
arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
if (arg == NULL_RTX)
- return NULL_RTX;
+ return;
if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
- return NULL_RTX;
+ return;
}
ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
if (ret == val)
gcc_unreachable ();
}
- return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret);
+ cselib_add_permanent_equiv (v, ret, insn);
}
/* Add stores (register and memory references) LOC which will be tracked
bool track_p = false;
cselib_val *v;
bool resolve, preserve;
- rtx reverse;
if (type == MO_CLOBBER)
return;
GET_MODE (mloc));
if (val && !cselib_preserved_value_p (val))
- {
- preserve_value (val);
-
- if (GET_CODE (XEXP (mloc, 0)) != ENTRY_VALUE
- && (GET_CODE (XEXP (mloc, 0)) != PLUS
- || XEXP (XEXP (mloc, 0), 0) != cfa_base_rtx
- || !CONST_INT_P (XEXP (XEXP (mloc, 0), 1))))
- {
- mloc = cselib_subst_to_values (XEXP (mloc, 0),
- GET_MODE (mloc));
- mo.type = MO_VAL_USE;
- mo.insn = cui->insn;
- mo.u.loc = gen_rtx_CONCAT (address_mode,
- val->val_rtx, mloc);
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (mo.u.loc, cui->bb, cui->insn,
- mo.type, dump_file);
- VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
- }
- }
+ preserve_value (val);
}
if (GET_CODE (expr) == CLOBBER || !track_p)
gcc_assert (oval != v);
gcc_assert (REG_P (oloc) || MEM_P (oloc));
- if (!cselib_preserved_value_p (oval))
+ if (oval && !cselib_preserved_value_p (oval))
{
micro_operation moa;
}
else if (resolve && GET_CODE (mo.u.loc) == SET)
{
- nloc = replace_expr_with_values (SET_SRC (expr));
+ if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
+ nloc = replace_expr_with_values (SET_SRC (expr));
+ else
+ nloc = NULL_RTX;
/* Avoid the mode mismatch between oexpr and expr. */
if (!nloc && mode != mode2)
gcc_assert (oloc == SET_DEST (expr));
}
- if (nloc)
+ if (nloc && nloc != SET_SRC (mo.u.loc))
oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
else
{
*/
if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
- {
- reverse = reverse_op (v->val_rtx, expr);
- if (reverse)
- {
- loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, reverse);
- VAL_EXPR_HAS_REVERSE (loc) = 1;
- }
- }
+ reverse_op (v->val_rtx, expr, cui->insn);
mo.u.loc = loc;
case MO_VAL_SET:
{
rtx loc = mo->u.loc;
- rtx val, vloc, uloc, reverse = NULL_RTX;
+ rtx val, vloc, uloc;
vloc = loc;
- if (VAL_EXPR_HAS_REVERSE (loc))
- {
- reverse = XEXP (loc, 1);
- vloc = XEXP (loc, 0);
- }
uloc = XEXP (vloc, 1);
val = XEXP (vloc, 0);
vloc = uloc;
var_regno_delete (out, REGNO (uloc));
val_store (out, val, vloc, insn, true);
-
- if (reverse)
- val_store (out, XEXP (reverse, 0), XEXP (reverse, 1),
- insn, false);
}
break;
/* We won't notify variables that are being expanded,
because their dependency list is cleared before
recursing. */
+ NO_LOC_P (value) = false;
VALUE_RECURSED_INTO (value) = false;
gcc_checking_assert (dv_changed_p (dv));
bool pending_recursion;
rtx loc_from = NULL;
struct elt_loc_list *cloc = NULL;
- int depth, saved_depth = elcd->depth;
+ int depth = 0, saved_depth = elcd->depth;
/* Clear all backlinks pointing at this, so that we're not notified
while we're active. */
VAR_LOC_FROM (var) = loc_from;
VAR_LOC_DEPTH (var) = depth;
+ gcc_checking_assert (!depth == !result);
+
elcd->depth = update_depth (saved_depth, depth);
/* Indicate whether any of the dependencies are pending recursion
gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
if (NO_LOC_P (x))
- return NULL;
+ {
+ gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
+ return NULL;
+ }
var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
/* Make sure that the call related notes come first. */
while (NEXT_INSN (insn)
&& NOTE_P (insn)
- && NOTE_DURING_CALL_P (insn))
+ && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
+ && NOTE_DURING_CALL_P (insn))
+ || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
insn = NEXT_INSN (insn);
- if (NOTE_P (insn) && NOTE_DURING_CALL_P (insn))
+ if (NOTE_P (insn)
+ && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
+ && NOTE_DURING_CALL_P (insn))
+ || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
else
note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
case MO_VAL_SET:
{
rtx loc = mo->u.loc;
- rtx val, vloc, uloc, reverse = NULL_RTX;
+ rtx val, vloc, uloc;
vloc = loc;
- if (VAL_EXPR_HAS_REVERSE (loc))
- {
- reverse = XEXP (loc, 1);
- vloc = XEXP (loc, 0);
- }
uloc = XEXP (vloc, 1);
val = XEXP (vloc, 0);
vloc = uloc;
val_store (set, val, vloc, insn, true);
- if (reverse)
- val_store (set, XEXP (reverse, 0), XEXP (reverse, 1),
- insn, false);
-
emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
set->vars);
}
return false;
}
-/* Mark the value for the ENTRY_VALUE of RTL as equivalent to EQVAL in
- OUT. */
+/* Record the value for the ENTRY_VALUE of RTL as a global equivalence
+ of VAL. */
static void
-create_entry_value (dataflow_set *out, rtx eqval, rtx rtl)
+record_entry_value (cselib_val *val, rtx rtl)
{
rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
- cselib_val *val;
ENTRY_VALUE_EXP (ev) = rtl;
- val = cselib_lookup_from_insn (ev, GET_MODE (ev), true,
- VOIDmode, get_insns ());
-
- if (val->val_rtx != eqval)
- {
- preserve_value (val);
- set_variable_part (out, val->val_rtx, dv_from_value (eqval), 0,
- VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
- set_variable_part (out, eqval, dv_from_value (val->val_rtx), 0,
- VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
- }
+ cselib_add_permanent_equiv (val, ev, get_insns ());
}
/* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
if (dv_is_value_p (dv))
{
- create_entry_value (out, dv_as_value (dv), incoming);
+ record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
&& INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
{
if (val)
{
preserve_value (val);
+ record_entry_value (val, mem);
set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
- create_entry_value (out, val->val_rtx, mem);
}
}
}
return true;
}
+/* This is *not* reset after each function. It gives each
+ NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
+ a unique label number. */
+
+static int debug_label_num = 1;
+
/* Get rid of all debug insns from the insn stream. */
static void
{
FOR_BB_INSNS_SAFE (bb, insn, next)
if (DEBUG_INSN_P (insn))
- delete_insn (insn);
+ {
+ tree decl = INSN_VAR_LOCATION_DECL (insn);
+ if (TREE_CODE (decl) == LABEL_DECL
+ && DECL_NAME (decl)
+ && !DECL_RTL_SET_P (decl))
+ {
+ PUT_CODE (insn, NOTE);
+ NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
+ NOTE_DELETED_LABEL_NAME (insn)
+ = IDENTIFIER_POINTER (DECL_NAME (decl));
+ SET_DECL_RTL (decl, insn);
+ CODE_LABEL_NUMBER (insn) = debug_label_num++;
+ }
+ else
+ delete_insn (insn);
+ }
}
}