return 1;
}
+/* Callback for for_each_rtx: return 1 (stopping the walk) upon
+ encountering a VALUE rtx whose cselib UID is greater than the int
+ that D points to; return 0 otherwise so the walk continues. */
+
+static int
+refs_newer_value_cb (rtx *x, void *d)
+{
+ if (GET_CODE (*x) == VALUE && CSELIB_VAL_PTR (*x)->uid > *(int *)d)
+ return 1;
+
+ return 0;
+}
+
+/* Return TRUE if EXPR refers (at any depth) to a VALUE whose uid is
+ greater than that of V, i.e. to a VALUE created after V. Used to
+ avoid following equivalences that could recurse into newer VALUEs.
+ Relies on for_each_rtx returning the callback's nonzero result,
+ which converts to bool here. */
+
+static bool
+refs_newer_value_p (rtx expr, rtx v)
+{
+ int minuid = CSELIB_VAL_PTR (v)->uid;
+
+ return for_each_rtx (&expr, refs_newer_value_cb, &minuid);
+}
+
/* Convert the address X into something we can use. This is done by returning
it unchanged unless it is a value; in the latter case we call cselib to get
a more useful rtx. */
v = CSELIB_VAL_PTR (x);
if (v)
{
+ bool have_equivs = cselib_have_permanent_equivalences ();
+ if (have_equivs)
+ v = canonical_cselib_val (v);
for (l = v->locs; l; l = l->next)
if (CONSTANT_P (l->loc))
return l->loc;
for (l = v->locs; l; l = l->next)
- if (!REG_P (l->loc) && !MEM_P (l->loc))
+ if (!REG_P (l->loc) && !MEM_P (l->loc)
+ /* Avoid infinite recursion when potentially dealing with
+ var-tracking artificial equivalences, by skipping the
+ equivalences themselves, and not choosing expressions
+ that refer to newer VALUEs. */
+ && (!have_equivs
+ || (GET_CODE (l->loc) != VALUE
+ && !refs_newer_value_p (l->loc, x))))
return l->loc;
+ if (have_equivs)
+ {
+ for (l = v->locs; l; l = l->next)
+ if (REG_P (l->loc)
+ || (GET_CODE (l->loc) != VALUE
+ && !refs_newer_value_p (l->loc, x)))
+ return l->loc;
+ /* Return the canonical value. */
+ return v->val_rtx;
+ }
if (v->locs)
return v->locs->loc;
}
{
struct elt_loc_list *l = NULL;
if (CSELIB_VAL_PTR (x))
- for (l = CSELIB_VAL_PTR (x)->locs; l; l = l->next)
+ for (l = canonical_cselib_val (CSELIB_VAL_PTR (x))->locs;
+ l; l = l->next)
if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, y))
break;
if (l)
{
struct elt_loc_list *l = NULL;
if (CSELIB_VAL_PTR (y))
- for (l = CSELIB_VAL_PTR (y)->locs; l; l = l->next)
+ for (l = canonical_cselib_val (CSELIB_VAL_PTR (y))->locs;
+ l; l = l->next)
if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, x))
break;
if (l)