+ dst->var_part[k].offset = dst->var_part[j].offset;
+ }
+ i--;
+ j--;
+ }
+ else if ((i >= 0 && j >= 0
+ && src->var_part[i].offset < dst->var_part[j].offset)
+ || i < 0)
+ {
+ dst->var_part[k] = dst->var_part[j];
+ j--;
+ }
+ else if ((i >= 0 && j >= 0
+ && src->var_part[i].offset > dst->var_part[j].offset)
+ || j < 0)
+ {
+ location_chain *nextp;
+
+ /* Copy the chain from SRC. */
+ nextp = &dst->var_part[k].loc_chain;
+ for (node = src->var_part[i].loc_chain; node; node = node->next)
+ {
+ location_chain new_lc;
+
+ new_lc = (location_chain) pool_alloc (loc_chain_pool);
+ new_lc->next = NULL;
+ new_lc->init = node->init;
+ if (!node->set_src || MEM_P (node->set_src))
+ new_lc->set_src = NULL;
+ else
+ new_lc->set_src = node->set_src;
+ new_lc->loc = node->loc;
+
+ *nextp = new_lc;
+ nextp = &new_lc->next;
+ }
+
+ dst->var_part[k].offset = src->var_part[i].offset;
+ i--;
+ }
+ dst->var_part[k].cur_loc = NULL;
+ }
+
+ if (flag_var_tracking_uninit)
+ for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
+ {
+ location_chain node, node2;
+ for (node = src->var_part[i].loc_chain; node; node = node->next)
+ for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
+ if (rtx_equal_p (node->loc, node2->loc))
+ {
+ if (node->init > node2->init)
+ node2->init = node->init;
+ }
+ }
+
+ /* Continue traversing the hash table. */
+ return 1;
+}
+
+/* Compute union of dataflow sets SRC and DST and store it to DST. */
+
+static void
+dataflow_set_union (dataflow_set *dst, dataflow_set *src)
+{
+ int regno;
+
+ /* Union the per-register attribute lists. */
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ attrs_list_union (&dst->regs[regno], src->regs[regno]);
+
+ if (dst->vars != empty_shared_hash)
+ htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
+ else
+ {
+ /* DST has no variables yet: dropping its empty table and
+ sharing SRC's table is cheaper than a per-element union. */
+ shared_hash_destroy (dst->vars);
+ dst->vars = shared_hash_copy (src->vars);
+ }
+}
+
+/* Whether the value is currently being expanded. Implemented by
+ reusing the rtx `used' flag on VALUE and DEBUG_EXPR rtxes; serves
+ as a cycle-breaking mark during recursive walks (see e.g.
+ find_loc_in_1pdv and canonicalize_values_star). */
+#define VALUE_RECURSED_INTO(x) \
+ (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
+/* Whether the value is in changed_variables hash table. Reuses the
+ `frame_related' flag bit of a VALUE rtx. */
+#define VALUE_CHANGED(x) \
+ (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
+/* Whether the decl is in changed_variables hash table. Reuses the
+ TREE_VISITED bit of the decl. */
+#define DECL_CHANGED(x) TREE_VISITED (x)
+
+/* Record that DV has been added into resp. removed from changed_variables
+ hashtable. */
+
+static inline void
+set_dv_changed (decl_or_value dv, bool newv)
+{
+ /* The mark bit lives on the decl or on the value, depending on
+ which kind of dv this is. */
+ if (!dv_is_value_p (dv))
+ DECL_CHANGED (dv_as_decl (dv)) = newv;
+ else
+ VALUE_CHANGED (dv_as_value (dv)) = newv;
+}
+
+/* Return true if DV is present in changed_variables hash table. */
+
+static inline bool
+dv_changed_p (decl_or_value dv)
+{
+ if (dv_is_value_p (dv))
+ return VALUE_CHANGED (dv_as_value (dv));
+ return DECL_CHANGED (dv_as_decl (dv));
+}
+
+/* Return a location list node whose loc is rtx_equal to LOC, in the
+ location list of a one-part variable or value VAR, or in that of
+ any values recursively mentioned in the location lists. VARS is
+ the hash table mapping dv keys to variables. Returns NULL if LOC
+ is not found. */
+
+static location_chain
+find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
+{
+ location_chain node;
+
+ if (!var)
+ return NULL;
+
+ gcc_assert (dv_onepart_p (var->dv));
+
+ if (!var->n_var_parts)
+ return NULL;
+
+ gcc_assert (var->var_part[0].offset == 0);
+
+ for (node = var->var_part[0].loc_chain; node; node = node->next)
+ if (rtx_equal_p (loc, node->loc))
+ return node;
+ else if (GET_CODE (node->loc) == VALUE
+ && !VALUE_RECURSED_INTO (node->loc))
+ {
+ /* NODE->loc is an equivalent value; search LOC in that
+ value's own location list. RVAR (renamed from `var',
+ which shadowed the parameter) is its variable entry. */
+ decl_or_value dv = dv_from_value (node->loc);
+ variable rvar = (variable)
+ htab_find_with_hash (vars, dv, dv_htab_hash (dv));
+
+ if (rvar)
+ {
+ location_chain where;
+ /* Mark the value while recursing into it, so cycles
+ among equivalent values cannot recurse forever. */
+ VALUE_RECURSED_INTO (node->loc) = true;
+ if ((where = find_loc_in_1pdv (loc, rvar, vars)))
+ {
+ VALUE_RECURSED_INTO (node->loc) = false;
+ return where;
+ }
+ VALUE_RECURSED_INTO (node->loc) = false;
+ }
+ }
+
+ return NULL;
+}
+
+/* Hash table iteration argument passed to variable_merge. */
+struct dfset_merge
+{
+ /* The set in which the merge is to be inserted. */
+ dataflow_set *dst;
+ /* The set that we're iterating in. */
+ dataflow_set *cur;
+ /* The set that may contain the other dv we are to merge with. */
+ dataflow_set *src;
+ /* Number of onepart dvs in src. Incremented by
+ variable_merge_over_src and decremented by
+ variable_merge_over_cur; a nonzero remainder means SRC had
+ onepart dvs that DST lacks, so DST cannot share SRC's table. */
+ int src_onepart_cnt;
+};
+
+/* Insert LOC in *NODEP, if it's not there yet. The list must be in
+ loc_cmp order, and it is maintained as such. STATUS is the
+ initialization status to record for LOC. */
+
+static void
+insert_into_intersection (location_chain *nodep, rtx loc,
+ enum var_init_status status)
+{
+ location_chain node;
+ int r;
+
+ /* Walk to the sorted position of LOC; stop early once we pass it. */
+ for (node = *nodep; node; nodep = &node->next, node = *nodep)
+ if ((r = loc_cmp (node->loc, loc)) == 0)
+ {
+ /* Already present: keep the weaker (smaller) init status. */
+ node->init = MIN (node->init, status);
+ return;
+ }
+ else if (r > 0)
+ break;
+
+ /* Not found: splice a fresh node in at *NODEP, preserving order. */
+ node = (location_chain) pool_alloc (loc_chain_pool);
+
+ node->loc = loc;
+ node->set_src = NULL;
+ node->init = status;
+ node->next = *nodep;
+ *nodep = node;
+}
+
+/* Insert in DEST the intersection the locations present in both
+ S1NODE and S2VAR, directly or indirectly. S1NODE is from a
+ variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
+ DSM->dst. VAL, if non-NULL, is the value rtx being merged and is
+ skipped when it appears as its own location. */
+
+static void
+intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
+ location_chain s1node, variable s2var)
+{
+ dataflow_set *s1set = dsm->cur;
+ dataflow_set *s2set = dsm->src;
+ location_chain found;
+
+ for (; s1node; s1node = s1node->next)
+ {
+ if (s1node->loc == val)
+ continue;
+
+ /* Direct (or value-indirect) hit in S2VAR's chain: the location
+ is in both sets, so it goes into the intersection with the
+ weaker of the two init statuses. */
+ if ((found = find_loc_in_1pdv (s1node->loc, s2var,
+ shared_hash_htab (s2set->vars))))
+ {
+ insert_into_intersection (dest, s1node->loc,
+ MIN (s1node->init, found->init));
+ continue;
+ }
+
+ /* If the location is itself a VALUE, also try the locations of
+ that value's equivalence chain in S1SET, guarding against
+ cycles with VALUE_RECURSED_INTO. */
+ if (GET_CODE (s1node->loc) == VALUE
+ && !VALUE_RECURSED_INTO (s1node->loc))
+ {
+ decl_or_value dv = dv_from_value (s1node->loc);
+ variable svar = shared_hash_find (s1set->vars, dv);
+ if (svar)
+ {
+ if (svar->n_var_parts == 1)
+ {
+ VALUE_RECURSED_INTO (s1node->loc) = true;
+ intersect_loc_chains (val, dest, dsm,
+ svar->var_part[0].loc_chain,
+ s2var);
+ VALUE_RECURSED_INTO (s1node->loc) = false;
+ }
+ }
+ }
+
+ /* ??? if the location is equivalent to any location in src,
+ searched recursively
+
+ add to dst the values needed to represent the equivalence
+
+ telling whether locations S is equivalent to another dv's
+ location list:
+
+ for each location D in the list
+
+ if S and D satisfy rtx_equal_p, then it is present
+
+ else if D is a value, recurse without cycles
+
+ else if S and D have the same CODE and MODE
+
+ for each operand oS and the corresponding oD
+
+ if oS and oD are not equivalent, then S an D are not equivalent
+
+ else if they are RTX vectors
+
+ if any vector oS element is not equivalent to its respective oD,
+ then S and D are not equivalent
+
+ */
+
+
+ }
+}
+
+/* Return -1 if X should be before Y in a location list for a 1-part
+ variable, 1 if Y should be before X, and 0 if they're equivalent
+ and should not appear in the list.
+
+ The induced order puts REGs first, then MEMs, then VALUEs, then
+ everything else ordered by rtx code and operands. */
+
+static int
+loc_cmp (rtx x, rtx y)
+{
+ int i, j, r;
+ RTX_CODE code = GET_CODE (x);
+ const char *fmt;
+
+ if (x == y)
+ return 0;
+
+ /* Registers sort before everything else, by register number. */
+ if (REG_P (x))
+ {
+ if (!REG_P (y))
+ return -1;
+ gcc_assert (GET_MODE (x) == GET_MODE (y));
+ if (REGNO (x) == REGNO (y))
+ return 0;
+ else if (REGNO (x) < REGNO (y))
+ return -1;
+ else
+ return 1;
+ }
+
+ if (REG_P (y))
+ return 1;
+
+ /* Then MEMs, ordered by their address expressions. */
+ if (MEM_P (x))
+ {
+ if (!MEM_P (y))
+ return -1;
+ gcc_assert (GET_MODE (x) == GET_MODE (y));
+ return loc_cmp (XEXP (x, 0), XEXP (y, 0));
+ }
+
+ if (MEM_P (y))
+ return 1;
+
+ /* Then VALUEs, ordered by canonicality. */
+ if (GET_CODE (x) == VALUE)
+ {
+ if (GET_CODE (y) != VALUE)
+ return -1;
+ /* Don't assert the modes are the same, that is true only
+ when not recursing. (subreg:QI (value:SI 1:1) 0)
+ and (subreg:QI (value:DI 2:2) 0) can be compared,
+ even when the modes are different. */
+ if (canon_value_cmp (x, y))
+ return -1;
+ else
+ return 1;
+ }
+
+ if (GET_CODE (y) == VALUE)
+ return 1;
+
+ /* Everything else: order by rtx code first... */
+ if (GET_CODE (x) == GET_CODE (y))
+ /* Compare operands below. */;
+ else if (GET_CODE (x) < GET_CODE (y))
+ return -1;
+ else
+ return 1;
+
+ gcc_assert (GET_MODE (x) == GET_MODE (y));
+
+ /* DEBUG_EXPRs with the same code are ordered by their decl's UID;
+ two distinct DEBUG_EXPR rtxes never compare equal here. */
+ if (GET_CODE (x) == DEBUG_EXPR)
+ {
+ if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
+ < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
+ return -1;
+#ifdef ENABLE_CHECKING
+ gcc_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
+ > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
+#endif
+ return 1;
+ }
+
+ /* ...then lexicographically by operands, per the rtx format string. */
+ fmt = GET_RTX_FORMAT (code);
+ for (i = 0; i < GET_RTX_LENGTH (code); i++)
+ switch (fmt[i])
+ {
+ case 'w':
+ if (XWINT (x, i) == XWINT (y, i))
+ break;
+ else if (XWINT (x, i) < XWINT (y, i))
+ return -1;
+ else
+ return 1;
+
+ case 'n':
+ case 'i':
+ if (XINT (x, i) == XINT (y, i))
+ break;
+ else if (XINT (x, i) < XINT (y, i))
+ return -1;
+ else
+ return 1;
+
+ case 'V':
+ case 'E':
+ /* Compare the vector length first. */
+ if (XVECLEN (x, i) == XVECLEN (y, i))
+ /* Compare the vectors elements. */;
+ else if (XVECLEN (x, i) < XVECLEN (y, i))
+ return -1;
+ else
+ return 1;
+
+ for (j = 0; j < XVECLEN (x, i); j++)
+ if ((r = loc_cmp (XVECEXP (x, i, j),
+ XVECEXP (y, i, j))))
+ return r;
+ break;
+
+ case 'e':
+ if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
+ return r;
+ break;
+
+ case 'S':
+ case 's':
+ /* Strings: NULL sorts before non-NULL, otherwise strcmp order. */
+ if (XSTR (x, i) == XSTR (y, i))
+ break;
+ if (!XSTR (x, i))
+ return -1;
+ if (!XSTR (y, i))
+ return 1;
+ if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
+ break;
+ else if (r < 0)
+ return -1;
+ else
+ return 1;
+
+ case 'u':
+ /* These are just backpointers, so they don't matter. */
+ break;
+
+ case '0':
+ case 't':
+ break;
+
+ /* It is believed that rtx's at this level will never
+ contain anything but integers and other rtx's,
+ except for within LABEL_REFs and SYMBOL_REFs. */
+ default:
+ gcc_unreachable ();
+ }
+
+ return 0;
+}
+
+/* If decl or value DVP refers to VALUE from *LOC, add backlinks
+ from VALUE to DVP. Callback for for_each_rtx; always returns 0
+ so the walk continues.
+
+ The value_chains table maps a value/debug-expr dv (the head node,
+ keyed by LDV) to the list of dvs that refer to it; each list entry
+ carries a refcount of how many references that dv holds. */
+
+static int
+add_value_chain (rtx *loc, void *dvp)
+{
+ decl_or_value dv, ldv;
+ value_chain vc, nvc;
+ void **slot;
+
+ /* Only VALUEs and DEBUG_EXPRs get backlinks. */
+ if (GET_CODE (*loc) == VALUE)
+ ldv = dv_from_value (*loc);
+ else if (GET_CODE (*loc) == DEBUG_EXPR)
+ ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
+ else
+ return 0;
+
+ /* Don't record a self-reference. */
+ if (dv_as_opaque (ldv) == dvp)
+ return 0;
+
+ dv = (decl_or_value) dvp;
+ slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
+ INSERT);
+ if (!*slot)
+ {
+ /* First backlink to LDV: create the head node. Its refcount
+ stays 0; counts live on the referrer nodes chained after it. */
+ vc = (value_chain) pool_alloc (value_chain_pool);
+ vc->dv = ldv;
+ vc->next = NULL;
+ vc->refcount = 0;
+ *slot = (void *) vc;
+ }
+ else
+ {
+ /* If DV already has a node in LDV's chain, just bump its count. */
+ for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
+ if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
+ break;
+ if (vc)
+ {
+ vc->refcount++;
+ return 0;
+ }
+ }
+ /* Insert a new referrer node for DV right after the head. */
+ vc = (value_chain) *slot;
+ nvc = (value_chain) pool_alloc (value_chain_pool);
+ nvc->dv = dv;
+ nvc->next = vc->next;
+ nvc->refcount = 1;
+ vc->next = nvc;
+ return 0;
+}
+
+/* If decl or value DVP refers to VALUEs from within LOC, add backlinks
+ from those VALUEs to DVP. */
+
+static void
+add_value_chains (decl_or_value dv, rtx loc)
+{
+ rtx x = loc;
+
+ /* A bare VALUE or DEBUG_EXPR needs no sub-rtx walk. */
+ if (GET_CODE (x) == VALUE || GET_CODE (x) == DEBUG_EXPR)
+ {
+ add_value_chain (&x, dv_as_opaque (dv));
+ return;
+ }
+
+ if (REG_P (x))
+ return;
+
+ /* For a MEM, only its address can mention values. */
+ if (MEM_P (x))
+ x = XEXP (x, 0);
+
+ for_each_rtx (&x, add_value_chain, dv_as_opaque (dv));
+}
+
+/* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
+ VALUEs to DV. Add the same time get rid of ASM_OPERANDS from locs list,
+ that is something we never can express in .debug_info and can prevent
+ reverse ops from being used. */
+
+static void
+add_cselib_value_chains (decl_or_value dv)
+{
+ struct elt_loc_list **l;
+
+ /* Walk with a pointer-to-pointer so ASM_OPERANDS entries can be
+ unlinked in place without a separate "previous" pointer. */
+ for (l = &CSELIB_VAL_PTR (dv_as_value (dv))->locs; *l;)
+ if (GET_CODE ((*l)->loc) == ASM_OPERANDS)
+ /* Drop this entry; *l now points at its successor, so don't
+ advance. */
+ *l = (*l)->next;
+ else
+ {
+ for_each_rtx (&(*l)->loc, add_value_chain, dv_as_opaque (dv));
+ l = &(*l)->next;
+ }
+}
+
+/* If decl or value DVP refers to VALUE from *LOC, remove backlinks
+ from VALUE to DVP. Callback for for_each_rtx; always returns 0.
+ The backlink being removed must exist (it was added by
+ add_value_chain); otherwise we hit gcc_unreachable. */
+
+static int
+remove_value_chain (rtx *loc, void *dvp)
+{
+ decl_or_value dv, ldv;
+ value_chain vc;
+ void **slot;
+
+ if (GET_CODE (*loc) == VALUE)
+ ldv = dv_from_value (*loc);
+ else if (GET_CODE (*loc) == DEBUG_EXPR)
+ ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
+ else
+ return 0;
+
+ if (dv_as_opaque (ldv) == dvp)
+ return 0;
+
+ dv = (decl_or_value) dvp;
+ /* NO_INSERT: the entry must already be present, since every remove
+ matches a prior add. */
+ slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
+ NO_INSERT);
+ for (vc = (value_chain) *slot; vc->next; vc = vc->next)
+ if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
+ {
+ value_chain dvc = vc->next;
+ gcc_assert (dvc->refcount > 0);
+ if (--dvc->refcount == 0)
+ {
+ /* Last reference from DV: unlink its node, and if the
+ chain is now just the head, drop the whole table entry. */
+ vc->next = dvc->next;
+ pool_free (value_chain_pool, dvc);
+ if (vc->next == NULL && vc == (value_chain) *slot)
+ {
+ pool_free (value_chain_pool, vc);
+ htab_clear_slot (value_chains, slot);
+ }
+ }
+ return 0;
+ }
+ gcc_unreachable ();
+}
+
+/* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
+ from those VALUEs to DVP. Mirror of add_value_chains. */
+
+static void
+remove_value_chains (decl_or_value dv, rtx loc)
+{
+ rtx x = loc;
+
+ /* A bare VALUE or DEBUG_EXPR needs no sub-rtx walk. */
+ if (GET_CODE (x) == VALUE || GET_CODE (x) == DEBUG_EXPR)
+ {
+ remove_value_chain (&x, dv_as_opaque (dv));
+ return;
+ }
+
+ if (REG_P (x))
+ return;
+
+ /* For a MEM, only its address can mention values. */
+ if (MEM_P (x))
+ x = XEXP (x, 0);
+
+ for_each_rtx (&x, remove_value_chain, dv_as_opaque (dv));
+}
+
+#if ENABLE_CHECKING
+/* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
+ VALUEs to DV. Checking-only counterpart of add_cselib_value_chains;
+ no list entries are removed here, since the ASM_OPERANDS entries
+ were already dropped when the chains were added. */
+
+static void
+remove_cselib_value_chains (decl_or_value dv)
+{
+ struct elt_loc_list *l;
+
+ for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
+ for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
+}
+
+/* Check the order of entries in one-part variables. Hash table
+ traversal callback; always returns 1 to keep traversing. */
+
+static int
+canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
+{
+ variable var = (variable) *slot;
+ decl_or_value dv = var->dv;
+ location_chain node, next;
+
+#ifdef ENABLE_RTL_CHECKING
+ int i;
+ /* Check every part's cur_loc, not just part 0 (the loop previously
+ re-tested var_part[0] on each iteration). */
+ for (i = 0; i < var->n_var_parts; i++)
+ gcc_assert (var->var_part[i].cur_loc == NULL);
+ gcc_assert (!var->cur_loc_changed && !var->in_changed_variables);
+#endif
+
+ if (!dv_onepart_p (dv))
+ return 1;
+
+ gcc_assert (var->n_var_parts == 1);
+ node = var->var_part[0].loc_chain;
+ gcc_assert (node);
+
+ /* The chain of a one-part variable must be strictly ascending in
+ loc_cmp order. */
+ while ((next = node->next))
+ {
+ gcc_assert (loc_cmp (node->loc, next->loc) < 0);
+ node = next;
+ }
+
+ return 1;
+}
+#endif
+
+/* Mark with VALUE_RECURSED_INTO values that have neighbors that are
+ more likely to be chosen as canonical for an equivalence set.
+ Ensure less likely values can reach more likely neighbors, making
+ the connections bidirectional. Hash table traversal callback;
+ DATA is the dataflow_set; always returns 1. */
+
+static int
+canonicalize_values_mark (void **slot, void *data)
+{
+ dataflow_set *set = (dataflow_set *)data;
+ variable var = (variable) *slot;
+ decl_or_value dv = var->dv;
+ rtx val;
+ location_chain node;
+
+ if (!dv_is_value_p (dv))
+ return 1;
+
+ gcc_assert (var->n_var_parts == 1);
+
+ val = dv_as_value (dv);
+
+ for (node = var->var_part[0].loc_chain; node; node = node->next)
+ if (GET_CODE (node->loc) == VALUE)
+ {
+ if (canon_value_cmp (node->loc, val))
+ /* The neighbor is more canonical than VAL; mark VAL so the
+ canonicalization pass revisits it. */
+ VALUE_RECURSED_INTO (val) = true;
+ else
+ {
+ /* VAL is more canonical: add the reverse link (VAL into
+ the neighbor's location list) and mark the neighbor. */
+ decl_or_value odv = dv_from_value (node->loc);
+ void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
+
+ oslot = set_slot_part (set, val, oslot, odv, 0,
+ node->init, NULL_RTX);
+
+ VALUE_RECURSED_INTO (node->loc) = true;
+ }
+ }
+
+ return 1;
+}
+
+/* Remove redundant entries from equivalence lists in onepart
+ variables, canonicalizing equivalence sets into star shapes.
+ Hash table traversal callback; DATA is the dataflow_set.
+ Always returns 1 to keep traversing. */
+
+static int
+canonicalize_values_star (void **slot, void *data)
+{
+ dataflow_set *set = (dataflow_set *)data;
+ variable var = (variable) *slot;
+ decl_or_value dv = var->dv;
+ location_chain node;
+ decl_or_value cdv;
+ rtx val, cval;
+ void **cslot;
+ bool has_value;
+ bool has_marks;
+
+ if (!dv_onepart_p (dv))
+ return 1;
+
+ gcc_assert (var->n_var_parts == 1);
+
+ /* Only process values that were marked by canonicalize_values_mark
+ (or by our callers); clear the mark on entry. */
+ if (dv_is_value_p (dv))
+ {
+ cval = dv_as_value (dv);
+ if (!VALUE_RECURSED_INTO (cval))
+ return 1;
+ VALUE_RECURSED_INTO (cval) = false;
+ }
+ else
+ cval = NULL_RTX;
+
+ restart:
+ val = cval;
+ has_value = false;
+ has_marks = false;
+
+ gcc_assert (var->n_var_parts == 1);
+
+ /* Find the most canonical value among this dv and its VALUE
+ neighbors; note whether any neighbor is still marked. */
+ for (node = var->var_part[0].loc_chain; node; node = node->next)
+ if (GET_CODE (node->loc) == VALUE)
+ {
+ has_value = true;
+ if (VALUE_RECURSED_INTO (node->loc))
+ has_marks = true;
+ if (canon_value_cmp (node->loc, cval))
+ cval = node->loc;
+ }
+
+ if (!has_value)
+ return 1;
+
+ /* This dv is already the local canonical choice. */
+ if (cval == val)
+ {
+ if (!has_marks || dv_is_decl_p (dv))
+ return 1;
+
+ /* Keep it marked so that we revisit it, either after visiting a
+ child node, or after visiting a new parent that might be
+ found out. */
+ VALUE_RECURSED_INTO (val) = true;
+
+ for (node = var->var_part[0].loc_chain; node; node = node->next)
+ if (GET_CODE (node->loc) == VALUE
+ && VALUE_RECURSED_INTO (node->loc))
+ {
+ cval = node->loc;
+ restart_with_cval:
+ VALUE_RECURSED_INTO (cval) = false;
+ dv = dv_from_value (cval);
+ slot = shared_hash_find_slot_noinsert (set->vars, dv);
+ if (!slot)
+ {
+ gcc_assert (dv_is_decl_p (var->dv));
+ /* The canonical value was reset and dropped.
+ Remove it. */
+ clobber_variable_part (set, NULL, var->dv, 0, NULL);
+ return 1;
+ }
+ var = (variable)*slot;
+ gcc_assert (dv_is_value_p (var->dv));
+ if (var->n_var_parts == 0)
+ return 1;
+ gcc_assert (var->n_var_parts == 1);
+ /* Process the marked neighbor as the new current dv. */
+ goto restart;
+ }
+
+ VALUE_RECURSED_INTO (val) = false;
+
+ return 1;
+ }
+
+ /* Push values to the canonical one. */
+ cdv = dv_from_value (cval);
+ cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
+
+ for (node = var->var_part[0].loc_chain; node; node = node->next)
+ if (node->loc != cval)
+ {
+ /* Move every non-canonical location onto CVAL's list. */
+ cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
+ node->init, NULL_RTX);
+ if (GET_CODE (node->loc) == VALUE)
+ {
+ decl_or_value ndv = dv_from_value (node->loc);
+
+ set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
+ NO_INSERT);
+
+ if (canon_value_cmp (node->loc, val))
+ {
+ /* If it could have been a local minimum, it's not any more,
+ since it's now neighbor to cval, so it may have to push
+ to it. Conversely, if it wouldn't have prevailed over
+ val, then whatever mark it has is fine: if it was to
+ push, it will now push to a more canonical node, but if
+ it wasn't, then it has already pushed any values it might
+ have to. */
+ VALUE_RECURSED_INTO (node->loc) = true;
+ /* Make sure we visit node->loc by ensuring cval is
+ visited too. */
+ VALUE_RECURSED_INTO (cval) = true;
+ }
+ else if (!VALUE_RECURSED_INTO (node->loc))
+ /* If we have no need to "recurse" into this node, it's
+ already "canonicalized", so drop the link to the old
+ parent. */
+ clobber_variable_part (set, cval, ndv, 0, NULL);
+ }
+ else if (GET_CODE (node->loc) == REG)
+ {
+ attrs list = set->regs[REGNO (node->loc)], *listp;
+
+ /* Change an existing attribute referring to dv so that it
+ refers to cdv, removing any duplicate this might
+ introduce, and checking that no previous duplicates
+ existed, all in a single pass. */
+
+ while (list)
+ {
+ if (list->offset == 0
+ && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
+ || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
+ break;
+
+ list = list->next;
+ }
+
+ gcc_assert (list);
+ if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
+ {
+ /* Found DV first: retarget it to CDV, then drop any
+ later CDV entry that this just duplicated. */
+ list->dv = cdv;
+ for (listp = &list->next; (list = *listp); listp = &list->next)
+ {
+ if (list->offset)
+ continue;
+
+ if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
+ {
+ *listp = list->next;
+ pool_free (attrs_pool, list);
+ list = *listp;
+ break;
+ }
+
+ gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
+ }
+ }
+ else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
+ {
+ /* Found CDV first: just drop the later DV entry. */
+ for (listp = &list->next; (list = *listp); listp = &list->next)
+ {
+ if (list->offset)
+ continue;
+
+ if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
+ {
+ *listp = list->next;
+ pool_free (attrs_pool, list);
+ list = *listp;
+ break;
+ }
+
+ gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
+ }
+ }
+ else
+ gcc_unreachable ();
+
+#if ENABLE_CHECKING
+ /* Verify no further DV/CDV entries remain at offset 0. */
+ while (list)
+ {
+ if (list->offset == 0
+ && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
+ || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
+ gcc_unreachable ();
+
+ list = list->next;
+ }
+#endif
+ }
+ }
+
+ if (val)
+ cslot = set_slot_part (set, val, cslot, cdv, 0,
+ VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
+
+ /* Reduce this dv's own list to the single canonical value. */
+ slot = clobber_slot_part (set, cval, slot, 0, NULL);
+
+ /* Variable may have been unshared. */
+ var = (variable)*slot;
+ gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
+ && var->var_part[0].loc_chain->next == NULL);
+
+ /* If CVAL got marked above, it still has pushing to do. */
+ if (VALUE_RECURSED_INTO (cval))
+ goto restart_with_cval;
+
+ return 1;
+}
+
+/* Bind one-part variables to the canonical value in an equivalence
+ set. Not doing this causes dataflow convergence failure in rare
+ circumstances, see PR42873. Unfortunately we can't do this
+ efficiently as part of canonicalize_values_star, since we may not
+ have determined or even seen the canonical value of a set when we
+ get to a variable that references another member of the set.
+ Hash table traversal callback; DATA is the dataflow_set. */
+
+static int
+canonicalize_vars_star (void **slot, void *data)
+{
+ dataflow_set *set = (dataflow_set *)data;
+ variable var = (variable) *slot;
+ decl_or_value dv = var->dv;
+ location_chain node;
+ rtx cval;
+ decl_or_value cdv;
+ void **cslot;
+ variable cvar;
+ location_chain cnode;
+
+ /* Only decl-rooted one-part variables are handled here; values were
+ canonicalized by canonicalize_values_star. */
+ if (!dv_onepart_p (dv) || dv_is_value_p (dv))
+ return 1;
+
+ gcc_assert (var->n_var_parts == 1);
+
+ node = var->var_part[0].loc_chain;
+
+ if (GET_CODE (node->loc) != VALUE)
+ return 1;
+
+ gcc_assert (!node->next);
+ cval = node->loc;
+
+ /* Push values to the canonical one. */
+ cdv = dv_from_value (cval);
+ cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
+ if (!cslot)
+ return 1;
+ cvar = (variable)*cslot;
+ gcc_assert (cvar->n_var_parts == 1);
+
+ cnode = cvar->var_part[0].loc_chain;
+
+ /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
+ that are not "more canonical" than it. */
+ if (GET_CODE (cnode->loc) != VALUE
+ || !canon_value_cmp (cnode->loc, cval))
+ return 1;
+
+ /* CVAL was found to be non-canonical. Change the variable to point
+ to the canonical VALUE. */
+ gcc_assert (!cnode->next);
+ cval = cnode->loc;
+
+ /* Rebind VAR to the canonical value, then drop the old binding. */
+ slot = set_slot_part (set, cval, slot, dv, 0,
+ node->init, node->set_src);
+ slot = clobber_slot_part (set, cval, slot, 0, node->set_src);
+
+ return 1;
+}
+
+/* Combine variable or value in *S1SLOT (in DSM->cur) with the
+ corresponding entry in DSM->src. Multi-part variables are combined
+ with variable_union, whereas onepart dvs are combined with
+ intersection. Hash table traversal callback; always returns 1. */
+
+static int
+variable_merge_over_cur (void **s1slot, void *data)
+{
+ struct dfset_merge *dsm = (struct dfset_merge *)data;
+ dataflow_set *dst = dsm->dst;
+ void **dstslot;
+ variable s1var = (variable) *s1slot;
+ variable s2var, dvar = NULL;
+ decl_or_value dv = s1var->dv;
+ bool onepart = dv_onepart_p (dv);
+ rtx val;
+ hashval_t dvhash;
+ location_chain node, *nodep;
+
+ /* If the incoming onepart variable has an empty location list, then
+ the intersection will be just as empty. For other variables,
+ it's always union. */
+ gcc_assert (s1var->n_var_parts);
+ gcc_assert (s1var->var_part[0].loc_chain);
+
+ if (!onepart)
+ return variable_union (s1slot, dst);
+
+ gcc_assert (s1var->n_var_parts == 1);
+ gcc_assert (s1var->var_part[0].offset == 0);
+
+ dvhash = dv_htab_hash (dv);
+ if (dv_is_value_p (dv))
+ val = dv_as_value (dv);
+ else
+ val = NULL;
+
+ /* Missing from SRC: the intersection is empty, and DST diverges
+ from SRC so it cannot share SRC's table. */
+ s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
+ if (!s2var)
+ {
+ dst_can_be_shared = false;
+ return 1;
+ }
+
+ dsm->src_onepart_cnt--;
+ gcc_assert (s2var->var_part[0].loc_chain);
+ gcc_assert (s2var->n_var_parts == 1);
+ gcc_assert (s2var->var_part[0].offset == 0);
+
+ /* If DST already has an entry, intersect into its chain; otherwise
+ build the intersection in a local chain headed by NODE. */
+ dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
+ if (dstslot)
+ {
+ dvar = (variable)*dstslot;
+ gcc_assert (dvar->refcount == 1);
+ gcc_assert (dvar->n_var_parts == 1);
+ gcc_assert (dvar->var_part[0].offset == 0);
+ nodep = &dvar->var_part[0].loc_chain;
+ }
+ else
+ {
+ nodep = &node;
+ node = NULL;
+ }
+
+ /* Fast path: if SRC's entry equals CUR's, share it instead of
+ computing the intersection node by node. */
+ if (!dstslot && !onepart_variable_different_p (s1var, s2var))
+ {
+ dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
+ dvhash, INSERT);
+ *dstslot = dvar = s2var;
+ dvar->refcount++;
+ }
+ else
+ {
+ dst_can_be_shared = false;
+
+ intersect_loc_chains (val, nodep, dsm,
+ s1var->var_part[0].loc_chain, s2var);
+
+ if (!dstslot)
+ {
+ if (node)
+ {
+ /* Non-empty intersection: materialize a fresh variable
+ for DST around the chain we just built. */
+ dvar = (variable) pool_alloc (dv_pool (dv));
+ dvar->dv = dv;
+ dvar->refcount = 1;
+ dvar->n_var_parts = 1;
+ dvar->cur_loc_changed = false;
+ dvar->in_changed_variables = false;
+ dvar->var_part[0].offset = 0;
+ dvar->var_part[0].loc_chain = node;
+ dvar->var_part[0].cur_loc = NULL;
+
+ dstslot
+ = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
+ INSERT);
+ gcc_assert (!*dstslot);
+ *dstslot = dvar;
+ }
+ else
+ return 1;
+ }
+ }
+
+ /* Register the intersected REG locations in DST's register tables,
+ resolving conflicts with values that already own the register. */
+ nodep = &dvar->var_part[0].loc_chain;
+ while ((node = *nodep))
+ {
+ location_chain *nextp = &node->next;
+
+ if (GET_CODE (node->loc) == REG)
+ {
+ attrs list;
+
+ for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
+ if (GET_MODE (node->loc) == GET_MODE (list->loc)
+ && dv_is_value_p (list->dv))
+ break;
+
+ if (!list)
+ attrs_list_insert (&dst->regs[REGNO (node->loc)],
+ dv, 0, node->loc);
+ /* If this value became canonical for another value that had
+ this register, we want to leave it alone. */
+ else if (dv_as_value (list->dv) != val)
+ {
+ dstslot = set_slot_part (dst, dv_as_value (list->dv),
+ dstslot, dv, 0,
+ node->init, NULL_RTX);
+ dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
+
+ /* Since nextp points into the removed node, we can't
+ use it. The pointer to the next node moved to nodep.
+ However, if the variable we're walking is unshared
+ during our walk, we'll keep walking the location list
+ of the previously-shared variable, in which case the
+ node won't have been removed, and we'll want to skip
+ it. That's why we test *nodep here. */
+ if (*nodep != node)
+ nextp = nodep;
+ }
+ }
+ else
+ /* Canonicalization puts registers first, so we don't have to
+ walk it all. */
+ break;
+ nodep = nextp;
+ }
+
+ /* The slot contents may have been replaced above; resync DVAR. */
+ if (dvar != (variable)*dstslot)
+ dvar = (variable)*dstslot;
+ nodep = &dvar->var_part[0].loc_chain;
+
+ if (val)
+ {
+ /* Mark all referenced nodes for canonicalization, and make sure
+ we have mutual equivalence links. */
+ VALUE_RECURSED_INTO (val) = true;
+ for (node = *nodep; node; node = node->next)
+ if (GET_CODE (node->loc) == VALUE)
+ {
+ VALUE_RECURSED_INTO (node->loc) = true;
+ set_variable_part (dst, val, dv_from_value (node->loc), 0,
+ node->init, NULL, INSERT);
+ }
+
+ dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
+ gcc_assert (*dstslot == dvar);
+ canonicalize_values_star (dstslot, dst);
+#ifdef ENABLE_CHECKING
+ gcc_assert (dstslot
+ == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
+#endif
+ dvar = (variable)*dstslot;
+ }
+ else
+ {
+ bool has_value = false, has_other = false;
+
+ /* If we have one value and anything else, we're going to
+ canonicalize this, so make sure all values have an entry in
+ the table and are marked for canonicalization. */
+ for (node = *nodep; node; node = node->next)
+ {
+ if (GET_CODE (node->loc) == VALUE)
+ {
+ /* If this was marked during register canonicalization,
+ we know we have to canonicalize values. */
+ if (has_value)
+ has_other = true;
+ has_value = true;
+ if (has_other)
+ break;
+ }
+ else
+ {
+ has_other = true;
+ if (has_value)
+ break;
+ }
+ }
+
+ if (has_value && has_other)
+ {
+ for (node = *nodep; node; node = node->next)
+ {
+ if (GET_CODE (node->loc) == VALUE)
+ {
+ decl_or_value dv = dv_from_value (node->loc);
+ void **slot = NULL;
+
+ if (shared_hash_shared (dst->vars))
+ slot = shared_hash_find_slot_noinsert (dst->vars, dv);
+ if (!slot)
+ slot = shared_hash_find_slot_unshare (&dst->vars, dv,
+ INSERT);
+ if (!*slot)
+ {
+ /* The value had no entry in DST yet: create an
+ empty one so canonicalization can see it. */
+ variable var = (variable) pool_alloc (dv_pool (dv));
+ var->dv = dv;
+ var->refcount = 1;
+ var->n_var_parts = 1;
+ var->cur_loc_changed = false;
+ var->in_changed_variables = false;
+ var->var_part[0].offset = 0;
+ var->var_part[0].loc_chain = NULL;
+ var->var_part[0].cur_loc = NULL;
+ *slot = var;
+ }
+
+ VALUE_RECURSED_INTO (node->loc) = true;
+ }
+ }
+
+ dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
+ gcc_assert (*dstslot == dvar);
+ canonicalize_values_star (dstslot, dst);
+#ifdef ENABLE_CHECKING
+ gcc_assert (dstslot
+ == shared_hash_find_slot_noinsert_1 (dst->vars,
+ dv, dvhash));
+#endif
+ dvar = (variable)*dstslot;
+ }
+ }
+
+ /* If the final result equals SRC's or CUR's entry, share that entry
+ instead of keeping a private copy. */
+ if (!onepart_variable_different_p (dvar, s2var))
+ {
+ variable_htab_free (dvar);
+ *dstslot = dvar = s2var;
+ dvar->refcount++;
+ }
+ else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
+ {
+ variable_htab_free (dvar);
+ *dstslot = dvar = s1var;
+ dvar->refcount++;
+ dst_can_be_shared = false;
+ }
+ else
+ dst_can_be_shared = false;
+
+ return 1;
+}
+
+/* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
+ multi-part variable. Unions of multi-part variables and
+ intersections of one-part ones will be handled in
+ variable_merge_over_cur(). */
+
+static int
+variable_merge_over_src (void **s2slot, void *data)
+{
+ struct dfset_merge *dsm = (struct dfset_merge *)data;
+ variable s2var = (variable) *s2slot;
+ decl_or_value dv = s2var->dv;
+ void **dstp;
+
+ if (dv_onepart_p (dv))
+ {
+ /* One-part dvs are intersected later; just count them here. */
+ dsm->src_onepart_cnt++;
+ return 1;
+ }
+
+ /* Multi-part variable: share SRC's entry into DST directly. */
+ dstp = shared_hash_find_slot (dsm->dst->vars, dv);
+ *dstp = s2var;
+ s2var->refcount++;
+ return 1;
+}
+
+/* Combine dataflow set information from SRC2 into DST. DST's
+ previous contents are saved in a local copy (CUR) and serve as the
+ other merge input; DST is rebuilt from scratch. */
+
+static void
+dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
+{
+ /* Snapshot DST; the merge reads from CUR while writing into the
+ reinitialized DST. */
+ dataflow_set cur = *dst;
+ dataflow_set *src1 = &cur;
+ struct dfset_merge dsm;
+ int i;
+ size_t src1_elems, src2_elems;
+
+ src1_elems = htab_elements (shared_hash_htab (src1->vars));
+ src2_elems = htab_elements (shared_hash_htab (src2->vars));
+ dataflow_set_init (dst);
+ dst->stack_adjust = cur.stack_adjust;
+ shared_hash_destroy (dst->vars);
+ /* Give DST a private table sized for the larger input. */
+ dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
+ dst->vars->refcount = 1;
+ dst->vars->htab
+ = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
+ variable_htab_eq, variable_htab_free);
+
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
+
+ dsm.dst = dst;
+ dsm.src = src2;
+ dsm.cur = src1;
+ dsm.src_onepart_cnt = 0;
+
+ /* First copy/count entries from SRC2, then merge CUR's entries
+ against them. */
+ htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
+ &dsm);
+ htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
+ &dsm);
+
+ /* Leftover onepart dvs in SRC2 had no counterpart in CUR, so DST
+ differs from SRC2 and must not share its table. */
+ if (dsm.src_onepart_cnt)
+ dst_can_be_shared = false;
+
+ dataflow_set_destroy (src1);
+}
+
+/* Mark register equivalences. For each register, find the canonical
+ value per machine mode, link all one-part dvs holding the register
+ to that value, and canonicalize the resulting equivalence sets. */
+
+static void
+dataflow_set_equiv_regs (dataflow_set *set)
+{
+ int i;
+ attrs list, *listp;
+
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ {
+ /* Per-mode canonical value held in register I. */
+ rtx canon[NUM_MACHINE_MODES];
+
+ memset (canon, 0, sizeof (canon));
+
+ /* Pass 1: pick the most canonical value for each mode. */
+ for (list = set->regs[i]; list; list = list->next)
+ if (list->offset == 0 && dv_is_value_p (list->dv))
+ {
+ rtx val = dv_as_value (list->dv);
+ rtx *cvalp = &canon[(int)GET_MODE (val)];
+ rtx cval = *cvalp;
+
+ if (canon_value_cmp (val, cval))
+ *cvalp = val;
+ }
+
+ /* Pass 2: link every one-part dv for this register to the
+ canonical value of its mode, marking both ends for
+ canonicalization. */
+ for (list = set->regs[i]; list; list = list->next)
+ if (list->offset == 0 && dv_onepart_p (list->dv))
+ {
+ rtx cval = canon[(int)GET_MODE (list->loc)];
+
+ if (!cval)
+ continue;
+
+ if (dv_is_value_p (list->dv))
+ {
+ rtx val = dv_as_value (list->dv);
+
+ if (val == cval)
+ continue;
+
+ VALUE_RECURSED_INTO (val) = true;
+ set_variable_part (set, val, dv_from_value (cval), 0,
+ VAR_INIT_STATUS_INITIALIZED,
+ NULL, NO_INSERT);
+ }
+
+ VALUE_RECURSED_INTO (cval) = true;
+ set_variable_part (set, cval, list->dv, 0,
+ VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
+ }
+
+ /* Pass 3: canonicalize the marked sets. The update expression
+ keeps LISTP in place when the current entry was removed from
+ under us (LIST reset to NULL below). */
+ for (listp = &set->regs[i]; (list = *listp);
+ listp = list ? &list->next : listp)
+ if (list->offset == 0 && dv_onepart_p (list->dv))
+ {
+ rtx cval = canon[(int)GET_MODE (list->loc)];
+ void **slot;
+
+ if (!cval)
+ continue;
+
+ if (dv_is_value_p (list->dv))
+ {
+ rtx val = dv_as_value (list->dv);
+ if (!VALUE_RECURSED_INTO (val))
+ continue;
+ }
+
+ slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
+ canonicalize_values_star (slot, set);
+ /* If canonicalization unlinked LIST, don't advance past the
+ entry that replaced it. */
+ if (*listp != list)
+ list = NULL;
+ }
+ }
+}
+
+ /* Remove any redundant values in the location list of VAR, which must
+ be unshared and 1-part.  VALUE_RECURSED_INTO is borrowed as a
+ temporary "already seen" marker and is cleared again before
+ returning.  */
+
+ static void
+ remove_duplicate_values (variable var)
+ {
+ location_chain node, *nodep;
+
+ gcc_assert (dv_onepart_p (var->dv));
+ gcc_assert (var->n_var_parts == 1);
+ /* VAR is edited in place, so it must not be shared.  */
+ gcc_assert (var->refcount == 1);
+
+ /* First walk: flag the first occurrence of each VALUE and unlink any
+    later occurrence of the same rtx.  NODEP trails one link behind so
+    removal is a single pointer store.  */
+ for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
+ {
+ if (GET_CODE (node->loc) == VALUE)
+ {
+ if (VALUE_RECURSED_INTO (node->loc))
+ {
+ /* Remove duplicate value node. */
+ *nodep = node->next;
+ pool_free (loc_chain_pool, node);
+ continue;
+ }
+ else
+ VALUE_RECURSED_INTO (node->loc) = true;
+ }
+ nodep = &node->next;
+ }
+
+ /* Second walk: clear the markers set above so the flag is free for
+    other users again.  */
+ for (node = var->var_part[0].loc_chain; node; node = node->next)
+ if (GET_CODE (node->loc) == VALUE)
+ {
+ gcc_assert (VALUE_RECURSED_INTO (node->loc));
+ VALUE_RECURSED_INTO (node->loc) = false;
+ }
+ }
+
+
+ /* Hash table iteration argument passed to the variable_post_merge_*
+    traversal callbacks. */
+ struct dfset_post_merge
+ {
+ /* The new input set for the current block. */
+ dataflow_set *set;
+ /* Pointer to the permanent input set for the current block, or
+ NULL.  The pointed-to set is allocated lazily by the traversal
+ callbacks when a permanent value is first needed. */
+ dataflow_set **permp;
+ };
+
+ /* Create values for incoming expressions associated with one-part
+ variables that don't have value numbers for them.  Hash table
+ callback for htab_traverse: SLOT holds the variable, INFO is a
+ struct dfset_post_merge.  Always returns 1 to keep traversing.  */
+
+ static int
+ variable_post_merge_new_vals (void **slot, void *info)
+ {
+ struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
+ dataflow_set *set = dfpm->set;
+ variable var = (variable)*slot;
+ location_chain node;
+
+ /* Only one-part variables with at least one location need work.  */
+ if (!dv_onepart_p (var->dv) || !var->n_var_parts)
+ return 1;
+
+ gcc_assert (var->n_var_parts == 1);
+
+ if (dv_is_decl_p (var->dv))
+ {
+ bool check_dupes = false;
+
+ restart:
+ for (node = var->var_part[0].loc_chain; node; node = node->next)
+ {
+ if (GET_CODE (node->loc) == VALUE)
+ gcc_assert (!VALUE_RECURSED_INTO (node->loc));
+ else if (GET_CODE (node->loc) == REG)
+ {
+ attrs att, *attp, *curp = NULL;
+
+ /* The location chain is about to be modified in place, so
+    make sure this callback owns the only reference; after
+    unsharing, rescan the fresh copy from the start.  */
+ if (var->refcount != 1)
+ {
+ slot = unshare_variable (set, slot, var,
+ VAR_INIT_STATUS_INITIALIZED);
+ var = (variable)*slot;
+ goto restart;
+ }
+
+ /* Search the attrs of this REG for a same-mode, zero-offset
+    VALUE to reuse in place of the REG, noting in CURP the
+    attr entry that refers to VAR itself.  */
+ for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
+ attp = &att->next)
+ if (att->offset == 0
+ && GET_MODE (att->loc) == GET_MODE (node->loc))
+ {
+ if (dv_is_value_p (att->dv))
+ {
+ rtx cval = dv_as_value (att->dv);
+ node->loc = cval;
+ check_dupes = true;
+ break;
+ }
+ else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
+ curp = attp;
+ }
+
+ /* If the search above broke out before passing VAR's own
+    attr entry, finish locating it; it must exist.  */
+ if (!curp)
+ {
+ curp = attp;
+ while (*curp)
+ if ((*curp)->offset == 0
+ && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
+ && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
+ break;
+ else
+ curp = &(*curp)->next;
+ gcc_assert (*curp);
+ }
+
+ /* No reusable VALUE was bound to the register: obtain one
+    from the permanent set, creating the set and/or the value
+    lazily as needed.  */
+ if (!att)
+ {
+ decl_or_value cdv;
+ rtx cval;
+
+ if (!*dfpm->permp)
+ {
+ *dfpm->permp = XNEW (dataflow_set);
+ dataflow_set_init (*dfpm->permp);
+ }
+
+ /* Look for a same-mode permanent value for this register;
+    if found, reset its state in SET before reusing it.  */
+ for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
+ att; att = att->next)
+ if (GET_MODE (att->loc) == GET_MODE (node->loc))
+ {
+ gcc_assert (att->offset == 0);
+ gcc_assert (dv_is_value_p (att->dv));
+ val_reset (set, att->dv);
+ break;
+ }
+
+ if (att)
+ {
+ cdv = att->dv;
+ cval = dv_as_value (cdv);
+ }
+ else
+ {
+ /* Create a unique value to hold this register,
+ that ought to be found and reused in
+ subsequent rounds. */
+ cselib_val *v;
+ gcc_assert (!cselib_lookup (node->loc,
+ GET_MODE (node->loc), 0));
+ v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
+ cselib_preserve_value (v);
+ cselib_invalidate_rtx (node->loc);
+ cval = v->val_rtx;
+ cdv = dv_from_value (cval);
+ if (dump_file)
+ fprintf (dump_file,
+ "Created new value %u:%u for reg %i\n",
+ v->uid, v->hash, REGNO (node->loc));
+ }
+
+ /* Record the value as the permanent binding for the
+    register and substitute it for the REG location.  */
+ var_reg_decl_set (*dfpm->permp, node->loc,
+ VAR_INIT_STATUS_INITIALIZED,
+ cdv, 0, NULL, INSERT);
+
+ node->loc = cval;
+ check_dupes = true;
+ }
+
+ /* Remove attribute referring to the decl, which now
+ uses the value for the register, already existing or
+ to be added when we bring perm in. */
+ att = *curp;
+ *curp = att->next;
+ pool_free (attrs_pool, att);
+ }
+ }
+
+ /* Substituting VALUEs for REGs above may have introduced the
+    same value twice in the chain; drop the duplicates.  */
+ if (check_dupes)
+ remove_duplicate_values (var);
+ }
+
+ /* Continue traversing the hash table.  */
+ return 1;
+ }
+
+/* Reset values in the permanent set that are not associated with the
+ chosen expression. */
+
+static int
+variable_post_merge_perm_vals (void **pslot, void *info)
+{
+ struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
+ dataflow_set *set = dfpm->set;
+ variable pvar = (variable)*pslot, var;
+ location_chain pnode;
+ decl_or_value dv;
+ attrs att;
+
+ gcc_assert (dv_is_value_p (pvar->dv));
+ gcc_assert (pvar->n_var_parts == 1);
+ pnode = pvar->var_part[0].loc_chain;
+ gcc_assert (pnode);
+ gcc_assert (!pnode->next);
+ gcc_assert (REG_P (pnode->loc));
+
+ dv = pvar->dv;
+
+ var = shared_hash_find (set->vars, dv);
+ if (var)
+ {
+ if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
+ return 1;
+ val_reset (set, dv);
+ }
+
+ for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
+ if (att->offset == 0
+ && GET_MODE (att->loc) == GET_MODE (pnode->loc)
+ && dv_is_value_p (att->dv))
+ break;
+
+ /* If there is a value associated with this register already, create
+ an equivalence. */
+ if (att && dv_as_value (att->dv) != dv_as_value (dv))
+ {
+ rtx cval = dv_as_value (att->dv);
+ set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
+ set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
+ NULL, INSERT);
+ }
+ else if (!att)
+ {
+ attrs_list_insert (&set->regs[REGNO (pnode->loc)],
+ dv, 0, pnode->loc);
+ variable_union (pslot, set);
+ }