enum micro_operation_type type;
union {
- /* Location. */
+ /* Location. For MO_SET and MO_COPY, this is the SET that performs
+ the assignment, if known, otherwise it is the target of the
+ assignment. */
rtx loc;
/* Stack adjustment. */
/* Pointer to the BB's information specific to variable tracking pass. */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)
+/* Macro to access MEM_OFFSET as a HOST_WIDE_INT.  Evaluates MEM twice.  */
+#define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
+
/* Alloc pool for struct attrs_def. */
static alloc_pool attrs_pool;
insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
HOST_WIDE_INT *post)
{
+ rtx pattern;
+
*pre = 0;
*post = 0;
- if (GET_CODE (PATTERN (insn)) == SET)
- stack_adjust_offset_pre_post (PATTERN (insn), pre, post);
- else if (GET_CODE (PATTERN (insn)) == PARALLEL
- || GET_CODE (PATTERN (insn)) == SEQUENCE)
+ pattern = PATTERN (insn);
+ if (RTX_FRAME_RELATED_P (insn))
+ {
+ rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
+ if (expr)
+ pattern = XEXP (expr, 0);
+ }
+
+ if (GET_CODE (pattern) == SET)
+ stack_adjust_offset_pre_post (pattern, pre, post);
+ else if (GET_CODE (pattern) == PARALLEL
+ || GET_CODE (pattern) == SEQUENCE)
{
int i;
/* There may be stack adjustments inside compound insns. Search
for them. */
- for ( i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
- if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
- stack_adjust_offset_pre_post (XVECEXP (PATTERN (insn), 0, i),
- pre, post);
+ for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
+ if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
+ stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
}
}
{
attrs list;
- list = pool_alloc (attrs_pool);
+ list = (attrs) pool_alloc (attrs_pool);
list->loc = loc;
list->decl = decl;
list->offset = offset;
attrs_list_clear (dstp);
for (; src; src = src->next)
{
- n = pool_alloc (attrs_pool);
+ n = (attrs) pool_alloc (attrs_pool);
n->loc = src->loc;
n->decl = src->decl;
n->offset = src->offset;
variable new_var;
int i;
- new_var = pool_alloc (var_pool);
+ new_var = (variable) pool_alloc (var_pool);
new_var->decl = var->decl;
new_var->refcount = 1;
var->refcount--;
{
location_chain new_lc;
- new_lc = pool_alloc (loc_chain_pool);
+ new_lc = (location_chain) pool_alloc (loc_chain_pool);
new_lc->next = NULL;
if (node->init > initialized)
new_lc->init = node->init;
rtx set_src)
{
tree decl = MEM_EXPR (loc);
- HOST_WIDE_INT offset = MEM_OFFSET (loc) ? INTVAL (MEM_OFFSET (loc)) : 0;
+ HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
decl = var_debug_decl (decl);
enum var_init_status initialized, rtx set_src)
{
tree decl = MEM_EXPR (loc);
- HOST_WIDE_INT offset = MEM_OFFSET (loc) ? INTVAL (MEM_OFFSET (loc)) : 0;
+ HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
decl = var_debug_decl (decl);
var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
{
tree decl = MEM_EXPR (loc);
- HOST_WIDE_INT offset = MEM_OFFSET (loc) ? INTVAL (MEM_OFFSET (loc)) : 0;
+ HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
decl = var_debug_decl (decl);
if (clobber)
static int
variable_union_info_cmp_pos (const void *n1, const void *n2)
{
- const struct variable_union_info *i1 = n1;
- const struct variable_union_info *i2 = n2;
+ const struct variable_union_info *const i1 =
+ (const struct variable_union_info *) n1;
+ const struct variable_union_info *const i2 =
+ ( const struct variable_union_info *) n2;
if (i1->pos != i2->pos)
return i1->pos - i2->pos;
&& REG_P (node->loc)
&& REGNO (node2->loc) == REGNO (node->loc))
|| rtx_equal_p (node2->loc, node->loc)))
- if (node2->init < node->init)
- node2->init = node->init;
- break;
+ {
+ if (node2->init < node->init)
+ node2->init = node->init;
+ break;
+ }
}
if (node || node2)
dst = unshare_variable (set, dst, VAR_INIT_STATUS_UNKNOWN);
location_chain new_node;
/* Copy the location from SRC. */
- new_node = pool_alloc (loc_chain_pool);
+ new_node = (location_chain) pool_alloc (loc_chain_pool);
new_node->loc = node->loc;
new_node->init = node->init;
if (!node->set_src || MEM_P (node->set_src))
{
location_chain new_lc;
- new_lc = pool_alloc (loc_chain_pool);
+ new_lc = (location_chain) pool_alloc (loc_chain_pool);
new_lc->next = NULL;
new_lc->init = node->init;
if (!node->set_src || MEM_P (node->set_src))
variable var1, var2;
var1 = *(variable *) slot;
- var2 = htab_find_with_hash (htab, var1->decl,
+ var2 = (variable) htab_find_with_hash (htab, var1->decl,
VARIABLE_HASH_VAL (var1->decl));
if (!var2)
{
variable var1, var2;
var1 = *(variable *) slot;
- var2 = htab_find_with_hash (htab, var1->decl,
+ var2 = (variable) htab_find_with_hash (htab, var1->decl,
VARIABLE_HASH_VAL (var1->decl));
if (!var2)
{
else if (MEM_P (loc))
{
expr2 = MEM_EXPR (loc);
- offset2 = MEM_OFFSET (loc) ? INTVAL (MEM_OFFSET (loc)) : 0;
+ offset2 = INT_MEM_OFFSET (loc);
}
else
return false;
return (expr == expr2 && offset == offset2);
}
+/* LOC is a REG or MEM that we would like to track if possible.
+ If EXPR is null, we don't know what expression LOC refers to,
+ otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
+ LOC is an lvalue register.
+
+ Return true if EXPR is nonnull and if LOC, or some lowpart of it,
+ is something we can track. When returning true, store the mode of
+ the lowpart we can track in *MODE_OUT (if nonnull) and its offset
+ from EXPR in *OFFSET_OUT (if nonnull). */
+
+static bool
+track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
+ enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
+{
+ enum machine_mode mode;
+
+ /* Without a declaration, or with one the tracker rejects, there is
+ no user variable to associate LOC with. */
+ if (expr == NULL || !track_expr_p (expr))
+ return false;
+
+ /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
+ whole subreg, but only the old inner part is really relevant. */
+ mode = GET_MODE (loc);
+ if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
+ {
+ enum machine_mode pseudo_mode;
+
+ pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
+ if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
+ {
+ /* Narrow to the original pseudo's mode and shift OFFSET to
+ the lowpart of the wider LOC. */
+ offset += byte_lowpart_offset (pseudo_mode, mode);
+ mode = pseudo_mode;
+ }
+ }
+
+ /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
+ Do the same if we are storing to a register and EXPR occupies
+ the whole of register LOC; in that case, the whole of EXPR is
+ being changed. We exclude complex modes from the second case
+ because the real and imaginary parts are represented as separate
+ pseudo registers, even if the whole complex value fits into one
+ hard register. */
+ if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
+ || (store_reg_p
+ && !COMPLEX_MODE_P (DECL_MODE (expr))
+ && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
+ && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
+ {
+ mode = DECL_MODE (expr);
+ offset = 0;
+ }
+
+ /* Reject offsets outside the representable range. */
+ if (offset < 0 || offset >= MAX_VAR_PARTS)
+ return false;
+
+ if (mode_out)
+ *mode_out = mode;
+ if (offset_out)
+ *offset_out = offset;
+ return true;
+}
+
+/* Return the MODE lowpart of LOC, or null if LOC is not something we
+ want to track. When returning nonnull, make sure that the attributes
+ on the returned value are updated. */
+
+static rtx
+var_lowpart (enum machine_mode mode, rtx loc)
+{
+ unsigned int offset, reg_offset, regno;
+
+ /* Only registers and memory references are trackable locations. */
+ if (!REG_P (loc) && !MEM_P (loc))
+ return NULL;
+
+ /* Already in the requested mode; nothing to narrow. */
+ if (GET_MODE (loc) == mode)
+ return loc;
+
+ offset = byte_lowpart_offset (mode, GET_MODE (loc));
+
+ if (MEM_P (loc))
+ return adjust_address_nv (loc, mode, offset);
+
+ /* For a register, find the hard register number that holds the
+ lowpart, then rebuild the REG so its attributes record OFFSET
+ (see the header comment above). */
+ reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
+ regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
+ reg_offset, mode);
+ return gen_rtx_REG_offset (loc, mode, regno, offset);
+}
/* Count uses (register and memory references) LOC which will be tracked.
INSN is instruction which the LOC is part of. */
VTI (bb)->n_mos++;
}
else if (MEM_P (*loc)
- && MEM_EXPR (*loc)
- && track_expr_p (MEM_EXPR (*loc)))
+ && track_loc_p (*loc, MEM_EXPR (*loc), INT_MEM_OFFSET (*loc),
+ false, NULL, NULL))
{
VTI (bb)->n_mos++;
}
static int
add_uses (rtx *loc, void *insn)
{
+ enum machine_mode mode;
+
if (REG_P (*loc))
{
basic_block bb = BLOCK_FOR_INSN ((rtx) insn);
micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
- mo->type = ((REG_EXPR (*loc) && track_expr_p (REG_EXPR (*loc)))
- ? MO_USE : MO_USE_NO_VAR);
- mo->u.loc = *loc;
+ if (track_loc_p (*loc, REG_EXPR (*loc), REG_OFFSET (*loc),
+ false, &mode, NULL))
+ {
+ mo->type = MO_USE;
+ mo->u.loc = var_lowpart (mode, *loc);
+ }
+ else
+ {
+ mo->type = MO_USE_NO_VAR;
+ mo->u.loc = *loc;
+ }
mo->insn = (rtx) insn;
}
else if (MEM_P (*loc)
- && MEM_EXPR (*loc)
- && track_expr_p (MEM_EXPR (*loc)))
+ && track_loc_p (*loc, MEM_EXPR (*loc), INT_MEM_OFFSET (*loc),
+ false, &mode, NULL))
{
basic_block bb = BLOCK_FOR_INSN ((rtx) insn);
micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
mo->type = MO_USE;
- mo->u.loc = *loc;
+ mo->u.loc = var_lowpart (mode, *loc);
mo->insn = (rtx) insn;
}
static void
add_stores (rtx loc, const_rtx expr, void *insn)
{
+ enum machine_mode mode;
+
if (REG_P (loc))
{
basic_block bb = BLOCK_FOR_INSN ((rtx) insn);
micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
if (GET_CODE (expr) == CLOBBER
- || ! REG_EXPR (loc)
- || ! track_expr_p (REG_EXPR (loc)))
- mo->type = MO_CLOBBER;
- else if (GET_CODE (expr) == SET
- && SET_DEST (expr) == loc
- && same_variable_part_p (SET_SRC (expr),
- REG_EXPR (loc),
- REG_OFFSET (loc)))
- mo->type = MO_COPY;
+ || !track_loc_p (loc, REG_EXPR (loc), REG_OFFSET (loc),
+ true, &mode, NULL))
+ {
+ mo->type = MO_CLOBBER;
+ mo->u.loc = loc;
+ }
else
- mo->type = MO_SET;
- mo->u.loc = loc;
+ {
+ rtx src = NULL;
+
+ if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
+ src = var_lowpart (mode, SET_SRC (expr));
+ loc = var_lowpart (mode, loc);
+
+ if (src == NULL)
+ {
+ mo->type = MO_SET;
+ mo->u.loc = loc;
+ }
+ else
+ {
+ if (SET_SRC (expr) != src)
+ expr = gen_rtx_SET (VOIDmode, loc, src);
+ if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
+ mo->type = MO_COPY;
+ else
+ mo->type = MO_SET;
+ mo->u.loc = CONST_CAST_RTX (expr);
+ }
+ }
mo->insn = (rtx) insn;
}
else if (MEM_P (loc)
- && MEM_EXPR (loc)
- && track_expr_p (MEM_EXPR (loc)))
+ && track_loc_p (loc, MEM_EXPR (loc), INT_MEM_OFFSET (loc),
+ false, &mode, NULL))
{
basic_block bb = BLOCK_FOR_INSN ((rtx) insn);
micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
if (GET_CODE (expr) == CLOBBER)
- mo->type = MO_CLOBBER;
- else if (GET_CODE (expr) == SET
- && SET_DEST (expr) == loc
- && same_variable_part_p (SET_SRC (expr),
- MEM_EXPR (loc),
- MEM_OFFSET (loc)
- ? INTVAL (MEM_OFFSET (loc)) : 0))
- mo->type = MO_COPY;
+ {
+ mo->type = MO_CLOBBER;
+ mo->u.loc = var_lowpart (mode, loc);
+ }
else
- mo->type = MO_SET;
- mo->u.loc = loc;
+ {
+ rtx src = NULL;
+
+ if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
+ src = var_lowpart (mode, SET_SRC (expr));
+ loc = var_lowpart (mode, loc);
+
+ if (src == NULL)
+ {
+ mo->type = MO_SET;
+ mo->u.loc = loc;
+ }
+ else
+ {
+ if (SET_SRC (expr) != src)
+ expr = gen_rtx_SET (VOIDmode, loc, src);
+ if (same_variable_part_p (SET_SRC (expr),
+ MEM_EXPR (loc),
+ INT_MEM_OFFSET (loc)))
+ mo->type = MO_COPY;
+ else
+ mo->type = MO_SET;
+ mo->u.loc = CONST_CAST_RTX (expr);
+ }
+ }
mo->insn = (rtx) insn;
}
}
static enum var_init_status
-find_src_status (dataflow_set *in, rtx loc, rtx insn)
+find_src_status (dataflow_set *in, rtx src)
{
- rtx src = NULL_RTX;
- rtx pattern;
tree decl = NULL_TREE;
enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
if (! flag_var_tracking_uninit)
status = VAR_INIT_STATUS_INITIALIZED;
- pattern = PATTERN (insn);
-
- if (GET_CODE (pattern) == COND_EXEC)
- pattern = COND_EXEC_CODE (pattern);
-
- if (GET_CODE (pattern) == SET)
- src = SET_SRC (pattern);
- else if (GET_CODE (pattern) == PARALLEL
- || GET_CODE (pattern) == SEQUENCE)
- {
- int i;
- for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
- if (GET_CODE (XVECEXP (pattern, 0, i)) == SET
- && SET_DEST (XVECEXP (pattern, 0, i)) == loc)
- src = SET_SRC (XVECEXP (pattern, 0, i));
- }
-
if (src && REG_P (src))
decl = var_debug_decl (REG_EXPR (src));
else if (src && MEM_P (src))
return status;
}
-/* LOC is the destination the variable is being copied to. INSN
- contains the copy instruction. SET is the dataflow set containing
- the variable in LOC. */
+/* SRC is the source of an assignment. Use SET to try to find what
+ was ultimately assigned to SRC. Return that value if known,
+ otherwise return SRC itself. */
static rtx
-find_src_set_src (dataflow_set *set, rtx loc, rtx insn)
+find_src_set_src (dataflow_set *set, rtx src)
{
tree decl = NULL_TREE; /* The variable being copied around. */
- rtx src = NULL_RTX; /* The location "decl" is being copied from. */
rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
- rtx pattern;
void **slot;
variable var;
location_chain nextp;
int i;
bool found;
-
- pattern = PATTERN (insn);
- if (GET_CODE (pattern) == COND_EXEC)
- pattern = COND_EXEC_CODE (pattern);
-
- if (GET_CODE (pattern) == SET)
- src = SET_SRC (pattern);
- else if (GET_CODE (pattern) == PARALLEL
- || GET_CODE (pattern) == SEQUENCE)
- {
- for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
- if (GET_CODE (XVECEXP (pattern, 0, i)) == SET
- && SET_DEST (XVECEXP (pattern, 0, i)) == loc)
- src = SET_SRC (XVECEXP (pattern, 0, i));
- }
-
if (src && REG_P (src))
decl = var_debug_decl (REG_EXPR (src));
else if (src && MEM_P (src))
case MO_SET:
{
rtx loc = VTI (bb)->mos[i].u.loc;
- rtx set_src = NULL;
- rtx insn = VTI (bb)->mos[i].insn;
+ rtx set_src = NULL;
- if (GET_CODE (PATTERN (insn)) == SET)
- set_src = SET_SRC (PATTERN (insn));
- else if (GET_CODE (PATTERN (insn)) == PARALLEL
- || GET_CODE (PATTERN (insn)) == SEQUENCE)
+ if (GET_CODE (loc) == SET)
{
- int j;
- for (j = XVECLEN (PATTERN (insn), 0) - 1; j >= 0; j--)
- if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET
- && SET_DEST (XVECEXP (PATTERN (insn), 0, j)) == loc)
- set_src = SET_SRC (XVECEXP (PATTERN (insn), 0, j));
+ set_src = SET_SRC (loc);
+ loc = SET_DEST (loc);
}
if (REG_P (loc))
{
rtx loc = VTI (bb)->mos[i].u.loc;
enum var_init_status src_status;
- rtx set_src;
+ rtx set_src = NULL;
+
+ if (GET_CODE (loc) == SET)
+ {
+ set_src = SET_SRC (loc);
+ loc = SET_DEST (loc);
+ }
if (! flag_var_tracking_uninit)
src_status = VAR_INIT_STATUS_INITIALIZED;
else
- src_status = find_src_status (in, loc, VTI (bb)->mos[i].insn);
+ src_status = find_src_status (in, set_src);
if (src_status == VAR_INIT_STATUS_UNKNOWN)
- src_status = find_src_status (out, loc, VTI (bb)->mos[i].insn);
+ src_status = find_src_status (out, set_src);
- set_src = find_src_set_src (in, loc, VTI (bb)->mos[i].insn);
+ set_src = find_src_set_src (in, set_src);
if (REG_P (loc))
var_reg_delete_and_set (out, loc, false, src_status, set_src);
while (!fibheap_empty (worklist))
{
- bb = fibheap_extract_min (worklist);
+ bb = (basic_block) fibheap_extract_min (worklist);
RESET_BIT (in_worklist, bb->index);
if (!TEST_BIT (visited, bb->index))
{
int i;
location_chain node;
- fprintf (dump_file, " name: %s\n",
+ fprintf (dump_file, " name: %s",
IDENTIFIER_POINTER (DECL_NAME (var->decl)));
+ if (dump_flags & TDF_UID)
+ fprintf (dump_file, " D.%u\n", DECL_UID (var->decl));
+ else
+ fprintf (dump_file, "\n");
+
for (i = 0; i < var->n_var_parts; i++)
{
fprintf (dump_file, " offset %ld\n",
variable empty_var;
void **old;
- empty_var = pool_alloc (var_pool);
+ empty_var = (variable) pool_alloc (var_pool);
empty_var->decl = var->decl;
empty_var->refcount = 1;
empty_var->n_var_parts = 0;
if (!*slot)
{
/* Create new variable information. */
- var = pool_alloc (var_pool);
+ var = (variable) pool_alloc (var_pool);
var->decl = decl;
var->refcount = 1;
var->n_var_parts = 1;
}
/* Add the location to the beginning. */
- node = pool_alloc (loc_chain_pool);
+ node = (location_chain) pool_alloc (loc_chain_pool);
node->loc = loc;
node->init = initialized;
node->set_src = set_src;
pool_free (attrs_pool, anode);
*anextp = anext;
}
+ else
+ anextp = &anode->next;
}
}
variable old_var, new_var;
old_var = *(variable *) slot;
- new_var = htab_find_with_hash (new_vars, old_var->decl,
+ new_var = (variable) htab_find_with_hash (new_vars, old_var->decl,
VARIABLE_HASH_VAL (old_var->decl));
if (!new_var)
/* Variable has disappeared. */
variable empty_var;
- empty_var = pool_alloc (var_pool);
+ empty_var = (variable) pool_alloc (var_pool);
empty_var->decl = old_var->decl;
empty_var->refcount = 1;
empty_var->n_var_parts = 0;
variable old_var, new_var;
new_var = *(variable *) slot;
- old_var = htab_find_with_hash (old_vars, new_var->decl,
+ old_var = (variable) htab_find_with_hash (old_vars, new_var->decl,
VARIABLE_HASH_VAL (new_var->decl));
if (!old_var)
{
case MO_SET:
{
rtx loc = VTI (bb)->mos[i].u.loc;
- rtx set_src = NULL;
+ rtx set_src = NULL;
- if (GET_CODE (PATTERN (insn)) == SET)
- set_src = SET_SRC (PATTERN (insn));
- else if (GET_CODE (PATTERN (insn)) == PARALLEL
- || GET_CODE (PATTERN (insn)) == SEQUENCE)
+ if (GET_CODE (loc) == SET)
{
- int j;
- for (j = XVECLEN (PATTERN (insn), 0) - 1; j >= 0; j--)
- if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET
- && SET_DEST (XVECEXP (PATTERN (insn), 0, j)) == loc)
- set_src = SET_SRC (XVECEXP (PATTERN (insn), 0, j));
+ set_src = SET_SRC (loc);
+ loc = SET_DEST (loc);
}
if (REG_P (loc))
{
rtx loc = VTI (bb)->mos[i].u.loc;
enum var_init_status src_status;
- rtx set_src;
+ rtx set_src = NULL;
- src_status = find_src_status (&set, loc, VTI (bb)->mos[i].insn);
- set_src = find_src_set_src (&set, loc, VTI (bb)->mos[i].insn);
+ if (GET_CODE (loc) == SET)
+ {
+ set_src = SET_SRC (loc);
+ loc = SET_DEST (loc);
+ }
+
+ src_status = find_src_status (&set, set_src);
+ set_src = find_src_set_src (&set, set_src);
if (REG_P (loc))
var_reg_delete_and_set (&set, loc, false, src_status, set_src);
if (MEM_ATTRS (rtl))
{
*declp = MEM_EXPR (rtl);
- *offsetp = MEM_OFFSET (rtl) ? INTVAL (MEM_OFFSET (rtl)) : 0;
+ *offsetp = INT_MEM_OFFSET (rtl);
return true;
}
}
rtx decl_rtl = DECL_RTL_IF_SET (parm);
rtx incoming = DECL_INCOMING_RTL (parm);
tree decl;
+ enum machine_mode mode;
HOST_WIDE_INT offset;
dataflow_set *out;
continue;
if (!vt_get_decl_and_offset (incoming, &decl, &offset))
- if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
- continue;
+ {
+ if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
+ continue;
+ offset += byte_lowpart_offset (GET_MODE (incoming),
+ GET_MODE (decl_rtl));
+ }
if (!decl)
continue;
- gcc_assert (parm == decl);
+ if (parm != decl)
+ {
+ /* Assume that DECL_RTL was a pseudo that got spilled to
+ memory. The spill slot sharing code will force the
+ memory to reference spill_slot_decl (%sfp), so we don't
+ match above. That's ok, the pseudo must have referenced
+ the entire parameter, so just reset OFFSET. */
+ gcc_assert (decl == get_spill_slot_decl (false));
+ offset = 0;
+ }
+
+ if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
+ continue;
out = &VTI (ENTRY_BLOCK_PTR)->out;
if (REG_P (incoming))
{
+ incoming = var_lowpart (mode, incoming);
gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
attrs_list_insert (&out->regs[REGNO (incoming)],
parm, offset, incoming);
NULL);
}
else if (MEM_P (incoming))
- set_variable_part (out, incoming, parm, offset, VAR_INIT_STATUS_INITIALIZED,
- NULL);
+ {
+ incoming = var_lowpart (mode, incoming);
+ set_variable_part (out, incoming, parm, offset,
+ VAR_INIT_STATUS_INITIALIZED, NULL);
+ }
}
}
-struct tree_opt_pass pass_variable_tracking =
+struct rtl_opt_pass pass_variable_tracking =
{
+ {
+ RTL_PASS,
"vartrack", /* name */
gate_handle_var_tracking, /* gate */
variable_tracking_main, /* execute */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_verify_rtl_sharing,/* todo_flags_finish */
- 'V' /* letter */
+ TODO_dump_func | TODO_verify_rtl_sharing/* todo_flags_finish */
+ }
};