#include "toplev.h"
#include "params.h"
#include "diagnostic.h"
+#include "tree-pretty-print.h"
#include "pointer-set.h"
+#include "recog.h"
/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
/* Type of micro operation. */
enum micro_operation_type type;
+ /* The instruction which the micro operation is in, for MO_USE,
+ MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
+ instruction or note in the original flow (before any var-tracking
+ notes are inserted, to simplify emission of notes), for MO_SET
+ and MO_CLOBBER. */
+ rtx insn;
+
union {
/* Location. For MO_SET and MO_COPY, this is the SET that
performs the assignment, if known, otherwise it is the target
/* Stack adjustment. */
HOST_WIDE_INT adjust;
} u;
-
- /* The instruction which the micro operation is in, for MO_USE,
- MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
- instruction or note in the original flow (before any var-tracking
- notes are inserted, to simplify emission of notes), for MO_SET
- and MO_CLOBBER. */
- rtx insn;
} micro_operation;
+DEF_VEC_O(micro_operation);
+DEF_VEC_ALLOC_O(micro_operation,heap);
+
/* A declaration of a variable, or an RTL value being handled like a
declaration. */
typedef void *decl_or_value;
needed for variable tracking. */
typedef struct variable_tracking_info_def
{
- /* Number of micro operations stored in the MOS array. */
- int n_mos;
-
- /* The array of micro operations. */
- micro_operation *mos;
+ /* The vector of micro operations. */
+ VEC(micro_operation, heap) *mos;
/* The IN and OUT set for dataflow analysis. */
dataflow_set in;
HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
HOST_WIDE_INT *);
-static void bb_stack_adjust_offset (basic_block);
static bool vt_stack_adjustments (void);
-static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
+static rtx compute_cfa_pointer (HOST_WIDE_INT);
static hashval_t variable_htab_hash (const void *);
static int variable_htab_eq (const void *, const void *);
static void variable_htab_free (void *);
static void **unshare_variable (dataflow_set *set, void **slot, variable var,
enum var_init_status);
-static int vars_copy_1 (void **, void *);
static void vars_copy (htab_t, htab_t);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
-static int variable_union (void **, void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
static bool canon_value_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
static bool variable_different_p (variable, variable);
-static int dataflow_set_different_1 (void **, void *);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);
static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree, bool);
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
-static int count_uses (rtx *, void *);
-static void count_uses_1 (rtx *, void *);
-static void count_stores (rtx, const_rtx, void *);
static int add_uses (rtx *, void *);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
-static void vt_initialize (void);
+static bool vt_initialize (void);
static void vt_finalize (void);
/* Given a SET, calculate the amount of stack adjustment it contains
}
}
-/* Compute stack adjustment in basic block BB. */
-
-static void
-bb_stack_adjust_offset (basic_block bb)
-{
- HOST_WIDE_INT offset;
- int i;
-
- offset = VTI (bb)->in.stack_adjust;
- for (i = 0; i < VTI (bb)->n_mos; i++)
- {
- if (VTI (bb)->mos[i].type == MO_ADJUST)
- offset += VTI (bb)->mos[i].u.adjust;
- else if (VTI (bb)->mos[i].type != MO_CALL)
- {
- if (MEM_P (VTI (bb)->mos[i].u.loc))
- {
- VTI (bb)->mos[i].u.loc
- = adjust_stack_reference (VTI (bb)->mos[i].u.loc, -offset);
- }
- }
- }
- VTI (bb)->out.stack_adjust = offset;
-}
-
/* Compute stack adjustments for all blocks by traversing DFS tree.
Return true when the adjustments on all incoming edges are consistent.
Heavily borrowed from pre_and_rev_post_order_compute. */
/* Initialize entry block. */
VTI (ENTRY_BLOCK_PTR)->visited = true;
+ VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
/* Allocate stack for back-tracking up CFG. */
/* Check if the edge destination has been visited yet. */
if (!VTI (dest)->visited)
{
+ rtx insn;
+ HOST_WIDE_INT pre, post, offset;
VTI (dest)->visited = true;
- VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust;
- bb_stack_adjust_offset (dest);
+ VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
+
+ if (dest != EXIT_BLOCK_PTR)
+ for (insn = BB_HEAD (dest);
+ insn != NEXT_INSN (BB_END (dest));
+ insn = NEXT_INSN (insn))
+ if (INSN_P (insn))
+ {
+ insn_stack_adjust_offset_pre_post (insn, &pre, &post);
+ offset += pre + post;
+ }
+
+ VTI (dest)->out.stack_adjust = offset;
if (EDGE_COUNT (dest->succs) > 0)
/* Since the DEST node has been visited for the first
return true;
}
-/* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative
- to the argument pointer. Return the new rtx. */
+/* Compute a CFA-based value for the stack pointer. */
static rtx
-adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment)
+compute_cfa_pointer (HOST_WIDE_INT adjustment)
{
- rtx addr, cfa, tmp;
+ rtx cfa;
#ifdef FRAME_POINTER_CFA_OFFSET
adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
cfa = plus_constant (arg_pointer_rtx, adjustment);
#endif
- addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa);
- tmp = simplify_rtx (addr);
- if (tmp)
- addr = tmp;
+ return cfa;
+}
+
+/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
+ or -1 if the replacement shouldn't be done. */
+static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
+
+/* Data for adjust_mems callback. */
+
+struct adjust_mem_data
+{
+ /* True while processing the destination of a store. */
+ bool store;
+ /* Mode of the enclosing MEM, or VOIDmode outside of MEM addresses. */
+ enum machine_mode mem_mode;
+ /* Stack adjustment used when replacing stack_pointer_rtx inside
+ MEM addresses. */
+ HOST_WIDE_INT stack_adjust;
+ /* EXPR_LIST of SETs accumulated from auto-inc/dec/modify addresses,
+ to be appended to the insn pattern by adjust_insn. */
+ rtx side_effects;
+};
+
+/* Helper for adjust_mems. Return 1 if *loc is unsuitable for
+ transformation of wider mode arithmetics to narrower mode,
+ -1 if it is suitable and subexpressions shouldn't be
+ traversed and 0 if it is suitable and subexpressions should
+ be traversed. Called through for_each_rtx. DATA is the
+ SUBREG whose narrowing is being considered. */
+
+static int
+use_narrower_mode_test (rtx *loc, void *data)
+{
+ rtx subreg = (rtx) data;
+
+ /* Constants can always be represented in the narrower mode. */
+ if (CONSTANT_P (*loc))
+ return -1;
+ switch (GET_CODE (*loc))
+ {
+ case REG:
+ /* NOTE(review): a REG with a known cselib VALUE in the wider
+ mode is rejected, presumably so that VALUE can be used
+ instead of a narrowed copy -- confirm. */
+ if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0))
+ return 1;
+ return -1;
+ case PLUS:
+ case MINUS:
+ case MULT:
+ /* These commute with truncation; inspect the operands. */
+ return 0;
+ case ASHIFT:
+ /* Only the shifted operand is checked; the shift count keeps
+ its original mode (see use_narrower_mode). */
+ if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
+ return 1;
+ else
+ return -1;
+ default:
+ /* Anything else is not known to be safe to narrow. */
+ return 1;
+ }
+}
+
+/* Transform X into narrower mode MODE from wider mode WMODE.
+ Only expressions accepted by use_narrower_mode_test may appear
+ here; anything else aborts. */
+
+static rtx
+use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
+{
+ rtx op0, op1;
+ if (CONSTANT_P (x))
+ return lowpart_subreg (mode, x, wmode);
+ switch (GET_CODE (x))
+ {
+ case REG:
+ return lowpart_subreg (mode, x, wmode);
+ case PLUS:
+ case MINUS:
+ case MULT:
+ /* Recursively narrow both operands and rebuild the operation
+ in the narrower mode. */
+ op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
+ op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
+ return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
+ case ASHIFT:
+ /* The shift count operand keeps its original mode. */
+ op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
+ return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
+ default:
+ gcc_unreachable ();
+ }
+}
+
+/* Helper function for adjusting used MEMs. Callback for
+ simplify_replace_fn_rtx: LOC is the rtx being visited, DATA points
+ to a struct adjust_mem_data. Returns a replacement rtx, or NULL_RTX
+ so the caller keeps traversing LOC's subexpressions. Replaces sp/fp
+ inside MEM addresses with CFA-based expressions and rewrites
+ auto-inc/dec/modify addresses, queueing their side effects. */
+
+static rtx
+adjust_mems (rtx loc, const_rtx old_rtx, void *data)
+{
+ struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
+ rtx mem, addr = loc, tem;
+ enum machine_mode mem_mode_save;
+ bool store_save;
+ switch (GET_CODE (loc))
+ {
+ case REG:
+ /* Don't do any sp or fp replacements outside of MEM addresses. */
+ if (amd->mem_mode == VOIDmode)
+ return loc;
+ if (loc == stack_pointer_rtx
+ && !frame_pointer_needed)
+ return compute_cfa_pointer (amd->stack_adjust);
+ else if (loc == hard_frame_pointer_rtx
+ && frame_pointer_needed
+ && hard_frame_pointer_adjustment != -1)
+ return compute_cfa_pointer (hard_frame_pointer_adjustment);
+ return loc;
+ case MEM:
+ mem = loc;
+ if (!amd->store)
+ {
+ mem = targetm.delegitimize_address (mem);
+ if (mem != loc && !MEM_P (mem))
+ return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
+ }
+
+ /* Recurse into the address with mem_mode/store set up so REG
+ replacements above are enabled, then restore them. */
+ addr = XEXP (mem, 0);
+ mem_mode_save = amd->mem_mode;
+ amd->mem_mode = GET_MODE (mem);
+ store_save = amd->store;
+ amd->store = false;
+ addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
+ amd->store = store_save;
+ amd->mem_mode = mem_mode_save;
+ if (mem == loc)
+ addr = targetm.delegitimize_address (addr);
+ if (addr != XEXP (mem, 0))
+ mem = replace_equiv_address_nv (mem, addr);
+ if (!amd->store)
+ mem = avoid_constant_pool_reference (mem);
+ return mem;
+ case PRE_INC:
+ case PRE_DEC:
+ /* Pre-modification: the adjusted address is the operand plus or
+ minus the access size. */
+ addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
+ GEN_INT (GET_CODE (loc) == PRE_INC
+ ? GET_MODE_SIZE (amd->mem_mode)
+ : -GET_MODE_SIZE (amd->mem_mode)));
+ /* FALLTHRU */
+ case POST_INC:
+ case POST_DEC:
+ if (addr == loc)
+ addr = XEXP (loc, 0);
+ gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
+ addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
+ /* Queue the register update as a separate SET so adjust_insn can
+ append it to the insn pattern. */
+ tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
+ GEN_INT ((GET_CODE (loc) == PRE_INC
+ || GET_CODE (loc) == POST_INC)
+ ? GET_MODE_SIZE (amd->mem_mode)
+ : -GET_MODE_SIZE (amd->mem_mode)));
+ amd->side_effects = alloc_EXPR_LIST (0,
+ gen_rtx_SET (VOIDmode,
+ XEXP (loc, 0),
+ tem),
+ amd->side_effects);
+ return addr;
+ case PRE_MODIFY:
+ addr = XEXP (loc, 1);
+ /* FALLTHRU */
+ case POST_MODIFY:
+ if (addr == loc)
+ addr = XEXP (loc, 0);
+ gcc_assert (amd->mem_mode != VOIDmode);
+ addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
+ amd->side_effects = alloc_EXPR_LIST (0,
+ gen_rtx_SET (VOIDmode,
+ XEXP (loc, 0),
+ XEXP (loc, 1)),
+ amd->side_effects);
+ return addr;
+ case SUBREG:
+ /* First try without delegitimization of whole MEMs and
+ avoid_constant_pool_reference, which is more likely to succeed. */
+ store_save = amd->store;
+ amd->store = true;
+ addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
+ data);
+ amd->store = store_save;
+ mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
+ if (mem == SUBREG_REG (loc))
+ {
+ tem = loc;
+ goto finish_subreg;
+ }
+ tem = simplify_gen_subreg (GET_MODE (loc), mem,
+ GET_MODE (SUBREG_REG (loc)),
+ SUBREG_BYTE (loc));
+ if (tem)
+ goto finish_subreg;
+ tem = simplify_gen_subreg (GET_MODE (loc), addr,
+ GET_MODE (SUBREG_REG (loc)),
+ SUBREG_BYTE (loc));
+ if (tem == NULL_RTX)
+ tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
+ finish_subreg:
+ /* If this is a lowpart SUBREG of wider-mode arithmetic that is
+ safe to narrow, distribute the truncation into the operands. */
+ if (MAY_HAVE_DEBUG_INSNS
+ && GET_CODE (tem) == SUBREG
+ && (GET_CODE (SUBREG_REG (tem)) == PLUS
+ || GET_CODE (SUBREG_REG (tem)) == MINUS
+ || GET_CODE (SUBREG_REG (tem)) == MULT
+ || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
+ && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
+ && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
+ && GET_MODE_SIZE (GET_MODE (tem))
+ < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
+ && subreg_lowpart_p (tem)
+ && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
+ return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
+ GET_MODE (SUBREG_REG (tem)));
+ return tem;
+ default:
+ break;
+ }
+ /* No replacement at this level; keep traversing subexpressions. */
+ return NULL_RTX;
+}
+
+/* Helper function for replacement of uses. Called via note_uses;
+ rewrites *X through adjust_mems and queues any change. */
+
+static void
+adjust_mem_uses (rtx *x, void *data)
+{
+ rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
+ /* Queue the change (in_group = true); the whole group is applied
+ together later. */
+ if (new_x != *x)
+ validate_change (NULL_RTX, x, new_x, true);
+}
+
+/* Helper function for replacement of stores. Called via note_stores;
+ LOC is the stored-to location and EXPR the enclosing SET. */
+
+static void
+adjust_mem_stores (rtx loc, const_rtx expr, void *data)
+{
+ /* Only MEM destinations need their address rewritten. */
+ if (MEM_P (loc))
+ {
+ rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
+ adjust_mems, data);
+ if (new_dest != SET_DEST (expr))
+ {
+ rtx xexpr = CONST_CAST_RTX (expr);
+ /* Queue the change; applied together with the group later. */
+ validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
+ }
+ }
+}
+
+/* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
+ replace them with their value in the insn and add the side-effects
+ as other sets to the insn. */
+
+static void
+adjust_insn (basic_block bb, rtx insn)
+{
+ struct adjust_mem_data amd;
+ rtx set;
+ amd.mem_mode = VOIDmode;
+ amd.stack_adjust = -VTI (bb)->out.stack_adjust;
+ amd.side_effects = NULL_RTX;
+
+ /* Rewrite store destinations first, then uses. */
+ amd.store = true;
+ note_stores (PATTERN (insn), adjust_mem_stores, &amd);
+
+ amd.store = false;
+ note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
+
+ /* For read-only MEMs containing some constant, prefer those
+ constants. */
+ set = single_set (insn);
+ if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
+ {
+ rtx note = find_reg_equal_equiv_note (insn);
+
+ if (note && CONSTANT_P (XEXP (note, 0)))
+ validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
+ }
+
+ /* Append the SETs queued for auto-inc/dec/modify addresses as extra
+ elements of a PARALLEL around the (possibly COND_EXEC-wrapped)
+ pattern. */
+ if (amd.side_effects)
+ {
+ rtx *pat, new_pat, s;
+ int i, oldn, newn;
- return replace_equiv_address_nv (mem, addr);
+ pat = &PATTERN (insn);
+ if (GET_CODE (*pat) == COND_EXEC)
+ pat = &COND_EXEC_CODE (*pat);
+ if (GET_CODE (*pat) == PARALLEL)
+ oldn = XVECLEN (*pat, 0);
+ else
+ oldn = 1;
+ /* Count the queued side effects. */
+ for (s = amd.side_effects, newn = 0; s; newn++)
+ s = XEXP (s, 1);
+ new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
+ if (GET_CODE (*pat) == PARALLEL)
+ for (i = 0; i < oldn; i++)
+ XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
+ else
+ XVECEXP (new_pat, 0, 0) = *pat;
+ for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
+ XVECEXP (new_pat, 0, i) = XEXP (s, 0);
+ free_EXPR_LIST_list (&amd.side_effects);
+ validate_change (NULL_RTX, pat, new_pat, true);
+ }
}
/* Return true if a decl_or_value DV is a DECL or NULL. */
return slot;
}
-/* Add a variable from *SLOT to hash table DATA and increase its reference
- count. */
-
-static int
-vars_copy_1 (void **slot, void *data)
-{
- htab_t dst = (htab_t) data;
- variable src;
- void **dstp;
-
- src = (variable) *slot;
- src->refcount++;
-
- dstp = htab_find_slot_with_hash (dst, src->dv,
- dv_htab_hash (src->dv),
- INSERT);
- *dstp = src;
-
- /* Continue traversing the hash table. */
- return 1;
-}
-
/* Copy all variables from hash table SRC to hash table DST. */
static void
vars_copy (htab_t dst, htab_t src)
{
- htab_traverse_noresize (src, vars_copy_1, dst);
+ htab_iterator hi;
+ variable var;
+
+ FOR_EACH_HTAB_ELEMENT (src, var, variable, hi)
+ {
+ void **dstp;
+ /* Both tables share the variable, so bump its refcount. */
+ var->refcount++;
+ dstp = htab_find_slot_with_hash (dst, var->dv,
+ dv_htab_hash (var->dv),
+ INSERT);
+ *dstp = var;
+ }
}
/* Map a decl to its main debug decl. */
var_debug_decl (tree decl)
{
if (decl && DECL_P (decl)
- && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
- && DECL_P (DECL_DEBUG_EXPR (decl)))
- decl = DECL_DEBUG_EXPR (decl);
+ && DECL_DEBUG_EXPR_IS_FROM (decl))
+ {
+ tree debugdecl = DECL_DEBUG_EXPR (decl);
+ if (debugdecl && DECL_P (debugdecl))
+ decl = debugdecl;
+ }
return decl;
}
we keep the newest locations in the beginning. */
static int
-variable_union (void **slot, void *data)
+variable_union (variable src, dataflow_set *set)
{
- variable src, dst;
+ variable dst;
void **dstp;
- dataflow_set *set = (dataflow_set *) data;
int i, j, k;
- src = (variable) *slot;
dstp = shared_hash_find_slot (set->vars, src->dv);
if (!dstp || !*dstp)
{
{
location_chain *nodep, dnode, snode;
- gcc_assert (src->n_var_parts == 1);
- gcc_assert (dst->n_var_parts == 1);
+ gcc_assert (src->n_var_parts == 1
+ && dst->n_var_parts == 1);
snode = src->var_part[0].loc_chain;
gcc_assert (snode);
dst->vars = shared_hash_copy (src->vars);
}
else
- htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
+ {
+ htab_iterator hi;
+ variable var;
+
+ FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
+ variable_union (var, dst);
+ }
}
/* Whether the value is currently being expanded. */
find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
{
location_chain node;
+ enum rtx_code loc_code;
if (!var)
return NULL;
+#ifdef ENABLE_CHECKING
gcc_assert (dv_onepart_p (var->dv));
+#endif
if (!var->n_var_parts)
return NULL;
+#ifdef ENABLE_CHECKING
gcc_assert (var->var_part[0].offset == 0);
+#endif
+ loc_code = GET_CODE (loc);
for (node = var->var_part[0].loc_chain; node; node = node->next)
- if (rtx_equal_p (loc, node->loc))
- return node;
- else if (GET_CODE (node->loc) == VALUE
- && !VALUE_RECURSED_INTO (node->loc))
- {
- decl_or_value dv = dv_from_value (node->loc);
- variable var = (variable)
- htab_find_with_hash (vars, dv, dv_htab_hash (dv));
+ {
+ if (GET_CODE (node->loc) != loc_code)
+ {
+ if (GET_CODE (node->loc) != VALUE)
+ continue;
+ }
+ else if (loc == node->loc)
+ return node;
+ else if (loc_code != VALUE)
+ {
+ if (rtx_equal_p (loc, node->loc))
+ return node;
+ continue;
+ }
+ if (!VALUE_RECURSED_INTO (node->loc))
+ {
+ decl_or_value dv = dv_from_value (node->loc);
+ variable var = (variable)
+ htab_find_with_hash (vars, dv, dv_htab_hash (dv));
- if (var)
- {
- location_chain where;
- VALUE_RECURSED_INTO (node->loc) = true;
- if ((where = find_loc_in_1pdv (loc, var, vars)))
- {
- VALUE_RECURSED_INTO (node->loc) = false;
- return where;
- }
- VALUE_RECURSED_INTO (node->loc) = false;
- }
- }
+ if (var)
+ {
+ location_chain where;
+ VALUE_RECURSED_INTO (node->loc) = true;
+ if ((where = find_loc_in_1pdv (loc, var, vars)))
+ {
+ VALUE_RECURSED_INTO (node->loc) = false;
+ return where;
+ }
+ VALUE_RECURSED_INTO (node->loc) = false;
+ }
+ }
+ }
return NULL;
}
dataflow_set *s2set = dsm->src;
location_chain found;
+ if (s2var)
+ {
+ location_chain s2node;
+
+#ifdef ENABLE_CHECKING
+ gcc_assert (dv_onepart_p (s2var->dv));
+#endif
+
+ if (s2var->n_var_parts)
+ {
+#ifdef ENABLE_CHECKING
+ gcc_assert (s2var->var_part[0].offset == 0);
+#endif
+ s2node = s2var->var_part[0].loc_chain;
+
+ for (; s1node && s2node;
+ s1node = s1node->next, s2node = s2node->next)
+ if (s1node->loc != s2node->loc)
+ break;
+ else if (s1node->loc == val)
+ continue;
+ else
+ insert_into_intersection (dest, s1node->loc,
+ MIN (s1node->init, s2node->init));
+ }
+ }
+
for (; s1node; s1node = s1node->next)
{
if (s1node->loc == val)
}
/* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
- VALUEs to DV. */
+ VALUEs to DV. Add the same time get rid of ASM_OPERANDS from locs list,
+ that is something we never can express in .debug_info and can prevent
+ reverse ops from being used. */
static void
add_cselib_value_chains (decl_or_value dv)
{
- struct elt_loc_list *l;
+ struct elt_loc_list **l;
- for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
- for_each_rtx (&l->loc, add_value_chain, dv_as_opaque (dv));
+ for (l = &CSELIB_VAL_PTR (dv_as_value (dv))->locs; *l;)
+ if (GET_CODE ((*l)->loc) == ASM_OPERANDS)
+ *l = (*l)->next;
+ else
+ {
+ for_each_rtx (&(*l)->loc, add_value_chain, dv_as_opaque (dv));
+ l = &(*l)->next;
+ }
}
/* If decl or value DVP refers to VALUE from *LOC, remove backlinks
for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
}
+#if ENABLE_CHECKING
/* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
VALUEs to DV. */
for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
}
-#if ENABLE_CHECKING
/* Check the order of entries in one-part variables. */
static int
return 1;
}
+/* Bind one-part variables to the canonical value in an equivalence
+ set. Not doing this causes dataflow convergence failure in rare
+ circumstances, see PR42873. Unfortunately we can't do this
+ efficiently as part of canonicalize_values_star, since we may not
+ have determined or even seen the canonical value of a set when we
+ get to a variable that references another member of the set.
+ Called through htab_traverse; always returns 1 to keep going. */
+
+static int
+canonicalize_vars_star (void **slot, void *data)
+{
+ dataflow_set *set = (dataflow_set *)data;
+ variable var = (variable) *slot;
+ decl_or_value dv = var->dv;
+ location_chain node;
+ rtx cval;
+ decl_or_value cdv;
+ void **cslot;
+ variable cvar;
+ location_chain cnode;
+
+ /* Only decl-based one-part variables are handled here. */
+ if (!dv_onepart_p (dv) || dv_is_value_p (dv))
+ return 1;
+
+ gcc_assert (var->n_var_parts == 1);
+
+ node = var->var_part[0].loc_chain;
+
+ if (GET_CODE (node->loc) != VALUE)
+ return 1;
+
+ gcc_assert (!node->next);
+ cval = node->loc;
+
+ /* Push values to the canonical one. */
+ cdv = dv_from_value (cval);
+ cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
+ if (!cslot)
+ return 1;
+ cvar = (variable)*cslot;
+ gcc_assert (cvar->n_var_parts == 1);
+
+ cnode = cvar->var_part[0].loc_chain;
+
+ /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
+ that are not "more canonical" than it. */
+ if (GET_CODE (cnode->loc) != VALUE
+ || !canon_value_cmp (cnode->loc, cval))
+ return 1;
+
+ /* CVAL was found to be non-canonical. Change the variable to point
+ to the canonical VALUE. */
+ gcc_assert (!cnode->next);
+ cval = cnode->loc;
+
+ slot = set_slot_part (set, cval, slot, dv, 0,
+ node->init, node->set_src);
+ slot = clobber_slot_part (set, cval, slot, 0, node->set_src);
+
+ return 1;
+}
+
/* Combine variable or value in *S1SLOT (in DSM->cur) with the
corresponding entry in DSM->src. Multi-part variables are combined
with variable_union, whereas onepart dvs are combined with
intersection. */
static int
-variable_merge_over_cur (void **s1slot, void *data)
+variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
{
- struct dfset_merge *dsm = (struct dfset_merge *)data;
dataflow_set *dst = dsm->dst;
void **dstslot;
- variable s1var = (variable) *s1slot;
variable s2var, dvar = NULL;
decl_or_value dv = s1var->dv;
bool onepart = dv_onepart_p (dv);
/* If the incoming onepart variable has an empty location list, then
the intersection will be just as empty. For other variables,
it's always union. */
- gcc_assert (s1var->n_var_parts);
- gcc_assert (s1var->var_part[0].loc_chain);
+ gcc_assert (s1var->n_var_parts
+ && s1var->var_part[0].loc_chain);
if (!onepart)
- return variable_union (s1slot, dst);
+ return variable_union (s1var, dst);
- gcc_assert (s1var->n_var_parts == 1);
- gcc_assert (s1var->var_part[0].offset == 0);
+ gcc_assert (s1var->n_var_parts == 1
+ && s1var->var_part[0].offset == 0);
dvhash = dv_htab_hash (dv);
if (dv_is_value_p (dv))
}
dsm->src_onepart_cnt--;
- gcc_assert (s2var->var_part[0].loc_chain);
- gcc_assert (s2var->n_var_parts == 1);
- gcc_assert (s2var->var_part[0].offset == 0);
+ gcc_assert (s2var->var_part[0].loc_chain
+ && s2var->n_var_parts == 1
+ && s2var->var_part[0].offset == 0);
dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
if (dstslot)
{
dvar = (variable)*dstslot;
- gcc_assert (dvar->refcount == 1);
- gcc_assert (dvar->n_var_parts == 1);
- gcc_assert (dvar->var_part[0].offset == 0);
+ gcc_assert (dvar->refcount == 1
+ && dvar->n_var_parts == 1
+ && dvar->var_part[0].offset == 0);
nodep = &dvar->var_part[0].loc_chain;
}
else
variable_merge_over_cur(). */
static int
-variable_merge_over_src (void **s2slot, void *data)
+variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
{
- struct dfset_merge *dsm = (struct dfset_merge *)data;
dataflow_set *dst = dsm->dst;
- variable s2var = (variable) *s2slot;
decl_or_value dv = s2var->dv;
bool onepart = dv_onepart_p (dv);
struct dfset_merge dsm;
int i;
size_t src1_elems, src2_elems;
+ htab_iterator hi;
+ variable var;
src1_elems = htab_elements (shared_hash_htab (src1->vars));
src2_elems = htab_elements (shared_hash_htab (src2->vars));
dsm.cur = src1;
dsm.src_onepart_cnt = 0;
- htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
- &dsm);
- htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
- &dsm);
+ FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.src->vars), var, variable, hi)
+ variable_merge_over_src (var, &dsm);
+ FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.cur->vars), var, variable, hi)
+ variable_merge_over_cur (var, &dsm);
if (dsm.src_onepart_cnt)
dst_can_be_shared = false;
att; att = att->next)
if (GET_MODE (att->loc) == GET_MODE (node->loc))
{
- gcc_assert (att->offset == 0);
- gcc_assert (dv_is_value_p (att->dv));
+ gcc_assert (att->offset == 0
+ && dv_is_value_p (att->dv));
val_reset (set, att->dv);
break;
}
decl_or_value dv;
attrs att;
- gcc_assert (dv_is_value_p (pvar->dv));
- gcc_assert (pvar->n_var_parts == 1);
+ gcc_assert (dv_is_value_p (pvar->dv)
+ && pvar->n_var_parts == 1);
pnode = pvar->var_part[0].loc_chain;
- gcc_assert (pnode);
- gcc_assert (!pnode->next);
- gcc_assert (REG_P (pnode->loc));
+ gcc_assert (pnode
+ && !pnode->next
+ && REG_P (pnode->loc));
dv = pvar->dv;
{
attrs_list_insert (&set->regs[REGNO (pnode->loc)],
dv, 0, pnode->loc);
- variable_union (pslot, set);
+ variable_union (pvar, set);
}
return 1;
htab_traverse (shared_hash_htab ((*permp)->vars),
variable_post_merge_perm_vals, &dfpm);
htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
+ htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
}
/* Return a node whose loc is a MEM that refers to EXPR in the
if (!val)
return NULL;
- gcc_assert (GET_CODE (val) == VALUE);
-
- gcc_assert (!VALUE_RECURSED_INTO (val));
+ gcc_assert (GET_CODE (val) == VALUE
+ && !VALUE_RECURSED_INTO (val));
dv = dv_from_value (val);
var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
if (!var->var_part[0].loc_chain)
{
var->n_var_parts--;
- if (emit_notes && dv_is_value_p (var->dv))
- remove_cselib_value_chains (var->dv);
changed = true;
}
if (changed)
if (!var->var_part[0].loc_chain)
{
var->n_var_parts--;
- if (emit_notes && dv_is_value_p (var->dv))
- remove_cselib_value_chains (var->dv);
changed = true;
}
if (changed)
int r;
for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
- if (TEST_HARD_REG_BIT (call_used_reg_set, r))
+ if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
var_regno_delete (set, r);
if (MAY_HAVE_DEBUG_INSNS)
}
}
-/* Flag whether two dataflow sets being compared contain different data. */
-static bool
-dataflow_set_different_value;
-
static bool
variable_part_different_p (variable_part *vp1, variable_part *vp2)
{
if (var1 == var2)
return false;
- gcc_assert (var1->n_var_parts == 1);
- gcc_assert (var2->n_var_parts == 1);
+ gcc_assert (var1->n_var_parts == 1
+ && var2->n_var_parts == 1);
lc1 = var1->var_part[0].loc_chain;
lc2 = var2->var_part[0].loc_chain;
- gcc_assert (lc1);
- gcc_assert (lc2);
+ gcc_assert (lc1 && lc2);
while (lc1 && lc2)
{
/* One-part values have locations in a canonical order. */
if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
{
- gcc_assert (var1->n_var_parts == 1);
- gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
+ gcc_assert (var1->n_var_parts == 1
+ && dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
return onepart_variable_different_p (var1, var2);
}
if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
return false;
}
-/* Compare variable *SLOT with the same variable in hash table DATA
- and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
-
-static int
-dataflow_set_different_1 (void **slot, void *data)
-{
- htab_t htab = (htab_t) data;
- variable var1, var2;
-
- var1 = (variable) *slot;
- var2 = (variable) htab_find_with_hash (htab, var1->dv,
- dv_htab_hash (var1->dv));
- if (!var2)
- {
- dataflow_set_different_value = true;
-
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- fprintf (dump_file, "dataflow difference found: removal of:\n");
- dump_var (var1);
- }
-
- /* Stop traversing the hash table. */
- return 0;
- }
-
- if (variable_different_p (var1, var2))
- {
- dataflow_set_different_value = true;
-
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- fprintf (dump_file, "dataflow difference found: old and new follow:\n");
- dump_var (var1);
- dump_var (var2);
- }
-
- /* Stop traversing the hash table. */
- return 0;
- }
-
- /* Continue traversing the hash table. */
- return 1;
-}
-
/* Return true if dataflow sets OLD_SET and NEW_SET differ. */
static bool
dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
{
+ htab_iterator hi;
+ variable var1;
+
if (old_set->vars == new_set->vars)
return false;
!= htab_elements (shared_hash_htab (new_set->vars)))
return true;
- dataflow_set_different_value = false;
-
- htab_traverse (shared_hash_htab (old_set->vars), dataflow_set_different_1,
- shared_hash_htab (new_set->vars));
- /* No need to traverse the second hashtab, if both have the same number
- of elements and the second one had all entries found in the first one,
- then it can't have any extra entries. */
- return dataflow_set_different_value;
+ FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set->vars), var1, variable, hi)
+ {
+ htab_t htab = shared_hash_htab (new_set->vars);
+ variable var2 = (variable) htab_find_with_hash (htab, var1->dv,
+ dv_htab_hash (var1->dv));
+ if (!var2)
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "dataflow difference found: removal of:\n");
+ dump_var (var1);
+ }
+ return true;
+ }
+
+ if (variable_different_p (var1, var2))
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "dataflow difference found: "
+ "old and new follow:\n");
+ dump_var (var1);
+ dump_var (var2);
+ }
+ return true;
+ }
+ }
+
+ /* No need to traverse the second hashtab, if both have the same number
+ of elements and the second one had all entries found in the first one,
+ then it can't have any extra entries. */
+ return false;
}
/* Free the contents of dataflow set SET. */
don't need to track this expression if the ultimate declaration is
ignored. */
realdecl = expr;
- if (DECL_DEBUG_EXPR_IS_FROM (realdecl) && DECL_DEBUG_EXPR (realdecl))
+ if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
{
realdecl = DECL_DEBUG_EXPR (realdecl);
- /* ??? We don't yet know how to emit DW_OP_piece for variable
- that has been SRA'ed. */
- if (!DECL_P (realdecl))
- return 0;
+ if (realdecl == NULL_TREE)
+ realdecl = expr;
+ else if (!DECL_P (realdecl))
+ {
+ if (handled_component_p (realdecl))
+ {
+ HOST_WIDE_INT bitsize, bitpos, maxsize;
+ tree innerdecl
+ = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
+ &maxsize);
+ if (!DECL_P (innerdecl)
+ || DECL_IGNORED_P (innerdecl)
+ || TREE_STATIC (innerdecl)
+ || bitsize <= 0
+ || bitpos + bitsize > 256
+ || bitsize != maxsize)
+ return 0;
+ else
+ realdecl = expr;
+ }
+ else
+ return 0;
+ }
}
/* Do not track EXPR if REALDECL it should be ignored for debugging
return gen_rtx_REG_offset (loc, mode, regno, offset);
}
+/* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
+ hard_frame_pointer_rtx is being mapped to it. */
+static rtx cfa_base_rtx;
+
/* Carry information about uses and stores while walking rtx. */
struct count_use_info
return NULL;
}
+/* Return the mode of MEM's address.  Use the mode of the address rtx
+   itself when it has one; otherwise (e.g. for a constant address,
+   which has VOIDmode) fall back to the target's address mode for
+   MEM's address space. */
+
+static inline enum machine_mode
+get_address_mode (rtx mem)
+{
+ enum machine_mode mode = GET_MODE (XEXP (mem, 0));
+ if (mode != VOIDmode)
+ return mode;
+ return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
+}
+
/* Replace all registers and addresses in an expression with VALUE
expressions that map back to them, unless the expression is a
register. If no mapping is or can be performed, returns NULL. */
return NULL;
else if (MEM_P (loc))
{
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
- cselib_val *addr = cselib_lookup (XEXP (loc, 0), address_mode, 0);
+ cselib_val *addr = cselib_lookup (XEXP (loc, 0),
+ get_address_mode (loc), 0);
if (addr)
return replace_equiv_address_nv (loc, addr->val_rtx);
else
if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
{
rtx ploc = PAT_VAR_LOCATION_LOC (loc);
- cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
+ if (! VAR_LOC_UNKNOWN_P (ploc))
+ {
+ cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
- /* ??? flag_float_store and volatile mems are never
- given values, but we could in theory use them for
- locations. */
- gcc_assert (val || 1);
+ /* ??? flag_float_store and volatile mems are never
+ given values, but we could in theory use them for
+ locations. */
+ gcc_assert (val || 1);
+ }
return MO_VAL_LOC;
}
else
{
if (REG_P (loc)
|| (find_use_val (loc, GET_MODE (loc), cui)
- && cselib_lookup (XEXP (loc, 0), GET_MODE (loc), 0)))
+ && cselib_lookup (XEXP (loc, 0),
+ get_address_mode (loc), 0)))
return MO_VAL_SET;
}
else
{
gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
+ if (loc == cfa_base_rtx)
+ return MO_CLOBBER;
expr = REG_EXPR (loc);
if (!expr)
enum micro_operation_type mopt, FILE *out)
{
fprintf (out, "bb %i op %i insn %i %s ",
- bb->index, VTI (bb)->n_mos - 1,
+ bb->index, VEC_length (micro_operation, VTI (bb)->mos),
INSN_UID (insn), micro_operation_type_name[mopt]);
print_inline_rtx (out, x, 2);
fputc ('\n', out);
}
-/* Count uses (register and memory references) LOC which will be tracked.
- INSN is instruction which the LOC is part of. */
-
-static int
-count_uses (rtx *ploc, void *cuip)
-{
- rtx loc = *ploc;
- struct count_use_info *cui = (struct count_use_info *) cuip;
- enum micro_operation_type mopt = use_type (loc, cui, NULL);
-
- if (mopt != MO_CLOBBER)
- {
- cselib_val *val;
- enum machine_mode mode = GET_MODE (loc);
-
- switch (mopt)
- {
- case MO_VAL_LOC:
- loc = PAT_VAR_LOCATION_LOC (loc);
- if (VAR_LOC_UNKNOWN_P (loc))
- break;
- /* Fall through. */
-
- case MO_VAL_USE:
- case MO_VAL_SET:
- if (MEM_P (loc)
- && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
- {
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
- val = cselib_lookup (XEXP (loc, 0), address_mode, 0);
-
- if (val && !cselib_preserved_value_p (val))
- {
- VTI (cui->bb)->n_mos++;
- cselib_preserve_value (val);
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (XEXP (loc, 0), cui->bb, cui->insn,
- MO_VAL_USE, dump_file);
- }
- }
-
- val = find_use_val (loc, mode, cui);
- if (val)
- {
- if (mopt == MO_VAL_SET
- && GET_CODE (PATTERN (cui->insn)) == COND_EXEC
- && (REG_P (loc)
- || (MEM_P (loc)
- && (use_type (loc, NULL, NULL) == MO_USE
- || cui->sets))))
- {
- cselib_val *oval = cselib_lookup (loc, GET_MODE (loc), 0);
-
- gcc_assert (oval != val);
- gcc_assert (REG_P (loc) || MEM_P (loc));
-
- if (!cselib_preserved_value_p (oval))
- {
- VTI (cui->bb)->n_mos++;
- cselib_preserve_value (oval);
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (loc, cui->bb, cui->insn,
- MO_VAL_USE, dump_file);
- }
- }
-
- cselib_preserve_value (val);
- }
- else
- gcc_assert (mopt == MO_VAL_LOC
- || (mopt == MO_VAL_SET && cui->store_p));
-
- break;
-
- default:
- break;
- }
-
- VTI (cui->bb)->n_mos++;
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (loc, cui->bb, cui->insn, mopt, dump_file);
- }
-
- return 0;
-}
-
-/* Helper function for finding all uses of REG/MEM in X in CUI's
- insn. */
-
-static void
-count_uses_1 (rtx *x, void *cui)
-{
- for_each_rtx (x, count_uses, cui);
-}
-
-/* Count stores (register and memory references) LOC which will be
- tracked. CUI is a count_use_info object containing the instruction
- which the LOC is part of. */
-
-static void
-count_stores (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *cui)
-{
- count_uses (&loc, cui);
-}
-
-/* Callback for cselib_record_sets_hook, that counts how many micro
- operations it takes for uses and stores in an insn after
- cselib_record_sets has analyzed the sets in an insn, but before it
- modifies the stored values in the internal tables, unless
- cselib_record_sets doesn't call it directly (perhaps because we're
- not doing cselib in the first place, in which case sets and n_sets
- will be 0). */
-
-static void
-count_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
-{
- basic_block bb = BLOCK_FOR_INSN (insn);
- struct count_use_info cui;
-
- cselib_hook_called = true;
-
- cui.insn = insn;
- cui.bb = bb;
- cui.sets = sets;
- cui.n_sets = n_sets;
-
- cui.store_p = false;
- note_uses (&PATTERN (insn), count_uses_1, &cui);
- cui.store_p = true;
- note_stores (PATTERN (insn), count_stores, &cui);
-}
-
/* Tell whether the CONCAT used to holds a VALUE and its location
needs value resolution, i.e., an attempt of mapping the location
back to other incoming values. */
#define VAL_EXPR_HAS_REVERSE(x) \
(RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val)
+/* Vector of all VALUE rtxes preserved so far; vt_emit_notes walks it
+   to add, and afterwards remove, the cselib value chains of every
+   preserved VALUE. */
+static VEC (rtx, heap) *preserved_values;
+
+/* Mark VAL as preserved in cselib and push its VALUE rtx onto
+   preserved_values so that vt_emit_notes can find it. */
+
+static void
+preserve_value (cselib_val *val)
+{
+ cselib_preserve_value (val);
+ VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
+}
+
+/* Helper function for MO_VAL_LOC handling, called via for_each_rtx.
+   Return non-zero (stopping the walk) upon finding a sub-rtx of *X
+   that is not suitable for use inside a CONST and has not been
+   replaced by a VALUE; return zero otherwise. */
+
+static int
+non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
+{
+ if (*x == NULL_RTX)
+ return 0;
+
+ switch (GET_CODE (*x))
+ {
+ /* These rtx codes can never appear inside a valid CONST. */
+ case REG:
+ case DEBUG_EXPR:
+ case PC:
+ case SCRATCH:
+ case CC0:
+ case ASM_INPUT:
+ case ASM_OPERANDS:
+ return 1;
+ /* A MEM is acceptable only when its contents are known constant. */
+ case MEM:
+ return !MEM_READONLY_P (*x);
+ default:
+ return 0;
+ }
+}
+
/* Add uses (register and memory references) LOC which will be tracked
to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
if (type != MO_CLOBBER)
{
basic_block bb = cui->bb;
- micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
+ micro_operation mo;
- mo->type = type;
- mo->u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
- mo->insn = cui->insn;
+ mo.type = type;
+ mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
+ mo.insn = cui->insn;
if (type == MO_VAL_LOC)
{
gcc_assert (cui->sets);
if (MEM_P (vloc)
- && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0)))
+ && !REG_P (XEXP (vloc, 0))
+ && !MEM_P (XEXP (vloc, 0))
+ && (GET_CODE (XEXP (vloc, 0)) != PLUS
+ || XEXP (XEXP (vloc, 0), 0) != cfa_base_rtx
+ || !CONST_INT_P (XEXP (XEXP (vloc, 0), 1))))
{
rtx mloc = vloc;
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
+ enum machine_mode address_mode = get_address_mode (mloc);
cselib_val *val
= cselib_lookup (XEXP (mloc, 0), address_mode, 0);
if (val && !cselib_preserved_value_p (val))
{
- micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
- mon->type = mo->type;
- mon->u.loc = mo->u.loc;
- mon->insn = mo->insn;
- cselib_preserve_value (val);
- mo->type = MO_VAL_USE;
+ micro_operation moa;
+ preserve_value (val);
mloc = cselib_subst_to_values (XEXP (mloc, 0));
- mo->u.loc = gen_rtx_CONCAT (address_mode,
+ moa.type = MO_VAL_USE;
+ moa.insn = cui->insn;
+ moa.u.loc = gen_rtx_CONCAT (address_mode,
val->val_rtx, mloc);
if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (mo->u.loc, cui->bb, cui->insn,
- mo->type, dump_file);
- mo = mon;
+ log_op_type (moa.u.loc, cui->bb, cui->insn,
+ moa.type, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
}
}
- if (!VAR_LOC_UNKNOWN_P (vloc)
- && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
+ if (CONSTANT_P (vloc)
+ && (GET_CODE (vloc) != CONST
+ || for_each_rtx (&vloc, non_suitable_const, NULL)))
+ /* For constants don't look up any value. */;
+ else if (!VAR_LOC_UNKNOWN_P (vloc)
+ && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
{
enum machine_mode mode2;
enum micro_operation_type type2;
&& !cselib_preserved_value_p (val))
{
VAL_NEEDS_RESOLUTION (oloc) = 1;
- cselib_preserve_value (val);
+ preserve_value (val);
}
}
else if (!VAR_LOC_UNKNOWN_P (vloc))
PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
}
- mo->u.loc = oloc;
+ mo.u.loc = oloc;
}
else if (type == MO_VAL_USE)
{
gcc_assert (cui->sets);
if (MEM_P (oloc)
- && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0)))
+ && !REG_P (XEXP (oloc, 0))
+ && !MEM_P (XEXP (oloc, 0))
+ && (GET_CODE (XEXP (oloc, 0)) != PLUS
+ || XEXP (XEXP (oloc, 0), 0) != cfa_base_rtx
+ || !CONST_INT_P (XEXP (XEXP (oloc, 0), 1))))
{
rtx mloc = oloc;
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
+ enum machine_mode address_mode = get_address_mode (mloc);
cselib_val *val
= cselib_lookup (XEXP (mloc, 0), address_mode, 0);
if (val && !cselib_preserved_value_p (val))
{
- micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
- mon->type = mo->type;
- mon->u.loc = mo->u.loc;
- mon->insn = mo->insn;
- cselib_preserve_value (val);
- mo->type = MO_VAL_USE;
+ micro_operation moa;
+ preserve_value (val);
mloc = cselib_subst_to_values (XEXP (mloc, 0));
- mo->u.loc = gen_rtx_CONCAT (address_mode,
+ moa.type = MO_VAL_USE;
+ moa.insn = cui->insn;
+ moa.u.loc = gen_rtx_CONCAT (address_mode,
val->val_rtx, mloc);
- mo->insn = cui->insn;
if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (mo->u.loc, cui->bb, cui->insn,
- mo->type, dump_file);
- mo = mon;
+ log_op_type (moa.u.loc, cui->bb, cui->insn,
+ moa.type, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
}
}
else
oloc = val->val_rtx;
- mo->u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
+ mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
if (type2 == MO_USE)
- VAL_HOLDS_TRACK_EXPR (mo->u.loc) = 1;
+ VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
if (!cselib_preserved_value_p (val))
{
- VAL_NEEDS_RESOLUTION (mo->u.loc) = 1;
- cselib_preserve_value (val);
+ VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
+ preserve_value (val);
}
}
else
gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
+ log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
}
return 0;
enum machine_mode mode = VOIDmode, mode2;
struct count_use_info *cui = (struct count_use_info *)cuip;
basic_block bb = cui->bb;
- micro_operation *mo;
+ micro_operation mo;
rtx oloc = loc, nloc, src = NULL;
enum micro_operation_type type = use_type (loc, cui, &mode);
bool track_p = false;
if (REG_P (loc))
{
- mo = VTI (bb)->mos + VTI (bb)->n_mos++;
-
+ gcc_assert (loc != cfa_base_rtx);
if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
|| !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
|| GET_CODE (expr) == CLOBBER)
{
- mo->type = MO_CLOBBER;
- mo->u.loc = loc;
+ mo.type = MO_CLOBBER;
+ mo.u.loc = loc;
}
else
{
if (src == NULL)
{
- mo->type = MO_SET;
- mo->u.loc = loc;
+ mo.type = MO_SET;
+ mo.u.loc = loc;
}
else
{
- rtx xexpr = CONST_CAST_RTX (expr);
-
- if (SET_SRC (expr) != src)
- xexpr = gen_rtx_SET (VOIDmode, loc, src);
+ rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
- mo->type = MO_COPY;
+ mo.type = MO_COPY;
else
- mo->type = MO_SET;
- mo->u.loc = xexpr;
+ mo.type = MO_SET;
+ mo.u.loc = xexpr;
}
}
- mo->insn = cui->insn;
+ mo.insn = cui->insn;
}
else if (MEM_P (loc)
&& ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
|| cui->sets))
{
- mo = VTI (bb)->mos + VTI (bb)->n_mos++;
-
if (MEM_P (loc) && type == MO_VAL_SET
- && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
+ && !REG_P (XEXP (loc, 0))
+ && !MEM_P (XEXP (loc, 0))
+ && (GET_CODE (XEXP (loc, 0)) != PLUS
+ || XEXP (XEXP (loc, 0), 0) != cfa_base_rtx
+ || !CONST_INT_P (XEXP (XEXP (loc, 0), 1))))
{
rtx mloc = loc;
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
- cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
+ enum machine_mode address_mode = get_address_mode (mloc);
+ cselib_val *val = cselib_lookup (XEXP (mloc, 0),
+ address_mode, 0);
if (val && !cselib_preserved_value_p (val))
{
- cselib_preserve_value (val);
- mo->type = MO_VAL_USE;
+ preserve_value (val);
+ mo.type = MO_VAL_USE;
mloc = cselib_subst_to_values (XEXP (mloc, 0));
- mo->u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
- mo->insn = cui->insn;
+ mo.u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
+ mo.insn = cui->insn;
if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (mo->u.loc, cui->bb, cui->insn,
- mo->type, dump_file);
- mo = VTI (bb)->mos + VTI (bb)->n_mos++;
+ log_op_type (mo.u.loc, cui->bb, cui->insn,
+ mo.type, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
}
}
if (GET_CODE (expr) == CLOBBER || !track_p)
{
- mo->type = MO_CLOBBER;
- mo->u.loc = track_p ? var_lowpart (mode2, loc) : loc;
+ mo.type = MO_CLOBBER;
+ mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
}
else
{
if (src == NULL)
{
- mo->type = MO_SET;
- mo->u.loc = loc;
+ mo.type = MO_SET;
+ mo.u.loc = loc;
}
else
{
- rtx xexpr = CONST_CAST_RTX (expr);
-
- if (SET_SRC (expr) != src)
- xexpr = gen_rtx_SET (VOIDmode, loc, src);
+ rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
if (same_variable_part_p (SET_SRC (xexpr),
MEM_EXPR (loc),
INT_MEM_OFFSET (loc)))
- mo->type = MO_COPY;
+ mo.type = MO_COPY;
else
- mo->type = MO_SET;
- mo->u.loc = xexpr;
+ mo.type = MO_SET;
+ mo.u.loc = xexpr;
}
}
- mo->insn = cui->insn;
+ mo.insn = cui->insn;
}
else
return;
if (!cselib_preserved_value_p (oval))
{
- micro_operation *nmo = VTI (bb)->mos + VTI (bb)->n_mos++;
+ micro_operation moa;
- cselib_preserve_value (oval);
+ preserve_value (oval);
- nmo->type = MO_VAL_USE;
- nmo->u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
- VAL_NEEDS_RESOLUTION (nmo->u.loc) = 1;
- nmo->insn = mo->insn;
+ moa.type = MO_VAL_USE;
+ moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
+ VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
+ moa.insn = cui->insn;
if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (nmo->u.loc, cui->bb, cui->insn,
- nmo->type, dump_file);
+ log_op_type (moa.u.loc, cui->bb, cui->insn,
+ moa.type, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
}
resolve = false;
}
- else if (resolve && GET_CODE (mo->u.loc) == SET)
+ else if (resolve && GET_CODE (mo.u.loc) == SET)
{
nloc = replace_expr_with_values (SET_SRC (expr));
}
if (nloc)
- oloc = gen_rtx_SET (GET_MODE (mo->u.loc), oloc, nloc);
+ oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
else
{
- if (oloc == SET_DEST (mo->u.loc))
+ if (oloc == SET_DEST (mo.u.loc))
/* No point in duplicating. */
- oloc = mo->u.loc;
- if (!REG_P (SET_SRC (mo->u.loc)))
+ oloc = mo.u.loc;
+ if (!REG_P (SET_SRC (mo.u.loc)))
resolve = false;
}
}
else if (!resolve)
{
- if (GET_CODE (mo->u.loc) == SET
- && oloc == SET_DEST (mo->u.loc))
+ if (GET_CODE (mo.u.loc) == SET
+ && oloc == SET_DEST (mo.u.loc))
/* No point in duplicating. */
- oloc = mo->u.loc;
+ oloc = mo.u.loc;
}
else
resolve = false;
loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
- if (mo->u.loc != oloc)
- loc = gen_rtx_CONCAT (GET_MODE (mo->u.loc), loc, mo->u.loc);
+ if (mo.u.loc != oloc)
+ loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
/* The loc of a MO_VAL_SET may have various forms:
reverse = reverse_op (v->val_rtx, expr);
if (reverse)
{
- loc = gen_rtx_CONCAT (GET_MODE (mo->u.loc), loc, reverse);
+ loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, reverse);
VAL_EXPR_HAS_REVERSE (loc) = 1;
}
}
- mo->u.loc = loc;
+ mo.u.loc = loc;
if (track_p)
VAL_HOLDS_TRACK_EXPR (loc) = 1;
if (preserve)
{
VAL_NEEDS_RESOLUTION (loc) = resolve;
- cselib_preserve_value (v);
+ preserve_value (v);
}
- if (mo->type == MO_CLOBBER)
+ if (mo.type == MO_CLOBBER)
VAL_EXPR_IS_CLOBBERED (loc) = 1;
- if (mo->type == MO_COPY)
+ if (mo.type == MO_COPY)
VAL_EXPR_IS_COPIED (loc) = 1;
- mo->type = MO_VAL_SET;
+ mo.type = MO_VAL_SET;
log_and_return:
if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
+ log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
}
/* Callback for cselib_record_sets_hook, that records as micro
basic_block bb = BLOCK_FOR_INSN (insn);
int n1, n2;
struct count_use_info cui;
+ micro_operation *mos;
cselib_hook_called = true;
cui.sets = sets;
cui.n_sets = n_sets;
- n1 = VTI (bb)->n_mos;
+ n1 = VEC_length (micro_operation, VTI (bb)->mos);
cui.store_p = false;
note_uses (&PATTERN (insn), add_uses_1, &cui);
- n2 = VTI (bb)->n_mos - 1;
+ n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
+ mos = VEC_address (micro_operation, VTI (bb)->mos);
/* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
MO_VAL_LOC last. */
while (n1 < n2)
{
- while (n1 < n2 && VTI (bb)->mos[n1].type == MO_USE)
+ while (n1 < n2 && mos[n1].type == MO_USE)
n1++;
- while (n1 < n2 && VTI (bb)->mos[n2].type != MO_USE)
+ while (n1 < n2 && mos[n2].type != MO_USE)
n2--;
if (n1 < n2)
{
micro_operation sw;
- sw = VTI (bb)->mos[n1];
- VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
- VTI (bb)->mos[n2] = sw;
+ sw = mos[n1];
+ mos[n1] = mos[n2];
+ mos[n2] = sw;
}
}
- n2 = VTI (bb)->n_mos - 1;
-
+ n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
while (n1 < n2)
{
- while (n1 < n2 && VTI (bb)->mos[n1].type != MO_VAL_LOC)
+ while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
n1++;
- while (n1 < n2 && VTI (bb)->mos[n2].type == MO_VAL_LOC)
+ while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
n2--;
if (n1 < n2)
{
micro_operation sw;
- sw = VTI (bb)->mos[n1];
- VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
- VTI (bb)->mos[n2] = sw;
+ sw = mos[n1];
+ mos[n1] = mos[n2];
+ mos[n2] = sw;
}
}
if (CALL_P (insn))
{
- micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
+ micro_operation mo;
- mo->type = MO_CALL;
- mo->insn = insn;
+ mo.type = MO_CALL;
+ mo.insn = insn;
+ mo.u.loc = NULL_RTX;
if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (PATTERN (insn), bb, insn, mo->type, dump_file);
+ log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
}
- n1 = VTI (bb)->n_mos;
+ n1 = VEC_length (micro_operation, VTI (bb)->mos);
/* This will record NEXT_INSN (insn), such that we can
insert notes before it without worrying about any
notes that MO_USEs might emit after the insn. */
cui.store_p = true;
note_stores (PATTERN (insn), add_stores, &cui);
- n2 = VTI (bb)->n_mos - 1;
+ n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
+ mos = VEC_address (micro_operation, VTI (bb)->mos);
- /* Order the MO_CLOBBERs to be before MO_SETs. */
+ /* Order the MO_VAL_USEs first (note_stores does nothing
+ on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
+ insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
while (n1 < n2)
{
- while (n1 < n2 && VTI (bb)->mos[n1].type == MO_CLOBBER)
+ while (n1 < n2 && mos[n1].type == MO_VAL_USE)
n1++;
- while (n1 < n2 && VTI (bb)->mos[n2].type != MO_CLOBBER)
+ while (n1 < n2 && mos[n2].type != MO_VAL_USE)
n2--;
if (n1 < n2)
{
micro_operation sw;
- sw = VTI (bb)->mos[n1];
- VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
- VTI (bb)->mos[n2] = sw;
+ sw = mos[n1];
+ mos[n1] = mos[n2];
+ mos[n2] = sw;
+ }
+ }
+
+ n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
+ while (n1 < n2)
+ {
+ while (n1 < n2 && mos[n1].type == MO_CLOBBER)
+ n1++;
+ while (n1 < n2 && mos[n2].type != MO_CLOBBER)
+ n2--;
+ if (n1 < n2)
+ {
+ micro_operation sw;
+
+ sw = mos[n1];
+ mos[n1] = mos[n2];
+ mos[n2] = sw;
}
}
}
static bool
compute_bb_dataflow (basic_block bb)
{
- int i, n;
+ unsigned int i;
+ micro_operation *mo;
bool changed;
dataflow_set old_out;
dataflow_set *in = &VTI (bb)->in;
dataflow_set_copy (&old_out, out);
dataflow_set_copy (out, in);
- n = VTI (bb)->n_mos;
- for (i = 0; i < n; i++)
+ for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
{
- rtx insn = VTI (bb)->mos[i].insn;
+ rtx insn = mo->insn;
- switch (VTI (bb)->mos[i].type)
+ switch (mo->type)
{
case MO_CALL:
dataflow_set_clear_at_call (out);
case MO_USE:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
if (REG_P (loc))
var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
case MO_VAL_LOC:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
rtx val, vloc;
tree var;
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
INSERT);
}
+ else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
+ set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
+ dv_from_decl (var), 0,
+ VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
+ INSERT);
}
break;
case MO_VAL_USE:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
rtx val, vloc, uloc;
vloc = uloc = XEXP (loc, 1);
case MO_VAL_SET:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
rtx val, vloc, uloc, reverse = NULL_RTX;
vloc = loc;
case MO_SET:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
rtx set_src = NULL;
if (GET_CODE (loc) == SET)
case MO_COPY:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
enum var_init_status src_status;
rtx set_src = NULL;
case MO_USE_NO_VAR:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
if (REG_P (loc))
var_reg_delete (out, loc, false);
case MO_CLOBBER:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
if (REG_P (loc))
var_reg_delete (out, loc, true);
break;
case MO_ADJUST:
- out->stack_adjust += VTI (bb)->mos[i].u.adjust;
+ out->stack_adjust += mo->u.adjust;
break;
}
}
*slot = var;
pos = 0;
nextp = &var->var_part[0].loc_chain;
- if (emit_notes && dv_is_value_p (dv))
- add_cselib_value_chains (dv);
}
else if (onepart)
{
changed = true;
var->n_var_parts--;
if (emit_notes)
- {
- var->cur_loc_changed = true;
- if (var->n_var_parts == 0 && dv_is_value_p (var->dv))
- remove_cselib_value_chains (var->dv);
- }
+ var->cur_loc_changed = true;
while (pos < var->n_var_parts)
{
var->var_part[pos] = var->var_part[pos + 1];
switch (GET_CODE (x))
{
case SUBREG:
+ if (dummy)
+ {
+ if (cselib_dummy_expand_value_rtx_cb (SUBREG_REG (x), regs,
+ max_depth - 1,
+ vt_expand_loc_callback, data))
+ return pc_rtx;
+ else
+ return NULL;
+ }
+
subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
max_depth - 1,
vt_expand_loc_callback, data);
if (!subreg)
return NULL;
- if (dummy)
- return pc_rtx;
-
result = simplify_gen_subreg (GET_MODE (x), subreg,
GET_MODE (SUBREG_REG (x)),
SUBREG_BYTE (x));
result = pc_rtx;
break;
}
- else
- {
- result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
- vt_expand_loc_callback,
- data);
- if (result)
- break;
- }
+ }
+ else
+ {
+ result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
+ vt_expand_loc_callback, data);
+ if (result)
+ break;
}
if (dummy && (result || var->var_part[0].cur_loc))
var->cur_loc_changed = true;
complete = true;
last_limit = 0;
n_var_parts = 0;
- if (!MAY_HAVE_DEBUG_STMTS)
+ if (!MAY_HAVE_DEBUG_INSNS)
{
for (i = 0; i < var->n_var_parts; i++)
if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
}
loc[n_var_parts] = loc2;
mode = GET_MODE (var->var_part[i].cur_loc);
+ if (mode == VOIDmode && dv_onepart_p (var->dv))
+ mode = DECL_MODE (decl);
for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
if (var->var_part[i].cur_loc == lc->loc)
{
(int) initialized);
else if (n_var_parts == 1)
{
- rtx expr_list
- = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
+ rtx expr_list;
+
+ if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
+ expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
+ else
+ expr_list = loc[0];
note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
(int) initialized);
static VEC (variable, heap) *changed_variables_stack;
+/* VALUEs with no variables that need set_dv_changed (val, false)
+ called before check_changed_vars_3. */
+
+static VEC (rtx, heap) *changed_values_stack;
+
+/* Helper function for check_changed_vars_1 and check_changed_vars_2.
+   Mark everything on DV's value chain as changed: dependents that
+   have a variable in HTAB are pushed onto changed_variables_stack,
+   while dependent VALUEs with no variable in HTAB are remembered in
+   changed_values_stack (so their changed flag can be cleared again
+   afterwards) and processed recursively. */
+
+static void
+check_changed_vars_0 (decl_or_value dv, htab_t htab)
+{
+ value_chain vc
+ = (value_chain) htab_find_with_hash (value_chains, dv, dv_htab_hash (dv));
+
+ if (vc == NULL)
+ return;
+ for (vc = vc->next; vc; vc = vc->next)
+ if (!dv_changed_p (vc->dv))
+ {
+ variable vcvar
+ = (variable) htab_find_with_hash (htab, vc->dv,
+ dv_htab_hash (vc->dv));
+ if (vcvar)
+ {
+ set_dv_changed (vc->dv, true);
+ VEC_safe_push (variable, heap, changed_variables_stack, vcvar);
+ }
+ else if (dv_is_value_p (vc->dv))
+ {
+ set_dv_changed (vc->dv, true);
+ VEC_safe_push (rtx, heap, changed_values_stack,
+ dv_as_value (vc->dv));
+ check_changed_vars_0 (vc->dv, htab);
+ }
+ }
+}
+
/* Populate changed_variables_stack with variable_def pointers
that need variable_was_changed called on them. */
if (dv_is_value_p (var->dv)
|| TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
- {
- value_chain vc
- = (value_chain) htab_find_with_hash (value_chains, var->dv,
- dv_htab_hash (var->dv));
-
- if (vc == NULL)
- return 1;
- for (vc = vc->next; vc; vc = vc->next)
- if (!dv_changed_p (vc->dv))
- {
- variable vcvar
- = (variable) htab_find_with_hash (htab, vc->dv,
- dv_htab_hash (vc->dv));
- if (vcvar)
- VEC_safe_push (variable, heap, changed_variables_stack,
- vcvar);
- }
- }
+ check_changed_vars_0 (var->dv, htab);
return 1;
}
variable_was_changed (var, NULL);
if (dv_is_value_p (var->dv)
|| TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
- {
- value_chain vc
- = (value_chain) htab_find_with_hash (value_chains, var->dv,
- dv_htab_hash (var->dv));
-
- if (vc == NULL)
- return;
- for (vc = vc->next; vc; vc = vc->next)
- if (!dv_changed_p (vc->dv))
- {
- variable vcvar
- = (variable) htab_find_with_hash (htab, vc->dv,
- dv_htab_hash (vc->dv));
- if (vcvar)
- check_changed_vars_2 (vcvar, htab);
- }
- }
+ check_changed_vars_0 (var->dv, htab);
}
/* For each changed decl (except DEBUG_EXPR_DECLs) recompute
while (VEC_length (variable, changed_variables_stack) > 0)
check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
htab);
+ while (VEC_length (rtx, changed_values_stack) > 0)
+ set_dv_changed (dv_from_value (VEC_pop (rtx, changed_values_stack)),
+ false);
htab_traverse (changed_variables, check_changed_vars_3, htab);
}
gcc_assert (old_var->n_var_parts == 1);
for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
remove_value_chains (old_var->dv, lc->loc);
- if (dv_is_value_p (old_var->dv))
- remove_cselib_value_chains (old_var->dv);
}
variable_was_changed (empty_var, NULL);
/* Continue traversing the hash table. */
{
location_chain lc1, lc2;
- gcc_assert (old_var->n_var_parts == 1);
- gcc_assert (new_var->n_var_parts == 1);
+ gcc_assert (old_var->n_var_parts == 1
+ && new_var->n_var_parts == 1);
lc1 = old_var->var_part[0].loc_chain;
lc2 = new_var->var_part[0].loc_chain;
while (lc1
gcc_assert (new_var->n_var_parts == 1);
for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
add_value_chains (new_var->dv, lc->loc);
- if (dv_is_value_p (new_var->dv))
- add_cselib_value_chains (new_var->dv);
}
for (i = 0; i < new_var->n_var_parts; i++)
new_var->var_part[i].cur_loc = NULL;
static void
emit_notes_in_bb (basic_block bb, dataflow_set *set)
{
- int i;
+ unsigned int i;
+ micro_operation *mo;
dataflow_set_clear (set);
dataflow_set_copy (set, &VTI (bb)->in);
- for (i = 0; i < VTI (bb)->n_mos; i++)
+ for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
{
- rtx insn = VTI (bb)->mos[i].insn;
+ rtx insn = mo->insn;
- switch (VTI (bb)->mos[i].type)
+ switch (mo->type)
{
case MO_CALL:
dataflow_set_clear_at_call (set);
case MO_USE:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
if (REG_P (loc))
var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
case MO_VAL_LOC:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
rtx val, vloc;
tree var;
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
INSERT);
}
+ else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
+ set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
+ dv_from_decl (var), 0,
+ VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
+ INSERT);
emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
}
case MO_VAL_USE:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
rtx val, vloc, uloc;
vloc = uloc = XEXP (loc, 1);
case MO_VAL_SET:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
rtx val, vloc, uloc, reverse = NULL_RTX;
vloc = loc;
case MO_SET:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
rtx set_src = NULL;
if (GET_CODE (loc) == SET)
case MO_COPY:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
enum var_init_status src_status;
rtx set_src = NULL;
case MO_USE_NO_VAR:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
if (REG_P (loc))
var_reg_delete (set, loc, false);
case MO_CLOBBER:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
if (REG_P (loc))
var_reg_delete (set, loc, true);
break;
case MO_ADJUST:
- set->stack_adjust += VTI (bb)->mos[i].u.adjust;
+ set->stack_adjust += mo->u.adjust;
break;
}
}
emit_notes = true;
if (MAY_HAVE_DEBUG_INSNS)
- changed_variables_stack = VEC_alloc (variable, heap, 40);
+ {
+ unsigned int i;
+ rtx val;
+
+ for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
+ add_cselib_value_chains (dv_from_value (val));
+ changed_variables_stack = VEC_alloc (variable, heap, 40);
+ changed_values_stack = VEC_alloc (rtx, heap, 40);
+ }
dataflow_set_init (&cur);
emit_notes_for_differences_1,
shared_hash_htab (empty_shared_hash));
if (MAY_HAVE_DEBUG_INSNS)
- gcc_assert (htab_elements (value_chains) == 0);
+ {
+ unsigned int i;
+ rtx val;
+
+ for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
+ remove_cselib_value_chains (dv_from_value (val));
+ gcc_assert (htab_elements (value_chains) == 0);
+ }
#endif
dataflow_set_destroy (&cur);
if (MAY_HAVE_DEBUG_INSNS)
- VEC_free (variable, heap, changed_variables_stack);
+ {
+ VEC_free (variable, heap, changed_variables_stack);
+ VEC_free (rtx, heap, changed_values_stack);
+ }
#ifdef ENABLE_RTL_CHECKING
pointer_map_destroy (emitted_notes);
cselib. */
if (val)
{
- cselib_preserve_value (val);
+ preserve_value (val);
set_variable_part (out, val->val_rtx, dv, offset,
VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
dv = dv_from_value (val->val_rtx);
if (MAY_HAVE_DEBUG_INSNS)
{
- cselib_preserve_only_values (true);
+ cselib_preserve_only_values ();
cselib_reset_table (cselib_get_next_uid ());
}
}
+/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
+
+static bool
+fp_setter (rtx insn)
+{
+ rtx pat = PATTERN (insn);
+ if (RTX_FRAME_RELATED_P (insn))
+ {
+ /* Prefer the REG_FRAME_RELATED_EXPR note when present: it
+ describes the frame-related effect of the insn more directly
+ than the raw pattern. */
+ rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
+ if (expr)
+ pat = XEXP (expr, 0);
+ }
+ if (GET_CODE (pat) == SET)
+ return SET_DEST (pat) == hard_frame_pointer_rtx;
+ else if (GET_CODE (pat) == PARALLEL)
+ {
+ int i;
+ /* Scan every SET in the PARALLEL for one whose destination is
+ the hard frame pointer. */
+ for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
+ if (GET_CODE (XVECEXP (pat, 0, i)) == SET
+ && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
+ return true;
+ }
+ return false;
+}
+
+/* Initialize cfa_base_rtx, create a preserved VALUE for it and
+ ensure it isn't flushed during cselib_reset_table.
+ Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
+ has been eliminated. */
+
+static void
+vt_init_cfa_base (void)
+{
+ cselib_val *val;
+
+#ifdef FRAME_POINTER_CFA_OFFSET
+ cfa_base_rtx = frame_pointer_rtx;
+#else
+ cfa_base_rtx = arg_pointer_rtx;
+#endif
+ /* Only a fixed register distinct from the hard frame pointer is
+ usable as a stable CFA base; otherwise do without one. */
+ if (cfa_base_rtx == hard_frame_pointer_rtx
+ || !fixed_regs[REGNO (cfa_base_rtx)])
+ {
+ cfa_base_rtx = NULL_RTX;
+ return;
+ }
+ if (!MAY_HAVE_DEBUG_INSNS)
+ return;
+
+ /* Look up (creating if needed) the VALUE for the base register as
+ of the start of the function, and keep cselib from discarding it. */
+ val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
+ get_insns ());
+ preserve_value (val);
+ cselib_preserve_cfa_base_value (val);
+ /* Record the VALUE as the base register's location in the ENTRY
+ block's OUT set so the dataflow starts from it. */
+ var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
+ VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
+ 0, NULL_RTX, INSERT);
+}
+
/* Allocate and initialize the data structures for variable tracking
and parse the RTL to get the micro operations. */
-static void
+static bool
vt_initialize (void)
{
+ /* Returns false when per-insn stack adjustments cannot be computed
+ (vt_stack_adjustments failure); the caller then bails out. */
- basic_block bb;
+ basic_block bb, prologue_bb = NULL;
+ /* Offset of the hard frame pointer from the CFA, or -1 while
+ unknown / not applicable. */
+ HOST_WIDE_INT fp_cfa_offset = -1;
alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
+ attrs_pool = create_alloc_pool ("attrs_def pool",
+ sizeof (struct attrs_def), 1024);
+ var_pool = create_alloc_pool ("variable_def pool",
+ sizeof (struct variable_def)
+ + (MAX_VAR_PARTS - 1)
+ * sizeof (((variable)NULL)->var_part[0]), 64);
+ loc_chain_pool = create_alloc_pool ("location_chain_def pool",
+ sizeof (struct location_chain_def),
+ 1024);
+ shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
+ sizeof (struct shared_hash_def), 256);
+ empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
+ empty_shared_hash->refcount = 1;
+ empty_shared_hash->htab
+ = htab_create (1, variable_htab_hash, variable_htab_eq,
+ variable_htab_free);
+ changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
+ variable_htab_free);
if (MAY_HAVE_DEBUG_INSNS)
{
- cselib_init (true);
+ value_chain_pool = create_alloc_pool ("value_chain_def pool",
+ sizeof (struct value_chain_def),
+ 1024);
+ value_chains = htab_create (32, value_chain_htab_hash,
+ value_chain_htab_eq, NULL);
+ }
+
+ /* Init the IN and OUT sets. */
+ FOR_ALL_BB (bb)
+ {
+ VTI (bb)->visited = false;
+ VTI (bb)->flooded = false;
+ dataflow_set_init (&VTI (bb)->in);
+ dataflow_set_init (&VTI (bb)->out);
+ VTI (bb)->permp = NULL;
+ }
+
+ if (MAY_HAVE_DEBUG_INSNS)
+ {
+ cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
scratch_regs = BITMAP_ALLOC (NULL);
valvar_pool = create_alloc_pool ("small variable_def pool",
sizeof (struct variable_def), 256);
+ preserved_values = VEC_alloc (rtx, heap, 256);
}
else
{
valvar_pool = NULL;
}
+ /* No frame pointer: the stack pointer carries the CFA, so the
+ per-insn stack adjustments must be computable; fail if not. */
+ if (!frame_pointer_needed)
+ {
+ rtx reg, elim;
+
+ if (!vt_stack_adjustments ())
+ return false;
+
+#ifdef FRAME_POINTER_CFA_OFFSET
+ reg = frame_pointer_rtx;
+#else
+ reg = arg_pointer_rtx;
+#endif
+ elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
+ if (elim != reg)
+ {
+ if (GET_CODE (elim) == PLUS)
+ elim = XEXP (elim, 0);
+ if (elim == stack_pointer_rtx)
+ vt_init_cfa_base ();
+ }
+ }
+ else if (!crtl->stack_realign_tried)
+ {
+ rtx reg, elim;
+
+#ifdef FRAME_POINTER_CFA_OFFSET
+ reg = frame_pointer_rtx;
+ fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
+#else
+ reg = arg_pointer_rtx;
+ fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
+#endif
+ elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
+ if (elim != reg)
+ {
+ if (GET_CODE (elim) == PLUS)
+ {
+ fp_cfa_offset -= INTVAL (XEXP (elim, 1));
+ elim = XEXP (elim, 0);
+ }
+ if (elim != hard_frame_pointer_rtx)
+ fp_cfa_offset = -1;
+ else
+ /* The hard frame pointer is the CFA base; remember the
+ prologue block so we can switch bases at the fp setter. */
+ prologue_bb = single_succ (ENTRY_BLOCK_PTR);
+ }
+ }
+
+ hard_frame_pointer_adjustment = -1;
+
FOR_EACH_BB (bb)
{
rtx insn;
HOST_WIDE_INT pre, post = 0;
- unsigned int next_uid_before = cselib_get_next_uid ();
- unsigned int next_uid_after = next_uid_before;
basic_block first_bb, last_bb;
if (MAY_HAVE_DEBUG_INSNS)
{
- cselib_record_sets_hook = count_with_sets;
+ cselib_record_sets_hook = add_with_sets;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "first value: %i\n",
cselib_get_next_uid ());
}
last_bb = bb;
- /* Count the number of micro operations. */
- FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
- {
- VTI (bb)->n_mos = 0;
- for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
- insn = NEXT_INSN (insn))
- {
- if (INSN_P (insn))
- {
- if (!frame_pointer_needed)
- {
- insn_stack_adjust_offset_pre_post (insn, &pre, &post);
- if (pre)
- {
- VTI (bb)->n_mos++;
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (GEN_INT (pre), bb, insn,
- MO_ADJUST, dump_file);
- }
- if (post)
- {
- VTI (bb)->n_mos++;
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (GEN_INT (post), bb, insn,
- MO_ADJUST, dump_file);
- }
- }
- cselib_hook_called = false;
- if (MAY_HAVE_DEBUG_INSNS)
- {
- cselib_process_insn (insn);
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- print_rtl_single (dump_file, insn);
- dump_cselib_table (dump_file);
- }
- }
- if (!cselib_hook_called)
- count_with_sets (insn, 0, 0);
- if (CALL_P (insn))
- {
- VTI (bb)->n_mos++;
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (PATTERN (insn), bb, insn,
- MO_CALL, dump_file);
- }
- }
- }
- }
-
- if (MAY_HAVE_DEBUG_INSNS)
- {
- cselib_preserve_only_values (false);
- next_uid_after = cselib_get_next_uid ();
- cselib_reset_table (next_uid_before);
- cselib_record_sets_hook = add_with_sets;
- if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, "first value: %i\n",
- cselib_get_next_uid ());
- }
-
- /* Add the micro-operations to the array. */
+ /* Add the micro-operations to the vector. */
FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
{
- int count = VTI (bb)->n_mos;
- VTI (bb)->mos = XNEWVEC (micro_operation, count);
- VTI (bb)->n_mos = 0;
+ /* Save the expected end-of-block stack offset (verified by the
+ gcc_assert below), then replay from the IN offset. */
+ HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
+ VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
insn_stack_adjust_offset_pre_post (insn, &pre, &post);
if (pre)
{
- micro_operation *mo
- = VTI (bb)->mos + VTI (bb)->n_mos++;
-
- mo->type = MO_ADJUST;
- mo->u.adjust = pre;
- mo->insn = insn;
-
+ micro_operation mo;
+ mo.type = MO_ADJUST;
+ mo.u.adjust = pre;
+ mo.insn = insn;
if (dump_file && (dump_flags & TDF_DETAILS))
log_op_type (PATTERN (insn), bb, insn,
MO_ADJUST, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
+ &mo);
+ VTI (bb)->out.stack_adjust += pre;
}
}
cselib_hook_called = false;
+ /* NOTE(review): adjust_insn presumably applies temporary RTL
+ changes for var-tracking, undone by cancel_changes (0)
+ below -- its definition is not in view; confirm. */
+ adjust_insn (bb, insn);
if (MAY_HAVE_DEBUG_INSNS)
{
cselib_process_insn (insn);
}
if (!cselib_hook_called)
add_with_sets (insn, 0, 0);
+ cancel_changes (0);
if (!frame_pointer_needed && post)
{
- micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
-
- mo->type = MO_ADJUST;
- mo->u.adjust = post;
- mo->insn = insn;
-
+ micro_operation mo;
+ mo.type = MO_ADJUST;
+ mo.u.adjust = post;
+ mo.insn = insn;
if (dump_file && (dump_flags & TDF_DETAILS))
log_op_type (PATTERN (insn), bb, insn,
MO_ADJUST, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
+ &mo);
+ VTI (bb)->out.stack_adjust += post;
+ }
+
+ /* In the prologue block, once the insn that sets the hard
+ frame pointer is reached, switch the CFA base to it. */
+ if (bb == prologue_bb
+ && hard_frame_pointer_adjustment == -1
+ && RTX_FRAME_RELATED_P (insn)
+ && fp_setter (insn))
+ {
+ vt_init_cfa_base ();
+ hard_frame_pointer_adjustment = fp_cfa_offset;
}
}
}
- gcc_assert (count == VTI (bb)->n_mos);
+ gcc_assert (offset == VTI (bb)->out.stack_adjust);
}
bb = last_bb;
if (MAY_HAVE_DEBUG_INSNS)
{
- cselib_preserve_only_values (true);
- gcc_assert (next_uid_after == cselib_get_next_uid ());
- cselib_reset_table (next_uid_after);
+ cselib_preserve_only_values ();
+ cselib_reset_table (cselib_get_next_uid ());
cselib_record_sets_hook = NULL;
}
}
- attrs_pool = create_alloc_pool ("attrs_def pool",
- sizeof (struct attrs_def), 1024);
- var_pool = create_alloc_pool ("variable_def pool",
- sizeof (struct variable_def)
- + (MAX_VAR_PARTS - 1)
- * sizeof (((variable)NULL)->var_part[0]), 64);
- loc_chain_pool = create_alloc_pool ("location_chain_def pool",
- sizeof (struct location_chain_def),
- 1024);
- shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
- sizeof (struct shared_hash_def), 256);
- empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
- empty_shared_hash->refcount = 1;
- empty_shared_hash->htab
- = htab_create (1, variable_htab_hash, variable_htab_eq,
- variable_htab_free);
- changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
- variable_htab_free);
- if (MAY_HAVE_DEBUG_INSNS)
- {
- value_chain_pool = create_alloc_pool ("value_chain_def pool",
- sizeof (struct value_chain_def),
- 1024);
- value_chains = htab_create (32, value_chain_htab_hash,
- value_chain_htab_eq, NULL);
- }
-
- /* Init the IN and OUT sets. */
- FOR_ALL_BB (bb)
- {
- VTI (bb)->visited = false;
- VTI (bb)->flooded = false;
- dataflow_set_init (&VTI (bb)->in);
- dataflow_set_init (&VTI (bb)->out);
- VTI (bb)->permp = NULL;
- }
-
+ hard_frame_pointer_adjustment = -1;
VTI (ENTRY_BLOCK_PTR)->flooded = true;
vt_add_function_parameters ();
+ cfa_base_rtx = NULL_RTX;
+ return true;
}
/* Get rid of all debug insns from the insn stream. */
FOR_EACH_BB (bb)
{
- free (VTI (bb)->mos);
+ VEC_free (micro_operation, heap, VTI (bb)->mos);
}
FOR_ALL_BB (bb)
htab_delete (value_chains);
free_alloc_pool (value_chain_pool);
free_alloc_pool (valvar_pool);
+ VEC_free (rtx, heap, preserved_values);
cselib_finish ();
BITMAP_FREE (scratch_regs);
scratch_regs = NULL;
}
mark_dfs_back_edges ();
- vt_initialize ();
- if (!frame_pointer_needed)
+ if (!vt_initialize ())
{
- if (!vt_stack_adjustments ())
- {
- vt_finalize ();
- vt_debug_insns_local (true);
- return 0;
- }
+ vt_finalize ();
+ vt_debug_insns_local (true);
+ return 0;
}
success = vt_find_locations ();
/* This is later restored by our caller. */
flag_var_tracking_assignments = 0;
- vt_initialize ();
-
- if (!frame_pointer_needed && !vt_stack_adjustments ())
- gcc_unreachable ();
+ success = vt_initialize ();
+ gcc_assert (success);
success = vt_find_locations ();
}