#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
+#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "params.h"
#include "alloc-pool.h"
#include "target.h"
-#include "bitmap.h"
static bool cselib_record_memory;
-static bool cselib_preserve_constants;
static int entry_and_rtx_equal_p (const void *, const void *);
static hashval_t get_value_hash (const void *);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
/* The number of registers we had when the varrays were last resized. */
static unsigned int cselib_nregs;
-/* Count values without known locations, or with only locations that
- wouldn't have been known except for debug insns. Whenever this
- grows too big, we remove these useless values from the table.
-
- Counting values with only debug values is a bit tricky. We don't
- want to increment n_useless_values when we create a value for a
- debug insn, for this would get n_useless_values out of sync, but we
- want increment it if all locs in the list that were ever referenced
- in nondebug insns are removed from the list.
-
- In the general case, once we do that, we'd have to stop accepting
- nondebug expressions in the loc list, to avoid having two values
- equivalent that, without debug insns, would have been made into
- separate values. However, because debug insns never introduce
- equivalences themselves (no assignments), the only means for
- growing loc lists is through nondebug assignments. If the locs
- also happen to be referenced in debug insns, it will work just fine.
-
- A consequence of this is that there's at most one debug-only loc in
- each loc list. If we keep it in the first entry, testing whether
- we have a debug-only loc list takes O(1).
-
- Furthermore, since any additional entry in a loc list containing a
- debug loc would have to come from an assignment (nondebug) that
- references both the initial debug loc and the newly-equivalent loc,
- the initial debug loc would be promoted to a nondebug loc, and the
- loc list would not contain debug locs any more.
-
- So the only case we have to be careful with in order to keep
- n_useless_values in sync between debug and nondebug compilations is
- to avoid incrementing n_useless_values when removing the single loc
- from a value that turns out to not appear outside debug values. We
- increment n_useless_debug_values instead, and leave such values
- alone until, for other reasons, we garbage-collect useless
- values. */
+/* Count values without known locations. Whenever this grows too big, we
+ remove these useless values from the table. */
static int n_useless_values;
-static int n_useless_debug_values;
-
-/* Count values whose locs have been taken exclusively from debug
- insns for the entire life of the value. */
-static int n_debug_values;
/* Number of useless values before we remove them from the hash table. */
#define MAX_USELESS_VALUES 32
presence in the list by checking the next pointer. */
static cselib_val dummy_val;
-/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
- that is constant through the whole function and should never be
- eliminated. */
-static cselib_val *cfa_base_preserved_val;
-
/* Used to list all values that contain memory reference.
May or may not contain the useless values - the list is compacted
each time memory is invalidated. */
el->next = next;
el->loc = loc;
el->setting_insn = cselib_current_insn;
- gcc_assert (!next || !next->setting_insn
- || !DEBUG_INSN_P (next->setting_insn));
-
- /* If we're creating the first loc in a debug insn context, we've
- just created a debug value. Count it. */
- if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
- n_debug_values++;
-
return el;
}
-/* Promote loc L to a nondebug cselib_current_insn if L is marked as
- originating from a debug insn, maintaining the debug values
- count. */
-
-static inline void
-promote_debug_loc (struct elt_loc_list *l)
-{
- if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
- && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
- {
- n_debug_values--;
- l->setting_insn = cselib_current_insn;
- gcc_assert (!l->next);
- }
-}
-
/* The elt_list at *PL is no longer needed. Unchain it and free its
storage. */
cselib_reset_table (1);
}
-/* Remove from hash table all VALUEs except constants. */
-
-static int
-preserve_only_constants (void **x, void *info ATTRIBUTE_UNUSED)
-{
- cselib_val *v = (cselib_val *)*x;
-
- if (v->locs != NULL
- && v->locs->next == NULL)
- {
- if (CONSTANT_P (v->locs->loc)
- && (GET_CODE (v->locs->loc) != CONST
- || !references_value_p (v->locs->loc, 0)))
- return 1;
- if (cfa_base_preserved_val)
- {
- if (v == cfa_base_preserved_val)
- return 1;
- if (GET_CODE (v->locs->loc) == PLUS
- && CONST_INT_P (XEXP (v->locs->loc, 1))
- && XEXP (v->locs->loc, 0) == cfa_base_preserved_val->val_rtx)
- return 1;
- }
- }
-
- htab_clear_slot (cselib_hash_table, x);
- return 1;
-}
-
/* Remove all entries from the hash table, arranging for the next
value to be numbered NUM. */
{
unsigned int i;
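+
+ /* Clear the value chains of all registers that were assigned values. */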
+ for (i = 0; i < n_used_regs; i++)
+ REG_VALUES (used_regs[i]) = 0;
+
max_value_regs = 0;
- if (cfa_base_preserved_val)
- {
- unsigned int regno = REGNO (cfa_base_preserved_val->locs->loc);
- unsigned int new_used_regs = 0;
- for (i = 0; i < n_used_regs; i++)
- if (used_regs[i] == regno)
- {
- new_used_regs = 1;
- continue;
- }
- else
- REG_VALUES (used_regs[i]) = 0;
- gcc_assert (new_used_regs == 1);
- n_used_regs = new_used_regs;
- used_regs[0] = regno;
- max_value_regs
- = hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
- }
- else
- {
- for (i = 0; i < n_used_regs; i++)
- REG_VALUES (used_regs[i]) = 0;
- n_used_regs = 0;
- }
+ n_used_regs = 0;
- if (cselib_preserve_constants)
- htab_traverse (cselib_hash_table, preserve_only_constants, NULL);
- else
- htab_empty (cselib_hash_table);
+ /* ??? Preserve constants? */
+ htab_empty (cselib_hash_table);
n_useless_values = 0;
- n_useless_debug_values = 0;
- n_debug_values = 0;
next_uid = num;
so we need to do a comparison. */
for (l = v->locs; l; l = l->next)
if (rtx_equal_for_cselib_p (l->loc, x))
- {
- promote_debug_loc (l);
- return 1;
- }
+ return 1;
return 0;
}
{
cselib_val *v = (cselib_val *)*x;
struct elt_loc_list **p = &v->locs;
- bool had_locs = v->locs != NULL;
- rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
+ int had_locs = v->locs != 0;
while (*p)
{
if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
{
- if (setting_insn && DEBUG_INSN_P (setting_insn))
- n_useless_debug_values++;
- else
- n_useless_values++;
+ n_useless_values++;
values_became_useless = 1;
}
return 1;
remove_useless_values (void)
{
cselib_val **p, *v;
-
/* First pass: eliminate locations that reference the value. That in
turn can make more values useless. */
do
}
*p = &dummy_val;
- n_useless_values += n_useless_debug_values;
- n_debug_values -= n_useless_debug_values;
- n_useless_debug_values = 0;
-
htab_traverse (cselib_hash_table, discard_useless_values, 0);
gcc_assert (!n_useless_values);
return PRESERVED_VALUE_P (v->val_rtx);
}
-/* Arrange for a REG value to be assumed constant through the whole function,
- never invalidated and preserved across cselib_reset_table calls. */
-
-void
-cselib_preserve_cfa_base_value (cselib_val *v)
-{
- if (cselib_preserve_constants
- && v->locs
- && REG_P (v->locs->loc))
- cfa_base_preserved_val = v;
-}
-
/* Clean all non-constant expressions in the hash table, but retain
their values. */
for (l = mem_elt->locs; l; l = l->next)
if (MEM_P (l->loc)
&& CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
- {
- promote_debug_loc (l);
- return;
- }
+ return;
addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
mem_elt->locs
/* Find a value that describes a value of our mode at that address. */
for (l = addr->addr_list; l; l = l->next)
if (GET_MODE (l->elt->val_rtx) == mode)
- {
- promote_debug_loc (l->elt->locs);
- return l->elt;
- }
+ return l->elt;
if (! create)
return 0;
return copy;
}
+/* Log a lookup of X to the cselib table along with the result RET. */
+
+static cselib_val *
+cselib_log_lookup (rtx x, cselib_val *ret)
+{
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fputs ("cselib lookup ", dump_file);
+ print_inline_rtx (dump_file, x, 2);
+ fprintf (dump_file, " => %u:%u\n",
+ ret ? ret->uid : 0,
+ ret ? ret->hash : 0);
+ }
+
+ return ret;
+}
+
/* Look up the rtl expression X in our tables and return the value it has.
If CREATE is zero, we return NULL if we don't know the value. Otherwise,
we create a new one if possible, using mode MODE if X doesn't have a mode
(i.e. because it's a constant). */
-static cselib_val *
-cselib_lookup_1 (rtx x, enum machine_mode mode, int create)
+cselib_val *
+cselib_lookup (rtx x, enum machine_mode mode, int create)
{
void **slot;
cselib_val *e;
l = l->next;
for (; l; l = l->next)
if (mode == GET_MODE (l->elt->val_rtx))
- {
- promote_debug_loc (l->elt->locs);
- return l->elt;
- }
+ return cselib_log_lookup (x, l->elt);
if (! create)
- return 0;
+ return cselib_log_lookup (x, 0);
if (i < FIRST_PSEUDO_REGISTER)
{
REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
slot = htab_find_slot_with_hash (cselib_hash_table, x, e->hash, INSERT);
*slot = e;
- return e;
+ return cselib_log_lookup (x, e);
}
if (MEM_P (x))
- return cselib_lookup_mem (x, create);
+ return cselib_log_lookup (x, cselib_lookup_mem (x, create));
hashval = cselib_hash_rtx (x, create);
/* Can't even create if hashing is not possible. */
if (! hashval)
- return 0;
+ return cselib_log_lookup (x, 0);
slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
hashval, create ? INSERT : NO_INSERT);
if (slot == 0)
- return 0;
+ return cselib_log_lookup (x, 0);
e = (cselib_val *) *slot;
if (e)
- return e;
+ return cselib_log_lookup (x, e);
e = new_cselib_val (hashval, mode, x);
cselib_subst_to_values will need to do lookups. */
*slot = (void *) e;
e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
- return e;
-}
-
-/* Wrapper for cselib_lookup, that indicates X is in INSN. */
-
-cselib_val *
-cselib_lookup_from_insn (rtx x, enum machine_mode mode,
- int create, rtx insn)
-{
- cselib_val *ret;
-
- gcc_assert (!cselib_current_insn);
- cselib_current_insn = insn;
-
- ret = cselib_lookup (x, mode, create);
-
- cselib_current_insn = NULL;
-
- return ret;
-}
-
-/* Wrapper for cselib_lookup_1, that logs the lookup result and
- maintains invariants related with debug insns. */
-
-cselib_val *
-cselib_lookup (rtx x, enum machine_mode mode, int create)
-{
- cselib_val *ret = cselib_lookup_1 (x, mode, create);
-
- /* ??? Should we return NULL if we're not to create an entry, the
- found loc is a debug loc and cselib_current_insn is not DEBUG?
- If so, we should also avoid converting val to non-DEBUG; probably
- easiest setting cselib_current_insn to NULL before the call
- above. */
-
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- fputs ("cselib lookup ", dump_file);
- print_inline_rtx (dump_file, x, 2);
- fprintf (dump_file, " => %u:%u\n",
- ret ? ret->uid : 0,
- ret ? ret->hash : 0);
- }
-
- return ret;
+ return cselib_log_lookup (x, e);
}
/* Invalidate any entries in reg_values that overlap REGNO. This is called
while (*l)
{
cselib_val *v = (*l)->elt;
- bool had_locs;
- rtx setting_insn;
struct elt_loc_list **p;
unsigned int this_last = i;
if (i < FIRST_PSEUDO_REGISTER && v != NULL)
this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
- if (this_last < regno || v == NULL || v == cfa_base_preserved_val)
+ if (this_last < regno || v == NULL)
{
l = &(*l)->next;
continue;
else
unchain_one_elt_list (l);
- had_locs = v->locs != NULL;
- setting_insn = v->locs ? v->locs->setting_insn : NULL;
-
/* Now, we clear the mapping from value to reg. It must exist, so
this code will crash intentionally if it doesn't. */
for (p = &v->locs; ; p = &(*p)->next)
break;
}
}
-
- if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
- {
- if (setting_insn && DEBUG_INSN_P (setting_insn))
- n_useless_debug_values++;
- else
- n_useless_values++;
- }
+ if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
+ n_useless_values++;
}
}
}
{
bool has_mem = false;
struct elt_loc_list **p = &v->locs;
- bool had_locs = v->locs != NULL;
- rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
+ int had_locs = v->locs != 0;
while (*p)
{
}
if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
- {
- if (setting_insn && DEBUG_INSN_P (setting_insn))
- n_useless_debug_values++;
- else
- n_useless_values++;
- }
+ n_useless_values++;
next = v->next_containing_mem;
if (has_mem)
&& MEM_VOLATILE_P (PATTERN (insn))))
{
cselib_reset_table (next_uid);
- cselib_current_insn = NULL_RTX;
return;
}
if (! INSN_P (insn))
{
- cselib_current_insn = NULL_RTX;
+ cselib_current_insn = 0;
return;
}
if (GET_CODE (XEXP (x, 0)) == CLOBBER)
cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
- cselib_current_insn = NULL_RTX;
+ cselib_current_insn = 0;
if (n_useless_values > MAX_USELESS_VALUES
/* remove_useless_values is linear in the hash table size. Avoid
quadratic behavior for very large hashtables with very few
useless elements. */
- && ((unsigned int)n_useless_values
- > (cselib_hash_table->n_elements
- - cselib_hash_table->n_deleted
- - n_debug_values) / 4))
+ && (unsigned int)n_useless_values > cselib_hash_table->n_elements / 4)
remove_useless_values ();
}
init_alias_analysis. */
void
-cselib_init (int record_what)
+cselib_init (bool record_memory)
{
elt_list_pool = create_alloc_pool ("elt_list",
sizeof (struct elt_list), 10);
cselib_val_pool = create_alloc_pool ("cselib_val_list",
sizeof (cselib_val), 10);
value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
- cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
- cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
+ cselib_record_memory = record_memory;
/* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
see canon_true_dependence. This is only created once. */
cselib_finish (void)
{
cselib_discard_hook = NULL;
- cselib_preserve_constants = false;
- cfa_base_preserved_val = NULL;
free_alloc_pool (elt_list_pool);
free_alloc_pool (elt_loc_list_pool);
free_alloc_pool (cselib_val_pool);
used_regs = 0;
cselib_hash_table = 0;
n_useless_values = 0;
- n_useless_debug_values = 0;
- n_debug_values = 0;
next_uid = 0;
}
#include "toplev.h"
#include "params.h"
#include "diagnostic.h"
-#include "tree-pretty-print.h"
#include "pointer-set.h"
-#include "recog.h"
/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
HOST_WIDE_INT *);
+static void bb_stack_adjust_offset (basic_block);
static bool vt_stack_adjustments (void);
-static rtx compute_cfa_pointer (HOST_WIDE_INT);
+static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
static hashval_t variable_htab_hash (const void *);
static int variable_htab_eq (const void *, const void *);
static void variable_htab_free (void *);
static void **unshare_variable (dataflow_set *set, void **slot, variable var,
enum var_init_status);
+static int vars_copy_1 (void **, void *);
static void vars_copy (htab_t, htab_t);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
+static int variable_union (void **, void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
static bool canon_value_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
static bool variable_different_p (variable, variable);
+static int dataflow_set_different_1 (void **, void *);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);
static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
-static bool vt_initialize (void);
+static void vt_initialize (void);
static void vt_finalize (void);
/* Given a SET, calculate the amount of stack adjustment it contains
}
}
+/* Compute the stack adjustment in basic block BB, rewriting any
+ stack-based MEM micro-operations relative to the CFA as we go. */
+
+static void
+bb_stack_adjust_offset (basic_block bb)
+{
+ HOST_WIDE_INT offset;
+ unsigned int i;
+ micro_operation *mo;
+
+ offset = VTI (bb)->in.stack_adjust;
+ for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
+ {
+ if (mo->type == MO_ADJUST)
+ offset += mo->u.adjust;
+ else if (mo->type != MO_CALL)
+ {
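+ /* Rewrite stack-based MEM addresses relative to the CFA,
+ using the offset accumulated so far. */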
+ if (MEM_P (mo->u.loc))
+ mo->u.loc = adjust_stack_reference (mo->u.loc, -offset);
+ }
+ }
+ VTI (bb)->out.stack_adjust = offset;
+}
+
/* Compute stack adjustments for all blocks by traversing DFS tree.
Return true when the adjustments on all incoming edges are consistent.
Heavily borrowed from pre_and_rev_post_order_compute. */
/* Initialize entry block. */
VTI (ENTRY_BLOCK_PTR)->visited = true;
- VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
/* Allocate stack for back-tracking up CFG. */
/* Check if the edge destination has been visited yet. */
if (!VTI (dest)->visited)
{
- rtx insn;
- HOST_WIDE_INT pre, post, offset;
VTI (dest)->visited = true;
- VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
-
- if (dest != EXIT_BLOCK_PTR)
- for (insn = BB_HEAD (dest);
- insn != NEXT_INSN (BB_END (dest));
- insn = NEXT_INSN (insn))
- if (INSN_P (insn))
- {
- insn_stack_adjust_offset_pre_post (insn, &pre, &post);
- offset += pre + post;
- }
-
- VTI (dest)->out.stack_adjust = offset;
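+ /* DEST inherits SRC's outgoing adjustment; its own outgoing
+ offset is then recomputed from its micro-operations. */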
+ VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust;
+ bb_stack_adjust_offset (dest);
if (EDGE_COUNT (dest->succs) > 0)
/* Since the DEST node has been visited for the first
return true;
}
-/* Compute a CFA-based value for the stack pointer. */
+/* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative
+ to the CFA base register (the argument or frame pointer). Return
+ the new rtx. */
static rtx
-compute_cfa_pointer (HOST_WIDE_INT adjustment)
+adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment)
{
- rtx cfa;
+ rtx addr, cfa, tmp;
#ifdef FRAME_POINTER_CFA_OFFSET
adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
cfa = plus_constant (arg_pointer_rtx, adjustment);
#endif
- return cfa;
-}
-
-/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
- or -1 if the replacement shouldn't be done. */
-static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
-
-/* Data for adjust_mems callback. */
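+ /* Substitute the CFA expression for the stack pointer in the
+ address and try to fold the result. */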
+ addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa);
+ tmp = simplify_rtx (addr);
+ if (tmp)
+ addr = tmp;
-struct adjust_mem_data
-{
- bool store;
- enum machine_mode mem_mode;
- HOST_WIDE_INT stack_adjust;
- rtx side_effects;
-};
-
-/* Helper for adjust_mems. Return 1 if *loc is unsuitable for
- transformation of wider mode arithmetics to narrower mode,
- -1 if it is suitable and subexpressions shouldn't be
- traversed and 0 if it is suitable and subexpressions should
- be traversed. Called through for_each_rtx. */
-
-static int
-use_narrower_mode_test (rtx *loc, void *data)
-{
- rtx subreg = (rtx) data;
-
- if (CONSTANT_P (*loc))
- return -1;
- switch (GET_CODE (*loc))
- {
- case REG:
- if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0))
- return 1;
- return -1;
- case PLUS:
- case MINUS:
- case MULT:
- return 0;
- case ASHIFT:
- if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
- return 1;
- else
- return -1;
- default:
- return 1;
- }
-}
-
-/* Transform X into narrower mode MODE from wider mode WMODE. */
-
-static rtx
-use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
-{
- rtx op0, op1;
- if (CONSTANT_P (x))
- return lowpart_subreg (mode, x, wmode);
- switch (GET_CODE (x))
- {
- case REG:
- return lowpart_subreg (mode, x, wmode);
- case PLUS:
- case MINUS:
- case MULT:
- op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
- op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
- return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
- case ASHIFT:
- op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
- return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
- default:
- gcc_unreachable ();
- }
-}
-
-/* Helper function for adjusting used MEMs. */
-
-static rtx
-adjust_mems (rtx loc, const_rtx old_rtx, void *data)
-{
- struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
- rtx mem, addr = loc, tem;
- enum machine_mode mem_mode_save;
- bool store_save;
- switch (GET_CODE (loc))
- {
- case REG:
- /* Don't do any sp or fp replacements outside of MEM addresses. */
- if (amd->mem_mode == VOIDmode)
- return loc;
- if (loc == stack_pointer_rtx
- && !frame_pointer_needed)
- return compute_cfa_pointer (amd->stack_adjust);
- else if (loc == hard_frame_pointer_rtx
- && frame_pointer_needed
- && hard_frame_pointer_adjustment != -1)
- return compute_cfa_pointer (hard_frame_pointer_adjustment);
- return loc;
- case MEM:
- mem = loc;
- if (!amd->store)
- {
- mem = targetm.delegitimize_address (mem);
- if (mem != loc && !MEM_P (mem))
- return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
- }
-
- addr = XEXP (mem, 0);
- mem_mode_save = amd->mem_mode;
- amd->mem_mode = GET_MODE (mem);
- store_save = amd->store;
- amd->store = false;
- addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
- amd->store = store_save;
- amd->mem_mode = mem_mode_save;
- if (mem == loc)
- addr = targetm.delegitimize_address (addr);
- if (addr != XEXP (mem, 0))
- mem = replace_equiv_address_nv (mem, addr);
- if (!amd->store)
- mem = avoid_constant_pool_reference (mem);
- return mem;
- case PRE_INC:
- case PRE_DEC:
- addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
- GEN_INT (GET_CODE (loc) == PRE_INC
- ? GET_MODE_SIZE (amd->mem_mode)
- : -GET_MODE_SIZE (amd->mem_mode)));
- case POST_INC:
- case POST_DEC:
- if (addr == loc)
- addr = XEXP (loc, 0);
- gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
- addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
- tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
- GEN_INT ((GET_CODE (loc) == PRE_INC
- || GET_CODE (loc) == POST_INC)
- ? GET_MODE_SIZE (amd->mem_mode)
- : -GET_MODE_SIZE (amd->mem_mode)));
- amd->side_effects = alloc_EXPR_LIST (0,
- gen_rtx_SET (VOIDmode,
- XEXP (loc, 0),
- tem),
- amd->side_effects);
- return addr;
- case PRE_MODIFY:
- addr = XEXP (loc, 1);
- case POST_MODIFY:
- if (addr == loc)
- addr = XEXP (loc, 0);
- gcc_assert (amd->mem_mode != VOIDmode);
- addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
- amd->side_effects = alloc_EXPR_LIST (0,
- gen_rtx_SET (VOIDmode,
- XEXP (loc, 0),
- XEXP (loc, 1)),
- amd->side_effects);
- return addr;
- case SUBREG:
- /* First try without delegitimization of whole MEMs and
- avoid_constant_pool_reference, which is more likely to succeed. */
- store_save = amd->store;
- amd->store = true;
- addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
- data);
- amd->store = store_save;
- mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
- if (mem == SUBREG_REG (loc))
- {
- tem = loc;
- goto finish_subreg;
- }
- tem = simplify_gen_subreg (GET_MODE (loc), mem,
- GET_MODE (SUBREG_REG (loc)),
- SUBREG_BYTE (loc));
- if (tem)
- goto finish_subreg;
- tem = simplify_gen_subreg (GET_MODE (loc), addr,
- GET_MODE (SUBREG_REG (loc)),
- SUBREG_BYTE (loc));
- if (tem == NULL_RTX)
- tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
- finish_subreg:
- if (MAY_HAVE_DEBUG_INSNS
- && GET_CODE (tem) == SUBREG
- && (GET_CODE (SUBREG_REG (tem)) == PLUS
- || GET_CODE (SUBREG_REG (tem)) == MINUS
- || GET_CODE (SUBREG_REG (tem)) == MULT
- || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
- && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
- && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
- && GET_MODE_SIZE (GET_MODE (tem))
- < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
- && subreg_lowpart_p (tem)
- && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
- return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
- GET_MODE (SUBREG_REG (tem)));
- return tem;
- default:
- break;
- }
- return NULL_RTX;
-}
-
-/* Helper function for replacement of uses. */
-
-static void
-adjust_mem_uses (rtx *x, void *data)
-{
- rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
- if (new_x != *x)
- validate_change (NULL_RTX, x, new_x, true);
-}
-
-/* Helper function for replacement of stores. */
-
-static void
-adjust_mem_stores (rtx loc, const_rtx expr, void *data)
-{
- if (MEM_P (loc))
- {
- rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
- adjust_mems, data);
- if (new_dest != SET_DEST (expr))
- {
- rtx xexpr = CONST_CAST_RTX (expr);
- validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
- }
- }
-}
-
-/* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
- replace them with their value in the insn and add the side-effects
- as other sets to the insn. */
-
-static void
-adjust_insn (basic_block bb, rtx insn)
-{
- struct adjust_mem_data amd;
- rtx set;
- amd.mem_mode = VOIDmode;
- amd.stack_adjust = -VTI (bb)->out.stack_adjust;
- amd.side_effects = NULL_RTX;
-
- amd.store = true;
- note_stores (PATTERN (insn), adjust_mem_stores, &amd);
-
- amd.store = false;
- note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
-
- /* For read-only MEMs containing some constant, prefer those
- constants. */
- set = single_set (insn);
- if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
- {
- rtx note = find_reg_equal_equiv_note (insn);
-
- if (note && CONSTANT_P (XEXP (note, 0)))
- validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
- }
-
- if (amd.side_effects)
- {
- rtx *pat, new_pat, s;
- int i, oldn, newn;
-
- pat = &PATTERN (insn);
- if (GET_CODE (*pat) == COND_EXEC)
- pat = &COND_EXEC_CODE (*pat);
- if (GET_CODE (*pat) == PARALLEL)
- oldn = XVECLEN (*pat, 0);
- else
- oldn = 1;
- for (s = amd.side_effects, newn = 0; s; newn++)
- s = XEXP (s, 1);
- new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
- if (GET_CODE (*pat) == PARALLEL)
- for (i = 0; i < oldn; i++)
- XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
- else
- XVECEXP (new_pat, 0, 0) = *pat;
- for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
- XVECEXP (new_pat, 0, i) = XEXP (s, 0);
- free_EXPR_LIST_list (&amd.side_effects);
- validate_change (NULL_RTX, pat, new_pat, true);
- }
+ return replace_equiv_address_nv (mem, addr);
}
/* Return true if a decl_or_value DV is a DECL or NULL. */
return slot;
}
+/* Add a variable from *SLOT to hash table DATA and increase its reference
+ count. */
+
+static int
+vars_copy_1 (void **slot, void *data)
+{
+ htab_t dst = (htab_t) data;
+ variable src;
+ void **dstp;
+
+ src = (variable) *slot;
+ src->refcount++;
+
+ dstp = htab_find_slot_with_hash (dst, src->dv,
+ dv_htab_hash (src->dv),
+ INSERT);
+ *dstp = src;
+
+ /* Continue traversing the hash table. */
+ return 1;
+}
+
/* Copy all variables from hash table SRC to hash table DST. */
static void
vars_copy (htab_t dst, htab_t src)
{
- htab_iterator hi;
- variable var;
-
- FOR_EACH_HTAB_ELEMENT (src, var, variable, hi)
- {
- void **dstp;
- var->refcount++;
- dstp = htab_find_slot_with_hash (dst, var->dv,
- dv_htab_hash (var->dv),
- INSERT);
- *dstp = var;
- }
+ htab_traverse_noresize (src, vars_copy_1, dst);
}
/* Map a decl to its main debug decl. */
var_debug_decl (tree decl)
{
if (decl && DECL_P (decl)
- && DECL_DEBUG_EXPR_IS_FROM (decl))
- {
- tree debugdecl = DECL_DEBUG_EXPR (decl);
- if (debugdecl && DECL_P (debugdecl))
- decl = debugdecl;
- }
+ && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
+ && DECL_P (DECL_DEBUG_EXPR (decl)))
+ decl = DECL_DEBUG_EXPR (decl);
return decl;
}
we keep the newest locations in the beginning. */
static int
-variable_union (variable src, dataflow_set *set)
+variable_union (void **slot, void *data)
{
- variable dst;
+ variable src, dst;
void **dstp;
+ dataflow_set *set = (dataflow_set *) data;
int i, j, k;
+ src = (variable) *slot;
dstp = shared_hash_find_slot (set->vars, src->dv);
if (!dstp || !*dstp)
{
{
location_chain *nodep, dnode, snode;
- gcc_assert (src->n_var_parts == 1
- && dst->n_var_parts == 1);
+ gcc_assert (src->n_var_parts == 1);
+ gcc_assert (dst->n_var_parts == 1);
snode = src->var_part[0].loc_chain;
gcc_assert (snode);
dst->vars = shared_hash_copy (src->vars);
}
else
- {
- htab_iterator hi;
- variable var;
-
- FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
- variable_union (var, dst);
- }
+ htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
}
/* Whether the value is currently being expanded. */
find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
{
location_chain node;
- enum rtx_code loc_code;
if (!var)
return NULL;
gcc_assert (var->var_part[0].offset == 0);
- loc_code = GET_CODE (loc);
for (node = var->var_part[0].loc_chain; node; node = node->next)
- {
- if (GET_CODE (node->loc) != loc_code)
- {
- if (GET_CODE (node->loc) != VALUE)
- continue;
- }
- else if (loc == node->loc)
- return node;
- else if (loc_code != VALUE)
- {
- if (rtx_equal_p (loc, node->loc))
- return node;
- continue;
- }
- if (!VALUE_RECURSED_INTO (node->loc))
- {
- decl_or_value dv = dv_from_value (node->loc);
- variable var = (variable)
- htab_find_with_hash (vars, dv, dv_htab_hash (dv));
+ if (rtx_equal_p (loc, node->loc))
+ return node;
+ else if (GET_CODE (node->loc) == VALUE
+ && !VALUE_RECURSED_INTO (node->loc))
+ {
+ decl_or_value dv = dv_from_value (node->loc);
+ variable var = (variable)
+ htab_find_with_hash (vars, dv, dv_htab_hash (dv));
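+
+ /* Recurse into the VALUE's own location list, using
+ VALUE_RECURSED_INTO to guard against cycles. */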
- if (var)
- {
- location_chain where;
- VALUE_RECURSED_INTO (node->loc) = true;
- if ((where = find_loc_in_1pdv (loc, var, vars)))
- {
- VALUE_RECURSED_INTO (node->loc) = false;
- return where;
- }
- VALUE_RECURSED_INTO (node->loc) = false;
- }
- }
- }
+ if (var)
+ {
+ location_chain where;
+ VALUE_RECURSED_INTO (node->loc) = true;
+ if ((where = find_loc_in_1pdv (loc, var, vars)))
+ {
+ VALUE_RECURSED_INTO (node->loc) = false;
+ return where;
+ }
+ VALUE_RECURSED_INTO (node->loc) = false;
+ }
+ }
return NULL;
}
}
/* If CSELIB_VAL_PTR of value DV refers to VALUEs, add backlinks from those
- VALUEs to DV. Add the same time get rid of ASM_OPERANDS from locs list,
- that is something we never can express in .debug_info and can prevent
- reverse ops from being used. */
+ VALUEs to DV. */
static void
add_cselib_value_chains (decl_or_value dv)
{
- struct elt_loc_list **l;
+ struct elt_loc_list *l;
- for (l = &CSELIB_VAL_PTR (dv_as_value (dv))->locs; *l;)
- if (GET_CODE ((*l)->loc) == ASM_OPERANDS)
- *l = (*l)->next;
- else
- {
- for_each_rtx (&(*l)->loc, add_value_chain, dv_as_opaque (dv));
- l = &(*l)->next;
- }
+ for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
+ for_each_rtx (&l->loc, add_value_chain, dv_as_opaque (dv));
}
/* If decl or value DVP refers to VALUE from *LOC, remove backlinks
return 1;
}
-/* Bind one-part variables to the canonical value in an equivalence
- set. Not doing this causes dataflow convergence failure in rare
- circumstances, see PR42873. Unfortunately we can't do this
- efficiently as part of canonicalize_values_star, since we may not
- have determined or even seen the canonical value of a set when we
- get to a variable that references another member of the set. */
-
-static int
-canonicalize_vars_star (void **slot, void *data)
-{
- dataflow_set *set = (dataflow_set *)data;
- variable var = (variable) *slot;
- decl_or_value dv = var->dv;
- location_chain node;
- rtx cval;
- decl_or_value cdv;
- void **cslot;
- variable cvar;
- location_chain cnode;
-
- if (!dv_onepart_p (dv) || dv_is_value_p (dv))
- return 1;
-
- gcc_assert (var->n_var_parts == 1);
-
- node = var->var_part[0].loc_chain;
-
- if (GET_CODE (node->loc) != VALUE)
- return 1;
-
- gcc_assert (!node->next);
- cval = node->loc;
-
- /* Push values to the canonical one. */
- cdv = dv_from_value (cval);
- cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
- if (!cslot)
- return 1;
- cvar = (variable)*cslot;
- gcc_assert (cvar->n_var_parts == 1);
-
- cnode = cvar->var_part[0].loc_chain;
-
- /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
- that are not "more canonical" than it. */
- if (GET_CODE (cnode->loc) != VALUE
- || !canon_value_cmp (cnode->loc, cval))
- return 1;
-
- /* CVAL was found to be non-canonical. Change the variable to point
- to the canonical VALUE. */
- gcc_assert (!cnode->next);
- cval = cnode->loc;
-
- slot = set_slot_part (set, cval, slot, dv, 0,
- node->init, node->set_src);
- slot = clobber_slot_part (set, cval, slot, 0, node->set_src);
-
- return 1;
-}
-
/* Combine variable or value in *S1SLOT (in DSM->cur) with the
corresponding entry in DSM->src. Multi-part variables are combined
with variable_union, whereas onepart dvs are combined with
intersection. */
static int
-variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
+variable_merge_over_cur (void **s1slot, void *data)
{
+ struct dfset_merge *dsm = (struct dfset_merge *)data;
dataflow_set *dst = dsm->dst;
void **dstslot;
+ variable s1var = (variable) *s1slot;
variable s2var, dvar = NULL;
decl_or_value dv = s1var->dv;
bool onepart = dv_onepart_p (dv);
/* If the incoming onepart variable has an empty location list, then
the intersection will be just as empty. For other variables,
it's always union. */
- gcc_assert (s1var->n_var_parts
- && s1var->var_part[0].loc_chain);
+ gcc_assert (s1var->n_var_parts);
+ gcc_assert (s1var->var_part[0].loc_chain);
if (!onepart)
- return variable_union (s1var, dst);
+ return variable_union (s1slot, dst);
- gcc_assert (s1var->n_var_parts == 1
- && s1var->var_part[0].offset == 0);
+ gcc_assert (s1var->n_var_parts == 1);
+ gcc_assert (s1var->var_part[0].offset == 0);
dvhash = dv_htab_hash (dv);
if (dv_is_value_p (dv))
}
dsm->src_onepart_cnt--;
- gcc_assert (s2var->var_part[0].loc_chain
- && s2var->n_var_parts == 1
- && s2var->var_part[0].offset == 0);
+ gcc_assert (s2var->var_part[0].loc_chain);
+ gcc_assert (s2var->n_var_parts == 1);
+ gcc_assert (s2var->var_part[0].offset == 0);
dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
if (dstslot)
{
dvar = (variable)*dstslot;
- gcc_assert (dvar->refcount == 1
- && dvar->n_var_parts == 1
- && dvar->var_part[0].offset == 0);
+ gcc_assert (dvar->refcount == 1);
+ gcc_assert (dvar->n_var_parts == 1);
+ gcc_assert (dvar->var_part[0].offset == 0);
nodep = &dvar->var_part[0].loc_chain;
}
else
variable_merge_over_cur(). */
static int
-variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
+variable_merge_over_src (void **s2slot, void *data)
{
+ struct dfset_merge *dsm = (struct dfset_merge *)data;
dataflow_set *dst = dsm->dst;
+ variable s2var = (variable) *s2slot;
decl_or_value dv = s2var->dv;
bool onepart = dv_onepart_p (dv);
struct dfset_merge dsm;
int i;
size_t src1_elems, src2_elems;
- htab_iterator hi;
- variable var;
src1_elems = htab_elements (shared_hash_htab (src1->vars));
src2_elems = htab_elements (shared_hash_htab (src2->vars));
dsm.cur = src1;
dsm.src_onepart_cnt = 0;
- FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.src->vars), var, variable, hi)
- variable_merge_over_src (var, &dsm);
- FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.cur->vars), var, variable, hi)
- variable_merge_over_cur (var, &dsm);
+ htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
+ &dsm);
+ htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
+ &dsm);
if (dsm.src_onepart_cnt)
dst_can_be_shared = false;
att; att = att->next)
if (GET_MODE (att->loc) == GET_MODE (node->loc))
{
- gcc_assert (att->offset == 0
- && dv_is_value_p (att->dv));
+ gcc_assert (att->offset == 0);
+ gcc_assert (dv_is_value_p (att->dv));
val_reset (set, att->dv);
break;
}
decl_or_value dv;
attrs att;
- gcc_assert (dv_is_value_p (pvar->dv)
- && pvar->n_var_parts == 1);
+ gcc_assert (dv_is_value_p (pvar->dv));
+ gcc_assert (pvar->n_var_parts == 1);
pnode = pvar->var_part[0].loc_chain;
- gcc_assert (pnode
- && !pnode->next
- && REG_P (pnode->loc));
+ gcc_assert (pnode);
+ gcc_assert (!pnode->next);
+ gcc_assert (REG_P (pnode->loc));
dv = pvar->dv;
{
attrs_list_insert (&set->regs[REGNO (pnode->loc)],
dv, 0, pnode->loc);
- variable_union (pvar, set);
+ variable_union (pslot, set);
}
return 1;
htab_traverse (shared_hash_htab ((*permp)->vars),
variable_post_merge_perm_vals, &dfpm);
htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
- htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
}
/* Return a node whose loc is a MEM that refers to EXPR in the
if (!val)
return NULL;
- gcc_assert (GET_CODE (val) == VALUE
- && !VALUE_RECURSED_INTO (val));
+ gcc_assert (GET_CODE (val) == VALUE);
+
+ gcc_assert (!VALUE_RECURSED_INTO (val));
dv = dv_from_value (val);
var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
int r;
for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
- if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
+ if (TEST_HARD_REG_BIT (call_used_reg_set, r))
var_regno_delete (set, r);
if (MAY_HAVE_DEBUG_INSNS)
}
}
+/* Flag whether two dataflow sets being compared contain different data. */
+static bool dataflow_set_different_value;
+
static bool
variable_part_different_p (variable_part *vp1, variable_part *vp2)
{
if (var1 == var2)
return false;
- gcc_assert (var1->n_var_parts == 1
- && var2->n_var_parts == 1);
+ gcc_assert (var1->n_var_parts == 1);
+ gcc_assert (var2->n_var_parts == 1);
lc1 = var1->var_part[0].loc_chain;
lc2 = var2->var_part[0].loc_chain;
- gcc_assert (lc1 && lc2);
+ gcc_assert (lc1);
+ gcc_assert (lc2);
while (lc1 && lc2)
{
/* One-part values have locations in a canonical order. */
if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
{
- gcc_assert (var1->n_var_parts == 1
- && dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
+ gcc_assert (var1->n_var_parts == 1);
+ gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
return onepart_variable_different_p (var1, var2);
}
if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
return false;
}
+/* Compare variable *SLOT with the same variable in hash table DATA
+ and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
+
+static int
+dataflow_set_different_1 (void **slot, void *data)
+{
+ htab_t htab = (htab_t) data;
+ variable var1, var2;
+
+ var1 = (variable) *slot;
+ var2 = (variable) htab_find_with_hash (htab, var1->dv,
+ dv_htab_hash (var1->dv));
+ if (!var2)
+ {
+ dataflow_set_different_value = true;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "dataflow difference found: removal of:\n");
+ dump_var (var1);
+ }
+
+ /* Stop traversing the hash table. */
+ return 0;
+ }
+
+ if (variable_different_p (var1, var2))
+ {
+ dataflow_set_different_value = true;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "dataflow difference found: old and new follow:\n");
+ dump_var (var1);
+ dump_var (var2);
+ }
+
+ /* Stop traversing the hash table. */
+ return 0;
+ }
+
+ /* Continue traversing the hash table. */
+ return 1;
+}
+
/* Return true if dataflow sets OLD_SET and NEW_SET differ. */
static bool
dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
{
- htab_iterator hi;
- variable var1;
-
if (old_set->vars == new_set->vars)
return false;
!= htab_elements (shared_hash_htab (new_set->vars)))
return true;
- FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set->vars), var1, variable, hi)
- {
- htab_t htab = shared_hash_htab (new_set->vars);
- variable var2 = (variable) htab_find_with_hash (htab, var1->dv,
- dv_htab_hash (var1->dv));
- if (!var2)
- {
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- fprintf (dump_file, "dataflow difference found: removal of:\n");
- dump_var (var1);
- }
- return true;
- }
-
- if (variable_different_p (var1, var2))
- {
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- fprintf (dump_file, "dataflow difference found: "
- "old and new follow:\n");
- dump_var (var1);
- dump_var (var2);
- }
- return true;
- }
- }
+ dataflow_set_different_value = false;
+ htab_traverse (shared_hash_htab (old_set->vars), dataflow_set_different_1,
+ shared_hash_htab (new_set->vars));
/* No need to traverse the second hashtab, if both have the same number
of elements and the second one had all entries found in the first one,
then it can't have any extra entries. */
- return false;
+ return dataflow_set_different_value;
}
/* Free the contents of dataflow set SET. */
don't need to track this expression if the ultimate declaration is
ignored. */
realdecl = expr;
- if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
+ if (DECL_DEBUG_EXPR_IS_FROM (realdecl) && DECL_DEBUG_EXPR (realdecl))
{
realdecl = DECL_DEBUG_EXPR (realdecl);
- if (realdecl == NULL_TREE)
- realdecl = expr;
- else if (!DECL_P (realdecl))
- {
- if (handled_component_p (realdecl))
- {
- HOST_WIDE_INT bitsize, bitpos, maxsize;
- tree innerdecl
- = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
- &maxsize);
- if (!DECL_P (innerdecl)
- || DECL_IGNORED_P (innerdecl)
- || TREE_STATIC (innerdecl)
- || bitsize <= 0
- || bitpos + bitsize > 256
- || bitsize != maxsize)
- return 0;
- else
- realdecl = expr;
- }
- else
- return 0;
- }
+ /* ??? We don't yet know how to emit DW_OP_piece for a variable
+ that has been SRA'ed. */
+ if (!DECL_P (realdecl))
+ return 0;
}
/* Do not track EXPR if REALDECL it should be ignored for debugging
return gen_rtx_REG_offset (loc, mode, regno, offset);
}
-/* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
- hard_frame_pointer_rtx is being mapped to it. */
-static rtx cfa_base_rtx;
-
/* Carry information about uses and stores while walking rtx. */
struct count_use_info
return NULL;
}
-/* Helper function to get mode of MEM's address. */
-
-static inline enum machine_mode
-get_address_mode (rtx mem)
-{
- enum machine_mode mode = GET_MODE (XEXP (mem, 0));
- if (mode != VOIDmode)
- return mode;
- return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
-}
-
/* Replace all registers and addresses in an expression with VALUE
expressions that map back to them, unless the expression is a
register. If no mapping is or can be performed, returns NULL. */
return NULL;
else if (MEM_P (loc))
{
- cselib_val *addr = cselib_lookup (XEXP (loc, 0),
- get_address_mode (loc), 0);
+ enum machine_mode address_mode
+ = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
+ cselib_val *addr = cselib_lookup (XEXP (loc, 0), address_mode, 0);
if (addr)
return replace_equiv_address_nv (loc, addr->val_rtx);
else
if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
{
rtx ploc = PAT_VAR_LOCATION_LOC (loc);
- if (! VAR_LOC_UNKNOWN_P (ploc))
- {
- cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
+ cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
- /* ??? flag_float_store and volatile mems are never
- given values, but we could in theory use them for
- locations. */
- gcc_assert (val || 1);
- }
+ /* ??? flag_float_store and volatile mems are never
+ given values, but we could in theory use them for
+ locations. */
+ gcc_assert (val || 1);
return MO_VAL_LOC;
}
else
{
if (REG_P (loc)
|| (find_use_val (loc, GET_MODE (loc), cui)
- && cselib_lookup (XEXP (loc, 0),
- get_address_mode (loc), 0)))
+ && cselib_lookup (XEXP (loc, 0), GET_MODE (loc), 0)))
return MO_VAL_SET;
}
else
{
gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
- if (loc == cfa_base_rtx)
- return MO_CLOBBER;
expr = REG_EXPR (loc);
if (!expr)
fputc ('\n', out);
}
+/* Adjust SETS if needed. Currently this optimizes read-only MEM loads
+ when a REG_EQUAL/REG_EQUIV note is present. */
+
+static void
+adjust_sets (rtx insn, struct cselib_set *sets, int n_sets)
+{
+ if (n_sets == 1 && MEM_P (sets[0].src) && MEM_READONLY_P (sets[0].src))
+ {
+ /* For read-only MEMs containing some constant, prefer those
+ constants. */
+ rtx note = find_reg_equal_equiv_note (insn), src;
+
+ if (note && CONSTANT_P (XEXP (note, 0)))
+ {
+ sets[0].src = src = XEXP (note, 0);
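+ /* Under COND_EXEC the store happens only when the condition
+ holds, so describe the value as a conditional over the
+ previous destination contents. */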
+ if (GET_CODE (PATTERN (insn)) == COND_EXEC)
+ src = gen_rtx_IF_THEN_ELSE (GET_MODE (sets[0].dest),
+ COND_EXEC_TEST (PATTERN (insn)),
+ src, sets[0].dest);
+ sets[0].src_elt = cselib_lookup (src, GET_MODE (sets[0].dest), 1);
+ }
+ }
+}
+
/* Tell whether the CONCAT used to hold a VALUE and its location
needs value resolution, i.e., an attempt of mapping the location
back to other incoming values. */
VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
}
-/* Helper function for MO_VAL_LOC handling. Return non-zero if
- any rtxes not suitable for CONST use not replaced by VALUEs
- are discovered. */
-
-static int
-non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
-{
- if (*x == NULL_RTX)
- return 0;
-
- switch (GET_CODE (*x))
- {
- case REG:
- case DEBUG_EXPR:
- case PC:
- case SCRATCH:
- case CC0:
- case ASM_INPUT:
- case ASM_OPERANDS:
- return 1;
- case MEM:
- return !MEM_READONLY_P (*x);
- default:
- return 0;
- }
-}
-
/* Add uses (register and memory references) LOC which will be tracked
to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
gcc_assert (cui->sets);
if (MEM_P (vloc)
- && !REG_P (XEXP (vloc, 0))
- && !MEM_P (XEXP (vloc, 0))
- && (GET_CODE (XEXP (vloc, 0)) != PLUS
- || XEXP (XEXP (vloc, 0), 0) != cfa_base_rtx
- || !CONST_INT_P (XEXP (XEXP (vloc, 0), 1))))
+ && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0)))
{
rtx mloc = vloc;
- enum machine_mode address_mode = get_address_mode (mloc);
+ enum machine_mode address_mode
+ = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
cselib_val *val
= cselib_lookup (XEXP (mloc, 0), address_mode, 0);
}
}
- if (CONSTANT_P (vloc)
- && (GET_CODE (vloc) != CONST
- || for_each_rtx (&vloc, non_suitable_const, NULL)))
- /* For constants don't look up any value. */;
- else if (!VAR_LOC_UNKNOWN_P (vloc)
- && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
+ if (!VAR_LOC_UNKNOWN_P (vloc)
+ && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
{
enum machine_mode mode2;
enum micro_operation_type type2;
gcc_assert (cui->sets);
if (MEM_P (oloc)
- && !REG_P (XEXP (oloc, 0))
- && !MEM_P (XEXP (oloc, 0))
- && (GET_CODE (XEXP (oloc, 0)) != PLUS
- || XEXP (XEXP (oloc, 0), 0) != cfa_base_rtx
- || !CONST_INT_P (XEXP (XEXP (oloc, 0), 1))))
+ && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0)))
{
rtx mloc = oloc;
- enum machine_mode address_mode = get_address_mode (mloc);
+ enum machine_mode address_mode
+ = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
cselib_val *val
= cselib_lookup (XEXP (mloc, 0), address_mode, 0);
return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret);
}
+/* Return SRC, or, if it is a read-only MEM that adjust_sets replaced
+ with a constant from a REG_EQUAL/REG_EQUIV note, that constant. */
+
+static inline rtx
+get_adjusted_src (struct count_use_info *cui, rtx src)
+{
+ if (cui->n_sets == 1
+ && MEM_P (src)
+ && MEM_READONLY_P (src)
+ && CONSTANT_P (cui->sets[0].src))
+ return cui->sets[0].src;
+ return src;
+}
+
/* Add stores (register and memory references) LOC which will be tracked
to VTI (bb)->mos. EXPR is the RTL expression containing the store.
CUIP->insn is instruction which the LOC is part of. */
if (REG_P (loc))
{
- gcc_assert (loc != cfa_base_rtx);
if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
|| !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
|| GET_CODE (expr) == CLOBBER)
else
{
if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
- src = var_lowpart (mode2, SET_SRC (expr));
+ {
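+ /* Use the constant recorded by adjust_sets in place of a
+ read-only MEM source, if any. */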
+ src = get_adjusted_src (cui, SET_SRC (expr));
+ src = var_lowpart (mode2, src);
+ }
loc = var_lowpart (mode2, loc);
if (src == NULL)
}
else
{
- rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
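+ /* Reuse the insn's own SET unless the source was replaced
+ above; then build a new SET with the replacement source. */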
+ rtx xexpr = CONST_CAST_RTX (expr);
+
+ if (SET_SRC (expr) != src)
+ xexpr = gen_rtx_SET (VOIDmode, loc, src);
if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
mo.type = MO_COPY;
else
|| cui->sets))
{
if (MEM_P (loc) && type == MO_VAL_SET
- && !REG_P (XEXP (loc, 0))
- && !MEM_P (XEXP (loc, 0))
- && (GET_CODE (XEXP (loc, 0)) != PLUS
- || XEXP (XEXP (loc, 0), 0) != cfa_base_rtx
- || !CONST_INT_P (XEXP (XEXP (loc, 0), 1))))
+ && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
{
rtx mloc = loc;
- enum machine_mode address_mode = get_address_mode (mloc);
- cselib_val *val = cselib_lookup (XEXP (mloc, 0),
- address_mode, 0);
+ enum machine_mode address_mode
+ = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
+ cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
if (val && !cselib_preserved_value_p (val))
{
else
{
if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
- src = var_lowpart (mode2, SET_SRC (expr));
+ {
+ src = get_adjusted_src (cui, SET_SRC (expr));
+ src = var_lowpart (mode2, src);
+ }
loc = var_lowpart (mode2, loc);
if (src == NULL)
}
else
{
- rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
+ rtx xexpr = CONST_CAST_RTX (expr);
+
+ if (SET_SRC (expr) != src)
+ xexpr = gen_rtx_SET (VOIDmode, loc, src);
if (same_variable_part_p (SET_SRC (xexpr),
MEM_EXPR (loc),
INT_MEM_OFFSET (loc)))
}
else if (resolve && GET_CODE (mo.u.loc) == SET)
{
- nloc = replace_expr_with_values (SET_SRC (expr));
+ src = get_adjusted_src (cui, SET_SRC (expr));
+ nloc = replace_expr_with_values (src);
/* Avoid the mode mismatch between oexpr and expr. */
if (!nloc && mode != mode2)
{
- nloc = SET_SRC (expr);
+ nloc = src;
gcc_assert (oloc == SET_DEST (expr));
}
cselib_hook_called = true;
+ adjust_sets (insn, sets, n_sets);
+
cui.insn = insn;
cui.bb = bb;
cui.sets = sets;
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
INSERT);
}
- else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
- set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
- dv_from_decl (var), 0,
- VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
- INSERT);
}
break;
result = pc_rtx;
break;
}
- }
- else
- {
- result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
- vt_expand_loc_callback, data);
- if (result)
- break;
+ else
+ {
+ result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
+ vt_expand_loc_callback,
+ data);
+ if (result)
+ break;
+ }
}
if (dummy && (result || var->var_part[0].cur_loc))
var->cur_loc_changed = true;
complete = true;
last_limit = 0;
n_var_parts = 0;
- if (!MAY_HAVE_DEBUG_INSNS)
+ if (!MAY_HAVE_DEBUG_STMTS)
{
for (i = 0; i < var->n_var_parts; i++)
if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
}
loc[n_var_parts] = loc2;
mode = GET_MODE (var->var_part[i].cur_loc);
- if (mode == VOIDmode && dv_onepart_p (var->dv))
- mode = DECL_MODE (decl);
for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
if (var->var_part[i].cur_loc == lc->loc)
{
(int) initialized);
else if (n_var_parts == 1)
{
- rtx expr_list;
-
- if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
- expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
- else
- expr_list = loc[0];
+ rtx expr_list
+ = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
(int) initialized);
{
location_chain lc1, lc2;
- gcc_assert (old_var->n_var_parts == 1
- && new_var->n_var_parts == 1);
+ gcc_assert (old_var->n_var_parts == 1);
+ gcc_assert (new_var->n_var_parts == 1);
lc1 = old_var->var_part[0].loc_chain;
lc2 = new_var->var_part[0].loc_chain;
while (lc1
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
INSERT);
}
- else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
- set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
- dv_from_decl (var), 0,
- VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
- INSERT);
emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
}
}
-/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
-
-static bool
-fp_setter (rtx insn)
-{
- rtx pat = PATTERN (insn);
- if (RTX_FRAME_RELATED_P (insn))
- {
- rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
- if (expr)
- pat = XEXP (expr, 0);
- }
- if (GET_CODE (pat) == SET)
- return SET_DEST (pat) == hard_frame_pointer_rtx;
- else if (GET_CODE (pat) == PARALLEL)
- {
- int i;
- for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
- if (GET_CODE (XVECEXP (pat, 0, i)) == SET
- && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
- return true;
- }
- return false;
-}
-
-/* Initialize cfa_base_rtx, create a preserved VALUE for it and
- ensure it isn't flushed during cselib_reset_table.
- Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
- has been eliminated. */
-
-static void
-vt_init_cfa_base (void)
-{
- cselib_val *val;
-
-#ifdef FRAME_POINTER_CFA_OFFSET
- cfa_base_rtx = frame_pointer_rtx;
-#else
- cfa_base_rtx = arg_pointer_rtx;
-#endif
- if (cfa_base_rtx == hard_frame_pointer_rtx
- || !fixed_regs[REGNO (cfa_base_rtx)])
- {
- cfa_base_rtx = NULL_RTX;
- return;
- }
- if (!MAY_HAVE_DEBUG_INSNS)
- return;
-
- val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
- get_insns ());
- preserve_value (val);
- cselib_preserve_cfa_base_value (val);
- var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
- VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
- 0, NULL_RTX, INSERT);
-}
-
/* Allocate and initialize the data structures for variable tracking
and parse the RTL to get the micro operations. */
-static bool
+static void
vt_initialize (void)
{
- basic_block bb, prologue_bb = NULL;
- HOST_WIDE_INT fp_cfa_offset = -1;
+ basic_block bb;
alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
- attrs_pool = create_alloc_pool ("attrs_def pool",
- sizeof (struct attrs_def), 1024);
- var_pool = create_alloc_pool ("variable_def pool",
- sizeof (struct variable_def)
- + (MAX_VAR_PARTS - 1)
- * sizeof (((variable)NULL)->var_part[0]), 64);
- loc_chain_pool = create_alloc_pool ("location_chain_def pool",
- sizeof (struct location_chain_def),
- 1024);
- shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
- sizeof (struct shared_hash_def), 256);
- empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
- empty_shared_hash->refcount = 1;
- empty_shared_hash->htab
- = htab_create (1, variable_htab_hash, variable_htab_eq,
- variable_htab_free);
- changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
- variable_htab_free);
- if (MAY_HAVE_DEBUG_INSNS)
- {
- value_chain_pool = create_alloc_pool ("value_chain_def pool",
- sizeof (struct value_chain_def),
- 1024);
- value_chains = htab_create (32, value_chain_htab_hash,
- value_chain_htab_eq, NULL);
- }
-
- /* Init the IN and OUT sets. */
- FOR_ALL_BB (bb)
- {
- VTI (bb)->visited = false;
- VTI (bb)->flooded = false;
- dataflow_set_init (&VTI (bb)->in);
- dataflow_set_init (&VTI (bb)->out);
- VTI (bb)->permp = NULL;
- }
-
if (MAY_HAVE_DEBUG_INSNS)
{
- cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
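+ /* Have cselib record memory references as well as registers. */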
+ cselib_init (true);
scratch_regs = BITMAP_ALLOC (NULL);
valvar_pool = create_alloc_pool ("small variable_def pool",
sizeof (struct variable_def), 256);
valvar_pool = NULL;
}
- if (!frame_pointer_needed)
- {
- rtx reg, elim;
-
- if (!vt_stack_adjustments ())
- return false;
-
-#ifdef FRAME_POINTER_CFA_OFFSET
- reg = frame_pointer_rtx;
-#else
- reg = arg_pointer_rtx;
-#endif
- elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
- if (elim != reg)
- {
- if (GET_CODE (elim) == PLUS)
- elim = XEXP (elim, 0);
- if (elim == stack_pointer_rtx)
- vt_init_cfa_base ();
- }
- }
- else if (!crtl->stack_realign_tried)
- {
- rtx reg, elim;
-
-#ifdef FRAME_POINTER_CFA_OFFSET
- reg = frame_pointer_rtx;
- fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
-#else
- reg = arg_pointer_rtx;
- fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
-#endif
- elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
- if (elim != reg)
- {
- if (GET_CODE (elim) == PLUS)
- {
- fp_cfa_offset -= INTVAL (XEXP (elim, 1));
- elim = XEXP (elim, 0);
- }
- if (elim != hard_frame_pointer_rtx)
- fp_cfa_offset = -1;
- else
- prologue_bb = single_succ (ENTRY_BLOCK_PTR);
- }
- }
-
- hard_frame_pointer_adjustment = -1;
-
FOR_EACH_BB (bb)
{
rtx insn;
/* Add the micro-operations to the vector. */
FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
{
- HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
- VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
mo.type = MO_ADJUST;
mo.u.adjust = pre;
mo.insn = insn;
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
+ &mo);
+
if (dump_file && (dump_flags & TDF_DETAILS))
log_op_type (PATTERN (insn), bb, insn,
MO_ADJUST, dump_file);
- VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
- &mo);
- VTI (bb)->out.stack_adjust += pre;
}
}
cselib_hook_called = false;
- adjust_insn (bb, insn);
if (MAY_HAVE_DEBUG_INSNS)
{
cselib_process_insn (insn);
}
if (!cselib_hook_called)
add_with_sets (insn, 0, 0);
- cancel_changes (0);
if (!frame_pointer_needed && post)
{
mo.type = MO_ADJUST;
mo.u.adjust = post;
mo.insn = insn;
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (PATTERN (insn), bb, insn,
- MO_ADJUST, dump_file);
VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
&mo);
- VTI (bb)->out.stack_adjust += post;
- }
- if (bb == prologue_bb
- && hard_frame_pointer_adjustment == -1
- && RTX_FRAME_RELATED_P (insn)
- && fp_setter (insn))
- {
- vt_init_cfa_base ();
- hard_frame_pointer_adjustment = fp_cfa_offset;
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ log_op_type (PATTERN (insn), bb, insn,
+ MO_ADJUST, dump_file);
}
}
}
- gcc_assert (offset == VTI (bb)->out.stack_adjust);
}
bb = last_bb;
}
}
- hard_frame_pointer_adjustment = -1;
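+ /* Allocate the pools and hash tables used by the dataflow sets. */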
+ attrs_pool = create_alloc_pool ("attrs_def pool",
+ sizeof (struct attrs_def), 1024);
+ var_pool = create_alloc_pool ("variable_def pool",
+ sizeof (struct variable_def)
+ + (MAX_VAR_PARTS - 1)
+ * sizeof (((variable)NULL)->var_part[0]), 64);
+ loc_chain_pool = create_alloc_pool ("location_chain_def pool",
+ sizeof (struct location_chain_def),
+ 1024);
+ shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
+ sizeof (struct shared_hash_def), 256);
+ empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
+ empty_shared_hash->refcount = 1;
+ empty_shared_hash->htab
+ = htab_create (1, variable_htab_hash, variable_htab_eq,
+ variable_htab_free);
+ changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
+ variable_htab_free);
+ if (MAY_HAVE_DEBUG_INSNS)
+ {
+ value_chain_pool = create_alloc_pool ("value_chain_def pool",
+ sizeof (struct value_chain_def),
+ 1024);
+ value_chains = htab_create (32, value_chain_htab_hash,
+ value_chain_htab_eq, NULL);
+ }
+
+ /* Init the IN and OUT sets. */
+ FOR_ALL_BB (bb)
+ {
+ VTI (bb)->visited = false;
+ VTI (bb)->flooded = false;
+ dataflow_set_init (&VTI (bb)->in);
+ dataflow_set_init (&VTI (bb)->out);
+ VTI (bb)->permp = NULL;
+ }
+
VTI (ENTRY_BLOCK_PTR)->flooded = true;
vt_add_function_parameters ();
- cfa_base_rtx = NULL_RTX;
- return true;
}
/* Get rid of all debug insns from the insn stream. */
}
mark_dfs_back_edges ();
- if (!vt_initialize ())
+ vt_initialize ();
+ if (!frame_pointer_needed)
{
- vt_finalize ();
- vt_debug_insns_local (true);
- return 0;
+ if (!vt_stack_adjustments ())
+ {
+ vt_finalize ();
+ vt_debug_insns_local (true);
+ return 0;
+ }
}
success = vt_find_locations ();
/* This is later restored by our caller. */
flag_var_tracking_assignments = 0;
- success = vt_initialize ();
- gcc_assert (success);
+ vt_initialize ();
+
+ if (!frame_pointer_needed && !vt_stack_adjustments ())
+ gcc_unreachable ();
success = vt_find_locations ();
}