#include "tree-flow.h"
#include "cselib.h"
#include "target.h"
+#include "toplev.h"
+#include "params.h"
+#include "diagnostic.h"
+#include "pointer-set.h"
+#include "recog.h"
/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
/* Type of micro operation. */
enum micro_operation_type type;
+ /* The instruction which the micro operation is in, for MO_USE,
+ MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
+ instruction or note in the original flow (before any var-tracking
+ notes are inserted, to simplify emission of notes), for MO_SET
+ and MO_CLOBBER. */
+ rtx insn;
+
union {
/* Location. For MO_SET and MO_COPY, this is the SET that
performs the assignment, if known, otherwise it is the target
/* Stack adjustment. */
HOST_WIDE_INT adjust;
} u;
-
- /* The instruction which the micro operation is in, for MO_USE,
- MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
- instruction or note in the original flow (before any var-tracking
- notes are inserted, to simplify emission of notes), for MO_SET
- and MO_CLOBBER. */
- rtx insn;
} micro_operation;
+DEF_VEC_O(micro_operation);
+DEF_VEC_ALLOC_O(micro_operation,heap);
+
/* A declaration of a variable, or an RTL value being handled like a
declaration. */
typedef void *decl_or_value;
needed for variable tracking. */
typedef struct variable_tracking_info_def
{
- /* Number of micro operations stored in the MOS array. */
- int n_mos;
-
- /* The array of micro operations. */
- micro_operation *mos;
+ /* The vector of micro operations. */
+ VEC(micro_operation, heap) *mos;
/* The IN and OUT set for dataflow analysis. */
dataflow_set in;
int refcount;
/* Number of variable parts. */
- int n_var_parts;
+ char n_var_parts;
+
+ /* True if this variable changed (any of its) cur_loc fields
+ during the current emit_notes_for_changes resp.
+ emit_notes_for_differences call. */
+ bool cur_loc_changed;
+
+ /* True if this variable_def struct is currently in the
+ changed_variables hash table. */
+ bool in_changed_variables;
/* The variable parts. */
variable_part var_part[1];
} *value_chain;
typedef const struct value_chain_def *const_value_chain;
-/* Hash function for DECL for VARIABLE_HTAB. */
-#define VARIABLE_HASH_VAL(decl) (DECL_UID (decl))
-
/* Pointer to the BB's information specific to variable tracking pass. */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)
HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
HOST_WIDE_INT *);
-static void bb_stack_adjust_offset (basic_block);
static bool vt_stack_adjustments (void);
-static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
+static rtx compute_cfa_pointer (HOST_WIDE_INT);
static hashval_t variable_htab_hash (const void *);
static int variable_htab_eq (const void *, const void *);
static void variable_htab_free (void *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static int variable_union (void **, void *);
-static int variable_canonicalize (void **, void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
-static bool variable_different_p (variable, variable, bool);
+static bool variable_different_p (variable, variable);
static int dataflow_set_different_1 (void **, void *);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);
static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree, bool);
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
-static int count_uses (rtx *, void *);
-static void count_uses_1 (rtx *, void *);
-static void count_stores (rtx, const_rtx, void *);
static int add_uses (rtx *, void *);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
-static void vt_find_locations (void);
+static bool vt_find_locations (void);
static void dump_attrs_list (attrs);
static int dump_var_slot (void **, void *);
static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
-static void vt_initialize (void);
+static bool vt_initialize (void);
static void vt_finalize (void);
/* Given a SET, calculate the amount of stack adjustment it contains
}
}
-/* Compute stack adjustment in basic block BB. */
-
-static void
-bb_stack_adjust_offset (basic_block bb)
-{
- HOST_WIDE_INT offset;
- int i;
-
- offset = VTI (bb)->in.stack_adjust;
- for (i = 0; i < VTI (bb)->n_mos; i++)
- {
- if (VTI (bb)->mos[i].type == MO_ADJUST)
- offset += VTI (bb)->mos[i].u.adjust;
- else if (VTI (bb)->mos[i].type != MO_CALL)
- {
- if (MEM_P (VTI (bb)->mos[i].u.loc))
- {
- VTI (bb)->mos[i].u.loc
- = adjust_stack_reference (VTI (bb)->mos[i].u.loc, -offset);
- }
- }
- }
- VTI (bb)->out.stack_adjust = offset;
-}
-
/* Compute stack adjustments for all blocks by traversing DFS tree.
Return true when the adjustments on all incoming edges are consistent.
Heavily borrowed from pre_and_rev_post_order_compute. */
/* Initialize entry block. */
VTI (ENTRY_BLOCK_PTR)->visited = true;
+ VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
/* Allocate stack for back-tracking up CFG. */
/* Check if the edge destination has been visited yet. */
if (!VTI (dest)->visited)
{
+ rtx insn;
+ HOST_WIDE_INT pre, post, offset;
VTI (dest)->visited = true;
- VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust;
- bb_stack_adjust_offset (dest);
+ VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
+
+ if (dest != EXIT_BLOCK_PTR)
+ for (insn = BB_HEAD (dest);
+ insn != NEXT_INSN (BB_END (dest));
+ insn = NEXT_INSN (insn))
+ if (INSN_P (insn))
+ {
+ insn_stack_adjust_offset_pre_post (insn, &pre, &post);
+ offset += pre + post;
+ }
+
+ VTI (dest)->out.stack_adjust = offset;
if (EDGE_COUNT (dest->succs) > 0)
/* Since the DEST node has been visited for the first
return true;
}
-/* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative
- to the argument pointer. Return the new rtx. */
+/* Compute a CFA-based value for the stack pointer. */
static rtx
-adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment)
+compute_cfa_pointer (HOST_WIDE_INT adjustment)
{
- rtx addr, cfa, tmp;
+ rtx cfa;
#ifdef FRAME_POINTER_CFA_OFFSET
adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
cfa = plus_constant (arg_pointer_rtx, adjustment);
#endif
- addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa);
- tmp = simplify_rtx (addr);
- if (tmp)
- addr = tmp;
+ return cfa;
+}
+
+/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
+ or -1 if the replacement shouldn't be done. */
+static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
+
+/* Data for adjust_mems callback. */
+
+struct adjust_mem_data
+{
+ bool store;
+ enum machine_mode mem_mode;
+ HOST_WIDE_INT stack_adjust;
+ rtx side_effects;
+};
+
+/* Helper for adjust_mems.  Return 1 if *loc is unsuitable for
+   transformation of wider mode arithmetics to narrower mode,
+   -1 if it is suitable and subexpressions shouldn't be
+   traversed and 0 if it is suitable and subexpressions should
+   be traversed.  Called through for_each_rtx.  DATA is the
+   SUBREG rtx whose narrower outer mode we are testing for.  */
+
+static int
+use_narrower_mode_test (rtx *loc, void *data)
+{
+  rtx subreg = (rtx) data;
+
+  /* Constants can always be truncated to the narrower mode.  */
+  if (CONSTANT_P (*loc))
+    return -1;
+  switch (GET_CODE (*loc))
+    {
+    case REG:
+      /* NOTE(review): a REG is rejected when cselib already tracks it
+	 in the wider (SUBREG_REG) mode -- presumably the wide VALUE
+	 should be preferred then; confirm against cselib_lookup.  */
+      if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0))
+	return 1;
+      return -1;
+    case PLUS:
+    case MINUS:
+    case MULT:
+      /* These distribute over lowpart truncation, so their operands
+	 may be narrowed individually; keep traversing.  */
+      return 0;
+    case ASHIFT:
+      /* Only the shifted operand is narrowed (the count keeps its
+	 mode, see use_narrower_mode); test just that subtree.  */
+      if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
+	return 1;
+      else
+	return -1;
+    default:
+      /* Any other code is not known to be safe to narrow.  */
+      return 1;
+    }
+}
+
+/* Transform X into narrower mode MODE from wider mode WMODE.
+   X must have been vetted by use_narrower_mode_test, i.e. only
+   constants, REGs, PLUS/MINUS/MULT and ASHIFT may appear.  */
+
+static rtx
+use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
+{
+  rtx op0, op1;
+  if (CONSTANT_P (x))
+    return lowpart_subreg (mode, x, wmode);
+  switch (GET_CODE (x))
+    {
+    case REG:
+      return lowpart_subreg (mode, x, wmode);
+    case PLUS:
+    case MINUS:
+    case MULT:
+      /* Narrow both operands and rebuild the operation in MODE.  */
+      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
+      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
+      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
+    case ASHIFT:
+      /* The shift count operand keeps its original mode.  */
+      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
+      return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
+    default:
+      /* Excluded by use_narrower_mode_test.  */
+      gcc_unreachable ();
+    }
+}
+
+/* Helper function for adjusting used MEMs.  Callback passed to
+   simplify_replace_fn_rtx (note the recursive uses below); returning
+   NULL_RTX for a code means "no replacement here, process
+   subexpressions normally".  DATA is a struct adjust_mem_data.  */
+
+static rtx
+adjust_mems (rtx loc, const_rtx old_rtx, void *data)
+{
+  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
+  rtx mem, addr = loc, tem;
+  enum machine_mode mem_mode_save;
+  bool store_save;
+  switch (GET_CODE (loc))
+    {
+    case REG:
+      /* Don't do any sp or fp replacements outside of MEM addresses.  */
+      if (amd->mem_mode == VOIDmode)
+	return loc;
+      if (loc == stack_pointer_rtx
+	  && !frame_pointer_needed)
+	return compute_cfa_pointer (amd->stack_adjust);
+      else if (loc == hard_frame_pointer_rtx
+	       && frame_pointer_needed
+	       && hard_frame_pointer_adjustment != -1)
+	return compute_cfa_pointer (hard_frame_pointer_adjustment);
+      return loc;
+    case MEM:
+      mem = loc;
+      /* For reads, first try delegitimizing the whole MEM; if that
+	 yields a non-MEM, recurse on the replacement instead.  */
+      if (!amd->store)
+	{
+	  mem = targetm.delegitimize_address (mem);
+	  if (mem != loc && !MEM_P (mem))
+	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
+	}
+
+      /* Process the address with mem_mode/store temporarily switched
+	 to reflect that we are now inside this MEM's address.  */
+      addr = XEXP (mem, 0);
+      mem_mode_save = amd->mem_mode;
+      amd->mem_mode = GET_MODE (mem);
+      store_save = amd->store;
+      amd->store = false;
+      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
+      amd->store = store_save;
+      amd->mem_mode = mem_mode_save;
+      if (mem == loc)
+	addr = targetm.delegitimize_address (addr);
+      if (addr != XEXP (mem, 0))
+	mem = replace_equiv_address_nv (mem, addr);
+      if (!amd->store)
+	mem = avoid_constant_pool_reference (mem);
+      return mem;
+    case PRE_INC:
+    case PRE_DEC:
+      /* Express the pre-modified address as an explicit PLUS, then
+	 share the POST_* path below to queue the side-effect SET.  */
+      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
+			   GEN_INT (GET_CODE (loc) == PRE_INC
+				    ? GET_MODE_SIZE (amd->mem_mode)
+				    : -GET_MODE_SIZE (amd->mem_mode)))
+      /* FALLTHRU */
+    case POST_INC:
+    case POST_DEC:
+      if (addr == loc)
+	addr = XEXP (loc, 0);
+      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
+      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
+      /* Record the register update as a separate SET so adjust_insn
+	 can append it to the pattern.  */
+      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
+			  GEN_INT ((GET_CODE (loc) == PRE_INC
+				    || GET_CODE (loc) == POST_INC)
+				   ? GET_MODE_SIZE (amd->mem_mode)
+				   : -GET_MODE_SIZE (amd->mem_mode)));
+      amd->side_effects = alloc_EXPR_LIST (0,
+					   gen_rtx_SET (VOIDmode,
+							XEXP (loc, 0),
+							tem),
+					   amd->side_effects);
+      return addr;
+    case PRE_MODIFY:
+      addr = XEXP (loc, 1);
+      /* FALLTHRU */
+    case POST_MODIFY:
+      if (addr == loc)
+	addr = XEXP (loc, 0);
+      gcc_assert (amd->mem_mode != VOIDmode);
+      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
+      amd->side_effects = alloc_EXPR_LIST (0,
+					   gen_rtx_SET (VOIDmode,
+							XEXP (loc, 0),
+							XEXP (loc, 1)),
+					   amd->side_effects);
+      return addr;
+    case SUBREG:
+      /* First try without delegitimization of whole MEMs and
+	 avoid_constant_pool_reference, which is more likely to succeed.  */
+      store_save = amd->store;
+      amd->store = true;
+      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
+				      data);
+      amd->store = store_save;
+      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
+      if (mem == SUBREG_REG (loc))
+	{
+	  tem = loc;
+	  goto finish_subreg;
+	}
+      tem = simplify_gen_subreg (GET_MODE (loc), mem,
+				 GET_MODE (SUBREG_REG (loc)),
+				 SUBREG_BYTE (loc));
+      if (tem)
+	goto finish_subreg;
+      tem = simplify_gen_subreg (GET_MODE (loc), addr,
+				 GET_MODE (SUBREG_REG (loc)),
+				 SUBREG_BYTE (loc));
+      if (tem == NULL_RTX)
+	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
+    finish_subreg:
+      /* If the result is a lowpart SUBREG of wider-mode integer
+	 arithmetic, try pushing the truncation into the operands
+	 (see use_narrower_mode) so the SUBREG disappears.  */
+      if (MAY_HAVE_DEBUG_INSNS
+	  && GET_CODE (tem) == SUBREG
+	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
+	      || GET_CODE (SUBREG_REG (tem)) == MINUS
+	      || GET_CODE (SUBREG_REG (tem)) == MULT
+	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
+	  && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
+	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
+	  && GET_MODE_SIZE (GET_MODE (tem))
+	     < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
+	  && subreg_lowpart_p (tem)
+	  && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
+	return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
+				  GET_MODE (SUBREG_REG (tem)));
+      return tem;
+    default:
+      break;
+    }
+  return NULL_RTX;
+}
+
+/* Helper function for replacement of uses.  Called through note_uses;
+   any change is queued via validate_change's in-group mode (last
+   argument true) to be applied later in one apply_change_group.  */
+
+static void
+adjust_mem_uses (rtx *x, void *data)
+{
+  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
+  if (new_x != *x)
+    validate_change (NULL_RTX, x, new_x, true);
+}
+
+/* Helper function for replacement of stores.  Called through
+   note_stores; only MEM destinations need adjusting, and any change
+   is queued (validate_change with in-group true) for a later
+   apply_change_group.  */
+
+static void
+adjust_mem_stores (rtx loc, const_rtx expr, void *data)
+{
+  if (MEM_P (loc))
+    {
+      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
+					      adjust_mems, data);
+      if (new_dest != SET_DEST (expr))
+	{
+	  /* note_stores hands us a const_rtx; cast it away so the
+	     queued replacement can target SET_DEST in place.  */
+	  rtx xexpr = CONST_CAST_RTX (expr);
+	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
+	}
+    }
+}
+
+/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
+   replace them with their value in the insn and add the side-effects
+   as other sets to the insn.  BB is the block containing INSN; its
+   out.stack_adjust gives the sp adjustment used by adjust_mems.  */
+
+static void
+adjust_insn (basic_block bb, rtx insn)
+{
+  struct adjust_mem_data amd;
+  rtx set;
+  amd.mem_mode = VOIDmode;
+  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
+  amd.side_effects = NULL_RTX;
+
+  /* Adjust store destinations first, then all other uses; both
+     helpers queue changes into one validate_change group.  */
+  amd.store = true;
+  note_stores (PATTERN (insn), adjust_mem_stores, &amd);
+
+  amd.store = false;
+  note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
+
+  /* For read-only MEMs containing some constant, prefer those
+     constants.  */
+  set = single_set (insn);
+  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
+    {
+      rtx note = find_reg_equal_equiv_note (insn);
+
+      if (note && CONSTANT_P (XEXP (note, 0)))
+	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
+    }
+
+  /* If adjust_mems collected auto-inc side-effect SETs, splice them
+     together with the original pattern into a single PARALLEL.  */
+  if (amd.side_effects)
+    {
+      rtx *pat, new_pat, s;
+      int i, oldn, newn;
-  return replace_equiv_address_nv (mem, addr);
+      pat = &PATTERN (insn);
+      if (GET_CODE (*pat) == COND_EXEC)
+	pat = &COND_EXEC_CODE (*pat);
+      if (GET_CODE (*pat) == PARALLEL)
+	oldn = XVECLEN (*pat, 0);
+      else
+	oldn = 1;
+      /* Count the queued side-effects.  */
+      for (s = amd.side_effects, newn = 0; s; newn++)
+	s = XEXP (s, 1);
+      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
+      if (GET_CODE (*pat) == PARALLEL)
+	for (i = 0; i < oldn; i++)
+	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
+      else
+	XVECEXP (new_pat, 0, 0) = *pat;
+      for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
+	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
+      free_EXPR_LIST_list (&amd.side_effects);
+      validate_change (NULL_RTX, pat, new_pat, true);
+    }
 }
/* Return true if a decl_or_value DV is a DECL or NULL. */
if (!decl)
return true;
+ if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
+ return true;
+
return (target_for_debug_bind (decl) != NULL_TREE);
}
return dv;
}
-static inline hashval_t
-dv_htab_hash (decl_or_value dv)
+extern void debug_dv (decl_or_value dv);
+
+/* Dump DV -- either a VALUE rtx or a decl tree -- for use from the
+   debugger; dispatches to the matching debug routine.  */
+
+void
+debug_dv (decl_or_value dv)
+{
+  if (dv_is_value_p (dv))
+    debug_rtx (dv_as_value (dv));
+  else
+    debug_generic_stmt (dv_as_decl (dv));
+}
+
+typedef unsigned int dvuid;
+
+/* Return the uid of DV. */
+
+static inline dvuid
+dv_uid (decl_or_value dv)
{
if (dv_is_value_p (dv))
- return -(hashval_t)(CSELIB_VAL_PTR (dv_as_value (dv))->value);
+ return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
else
- return (VARIABLE_HASH_VAL (dv_as_decl (dv)));
+ return DECL_UID (dv_as_decl (dv));
+}
+
+/* Compute the hash from the uid. */
+
+static inline hashval_t
+dv_uid2hash (dvuid uid)
+{
+ return uid;
+}
+
+/* The hash function for a mask table in a shared_htab chain. */
+
+static inline hashval_t
+dv_htab_hash (decl_or_value dv)
+{
+ return dv_uid2hash (dv_uid (dv));
}
/* The hash function for variable_htab, computes the hash value
const_variable const v = (const_variable) x;
decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
- if (dv_as_opaque (v->dv) == dv_as_opaque (dv))
- return true;
-
-#if ENABLE_CHECKING
- {
- bool visv, dvisv;
-
- visv = dv_is_value_p (v->dv);
- dvisv = dv_is_value_p (dv);
-
- if (visv != dvisv)
- return false;
-
- if (visv)
- gcc_assert (CSELIB_VAL_PTR (dv_as_value (v->dv))
- != CSELIB_VAL_PTR (dv_as_value (dv)));
- else
- gcc_assert (VARIABLE_HASH_VAL (dv_as_decl (v->dv))
- != VARIABLE_HASH_VAL (dv_as_decl (dv)));
- }
-#endif
-
- return false;
+ return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}
/* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
return vars->htab;
}
+/* Return true if VAR is shared, or maybe because VARS is shared.
+   VAR counts as shared when its refcount exceeds a single owner --
+   ignoring the extra reference it holds while sitting in the
+   changed_variables table -- or when the containing hash table VARS
+   is itself shared between dataflow sets.  */
+
+static inline bool
+shared_var_p (variable var, shared_hash vars)
+{
+  /* Don't count an entry in the changed_variables table as a duplicate.  */
+  return ((var->refcount > 1 + (int) var->in_changed_variables)
+	  || shared_hash_shared (vars));
+}
+
/* Copy variables into a new hash table. */
static shared_hash
return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}
-/* Determine a total order between two distinct pointers. Compare the
- pointers as integral types if size_t is wide enough, otherwise
- resort to bitwise memory compare. The actual order does not
- matter, we just need to be consistent, so endianness is
- irrelevant. */
-
-static int
-tie_break_pointers (const void *p1, const void *p2)
-{
- gcc_assert (p1 != p2);
-
- if (sizeof (size_t) >= sizeof (void*))
- return (size_t)p1 < (size_t)p2 ? -1 : 1;
- else
- return memcmp (&p1, &p2, sizeof (p1));
-}
-
/* Return true if TVAL is better than CVAL as a canonival value. We
choose lowest-numbered VALUEs, using the RTX address as a
tie-breaker. The idea is to arrange them into a star topology,
canon_value_cmp (rtx tval, rtx cval)
{
return !cval
- || CSELIB_VAL_PTR (tval)->value < CSELIB_VAL_PTR (cval)->value
- || (CSELIB_VAL_PTR (tval)->value == CSELIB_VAL_PTR (cval)->value
- && tie_break_pointers (tval, cval) < 0);
+ || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}
static bool dst_can_be_shared;
new_var->refcount = 1;
var->refcount--;
new_var->n_var_parts = var->n_var_parts;
+ new_var->cur_loc_changed = var->cur_loc_changed;
+ var->cur_loc_changed = false;
+ new_var->in_changed_variables = false;
if (! flag_var_tracking_uninit)
initialized = VAR_INIT_STATUS_INITIALIZED;
nextp = &new_lc->next;
}
- /* We are at the basic block boundary when copying variable description
- so set the CUR_LOC to be the first element of the chain. */
- if (new_var->var_part[i].loc_chain)
- new_var->var_part[i].cur_loc = new_var->var_part[i].loc_chain->loc;
- else
- new_var->var_part[i].cur_loc = NULL;
+ new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
}
dst_can_be_shared = false;
else if (set->traversed_vars && set->vars != set->traversed_vars)
slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
*slot = new_var;
+ if (var->in_changed_variables)
+ {
+ void **cslot
+ = htab_find_slot_with_hash (changed_variables, var->dv,
+ dv_htab_hash (var->dv), NO_INSERT);
+ gcc_assert (*cslot == (void *) var);
+ var->in_changed_variables = false;
+ variable_htab_free (var);
+ *cslot = new_var;
+ new_var->in_changed_variables = true;
+ }
return slot;
}
delete_variable_part (set, loc, dv_from_decl (decl), offset);
}
-/* Map a value to a location it was just stored in. */
+/* Bind a value to a location it was just stored in. If MODIFIED
+ holds, assume the location was modified, detaching it from any
+ values bound to it. */
static void
-val_store (dataflow_set *set, rtx val, rtx loc, rtx insn)
+val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
{
cselib_val *v = CSELIB_VAL_PTR (val);
if (REG_P (loc))
{
- var_regno_delete (set, REGNO (loc));
+ if (modified)
+ var_regno_delete (set, REGNO (loc));
var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
dv_from_value (val), 0, NULL_RTX, INSERT);
}
*dstp = src;
- /* If CUR_LOC of some variable part is not the first element of
- the location chain we are going to change it so we have to make
- a copy of the variable. */
- for (k = 0; k < src->n_var_parts; k++)
- {
- gcc_assert (!src->var_part[k].loc_chain
- == !src->var_part[k].cur_loc);
- if (src->var_part[k].loc_chain)
- {
- gcc_assert (src->var_part[k].cur_loc);
- if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
- break;
- }
- }
- if (k < src->n_var_parts)
- dstp = unshare_variable (set, dstp, src, VAR_INIT_STATUS_UNKNOWN);
-
/* Continue traversing the hash table. */
return 1;
}
{
location_chain nnode;
- if (dst->refcount != 1 || shared_hash_shared (set->vars))
+ if (shared_var_p (dst, set->vars))
{
dstp = unshare_variable (set, dstp, dst,
VAR_INIT_STATUS_INITIALIZED);
dnode = *nodep;
}
- dst->var_part[0].cur_loc = dst->var_part[0].loc_chain->loc;
-
return 1;
}
thus there are at most MAX_VAR_PARTS different offsets. */
gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
- if ((dst->refcount > 1 || shared_hash_shared (set->vars))
- && dst->n_var_parts != k)
+ if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
{
dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
dst = (variable)*dstp;
/* If DST is shared compare the location chains.
If they are different we will modify the chain in DST with
high probability so make a copy of DST. */
- if (dst->refcount > 1 || shared_hash_shared (set->vars))
+ if (shared_var_p (dst, set->vars))
{
for (node = src->var_part[i].loc_chain,
node2 = dst->var_part[j].loc_chain; node && node2;
dst->var_part[k].offset = src->var_part[i].offset;
i--;
}
-
- /* We are at the basic block boundary when computing union
- so set the CUR_LOC to be the first element of the chain. */
- if (dst->var_part[k].loc_chain)
- dst->var_part[k].cur_loc = dst->var_part[k].loc_chain->loc;
- else
- dst->var_part[k].cur_loc = NULL;
+ dst->var_part[k].cur_loc = NULL;
}
if (flag_var_tracking_uninit)
return 1;
}
-/* Like variable_union, but only used when doing dataflow_set_union
- into an empty hashtab. To allow sharing, dst is initially shared
- with src (so all variables are "copied" from src to dst hashtab),
- so only unshare_variable for variables that need canonicalization
- are needed. */
-
-static int
-variable_canonicalize (void **slot, void *data)
-{
- variable src;
- dataflow_set *set = (dataflow_set *) data;
- int k;
-
- src = *(variable *) slot;
-
- /* If CUR_LOC of some variable part is not the first element of
- the location chain we are going to change it so we have to make
- a copy of the variable. */
- for (k = 0; k < src->n_var_parts; k++)
- {
- gcc_assert (!src->var_part[k].loc_chain == !src->var_part[k].cur_loc);
- if (src->var_part[k].loc_chain)
- {
- gcc_assert (src->var_part[k].cur_loc);
- if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
- break;
- }
- }
- if (k < src->n_var_parts)
- slot = unshare_variable (set, slot, src, VAR_INIT_STATUS_UNKNOWN);
- return 1;
-}
-
/* Compute union of dataflow sets SRC and DST and store it to DST. */
static void
{
shared_hash_destroy (dst->vars);
dst->vars = shared_hash_copy (src->vars);
- dst->traversed_vars = dst->vars;
- htab_traverse (shared_hash_htab (dst->vars), variable_canonicalize, dst);
- dst->traversed_vars = NULL;
}
else
htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
: DECL_CHANGED (dv_as_decl (dv)));
}
-/* Vector of VALUEs that should have VALUE_RECURSED_INTO bit cleared
- at the end of find_loc_in_1pdv. Not a static variable in find_loc_in_1pdv
- to avoid constant allocation/freeing of it. */
-static VEC(rtx, heap) *values_to_unmark;
-
-/* Helper function for find_loc_in_1pdv.
- Return a location list node whose loc is rtx_equal to LOC, in the
+/* Return a location list node whose loc is rtx_equal to LOC, in the
location list of a one-part variable or value VAR, or in that of
any values recursively mentioned in the location lists. */
static location_chain
-find_loc_in_1pdv_1 (rtx loc, variable var, htab_t vars)
+find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
{
location_chain node;
{
location_chain where;
VALUE_RECURSED_INTO (node->loc) = true;
- VEC_safe_push (rtx, heap, values_to_unmark, node->loc);
- if ((where = find_loc_in_1pdv_1 (loc, var, vars)))
- return where;
+ if ((where = find_loc_in_1pdv (loc, var, vars)))
+ {
+ VALUE_RECURSED_INTO (node->loc) = false;
+ return where;
+ }
+ VALUE_RECURSED_INTO (node->loc) = false;
}
}
return NULL;
}
-/* Return a location list node whose loc is rtx_equal to LOC, in the
- location list of a one-part variable or value VAR, or in that of
- any values recursively mentioned in the location lists. */
-
-static location_chain
-find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
-{
- location_chain ret;
- unsigned int i;
- rtx value;
-
- ret = find_loc_in_1pdv_1 (loc, var, vars);
- for (i = 0; VEC_iterate (rtx, values_to_unmark, i, value); i++)
- VALUE_RECURSED_INTO (value) = false;
- VEC_truncate (rtx, values_to_unmark, 0);
- return ret;
-}
-
/* Hash table iteration argument passed to variable_merge. */
struct dfset_merge
{
{
if (GET_CODE (y) != VALUE)
return -1;
- gcc_assert (GET_MODE (x) == GET_MODE (y));
+ /* Don't assert the modes are the same, that is true only
+ when not recursing. (subreg:QI (value:SI 1:1) 0)
+ and (subreg:QI (value:DI 2:2) 0) can be compared,
+ even when the modes are different. */
if (canon_value_cmp (x, y))
return -1;
else
gcc_assert (GET_MODE (x) == GET_MODE (y));
+ if (GET_CODE (x) == DEBUG_EXPR)
+ {
+ if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
+ < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
+ return -1;
+#ifdef ENABLE_CHECKING
+ gcc_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
+ > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
+#endif
+ return 1;
+ }
+
fmt = GET_RTX_FORMAT (code);
for (i = 0; i < GET_RTX_LENGTH (code); i++)
switch (fmt[i])
static int
add_value_chain (rtx *loc, void *dvp)
{
- if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
+ decl_or_value dv, ldv;
+ value_chain vc, nvc;
+ void **slot;
+
+ if (GET_CODE (*loc) == VALUE)
+ ldv = dv_from_value (*loc);
+ else if (GET_CODE (*loc) == DEBUG_EXPR)
+ ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
+ else
+ return 0;
+
+ if (dv_as_opaque (ldv) == dvp)
+ return 0;
+
+ dv = (decl_or_value) dvp;
+ slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
+ INSERT);
+ if (!*slot)
{
- decl_or_value dv = (decl_or_value) dvp;
- decl_or_value ldv = dv_from_value (*loc);
- value_chain vc, nvc;
- void **slot = htab_find_slot_with_hash (value_chains, ldv,
- dv_htab_hash (ldv), INSERT);
- if (!*slot)
- {
- vc = (value_chain) pool_alloc (value_chain_pool);
- vc->dv = ldv;
- vc->next = NULL;
- vc->refcount = 0;
- *slot = (void *) vc;
- }
- else
+ vc = (value_chain) pool_alloc (value_chain_pool);
+ vc->dv = ldv;
+ vc->next = NULL;
+ vc->refcount = 0;
+ *slot = (void *) vc;
+ }
+ else
+ {
+ for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
+ if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
+ break;
+ if (vc)
{
- for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
- if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
- break;
- if (vc)
- {
- vc->refcount++;
- return 0;
- }
+ vc->refcount++;
+ return 0;
}
- vc = (value_chain) *slot;
- nvc = (value_chain) pool_alloc (value_chain_pool);
- nvc->dv = dv;
- nvc->next = vc->next;
- nvc->refcount = 1;
- vc->next = nvc;
}
+ vc = (value_chain) *slot;
+ nvc = (value_chain) pool_alloc (value_chain_pool);
+ nvc->dv = dv;
+ nvc->next = vc->next;
+ nvc->refcount = 1;
+ vc->next = nvc;
return 0;
}
static void
add_value_chains (decl_or_value dv, rtx loc)
{
- if (GET_CODE (loc) == VALUE)
+ if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
{
add_value_chain (&loc, dv_as_opaque (dv));
return;
}
/* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
- VALUEs to DV. */
+ VALUEs to DV. Add the same time get rid of ASM_OPERANDS from locs list,
+ that is something we never can express in .debug_info and can prevent
+ reverse ops from being used. */
static void
add_cselib_value_chains (decl_or_value dv)
{
- struct elt_loc_list *l;
+ struct elt_loc_list **l;
- for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
- for_each_rtx (&l->loc, add_value_chain, dv_as_opaque (dv));
+ for (l = &CSELIB_VAL_PTR (dv_as_value (dv))->locs; *l;)
+ if (GET_CODE ((*l)->loc) == ASM_OPERANDS)
+ *l = (*l)->next;
+ else
+ {
+ for_each_rtx (&(*l)->loc, add_value_chain, dv_as_opaque (dv));
+ l = &(*l)->next;
+ }
}
/* If decl or value DVP refers to VALUE from *LOC, remove backlinks
static int
remove_value_chain (rtx *loc, void *dvp)
{
- if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
- {
- decl_or_value dv = (decl_or_value) dvp;
- decl_or_value ldv = dv_from_value (*loc);
- value_chain vc, dvc = NULL;
- void **slot = htab_find_slot_with_hash (value_chains, ldv,
- dv_htab_hash (ldv), NO_INSERT);
- for (vc = (value_chain) *slot; vc->next; vc = vc->next)
- if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
+ decl_or_value dv, ldv;
+ value_chain vc;
+ void **slot;
+
+ if (GET_CODE (*loc) == VALUE)
+ ldv = dv_from_value (*loc);
+ else if (GET_CODE (*loc) == DEBUG_EXPR)
+ ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
+ else
+ return 0;
+
+ if (dv_as_opaque (ldv) == dvp)
+ return 0;
+
+ dv = (decl_or_value) dvp;
+ slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
+ NO_INSERT);
+ for (vc = (value_chain) *slot; vc->next; vc = vc->next)
+ if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
+ {
+ value_chain dvc = vc->next;
+ gcc_assert (dvc->refcount > 0);
+ if (--dvc->refcount == 0)
{
- dvc = vc->next;
- gcc_assert (dvc->refcount > 0);
- if (--dvc->refcount == 0)
+ vc->next = dvc->next;
+ pool_free (value_chain_pool, dvc);
+ if (vc->next == NULL && vc == (value_chain) *slot)
{
- vc->next = dvc->next;
- pool_free (value_chain_pool, dvc);
- if (vc->next == NULL && vc == (value_chain) *slot)
- {
- pool_free (value_chain_pool, vc);
- htab_clear_slot (value_chains, slot);
- }
+ pool_free (value_chain_pool, vc);
+ htab_clear_slot (value_chains, slot);
}
- return 0;
}
- gcc_unreachable ();
- }
- return 0;
+ return 0;
+ }
+ gcc_unreachable ();
}
/* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
static void
remove_value_chains (decl_or_value dv, rtx loc)
{
- if (GET_CODE (loc) == VALUE)
+ if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
{
remove_value_chain (&loc, dv_as_opaque (dv));
return;
for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
}
+#if ENABLE_CHECKING
/* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
VALUEs to DV. */
for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
}
-#if ENABLE_CHECKING
/* Check the order of entries in one-part variables. */
static int
decl_or_value dv = var->dv;
location_chain node, next;
+#ifdef ENABLE_RTL_CHECKING
+ int i;
+ for (i = 0; i < var->n_var_parts; i++)
+ gcc_assert (var->var_part[0].cur_loc == NULL);
+ gcc_assert (!var->cur_loc_changed && !var->in_changed_variables);
+#endif
+
if (!dv_onepart_p (dv))
return 1;
return 1;
}
+/* Bind one-part variables to the canonical value in an equivalence
+ set. Not doing this causes dataflow convergence failure in rare
+ circumstances, see PR42873. Unfortunately we can't do this
+ efficiently as part of canonicalize_values_star, since we may not
+ have determined or even seen the canonical value of a set when we
+ get to a variable that references another member of the set.
+
+ This is an htab_traverse callback over SET->vars; it always returns
+ 1 so the traversal continues over every slot. */
+
+static int
+canonicalize_vars_star (void **slot, void *data)
+{
+ dataflow_set *set = (dataflow_set *)data;
+ variable var = (variable) *slot;
+ decl_or_value dv = var->dv;
+ location_chain node;
+ rtx cval;
+ decl_or_value cdv;
+ void **cslot;
+ variable cvar;
+ location_chain cnode;
+
+ /* Only one-part decl/debug-expr variables are rebound here; VALUE
+ dvs themselves were already handled by canonicalize_values_star. */
+ if (!dv_onepart_p (dv) || dv_is_value_p (dv))
+ return 1;
+
+ gcc_assert (var->n_var_parts == 1);
+
+ node = var->var_part[0].loc_chain;
+
+ if (GET_CODE (node->loc) != VALUE)
+ return 1;
+
+ /* A one-part variable bound to a VALUE has exactly that single
+ location. */
+ gcc_assert (!node->next);
+ cval = node->loc;
+
+ /* Push values to the canonical one. */
+ cdv = dv_from_value (cval);
+ cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
+ if (!cslot)
+ return 1;
+ cvar = (variable)*cslot;
+ gcc_assert (cvar->n_var_parts == 1);
+
+ cnode = cvar->var_part[0].loc_chain;
+
+ /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
+ that are not "more canonical" than it. */
+ if (GET_CODE (cnode->loc) != VALUE
+ || !canon_value_cmp (cnode->loc, cval))
+ return 1;
+
+ /* CVAL was found to be non-canonical. Change the variable to point
+ to the canonical VALUE. */
+ gcc_assert (!cnode->next);
+ cval = cnode->loc;
+
+ /* Rebind: add the canonical VALUE as the variable's location, then
+ clobber everything else at offset 0, leaving it as the sole one. */
+ slot = set_slot_part (set, cval, slot, dv, 0,
+ node->init, node->set_src);
+ slot = clobber_slot_part (set, cval, slot, 0, node->set_src);
+
+ return 1;
+}
+
/* Combine variable or value in *S1SLOT (in DSM->cur) with the
corresponding entry in DSM->src. Multi-part variables are combined
with variable_union, whereas onepart dvs are combined with
dvar->dv = dv;
dvar->refcount = 1;
dvar->n_var_parts = 1;
+ dvar->cur_loc_changed = false;
+ dvar->in_changed_variables = false;
dvar->var_part[0].offset = 0;
dvar->var_part[0].loc_chain = node;
- dvar->var_part[0].cur_loc = node->loc;
+ dvar->var_part[0].cur_loc = NULL;
dstslot
= shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
var->dv = dv;
var->refcount = 1;
var->n_var_parts = 1;
+ var->cur_loc_changed = false;
+ var->in_changed_variables = false;
var->var_part[0].offset = 0;
var->var_part[0].loc_chain = NULL;
var->var_part[0].cur_loc = NULL;
dst_can_be_shared = false;
}
else
- {
- if (dvar->refcount == 1)
- dvar->var_part[0].cur_loc = dvar->var_part[0].loc_chain->loc;
- dst_can_be_shared = false;
- }
+ dst_can_be_shared = false;
return 1;
}
-/* Combine variable in *S1SLOT (in DSM->src) with the corresponding
- entry in DSM->src. Only multi-part variables are combined, using
- variable_union. onepart dvs were already combined with
- intersection in variable_merge_over_cur(). */
+/* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
+ multi-part variable. Unions of multi-part variables and
+ intersections of one-part ones will be handled in
+ variable_merge_over_cur(). */
static int
variable_merge_over_src (void **s2slot, void *data)
void **dstp = shared_hash_find_slot (dst->vars, dv);
*dstp = s2var;
s2var->refcount++;
- return variable_canonicalize (dstp, dst);
+ return 1;
}
dsm->src_onepart_cnt++;
return 1;
}
-/* Combine dataflow set information from SRC into DST, using PDST
+/* Combine dataflow set information from SRC2 into DST, using PDST
to carry over information across passes. */
static void
-dataflow_set_merge (dataflow_set *dst, dataflow_set *src)
+dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
{
- dataflow_set src2 = *dst;
+ dataflow_set cur = *dst;
+ dataflow_set *src1 = &cur;
struct dfset_merge dsm;
int i;
- size_t src_elems, dst_elems;
+ size_t src1_elems, src2_elems;
- src_elems = htab_elements (shared_hash_htab (src->vars));
- dst_elems = htab_elements (shared_hash_htab (src2.vars));
+ src1_elems = htab_elements (shared_hash_htab (src1->vars));
+ src2_elems = htab_elements (shared_hash_htab (src2->vars));
dataflow_set_init (dst);
- dst->stack_adjust = src2.stack_adjust;
+ dst->stack_adjust = cur.stack_adjust;
shared_hash_destroy (dst->vars);
dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
dst->vars->refcount = 1;
dst->vars->htab
- = htab_create (MAX (src_elems, dst_elems), variable_htab_hash,
+ = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
variable_htab_eq, variable_htab_free);
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- attrs_list_mpdv_union (&dst->regs[i], src->regs[i], src2.regs[i]);
+ attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
dsm.dst = dst;
- dsm.src = &src2;
- dsm.cur = src;
+ dsm.src = src2;
+ dsm.cur = src1;
dsm.src_onepart_cnt = 0;
htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
if (dsm.src_onepart_cnt)
dst_can_be_shared = false;
- dataflow_set_destroy (&src2);
+ dataflow_set_destroy (src1);
}
/* Mark register equivalences. */
cdv = dv_from_value (cval);
if (dump_file)
fprintf (dump_file,
- "Created new value %i for reg %i\n",
- v->value, REGNO (node->loc));
+ "Created new value %u:%u for reg %i\n",
+ v->uid, v->hash, REGNO (node->loc));
}
var_reg_decl_set (*dfpm->permp, node->loc,
htab_traverse (shared_hash_htab ((*permp)->vars),
variable_post_merge_perm_vals, &dfpm);
htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
+ htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
}
/* Return a node whose loc is a MEM that refers to EXPR in the
{
tree decl = dv_as_decl (var->dv);
location_chain loc, *locp;
+ bool changed = false;
if (!var->n_var_parts)
return 1;
gcc_assert (var->n_var_parts == 1);
- if (var->refcount > 1 || shared_hash_shared (set->vars))
+ if (shared_var_p (var, set->vars))
{
for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
{
{
if (old_loc != loc->loc && emit_notes)
{
+ if (old_loc == var->var_part[0].cur_loc)
+ {
+ changed = true;
+ var->var_part[0].cur_loc = NULL;
+ var->cur_loc_changed = true;
+ }
add_value_chains (var->dv, loc->loc);
remove_value_chains (var->dv, old_loc);
}
}
if (emit_notes)
- remove_value_chains (var->dv, old_loc);
+ {
+ remove_value_chains (var->dv, old_loc);
+ if (old_loc == var->var_part[0].cur_loc)
+ {
+ changed = true;
+ var->var_part[0].cur_loc = NULL;
+ var->cur_loc_changed = true;
+ }
+ }
*locp = loc->next;
pool_free (loc_chain_pool, loc);
}
if (!var->var_part[0].loc_chain)
{
var->n_var_parts--;
- if (emit_notes && dv_is_value_p (var->dv))
- remove_cselib_value_chains (var->dv);
- variable_was_changed (var, set);
+ changed = true;
}
+ if (changed)
+ variable_was_changed (var, set);
}
return 1;
gcc_assert (var->n_var_parts == 1);
- if (var->refcount > 1 || shared_hash_shared (set->vars))
+ if (shared_var_p (var, set->vars))
{
for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
if (GET_CODE (loc->loc) == MEM
/* If we have deleted the location which was last emitted
we have to emit new location so add the variable to set
of changed variables. */
- if (var->var_part[0].cur_loc
- && rtx_equal_p (loc->loc, var->var_part[0].cur_loc))
- changed = true;
+ if (var->var_part[0].cur_loc == loc->loc)
+ {
+ changed = true;
+ var->var_part[0].cur_loc = NULL;
+ var->cur_loc_changed = true;
+ }
pool_free (loc_chain_pool, loc);
}
if (!var->var_part[0].loc_chain)
{
var->n_var_parts--;
- if (emit_notes && dv_is_value_p (var->dv))
- remove_cselib_value_chains (var->dv);
- gcc_assert (changed);
+ changed = true;
}
if (changed)
- {
- if (var->n_var_parts && var->var_part[0].loc_chain)
- var->var_part[0].cur_loc = var->var_part[0].loc_chain->loc;
- variable_was_changed (var, set);
- }
+ variable_was_changed (var, set);
}
return 1;
int r;
for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
- if (TEST_HARD_REG_BIT (call_used_reg_set, r))
+ if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
var_regno_delete (set, r);
if (MAY_HAVE_DEBUG_INSNS)
return lc1 != lc2;
}
-/* Return true if variables VAR1 and VAR2 are different.
- If COMPARE_CURRENT_LOCATION is true compare also the cur_loc of each
- variable part. */
+/* Return true if variables VAR1 and VAR2 are different. */
static bool
-variable_different_p (variable var1, variable var2,
- bool compare_current_location)
+variable_different_p (variable var1, variable var2)
{
int i;
{
if (var1->var_part[i].offset != var2->var_part[i].offset)
return true;
- if (compare_current_location)
- {
- if (!((REG_P (var1->var_part[i].cur_loc)
- && REG_P (var2->var_part[i].cur_loc)
- && (REGNO (var1->var_part[i].cur_loc)
- == REGNO (var2->var_part[i].cur_loc)))
- || rtx_equal_p (var1->var_part[i].cur_loc,
- var2->var_part[i].cur_loc)))
- return true;
- }
/* One-part values have locations in a canonical order. */
if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
{
return 0;
}
- if (variable_different_p (var1, var2, false))
+ if (variable_different_p (var1, var2))
{
dataflow_set_different_value = true;
return 0;
/* It also must have a name... */
- if (!DECL_NAME (expr))
+ if (!DECL_NAME (expr) && need_rtl)
return 0;
/* ... and a RTL assigned to it. */
return gen_rtx_REG_offset (loc, mode, regno, offset);
}
+/* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
+ hard_frame_pointer_rtx is being mapped to it. */
+static rtx cfa_base_rtx;
+
/* Carry information about uses and stores while walking rtx. */
struct count_use_info
return NULL;
}
+/* Return the machine mode of MEM's address: the mode of its address
+ operand when that operand carries one, otherwise the target's
+ address mode for MEM's address space. */
+
+static inline enum machine_mode
+get_address_mode (rtx mem)
+{
+ enum machine_mode mode = GET_MODE (XEXP (mem, 0));
+
+ if (mode == VOIDmode)
+ mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
+ return mode;
+}
+
/* Replace all registers and addresses in an expression with VALUE
expressions that map back to them, unless the expression is a
register. If no mapping is or can be performed, returns NULL. */
return NULL;
else if (MEM_P (loc))
{
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
- cselib_val *addr = cselib_lookup (XEXP (loc, 0), address_mode, 0);
+ cselib_val *addr = cselib_lookup (XEXP (loc, 0),
+ get_address_mode (loc), 0);
if (addr)
return replace_equiv_address_nv (loc, addr->val_rtx);
else
use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
{
tree expr;
- cselib_val *val;
if (cui && cui->sets)
{
if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
{
rtx ploc = PAT_VAR_LOCATION_LOC (loc);
- cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
+ if (! VAR_LOC_UNKNOWN_P (ploc))
+ {
+ cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
- /* ??? flag_float_store and volatile mems are never
- given values, but we could in theory use them for
- locations. */
- gcc_assert (val || 1);
+ /* ??? flag_float_store and volatile mems are never
+ given values, but we could in theory use them for
+ locations. */
+ gcc_assert (val || 1);
+ }
return MO_VAL_LOC;
}
else
return MO_CLOBBER;
}
- if ((REG_P (loc) || MEM_P (loc))
- && (val = find_use_val (loc, GET_MODE (loc), cui)))
+ if (REG_P (loc) || MEM_P (loc))
{
if (modep)
*modep = GET_MODE (loc);
if (cui->store_p)
{
if (REG_P (loc)
- || cselib_lookup (XEXP (loc, 0), GET_MODE (loc), 0))
+ || (find_use_val (loc, GET_MODE (loc), cui)
+ && cselib_lookup (XEXP (loc, 0),
+ get_address_mode (loc), 0)))
return MO_VAL_SET;
}
- else if (!cselib_preserved_value_p (val))
- return MO_VAL_USE;
+ else
+ {
+ cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
+
+ if (val && !cselib_preserved_value_p (val))
+ return MO_VAL_USE;
+ }
}
}
{
gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
+ if (loc == cfa_base_rtx)
+ return MO_CLOBBER;
expr = REG_EXPR (loc);
if (!expr)
enum micro_operation_type mopt, FILE *out)
{
fprintf (out, "bb %i op %i insn %i %s ",
- bb->index, VTI (bb)->n_mos - 1,
+ bb->index, VEC_length (micro_operation, VTI (bb)->mos),
INSN_UID (insn), micro_operation_type_name[mopt]);
print_inline_rtx (out, x, 2);
fputc ('\n', out);
}
-/* Count uses (register and memory references) LOC which will be tracked.
- INSN is instruction which the LOC is part of. */
-
-static int
-count_uses (rtx *ploc, void *cuip)
-{
- rtx loc = *ploc;
- struct count_use_info *cui = (struct count_use_info *) cuip;
- enum micro_operation_type mopt = use_type (loc, cui, NULL);
-
- if (mopt != MO_CLOBBER)
- {
- cselib_val *val;
- enum machine_mode mode = GET_MODE (loc);
-
- VTI (cui->bb)->n_mos++;
-
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (loc, cui->bb, cui->insn, mopt, dump_file);
-
- switch (mopt)
- {
- case MO_VAL_LOC:
- loc = PAT_VAR_LOCATION_LOC (loc);
- if (VAR_LOC_UNKNOWN_P (loc))
- break;
- /* Fall through. */
-
- case MO_VAL_USE:
- case MO_VAL_SET:
- if (MEM_P (loc)
- && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
- {
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
- val = cselib_lookup (XEXP (loc, 0), address_mode, false);
-
- if (val && !cselib_preserved_value_p (val))
- {
- VTI (cui->bb)->n_mos++;
- cselib_preserve_value (val);
- }
- }
-
- val = find_use_val (loc, mode, cui);
- if (val)
- {
- if (mopt == MO_VAL_SET
- && GET_CODE (PATTERN (cui->insn)) == COND_EXEC
- && (REG_P (loc)
- || (MEM_P (loc)
- && (use_type (loc, NULL, NULL) == MO_USE
- || cui->sets))))
- {
- cselib_val *oval = cselib_lookup (loc, GET_MODE (loc), 0);
-
- gcc_assert (oval != val);
- gcc_assert (REG_P (loc) || MEM_P (loc));
-
- if (!cselib_preserved_value_p (oval))
- {
- VTI (cui->bb)->n_mos++;
- cselib_preserve_value (oval);
- }
- }
-
- cselib_preserve_value (val);
- }
- else
- gcc_assert (mopt == MO_VAL_LOC);
-
- break;
-
- default:
- break;
- }
- }
-
- return 0;
-}
-
-/* Helper function for finding all uses of REG/MEM in X in CUI's
- insn. */
-
-static void
-count_uses_1 (rtx *x, void *cui)
-{
- for_each_rtx (x, count_uses, cui);
-}
-
-/* Count stores (register and memory references) LOC which will be
- tracked. CUI is a count_use_info object containing the instruction
- which the LOC is part of. */
-
-static void
-count_stores (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *cui)
-{
- count_uses (&loc, cui);
-}
-
-/* Callback for cselib_record_sets_hook, that counts how many micro
- operations it takes for uses and stores in an insn after
- cselib_record_sets has analyzed the sets in an insn, but before it
- modifies the stored values in the internal tables, unless
- cselib_record_sets doesn't call it directly (perhaps because we're
- not doing cselib in the first place, in which case sets and n_sets
- will be 0). */
-
-static void
-count_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
-{
- basic_block bb = BLOCK_FOR_INSN (insn);
- struct count_use_info cui;
-
- cselib_hook_called = true;
-
- cui.insn = insn;
- cui.bb = bb;
- cui.sets = sets;
- cui.n_sets = n_sets;
-
- cui.store_p = false;
- note_uses (&PATTERN (insn), count_uses_1, &cui);
- cui.store_p = true;
- note_stores (PATTERN (insn), count_stores, &cui);
-}
-
/* Tell whether the CONCAT used to holds a VALUE and its location
needs value resolution, i.e., an attempt of mapping the location
back to other incoming values. */
MO_CLOBBER as well. */
#define VAL_EXPR_IS_CLOBBERED(x) \
(RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
+/* Whether the location is a CONCAT of the MO_VAL_SET expression and
+ a reverse operation that should be handled afterwards. */
+#define VAL_EXPR_HAS_REVERSE(x) \
+ (RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val)
+
+/* All preserved VALUEs. */
+static VEC (rtx, heap) *preserved_values;
+
+/* Mark VAL as preserved in cselib and additionally record its VALUE
+ rtx in the PRESERVED_VALUES vector, so that vt_emit_notes can later
+ iterate over every value that was preserved. Callers use this
+ instead of calling cselib_preserve_value directly. */
+
+static void
+preserve_value (cselib_val *val)
+{
+ cselib_preserve_value (val);
+ VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
+}
+
+/* for_each_rtx callback used while handling MO_VAL_LOC. Return
+ nonzero when *X is an rtx that may not be used inside a CONST
+ unless it has been replaced by a VALUE; zero otherwise (including
+ for a null subexpression). */
+
+static int
+non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
+{
+ rtx y = *x;
+
+ if (y == NULL_RTX)
+ return 0;
+
+ switch (GET_CODE (y))
+ {
+ case MEM:
+ /* Only read-only memory is acceptable in a constant. */
+ return !MEM_READONLY_P (y);
+ case REG:
+ case DEBUG_EXPR:
+ case PC:
+ case SCRATCH:
+ case CC0:
+ case ASM_INPUT:
+ case ASM_OPERANDS:
+ return 1;
+ default:
+ return 0;
+ }
+}
/* Add uses (register and memory references) LOC which will be tracked
to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
if (type != MO_CLOBBER)
{
basic_block bb = cui->bb;
- micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
+ micro_operation mo;
- mo->type = type;
- mo->u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
- mo->insn = cui->insn;
+ mo.type = type;
+ mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
+ mo.insn = cui->insn;
if (type == MO_VAL_LOC)
{
gcc_assert (cui->sets);
if (MEM_P (vloc)
- && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0)))
+ && !REG_P (XEXP (vloc, 0))
+ && !MEM_P (XEXP (vloc, 0))
+ && (GET_CODE (XEXP (vloc, 0)) != PLUS
+ || XEXP (XEXP (vloc, 0), 0) != cfa_base_rtx
+ || !CONST_INT_P (XEXP (XEXP (vloc, 0), 1))))
{
rtx mloc = vloc;
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
+ enum machine_mode address_mode = get_address_mode (mloc);
cselib_val *val
= cselib_lookup (XEXP (mloc, 0), address_mode, 0);
if (val && !cselib_preserved_value_p (val))
{
- micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
- mon->type = mo->type;
- mon->u.loc = mo->u.loc;
- mon->insn = mo->insn;
- cselib_preserve_value (val);
- mo->type = MO_VAL_USE;
+ micro_operation moa;
+ preserve_value (val);
mloc = cselib_subst_to_values (XEXP (mloc, 0));
- mo->u.loc = gen_rtx_CONCAT (address_mode,
+ moa.type = MO_VAL_USE;
+ moa.insn = cui->insn;
+ moa.u.loc = gen_rtx_CONCAT (address_mode,
val->val_rtx, mloc);
if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (mo->u.loc, cui->bb, cui->insn,
- mo->type, dump_file);
- mo = mon;
+ log_op_type (moa.u.loc, cui->bb, cui->insn,
+ moa.type, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
}
}
- if (!VAR_LOC_UNKNOWN_P (vloc)
- && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
+ if (CONSTANT_P (vloc)
+ && (GET_CODE (vloc) != CONST
+ || for_each_rtx (&vloc, non_suitable_const, NULL)))
+ /* For constants don't look up any value. */;
+ else if (!VAR_LOC_UNKNOWN_P (vloc)
+ && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
{
enum machine_mode mode2;
enum micro_operation_type type2;
&& !cselib_preserved_value_p (val))
{
VAL_NEEDS_RESOLUTION (oloc) = 1;
- cselib_preserve_value (val);
+ preserve_value (val);
}
}
else if (!VAR_LOC_UNKNOWN_P (vloc))
PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
}
- mo->u.loc = oloc;
+ mo.u.loc = oloc;
}
else if (type == MO_VAL_USE)
{
gcc_assert (cui->sets);
if (MEM_P (oloc)
- && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0)))
+ && !REG_P (XEXP (oloc, 0))
+ && !MEM_P (XEXP (oloc, 0))
+ && (GET_CODE (XEXP (oloc, 0)) != PLUS
+ || XEXP (XEXP (oloc, 0), 0) != cfa_base_rtx
+ || !CONST_INT_P (XEXP (XEXP (oloc, 0), 1))))
{
rtx mloc = oloc;
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
+ enum machine_mode address_mode = get_address_mode (mloc);
cselib_val *val
= cselib_lookup (XEXP (mloc, 0), address_mode, 0);
if (val && !cselib_preserved_value_p (val))
{
- micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
- mon->type = mo->type;
- mon->u.loc = mo->u.loc;
- mon->insn = mo->insn;
- cselib_preserve_value (val);
- mo->type = MO_VAL_USE;
+ micro_operation moa;
+ preserve_value (val);
mloc = cselib_subst_to_values (XEXP (mloc, 0));
- mo->u.loc = gen_rtx_CONCAT (address_mode,
+ moa.type = MO_VAL_USE;
+ moa.insn = cui->insn;
+ moa.u.loc = gen_rtx_CONCAT (address_mode,
val->val_rtx, mloc);
- mo->insn = cui->insn;
if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (mo->u.loc, cui->bb, cui->insn,
- mo->type, dump_file);
- mo = mon;
+ log_op_type (moa.u.loc, cui->bb, cui->insn,
+ moa.type, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
}
}
else
oloc = val->val_rtx;
- mo->u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
+ mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
if (type2 == MO_USE)
- VAL_HOLDS_TRACK_EXPR (mo->u.loc) = 1;
+ VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
if (!cselib_preserved_value_p (val))
{
- VAL_NEEDS_RESOLUTION (mo->u.loc) = 1;
- cselib_preserve_value (val);
+ VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
+ preserve_value (val);
}
}
else
gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
+ log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
}
return 0;
for_each_rtx (x, add_uses, cui);
}
+/* Attempt to reverse the EXPR operation in the debug info. Say for
+ reg1 = reg2 + 6 even when reg2 is no longer live we
+ can express its value as VAL - 6.
+
+ VAL is the VALUE for SET_DEST (EXPR). On success, return a CONCAT
+ of the source operand's preserved VALUE and an expression that
+ recomputes it from VAL; return NULL_RTX when no reversal is
+ possible. */
+
+static rtx
+reverse_op (rtx val, const_rtx expr)
+{
+ rtx src, arg, ret;
+ cselib_val *v;
+ enum rtx_code code;
+
+ if (GET_CODE (expr) != SET)
+ return NULL_RTX;
+
+ /* Only reverse a plain register set whose mode matches VAL's. */
+ if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
+ return NULL_RTX;
+
+ /* Only these operations have a usable inverse. */
+ src = SET_SRC (expr);
+ switch (GET_CODE (src))
+ {
+ case PLUS:
+ case MINUS:
+ case XOR:
+ case NOT:
+ case NEG:
+ case SIGN_EXTEND:
+ case ZERO_EXTEND:
+ break;
+ default:
+ return NULL_RTX;
+ }
+
+ if (!REG_P (XEXP (src, 0)) || !SCALAR_INT_MODE_P (GET_MODE (src)))
+ return NULL_RTX;
+
+ /* The operand must already have a preserved VALUE for the reverse
+ location to be attached to. */
+ v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0);
+ if (!v || !cselib_preserved_value_p (v))
+ return NULL_RTX;
+
+ switch (GET_CODE (src))
+ {
+ case NOT:
+ case NEG:
+ /* NOT and NEG are their own inverses. */
+ if (GET_MODE (v->val_rtx) != GET_MODE (val))
+ return NULL_RTX;
+ ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
+ break;
+ case SIGN_EXTEND:
+ case ZERO_EXTEND:
+ /* Extensions are reversed by taking the lowpart again. */
+ ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
+ break;
+ case XOR:
+ code = XOR;
+ goto binary;
+ case PLUS:
+ code = MINUS;
+ goto binary;
+ case MINUS:
+ code = PLUS;
+ goto binary;
+ binary:
+ if (GET_MODE (v->val_rtx) != GET_MODE (val))
+ return NULL_RTX;
+ /* The second operand must be a constant; try expanding it into
+ one through cselib if it isn't already. */
+ arg = XEXP (src, 1);
+ if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
+ {
+ arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
+ if (arg == NULL_RTX)
+ return NULL_RTX;
+ if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
+ return NULL_RTX;
+ }
+ ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
+ if (ret == val)
+ /* Ensure ret isn't VALUE itself (which can happen e.g. for
+ (plus (reg1) (reg2)) when reg2 is known to be 0), as that
+ breaks a lot of routines during var-tracking. */
+ ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
+ break;
+ default:
+ gcc_unreachable ();
+ }
+
+ return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret);
+}
+
/* Add stores (register and memory references) LOC which will be tracked
to VTI (bb)->mos. EXPR is the RTL expression containing the store.
CUIP->insn is instruction which the LOC is part of. */
enum machine_mode mode = VOIDmode, mode2;
struct count_use_info *cui = (struct count_use_info *)cuip;
basic_block bb = cui->bb;
- micro_operation *mo;
+ micro_operation mo;
rtx oloc = loc, nloc, src = NULL;
enum micro_operation_type type = use_type (loc, cui, &mode);
bool track_p = false;
cselib_val *v;
bool resolve, preserve;
+ rtx reverse;
if (type == MO_CLOBBER)
return;
if (REG_P (loc))
{
- mo = VTI (bb)->mos + VTI (bb)->n_mos++;
-
+ gcc_assert (loc != cfa_base_rtx);
if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
|| !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
|| GET_CODE (expr) == CLOBBER)
{
- mo->type = MO_CLOBBER;
- mo->u.loc = loc;
+ mo.type = MO_CLOBBER;
+ mo.u.loc = loc;
}
else
{
if (src == NULL)
{
- mo->type = MO_SET;
- mo->u.loc = loc;
+ mo.type = MO_SET;
+ mo.u.loc = loc;
}
else
{
- rtx xexpr = CONST_CAST_RTX (expr);
-
- if (SET_SRC (expr) != src)
- xexpr = gen_rtx_SET (VOIDmode, loc, src);
+ rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
- mo->type = MO_COPY;
+ mo.type = MO_COPY;
else
- mo->type = MO_SET;
- mo->u.loc = xexpr;
+ mo.type = MO_SET;
+ mo.u.loc = xexpr;
}
}
- mo->insn = cui->insn;
+ mo.insn = cui->insn;
}
else if (MEM_P (loc)
&& ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
|| cui->sets))
{
- mo = VTI (bb)->mos + VTI (bb)->n_mos++;
-
if (MEM_P (loc) && type == MO_VAL_SET
- && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
+ && !REG_P (XEXP (loc, 0))
+ && !MEM_P (XEXP (loc, 0))
+ && (GET_CODE (XEXP (loc, 0)) != PLUS
+ || XEXP (XEXP (loc, 0), 0) != cfa_base_rtx
+ || !CONST_INT_P (XEXP (XEXP (loc, 0), 1))))
{
rtx mloc = loc;
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
- cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
+ enum machine_mode address_mode = get_address_mode (mloc);
+ cselib_val *val = cselib_lookup (XEXP (mloc, 0),
+ address_mode, 0);
if (val && !cselib_preserved_value_p (val))
{
- cselib_preserve_value (val);
- mo->type = MO_VAL_USE;
+ preserve_value (val);
+ mo.type = MO_VAL_USE;
mloc = cselib_subst_to_values (XEXP (mloc, 0));
- mo->u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
- mo->insn = cui->insn;
+ mo.u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
+ mo.insn = cui->insn;
if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (mo->u.loc, cui->bb, cui->insn,
- mo->type, dump_file);
- mo = VTI (bb)->mos + VTI (bb)->n_mos++;
+ log_op_type (mo.u.loc, cui->bb, cui->insn,
+ mo.type, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
}
}
if (GET_CODE (expr) == CLOBBER || !track_p)
{
- mo->type = MO_CLOBBER;
- mo->u.loc = track_p ? var_lowpart (mode2, loc) : loc;
+ mo.type = MO_CLOBBER;
+ mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
}
else
{
if (src == NULL)
{
- mo->type = MO_SET;
- mo->u.loc = loc;
+ mo.type = MO_SET;
+ mo.u.loc = loc;
}
else
{
- rtx xexpr = CONST_CAST_RTX (expr);
-
- if (SET_SRC (expr) != src)
- xexpr = gen_rtx_SET (VOIDmode, loc, src);
+ rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
if (same_variable_part_p (SET_SRC (xexpr),
MEM_EXPR (loc),
INT_MEM_OFFSET (loc)))
- mo->type = MO_COPY;
+ mo.type = MO_COPY;
else
- mo->type = MO_SET;
- mo->u.loc = xexpr;
+ mo.type = MO_SET;
+ mo.u.loc = xexpr;
}
}
- mo->insn = cui->insn;
+ mo.insn = cui->insn;
}
else
return;
v = find_use_val (oloc, mode, cui);
+ if (!v)
+ goto log_and_return;
+
resolve = preserve = !cselib_preserved_value_p (v);
nloc = replace_expr_with_values (oloc);
if (!cselib_preserved_value_p (oval))
{
- micro_operation *nmo = VTI (bb)->mos + VTI (bb)->n_mos++;
+ micro_operation moa;
- cselib_preserve_value (oval);
+ preserve_value (oval);
- nmo->type = MO_VAL_USE;
- nmo->u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
- VAL_NEEDS_RESOLUTION (nmo->u.loc) = 1;
- nmo->insn = mo->insn;
+ moa.type = MO_VAL_USE;
+ moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
+ VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
+ moa.insn = cui->insn;
if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (nmo->u.loc, cui->bb, cui->insn,
- nmo->type, dump_file);
+ log_op_type (moa.u.loc, cui->bb, cui->insn,
+ moa.type, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
}
resolve = false;
}
- else if (resolve && GET_CODE (mo->u.loc) == SET)
+ else if (resolve && GET_CODE (mo.u.loc) == SET)
{
nloc = replace_expr_with_values (SET_SRC (expr));
}
if (nloc)
- oloc = gen_rtx_SET (GET_MODE (mo->u.loc), oloc, nloc);
+ oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
else
{
- if (oloc == SET_DEST (mo->u.loc))
+ if (oloc == SET_DEST (mo.u.loc))
/* No point in duplicating. */
- oloc = mo->u.loc;
- if (!REG_P (SET_SRC (mo->u.loc)))
+ oloc = mo.u.loc;
+ if (!REG_P (SET_SRC (mo.u.loc)))
resolve = false;
}
}
else if (!resolve)
{
- if (GET_CODE (mo->u.loc) == SET
- && oloc == SET_DEST (mo->u.loc))
+ if (GET_CODE (mo.u.loc) == SET
+ && oloc == SET_DEST (mo.u.loc))
/* No point in duplicating. */
- oloc = mo->u.loc;
+ oloc = mo.u.loc;
}
else
resolve = false;
loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
- if (mo->u.loc != oloc)
- loc = gen_rtx_CONCAT (GET_MODE (mo->u.loc), loc, mo->u.loc);
+ if (mo.u.loc != oloc)
+ loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
/* The loc of a MO_VAL_SET may have various forms:
*/
- mo->u.loc = loc;
+ if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
+ {
+ reverse = reverse_op (v->val_rtx, expr);
+ if (reverse)
+ {
+ loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, reverse);
+ VAL_EXPR_HAS_REVERSE (loc) = 1;
+ }
+ }
+
+ mo.u.loc = loc;
if (track_p)
VAL_HOLDS_TRACK_EXPR (loc) = 1;
if (preserve)
{
VAL_NEEDS_RESOLUTION (loc) = resolve;
- cselib_preserve_value (v);
+ preserve_value (v);
}
- if (mo->type == MO_CLOBBER)
+ if (mo.type == MO_CLOBBER)
VAL_EXPR_IS_CLOBBERED (loc) = 1;
- if (mo->type == MO_COPY)
+ if (mo.type == MO_COPY)
VAL_EXPR_IS_COPIED (loc) = 1;
- mo->type = MO_VAL_SET;
+ mo.type = MO_VAL_SET;
log_and_return:
if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
+ log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
}
/* Callback for cselib_record_sets_hook, that records as micro
basic_block bb = BLOCK_FOR_INSN (insn);
int n1, n2;
struct count_use_info cui;
+ micro_operation *mos;
cselib_hook_called = true;
cui.sets = sets;
cui.n_sets = n_sets;
- n1 = VTI (bb)->n_mos;
+ n1 = VEC_length (micro_operation, VTI (bb)->mos);
cui.store_p = false;
note_uses (&PATTERN (insn), add_uses_1, &cui);
- n2 = VTI (bb)->n_mos - 1;
+ n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
+ mos = VEC_address (micro_operation, VTI (bb)->mos);
/* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
MO_VAL_LOC last. */
while (n1 < n2)
{
- while (n1 < n2 && VTI (bb)->mos[n1].type == MO_USE)
+ while (n1 < n2 && mos[n1].type == MO_USE)
n1++;
- while (n1 < n2 && VTI (bb)->mos[n2].type != MO_USE)
+ while (n1 < n2 && mos[n2].type != MO_USE)
n2--;
if (n1 < n2)
{
micro_operation sw;
- sw = VTI (bb)->mos[n1];
- VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
- VTI (bb)->mos[n2] = sw;
+ sw = mos[n1];
+ mos[n1] = mos[n2];
+ mos[n2] = sw;
}
}
- n2 = VTI (bb)->n_mos - 1;
-
+ n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
while (n1 < n2)
{
- while (n1 < n2 && VTI (bb)->mos[n1].type != MO_VAL_LOC)
+ while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
n1++;
- while (n1 < n2 && VTI (bb)->mos[n2].type == MO_VAL_LOC)
+ while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
n2--;
if (n1 < n2)
{
micro_operation sw;
- sw = VTI (bb)->mos[n1];
- VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
- VTI (bb)->mos[n2] = sw;
+ sw = mos[n1];
+ mos[n1] = mos[n2];
+ mos[n2] = sw;
}
}
if (CALL_P (insn))
{
- micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
+ micro_operation mo;
- mo->type = MO_CALL;
- mo->insn = insn;
+ mo.type = MO_CALL;
+ mo.insn = insn;
+ mo.u.loc = NULL_RTX;
if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (PATTERN (insn), bb, insn, mo->type, dump_file);
+ log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
}
- n1 = VTI (bb)->n_mos;
+ n1 = VEC_length (micro_operation, VTI (bb)->mos);
/* This will record NEXT_INSN (insn), such that we can
insert notes before it without worrying about any
notes that MO_USEs might emit after the insn. */
cui.store_p = true;
note_stores (PATTERN (insn), add_stores, &cui);
- n2 = VTI (bb)->n_mos - 1;
+ n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
+ mos = VEC_address (micro_operation, VTI (bb)->mos);
+
+ /* Order the MO_VAL_USEs first (note_stores does nothing
+ on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
+ insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
+ while (n1 < n2)
+ {
+ while (n1 < n2 && mos[n1].type == MO_VAL_USE)
+ n1++;
+ while (n1 < n2 && mos[n2].type != MO_VAL_USE)
+ n2--;
+ if (n1 < n2)
+ {
+ micro_operation sw;
+
+ sw = mos[n1];
+ mos[n1] = mos[n2];
+ mos[n2] = sw;
+ }
+ }
- /* Order the MO_CLOBBERs to be before MO_SETs. */
+ n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
while (n1 < n2)
{
- while (n1 < n2 && VTI (bb)->mos[n1].type == MO_CLOBBER)
+ while (n1 < n2 && mos[n1].type == MO_CLOBBER)
n1++;
- while (n1 < n2 && VTI (bb)->mos[n2].type != MO_CLOBBER)
+ while (n1 < n2 && mos[n2].type != MO_CLOBBER)
n2--;
if (n1 < n2)
{
micro_operation sw;
- sw = VTI (bb)->mos[n1];
- VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
- VTI (bb)->mos[n2] = sw;
+ sw = mos[n1];
+ mos[n1] = mos[n2];
+ mos[n2] = sw;
}
}
}
static bool
compute_bb_dataflow (basic_block bb)
{
- int i, n;
+ unsigned int i;
+ micro_operation *mo;
bool changed;
dataflow_set old_out;
dataflow_set *in = &VTI (bb)->in;
dataflow_set_copy (&old_out, out);
dataflow_set_copy (out, in);
- n = VTI (bb)->n_mos;
- for (i = 0; i < n; i++)
+ for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
{
- rtx insn = VTI (bb)->mos[i].insn;
+ rtx insn = mo->insn;
- switch (VTI (bb)->mos[i].type)
+ switch (mo->type)
{
case MO_CALL:
dataflow_set_clear_at_call (out);
case MO_USE:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
if (REG_P (loc))
var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
case MO_VAL_LOC:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
rtx val, vloc;
tree var;
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
INSERT);
}
+ else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
+ set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
+ dv_from_decl (var), 0,
+ VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
+ INSERT);
}
break;
case MO_VAL_USE:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
rtx val, vloc, uloc;
vloc = uloc = XEXP (loc, 1);
if (VAL_NEEDS_RESOLUTION (loc))
val_resolve (out, val, vloc, insn);
+ else
+ val_store (out, val, uloc, insn, false);
if (VAL_HOLDS_TRACK_EXPR (loc))
{
case MO_VAL_SET:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
- rtx val, vloc, uloc;
+ rtx loc = mo->u.loc;
+ rtx val, vloc, uloc, reverse = NULL_RTX;
- vloc = uloc = XEXP (loc, 1);
- val = XEXP (loc, 0);
+ vloc = loc;
+ if (VAL_EXPR_HAS_REVERSE (loc))
+ {
+ reverse = XEXP (loc, 1);
+ vloc = XEXP (loc, 0);
+ }
+ uloc = XEXP (vloc, 1);
+ val = XEXP (vloc, 0);
+ vloc = uloc;
if (GET_CODE (val) == CONCAT)
{
else if (REG_P (uloc))
var_regno_delete (out, REGNO (uloc));
- val_store (out, val, vloc, insn);
+ val_store (out, val, vloc, insn, true);
+
+ if (reverse)
+ val_store (out, XEXP (reverse, 0), XEXP (reverse, 1),
+ insn, false);
}
break;
case MO_SET:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
rtx set_src = NULL;
if (GET_CODE (loc) == SET)
case MO_COPY:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
enum var_init_status src_status;
rtx set_src = NULL;
case MO_USE_NO_VAR:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
if (REG_P (loc))
var_reg_delete (out, loc, false);
case MO_CLOBBER:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
if (REG_P (loc))
var_reg_delete (out, loc, true);
break;
case MO_ADJUST:
- out->stack_adjust += VTI (bb)->mos[i].u.adjust;
+ out->stack_adjust += mo->u.adjust;
break;
}
}
/* Find the locations of variables in the whole function. */
-static void
+static bool
vt_find_locations (void)
{
fibheap_t worklist, pending, fibheap_swap;
int *rc_order;
int i;
int htabsz = 0;
+ int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
+ bool success = true;
/* Compute reverse completion order of depth first search of the CFG
so that the data-flow runs faster. */
fibheap_insert (pending, bb_order[bb->index], bb);
sbitmap_ones (in_pending);
- while (!fibheap_empty (pending))
+ while (success && !fibheap_empty (pending))
{
fibheap_swap = pending;
pending = worklist;
SET_BIT (visited, bb->index);
- if (dump_file && VTI (bb)->in.vars)
+ if (VTI (bb)->in.vars)
{
htabsz
- -= htab_size (shared_hash_htab (VTI (bb)->in.vars))
- + htab_size (shared_hash_htab (VTI (bb)->out.vars));
+ -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
+ + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
oldinsz
= htab_elements (shared_hash_htab (VTI (bb)->in.vars));
oldoutsz
}
changed = compute_bb_dataflow (bb);
- if (dump_file)
- htabsz += htab_size (shared_hash_htab (VTI (bb)->in.vars))
- + htab_size (shared_hash_htab (VTI (bb)->out.vars));
+ htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
+ + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
+
+ if (htabmax && htabsz > htabmax)
+ {
+ if (MAY_HAVE_DEBUG_INSNS)
+ inform (DECL_SOURCE_LOCATION (cfun->decl),
+ "variable tracking size limit exceeded with "
+ "-fvar-tracking-assignments, retrying without");
+ else
+ inform (DECL_SOURCE_LOCATION (cfun->decl),
+ "variable tracking size limit exceeded");
+ success = false;
+ break;
+ }
if (changed)
{
}
}
- if (MAY_HAVE_DEBUG_INSNS)
+ if (success && MAY_HAVE_DEBUG_INSNS)
FOR_EACH_BB (bb)
gcc_assert (VTI (bb)->flooded);
- VEC_free (rtx, heap, values_to_unmark);
free (bb_order);
fibheap_delete (worklist);
fibheap_delete (pending);
sbitmap_free (visited);
sbitmap_free (in_worklist);
sbitmap_free (in_pending);
+
+ return success;
}
/* Print the content of the LIST to dump file. */
const_tree decl = dv_as_decl (var->dv);
if (DECL_NAME (decl))
- fprintf (dump_file, " name: %s",
- IDENTIFIER_POINTER (DECL_NAME (decl)));
+ {
+ fprintf (dump_file, " name: %s",
+ IDENTIFIER_POINTER (DECL_NAME (decl)));
+ if (dump_flags & TDF_UID)
+ fprintf (dump_file, "D.%u", DECL_UID (decl));
+ }
+ else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
+ fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
else
fprintf (dump_file, " name: D.%u", DECL_UID (decl));
- if (dump_flags & TDF_UID)
- fprintf (dump_file, " D.%u\n", DECL_UID (decl));
- else
- fprintf (dump_file, "\n");
+ fprintf (dump_file, "\n");
}
else
{
if (emit_notes)
{
void **slot;
+ bool old_cur_loc_changed = false;
/* Remember this decl or VALUE has been added to changed_variables. */
set_dv_changed (var->dv, true);
var->dv,
hash, INSERT);
+ if (*slot)
+ {
+ variable old_var = (variable) *slot;
+ gcc_assert (old_var->in_changed_variables);
+ old_var->in_changed_variables = false;
+ old_cur_loc_changed = old_var->cur_loc_changed;
+ variable_htab_free (*slot);
+ }
if (set && var->n_var_parts == 0)
{
variable empty_var;
empty_var->dv = var->dv;
empty_var->refcount = 1;
empty_var->n_var_parts = 0;
+ empty_var->cur_loc_changed = true;
+ empty_var->in_changed_variables = true;
*slot = empty_var;
goto drop_var;
}
else
{
var->refcount++;
+ var->in_changed_variables = true;
+	  /* If, while processing one micro operation, a variable is
+	     deleted and then re-added, we need to assume it has changed.  */
+ if (old_cur_loc_changed)
+ var->cur_loc_changed = true;
*slot = var;
}
}
var->dv = dv;
var->refcount = 1;
var->n_var_parts = 1;
+ var->cur_loc_changed = false;
+ var->in_changed_variables = false;
var->var_part[0].offset = offset;
var->var_part[0].loc_chain = NULL;
var->var_part[0].cur_loc = NULL;
*slot = var;
pos = 0;
nextp = &var->var_part[0].loc_chain;
- if (emit_notes && dv_is_value_p (dv))
- add_cselib_value_chains (dv);
}
else if (onepart)
{
if (r == 0)
return slot;
- if (var->refcount > 1 || shared_hash_shared (set->vars))
+ if (shared_var_p (var, set->vars))
{
slot = unshare_variable (set, slot, var, initialized);
var = (variable)*slot;
else
{
/* We have to make a copy of a shared variable. */
- if (var->refcount > 1 || shared_hash_shared (set->vars))
+ if (shared_var_p (var, set->vars))
{
slot = unshare_variable (set, slot, var, initialized);
var = (variable)*slot;
/* We have not found the location part, new one will be created. */
/* We have to make a copy of the shared variable. */
- if (var->refcount > 1 || shared_hash_shared (set->vars))
+ if (shared_var_p (var, set->vars))
{
slot = unshare_variable (set, slot, var, initialized);
var = (variable)*slot;
initialized = node->init;
if (node->set_src != NULL && set_src == NULL)
set_src = node->set_src;
+ if (var->var_part[pos].cur_loc == node->loc)
+ {
+ var->var_part[pos].cur_loc = NULL;
+ var->cur_loc_changed = true;
+ }
pool_free (loc_chain_pool, node);
*nextp = next;
break;
/* If no location was emitted do so. */
if (var->var_part[pos].cur_loc == NULL)
- {
- var->var_part[pos].cur_loc = loc;
- variable_was_changed (var, set);
- }
+ variable_was_changed (var, set);
return slot;
}
location_chain *nextp;
bool changed;
- if (var->refcount > 1 || shared_hash_shared (set->vars))
+ if (shared_var_p (var, set->vars))
{
/* If the variable contains the location part we have to
make a copy of the variable. */
}
/* Delete the location part. */
+ changed = false;
nextp = &var->var_part[pos].loc_chain;
for (node = *nextp; node; node = next)
{
{
if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
remove_value_chains (var->dv, node->loc);
+ /* If we have deleted the location which was last emitted
+ we have to emit new location so add the variable to set
+ of changed variables. */
+ if (var->var_part[pos].cur_loc == node->loc)
+ {
+ changed = true;
+ var->var_part[pos].cur_loc = NULL;
+ var->cur_loc_changed = true;
+ }
pool_free (loc_chain_pool, node);
*nextp = next;
break;
nextp = &node->next;
}
- /* If we have deleted the location which was last emitted
- we have to emit new location so add the variable to set
- of changed variables. */
- if (var->var_part[pos].cur_loc
- && ((REG_P (loc)
- && REG_P (var->var_part[pos].cur_loc)
- && REGNO (loc) == REGNO (var->var_part[pos].cur_loc))
- || rtx_equal_p (loc, var->var_part[pos].cur_loc)))
- {
- changed = true;
- if (var->var_part[pos].loc_chain)
- var->var_part[pos].cur_loc = var->var_part[pos].loc_chain->loc;
- }
- else
- changed = false;
-
if (var->var_part[pos].loc_chain == NULL)
{
- gcc_assert (changed);
+ changed = true;
var->n_var_parts--;
- if (emit_notes && var->n_var_parts == 0 && dv_is_value_p (var->dv))
- remove_cselib_value_chains (var->dv);
+ if (emit_notes)
+ var->cur_loc_changed = true;
while (pos < var->n_var_parts)
{
var->var_part[pos] = var->var_part[pos + 1];
slot = delete_slot_part (set, loc, slot, offset);
}
+/* Structure for passing additional parameters to the function
+   vt_expand_loc_callback.  */
+struct expand_loc_callback_data
+{
+ /* The variables and values active at this point. */
+ htab_t vars;
+
+  /* True in vt_expand_loc_dummy calls; in that case no rtl should be
+     allocated.  The callback should return non-NULL if vt_expand_loc
+     would return non-NULL in that case, NULL otherwise.
+     cur_loc_changed should be computed and cur_loc recomputed when
+     possible (but just once per emit_notes_for_changes call).  */
+ bool dummy;
+
+ /* True if expansion of subexpressions had to recompute some
+ VALUE/DEBUG_EXPR_DECL's cur_loc or used a VALUE/DEBUG_EXPR_DECL
+ whose cur_loc has been already recomputed during current
+ emit_notes_for_changes call. */
+ bool cur_loc_changed;
+};
+
/* Callback for cselib_expand_value, that looks for expressions
holding the value in the var-tracking hash tables. Return X for
standard processing, anything else is to be used as-is. */
static rtx
vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
{
- htab_t vars = (htab_t)data;
+ struct expand_loc_callback_data *elcd
+ = (struct expand_loc_callback_data *) data;
+ bool dummy = elcd->dummy;
+ bool cur_loc_changed = elcd->cur_loc_changed;
decl_or_value dv;
variable var;
location_chain loc;
switch (GET_CODE (x))
{
case SUBREG:
- subreg = SUBREG_REG (x);
-
- if (GET_CODE (SUBREG_REG (x)) != VALUE)
- return x;
+ if (dummy)
+ {
+ if (cselib_dummy_expand_value_rtx_cb (SUBREG_REG (x), regs,
+ max_depth - 1,
+ vt_expand_loc_callback, data))
+ return pc_rtx;
+ else
+ return NULL;
+ }
subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
max_depth - 1,
/* Invalid SUBREGs are ok in debug info. ??? We could try
alternate expansions for the VALUE as well. */
- if (!result && (REG_P (subreg) || MEM_P (subreg)))
+ if (!result)
result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
return result;
if (VALUE_RECURSED_INTO (x))
return NULL;
- var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
+ var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
if (!var)
- return xret;
+ {
+ if (dummy && dv_changed_p (dv))
+ elcd->cur_loc_changed = true;
+ return xret;
+ }
if (var->n_var_parts == 0)
- return xret;
+ {
+ if (dummy)
+ elcd->cur_loc_changed = true;
+ return xret;
+ }
gcc_assert (var->n_var_parts == 1);
VALUE_RECURSED_INTO (x) = true;
result = NULL;
- for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
+ if (var->var_part[0].cur_loc)
{
- result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
- vt_expand_loc_callback, vars);
+ if (dummy)
+ {
+ if (cselib_dummy_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
+ max_depth,
+ vt_expand_loc_callback, data))
+ result = pc_rtx;
+ }
+ else
+ result = cselib_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
+ max_depth,
+ vt_expand_loc_callback, data);
if (result)
- break;
+ set_dv_changed (dv, false);
+ }
+ if (!result && dv_changed_p (dv))
+ {
+ set_dv_changed (dv, false);
+ for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
+ if (loc->loc == var->var_part[0].cur_loc)
+ continue;
+ else if (dummy)
+ {
+ elcd->cur_loc_changed = cur_loc_changed;
+ if (cselib_dummy_expand_value_rtx_cb (loc->loc, regs, max_depth,
+ vt_expand_loc_callback,
+ data))
+ {
+ result = pc_rtx;
+ break;
+ }
+ }
+ else
+ {
+ result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
+ vt_expand_loc_callback, data);
+ if (result)
+ break;
+ }
+ if (dummy && (result || var->var_part[0].cur_loc))
+ var->cur_loc_changed = true;
+ var->var_part[0].cur_loc = loc ? loc->loc : NULL_RTX;
+ }
+ if (dummy)
+ {
+ if (var->cur_loc_changed)
+ elcd->cur_loc_changed = true;
+ else if (!result && var->var_part[0].cur_loc == NULL_RTX)
+ elcd->cur_loc_changed = cur_loc_changed;
}
VALUE_RECURSED_INTO (x) = false;
static rtx
vt_expand_loc (rtx loc, htab_t vars)
{
+ struct expand_loc_callback_data data;
+
if (!MAY_HAVE_DEBUG_INSNS)
return loc;
+ data.vars = vars;
+ data.dummy = false;
+ data.cur_loc_changed = false;
loc = cselib_expand_value_rtx_cb (loc, scratch_regs, 5,
- vt_expand_loc_callback, vars);
+ vt_expand_loc_callback, &data);
if (loc && MEM_P (loc))
loc = targetm.delegitimize_address (loc);
-
return loc;
}
+/* Like vt_expand_loc, but only return true/false (i.e. whether
+   vt_expand_loc would succeed or not), without actually allocating
+   new rtxes.  */
+
+static bool
+vt_expand_loc_dummy (rtx loc, htab_t vars, bool *pcur_loc_changed)
+{
+ struct expand_loc_callback_data data;
+ bool ret;
+
+ gcc_assert (MAY_HAVE_DEBUG_INSNS);
+ data.vars = vars;
+ data.dummy = true;
+ data.cur_loc_changed = false;
+ ret = cselib_dummy_expand_value_rtx_cb (loc, scratch_regs, 5,
+ vt_expand_loc_callback, &data);
+ *pcur_loc_changed = data.cur_loc_changed;
+ return ret;
+}
+
+#ifdef ENABLE_RTL_CHECKING
+/* Used to verify that cur_loc_changed updating is safe. */
+static struct pointer_map_t *emitted_notes;
+#endif
+
/* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
additional parameters: WHERE specifies whether the note shall be emitted
before or after instruction INSN. */
rtx insn = ((emit_note_data *)data)->insn;
enum emit_note_where where = ((emit_note_data *)data)->where;
htab_t vars = ((emit_note_data *)data)->vars;
- rtx note;
+ rtx note, note_vl;
int i, j, n_var_parts;
bool complete;
enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
HOST_WIDE_INT offsets[MAX_VAR_PARTS];
rtx loc[MAX_VAR_PARTS];
tree decl;
+ location_chain lc;
if (dv_is_value_p (var->dv))
- goto clear;
+ goto value_or_debug_decl;
decl = dv_as_decl (var->dv);
if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
- goto clear;
-
- gcc_assert (decl);
+ goto value_or_debug_decl;
complete = true;
last_limit = 0;
n_var_parts = 0;
+ if (!MAY_HAVE_DEBUG_INSNS)
+ {
+ for (i = 0; i < var->n_var_parts; i++)
+ if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
+ {
+ var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
+ var->cur_loc_changed = true;
+ }
+ if (var->n_var_parts == 0)
+ var->cur_loc_changed = true;
+ }
+#ifndef ENABLE_RTL_CHECKING
+ if (!var->cur_loc_changed)
+ goto clear;
+#endif
for (i = 0; i < var->n_var_parts; i++)
{
enum machine_mode mode, wider_mode;
else if (last_limit > var->var_part[i].offset)
continue;
offsets[n_var_parts] = var->var_part[i].offset;
- loc2 = vt_expand_loc (var->var_part[i].loc_chain->loc, vars);
+ if (!var->var_part[i].cur_loc)
+ {
+ complete = false;
+ continue;
+ }
+ loc2 = vt_expand_loc (var->var_part[i].cur_loc, vars);
if (!loc2)
{
complete = false;
continue;
}
loc[n_var_parts] = loc2;
- mode = GET_MODE (var->var_part[i].loc_chain->loc);
- initialized = var->var_part[i].loc_chain->init;
+ mode = GET_MODE (var->var_part[i].cur_loc);
+ if (mode == VOIDmode && dv_onepart_p (var->dv))
+ mode = DECL_MODE (decl);
+ for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
+ if (var->var_part[i].cur_loc == lc->loc)
+ {
+ initialized = lc->init;
+ break;
+ }
+ gcc_assert (lc);
last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
/* Attempt to merge adjacent registers or memory. */
break;
if (j < var->n_var_parts
&& wider_mode != VOIDmode
- && mode == GET_MODE (var->var_part[j].loc_chain->loc)
+ && var->var_part[j].cur_loc
+ && mode == GET_MODE (var->var_part[j].cur_loc)
&& (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
- && (loc2 = vt_expand_loc (var->var_part[j].loc_chain->loc, vars))
- && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2)
- && last_limit == var->var_part[j].offset)
+ && last_limit == var->var_part[j].offset
+ && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
+ && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
{
rtx new_loc = NULL;
if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
complete = false;
- if (where != EMIT_NOTE_BEFORE_INSN)
- {
- note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
- if (where == EMIT_NOTE_AFTER_CALL_INSN)
- NOTE_DURING_CALL_P (note) = true;
- }
- else
- note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
-
if (! flag_var_tracking_uninit)
initialized = VAR_INIT_STATUS_INITIALIZED;
+ note_vl = NULL_RTX;
if (!complete)
- {
- NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
- NULL_RTX, (int) initialized);
- }
+ note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
+ (int) initialized);
else if (n_var_parts == 1)
{
- rtx expr_list
- = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
+ rtx expr_list;
+
+ if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
+ expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
+ else
+ expr_list = loc[0];
- NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
- expr_list,
- (int) initialized);
+ note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
+ (int) initialized);
}
else if (n_var_parts)
{
parallel = gen_rtx_PARALLEL (VOIDmode,
gen_rtvec_v (n_var_parts, loc));
- NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
- parallel,
- (int) initialized);
+ note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
+ parallel, (int) initialized);
+ }
+
+#ifdef ENABLE_RTL_CHECKING
+ if (note_vl)
+ {
+ void **note_slot = pointer_map_insert (emitted_notes, decl);
+ rtx pnote = (rtx) *note_slot;
+ if (!var->cur_loc_changed && (pnote || PAT_VAR_LOCATION_LOC (note_vl)))
+ {
+ gcc_assert (pnote);
+ gcc_assert (rtx_equal_p (PAT_VAR_LOCATION_LOC (pnote),
+ PAT_VAR_LOCATION_LOC (note_vl)));
+ }
+ *note_slot = (void *) note_vl;
+ }
+ if (!var->cur_loc_changed)
+ goto clear;
+#endif
+
+ if (where != EMIT_NOTE_BEFORE_INSN)
+ {
+ note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
+ if (where == EMIT_NOTE_AFTER_CALL_INSN)
+ NOTE_DURING_CALL_P (note) = true;
}
+ else
+ note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
+ NOTE_VAR_LOCATION (note) = note_vl;
clear:
set_dv_changed (var->dv, false);
+ var->cur_loc_changed = false;
+ gcc_assert (var->in_changed_variables);
+ var->in_changed_variables = false;
htab_clear_slot (changed_variables, varp);
/* Continue traversing the hash table. */
return 1;
+
+ value_or_debug_decl:
+ if (dv_changed_p (var->dv) && var->n_var_parts)
+ {
+ location_chain lc;
+ bool cur_loc_changed;
+
+ if (var->var_part[0].cur_loc
+ && vt_expand_loc_dummy (var->var_part[0].cur_loc, vars,
+ &cur_loc_changed))
+ goto clear;
+ for (lc = var->var_part[0].loc_chain; lc; lc = lc->next)
+ if (lc->loc != var->var_part[0].cur_loc
+ && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
+ break;
+ var->var_part[0].cur_loc = lc ? lc->loc : NULL_RTX;
+ }
+ goto clear;
}
DEF_VEC_P (variable);
static VEC (variable, heap) *changed_variables_stack;
+/* VALUEs with no variables that need set_dv_changed (val, false)
+ called before check_changed_vars_3. */
+
+static VEC (rtx, heap) *changed_values_stack;
+
+/* Helper function for check_changed_vars_1 and check_changed_vars_2. */
+
+static void
+check_changed_vars_0 (decl_or_value dv, htab_t htab)
+{
+ value_chain vc
+ = (value_chain) htab_find_with_hash (value_chains, dv, dv_htab_hash (dv));
+
+ if (vc == NULL)
+ return;
+ for (vc = vc->next; vc; vc = vc->next)
+ if (!dv_changed_p (vc->dv))
+ {
+ variable vcvar
+ = (variable) htab_find_with_hash (htab, vc->dv,
+ dv_htab_hash (vc->dv));
+ if (vcvar)
+ {
+ set_dv_changed (vc->dv, true);
+ VEC_safe_push (variable, heap, changed_variables_stack, vcvar);
+ }
+ else if (dv_is_value_p (vc->dv))
+ {
+ set_dv_changed (vc->dv, true);
+ VEC_safe_push (rtx, heap, changed_values_stack,
+ dv_as_value (vc->dv));
+ check_changed_vars_0 (vc->dv, htab);
+ }
+ }
+}
+
/* Populate changed_variables_stack with variable_def pointers
that need variable_was_changed called on them. */
variable var = (variable) *slot;
htab_t htab = (htab_t) data;
- if (dv_is_value_p (var->dv))
- {
- value_chain vc
- = (value_chain) htab_find_with_hash (value_chains, var->dv,
- dv_htab_hash (var->dv));
-
- if (vc == NULL)
- return 1;
- for (vc = vc->next; vc; vc = vc->next)
- if (!dv_changed_p (vc->dv))
- {
- variable vcvar
- = (variable) htab_find_with_hash (htab, vc->dv,
- dv_htab_hash (vc->dv));
- if (vcvar)
- VEC_safe_push (variable, heap, changed_variables_stack,
- vcvar);
- }
- }
+ if (dv_is_value_p (var->dv)
+ || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
+ check_changed_vars_0 (var->dv, htab);
return 1;
}
check_changed_vars_2 (variable var, htab_t htab)
{
variable_was_changed (var, NULL);
- if (dv_is_value_p (var->dv))
- {
- value_chain vc
- = (value_chain) htab_find_with_hash (value_chains, var->dv,
- dv_htab_hash (var->dv));
+ if (dv_is_value_p (var->dv)
+ || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
+ check_changed_vars_0 (var->dv, htab);
+}
- if (vc == NULL)
- return;
- for (vc = vc->next; vc; vc = vc->next)
- if (!dv_changed_p (vc->dv))
- {
- variable vcvar
- = (variable) htab_find_with_hash (htab, vc->dv,
- dv_htab_hash (vc->dv));
- if (vcvar)
- check_changed_vars_2 (vcvar, htab);
- }
+/* For each changed decl (except DEBUG_EXPR_DECLs) recompute
+   cur_loc if needed (together with the cur_loc of every VALUE and
+   DEBUG_EXPR_DECL it depends on that is also in changed_variables),
+   and track whether cur_loc (or anything it uses to compute the
+   location) had to change during the current emit_notes_for_changes
+   call.  */
+
+static int
+check_changed_vars_3 (void **slot, void *data)
+{
+ variable var = (variable) *slot;
+ htab_t vars = (htab_t) data;
+ int i;
+ location_chain lc;
+ bool cur_loc_changed;
+
+ if (dv_is_value_p (var->dv)
+ || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
+ return 1;
+
+ for (i = 0; i < var->n_var_parts; i++)
+ {
+ if (var->var_part[i].cur_loc
+ && vt_expand_loc_dummy (var->var_part[i].cur_loc, vars,
+ &cur_loc_changed))
+ {
+ if (cur_loc_changed)
+ var->cur_loc_changed = true;
+ continue;
+ }
+ for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
+ if (lc->loc != var->var_part[i].cur_loc
+ && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
+ break;
+ if (lc || var->var_part[i].cur_loc)
+ var->cur_loc_changed = true;
+ var->var_part[i].cur_loc = lc ? lc->loc : NULL_RTX;
}
+ if (var->n_var_parts == 0)
+ var->cur_loc_changed = true;
+ return 1;
}
/* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
while (VEC_length (variable, changed_variables_stack) > 0)
check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
htab);
+ while (VEC_length (rtx, changed_values_stack) > 0)
+ set_dv_changed (dv_from_value (VEC_pop (rtx, changed_values_stack)),
+ false);
+ htab_traverse (changed_variables, check_changed_vars_3, htab);
}
data.insn = insn;
empty_var->dv = old_var->dv;
empty_var->refcount = 0;
empty_var->n_var_parts = 0;
+ empty_var->cur_loc_changed = false;
+ empty_var->in_changed_variables = false;
if (dv_onepart_p (old_var->dv))
{
location_chain lc;
gcc_assert (old_var->n_var_parts == 1);
for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
remove_value_chains (old_var->dv, lc->loc);
- if (dv_is_value_p (old_var->dv))
- remove_cselib_value_chains (old_var->dv);
}
variable_was_changed (empty_var, NULL);
+ /* Continue traversing the hash table. */
+ return 1;
}
- else if (variable_different_p (old_var, new_var, true))
+ if (variable_different_p (old_var, new_var))
{
if (dv_onepart_p (old_var->dv))
{
}
variable_was_changed (new_var, NULL);
}
+ /* Update cur_loc. */
+ if (old_var != new_var)
+ {
+ int i;
+ for (i = 0; i < new_var->n_var_parts; i++)
+ {
+ new_var->var_part[i].cur_loc = NULL;
+ if (old_var->n_var_parts != new_var->n_var_parts
+ || old_var->var_part[i].offset != new_var->var_part[i].offset)
+ new_var->cur_loc_changed = true;
+ else if (old_var->var_part[i].cur_loc != NULL)
+ {
+ location_chain lc;
+ rtx cur_loc = old_var->var_part[i].cur_loc;
+
+ for (lc = new_var->var_part[i].loc_chain; lc; lc = lc->next)
+ if (lc->loc == cur_loc
+ || rtx_equal_p (cur_loc, lc->loc))
+ {
+ new_var->var_part[i].cur_loc = lc->loc;
+ break;
+ }
+ if (lc == NULL)
+ new_var->cur_loc_changed = true;
+ }
+ }
+ }
/* Continue traversing the hash table. */
return 1;
dv_htab_hash (new_var->dv));
if (!old_var)
{
+ int i;
/* Variable has appeared. */
if (dv_onepart_p (new_var->dv))
{
gcc_assert (new_var->n_var_parts == 1);
for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
add_value_chains (new_var->dv, lc->loc);
- if (dv_is_value_p (new_var->dv))
- add_cselib_value_chains (new_var->dv);
}
+ for (i = 0; i < new_var->n_var_parts; i++)
+ new_var->var_part[i].cur_loc = NULL;
variable_was_changed (new_var, NULL);
}
static void
emit_notes_in_bb (basic_block bb, dataflow_set *set)
{
- int i;
+ unsigned int i;
+ micro_operation *mo;
dataflow_set_clear (set);
dataflow_set_copy (set, &VTI (bb)->in);
- for (i = 0; i < VTI (bb)->n_mos; i++)
+ for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
{
- rtx insn = VTI (bb)->mos[i].insn;
+ rtx insn = mo->insn;
- switch (VTI (bb)->mos[i].type)
+ switch (mo->type)
{
case MO_CALL:
dataflow_set_clear_at_call (set);
case MO_USE:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
if (REG_P (loc))
var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
case MO_VAL_LOC:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
rtx val, vloc;
tree var;
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
INSERT);
}
+ else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
+ set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
+ dv_from_decl (var), 0,
+ VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
+ INSERT);
emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
}
case MO_VAL_USE:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
rtx val, vloc, uloc;
vloc = uloc = XEXP (loc, 1);
if (VAL_NEEDS_RESOLUTION (loc))
val_resolve (set, val, vloc, insn);
+ else
+ val_store (set, val, uloc, insn, false);
if (VAL_HOLDS_TRACK_EXPR (loc))
{
case MO_VAL_SET:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
- rtx val, vloc, uloc;
+ rtx loc = mo->u.loc;
+ rtx val, vloc, uloc, reverse = NULL_RTX;
- vloc = uloc = XEXP (loc, 1);
- val = XEXP (loc, 0);
+ vloc = loc;
+ if (VAL_EXPR_HAS_REVERSE (loc))
+ {
+ reverse = XEXP (loc, 1);
+ vloc = XEXP (loc, 0);
+ }
+ uloc = XEXP (vloc, 1);
+ val = XEXP (vloc, 0);
+ vloc = uloc;
if (GET_CODE (val) == CONCAT)
{
else if (REG_P (uloc))
var_regno_delete (set, REGNO (uloc));
- val_store (set, val, vloc, insn);
+ val_store (set, val, vloc, insn, true);
+
+ if (reverse)
+ val_store (set, XEXP (reverse, 0), XEXP (reverse, 1),
+ insn, false);
emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
set->vars);
case MO_SET:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
rtx set_src = NULL;
if (GET_CODE (loc) == SET)
case MO_COPY:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
enum var_init_status src_status;
rtx set_src = NULL;
case MO_USE_NO_VAR:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
if (REG_P (loc))
var_reg_delete (set, loc, false);
case MO_CLOBBER:
{
- rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx loc = mo->u.loc;
if (REG_P (loc))
var_reg_delete (set, loc, true);
break;
case MO_ADJUST:
- set->stack_adjust += VTI (bb)->mos[i].u.adjust;
+ set->stack_adjust += mo->u.adjust;
break;
}
}
basic_block bb;
dataflow_set cur;
+#ifdef ENABLE_RTL_CHECKING
+ emitted_notes = pointer_map_create ();
+#endif
gcc_assert (!htab_elements (changed_variables));
/* Free memory occupied by the out hash tables, as they aren't used
emit_notes = true;
if (MAY_HAVE_DEBUG_INSNS)
- changed_variables_stack = VEC_alloc (variable, heap, 40);
+ {
+ unsigned int i;
+ rtx val;
+
+ for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
+ add_cselib_value_chains (dv_from_value (val));
+ changed_variables_stack = VEC_alloc (variable, heap, 40);
+ changed_values_stack = VEC_alloc (rtx, heap, 40);
+ }
dataflow_set_init (&cur);
emit_notes_for_differences_1,
shared_hash_htab (empty_shared_hash));
if (MAY_HAVE_DEBUG_INSNS)
- gcc_assert (htab_elements (value_chains) == 0);
+ {
+ unsigned int i;
+ rtx val;
+
+ for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
+ remove_cselib_value_chains (dv_from_value (val));
+ gcc_assert (htab_elements (value_chains) == 0);
+ }
#endif
dataflow_set_destroy (&cur);
if (MAY_HAVE_DEBUG_INSNS)
- VEC_free (variable, heap, changed_variables_stack);
+ {
+ VEC_free (variable, heap, changed_variables_stack);
+ VEC_free (rtx, heap, changed_values_stack);
+ }
+#ifdef ENABLE_RTL_CHECKING
+ pointer_map_destroy (emitted_notes);
+#endif
emit_notes = false;
}
cselib. */
if (val)
{
- cselib_preserve_value (val);
+ preserve_value (val);
set_variable_part (out, val->val_rtx, dv, offset,
VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
dv = dv_from_value (val->val_rtx);
if (MAY_HAVE_DEBUG_INSNS)
{
- cselib_preserve_only_values (true);
- cselib_reset_table_with_next_value (cselib_get_next_unknown_value ());
+ cselib_preserve_only_values ();
+ cselib_reset_table (cselib_get_next_uid ());
}
}
+/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
+
+static bool
+fp_setter (rtx insn)
+{
+ rtx pat = PATTERN (insn);
+ if (RTX_FRAME_RELATED_P (insn))
+ {
+ /* If a REG_FRAME_RELATED_EXPR note is present, it describes the
+ frame-related effect of the insn and takes precedence over the
+ raw pattern. */
+ rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
+ if (expr)
+ pat = XEXP (expr, 0);
+ }
+ if (GET_CODE (pat) == SET)
+ return SET_DEST (pat) == hard_frame_pointer_rtx;
+ else if (GET_CODE (pat) == PARALLEL)
+ {
+ int i;
+ /* Scan every element of the PARALLEL; any SET whose destination
+ is the hard frame pointer makes this an FP setter. */
+ for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
+ if (GET_CODE (XVECEXP (pat, 0, i)) == SET
+ && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
+ return true;
+ }
+ return false;
+}
+
+/* Initialize cfa_base_rtx, create a preserved VALUE for it and
+ ensure it isn't flushed during cselib_reset_table.
+ Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
+ has been eliminated. */
+
+static void
+vt_init_cfa_base (void)
+{
+ cselib_val *val;
+
+#ifdef FRAME_POINTER_CFA_OFFSET
+ cfa_base_rtx = frame_pointer_rtx;
+#else
+ cfa_base_rtx = arg_pointer_rtx;
+#endif
+ /* Give up unless the chosen base is a fixed register distinct from
+ the hard frame pointer; otherwise it cannot serve as a stable CFA
+ base. */
+ if (cfa_base_rtx == hard_frame_pointer_rtx
+ || !fixed_regs[REGNO (cfa_base_rtx)])
+ {
+ cfa_base_rtx = NULL_RTX;
+ return;
+ }
+ /* The cselib VALUE machinery below is only needed when debug insns
+ may be present. */
+ if (!MAY_HAVE_DEBUG_INSNS)
+ return;
+
+ /* Look up (creating if needed) the VALUE of the CFA base at the
+ start of the function, preserve it so cselib_reset_table keeps it,
+ and record it in the ENTRY block's out set. */
+ val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
+ get_insns ());
+ preserve_value (val);
+ cselib_preserve_cfa_base_value (val);
+ var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
+ VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
+ 0, NULL_RTX, INSERT);
+}
+
/* Allocate and initialize the data structures for variable tracking
and parse the RTL to get the micro operations. */
-static void
+static bool
vt_initialize (void)
{
- basic_block bb;
+ basic_block bb, prologue_bb = NULL;
+ HOST_WIDE_INT fp_cfa_offset = -1;
alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
+ attrs_pool = create_alloc_pool ("attrs_def pool",
+ sizeof (struct attrs_def), 1024);
+ var_pool = create_alloc_pool ("variable_def pool",
+ sizeof (struct variable_def)
+ + (MAX_VAR_PARTS - 1)
+ * sizeof (((variable)NULL)->var_part[0]), 64);
+ loc_chain_pool = create_alloc_pool ("location_chain_def pool",
+ sizeof (struct location_chain_def),
+ 1024);
+ shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
+ sizeof (struct shared_hash_def), 256);
+ empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
+ empty_shared_hash->refcount = 1;
+ empty_shared_hash->htab
+ = htab_create (1, variable_htab_hash, variable_htab_eq,
+ variable_htab_free);
+ changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
+ variable_htab_free);
+ if (MAY_HAVE_DEBUG_INSNS)
+ {
+ value_chain_pool = create_alloc_pool ("value_chain_def pool",
+ sizeof (struct value_chain_def),
+ 1024);
+ value_chains = htab_create (32, value_chain_htab_hash,
+ value_chain_htab_eq, NULL);
+ }
+
+ /* Init the IN and OUT sets. */
+ FOR_ALL_BB (bb)
+ {
+ VTI (bb)->visited = false;
+ VTI (bb)->flooded = false;
+ dataflow_set_init (&VTI (bb)->in);
+ dataflow_set_init (&VTI (bb)->out);
+ VTI (bb)->permp = NULL;
+ }
+
if (MAY_HAVE_DEBUG_INSNS)
{
- cselib_init (true);
+ cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
scratch_regs = BITMAP_ALLOC (NULL);
valvar_pool = create_alloc_pool ("small variable_def pool",
sizeof (struct variable_def), 256);
+ preserved_values = VEC_alloc (rtx, heap, 256);
}
else
{
valvar_pool = NULL;
}
- FOR_EACH_BB (bb)
+ if (!frame_pointer_needed)
{
- rtx insn;
- HOST_WIDE_INT pre, post = 0;
- int count;
- unsigned int next_value_before = cselib_get_next_unknown_value ();
- unsigned int next_value_after = next_value_before;
+ rtx reg, elim;
- if (MAY_HAVE_DEBUG_INSNS)
+ if (!vt_stack_adjustments ())
+ return false;
+
+#ifdef FRAME_POINTER_CFA_OFFSET
+ reg = frame_pointer_rtx;
+#else
+ reg = arg_pointer_rtx;
+#endif
+ elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
+ if (elim != reg)
{
- cselib_record_sets_hook = count_with_sets;
- if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, "first value: %i\n",
- cselib_get_next_unknown_value ());
+ if (GET_CODE (elim) == PLUS)
+ elim = XEXP (elim, 0);
+ if (elim == stack_pointer_rtx)
+ vt_init_cfa_base ();
}
+ }
+ else if (!crtl->stack_realign_tried)
+ {
+ rtx reg, elim;
- /* Count the number of micro operations. */
- VTI (bb)->n_mos = 0;
- for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
- insn = NEXT_INSN (insn))
+#ifdef FRAME_POINTER_CFA_OFFSET
+ reg = frame_pointer_rtx;
+ fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
+#else
+ reg = arg_pointer_rtx;
+ fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
+#endif
+ elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
+ if (elim != reg)
{
- if (INSN_P (insn))
+ if (GET_CODE (elim) == PLUS)
{
- if (!frame_pointer_needed)
- {
- insn_stack_adjust_offset_pre_post (insn, &pre, &post);
- if (pre)
- {
- VTI (bb)->n_mos++;
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (GEN_INT (pre), bb, insn,
- MO_ADJUST, dump_file);
- }
- if (post)
- {
- VTI (bb)->n_mos++;
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (GEN_INT (post), bb, insn,
- MO_ADJUST, dump_file);
- }
- }
- cselib_hook_called = false;
- if (MAY_HAVE_DEBUG_INSNS)
- {
- cselib_process_insn (insn);
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- print_rtl_single (dump_file, insn);
- dump_cselib_table (dump_file);
- }
- }
- if (!cselib_hook_called)
- count_with_sets (insn, 0, 0);
- if (CALL_P (insn))
- {
- VTI (bb)->n_mos++;
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (PATTERN (insn), bb, insn,
- MO_CALL, dump_file);
- }
+ fp_cfa_offset -= INTVAL (XEXP (elim, 1));
+ elim = XEXP (elim, 0);
}
+ if (elim != hard_frame_pointer_rtx)
+ fp_cfa_offset = -1;
+ else
+ prologue_bb = single_succ (ENTRY_BLOCK_PTR);
}
+ }
- count = VTI (bb)->n_mos;
+ hard_frame_pointer_adjustment = -1;
+
+ FOR_EACH_BB (bb)
+ {
+ rtx insn;
+ HOST_WIDE_INT pre, post = 0;
+ basic_block first_bb, last_bb;
if (MAY_HAVE_DEBUG_INSNS)
{
- cselib_preserve_only_values (false);
- next_value_after = cselib_get_next_unknown_value ();
- cselib_reset_table_with_next_value (next_value_before);
cselib_record_sets_hook = add_with_sets;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "first value: %i\n",
- cselib_get_next_unknown_value ());
+ cselib_get_next_uid ());
}
- /* Add the micro-operations to the array. */
- VTI (bb)->mos = XNEWVEC (micro_operation, VTI (bb)->n_mos);
- VTI (bb)->n_mos = 0;
- for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
- insn = NEXT_INSN (insn))
+ first_bb = bb;
+ for (;;)
{
- if (INSN_P (insn))
+ edge e;
+ if (bb->next_bb == EXIT_BLOCK_PTR
+ || ! single_pred_p (bb->next_bb))
+ break;
+ e = find_edge (bb, bb->next_bb);
+ if (! e || (e->flags & EDGE_FALLTHRU) == 0)
+ break;
+ bb = bb->next_bb;
+ }
+ last_bb = bb;
+
+ /* Add the micro-operations to the vector. */
+ FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
+ {
+ HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
+ VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
+ for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
+ insn = NEXT_INSN (insn))
{
- if (!frame_pointer_needed)
+ if (INSN_P (insn))
{
- insn_stack_adjust_offset_pre_post (insn, &pre, &post);
- if (pre)
+ if (!frame_pointer_needed)
{
- micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
+ insn_stack_adjust_offset_pre_post (insn, &pre, &post);
+ if (pre)
+ {
+ micro_operation mo;
+ mo.type = MO_ADJUST;
+ mo.u.adjust = pre;
+ mo.insn = insn;
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ log_op_type (PATTERN (insn), bb, insn,
+ MO_ADJUST, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
+ &mo);
+ VTI (bb)->out.stack_adjust += pre;
+ }
+ }
- mo->type = MO_ADJUST;
- mo->u.adjust = pre;
- mo->insn = insn;
+ cselib_hook_called = false;
+ adjust_insn (bb, insn);
+ if (MAY_HAVE_DEBUG_INSNS)
+ {
+ cselib_process_insn (insn);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ print_rtl_single (dump_file, insn);
+ dump_cselib_table (dump_file);
+ }
+ }
+ if (!cselib_hook_called)
+ add_with_sets (insn, 0, 0);
+ cancel_changes (0);
+ if (!frame_pointer_needed && post)
+ {
+ micro_operation mo;
+ mo.type = MO_ADJUST;
+ mo.u.adjust = post;
+ mo.insn = insn;
if (dump_file && (dump_flags & TDF_DETAILS))
log_op_type (PATTERN (insn), bb, insn,
MO_ADJUST, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
+ &mo);
+ VTI (bb)->out.stack_adjust += post;
}
- }
- cselib_hook_called = false;
- if (MAY_HAVE_DEBUG_INSNS)
- {
- cselib_process_insn (insn);
- if (dump_file && (dump_flags & TDF_DETAILS))
+ if (bb == prologue_bb
+ && hard_frame_pointer_adjustment == -1
+ && RTX_FRAME_RELATED_P (insn)
+ && fp_setter (insn))
{
- print_rtl_single (dump_file, insn);
- dump_cselib_table (dump_file);
+ vt_init_cfa_base ();
+ hard_frame_pointer_adjustment = fp_cfa_offset;
}
}
- if (!cselib_hook_called)
- add_with_sets (insn, 0, 0);
-
- if (!frame_pointer_needed && post)
- {
- micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
-
- mo->type = MO_ADJUST;
- mo->u.adjust = post;
- mo->insn = insn;
-
- if (dump_file && (dump_flags & TDF_DETAILS))
- log_op_type (PATTERN (insn), bb, insn,
- MO_ADJUST, dump_file);
- }
}
+ gcc_assert (offset == VTI (bb)->out.stack_adjust);
}
- gcc_assert (count == VTI (bb)->n_mos);
+
+ bb = last_bb;
+
if (MAY_HAVE_DEBUG_INSNS)
{
- cselib_preserve_only_values (true);
- gcc_assert (next_value_after == cselib_get_next_unknown_value ());
- cselib_reset_table_with_next_value (next_value_after);
+ cselib_preserve_only_values ();
+ cselib_reset_table (cselib_get_next_uid ());
cselib_record_sets_hook = NULL;
}
}
- attrs_pool = create_alloc_pool ("attrs_def pool",
- sizeof (struct attrs_def), 1024);
- var_pool = create_alloc_pool ("variable_def pool",
- sizeof (struct variable_def)
- + (MAX_VAR_PARTS - 1)
- * sizeof (((variable)NULL)->var_part[0]), 64);
- loc_chain_pool = create_alloc_pool ("location_chain_def pool",
- sizeof (struct location_chain_def),
- 1024);
- shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
- sizeof (struct shared_hash_def), 256);
- empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
- empty_shared_hash->refcount = 1;
- empty_shared_hash->htab
- = htab_create (1, variable_htab_hash, variable_htab_eq,
- variable_htab_free);
- changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
- variable_htab_free);
- if (MAY_HAVE_DEBUG_INSNS)
- {
- value_chain_pool = create_alloc_pool ("value_chain_def pool",
- sizeof (struct value_chain_def),
- 1024);
- value_chains = htab_create (32, value_chain_htab_hash,
- value_chain_htab_eq, NULL);
- }
-
- /* Init the IN and OUT sets. */
- FOR_ALL_BB (bb)
- {
- VTI (bb)->visited = false;
- VTI (bb)->flooded = false;
- dataflow_set_init (&VTI (bb)->in);
- dataflow_set_init (&VTI (bb)->out);
- VTI (bb)->permp = NULL;
- }
-
+ hard_frame_pointer_adjustment = -1;
VTI (ENTRY_BLOCK_PTR)->flooded = true;
vt_add_function_parameters ();
+ cfa_base_rtx = NULL_RTX;
+ return true;
}
/* Get rid of all debug insns from the insn stream. */
FOR_EACH_BB (bb)
{
- free (VTI (bb)->mos);
+ VEC_free (micro_operation, heap, VTI (bb)->mos);
}
FOR_ALL_BB (bb)
htab_delete (value_chains);
free_alloc_pool (value_chain_pool);
free_alloc_pool (valvar_pool);
+ VEC_free (rtx, heap, preserved_values);
cselib_finish ();
BITMAP_FREE (scratch_regs);
scratch_regs = NULL;
/* The entry point to variable tracking pass. */
-unsigned int
-variable_tracking_main (void)
+static inline unsigned int
+variable_tracking_main_1 (void)
{
+ bool success;
+
if (flag_var_tracking_assignments < 0)
{
delete_debug_insns ();
}
mark_dfs_back_edges ();
- vt_initialize ();
- if (!frame_pointer_needed)
+ if (!vt_initialize ())
{
- if (!vt_stack_adjustments ())
- {
- vt_finalize ();
- vt_debug_insns_local (true);
- return 0;
- }
+ vt_finalize ();
+ vt_debug_insns_local (true);
+ return 0;
+ }
+
+ success = vt_find_locations ();
+
+ if (!success && flag_var_tracking_assignments > 0)
+ {
+ vt_finalize ();
+
+ delete_debug_insns ();
+
+ /* This is later restored by our caller. */
+ flag_var_tracking_assignments = 0;
+
+ success = vt_initialize ();
+ gcc_assert (success);
+
+ success = vt_find_locations ();
}
- vt_find_locations ();
+ if (!success)
+ {
+ vt_finalize ();
+ vt_debug_insns_local (false);
+ return 0;
+ }
if (dump_file && (dump_flags & TDF_DETAILS))
{
vt_debug_insns_local (false);
return 0;
}
+
+/* Entry point of the variable tracking pass proper. Wraps
+ variable_tracking_main_1 and restores flag_var_tracking_assignments
+ afterwards, since the worker clears the flag when it retries without
+ assignment tracking (see the "later restored by our caller" note
+ there). */
+unsigned int
+variable_tracking_main (void)
+{
+ unsigned int ret;
+ int save = flag_var_tracking_assignments;
+
+ ret = variable_tracking_main_1 ();
+
+ /* Undo any temporary clearing done inside variable_tracking_main_1. */
+ flag_var_tracking_assignments = save;
+
+ return ret;
+}
\f
static bool
gate_handle_var_tracking (void)