#include "params.h"
#include "diagnostic.h"
#include "pointer-set.h"
+#include "recog.h"
/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
HOST_WIDE_INT *);
-static void bb_stack_adjust_offset (basic_block);
static bool vt_stack_adjustments (void);
-static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
+static rtx compute_cfa_pointer (HOST_WIDE_INT);
static hashval_t variable_htab_hash (const void *);
static int variable_htab_eq (const void *, const void *);
static void variable_htab_free (void *);
static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
-static void vt_initialize (void);
+static bool vt_initialize (void);
static void vt_finalize (void);
/* Given a SET, calculate the amount of stack adjustment it contains
}
}
-/* Compute stack adjustment in basic block BB. */
-
-static void
-bb_stack_adjust_offset (basic_block bb)
-{
- HOST_WIDE_INT offset;
- unsigned int i;
- micro_operation *mo;
-
- offset = VTI (bb)->in.stack_adjust;
- for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
- {
- if (mo->type == MO_ADJUST)
- offset += mo->u.adjust;
- else if (mo->type != MO_CALL)
- {
- if (MEM_P (mo->u.loc))
- mo->u.loc = adjust_stack_reference (mo->u.loc, -offset);
- }
- }
- VTI (bb)->out.stack_adjust = offset;
-}
-
/* Compute stack adjustments for all blocks by traversing DFS tree.
Return true when the adjustments on all incoming edges are consistent.
Heavily borrowed from pre_and_rev_post_order_compute. */
/* Initialize entry block. */
VTI (ENTRY_BLOCK_PTR)->visited = true;
+ VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
/* Allocate stack for back-tracking up CFG. */
/* Check if the edge destination has been visited yet. */
if (!VTI (dest)->visited)
{
+ rtx insn;
+ HOST_WIDE_INT pre, post, offset;
VTI (dest)->visited = true;
- VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust;
- bb_stack_adjust_offset (dest);
+ VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
+
+ if (dest != EXIT_BLOCK_PTR)
+ for (insn = BB_HEAD (dest);
+ insn != NEXT_INSN (BB_END (dest));
+ insn = NEXT_INSN (insn))
+ if (INSN_P (insn))
+ {
+ insn_stack_adjust_offset_pre_post (insn, &pre, &post);
+ offset += pre + post;
+ }
+
+ VTI (dest)->out.stack_adjust = offset;
if (EDGE_COUNT (dest->succs) > 0)
/* Since the DEST node has been visited for the first
return true;
}
-/* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative
- to the argument pointer. Return the new rtx. */
+/* Compute a CFA-based value for the stack pointer. */
static rtx
-adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment)
+compute_cfa_pointer (HOST_WIDE_INT adjustment)
{
- rtx addr, cfa, tmp;
+ rtx cfa;
#ifdef FRAME_POINTER_CFA_OFFSET
adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
cfa = plus_constant (arg_pointer_rtx, adjustment);
#endif
- addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa);
- tmp = simplify_rtx (addr);
- if (tmp)
- addr = tmp;
+ return cfa;
+}
+
+/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
+   or -1 if the replacement shouldn't be done.  */
+static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
+
+/* Data for adjust_mems callback.  */
+
+struct adjust_mem_data
+{
+  /* True while processing the SET_DEST side of a store.  */
+  bool store;
+  /* Mode of the innermost enclosing MEM; VOIDmode when not inside
+     a MEM address.  */
+  enum machine_mode mem_mode;
+  /* Stack adjustment used when replacing stack_pointer_rtx with a
+     CFA-based pointer (see compute_cfa_pointer).  */
+  HOST_WIDE_INT stack_adjust;
+  /* EXPR_LIST of SETs collected for {PRE,POST}_{INC,DEC,MODIFY}
+     side-effects stripped out of addresses.  */
+  rtx side_effects;
+};
+
+/* Helper for adjust_mems.  Return 1 if *loc is unsuitable for
+   transformation of wider mode arithmetics to narrower mode,
+   -1 if it is suitable and subexpressions shouldn't be
+   traversed and 0 if it is suitable and subexpressions should
+   be traversed.  Called through for_each_rtx.  DATA is the
+   lowpart SUBREG being considered for narrowing.  */
+
+static int
+use_narrower_mode_test (rtx *loc, void *data)
+{
+  rtx subreg = (rtx) data;
+
+  if (CONSTANT_P (*loc))
+    return -1;
+  switch (GET_CODE (*loc))
+    {
+    case REG:
+      /* A REG that cselib tracks in the wider (SUBREG_REG) mode is
+	 reported as unsuitable, presumably so the recorded wider-mode
+	 VALUE stays usable -- TODO confirm rationale.  */
+      if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0))
+	return 1;
+      return -1;
+    case PLUS:
+    case MINUS:
+    case MULT:
+      return 0;
+    case ASHIFT:
+      /* Only the shifted operand would be narrowed (the shift count
+	 keeps its mode in use_narrower_mode), so test operand 0 only.  */
+      if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
+	return 1;
+      else
+	return -1;
+    default:
+      return 1;
+    }
+}
+
+/* Transform X into narrower mode MODE from wider mode WMODE.
+   X must have been accepted by use_narrower_mode_test first.  */
+
+static rtx
+use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
+{
+  rtx op0, op1;
+  if (CONSTANT_P (x))
+    return lowpart_subreg (mode, x, wmode);
+  switch (GET_CODE (x))
+    {
+    case REG:
+      return lowpart_subreg (mode, x, wmode);
+    case PLUS:
+    case MINUS:
+    case MULT:
+      /* Narrow both operands recursively and redo the operation
+	 in MODE.  */
+      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
+      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
+      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
+    case ASHIFT:
+      /* Narrow only the shifted operand; the shift count keeps its
+	 original form.  */
+      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
+      return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
+    default:
+      /* Everything else is rejected by use_narrower_mode_test.  */
+      gcc_unreachable ();
+    }
+}
+
+/* Helper function for adjusting used MEMs.  Callback for
+   simplify_replace_fn_rtx; returning NULL_RTX lets the caller
+   traverse LOC's subexpressions normally.  */
+
+static rtx
+adjust_mems (rtx loc, const_rtx old_rtx, void *data)
+{
+  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
+  rtx mem, addr = loc, tem;
+  enum machine_mode mem_mode_save;
+  bool store_save;
+  switch (GET_CODE (loc))
+    {
+    case REG:
+      /* Don't do any sp or fp replacements outside of MEM addresses.  */
+      if (amd->mem_mode == VOIDmode)
+	return loc;
+      if (loc == stack_pointer_rtx
+	  && !frame_pointer_needed)
+	return compute_cfa_pointer (amd->stack_adjust);
+      else if (loc == hard_frame_pointer_rtx
+	       && frame_pointer_needed
+	       && hard_frame_pointer_adjustment != -1)
+	return compute_cfa_pointer (hard_frame_pointer_adjustment);
+      return loc;
+    case MEM:
+      mem = loc;
+      if (!amd->store)
+	{
+	  /* For uses, let the target delegitimize the whole MEM first;
+	     if it turns into a non-MEM, process that result instead.  */
+	  mem = targetm.delegitimize_address (mem);
+	  if (mem != loc && !MEM_P (mem))
+	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
+	}
+
+      addr = XEXP (mem, 0);
+      /* Recurse into the address with mem_mode set (so sp/fp are
+	 replaced there) and store cleared (an address is a use).  */
+      mem_mode_save = amd->mem_mode;
+      amd->mem_mode = GET_MODE (mem);
+      store_save = amd->store;
+      amd->store = false;
+      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
+      amd->store = store_save;
+      amd->mem_mode = mem_mode_save;
+      if (mem == loc)
+	addr = targetm.delegitimize_address (addr);
+      if (addr != XEXP (mem, 0))
+	mem = replace_equiv_address_nv (mem, addr);
+      if (!amd->store)
+	mem = avoid_constant_pool_reference (mem);
+      return mem;
+    case PRE_INC:
+    case PRE_DEC:
+      /* Express the pre-modified address as an explicit PLUS ...  */
+      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
+			   GEN_INT (GET_CODE (loc) == PRE_INC
+				    ? GET_MODE_SIZE (amd->mem_mode)
+				    : -GET_MODE_SIZE (amd->mem_mode)));
+      /* ... and fall through to record the side-effect SET.  */
+    case POST_INC:
+    case POST_DEC:
+      if (addr == loc)
+	addr = XEXP (loc, 0);
+      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
+      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
+      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
+			  GEN_INT ((GET_CODE (loc) == PRE_INC
+				    || GET_CODE (loc) == POST_INC)
+				   ? GET_MODE_SIZE (amd->mem_mode)
+				   : -GET_MODE_SIZE (amd->mem_mode)));
+      /* Queue reg = reg +- size as an extra SET; adjust_insn will add
+	 it to the insn pattern.  */
+      amd->side_effects = alloc_EXPR_LIST (0,
+					   gen_rtx_SET (VOIDmode,
+							XEXP (loc, 0),
+							tem),
+					   amd->side_effects);
+      return addr;
+    case PRE_MODIFY:
+      /* For PRE_MODIFY the new value is the address; fall through to
+	 queue the side-effect SET.  */
+      addr = XEXP (loc, 1);
+    case POST_MODIFY:
+      if (addr == loc)
+	addr = XEXP (loc, 0);
+      gcc_assert (amd->mem_mode != VOIDmode);
+      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
+      amd->side_effects = alloc_EXPR_LIST (0,
+					   gen_rtx_SET (VOIDmode,
+							XEXP (loc, 0),
+							XEXP (loc, 1)),
+					   amd->side_effects);
+      return addr;
+    case SUBREG:
+      /* First try without delegitimization of whole MEMs and
+	 avoid_constant_pool_reference, which is more likely to succeed.  */
+      store_save = amd->store;
+      amd->store = true;
+      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
+				      data);
+      amd->store = store_save;
+      /* Then retry with the full use processing.  */
+      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
+      if (mem == SUBREG_REG (loc))
+	{
+	  tem = loc;
+	  goto finish_subreg;
+	}
+      tem = simplify_gen_subreg (GET_MODE (loc), mem,
+				 GET_MODE (SUBREG_REG (loc)),
+				 SUBREG_BYTE (loc));
+      if (tem)
+	goto finish_subreg;
+      tem = simplify_gen_subreg (GET_MODE (loc), addr,
+				 GET_MODE (SUBREG_REG (loc)),
+				 SUBREG_BYTE (loc));
+      if (tem == NULL_RTX)
+	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
+    finish_subreg:
+      /* Where suitable, express a lowpart SUBREG of wider integer
+	 arithmetic directly as the same arithmetic in the narrower
+	 mode.  */
+      if (MAY_HAVE_DEBUG_INSNS
+	  && GET_CODE (tem) == SUBREG
+	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
+	      || GET_CODE (SUBREG_REG (tem)) == MINUS
+	      || GET_CODE (SUBREG_REG (tem)) == MULT
+	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
+	  && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
+	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
+	  && GET_MODE_SIZE (GET_MODE (tem))
+	     < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
+	  && subreg_lowpart_p (tem)
+	  && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
+	return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
+				  GET_MODE (SUBREG_REG (tem)));
+      return tem;
+    default:
+      break;
+    }
+  return NULL_RTX;
+}
+
+/* Helper function for replacement of uses.  Called through note_uses;
+   any replacement is queued via validate_change (in-group), not
+   applied immediately.  */
+
+static void
+adjust_mem_uses (rtx *x, void *data)
+{
+  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
+  if (new_x != *x)
+    validate_change (NULL_RTX, x, new_x, true);
+}
+
+/* Helper function for replacement of stores.  Called through
+   note_stores; only MEM destinations are rewritten, register
+   destinations are left alone.  Changes are queued via
+   validate_change (in-group).  */
+
+static void
+adjust_mem_stores (rtx loc, const_rtx expr, void *data)
+{
+  if (MEM_P (loc))
+    {
+      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
+					      adjust_mems, data);
+      if (new_dest != SET_DEST (expr))
+	{
+	  /* EXPR comes in const; cast it away to install the new
+	     destination.  */
+	  rtx xexpr = CONST_CAST_RTX (expr);
+	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
+	}
+    }
+}
+
+/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
+   replace them with their value in the insn and add the side-effects
+   as other sets to the insn.  */
+
+static void
+adjust_insn (basic_block bb, rtx insn)
+{
+  struct adjust_mem_data amd;
+  rtx set;
+  amd.mem_mode = VOIDmode;
+  /* out.stack_adjust is maintained incrementally by the caller as
+     insns are scanned, so it holds the adjustment at this insn;
+     negate it to get the sp -> CFA offset.  */
+  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
+  amd.side_effects = NULL_RTX;
+
+  /* Rewrite MEM destinations first, then all uses.  All changes are
+     only queued here (validate_change in-group).  */
+  amd.store = true;
+  note_stores (PATTERN (insn), adjust_mem_stores, &amd);
+
+  amd.store = false;
+  note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
+
+  /* For read-only MEMs containing some constant, prefer those
+     constants.  */
+  set = single_set (insn);
+  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
+    {
+      rtx note = find_reg_equal_equiv_note (insn);
-  return replace_equiv_address_nv (mem, addr);
+      if (note && CONSTANT_P (XEXP (note, 0)))
+	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
+    }
+
+  /* Append the auto-inc/dec side-effect SETs collected by adjust_mems
+     as extra elements of a (possibly new) PARALLEL.  */
+  if (amd.side_effects)
+    {
+      rtx *pat, new_pat, s;
+      int i, oldn, newn;
+
+      pat = &PATTERN (insn);
+      if (GET_CODE (*pat) == COND_EXEC)
+	pat = &COND_EXEC_CODE (*pat);
+      if (GET_CODE (*pat) == PARALLEL)
+	oldn = XVECLEN (*pat, 0);
+      else
+	oldn = 1;
+      /* Count the queued side-effects.  */
+      for (s = amd.side_effects, newn = 0; s; newn++)
+	s = XEXP (s, 1);
+      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
+      if (GET_CODE (*pat) == PARALLEL)
+	for (i = 0; i < oldn; i++)
+	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
+      else
+	XVECEXP (new_pat, 0, 0) = *pat;
+      for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
+	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
+      free_EXPR_LIST_list (&amd.side_effects);
+      validate_change (NULL_RTX, pat, new_pat, true);
+    }
+}
/* Return true if a decl_or_value DV is a DECL or NULL. */
var_debug_decl (tree decl)
{
if (decl && DECL_P (decl)
- && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
- && DECL_P (DECL_DEBUG_EXPR (decl)))
- decl = DECL_DEBUG_EXPR (decl);
+ && DECL_DEBUG_EXPR_IS_FROM (decl))
+ {
+ tree debugdecl = DECL_DEBUG_EXPR (decl);
+ if (debugdecl && DECL_P (debugdecl))
+ decl = debugdecl;
+ }
return decl;
}
}
/* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
-   VALUEs to DV.  */
+   VALUEs to DV.  At the same time get rid of ASM_OPERANDS from the locs
+   list, as that is something we can never express in .debug_info and it
+   can prevent reverse ops from being used.  */
static void
add_cselib_value_chains (decl_or_value dv)
{
-  struct elt_loc_list *l;
+  /* Pointer-to-pointer so ASM_OPERANDS entries can be unlinked
+     in place while walking.  */
+  struct elt_loc_list **l;
-  for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
-    for_each_rtx (&l->loc, add_value_chain, dv_as_opaque (dv));
+  for (l = &CSELIB_VAL_PTR (dv_as_value (dv))->locs; *l;)
+    if (GET_CODE ((*l)->loc) == ASM_OPERANDS)
+      /* Drop this entry; don't advance L.  */
+      *l = (*l)->next;
+    else
+      {
+	for_each_rtx (&(*l)->loc, add_value_chain, dv_as_opaque (dv));
+	l = &(*l)->next;
+      }
}
/* If decl or value DVP refers to VALUE from *LOC, remove backlinks
return 1;
}
+/* Bind one-part variables to the canonical value in an equivalence
+   set.  Not doing this causes dataflow convergence failure in rare
+   circumstances, see PR42873.  Unfortunately we can't do this
+   efficiently as part of canonicalize_values_star, since we may not
+   have determined or even seen the canonical value of a set when we
+   get to a variable that references another member of the set.
+   Called through htab_traverse on SET->vars; DATA is the
+   dataflow_set.  Always returns 1 to continue the traversal.  */
+
+static int
+canonicalize_vars_star (void **slot, void *data)
+{
+  dataflow_set *set = (dataflow_set *)data;
+  variable var = (variable) *slot;
+  decl_or_value dv = var->dv;
+  location_chain node;
+  rtx cval;
+  decl_or_value cdv;
+  void **cslot;
+  variable cvar;
+  location_chain cnode;
+
+  /* Only decl-based one-part variables are of interest here.  */
+  if (!dv_onepart_p (dv) || dv_is_value_p (dv))
+    return 1;
+
+  gcc_assert (var->n_var_parts == 1);
+
+  node = var->var_part[0].loc_chain;
+
+  if (GET_CODE (node->loc) != VALUE)
+    return 1;
+
+  gcc_assert (!node->next);
+  cval = node->loc;
+
+  /* Push values to the canonical one.  */
+  cdv = dv_from_value (cval);
+  cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
+  if (!cslot)
+    return 1;
+  cvar = (variable)*cslot;
+  gcc_assert (cvar->n_var_parts == 1);
+
+  cnode = cvar->var_part[0].loc_chain;
+
+  /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
+     that are not "more canonical" than it.  */
+  if (GET_CODE (cnode->loc) != VALUE
+      || !canon_value_cmp (cnode->loc, cval))
+    return 1;
+
+  /* CVAL was found to be non-canonical.  Change the variable to point
+     to the canonical VALUE.  */
+  gcc_assert (!cnode->next);
+  cval = cnode->loc;
+
+  /* Rebind VAR's part to CVAL, replacing the old binding.  */
+  slot = set_slot_part (set, cval, slot, dv, 0,
+			node->init, node->set_src);
+  slot = clobber_slot_part (set, cval, slot, 0, node->set_src);
+
+  return 1;
+}
+
/* Combine variable or value in *S1SLOT (in DSM->cur) with the
corresponding entry in DSM->src. Multi-part variables are combined
with variable_union, whereas onepart dvs are combined with
htab_traverse (shared_hash_htab ((*permp)->vars),
variable_post_merge_perm_vals, &dfpm);
htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
+ htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
}
/* Return a node whose loc is a MEM that refers to EXPR in the
int r;
for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
- if (TEST_HARD_REG_BIT (call_used_reg_set, r))
+ if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
var_regno_delete (set, r);
if (MAY_HAVE_DEBUG_INSNS)
don't need to track this expression if the ultimate declaration is
ignored. */
realdecl = expr;
- if (DECL_DEBUG_EXPR_IS_FROM (realdecl) && DECL_DEBUG_EXPR (realdecl))
+ if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
{
realdecl = DECL_DEBUG_EXPR (realdecl);
+ if (realdecl == NULL_TREE)
+ realdecl = expr;
/* ??? We don't yet know how to emit DW_OP_piece for variable
that has been SRA'ed. */
- if (!DECL_P (realdecl))
+ else if (!DECL_P (realdecl))
return 0;
}
return gen_rtx_REG_offset (loc, mode, regno, offset);
}
+/* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
+ hard_frame_pointer_rtx is being mapped to it. */
+static rtx cfa_base_rtx;
+
/* Carry information about uses and stores while walking rtx. */
struct count_use_info
return NULL;
}
+/* Helper function to get mode of MEM's address.  Falls back to the
+   target's address mode for MEM's address space when the address
+   expression itself has VOIDmode (e.g. a bare constant).  */
+
+static inline enum machine_mode
+get_address_mode (rtx mem)
+{
+  enum machine_mode mode = GET_MODE (XEXP (mem, 0));
+  if (mode != VOIDmode)
+    return mode;
+  return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
+}
+
/* Replace all registers and addresses in an expression with VALUE
expressions that map back to them, unless the expression is a
register. If no mapping is or can be performed, returns NULL. */
return NULL;
else if (MEM_P (loc))
{
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
- cselib_val *addr = cselib_lookup (XEXP (loc, 0), address_mode, 0);
+ cselib_val *addr = cselib_lookup (XEXP (loc, 0),
+ get_address_mode (loc), 0);
if (addr)
return replace_equiv_address_nv (loc, addr->val_rtx);
else
if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
{
rtx ploc = PAT_VAR_LOCATION_LOC (loc);
- cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
+ if (! VAR_LOC_UNKNOWN_P (ploc))
+ {
+ cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
- /* ??? flag_float_store and volatile mems are never
- given values, but we could in theory use them for
- locations. */
- gcc_assert (val || 1);
+ /* ??? flag_float_store and volatile mems are never
+ given values, but we could in theory use them for
+ locations. */
+ gcc_assert (val || 1);
+ }
return MO_VAL_LOC;
}
else
{
if (REG_P (loc)
|| (find_use_val (loc, GET_MODE (loc), cui)
- && cselib_lookup (XEXP (loc, 0), GET_MODE (loc), 0)))
+ && cselib_lookup (XEXP (loc, 0),
+ get_address_mode (loc), 0)))
return MO_VAL_SET;
}
else
{
gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
+ if (loc == cfa_base_rtx)
+ return MO_CLOBBER;
expr = REG_EXPR (loc);
if (!expr)
fputc ('\n', out);
}
-/* Adjust sets if needed. Currently this optimizes read-only MEM loads
- if REG_EQUAL/REG_EQUIV note is present. */
-
-static void
-adjust_sets (rtx insn, struct cselib_set *sets, int n_sets)
-{
- if (n_sets == 1 && MEM_P (sets[0].src) && MEM_READONLY_P (sets[0].src))
- {
- /* For read-only MEMs containing some constant, prefer those
- constants. */
- rtx note = find_reg_equal_equiv_note (insn), src;
-
- if (note && CONSTANT_P (XEXP (note, 0)))
- {
- sets[0].src = src = XEXP (note, 0);
- if (GET_CODE (PATTERN (insn)) == COND_EXEC)
- src = gen_rtx_IF_THEN_ELSE (GET_MODE (sets[0].dest),
- COND_EXEC_TEST (PATTERN (insn)),
- src, sets[0].dest);
- sets[0].src_elt = cselib_lookup (src, GET_MODE (sets[0].dest), 1);
- }
- }
-}
-
/* Tell whether the CONCAT used to holds a VALUE and its location
needs value resolution, i.e., an attempt of mapping the location
back to other incoming values. */
VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
}
+/* Helper function for MO_VAL_LOC handling.  Return non-zero if an rtx
+   unsuitable for CONST use that has not been replaced by a VALUE is
+   discovered.  Called through for_each_rtx.  */
+
+static int
+non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
+{
+  if (*x == NULL_RTX)
+    return 0;
+
+  switch (GET_CODE (*x))
+    {
+    case REG:
+    case DEBUG_EXPR:
+    case PC:
+    case SCRATCH:
+    case CC0:
+    case ASM_INPUT:
+    case ASM_OPERANDS:
+      return 1;
+    case MEM:
+      /* Only read-only MEMs are acceptable.  */
+      return !MEM_READONLY_P (*x);
+    default:
+      return 0;
+    }
+}
+
/* Add uses (register and memory references) LOC which will be tracked
to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
gcc_assert (cui->sets);
if (MEM_P (vloc)
- && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0)))
+ && !REG_P (XEXP (vloc, 0))
+ && !MEM_P (XEXP (vloc, 0))
+ && (GET_CODE (XEXP (vloc, 0)) != PLUS
+ || XEXP (XEXP (vloc, 0), 0) != cfa_base_rtx
+ || !CONST_INT_P (XEXP (XEXP (vloc, 0), 1))))
{
rtx mloc = vloc;
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
+ enum machine_mode address_mode = get_address_mode (mloc);
cselib_val *val
= cselib_lookup (XEXP (mloc, 0), address_mode, 0);
}
}
- if (!VAR_LOC_UNKNOWN_P (vloc)
- && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
+ if (CONSTANT_P (vloc)
+ && (GET_CODE (vloc) != CONST
+ || for_each_rtx (&vloc, non_suitable_const, NULL)))
+ /* For constants don't look up any value. */;
+ else if (!VAR_LOC_UNKNOWN_P (vloc)
+ && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
{
enum machine_mode mode2;
enum micro_operation_type type2;
gcc_assert (cui->sets);
if (MEM_P (oloc)
- && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0)))
+ && !REG_P (XEXP (oloc, 0))
+ && !MEM_P (XEXP (oloc, 0))
+ && (GET_CODE (XEXP (oloc, 0)) != PLUS
+ || XEXP (XEXP (oloc, 0), 0) != cfa_base_rtx
+ || !CONST_INT_P (XEXP (XEXP (oloc, 0), 1))))
{
rtx mloc = oloc;
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
+ enum machine_mode address_mode = get_address_mode (mloc);
cselib_val *val
= cselib_lookup (XEXP (mloc, 0), address_mode, 0);
return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret);
}
-/* Return SRC, or, if it is a read-only MEM for which adjust_sets
- replated it with a constant from REG_EQUIV/REG_EQUAL note,
- that constant. */
-
-static inline rtx
-get_adjusted_src (struct count_use_info *cui, rtx src)
-{
- if (cui->n_sets == 1
- && MEM_P (src)
- && MEM_READONLY_P (src)
- && CONSTANT_P (cui->sets[0].src))
- return cui->sets[0].src;
- return src;
-}
-
/* Add stores (register and memory references) LOC which will be tracked
to VTI (bb)->mos. EXPR is the RTL expression containing the store.
CUIP->insn is instruction which the LOC is part of. */
if (REG_P (loc))
{
+ gcc_assert (loc != cfa_base_rtx);
if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
|| !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
|| GET_CODE (expr) == CLOBBER)
else
{
if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
- {
- src = get_adjusted_src (cui, SET_SRC (expr));
- src = var_lowpart (mode2, src);
- }
+ src = var_lowpart (mode2, SET_SRC (expr));
loc = var_lowpart (mode2, loc);
if (src == NULL)
}
else
{
- rtx xexpr = CONST_CAST_RTX (expr);
-
- if (SET_SRC (expr) != src)
- xexpr = gen_rtx_SET (VOIDmode, loc, src);
+ rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
mo.type = MO_COPY;
else
|| cui->sets))
{
if (MEM_P (loc) && type == MO_VAL_SET
- && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
+ && !REG_P (XEXP (loc, 0))
+ && !MEM_P (XEXP (loc, 0))
+ && (GET_CODE (XEXP (loc, 0)) != PLUS
+ || XEXP (XEXP (loc, 0), 0) != cfa_base_rtx
+ || !CONST_INT_P (XEXP (XEXP (loc, 0), 1))))
{
rtx mloc = loc;
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
- cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
+ enum machine_mode address_mode = get_address_mode (mloc);
+ cselib_val *val = cselib_lookup (XEXP (mloc, 0),
+ address_mode, 0);
if (val && !cselib_preserved_value_p (val))
{
else
{
if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
- {
- src = get_adjusted_src (cui, SET_SRC (expr));
- src = var_lowpart (mode2, src);
- }
+ src = var_lowpart (mode2, SET_SRC (expr));
loc = var_lowpart (mode2, loc);
if (src == NULL)
}
else
{
- rtx xexpr = CONST_CAST_RTX (expr);
-
- if (SET_SRC (expr) != src)
- xexpr = gen_rtx_SET (VOIDmode, loc, src);
+ rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
if (same_variable_part_p (SET_SRC (xexpr),
MEM_EXPR (loc),
INT_MEM_OFFSET (loc)))
}
else if (resolve && GET_CODE (mo.u.loc) == SET)
{
- src = get_adjusted_src (cui, SET_SRC (expr));
- nloc = replace_expr_with_values (src);
+ nloc = replace_expr_with_values (SET_SRC (expr));
/* Avoid the mode mismatch between oexpr and expr. */
if (!nloc && mode != mode2)
{
- nloc = src;
+ nloc = SET_SRC (expr);
gcc_assert (oloc == SET_DEST (expr));
}
cselib_hook_called = true;
- adjust_sets (insn, sets, n_sets);
-
cui.insn = insn;
cui.bb = bb;
cui.sets = sets;
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
INSERT);
}
+ else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
+ set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
+ dv_from_decl (var), 0,
+ VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
+ INSERT);
}
break;
result = pc_rtx;
break;
}
- else
- {
- result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
- vt_expand_loc_callback,
- data);
- if (result)
- break;
- }
+ }
+ else
+ {
+ result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
+ vt_expand_loc_callback, data);
+ if (result)
+ break;
}
if (dummy && (result || var->var_part[0].cur_loc))
var->cur_loc_changed = true;
complete = true;
last_limit = 0;
n_var_parts = 0;
- if (!MAY_HAVE_DEBUG_STMTS)
+ if (!MAY_HAVE_DEBUG_INSNS)
{
for (i = 0; i < var->n_var_parts; i++)
if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
}
loc[n_var_parts] = loc2;
mode = GET_MODE (var->var_part[i].cur_loc);
+ if (mode == VOIDmode && dv_onepart_p (var->dv))
+ mode = DECL_MODE (decl);
for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
if (var->var_part[i].cur_loc == lc->loc)
{
(int) initialized);
else if (n_var_parts == 1)
{
- rtx expr_list
- = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
+ rtx expr_list;
+
+ if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
+ expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
+ else
+ expr_list = loc[0];
note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
(int) initialized);
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
INSERT);
}
+ else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
+ set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
+ dv_from_decl (var), 0,
+ VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
+ INSERT);
emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
}
}
+/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx.  */
+
+static bool
+fp_setter (rtx insn)
+{
+  rtx pat = PATTERN (insn);
+  if (RTX_FRAME_RELATED_P (insn))
+    {
+      /* Prefer the attached unwind expression, if any, over the raw
+	 pattern.  */
+      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
+      if (expr)
+	pat = XEXP (expr, 0);
+    }
+  if (GET_CODE (pat) == SET)
+    return SET_DEST (pat) == hard_frame_pointer_rtx;
+  else if (GET_CODE (pat) == PARALLEL)
+    {
+      int i;
+      /* Look for a SET of hard_frame_pointer_rtx among the elements.  */
+      for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
+	if (GET_CODE (XVECEXP (pat, 0, i)) == SET
+	    && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
+	  return true;
+    }
+  return false;
+}
+
+/* Initialize cfa_base_rtx, create a preserved VALUE for it and
+   ensure it isn't flushed during cselib_reset_table.
+   Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
+   has been eliminated.  */
+
+static void
+vt_init_cfa_base (void)
+{
+  cselib_val *val;
+
+#ifdef FRAME_POINTER_CFA_OFFSET
+  cfa_base_rtx = frame_pointer_rtx;
+#else
+  cfa_base_rtx = arg_pointer_rtx;
+#endif
+  /* The base must be a fixed register distinct from the hard frame
+     pointer; otherwise don't use a CFA base at all.  */
+  if (cfa_base_rtx == hard_frame_pointer_rtx
+      || !fixed_regs[REGNO (cfa_base_rtx)])
+    {
+      cfa_base_rtx = NULL_RTX;
+      return;
+    }
+  if (!MAY_HAVE_DEBUG_INSNS)
+    return;
+
+  val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
+				 get_insns ());
+  preserve_value (val);
+  cselib_preserve_cfa_base_value (val);
+  /* Record the VALUE as the base register's location in the entry
+     block's out set.  */
+  var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
+		    VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
+		    0, NULL_RTX, INSERT);
+}
+
/* Allocate and initialize the data structures for variable tracking
and parse the RTL to get the micro operations. */
-static void
+static bool
vt_initialize (void)
{
- basic_block bb;
+ basic_block bb, prologue_bb = NULL;
+ HOST_WIDE_INT fp_cfa_offset = -1;
alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
+ attrs_pool = create_alloc_pool ("attrs_def pool",
+ sizeof (struct attrs_def), 1024);
+ var_pool = create_alloc_pool ("variable_def pool",
+ sizeof (struct variable_def)
+ + (MAX_VAR_PARTS - 1)
+ * sizeof (((variable)NULL)->var_part[0]), 64);
+ loc_chain_pool = create_alloc_pool ("location_chain_def pool",
+ sizeof (struct location_chain_def),
+ 1024);
+ shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
+ sizeof (struct shared_hash_def), 256);
+ empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
+ empty_shared_hash->refcount = 1;
+ empty_shared_hash->htab
+ = htab_create (1, variable_htab_hash, variable_htab_eq,
+ variable_htab_free);
+ changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
+ variable_htab_free);
+ if (MAY_HAVE_DEBUG_INSNS)
+ {
+ value_chain_pool = create_alloc_pool ("value_chain_def pool",
+ sizeof (struct value_chain_def),
+ 1024);
+ value_chains = htab_create (32, value_chain_htab_hash,
+ value_chain_htab_eq, NULL);
+ }
+
+ /* Init the IN and OUT sets. */
+ FOR_ALL_BB (bb)
+ {
+ VTI (bb)->visited = false;
+ VTI (bb)->flooded = false;
+ dataflow_set_init (&VTI (bb)->in);
+ dataflow_set_init (&VTI (bb)->out);
+ VTI (bb)->permp = NULL;
+ }
+
if (MAY_HAVE_DEBUG_INSNS)
{
- cselib_init (true);
+ cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
scratch_regs = BITMAP_ALLOC (NULL);
valvar_pool = create_alloc_pool ("small variable_def pool",
sizeof (struct variable_def), 256);
valvar_pool = NULL;
}
+ if (!frame_pointer_needed)
+ {
+ rtx reg, elim;
+
+ if (!vt_stack_adjustments ())
+ return false;
+
+#ifdef FRAME_POINTER_CFA_OFFSET
+ reg = frame_pointer_rtx;
+#else
+ reg = arg_pointer_rtx;
+#endif
+ elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
+ if (elim != reg)
+ {
+ if (GET_CODE (elim) == PLUS)
+ elim = XEXP (elim, 0);
+ if (elim == stack_pointer_rtx)
+ vt_init_cfa_base ();
+ }
+ }
+ else if (!crtl->stack_realign_tried)
+ {
+ rtx reg, elim;
+
+#ifdef FRAME_POINTER_CFA_OFFSET
+ reg = frame_pointer_rtx;
+ fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
+#else
+ reg = arg_pointer_rtx;
+ fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
+#endif
+ elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
+ if (elim != reg)
+ {
+ if (GET_CODE (elim) == PLUS)
+ {
+ fp_cfa_offset -= INTVAL (XEXP (elim, 1));
+ elim = XEXP (elim, 0);
+ }
+ if (elim != hard_frame_pointer_rtx)
+ fp_cfa_offset = -1;
+ else
+ prologue_bb = single_succ (ENTRY_BLOCK_PTR);
+ }
+ }
+
+ hard_frame_pointer_adjustment = -1;
+
FOR_EACH_BB (bb)
{
rtx insn;
/* Add the micro-operations to the vector. */
FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
{
+ HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
+ VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
mo.type = MO_ADJUST;
mo.u.adjust = pre;
mo.insn = insn;
- VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
- &mo);
-
if (dump_file && (dump_flags & TDF_DETAILS))
log_op_type (PATTERN (insn), bb, insn,
MO_ADJUST, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
+ &mo);
+ VTI (bb)->out.stack_adjust += pre;
}
}
cselib_hook_called = false;
+ adjust_insn (bb, insn);
if (MAY_HAVE_DEBUG_INSNS)
{
cselib_process_insn (insn);
}
if (!cselib_hook_called)
add_with_sets (insn, 0, 0);
+ cancel_changes (0);
if (!frame_pointer_needed && post)
{
mo.type = MO_ADJUST;
mo.u.adjust = post;
mo.insn = insn;
- VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
- &mo);
-
if (dump_file && (dump_flags & TDF_DETAILS))
log_op_type (PATTERN (insn), bb, insn,
MO_ADJUST, dump_file);
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
+ &mo);
+ VTI (bb)->out.stack_adjust += post;
+ }
+
+ if (bb == prologue_bb
+ && hard_frame_pointer_adjustment == -1
+ && RTX_FRAME_RELATED_P (insn)
+ && fp_setter (insn))
+ {
+ vt_init_cfa_base ();
+ hard_frame_pointer_adjustment = fp_cfa_offset;
}
}
}
+ gcc_assert (offset == VTI (bb)->out.stack_adjust);
}
bb = last_bb;
}
}
- attrs_pool = create_alloc_pool ("attrs_def pool",
- sizeof (struct attrs_def), 1024);
- var_pool = create_alloc_pool ("variable_def pool",
- sizeof (struct variable_def)
- + (MAX_VAR_PARTS - 1)
- * sizeof (((variable)NULL)->var_part[0]), 64);
- loc_chain_pool = create_alloc_pool ("location_chain_def pool",
- sizeof (struct location_chain_def),
- 1024);
- shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
- sizeof (struct shared_hash_def), 256);
- empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
- empty_shared_hash->refcount = 1;
- empty_shared_hash->htab
- = htab_create (1, variable_htab_hash, variable_htab_eq,
- variable_htab_free);
- changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
- variable_htab_free);
- if (MAY_HAVE_DEBUG_INSNS)
- {
- value_chain_pool = create_alloc_pool ("value_chain_def pool",
- sizeof (struct value_chain_def),
- 1024);
- value_chains = htab_create (32, value_chain_htab_hash,
- value_chain_htab_eq, NULL);
- }
-
- /* Init the IN and OUT sets. */
- FOR_ALL_BB (bb)
- {
- VTI (bb)->visited = false;
- VTI (bb)->flooded = false;
- dataflow_set_init (&VTI (bb)->in);
- dataflow_set_init (&VTI (bb)->out);
- VTI (bb)->permp = NULL;
- }
-
+ hard_frame_pointer_adjustment = -1;
VTI (ENTRY_BLOCK_PTR)->flooded = true;
vt_add_function_parameters ();
+ cfa_base_rtx = NULL_RTX;
+ return true;
}
/* Get rid of all debug insns from the insn stream. */
}
mark_dfs_back_edges ();
- vt_initialize ();
- if (!frame_pointer_needed)
+ if (!vt_initialize ())
{
- if (!vt_stack_adjustments ())
- {
- vt_finalize ();
- vt_debug_insns_local (true);
- return 0;
- }
+ vt_finalize ();
+ vt_debug_insns_local (true);
+ return 0;
}
success = vt_find_locations ();
/* This is later restored by our caller. */
flag_var_tracking_assignments = 0;
- vt_initialize ();
-
- if (!frame_pointer_needed && !vt_stack_adjustments ())
- gcc_unreachable ();
+ success = vt_initialize ();
+ gcc_assert (success);
success = vt_find_locations ();
}