/* Helper routine for add_scope_conflicts, calculating the active partitions
at the end of BB, leaving the result in WORK. We're called to generate
- conflicts when FOR_CONFLICT is true, otherwise we're just tracking
- liveness. */
+ conflicts when OLD_CONFLICTS is non-null, otherwise we're just tracking
+ liveness. If we generate conflicts then OLD_CONFLICTS stores the bits
+ for which we generated conflicts already. */
static void
-add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
+add_scope_conflicts_1 (basic_block bb, bitmap work, bitmap old_conflicts)
{
edge e;
edge_iterator ei;
FOR_EACH_EDGE (e, ei, bb->preds)
bitmap_ior_into (work, (bitmap)e->src->aux);
- if (for_conflict)
- {
- /* We need to add conflicts for everything life at the start of
- this block. Unlike classical lifeness for named objects we can't
- rely on seeing a def/use of the names we're interested in.
- There might merely be indirect loads/stores. We'd not add any
- conflicts for such partitions. */
- bitmap_iterator bi;
- unsigned i;
- EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
- {
- unsigned j;
- bitmap_iterator bj;
- EXECUTE_IF_SET_IN_BITMAP (work, i, j, bj)
- add_stack_var_conflict (i, j);
- }
- visit = visit_conflict;
- }
- else
- visit = visit_op;
+ visit = visit_op;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
- if (!is_gimple_debug (stmt))
- walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
+ walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
}
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
bitmap_clear_bit (work, *v);
}
else if (!is_gimple_debug (stmt))
- walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
+ {
+ if (old_conflicts
+ && visit == visit_op)
+ {
+ /* If this is the first real instruction in this BB, we need
+ to add conflicts for everything live at this point now.
+ Unlike classical liveness for named objects we can't
+ rely on seeing a def/use of the names we're interested in.
+ There might merely be indirect loads/stores. We'd not add any
+ conflicts for such partitions. We know that we generated
+ conflicts between all partitions in old_conflicts already,
+ so we need to generate only the new ones, which avoids
+ repeatedly paying the O(N^2) cost for each basic block. */
+ bitmap_iterator bi;
+ unsigned i;
+
+ EXECUTE_IF_AND_COMPL_IN_BITMAP (work, old_conflicts, 0, i, bi)
+ {
+ unsigned j;
+ bitmap_iterator bj;
+ /* First the conflicts between new and old_conflicts. */
+ EXECUTE_IF_SET_IN_BITMAP (old_conflicts, 0, j, bj)
+ add_stack_var_conflict (i, j);
+ /* Then the conflicts between only the new members. */
+ EXECUTE_IF_AND_COMPL_IN_BITMAP (work, old_conflicts, i + 1,
+ j, bj)
+ add_stack_var_conflict (i, j);
+ }
+ /* And remember for the next basic block. */
+ bitmap_ior_into (old_conflicts, work);
+ visit = visit_conflict;
+ }
+ walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
+ }
}
}
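
(Not part of the patch: a minimal standalone sketch of the incremental
conflict generation above, using plain uint32_t masks in place of GCC's
sparse bitmaps. The add_conflict stub, the 32-partition cap and the masks
in main are invented for illustration; the pair enumeration mirrors the
two EXECUTE_IF_* loops in the hunk, so each conflict pair is paid for
only once across blocks.)

#include <stdint.h>
#include <stdio.h>

/* Stand-in for add_stack_var_conflict.  */
static void
add_conflict (unsigned i, unsigned j)
{
  printf ("conflict %u <-> %u\n", i, j);
}

/* WORK is the set of partitions live at the start of a block;
   OLD_CONFLICTS remembers the partitions whose mutual conflicts were
   already recorded.  Only pairs involving a genuinely new member are
   emitted, so the quadratic cost is paid once per partition set, not
   once per basic block.  */
static void
add_new_conflicts (uint32_t work, uint32_t *old_conflicts)
{
  uint32_t new_bits = work & ~*old_conflicts;
  for (unsigned i = 0; i < 32; i++)
    if (new_bits & (1u << i))
      {
        /* First the conflicts between new and old members...  */
        for (unsigned j = 0; j < 32; j++)
          if (*old_conflicts & (1u << j))
            add_conflict (i, j);
        /* ...then between the new members themselves, each pair once.  */
        for (unsigned j = i + 1; j < 32; j++)
          if (new_bits & (1u << j))
            add_conflict (i, j);
      }
  /* And remember for the next basic block.  */
  *old_conflicts |= work;
}

int
main (void)
{
  uint32_t old_conflicts = 0;
  add_new_conflicts (0x7, &old_conflicts);  /* three pairs: 0-1 0-2 1-2 */
  add_new_conflicts (0x7, &old_conflicts);  /* same set: emits nothing */
  add_new_conflicts (0xf, &old_conflicts);  /* only pairs involving 3 */
  return 0;
}
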
basic_block bb;
bool changed;
bitmap work = BITMAP_ALLOC (NULL);
+ bitmap old_conflicts;
- /* We approximate the life range of a stack variable by taking the first
+ /* We approximate the live range of a stack variable by taking the first
mention of its name as starting point(s), and by the end-of-scope
death clobber added by gimplify as ending point(s) of the range.
This overapproximates in the case we for instance moved an
address-taken operation upward, without any user ever noticing.
But it's conservatively correct as a variable never can hold values
before its name is mentioned at least once.
- We then do a mostly classical bitmap lifeness algorithm. */
+ We then do a mostly classical bitmap liveness algorithm. */
FOR_ALL_BB (bb)
bb->aux = BITMAP_ALLOC (NULL);
FOR_EACH_BB (bb)
{
bitmap active = (bitmap)bb->aux;
- add_scope_conflicts_1 (bb, work, false);
+ add_scope_conflicts_1 (bb, work, NULL);
if (bitmap_ior_into (active, work))
changed = true;
}
}
+ old_conflicts = BITMAP_ALLOC (NULL);
+
FOR_EACH_BB (bb)
- add_scope_conflicts_1 (bb, work, true);
+ add_scope_conflicts_1 (bb, work, old_conflicts);
+ BITMAP_FREE (old_conflicts);
BITMAP_FREE (work);
FOR_ALL_BB (bb)
BITMAP_FREE (bb->aux);
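
(Likewise not part of the patch: a toy forward fixed-point loop in the
shape of the liveness computation above. Each block's set is the union
of its predecessors' sets plus the partitions first mentioned locally,
iterated until nothing changes; the three-block CFG with a back edge
and the gen masks are invented for the example, and the end-of-scope
clobber kills are omitted for brevity.)

#include <stdbool.h>
#include <stdint.h>
#include <inttypes.h>
#include <stdio.h>

#define NBLOCKS 3

/* preds[b] lists the predecessors of block b, terminated by -1.
   Block 2 feeds back into block 1, which forces a second iteration.  */
static const int preds[NBLOCKS][3] = { { -1 }, { 0, 2, -1 }, { 1, -1 } };

/* gen[b]: partitions whose names are first mentioned in block b.  */
static const uint32_t gen[NBLOCKS] = { 0x1, 0x2, 0x4 };

int
main (void)
{
  uint32_t live[NBLOCKS] = { 0, 0, 0 };
  bool changed = true;

  while (changed)
    {
      changed = false;
      for (int b = 0; b < NBLOCKS; b++)
        {
          /* Like the bitmap_ior_into over bb->preds in the patch.  */
          uint32_t work = gen[b];
          for (int p = 0; preds[b][p] != -1; p++)
            work |= live[preds[b][p]];
          if (work != live[b])
            {
              live[b] = work;
              changed = true;  /* iterate until a fixed point */
            }
        }
    }

  for (int b = 0; b < NBLOCKS; b++)
    printf ("block %d live set: 0x%" PRIx32 "\n", b, live[b]);
  return 0;
}
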
CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
else
CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
- CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
SET_EXPR_LOCATION (exp, gimple_location (stmt));
TREE_BLOCK (exp) = gimple_block (stmt);
gcc_assert (xmode == mode || xmode == VOIDmode);
#else
rtx temp;
- enum machine_mode address_mode = targetm.addr_space.address_mode (as);
- enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
- gcc_assert (mode == address_mode || mode == pointer_mode);
+ gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
return x;
if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
|| TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
|| TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
- && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
+ && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
+ || target_for_debug_bind (TREE_OPERAND (exp, 0))))
return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
if (handled_component_p (TREE_OPERAND (exp, 0)))
if ((TREE_CODE (decl) == VAR_DECL
|| TREE_CODE (decl) == PARM_DECL
|| TREE_CODE (decl) == RESULT_DECL)
- && !TREE_ADDRESSABLE (decl)
+ && (!TREE_ADDRESSABLE (decl)
+ || target_for_debug_bind (decl))
&& (bitoffset % BITS_PER_UNIT) == 0
&& bitsize > 0
&& bitsize == maxsize)
case REDUC_MIN_EXPR:
case REDUC_PLUS_EXPR:
case VEC_COND_EXPR:
- case VEC_EXTRACT_EVEN_EXPR:
- case VEC_EXTRACT_ODD_EXPR:
- case VEC_INTERLEAVE_HIGH_EXPR:
- case VEC_INTERLEAVE_LOW_EXPR:
case VEC_LSHIFT_EXPR:
case VEC_PACK_FIX_TRUNC_EXPR:
case VEC_PACK_SAT_EXPR: