and due to type based aliasing rules decides that for two overlapping
union temporaries { short s; int i; } accesses to the same mem through
different types may not alias and happily reorders stores across
- life-time boundaries of the temporaries (See PR25654).
- We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
+ life-time boundaries of the temporaries (See PR25654). */
static void
add_alias_set_conflicts (void)
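For context, a minimal sketch of the hazard the comment above describes; this is not taken from the patch or from PR25654 itself, and all names are made up. Two block-local temporaries of different types can be assigned the same stack slot; if the scheduler trusted type-based aliasing for them, it could move the int store across the end of the short temporary's lifetime:

/* Hypothetical reduction in the spirit of PR25654: s and i live in
   disjoint scopes, so stack-slot sharing may give them the same
   address; their stores must then be treated as aliasing even
   though their types differ.  */
int
f (int k)
{
  int r;
  {
    short s = (short) k;   /* store through type 'short' */
    r = s;
  }
  {
    int i = k + 1;         /* store through type 'int', possibly the same slot */
    r += i;
  }
  return r;
}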
/* Helper routine for add_scope_conflicts, calculating the active partitions
at the end of BB, leaving the result in WORK. We're called to generate
- conflicts when OLD_CONFLICTS is non-null, otherwise we're just tracking
- liveness. If we generate conflicts then OLD_CONFLICTS stores the bits
- for which we generated conflicts already. */
+ conflicts when FOR_CONFLICT is true, otherwise we're just tracking
+ liveness. */
static void
-add_scope_conflicts_1 (basic_block bb, bitmap work, bitmap old_conflicts)
+add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
edge e;
edge_iterator ei;
}
else if (!is_gimple_debug (stmt))
{
- if (old_conflicts
+ if (for_conflict
&& visit == visit_op)
{
/* If this is the first real instruction in this BB we need
to add conflicts for everything live at this point.
Unlike classical liveness for named objects we can't
rely on seeing a def/use of the names we're interested in.
There might merely be indirect loads/stores. We'd not add any
- conflicts for such partitions. We know that we generated
- conflicts between all partitions in old_conflicts already,
- so we need to generate only the new ones, avoiding to
- repeatedly pay the O(N^2) cost for each basic block. */
+ conflicts for such partitions. */
bitmap_iterator bi;
unsigned i;
-
- EXECUTE_IF_AND_COMPL_IN_BITMAP (work, old_conflicts, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
{
unsigned j;
bitmap_iterator bj;
- /* First the conflicts between new and old_conflicts. */
- EXECUTE_IF_SET_IN_BITMAP (old_conflicts, 0, j, bj)
- add_stack_var_conflict (i, j);
- /* Then the conflicts between only the new members. */
- EXECUTE_IF_AND_COMPL_IN_BITMAP (work, old_conflicts, i + 1,
- j, bj)
+ EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
add_stack_var_conflict (i, j);
}
- /* And remember for the next basic block. */
- bitmap_ior_into (old_conflicts, work);
visit = visit_conflict;
}
walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
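Stripped of the bitmap machinery, the conflict generation this hunk restores is a walk over unordered pairs of live partitions; conflicts are symmetric, which is why the inner iteration starts at i + 1. A standalone sketch of the same pattern, with the WORK bitmap modeled as a plain array (names hypothetical, not GCC API):

#include <stdbool.h>

/* Record a conflict between every unordered pair of live partitions,
   mirroring the nested EXECUTE_IF_SET_IN_BITMAP walk above.  */
static void
record_pairwise_conflicts (const bool *live, unsigned n,
                           void (*conflict) (unsigned, unsigned))
{
  for (unsigned i = 0; i < n; i++)
    if (live[i])
      for (unsigned j = i + 1; j < n; j++)  /* symmetric, so j > i only */
        if (live[j])
          conflict (i, j);
}

This is the O(N^2)-per-block cost the removed old_conflicts bookkeeping was trying to avoid; the revert accepts paying it again in exchange for correctness.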
basic_block bb;
bool changed;
bitmap work = BITMAP_ALLOC (NULL);
- bitmap old_conflicts;
/* We approximate the live range of a stack variable by taking the first
mention of its name as starting point(s), and by the end-of-scope
FOR_EACH_BB (bb)
{
bitmap active = (bitmap)bb->aux;
- add_scope_conflicts_1 (bb, work, NULL);
+ add_scope_conflicts_1 (bb, work, false);
if (bitmap_ior_into (active, work))
changed = true;
}
}
- old_conflicts = BITMAP_ALLOC (NULL);
-
FOR_EACH_BB (bb)
- add_scope_conflicts_1 (bb, work, old_conflicts);
+ add_scope_conflicts_1 (bb, work, true);
- BITMAP_FREE (old_conflicts);
BITMAP_FREE (work);
FOR_ALL_BB (bb)
BITMAP_FREE (bb->aux);
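The driver (add_scope_conflicts, largely elided here) now has a simple two-phase shape again: iterate the per-block transfer function with for_conflict == false until no bb->aux liveness set grows, then make one final pass with for_conflict == true to record conflicts against the converged sets. A toy model of that shape, with bitmaps once more as plain arrays (all names hypothetical):

#include <stdbool.h>

#define N_BLOCKS 8
#define N_PARTS  16

/* dst |= src; report whether dst grew.  Stand-in for bitmap_ior_into.  */
static bool
merge_into (bool *dst, const bool *src)
{
  bool changed = false;
  for (unsigned v = 0; v < N_PARTS; v++)
    if (src[v] && !dst[v])
      dst[v] = changed = true;
  return changed;
}

/* Phase 1: iterate liveness to a fixed point.  Phase 2 (a single
   conflict-recording pass, as in the FOR_EACH_BB loop above) would
   follow once this returns.  */
static void
iterate_liveness (bool active[N_BLOCKS][N_PARTS],
                  void (*transfer) (unsigned bb, bool *work))
{
  bool work[N_PARTS];
  bool changed = true;
  while (changed)
    {
      changed = false;
      for (unsigned bb = 0; bb < N_BLOCKS; bb++)
        {
          transfer (bb, work);  /* like add_scope_conflicts_1 (bb, work, false) */
          if (merge_into (active[bb], work))
            changed = true;
        }
    }
}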
(void *)(size_t) uid)) = part;
*((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
decl)) = name;
+ if (TREE_ADDRESSABLE (decl))
+ TREE_ADDRESSABLE (name) = 1;
}
/* Make the SSA name point to all partition members. */
reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
incoming = replace_equiv_address_nv (incoming, reg);
}
+ else
+ incoming = copy_rtx (incoming);
}
#endif
|| (GET_CODE (XEXP (incoming, 0)) == PLUS
&& XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
&& CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
- return incoming;
+ return copy_rtx (incoming);
return NULL_RTX;
}
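A note on the two copy_rtx calls added above (my reading; the patch itself doesn't say): the rtx returned here ends up inside a debug insn, and avoid_complex_debug_insns below may rewrite such expressions in place through *exp_p. Returning the incoming RTL unshared via copy_rtx ensures an in-place split in one debug insn can't corrupt DECL_INCOMING_RTL or other users of the shared RTL.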
return op0;
}
+/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
+ Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
+ deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
+
+static void
+avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
+{
+ rtx exp = *exp_p;
+ const char *format_ptr;
+ int i, j;
+
+ if (exp == NULL_RTX)
+ return;
+
+ if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
+ return;
+
+ if (depth == 4)
+ {
+ /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
+ rtx dval = make_debug_expr_from_rtl (exp);
+
+ /* Emit a debug bind insn before INSN. */
+ rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
+ DEBUG_EXPR_TREE_DECL (dval), exp,
+ VAR_INIT_STATUS_INITIALIZED);
+
+ emit_debug_insn_before (bind, insn);
+ *exp_p = dval;
+ return;
+ }
+
+ format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
+ for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
+ switch (*format_ptr++)
+ {
+ case 'e':
+ avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
+ break;
+
+ case 'E':
+ case 'V':
+ for (j = 0; j < XVECLEN (exp, i); j++)
+ avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
+ break;
+
+ default:
+ break;
+ }
+}
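Design note on the depth cap: splitting at depth 4 bounds the size of any single var-location expression without discarding debug information. The detached subtree survives as the value of the new DEBUG_EXPR bind emitted before INSN, so consumers of the debug insn stream see several shallow expressions instead of one arbitrarily deep one.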
+
/* Expand the _LOCs in debug insns. We run this after expanding all
regular insns, so that any variables referenced in the function
will have their DECL_RTLs set. */
if (DEBUG_INSN_P (insn))
{
tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
- rtx val;
+ rtx val, prev_insn, insn2;
enum machine_mode mode;
if (value == NULL_TREE)
}
INSN_VAR_LOCATION_LOC (insn) = val;
+ prev_insn = PREV_INSN (insn);
+ for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
+ avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
}
flag_strict_aliasing = save_strict_alias;
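The backward walk above is what makes the splitting cascade: avoid_complex_debug_insns emits its new binds immediately before INSN, so stepping from INSN back to the remembered PREV_INSN visits exactly those freshly emitted debug insns, and any still-too-deep subexpressions inside them are split in turn until every var-location expression fits within the depth cap.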