2010-03-12  Jakub Jelinek  <jakub@redhat.com>
+ * cselib.c (LONG_TERM_PRESERVED_VALUE_P): Remove.
+ (cselib_preserve_definitely, cselib_clear_preserve): Remove.
+ (cselib_preserve_only_values): Remove retain argument, don't
+ traverse hash table with cselib_{preserve_definitely,clear_preserve}.
+ * cselib.h (cselib_preserve_only_values): Remove retain argument.
+ * var-tracking.c (micro_operation): Move insn field before union.
+ Add DEF_VEC_O and DEF_VEC_ALLOC_O for this type.
+ (struct variable_tracking_info_def): Remove n_mos field, change
+ mos into a vector of micro_operations.
+ (count_uses, count_uses_1, count_stores, count_with_sets): Remove.
+ (bb_stack_adjust_offset, log_op_type, add_uses, add_stores,
+ compute_bb_dataflow, emit_notes_in_bb): Adjust for VTI (bb)->mos
+ changing into a vector.
+ (add_with_sets): Likewise. Ensure MO_VAL_USE uops from add_stores
+ come before all other uops generated by add_stores.
+ (vt_add_function_parameters): Adjust for cselib_preserve_only_values
+ argument removal.
+ (vt_initialize): Likewise. Adjust for VTI (bb)->mos changing into
+ a vector. Run just one pass over the bbs instead of separate counting
+	and computation phases.
+ (vt_finalize): Free VTI (bb)->mos vector instead of array.
+
PR debug/43329
* tree-inline.c (remap_decls): Put old_var rather than origin_var
into *nonlocalized_list vector.
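
As context for the mos change described in the var-tracking.c entries above: the per-basic-block micro-operations move from a counted array (the removed n_mos field) to a heap-allocated VEC of micro_operation objects. The following is a minimal sketch of what that implies, using GCC's vec.h "O" (object-by-value) vector macros; it is illustrative only, not an excerpt from the patch, and the exact layout of struct variable_tracking_info_def is assumed from the ChangeLog:

    /* Allow vectors of micro_operation objects stored by value ("O").  */
    DEF_VEC_O(micro_operation);
    DEF_VEC_ALLOC_O(micro_operation,heap);

    struct variable_tracking_info_def
    {
      /* The vector of micro operations for this basic block.  */
      VEC(micro_operation, heap) *mos;
      ...
    };

    /* Appending and walking then go through the usual VEC API.  */
    VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
    for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
      ...
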
}
}
+/* Compute stack adjustment in basic block BB. */
+
+static void
+bb_stack_adjust_offset (basic_block bb)
+{
+ HOST_WIDE_INT offset;
+ unsigned int i;
+ micro_operation *mo;
+
+ offset = VTI (bb)->in.stack_adjust;
+ for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
+ {
+ if (mo->type == MO_ADJUST)
+ offset += mo->u.adjust;
+ else if (mo->type != MO_CALL)
+ {
+ if (MEM_P (mo->u.loc))
+ mo->u.loc = adjust_stack_reference (mo->u.loc, -offset);
+ }
+ }
+ VTI (bb)->out.stack_adjust = offset;
+}
+
/* Compute stack adjustments for all blocks by traversing DFS tree.
Return true when the adjustments on all incoming edges are consistent.
Heavily borrowed from pre_and_rev_post_order_compute. */
fputc ('\n', out);
}
+/* Adjust sets if needed.  Currently this optimizes read-only MEM loads
+   if a REG_EQUAL/REG_EQUIV note is present.  */
+
+static void
+adjust_sets (rtx insn, struct cselib_set *sets, int n_sets)
+{
+ if (n_sets == 1 && MEM_P (sets[0].src) && MEM_READONLY_P (sets[0].src))
+ {
+ /* For read-only MEMs containing some constant, prefer those
+ constants. */
+ rtx note = find_reg_equal_equiv_note (insn), src;
+
+ if (note && CONSTANT_P (XEXP (note, 0)))
+ {
+ sets[0].src = src = XEXP (note, 0);
+ if (GET_CODE (PATTERN (insn)) == COND_EXEC)
+ src = gen_rtx_IF_THEN_ELSE (GET_MODE (sets[0].dest),
+ COND_EXEC_TEST (PATTERN (insn)),
+ src, sets[0].dest);
+ sets[0].src_elt = cselib_lookup (src, GET_MODE (sets[0].dest), 1);
+ }
+ }
+}
+
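
For orientation, not part of this hunk: adjust_sets is reached through cselib's record-sets callback. A rough sketch of that wiring, assuming the cselib_record_sets_hook pointer declared in cselib.h and the add_with_sets callback mentioned in the ChangeLog above; the exact placement of the call is an assumption, not quoted from the patch:

    /* vt_initialize installs the var-tracking callback ...  */
    cselib_record_sets_hook = add_with_sets;

    /* ... and the callback canonicalizes the recorded sets before
       turning them into MO_VAL_USE/MO_VAL_SET micro-operations.  */
    static void
    add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
    {
      ...
      adjust_sets (insn, sets, n_sets);
      ...
    }
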
/* Tell whether the CONCAT used to hold a VALUE and its location
needs value resolution, i.e., an attempt of mapping the location
back to other incoming values. */
if (REG_P (loc))
{
- gcc_assert (loc != cfa_base_rtx);
if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
|| !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
|| GET_CODE (expr) == CLOBBER)
/* Add the micro-operations to the vector. */
FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
{
- HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
- VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
mo.type = MO_ADJUST;
mo.u.adjust = pre;
mo.insn = insn;
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
+ &mo);
+
if (dump_file && (dump_flags & TDF_DETAILS))
log_op_type (PATTERN (insn), bb, insn,
MO_ADJUST, dump_file);
mo.type = MO_ADJUST;
mo.u.adjust = post;
mo.insn = insn;
+ VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
+ &mo);
+
if (dump_file && (dump_flags & TDF_DETAILS))
log_op_type (PATTERN (insn), bb, insn,
MO_ADJUST, dump_file);
}
}
}
- gcc_assert (offset == VTI (bb)->out.stack_adjust);
}
bb = last_bb;