+2004-01-12  Jan Hubicka  <jh@suse.cz>
+
+ PR opt/12826
+ * loop.c (insert_loop_mem): Prefer VOLATILE memory references to be
+ stored.
+
+ PR opt/12863
+ * cfgcleanup.c (label_is_jump_target_p): Move to...
+ * rtlanal.c (label_is_jump_target_p): ... here.
+ * cfgrtl.c (cfg_layout_redirect_edge_and_branch): Fix redirecting of
+ fallthru edges unified with branch edges.
+
2004-01-12  Richard Earnshaw  <rearnsha@arm.com>

* simplify-rtx.c (simplify_immed_subreg): Correctly extract the
static int flow_find_cross_jump (int, basic_block, basic_block, rtx *, rtx *);
static bool insns_match_p (int, rtx, rtx);
-static bool label_is_jump_target_p (rtx, rtx);
static bool tail_recursion_label_p (rtx);
static void merge_blocks_move_predecessor_nojumps (basic_block, basic_block);
static void merge_blocks_move_successor_nojumps (basic_block, basic_block);
return changed;
}
\f
-/* Return true if LABEL is a target of JUMP_INSN. This applies only
- to non-complex jumps. That is, direct unconditional, conditional,
- and tablejumps, but not computed jumps or returns. It also does
- not apply to the fallthru case of a conditional jump. */
-
-static bool
-label_is_jump_target_p (rtx label, rtx jump_insn)
-{
- rtx tmp = JUMP_LABEL (jump_insn);
-
- if (label == tmp)
- return true;
-
- if (tablejump_p (jump_insn, NULL, &tmp))
- {
- rtvec vec = XVEC (PATTERN (tmp),
- GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
- int i, veclen = GET_NUM_ELEM (vec);
-
- for (i = 0; i < veclen; ++i)
- if (XEXP (RTVEC_ELT (vec, i), 0) == label)
- return true;
- }
-
- return false;
-}
-
/* Return true if LABEL is used for tail recursion. */
static bool
{
/* Redirect any branch edges unified with the fallthru one. */
if (GET_CODE (BB_END (src)) == JUMP_INSN
- && JUMP_LABEL (BB_END (src)) == BB_HEAD (e->dest))
+ && label_is_jump_target_p (BB_HEAD (e->dest),
+ BB_END (src)))
{
- if (!redirect_jump (BB_END (src), block_label (dest), 0))
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "Fallthru edge unified with branch "
+ "%i->%i redirected to %i\n",
+ e->src->index, e->dest->index, dest->index);
+ e->flags &= ~EDGE_FALLTHRU;
+ if (!redirect_branch_edge (e, dest))
abort ();
+ e->flags |= EDGE_FALLTHRU;
+ return true;
}
 /* In case we are redirecting a fallthru edge to the branch edge
 of a conditional jump, remove it. */
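
For reviewers, a minimal standalone sketch of the control flow of the
cfg_layout_redirect_edge_and_branch fix above. The edge and flag types
here are toy stand-ins, not GCC's CFG structures, and
redirect_branch_edge_stub is a hypothetical placeholder for
redirect_branch_edge; the point is only the flag handling: a fallthru
edge unified with a branch edge is handed to the branch-redirection
path with EDGE_FALLTHRU temporarily cleared, then the flag is restored.

#include <stdbool.h>
#include <stdio.h>

/* Toy stand-ins for the CFG types; illustrative only, not GCC's
   definitions.  */
#define EDGE_FALLTHRU 1

struct edge
{
  int flags;
  int dest_index;
};

/* Hypothetical stand-in for redirect_branch_edge: retarget E.  */
static bool
redirect_branch_edge_stub (struct edge *e, int dest_index)
{
  e->dest_index = dest_index;
  return true;
}

/* Mirror of the fixed logic: hide EDGE_FALLTHRU while the
   branch-redirection machinery runs, then restore it.  */
static bool
redirect_unified_fallthru (struct edge *e, int dest_index)
{
  e->flags &= ~EDGE_FALLTHRU;
  if (!redirect_branch_edge_stub (e, dest_index))
    return false;               /* the real code aborts here */
  e->flags |= EDGE_FALLTHRU;
  return true;
}

int
main (void)
{
  struct edge e = { EDGE_FALLTHRU, 2 };
  redirect_unified_fallthru (&e, 5);
  printf ("dest=%d fallthru=%d\n", e.dest_index,
          e.flags & EDGE_FALLTHRU);   /* dest=5 fallthru=1 */
  return 0;
}
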
for (i = 0; i < loop_info->mems_idx; ++i)
if (rtx_equal_p (m, loop_info->mems[i].mem))
{
+ if (MEM_VOLATILE_P (m) && !MEM_VOLATILE_P (loop_info->mems[i].mem))
+ loop_info->mems[i].mem = m;
if (GET_MODE (m) != GET_MODE (loop_info->mems[i].mem))
/* The modes of the two memory accesses are different. If
this happens, something tricky is going on, and we just
don't optimize accesses to this MEM. */
loop_info->mems[i].optimize = 0;
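
For PR opt/12826, a minimal sketch of the hazard the hunk above
closes. The struct below is a hypothetical stand-in for a MEM (just an
address key and a MEM_VOLATILE_P-like flag), not the RTL type: when two
equal references are merged, keeping the non-volatile copy would let
later passes treat a volatile access as optimizable, so the volatile
copy must win.

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical stand-in for a MEM reference; not the RTL type.  */
struct mem_ref
{
  const char *addr;     /* stands in for the address expression */
  bool volatile_p;      /* stands in for MEM_VOLATILE_P */
};

static struct mem_ref seen[16];
static size_t n_seen;

/* Record M, merging it with an equal reference seen earlier.  As in
   insert_loop_mem above, prefer to store the volatile copy so the
   merged record is never mistaken for a plain optimizable access.  */
static void
record_mem (struct mem_ref m)
{
  for (size_t i = 0; i < n_seen; ++i)
    if (strcmp (seen[i].addr, m.addr) == 0)   /* cf. rtx_equal_p */
      {
        if (m.volatile_p && !seen[i].volatile_p)
          seen[i] = m;          /* the volatile reference wins */
        return;
      }
  seen[n_seen++] = m;
}

int
main (void)
{
  record_mem ((struct mem_ref) { "x", false });
  record_mem ((struct mem_ref) { "x", true });   /* same MEM, volatile */
  printf ("volatile_p = %d\n", seen[0].volatile_p);   /* prints 1 */
  return 0;
}
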
extern int loc_mentioned_in_p (rtx *, rtx);
extern rtx find_first_parameter_load (rtx, rtx);
extern bool keep_with_call_p (rtx);
+extern bool label_is_jump_target_p (rtx, rtx);
/* flow.c */
end_sequence ();
return new_insn;
}
+
+/* Return true if LABEL is a target of JUMP_INSN. This applies only
+ to non-complex jumps. That is, direct unconditional, conditional,
+ and tablejumps, but not computed jumps or returns. It also does
+ not apply to the fallthru case of a conditional jump. */
+
+bool
+label_is_jump_target_p (rtx label, rtx jump_insn)
+{
+ rtx tmp = JUMP_LABEL (jump_insn);
+
+ if (label == tmp)
+ return true;
+
+ if (tablejump_p (jump_insn, NULL, &tmp))
+ {
+ rtvec vec = XVEC (PATTERN (tmp),
+ GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
+ int i, veclen = GET_NUM_ELEM (vec);
+
+ for (i = 0; i < veclen; ++i)
+ if (XEXP (RTVEC_ELT (vec, i), 0) == label)
+ return true;
+ }
+
+ return false;
+}
+
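
Finally, a standalone sketch of the membership test
label_is_jump_target_p performs, on toy types rather than RTL: the
direct JUMP_LABEL comparison first, then a linear scan of the
tablejump's label vector, mirroring the ADDR_VEC/ADDR_DIFF_VEC walk
above. The struct and function names are illustrative inventions, not
GCC interfaces.

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Toy model of a jump insn: a direct target label plus an optional
   dispatch-table vector.  Illustrative only; not the RTL forms.  */
struct jump
{
  int direct_label;     /* stands in for JUMP_LABEL */
  const int *table;     /* stands in for the table's label vector */
  size_t table_len;
};

/* Same shape as label_is_jump_target_p: direct match first, then a
   scan of the table entries.  */
static bool
jump_targets_label_p (const struct jump *j, int label)
{
  if (label == j->direct_label)
    return true;

  for (size_t i = 0; i < j->table_len; ++i)
    if (j->table[i] == label)
      return true;

  return false;
}

int
main (void)
{
  static const int labels[] = { 10, 20, 30 };
  struct jump j = { 10, labels, 3 };

  printf ("%d %d\n",
          jump_targets_label_p (&j, 20),   /* 1: found in the table */
          jump_targets_label_p (&j, 40));  /* 0: not a target */
  return 0;
}
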