/* Optimize by combining instructions for GNU compiler.
- Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
+ Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
+ 1999, 2000 Free Software Foundation, Inc.
This file is part of GNU CC.
#include "config.h"
#include "system.h"
-#include "rtl.h" /* stdio.h must precede rtl.h for FFS. */
+#include "rtl.h"
+#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
+#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
#include "expr.h"
#include "insn-flags.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"
+#include "defaults.h"
+
+#ifndef ACCUMULATE_OUTGOING_ARGS
+#define ACCUMULATE_OUTGOING_ARGS 0
+#endif
+
+/* Supply a default definition for PUSH_ARGS. */
+#ifndef PUSH_ARGS
+#ifdef PUSH_ROUNDING
+#define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
+#else
+#define PUSH_ARGS 0
+#endif
+#endif
/* It is not safe to use ordinary gen_lowpart in combine.
Use gen_lowpart_for_combine instead. See comments there. */
/* Maximum register number, which is the size of the tables below. */
-static int combine_max_regno;
+static unsigned int combine_max_regno;
/* Record last point of death of (hard or pseudo) register n. */
/* Basic block number of the block in which we are performing combines. */
static int this_basic_block;
+
+/* A bitmap indicating which blocks had registers go dead at entry.
+ After combine, we'll need to re-do global life analysis with
+ those blocks as starting points. */
+static sbitmap refresh_blocks;
+static int need_refresh;
\f
/* The next group of arrays allows the recording of the last value assigned
to (hard or pseudo) register n. We use this information to see if a
/* Nonzero if we know that a register has some leading bits that are always
equal to the sign bit. */
-static char *reg_sign_bit_copies;
+static unsigned char *reg_sign_bit_copies;
/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
It is zero while computing them and after combine has completed. This
static struct undobuf undobuf;
+/* Number of times the pseudo being substituted for
+ was found and replaced. */
+
+static int n_occurrences;
+
+static void do_SUBST PARAMS ((rtx *, rtx));
+static void do_SUBST_INT PARAMS ((int *, int));
+static void init_reg_last_arrays PARAMS ((void));
+static void setup_incoming_promotions PARAMS ((void));
+static void set_nonzero_bits_and_sign_copies PARAMS ((rtx, rtx, void *));
+static int can_combine_p PARAMS ((rtx, rtx, rtx, rtx, rtx *, rtx *));
+static int sets_function_arg_p PARAMS ((rtx));
+static int combinable_i3pat PARAMS ((rtx, rtx *, rtx, rtx, int, rtx *));
+static int contains_muldiv PARAMS ((rtx));
+static rtx try_combine PARAMS ((rtx, rtx, rtx, int *));
+static void undo_all PARAMS ((void));
+static void undo_commit PARAMS ((void));
+static rtx *find_split_point PARAMS ((rtx *, rtx));
+static rtx subst PARAMS ((rtx, rtx, rtx, int, int));
+static rtx combine_simplify_rtx PARAMS ((rtx, enum machine_mode, int, int));
+static rtx simplify_if_then_else PARAMS ((rtx));
+static rtx simplify_set PARAMS ((rtx));
+static rtx simplify_logical PARAMS ((rtx, int));
+static rtx expand_compound_operation PARAMS ((rtx));
+static rtx expand_field_assignment PARAMS ((rtx));
+static rtx make_extraction PARAMS ((enum machine_mode, rtx, HOST_WIDE_INT,
+ rtx, unsigned HOST_WIDE_INT, int,
+ int, int));
+static rtx extract_left_shift PARAMS ((rtx, int));
+static rtx make_compound_operation PARAMS ((rtx, enum rtx_code));
+static int get_pos_from_mask PARAMS ((unsigned HOST_WIDE_INT,
+ unsigned HOST_WIDE_INT *));
+static rtx force_to_mode PARAMS ((rtx, enum machine_mode,
+ unsigned HOST_WIDE_INT, rtx, int));
+static rtx if_then_else_cond PARAMS ((rtx, rtx *, rtx *));
+static rtx known_cond PARAMS ((rtx, enum rtx_code, rtx, rtx));
+static int rtx_equal_for_field_assignment_p PARAMS ((rtx, rtx));
+static rtx make_field_assignment PARAMS ((rtx));
+static rtx apply_distributive_law PARAMS ((rtx));
+static rtx simplify_and_const_int PARAMS ((rtx, enum machine_mode, rtx,
+ unsigned HOST_WIDE_INT));
+static unsigned HOST_WIDE_INT nonzero_bits PARAMS ((rtx, enum machine_mode));
+static unsigned int num_sign_bit_copies PARAMS ((rtx, enum machine_mode));
+static int merge_outer_ops PARAMS ((enum rtx_code *, HOST_WIDE_INT *,
+ enum rtx_code, HOST_WIDE_INT,
+ enum machine_mode, int *));
+static rtx simplify_shift_const PARAMS ((rtx, enum rtx_code, enum machine_mode,
+ rtx, int));
+static int recog_for_combine PARAMS ((rtx *, rtx, rtx *));
+static rtx gen_lowpart_for_combine PARAMS ((enum machine_mode, rtx));
+static rtx gen_rtx_combine PARAMS ((enum rtx_code code, enum machine_mode mode,
+ ...));
+static rtx gen_binary PARAMS ((enum rtx_code, enum machine_mode,
+ rtx, rtx));
+static rtx gen_unary PARAMS ((enum rtx_code, enum machine_mode,
+ enum machine_mode, rtx));
+static enum rtx_code simplify_comparison PARAMS ((enum rtx_code, rtx *, rtx *));
+static int reversible_comparison_p PARAMS ((rtx));
+static void update_table_tick PARAMS ((rtx));
+static void record_value_for_reg PARAMS ((rtx, rtx, rtx));
+static void check_promoted_subreg PARAMS ((rtx, rtx));
+static void record_dead_and_set_regs_1 PARAMS ((rtx, rtx, void *));
+static void record_dead_and_set_regs PARAMS ((rtx));
+static int get_last_value_validate PARAMS ((rtx *, rtx, int, int));
+static rtx get_last_value PARAMS ((rtx));
+static int use_crosses_set_p PARAMS ((rtx, int));
+static void reg_dead_at_p_1 PARAMS ((rtx, rtx, void *));
+static int reg_dead_at_p PARAMS ((rtx, rtx));
+static void move_deaths PARAMS ((rtx, rtx, int, rtx, rtx *));
+static int reg_bitfield_target_p PARAMS ((rtx, rtx));
+static void distribute_notes PARAMS ((rtx, rtx, rtx, rtx, rtx, rtx));
+static void distribute_links PARAMS ((rtx));
+static void mark_used_regs_combine PARAMS ((rtx));
+static int insn_cuid PARAMS ((rtx));
+static void record_promoted_value PARAMS ((rtx, rtx));
+\f
/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
insn. The substitution can be undone by undo_all. If INTO is already
set to NEWVAL, do not record this change. Because computing NEWVAL might
also call SUBST, we have to compute it before we put anything into
the undo table. */
-#define SUBST(INTO, NEWVAL) \
- do { rtx _new = (NEWVAL); \
- struct undo *_buf; \
- \
- if (undobuf.frees) \
- _buf = undobuf.frees, undobuf.frees = _buf->next; \
- else \
- _buf = (struct undo *) xmalloc (sizeof (struct undo)); \
- \
- _buf->is_int = 0; \
- _buf->where.r = &INTO; \
- _buf->old_contents.r = INTO; \
- INTO = _new; \
- if (_buf->old_contents.r == INTO) \
- _buf->next = undobuf.frees, undobuf.frees = _buf; \
- else \
- _buf->next = undobuf.undos, undobuf.undos = _buf; \
- } while (0)
+static void
+do_SUBST(into, newval)
+ rtx *into, newval;
+{
+ struct undo *buf;
+ rtx oldval = *into;
+
+ if (oldval == newval)
+ return;
+
+ if (undobuf.frees)
+ buf = undobuf.frees, undobuf.frees = buf->next;
+ else
+ buf = (struct undo *) xmalloc (sizeof (struct undo));
+
+ buf->is_int = 0;
+ buf->where.r = into;
+ buf->old_contents.r = oldval;
+ *into = newval;
+
+ buf->next = undobuf.undos, undobuf.undos = buf;
+}
+
+#define SUBST(INTO, NEWVAL) do_SUBST(&(INTO), (NEWVAL))
/* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
for the value of a HOST_WIDE_INT value (including CONST_INT) is
not safe. */
-#define SUBST_INT(INTO, NEWVAL) \
- do { struct undo *_buf; \
- \
- if (undobuf.frees) \
- _buf = undobuf.frees, undobuf.frees = _buf->next; \
- else \
- _buf = (struct undo *) xmalloc (sizeof (struct undo)); \
- \
- _buf->is_int = 1; \
- _buf->where.i = (int *) &INTO; \
- _buf->old_contents.i = INTO; \
- INTO = NEWVAL; \
- if (_buf->old_contents.i == INTO) \
- _buf->next = undobuf.frees, undobuf.frees = _buf; \
- else \
- _buf->next = undobuf.undos, undobuf.undos = _buf; \
- } while (0)
+static void
+do_SUBST_INT(into, newval)
+ int *into, newval;
+{
+ struct undo *buf;
+ int oldval = *into;
-/* Number of times the pseudo being substituted for
- was found and replaced. */
+ if (oldval == newval)
+ return;
-static int n_occurrences;
+ if (undobuf.frees)
+ buf = undobuf.frees, undobuf.frees = buf->next;
+ else
+ buf = (struct undo *) xmalloc (sizeof (struct undo));
+
+ buf->is_int = 1;
+ buf->where.i = into;
+ buf->old_contents.i = oldval;
+ *into = newval;
+
+ buf->next = undobuf.undos, undobuf.undos = buf;
+}
-static void init_reg_last_arrays PROTO((void));
-static void setup_incoming_promotions PROTO((void));
-static void set_nonzero_bits_and_sign_copies PROTO((rtx, rtx));
-static int can_combine_p PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
-static int sets_function_arg_p PROTO((rtx));
-static int combinable_i3pat PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
-static rtx try_combine PROTO((rtx, rtx, rtx));
-static void undo_all PROTO((void));
-static rtx *find_split_point PROTO((rtx *, rtx));
-static rtx subst PROTO((rtx, rtx, rtx, int, int));
-static rtx simplify_rtx PROTO((rtx, enum machine_mode, int, int));
-static rtx simplify_if_then_else PROTO((rtx));
-static rtx simplify_set PROTO((rtx));
-static rtx simplify_logical PROTO((rtx, int));
-static rtx expand_compound_operation PROTO((rtx));
-static rtx expand_field_assignment PROTO((rtx));
-static rtx make_extraction PROTO((enum machine_mode, rtx, int, rtx, int,
- int, int, int));
-static rtx extract_left_shift PROTO((rtx, int));
-static rtx make_compound_operation PROTO((rtx, enum rtx_code));
-static int get_pos_from_mask PROTO((unsigned HOST_WIDE_INT, int *));
-static rtx force_to_mode PROTO((rtx, enum machine_mode,
- unsigned HOST_WIDE_INT, rtx, int));
-static rtx if_then_else_cond PROTO((rtx, rtx *, rtx *));
-static rtx known_cond PROTO((rtx, enum rtx_code, rtx, rtx));
-static int rtx_equal_for_field_assignment_p PROTO((rtx, rtx));
-static rtx make_field_assignment PROTO((rtx));
-static rtx apply_distributive_law PROTO((rtx));
-static rtx simplify_and_const_int PROTO((rtx, enum machine_mode, rtx,
- unsigned HOST_WIDE_INT));
-static unsigned HOST_WIDE_INT nonzero_bits PROTO((rtx, enum machine_mode));
-static int num_sign_bit_copies PROTO((rtx, enum machine_mode));
-static int merge_outer_ops PROTO((enum rtx_code *, HOST_WIDE_INT *,
- enum rtx_code, HOST_WIDE_INT,
- enum machine_mode, int *));
-static rtx simplify_shift_const PROTO((rtx, enum rtx_code, enum machine_mode,
- rtx, int));
-static int recog_for_combine PROTO((rtx *, rtx, rtx *));
-static rtx gen_lowpart_for_combine PROTO((enum machine_mode, rtx));
-static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
- ...));
-static rtx gen_binary PROTO((enum rtx_code, enum machine_mode,
- rtx, rtx));
-static rtx gen_unary PROTO((enum rtx_code, enum machine_mode,
- enum machine_mode, rtx));
-static enum rtx_code simplify_comparison PROTO((enum rtx_code, rtx *, rtx *));
-static int reversible_comparison_p PROTO((rtx));
-static void update_table_tick PROTO((rtx));
-static void record_value_for_reg PROTO((rtx, rtx, rtx));
-static void record_dead_and_set_regs_1 PROTO((rtx, rtx));
-static void record_dead_and_set_regs PROTO((rtx));
-static int get_last_value_validate PROTO((rtx *, rtx, int, int));
-static rtx get_last_value PROTO((rtx));
-static int use_crosses_set_p PROTO((rtx, int));
-static void reg_dead_at_p_1 PROTO((rtx, rtx));
-static int reg_dead_at_p PROTO((rtx, rtx));
-static void move_deaths PROTO((rtx, rtx, int, rtx, rtx *));
-static int reg_bitfield_target_p PROTO((rtx, rtx));
-static void distribute_notes PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
-static void distribute_links PROTO((rtx));
-static void mark_used_regs_combine PROTO((rtx));
-static int insn_cuid PROTO((rtx));
+#define SUBST_INT(INTO, NEWVAL) do_SUBST_INT(&(INTO), (NEWVAL))
\f
/* Main entry point for combiner. F is the first insn of the function.
- NREGS is the first unused pseudo-reg number. */
+ NREGS is the first unused pseudo-reg number.
-void
+ Return non-zero if the combiner has turned an indirect jump
+ instruction into a direct jump. */
+int
combine_instructions (f, nregs)
rtx f;
- int nregs;
+ unsigned int nregs;
{
register rtx insn, next;
#ifdef HAVE_cc0
register int i;
register rtx links, nextlinks;
+ int new_direct_jump_p = 0;
+
combine_attempts = 0;
combine_merges = 0;
combine_extras = 0;
combine_successes = 0;
- undobuf.undos = undobuf.previous_undos = 0;
combine_max_regno = nregs;
- reg_nonzero_bits
- = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
- reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));
-
- bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
- bzero (reg_sign_bit_copies, nregs * sizeof (char));
-
- reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
- reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
- reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
- reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
- reg_last_set_label = (int *) alloca (nregs * sizeof (int));
- reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
+ reg_nonzero_bits = ((unsigned HOST_WIDE_INT *)
+ xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT)));
+ reg_sign_bit_copies
+ = (unsigned char *) xcalloc (nregs, sizeof (unsigned char));
+
+ reg_last_death = (rtx *) xmalloc (nregs * sizeof (rtx));
+ reg_last_set = (rtx *) xmalloc (nregs * sizeof (rtx));
+ reg_last_set_value = (rtx *) xmalloc (nregs * sizeof (rtx));
+ reg_last_set_table_tick = (int *) xmalloc (nregs * sizeof (int));
+ reg_last_set_label = (int *) xmalloc (nregs * sizeof (int));
+ reg_last_set_invalid = (char *) xmalloc (nregs * sizeof (char));
reg_last_set_mode
- = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
+ = (enum machine_mode *) xmalloc (nregs * sizeof (enum machine_mode));
reg_last_set_nonzero_bits
- = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
+ = (unsigned HOST_WIDE_INT *) xmalloc (nregs * sizeof (HOST_WIDE_INT));
reg_last_set_sign_bit_copies
- = (char *) alloca (nregs * sizeof (char));
+ = (char *) xmalloc (nregs * sizeof (char));
init_reg_last_arrays ();
if (INSN_UID (insn) > i)
i = INSN_UID (insn);
- uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
+ uid_cuid = (int *) xmalloc ((i + 1) * sizeof (int));
max_uid_cuid = i;
nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
setup_incoming_promotions ();
+ refresh_blocks = sbitmap_alloc (n_basic_blocks);
+ sbitmap_zero (refresh_blocks);
+ need_refresh = 0;
+
for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
{
uid_cuid[INSN_UID (insn)] = ++i;
if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
{
- note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
+ note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
+ NULL);
record_dead_and_set_regs (insn);
#ifdef AUTO_INC_DEC
for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
if (REG_NOTE_KIND (links) == REG_INC)
- set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX);
+ set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
+ NULL);
#endif
}
else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
{
+ /* See if we know about function return values before this
+ insn based upon SUBREG flags. */
+ check_promoted_subreg (insn, PATTERN (insn));
+
/* Try this insn with each insn it links back to. */
for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
- if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
+ if ((next = try_combine (insn, XEXP (links, 0),
+ NULL_RTX, &new_direct_jump_p)) != 0)
goto retry;
/* Try each sequence of three linked insns ending with this one. */
for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
- for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
- nextlinks = XEXP (nextlinks, 1))
- if ((next = try_combine (insn, XEXP (links, 0),
- XEXP (nextlinks, 0))) != 0)
- goto retry;
+ {
+ rtx link = XEXP (links, 0);
+
+ /* If the linked insn has been replaced by a note, then there
+ is no point in pursuing this chain any further. */
+ if (GET_CODE (link) == NOTE)
+ break;
+
+ for (nextlinks = LOG_LINKS (link);
+ nextlinks;
+ nextlinks = XEXP (nextlinks, 1))
+ if ((next = try_combine (insn, XEXP (links, 0),
+ XEXP (nextlinks, 0),
+ &new_direct_jump_p)) != 0)
+ goto retry;
+ }
#ifdef HAVE_cc0
/* Try to combine a jump insn that uses CC0
&& GET_CODE (prev) == INSN
&& sets_cc0_p (PATTERN (prev)))
{
- if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
+ if ((next = try_combine (insn, prev,
+ NULL_RTX, &new_direct_jump_p)) != 0)
goto retry;
for (nextlinks = LOG_LINKS (prev); nextlinks;
nextlinks = XEXP (nextlinks, 1))
if ((next = try_combine (insn, prev,
- XEXP (nextlinks, 0))) != 0)
+ XEXP (nextlinks, 0),
+ &new_direct_jump_p)) != 0)
goto retry;
}
&& GET_CODE (PATTERN (insn)) == SET
&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
{
- if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
+ if ((next = try_combine (insn, prev,
+ NULL_RTX, &new_direct_jump_p)) != 0)
goto retry;
for (nextlinks = LOG_LINKS (prev); nextlinks;
nextlinks = XEXP (nextlinks, 1))
if ((next = try_combine (insn, prev,
- XEXP (nextlinks, 0))) != 0)
+ XEXP (nextlinks, 0),
+ &new_direct_jump_p)) != 0)
goto retry;
}
&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
&& GET_CODE (prev) == INSN
&& sets_cc0_p (PATTERN (prev))
- && (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
+ && (next = try_combine (insn, XEXP (links, 0),
+ prev, &new_direct_jump_p)) != 0)
goto retry;
#endif
for (nextlinks = XEXP (links, 1); nextlinks;
nextlinks = XEXP (nextlinks, 1))
if ((next = try_combine (insn, XEXP (links, 0),
- XEXP (nextlinks, 0))) != 0)
+ XEXP (nextlinks, 0),
+ &new_direct_jump_p)) != 0)
goto retry;
if (GET_CODE (insn) != NOTE)
}
}
+ if (need_refresh)
+ {
+ compute_bb_for_insn (get_max_uid ());
+ update_life_info (refresh_blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
+ PROP_DEATH_NOTES);
+ }
+
+ /* Clean up. */
+ sbitmap_free (refresh_blocks);
+ free (reg_nonzero_bits);
+ free (reg_sign_bit_copies);
+ free (reg_last_death);
+ free (reg_last_set);
+ free (reg_last_set_value);
+ free (reg_last_set_table_tick);
+ free (reg_last_set_label);
+ free (reg_last_set_invalid);
+ free (reg_last_set_mode);
+ free (reg_last_set_nonzero_bits);
+ free (reg_last_set_sign_bit_copies);
+ free (uid_cuid);
+
+ {
+ struct undo *undo, *next;
+ for (undo = undobuf.frees; undo; undo = next)
+ {
+ next = undo->next;
+ free (undo);
+ }
+ undobuf.frees = 0;
+ }
+
total_attempts += combine_attempts;
total_merges += combine_merges;
total_extras += combine_extras;
/* Make recognizer allow volatile MEMs again. */
init_recog ();
+
+ return new_direct_jump_p;
}
/* Wipe the reg_last_xxx arrays in preparation for another pass. */
static void
init_reg_last_arrays ()
{
- int nregs = combine_max_regno;
+ unsigned int nregs = combine_max_regno;
bzero ((char *) reg_last_death, nregs * sizeof (rtx));
bzero ((char *) reg_last_set, nregs * sizeof (rtx));
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
- int regno;
+ unsigned int regno;
rtx reg;
enum machine_mode mode;
int unsignedp;
rtx first = get_insns ();
+#ifndef OUTGOING_REGNO
+#define OUTGOING_REGNO(N) N
+#endif
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if (FUNCTION_ARG_REGNO_P (regno)
+ /* Check whether this register can hold an incoming pointer
+ argument. FUNCTION_ARG_REGNO_P tests outgoing register
+ numbers, so translate if necessary due to register windows. */
+ if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
{
record_value_for_reg
by any set of X. */
static void
-set_nonzero_bits_and_sign_copies (x, set)
+set_nonzero_bits_and_sign_copies (x, set, data)
rtx x;
rtx set;
+ void *data ATTRIBUTE_UNUSED;
{
- int num;
+ unsigned int num;
if (GET_CODE (x) == REG
&& REGNO (x) >= FIRST_PSEUDO_REGISTER
{
rtx i3pat = PATTERN (i3);
int i = XVECLEN (i3pat, 0) - 1;
- int regno = REGNO (XEXP (elt, 0));
+ unsigned int regno = REGNO (XEXP (elt, 0));
+
do
{
rtx i3elt = XVECEXP (i3pat, 0, i);
+
if (GET_CODE (i3elt) == USE
&& GET_CODE (XEXP (i3elt, 0)) == REG
&& (REGNO (XEXP (i3elt, 0)) == regno
return 1;
}
\f
+/* Return 1 if X is an arithmetic expression that contains a multiplication
+ or division. We don't count multiplications by powers of two here. */
+
+static int
+contains_muldiv (x)
+ rtx x;
+{
+ switch (GET_CODE (x))
+ {
+ case MOD: case DIV: case UMOD: case UDIV:
+ return 1;
+
+ case MULT:
+ return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
+ && exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
+ default:
+ switch (GET_RTX_CLASS (GET_CODE (x)))
+ {
+ case 'c': case '<': case '2':
+ return contains_muldiv (XEXP (x, 0))
+ || contains_muldiv (XEXP (x, 1));
+
+ case '1':
+ return contains_muldiv (XEXP (x, 0));
+
+ default:
+ return 0;
+ }
+ }
+}
+\f
/* Try to combine the insns I1 and I2 into I3.
Here I1 and I2 appear earlier than I3.
I1 can be zero; then we combine just I2 into I3.
Return 0 if the combination does not work. Then nothing is changed.
If we did the combination, return the insn at which combine should
- resume scanning. */
+ resume scanning.
+
+ Set NEW_DIRECT_JUMP_P to a non-zero value if try_combine creates a
+ new direct jump instruction. */
static rtx
-try_combine (i3, i2, i1)
+try_combine (i3, i2, i1, new_direct_jump_p)
register rtx i3, i2, i1;
+ register int *new_direct_jump_p;
{
- /* New patterns for I3 and I3, respectively. */
+ /* New patterns for I3 and I2, respectively. */
rtx newpat, newi2pat = 0;
/* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
int added_sets_1, added_sets_2;
/* Nonzero is I2's body now appears in I3. */
int i2_is_used;
/* INSN_CODEs for new I3, new I2, and user of condition code. */
- int insn_code_number, i2_code_number, other_code_number;
+ int insn_code_number, i2_code_number = 0, other_code_number = 0;
/* Contains I3 if the destination of I3 is used in its source, which means
that the old life of I3 is being killed. If that usage is placed into
I2 and not in I3, a REG_DEAD note must be made. */
return 0;
combine_attempts++;
-
- undobuf.undos = undobuf.previous_undos = 0;
undobuf.other_insn = 0;
/* Save the current high-water-mark so we can free storage if we didn't
if (i == XVECLEN (p2, 0))
for (i = 0; i < XVECLEN (p2, 0); i++)
- if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
+ if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
+ || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
+ && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
{
combine_merges++;
}
}
+ /* If I2 is setting a double-word pseudo to a constant and I3 is setting
+ one of those words to another constant, merge them by making a new
+ constant. */
+ if (i1 == 0
+ && (temp = single_set (i2)) != 0
+ && (GET_CODE (SET_SRC (temp)) == CONST_INT
+ || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
+ && GET_CODE (SET_DEST (temp)) == REG
+ && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
+ && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
+ && GET_CODE (PATTERN (i3)) == SET
+ && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
+ && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
+ && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
+ && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
+ && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
+ {
+ HOST_WIDE_INT lo, hi;
+
+ if (GET_CODE (SET_SRC (temp)) == CONST_INT)
+ lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
+ else
+ {
+ lo = CONST_DOUBLE_LOW (SET_SRC (temp));
+ hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
+ }
+
+ if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
+ lo = INTVAL (SET_SRC (PATTERN (i3)));
+ else
+ hi = INTVAL (SET_SRC (PATTERN (i3)));
+
+ combine_merges++;
+ subst_insn = i3;
+ subst_low_cuid = INSN_CUID (i2);
+ added_sets_2 = added_sets_1 = 0;
+ i2dest = SET_DEST (temp);
+
+ SUBST (SET_SRC (temp),
+ immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
+
+ newpat = PATTERN (i2);
+ i3_subst_into_i2 = 1;
+ goto validate_replacement;
+ }
+
#ifndef HAVE_cc0
/* If we have no I1 and I2 looks like:
(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
i2src, const0_rtx))
!= GET_MODE (SET_DEST (newpat))))
{
- int regno = REGNO (SET_DEST (newpat));
+ unsigned int regno = REGNO (SET_DEST (newpat));
rtx new_dest = gen_rtx_REG (compare_mode, regno);
if (regno < FIRST_PSEUDO_REGISTER
? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
if (added_sets_2)
- {
- /* If there is no I1, use I2's body as is. We used to also not do
- the subst call below if I2 was substituted into I3,
- but that could lose a simplification. */
- if (i1 == 0)
- XVECEXP (newpat, 0, --total_sets) = i2pat;
- else
- /* See comment where i2pat is assigned. */
- XVECEXP (newpat, 0, --total_sets)
- = subst (i2pat, i1dest, i1src, 0, 0);
- }
+ {
+ /* If there is no I1, use I2's body as is. We used to also not do
+ the subst call below if I2 was substituted into I3,
+ but that could lose a simplification. */
+ if (i1 == 0)
+ XVECEXP (newpat, 0, --total_sets) = i2pat;
+ else
+ /* See comment where i2pat is assigned. */
+ XVECEXP (newpat, 0, --total_sets)
+ = subst (i2pat, i1dest, i1src, 0, 0);
+ }
}
/* We come here when we are replacing a destination in I2 with the
|| (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
&& ! REG_USERVAR_P (i2dest))))
ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
- REGNO (i2dest));
-
- m_split = split_insns
- (gen_rtx_PARALLEL (VOIDmode,
- gen_rtvec (2, newpat,
- gen_rtx_CLOBBER (VOIDmode,
- ni2dest))),
- i3);
+ REGNO (i2dest));
+
+ m_split = split_insns (gen_rtx_PARALLEL
+ (VOIDmode,
+ gen_rtvec (2, newpat,
+ gen_rtx_CLOBBER (VOIDmode,
+ ni2dest))),
+ i3);
}
if (m_split && GET_CODE (m_split) == SEQUENCE
&& ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
XVECEXP (newpat, 0, 0))
&& ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
- XVECEXP (newpat, 0, 1)))
+ XVECEXP (newpat, 0, 1))
+ && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
+ && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
{
/* Normally, it doesn't matter which of the two is done first,
but it does if one references cc0. In that case, it has to
distribute_notes (new_other_notes, undobuf.other_insn,
undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
}
+#ifdef HAVE_cc0
+ /* If I2 is the CC0 setter and I3 is the CC0 user, then check whether
+ they are adjacent to each other or not. */
+ {
+ rtx p = prev_nonnote_insn (i3);
+ if (p && p != i2 && GET_CODE (p) == INSN && newi2pat && sets_cc0_p (newi2pat))
+ {
+ undo_all ();
+ return 0;
+ }
+ }
+#endif
/* We now know that we can do this combination. Merge the insns and
update the status of registers and LOG_LINKS. */
rtx i3notes, i2notes, i1notes = 0;
rtx i3links, i2links, i1links = 0;
rtx midnotes = 0;
- register int regno;
+ unsigned int regno;
/* Compute which registers we expect to eliminate. newi2pat may be setting
either i3dest or i2dest, so we must check it. Also, i1dest may be the
same as i3dest, in which case newi2pat may be setting i1dest. */
actually came from I3, so that REG_UNUSED notes from I2 will be
properly handled. */
- if (i3_subst_into_i2)
+ if (i3_subst_into_i2 && GET_CODE (PATTERN (i2)) == PARALLEL)
{
- for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
- if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
- && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
- && ! find_reg_note (i2, REG_UNUSED,
- SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
- for (temp = NEXT_INSN (i2);
- temp && (this_basic_block == n_basic_blocks - 1
- || BLOCK_HEAD (this_basic_block) != temp);
- temp = NEXT_INSN (temp))
- if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
- for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
- if (XEXP (link, 0) == i2)
- XEXP (link, 0) = i3;
+ if (GET_CODE (PATTERN (i2)) == PARALLEL)
+ {
+ for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
+ if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
+ && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
+ && ! find_reg_note (i2, REG_UNUSED,
+ SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
+ for (temp = NEXT_INSN (i2);
+ temp && (this_basic_block == n_basic_blocks - 1
+ || BLOCK_HEAD (this_basic_block) != temp);
+ temp = NEXT_INSN (temp))
+ if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
+ for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
+ if (XEXP (link, 0) == i2)
+ XEXP (link, 0) = i3;
+ }
if (i3notes)
{
{
regno = REGNO (i2dest);
REG_N_SETS (regno)--;
- if (REG_N_SETS (regno) == 0
- && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
- regno))
- REG_N_REFS (regno) = 0;
}
}
regno = REGNO (i1dest);
if (! added_sets_1 && ! i1dest_in_i1src)
- {
- REG_N_SETS (regno)--;
- if (REG_N_SETS (regno) == 0
- && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
- regno))
- REG_N_REFS (regno) = 0;
- }
+ REG_N_SETS (regno)--;
}
/* Update reg_nonzero_bits et al for any changes that may have been made
- to this insn. */
-
- note_stores (newpat, set_nonzero_bits_and_sign_copies);
+ to this insn. The order of the set_nonzero_bits_and_sign_copies calls is
+ important, because newi2pat can affect the nonzero_bits of newpat. */
if (newi2pat)
- note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
+ note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
+ note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
+
+ /* Set new_direct_jump_p if a new return or simple jump instruction
+ has been created.
- /* If I3 is now an unconditional jump, ensure that it has a
+ If I3 is now an unconditional jump, ensure that it has a
BARRIER following it since it may have initially been a
conditional jump. It may also be the last nonnote insn. */
+
+ if (GET_CODE (newpat) == RETURN || any_uncondjump_p (i3))
+ {
+ *new_direct_jump_p = 1;
- if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
- && ((temp = next_nonnote_insn (i3)) == NULL_RTX
- || GET_CODE (temp) != BARRIER))
- emit_barrier_after (i3);
+ if ((temp = next_nonnote_insn (i3)) == NULL_RTX
+ || GET_CODE (temp) != BARRIER)
+ emit_barrier_after (i3);
+ }
}
combine_successes++;
+ undo_commit ();
/* Clear this here, so that subsequent get_last_value calls are not
affected. */
affected. */
subst_prev_insn = NULL_RTX;
}
+
+/* We've committed to accepting the changes we made. Move all
+ of the undos to the free list. */
+
+static void
+undo_commit ()
+{
+ struct undo *undo, *next;
+
+ for (undo = undobuf.undos; undo; undo = next)
+ {
+ next = undo->next;
+ undo->next = undobuf.frees;
+ undobuf.frees = undo;
+ }
+ undobuf.undos = undobuf.previous_undos = 0;
+}
+
\f
/* Find the innermost point within the rtx at LOC, possibly LOC itself,
where we have an arithmetic expression and return that point. LOC will
rtx x = *loc;
enum rtx_code code = GET_CODE (x);
rtx *split;
- int len = 0, pos, unsignedp;
- rtx inner;
+ unsigned HOST_WIDE_INT len = 0;
+ HOST_WIDE_INT pos = 0;
+ int unsignedp = 0;
+ rtx inner = NULL_RTX;
/* First special-case some codes. */
switch (code)
<= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
&& ! side_effects_p (XEXP (SET_DEST (x), 0)))
{
- int pos = INTVAL (XEXP (SET_DEST (x), 2));
- int len = INTVAL (XEXP (SET_DEST (x), 1));
- int src = INTVAL (SET_SRC (x));
+ HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
+ unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
+ unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
rtx dest = XEXP (SET_DEST (x), 0);
enum machine_mode mode = GET_MODE (dest);
unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
if (BITS_BIG_ENDIAN)
pos = GET_MODE_BITSIZE (mode) - len - pos;
- if ((unsigned HOST_WIDE_INT) src == mask)
+ if (src == mask)
SUBST (SET_SRC (x),
gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
else
{
register enum rtx_code code = GET_CODE (x);
enum machine_mode op0_mode = VOIDmode;
- register char *fmt;
+ register const char *fmt;
register int len, i;
rtx new;
)
return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
+#ifdef CLASS_CANNOT_CHANGE_MODE
+ if (code == SUBREG
+ && GET_CODE (to) == REG
+ && REGNO (to) < FIRST_PSEUDO_REGISTER
+ && (TEST_HARD_REG_BIT
+ (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
+ REGNO (to)))
+ && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (to),
+ GET_MODE (x)))
+ return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
+#endif
+
new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
n_occurrences++;
}
/* If X is sufficiently simple, don't bother trying to do anything
with it. */
if (code != CONST_INT && code != REG && code != CLOBBER)
- x = simplify_rtx (x, op0_mode, i == 3, in_dest);
+ x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);
if (GET_CODE (x) == code)
break;
X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
static rtx
-simplify_rtx (x, op0_mode, last, in_dest)
+combine_simplify_rtx (x, op0_mode, last, in_dest)
rtx x;
enum machine_mode op0_mode;
int last;
true = subst (true, pc_rtx, pc_rtx, 0, 0);
false = subst (false, pc_rtx, pc_rtx, 0, 0);
- /* Restarting if we generate a store-flag expression will cause
- us to loop. Just drop through in this case. */
-
- /* If the result values are STORE_FLAG_VALUE and zero, we can
- just make the comparison operation. */
- if (true == const_true_rtx && false == const0_rtx)
- x = gen_binary (cond_code, mode, cond, cop1);
- else if (true == const0_rtx && false == const_true_rtx)
- x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
-
- /* Likewise, we can make the negate of a comparison operation
- if the result values are - STORE_FLAG_VALUE and zero. */
- else if (GET_CODE (true) == CONST_INT
- && INTVAL (true) == - STORE_FLAG_VALUE
- && false == const0_rtx)
- x = gen_unary (NEG, mode, mode,
- gen_binary (cond_code, mode, cond, cop1));
- else if (GET_CODE (false) == CONST_INT
- && INTVAL (false) == - STORE_FLAG_VALUE
- && true == const0_rtx)
- x = gen_unary (NEG, mode, mode,
- gen_binary (reverse_condition (cond_code),
- mode, cond, cop1));
- else
- return gen_rtx_IF_THEN_ELSE (mode,
- gen_binary (cond_code, VOIDmode,
- cond, cop1),
- true, false);
+ /* If true and false are not general_operands, an if_then_else
+ is unlikely to be simpler. */
+ if (general_operand (true, VOIDmode)
+ && general_operand (false, VOIDmode))
+ {
+ /* Restarting if we generate a store-flag expression will cause
+ us to loop. Just drop through in this case. */
+
+ /* If the result values are STORE_FLAG_VALUE and zero, we can
+ just make the comparison operation. */
+ if (true == const_true_rtx && false == const0_rtx)
+ x = gen_binary (cond_code, mode, cond, cop1);
+ else if (true == const0_rtx && false == const_true_rtx)
+ x = gen_binary (reverse_condition (cond_code),
+ mode, cond, cop1);
+
+ /* Likewise, we can make the negate of a comparison operation
+ if the result values are - STORE_FLAG_VALUE and zero. */
+ else if (GET_CODE (true) == CONST_INT
+ && INTVAL (true) == - STORE_FLAG_VALUE
+ && false == const0_rtx)
+ x = gen_unary (NEG, mode, mode,
+ gen_binary (cond_code, mode, cond, cop1));
+ else if (GET_CODE (false) == CONST_INT
+ && INTVAL (false) == - STORE_FLAG_VALUE
+ && true == const0_rtx)
+ x = gen_unary (NEG, mode, mode,
+ gen_binary (reverse_condition (cond_code),
+ mode, cond, cop1));
+ else
+ return gen_rtx_IF_THEN_ELSE (mode,
+ gen_binary (cond_code, VOIDmode,
+ cond, cop1),
+ true, false);
- code = GET_CODE (x);
- op0_mode = VOIDmode;
+ code = GET_CODE (x);
+ op0_mode = VOIDmode;
+ }
}
}
temp = simplify_relational_operation (code, op0_mode,
XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
- if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
- temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
- : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
+ if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
+ {
+ if (temp == const0_rtx)
+ temp = CONST0_RTX (mode);
+ else
+ temp = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE (mode), mode);
+ }
#endif
break;
case 'c':
plus_constant (XEXP (inner, 0),
(SUBREG_WORD (x) * UNITS_PER_WORD
+ endian_offset)));
- RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
MEM_COPY_ATTRIBUTES (x, inner);
return x;
}
since we are saying that the high bits don't matter. */
if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
&& GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
- return SUBREG_REG (x);
+ {
+ if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
+ && (WORDS_BIG_ENDIAN || SUBREG_WORD (x) != 0))
+ return operand_subword (SUBREG_REG (x), SUBREG_WORD (x), 0, mode);
+ return SUBREG_REG (x);
+ }
/* Note that we cannot do any narrowing for non-constants since
we might have been counting on using the fact that some bits were
perform the above simplification. */
if (STORE_FLAG_VALUE == -1
- && XEXP (x, 1) == const1_rtx
&& GET_CODE (XEXP (x, 0)) == ASHIFTRT
+ && XEXP (x, 1) == const1_rtx
&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
&& INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
return SUBREG_REG (XEXP (x, 0));
/* If we know that the value is already truncated, we can
- replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION is
- nonzero for the corresponding modes. */
+ replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
+ is nonzero for the corresponding modes. But don't do this
+ for an (LSHIFTRT (MULT ...)) since this will cause problems
+ with the umulXi3_highpart patterns. */
if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
&& num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
- >= GET_MODE_BITSIZE (mode) + 1)
+ >= GET_MODE_BITSIZE (mode) + 1
+ && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
+ && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
return gen_lowpart_for_combine (mode, XEXP (x, 0));
/* A truncate of a comparison can be replaced with a subreg if
/* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
when c is (const_int (pow2 + 1) / 2) is a sign extension of a
bit-field and can be replaced by either a sign_extend or a
- sign_extract. The `and' may be a zero_extend. */
+ sign_extract. The `and' may be a zero_extend and the two
+ <c>, -<c> constants may be reversed. */
if (GET_CODE (XEXP (x, 0)) == XOR
&& GET_CODE (XEXP (x, 1)) == CONST_INT
&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
&& INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
- && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
+ && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
+ || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
&& ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
&& GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
== ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
|| (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
&& (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
- == i + 1))))
+ == (unsigned int) i + 1))))
return simplify_shift_const
(NULL_RTX, ASHIFTRT, mode,
simplify_shift_const (NULL_RTX, ASHIFT, mode,
if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
&& (nonzero_bits (XEXP (x, 0), mode)
& nonzero_bits (XEXP (x, 1), mode)) == 0)
- return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
+ {
+ /* Try to simplify the expression further. */
+ rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
+ temp = combine_simplify_rtx (tor, mode, last, in_dest);
+
+ /* If we could, great. If not, do not go ahead with the IOR
+ replacement, since PLUS appears in many special purpose
+ address arithmetic instructions. */
+ if (GET_CODE (temp) != CLOBBER && temp != tor)
+ return temp;
+ }
break;
case MINUS:
gen_binary (MULT, mode,
XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
gen_binary (MULT, mode,
- XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
+ XEXP (XEXP (x, 0), 1),
+ copy_rtx (XEXP (x, 1)))));
if (GET_CODE (x) != MULT)
return x;
rtx f = make_compound_operation (false, SET);
rtx cond_op0 = XEXP (cond, 0);
rtx cond_op1 = XEXP (cond, 1);
- enum rtx_code op, extend_op = NIL;
+ enum rtx_code op = NIL, extend_op = NIL;
enum machine_mode m = mode;
- rtx z = 0, c1;
+ rtx z = 0, c1 = NULL_RTX;
if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
|| GET_CODE (t) == IOR || GET_CODE (t) == XOR
low-order bits. */
if (GET_MODE_CLASS (mode) == MODE_INT)
- src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
+ {
+ src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
+ SUBST (SET_SRC (x), src);
+ }
/* If we are setting CC0 or if the source is a COMPARE, look for the use of
the comparison result and try to simplify it unless we already have used
which case we can safely change its mode. */
if (compare_mode != GET_MODE (dest))
{
- int regno = REGNO (dest);
+ unsigned int regno = REGNO (dest);
rtx new_dest = gen_rtx_REG (compare_mode, regno);
if (regno < FIRST_PSEUDO_REGISTER
&& (GET_MODE_SIZE (GET_MODE (src))
< GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
#endif
-#ifdef CLASS_CANNOT_CHANGE_SIZE
+#ifdef CLASS_CANNOT_CHANGE_MODE
&& ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
&& (TEST_HARD_REG_BIT
- (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
+ (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
REGNO (dest)))
- && (GET_MODE_SIZE (GET_MODE (src))
- != GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
+ && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (src),
+ GET_MODE (SUBREG_REG (src))))
#endif
&& (GET_CODE (dest) == REG
|| (GET_CODE (dest) == SUBREG
src = SET_SRC (x);
}
+#ifdef HAVE_conditional_arithmetic
+ /* If we have conditional arithmetic and the operand of a SET is
+ a conditional expression, replace this with an IF_THEN_ELSE.
+ We can either have a conditional expression or a MULT of that expression
+ with a constant. */
+ if ((GET_RTX_CLASS (GET_CODE (src)) == '1'
+ || GET_RTX_CLASS (GET_CODE (src)) == '2'
+ || GET_RTX_CLASS (GET_CODE (src)) == 'c')
+ && (GET_RTX_CLASS (GET_CODE (XEXP (src, 0))) == '<'
+ || (GET_CODE (XEXP (src, 0)) == MULT
+ && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (src, 0), 0))) == '<'
+ && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT)))
+ {
+ rtx cond = XEXP (src, 0);
+ rtx true_val = const1_rtx;
+ rtx false_arm, true_arm;
+
+ if (GET_CODE (cond) == MULT)
+ {
+ true_val = XEXP (cond, 1);
+ cond = XEXP (cond, 0);
+ }
+
+ if (GET_RTX_CLASS (GET_CODE (src)) == '1')
+ {
+ true_arm = gen_unary (GET_CODE (src), GET_MODE (src),
+ GET_MODE (XEXP (src, 0)), true_val);
+ false_arm = gen_unary (GET_CODE (src), GET_MODE (src),
+ GET_MODE (XEXP (src, 0)), const0_rtx);
+ }
+ else
+ {
+ true_arm = gen_binary (GET_CODE (src), GET_MODE (src),
+ true_val, XEXP (src, 1));
+ false_arm = gen_binary (GET_CODE (src), GET_MODE (src),
+ const0_rtx, XEXP (src, 1));
+ }
+
+ /* Canonicalize if true_arm is the simpler one. */
+ if (GET_RTX_CLASS (GET_CODE (true_arm)) == 'o'
+ && GET_RTX_CLASS (GET_CODE (false_arm)) != 'o'
+ && reversible_comparison_p (cond))
+ {
+ rtx temp = true_arm;
+
+ true_arm = false_arm;
+ false_arm = temp;
+
+ cond = gen_rtx_combine (reverse_condition (GET_CODE (cond)),
+ GET_MODE (cond), XEXP (cond, 0),
+ XEXP (cond, 1));
+ }
+
+ src = gen_rtx_combine (IF_THEN_ELSE, GET_MODE (src),
+ gen_rtx_combine (GET_CODE (cond), VOIDmode,
+ XEXP (cond, 0),
+ XEXP (cond, 1)),
+ true_arm, false_arm);
+ SUBST (SET_SRC (x), src);
+ }
+#endif
+
/* If either SRC or DEST is a CLOBBER of (const_int 0), make this
whole thing fail. */
if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
x = apply_distributive_law
(gen_binary (GET_CODE (op0), mode,
gen_binary (AND, mode, XEXP (op0, 0), op1),
- gen_binary (AND, mode, XEXP (op0, 1), op1)));
+ gen_binary (AND, mode, XEXP (op0, 1),
+ copy_rtx (op1))));
if (GET_CODE (x) != AND)
return x;
}
return apply_distributive_law
(gen_binary (GET_CODE (op1), mode,
gen_binary (AND, mode, XEXP (op1, 0), op0),
- gen_binary (AND, mode, XEXP (op1, 1), op0)));
+ gen_binary (AND, mode, XEXP (op1, 1),
+ copy_rtx (op0))));
/* Similarly, taking advantage of the fact that
(and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
return apply_distributive_law
(gen_binary (XOR, mode,
gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
- gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));
+ gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
+ XEXP (op1, 1))));
else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
return apply_distributive_law
(gen_binary (XOR, mode,
gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
- gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
+ gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1))));
break;
case IOR:
x = apply_distributive_law
(gen_binary (AND, mode,
gen_binary (IOR, mode, XEXP (op0, 0), op1),
- gen_binary (IOR, mode, XEXP (op0, 1), op1)));
+ gen_binary (IOR, mode, XEXP (op0, 1),
+ copy_rtx (op1))));
if (GET_CODE (x) != IOR)
return x;
x = apply_distributive_law
(gen_binary (AND, mode,
gen_binary (IOR, mode, XEXP (op1, 0), op0),
- gen_binary (IOR, mode, XEXP (op1, 1), op0)));
+ gen_binary (IOR, mode, XEXP (op1, 1),
+ copy_rtx (op0))));
if (GET_CODE (x) != IOR)
return x;
break;
case XOR:
+ /* If we are XORing two things that have no bits in common,
+ convert them into an IOR. This helps to detect rotation encoded
+ using those methods and possibly other simplifications. */
+
+ if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ && (nonzero_bits (op0, mode)
+ & nonzero_bits (op1, mode)) == 0)
+ return (gen_binary (IOR, mode, op0, op1));
+
/* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
(NOT y). */
&& reversible_comparison_p (op0))
return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
mode, XEXP (op0, 0), XEXP (op0, 1));
+
break;
default:
expand_compound_operation (x)
rtx x;
{
- int pos = 0, len;
+ unsigned HOST_WIDE_INT pos = 0, len;
int unsignedp = 0;
- int modewidth;
+ unsigned int modewidth;
rtx tem;
switch (GET_CODE (x))
default:
return x;
}
+ /* Convert sign extension to zero extension, if we know that the high
+ bit is not set, as this is easier to optimize. It will be converted
+ back to cheaper alternative in make_extraction. */
+ if (GET_CODE (x) == SIGN_EXTEND
+ && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
+ && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
+ & ~ (((unsigned HOST_WIDE_INT)
+ GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
+ >> 1))
+ == 0)))
+ {
+ rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
+ return expand_compound_operation (temp);
+ }
/* We can optimize some special cases of ZERO_EXTEND. */
if (GET_CODE (x) == ZERO_EXTEND)
& ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
return SUBREG_REG (XEXP (x, 0));
- /* If sign extension is cheaper than zero extension, then use it
- if we know that no extraneous bits are set, and that the high
- bit is not set. */
- if (flag_expensive_optimizations
- && ((GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
- && ((nonzero_bits (XEXP (x, 0), GET_MODE (x))
- & ~ (((unsigned HOST_WIDE_INT)
- GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
- >> 1))
- == 0))
- || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
- && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
- <= HOST_BITS_PER_WIDE_INT)
- && (((HOST_WIDE_INT) STORE_FLAG_VALUE
- & ~ (((unsigned HOST_WIDE_INT)
- GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
- >> 1))
- == 0))))
- {
- rtx temp = gen_rtx_SIGN_EXTEND (GET_MODE (x), XEXP (x, 0));
-
- if (rtx_cost (temp, SET) < rtx_cost (x, SET))
- return expand_compound_operation (temp);
- }
}
/* If we reach here, we want to return a pair of shifts. The inner
a such a position. */
modewidth = GET_MODE_BITSIZE (GET_MODE (x));
- if (modewidth >= pos - len)
+ if (modewidth + len >= pos)
tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
GET_MODE (x),
simplify_shift_const (NULL_RTX, ASHIFT,
/* A SUBREG between two modes that occupy the same numbers of words
can be done by moving the SUBREG to the source. */
else if (GET_CODE (SET_DEST (x)) == SUBREG
+ /* We need SUBREGs to compute nonzero_bits properly. */
+ && nonzero_sign_valid
&& (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
== ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
{
x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
- gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
- SET_SRC (x)));
+ gen_lowpart_for_combine
+ (GET_MODE (SUBREG_REG (SET_DEST (x))),
+ SET_SRC (x)));
continue;
}
else
/* Now compute the equivalent expression. Make a copy of INNER
for the SET_DEST in case it is a MEM into which we will substitute;
we don't want shared RTL in that case. */
- x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
- gen_binary (IOR, compute_mode,
- gen_binary (AND, compute_mode,
- gen_unary (NOT, compute_mode,
- compute_mode,
- gen_binary (ASHIFT,
- compute_mode,
- mask, pos)),
- inner),
- gen_binary (ASHIFT, compute_mode,
- gen_binary (AND, compute_mode,
- gen_lowpart_for_combine
- (compute_mode,
- SET_SRC (x)),
- mask),
- pos)));
+ x = gen_rtx_SET
+ (VOIDmode, copy_rtx (inner),
+ gen_binary (IOR, compute_mode,
+ gen_binary (AND, compute_mode,
+ gen_unary (NOT, compute_mode,
+ compute_mode,
+ gen_binary (ASHIFT,
+ compute_mode,
+ mask, pos)),
+ inner),
+ gen_binary (ASHIFT, compute_mode,
+ gen_binary (AND, compute_mode,
+ gen_lowpart_for_combine
+ (compute_mode, SET_SRC (x)),
+ mask),
+ pos)));
}
return x;
unsignedp, in_dest, in_compare)
enum machine_mode mode;
rtx inner;
- int pos;
+ HOST_WIDE_INT pos;
rtx pos_rtx;
- int len;
+ unsigned HOST_WIDE_INT len;
int unsignedp;
int in_dest, in_compare;
{
int spans_byte = 0;
rtx new = 0;
rtx orig_pos_rtx = pos_rtx;
- int orig_pos;
+ HOST_WIDE_INT orig_pos;
/* Get some information about INNER and get the innermost object. */
if (GET_CODE (inner) == USE)
offset = pos / BITS_PER_UNIT;
new = gen_rtx_MEM (tmode, plus_constant (XEXP (inner, 0), offset));
- RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
MEM_COPY_ATTRIBUTES (new, inner);
}
else if (GET_CODE (inner) == REG)
if (tmode != inner_mode)
new = gen_rtx_SUBREG (tmode, inner,
(WORDS_BIG_ENDIAN
- && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
+ && (GET_MODE_SIZE (inner_mode)
+ > UNITS_PER_WORD)
? (((GET_MODE_SIZE (inner_mode)
- GET_MODE_SIZE (tmode))
/ UNITS_PER_WORD)
new = force_to_mode (inner, tmode,
len >= HOST_BITS_PER_WIDE_INT
? GET_MODE_MASK (tmode)
- : ((HOST_WIDE_INT) 1 << len) - 1,
+ : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
NULL_RTX, 0);
/* If this extraction is going into the destination of a SET,
? gen_rtx_CLOBBER (tmode, const0_rtx)
: gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
+ if (mode == tmode)
+ return new;
+
+ /* If we know that no extraneous bits are set, and that the high
+ bit is not set, convert the extraction to the cheaper of
+ sign and zero extension, that are equivalent in these cases. */
+ if (flag_expensive_optimizations
+ && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
+ && ((nonzero_bits (new, tmode)
+ & ~ (((unsigned HOST_WIDE_INT)
+ GET_MODE_MASK (tmode))
+ >> 1))
+ == 0)))
+ {
+ rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
+ rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
+
+ /* Prefer ZERO_EXTENSION, since it gives more information to
+ backends. */
+ if (rtx_cost (temp, SET) < rtx_cost (temp1, SET))
+ return temp;
+ return temp1;
+ }
+
/* Otherwise, sign- or zero-extend unless we already are in the
proper mode. */
- return (mode == tmode ? new
- : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
- mode, new));
+ return (gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
+ mode, new));
}
/* Unless this is a COMPARE or we have a funny memory reference,
&& ! in_compare && ! spans_byte && unsignedp)
return 0;
- /* Unless we are allowed to span bytes, reject this if we would be
- spanning bytes or if the position is not a constant and the length
- is not 1. In all other cases, we would only be going outside
- out object in cases when an original shift would have been
+ /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
+ we would be spanning bytes or if the position is not a constant and the
+ length is not 1. In all other cases, we would only be going outside
+ our object in cases when an original shift would have been
undefined. */
- if (! spans_byte
+ if (! spans_byte && GET_CODE (inner) == MEM
&& ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
|| (pos_rtx != 0 && len != 1)))
return 0;
if (in_dest)
{
wanted_inner_reg_mode
- = (insn_operand_mode[(int) CODE_FOR_insv][0] == VOIDmode
- ? word_mode
- : insn_operand_mode[(int) CODE_FOR_insv][0]);
- pos_mode = (insn_operand_mode[(int) CODE_FOR_insv][2] == VOIDmode
- ? word_mode : insn_operand_mode[(int) CODE_FOR_insv][2]);
- extraction_mode = (insn_operand_mode[(int) CODE_FOR_insv][3] == VOIDmode
- ? word_mode
- : insn_operand_mode[(int) CODE_FOR_insv][3]);
+ = insn_data[(int) CODE_FOR_insv].operand[0].mode;
+ if (wanted_inner_reg_mode == VOIDmode)
+ wanted_inner_reg_mode = word_mode;
+
+ pos_mode = insn_data[(int) CODE_FOR_insv].operand[2].mode;
+ if (pos_mode == VOIDmode)
+ pos_mode = word_mode;
+
+ extraction_mode = insn_data[(int) CODE_FOR_insv].operand[3].mode;
+ if (extraction_mode == VOIDmode)
+ extraction_mode = word_mode;
}
#endif
if (! in_dest && unsignedp)
{
wanted_inner_reg_mode
- = (insn_operand_mode[(int) CODE_FOR_extzv][1] == VOIDmode
- ? word_mode
- : insn_operand_mode[(int) CODE_FOR_extzv][1]);
- pos_mode = (insn_operand_mode[(int) CODE_FOR_extzv][3] == VOIDmode
- ? word_mode : insn_operand_mode[(int) CODE_FOR_extzv][3]);
- extraction_mode = (insn_operand_mode[(int) CODE_FOR_extzv][0] == VOIDmode
- ? word_mode
- : insn_operand_mode[(int) CODE_FOR_extzv][0]);
+ = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
+ if (wanted_inner_reg_mode == VOIDmode)
+ wanted_inner_reg_mode = word_mode;
+
+ pos_mode = insn_data[(int) CODE_FOR_extzv].operand[3].mode;
+ if (pos_mode == VOIDmode)
+ pos_mode = word_mode;
+
+ extraction_mode = insn_data[(int) CODE_FOR_extzv].operand[0].mode;
+ if (extraction_mode == VOIDmode)
+ extraction_mode = word_mode;
}
#endif
if (! in_dest && ! unsignedp)
{
wanted_inner_reg_mode
- = (insn_operand_mode[(int) CODE_FOR_extv][1] == VOIDmode
- ? word_mode
- : insn_operand_mode[(int) CODE_FOR_extv][1]);
- pos_mode = (insn_operand_mode[(int) CODE_FOR_extv][3] == VOIDmode
- ? word_mode : insn_operand_mode[(int) CODE_FOR_extv][3]);
- extraction_mode = (insn_operand_mode[(int) CODE_FOR_extv][0] == VOIDmode
- ? word_mode
- : insn_operand_mode[(int) CODE_FOR_extv][0]);
+ = insn_data[(int) CODE_FOR_extv].operand[1].mode;
+ if (wanted_inner_reg_mode == VOIDmode)
+ wanted_inner_reg_mode = word_mode;
+
+ pos_mode = insn_data[(int) CODE_FOR_extv].operand[3].mode;
+ if (pos_mode == VOIDmode)
+ pos_mode = word_mode;
+
+ extraction_mode = insn_data[(int) CODE_FOR_extv].operand[0].mode;
+ if (extraction_mode == VOIDmode)
+ extraction_mode = word_mode;
}
#endif
{
rtx newmem = gen_rtx_MEM (wanted_inner_mode,
plus_constant (XEXP (inner, 0), offset));
- RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
+
MEM_COPY_ATTRIBUTES (newmem, inner);
inner = newmem;
}
pos_rtx
|| len + orig_pos >= HOST_BITS_PER_WIDE_INT
? GET_MODE_MASK (wanted_inner_mode)
- : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
+ : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
+ << orig_pos),
NULL_RTX, 0);
}
have to zero extend. Otherwise, we can just use a SUBREG. */
if (pos_rtx != 0
&& GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
- pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
+ {
+ rtx temp = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
+
+ /* If we know that no extraneous bits are set, and that the high
+ bit is not set, convert the extraction to the cheaper one - either
+ SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
+ cases. */
+ if (flag_expensive_optimizations
+ && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
+ && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
+ & ~ (((unsigned HOST_WIDE_INT)
+ GET_MODE_MASK (GET_MODE (pos_rtx)))
+ >> 1))
+ == 0)))
+ {
+ rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
+
+ /* Prefer ZERO_EXTENSION, since it gives more information to
+ backends. */
+ if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
+ temp = temp1;
+ }
+ pos_rtx = temp;
+ }
else if (pos_rtx != 0
&& GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
int i;
rtx new = 0;
rtx tem;
- char *fmt;
+ const char *fmt;
/* Select the code to be used in recursive calls. Once we are inside an
address, we stay there. If we have a comparison, set to COMPARE,
static int
get_pos_from_mask (m, plen)
unsigned HOST_WIDE_INT m;
- int *plen;
+ unsigned HOST_WIDE_INT *plen;
{
/* Get the bit number of the first 1 bit from the right, -1 if none. */
int pos = exact_log2 (m & - m);
+ int len;
if (pos < 0)
return -1;
/* Now shift off the low-order zero bits and see if we have a power of
two minus 1. */
- *plen = exact_log2 ((m >> pos) + 1);
+ len = exact_log2 ((m >> pos) + 1);
- if (*plen <= 0)
+ if (len <= 0)
return -1;
+ *plen = len;
return pos;
}
\f
if (op_mode)
fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
? GET_MODE_MASK (op_mode)
- : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
+ : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
+ - 1));
else
fuller_mask = ~ (HOST_WIDE_INT) 0;
This may eliminate that PLUS and, later, the AND. */
{
- int width = GET_MODE_BITSIZE (mode);
+ unsigned int width = GET_MODE_BITSIZE (mode);
unsigned HOST_WIDE_INT smask = mask;
/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
/* ... fall through ... */
- case MINUS:
case MULT:
/* For PLUS, MINUS and MULT, we need any bits less significant than the
most significant bit in MASK since carries from those bits will
mask = fuller_mask;
goto binop;
+ case MINUS:
+ /* If X is (minus C Y) where C's least set bit is larger than any bit
+ in the mask, then we may replace with (neg Y). */
+ if (GET_CODE (XEXP (x, 0)) == CONST_INT
+ && (INTVAL (XEXP (x, 0)) & -INTVAL (XEXP (x, 0))) > mask)
+ {
+ x = gen_unary (NEG, GET_MODE (x), GET_MODE (x), XEXP (x, 1));
+ return force_to_mode (x, mode, mask, reg, next_select);
+ }
+
+ /* Similarly, if C contains every bit in the mask, then we may
+ replace with (not Y). */
+ if (GET_CODE (XEXP (x, 0)) == CONST_INT
+ && (INTVAL (XEXP (x, 0)) | mask) == INTVAL (XEXP (x, 0)))
+ {
+ x = gen_unary (NOT, GET_MODE (x), GET_MODE (x), XEXP (x, 1));
+ return force_to_mode (x, mode, mask, reg, next_select);
+ }
+
+ mask = fuller_mask;
+ goto binop;
+
case IOR:
case XOR:
/* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
&& GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
{
rtx inner = XEXP (x, 0);
+ unsigned HOST_WIDE_INT inner_mask;
/* Select the mask of the bits we need for the shift operand. */
- mask <<= INTVAL (XEXP (x, 1));
+ inner_mask = mask << INTVAL (XEXP (x, 1));
/* We can only change the mode of the shift if we can do arithmetic
- in the mode of the shift and MASK is no wider than the width of
- OP_MODE. */
+ in the mode of the shift and INNER_MASK is no wider than the
+ width of OP_MODE. */
if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
- || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
+ || (inner_mask & ~ GET_MODE_MASK (op_mode)) != 0)
op_mode = GET_MODE (x);
- inner = force_to_mode (inner, op_mode, mask, reg, next_select);
+ inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
if (GET_CODE (x) == LSHIFTRT
&& GET_CODE (XEXP (x, 1)) == CONST_INT
+ /* The shift puts one of the sign bit copies in the least significant
+ bit. */
&& ((INTVAL (XEXP (x, 1))
+ num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
>= GET_MODE_BITSIZE (GET_MODE (x)))
&& exact_log2 (mask + 1) >= 0
- && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
+ /* Number of bits left after the shift must be more than the mask
+ needs. */
+ && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
+ <= GET_MODE_BITSIZE (GET_MODE (x)))
+ /* Must be more sign bit copies than the mask needs. */
+ && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
>= exact_log2 (mask + 1)))
x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
- exact_log2 (mask + 1)));
- break;
+
+ goto shiftrt;
case ASHIFTRT:
/* If we are just looking for the sign bit, we don't need this shift at
if (mask == 1)
x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
- /* If this is a sign-extension operation that just affects bits
+ shiftrt:
+
+ /* If this is a zero- or sign-extension operation that just affects bits
we don't care about, remove it. Be sure the call above returned
something that is still a shift. */
{
enum machine_mode mode = GET_MODE (x);
enum rtx_code code = GET_CODE (x);
- int size = GET_MODE_BITSIZE (mode);
+ unsigned int size = GET_MODE_BITSIZE (mode);
rtx cond0, cond1, true0, true1, false0, false1;
unsigned HOST_WIDE_INT nz;
+ /* If we are comparing a value against zero, we are done. */
+ if ((code == NE || code == EQ)
+ && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
+ {
+ *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
+ *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
+ return XEXP (x, 0);
+ }
+
/* If this is a unary operation whose operand has one of two values, apply
our opcode to compute those values. */
- if (GET_RTX_CLASS (code) == '1'
- && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
+ else if (GET_RTX_CLASS (code) == '1'
+ && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
{
*ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
*pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
&& 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
&true0, &false0)))
{
+ if ((GET_CODE (SUBREG_REG (x)) == REG
+ || GET_CODE (SUBREG_REG (x)) == MEM
+ || CONSTANT_P (SUBREG_REG (x)))
+ && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
+ && (WORDS_BIG_ENDIAN || SUBREG_WORD (x) != 0))
+ {
+ true0 = operand_subword (true0, SUBREG_WORD (x), 0, mode);
+ false0 = operand_subword (false0, SUBREG_WORD (x), 0, mode);
+ }
*ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
*pfalse
= force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
{
enum rtx_code code = GET_CODE (x);
rtx temp;
- char *fmt;
+ const char *fmt;
int i, j;
if (side_effects_p (x))
if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
{
if (GET_RTX_CLASS (code) == '<')
- return (comparison_dominates_p (cond, code) ? const_true_rtx
- : (comparison_dominates_p (cond,
- reverse_condition (code))
- ? const0_rtx : x));
+ {
+ if (comparison_dominates_p (cond, code))
+ return const_true_rtx;
+ code = reverse_condition (code);
+ if (code != UNKNOWN
+ && comparison_dominates_p (cond, code))
+ return const0_rtx;
+ else
+ return x;
+ }
else if (code == SMAX || code == SMIN
|| code == UMIN || code == UMAX)
{
rtx assign;
rtx rhs, lhs;
HOST_WIDE_INT c1;
- int pos, len;
+ HOST_WIDE_INT pos;
+ unsigned HOST_WIDE_INT len;
rtx other;
enum machine_mode mode;
mode,
GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
? GET_MODE_MASK (mode)
- : ((HOST_WIDE_INT) 1 << len) - 1,
+ : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
dest, 0);
return gen_rtx_combine (SET, VOIDmode, assign, src);
unsigned HOST_WIDE_INT constop;
{
unsigned HOST_WIDE_INT nonzero;
- int width = GET_MODE_BITSIZE (mode);
int i;
/* Simplify VAROP knowing that we will be only looking at some of the
unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
unsigned HOST_WIDE_INT inner_nz;
enum rtx_code code;
- int mode_width = GET_MODE_BITSIZE (mode);
+ unsigned int mode_width = GET_MODE_BITSIZE (mode);
rtx tem;
/* For floating-point values, assume all bits are needed. */
int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
#ifdef PUSH_ROUNDING
- if (REGNO (x) == STACK_POINTER_REGNUM)
+ if (REGNO (x) == STACK_POINTER_REGNUM && PUSH_ARGS)
sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
#endif
if (reg_last_set_value[REGNO (x)] != 0
&& reg_last_set_mode[REGNO (x)] == mode
- && (REG_N_SETS (REGNO (x)) == 1
- || reg_last_set_label[REGNO (x)] == label_tick)
+ && (reg_last_set_label[REGNO (x)] == label_tick
+ || (REGNO (x) >= FIRST_PSEUDO_REGISTER
+ && REG_N_SETS (REGNO (x)) == 1
+ && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
+ REGNO (x))))
&& INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
return reg_last_set_nonzero_bits[REGNO (x)];
= (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
HOST_WIDE_INT op1_maybe_minusp
= (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
- int result_width = mode_width;
+ unsigned int result_width = mode_width;
int result_low = 0;
switch (code)
/* If this is a typical RISC machine, we only have to worry
about the way loads are extended. */
if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
- ? (nonzero
- & (1L << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1)))
+ ? (((nonzero
+ & (((unsigned HOST_WIDE_INT) 1
+ << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
+ != 0))
: LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
#endif
{
&& INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
{
enum machine_mode inner_mode = GET_MODE (x);
- int width = GET_MODE_BITSIZE (inner_mode);
+ unsigned int width = GET_MODE_BITSIZE (inner_mode);
int count = INTVAL (XEXP (x, 1));
unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
VOIDmode, X will be used in its own mode. The returned value will always
be between 1 and the number of bits in MODE. */
-static int
+static unsigned int
num_sign_bit_copies (x, mode)
rtx x;
enum machine_mode mode;
{
enum rtx_code code = GET_CODE (x);
- int bitwidth;
+ unsigned int bitwidth;
int num0, num1, result;
unsigned HOST_WIDE_INT nonzero;
rtx tem;
/* For a smaller object, just ignore the high bits. */
if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
- return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
- - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
+ {
+ num0 = num_sign_bit_copies (x, GET_MODE (x));
+ return MAX (1,
+ num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
+ }
if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
{
if (reg_last_set_value[REGNO (x)] != 0
&& reg_last_set_mode[REGNO (x)] == mode
- && (REG_N_SETS (REGNO (x)) == 1
- || reg_last_set_label[REGNO (x)] == label_tick)
+ && (reg_last_set_label[REGNO (x)] == label_tick
+ || (REGNO (x) >= FIRST_PSEUDO_REGISTER
+ && REG_N_SETS (REGNO (x)) == 1
+ && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
+ REGNO (x))))
&& INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
return reg_last_set_sign_bit_copies[REGNO (x)];
#ifdef LOAD_EXTEND_OP
/* Some RISC machines sign-extend all loads of smaller than a word. */
if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
- return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
+ return MAX (1, ((int) bitwidth
+ - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
#endif
break;
high-order bits are known to be sign bit copies. */
if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
- return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
- num_sign_bit_copies (SUBREG_REG (x), mode));
-
+ {
+ num0 = num_sign_bit_copies (SUBREG_REG (x), mode);
+ return MAX ((int) bitwidth
+ - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
+ num0);
+ }
+
/* For a smaller object, just ignore the high bits. */
if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
{
num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
return MAX (1, (num0
- - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
- - bitwidth)));
+ - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
+ - bitwidth)));
}
#ifdef WORD_REGISTER_OPERATIONS
case SIGN_EXTRACT:
if (GET_CODE (XEXP (x, 1)) == CONST_INT)
- return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
+ return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
break;
case SIGN_EXTEND:
case TRUNCATE:
/* For a smaller object, just ignore the high bits. */
num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
- return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
- - bitwidth)));
+ return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
+ - bitwidth)));
case NOT:
return num_sign_bit_copies (XEXP (x, 0), mode);
{
num0 = num_sign_bit_copies (XEXP (x, 0), mode);
return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
- : bitwidth - INTVAL (XEXP (x, 1))));
+ : (int) bitwidth - INTVAL (XEXP (x, 1))));
}
break;
This function will always return 0 unless called during combine, which
implies that it must be called from a define_split. */
-int
+unsigned int
extended_count (x, mode, unsignedp)
rtx x;
enum machine_mode mode;
return (unsignedp
? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
- && (GET_MODE_BITSIZE (mode) - 1
- - floor_log2 (nonzero_bits (x, mode))))
+ ? (GET_MODE_BITSIZE (mode) - 1
+ - floor_log2 (nonzero_bits (x, mode)))
+ : 0)
: num_sign_bit_copies (x, mode) - 1);
}
\f
{
enum rtx_code op0 = *pop0;
HOST_WIDE_INT const0 = *pconst0;
- int width = GET_MODE_BITSIZE (mode);
const0 &= GET_MODE_MASK (mode);
const1 &= GET_MODE_MASK (mode);
are ASHIFTRT and ROTATE, which are always done in their original mode, */
static rtx
-simplify_shift_const (x, code, result_mode, varop, count)
+simplify_shift_const (x, code, result_mode, varop, input_count)
rtx x;
enum rtx_code code;
enum machine_mode result_mode;
rtx varop;
- int count;
+ int input_count;
{
enum rtx_code orig_code = code;
- int orig_count = count;
+ int orig_count = input_count;
+ unsigned int count;
+ int signed_count;
enum machine_mode mode = result_mode;
enum machine_mode shift_mode, tmode;
- int mode_words
+ unsigned int mode_words
= (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
/* We form (outer_op (code varop count) (outer_const)). */
enum rtx_code outer_op = NIL;
/* If we were given an invalid count, don't do anything except exactly
what was requested. */
- if (count < 0 || count > GET_MODE_BITSIZE (mode))
+ if (input_count < 0 || input_count > (int) GET_MODE_BITSIZE (mode))
{
if (x)
return x;
- return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (count));
+ return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (input_count));
}
+ count = input_count;
+
/* Unless one of the branches of the `if' in this loop does a `continue',
we will `break' the loop after the `if'. */
}
}
- /* Negative counts are invalid and should not have been made (a
- programmer-specified negative count should have been handled
- above). */
- else if (count < 0)
- abort ();
-
/* An arithmetic right shift of a quantity known to be -1 or 0
is a no-op. */
if (code == ASHIFTRT
new = gen_rtx_MEM (tmode,
plus_constant (XEXP (varop, 0),
count / BITS_PER_UNIT));
- RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
+
MEM_COPY_ATTRIBUTES (new, varop);
varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
: ZERO_EXTEND, mode, new);
if (GET_CODE (XEXP (varop, 1)) == CONST_INT
&& exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
{
- varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
- GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));;
+ varop
+ = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
+ GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
continue;
}
break;
if (GET_CODE (XEXP (varop, 1)) == CONST_INT
&& exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
{
- varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
- GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
+ varop
+ = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
+ GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
continue;
}
break;
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
{
enum rtx_code first_code = GET_CODE (varop);
- int first_count = INTVAL (XEXP (varop, 1));
+ unsigned int first_count = INTVAL (XEXP (varop, 1));
unsigned HOST_WIDE_INT mask;
rtx mask_rtx;
&& (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
> first_count))
{
- count -= first_count;
- if (count < 0)
- count = - count, code = ASHIFT;
varop = XEXP (varop, 0);
+
+ signed_count = count - first_count;
+ if (signed_count < 0)
+ count = - signed_count, code = ASHIFT;
+ else
+ count = signed_count;
+
continue;
}
/* If the shifts are in the same direction, we add the
counts. Otherwise, we subtract them. */
+ signed_count = count;
if ((code == ASHIFTRT || code == LSHIFTRT)
== (first_code == ASHIFTRT || first_code == LSHIFTRT))
- count += first_count;
+ signed_count += first_count;
else
- count -= first_count;
+ signed_count -= first_count;
/* If COUNT is positive, the new shift is usually CODE,
except for the two exceptions below, in which case it is
FIRST_CODE. If the count is negative, FIRST_CODE should
always be used */
- if (count > 0
+ if (signed_count > 0
&& ((first_code == ROTATE && code == ASHIFT)
|| (first_code == ASHIFTRT && code == LSHIFTRT)))
- code = first_code;
- else if (count < 0)
- code = first_code, count = - count;
+ code = first_code, count = signed_count;
+ else if (signed_count < 0)
+ code = first_code, count = - signed_count;
+ else
+ count = signed_count;
varop = XEXP (varop, 0);
continue;
&& count == GET_MODE_BITSIZE (result_mode) - 1
&& GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
&& ((STORE_FLAG_VALUE
- & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
+ & ((HOST_WIDE_INT) 1
+ << (GET_MODE_BITSIZE (result_mode) - 1))))
&& nonzero_bits (XEXP (varop, 0), result_mode) == 1
&& merge_outer_ops (&outer_op, &outer_const, XOR,
(HOST_WIDE_INT) 1, result_mode,
&& (new = simplify_binary_operation (ASHIFT, result_mode,
XEXP (varop, 1),
GEN_INT (count))) != 0
- && GET_CODE(new) == CONST_INT
+ && GET_CODE (new) == CONST_INT
&& merge_outer_ops (&outer_op, &outer_const, PLUS,
INTVAL (new), result_mode, &complement_p))
{
{
rtx varop_inner = XEXP (varop, 0);
- varop_inner = gen_rtx_combine (LSHIFTRT,
- GET_MODE (varop_inner),
- XEXP (varop_inner, 0),
- GEN_INT (count + INTVAL (XEXP (varop_inner, 1))));
+ varop_inner
+ = gen_rtx_combine (LSHIFTRT, GET_MODE (varop_inner),
+ XEXP (varop_inner, 0),
+ GEN_INT (count
+ + INTVAL (XEXP (varop_inner, 1))));
varop = gen_rtx_combine (TRUNCATE, GET_MODE (varop),
varop_inner);
count = 0;
{
rtx newpat = gen_rtx_PARALLEL (VOIDmode,
gen_rtvec (GET_CODE (pat) == PARALLEL
- ? XVECLEN (pat, 0) + num_clobbers_to_add
+ ? (XVECLEN (pat, 0)
+ + num_clobbers_to_add)
: num_clobbers_to_add + 1));
if (GET_CODE (pat) == PARALLEL)
}
result = gen_lowpart_common (mode, x);
+#ifdef CLASS_CANNOT_CHANGE_MODE
if (result != 0
&& GET_CODE (result) == SUBREG
&& GET_CODE (SUBREG_REG (result)) == REG
&& REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
- && (GET_MODE_SIZE (GET_MODE (result))
- != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
- REG_CHANGES_SIZE (REGNO (SUBREG_REG (result))) = 1;
+ && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (result),
+ GET_MODE (SUBREG_REG (result))))
+ REG_CHANGES_MODE (REGNO (SUBREG_REG (result))) = 1;
+#endif
if (result)
return result;
if (WORDS_BIG_ENDIAN)
offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
- MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
+
if (BYTES_BIG_ENDIAN)
{
/* Adjust the address so that the address-after-the-data is
- MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
}
new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
- RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
MEM_COPY_ATTRIBUTES (new, x);
return new;
}
/*VARARGS2*/
static rtx
-gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
+gen_rtx_combine VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
#ifndef ANSI_PROTOTYPES
enum rtx_code code;
int n_args;
rtx args[3];
int j;
- char *fmt;
+ const char *fmt;
rtx rt;
struct undo *undo;
args[j] = va_arg (p, rtx);
}
+ va_end (p);
+
/* See if this is in undobuf. Be sure we don't use objects that came
from another insn; this could produce circular rtl structures. */
while (GET_CODE (op1) == CONST_INT)
{
enum machine_mode mode = GET_MODE (op0);
- int mode_width = GET_MODE_BITSIZE (mode);
+ unsigned int mode_width = GET_MODE_BITSIZE (mode);
unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
int equality_comparison_p;
int sign_bit_comparison_p;
/* Get the constant we are comparing against and turn off all bits
not on in our mode. */
- const_op = INTVAL (op1);
- if (mode_width <= HOST_BITS_PER_WIDE_INT)
- const_op &= mask;
+ const_op = trunc_int_for_mode (INTVAL (op1), mode);
/* If we are comparing against a constant power of two and the value
being compared can only have that single bit nonzero (e.g., it was
equality_comparison_p = (code == EQ || code == NE);
sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
- || code == LEU);
+ || code == GEU);
/* If this is a sign bit comparison and we can do arithmetic in
MODE, say that we will only be needing the sign bit of OP0. */
if (BITS_BIG_ENDIAN)
{
#ifdef HAVE_extzv
- mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
+ mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
if (mode == VOIDmode)
mode = word_mode;
i = (GET_MODE_BITSIZE (mode) - 1 - i);
break;
case MINUS:
+ /* We used to optimize signed comparisons against zero, but that
+ was incorrect. Unsigned comparisons against zero (GTU, LEU)
+ arrive here as equality comparisons, or (GEU, LTU) are
+ optimized away. No need to special-case them. */
+
/* (eq (minus A B) C) -> (eq A (plus B C)) or
(eq B (minus A C)), whichever simplifies. We can only do
this for equality comparisons due to pathological cases involving
represents the low part, permute the SUBREG and the AND and
try again. */
if (GET_CODE (XEXP (op0, 0)) == SUBREG
- && ((mode_width
- >= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
+ && (0
#ifdef WORD_REGISTER_OPERATIONS
- || subreg_lowpart_p (XEXP (op0, 0))
+ || ((mode_width
+ > (GET_MODE_BITSIZE
+ (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
+ && mode_width <= BITS_PER_WORD)
#endif
- )
+ || ((mode_width
+ <= (GET_MODE_BITSIZE
+ (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
+ && subreg_lowpart_p (XEXP (op0, 0))))
#ifndef WORD_REGISTER_OPERATIONS
/* It is unsafe to commute the AND into the SUBREG if the SUBREG
is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
& ~ (mask >> (INTVAL (XEXP (op0, 1))
+ ! equality_comparison_p))) == 0)
{
- const_op >>= INTVAL (XEXP (op0, 1));
- op1 = GEN_INT (const_op);
+ /* We must perform a logical shift, not an arithmetic one,
+ as we want the top N bits of C to be zero. */
+ unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
+
+ temp >>= INTVAL (XEXP (op0, 1));
+ op1 = GEN_INT (trunc_int_for_mode (temp, mode));
op0 = XEXP (op0, 0);
continue;
}
continue;
}
+ /* Likewise if OP0 is a PLUS of a sign extension with a
+ constant, which is usually represented with the PLUS
+ between the shifts. */
+ if (! unsigned_comparison_p
+ && GET_CODE (XEXP (op0, 1)) == CONST_INT
+ && GET_CODE (XEXP (op0, 0)) == PLUS
+ && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
+ && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
+ && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
+ && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
+ MODE_INT, 1)) != BLKmode
+ && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
+ || ((unsigned HOST_WIDE_INT) - const_op
+ <= GET_MODE_MASK (tmode))))
+ {
+ rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
+ rtx add_const = XEXP (XEXP (op0, 0), 1);
+ rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
+ XEXP (op0, 1));
+
+ op0 = gen_binary (PLUS, tmode,
+ gen_lowpart_for_combine (tmode, inner),
+ new_const);
+ continue;
+ }
+
/* ... fall through ... */
case LSHIFTRT:
/* If we have (compare (xshiftrt FOO N) (const_int C)) and
&& (num_sign_bit_copies (op1, tmode)
> GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
{
+ /* If OP0 is an AND and we don't have an AND in MODE either,
+ make a new AND in the proper mode. */
+ if (GET_CODE (op0) == AND
+ && (and_optab->handlers[(int) mode].insn_code
+ == CODE_FOR_nothing))
+ op0 = gen_binary (AND, tmode,
+ gen_lowpart_for_combine (tmode,
+ XEXP (op0, 0)),
+ gen_lowpart_for_combine (tmode,
+ XEXP (op0, 1)));
+
op0 = gen_lowpart_for_combine (tmode, op0);
op1 = gen_lowpart_for_combine (tmode, op1);
break;
{
if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
|| flag_fast_math
- || GET_CODE (x) == NE || GET_CODE (x) == EQ)
+ || GET_CODE (x) == NE || GET_CODE (x) == EQ
+ || GET_CODE (x) == UNORDERED || GET_CODE (x) == ORDERED)
return 1;
switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
rtx x;
{
register enum rtx_code code = GET_CODE (x);
- register char *fmt = GET_RTX_FORMAT (code);
+ register const char *fmt = GET_RTX_FORMAT (code);
register int i;
if (code == REG)
{
- int regno = REGNO (x);
- int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
- ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
+ unsigned int regno = REGNO (x);
+ unsigned int endregno
+ = regno + (regno < FIRST_PSEUDO_REGISTER
+ ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
+ unsigned int r;
- for (i = regno; i < endregno; i++)
- reg_last_set_table_tick[i] = label_tick;
+ for (r = regno; r < endregno; r++)
+ reg_last_set_table_tick[r] = label_tick;
return;
}
rtx insn;
rtx value;
{
- int regno = REGNO (reg);
- int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
- ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
- int i;
+ unsigned int regno = REGNO (reg);
+ unsigned int endregno
+ = regno + (regno < FIRST_PSEUDO_REGISTER
+ ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
+ unsigned int i;
/* If VALUE contains REG and we have a previous value for REG, substitute
the previous value. */
subst_low_cuid = INSN_CUID (insn);
tem = get_last_value (reg);
+ /* If TEM is simply a binary operation with two CLOBBERs as operands,
+ it isn't going to be useful and will take a lot of time to process,
+ so just use the CLOBBER. */
+
if (tem)
- value = replace_rtx (copy_rtx (value), reg, tem);
+ {
+ if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
+ || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
+ && GET_CODE (XEXP (tem, 0)) == CLOBBER
+ && GET_CODE (XEXP (tem, 1)) == CLOBBER)
+ tem = XEXP (tem, 0);
+
+ value = replace_rtx (copy_rtx (value), reg, tem);
+ }
}
/* For each register modified, show we don't know its value, that
we don't know about its bitwise content, that its value has been
updated, and that we don't know the location of the death of the
register. */
- for (i = regno; i < endregno; i ++)
+ for (i = regno; i < endregno; i++)
{
if (insn)
reg_last_set[i] = insn;
+
reg_last_set_value[i] = 0;
reg_last_set_mode[i] = 0;
reg_last_set_nonzero_bits[i] = 0;
}
}
-/* Used for communication between the following two routines. */
-static rtx record_dead_insn;
-
/* Called via note_stores from record_dead_and_set_regs to handle one
- SET or CLOBBER in an insn. */
+ SET or CLOBBER in an insn. DATA is the instruction in which the
+ set is occurring. */
static void
-record_dead_and_set_regs_1 (dest, setter)
+record_dead_and_set_regs_1 (dest, setter, data)
rtx dest, setter;
+ void *data;
{
+ rtx record_dead_insn = (rtx) data;
+
if (GET_CODE (dest) == SUBREG)
dest = SUBREG_REG (dest);
rtx insn;
{
register rtx link;
- int i;
+ unsigned int i;
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
{
if (REG_NOTE_KIND (link) == REG_DEAD
&& GET_CODE (XEXP (link, 0)) == REG)
{
- int regno = REGNO (XEXP (link, 0));
- int endregno
+ unsigned int regno = REGNO (XEXP (link, 0));
+ unsigned int endregno
= regno + (regno < FIRST_PSEUDO_REGISTER
? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
: 1);
last_call_cuid = mem_last_set = INSN_CUID (insn);
}
- record_dead_insn = insn;
- note_stores (PATTERN (insn), record_dead_and_set_regs_1);
+ note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
+}
+
+/* If a SUBREG has the promoted bit set, it is in fact a property of the
+ register present in the SUBREG, so for each such SUBREG go back and
+ adjust nonzero and sign bit information of the registers that are
+ known to have some zero/sign bits set.
+
+ This is needed because when combine blows the SUBREGs away, the
+ information on zero/sign bits is lost and further combines can be
+ missed because of that. */
+
+static void
+record_promoted_value (insn, subreg)
+ rtx insn;
+ rtx subreg;
+{
+ rtx links, set;
+ unsigned int regno = REGNO (SUBREG_REG (subreg));
+ enum machine_mode mode = GET_MODE (subreg);
+
+ /* The recorded nonzero-bits masks are only HOST_BITS_PER_WIDE_INT
+ wide, so a promotion to a mode at least that wide tells us
+ nothing usable. */
+ if (GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT)
+ return;
+
+ /* Walk the LOG_LINKS chain looking for the insn that last set
+ REGNO, following plain register copies as we go. */
+ for (links = LOG_LINKS (insn); links; )
+ {
+ insn = XEXP (links, 0);
+ set = single_set (insn);
+
+ /* Skip links that do not set exactly this register in the
+ inner (unpromoted) mode. */
+ if (! set || GET_CODE (SET_DEST (set)) != REG
+ || REGNO (SET_DEST (set)) != regno
+ || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
+ {
+ links = XEXP (links, 1);
+ continue;
+ }
+
+ if (reg_last_set [regno] == insn)
+ {
+ /* For an unsigned promotion the high bits are known zero,
+ so narrow the recorded nonzero bits to the promoted
+ mode's mask. */
+ if (SUBREG_PROMOTED_UNSIGNED_P (subreg))
+ reg_last_set_nonzero_bits [regno] &= GET_MODE_MASK (mode);
+ }
+
+ /* If the source is itself a register, keep chasing the copy
+ chain from that register's links. */
+ if (GET_CODE (SET_SRC (set)) == REG)
+ {
+ regno = REGNO (SET_SRC (set));
+ links = LOG_LINKS (insn);
+ }
+ else
+ break;
+ }
+}
+
+/* Scan X for promoted SUBREGs. For each one found,
+ note what it implies to the registers used in it. */
+
+static void
+check_promoted_subreg (insn, x)
+ rtx insn;
+ rtx x;
+{
+ /* A promoted SUBREG of a pseudo: record what the promotion
+ implies about the register's known bits. */
+ if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
+ && GET_CODE (SUBREG_REG (x)) == REG)
+ record_promoted_value (insn, x);
+ else
+ {
+ /* Otherwise recurse into every rtx sub-expression ('e') and
+ every element of every rtx vector ('E'/'V'). */
+ const char *format = GET_RTX_FORMAT (GET_CODE (x));
+ int i, j;
+
+ for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
+ switch (format [i])
+ {
+ case 'e':
+ check_promoted_subreg (insn, XEXP (x, i));
+ break;
+ case 'V':
+ case 'E':
+ if (XVEC (x, i) != 0)
+ for (j = 0; j < XVECLEN (x, i); j++)
+ check_promoted_subreg (insn, XVECEXP (x, i, j));
+ break;
+ }
+ }
}
\f
/* Utility routine for the following function. Verify that all the registers
int replace;
{
rtx x = *loc;
- char *fmt = GET_RTX_FORMAT (GET_CODE (x));
+ const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
int len = GET_RTX_LENGTH (GET_CODE (x));
int i;
if (GET_CODE (x) == REG)
{
- int regno = REGNO (x);
- int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
- ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
- int j;
+ unsigned int regno = REGNO (x);
+ unsigned int endregno
+ = regno + (regno < FIRST_PSEUDO_REGISTER
+ ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
+ unsigned int j;
for (j = regno; j < endregno; j++)
if (reg_last_set_invalid[j]
- /* If this is a pseudo-register that was only set once, it is
- always valid. */
- || (! (regno >= FIRST_PSEUDO_REGISTER && REG_N_SETS (regno) == 1)
+ /* If this is a pseudo-register that was only set once and not
+ live at the beginning of the function, it is always valid. */
+ || (! (regno >= FIRST_PSEUDO_REGISTER
+ && REG_N_SETS (regno) == 1
+ && (! REGNO_REG_SET_P
+ (BASIC_BLOCK (0)->global_live_at_start, regno)))
&& reg_last_set_label[j] > tick))
{
if (replace)
get_last_value (x)
rtx x;
{
- int regno;
+ unsigned int regno;
rtx value;
/* If this is a non-paradoxical SUBREG, get the value of its operand and
regno = REGNO (x);
value = reg_last_set_value[regno];
- /* If we don't have a value or if it isn't for this basic block,
- return 0. */
+ /* If we don't have a value, or if it isn't for this basic block and
+ it's either a hard register, set more than once, or live at the
+ beginning of the function, return 0.
+
+ If the register is not live at the beginning of the function, then it
+ is always set before being used (it is never used without being set).
+ And, if it is set only once and always set before use, then all uses
+ must have the same last value, even if it's not from this basic
+ block. */
if (value == 0
- || (REG_N_SETS (regno) != 1
- && reg_last_set_label[regno] != label_tick))
+ || (reg_last_set_label[regno] != label_tick
+ && (regno < FIRST_PSEUDO_REGISTER
+ || REG_N_SETS (regno) != 1
+ || (REGNO_REG_SET_P
+ (BASIC_BLOCK (0)->global_live_at_start, regno)))))
return 0;
/* If the value was set in a later insn than the ones we are processing,
- we can't use it even if the register was only set once, but make a quick
- check to see if the previous insn set it to something. This is commonly
- the case when the same pseudo is used by repeated insns.
-
- This does not work if there exists an instruction which is temporarily
- not on the insn chain. */
-
+ we can't use it even if the register was only set once. */
if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
- {
- rtx insn, set;
-
- /* We can not do anything useful in this case, because there is
- an instruction which is not on the insn chain. */
- if (subst_prev_insn)
- return 0;
-
- /* Skip over USE insns. They are not useful here, and they may have
- been made by combine, in which case they do not have a INSN_CUID
- value. We can't use prev_real_insn, because that would incorrectly
- take us backwards across labels. Skip over BARRIERs also, since
- they could have been made by combine. If we see one, we must be
- optimizing dead code, so it doesn't matter what we do. */
- for (insn = prev_nonnote_insn (subst_insn);
- insn && ((GET_CODE (insn) == INSN
- && GET_CODE (PATTERN (insn)) == USE)
- || GET_CODE (insn) == BARRIER
- || INSN_CUID (insn) >= subst_low_cuid);
- insn = prev_nonnote_insn (insn))
- ;
-
- if (insn
- && (set = single_set (insn)) != 0
- && rtx_equal_p (SET_DEST (set), x))
- {
- value = SET_SRC (set);
-
- /* Make sure that VALUE doesn't reference X. Replace any
- explicit references with a CLOBBER. If there are any remaining
- references (rare), don't use the value. */
-
- if (reg_mentioned_p (x, value))
- value = replace_rtx (copy_rtx (value), x,
- gen_rtx_CLOBBER (GET_MODE (x), const0_rtx));
-
- if (reg_overlap_mentioned_p (x, value))
- return 0;
- }
- else
- return 0;
- }
+ return 0;
/* If the value has all its registers valid, return it. */
if (get_last_value_validate (&value, reg_last_set[regno],
register rtx x;
int from_cuid;
{
- register char *fmt;
+ register const char *fmt;
register int i;
register enum rtx_code code = GET_CODE (x);
if (code == REG)
{
- register int regno = REGNO (x);
- int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
+ unsigned int regno = REGNO (x);
+ unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
#ifdef PUSH_ROUNDING
/* Don't allow uses of the stack pointer to be moved,
because we don't know whether the move crosses a push insn. */
- if (regno == STACK_POINTER_REGNUM)
+ if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
return 1;
#endif
- for (;regno < endreg; regno++)
+ for (; regno < endreg; regno++)
if (reg_last_set[regno]
&& INSN_CUID (reg_last_set[regno]) > from_cuid)
return 1;
/* Define three variables used for communication between the following
routines. */
-static int reg_dead_regno, reg_dead_endregno;
+static unsigned int reg_dead_regno, reg_dead_endregno;
static int reg_dead_flag;
/* Function called via note_stores from reg_dead_at_p.
reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
static void
-reg_dead_at_p_1 (dest, x)
+reg_dead_at_p_1 (dest, x, data)
rtx dest;
rtx x;
+ void *data ATTRIBUTE_UNUSED;
{
- int regno, endregno;
+ unsigned int regno, endregno;
if (GET_CODE (dest) != REG)
return;
rtx reg;
rtx insn;
{
- int block, i;
+ int block;
+ unsigned int i;
/* Set variables for reg_dead_at_p_1. */
reg_dead_regno = REGNO (reg);
for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
insn = prev_nonnote_insn (insn))
{
- note_stores (PATTERN (insn), reg_dead_at_p_1);
+ note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
if (reg_dead_flag)
return reg_dead_flag == 1 ? 1 : 0;
mark_used_regs_combine (x)
rtx x;
{
- register RTX_CODE code = GET_CODE (x);
- register int regno;
+ RTX_CODE code = GET_CODE (x);
+ unsigned int regno;
int i;
switch (code)
If so, mark all of them just like the first. */
if (regno < FIRST_PSEUDO_REGISTER)
{
+ unsigned int endregno, r;
+
/* None of this applies to the stack, frame or arg pointers */
if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
|| regno == FRAME_POINTER_REGNUM)
return;
- i = HARD_REGNO_NREGS (regno, GET_MODE (x));
- while (i-- > 0)
- SET_HARD_REG_BIT (newpat_used_regs, regno + i);
+ endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
+ for (r = regno; r < endregno; r++)
+ SET_HARD_REG_BIT (newpat_used_regs, r);
}
return;
/* Recursively scan the operands of this expression. */
{
- register char *fmt = GET_RTX_FORMAT (code);
+ register const char *fmt = GET_RTX_FORMAT (code);
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
{
rtx
remove_death (regno, insn)
- int regno;
+ unsigned int regno;
rtx insn;
{
register rtx note = find_regno_note (insn, REG_DEAD, regno);
rtx to_insn;
rtx *pnotes;
{
- register char *fmt;
+ register const char *fmt;
register int len, i;
register enum rtx_code code = GET_CODE (x);
if (code == REG)
{
- register int regno = REGNO (x);
+ unsigned int regno = REGNO (x);
register rtx where_dead = reg_last_death[regno];
register rtx before_dead, after_dead;
/* Don't move the register if it gets killed in between from and to */
if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
- && !reg_referenced_p (x, maybe_kill_insn))
+ && ! reg_referenced_p (x, maybe_kill_insn))
return;
/* WHERE_DEAD could be a USE insn made by combine, so first we
before_dead = where_dead;
while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
before_dead = PREV_INSN (before_dead);
+
after_dead = where_dead;
while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
after_dead = NEXT_INSN (after_dead);
&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
> GET_MODE_SIZE (GET_MODE (x))))
{
- int deadregno = REGNO (XEXP (note, 0));
- int deadend
+ unsigned int deadregno = REGNO (XEXP (note, 0));
+ unsigned int deadend
= (deadregno + HARD_REGNO_NREGS (deadregno,
GET_MODE (XEXP (note, 0))));
- int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
- int i;
+ unsigned int ourend
+ = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
+ unsigned int i;
for (i = deadregno; i < deadend; i++)
if (i < regno || i >= ourend)
gen_rtx_REG (reg_raw_mode[i], i),
REG_NOTES (where_dead));
}
+
/* If we didn't find any note, or if we found a REG_DEAD note that
covers only part of the given reg, and we have a multi-reg hard
register, then to be safe we must check for REG_DEAD notes
&& regno < FIRST_PSEUDO_REGISTER
&& HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
{
- int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
- int i, offset;
+ unsigned int ourend
+ = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
+ unsigned int i, offset;
rtx oldnotes = 0;
if (note)
{
rtx dest = SET_DEST (body);
rtx target;
- int regno, tregno, endregno, endtregno;
+ unsigned int regno, tregno, endregno, endtregno;
if (GET_CODE (dest) == ZERO_EXTRACT)
target = XEXP (dest, 0);
break;
case REG_EH_REGION:
- /* This note must remain with the call. It should not be possible
- for both I2 and I3 to be a call. */
+ case REG_EH_RETHROW:
+ /* These notes must remain with the call. It should not be
+ possible for both I2 and I3 to be a call. */
if (GET_CODE (i3) == CALL_INSN)
place = i3;
else if (i2 && GET_CODE (i2) == CALL_INSN)
is one already. */
else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
&& ! (GET_CODE (XEXP (note, 0)) == REG
- ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
+ ? find_regno_note (i3, REG_DEAD,
+ REGNO (XEXP (note, 0)))
: find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
{
PUT_REG_NOTE_KIND (note, REG_DEAD);
if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
break;
- /* If the register is used in both I2 and I3 and it dies in I3,
- we might have added another reference to it. If reg_n_refs
- was 2, bump it to 3. This has to be correct since the
- register must have been set somewhere. The reason this is
- done is because local-alloc.c treats 2 references as a
- special case. */
-
- if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
- && REG_N_REFS (REGNO (XEXP (note, 0)))== 2
- && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
- REG_N_REFS (REGNO (XEXP (note, 0))) = 3;
-
if (place == 0)
{
- for (tem = prev_nonnote_insn (i3);
- place == 0 && tem
- && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN);
- tem = prev_nonnote_insn (tem))
+ basic_block bb = BASIC_BLOCK (this_basic_block);
+
+ for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
{
+ if (GET_RTX_CLASS (GET_CODE (tem)) != 'i')
+ {
+ if (tem == bb->head)
+ break;
+ continue;
+ }
+
/* If the register is being set at TEM, see if that is all
TEM is doing. If so, delete TEM. Otherwise, make this
into a REG_UNUSED note instead. */
if (set != 0)
for (inner_dest = SET_DEST (set);
GET_CODE (inner_dest) == STRICT_LOW_PART
- || GET_CODE (inner_dest) == SUBREG
- || GET_CODE (inner_dest) == ZERO_EXTRACT;
+ || GET_CODE (inner_dest) == SUBREG
+ || GET_CODE (inner_dest) == ZERO_EXTRACT;
inner_dest = XEXP (inner_dest, 0))
;
distribute_links (LOG_LINKS (cc0_setter));
PUT_CODE (cc0_setter, NOTE);
- NOTE_LINE_NUMBER (cc0_setter) = NOTE_INSN_DELETED;
+ NOTE_LINE_NUMBER (cc0_setter)
+ = NOTE_INSN_DELETED;
NOTE_SOURCE_FILE (cc0_setter) = 0;
}
#endif
if (! find_regno_note (tem, REG_UNUSED,
REGNO (XEXP (note, 0))))
REG_NOTES (tem)
- = gen_rtx_EXPR_LIST (REG_UNUSED,
- XEXP (note, 0),
+ = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
REG_NOTES (tem));
}
else
REGNO (XEXP (note, 0))))
place = tem;
break;
- }
- }
- else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
- || (GET_CODE (tem) == CALL_INSN
- && find_reg_fusage (tem, USE, XEXP (note, 0))))
- {
- place = tem;
-
- /* If we are doing a 3->2 combination, and we have a
- register which formerly died in i3 and was not used
- by i2, which now no longer dies in i3 and is used in
- i2 but does not die in i2, and place is between i2
- and i3, then we may need to move a link from place to
- i2. */
- if (i2 && INSN_UID (place) <= max_uid_cuid
- && INSN_CUID (place) > INSN_CUID (i2)
- && from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
- && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
- {
- rtx links = LOG_LINKS (place);
- LOG_LINKS (place) = 0;
- distribute_links (links);
- }
+ }
+ }
+ else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
+ || (GET_CODE (tem) == CALL_INSN
+ && find_reg_fusage (tem, USE, XEXP (note, 0))))
+ {
+ place = tem;
+
+ /* If we are doing a 3->2 combination, and we have a
+ register which formerly died in i3 and was not used
+ by i2, which now no longer dies in i3 and is used in
+ i2 but does not die in i2, and place is between i2
+ and i3, then we may need to move a link from place to
+ i2. */
+ if (i2 && INSN_UID (place) <= max_uid_cuid
+ && INSN_CUID (place) > INSN_CUID (i2)
+ && from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
+ && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
+ {
+ rtx links = LOG_LINKS (place);
+ LOG_LINKS (place) = 0;
+ distribute_links (links);
+ }
+ break;
+ }
+
+ if (tem == bb->head)
break;
- }
}
- /* If we haven't found an insn for the death note and it
- is still a REG_DEAD note, but we have hit a CODE_LABEL,
- insert a USE insn for the register at that label and
- put the death node there. This prevents problems with
- call-state tracking in caller-save.c. */
- if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
+ /* We haven't found an insn for the death note and it
+ is still a REG_DEAD note, but we have hit the beginning
+ of the block. If the existing life info says the reg
+ was dead, there's nothing left to do. Otherwise, we'll
+ need to do a global life update after combine. */
+ if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
+ && REGNO_REG_SET_P (bb->global_live_at_start,
+ REGNO (XEXP (note, 0))))
{
- place
- = emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (note, 0)),
- tem);
-
- /* If this insn was emitted between blocks, then update
- BLOCK_HEAD of the current block to include it. */
- if (BLOCK_END (this_basic_block - 1) == tem)
- BLOCK_HEAD (this_basic_block) = place;
+ SET_BIT (refresh_blocks, this_basic_block);
+ need_refresh = 1;
}
}
which is what `dead_or_set_p' checks, so also check for it being
set partially. */
-
if (place && REG_NOTE_KIND (note) == REG_DEAD)
{
- int regno = REGNO (XEXP (note, 0));
+ unsigned int regno = REGNO (XEXP (note, 0));
if (dead_or_set_p (place, XEXP (note, 0))
|| reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
if (place && regno < FIRST_PSEUDO_REGISTER
&& HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
{
- int endregno
+ unsigned int endregno
= regno + HARD_REGNO_NREGS (regno,
GET_MODE (XEXP (note, 0)));
int all_used = 1;
- int i;
+ unsigned int i;
for (i = regno; i < endregno; i++)
if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
&& ! reg_bitfield_target_p (piece,
PATTERN (place)))
REG_NOTES (place)
- = gen_rtx_EXPR_LIST (REG_DEAD,
- piece, REG_NOTES (place));
+ = gen_rtx_EXPR_LIST (REG_DEAD, piece,
+ REG_NOTES (place));
}
place = 0;