static htab_t const_int_htab;
+/* A hash table storing memory attribute structures. */
+static htab_t mem_attrs_htab;
+
/* start_sequence and gen_sequence can make a lot of rtx expressions which are
shortly thrown away. We use two mechanisms to prevent this waste:
static hashval_t const_int_htab_hash PARAMS ((const void *));
static int const_int_htab_eq PARAMS ((const void *,
const void *));
-static int rtx_htab_mark_1 PARAMS ((void **, void *));
-static void rtx_htab_mark PARAMS ((void *));
+static hashval_t mem_attrs_htab_hash PARAMS ((const void *));
+static int mem_attrs_htab_eq PARAMS ((const void *,
+ const void *));
+static void mem_attrs_mark PARAMS ((const void *));
+static mem_attrs *get_mem_attrs PARAMS ((HOST_WIDE_INT, tree, rtx,
+ rtx, unsigned int));
/* Probability of the conditional branch currently proceeded by try_split.
Set to -1 otherwise. */
int split_branch_probability = -1;
-
\f
/* Returns a hash code for X (which is a really a CONST_INT). */
return (INTVAL ((const struct rtx_def *) x) == *((const HOST_WIDE_INT *) y));
}
-/* Mark the hash-table element X (which is really a pointer to an
- rtx). */
+/* Returns a hash code for X (which is really a mem_attrs *). */
+
+static hashval_t
+mem_attrs_htab_hash (x)
+ const void *x;
+{
+ mem_attrs *p = (mem_attrs *) x;
+
+ /* Multiply each field by a different constant before XORing so the
+ fields land in different bit positions of the hash value. */
+ return (p->alias ^ (p->align * 1000)
+ ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
+ ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
+ ^ (long) p->decl);
+}
+
+/* Returns non-zero if the value represented by X (which is really a
+ mem_attrs *) is the same as that given by Y (which is also really a
+ mem_attrs *). */
static int
-rtx_htab_mark_1 (x, data)
- void **x;
- void *data ATTRIBUTE_UNUSED;
+mem_attrs_htab_eq (x, y)
+ const void *x;
+ const void *y;
{
- ggc_mark_rtx (*x);
- return 1;
+ mem_attrs *p = (mem_attrs *) x;
+ mem_attrs *q = (mem_attrs *) y;
+
+ /* NOTE(review): offset and size are compared as pointers; this assumes
+ such rtxs are shared (e.g. hash-consed CONST_INTs) -- confirm. */
+ return (p->alias == q->alias && p->decl == q->decl && p->offset == q->offset
+ && p->size == q->size && p->align == q->align);
}
-/* Mark all the elements of HTAB (which is really an htab_t full of
- rtxs). */
+/* This routine is called when we determine that we need a mem_attrs entry.
+ It marks the associated decl and RTL as being used, if present. */
static void
-rtx_htab_mark (htab)
- void *htab;
+mem_attrs_mark (x)
+ const void *x;
{
- htab_traverse (*((htab_t *) htab), rtx_htab_mark_1, NULL);
+ mem_attrs *p = (mem_attrs *) x;
+
+ if (p->decl)
+ ggc_mark_tree (p->decl);
+
+ if (p->offset)
+ ggc_mark_rtx (p->offset);
+
+ if (p->size)
+ ggc_mark_rtx (p->size);
+}
+
+/* Return a mem_attrs structure for the given fields. Allocate a new
+ structure and insert it into the hash table only if an identical one
+ is not already present; entries are shared, so callers should treat
+ the result as read-only. */
+
+static mem_attrs *
+get_mem_attrs (alias, decl, offset, size, align)
+ HOST_WIDE_INT alias;
+ tree decl;
+ rtx offset;
+ rtx size;
+ unsigned int align;
+{
+ mem_attrs attrs;
+ void **slot;
+
+ attrs.alias = alias;
+ attrs.decl = decl;
+ attrs.offset = offset;
+ attrs.size = size;
+ attrs.align = align;
+
+ /* An empty slot means no equivalent entry exists yet; make a
+ permanent copy of the stack temporary. */
+ slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
+ if (*slot == 0)
+ {
+ *slot = ggc_alloc (sizeof (mem_attrs));
+ memcpy (*slot, &attrs, sizeof (mem_attrs));
+ }
+
+ return *slot;
}
/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
/* This field is not cleared by the mere allocation of the rtx, so
we clear it here. */
- MEM_ALIAS_SET (rt) = 0;
+ MEM_ATTRS (rt) = 0;
return rt;
}
return gen_rtx_fmt_ei (SUBREG, mode, reg, offset);
}
-/* Generate a SUBREG representing the least-significant part
- * of REG if MODE is smaller than mode of REG, otherwise
- * paradoxical SUBREG. */
+/* Generate a SUBREG representing the least-significant part of REG if MODE
+ is smaller than mode of REG, otherwise paradoxical SUBREG. */
+
rtx
gen_lowpart_SUBREG (mode, reg)
enum machine_mode mode;
return rt_val;
}
-
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
This pseudo is assigned the next sequential register number. */
}
}
\f
+
+/* Given REF, a MEM, and T, either the type of REF or the expression
+ corresponding to REF, set the memory attributes. OBJECTP is nonzero
+ if we are making a new object of this type. */
+
+void
+set_mem_attributes (ref, t, objectp)
+ rtx ref;
+ tree t;
+ int objectp;
+{
+ tree type;
+
+ /* It can happen that type_for_mode was given a mode for which there
+ is no language-level type. In which case it returns NULL, which
+ we can see here. */
+ if (t == NULL_TREE)
+ return;
+
+ type = TYPE_P (t) ? t : TREE_TYPE (t);
+
+ /* Get the alias set from the expression or type (perhaps using a
+ front-end routine) and then copy bits from the type. */
+
+ /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY (type)
+ here, because, in C and C++, the fact that a location is accessed
+ through a const expression does not mean that the value there can
+ never change. */
+
+ /* If we have already set DECL_RTL = ref, get_alias_set will get the
+ wrong answer, as it assumes that DECL_RTL already has the right alias
+ info. Callers should not set DECL_RTL until after the call to
+ set_mem_attributes. */
+ if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
+ abort ();
+
+ set_mem_alias_set (ref, get_alias_set (t));
+
+ MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
+ MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
+
+ /* If we are making an object of this type, we know that it is a scalar if
+ the type is not an aggregate. */
+ if (objectp && ! AGGREGATE_TYPE_P (type))
+ MEM_SCALAR_P (ref) = 1;
+
+ /* If T is a type, this is all we can do. Otherwise, we may be able
+ to deduce some more information about the expression. */
+ if (TYPE_P (t))
+ return;
+
+ maybe_set_unchanging (ref, t);
+ if (TREE_THIS_VOLATILE (t))
+ MEM_VOLATILE_P (ref) = 1;
+
+ /* Now see if we can say more about whether it's an aggregate or
+ scalar. If we already know it's an aggregate, don't bother. */
+ if (MEM_IN_STRUCT_P (ref))
+ return;
+
+ /* Now remove any NOPs: they don't change what the underlying object is.
+ Likewise for SAVE_EXPR. */
+ while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
+ || TREE_CODE (t) == NON_LVALUE_EXPR || TREE_CODE (t) == SAVE_EXPR)
+ t = TREE_OPERAND (t, 0);
+
+ /* Since we already know the type isn't an aggregate, if this is a decl,
+ it must be a scalar. Or if it is a reference into an aggregate,
+ this is part of an aggregate. Otherwise we don't know. */
+ if (DECL_P (t))
+ MEM_SCALAR_P (ref) = 1;
+ else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
+ || TREE_CODE (t) == ARRAY_RANGE_REF
+ || TREE_CODE (t) == BIT_FIELD_REF)
+ MEM_IN_STRUCT_P (ref) = 1;
+}
+
+/* Set the alias set of MEM to SET. The remaining attributes (decl,
+ offset, size, alignment) are carried over from MEM unchanged. */
+
+void
+set_mem_alias_set (mem, set)
+ rtx mem;
+ HOST_WIDE_INT set;
+{
+ /* It would be nice to enable this check, but we can't quite yet. */
+#if 0
+#ifdef ENABLE_CHECKING
+ /* If the new and old alias sets don't conflict, something is wrong. */
+ if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
+ abort ();
+#endif
+#endif
+
+ MEM_ATTRS (mem) = get_mem_attrs (set, MEM_DECL (mem), MEM_OFFSET (mem),
+ MEM_SIZE (mem), MEM_ALIGN (mem));
+}
+\f
/* Return a memory reference like MEMREF, but with its mode changed
to MODE and its address changed to ADDR.
(VOIDmode means don't change the mode.
&& offset >= 0
&& (unsigned HOST_WIDE_INT) offset
< GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
- addr = gen_rtx_LO_SUM (mode, XEXP (addr, 0),
+ addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
plus_constant (XEXP (addr, 1), offset));
else
addr = plus_constant (addr, offset);
if (GET_CODE (seq) == SEQUENCE)
{
int i, njumps = 0;
- rtx eh_note;
/* Avoid infinite loop if any insn of the result matches
the original pattern. */
REG_NOTES (insn));
}
}
+
/* If we are splitting a CALL_INSN, look for the CALL_INSN
in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
if (GET_CODE (trial) == CALL_INSN)
CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
= CALL_INSN_FUNCTION_USAGE (trial);
- /* Copy EH notes. */
- if ((eh_note = find_reg_note (trial, REG_EH_REGION, NULL_RTX)))
- for (i = 0; i < XVECLEN (seq, 0); i++)
- {
- rtx insn = XVECEXP (seq, 0, i);
- if (GET_CODE (insn) == CALL_INSN
- || (flag_non_call_exceptions
- && may_trap_p (PATTERN (insn))))
- REG_NOTES (insn)
- = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
- REG_NOTES (insn));
- }
+ /* Copy notes, particularly those related to the CFG. */
+ for (note = REG_NOTES (trial); note ; note = XEXP (note, 1))
+ {
+ switch (REG_NOTE_KIND (note))
+ {
+ case REG_EH_REGION:
+ for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
+ {
+ rtx insn = XVECEXP (seq, 0, i);
+ if (GET_CODE (insn) == CALL_INSN
+ || (flag_non_call_exceptions
+ && may_trap_p (PATTERN (insn))))
+ REG_NOTES (insn)
+ = gen_rtx_EXPR_LIST (REG_EH_REGION,
+ XEXP (note, 0),
+ REG_NOTES (insn));
+ }
+ break;
+
+ case REG_NORETURN:
+ case REG_SETJMP:
+ case REG_ALWAYS_RETURN:
+ for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
+ {
+ rtx insn = XVECEXP (seq, 0, i);
+ if (GET_CODE (insn) == CALL_INSN)
+ REG_NOTES (insn)
+ = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
+ XEXP (note, 0),
+ REG_NOTES (insn));
+ }
+ break;
+
+ case REG_NON_LOCAL_GOTO:
+ for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
+ {
+ rtx insn = XVECEXP (seq, 0, i);
+ if (GET_CODE (insn) == JUMP_INSN)
+ REG_NOTES (insn)
+ = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
+ XEXP (note, 0),
+ REG_NOTES (insn));
+ }
+ break;
+
+ default:
+ break;
+ }
+ }
/* If there are LABELS inside the split insns increment the
usage count so we don't delete the label. */
if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
- tem = emit_insn_after (seq, before);
+ tem = emit_insn_after (seq, trial);
- delete_insn (trial);
+ delete_related_insns (trial);
if (has_barrier)
emit_barrier_after (tem);
rtx insn, after;
{
rtx next = NEXT_INSN (after);
+ basic_block bb;
if (optimize && INSN_DELETED_P (after))
abort ();
abort ();
}
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
+ && (bb = BLOCK_FOR_INSN (after)))
+ {
+ set_block_for_insn (insn, bb);
+ /* Should not happen, as the first insn in the BB is always
+ either a NOTE or a LABEL. */
+ if (bb->end == after
+ /* Avoid clobbering of structure when creating new BB. */
+ && GET_CODE (insn) != BARRIER
+ && (GET_CODE (insn) != NOTE
+ || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
+ bb->end = insn;
+ }
+
NEXT_INSN (after) = insn;
if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
{
rtx insn, before;
{
rtx prev = PREV_INSN (before);
+ basic_block bb;
if (optimize && INSN_DELETED_P (before))
abort ();
abort ();
}
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (before) < basic_block_for_insn->num_elements
+ && (bb = BLOCK_FOR_INSN (before)))
+ {
+ set_block_for_insn (insn, bb);
+ /* Should not happen, as the first insn in the BB is always
+ either a NOTE or a LABEL. */
+ if (bb->head == insn
+ /* Avoid clobbering of structure when creating new BB. */
+ && GET_CODE (insn) != BARRIER
+ && (GET_CODE (insn) != NOTE
+ || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
+ abort ();
+ }
+
PREV_INSN (before) = insn;
if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
{
rtx next = NEXT_INSN (insn);
rtx prev = PREV_INSN (insn);
+ basic_block bb;
+
if (prev)
{
NEXT_INSN (prev) = next;
if (stack == 0)
abort ();
}
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (insn) < basic_block_for_insn->num_elements
+ && (bb = BLOCK_FOR_INSN (insn)))
+ {
+ if (bb->head == insn)
+ {
+ /* Never delete the basic block note without deleting the whole
+ basic block. */
+ if (GET_CODE (insn) == NOTE)
+ abort ();
+ bb->head = next;
+ }
+ if (bb->end == insn)
+ bb->end = prev;
+ }
}
/* Delete all insns made since FROM.
called after delay-slot filling has been done. */
void
-reorder_insns (from, to, after)
+reorder_insns_nobb (from, to, after)
rtx from, to, after;
{
/* Splice this bunch out of where it is now. */
last_insn = to;
}
+/* Same as reorder_insns_nobb above, but also update the basic block
+ boundaries and the per-insn block pointers of the moved insns. */
+void
+reorder_insns (from, to, after)
+ rtx from, to, after;
+{
+ rtx prev = PREV_INSN (from);
+ basic_block bb, bb2;
+
+ reorder_insns_nobb (from, to, after);
+
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
+ && (bb = BLOCK_FOR_INSN (after)))
+ {
+ rtx x;
+
+ /* The source block (which may differ from BB) loses the moved
+ insns, so pull its end marker back if it pointed at TO. */
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (from) < basic_block_for_insn->num_elements
+ && (bb2 = BLOCK_FOR_INSN (from)))
+ {
+ if (bb2->end == to)
+ bb2->end = prev;
+ }
+
+ if (bb->end == after)
+ bb->end = to;
+
+ for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
+ set_block_for_insn (x, bb);
+ }
+}
+
/* Return the line note insn preceding INSN. */
static rtx
return insn;
}
-/* Similar to emit_insn_before, but update basic block boundaries as well. */
-
-rtx
-emit_block_insn_before (pattern, before, block)
- rtx pattern, before;
- basic_block block;
-{
- rtx prev = PREV_INSN (before);
- rtx r = emit_insn_before (pattern, before);
- if (block && block->head == before)
- block->head = NEXT_INSN (prev);
- return r;
-}
-
/* Make an instruction with body PATTERN and code JUMP_INSN
and output it before the instruction BEFORE. */
insn);
}
-/* Similar to emit_insn_after, but update basic block boundaries as well. */
-
-rtx
-emit_block_insn_after (pattern, after, block)
- rtx pattern, after;
- basic_block block;
-{
- rtx r = emit_insn_after (pattern, after);
- if (block && block->end == after)
- block->end = r;
- return r;
-}
-
/* Make an insn of code JUMP_INSN with body PATTERN
and output it after the insn AFTER. */
{
register rtx last;
register rtx after_after;
+ basic_block bb;
if (!after)
abort ();
if (!first)
- return first;
+ return after;
- for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
- continue;
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
+ && (bb = BLOCK_FOR_INSN (after)))
+ {
+ for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
+ set_block_for_insn (last, bb);
+ set_block_for_insn (last, bb);
+ if (bb->end == after)
+ bb->end = last;
+ }
+ else
+ for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
+ continue;
after_after = NEXT_INSN (after);
enum machine_mode mode;
enum machine_mode double_mode;
- /* Initialize the CONST_INT hash table. */
+ /* Initialize the CONST_INT and memory attribute hash tables. */
const_int_htab = htab_create (37, const_int_htab_hash,
const_int_htab_eq, NULL);
- ggc_add_root (&const_int_htab, 1, sizeof (const_int_htab),
- rtx_htab_mark);
+ ggc_add_deletable_htab (const_int_htab, 0, 0);
+
+ mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
+ mem_attrs_htab_eq, NULL);
+ ggc_add_deletable_htab (mem_attrs_htab, 0, mem_attrs_mark);
no_line_numbers = ! line_numbers;