Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000, 2001 Free Software Foundation, Inc.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
-any later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING. If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA. */
/* Middle-to-low level generation of rtx code and insns.
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
+#include "debug.h"
/* Commonly used modes. */
static htab_t const_int_htab;
+/* A hash table storing memory attribute structures. */
+static htab_t mem_attrs_htab;
+
/* start_sequence and gen_sequence can make a lot of rtx expressions which are
shortly thrown away. We use two mechanisms to prevent this waste:
static hashval_t const_int_htab_hash PARAMS ((const void *));
static int const_int_htab_eq PARAMS ((const void *,
const void *));
-static int rtx_htab_mark_1 PARAMS ((void **, void *));
-static void rtx_htab_mark PARAMS ((void *));
+static hashval_t mem_attrs_htab_hash PARAMS ((const void *));
+static int mem_attrs_htab_eq PARAMS ((const void *,
+ const void *));
+static void mem_attrs_mark PARAMS ((const void *));
+static mem_attrs *get_mem_attrs PARAMS ((HOST_WIDE_INT, tree, rtx,
+ rtx, unsigned int));
+/* Probability of the conditional branch currently processed by try_split.
+ Set to -1 otherwise. */
+int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is a really a CONST_INT). */
return (INTVAL ((const struct rtx_def *) x) == *((const HOST_WIDE_INT *) y));
}
-/* Mark the hash-table element X (which is really a pointer to an
- rtx). */
+/* Returns a hash code for X (which is really a mem_attrs *). */
+
+static hashval_t
+mem_attrs_htab_hash (x)
+ const void *x;
+{
+ mem_attrs *p = (mem_attrs *) x;
+
+ return (p->alias ^ (p->align * 1000)
+ ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
+ ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
+ ^ (long) p->decl);
+}
+
+/* Returns non-zero if the value represented by X (which is really a
+ mem_attrs *) is the same as that given by Y (which is also really a
+ mem_attrs *). */
static int
-rtx_htab_mark_1 (x, data)
- void **x;
- void *data ATTRIBUTE_UNUSED;
+mem_attrs_htab_eq (x, y)
+ const void *x;
+ const void *y;
{
- ggc_mark_rtx (*x);
- return 1;
+ mem_attrs *p = (mem_attrs *) x;
+ mem_attrs *q = (mem_attrs *) y;
+
+ return (p->alias == q->alias && p->decl == q->decl && p->offset == q->offset
+ && p->size == q->size && p->align == q->align);
}
-/* Mark all the elements of HTAB (which is really an htab_t full of
- rtxs). */
+/* This routine is called when we determine that we need a mem_attrs entry.
+ It marks the associated decl and RTL as being used, if present. */
static void
-rtx_htab_mark (htab)
- void *htab;
+mem_attrs_mark (x)
+ const void *x;
{
- htab_traverse (*((htab_t *) htab), rtx_htab_mark_1, NULL);
+ mem_attrs *p = (mem_attrs *) x;
+
+ if (p->decl)
+ ggc_mark_tree (p->decl);
+
+ if (p->offset)
+ ggc_mark_rtx (p->offset);
+
+ if (p->size)
+ ggc_mark_rtx (p->size);
+}
+
+/* Allocate a new mem_attrs structure and insert it into the hash table if
+ one identical to it is not already in the table. */
+
+static mem_attrs *
+get_mem_attrs (alias, decl, offset, size, align)
+ HOST_WIDE_INT alias;
+ tree decl;
+ rtx offset;
+ rtx size;
+ unsigned int align;
+{
+ mem_attrs attrs;
+ void **slot;
+
+ attrs.alias = alias;
+ attrs.decl = decl;
+ attrs.offset = offset;
+ attrs.size = size;
+ attrs.align = align;
+
+ slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
+ if (*slot == 0)
+ {
+ *slot = ggc_alloc (sizeof (mem_attrs));
+ memcpy (*slot, &attrs, sizeof (mem_attrs));
+ }
+
+ return *slot;
}
/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
/* This field is not cleared by the mere allocation of the rtx, so
we clear it here. */
- MEM_ALIAS_SET (rt) = 0;
+ MEM_ATTRS (rt) = 0;
return rt;
}
return gen_rtx_fmt_ei (SUBREG, mode, reg, offset);
}
-/* Generate a SUBREG representing the least-significant part
- * of REG if MODE is smaller than mode of REG, otherwise
- * paradoxical SUBREG. */
+/* Generate a SUBREG representing the least-significant part of REG if MODE
+ is smaller than mode of REG, otherwise paradoxical SUBREG. */
+
rtx
gen_lowpart_SUBREG (mode, reg)
enum machine_mode mode;
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
-#ifndef ANSI_PROTOTYPES
- enum rtx_code code;
- enum machine_mode mode;
-#endif
- va_list p;
register int i; /* Array indices... */
register const char *fmt; /* Current rtx's format... */
register rtx rt_val; /* RTX to return to caller... */
- VA_START (p, mode);
-
-#ifndef ANSI_PROTOTYPES
- code = va_arg (p, enum rtx_code);
- mode = va_arg (p, enum machine_mode);
-#endif
+ VA_OPEN (p, mode);
+ VA_FIXEDARG (p, enum rtx_code, code);
+ VA_FIXEDARG (p, enum machine_mode, mode);
switch (code)
{
break;
}
- va_end (p);
+ VA_CLOSE (p);
return rt_val;
}
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
-#ifndef ANSI_PROTOTYPES
- int n;
-#endif
- int i;
- va_list p;
+ int i, save_n;
rtx *vector;
- VA_START (p, n);
-
-#ifndef ANSI_PROTOTYPES
- n = va_arg (p, int);
-#endif
+ VA_OPEN (p, n);
+ VA_FIXEDARG (p, int, n);
if (n == 0)
return NULL_RTVEC; /* Don't allocate an empty rtvec... */
for (i = 0; i < n; i++)
vector[i] = va_arg (p, rtx);
- va_end (p);
- return gen_rtvec_v (n, vector);
+ /* The definition of VA_* in K&R C causes `n' to go out of scope. */
+ save_n = n;
+ VA_CLOSE (p);
+
+ return gen_rtvec_v (save_n, vector);
}
rtvec
return rt_val;
}
-
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
This pseudo is assigned the next sequential register number. */
abort ();
return result;
}
+
+/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
+ can be a VOIDmode constant. */
+rtx
+gen_highpart_mode (outermode, innermode, exp)
+ enum machine_mode outermode, innermode;
+ rtx exp;
+{
+ if (GET_MODE (exp) != VOIDmode)
+ {
+ if (GET_MODE (exp) != innermode)
+ abort ();
+ return gen_highpart (outermode, exp);
+ }
+ return simplify_gen_subreg (outermode, exp, innermode,
+ subreg_highpart_offset (outermode, innermode));
+}
/* Return offset in bytes to get OUTERMODE low part
of the value in mode INNERMODE stored in memory in target format. */
if (mode == VOIDmode)
abort ();
- /* If OP is narrower than a word, fail. */
+ /* If OP is narrower than a word, fail. */
if (mode != BLKmode
&& (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
return 0;
- /* If we want a word outside OP, return zero. */
+ /* If we want a word outside OP, return zero. */
if (mode != BLKmode
&& (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
return const0_rtx;
/* Form a new MEM at the requested address. */
if (GET_CODE (op) == MEM)
{
- rtx addr = plus_constant (XEXP (op, 0), (offset * UNITS_PER_WORD));
- rtx new;
+ rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
- if (validate_address)
+ if (! validate_address)
+ return new;
+
+ else if (reload_completed)
{
- if (reload_completed)
- {
- if (! strict_memory_address_p (word_mode, addr))
- return 0;
- }
- else
- addr = memory_address (word_mode, addr);
+ if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
+ return 0;
}
-
- new = gen_rtx_MEM (word_mode, addr);
- MEM_COPY_ATTRIBUTES (new, op);
- return new;
+ else
+ return replace_equiv_address (new, XEXP (new, 0));
}
/* Rest can be handled by simplify_subreg. */
}
}
\f
+
+/* Given REF, a MEM, and T, either the type of X or the expression
+ corresponding to REF, set the memory attributes. OBJECTP is nonzero
+ if we are making a new object of this type. */
+
+void
+set_mem_attributes (ref, t, objectp)
+ rtx ref;
+ tree t;
+ int objectp;
+{
+ tree type;
+
+ /* It can happen that type_for_mode was given a mode for which there
+ is no language-level type, in which case it returns NULL, which
+ we can see here. */
+ if (t == NULL_TREE)
+ return;
+
+ type = TYPE_P (t) ? t : TREE_TYPE (t);
+
+ /* Get the alias set from the expression or type (perhaps using a
+ front-end routine) and then copy bits from the type. */
+
+ /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY (type)
+ here, because, in C and C++, the fact that a location is accessed
+ through a const expression does not mean that the value there can
+ never change. */
+
+ /* If we have already set DECL_RTL = ref, get_alias_set will get the
+ wrong answer, as it assumes that DECL_RTL already has the right alias
+ info. Callers should not set DECL_RTL until after the call to
+ set_mem_attributes. */
+ if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
+ abort ();
+
+ set_mem_alias_set (ref, get_alias_set (t));
+
+ MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
+ MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
+
+ /* If we are making an object of this type, we know that it is a scalar if
+ the type is not an aggregate. */
+ if (objectp && ! AGGREGATE_TYPE_P (type))
+ MEM_SCALAR_P (ref) = 1;
+
+ /* If T is a type, this is all we can do. Otherwise, we may be able
+ to deduce some more information about the expression. */
+ if (TYPE_P (t))
+ return;
+
+ maybe_set_unchanging (ref, t);
+ if (TREE_THIS_VOLATILE (t))
+ MEM_VOLATILE_P (ref) = 1;
+
+ /* Now see if we can say more about whether it's an aggregate or
+ scalar. If we already know it's an aggregate, don't bother. */
+ if (MEM_IN_STRUCT_P (ref))
+ return;
+
+ /* Now remove any NOPs: they don't change what the underlying object is.
+ Likewise for SAVE_EXPR. */
+ while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
+ || TREE_CODE (t) == NON_LVALUE_EXPR || TREE_CODE (t) == SAVE_EXPR)
+ t = TREE_OPERAND (t, 0);
+
+ /* Since we already know the type isn't an aggregate, if this is a decl,
+ it must be a scalar. Or if it is a reference into an aggregate,
+ this is part of an aggregate. Otherwise we don't know. */
+ if (DECL_P (t))
+ MEM_SCALAR_P (ref) = 1;
+ else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
+ || TREE_CODE (t) == ARRAY_RANGE_REF
+ || TREE_CODE (t) == BIT_FIELD_REF)
+ MEM_IN_STRUCT_P (ref) = 1;
+}
+
+/* Set the alias set of MEM to SET. */
+
+void
+set_mem_alias_set (mem, set)
+ rtx mem;
+ HOST_WIDE_INT set;
+{
+ /* It would be nice to enable this check, but we can't quite yet. */
+#if 0
+#ifdef ENABLE_CHECKING
+ /* If the new and old alias sets don't conflict, something is wrong. */
+ if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
+ abort ();
+#endif
+#endif
+
+ MEM_ATTRS (mem) = get_mem_attrs (set, MEM_DECL (mem), MEM_OFFSET (mem),
+ MEM_SIZE (mem), MEM_ALIGN (mem));
+}
+\f
/* Return a memory reference like MEMREF, but with its mode changed
to MODE and its address changed to ADDR.
(VOIDmode means don't change the mode.
- NULL for ADDR means don't change the address.) */
+ NULL for ADDR means don't change the address.)
+ VALIDATE is nonzero if the returned memory location is required to be
+ valid. */
rtx
-change_address (memref, mode, addr)
+change_address_1 (memref, mode, addr, validate)
rtx memref;
enum machine_mode mode;
rtx addr;
+ int validate;
{
rtx new;
if (addr == 0)
addr = XEXP (memref, 0);
- /* If reload is in progress or has completed, ADDR must be valid.
- Otherwise, we can call memory_address to make it valid. */
- if (reload_completed || reload_in_progress)
+ if (validate)
{
- if (! memory_address_p (mode, addr))
- abort ();
+ if (reload_in_progress || reload_completed)
+ {
+ if (! memory_address_p (mode, addr))
+ abort ();
+ }
+ else
+ addr = memory_address (mode, addr);
}
- else
- addr = memory_address (mode, addr);
if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
return memref;
{
/* For now, this is just a wrapper for change_address, but eventually
will do memref tracking. */
- return
- change_address (memref, mode, plus_constant (XEXP (memref, 0), offset));
+ rtx addr = XEXP (memref, 0);
+
+ /* ??? Prefer to create garbage instead of creating shared rtl. */
+ addr = copy_rtx (addr);
+
+ /* If MEMREF is a LO_SUM and the offset is within the alignment of the
+ object, we can merge it into the LO_SUM. */
+ if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
+ && offset >= 0
+ && (unsigned HOST_WIDE_INT) offset
+ < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
+ addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
+ plus_constant (XEXP (addr, 1), offset));
+ else
+ addr = plus_constant (addr, offset);
+
+ return change_address (memref, mode, addr);
+}
+
+/* Likewise, but the reference is not required to be valid. */
+
+rtx
+adjust_address_nv (memref, mode, offset)
+ rtx memref;
+ enum machine_mode mode;
+ HOST_WIDE_INT offset;
+{
+ /* For now, this is just a wrapper for change_address, but eventually
+ will do memref tracking. */
+ rtx addr = XEXP (memref, 0);
+
+ /* If MEMREF is a LO_SUM and the offset is within the alignment of the
+ object, we can merge it into the LO_SUM. */
+ if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
+ && offset >= 0
+ && (unsigned HOST_WIDE_INT) offset
+ < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
+ addr = gen_rtx_LO_SUM (mode, XEXP (addr, 0),
+ plus_constant (XEXP (addr, 1), offset));
+ else
+ addr = plus_constant (addr, offset);
+
+ return change_address_1 (memref, mode, addr, 0);
}
/* Return a memory reference like MEMREF, but with its address changed to
will do memref tracking. */
return change_address (memref, VOIDmode, addr);
}
+/* Likewise, but the reference is not required to be valid. */
+
+rtx
+replace_equiv_address_nv (memref, addr)
+ rtx memref;
+ rtx addr;
+{
+ /* For now, this is just a wrapper for change_address, but eventually
+ will do memref tracking. */
+ return change_address_1 (memref, VOIDmode, addr, 0);
+}
\f
/* Return a newly created CODE_LABEL rtx with a unique label number. */
}
/* Go through all virtual stack slots of a function and mark them as
- not shared. */
+ not shared. */
static void
reset_used_decls (blk)
tree blk;
{
rtx before = PREV_INSN (trial);
rtx after = NEXT_INSN (trial);
- rtx seq = split_insns (pat, trial);
int has_barrier = 0;
rtx tem;
+ rtx note, seq;
+ int probability;
+
+ if (any_condjump_p (trial)
+ && (note = find_reg_note (trial, REG_BR_PROB, 0)))
+ split_branch_probability = INTVAL (XEXP (note, 0));
+ probability = split_branch_probability;
+
+ seq = split_insns (pat, trial);
+
+ split_branch_probability = -1;
/* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
We may need to handle this specially. */
it, in turn, will be split (SFmode on the 29k is an example). */
if (GET_CODE (seq) == SEQUENCE)
{
- int i;
- rtx eh_note;
+ int i, njumps = 0;
/* Avoid infinite loop if any insn of the result matches
the original pattern. */
/* Mark labels. */
for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
- mark_jump_label (PATTERN (XVECEXP (seq, 0, i)),
- XVECEXP (seq, 0, i), 0, 0);
+ {
+ rtx insn = XVECEXP (seq, 0, i);
+ mark_jump_label (PATTERN (insn),
+ XVECEXP (seq, 0, i), 0);
+ njumps++;
+ if (probability != -1
+ && any_condjump_p (insn)
+ && !find_reg_note (insn, REG_BR_PROB, 0))
+ {
+ /* We can preserve the REG_BR_PROB notes only if exactly
+ one jump is created, otherwise the machine description
+ is responsible for this step using
+ split_branch_probability variable. */
+ if (njumps != 1)
+ abort ();
+ REG_NOTES (insn)
+ = gen_rtx_EXPR_LIST (REG_BR_PROB,
+ GEN_INT (probability),
+ REG_NOTES (insn));
+ }
+ }
/* If we are splitting a CALL_INSN, look for the CALL_INSN
in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
= CALL_INSN_FUNCTION_USAGE (trial);
- /* Copy EH notes. */
- if ((eh_note = find_reg_note (trial, REG_EH_REGION, NULL_RTX)))
- for (i = 0; i < XVECLEN (seq, 0); i++)
- {
- rtx insn = XVECEXP (seq, 0, i);
- if (GET_CODE (insn) == CALL_INSN
- || (flag_non_call_exceptions
- && may_trap_p (PATTERN (insn))))
- REG_NOTES (insn)
- = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
- REG_NOTES (insn));
- }
+ /* Copy notes, particularly those related to the CFG. */
+ for (note = REG_NOTES (trial); note ; note = XEXP (note, 1))
+ {
+ switch (REG_NOTE_KIND (note))
+ {
+ case REG_EH_REGION:
+ for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
+ {
+ rtx insn = XVECEXP (seq, 0, i);
+ if (GET_CODE (insn) == CALL_INSN
+ || (flag_non_call_exceptions
+ && may_trap_p (PATTERN (insn))))
+ REG_NOTES (insn)
+ = gen_rtx_EXPR_LIST (REG_EH_REGION,
+ XEXP (note, 0),
+ REG_NOTES (insn));
+ }
+ break;
+
+ case REG_NORETURN:
+ case REG_SETJMP:
+ case REG_ALWAYS_RETURN:
+ for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
+ {
+ rtx insn = XVECEXP (seq, 0, i);
+ if (GET_CODE (insn) == CALL_INSN)
+ REG_NOTES (insn)
+ = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
+ XEXP (note, 0),
+ REG_NOTES (insn));
+ }
+ break;
+
+ case REG_NON_LOCAL_GOTO:
+ for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
+ {
+ rtx insn = XVECEXP (seq, 0, i);
+ if (GET_CODE (insn) == JUMP_INSN)
+ REG_NOTES (insn)
+ = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
+ XEXP (note, 0),
+ REG_NOTES (insn));
+ }
+ break;
+
+ default:
+ break;
+ }
+ }
/* If there are LABELS inside the split insns increment the
usage count so we don't delete the label. */
if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
- tem = emit_insn_after (seq, before);
+ tem = emit_insn_after (seq, trial);
- delete_insn (trial);
+ delete_related_insns (trial);
if (has_barrier)
emit_barrier_after (tem);
/* Return either the first or the last insn, depending on which was
requested. */
return last
- ? (after ? prev_active_insn (after) : last_insn)
- : next_active_insn (before);
+ ? (after ? PREV_INSN (after) : last_insn)
+ : NEXT_INSN (before);
}
return trial;
rtx insn, after;
{
rtx next = NEXT_INSN (after);
+ basic_block bb;
if (optimize && INSN_DELETED_P (after))
abort ();
abort ();
}
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
+ && (bb = BLOCK_FOR_INSN (after)))
+ {
+ set_block_for_insn (insn, bb);
+ /* Should not happen as the first insn in the BB is always
+ either a NOTE or a LABEL. */
+ if (bb->end == after
+ /* Avoid clobbering of structure when creating new BB. */
+ && GET_CODE (insn) != BARRIER
+ && (GET_CODE (insn) != NOTE
+ || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
+ bb->end = insn;
+ }
+
NEXT_INSN (after) = insn;
if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
{
rtx insn, before;
{
rtx prev = PREV_INSN (before);
+ basic_block bb;
if (optimize && INSN_DELETED_P (before))
abort ();
abort ();
}
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (before) < basic_block_for_insn->num_elements
+ && (bb = BLOCK_FOR_INSN (before)))
+ {
+ set_block_for_insn (insn, bb);
+ /* Should not happen as the first insn in the BB is always
+ either a NOTE or a LABEL. */
+ if (bb->head == insn
+ /* Avoid clobbering of structure when creating new BB. */
+ && GET_CODE (insn) != BARRIER
+ && (GET_CODE (insn) != NOTE
+ || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
+ abort ();
+ }
+
PREV_INSN (before) = insn;
if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
{
rtx next = NEXT_INSN (insn);
rtx prev = PREV_INSN (insn);
+ basic_block bb;
+
if (prev)
{
NEXT_INSN (prev) = next;
if (stack == 0)
abort ();
}
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (insn) < basic_block_for_insn->num_elements
+ && (bb = BLOCK_FOR_INSN (insn)))
+ {
+ if (bb->head == insn)
+ {
+ /* Never ever delete the basic block note without deleting the whole
+ basic block. */
+ if (GET_CODE (insn) == NOTE)
+ abort ();
+ bb->head = next;
+ }
+ if (bb->end == insn)
+ bb->end = prev;
+ }
}
/* Delete all insns made since FROM.
called after delay-slot filling has been done. */
void
-reorder_insns (from, to, after)
+reorder_insns_nobb (from, to, after)
rtx from, to, after;
{
/* Splice this bunch out of where it is now. */
last_insn = to;
}
+/* Same as function above, but take care to update BB boundaries. */
+void
+reorder_insns (from, to, after)
+ rtx from, to, after;
+{
+ rtx prev = PREV_INSN (from);
+ basic_block bb, bb2;
+
+ reorder_insns_nobb (from, to, after);
+
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
+ && (bb = BLOCK_FOR_INSN (after)))
+ {
+ rtx x;
+
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (from) < basic_block_for_insn->num_elements
+ && (bb2 = BLOCK_FOR_INSN (from)))
+ {
+ if (bb2->end == to)
+ bb2->end = prev;
+ }
+
+ if (bb->end == after)
+ bb->end = to;
+
+ for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
+ set_block_for_insn (x, bb);
+ }
+}
+
/* Return the line note insn preceding INSN. */
static rtx
if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
{
- /* We just verified that this BLOCK matches us
- with the block_stack check above. */
- if (debug_ignore_block (NOTE_BLOCK (insn)))
+ /* We just verified that this BLOCK matches us with
+ the block_stack check above. Never delete the
+ BLOCK for the outermost scope of the function; we
+ can refer to names from that scope even if the
+ block notes are messed up. */
+ if (! is_body_block (NOTE_BLOCK (insn))
+ && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
{
remove_insn (tmp);
remove_insn (insn);
return insn;
}
-/* Similar to emit_insn_before, but update basic block boundaries as well. */
-
-rtx
-emit_block_insn_before (pattern, before, block)
- rtx pattern, before;
- basic_block block;
-{
- rtx prev = PREV_INSN (before);
- rtx r = emit_insn_before (pattern, before);
- if (block && block->head == before)
- block->head = NEXT_INSN (prev);
- return r;
-}
-
/* Make an instruction with body PATTERN and code JUMP_INSN
and output it before the instruction BEFORE. */
insn);
}
-/* Similar to emit_insn_after, but update basic block boundaries as well. */
-
-rtx
-emit_block_insn_after (pattern, after, block)
- rtx pattern, after;
- basic_block block;
-{
- rtx r = emit_insn_after (pattern, after);
- if (block && block->end == after)
- block->end = r;
- return r;
-}
-
/* Make an insn of code JUMP_INSN with body PATTERN
and output it after the insn AFTER. */
{
register rtx last;
register rtx after_after;
+ basic_block bb;
if (!after)
abort ();
if (!first)
- return first;
+ return after;
- for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
- continue;
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
+ && (bb = BLOCK_FOR_INSN (after)))
+ {
+ for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
+ set_block_for_insn (last, bb);
+ set_block_for_insn (last, bb);
+ if (bb->end == after)
+ bb->end = last;
+ }
+ else
+ for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
+ continue;
after_after = NEXT_INSN (after);
}
/* Place a note of KIND on insn INSN with DATUM as the datum. If a
- note of this type already exists, remove it first. */
+ note of this type already exists, remove it first. */
void
set_unique_reg_note (insn, kind, datum)
if (len == 1
&& ! RTX_FRAME_RELATED_P (first_insn)
&& GET_CODE (first_insn) == INSN
- /* Don't throw away any reg notes. */
+ /* Don't throw away any reg notes. */
&& REG_NOTES (first_insn) == 0)
return PATTERN (first_insn);
enum machine_mode mode;
enum machine_mode double_mode;
- /* Initialize the CONST_INT hash table. */
+ /* Initialize the CONST_INT and memory attribute hash tables. */
const_int_htab = htab_create (37, const_int_htab_hash,
const_int_htab_eq, NULL);
- ggc_add_root (&const_int_htab, 1, sizeof (const_int_htab),
- rtx_htab_mark);
+ ggc_add_deletable_htab (const_int_htab, 0, 0);
+
+ mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
+ mem_attrs_htab_eq, NULL);
+ ggc_add_deletable_htab (mem_attrs_htab, 0, mem_attrs_mark);
no_line_numbers = ! line_numbers;