GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
/* Middle-to-low level generation of rtx code and insns.
#include "insn-config.h"
#include "recog.h"
#include "real.h"
+#include "fixed-value.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
+#include "df.h"
/* Commonly used modes. */
REAL_VALUE_TYPE dconstsqrt2;
REAL_VALUE_TYPE dconste;
+/* Record the fixed-point constants 0 and 1.  */
+FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
+FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
+
/* All references to the following fixed hard registers go through
these unique rtl objects. On machines where the frame-pointer and
arg-pointer are the same register, they use the same unique object.
static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
-static void reset_used_decls (tree);
+static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t
const_int_htab_hash (const void *x)
{
- return (hashval_t) INTVAL ((rtx) x);
+ return (hashval_t) INTVAL ((const_rtx) x);
}
/* Returns nonzero if the value represented by X (which is really a
static int
const_int_htab_eq (const void *x, const void *y)
{
- return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
+ return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}
/* Returns a hash code for X (which is really a CONST_DOUBLE). */
static hashval_t
const_double_htab_hash (const void *x)
{
- rtx value = (rtx) x;
+ const_rtx const value = (const_rtx) x;
hashval_t h;
if (GET_MODE (value) == VOIDmode)
static int
const_double_htab_eq (const void *x, const void *y)
{
- rtx a = (rtx)x, b = (rtx)y;
+ const_rtx const a = (const_rtx)x, b = (const_rtx)y;
if (GET_MODE (a) != GET_MODE (b))
return 0;
static hashval_t
mem_attrs_htab_hash (const void *x)
{
- mem_attrs *p = (mem_attrs *) x;
+ const mem_attrs *const p = (const mem_attrs *) x;
return (p->alias ^ (p->align * 1000)
^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
static int
mem_attrs_htab_eq (const void *x, const void *y)
{
- mem_attrs *p = (mem_attrs *) x;
- mem_attrs *q = (mem_attrs *) y;
+ const mem_attrs *const p = (const mem_attrs *) x;
+ const mem_attrs *const q = (const mem_attrs *) y;
return (p->alias == q->alias && p->offset == q->offset
&& p->size == q->size && p->align == q->align
static hashval_t
reg_attrs_htab_hash (const void *x)
{
- reg_attrs *p = (reg_attrs *) x;
+ const reg_attrs *const p = (const reg_attrs *) x;
return ((p->offset * 1000) ^ (long) p->decl);
}
static int
reg_attrs_htab_eq (const void *x, const void *y)
{
- reg_attrs *p = (reg_attrs *) x;
- reg_attrs *q = (reg_attrs *) y;
+ const reg_attrs *const p = (const reg_attrs *) x;
+ const reg_attrs *const q = (const reg_attrs *) y;
return (p->decl == q->decl && p->offset == q->offset);
}
return *slot;
}
+
+#if !HAVE_blockage
+/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
+   across this insn.  Only compiled in when the target does not provide
+   its own "blockage" insn pattern (i.e. HAVE_blockage is false).  */
+
+rtx
+gen_blockage (void)
+{
+  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
+  /* The volatile flag is what makes this ASM_INPUT act as a scheduling
+     barrier.  */
+  MEM_VOLATILE_P (x) = true;
+  return x;
+}
+#endif
+
+
/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
don't attempt to share with the various global pieces of rtl (such as
frame_pointer_rtx). */
struct function *f = cfun;
rtx val;
- /* Don't let anything called after initial flow analysis create new
- registers. */
- gcc_assert (!no_new_pseudos);
+ gcc_assert (can_create_pseudo_p ());
if (generating_concat_p
&& (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
If X is not a SUBREG, always return 1 (it is its own low part!). */
int
-subreg_lowpart_p (rtx x)
+subreg_lowpart_p (const_rtx x)
{
if (GET_CODE (x) != SUBREG)
return 1;
and 0 otherwise. */
int
-mem_expr_equal_p (tree expr1, tree expr2)
+mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
if (expr1 == expr2)
return 1;
{
reset_used_flags (PATTERN (p));
reset_used_flags (REG_NOTES (p));
- reset_used_flags (LOG_LINKS (p));
}
/* Make sure that virtual stack slots are not shared. */
- reset_used_decls (DECL_INITIAL (cfun->decl));
+ set_used_decls (DECL_INITIAL (cfun->decl));
/* Make sure that virtual parameters are not shared. */
for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
- reset_used_flags (DECL_RTL (decl));
+ set_used_flags (DECL_RTL (decl));
reset_used_flags (stack_slot_list);
break;
case CONST:
- /* CONST can be shared if it contains a SYMBOL_REF. If it contains
- a LABEL_REF, it isn't sharable. */
- if (GET_CODE (XEXP (x, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
- && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
+ if (shared_const_p (orig))
return;
break;
{
reset_used_flags (PATTERN (p));
reset_used_flags (REG_NOTES (p));
- reset_used_flags (LOG_LINKS (p));
if (GET_CODE (PATTERN (p)) == SEQUENCE)
{
int i;
gcc_assert (INSN_P (q));
reset_used_flags (PATTERN (q));
reset_used_flags (REG_NOTES (q));
- reset_used_flags (LOG_LINKS (q));
}
}
}
{
verify_rtx_sharing (PATTERN (p), p);
verify_rtx_sharing (REG_NOTES (p), p);
- verify_rtx_sharing (LOG_LINKS (p), p);
}
}
{
PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
- LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
}
}
/* Go through all virtual stack slots of a function and mark them as
- not shared. */
+ shared. We never replace the DECL_RTLs themselves with a copy,
+ but expressions mentioned in a DECL_RTL cannot be shared with
+ expressions in the instruction stream.
+
+ Note that reload may convert pseudo registers into memories in-place.
+ Pseudo registers are always shared, but MEMs never are. Thus if we
+ reset the used flags on MEMs in the instruction stream, we must set
+ them again on MEMs that appear in DECL_RTLs. */
+
static void
-reset_used_decls (tree blk)
+set_used_decls (tree blk)
{
tree t;
/* Mark decls. */
for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
if (DECL_RTL_SET_P (t))
- reset_used_flags (DECL_RTL (t));
+ set_used_flags (DECL_RTL (t));
/* Now process sub-blocks. */
for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
- reset_used_decls (t);
+ set_used_decls (t);
}
/* Mark ORIG as in use, and return a copy of it if it was already in use.
break;
case CONST:
- /* CONST can be shared if it contains a SYMBOL_REF. If it contains
- a LABEL_REF, it isn't sharable. */
- if (GET_CODE (XEXP (x, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
- && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
+ if (shared_const_p (x))
return;
break;
same as next_real_insn. */
int
-active_insn_p (rtx insn)
+active_insn_p (const_rtx insn)
{
return (CALL_P (insn) || JUMP_P (insn)
|| (NONJUMP_INSN_P (insn)
insn_last = NEXT_INSN (insn_last);
}
+ /* We will be adding the new sequence to the function. The splitters
+ may have introduced invalid RTL sharing, so unshare the sequence now. */
+ unshare_all_rtl_in_chain (seq);
+
/* Mark labels. */
for (insn = insn_last; insn ; insn = PREV_INSN (insn))
{
INSN_UID (insn) = cur_insn_uid++;
PATTERN (insn) = pattern;
INSN_CODE (insn) = -1;
- LOG_LINKS (insn) = NULL;
REG_NOTES (insn) = NULL;
INSN_LOCATOR (insn) = curr_insn_locator ();
BLOCK_FOR_INSN (insn) = NULL;
PATTERN (insn) = pattern;
INSN_CODE (insn) = -1;
- LOG_LINKS (insn) = NULL;
REG_NOTES (insn) = NULL;
JUMP_LABEL (insn) = NULL;
INSN_LOCATOR (insn) = curr_insn_locator ();
PATTERN (insn) = pattern;
INSN_CODE (insn) = -1;
- LOG_LINKS (insn) = NULL;
REG_NOTES (insn) = NULL;
CALL_INSN_FUNCTION_USAGE (insn) = NULL;
INSN_LOCATOR (insn) = curr_insn_locator ();
SEQUENCE. */
void
-add_insn_after (rtx insn, rtx after)
+add_insn_after (rtx insn, rtx after, basic_block bb)
{
rtx next = NEXT_INSN (after);
- basic_block bb;
gcc_assert (!optimize || !INSN_DELETED_P (after));
{
set_block_for_insn (insn, bb);
if (INSN_P (insn))
- bb->flags |= BB_DIRTY;
+ df_insn_rescan (insn);
/* Should not happen as first in the BB is always
either NOTE or LABEL. */
if (BB_END (bb) == after
}
/* Add INSN into the doubly-linked list before insn BEFORE. This and
- the previous should be the only functions called to insert an insn once
- delay slots have been filled since only they know how to update a
- SEQUENCE. */
+ the previous should be the only functions called to insert an insn
+ once delay slots have been filled since only they know how to
+ update a SEQUENCE. If BB is NULL, an attempt is made to infer the
+ basic block from BEFORE. */
void
-add_insn_before (rtx insn, rtx before)
+add_insn_before (rtx insn, rtx before, basic_block bb)
{
rtx prev = PREV_INSN (before);
- basic_block bb;
gcc_assert (!optimize || !INSN_DELETED_P (before));
gcc_assert (stack);
}
- if (!BARRIER_P (before)
- && !BARRIER_P (insn)
- && (bb = BLOCK_FOR_INSN (before)))
+ if (!bb
+ && !BARRIER_P (before)
+ && !BARRIER_P (insn))
+ bb = BLOCK_FOR_INSN (before);
+
+ if (bb)
{
set_block_for_insn (insn, bb);
if (INSN_P (insn))
- bb->flags |= BB_DIRTY;
+ df_insn_rescan (insn);
/* Should not happen as first in the BB is always either NOTE or
LABEL. */
gcc_assert (BB_HEAD (bb) != insn
PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
+
+/* Replace INSN with a deleted instruction note, in place: the insn is
+   left in the chain, but its code is changed to a NOTE_INSN_DELETED
+   note.  */
+
+void set_insn_deleted (rtx insn)
+{
+  /* Tell the dataflow framework to discard its records for INSN first.  */
+  df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
+  PUT_CODE (insn, NOTE);
+  NOTE_KIND (insn) = NOTE_INSN_DELETED;
+}
+
+
/* Remove an insn from its doubly-linked list. This function knows how
to handle sequences. */
void
rtx prev = PREV_INSN (insn);
basic_block bb;
+ /* Later in the code, the block will be marked dirty. */
+ df_insn_delete (NULL, INSN_UID (insn));
+
if (prev)
{
NEXT_INSN (prev) = next;
&& (bb = BLOCK_FOR_INSN (insn)))
{
if (INSN_P (insn))
- bb->flags |= BB_DIRTY;
+ df_set_bb_dirty (bb);
if (BB_HEAD (bb) == insn)
{
/* Never ever delete the basic block note without deleting whole
&& (bb = BLOCK_FOR_INSN (after)))
{
rtx x;
- bb->flags |= BB_DIRTY;
+ df_set_bb_dirty (bb);
if (!BARRIER_P (from)
&& (bb2 = BLOCK_FOR_INSN (from)))
{
if (BB_END (bb2) == to)
BB_END (bb2) = prev;
- bb2->flags |= BB_DIRTY;
+ df_set_bb_dirty (bb2);
}
if (BB_END (bb) == after)
for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
if (!BARRIER_P (x))
- set_block_for_insn (x, bb);
+ {
+ set_block_for_insn (x, bb);
+ df_insn_change_bb (x);
+ }
}
}
/* Make X be output before the instruction BEFORE. */
rtx
-emit_insn_before_noloc (rtx x, rtx before)
+emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
{
rtx last = before;
rtx insn;
while (insn)
{
rtx next = NEXT_INSN (insn);
- add_insn_before (insn, before);
+ add_insn_before (insn, before, bb);
last = insn;
insn = next;
}
default:
last = make_insn_raw (x);
- add_insn_before (last, before);
+ add_insn_before (last, before, bb);
break;
}
while (insn)
{
rtx next = NEXT_INSN (insn);
- add_insn_before (insn, before);
+ add_insn_before (insn, before, NULL);
last = insn;
insn = next;
}
default:
last = make_jump_insn_raw (x);
- add_insn_before (last, before);
+ add_insn_before (last, before, NULL);
break;
}
while (insn)
{
rtx next = NEXT_INSN (insn);
- add_insn_before (insn, before);
+ add_insn_before (insn, before, NULL);
last = insn;
insn = next;
}
default:
last = make_call_insn_raw (x);
- add_insn_before (last, before);
+ add_insn_before (last, before, NULL);
break;
}
INSN_UID (insn) = cur_insn_uid++;
- add_insn_before (insn, before);
+ add_insn_before (insn, before, NULL);
return insn;
}
if (INSN_UID (label) == 0)
{
INSN_UID (label) = cur_insn_uid++;
- add_insn_before (label, before);
+ add_insn_before (label, before, NULL);
}
return label;
INSN_UID (note) = cur_insn_uid++;
NOTE_KIND (note) = subtype;
BLOCK_FOR_INSN (note) = NULL;
+ memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
- add_insn_before (note, before);
+ add_insn_before (note, before, NULL);
return note;
}
\f
/* Helper for emit_insn_after, handles lists of instructions
efficiently. */
-static rtx emit_insn_after_1 (rtx, rtx);
-
static rtx
-emit_insn_after_1 (rtx first, rtx after)
+emit_insn_after_1 (rtx first, rtx after, basic_block bb)
{
rtx last;
rtx after_after;
- basic_block bb;
+ if (!bb && !BARRIER_P (after))
+ bb = BLOCK_FOR_INSN (after);
- if (!BARRIER_P (after)
- && (bb = BLOCK_FOR_INSN (after)))
+ if (bb)
{
- bb->flags |= BB_DIRTY;
+ df_set_bb_dirty (bb);
for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
if (!BARRIER_P (last))
- set_block_for_insn (last, bb);
+ {
+ set_block_for_insn (last, bb);
+ df_insn_rescan (last);
+ }
if (!BARRIER_P (last))
- set_block_for_insn (last, bb);
+ {
+ set_block_for_insn (last, bb);
+ df_insn_rescan (last);
+ }
if (BB_END (bb) == after)
BB_END (bb) = last;
}
return last;
}
-/* Make X be output after the insn AFTER. */
+/* Make X be output after the insn AFTER and set the BB of insn. If
+ BB is NULL, an attempt is made to infer the BB from AFTER. */
rtx
-emit_insn_after_noloc (rtx x, rtx after)
+emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
rtx last = after;
case CODE_LABEL:
case BARRIER:
case NOTE:
- last = emit_insn_after_1 (x, after);
+ last = emit_insn_after_1 (x, after, bb);
break;
#ifdef ENABLE_RTL_CHECKING
default:
last = make_insn_raw (x);
- add_insn_after (last, after);
+ add_insn_after (last, after, bb);
break;
}
case CODE_LABEL:
case BARRIER:
case NOTE:
- last = emit_insn_after_1 (x, after);
+ last = emit_insn_after_1 (x, after, NULL);
break;
#ifdef ENABLE_RTL_CHECKING
default:
last = make_jump_insn_raw (x);
- add_insn_after (last, after);
+ add_insn_after (last, after, NULL);
break;
}
case CODE_LABEL:
case BARRIER:
case NOTE:
- last = emit_insn_after_1 (x, after);
+ last = emit_insn_after_1 (x, after, NULL);
break;
#ifdef ENABLE_RTL_CHECKING
default:
last = make_call_insn_raw (x);
- add_insn_after (last, after);
+ add_insn_after (last, after, NULL);
break;
}
INSN_UID (insn) = cur_insn_uid++;
- add_insn_after (insn, after);
+ add_insn_after (insn, after, NULL);
return insn;
}
if (INSN_UID (label) == 0)
{
INSN_UID (label) = cur_insn_uid++;
- add_insn_after (label, after);
+ add_insn_after (label, after, NULL);
}
return label;
INSN_UID (note) = cur_insn_uid++;
NOTE_KIND (note) = subtype;
BLOCK_FOR_INSN (note) = NULL;
- add_insn_after (note, after);
+ memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
+ add_insn_after (note, after, NULL);
return note;
}
\f
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
- rtx last = emit_insn_after_noloc (pattern, after);
+ rtx last = emit_insn_after_noloc (pattern, after, NULL);
if (pattern == NULL_RTX || !loc)
return last;
if (INSN_P (after))
return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
else
- return emit_insn_after_noloc (pattern, after);
+ return emit_insn_after_noloc (pattern, after, NULL);
}
/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
rtx first = PREV_INSN (before);
- rtx last = emit_insn_before_noloc (pattern, before);
+ rtx last = emit_insn_before_noloc (pattern, before, NULL);
if (pattern == NULL_RTX || !loc)
return last;
if (INSN_P (before))
return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
else
- return emit_insn_before_noloc (pattern, before);
+ return emit_insn_before_noloc (pattern, before, NULL);
}
/* like emit_insn_before_noloc, but set insn_locator according to scope. */
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
rtx note = find_reg_note (insn, kind, NULL_RTX);
+ rtx new_note = NULL;
switch (kind)
{
It serves no useful purpose and breaks eliminate_regs. */
if (GET_CODE (datum) == ASM_OPERANDS)
return NULL_RTX;
+
+ if (note)
+ {
+ XEXP (note, 0) = datum;
+ df_notes_rescan (insn);
+ return note;
+ }
break;
default:
+ if (note)
+ {
+ XEXP (note, 0) = datum;
+ return note;
+ }
break;
}
- if (note)
+ new_note = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
+ REG_NOTES (insn) = new_note;
+
+ switch (kind)
{
- XEXP (note, 0) = datum;
- return note;
+ case REG_EQUAL:
+ case REG_EQUIV:
+ df_notes_rescan (insn);
+ break;
+ default:
+ break;
}
- REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
return REG_NOTES (insn);
}
\f
last_insn = last;
}
+/* Like push_to_sequence, but take the last insn as an argument to avoid
+   looping through the list.  LAST must really be the final insn of the
+   chain headed by FIRST; this is not verified here.  */
+
+void
+push_to_sequence2 (rtx first, rtx last)
+{
+  start_sequence ();
+
+  first_insn = first;
+  last_insn = last;
+}
+
/* Set up the outer-level insn chain
as the current sequence, saving the previously current one. */
break;
case CONST:
- /* CONST can be shared if it contains a SYMBOL_REF. If it contains
- a LABEL_REF, it isn't sharable. */
- if (GET_CODE (XEXP (orig, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
- && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
+ if (shared_const_p (orig))
return orig;
break;
const_tiny_rtx[i][(int) mode] = GEN_INT (i);
}
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
+ const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
+ }
+
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
+ const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
+ }
+
for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
}
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ FCONST0(mode).data.high = 0;
+ FCONST0(mode).data.low = 0;
+ FCONST0(mode).mode = mode;
+ }
+
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ FCONST0(mode).data.high = 0;
+ FCONST0(mode).data.low = 0;
+ FCONST0(mode).mode = mode;
+ }
+
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ FCONST0(mode).data.high = 0;
+ FCONST0(mode).data.low = 0;
+ FCONST0(mode).mode = mode;
+
+ /* We store the value 1. */
+ FCONST1(mode).data.high = 0;
+ FCONST1(mode).data.low = 0;
+ FCONST1(mode).mode = mode;
+ lshift_double (1, 0, GET_MODE_FBIT (mode),
+ 2 * HOST_BITS_PER_WIDE_INT,
+ &FCONST1(mode).data.low,
+ &FCONST1(mode).data.high,
+ SIGNED_FIXED_POINT_MODE_P (mode));
+ }
+
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ FCONST0(mode).data.high = 0;
+ FCONST0(mode).data.low = 0;
+ FCONST0(mode).mode = mode;
+
+ /* We store the value 1. */
+ FCONST1(mode).data.high = 0;
+ FCONST1(mode).data.low = 0;
+ FCONST1(mode).mode = mode;
+ lshift_double (1, 0, GET_MODE_FBIT (mode),
+ 2 * HOST_BITS_PER_WIDE_INT,
+ &FCONST1(mode).data.low,
+ &FCONST1(mode).data.high,
+ SIGNED_FIXED_POINT_MODE_P (mode));
+ }
+
for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
const_tiny_rtx[0][i] = const0_rtx;