/* Convert tree expression to rtl instructions, for GNU compiler.
- Copyright (C) 1988, 1992 Free Software Foundation, Inc.
+ Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.
This file is part of GNU CC.
#include "insn-config.h"
#include "recog.h"
#include "output.h"
-#include "gvarargs.h"
#include "typeclass.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
/* Decide whether a function's arguments should be processed
- from first to last or from last to first. */
+ from first to last or from last to first.
+
+ They should if the stack and args grow in opposite directions, but
+ only if we have push insns. */
-#ifdef STACK_GROWS_DOWNWARD
#ifdef PUSH_ROUNDING
+
+#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED /* If it's last to first */
#endif
+
#endif
#ifndef STACK_PUSH_CODE
returned. */
static rtx saveregs_value;
-rtx store_expr ();
-static void store_constructor ();
-static rtx store_field ();
-static rtx expand_builtin ();
-static rtx compare ();
-static rtx do_store_flag ();
-static void preexpand_calls ();
-static rtx expand_increment ();
-static void init_queue ();
-
-void do_pending_stack_adjust ();
-static void do_jump_for_compare ();
-static void do_jump_by_parts_equality ();
-static void do_jump_by_parts_equality_rtx ();
-static void do_jump_by_parts_greater ();
+/* Similarly for __builtin_apply_args. */
+static rtx apply_args_value;
+
+/* This structure is used by move_by_pieces to describe the move to
+ be performed. */
+
+struct move_by_pieces
+{
+ rtx to;
+ rtx to_addr;
+ int autinc_to;
+ int explicit_inc_to;
+ rtx from;
+ rtx from_addr;
+ int autinc_from;
+ int explicit_inc_from;
+ int len;
+ int offset;
+ int reverse;
+};
+
+static rtx enqueue_insn PROTO((rtx, rtx));
+static int queued_subexp_p PROTO((rtx));
+static void init_queue PROTO((void));
+static void move_by_pieces PROTO((rtx, rtx, int, int));
+static int move_by_pieces_ninsns PROTO((unsigned int, int));
+static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
+ struct move_by_pieces *));
+static void group_insns PROTO((rtx));
+static void store_constructor PROTO((tree, rtx));
+static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
+ enum machine_mode, int, int, int));
+static tree save_noncopied_parts PROTO((tree, tree));
+static tree init_noncopied_parts PROTO((tree, tree));
+static int safe_from_p PROTO((rtx, tree));
+static int fixed_type_p PROTO((tree));
+static int get_pointer_alignment PROTO((tree, unsigned));
+static tree string_constant PROTO((tree, tree *));
+static tree c_strlen PROTO((tree));
+static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
+static int apply_args_size PROTO((void));
+static int apply_result_size PROTO((void));
+static rtx result_vector PROTO((int, rtx));
+static rtx expand_builtin_apply_args PROTO((void));
+static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
+static void expand_builtin_return PROTO((rtx));
+static rtx expand_increment PROTO((tree, int));
+static void preexpand_calls PROTO((tree));
+static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
+static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
+static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
+static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
+static void do_jump_for_compare PROTO((rtx, rtx, rtx));
+static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
+static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
/* Record for each mode whether we can move a register directly to or
from an object of that mode in memory. If we can't, we won't try
#endif
/* This array records the insn_code of insns to perform block moves. */
-static enum insn_code movstr_optab[NUM_MACHINE_MODES];
+enum insn_code movstr_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif
+
+/* Register mappings for target machines without register windows. */
+#ifndef INCOMING_REGNO
+#define INCOMING_REGNO(OUT) (OUT)
+#endif
+#ifndef OUTGOING_REGNO
+#define OUTGOING_REGNO(IN) (IN)
+#endif
\f
/* This is run once per compilation to set up which modes can be used
directly in memory and to initialize the block move optab. */
{
rtx insn, pat;
enum machine_mode mode;
+ /* Try indexing by frame ptr and try by stack ptr.
+ It is known that on the Convex the stack ptr isn't a valid index.
+ With luck, one or the other is valid on any machine. */
rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
+ rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
start_sequence ();
insn = emit_insn (gen_rtx (SET, 0, 0));
direct_load[(int) mode] = direct_store[(int) mode] = 0;
PUT_MODE (mem, mode);
+ PUT_MODE (mem1, mode);
/* See if there is some register that can be used in this mode and
directly loaded or stored from memory. */
if (recog (pat, insn, &num_clobbers) >= 0)
direct_load[(int) mode] = 1;
+ SET_SRC (pat) = mem1;
+ SET_DEST (pat) = reg;
+ if (recog (pat, insn, &num_clobbers) >= 0)
+ direct_load[(int) mode] = 1;
+
SET_SRC (pat) = reg;
SET_DEST (pat) = mem;
if (recog (pat, insn, &num_clobbers) >= 0)
direct_store[(int) mode] = 1;
- }
- movstr_optab[(int) mode] = CODE_FOR_nothing;
+ SET_SRC (pat) = reg;
+ SET_DEST (pat) = mem1;
+ if (recog (pat, insn, &num_clobbers) >= 0)
+ direct_store[(int) mode] = 1;
+ }
}
end_sequence ();
-
-#ifdef HAVE_movstrqi
- if (HAVE_movstrqi)
- movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
-#endif
-#ifdef HAVE_movstrhi
- if (HAVE_movstrhi)
- movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
-#endif
-#ifdef HAVE_movstrsi
- if (HAVE_movstrsi)
- movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
-#endif
-#ifdef HAVE_movstrdi
- if (HAVE_movstrdi)
- movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
-#endif
-#ifdef HAVE_movstrti
- if (HAVE_movstrti)
- movstr_optab[(int) TImode] = CODE_FOR_movstrti;
-#endif
}
/* This is run at the start of compiling a function. */
inhibit_defer_pop = 0;
cleanups_this_call = 0;
saveregs_value = 0;
+ apply_args_value = 0;
forced_labels = 0;
}
p->inhibit_defer_pop = inhibit_defer_pop;
p->cleanups_this_call = cleanups_this_call;
p->saveregs_value = saveregs_value;
+ p->apply_args_value = apply_args_value;
p->forced_labels = forced_labels;
pending_stack_adjust = 0;
inhibit_defer_pop = 0;
cleanups_this_call = 0;
saveregs_value = 0;
+ apply_args_value = 0;
forced_labels = 0;
}
inhibit_defer_pop = p->inhibit_defer_pop;
cleanups_this_call = p->cleanups_this_call;
saveregs_value = p->saveregs_value;
+ apply_args_value = p->apply_args_value;
forced_labels = p->forced_labels;
}
\f
if (to_real)
{
+#ifdef HAVE_extendqfhf2
+  if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
+    {
+      emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
+      return;
+    }
+#endif
+#ifdef HAVE_extendqfsf2
+ if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
+ {
+ emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_extendqfdf2
+ if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
+ {
+ emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_extendqfxf2
+ if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
+ {
+ emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_extendqftf2
+ if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
+ {
+ emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+
+#ifdef HAVE_extendhfsf2
+ if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
+ {
+ emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_extendhfdf2
+ if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
+ {
+ emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_extendhfxf2
+ if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
+ {
+ emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_extendhftf2
+ if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
+ {
+ emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+
#ifdef HAVE_extendsfdf2
if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
{
return;
}
#endif
+
+#ifdef HAVE_trunchfqf2
+ if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
+ {
+ emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_truncsfqf2
+ if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
+ {
+ emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_truncdfqf2
+ if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
+ {
+ emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_truncxfqf2
+ if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
+ {
+ emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_trunctfqf2
+ if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
+ {
+ emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_truncsfhf2
+ if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
+ {
+ emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_truncdfhf2
+ if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
+ {
+ emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_truncxfhf2
+ if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
+ {
+ emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_trunctfhf2
+ if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
+ {
+ emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
#ifdef HAVE_truncdfsf2
if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
{
end_sequence ();
emit_no_conflict_block (insns, to, from, NULL_RTX,
- gen_rtx (equiv_code, to_mode, from));
+ gen_rtx (equiv_code, to_mode, copy_rtx (from)));
return;
}
- if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
+ /* Truncating multi-word to a word or less. */
+ if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
+ && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
{
convert_move (to, gen_lowpart (word_mode, from), 0);
return;
/* For truncation, usually we can just refer to FROM in a narrower mode. */
if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
&& TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
- GET_MODE_BITSIZE (from_mode))
- && ((GET_CODE (from) == MEM
- && ! MEM_VOLATILE_P (from)
- && direct_load[(int) to_mode]
- && ! mode_dependent_address_p (XEXP (from, 0)))
- || GET_CODE (from) == REG
- || GET_CODE (from) == SUBREG))
+ GET_MODE_BITSIZE (from_mode)))
{
+ if (!((GET_CODE (from) == MEM
+ && ! MEM_VOLATILE_P (from)
+ && direct_load[(int) to_mode]
+ && ! mode_dependent_address_p (XEXP (from, 0)))
+ || GET_CODE (from) == REG
+ || GET_CODE (from) == SUBREG))
+ from = force_reg (from_mode, from);
emit_move_insn (to, gen_lowpart (to_mode, from));
return;
}
- /* For truncation, usually we can just refer to FROM in a narrower mode. */
+ /* Handle extension. */
if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
{
/* Convert directly if that works. */
through protect_from_queue before calling.
ALIGN (in bytes) is maximum alignment we can assume. */
-struct move_by_pieces
-{
- rtx to;
- rtx to_addr;
- int autinc_to;
- int explicit_inc_to;
- rtx from;
- rtx from_addr;
- int autinc_from;
- int explicit_inc_from;
- int len;
- int offset;
- int reverse;
-};
-
-static void move_by_pieces_1 ();
-static int move_by_pieces_ninsns ();
-
static void
move_by_pieces (to, from, len, align)
rtx to, from;
if (code != CODE_FOR_nothing
/* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
here because if SIZE is less than the mode mask, as it is
- returned by the macro, it will definately be less than the
+ returned by the macro, it will definitely be less than the
actual mode mask. */
- && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
+ && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
&& (insn_operand_predicate[(int) code][0] == 0
|| (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
&& (insn_operand_predicate[(int) code][1] == 0
emit_library_call (memcpy_libfunc, 0,
VOIDmode, 3, XEXP (x, 0), Pmode,
XEXP (y, 0), Pmode,
- convert_to_mode (Pmode, size, 1), Pmode);
+ convert_to_mode (TYPE_MODE (sizetype), size,
+ TREE_UNSIGNED (sizetype)),
+ TYPE_MODE (sizetype));
#else
emit_library_call (bcopy_libfunc, 0,
VOIDmode, 3, XEXP (y, 0), Pmode,
XEXP (x, 0), Pmode,
- convert_to_mode (Pmode, size, 1), Pmode);
+ convert_to_mode (TYPE_MODE (sizetype), size,
+ TREE_UNSIGNED (sizetype)),
+ TYPE_MODE (sizetype));
#endif
}
}
/* Mark the instructions since PREV as a libcall block.
Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
-static rtx
+static void
group_insns (prev)
rtx prev;
{
if (mode == BLKmode)
abort ();
+ return emit_move_insn_1 (x, y);
+}
+
+/* Low level part of emit_move_insn.
+ Called just like emit_move_insn, but assumes X and Y
+ are basically valid. */
+
+rtx
+emit_move_insn_1 (x, y)
+ rtx x, y;
+{
+ enum machine_mode mode = GET_MODE (x);
+ enum machine_mode submode;
+ enum mode_class class = GET_MODE_CLASS (mode);
+ int i;
+
if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
(class == MODE_COMPLEX_INT
return
emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
- /* Expand complex moves by moving real part and imag part, if posible. */
+ /* Expand complex moves by moving real part and imag part, if possible. */
else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
&& submode != BLKmode
&& (mov_optab->handlers[(int) submode].insn_code
gen_lowpart (submode, y)));
group_insns (prev);
+
+ return get_last_insn ();
}
/* This will handle any multi-word mode that lacks a move_insn pattern.
return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
-static rtx
+rtx
gen_push_operand ()
{
return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
ALIGN (in bytes) is maximum alignment we can assume.
- If PARTIAL is nonzero, then copy that many of the first words
- of X into registers starting with REG, and push the rest of X.
+ If PARTIAL and REG are both nonzero, then copy that many of the first
+ words of X into registers starting with REG, and push the rest of X.
The amount of space pushed is decreased by PARTIAL words,
rounded *down* to a multiple of PARM_BOUNDARY.
REG must be a hard register in this case.
+   If REG is zero but PARTIAL is not, take all other actions for an
+   argument partially in registers, but do not actually load any
+   registers.
EXTRA is the amount in bytes of extra space to leave next to this arg.
This is ignored if an argument block has already been allocated.
&& ((unsigned) INTVAL (size)
< (1 << (GET_MODE_BITSIZE (QImode) - 1))))
{
- emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
- xinner, size, GEN_INT (align)));
- goto ret;
+ rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
+ xinner, size, GEN_INT (align));
+ if (pat != 0)
+ {
+ emit_insn (pat);
+ goto ret;
+ }
}
#endif
#ifdef HAVE_movstrhi
&& ((unsigned) INTVAL (size)
< (1 << (GET_MODE_BITSIZE (HImode) - 1))))
{
- emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
- xinner, size, GEN_INT (align)));
- goto ret;
+ rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
+ xinner, size, GEN_INT (align));
+ if (pat != 0)
+ {
+ emit_insn (pat);
+ goto ret;
+ }
}
#endif
#ifdef HAVE_movstrsi
if (HAVE_movstrsi)
{
- emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
- xinner, size, GEN_INT (align)));
- goto ret;
+ rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
+ xinner, size, GEN_INT (align));
+ if (pat != 0)
+ {
+ emit_insn (pat);
+ goto ret;
+ }
}
#endif
#ifdef HAVE_movstrdi
if (HAVE_movstrdi)
{
- emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
- xinner, size, GEN_INT (align)));
- goto ret;
+ rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
+ xinner, size, GEN_INT (align));
+ if (pat != 0)
+ {
+ emit_insn (pat);
+ goto ret;
+ }
}
#endif
#ifdef TARGET_MEM_FUNCTIONS
emit_library_call (memcpy_libfunc, 0,
VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
- size, Pmode);
+ convert_to_mode (TYPE_MODE (sizetype),
+ size, TREE_UNSIGNED (sizetype)),
+ TYPE_MODE (sizetype));
#else
emit_library_call (bcopy_libfunc, 0,
VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
- size, Pmode);
+ convert_to_mode (TYPE_MODE (sizetype),
+ size, TREE_UNSIGNED (sizetype)),
+ TYPE_MODE (sizetype));
#endif
OK_DEFER_POP;
}
/* If part should go in registers, copy that part
into the appropriate registers. Do this now, at the end,
since mem-to-mem copies above may do function calls. */
- if (partial > 0)
+ if (partial > 0 && reg != 0)
move_block_to_reg (REGNO (reg), x, partial, mode);
if (extra && args_addr == 0 && where_pad == stack_direction)
anti_adjust_stack (GEN_INT (extra));
}
\f
-/* Output a library call to function FUN (a SYMBOL_REF rtx)
- (emitting the queue unless NO_QUEUE is nonzero),
- for a value of mode OUTMODE,
- with NARGS different arguments, passed as alternating rtx values
- and machine_modes to convert them to.
- The rtx values should have been passed through protect_from_queue already.
-
- NO_QUEUE will be true if and only if the library call is a `const' call
- which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
- to the variable is_const in expand_call.
-
- NO_QUEUE must be true for const calls, because if it isn't, then
- any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
- and will be lost if the libcall sequence is optimized away.
-
- NO_QUEUE must be false for non-const calls, because if it isn't, the
- call insn will have its CONST_CALL_P bit set, and it will be incorrectly
- optimized. For instance, the instruction scheduler may incorrectly
- move memory references across the non-const call. */
-
-void
-emit_library_call (va_alist)
- va_dcl
-{
- va_list p;
- struct args_size args_size;
- register int argnum;
- enum machine_mode outmode;
- int nargs;
- rtx fun;
- rtx orgfun;
- int inc;
- int count;
- rtx argblock = 0;
- CUMULATIVE_ARGS args_so_far;
- struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
- struct args_size offset; struct args_size size; };
- struct arg *argvec;
- int old_inhibit_defer_pop = inhibit_defer_pop;
- int no_queue = 0;
- rtx use_insns;
-
- va_start (p);
- orgfun = fun = va_arg (p, rtx);
- no_queue = va_arg (p, int);
- outmode = va_arg (p, enum machine_mode);
- nargs = va_arg (p, int);
-
- /* Copy all the libcall-arguments out of the varargs data
- and into a vector ARGVEC.
-
- Compute how to pass each argument. We only support a very small subset
- of the full argument passing conventions to limit complexity here since
- library functions shouldn't have many args. */
-
- argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
-
- INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);
-
- args_size.constant = 0;
- args_size.var = 0;
-
- for (count = 0; count < nargs; count++)
- {
- rtx val = va_arg (p, rtx);
- enum machine_mode mode = va_arg (p, enum machine_mode);
-
- /* We cannot convert the arg value to the mode the library wants here;
- must do it earlier where we know the signedness of the arg. */
- if (mode == BLKmode
- || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
- abort ();
-
- /* On some machines, there's no way to pass a float to a library fcn.
- Pass it as a double instead. */
-#ifdef LIBGCC_NEEDS_DOUBLE
- if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
- val = convert_to_mode (DFmode, val, 0), mode = DFmode;
-#endif
-
- /* There's no need to call protect_from_queue, because
- either emit_move_insn or emit_push_insn will do that. */
-
- /* Make sure it is a reasonable operand for a move or push insn. */
- if (GET_CODE (val) != REG && GET_CODE (val) != MEM
- && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
- val = force_operand (val, NULL_RTX);
-
- argvec[count].value = val;
- argvec[count].mode = mode;
-
-#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
- if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
- abort ();
-#endif
-
- argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
- if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
- abort ();
-#ifdef FUNCTION_ARG_PARTIAL_NREGS
- argvec[count].partial
- = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
-#else
- argvec[count].partial = 0;
-#endif
-
- locate_and_pad_parm (mode, NULL_TREE,
- argvec[count].reg && argvec[count].partial == 0,
- NULL_TREE, &args_size, &argvec[count].offset,
- &argvec[count].size);
-
- if (argvec[count].size.var)
- abort ();
-
-#ifndef REG_PARM_STACK_SPACE
- if (argvec[count].partial)
- argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
-#endif
-
- if (argvec[count].reg == 0 || argvec[count].partial != 0
-#ifdef REG_PARM_STACK_SPACE
- || 1
-#endif
- )
- args_size.constant += argvec[count].size.constant;
-
-#ifdef ACCUMULATE_OUTGOING_ARGS
- /* If this arg is actually passed on the stack, it might be
- clobbering something we already put there (this library call might
- be inside the evaluation of an argument to a function whose call
- requires the stack). This will only occur when the library call
- has sufficient args to run out of argument registers. Abort in
- this case; if this ever occurs, code must be added to save and
- restore the arg slot. */
-
- if (argvec[count].reg == 0 || argvec[count].partial != 0)
- abort ();
-#endif
-
- FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
- }
- va_end (p);
-
- /* If this machine requires an external definition for library
- functions, write one out. */
- assemble_external_libcall (fun);
-
-#ifdef STACK_BOUNDARY
- args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
- / STACK_BYTES) * STACK_BYTES);
-#endif
-
-#ifdef REG_PARM_STACK_SPACE
- args_size.constant = MAX (args_size.constant,
- REG_PARM_STACK_SPACE ((tree) 0));
-#endif
-
-#ifdef ACCUMULATE_OUTGOING_ARGS
- if (args_size.constant > current_function_outgoing_args_size)
- current_function_outgoing_args_size = args_size.constant;
- args_size.constant = 0;
-#endif
-
-#ifndef PUSH_ROUNDING
- argblock = push_block (GEN_INT (args_size.constant), 0, 0);
-#endif
-
-#ifdef PUSH_ARGS_REVERSED
- inc = -1;
- argnum = nargs - 1;
-#else
- inc = 1;
- argnum = 0;
-#endif
-
- /* Push the args that need to be pushed. */
-
- for (count = 0; count < nargs; count++, argnum += inc)
- {
- register enum machine_mode mode = argvec[argnum].mode;
- register rtx val = argvec[argnum].value;
- rtx reg = argvec[argnum].reg;
- int partial = argvec[argnum].partial;
-
- if (! (reg != 0 && partial == 0))
- emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
- argblock, GEN_INT (argvec[count].offset.constant));
- NO_DEFER_POP;
- }
-
-#ifdef PUSH_ARGS_REVERSED
- argnum = nargs - 1;
-#else
- argnum = 0;
-#endif
-
- /* Now load any reg parms into their regs. */
-
- for (count = 0; count < nargs; count++, argnum += inc)
- {
- register enum machine_mode mode = argvec[argnum].mode;
- register rtx val = argvec[argnum].value;
- rtx reg = argvec[argnum].reg;
- int partial = argvec[argnum].partial;
-
- if (reg != 0 && partial == 0)
- emit_move_insn (reg, val);
- NO_DEFER_POP;
- }
-
- /* For version 1.37, try deleting this entirely. */
- if (! no_queue)
- emit_queue ();
-
- /* Any regs containing parms remain in use through the call. */
- start_sequence ();
- for (count = 0; count < nargs; count++)
- if (argvec[count].reg != 0)
- emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
-
- use_insns = get_insns ();
- end_sequence ();
-
- fun = prepare_call_address (fun, NULL_TREE, &use_insns);
-
- /* Don't allow popping to be deferred, since then
- cse'ing of library calls could delete a call and leave the pop. */
- NO_DEFER_POP;
-
- /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
- will set inhibit_defer_pop to that value. */
-
- emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
- FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
- outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
- old_inhibit_defer_pop + 1, use_insns, no_queue);
-
- /* Now restore inhibit_defer_pop to its actual original value. */
- OK_DEFER_POP;
-}
-\f
/* Expand an assignment that stores the value of FROM into TO.
If WANT_VALUE is nonzero, return an rtx for the value of TO.
(This may contain a QUEUED rtx.)
preserve_temp_slots (result);
free_temp_slots ();
- return result;
+ /* If we aren't returning a result, just pass on what expand_expr
+ returned; it was probably const0_rtx. Otherwise, convert RESULT
+ to the proper mode. */
+ return (want_value ? convert_to_mode (TYPE_MODE (TREE_TYPE (to)), result,
+ TREE_UNSIGNED (TREE_TYPE (to)))
+ : result);
+ }
+
+ /* If the rhs is a function call and its value is not an aggregate,
+ call the function before we start to compute the lhs.
+ This is needed for correct code for cases such as
+ val = setjmp (buf) on machines where reference to val
+ requires loading up part of an address in a separate insn. */
+ if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from))
+ {
+ rtx value = expand_expr (from, NULL_RTX, VOIDmode, 0);
+ if (to_rtx == 0)
+ to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
+ emit_move_insn (to_rtx, value);
+ preserve_temp_slots (to_rtx);
+ free_temp_slots ();
+ return to_rtx;
}
/* Ordinary treatment. Expand TO to get a REG or MEM rtx.
if (to_rtx == 0)
to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
+ /* Don't move directly into a return register. */
+ if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
+ {
+ rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
+ emit_move_insn (to_rtx, temp);
+ preserve_temp_slots (to_rtx);
+ free_temp_slots ();
+ return to_rtx;
+ }
+
/* In case we are returning the contents of an object which overlaps
the place the value is being stored, use a safe function when copying
a value through a pointer into a structure value return block. */
emit_library_call (memcpy_libfunc, 0,
VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
XEXP (from_rtx, 0), Pmode,
- size, Pmode);
+ convert_to_mode (TYPE_MODE (sizetype),
+ size, TREE_UNSIGNED (sizetype)),
+ TYPE_MODE (sizetype));
#else
emit_library_call (bcopy_libfunc, 0,
VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
XEXP (to_rtx, 0), Pmode,
- size, Pmode);
+ convert_to_mode (TYPE_MODE (sizetype),
+ size, TREE_UNSIGNED (sizetype)),
+ TYPE_MODE (sizetype));
#endif
preserve_temp_slots (to_rtx);
OK_DEFER_POP;
return target;
}
- else if (suggest_reg && GET_CODE (target) == MEM
+ else if (suggest_reg && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
&& GET_MODE (target) != BLKmode)
/* If target is in memory and caller wants value in a register instead,
arrange that. Pass TARGET as target for expand_expr so that,
if EXP is another assignment, SUGGEST_REG will be nonzero for it.
- We know expand_expr will not use the target in that case. */
+ We know expand_expr will not use the target in that case.
+ Don't do this if TARGET is volatile because we are supposed
+ to write it and then read it. */
{
temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
GET_MODE (target), 0);
So copy the value through a temporary and use that temp
as the result. */
{
+	     /* ??? There may be a bug here in the case of a target
+		that is volatile, but I'm too sleepy today to write anything
+		to handle it.  */
if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
{
/* Expand EXP into a new pseudo. */
{
temp = expand_expr (exp, target, GET_MODE (target), 0);
/* DO return TARGET if it's a specified hardware register.
- expand_return relies on this. */
+ expand_return relies on this.
+ DO return TARGET if it's a volatile mem ref; ANSI requires this. */
if (!(target && GET_CODE (target) == REG
&& REGNO (target) < FIRST_PSEUDO_REGISTER)
- && CONSTANT_P (temp))
+ && CONSTANT_P (temp)
+ && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
dont_return_target = 1;
}
{
/* Compute the size of the data to copy from the string. */
tree copy_size
- = fold (build (MIN_EXPR, sizetype,
- size_binop (CEIL_DIV_EXPR,
- TYPE_SIZE (TREE_TYPE (exp)),
- size_int (BITS_PER_UNIT)),
- convert (sizetype,
- build_int_2 (TREE_STRING_LENGTH (exp), 0))));
+ = size_binop (MIN_EXPR,
+ size_binop (CEIL_DIV_EXPR,
+ TYPE_SIZE (TREE_TYPE (exp)),
+ size_int (BITS_PER_UNIT)),
+ convert (sizetype,
+ build_int_2 (TREE_STRING_LENGTH (exp), 0)));
rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
VOIDmode, 0);
rtx label = 0;
}
#endif
- if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
+ if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
+ || TREE_CODE (type) == QUAL_UNION_TYPE)
{
register tree elt;
/* Inform later passes that the whole union value is dead. */
- if (TREE_CODE (type) == UNION_TYPE)
+ if (TREE_CODE (type) == UNION_TYPE
+ || TREE_CODE (type) == QUAL_UNION_TYPE)
emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
/* If we are building a static constructor into a register,
if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
|| (GET_CODE (target) == REG && TREE_STATIC (exp)))
- clear_storage (target, maxelt - minelt + 1);
+ clear_storage (target, int_size_in_bytes (type));
else
/* Inform later passes that the old value is dead. */
emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
if (mode == VOIDmode
|| (mode != BLKmode && ! direct_store[(int) mode])
|| GET_CODE (target) == REG
- || GET_CODE (target) == SUBREG)
+ || GET_CODE (target) == SUBREG
+ /* If the field isn't aligned enough to fetch as a unit,
+ fetch it as a bit field. */
+#ifdef STRICT_ALIGNMENT
+ || align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
+ || bitpos % GET_MODE_ALIGNMENT (mode) != 0
+#endif
+ )
{
rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
/* Store the value in the bitfield. */
/* If possible, avoid refetching from the bitfield itself. */
if (width_mask != 0
&& ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
- return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
+ {
+ tree count;
+ enum machine_mode tmode;
+
+ if (unsignedp)
+ return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
+ tmode = GET_MODE (temp);
+ if (tmode == VOIDmode)
+ tmode = value_mode;
+ count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
+ temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
+ return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
+ }
return extract_bit_field (target, bitsize, bitpos, unsignedp,
NULL_RTX, value_mode, 0, align,
total_size);
\f
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
- ARRAY_REFs at constant positions and find the ultimate containing object,
- which we return.
+ ARRAY_REFs and find the ultimate containing object, which we return.
We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
bit position, and *PUNSIGNEDP to the signedness of the field.
this case, but the address of the object can be found. */
tree
-get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
+get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
+ punsignedp, pvolatilep)
tree exp;
int *pbitsize;
int *pbitpos;
{
tree size_tree = 0;
enum machine_mode mode = VOIDmode;
- tree offset = 0;
+ tree offset = integer_zero_node;
if (TREE_CODE (exp) == COMPONENT_REF)
{
? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
: TREE_OPERAND (exp, 2));
+ /* If this field hasn't been filled in yet, don't go
+ past it. This should only happen when folding expressions
+ made during type construction. */
+ if (pos == 0)
+ break;
+
if (TREE_CODE (pos) == PLUS_EXPR)
{
tree constant, var;
}
else
abort ();
+
*pbitpos += TREE_INT_CST_LOW (constant);
- if (offset)
- offset = size_binop (PLUS_EXPR, offset,
- size_binop (FLOOR_DIV_EXPR, var,
- size_int (BITS_PER_UNIT)));
- else
- offset = size_binop (FLOOR_DIV_EXPR, var,
- size_int (BITS_PER_UNIT));
+ offset = size_binop (PLUS_EXPR, offset,
+ size_binop (FLOOR_DIV_EXPR, var,
+ size_int (BITS_PER_UNIT)));
}
else if (TREE_CODE (pos) == INTEGER_CST)
*pbitpos += TREE_INT_CST_LOW (pos);
{
/* Assume here that the offset is a multiple of a unit.
If not, there should be an explicitly added constant. */
- if (offset)
- offset = size_binop (PLUS_EXPR, offset,
- size_binop (FLOOR_DIV_EXPR, pos,
- size_int (BITS_PER_UNIT)));
- else
- offset = size_binop (FLOOR_DIV_EXPR, pos,
- size_int (BITS_PER_UNIT));
+ offset = size_binop (PLUS_EXPR, offset,
+ size_binop (FLOOR_DIV_EXPR, pos,
+ size_int (BITS_PER_UNIT)));
}
}
- else if (TREE_CODE (exp) == ARRAY_REF
- && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
- && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
+ else if (TREE_CODE (exp) == ARRAY_REF)
{
- *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
- * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
+ /* This code is based on the code in case ARRAY_REF in expand_expr
+ below. We assume here that the size of an array element is
+ always an integral multiple of BITS_PER_UNIT. */
+
+ tree index = TREE_OPERAND (exp, 1);
+ tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ tree low_bound
+ = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
+ tree index_type = TREE_TYPE (index);
+
+ if (! integer_zerop (low_bound))
+ index = fold (build (MINUS_EXPR, index_type, index, low_bound));
+
+ if (TYPE_PRECISION (index_type) != POINTER_SIZE)
+ {
+ index = convert (type_for_size (POINTER_SIZE, 0), index);
+ index_type = TREE_TYPE (index);
+ }
+
+ index = fold (build (MULT_EXPR, index_type, index,
+ TYPE_SIZE (TREE_TYPE (exp))));
+
+ if (TREE_CODE (index) == INTEGER_CST
+ && TREE_INT_CST_HIGH (index) == 0)
+ *pbitpos += TREE_INT_CST_LOW (index);
+ else
+ offset = size_binop (PLUS_EXPR, offset,
+ size_binop (FLOOR_DIV_EXPR, index,
+ size_int (BITS_PER_UNIT)));
}
else if (TREE_CODE (exp) != NON_LVALUE_EXPR
&& ! ((TREE_CODE (exp) == NOP_EXPR
/* If this was a bit-field, see if there is a mode that allows direct
access in case EXP is in memory. */
- if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
+ if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
{
mode = mode_for_size (*pbitsize, MODE_INT, 0);
if (mode == BLKmode)
mode = VOIDmode;
}
+ if (integer_zerop (offset))
+ offset = 0;
+
*pmode = mode;
*poffset = offset;
#if 0
/* Given an rtx VALUE that may contain additions and multiplications,
return an equivalent value that just refers to a register or memory.
This is done by generating instructions to perform the arithmetic
- and returning a pseudo-register containing the value. */
+ and returning a pseudo-register containing the value.
+
+ The returned value may be a REG, SUBREG, MEM or constant. */
rtx
force_operand (value, target)
return expand_binop (GET_MODE (value), binoptab, tmp,
force_operand (op2, NULL_RTX),
target, 0, OPTAB_LIB_WIDEN);
- /* We give UNSIGNEP = 0 to expand_binop
+ /* We give UNSIGNEDP = 0 to expand_binop
because the only operations we are expanding here are signed ones. */
}
return value;
switch (TREE_CODE (exp))
{
case ADDR_EXPR:
- return staticp (TREE_OPERAND (exp, 0));
+ return (staticp (TREE_OPERAND (exp, 0))
+ || safe_from_p (x, TREE_OPERAND (exp, 0)));
case INDIRECT_REF:
if (GET_CODE (x) == MEM)
/* Use subtarget as the target for operand 0 of a binary operation. */
rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
rtx original_target = target;
- int ignore = target == const0_rtx;
+ int ignore = (target == const0_rtx
+ || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
+ || code == CONVERT_EXPR || code == REFERENCE_EXPR)
+ && TREE_CODE (type) == VOID_TYPE));
tree context;
/* Don't use hard regs as subtargets, because the combiner
if (preserve_subexpressions_p ())
subtarget = 0;
- if (ignore) target = 0, original_target = 0;
+ /* If we are going to ignore this result, we need only do something
+ if there is a side-effect somewhere in the expression. If there
+ is, short-circuit the most common cases here. */
+
+ if (ignore)
+ {
+ if (! TREE_SIDE_EFFECTS (exp))
+ return const0_rtx;
+
+ /* Ensure we reference a volatile object even if value is ignored. */
+ if (TREE_THIS_VOLATILE (exp)
+ && TREE_CODE (exp) != FUNCTION_DECL
+ && mode != VOIDmode && mode != BLKmode)
+ {
+ temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
+ if (GET_CODE (temp) == MEM)
+ temp = copy_to_reg (temp);
+ return const0_rtx;
+ }
+
+ if (TREE_CODE_CLASS (code) == '1')
+ return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
+ VOIDmode, modifier);
+ else if (TREE_CODE_CLASS (code) == '2'
+ || TREE_CODE_CLASS (code) == '<')
+ {
+ expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
+ expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
+ return const0_rtx;
+ }
+ else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
+ && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
+ /* If the second operand has no side effects, just evaluate
+ the first. */
+ return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
+ VOIDmode, modifier);
+
+ target = 0, original_target = 0;
+ }
/* If will do cse, generate all results into pseudo registers
since 1) that allows cse to find more things
&& (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
target = subtarget;
- /* Ensure we reference a volatile object even if value is ignored. */
- if (ignore && TREE_THIS_VOLATILE (exp)
- && mode != VOIDmode && mode != BLKmode)
- {
- target = gen_reg_rtx (mode);
- temp = expand_expr (exp, target, VOIDmode, modifier);
- if (temp != target)
- emit_move_insn (target, temp);
- return target;
- }
-
switch (code)
{
case LABEL_DECL:
case RESULT_DECL:
if (DECL_RTL (exp) == 0)
abort ();
- /* Ensure variable marked as used
- even if it doesn't go through a parser. */
- TREE_USED (exp) = 1;
+ /* Ensure variable marked as used even if it doesn't go through
+ a parser. If it hasn't been used yet, write out an external
+ definition. */
+ if (! TREE_USED (exp))
+ {
+ assemble_external (exp);
+ TREE_USED (exp) = 1;
+ }
+
/* Handle variables inherited from containing functions. */
context = decl_function_context (exp);
}
SAVE_EXPR_RTL (exp) = temp;
- store_expr (TREE_OPERAND (exp, 0), temp, 0);
if (!optimize && GET_CODE (temp) == REG)
save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
save_expr_regs);
+
+ /* If the mode of TEMP does not match that of the expression, it
+ must be a promoted value. We pass store_expr a SUBREG of the
+ wanted mode but mark it so that we know that it was already
+ extended. Note that `unsignedp' was modified above in
+ this case. */
+
+ if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
+ {
+ temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
+ SUBREG_PROMOTED_VAR_P (temp) = 1;
+ SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
+ }
+
+ store_expr (TREE_OPERAND (exp, 0), temp, 0);
}
/* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
return SAVE_EXPR_RTL (exp);
case EXIT_EXPR:
- /* Exit the current loop if the body-expression is true. */
- {
- rtx label = gen_label_rtx ();
- do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
- expand_exit_loop (NULL_PTR);
- emit_label (label);
- }
+ expand_exit_loop_if_false (NULL_PTR,
+ invert_truthvalue (TREE_OPERAND (exp, 0)));
return const0_rtx;
case LOOP_EXPR:
return RTL_EXPR_RTL (exp);
case CONSTRUCTOR:
+ /* If we don't need the result, just ensure we evaluate any
+ subexpressions. */
+ if (ignore)
+ {
+ tree elt;
+ for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
+ expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
+ return const0_rtx;
+ }
/* All elts simple constants => refer to a constant in memory. But
if this is a non-BLKmode mode, let it store a field at a time
since that should make a CONST_INT or CONST_DOUBLE when we
- fold. */
- if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
+ fold. If we are making an initializer and all operands are
+ constant, put it in memory as well. */
+ else if ((TREE_STATIC (exp)
+ && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
+ || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
{
rtx constructor = output_constant_def (exp);
if (modifier != EXPAND_CONST_ADDRESS
return constructor;
}
- if (ignore)
- {
- tree elt;
- for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
- expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
- return const0_rtx;
- }
else
{
if (target == 0 || ! safe_from_p (target, exp))
target = gen_reg_rtx (mode);
else
{
- rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
- if (target)
- MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
- target = safe_target;
+ enum tree_code c = TREE_CODE (type);
+ target
+ = assign_stack_temp (mode, int_size_in_bytes (type), 0);
+ if (c == RECORD_TYPE || c == UNION_TYPE
+ || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
+ MEM_IN_STRUCT_P (target) = 1;
}
}
store_constructor (exp, target);
|| TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
|| TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
|| TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
+ || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
|| (TREE_CODE (exp1) == ADDR_EXPR
&& (exp2 = TREE_OPERAND (exp1, 0))
&& (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
|| TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
- || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
+ || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
+ || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
MEM_IN_STRUCT_P (temp) = 1;
- MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
-#if 0 /* It is incorrectto set RTX_UNCHANGING_P here, because the fact that
+ MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
+#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
a location is accessed through a pointer to const does not mean
that the value there can never change. */
RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
}
case ARRAY_REF:
- if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
- || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
- {
- /* Nonconstant array index or nonconstant element size.
- Generate the tree for *(&array+index) and expand that,
- except do it in a language-independent way
- and don't complain about non-lvalue arrays.
- `mark_addressable' should already have been called
- for any array for which this case will be reached. */
-
- /* Don't forget the const or volatile flag from the array element. */
- tree variant_type = build_type_variant (type,
- TREE_READONLY (exp),
- TREE_THIS_VOLATILE (exp));
- tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
- TREE_OPERAND (exp, 0));
- tree index = TREE_OPERAND (exp, 1);
- tree elt;
-
- /* Convert the integer argument to a type the same size as a pointer
- so the multiply won't overflow spuriously. */
- if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
- index = convert (type_for_size (POINTER_SIZE, 0), index);
-
- /* Don't think the address has side effects
- just because the array does.
- (In some cases the address might have side effects,
- and we fail to record that fact here. However, it should not
- matter, since expand_expr should not care.) */
- TREE_SIDE_EFFECTS (array_adr) = 0;
-
- elt = build1 (INDIRECT_REF, type,
- fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
- array_adr,
- fold (build (MULT_EXPR,
- TYPE_POINTER_TO (variant_type),
- index, size_in_bytes (type))))));
-
- /* Volatility, etc., of new expression is same as old expression. */
- TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
- TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
- TREE_READONLY (elt) = TREE_READONLY (exp);
-
- return expand_expr (elt, target, tmode, modifier);
- }
+ if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
+ abort ();
- /* Fold an expression like: "foo"[2].
- This is not done in fold so it won't happen inside &. */
{
+ tree array = TREE_OPERAND (exp, 0);
+ tree domain = TYPE_DOMAIN (TREE_TYPE (array));
+ tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
+ tree index = TREE_OPERAND (exp, 1);
+ tree index_type = TREE_TYPE (index);
int i;
- tree arg0 = TREE_OPERAND (exp, 0);
- tree arg1 = TREE_OPERAND (exp, 1);
- if (TREE_CODE (arg0) == STRING_CST
- && TREE_CODE (arg1) == INTEGER_CST
- && !TREE_INT_CST_HIGH (arg1)
- && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
+ /* Optimize the special-case of a zero lower bound. */
+ if (! integer_zerop (low_bound))
+ index = fold (build (MINUS_EXPR, index_type, index, low_bound));
+
+ if (TREE_CODE (index) != INTEGER_CST
+ || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
+ {
+ /* Nonconstant array index or nonconstant element size.
+ Generate the tree for *(&array+index) and expand that,
+ except do it in a language-independent way
+ and don't complain about non-lvalue arrays.
+ `mark_addressable' should already have been called
+ for any array for which this case will be reached. */
+
+ /* Don't forget the const or volatile flag from the array
+ element. */
+ tree variant_type = build_type_variant (type,
+ TREE_READONLY (exp),
+ TREE_THIS_VOLATILE (exp));
+ tree array_adr = build1 (ADDR_EXPR,
+ build_pointer_type (variant_type), array);
+ tree elt;
+
+ /* Convert the integer argument to a type the same size as a
+ pointer so the multiply won't overflow spuriously. */
+ if (TYPE_PRECISION (index_type) != POINTER_SIZE)
+ index = convert (type_for_size (POINTER_SIZE, 0), index);
+
+ /* Don't think the address has side effects
+ just because the array does.
+ (In some cases the address might have side effects,
+ and we fail to record that fact here. However, it should not
+ matter, since expand_expr should not care.) */
+ TREE_SIDE_EFFECTS (array_adr) = 0;
+
+ elt = build1 (INDIRECT_REF, type,
+ fold (build (PLUS_EXPR,
+ TYPE_POINTER_TO (variant_type),
+ array_adr,
+ fold (build (MULT_EXPR,
+ TYPE_POINTER_TO (variant_type),
+ index,
+ size_in_bytes (type))))));
+
+ /* Volatility, etc., of new expression is same as old
+ expression. */
+ TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
+ TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
+ TREE_READONLY (elt) = TREE_READONLY (exp);
+
+ return expand_expr (elt, target, tmode, modifier);
+ }
+
+ /* Fold an expression like: "foo"[2].
+ This is not done in fold so it won't happen inside &. */
+
+ if (TREE_CODE (array) == STRING_CST
+ && TREE_CODE (index) == INTEGER_CST
+ && !TREE_INT_CST_HIGH (index)
+ && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
{
- if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
+ if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
{
- exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
+ exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
TREE_TYPE (exp) = integer_type_node;
return expand_expr (exp, target, tmode, modifier);
}
- if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
+ if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
{
- exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
+ exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
TREE_TYPE (exp) = integer_type_node;
- return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
+ return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
+ exp),
+ target, tmode, modifier);
}
}
- }
- /* If this is a constant index into a constant array,
- just get the value from the array. Handle both the cases when
- we have an explicit constructor and when our operand is a variable
- that was declared const. */
+ /* If this is a constant index into a constant array,
+ just get the value from the array. Handle both the cases when
+ we have an explicit constructor and when our operand is a variable
+ that was declared const. */
- if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
- && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
- {
- tree index = fold (TREE_OPERAND (exp, 1));
- if (TREE_CODE (index) == INTEGER_CST
- && TREE_INT_CST_HIGH (index) == 0)
- {
- int i = TREE_INT_CST_LOW (index);
- tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
-
- while (elem && i--)
- elem = TREE_CHAIN (elem);
- if (elem)
- return expand_expr (fold (TREE_VALUE (elem)), target,
- tmode, modifier);
- }
- }
+ if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
+ {
+ if (TREE_CODE (index) == INTEGER_CST
+ && TREE_INT_CST_HIGH (index) == 0)
+ {
+ tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
+
+ i = TREE_INT_CST_LOW (index);
+ while (elem && i--)
+ elem = TREE_CHAIN (elem);
+ if (elem)
+ return expand_expr (fold (TREE_VALUE (elem)), target,
+ tmode, modifier);
+ }
+ }
- else if (TREE_READONLY (TREE_OPERAND (exp, 0))
- && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
- && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
- && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
- && DECL_INITIAL (TREE_OPERAND (exp, 0))
- && optimize >= 1
- && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
- != ERROR_MARK))
- {
- tree index = fold (TREE_OPERAND (exp, 1));
- if (TREE_CODE (index) == INTEGER_CST
- && TREE_INT_CST_HIGH (index) == 0)
- {
- int i = TREE_INT_CST_LOW (index);
- tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
-
- if (TREE_CODE (init) == CONSTRUCTOR)
- {
- tree elem = CONSTRUCTOR_ELTS (init);
+ else if (optimize >= 1
+ && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
+ && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
+ && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
+ {
+ if (TREE_CODE (index) == INTEGER_CST
+ && TREE_INT_CST_HIGH (index) == 0)
+ {
+ tree init = DECL_INITIAL (array);
+
+ i = TREE_INT_CST_LOW (index);
+ if (TREE_CODE (init) == CONSTRUCTOR)
+ {
+ tree elem = CONSTRUCTOR_ELTS (init);
+
+ while (elem && i--)
+ elem = TREE_CHAIN (elem);
+ if (elem)
+ return expand_expr (fold (TREE_VALUE (elem)), target,
+ tmode, modifier);
+ }
+ else if (TREE_CODE (init) == STRING_CST
+ && i < TREE_STRING_LENGTH (init))
+ {
+ temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
+ return convert_to_mode (mode, temp, 0);
+ }
+ }
+ }
+ }
- while (elem && i--)
- elem = TREE_CHAIN (elem);
- if (elem)
- return expand_expr (fold (TREE_VALUE (elem)), target,
- tmode, modifier);
- }
- else if (TREE_CODE (init) == STRING_CST
- && i < TREE_STRING_LENGTH (init))
- {
- temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
- return convert_to_mode (mode, temp, 0);
- }
- }
- }
/* Treat array-ref with constant index as a component-ref. */
case COMPONENT_REF:
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
&mode1, &unsignedp, &volatilep);
+ /* If we got back the original object, something is wrong. Perhaps
+ we are evaluating an expression too early. In any event, don't
+ infinitely recurse. */
+ if (tem == exp)
+ abort ();
+
/* In some cases, we will be offsetting OP0's address by a constant.
So get it as a sum, if possible. If we will be using it
directly in an insn, we validate it. */
op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
/* If this is a constant, put it into a register if it is a
- legimate constant and memory if it isn't. */
+ legitimate constant and memory if it isn't. */
if (CONSTANT_P (op0))
{
enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
- if (LEGITIMATE_CONSTANT_P (op0))
+ if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
op0 = force_reg (mode, op0);
else
op0 = validize_mem (force_const_mem (mode, op0));
case OFFSET_REF:
{
- tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
+ tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
temp = gen_rtx (MEM, mode, memory_address (mode, op0));
MEM_IN_STRUCT_P (temp) = 1;
- MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
-#if 0 /* It is incorrectto set RTX_UNCHANGING_P here, because the fact that
+ MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
+#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
a location is accessed through a pointer to const does not mean
that the value there can never change. */
RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
if (target == 0)
- target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
+ target = gen_reg_rtx (mode);
/* If domain is empty, answer is no. */
if (tree_int_cst_lt (set_high_bound, set_low_bound))
/* Compare index against bounds, if they are constant. */
if (GET_CODE (index_val) == CONST_INT
- && GET_CODE (lo_r) == CONST_INT)
- {
- if (INTVAL (index_val) < INTVAL (lo_r))
- return const0_rtx;
- }
+ && GET_CODE (lo_r) == CONST_INT
+ && INTVAL (index_val) < INTVAL (lo_r))
+ return const0_rtx;
if (GET_CODE (index_val) == CONST_INT
- && GET_CODE (hi_r) == CONST_INT)
- {
- if (INTVAL (hi_r) < INTVAL (index_val))
- return const0_rtx;
- }
+ && GET_CODE (hi_r) == CONST_INT
+ && INTVAL (hi_r) < INTVAL (index_val))
+ return const0_rtx;
/* If we get here, we have to generate the code for both cases
(in range and out of range). */
if (! (GET_CODE (index_val) == CONST_INT
&& GET_CODE (lo_r) == CONST_INT))
{
- emit_cmp_insn (index_val, lo_r, LT, 0, GET_MODE (index_val), 0, 0);
+ emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
+ GET_MODE (index_val), 0, 0);
emit_jump_insn (gen_blt (op1));
}
if (! (GET_CODE (index_val) == CONST_INT
&& GET_CODE (hi_r) == CONST_INT))
{
- emit_cmp_insn (index_val, hi_r, GT, 0, GET_MODE (index_val), 0, 0);
+ emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
+ GET_MODE (index_val), 0, 0);
emit_jump_insn (gen_bgt (op1));
}
/* Calculate the element number of bit zero in the first word
of the set. */
if (GET_CODE (lo_r) == CONST_INT)
- rlow = gen_rtx (CONST_INT, VOIDmode,
- INTVAL (lo_r) & ~ (1 << BITS_PER_UNIT));
+ rlow = GEN_INT (INTVAL (lo_r)
+ & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
else
- rlow = expand_binop (index_mode, and_optab,
- lo_r, gen_rtx (CONST_INT, VOIDmode,
- ~ (1 << BITS_PER_UNIT)),
- 0, 0, OPTAB_LIB_WIDEN);
+ rlow = expand_binop (index_mode, and_optab, lo_r,
+ GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
+ NULL_RTX, 0, OPTAB_LIB_WIDEN);
diff = expand_binop (index_mode, sub_optab,
- index_val, rlow, 0, 0, OPTAB_LIB_WIDEN);
+ index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
- gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
- 0, 0);
+ GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
- gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
- 0, 0);
+ GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
addr = memory_address (byte_mode,
expand_binop (index_mode, add_optab,
- diff, setaddr));
+ diff, setaddr, NULL_RTX, 0,
+ OPTAB_LIB_WIDEN));
/* Extract the bit we want to examine */
bit = expand_shift (RSHIFT_EXPR, byte_mode,
- gen_rtx (MEM, byte_mode, addr), rem, 0, 1);
- result = expand_binop (SImode, and_optab, bit, const1_rtx, target,
+ gen_rtx (MEM, byte_mode, addr),
+ make_tree (TREE_TYPE (index), rem),
+ NULL_RTX, 1);
+ result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
+ GET_MODE (target) == byte_mode ? target : 0,
1, OPTAB_LIB_WIDEN);
- emit_move_insn (target, result);
+
+ if (result != target)
+ convert_move (target, result, 1);
/* Output the code to handle the out-of-range case. */
emit_jump (op0);
case NOP_EXPR:
case CONVERT_EXPR:
case REFERENCE_EXPR:
- if (TREE_CODE (type) == VOID_TYPE || ignore)
- {
- expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
- return const0_rtx;
- }
if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
if (TREE_CODE (type) == UNION_TYPE)
return target;
}
op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
- if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
+ if (GET_MODE (op0) == mode)
+ return op0;
+ /* If arg is a constant integer being extended from a narrower mode,
+ we must really truncate to get the extended bits right. Otherwise
+ (unsigned long) (unsigned char) ("\377"[0])
+ would come out as ffffffff. */
+ if (GET_MODE (op0) == VOIDmode
+ && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ < GET_MODE_BITSIZE (mode)))
+ {
+ /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
+ int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
+
+ if (width < HOST_BITS_PER_WIDE_INT)
+ {
+ HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
+ : CONST_DOUBLE_LOW (op0));
+ if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
+ || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
+ val &= ((HOST_WIDE_INT) 1 << width) - 1;
+ else
+ val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
+
+ op0 = GEN_INT (val);
+ }
+ else
+ {
+ op0 = (simplify_unary_operation
+ ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
+ ? ZERO_EXTEND : SIGN_EXTEND),
+ mode, op0,
+ TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
+ if (op0 == 0)
+ abort ();
+ }
+ }
+ if (GET_MODE (op0) == VOIDmode)
return op0;
if (modifier == EXPAND_INITIALIZER)
return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
address.
If this is an EXPAND_SUM call, always return the sum. */
- if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
- && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
- || mode == Pmode))
+ if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
+ || mode == Pmode)
{
- op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
- EXPAND_SUM);
- op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
- if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
- op1 = force_operand (op1, target);
- return op1;
- }
+ if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
+ && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
+ {
+ op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
+ EXPAND_SUM);
+ op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
+ if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
+ op1 = force_operand (op1, target);
+ return op1;
+ }
- else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
- && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
- || mode == Pmode))
- {
- op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
- EXPAND_SUM);
- op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
- if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
- op0 = force_operand (op0, target);
- return op0;
+ else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
+ && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
+ && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
+ {
+ op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
+ EXPAND_SUM);
+ if (! CONSTANT_P (op0))
+ {
+ op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
+ VOIDmode, modifier);
+ goto both_summands;
+ }
+ op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
+ if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
+ op0 = force_operand (op0, target);
+ return op0;
+ }
}
/* No sense saving up arithmetic to be done
And force_operand won't know whether to sign-extend or
zero-extend. */
if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
- || mode != Pmode) goto binop;
+ || mode != Pmode)
+ goto binop;
preexpand_calls (exp);
if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
+ both_summands:
/* Make sure any term that's a sum with a constant comes last. */
if (GET_CODE (op0) == PLUS
&& CONSTANT_P (XEXP (op0, 1)))
case ABS_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
+ /* Handle complex values specially. */
+ {
+ enum machine_mode opmode
+ = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+
+ if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
+ || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
+ return expand_complex_abs (opmode, op0, target, unsignedp);
+ }
+
/* Unsigned abs is simply the operand. Testing here means we don't
risk generating incorrect code below. */
if (TREE_UNSIGNED (type))
if (target != op0)
emit_move_insn (target, op0);
op0 = gen_label_rtx ();
- if (code == MAX_EXPR)
- temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
- ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
- : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
- else
- temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
- ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
- : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
- if (temp == const0_rtx)
- emit_move_insn (target, op1);
- else if (temp != const_true_rtx)
+ /* If this mode is an integer too wide to compare properly,
+ compare word by word. Rely on cse to optimize constant cases. */
+ if (GET_MODE_CLASS (mode) == MODE_INT
+ && !can_compare_p (mode))
{
- if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
- emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
+ if (code == MAX_EXPR)
+ do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
else
- abort ();
+ do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
emit_move_insn (target, op1);
}
+ else
+ {
+ if (code == MAX_EXPR)
+ temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
+ ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
+ : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
+ else
+ temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
+ ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
+ : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
+ if (temp == const0_rtx)
+ emit_move_insn (target, op1);
+ else if (temp != const_true_rtx)
+ {
+ if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
+ emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
+ else
+ abort ();
+ emit_move_insn (target, op1);
+ }
+ }
emit_label (op0);
return target;
this_optab = ior_optab;
goto binop;
+ case TRUTH_XOR_EXPR:
case BIT_XOR_EXPR:
this_optab = xor_optab;
goto binop;
case TRUTH_ANDIF_EXPR:
case TRUTH_ORIF_EXPR:
- if (target == 0 || ! safe_from_p (target, exp)
- /* Make sure we don't have a hard reg (such as function's return
- value) live across basic blocks, if not optimizing. */
- || (!optimize && GET_CODE (target) == REG
- && REGNO (target) < FIRST_PSEUDO_REGISTER))
+ if (! ignore
+ && (target == 0 || ! safe_from_p (target, exp)
+ /* Make sure we don't have a hard reg (such as function's return
+ value) live across basic blocks, if not optimizing. */
+ || (!optimize && GET_CODE (target) == REG
+ && REGNO (target) < FIRST_PSEUDO_REGISTER)))
target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
- emit_clr_insn (target);
+
+ if (target)
+ emit_clr_insn (target);
+
op1 = gen_label_rtx ();
jumpifnot (exp, op1);
- emit_0_to_1_insn (target);
+
+ if (target)
+ emit_0_to_1_insn (target);
+
emit_label (op1);
- return target;
+ return ignore ? const0_rtx : target;
case TRUTH_NOT_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
&& integer_zerop (TREE_OPERAND (exp, 2))
&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
{
+ if (ignore)
+ {
+ expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
+ modifier);
+ return const0_rtx;
+ }
+
op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
if (GET_MODE (op0) == mode)
return op0;
intermediate target unless it is safe. If no target, use a
temporary. */
- if (mode == VOIDmode || ignore)
+ if (ignore)
temp = 0;
else if (original_target
&& safe_from_p (original_target, TREE_OPERAND (exp, 0)))
/* If we had X ? A + 1 : A and we can do the test of X as a store-flag
operation, do this as A + (X != 0). Similarly for other simple
binary operators. */
- if (singleton && binary_op
+ if (temp && singleton && binary_op
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
&& (TREE_CODE (binary_op) == PLUS_EXPR
|| TREE_CODE (binary_op) == MINUS_EXPR
}
else
expand_expr (singleton,
- ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
+ ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
if (cleanups_this_call)
{
sorry ("aggregate value in COND_EXPR");
is the actual stack address that we want to initialize.
The function we call will perform the cleanup in this case. */
+ /* If we have already assigned it space, use that space,
+ not the target that we were passed, as our target
+ parameter is only a hint. */
+ if (DECL_RTL (slot) != 0)
+ {
+ target = DECL_RTL (slot);
+ /* If we have already expanded the slot, don't do
+ it again. (mrs) */
+ if (TREE_OPERAND (exp, 1) == NULL_TREE)
+ return target;
+ }
+
DECL_RTL (slot) = target;
}
op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
(modifier == EXPAND_INITIALIZER
? modifier : EXPAND_CONST_ADDRESS));
+
+ /* We would like the object in memory. If it is a constant,
+ we can have it be statically allocated into memory. For
+ a non-constant (REG or SUBREG), we need to allocate some
+ memory and store the value into it. */
+
+ if (CONSTANT_P (op0))
+ op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
+ op0);
+
+ if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
+ {
+ /* If this object is in a register, it must not
+ be BLKmode. */
+ tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
+ enum machine_mode inner_mode = TYPE_MODE (inner_type);
+ rtx memloc
+ = assign_stack_temp (inner_mode,
+ int_size_in_bytes (inner_type), 1);
+
+ emit_move_insn (memloc, op0);
+ op0 = memloc;
+ }
+
if (GET_CODE (op0) != MEM)
abort ();
emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
/* Move the real (op0) and imaginary (op1) parts to their location. */
- emit_move_insn (gen_lowpart (mode, target), op0);
- emit_move_insn (gen_highpart (mode, target), op1);
+ emit_move_insn (gen_realpart (mode, target), op0);
+ emit_move_insn (gen_imagpart (mode, target), op1);
/* Complex construction should appear as a single unit. */
group_insns (prev);
}
case REALPART_EXPR:
- {
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
- op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
- if (! target)
- target = gen_reg_rtx (mode);
- emit_move_insn (target, gen_lowpart (mode, op0));
- return target;
- }
+ op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
+ return gen_realpart (mode, op0);
case IMAGPART_EXPR:
- {
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
- op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
- if (! target)
- target = gen_reg_rtx (mode);
- emit_move_insn (target, gen_highpart (mode, op0));
- return target;
- }
+ op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
+ return gen_imagpart (mode, op0);
case CONJ_EXPR:
{
emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
/* Store the realpart and the negated imagpart to target. */
- emit_move_insn (gen_lowpart (mode, target), gen_lowpart (mode, op0));
+ emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
- imag_t = gen_highpart (mode, target);
+ imag_t = gen_imagpart (mode, target);
temp = expand_unop (mode, neg_optab,
- gen_highpart (mode, op0), imag_t, 0);
+ gen_imagpart (mode, op0), imag_t, 0);
if (temp != imag_t)
emit_move_insn (imag_t, temp);
}
case ERROR_MARK:
+ op0 = CONST0_RTX (tmode);
+ if (op0 != 0)
+ return op0;
return const0_rtx;
default:
return target;
+ /* __builtin_apply_args returns block of memory allocated on
+ the stack into which is stored the arg pointer, structure
+ value address, static chain, and all the registers that might
+ possibly be used in performing a function call. The code is
+ moved to the start of the function so the incoming values are
+ saved. */
+ case BUILT_IN_APPLY_ARGS:
+ /* Don't do __builtin_apply_args more than once in a function.
+ Save the result of the first call and reuse it. */
+ if (apply_args_value != 0)
+ return apply_args_value;
+ {
+ /* When this function is called, it means that registers must be
+ saved on entry to this function. So we migrate the
+ call to the first insn of this function. */
+ rtx temp;
+ rtx seq;
+
+ start_sequence ();
+ temp = expand_builtin_apply_args ();
+ seq = get_insns ();
+ end_sequence ();
+
+ apply_args_value = temp;
+
+ /* Put the sequence after the NOTE that starts the function.
+ If this is inside a SEQUENCE, make the outer-level insn
+ chain current, so the code is placed at the start of the
+ function. */
+ push_topmost_sequence ();
+ emit_insns_before (seq, NEXT_INSN (get_insns ()));
+ pop_topmost_sequence ();
+ return temp;
+ }
+
+ /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
+ FUNCTION with a copy of the parameters described by
+ ARGUMENTS, and ARGSIZE. It returns a block of memory
+ allocated on the stack into which is stored all the registers
+ that might possibly be used for returning the result of a
+ function. ARGUMENTS is the value returned by
+ __builtin_apply_args. ARGSIZE is the number of bytes of
+ arguments that must be copied. ??? How should this value be
+ computed? We'll also need a safe worst case value for varargs
+ functions. */
+ case BUILT_IN_APPLY:
+ if (arglist == 0
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
+ || TREE_CHAIN (arglist) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
+ || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
+ return const0_rtx;
+ else
+ {
+ int i;
+ tree t;
+ rtx ops[3];
+
+ for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
+ ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
+
+ return expand_builtin_apply (ops[0], ops[1], ops[2]);
+ }
+
+ /* __builtin_return (RESULT) causes the function to return the
+ value described by RESULT. RESULT is address of the block of
+ memory returned by __builtin_apply. */
+ case BUILT_IN_RETURN:
+ if (arglist
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
+ expand_builtin_return (expand_expr (TREE_VALUE (arglist),
+ NULL_RTX, VOIDmode, 0));
+ return const0_rtx;
+
case BUILT_IN_SAVEREGS:
/* Don't do __builtin_saveregs more than once in a function.
Save the result of the first call and reuse it. */
saveregs_value = temp;
- /* This won't work inside a SEQUENCE--it really has to be
- at the start of the function. */
- if (in_sequence_p ())
- {
- /* Better to do this than to crash. */
- error ("`va_start' used within `({...})'");
- return temp;
- }
-
- /* Put the sequence after the NOTE that starts the function. */
+ /* Put the sequence after the NOTE that starts the function.
+ If this is inside a SEQUENCE, make the outer-level insn
+ chain current, so the code is placed at the start of the
+ function. */
+ push_topmost_sequence ();
emit_insns_before (seq, NEXT_INSN (get_insns ()));
+ pop_topmost_sequence ();
return temp;
}
{
tree arg = TREE_VALUE (arglist);
if (TREE_CODE (arg) != INTEGER_CST)
- error ("argument of __builtin_args_info must be constant");
+ error ("argument of `__builtin_args_info' must be constant");
else
{
int wordnum = TREE_INT_CST_LOW (arg);
- if (wordnum < 0 || wordnum >= nwords)
- error ("argument of __builtin_args_info out of range");
+ if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
+ error ("argument of `__builtin_args_info' out of range");
else
return GEN_INT (word_ptr[wordnum]);
}
}
else
- error ("missing argument in __builtin_args_info");
+ error ("missing argument in `__builtin_args_info'");
return const0_rtx;
return GEN_INT (method_type_class);
if (code == RECORD_TYPE)
return GEN_INT (record_type_class);
- if (code == UNION_TYPE)
+ if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
return GEN_INT (union_type_class);
if (code == ARRAY_TYPE)
return GEN_INT (array_type_class);
return const0_rtx;
else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
{
- error ("invalid arg to __builtin_return_address");
+ error ("invalid arg to `__builtin_return_address'");
return const0_rtx;
}
else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
{
- error ("invalid arg to __builtin_return_address");
+ error ("invalid arg to `__builtin_return_address'");
return const0_rtx;
}
else
rtx tem = frame_pointer_rtx;
int i;
+ /* Some machines need special handling before we can access arbitrary
+ frames. For example, on the sparc, we must first flush all
+ register windows to the stack. */
+#ifdef SETUP_FRAME_ADDRESSES
+ SETUP_FRAME_ADDRESSES ();
+#endif
+
+ /* On the sparc, the return address is not in the frame, it is
+ in a register. There is no way to access it off of the current
+ frame pointer, but it can be accessed off the previous frame
+ pointer by reading the value from the register window save
+ area. */
+#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
+ count--;
+#endif
+
/* Scan back COUNT frames to the specified frame. */
for (i = 0; i < count; i++)
{
= get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
int dest_align
= get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- rtx dest_rtx;
+ rtx dest_rtx, dest_mem, src_mem;
/* If either SRC or DEST is not a pointer type, don't do
this operation in-line. */
}
dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
+ dest_mem = gen_rtx (MEM, BLKmode,
+ memory_address (BLKmode, dest_rtx));
+ src_mem = gen_rtx (MEM, BLKmode,
+ memory_address (BLKmode,
+ expand_expr (src, NULL_RTX,
+ Pmode,
+ EXPAND_NORMAL)));
/* Copy word part most expediently. */
- emit_block_move (gen_rtx (MEM, BLKmode,
- memory_address (BLKmode, dest_rtx)),
- gen_rtx (MEM, BLKmode,
- memory_address (BLKmode,
- expand_expr (src, NULL_RTX,
- Pmode,
- EXPAND_NORMAL))),
+ emit_block_move (dest_mem, src_mem,
expand_expr (len, NULL_RTX, VOIDmode, 0),
MIN (src_align, dest_align));
return dest_rtx;
#endif
default: /* just do library call, if unknown builtin */
- error ("built-in function %s not currently supported",
+ error ("built-in function `%s' not currently supported",
IDENTIFIER_POINTER (DECL_NAME (fndecl)));
}
return expand_call (exp, target, ignore);
}
\f
+/* Built-in functions to perform an untyped call and return. */
+
+/* For each register that may be used for calling a function, this
+ gives a mode used to copy the register's value. VOIDmode indicates
+ the register is not used for calling a function. If the machine
+ has register windows, this gives only the outbound registers.
+ INCOMING_REGNO gives the corresponding inbound register. */
+static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
+
+/* For each register that may be used for returning values, this gives
+ a mode used to copy the register's value. VOIDmode indicates the
+ register is not used for returning values. If the machine has
+ register windows, this gives only the outbound registers.
+ INCOMING_REGNO gives the corresponding inbound register. */
+static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
+
+/* Return the size required for the block returned by __builtin_apply_args,
+ and initialize apply_args_mode. */
+static int
+apply_args_size ()
+{
+ static int size = -1;
+ int align, regno;
+ enum machine_mode mode;
+
+ /* The values computed by this function never change. */
+ if (size < 0)
+ {
+ /* The first value is the incoming arg-pointer. */
+ size = GET_MODE_SIZE (Pmode);
+
+ /* The second value is the structure value address unless this is
+ passed as an "invisible" first argument. */
+ if (struct_value_rtx)
+ size += GET_MODE_SIZE (Pmode);
+
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if (FUNCTION_ARG_REGNO_P (regno))
+ {
+ /* Search for the proper mode for copying this register's
+ value. I'm not sure this is right, but it works so far. */
+ enum machine_mode best_mode = VOIDmode;
+
+ /* Prefer the widest integer mode that occupies exactly one
+ hard register. */
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ if (HARD_REGNO_MODE_OK (regno, mode)
+ && HARD_REGNO_NREGS (regno, mode) == 1)
+ best_mode = mode;
+
+ /* If no integer mode fits, fall back to the widest float
+ mode that this register supports and that has a move
+ pattern available. */
+ if (best_mode == VOIDmode)
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ if (HARD_REGNO_MODE_OK (regno, mode)
+ && (mov_optab->handlers[(int) mode].insn_code
+ != CODE_FOR_nothing))
+ best_mode = mode;
+
+ mode = best_mode;
+ if (mode == VOIDmode)
+ abort ();
+
+ /* Round SIZE up to the alignment of this mode, then add
+ the bytes needed to hold a value of this mode. */
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ size += GET_MODE_SIZE (mode);
+ apply_args_mode[regno] = mode;
+ }
+ else
+ apply_args_mode[regno] = VOIDmode;
+ }
+ return size;
+}
+
+/* Return the size required for the block returned by __builtin_apply,
+ and initialize apply_result_mode. */
+static int
+apply_result_size ()
+{
+ static int size = -1;
+ int align, regno;
+ enum machine_mode mode;
+
+ /* The values computed by this function never change. */
+ if (size < 0)
+ {
+ size = 0;
+
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if (FUNCTION_VALUE_REGNO_P (regno))
+ {
+ /* Search for the proper mode for copying this register's
+ value. I'm not sure this is right, but it works so far. */
+ enum machine_mode best_mode = VOIDmode;
+
+ /* Prefer the widest integer mode narrower than TImode
+ that this register can hold. */
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ mode != TImode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ if (HARD_REGNO_MODE_OK (regno, mode))
+ best_mode = mode;
+
+ /* If no integer mode fits, fall back to the widest float
+ mode that this register supports and that has a move
+ pattern available. */
+ if (best_mode == VOIDmode)
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ if (HARD_REGNO_MODE_OK (regno, mode)
+ && (mov_optab->handlers[(int) mode].insn_code
+ != CODE_FOR_nothing))
+ best_mode = mode;
+
+ mode = best_mode;
+ if (mode == VOIDmode)
+ abort ();
+
+ /* Round SIZE up to the alignment of this mode, then add
+ the bytes needed to hold a value of this mode. */
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ size += GET_MODE_SIZE (mode);
+ apply_result_mode[regno] = mode;
+ }
+ else
+ apply_result_mode[regno] = VOIDmode;
+
+ /* Allow targets that use untyped_call and untyped_return to override
+ the size so that machine-specific information can be stored here. */
+#ifdef APPLY_RESULT_SIZE
+ size = APPLY_RESULT_SIZE;
+#endif
+ }
+ return size;
+}
+
+#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
+/* Create a vector describing the result block RESULT. If SAVEP is true,
+ the result block is used to save the values; otherwise it is used to
+ restore the values.
+
+ The vector is a PARALLEL of SETs, one per register named in
+ apply_result_mode, each moving between the register and the slot at
+ its aligned offset inside RESULT. */
+static rtx
+result_vector (savep, result)
+ int savep;
+ rtx result;
+{
+ int regno, size, align, nelts;
+ enum machine_mode mode;
+ rtx reg, mem;
+ rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
+
+ size = nelts = 0;
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if ((mode = apply_result_mode[regno]) != VOIDmode)
+ {
+ /* Keep the offsets in step with apply_result_size's layout. */
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ /* When saving, the values come in on the incoming registers. */
+ reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
+ mem = change_address (result, mode,
+ plus_constant (XEXP (result, 0), size));
+ savevec[nelts++] = (savep
+ ? gen_rtx (SET, VOIDmode, mem, reg)
+ : gen_rtx (SET, VOIDmode, reg, mem));
+ size += GET_MODE_SIZE (mode);
+ }
+ return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
+}
+#endif /* HAVE_untyped_call or HAVE_untyped_return */
+
+
+/* Save the state required to perform an untyped call with the same
+ arguments as were passed to the current function.
+
+ Returns the address of a stack block laid out as: incoming
+ arg-pointer, then (if used) the structure value address, then each
+ argument register listed in apply_args_mode at its aligned offset. */
+static rtx
+expand_builtin_apply_args ()
+{
+ rtx registers;
+ int size, align, regno;
+ enum machine_mode mode;
+
+ /* Create a block where the arg-pointer, structure value address,
+ and argument registers can be saved. */
+ registers = assign_stack_local (BLKmode, apply_args_size (), -1);
+
+ /* Walk past the arg-pointer and structure value address. */
+ size = GET_MODE_SIZE (Pmode);
+ if (struct_value_rtx)
+ size += GET_MODE_SIZE (Pmode);
+
+ /* Save each register used in calling a function to the block. */
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if ((mode = apply_args_mode[regno]) != VOIDmode)
+ {
+ /* Keep offsets in step with apply_args_size's layout. */
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ emit_move_insn (change_address (registers, mode,
+ plus_constant (XEXP (registers, 0),
+ size)),
+ gen_rtx (REG, mode, INCOMING_REGNO (regno)));
+ size += GET_MODE_SIZE (mode);
+ }
+
+ /* Save the arg pointer to the block. */
+ emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
+ copy_to_reg (virtual_incoming_args_rtx));
+ size = GET_MODE_SIZE (Pmode);
+
+ /* Save the structure value address unless this is passed as an
+ "invisible" first argument. */
+ if (struct_value_incoming_rtx)
+ {
+ emit_move_insn (change_address (registers, Pmode,
+ plus_constant (XEXP (registers, 0),
+ size)),
+ copy_to_reg (struct_value_incoming_rtx));
+ size += GET_MODE_SIZE (Pmode);
+ }
+
+ /* Return the address of the block. */
+ return copy_addr_to_reg (XEXP (registers, 0));
+}
+
+/* Perform an untyped call and save the state required to perform an
+ untyped return of whatever value was returned by the given function.
+
+ FUNCTION is the address of the function to call, ARGUMENTS is the
+ address of a block produced by __builtin_apply_args, and ARGSIZE is
+ the number of bytes of stack arguments to copy. Returns the address
+ of a block holding the function's return registers. */
+static rtx
+expand_builtin_apply (function, arguments, argsize)
+ rtx function, arguments, argsize;
+{
+ int size, align, regno;
+ enum machine_mode mode;
+ rtx incoming_args, result, reg, dest, call_insn;
+ rtx old_stack_level = 0;
+ rtx use_insns = 0;
+
+ /* Create a block where the return registers can be saved. */
+ result = assign_stack_local (BLKmode, apply_result_size (), -1);
+
+ /* ??? The argsize value should be adjusted here. */
+
+ /* Fetch the arg pointer from the ARGUMENTS block. */
+ incoming_args = gen_reg_rtx (Pmode);
+ emit_move_insn (incoming_args,
+ gen_rtx (MEM, Pmode, arguments));
+#ifndef STACK_GROWS_DOWNWARD
+ /* If the stack grows upward, the saved arg pointer is the low end of
+ the argument block; step past it to find the arguments. */
+ incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
+ incoming_args, 0, OPTAB_LIB_WIDEN);
+#endif
+
+ /* Perform postincrements before actually calling the function. */
+ emit_queue ();
+
+ /* Push a new argument block and copy the arguments. */
+ do_pending_stack_adjust ();
+ emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
+
+ /* Push a block of memory onto the stack to store the memory arguments.
+ Save the address in a register, and copy the memory arguments. ??? I
+ haven't figured out how the calling convention macros affect this,
+ but it's likely that the source and/or destination addresses in
+ the block copy will need updating in machine specific ways. */
+ dest = copy_addr_to_reg (push_block (argsize, 0, 0));
+ emit_block_move (gen_rtx (MEM, BLKmode, dest),
+ gen_rtx (MEM, BLKmode, incoming_args),
+ argsize,
+ PARM_BOUNDARY / BITS_PER_UNIT);
+
+ /* Refer to the argument block. */
+ apply_args_size ();
+ arguments = gen_rtx (MEM, BLKmode, arguments);
+
+ /* Walk past the arg-pointer and structure value address. */
+ size = GET_MODE_SIZE (Pmode);
+ if (struct_value_rtx)
+ size += GET_MODE_SIZE (Pmode);
+
+ /* Restore each of the registers previously saved. Make USE insns
+ for each of these registers for use in making the call. */
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if ((mode = apply_args_mode[regno]) != VOIDmode)
+ {
+ /* Keep offsets in step with apply_args_size's layout. */
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ reg = gen_rtx (REG, mode, regno);
+ emit_move_insn (reg,
+ change_address (arguments, mode,
+ plus_constant (XEXP (arguments, 0),
+ size)));
+
+ /* Collect the USE in a separate sequence; it must be emitted
+ just before the call insn, which does not exist yet. */
+ push_to_sequence (use_insns);
+ emit_insn (gen_rtx (USE, VOIDmode, reg));
+ use_insns = get_insns ();
+ end_sequence ();
+ size += GET_MODE_SIZE (mode);
+ }
+
+ /* Restore the structure value address unless this is passed as an
+ "invisible" first argument. */
+ size = GET_MODE_SIZE (Pmode);
+ if (struct_value_rtx)
+ {
+ rtx value = gen_reg_rtx (Pmode);
+ emit_move_insn (value,
+ change_address (arguments, Pmode,
+ plus_constant (XEXP (arguments, 0),
+ size)));
+ emit_move_insn (struct_value_rtx, value);
+ if (GET_CODE (struct_value_rtx) == REG)
+ {
+ push_to_sequence (use_insns);
+ emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
+ use_insns = get_insns ();
+ end_sequence ();
+ }
+ size += GET_MODE_SIZE (Pmode);
+ }
+
+ /* All arguments and registers used for the call are set up by now! */
+ function = prepare_call_address (function, NULL_TREE, &use_insns);
+
+ /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
+ and we don't want to load it into a register as an optimization,
+ because prepare_call_address already did it if it should be done. */
+ if (GET_CODE (function) != SYMBOL_REF)
+ function = memory_address (FUNCTION_MODE, function);
+
+ /* Generate the actual call instruction and save the return value. */
+#ifdef HAVE_untyped_call
+ if (HAVE_untyped_call)
+ emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
+ result, result_vector (1, result)));
+ else
+#endif
+#ifdef HAVE_call_value
+ if (HAVE_call_value)
+ {
+ rtx valreg = 0;
+
+ /* Locate the unique return register. It is not possible to
+ express a call that sets more than one return register using
+ call_value; use untyped_call for that. In fact, untyped_call
+ only needs to save the return registers in the given block. */
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if ((mode = apply_result_mode[regno]) != VOIDmode)
+ {
+ if (valreg)
+ abort (); /* HAVE_untyped_call required. */
+ valreg = gen_rtx (REG, mode, regno);
+ }
+
+ emit_call_insn (gen_call_value (valreg,
+ gen_rtx (MEM, FUNCTION_MODE, function),
+ const0_rtx, NULL_RTX, const0_rtx));
+
+ emit_move_insn (change_address (result, GET_MODE (valreg),
+ XEXP (result, 0)),
+ valreg);
+ }
+ else
+#endif
+ abort ();
+
+ /* Find the CALL insn we just emitted and write the USE insns before it. */
+ for (call_insn = get_last_insn ();
+ call_insn && GET_CODE (call_insn) != CALL_INSN;
+ call_insn = PREV_INSN (call_insn))
+ ;
+
+ if (! call_insn)
+ abort ();
+
+ /* Put the USE insns before the CALL. */
+ emit_insns_before (use_insns, call_insn);
+
+ /* Restore the stack. */
+ emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
+
+ /* Return the address of the result block. */
+ return copy_addr_to_reg (XEXP (result, 0));
+}
+
+/* Perform an untyped return. RESULT is the address of a block, as
+ produced by __builtin_apply, holding the values of the function's
+ return registers; reload those registers and return from the
+ current function. */
+static void
+expand_builtin_return (result)
+ rtx result;
+{
+ int size, align, regno;
+ enum machine_mode mode;
+ rtx reg;
+ rtx use_insns = 0;
+
+ /* Make sure apply_result_mode is initialized. */
+ apply_result_size ();
+ result = gen_rtx (MEM, BLKmode, result);
+
+#ifdef HAVE_untyped_return
+ if (HAVE_untyped_return)
+ {
+ emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
+ emit_barrier ();
+ return;
+ }
+#endif
+
+ /* Restore the return value and note that each value is used. */
+ size = 0;
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if ((mode = apply_result_mode[regno]) != VOIDmode)
+ {
+ /* Keep offsets in step with apply_result_size's layout. */
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
+ emit_move_insn (reg,
+ change_address (result, mode,
+ plus_constant (XEXP (result, 0),
+ size)));
+
+ push_to_sequence (use_insns);
+ emit_insn (gen_rtx (USE, VOIDmode, reg));
+ use_insns = get_insns ();
+ end_sequence ();
+ size += GET_MODE_SIZE (mode);
+ }
+
+ /* Put the USE insns before the return. */
+ emit_insns (use_insns);
+
+ /* Return whatever values were restored by jumping directly to the end
+ of the function. */
+ expand_null_return ();
+}
+\f
/* Expand code for a post- or pre- increment or decrement
and return the RTX for the result.
POST is 1 for postinc/decrements and 0 for preinc/decrements. */
int icode;
enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
int op0_is_copy = 0;
+ int single_insn = 0;
/* Stabilize any component ref that might need to be
evaluated more than once below. */
- if (TREE_CODE (incremented) == BIT_FIELD_REF
+ if (!post
+ || TREE_CODE (incremented) == BIT_FIELD_REF
|| (TREE_CODE (incremented) == COMPONENT_REF
&& (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
|| DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
|| TREE_CODE (exp) == PREDECREMENT_EXPR)
this_optab = sub_optab;
+ /* For a preincrement, see if we can do this with a single instruction. */
+ if (!post)
+ {
+ icode = (int) this_optab->handlers[(int) mode].insn_code;
+ if (icode != (int) CODE_FOR_nothing
+ /* Make sure that OP0 is valid for operands 0 and 1
+ of the insn we want to queue. */
+ && (*insn_operand_predicate[icode][0]) (op0, mode)
+ && (*insn_operand_predicate[icode][1]) (op0, mode)
+ && (*insn_operand_predicate[icode][2]) (op1, mode))
+ single_insn = 1;
+ }
+
/* If OP0 is not the actual lvalue, but rather a copy in a register,
- then we cannot just increment OP0. We must
- therefore contrive to increment the original value.
- Then we can return OP0 since it is a copy of the old value. */
- if (op0_is_copy)
+ then we cannot just increment OP0. We must therefore contrive to
+ increment the original value. Then, for postincrement, we can return
+ OP0 since it is a copy of the old value. For preincrement, expand here
+ unless we can do it with a single insn. */
+ if (op0_is_copy || (!post && !single_insn))
{
/* This is the easiest way to increment the value wherever it is.
- Problems with multiple evaluation of INCREMENTED
- are prevented because either (1) it is a component_ref,
+ Problems with multiple evaluation of INCREMENTED are prevented
+ because either (1) it is a component_ref or preincrement,
in which case it was stabilized above, or (2) it is an array_ref
with constant index in an array in a register, which is
safe to reevaluate. */
emit_label (drop_through_label);
}
+/* Compare OP0 with OP1, word at a time, in mode MODE.
+ UNSIGNEDP says to do unsigned comparison.
+ Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.
+ Either label may be zero, in which case control drops through
+ instead of jumping for that outcome. */
+
+static void
+do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
+ enum machine_mode mode;
+ int unsignedp;
+ rtx op0, op1;
+ rtx if_false_label, if_true_label;
+{
+ int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
+ rtx drop_through_label = 0;
+ int i;
+
+ /* Supply a drop-through label for whichever outcome has no label. */
+ if (! if_true_label || ! if_false_label)
+ drop_through_label = gen_label_rtx ();
+ if (! if_true_label)
+ if_true_label = drop_through_label;
+ if (! if_false_label)
+ if_false_label = drop_through_label;
+
+ /* Compare a word at a time, high order first. */
+ for (i = 0; i < nwords; i++)
+ {
+ rtx comp;
+ rtx op0_word, op1_word;
+
+ if (WORDS_BIG_ENDIAN)
+ {
+ op0_word = operand_subword_force (op0, i, mode);
+ op1_word = operand_subword_force (op1, i, mode);
+ }
+ else
+ {
+ op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
+ op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
+ }
+
+ /* All but high-order word must be compared as unsigned. */
+ comp = compare_from_rtx (op0_word, op1_word,
+ (unsignedp || i > 0) ? GTU : GT,
+ unsignedp, word_mode, NULL_RTX, 0);
+ if (comp == const_true_rtx)
+ emit_jump (if_true_label);
+ else if (comp != const0_rtx)
+ do_jump_for_compare (comp, NULL_RTX, if_true_label);
+
+ /* Consider lower words only if these are equal. */
+ comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
+ NULL_RTX, 0);
+ if (comp == const_true_rtx)
+ emit_jump (if_false_label);
+ else if (comp != const0_rtx)
+ do_jump_for_compare (comp, NULL_RTX, if_false_label);
+ }
+
+ /* All words compared equal: OP0 is not greater. */
+ if (if_false_label)
+ emit_jump (if_false_label);
+ if (drop_through_label)
+ emit_label (drop_through_label);
+}
+
/* Given an EQ_EXPR expression EXP for values too wide to be compared
with one insn, test the comparison and jump to the appropriate label. */
rtx size;
int align;
{
- /* If one operand is constant, make it the second one. */
+ rtx tem;
- if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
+ /* If one operand is constant, make it the second one. Only do this
+ if the other operand is not constant as well. */
+
+ if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
+ || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
{
- rtx tem = op0;
+ tem = op0;
op0 = op1;
op1 = tem;
code = swap_condition (code);
do_pending_stack_adjust ();
- if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
- return simplify_relational_operation (code, mode, op0, op1);
+ if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
+ && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
+ return tem;
#if 0
/* There's no need to do this now that combine.c can eliminate lots of
sign extensions. This can be less efficient in certain cases on other
- machines.
+ machines. */
/* If this is a signed equality comparison, we can do it as an
unsigned comparison since zero-extension is cheaper than sign
extension and comparisons with zero are done as unsigned. This is
the case even on machines that can do fast sign extension, since
- zero-extension is easier to combinen with other operations than
+ zero-extension is easier to combine with other operations than
sign-extension is. If we are comparing against a constant, we must
convert it to what it would look like unsigned. */
if ((code == EQ || code == NE) && ! unsignedp
code = unsignedp ? LTU : LT;
break;
case LE_EXPR:
- if (integer_all_onesp (arg1))
- arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
+ if (! unsignedp && integer_all_onesp (arg1))
+ arg1 = integer_zero_node, code = LT;
else
code = unsignedp ? LEU : LE;
break;
case GT_EXPR:
- if (integer_all_onesp (arg1))
- arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
+ if (! unsignedp && integer_all_onesp (arg1))
+ arg1 = integer_zero_node, code = GE;
else
code = unsignedp ? GTU : GT;
break;
&& integer_pow2p (TREE_OPERAND (arg0, 1))
&& TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
{
+ tree inner = TREE_OPERAND (arg0, 0);
int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
NULL_RTX, VOIDmode, 0)));
+ int ops_unsignedp;
+
+ /* If INNER is a right shift of a constant and it plus BITNUM does
+ not overflow, adjust BITNUM and INNER. */
+
+ if (TREE_CODE (inner) == RSHIFT_EXPR
+ && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
+ && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
+ && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
+ < TYPE_PRECISION (type)))
+ {
+ bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
+ inner = TREE_OPERAND (inner, 0);
+ }
+
+ /* If we are going to be able to omit the AND below, we must do our
+ operations as unsigned. If we must use the AND, we have a choice.
+ Normally unsigned is faster, but for some machines signed is. */
+ ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
+#ifdef BYTE_LOADS_SIGN_EXTEND
+ : 0
+#else
+ : 1
+#endif
+ );
if (subtarget == 0 || GET_CODE (subtarget) != REG
|| GET_MODE (subtarget) != operand_mode
- || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
+ || ! safe_from_p (subtarget, inner))
subtarget = 0;
- op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
+ op0 = expand_expr (inner, subtarget, VOIDmode, 0);
if (bitnum != 0)
op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
- size_int (bitnum), target, 1);
+ size_int (bitnum), target, ops_unsignedp);
if (GET_MODE (op0) != mode)
- op0 = convert_to_mode (mode, op0, 1);
+ op0 = convert_to_mode (mode, op0, ops_unsignedp);
+
+ if ((code == EQ && ! invert) || (code == NE && invert))
+ op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target,
+ ops_unsignedp, OPTAB_LIB_WIDEN);
+ /* Put the AND last so it can combine with more things. */
if (bitnum != TYPE_PRECISION (type) - 1)
op0 = expand_and (op0, const1_rtx, target);
- if ((code == EQ && ! invert) || (code == NE && invert))
- op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
- OPTAB_LIB_WIDEN);
-
return op0;
}
if (target == 0)
target = gen_reg_rtx (mode);
- result = emit_store_flag (target, code, op0, op1, operand_mode,
- unsignedp, 1);
+ /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
+ because, if the emit_store_flag does anything it will succeed and
+ OP0 and OP1 will not be used subsequently. */
+
+ result = emit_store_flag (target, code,
+ queued_subexp_p (op0) ? copy_rtx (op0) : op0,
+ queued_subexp_p (op1) ? copy_rtx (op1) : op1,
+ operand_mode, unsignedp, 1);
if (result)
{
or equal to the minimum value of the range and less than or equal to
the maximum value of the range. */
- emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
+ emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
emit_jump_insn (gen_bltu (default_label));
/* If index is in range, it must fit in Pmode.