/* Convert tree expression to rtl instructions, for GNU compiler.
- Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
+ Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.
This file is part of GNU CC.
#include "typeclass.h"
#include "defaults.h"
-#include "bytecode.h"
-#include "bc-opcode.h"
-#include "bc-typecd.h"
-#include "bc-optab.h"
-#include "bc-emit.h"
-
-
#define CEIL(x,y) (((x) + (y) - 1) / (y))
/* Decide whether a function's arguments should be processed
/* Like STACK_BOUNDARY but in units of bytes, not bits. */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
+/* Assume that case vectors are not pc-relative. */
+#ifndef CASE_VECTOR_PC_RELATIVE
+#define CASE_VECTOR_PC_RELATIVE 0
+#endif
+
/* If this is nonzero, we do not bother generating VOLATILE
around volatile memory references, and we are willing to
output indirect addresses. If cse is to follow, we reject
/* Similarly for __builtin_apply_args. */
static rtx apply_args_value;
+/* Don't check memory usage, since code is being emitted to check a memory
+ usage. Used when flag_check_memory_usage is true, to avoid infinite
+ recursion. */
+static int in_check_memory_usage;
+
/* This structure is used by move_by_pieces to describe the move to
be performed. */
-
struct move_by_pieces
{
rtx to;
int reverse;
};
-/* Used to generate bytecodes: keep track of size of local variables,
- as well as depth of arithmetic stack. (Notice that variables are
- stored on the machine's stack, not the arithmetic stack.) */
-
-static rtx get_push_address PROTO ((int));
-extern int local_vars_size;
-extern int stack_depth;
-extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;
+static rtx get_push_address PROTO ((int));
+
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
-void bc_expand_increment PROTO((struct increment_operator *, tree));
-rtx bc_allocate_local PROTO((int, int));
-void bc_store_memory PROTO((tree, tree));
-tree bc_expand_component_address PROTO((tree));
-tree bc_expand_address PROTO((tree));
-void bc_expand_constructor PROTO((tree));
-void bc_adjust_stack PROTO((int));
-tree bc_canonicalize_array_ref PROTO((tree));
-void bc_load_memory PROTO((tree, tree));
-void bc_load_externaddr PROTO((rtx));
-void bc_load_externaddr_id PROTO((tree, int));
-void bc_load_localaddr PROTO((rtx));
-void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
-/* Maps used to convert modes to const, load, and store bytecodes. */
-enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
-enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
-enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
-
-/* Initialize maps used to convert modes to const, load, and store
- bytecodes. */
-
-void
-bc_init_mode_to_opcode_maps ()
-{
- int mode;
-
- for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
- mode_to_const_map[mode]
- = mode_to_load_map[mode]
- = mode_to_store_map[mode] = neverneverland;
-
-#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
- mode_to_const_map[(int) SYM] = CONST; \
- mode_to_load_map[(int) SYM] = LOAD; \
- mode_to_store_map[(int) SYM] = STORE;
-
-#include "modemap.def"
-#undef DEF_MODEMAP
-}
-\f
/* This is run once per compilation to set up which modes can be used
directly in memory and to initialize the block move optab. */
/* Try indexing by frame ptr and try by stack ptr.
It is known that on the Convex the stack ptr isn't a valid index.
With luck, one or the other is valid on any machine. */
- rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
- rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
+ rtx mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
+ rtx mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
start_sequence ();
- insn = emit_insn (gen_rtx (SET, 0, NULL_RTX, NULL_RTX));
+ insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
pat = PATTERN (insn);
for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
if (! HARD_REGNO_MODE_OK (regno, mode))
continue;
- reg = gen_rtx (REG, mode, regno);
+ reg = gen_rtx_REG (mode, regno);
SET_SRC (pat) = mem;
SET_DEST (pat) = reg;
enqueue_insn (var, body)
rtx var, body;
{
- pending_chain = gen_rtx (QUEUED, GET_MODE (var),
- var, NULL_RTX, NULL_RTX, body, pending_chain);
+ pending_chain = gen_rtx_QUEUED (GET_MODE (var),
+ var, NULL_RTX, NULL_RTX, body,
+ pending_chain);
return pending_chain;
}
&& GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
{
register rtx y = XEXP (x, 0);
- register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
+ register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
emit_queue ()
{
register rtx p;
- while (p = pending_chain)
+ while ((p = pending_chain))
{
QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
pending_chain = QUEUED_NEXT (p);
!= CODE_FOR_nothing))
{
if (GET_CODE (to) == REG)
- emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
convert_move (gen_lowpart (word_mode, to), from, unsignedp);
emit_unop_insn (code, to,
gen_lowpart (word_mode, to), equiv_code);
end_sequence ();
emit_no_conflict_block (insns, to, from, NULL_RTX,
- gen_rtx (equiv_code, to_mode, copy_rtx (from)));
+ gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
return;
}
if (data->reverse) data->offset -= size;
to1 = (data->autinc_to
- ? gen_rtx (MEM, mode, data->to_addr)
+ ? gen_rtx_MEM (mode, data->to_addr)
: copy_rtx (change_address (data->to, mode,
plus_constant (data->to_addr,
data->offset))));
from1
= (data->autinc_from
- ? gen_rtx (MEM, mode, data->from_addr)
+ ? gen_rtx_MEM (mode, data->from_addr)
: copy_rtx (change_address (data->from, mode,
plus_constant (data->from_addr,
data->offset))));
enum machine_mode mode;
{
int i;
- rtx pat, last;
+#ifdef HAVE_load_multiple
+ rtx pat;
+ rtx last;
+#endif
if (nregs == 0)
return;
if (HAVE_load_multiple)
{
last = get_last_insn ();
- pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
+ pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
GEN_INT (nregs));
if (pat)
{
#endif
for (i = 0; i < nregs; i++)
- emit_move_insn (gen_rtx (REG, word_mode, regno + i),
+ emit_move_insn (gen_rtx_REG (word_mode, regno + i),
operand_subword_force (x, i, mode));
}
int size;
{
int i;
- rtx pat, last;
+#ifdef HAVE_store_multiple
+ rtx pat;
+ rtx last;
+#endif
enum machine_mode mode;
/* If SIZE is that of a mode no bigger than a word, just use that
&& (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
{
emit_move_insn (change_address (x, mode, NULL),
- gen_rtx (REG, mode, regno));
+ gen_rtx_REG (mode, regno));
return;
}
abort ();
shift = expand_shift (LSHIFT_EXPR, word_mode,
- gen_rtx (REG, word_mode, regno),
+ gen_rtx_REG (word_mode, regno),
build_int_2 ((UNITS_PER_WORD - size)
* BITS_PER_UNIT, 0), NULL_RTX, 0);
emit_move_insn (tem, shift);
if (HAVE_store_multiple)
{
last = get_last_insn ();
- pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
+ pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
GEN_INT (nregs));
if (pat)
{
if (tem == 0)
abort ();
- emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
+ emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
}
}
abort();
*call_fusage
- = gen_rtx (EXPR_LIST, VOIDmode,
- gen_rtx (USE, VOIDmode, reg), *call_fusage);
+ = gen_rtx_EXPR_LIST (VOIDmode,
+ gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
abort ();
for (i = 0; i < nregs; i++)
- use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
+ use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
if (data->reverse) data->offset -= size;
to1 = (data->autinc_to
- ? gen_rtx (MEM, mode, data->to_addr)
+ ? gen_rtx_MEM (mode, data->to_addr)
: copy_rtx (change_address (data->to, mode,
plus_constant (data->to_addr,
data->offset))));
{
/* Don't split destination if it is a stack push. */
int stack = push_operand (x, GET_MODE (x));
- rtx insns;
/* If this is a stack, push the highpart first, so it
will be in the argument order.
regardless of machine's endianness. */
#ifdef STACK_GROWS_DOWNWARD
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx (MEM, submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, (XEXP (x, 0))),
gen_imagpart (submode, y)));
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx (MEM, submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, (XEXP (x, 0))),
gen_realpart (submode, y)));
#else
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx (MEM, submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, (XEXP (x, 0))),
gen_realpart (submode, y)));
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx (MEM, submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, (XEXP (x, 0))),
gen_imagpart (submode, y)));
#endif
}
else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
{
rtx last_insn = 0;
- rtx insns;
#ifdef PUSH_ROUNDING
/* Show the output dies here. */
if (x != y)
- emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
for (i = 0;
i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
temp = plus_constant (virtual_outgoing_args_rtx,
- INTVAL (size) - (below ? 0 : extra));
else if (extra != 0 && !below)
- temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
+ temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
negate_rtx (Pmode, plus_constant (size, extra)));
else
- temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
+ temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
negate_rtx (Pmode, size));
#endif
rtx
gen_push_operand ()
{
- return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
+ return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
/* Return an rtx for the address of the beginning of a as-if-it-was-pushed
register rtx temp;
if (STACK_PUSH_CODE == POST_DEC)
- temp = gen_rtx (PLUS, Pmode, stack_pointer_rtx, GEN_INT (size));
+ temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
else if (STACK_PUSH_CODE == POST_INC)
- temp = gen_rtx (MINUS, Pmode, stack_pointer_rtx, GEN_INT (size));
+ temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
else
temp = stack_pointer_rtx;
- return force_operand (temp, NULL_RTX);
+ return copy_to_reg (temp);
}
/* Generate code to push X onto the stack, assuming it has mode MODE and
&& where_pad != none && where_pad != stack_direction)
anti_adjust_stack (GEN_INT (extra));
- move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
+ move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
INTVAL (size) - used, align);
- if (flag_check_memory_usage)
+ if (flag_check_memory_usage && ! in_check_memory_usage)
{
rtx temp;
+ in_check_memory_usage = 1;
temp = get_push_address (INTVAL(size) - used);
- if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
+ if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
temp, ptr_mode,
XEXP (xinner, 0), ptr_mode,
temp, ptr_mode,
GEN_INT (INTVAL(size) - used),
TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_RW), QImode);
+ GEN_INT (MEMORY_USE_RW),
+ TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
}
}
else
skip + INTVAL (args_so_far)));
else
temp = memory_address (BLKmode,
- plus_constant (gen_rtx (PLUS, Pmode,
- args_addr, args_so_far),
+ plus_constant (gen_rtx_PLUS (Pmode,
+ args_addr,
+ args_so_far),
skip));
- if (flag_check_memory_usage)
+ if (flag_check_memory_usage && ! in_check_memory_usage)
{
rtx target;
+ in_check_memory_usage = 1;
target = copy_to_reg (temp);
- if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
+ if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
target, ptr_mode,
XEXP (xinner, 0), ptr_mode,
emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
target, ptr_mode,
size, TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_RW), QImode);
+ GEN_INT (MEMORY_USE_RW),
+ TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
}
/* TEMP is the address of the block. Copy the data there. */
&& (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
< MOVE_RATIO))
{
- move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
+ move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
INTVAL (size), align);
goto ret;
}
#ifdef HAVE_movstrqi
if (HAVE_movstrqi
&& GET_CODE (size) == CONST_INT
- && ((unsigned) INTVAL (size)
- < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
+ && ((unsigned HOST_WIDE_INT) INTVAL (size)
+ <= GET_MODE_MASK (QImode)))
{
- rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
+ rtx pat = gen_movstrqi (gen_rtx_MEM (BLKmode, temp),
xinner, size, GEN_INT (align));
if (pat != 0)
{
#ifdef HAVE_movstrhi
if (HAVE_movstrhi
&& GET_CODE (size) == CONST_INT
- && ((unsigned) INTVAL (size)
- < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
+ && ((unsigned HOST_WIDE_INT) INTVAL (size)
+ <= GET_MODE_MASK (HImode)))
{
- rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
+ rtx pat = gen_movstrhi (gen_rtx_MEM (BLKmode, temp),
xinner, size, GEN_INT (align));
if (pat != 0)
{
#ifdef HAVE_movstrsi
if (HAVE_movstrsi)
{
- rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
+ rtx pat = gen_movstrsi (gen_rtx_MEM (BLKmode, temp),
xinner, size, GEN_INT (align));
if (pat != 0)
{
#ifdef HAVE_movstrdi
if (HAVE_movstrdi)
{
- rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
+ rtx pat = gen_movstrdi (gen_rtx_MEM (BLKmode, temp),
xinner, size, GEN_INT (align));
if (pat != 0)
{
}
}
#endif
+#ifdef HAVE_movstrti
+ if (HAVE_movstrti)
+ {
+ rtx pat = gen_movstrti (gen_rtx_MEM (BLKmode, temp),
+ xinner, size, GEN_INT (align));
+ if (pat != 0)
+ {
+ emit_insn (pat);
+ goto ret;
+ }
+ }
+#endif
#ifndef ACCUMULATE_OUTGOING_ARGS
/* If the source is referenced relative to the stack pointer,
plus_constant (args_addr,
INTVAL (args_so_far)));
else
- addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
- args_so_far));
+ addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
+ args_so_far));
target = addr;
}
- emit_move_insn (gen_rtx (MEM, mode, addr), x);
+ emit_move_insn (gen_rtx_MEM (mode, addr), x);
- if (flag_check_memory_usage)
+ if (flag_check_memory_usage && ! in_check_memory_usage)
{
+ in_check_memory_usage = 1;
if (target == 0)
target = get_push_address (GET_MODE_SIZE (mode));
- if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
+ if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
target, ptr_mode,
XEXP (x, 0), ptr_mode,
target, ptr_mode,
GEN_INT (GET_MODE_SIZE (mode)),
TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_RW), QImode);
+ GEN_INT (MEMORY_USE_RW),
+ TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
}
}
return want_value ? result : NULL_RTX;
}
- if (output_bytecode)
- {
- tree dest_innermost;
-
- bc_expand_expr (from);
- bc_emit_instruction (duplicate);
-
- dest_innermost = bc_expand_address (to);
-
- /* Can't deduce from TYPE that we're dealing with a bitfield, so
- take care of it here. */
-
- bc_store_memory (TREE_TYPE (to), dest_innermost);
- return NULL;
- }
-
/* Assignment of a structure component needs special treatment
if the structure component's rtx is not simply a MEM.
Assignment of an array element at a constant index, and assignment of
if (GET_CODE (to_rtx) != MEM)
abort ();
to_rtx = change_address (to_rtx, VOIDmode,
- gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
- force_reg (ptr_mode, offset_rtx)));
+ gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
+ force_reg (ptr_mode, offset_rtx)));
}
if (volatilep)
{
#endif
}
+ if (TREE_CODE (to) == COMPONENT_REF
+ && TREE_READONLY (TREE_OPERAND (to, 1)))
+ {
+ if (offset == 0)
+ to_rtx = copy_rtx (to_rtx);
+
+ RTX_UNCHANGING_P (to_rtx) = 1;
+ }
+
/* Check the access. */
if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
{
emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
to_addr, ptr_mode,
GEN_INT (size), TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_WO), QImode);
+ GEN_INT (MEMORY_USE_WO),
+ TYPE_MODE (integer_type_node));
}
result = store_field (to_rtx, bitsize, bitpos, mode1, from,
do_pending_stack_adjust ();
NO_DEFER_POP;
jumpifnot (TREE_OPERAND (exp, 0), lab1);
- start_cleanup_deferal ();
+ start_cleanup_deferral ();
store_expr (TREE_OPERAND (exp, 1), target, 0);
- end_cleanup_deferal ();
+ end_cleanup_deferral ();
emit_queue ();
emit_jump_insn (gen_jump (lab2));
emit_barrier ();
emit_label (lab1);
- start_cleanup_deferal ();
+ start_cleanup_deferral ();
store_expr (TREE_OPERAND (exp, 2), target, 0);
- end_cleanup_deferal ();
+ end_cleanup_deferral ();
emit_queue ();
emit_label (lab2);
OK_DEFER_POP;
emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
XEXP (target, 0), ptr_mode,
expr_size (exp), TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_WO), QImode);
+ GEN_INT (MEMORY_USE_WO),
+ TYPE_MODE (integer_type_node));
}
/* If value was not generated in the target, store it there.
emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
addr, ptr_mode,
size, TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_WO), QImode);
+ GEN_INT (MEMORY_USE_WO),
+ TYPE_MODE (integer_type_node));
#ifdef TARGET_MEM_FUNCTIONS
emit_library_call (memset_libfunc, 0, VOIDmode, 3,
addr, ptr_mode,
is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
case REAL_CST:
- return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
+ return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
case CONSTRUCTOR:
if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
/* Inform later passes that the whole union value is dead. */
if (TREE_CODE (type) == UNION_TYPE
|| TREE_CODE (type) == QUAL_UNION_TYPE)
- emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
/* If we are building a static constructor into a register,
set the initial value as zero so we can fold the value into
}
else
/* Inform later passes that the old value is dead. */
- emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
/* Store each element of the constructor into
the corresponding field of TARGET. */
if (contains_placeholder_p (offset))
offset = build (WITH_RECORD_EXPR, sizetype,
- offset, exp);
+ offset, make_tree (TREE_TYPE (exp), target));
offset = size_binop (FLOOR_DIV_EXPR, offset,
size_int (BITS_PER_UNIT));
to_rtx
= change_address (to_rtx, VOIDmode,
- gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
+ gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
force_reg (ptr_mode, offset_rtx)));
}
if (TREE_READONLY (field))
}
else
/* Inform later passes that the old value is dead. */
- emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
/* Store each element of the constructor into
the corresponding element of TARGET, determined
TYPE_MIN_VALUE (domain)),
position);
pos_rtx = expand_expr (position, 0, VOIDmode, 0);
- addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
+ addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
xtarget = change_address (target, mode, addr);
if (TREE_CODE (value) == CONSTRUCTOR)
store_constructor (value, xtarget, cleared);
/* Needed by stupid register allocation. to extend the
lifetime of pseudo-regs used by target past the end
of the loop. */
- emit_insn (gen_rtx (USE, GET_MODE (target), target));
+ emit_insn (gen_rtx_USE (GET_MODE (target), target));
}
}
else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
size_int (BITS_PER_UNIT));
position = size_binop (MULT_EXPR, index, position);
pos_rtx = expand_expr (position, 0, VOIDmode, 0);
- addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
+ addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
xtarget = change_address (target, mode, addr);
store_expr (value, xtarget, 0);
}
else if (TREE_CODE (type) == SET_TYPE)
{
tree elt = CONSTRUCTOR_ELTS (exp);
- rtx xtarget = XEXP (target, 0);
- int set_word_size = TYPE_ALIGN (type);
int nbytes = int_size_in_bytes (type), nbits;
tree domain = TYPE_DOMAIN (type);
tree domain_min, domain_max, bitlength;
tree startbit = TREE_PURPOSE (elt);
/* end of range of element, or element value */
tree endbit = TREE_VALUE (elt);
+#ifdef TARGET_MEM_FUNCTIONS
HOST_WIDE_INT startb, endb;
+#endif
rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
bitlength_rtx = expand_expr (bitlength,
else
#endif
{
- emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
+ emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
bitlength_rtx, TYPE_MODE (sizetype),
startbit_rtx, TYPE_MODE (sizetype),
if (! integer_zerop (low_bound))
index = fold (build (MINUS_EXPR, index_type, index, low_bound));
- index = fold (build (MULT_EXPR, index_type, index,
- convert (index_type,
+ if (TREE_CODE (index) == INTEGER_CST)
+ {
+ index = convert (sbitsizetype, index);
+ index_type = TREE_TYPE (index);
+ }
+
+ index = fold (build (MULT_EXPR, sbitsizetype, index,
+ convert (sbitsizetype,
TYPE_SIZE (TREE_TYPE (exp)))));
if (TREE_CODE (index) == INTEGER_CST
&& TREE_INT_CST_HIGH (index) == 0)
*pbitpos += TREE_INT_CST_LOW (index);
else
- offset = size_binop (PLUS_EXPR, offset,
- size_binop (FLOOR_DIV_EXPR, index,
- size_int (BITS_PER_UNIT)));
+ {
+ offset = size_binop (PLUS_EXPR, offset,
+ convert (sizetype,
+ size_binop (FLOOR_DIV_EXPR, index,
+ size_int (BITS_PER_UNIT))));
+ if (contains_placeholder_p (offset))
+ offset = build (WITH_RECORD_EXPR, sizetype, offset, exp);
+ }
}
else if (TREE_CODE (exp) != NON_LVALUE_EXPR
&& ! ((TREE_CODE (exp) == NOP_EXPR
/* Use subtarget as the target for operand 0 of a binary operation. */
rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
rtx original_target = target;
- /* Maybe defer this until sure not doing bytecode? */
int ignore = (target == const0_rtx
|| ((code == NON_LVALUE_EXPR || code == NOP_EXPR
|| code == CONVERT_EXPR || code == REFERENCE_EXPR
else
ro_modifier = EXPAND_NORMAL;
- if (output_bytecode && modifier != EXPAND_INITIALIZER)
- {
- bc_expand_expr (exp);
- return NULL;
- }
-
/* Don't use hard regs as subtargets, because the combiner
can only handle pseudo regs. */
if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
push_obstacks (p->function_obstack,
p->function_maybepermanent_obstack);
- p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
- label_rtx (exp), p->forced_labels);
+ p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
+ label_rtx (exp),
+ p->forced_labels);
pop_obstacks ();
}
else if (modifier == EXPAND_INITIALIZER)
- forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
- label_rtx (exp), forced_labels);
- temp = gen_rtx (MEM, FUNCTION_MODE,
- gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
+ forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
+ label_rtx (exp), forced_labels);
+ temp = gen_rtx_MEM (FUNCTION_MODE,
+ gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
if (function != current_function_decl
&& function != inline_function_decl && function != 0)
LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
XEXP (DECL_RTL (exp), 0), ptr_mode,
GEN_INT (int_size_in_bytes (type)),
TYPE_MODE (sizetype),
- GEN_INT (memory_usage), QImode);
+ GEN_INT (memory_usage),
+ TYPE_MODE (integer_type_node));
}
/* ... fall through ... */
abort ();
addr = XEXP (DECL_RTL (exp), 0);
if (GET_CODE (addr) == MEM)
- addr = gen_rtx (MEM, Pmode,
- fix_lexical_addr (XEXP (addr, 0), exp));
+ addr = gen_rtx_MEM (Pmode,
+ fix_lexical_addr (XEXP (addr, 0), exp));
else
addr = fix_lexical_addr (addr, exp);
temp = change_address (DECL_RTL (exp), mode, addr);
!= promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
abort ();
- temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
+ temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
SUBREG_PROMOTED_VAR_P (temp) = 1;
SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
return temp;
SAVE_EXPR_RTL (exp) = temp;
if (!optimize && GET_CODE (temp) == REG)
- save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
- save_expr_regs);
+ save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
+ save_expr_regs);
/* If the mode of TEMP does not match that of the expression, it
must be a promoted value. We pass store_expr a SUBREG of the
if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
{
- temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
+ temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
SUBREG_PROMOTED_VAR_P (temp) = 1;
SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
}
{
/* Compute the signedness and make the proper SUBREG. */
promote_mode (type, mode, &unsignedp, 0);
- temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
+ temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
SUBREG_PROMOTED_VAR_P (temp) = 1;
SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
return temp;
tree placeholder_expr;
/* If there is an object on the head of the placeholder list,
- see if some object in it's references is of type TYPE. For
+ see if some object in its references is of type TYPE. For
further information, see tree.def. */
for (placeholder_expr = placeholder_list;
placeholder_expr != 0;
== need_type))
object = TREE_PURPOSE (placeholder_expr);
- /* Find the innermost reference that is of the type we want. */
+ /* Find the outermost reference that is of the type we want. */
for (elt = TREE_PURPOSE (placeholder_expr);
- elt != 0
+ elt != 0 && object == 0
&& (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
|| TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
|| TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
&& (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
== need_type))
- {
- object = TREE_OPERAND (elt, 0);
- break;
- }
+ object = TREE_OPERAND (elt, 0);
if (object != 0)
{
memory_usage = get_memory_usage_from_modifier (modifier);
if (memory_usage != MEMORY_USE_DONT)
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
- op0, ptr_mode,
- GEN_INT (int_size_in_bytes (type)),
- TYPE_MODE (sizetype),
- GEN_INT (memory_usage), QImode);
+ {
+ in_check_memory_usage = 1;
+ emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ op0, ptr_mode,
+ GEN_INT (int_size_in_bytes (type)),
+ TYPE_MODE (sizetype),
+ GEN_INT (memory_usage),
+ TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
+ }
}
- temp = gen_rtx (MEM, mode, op0);
+ temp = gen_rtx_MEM (mode, op0);
/* If address was computed by addition,
mark this as an element of an aggregate. */
if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
if (GET_CODE (op0) != MEM)
abort ();
+
+ if (GET_MODE (offset_rtx) != ptr_mode)
+#ifdef POINTERS_EXTEND_UNSIGNED
+ offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
+#else
+ offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
+#endif
+
op0 = change_address (op0, VOIDmode,
- gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
- force_reg (ptr_mode, offset_rtx)));
+ gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
+ force_reg (ptr_mode, offset_rtx)));
}
/* Don't forget about volatility even if this is a bitfield. */
to, ptr_mode,
GEN_INT (size / BITS_PER_UNIT),
TYPE_MODE (sizetype),
- GEN_INT (memory_usage), QImode);
+ GEN_INT (memory_usage),
+ TYPE_MODE (integer_type_node));
}
}
/* Get a reference to just this component. */
if (modifier == EXPAND_CONST_ADDRESS
|| modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
- op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
- (bitpos / BITS_PER_UNIT)));
+ op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
+ (bitpos / BITS_PER_UNIT)));
else
op0 = change_address (op0, mode1,
plus_constant (XEXP (op0, 0),
/* Extract the bit we want to examine */
bit = expand_shift (RSHIFT_EXPR, byte_mode,
- gen_rtx (MEM, byte_mode, addr),
+ gen_rtx_MEM (byte_mode, addr),
make_tree (TREE_TYPE (index), rem),
NULL_RTX, 1);
result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
if (modifier == EXPAND_INITIALIZER)
- return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
+ return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
if (target == 0)
return
op0 = temp;
/* Ensure that MULT comes first if there is one. */
else if (GET_CODE (op0) == MULT)
- op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
+ op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
else
- op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
+ op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
/* Let's also eliminate constants from op0 if possible. */
op0 = eliminate_constant_term (op0, &constant_term);
if (temp != 0)
op1 = temp;
else
- op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
+ op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
}
/* Put a constant term last and put a multiplication first. */
temp = op1, op1 = op0, op0 = temp;
temp = simplify_binary_operation (PLUS, mode, op0, op1);
- return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
+ return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
case MINUS_EXPR:
/* For initializers, we are allowed to return a MINUS of two
if (GET_CODE (op1) == CONST_INT)
return plus_constant (op0, - INTVAL (op1));
else
- return gen_rtx (MINUS, mode, op0, op1);
+ return gen_rtx_MINUS (mode, op0, op1);
}
/* Convert A - const to A + (-const). */
if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
/* Apply distributive law if OP0 is x+c. */
if (GET_CODE (op0) == PLUS
&& GET_CODE (XEXP (op0, 1)) == CONST_INT)
- return gen_rtx (PLUS, mode,
- gen_rtx (MULT, mode, XEXP (op0, 0),
- GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
+ return gen_rtx_PLUS (mode,
+ gen_rtx_MULT (mode, XEXP (op0, 0),
+ GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
* INTVAL (XEXP (op0, 1))));
if (GET_CODE (op0) != REG)
op0 = copy_to_mode_reg (mode, op0);
- return gen_rtx (MULT, mode, op0,
- GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
+ return gen_rtx_MULT (mode, op0,
+ GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
}
if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
else
jumpifnot (TREE_OPERAND (exp, 0), op0);
- start_cleanup_deferal ();
+ start_cleanup_deferral ();
if (binary_op && temp == 0)
/* Just touch the other operand. */
expand_expr (TREE_OPERAND (binary_op, 1),
store_expr (TREE_OPERAND (exp, 1), temp, 0);
jumpif (TREE_OPERAND (exp, 0), op0);
- start_cleanup_deferal ();
+ start_cleanup_deferral ();
store_expr (TREE_OPERAND (exp, 2), temp, 0);
op1 = op0;
}
store_expr (TREE_OPERAND (exp, 2), temp, 0);
jumpifnot (TREE_OPERAND (exp, 0), op0);
- start_cleanup_deferal ();
+ start_cleanup_deferral ();
store_expr (TREE_OPERAND (exp, 1), temp, 0);
op1 = op0;
}
op1 = gen_label_rtx ();
jumpifnot (TREE_OPERAND (exp, 0), op0);
- start_cleanup_deferal ();
+ start_cleanup_deferral ();
if (temp != 0)
store_expr (TREE_OPERAND (exp, 1), temp, 0);
else
expand_expr (TREE_OPERAND (exp, 1),
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
- end_cleanup_deferal ();
+ end_cleanup_deferral ();
emit_queue ();
emit_jump_insn (gen_jump (op1));
emit_barrier ();
emit_label (op0);
- start_cleanup_deferal ();
+ start_cleanup_deferral ();
if (temp != 0)
store_expr (TREE_OPERAND (exp, 2), temp, 0);
else
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
}
- end_cleanup_deferal ();
+ end_cleanup_deferral ();
emit_queue ();
emit_label (op1);
tree slot = TREE_OPERAND (exp, 0);
tree cleanups = NULL_TREE;
tree exp1;
- rtx temp;
if (TREE_CODE (slot) != VAR_DECL)
abort ();
case POPDCC_EXPR:
{
rtx dcc = get_dynamic_cleanup_chain ();
- emit_move_insn (dcc, validize_mem (gen_rtx (MEM, Pmode, dcc)));
+ emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
return const0_rtx;
}
case POPDHC_EXPR:
{
rtx dhc = get_dynamic_handler_chain ();
- emit_move_insn (dhc, validize_mem (gen_rtx (MEM, Pmode, dhc)));
+ emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
return const0_rtx;
}
}
-/* Emit bytecode to evaluate the given expression EXP to the stack. */
+\f
+/* Return the alignment in bits of EXP, a pointer valued expression.
+ But don't return more than MAX_ALIGN no matter what.
+ The alignment returned is, by default, the alignment of the thing that
+ EXP points to (if it is not a POINTER_TYPE, 0 is returned).
+
+ Otherwise, look at the expression to see if we can do better, i.e., if the
+ expression is actually pointing at an object whose alignment is tighter. */
-void
-bc_expand_expr (exp)
- tree exp;
+static int
+get_pointer_alignment (exp, max_align)
+ tree exp;
+ unsigned max_align;
{
- enum tree_code code;
- tree type, arg0;
- rtx r;
- struct binary_operator *binoptab;
- struct unary_operator *unoptab;
- struct increment_operator *incroptab;
- struct bc_label *lab, *lab1;
- enum bytecode_opcode opcode;
-
-
- code = TREE_CODE (exp);
-
- switch (code)
+ unsigned align, inner;
+
+ if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
+ return 0;
+
+ align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
+ align = MIN (align, max_align);
+
+ while (1)
{
- case PARM_DECL:
-
- if (DECL_RTL (exp) == 0)
+ switch (TREE_CODE (exp))
{
- error_with_decl (exp, "prior parameter's size depends on `%s'");
- return;
- }
-
- bc_load_parmaddr (DECL_RTL (exp));
- bc_load_memory (TREE_TYPE (exp), exp);
-
- return;
-
- case VAR_DECL:
-
- if (DECL_RTL (exp) == 0)
- abort ();
-
-#if 0
- if (BYTECODE_LABEL (DECL_RTL (exp)))
- bc_load_externaddr (DECL_RTL (exp));
- else
- bc_load_localaddr (DECL_RTL (exp));
-#endif
- if (TREE_PUBLIC (exp))
- bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
- BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
- else
- bc_load_localaddr (DECL_RTL (exp));
-
- bc_load_memory (TREE_TYPE (exp), exp);
- return;
-
- case INTEGER_CST:
-
-#ifdef DEBUG_PRINT_CODE
- fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
-#endif
- bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
- ? SImode
- : TYPE_MODE (TREE_TYPE (exp)))],
- (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
- return;
-
- case REAL_CST:
-
-#if 0
-#ifdef DEBUG_PRINT_CODE
- fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
-#endif
- /* FIX THIS: find a better way to pass real_cst's. -bson */
- bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
- (double) TREE_REAL_CST (exp));
-#else
- abort ();
+ case NOP_EXPR:
+ case CONVERT_EXPR:
+ case NON_LVALUE_EXPR:
+ exp = TREE_OPERAND (exp, 0);
+ if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
+ return align;
+ inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
+ align = MIN (inner, max_align);
+ break;
+
+ case PLUS_EXPR:
+ /* If sum of pointer + int, restrict our maximum alignment to that
+ imposed by the integer. If not, we can't do any better than
+ ALIGN. */
+ if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
+ return align;
+
+ while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
+ & (max_align - 1))
+ != 0)
+ max_align >>= 1;
+
+ exp = TREE_OPERAND (exp, 0);
+ break;
+
+ case ADDR_EXPR:
+ /* See what we are pointing at and look at its alignment. */
+ exp = TREE_OPERAND (exp, 0);
+ if (TREE_CODE (exp) == FUNCTION_DECL)
+ align = FUNCTION_BOUNDARY;
+ else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
+ align = DECL_ALIGN (exp);
+#ifdef CONSTANT_ALIGNMENT
+ else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
+ align = CONSTANT_ALIGNMENT (exp, align);
#endif
+ return MIN (align, max_align);
- return;
-
- case CALL_EXPR:
-
- /* We build a call description vector describing the type of
- the return value and of the arguments; this call vector,
- together with a pointer to a location for the return value
- and the base of the argument list, is passed to the low
- level machine dependent call subroutine, which is responsible
- for putting the arguments wherever real functions expect
- them, as well as getting the return value back. */
- {
- tree calldesc = 0, arg;
- int nargs = 0, i;
- rtx retval;
-
- /* Push the evaluated args on the evaluation stack in reverse
- order. Also make an entry for each arg in the calldesc
- vector while we're at it. */
-
- TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
-
- for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
- {
- ++nargs;
- bc_expand_expr (TREE_VALUE (arg));
-
- calldesc = tree_cons ((tree) 0,
- size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
- calldesc);
- calldesc = tree_cons ((tree) 0,
- bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
- calldesc);
- }
-
- TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
-
- /* Allocate a location for the return value and push its
- address on the evaluation stack. Also make an entry
- at the front of the calldesc for the return value type. */
-
- type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
- retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
- bc_load_localaddr (retval);
-
- calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
- calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
-
- /* Prepend the argument count. */
- calldesc = tree_cons ((tree) 0,
- build_int_2 (nargs, 0),
- calldesc);
-
- /* Push the address of the call description vector on the stack. */
- calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
- TREE_TYPE (calldesc) = build_array_type (integer_type_node,
- build_index_type (build_int_2 (nargs * 2, 0)));
- r = output_constant_def (calldesc);
- bc_load_externaddr (r);
-
- /* Push the address of the function to be called. */
- bc_expand_expr (TREE_OPERAND (exp, 0));
-
- /* Call the function, popping its address and the calldesc vector
- address off the evaluation stack in the process. */
- bc_emit_instruction (call);
-
- /* Pop the arguments off the stack. */
- bc_adjust_stack (nargs);
-
- /* Load the return value onto the stack. */
- bc_load_localaddr (retval);
- bc_load_memory (type, TREE_OPERAND (exp, 0));
- }
- return;
-
- case SAVE_EXPR:
-
- if (!SAVE_EXPR_RTL (exp))
+ default:
+ return align;
+ }
+ }
+}
+\f
+/* Return the tree node and offset if a given argument corresponds to
+ a string constant. */
+
+static tree
+string_constant (arg, ptr_offset)
+ tree arg;
+ tree *ptr_offset;
+{
+ STRIP_NOPS (arg);
+
+ if (TREE_CODE (arg) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
+ {
+ *ptr_offset = integer_zero_node;
+ return TREE_OPERAND (arg, 0);
+ }
+ else if (TREE_CODE (arg) == PLUS_EXPR)
+ {
+ tree arg0 = TREE_OPERAND (arg, 0);
+ tree arg1 = TREE_OPERAND (arg, 1);
+
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
+
+ if (TREE_CODE (arg0) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
{
- /* First time around: copy to local variable */
- SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
- TYPE_ALIGN (TREE_TYPE(exp)));
- bc_expand_expr (TREE_OPERAND (exp, 0));
- bc_emit_instruction (duplicate);
-
- bc_load_localaddr (SAVE_EXPR_RTL (exp));
- bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
+ *ptr_offset = arg1;
+ return TREE_OPERAND (arg0, 0);
}
- else
+ else if (TREE_CODE (arg1) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
{
- /* Consecutive reference: use saved copy */
- bc_load_localaddr (SAVE_EXPR_RTL (exp));
- bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
+ *ptr_offset = arg0;
+ return TREE_OPERAND (arg1, 0);
}
- return;
-
-#if 0
- /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
- how are they handled instead? */
- case LET_STMT:
-
- TREE_USED (exp) = 1;
- bc_expand_expr (STMT_BODY (exp));
- return;
-#endif
-
- case NOP_EXPR:
- case CONVERT_EXPR:
-
- bc_expand_expr (TREE_OPERAND (exp, 0));
- bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
- return;
-
- case MODIFY_EXPR:
-
- expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
- return;
-
- case ADDR_EXPR:
-
- bc_expand_address (TREE_OPERAND (exp, 0));
- return;
-
- case INDIRECT_REF:
-
- bc_expand_expr (TREE_OPERAND (exp, 0));
- bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
- return;
-
- case ARRAY_REF:
-
- bc_expand_expr (bc_canonicalize_array_ref (exp));
- return;
-
- case COMPONENT_REF:
-
- bc_expand_component_address (exp);
-
- /* If we have a bitfield, generate a proper load */
- bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
- return;
-
- case COMPOUND_EXPR:
-
- bc_expand_expr (TREE_OPERAND (exp, 0));
- bc_emit_instruction (drop);
- bc_expand_expr (TREE_OPERAND (exp, 1));
- return;
-
- case COND_EXPR:
-
- bc_expand_expr (TREE_OPERAND (exp, 0));
- bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
- lab = bc_get_bytecode_label ();
- bc_emit_bytecode (xjumpifnot);
- bc_emit_bytecode_labelref (lab);
-
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
-#endif
- bc_expand_expr (TREE_OPERAND (exp, 1));
- lab1 = bc_get_bytecode_label ();
- bc_emit_bytecode (jump);
- bc_emit_bytecode_labelref (lab1);
-
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
-#endif
-
- bc_emit_bytecode_labeldef (lab);
- bc_expand_expr (TREE_OPERAND (exp, 2));
- bc_emit_bytecode_labeldef (lab1);
- return;
-
- case TRUTH_ANDIF_EXPR:
-
- opcode = xjumpifnot;
- goto andorif;
-
- case TRUTH_ORIF_EXPR:
-
- opcode = xjumpif;
- goto andorif;
-
- case PLUS_EXPR:
-
- binoptab = optab_plus_expr;
- goto binop;
-
- case MINUS_EXPR:
-
- binoptab = optab_minus_expr;
- goto binop;
-
- case MULT_EXPR:
-
- binoptab = optab_mult_expr;
- goto binop;
-
- case TRUNC_DIV_EXPR:
- case FLOOR_DIV_EXPR:
- case CEIL_DIV_EXPR:
- case ROUND_DIV_EXPR:
- case EXACT_DIV_EXPR:
-
- binoptab = optab_trunc_div_expr;
- goto binop;
-
- case TRUNC_MOD_EXPR:
- case FLOOR_MOD_EXPR:
- case CEIL_MOD_EXPR:
- case ROUND_MOD_EXPR:
-
- binoptab = optab_trunc_mod_expr;
- goto binop;
-
- case FIX_ROUND_EXPR:
- case FIX_FLOOR_EXPR:
- case FIX_CEIL_EXPR:
- abort (); /* Not used for C. */
-
- case FIX_TRUNC_EXPR:
- case FLOAT_EXPR:
- case MAX_EXPR:
- case MIN_EXPR:
- case FFS_EXPR:
- case LROTATE_EXPR:
- case RROTATE_EXPR:
- abort (); /* FIXME */
-
- case RDIV_EXPR:
-
- binoptab = optab_rdiv_expr;
- goto binop;
-
- case BIT_AND_EXPR:
-
- binoptab = optab_bit_and_expr;
- goto binop;
-
- case BIT_IOR_EXPR:
-
- binoptab = optab_bit_ior_expr;
- goto binop;
-
- case BIT_XOR_EXPR:
-
- binoptab = optab_bit_xor_expr;
- goto binop;
-
- case LSHIFT_EXPR:
-
- binoptab = optab_lshift_expr;
- goto binop;
-
- case RSHIFT_EXPR:
-
- binoptab = optab_rshift_expr;
- goto binop;
-
- case TRUTH_AND_EXPR:
-
- binoptab = optab_truth_and_expr;
- goto binop;
-
- case TRUTH_OR_EXPR:
-
- binoptab = optab_truth_or_expr;
- goto binop;
-
- case LT_EXPR:
-
- binoptab = optab_lt_expr;
- goto binop;
-
- case LE_EXPR:
-
- binoptab = optab_le_expr;
- goto binop;
-
- case GE_EXPR:
-
- binoptab = optab_ge_expr;
- goto binop;
-
- case GT_EXPR:
-
- binoptab = optab_gt_expr;
- goto binop;
-
- case EQ_EXPR:
-
- binoptab = optab_eq_expr;
- goto binop;
-
- case NE_EXPR:
-
- binoptab = optab_ne_expr;
- goto binop;
-
- case NEGATE_EXPR:
-
- unoptab = optab_negate_expr;
- goto unop;
-
- case BIT_NOT_EXPR:
-
- unoptab = optab_bit_not_expr;
- goto unop;
-
- case TRUTH_NOT_EXPR:
-
- unoptab = optab_truth_not_expr;
- goto unop;
-
- case PREDECREMENT_EXPR:
-
- incroptab = optab_predecrement_expr;
- goto increment;
-
- case PREINCREMENT_EXPR:
-
- incroptab = optab_preincrement_expr;
- goto increment;
-
- case POSTDECREMENT_EXPR:
-
- incroptab = optab_postdecrement_expr;
- goto increment;
-
- case POSTINCREMENT_EXPR:
-
- incroptab = optab_postincrement_expr;
- goto increment;
-
- case CONSTRUCTOR:
-
- bc_expand_constructor (exp);
- return;
-
- case ERROR_MARK:
- case RTL_EXPR:
-
- return;
-
- case BIND_EXPR:
- {
- tree vars = TREE_OPERAND (exp, 0);
- int vars_need_expansion = 0;
-
- /* Need to open a binding contour here because
- if there are any cleanups they most be contained here. */
- expand_start_bindings (0);
-
- /* Mark the corresponding BLOCK for output. */
- if (TREE_OPERAND (exp, 2) != 0)
- TREE_USED (TREE_OPERAND (exp, 2)) = 1;
-
- /* If VARS have not yet been expanded, expand them now. */
- while (vars)
- {
- if (DECL_RTL (vars) == 0)
- {
- vars_need_expansion = 1;
- expand_decl (vars);
- }
- expand_decl_init (vars);
- vars = TREE_CHAIN (vars);
- }
-
- bc_expand_expr (TREE_OPERAND (exp, 1));
-
- expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
-
- return;
- }
-
- default:
- abort ();
- }
-
- abort ();
-
- binop:
-
- bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
- TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
- return;
-
-
- unop:
-
- bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
- return;
-
-
- andorif:
-
- bc_expand_expr (TREE_OPERAND (exp, 0));
- bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
- lab = bc_get_bytecode_label ();
-
- bc_emit_instruction (duplicate);
- bc_emit_bytecode (opcode);
- bc_emit_bytecode_labelref (lab);
-
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
-#endif
-
- bc_emit_instruction (drop);
-
- bc_expand_expr (TREE_OPERAND (exp, 1));
- bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
- bc_emit_bytecode_labeldef (lab);
- return;
-
-
- increment:
-
- type = TREE_TYPE (TREE_OPERAND (exp, 0));
-
- /* Push the quantum. */
- bc_expand_expr (TREE_OPERAND (exp, 1));
-
- /* Convert it to the lvalue's type. */
- bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
-
- /* Push the address of the lvalue */
- bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
-
- /* Perform actual increment */
- bc_expand_increment (incroptab, type);
- return;
-}
-\f
-/* Return the alignment in bits of EXP, a pointer valued expression.
- But don't return more than MAX_ALIGN no matter what.
- The alignment returned is, by default, the alignment of the thing that
- EXP points to (if it is not a POINTER_TYPE, 0 is returned).
-
- Otherwise, look at the expression to see if we can do better, i.e., if the
- expression is actually pointing at an object whose alignment is tighter. */
-
-static int
-get_pointer_alignment (exp, max_align)
- tree exp;
- unsigned max_align;
-{
- unsigned align, inner;
-
- if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
- return 0;
-
- align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
- align = MIN (align, max_align);
-
- while (1)
- {
- switch (TREE_CODE (exp))
- {
- case NOP_EXPR:
- case CONVERT_EXPR:
- case NON_LVALUE_EXPR:
- exp = TREE_OPERAND (exp, 0);
- if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
- return align;
- inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
- align = MIN (inner, max_align);
- break;
-
- case PLUS_EXPR:
- /* If sum of pointer + int, restrict our maximum alignment to that
- imposed by the integer. If not, we can't do any better than
- ALIGN. */
- if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
- return align;
-
- while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
- & (max_align - 1))
- != 0)
- max_align >>= 1;
-
- exp = TREE_OPERAND (exp, 0);
- break;
-
- case ADDR_EXPR:
- /* See what we are pointing at and look at its alignment. */
- exp = TREE_OPERAND (exp, 0);
- if (TREE_CODE (exp) == FUNCTION_DECL)
- align = FUNCTION_BOUNDARY;
- else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
- align = DECL_ALIGN (exp);
-#ifdef CONSTANT_ALIGNMENT
- else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
- align = CONSTANT_ALIGNMENT (exp, align);
-#endif
- return MIN (align, max_align);
-
- default:
- return align;
- }
- }
-}
-\f
-/* Return the tree node and offset if a given argument corresponds to
- a string constant. */
-
-static tree
-string_constant (arg, ptr_offset)
- tree arg;
- tree *ptr_offset;
-{
- STRIP_NOPS (arg);
-
- if (TREE_CODE (arg) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
- {
- *ptr_offset = integer_zero_node;
- return TREE_OPERAND (arg, 0);
- }
- else if (TREE_CODE (arg) == PLUS_EXPR)
- {
- tree arg0 = TREE_OPERAND (arg, 0);
- tree arg1 = TREE_OPERAND (arg, 1);
-
- STRIP_NOPS (arg0);
- STRIP_NOPS (arg1);
-
- if (TREE_CODE (arg0) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
- {
- *ptr_offset = arg1;
- return TREE_OPERAND (arg0, 0);
- }
- else if (TREE_CODE (arg1) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
- {
- *ptr_offset = arg0;
- return TREE_OPERAND (arg1, 0);
- }
- }
-
- return 0;
-}
-
-/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
- way, because it could contain a zero byte in the middle.
- TREE_STRING_LENGTH is the size of the character array, not the string.
-
- Unfortunately, string_constant can't access the values of const char
- arrays with initializers, so neither can we do so here. */
-
-static tree
-c_strlen (src)
- tree src;
-{
- tree offset_node;
- int offset, max;
- char *ptr;
-
- src = string_constant (src, &offset_node);
- if (src == 0)
- return 0;
- max = TREE_STRING_LENGTH (src);
- ptr = TREE_STRING_POINTER (src);
- if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
- {
- /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
- compute the offset to the following null if we don't know where to
- start searching for it. */
- int i;
- for (i = 0; i < max; i++)
- if (ptr[i] == 0)
- return 0;
- /* We don't know the starting offset, but we do know that the string
- has no internal zero bytes. We can assume that the offset falls
- within the bounds of the string; otherwise, the programmer deserves
- what he gets. Subtract the offset from the length of the string,
- and return that. */
- /* This would perhaps not be valid if we were dealing with named
- arrays in addition to literal string constants. */
- return size_binop (MINUS_EXPR, size_int (max), offset_node);
- }
-
- /* We have a known offset into the string. Start searching there for
- a null character. */
- if (offset_node == 0)
- offset = 0;
- else
- {
- /* Did we get a long long offset? If so, punt. */
- if (TREE_INT_CST_HIGH (offset_node) != 0)
- return 0;
- offset = TREE_INT_CST_LOW (offset_node);
- }
- /* If the offset is known to be out of bounds, warn, and call strlen at
- runtime. */
- if (offset < 0 || offset > max)
- {
- warning ("offset outside bounds of constant string");
- return 0;
- }
- /* Use strlen to search for the first zero byte. Since any strings
- constructed with build_string will have nulls appended, we win even
- if we get handed something like (char[4])"abcd".
-
- Since OFFSET is our starting index into the string, no further
- calculation is needed. */
- return size_int (strlen (ptr + offset));
-}
-
-rtx
-expand_builtin_return_addr (fndecl_code, count, tem)
- enum built_in_function fndecl_code;
- int count;
- rtx tem;
-{
- int i;
-
- /* Some machines need special handling before we can access
- arbitrary frames. For example, on the sparc, we must first flush
- all register windows to the stack. */
-#ifdef SETUP_FRAME_ADDRESSES
- if (count > 0)
- SETUP_FRAME_ADDRESSES ();
-#endif
-
- /* On the sparc, the return address is not in the frame, it is in a
- register. There is no way to access it off of the current frame
- pointer, but it can be accessed off the previous frame pointer by
- reading the value from the register window save area. */
-#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
- if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
- count--;
-#endif
-
- /* Scan back COUNT frames to the specified frame. */
- for (i = 0; i < count; i++)
- {
- /* Assume the dynamic chain pointer is in the word that the
- frame address points to, unless otherwise specified. */
-#ifdef DYNAMIC_CHAIN_ADDRESS
- tem = DYNAMIC_CHAIN_ADDRESS (tem);
-#endif
- tem = memory_address (Pmode, tem);
- tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
- }
-
- /* For __builtin_frame_address, return what we've got. */
- if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
- return tem;
-
- /* For __builtin_return_address, Get the return address from that
- frame. */
-#ifdef RETURN_ADDR_RTX
- tem = RETURN_ADDR_RTX (count, tem);
-#else
- tem = memory_address (Pmode,
- plus_constant (tem, GET_MODE_SIZE (Pmode)));
- tem = gen_rtx (MEM, Pmode, tem);
-#endif
- return tem;
-}
-
-/* __builtin_setjmp is passed a pointer to an array of five words (not
- all will be used on all machines). It operates similarly to the C
- library function of the same name, but is more efficient. Much of
- the code below (and for longjmp) is copied from the handling of
- non-local gotos.
-
- NOTE: This is intended for use by GNAT and the exception handling
- scheme in the compiler and will only work in the method used by
- them. */
-
-rtx
-expand_builtin_setjmp (buf_addr, target)
- rtx buf_addr;
- rtx target;
-{
- rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
- enum machine_mode sa_mode = Pmode, value_mode;
- rtx stack_save;
- int old_inhibit_defer_pop = inhibit_defer_pop;
- int return_pops
- = RETURN_POPS_ARGS (get_identifier ("__dummy"),
- build_function_type (void_type_node, NULL_TREE),
- 0);
- rtx next_arg_reg;
- CUMULATIVE_ARGS args_so_far;
- rtx op0;
- int i;
-
- value_mode = TYPE_MODE (integer_type_node);
-
-#ifdef POINTERS_EXTEND_UNSIGNED
- buf_addr = convert_memory_address (Pmode, buf_addr);
-#endif
-
- buf_addr = force_reg (Pmode, buf_addr);
-
- if (target == 0 || GET_CODE (target) != REG
- || REGNO (target) < FIRST_PSEUDO_REGISTER)
- target = gen_reg_rtx (value_mode);
-
- emit_queue ();
-
- /* We store the frame pointer and the address of lab1 in the buffer
- and use the rest of it for the stack save area, which is
- machine-dependent. */
- emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
- virtual_stack_vars_rtx);
- emit_move_insn
- (validize_mem (gen_rtx (MEM, Pmode,
- plus_constant (buf_addr,
- GET_MODE_SIZE (Pmode)))),
- gen_rtx (LABEL_REF, Pmode, lab1));
-
-#ifdef HAVE_save_stack_nonlocal
- if (HAVE_save_stack_nonlocal)
- sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
-#endif
-
- stack_save = gen_rtx (MEM, sa_mode,
- plus_constant (buf_addr,
- 2 * GET_MODE_SIZE (Pmode)));
- emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
-
-#ifdef HAVE_setjmp
- if (HAVE_setjmp)
- emit_insn (gen_setjmp ());
-#endif
-
- /* Set TARGET to zero and branch around the other case. */
- emit_move_insn (target, const0_rtx);
- emit_jump_insn (gen_jump (lab2));
- emit_barrier ();
- emit_label (lab1);
-
- /* Note that setjmp clobbers FP when we get here, so we have to make
- sure it's marked as used by this function. */
- emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
-
- /* Mark the static chain as clobbered here so life information
- doesn't get messed up for it. */
- emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
-
- /* Now put in the code to restore the frame pointer, and argument
- pointer, if needed. The code below is from expand_end_bindings
- in stmt.c; see detailed documentation there. */
-#ifdef HAVE_nonlocal_goto
- if (! HAVE_nonlocal_goto)
-#endif
- emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
-
- /* Do we need to do something like:
-
- current_function_has_nonlocal_label = 1;
-
- here? It seems like we might have to, or some subset of that
- functionality, but I am unsure. (mrs) */
-
-#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
- if (fixed_regs[ARG_POINTER_REGNUM])
- {
-#ifdef ELIMINABLE_REGS
- static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
-
- for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
- if (elim_regs[i].from == ARG_POINTER_REGNUM
- && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
- break;
-
- if (i == sizeof elim_regs / sizeof elim_regs [0])
-#endif
- {
- /* Now restore our arg pointer from the address at which it
- was saved in our stack frame.
- If there hasn't be space allocated for it yet, make
- some now. */
- if (arg_pointer_save_area == 0)
- arg_pointer_save_area
- = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
- emit_move_insn (virtual_incoming_args_rtx,
- copy_to_reg (arg_pointer_save_area));
- }
- }
-#endif
-
-#ifdef HAVE_nonlocal_goto_receiver
- if (HAVE_nonlocal_goto_receiver)
- emit_insn (gen_nonlocal_goto_receiver ());
-#endif
- /* The static chain pointer contains the address of dummy function.
- We need to call it here to handle some PIC cases of restoring a
- global pointer. Then return 1. */
- op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
-
- /* We can't actually call emit_library_call here, so do everything
- it does, which isn't much for a libfunc with no args. */
- op0 = memory_address (FUNCTION_MODE, op0);
-
- INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
- gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
- next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
-
-#ifndef ACCUMULATE_OUTGOING_ARGS
-#ifdef HAVE_call_pop
- if (HAVE_call_pop)
- emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
- const0_rtx, next_arg_reg,
- GEN_INT (return_pops)));
- else
-#endif
-#endif
-
-#ifdef HAVE_call
- if (HAVE_call)
- emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
- const0_rtx, next_arg_reg, const0_rtx));
- else
-#endif
- abort ();
-
- emit_move_insn (target, const1_rtx);
- emit_label (lab2);
- return target;
-}
-
-\f
-/* Expand an expression EXP that calls a built-in function,
- with result going to TARGET if that's convenient
- (and in mode MODE if that's convenient).
- SUBTARGET may be used as the target for computing one of EXP's operands.
- IGNORE is nonzero if the value is to be ignored. */
-
-#define CALLED_AS_BUILT_IN(NODE) \
- (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
-
-static rtx
-expand_builtin (exp, target, subtarget, mode, ignore)
- tree exp;
- rtx target;
- rtx subtarget;
- enum machine_mode mode;
- int ignore;
-{
- tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
- tree arglist = TREE_OPERAND (exp, 1);
- rtx op0;
- rtx lab1, insns;
- enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
- optab builtin_optab;
-
- switch (DECL_FUNCTION_CODE (fndecl))
- {
- case BUILT_IN_ABS:
- case BUILT_IN_LABS:
- case BUILT_IN_FABS:
- /* build_function_call changes these into ABS_EXPR. */
- abort ();
-
- case BUILT_IN_SIN:
- case BUILT_IN_COS:
- /* Treat these like sqrt, but only if the user asks for them. */
- if (! flag_fast_math)
- break;
- case BUILT_IN_FSQRT:
- /* If not optimizing, call the library function. */
- if (! optimize)
- break;
-
- if (arglist == 0
- /* Arg could be wrong type if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
- break;
-
- /* Stabilize and compute the argument. */
- if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
- && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
- {
- exp = copy_node (exp);
- arglist = copy_node (arglist);
- TREE_OPERAND (exp, 1) = arglist;
- TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
- }
- op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
-
- /* Make a suitable register to place result in. */
- target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
-
- emit_queue ();
- start_sequence ();
-
- switch (DECL_FUNCTION_CODE (fndecl))
- {
- case BUILT_IN_SIN:
- builtin_optab = sin_optab; break;
- case BUILT_IN_COS:
- builtin_optab = cos_optab; break;
- case BUILT_IN_FSQRT:
- builtin_optab = sqrt_optab; break;
- default:
- abort ();
- }
-
- /* Compute into TARGET.
- Set TARGET to wherever the result comes back. */
- target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
- builtin_optab, op0, target, 0);
-
- /* If we were unable to expand via the builtin, stop the
- sequence (without outputting the insns) and break, causing
- a call the the library function. */
- if (target == 0)
- {
- end_sequence ();
- break;
- }
-
- /* Check the results by default. But if flag_fast_math is turned on,
- then assume sqrt will always be called with valid arguments. */
-
- if (! flag_fast_math)
- {
- /* Don't define the builtin FP instructions
- if your machine is not IEEE. */
- if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
- abort ();
-
- lab1 = gen_label_rtx ();
-
- /* Test the result; if it is NaN, set errno=EDOM because
- the argument was not in the domain. */
- emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
- emit_jump_insn (gen_beq (lab1));
-
-#ifdef TARGET_EDOM
- {
-#ifdef GEN_ERRNO_RTX
- rtx errno_rtx = GEN_ERRNO_RTX;
-#else
- rtx errno_rtx
- = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
-#endif
-
- emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
- }
-#else
- /* We can't set errno=EDOM directly; let the library call do it.
- Pop the arguments right away in case the call gets deleted. */
- NO_DEFER_POP;
- expand_call (exp, target, 0);
- OK_DEFER_POP;
-#endif
-
- emit_label (lab1);
- }
-
- /* Output the entire sequence. */
- insns = get_insns ();
- end_sequence ();
- emit_insns (insns);
-
- return target;
-
- /* __builtin_apply_args returns block of memory allocated on
- the stack into which is stored the arg pointer, structure
- value address, static chain, and all the registers that might
- possibly be used in performing a function call. The code is
- moved to the start of the function so the incoming values are
- saved. */
- case BUILT_IN_APPLY_ARGS:
- /* Don't do __builtin_apply_args more than once in a function.
- Save the result of the first call and reuse it. */
- if (apply_args_value != 0)
- return apply_args_value;
- {
- /* When this function is called, it means that registers must be
- saved on entry to this function. So we migrate the
- call to the first insn of this function. */
- rtx temp;
- rtx seq;
-
- start_sequence ();
- temp = expand_builtin_apply_args ();
- seq = get_insns ();
- end_sequence ();
-
- apply_args_value = temp;
-
- /* Put the sequence after the NOTE that starts the function.
- If this is inside a SEQUENCE, make the outer-level insn
- chain current, so the code is placed at the start of the
- function. */
- push_topmost_sequence ();
- emit_insns_before (seq, NEXT_INSN (get_insns ()));
- pop_topmost_sequence ();
- return temp;
- }
-
- /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
- FUNCTION with a copy of the parameters described by
- ARGUMENTS, and ARGSIZE. It returns a block of memory
- allocated on the stack into which is stored all the registers
- that might possibly be used for returning the result of a
- function. ARGUMENTS is the value returned by
- __builtin_apply_args. ARGSIZE is the number of bytes of
- arguments that must be copied. ??? How should this value be
- computed? We'll also need a safe worst case value for varargs
- functions. */
- case BUILT_IN_APPLY:
- if (arglist == 0
- /* Arg could be non-pointer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
- || TREE_CHAIN (arglist) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
- || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
- return const0_rtx;
- else
- {
- int i;
- tree t;
- rtx ops[3];
-
- for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
- ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
-
- return expand_builtin_apply (ops[0], ops[1], ops[2]);
- }
-
- /* __builtin_return (RESULT) causes the function to return the
- value described by RESULT. RESULT is address of the block of
- memory returned by __builtin_apply. */
- case BUILT_IN_RETURN:
- if (arglist
- /* Arg could be non-pointer if user redeclared this fcn wrong. */
- && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
- expand_builtin_return (expand_expr (TREE_VALUE (arglist),
- NULL_RTX, VOIDmode, 0));
- return const0_rtx;
-
- case BUILT_IN_SAVEREGS:
- /* Don't do __builtin_saveregs more than once in a function.
- Save the result of the first call and reuse it. */
- if (saveregs_value != 0)
- return saveregs_value;
- {
- /* When this function is called, it means that registers must be
- saved on entry to this function. So we migrate the
- call to the first insn of this function. */
- rtx temp;
- rtx seq;
-
- /* Now really call the function. `expand_call' does not call
- expand_builtin, so there is no danger of infinite recursion here. */
- start_sequence ();
-
-#ifdef EXPAND_BUILTIN_SAVEREGS
- /* Do whatever the machine needs done in this case. */
- temp = EXPAND_BUILTIN_SAVEREGS (arglist);
-#else
- /* The register where the function returns its value
- is likely to have something else in it, such as an argument.
- So preserve that register around the call. */
-
- if (value_mode != VOIDmode)
- {
- rtx valreg = hard_libcall_value (value_mode);
- rtx saved_valreg = gen_reg_rtx (value_mode);
-
- emit_move_insn (saved_valreg, valreg);
- temp = expand_call (exp, target, ignore);
- emit_move_insn (valreg, saved_valreg);
- }
- else
- /* Generate the call, putting the value in a pseudo. */
- temp = expand_call (exp, target, ignore);
-#endif
-
- seq = get_insns ();
- end_sequence ();
-
- saveregs_value = temp;
-
- /* Put the sequence after the NOTE that starts the function.
- If this is inside a SEQUENCE, make the outer-level insn
- chain current, so the code is placed at the start of the
- function. */
- push_topmost_sequence ();
- emit_insns_before (seq, NEXT_INSN (get_insns ()));
- pop_topmost_sequence ();
- return temp;
- }
-
- /* __builtin_args_info (N) returns word N of the arg space info
- for the current function. The number and meanings of words
- is controlled by the definition of CUMULATIVE_ARGS. */
- case BUILT_IN_ARGS_INFO:
- {
- int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
- int i;
- int *word_ptr = (int *) ¤t_function_args_info;
- tree type, elts, result;
-
- if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
- fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
- __FILE__, __LINE__);
-
- if (arglist != 0)
- {
- tree arg = TREE_VALUE (arglist);
- if (TREE_CODE (arg) != INTEGER_CST)
- error ("argument of `__builtin_args_info' must be constant");
- else
- {
- int wordnum = TREE_INT_CST_LOW (arg);
-
- if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
- error ("argument of `__builtin_args_info' out of range");
- else
- return GEN_INT (word_ptr[wordnum]);
- }
- }
- else
- error ("missing argument in `__builtin_args_info'");
-
- return const0_rtx;
-
-#if 0
- for (i = 0; i < nwords; i++)
- elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
-
- type = build_array_type (integer_type_node,
- build_index_type (build_int_2 (nwords, 0)));
- result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
- TREE_CONSTANT (result) = 1;
- TREE_STATIC (result) = 1;
- result = build (INDIRECT_REF, build_pointer_type (type), result);
- TREE_CONSTANT (result) = 1;
- return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
-#endif
- }
-
- /* Return the address of the first anonymous stack arg. */
- case BUILT_IN_NEXT_ARG:
- {
- tree fntype = TREE_TYPE (current_function_decl);
-
- if ((TYPE_ARG_TYPES (fntype) == 0
- || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
- == void_type_node))
- && ! current_function_varargs)
- {
- error ("`va_start' used in function with fixed args");
- return const0_rtx;
- }
-
- if (arglist)
- {
- tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
- tree arg = TREE_VALUE (arglist);
-
- /* Strip off all nops for the sake of the comparison. This
- is not quite the same as STRIP_NOPS. It does more.
- We must also strip off INDIRECT_EXPR for C++ reference
- parameters. */
- while (TREE_CODE (arg) == NOP_EXPR
- || TREE_CODE (arg) == CONVERT_EXPR
- || TREE_CODE (arg) == NON_LVALUE_EXPR
- || TREE_CODE (arg) == INDIRECT_REF)
- arg = TREE_OPERAND (arg, 0);
- if (arg != last_parm)
- warning ("second parameter of `va_start' not last named argument");
- }
- else if (! current_function_varargs)
- /* Evidently an out of date version of <stdarg.h>; can't validate
- va_start's second argument, but can still work as intended. */
- warning ("`__builtin_next_arg' called without an argument");
- }
-
- return expand_binop (Pmode, add_optab,
- current_function_internal_arg_pointer,
- current_function_arg_offset_rtx,
- NULL_RTX, 0, OPTAB_LIB_WIDEN);
-
- case BUILT_IN_CLASSIFY_TYPE:
- if (arglist != 0)
- {
- tree type = TREE_TYPE (TREE_VALUE (arglist));
- enum tree_code code = TREE_CODE (type);
- if (code == VOID_TYPE)
- return GEN_INT (void_type_class);
- if (code == INTEGER_TYPE)
- return GEN_INT (integer_type_class);
- if (code == CHAR_TYPE)
- return GEN_INT (char_type_class);
- if (code == ENUMERAL_TYPE)
- return GEN_INT (enumeral_type_class);
- if (code == BOOLEAN_TYPE)
- return GEN_INT (boolean_type_class);
- if (code == POINTER_TYPE)
- return GEN_INT (pointer_type_class);
- if (code == REFERENCE_TYPE)
- return GEN_INT (reference_type_class);
- if (code == OFFSET_TYPE)
- return GEN_INT (offset_type_class);
- if (code == REAL_TYPE)
- return GEN_INT (real_type_class);
- if (code == COMPLEX_TYPE)
- return GEN_INT (complex_type_class);
- if (code == FUNCTION_TYPE)
- return GEN_INT (function_type_class);
- if (code == METHOD_TYPE)
- return GEN_INT (method_type_class);
- if (code == RECORD_TYPE)
- return GEN_INT (record_type_class);
- if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
- return GEN_INT (union_type_class);
- if (code == ARRAY_TYPE)
- {
- if (TYPE_STRING_FLAG (type))
- return GEN_INT (string_type_class);
- else
- return GEN_INT (array_type_class);
- }
- if (code == SET_TYPE)
- return GEN_INT (set_type_class);
- if (code == FILE_TYPE)
- return GEN_INT (file_type_class);
- if (code == LANG_TYPE)
- return GEN_INT (lang_type_class);
- }
- return GEN_INT (no_type_class);
-
- case BUILT_IN_CONSTANT_P:
- if (arglist == 0)
- return const0_rtx;
- else
- {
- tree arg = TREE_VALUE (arglist);
-
- STRIP_NOPS (arg);
- return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
- || (TREE_CODE (arg) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
- ? const1_rtx : const0_rtx);
- }
-
- case BUILT_IN_FRAME_ADDRESS:
- /* The argument must be a nonnegative integer constant.
- It counts the number of frames to scan up the stack.
- The value is the address of that frame. */
- case BUILT_IN_RETURN_ADDRESS:
- /* The argument must be a nonnegative integer constant.
- It counts the number of frames to scan up the stack.
- The value is the return address saved in that frame. */
- if (arglist == 0)
- /* Warning about missing arg was already issued. */
- return const0_rtx;
- else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
- || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
- {
- if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
- error ("invalid arg to `__builtin_frame_address'");
- else
- error ("invalid arg to `__builtin_return_address'");
- return const0_rtx;
- }
- else
- {
- rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
- TREE_INT_CST_LOW (TREE_VALUE (arglist)),
- hard_frame_pointer_rtx);
-
- /* Some ports cannot access arbitrary stack frames. */
- if (tem == NULL)
- {
- if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
- warning ("unsupported arg to `__builtin_frame_address'");
- else
- warning ("unsupported arg to `__builtin_return_address'");
- return const0_rtx;
- }
-
- /* For __builtin_frame_address, return what we've got. */
- if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
- return tem;
-
- if (GET_CODE (tem) != REG)
- tem = copy_to_reg (tem);
- return tem;
- }
-
- /* Returns the address of the area where the structure is returned.
- 0 otherwise. */
- case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
- if (arglist != 0
- || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
- || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
- return const0_rtx;
- else
- return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
-
- case BUILT_IN_ALLOCA:
- if (arglist == 0
- /* Arg could be non-integer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
- break;
-
- /* Compute the argument. */
- op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
-
- /* Allocate the desired space. */
- return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
-
- case BUILT_IN_FFS:
- /* If not optimizing, call the library function. */
- if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
- break;
-
- if (arglist == 0
- /* Arg could be non-integer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
- break;
-
- /* Compute the argument. */
- op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
- /* Compute ffs, into TARGET if possible.
- Set TARGET to wherever the result comes back. */
- target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
- ffs_optab, op0, target, 1);
- if (target == 0)
- abort ();
- return target;
-
- case BUILT_IN_STRLEN:
- /* If not optimizing, call the library function. */
- if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
- break;
-
- if (arglist == 0
- /* Arg could be non-pointer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
- break;
- else
- {
- tree src = TREE_VALUE (arglist);
- tree len = c_strlen (src);
-
- int align
- = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
-
- rtx result, src_rtx, char_rtx;
- enum machine_mode insn_mode = value_mode, char_mode;
- enum insn_code icode;
-
- /* If the length is known, just return it. */
- if (len != 0)
- return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
-
- /* If SRC is not a pointer type, don't do this operation inline. */
- if (align == 0)
- break;
-
- /* Call a function if we can't compute strlen in the right mode. */
-
- while (insn_mode != VOIDmode)
- {
- icode = strlen_optab->handlers[(int) insn_mode].insn_code;
- if (icode != CODE_FOR_nothing)
- break;
-
- insn_mode = GET_MODE_WIDER_MODE (insn_mode);
- }
- if (insn_mode == VOIDmode)
- break;
-
- /* Make a place to write the result of the instruction. */
- result = target;
- if (! (result != 0
- && GET_CODE (result) == REG
- && GET_MODE (result) == insn_mode
- && REGNO (result) >= FIRST_PSEUDO_REGISTER))
- result = gen_reg_rtx (insn_mode);
-
- /* Make sure the operands are acceptable to the predicates. */
-
- if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
- result = gen_reg_rtx (insn_mode);
- src_rtx = memory_address (BLKmode,
- expand_expr (src, NULL_RTX, ptr_mode,
- EXPAND_NORMAL));
-
- if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
- src_rtx = copy_to_mode_reg (Pmode, src_rtx);
-
- /* Check the string is readable and has an end. */
- if (flag_check_memory_usage)
- emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
- src_rtx, ptr_mode,
- GEN_INT (MEMORY_USE_RO), QImode);
-
- char_rtx = const0_rtx;
- char_mode = insn_operand_mode[(int)icode][2];
- if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
- char_rtx = copy_to_mode_reg (char_mode, char_rtx);
-
- emit_insn (GEN_FCN (icode) (result,
- gen_rtx (MEM, BLKmode, src_rtx),
- char_rtx, GEN_INT (align)));
-
- /* Return the value in the proper mode for this function. */
- if (GET_MODE (result) == value_mode)
- return result;
- else if (target != 0)
- {
- convert_move (target, result, 0);
- return target;
- }
- else
- return convert_to_mode (value_mode, result, 0);
- }
-
- case BUILT_IN_STRCPY:
- /* If not optimizing, call the library function. */
- if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
- break;
-
- if (arglist == 0
- /* Arg could be non-pointer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
- || TREE_CHAIN (arglist) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
- break;
- else
- {
- tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
-
- if (len == 0)
- break;
-
- len = size_binop (PLUS_EXPR, len, integer_one_node);
-
- chainon (arglist, build_tree_list (NULL_TREE, len));
- }
-
- /* Drops in. */
- case BUILT_IN_MEMCPY:
- /* If not optimizing, call the library function. */
- if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
- break;
-
- if (arglist == 0
- /* Arg could be non-pointer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
- || TREE_CHAIN (arglist) == 0
- || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
- != POINTER_TYPE)
- || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
- || (TREE_CODE (TREE_TYPE (TREE_VALUE
- (TREE_CHAIN (TREE_CHAIN (arglist)))))
- != INTEGER_TYPE))
- break;
- else
- {
- tree dest = TREE_VALUE (arglist);
- tree src = TREE_VALUE (TREE_CHAIN (arglist));
- tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
- tree type;
-
- int src_align
- = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- int dest_align
- = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
-
- /* If either SRC or DEST is not a pointer type, don't do
- this operation in-line. */
- if (src_align == 0 || dest_align == 0)
- {
- if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
- TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
- break;
- }
+ }
- dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
- dest_mem = gen_rtx (MEM, BLKmode,
- memory_address (BLKmode, dest_rtx));
- /* There could be a void* cast on top of the object. */
- while (TREE_CODE (dest) == NOP_EXPR)
- dest = TREE_OPERAND (dest, 0);
- type = TREE_TYPE (TREE_TYPE (dest));
- MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
- src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
- src_mem = gen_rtx (MEM, BLKmode,
- memory_address (BLKmode, src_rtx));
- len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
+ return 0;
+}
- /* Just copy the rights of SRC to the rights of DEST. */
- if (flag_check_memory_usage)
- emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
- src_rtx, ptr_mode,
- dest_rtx, ptr_mode,
- len_rtx, TYPE_MODE (sizetype));
+/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
+ way, because it could contain a zero byte in the middle.
+ TREE_STRING_LENGTH is the size of the character array, not the string.
- /* There could be a void* cast on top of the object. */
- while (TREE_CODE (src) == NOP_EXPR)
- src = TREE_OPERAND (src, 0);
- type = TREE_TYPE (TREE_TYPE (src));
- MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
+ Unfortunately, string_constant can't access the values of const char
+ arrays with initializers, so neither can we do so here. */
- /* Copy word part most expediently. */
- dest_addr
- = emit_block_move (dest_mem, src_mem, len_rtx,
- MIN (src_align, dest_align));
+static tree
+c_strlen (src)
+ tree src;
+{
+ tree offset_node;
+ int offset, max;
+ char *ptr;
- if (dest_addr == 0)
- dest_addr = force_operand (dest_rtx, NULL_RTX);
+ src = string_constant (src, &offset_node);
+ if (src == 0)
+ return 0;
+ max = TREE_STRING_LENGTH (src);
+ ptr = TREE_STRING_POINTER (src);
+ if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
+ {
+ /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
+ compute the offset to the following null if we don't know where to
+ start searching for it. */
+ int i;
+ for (i = 0; i < max; i++)
+ if (ptr[i] == 0)
+ return 0;
+ /* We don't know the starting offset, but we do know that the string
+ has no internal zero bytes. We can assume that the offset falls
+ within the bounds of the string; otherwise, the programmer deserves
+ what he gets. Subtract the offset from the length of the string,
+ and return that. */
+ /* This would perhaps not be valid if we were dealing with named
+ arrays in addition to literal string constants. */
+ return size_binop (MINUS_EXPR, size_int (max), offset_node);
+ }
- return dest_addr;
- }
+ /* We have a known offset into the string. Start searching there for
+ a null character. */
+ if (offset_node == 0)
+ offset = 0;
+ else
+ {
+ /* Did we get a long long offset? If so, punt. */
+ if (TREE_INT_CST_HIGH (offset_node) != 0)
+ return 0;
+ offset = TREE_INT_CST_LOW (offset_node);
+ }
+ /* If the offset is known to be out of bounds, warn, and call strlen at
+ runtime. */
+ if (offset < 0 || offset > max)
+ {
+ warning ("offset outside bounds of constant string");
+ return 0;
+ }
+ /* Use strlen to search for the first zero byte. Since any strings
+ constructed with build_string will have nulls appended, we win even
+ if we get handed something like (char[4])"abcd".
- case BUILT_IN_MEMSET:
- /* If not optimizing, call the library function. */
- if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
- break;
+ Since OFFSET is our starting index into the string, no further
+ calculation is needed. */
+ return size_int (strlen (ptr + offset));
+}
- if (arglist == 0
- /* Arg could be non-pointer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
- || TREE_CHAIN (arglist) == 0
- || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
- != INTEGER_TYPE)
- || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
- || (INTEGER_CST
- != (TREE_CODE (TREE_TYPE
- (TREE_VALUE
- (TREE_CHAIN (TREE_CHAIN (arglist))))))))
- break;
- else
- {
- tree dest = TREE_VALUE (arglist);
- tree val = TREE_VALUE (TREE_CHAIN (arglist));
- tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
- tree type;
+rtx
+expand_builtin_return_addr (fndecl_code, count, tem)
+ enum built_in_function fndecl_code;
+ int count;
+ rtx tem;
+{
+ int i;
- int dest_align
- = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- rtx dest_rtx, dest_mem, dest_addr, len_rtx;
+ /* Some machines need special handling before we can access
+ arbitrary frames. For example, on the sparc, we must first flush
+ all register windows to the stack. */
+#ifdef SETUP_FRAME_ADDRESSES
+ if (count > 0)
+ SETUP_FRAME_ADDRESSES ();
+#endif
- /* If DEST is not a pointer type, don't do this
- operation in-line. */
- if (dest_align == 0)
- break;
+ /* On the sparc, the return address is not in the frame, it is in a
+ register. There is no way to access it off of the current frame
+ pointer, but it can be accessed off the previous frame pointer by
+ reading the value from the register window save area. */
+#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
+ if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
+ count--;
+#endif
- /* If VAL is not 0, don't do this operation in-line. */
- if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
- break;
+ /* Scan back COUNT frames to the specified frame. */
+ for (i = 0; i < count; i++)
+ {
+ /* Assume the dynamic chain pointer is in the word that the
+ frame address points to, unless otherwise specified. */
+#ifdef DYNAMIC_CHAIN_ADDRESS
+ tem = DYNAMIC_CHAIN_ADDRESS (tem);
+#endif
+ tem = memory_address (Pmode, tem);
+ tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
+ }
- dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
- dest_mem = gen_rtx (MEM, BLKmode,
- memory_address (BLKmode, dest_rtx));
- len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
+ /* For __builtin_frame_address, return what we've got. */
+ if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
+ return tem;
- /* Just check DST is writable and mark it as readable. */
- if (flag_check_memory_usage)
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
- dest_rtx, ptr_mode,
- len_rtx, TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_WO), QImode);
+ /* For __builtin_return_address, Get the return address from that
+ frame. */
+#ifdef RETURN_ADDR_RTX
+ tem = RETURN_ADDR_RTX (count, tem);
+#else
+ tem = memory_address (Pmode,
+ plus_constant (tem, GET_MODE_SIZE (Pmode)));
+ tem = gen_rtx_MEM (Pmode, tem);
+#endif
+ return tem;
+}
+/* __builtin_setjmp is passed a pointer to an array of five words (not
+ all will be used on all machines). It operates similarly to the C
+ library function of the same name, but is more efficient. Much of
+ the code below (and for longjmp) is copied from the handling of
+ non-local gotos.
- /* There could be a void* cast on top of the object. */
- while (TREE_CODE (dest) == NOP_EXPR)
- dest = TREE_OPERAND (dest, 0);
- type = TREE_TYPE (TREE_TYPE (dest));
- MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
+ NOTE: This is intended for use by GNAT and the exception handling
+ scheme in the compiler and will only work in the method used by
+ them. */
- dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
+rtx
+expand_builtin_setjmp (buf_addr, target, first_label, next_label)
+ rtx buf_addr;
+ rtx target;
+ rtx first_label, next_label;
+{
+ rtx lab1 = gen_label_rtx ();
+ enum machine_mode sa_mode = Pmode, value_mode;
+ rtx stack_save;
+ int i;
- if (dest_addr == 0)
- dest_addr = force_operand (dest_rtx, NULL_RTX);
+ value_mode = TYPE_MODE (integer_type_node);
- return dest_addr;
- }
+#ifdef POINTERS_EXTEND_UNSIGNED
+ buf_addr = convert_memory_address (Pmode, buf_addr);
+#endif
-/* These comparison functions need an instruction that returns an actual
- index. An ordinary compare that just sets the condition codes
- is not enough. */
-#ifdef HAVE_cmpstrsi
- case BUILT_IN_STRCMP:
- /* If not optimizing, call the library function. */
- if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
- break;
+ buf_addr = force_reg (Pmode, buf_addr);
- /* If we need to check memory accesses, call the library function. */
- if (flag_check_memory_usage)
- break;
+ if (target == 0 || GET_CODE (target) != REG
+ || REGNO (target) < FIRST_PSEUDO_REGISTER)
+ target = gen_reg_rtx (value_mode);
- if (arglist == 0
- /* Arg could be non-pointer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
- || TREE_CHAIN (arglist) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
- break;
- else if (!HAVE_cmpstrsi)
- break;
- {
- tree arg1 = TREE_VALUE (arglist);
- tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
- tree offset;
- tree len, len2;
+ emit_queue ();
- len = c_strlen (arg1);
- if (len)
- len = size_binop (PLUS_EXPR, integer_one_node, len);
- len2 = c_strlen (arg2);
- if (len2)
- len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
+ /* We store the frame pointer and the address of lab1 in the buffer
+ and use the rest of it for the stack save area, which is
+ machine-dependent. */
+ emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
+ virtual_stack_vars_rtx);
+ emit_move_insn (validize_mem
+ (gen_rtx_MEM (Pmode,
+ plus_constant (buf_addr,
+ GET_MODE_SIZE (Pmode)))),
+ gen_rtx_LABEL_REF (Pmode, lab1));
- /* If we don't have a constant length for the first, use the length
- of the second, if we know it. We don't require a constant for
- this case; some cost analysis could be done if both are available
- but neither is constant. For now, assume they're equally cheap.
+#ifdef HAVE_save_stack_nonlocal
+ if (HAVE_save_stack_nonlocal)
+ sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
+#endif
- If both strings have constant lengths, use the smaller. This
- could arise if optimization results in strcpy being called with
- two fixed strings, or if the code was machine-generated. We should
- add some code to the `memcmp' handler below to deal with such
- situations, someday. */
- if (!len || TREE_CODE (len) != INTEGER_CST)
- {
- if (len2)
- len = len2;
- else if (len == 0)
- break;
- }
- else if (len2 && TREE_CODE (len2) == INTEGER_CST)
- {
- if (tree_int_cst_lt (len2, len))
- len = len2;
- }
+ stack_save = gen_rtx_MEM (sa_mode,
+ plus_constant (buf_addr,
+ 2 * GET_MODE_SIZE (Pmode)));
+ emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
- chainon (arglist, build_tree_list (NULL_TREE, len));
- }
+ /* If there is further processing to do, do it. */
+#ifdef HAVE_builtin_setjmp_setup
+ if (HAVE_builtin_setjmp_setup)
+ emit_insn (gen_builtin_setjmp_setup (buf_addr));
+#endif
- /* Drops in. */
- case BUILT_IN_MEMCMP:
- /* If not optimizing, call the library function. */
- if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
- break;
+ /* Set TARGET to zero and branch to the first-time-through label. */
+ emit_move_insn (target, const0_rtx);
+ emit_jump_insn (gen_jump (first_label));
+ emit_barrier ();
+ emit_label (lab1);
- /* If we need to check memory accesses, call the library function. */
- if (flag_check_memory_usage)
- break;
+ /* Tell flow about the strange goings on. */
+ current_function_has_nonlocal_label = 1;
- if (arglist == 0
- /* Arg could be non-pointer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
- || TREE_CHAIN (arglist) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
- || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
- break;
- else if (!HAVE_cmpstrsi)
- break;
- {
- tree arg1 = TREE_VALUE (arglist);
- tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
- tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
- rtx result;
+ /* Clobber the FP when we get here, so we have to make sure it's
+ marked as used by this function. */
+ emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
- int arg1_align
- = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- int arg2_align
- = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- enum machine_mode insn_mode
- = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
+ /* Mark the static chain as clobbered here so life information
+ doesn't get messed up for it. */
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
- /* If we don't have POINTER_TYPE, call the function. */
- if (arg1_align == 0 || arg2_align == 0)
- {
- if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
- TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
- break;
- }
+ /* Now put in the code to restore the frame pointer, and argument
+ pointer, if needed. The code below is from expand_end_bindings
+ in stmt.c; see detailed documentation there. */
+#ifdef HAVE_nonlocal_goto
+ if (! HAVE_nonlocal_goto)
+#endif
+ emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
- /* Make a place to write the result of the instruction. */
- result = target;
- if (! (result != 0
- && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
- && REGNO (result) >= FIRST_PSEUDO_REGISTER))
- result = gen_reg_rtx (insn_mode);
+#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+ if (fixed_regs[ARG_POINTER_REGNUM])
+ {
+#ifdef ELIMINABLE_REGS
+ static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
- emit_insn (gen_cmpstrsi (result,
- gen_rtx (MEM, BLKmode,
- expand_expr (arg1, NULL_RTX,
- ptr_mode,
- EXPAND_NORMAL)),
- gen_rtx (MEM, BLKmode,
- expand_expr (arg2, NULL_RTX,
- ptr_mode,
- EXPAND_NORMAL)),
- expand_expr (len, NULL_RTX, VOIDmode, 0),
- GEN_INT (MIN (arg1_align, arg2_align))));
+ for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
+ if (elim_regs[i].from == ARG_POINTER_REGNUM
+ && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
+ break;
- /* Return the value in the proper mode for this function. */
- mode = TYPE_MODE (TREE_TYPE (exp));
- if (GET_MODE (result) == mode)
- return result;
- else if (target != 0)
- {
- convert_move (target, result, 0);
- return target;
- }
- else
- return convert_to_mode (mode, result, 0);
- }
-#else
- case BUILT_IN_STRCMP:
- case BUILT_IN_MEMCMP:
- break;
+ if (i == sizeof elim_regs / sizeof elim_regs [0])
+#endif
+ {
+ /* Now restore our arg pointer from the address at which it
+ was saved in our stack frame.
+ If space hasn't been allocated for it yet, make
+ some now. */
+ if (arg_pointer_save_area == 0)
+ arg_pointer_save_area
+ = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
+ emit_move_insn (virtual_incoming_args_rtx,
+ copy_to_reg (arg_pointer_save_area));
+ }
+ }
+#endif
+
+#ifdef HAVE_builtin_setjmp_receiver
+ if (HAVE_builtin_setjmp_receiver)
+ emit_insn (gen_builtin_setjmp_receiver (lab1));
+ else
+#endif
+#ifdef HAVE_nonlocal_goto_receiver
+ if (HAVE_nonlocal_goto_receiver)
+ emit_insn (gen_nonlocal_goto_receiver ());
+ else
#endif
+ ; /* Nothing */
- case BUILT_IN_SETJMP:
- if (arglist == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
- break;
+ /* Set TARGET, and branch to the next-time-through label. */
+ emit_move_insn (target, gen_lowpart (GET_MODE (target), static_chain_rtx));
+ emit_jump_insn (gen_jump (next_label));
+ emit_barrier ();
- {
- rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
- VOIDmode, 0);
- return expand_builtin_setjmp (buf_addr, target);
- }
+ return target;
+}
- /* __builtin_longjmp is passed a pointer to an array of five words
- and a value, which is a dummy. It's similar to the C library longjmp
- function but works with __builtin_setjmp above. */
- case BUILT_IN_LONGJMP:
- if (arglist == 0 || TREE_CHAIN (arglist) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
- break;
+void
+expand_builtin_longjmp (buf_addr, value)
+ rtx buf_addr, value;
+{
+ rtx fp, lab, stack;
+ enum machine_mode sa_mode;
- {
- tree dummy_id = get_identifier ("__dummy");
- tree dummy_type = build_function_type (void_type_node, NULL_TREE);
- tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
#ifdef POINTERS_EXTEND_UNSIGNED
- rtx buf_addr
- = force_reg (Pmode,
- convert_memory_address
- (Pmode,
- expand_expr (TREE_VALUE (arglist),
- NULL_RTX, VOIDmode, 0)));
-#else
- rtx buf_addr
- = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
- NULL_RTX,
- VOIDmode, 0));
+ buf_addr = convert_memory_address (Pmode, buf_addr);
+#endif
+ buf_addr = force_reg (Pmode, buf_addr);
+
+ /* The value sent by longjmp is not allowed to be zero. Force it
+ to one if so. */
+ if (GET_CODE (value) == CONST_INT)
+ {
+ if (INTVAL (value) == 0)
+ value = const1_rtx;
+ }
+ else
+ {
+ lab = gen_label_rtx ();
+
+ emit_cmp_insn (value, const0_rtx, NE, NULL_RTX, GET_MODE (value), 0, 0);
+ emit_jump_insn (gen_bne (lab));
+ emit_move_insn (value, const1_rtx);
+ emit_label (lab);
+ }
+
+ /* Make sure the value is in the right mode to be copied to the chain. */
+ if (GET_MODE (value) != VOIDmode)
+ value = gen_lowpart (GET_MODE (static_chain_rtx), value);
+
+#ifdef HAVE_builtin_longjmp
+ if (HAVE_builtin_longjmp)
+ {
+ /* Copy the "return value" to the static chain reg. */
+ emit_move_insn (static_chain_rtx, value);
+ emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
+ emit_insn (gen_builtin_longjmp (buf_addr));
+ }
+ else
#endif
- rtx fp = gen_rtx (MEM, Pmode, buf_addr);
- rtx lab = gen_rtx (MEM, Pmode,
- plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
- enum machine_mode sa_mode
+ {
+ fp = gen_rtx_MEM (Pmode, buf_addr);
+ lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
+ GET_MODE_SIZE (Pmode)));
+
#ifdef HAVE_save_stack_nonlocal
- = (HAVE_save_stack_nonlocal
- ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
- : Pmode);
+ sa_mode = (HAVE_save_stack_nonlocal
+ ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
+ : Pmode);
#else
- = Pmode;
+ sa_mode = Pmode;
#endif
- rtx stack = gen_rtx (MEM, sa_mode,
- plus_constant (buf_addr,
- 2 * GET_MODE_SIZE (Pmode)));
-
- DECL_EXTERNAL (dummy_decl) = 1;
- TREE_PUBLIC (dummy_decl) = 1;
- make_decl_rtl (dummy_decl, NULL_PTR, 1);
-
- /* Expand the second expression just for side-effects. */
- expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
- const0_rtx, VOIDmode, 0);
- assemble_external (dummy_decl);
+ stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
+ 2 * GET_MODE_SIZE (Pmode)));
- /* Pick up FP, label, and SP from the block and jump. This code is
- from expand_goto in stmt.c; see there for detailed comments. */
+ /* Pick up FP, label, and SP from the block and jump. This code is
+ from expand_goto in stmt.c; see there for detailed comments. */
#if HAVE_nonlocal_goto
- if (HAVE_nonlocal_goto)
- emit_insn (gen_nonlocal_goto (fp, lab, stack,
- XEXP (DECL_RTL (dummy_decl), 0)));
+ if (HAVE_nonlocal_goto)
+ emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
else
#endif
{
lab = copy_to_reg (lab);
+
+ /* Copy the "return value" to the static chain reg. */
+ emit_move_insn (static_chain_rtx, value);
+
emit_move_insn (hard_frame_pointer_rtx, fp);
emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
- /* Put in the static chain register the address of the dummy
- function. */
- emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
- emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
- emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
- emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
+ emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
+ emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
+ emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
emit_indirect_jump (lab);
}
+ }
+}
- return const0_rtx;
- }
+\f
+/* Expand an expression EXP that calls a built-in function,
+ with result going to TARGET if that's convenient
+ (and in mode MODE if that's convenient).
+ SUBTARGET may be used as the target for computing one of EXP's operands.
+ IGNORE is nonzero if the value is to be ignored. */
- /* Various hooks for the DWARF 2 __throw routine. */
- case BUILT_IN_UNWIND_INIT:
- expand_builtin_unwind_init ();
- return const0_rtx;
- case BUILT_IN_FP:
- return frame_pointer_rtx;
- case BUILT_IN_SP:
- return stack_pointer_rtx;
-#ifdef DWARF2_UNWIND_INFO
- case BUILT_IN_DWARF_FP_REGNUM:
- return expand_builtin_dwarf_fp_regnum ();
- case BUILT_IN_DWARF_REG_SIZE:
- return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
-#endif
- case BUILT_IN_FROB_RETURN_ADDR:
- return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
- case BUILT_IN_EXTRACT_RETURN_ADDR:
- return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
- case BUILT_IN_SET_RETURN_ADDR_REG:
- expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
- return const0_rtx;
- case BUILT_IN_EH_STUB:
- return expand_builtin_eh_stub ();
- case BUILT_IN_SET_EH_REGS:
- expand_builtin_set_eh_regs (TREE_VALUE (arglist),
- TREE_VALUE (TREE_CHAIN (arglist)));
- return const0_rtx;
+#define CALLED_AS_BUILT_IN(NODE) \
+ (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
- default: /* just do library call, if unknown builtin */
- error ("built-in function `%s' not currently supported",
- IDENTIFIER_POINTER (DECL_NAME (fndecl)));
- }
+static rtx
+expand_builtin (exp, target, subtarget, mode, ignore)
+ tree exp;
+ rtx target;
+ rtx subtarget;
+ enum machine_mode mode;
+ int ignore;
+{
+ tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
+ tree arglist = TREE_OPERAND (exp, 1);
+ rtx op0;
+ rtx lab1, insns;
+ enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
+ optab builtin_optab;
- /* The switch statement above can drop through to cause the function
- to be called normally. */
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ case BUILT_IN_ABS:
+ case BUILT_IN_LABS:
+ case BUILT_IN_FABS:
+ /* build_function_call changes these into ABS_EXPR. */
+ abort ();
- return expand_call (exp, target, ignore);
-}
-\f
-/* Built-in functions to perform an untyped call and return. */
+ case BUILT_IN_SIN:
+ case BUILT_IN_COS:
+ /* Treat these like sqrt, but only if the user asks for them. */
+ if (! flag_fast_math)
+ break;
+ case BUILT_IN_FSQRT:
+ /* If not optimizing, call the library function. */
+ if (! optimize)
+ break;
-/* For each register that may be used for calling a function, this
- gives a mode used to copy the register's value. VOIDmode indicates
- the register is not used for calling a function. If the machine
- has register windows, this gives only the outbound registers.
- INCOMING_REGNO gives the corresponding inbound register. */
-static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
+ if (arglist == 0
+ /* Arg could be wrong type if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
+ break;
-/* For each register that may be used for returning values, this gives
- a mode used to copy the register's value. VOIDmode indicates the
- register is not used for returning values. If the machine has
- register windows, this gives only the outbound registers.
- INCOMING_REGNO gives the corresponding inbound register. */
-static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
+ /* Stabilize and compute the argument. */
+ if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
+ && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
+ {
+ exp = copy_node (exp);
+ arglist = copy_node (arglist);
+ TREE_OPERAND (exp, 1) = arglist;
+ TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
+ }
+ op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
-/* For each register that may be used for calling a function, this
- gives the offset of that register into the block returned by
- __builtin_apply_args. 0 indicates that the register is not
- used for calling a function. */
-static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
+ /* Make a suitable register to place result in. */
+ target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
-/* Return the offset of register REGNO into the block returned by
- __builtin_apply_args. This is not declared static, since it is
- needed in objc-act.c. */
+ emit_queue ();
+ start_sequence ();
-int
-apply_args_register_offset (regno)
- int regno;
-{
- apply_args_size ();
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ case BUILT_IN_SIN:
+ builtin_optab = sin_optab; break;
+ case BUILT_IN_COS:
+ builtin_optab = cos_optab; break;
+ case BUILT_IN_FSQRT:
+ builtin_optab = sqrt_optab; break;
+ default:
+ abort ();
+ }
- /* Arguments are always put in outgoing registers (in the argument
- block) if such make sense. */
-#ifdef OUTGOING_REGNO
- regno = OUTGOING_REGNO(regno);
+ /* Compute into TARGET.
+ Set TARGET to wherever the result comes back. */
+ target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
+ builtin_optab, op0, target, 0);
+
+ /* If we were unable to expand via the builtin, stop the
+ sequence (without outputting the insns) and break, causing
+ a call to the library function. */
+ if (target == 0)
+ {
+ end_sequence ();
+ break;
+ }
+
+ /* Check the results by default. But if flag_fast_math is turned on,
+ then assume sqrt will always be called with valid arguments. */
+
+ if (! flag_fast_math)
+ {
+ /* Don't define the builtin FP instructions
+ if your machine is not IEEE. */
+ if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
+ abort ();
+
+ lab1 = gen_label_rtx ();
+
+ /* Test the result; if it is NaN, set errno=EDOM because
+ the argument was not in the domain. */
+ emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
+ emit_jump_insn (gen_beq (lab1));
+
+#ifdef TARGET_EDOM
+ {
+#ifdef GEN_ERRNO_RTX
+ rtx errno_rtx = GEN_ERRNO_RTX;
+#else
+ rtx errno_rtx
+ = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
- return apply_args_reg_offset[regno];
-}
-/* Return the size required for the block returned by __builtin_apply_args,
- and initialize apply_args_mode. */
+ emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
+ }
+#else
+ /* We can't set errno=EDOM directly; let the library call do it.
+ Pop the arguments right away in case the call gets deleted. */
+ NO_DEFER_POP;
+ expand_call (exp, target, 0);
+ OK_DEFER_POP;
+#endif
-static int
-apply_args_size ()
-{
- static int size = -1;
- int align, regno;
- enum machine_mode mode;
+ emit_label (lab1);
+ }
+
+ /* Output the entire sequence. */
+ insns = get_insns ();
+ end_sequence ();
+ emit_insns (insns);
+
+ return target;
+
+ case BUILT_IN_FMOD:
+ break;
+
+ /* __builtin_apply_args returns block of memory allocated on
+ the stack into which is stored the arg pointer, structure
+ value address, static chain, and all the registers that might
+ possibly be used in performing a function call. The code is
+ moved to the start of the function so the incoming values are
+ saved. */
+ case BUILT_IN_APPLY_ARGS:
+ /* Don't do __builtin_apply_args more than once in a function.
+ Save the result of the first call and reuse it. */
+ if (apply_args_value != 0)
+ return apply_args_value;
+ {
+ /* When this function is called, it means that registers must be
+ saved on entry to this function. So we migrate the
+ call to the first insn of this function. */
+ rtx temp;
+ rtx seq;
- /* The values computed by this function never change. */
- if (size < 0)
- {
- /* The first value is the incoming arg-pointer. */
- size = GET_MODE_SIZE (Pmode);
+ start_sequence ();
+ temp = expand_builtin_apply_args ();
+ seq = get_insns ();
+ end_sequence ();
- /* The second value is the structure value address unless this is
- passed as an "invisible" first argument. */
- if (struct_value_rtx)
- size += GET_MODE_SIZE (Pmode);
+ apply_args_value = temp;
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if (FUNCTION_ARG_REGNO_P (regno))
- {
- /* Search for the proper mode for copying this register's
- value. I'm not sure this is right, but it works so far. */
- enum machine_mode best_mode = VOIDmode;
+ /* Put the sequence after the NOTE that starts the function.
+ If this is inside a SEQUENCE, make the outer-level insn
+ chain current, so the code is placed at the start of the
+ function. */
+ push_topmost_sequence ();
+ emit_insns_before (seq, NEXT_INSN (get_insns ()));
+ pop_topmost_sequence ();
+ return temp;
+ }
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode)
- && HARD_REGNO_NREGS (regno, mode) == 1)
- best_mode = mode;
+ /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
+ FUNCTION with a copy of the parameters described by
+ ARGUMENTS, and ARGSIZE. It returns a block of memory
+ allocated on the stack into which is stored all the registers
+ that might possibly be used for returning the result of a
+ function. ARGUMENTS is the value returned by
+ __builtin_apply_args. ARGSIZE is the number of bytes of
+ arguments that must be copied. ??? How should this value be
+ computed? We'll also need a safe worst case value for varargs
+ functions. */
+ case BUILT_IN_APPLY:
+ if (arglist == 0
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
+ || TREE_CHAIN (arglist) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
+ || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
+ return const0_rtx;
+ else
+ {
+ int i;
+ tree t;
+ rtx ops[3];
- if (best_mode == VOIDmode)
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode)
- && (mov_optab->handlers[(int) mode].insn_code
- != CODE_FOR_nothing))
- best_mode = mode;
+ for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
+ ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
- mode = best_mode;
- if (mode == VOIDmode)
- abort ();
+ return expand_builtin_apply (ops[0], ops[1], ops[2]);
+ }
- align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
- if (size % align != 0)
- size = CEIL (size, align) * align;
- apply_args_reg_offset[regno] = size;
- size += GET_MODE_SIZE (mode);
- apply_args_mode[regno] = mode;
- }
- else
- {
- apply_args_mode[regno] = VOIDmode;
- apply_args_reg_offset[regno] = 0;
- }
- }
- return size;
-}
+ /* __builtin_return (RESULT) causes the function to return the
+ value described by RESULT. RESULT is address of the block of
+ memory returned by __builtin_apply. */
+ case BUILT_IN_RETURN:
+ if (arglist
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
+ expand_builtin_return (expand_expr (TREE_VALUE (arglist),
+ NULL_RTX, VOIDmode, 0));
+ return const0_rtx;
-/* Return the size required for the block returned by __builtin_apply,
- and initialize apply_result_mode. */
+ case BUILT_IN_SAVEREGS:
+ /* Don't do __builtin_saveregs more than once in a function.
+ Save the result of the first call and reuse it. */
+ if (saveregs_value != 0)
+ return saveregs_value;
+ {
+ /* When this function is called, it means that registers must be
+ saved on entry to this function. So we migrate the
+ call to the first insn of this function. */
+ rtx temp;
+ rtx seq;
-static int
-apply_result_size ()
-{
- static int size = -1;
- int align, regno;
- enum machine_mode mode;
+ /* Now really call the function. `expand_call' does not call
+ expand_builtin, so there is no danger of infinite recursion here. */
+ start_sequence ();
- /* The values computed by this function never change. */
- if (size < 0)
- {
- size = 0;
+#ifdef EXPAND_BUILTIN_SAVEREGS
+ /* Do whatever the machine needs done in this case. */
+ temp = EXPAND_BUILTIN_SAVEREGS (arglist);
+#else
+ /* The register where the function returns its value
+ is likely to have something else in it, such as an argument.
+ So preserve that register around the call. */
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if (FUNCTION_VALUE_REGNO_P (regno))
+ if (value_mode != VOIDmode)
{
- /* Search for the proper mode for copying this register's
- value. I'm not sure this is right, but it works so far. */
- enum machine_mode best_mode = VOIDmode;
-
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- mode != TImode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode))
- best_mode = mode;
-
- if (best_mode == VOIDmode)
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode)
- && (mov_optab->handlers[(int) mode].insn_code
- != CODE_FOR_nothing))
- best_mode = mode;
-
- mode = best_mode;
- if (mode == VOIDmode)
- abort ();
+ rtx valreg = hard_libcall_value (value_mode);
+ rtx saved_valreg = gen_reg_rtx (value_mode);
- align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
- if (size % align != 0)
- size = CEIL (size, align) * align;
- size += GET_MODE_SIZE (mode);
- apply_result_mode[regno] = mode;
+ emit_move_insn (saved_valreg, valreg);
+ temp = expand_call (exp, target, ignore);
+ emit_move_insn (valreg, saved_valreg);
}
else
- apply_result_mode[regno] = VOIDmode;
-
- /* Allow targets that use untyped_call and untyped_return to override
- the size so that machine-specific information can be stored here. */
-#ifdef APPLY_RESULT_SIZE
- size = APPLY_RESULT_SIZE;
+ /* Generate the call, putting the value in a pseudo. */
+ temp = expand_call (exp, target, ignore);
#endif
- }
- return size;
-}
-
-#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
-/* Create a vector describing the result block RESULT. If SAVEP is true,
- the result block is used to save the values; otherwise it is used to
- restore the values. */
-static rtx
-result_vector (savep, result)
- int savep;
- rtx result;
-{
- int regno, size, align, nelts;
- enum machine_mode mode;
- rtx reg, mem;
- rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
-
- size = nelts = 0;
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if ((mode = apply_result_mode[regno]) != VOIDmode)
- {
- align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
- if (size % align != 0)
- size = CEIL (size, align) * align;
- reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
- mem = change_address (result, mode,
- plus_constant (XEXP (result, 0), size));
- savevec[nelts++] = (savep
- ? gen_rtx (SET, VOIDmode, mem, reg)
- : gen_rtx (SET, VOIDmode, reg, mem));
- size += GET_MODE_SIZE (mode);
- }
- return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
-}
-#endif /* HAVE_untyped_call or HAVE_untyped_return */
+ seq = get_insns ();
+ end_sequence ();
-/* Save the state required to perform an untyped call with the same
- arguments as were passed to the current function. */
+ saveregs_value = temp;
-static rtx
-expand_builtin_apply_args ()
-{
- rtx registers;
- int size, align, regno;
- enum machine_mode mode;
+ /* Put the sequence after the NOTE that starts the function.
+ If this is inside a SEQUENCE, make the outer-level insn
+ chain current, so the code is placed at the start of the
+ function. */
+ push_topmost_sequence ();
+ emit_insns_before (seq, NEXT_INSN (get_insns ()));
+ pop_topmost_sequence ();
+ return temp;
+ }
- /* Create a block where the arg-pointer, structure value address,
- and argument registers can be saved. */
- registers = assign_stack_local (BLKmode, apply_args_size (), -1);
+ /* __builtin_args_info (N) returns word N of the arg space info
+ for the current function. The number and meanings of words
+ is controlled by the definition of CUMULATIVE_ARGS. */
+ case BUILT_IN_ARGS_INFO:
+ {
+ int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
+ int *word_ptr = (int *) &current_function_args_info;
+#if 0
+ /* These are used by the code below that is if 0'ed away */
+ int i;
+ tree type, elts, result;
+#endif
- /* Walk past the arg-pointer and structure value address. */
- size = GET_MODE_SIZE (Pmode);
- if (struct_value_rtx)
- size += GET_MODE_SIZE (Pmode);
+ if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
+ fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
+ __FILE__, __LINE__);
- /* Save each register used in calling a function to the block. */
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if ((mode = apply_args_mode[regno]) != VOIDmode)
- {
- rtx tem;
+ if (arglist != 0)
+ {
+ tree arg = TREE_VALUE (arglist);
+ if (TREE_CODE (arg) != INTEGER_CST)
+ error ("argument of `__builtin_args_info' must be constant");
+ else
+ {
+ int wordnum = TREE_INT_CST_LOW (arg);
- align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
- if (size % align != 0)
- size = CEIL (size, align) * align;
+ if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
+ error ("argument of `__builtin_args_info' out of range");
+ else
+ return GEN_INT (word_ptr[wordnum]);
+ }
+ }
+ else
+ error ("missing argument in `__builtin_args_info'");
- tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
+ return const0_rtx;
-#ifdef STACK_REGS
- /* For reg-stack.c's stack register household.
- Compare with a similar piece of code in function.c. */
+#if 0
+ for (i = 0; i < nwords; i++)
+ elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
- emit_insn (gen_rtx (USE, mode, tem));
+ type = build_array_type (integer_type_node,
+ build_index_type (build_int_2 (nwords, 0)));
+ result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
+ TREE_CONSTANT (result) = 1;
+ TREE_STATIC (result) = 1;
+ result = build (INDIRECT_REF, build_pointer_type (type), result);
+ TREE_CONSTANT (result) = 1;
+ return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
#endif
-
- emit_move_insn (change_address (registers, mode,
- plus_constant (XEXP (registers, 0),
- size)),
- tem);
- size += GET_MODE_SIZE (mode);
}
- /* Save the arg pointer to the block. */
- emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
- copy_to_reg (virtual_incoming_args_rtx));
- size = GET_MODE_SIZE (Pmode);
-
- /* Save the structure value address unless this is passed as an
- "invisible" first argument. */
- if (struct_value_incoming_rtx)
- {
- emit_move_insn (change_address (registers, Pmode,
- plus_constant (XEXP (registers, 0),
- size)),
- copy_to_reg (struct_value_incoming_rtx));
- size += GET_MODE_SIZE (Pmode);
- }
-
- /* Return the address of the block. */
- return copy_addr_to_reg (XEXP (registers, 0));
-}
+ /* Return the address of the first anonymous stack arg. */
+ case BUILT_IN_NEXT_ARG:
+ {
+ tree fntype = TREE_TYPE (current_function_decl);
-/* Perform an untyped call and save the state required to perform an
- untyped return of whatever value was returned by the given function. */
+ if ((TYPE_ARG_TYPES (fntype) == 0
+ || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
+ == void_type_node))
+ && ! current_function_varargs)
+ {
+ error ("`va_start' used in function with fixed args");
+ return const0_rtx;
+ }
-static rtx
-expand_builtin_apply (function, arguments, argsize)
- rtx function, arguments, argsize;
-{
- int size, align, regno;
- enum machine_mode mode;
- rtx incoming_args, result, reg, dest, call_insn;
- rtx old_stack_level = 0;
- rtx call_fusage = 0;
+ if (arglist)
+ {
+ tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
+ tree arg = TREE_VALUE (arglist);
- /* Create a block where the return registers can be saved. */
- result = assign_stack_local (BLKmode, apply_result_size (), -1);
+ /* Strip off all nops for the sake of the comparison. This
+ is not quite the same as STRIP_NOPS. It does more.
+ We must also strip off INDIRECT_EXPR for C++ reference
+ parameters. */
+ while (TREE_CODE (arg) == NOP_EXPR
+ || TREE_CODE (arg) == CONVERT_EXPR
+ || TREE_CODE (arg) == NON_LVALUE_EXPR
+ || TREE_CODE (arg) == INDIRECT_REF)
+ arg = TREE_OPERAND (arg, 0);
+ if (arg != last_parm)
+ warning ("second parameter of `va_start' not last named argument");
+ }
+ else if (! current_function_varargs)
+ /* Evidently an out of date version of <stdarg.h>; can't validate
+ va_start's second argument, but can still work as intended. */
+ warning ("`__builtin_next_arg' called without an argument");
+ }
- /* ??? The argsize value should be adjusted here. */
+ return expand_binop (Pmode, add_optab,
+ current_function_internal_arg_pointer,
+ current_function_arg_offset_rtx,
+ NULL_RTX, 0, OPTAB_LIB_WIDEN);
- /* Fetch the arg pointer from the ARGUMENTS block. */
- incoming_args = gen_reg_rtx (Pmode);
- emit_move_insn (incoming_args,
- gen_rtx (MEM, Pmode, arguments));
-#ifndef STACK_GROWS_DOWNWARD
- incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
- incoming_args, 0, OPTAB_LIB_WIDEN);
-#endif
+ case BUILT_IN_CLASSIFY_TYPE:
+ if (arglist != 0)
+ {
+ tree type = TREE_TYPE (TREE_VALUE (arglist));
+ enum tree_code code = TREE_CODE (type);
+ if (code == VOID_TYPE)
+ return GEN_INT (void_type_class);
+ if (code == INTEGER_TYPE)
+ return GEN_INT (integer_type_class);
+ if (code == CHAR_TYPE)
+ return GEN_INT (char_type_class);
+ if (code == ENUMERAL_TYPE)
+ return GEN_INT (enumeral_type_class);
+ if (code == BOOLEAN_TYPE)
+ return GEN_INT (boolean_type_class);
+ if (code == POINTER_TYPE)
+ return GEN_INT (pointer_type_class);
+ if (code == REFERENCE_TYPE)
+ return GEN_INT (reference_type_class);
+ if (code == OFFSET_TYPE)
+ return GEN_INT (offset_type_class);
+ if (code == REAL_TYPE)
+ return GEN_INT (real_type_class);
+ if (code == COMPLEX_TYPE)
+ return GEN_INT (complex_type_class);
+ if (code == FUNCTION_TYPE)
+ return GEN_INT (function_type_class);
+ if (code == METHOD_TYPE)
+ return GEN_INT (method_type_class);
+ if (code == RECORD_TYPE)
+ return GEN_INT (record_type_class);
+ if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
+ return GEN_INT (union_type_class);
+ if (code == ARRAY_TYPE)
+ {
+ if (TYPE_STRING_FLAG (type))
+ return GEN_INT (string_type_class);
+ else
+ return GEN_INT (array_type_class);
+ }
+ if (code == SET_TYPE)
+ return GEN_INT (set_type_class);
+ if (code == FILE_TYPE)
+ return GEN_INT (file_type_class);
+ if (code == LANG_TYPE)
+ return GEN_INT (lang_type_class);
+ }
+ return GEN_INT (no_type_class);
- /* Perform postincrements before actually calling the function. */
- emit_queue ();
+ case BUILT_IN_CONSTANT_P:
+ if (arglist == 0)
+ return const0_rtx;
+ else
+ {
+ tree arg = TREE_VALUE (arglist);
- /* Push a new argument block and copy the arguments. */
- do_pending_stack_adjust ();
- emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
+ STRIP_NOPS (arg);
+ return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
+ || (TREE_CODE (arg) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
+ ? const1_rtx : const0_rtx);
+ }
- /* Push a block of memory onto the stack to store the memory arguments.
- Save the address in a register, and copy the memory arguments. ??? I
- haven't figured out how the calling convention macros effect this,
- but it's likely that the source and/or destination addresses in
- the block copy will need updating in machine specific ways. */
- dest = allocate_dynamic_stack_space (argsize, 0, 0);
- emit_block_move (gen_rtx (MEM, BLKmode, dest),
- gen_rtx (MEM, BLKmode, incoming_args),
- argsize,
- PARM_BOUNDARY / BITS_PER_UNIT);
+ case BUILT_IN_FRAME_ADDRESS:
+ /* The argument must be a nonnegative integer constant.
+ It counts the number of frames to scan up the stack.
+ The value is the address of that frame. */
+ case BUILT_IN_RETURN_ADDRESS:
+ /* The argument must be a nonnegative integer constant.
+ It counts the number of frames to scan up the stack.
+ The value is the return address saved in that frame. */
+ if (arglist == 0)
+ /* Warning about missing arg was already issued. */
+ return const0_rtx;
+ else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
+ || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
+ {
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
+ error ("invalid arg to `__builtin_frame_address'");
+ else
+ error ("invalid arg to `__builtin_return_address'");
+ return const0_rtx;
+ }
+ else
+ {
+ rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
+ TREE_INT_CST_LOW (TREE_VALUE (arglist)),
+ hard_frame_pointer_rtx);
- /* Refer to the argument block. */
- apply_args_size ();
- arguments = gen_rtx (MEM, BLKmode, arguments);
+ /* Some ports cannot access arbitrary stack frames. */
+ if (tem == NULL)
+ {
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
+ warning ("unsupported arg to `__builtin_frame_address'");
+ else
+ warning ("unsupported arg to `__builtin_return_address'");
+ return const0_rtx;
+ }
- /* Walk past the arg-pointer and structure value address. */
- size = GET_MODE_SIZE (Pmode);
- if (struct_value_rtx)
- size += GET_MODE_SIZE (Pmode);
+ /* For __builtin_frame_address, return what we've got. */
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
+ return tem;
- /* Restore each of the registers previously saved. Make USE insns
- for each of these registers for use in making the call. */
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if ((mode = apply_args_mode[regno]) != VOIDmode)
- {
- align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
- if (size % align != 0)
- size = CEIL (size, align) * align;
- reg = gen_rtx (REG, mode, regno);
- emit_move_insn (reg,
- change_address (arguments, mode,
- plus_constant (XEXP (arguments, 0),
- size)));
+ if (GET_CODE (tem) != REG)
+ tem = copy_to_reg (tem);
+ return tem;
+ }
- use_reg (&call_fusage, reg);
- size += GET_MODE_SIZE (mode);
- }
+ /* Returns the address of the area where the structure is returned.
+ 0 otherwise. */
+ case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
+ if (arglist != 0
+ || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
+ || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
+ return const0_rtx;
+ else
+ return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
- /* Restore the structure value address unless this is passed as an
- "invisible" first argument. */
- size = GET_MODE_SIZE (Pmode);
- if (struct_value_rtx)
- {
- rtx value = gen_reg_rtx (Pmode);
- emit_move_insn (value,
- change_address (arguments, Pmode,
- plus_constant (XEXP (arguments, 0),
- size)));
- emit_move_insn (struct_value_rtx, value);
- if (GET_CODE (struct_value_rtx) == REG)
- use_reg (&call_fusage, struct_value_rtx);
- size += GET_MODE_SIZE (Pmode);
- }
+ case BUILT_IN_ALLOCA:
+ if (arglist == 0
+ /* Arg could be non-integer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
+ break;
- /* All arguments and registers used for the call are set up by now! */
- function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
+ /* Compute the argument. */
+ op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
- /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
- and we don't want to load it into a register as an optimization,
- because prepare_call_address already did it if it should be done. */
- if (GET_CODE (function) != SYMBOL_REF)
- function = memory_address (FUNCTION_MODE, function);
+ /* Allocate the desired space. */
+ return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
- /* Generate the actual call instruction and save the return value. */
-#ifdef HAVE_untyped_call
- if (HAVE_untyped_call)
- emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
- result, result_vector (1, result)));
- else
-#endif
-#ifdef HAVE_call_value
- if (HAVE_call_value)
- {
- rtx valreg = 0;
+ case BUILT_IN_FFS:
+ /* If not optimizing, call the library function. */
+ if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
+ break;
- /* Locate the unique return register. It is not possible to
- express a call that sets more than one return register using
- call_value; use untyped_call for that. In fact, untyped_call
- only needs to save the return registers in the given block. */
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if ((mode = apply_result_mode[regno]) != VOIDmode)
- {
- if (valreg)
- abort (); /* HAVE_untyped_call required. */
- valreg = gen_rtx (REG, mode, regno);
- }
+ if (arglist == 0
+ /* Arg could be non-integer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
+ break;
- emit_call_insn (gen_call_value (valreg,
- gen_rtx (MEM, FUNCTION_MODE, function),
- const0_rtx, NULL_RTX, const0_rtx));
+ /* Compute the argument. */
+ op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
+ /* Compute ffs, into TARGET if possible.
+ Set TARGET to wherever the result comes back. */
+ target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
+ ffs_optab, op0, target, 1);
+ if (target == 0)
+ abort ();
+ return target;
- emit_move_insn (change_address (result, GET_MODE (valreg),
- XEXP (result, 0)),
- valreg);
- }
- else
-#endif
- abort ();
+ case BUILT_IN_STRLEN:
+ /* If not optimizing, call the library function. */
+ if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
+ break;
- /* Find the CALL insn we just emitted. */
- for (call_insn = get_last_insn ();
- call_insn && GET_CODE (call_insn) != CALL_INSN;
- call_insn = PREV_INSN (call_insn))
- ;
+ if (arglist == 0
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
+ break;
+ else
+ {
+ tree src = TREE_VALUE (arglist);
+ tree len = c_strlen (src);
- if (! call_insn)
- abort ();
+ int align
+ = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- /* Put the register usage information on the CALL. If there is already
- some usage information, put ours at the end. */
- if (CALL_INSN_FUNCTION_USAGE (call_insn))
- {
- rtx link;
+ rtx result, src_rtx, char_rtx;
+ enum machine_mode insn_mode = value_mode, char_mode;
+ enum insn_code icode;
- for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
- link = XEXP (link, 1))
- ;
+ /* If the length is known, just return it. */
+ if (len != 0)
+ return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
- XEXP (link, 1) = call_fusage;
- }
- else
- CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
+ /* If SRC is not a pointer type, don't do this operation inline. */
+ if (align == 0)
+ break;
- /* Restore the stack. */
- emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
+ /* Call a function if we can't compute strlen in the right mode. */
- /* Return the address of the result block. */
- return copy_addr_to_reg (XEXP (result, 0));
-}
+ while (insn_mode != VOIDmode)
+ {
+ icode = strlen_optab->handlers[(int) insn_mode].insn_code;
+ if (icode != CODE_FOR_nothing)
+ break;
-/* Perform an untyped return. */
+ insn_mode = GET_MODE_WIDER_MODE (insn_mode);
+ }
+ if (insn_mode == VOIDmode)
+ break;
-static void
-expand_builtin_return (result)
- rtx result;
-{
- int size, align, regno;
- enum machine_mode mode;
- rtx reg;
- rtx call_fusage = 0;
+ /* Make a place to write the result of the instruction. */
+ result = target;
+ if (! (result != 0
+ && GET_CODE (result) == REG
+ && GET_MODE (result) == insn_mode
+ && REGNO (result) >= FIRST_PSEUDO_REGISTER))
+ result = gen_reg_rtx (insn_mode);
- apply_result_size ();
- result = gen_rtx (MEM, BLKmode, result);
+ /* Make sure the operands are acceptable to the predicates. */
-#ifdef HAVE_untyped_return
- if (HAVE_untyped_return)
- {
- emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
- emit_barrier ();
- return;
- }
-#endif
+ if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
+ result = gen_reg_rtx (insn_mode);
+ src_rtx = memory_address (BLKmode,
+ expand_expr (src, NULL_RTX, ptr_mode,
+ EXPAND_NORMAL));
- /* Restore the return value and note that each value is used. */
- size = 0;
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if ((mode = apply_result_mode[regno]) != VOIDmode)
- {
- align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
- if (size % align != 0)
- size = CEIL (size, align) * align;
- reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
- emit_move_insn (reg,
- change_address (result, mode,
- plus_constant (XEXP (result, 0),
- size)));
+ if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
+ src_rtx = copy_to_mode_reg (Pmode, src_rtx);
- push_to_sequence (call_fusage);
- emit_insn (gen_rtx (USE, VOIDmode, reg));
- call_fusage = get_insns ();
- end_sequence ();
- size += GET_MODE_SIZE (mode);
- }
+ /* Check the string is readable and has an end. */
+ if (flag_check_memory_usage)
+ emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
+ src_rtx, ptr_mode,
+ GEN_INT (MEMORY_USE_RO),
+ TYPE_MODE (integer_type_node));
- /* Put the USE insns before the return. */
- emit_insns (call_fusage);
+ char_rtx = const0_rtx;
+ char_mode = insn_operand_mode[(int)icode][2];
+ if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
+ char_rtx = copy_to_mode_reg (char_mode, char_rtx);
- /* Return whatever values was restored by jumping directly to the end
- of the function. */
- expand_null_return ();
-}
-\f
-/* Expand code for a post- or pre- increment or decrement
- and return the RTX for the result.
- POST is 1 for postinc/decrements and 0 for preinc/decrements. */
+ emit_insn (GEN_FCN (icode) (result,
+ gen_rtx_MEM (BLKmode, src_rtx),
+ char_rtx, GEN_INT (align)));
-static rtx
-expand_increment (exp, post, ignore)
- register tree exp;
- int post, ignore;
-{
- register rtx op0, op1;
- register rtx temp, value;
- register tree incremented = TREE_OPERAND (exp, 0);
- optab this_optab = add_optab;
- int icode;
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
- int op0_is_copy = 0;
- int single_insn = 0;
- /* 1 means we can't store into OP0 directly,
- because it is a subreg narrower than a word,
- and we don't dare clobber the rest of the word. */
- int bad_subreg = 0;
+ /* Return the value in the proper mode for this function. */
+ if (GET_MODE (result) == value_mode)
+ return result;
+ else if (target != 0)
+ {
+ convert_move (target, result, 0);
+ return target;
+ }
+ else
+ return convert_to_mode (value_mode, result, 0);
+ }
- if (output_bytecode)
- {
- bc_expand_expr (exp);
- return NULL_RTX;
- }
+ case BUILT_IN_STRCPY:
+ /* If not optimizing, call the library function. */
+ if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
+ break;
- /* Stabilize any component ref that might need to be
- evaluated more than once below. */
- if (!post
- || TREE_CODE (incremented) == BIT_FIELD_REF
- || (TREE_CODE (incremented) == COMPONENT_REF
- && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
- || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
- incremented = stabilize_reference (incremented);
- /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
- ones into save exprs so that they don't accidentally get evaluated
- more than once by the code below. */
- if (TREE_CODE (incremented) == PREINCREMENT_EXPR
- || TREE_CODE (incremented) == PREDECREMENT_EXPR)
- incremented = save_expr (incremented);
+ if (arglist == 0
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
+ || TREE_CHAIN (arglist) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
+ break;
+ else
+ {
+ tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
- /* Compute the operands as RTX.
- Note whether OP0 is the actual lvalue or a copy of it:
- I believe it is a copy iff it is a register or subreg
- and insns were generated in computing it. */
+ if (len == 0)
+ break;
- temp = get_last_insn ();
- op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
+ len = size_binop (PLUS_EXPR, len, integer_one_node);
- /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
- in place but instead must do sign- or zero-extension during assignment,
- so we copy it into a new register and let the code below use it as
- a copy.
+ chainon (arglist, build_tree_list (NULL_TREE, len));
+ }
- Note that we can safely modify this SUBREG since it is know not to be
- shared (it was made by the expand_expr call above). */
+ /* Drops in. */
+ case BUILT_IN_MEMCPY:
+ /* If not optimizing, call the library function. */
+ if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
+ break;
- if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
- {
- if (post)
- SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
- else
- bad_subreg = 1;
- }
- else if (GET_CODE (op0) == SUBREG
- && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
- {
- /* We cannot increment this SUBREG in place. If we are
- post-incrementing, get a copy of the old value. Otherwise,
- just mark that we cannot increment in place. */
- if (post)
- op0 = copy_to_reg (op0);
+ if (arglist == 0
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
+ || TREE_CHAIN (arglist) == 0
+ || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
+ != POINTER_TYPE)
+ || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
+ || (TREE_CODE (TREE_TYPE (TREE_VALUE
+ (TREE_CHAIN (TREE_CHAIN (arglist)))))
+ != INTEGER_TYPE))
+ break;
else
- bad_subreg = 1;
- }
+ {
+ tree dest = TREE_VALUE (arglist);
+ tree src = TREE_VALUE (TREE_CHAIN (arglist));
+ tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
+ tree type;
- op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
- && temp != get_last_insn ());
- op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
- EXPAND_MEMORY_USE_BAD);
+ int src_align
+ = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ int dest_align
+ = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
- /* Decide whether incrementing or decrementing. */
- if (TREE_CODE (exp) == POSTDECREMENT_EXPR
- || TREE_CODE (exp) == PREDECREMENT_EXPR)
- this_optab = sub_optab;
+ /* If either SRC or DEST is not a pointer type, don't do
+ this operation in-line. */
+ if (src_align == 0 || dest_align == 0)
+ {
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
+ TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
+ break;
+ }
- /* Convert decrement by a constant into a negative increment. */
- if (this_optab == sub_optab
- && GET_CODE (op1) == CONST_INT)
- {
- op1 = GEN_INT (- INTVAL (op1));
- this_optab = add_optab;
- }
+ dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
+ dest_mem = gen_rtx_MEM (BLKmode,
+ memory_address (BLKmode, dest_rtx));
+ /* There could be a void* cast on top of the object. */
+ while (TREE_CODE (dest) == NOP_EXPR)
+ dest = TREE_OPERAND (dest, 0);
+ type = TREE_TYPE (TREE_TYPE (dest));
+ MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
+ src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
+ src_mem = gen_rtx_MEM (BLKmode,
+ memory_address (BLKmode, src_rtx));
+ len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
- /* For a preincrement, see if we can do this with a single instruction. */
- if (!post)
- {
- icode = (int) this_optab->handlers[(int) mode].insn_code;
- if (icode != (int) CODE_FOR_nothing
- /* Make sure that OP0 is valid for operands 0 and 1
- of the insn we want to queue. */
- && (*insn_operand_predicate[icode][0]) (op0, mode)
- && (*insn_operand_predicate[icode][1]) (op0, mode)
- && (*insn_operand_predicate[icode][2]) (op1, mode))
- single_insn = 1;
- }
+ /* Just copy the rights of SRC to the rights of DEST. */
+ if (flag_check_memory_usage)
+ emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
+ dest_rtx, ptr_mode,
+ src_rtx, ptr_mode,
+ len_rtx, TYPE_MODE (sizetype));
- /* If OP0 is not the actual lvalue, but rather a copy in a register,
- then we cannot just increment OP0. We must therefore contrive to
- increment the original value. Then, for postincrement, we can return
- OP0 since it is a copy of the old value. For preincrement, expand here
- unless we can do it with a single insn.
+ /* There could be a void* cast on top of the object. */
+ while (TREE_CODE (src) == NOP_EXPR)
+ src = TREE_OPERAND (src, 0);
+ type = TREE_TYPE (TREE_TYPE (src));
+ MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
- Likewise if storing directly into OP0 would clobber high bits
- we need to preserve (bad_subreg). */
- if (op0_is_copy || (!post && !single_insn) || bad_subreg)
- {
- /* This is the easiest way to increment the value wherever it is.
- Problems with multiple evaluation of INCREMENTED are prevented
- because either (1) it is a component_ref or preincrement,
- in which case it was stabilized above, or (2) it is an array_ref
- with constant index in an array in a register, which is
- safe to reevaluate. */
- tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
- || TREE_CODE (exp) == PREDECREMENT_EXPR)
- ? MINUS_EXPR : PLUS_EXPR),
- TREE_TYPE (exp),
- incremented,
- TREE_OPERAND (exp, 1));
+ /* Copy word part most expediently. */
+ dest_addr
+ = emit_block_move (dest_mem, src_mem, len_rtx,
+ MIN (src_align, dest_align));
- while (TREE_CODE (incremented) == NOP_EXPR
- || TREE_CODE (incremented) == CONVERT_EXPR)
- {
- newexp = convert (TREE_TYPE (incremented), newexp);
- incremented = TREE_OPERAND (incremented, 0);
+ if (dest_addr == 0)
+ dest_addr = force_operand (dest_rtx, NULL_RTX);
+
+ return dest_addr;
}
- temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
- return post ? op0 : temp;
- }
+ case BUILT_IN_MEMSET:
+ /* If not optimizing, call the library function. */
+ if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
+ break;
- if (post)
- {
- /* We have a true reference to the value in OP0.
- If there is an insn to add or subtract in this mode, queue it.
- Queueing the increment insn avoids the register shuffling
- that often results if we must increment now and first save
- the old value for subsequent use. */
+ if (arglist == 0
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
+ || TREE_CHAIN (arglist) == 0
+ || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
+ != INTEGER_TYPE)
+ || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
+ || (INTEGER_TYPE
+ != (TREE_CODE (TREE_TYPE
+ (TREE_VALUE
+ (TREE_CHAIN (TREE_CHAIN (arglist))))))))
+ break;
+ else
+ {
+ tree dest = TREE_VALUE (arglist);
+ tree val = TREE_VALUE (TREE_CHAIN (arglist));
+ tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
+ tree type;
-#if 0 /* Turned off to avoid making extra insn for indexed memref. */
- op0 = stabilize (op0);
-#endif
+ int dest_align
+ = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ rtx dest_rtx, dest_mem, dest_addr, len_rtx;
- icode = (int) this_optab->handlers[(int) mode].insn_code;
- if (icode != (int) CODE_FOR_nothing
- /* Make sure that OP0 is valid for operands 0 and 1
- of the insn we want to queue. */
- && (*insn_operand_predicate[icode][0]) (op0, mode)
- && (*insn_operand_predicate[icode][1]) (op0, mode))
- {
- if (! (*insn_operand_predicate[icode][2]) (op1, mode))
- op1 = force_reg (mode, op1);
+ /* If DEST is not a pointer type, don't do this
+ operation in-line. */
+ if (dest_align == 0)
+ break;
- return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
+ /* If VAL is not 0, don't do this operation in-line. */
+ if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
+ break;
+
+ /* If LEN does not expand to a constant, don't do this
+ operation in-line. */
+ len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
+ if (GET_CODE (len_rtx) != CONST_INT)
+ break;
+
+ dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
+ dest_mem = gen_rtx_MEM (BLKmode,
+ memory_address (BLKmode, dest_rtx));
+
+ /* Just check DST is writable and mark it as readable. */
+ if (flag_check_memory_usage)
+ emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ dest_rtx, ptr_mode,
+ len_rtx, TYPE_MODE (sizetype),
+ GEN_INT (MEMORY_USE_WO),
+ TYPE_MODE (integer_type_node));
+
+ /* There could be a void* cast on top of the object. */
+ while (TREE_CODE (dest) == NOP_EXPR)
+ dest = TREE_OPERAND (dest, 0);
+ type = TREE_TYPE (TREE_TYPE (dest));
+ MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
+
+ dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
+
+ if (dest_addr == 0)
+ dest_addr = force_operand (dest_rtx, NULL_RTX);
+
+ return dest_addr;
}
- if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
- {
- rtx addr = (general_operand (XEXP (op0, 0), mode)
- ? force_reg (Pmode, XEXP (op0, 0))
- : copy_to_reg (XEXP (op0, 0)));
- rtx temp, result;
- op0 = change_address (op0, VOIDmode, addr);
- temp = force_reg (GET_MODE (op0), op0);
- if (! (*insn_operand_predicate[icode][2]) (op1, mode))
- op1 = force_reg (mode, op1);
+/* These comparison functions need an instruction that returns an actual
+ index. An ordinary compare that just sets the condition codes
+ is not enough. */
+#ifdef HAVE_cmpstrsi
+ case BUILT_IN_STRCMP:
+ /* If not optimizing, call the library function. */
+ if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
+ break;
+
+ /* If we need to check memory accesses, call the library function. */
+ if (flag_check_memory_usage)
+ break;
- /* The increment queue is LIFO, thus we have to `queue'
- the instructions in reverse order. */
- enqueue_insn (op0, gen_move_insn (op0, temp));
- result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
- return result;
- }
- }
+ if (arglist == 0
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
+ || TREE_CHAIN (arglist) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
+ break;
+ else if (!HAVE_cmpstrsi)
+ break;
+ {
+ tree arg1 = TREE_VALUE (arglist);
+ tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
+ tree len, len2;
- /* Preincrement, or we can't increment with one simple insn. */
- if (post)
- /* Save a copy of the value before inc or dec, to return it later. */
- temp = value = copy_to_reg (op0);
- else
- /* Arrange to return the incremented value. */
- /* Copy the rtx because expand_binop will protect from the queue,
- and the results of that would be invalid for us to return
- if our caller does emit_queue before using our result. */
- temp = copy_rtx (value = op0);
+ len = c_strlen (arg1);
+ if (len)
+ len = size_binop (PLUS_EXPR, integer_one_node, len);
+ len2 = c_strlen (arg2);
+ if (len2)
+ len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
- /* Increment however we can. */
- op1 = expand_binop (mode, this_optab, value, op1,
- flag_check_memory_usage ? NULL_RTX : op0,
- TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
- /* Make sure the value is stored into OP0. */
- if (op1 != op0)
- emit_move_insn (op0, op1);
+ /* If we don't have a constant length for the first, use the length
+ of the second, if we know it. We don't require a constant for
+ this case; some cost analysis could be done if both are available
+ but neither is constant. For now, assume they're equally cheap.
- return temp;
-}
-\f
-/* Expand all function calls contained within EXP, innermost ones first.
- But don't look within expressions that have sequence points.
- For each CALL_EXPR, record the rtx for its value
- in the CALL_EXPR_RTL field. */
+ If both strings have constant lengths, use the smaller. This
+ could arise if optimization results in strcpy being called with
+ two fixed strings, or if the code was machine-generated. We should
+ add some code to the `memcmp' handler below to deal with such
+ situations, someday. */
+ if (!len || TREE_CODE (len) != INTEGER_CST)
+ {
+ if (len2)
+ len = len2;
+ else if (len == 0)
+ break;
+ }
+ else if (len2 && TREE_CODE (len2) == INTEGER_CST)
+ {
+ if (tree_int_cst_lt (len2, len))
+ len = len2;
+ }
-static void
-preexpand_calls (exp)
- tree exp;
-{
- register int nops, i;
- int type = TREE_CODE_CLASS (TREE_CODE (exp));
+ chainon (arglist, build_tree_list (NULL_TREE, len));
+ }
- if (! do_preexpand_calls)
- return;
+ /* Drops in. */
+ case BUILT_IN_MEMCMP:
+ /* If not optimizing, call the library function. */
+ if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
+ break;
- /* Only expressions and references can contain calls. */
+ /* If we need to check memory accesses, call the library function. */
+ if (flag_check_memory_usage)
+ break;
- if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
- return;
+ if (arglist == 0
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
+ || TREE_CHAIN (arglist) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
+ || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
+ break;
+ else if (!HAVE_cmpstrsi)
+ break;
+ {
+ tree arg1 = TREE_VALUE (arglist);
+ tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
+ tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
+ rtx result;
- switch (TREE_CODE (exp))
- {
- case CALL_EXPR:
- /* Do nothing if already expanded. */
- if (CALL_EXPR_RTL (exp) != 0
- /* Do nothing if the call returns a variable-sized object. */
- || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
- /* Do nothing to built-in functions. */
- || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
- && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
- == FUNCTION_DECL)
- && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
- return;
+ int arg1_align
+ = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ int arg2_align
+ = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ enum machine_mode insn_mode
+ = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
- CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
- return;
+ /* If we don't have POINTER_TYPE, call the function. */
+ if (arg1_align == 0 || arg2_align == 0)
+ {
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
+ TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
+ break;
+ }
- case COMPOUND_EXPR:
- case COND_EXPR:
- case TRUTH_ANDIF_EXPR:
- case TRUTH_ORIF_EXPR:
- /* If we find one of these, then we can be sure
- the adjust will be done for it (since it makes jumps).
- Do it now, so that if this is inside an argument
- of a function, we don't get the stack adjustment
- after some other args have already been pushed. */
- do_pending_stack_adjust ();
- return;
+ /* Make a place to write the result of the instruction. */
+ result = target;
+ if (! (result != 0
+ && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
+ && REGNO (result) >= FIRST_PSEUDO_REGISTER))
+ result = gen_reg_rtx (insn_mode);
- case BLOCK:
- case RTL_EXPR:
- case WITH_CLEANUP_EXPR:
- case CLEANUP_POINT_EXPR:
- return;
+ emit_insn (gen_cmpstrsi (result,
+ gen_rtx_MEM (BLKmode,
+ expand_expr (arg1, NULL_RTX,
+ ptr_mode,
+ EXPAND_NORMAL)),
+ gen_rtx_MEM (BLKmode,
+ expand_expr (arg2, NULL_RTX,
+ ptr_mode,
+ EXPAND_NORMAL)),
+ expand_expr (len, NULL_RTX, VOIDmode, 0),
+ GEN_INT (MIN (arg1_align, arg2_align))));
- case SAVE_EXPR:
- if (SAVE_EXPR_RTL (exp) != 0)
- return;
-
- default:
+ /* Return the value in the proper mode for this function. */
+ mode = TYPE_MODE (TREE_TYPE (exp));
+ if (GET_MODE (result) == mode)
+ return result;
+ else if (target != 0)
+ {
+ convert_move (target, result, 0);
+ return target;
+ }
+ else
+ return convert_to_mode (mode, result, 0);
+ }
+#else
+ case BUILT_IN_STRCMP:
+ case BUILT_IN_MEMCMP:
break;
- }
-
- nops = tree_code_length[(int) TREE_CODE (exp)];
- for (i = 0; i < nops; i++)
- if (TREE_OPERAND (exp, i) != 0)
- {
- type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
- if (type == 'e' || type == '<' || type == '1' || type == '2'
- || type == 'r')
- preexpand_calls (TREE_OPERAND (exp, i));
- }
-}
-\f
-/* At the start of a function, record that we have no previously-pushed
- arguments waiting to be popped. */
+#endif
-void
-init_pending_stack_adjust ()
-{
- pending_stack_adjust = 0;
-}
+ case BUILT_IN_SETJMP:
+ if (arglist == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
+ break;
+ else
+ {
+ rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
+ VOIDmode, 0);
+ rtx lab = gen_label_rtx ();
+ rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
+ emit_label (lab);
+ return ret;
+ }
-/* When exiting from function, if safe, clear out any pending stack adjust
- so the adjustment won't get done. */
+ /* __builtin_longjmp is passed a pointer to an array of five words.
+ It's similar to the C library longjmp function but works with
+ __builtin_setjmp above. */
+ case BUILT_IN_LONGJMP:
+ if (arglist == 0 || TREE_CHAIN (arglist) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
+ break;
+ else
+ {
+ rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
+ VOIDmode, 0);
+ rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
+ const0_rtx, VOIDmode, 0);
+ expand_builtin_longjmp (buf_addr, value);
+ return const0_rtx;
+ }
-void
-clear_pending_stack_adjust ()
-{
-#ifdef EXIT_IGNORE_STACK
- if (optimize > 0
- && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
- && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
- && ! flag_inline_functions)
- pending_stack_adjust = 0;
+ /* Various hooks for the DWARF 2 __throw routine. */
+ case BUILT_IN_UNWIND_INIT:
+ expand_builtin_unwind_init ();
+ return const0_rtx;
+ case BUILT_IN_FP:
+ return frame_pointer_rtx;
+ case BUILT_IN_SP:
+ return stack_pointer_rtx;
+#ifdef DWARF2_UNWIND_INFO
+ case BUILT_IN_DWARF_FP_REGNUM:
+ return expand_builtin_dwarf_fp_regnum ();
+ case BUILT_IN_DWARF_REG_SIZE:
+ return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
#endif
-}
+ case BUILT_IN_FROB_RETURN_ADDR:
+ return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
+ case BUILT_IN_EXTRACT_RETURN_ADDR:
+ return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
+ case BUILT_IN_SET_RETURN_ADDR_REG:
+ expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
+ return const0_rtx;
+ case BUILT_IN_EH_STUB:
+ return expand_builtin_eh_stub ();
+ case BUILT_IN_SET_EH_REGS:
+ expand_builtin_set_eh_regs (TREE_VALUE (arglist),
+ TREE_VALUE (TREE_CHAIN (arglist)));
+ return const0_rtx;
+
+ default: /* just do library call, if unknown builtin */
+ error ("built-in function `%s' not currently supported",
+ IDENTIFIER_POINTER (DECL_NAME (fndecl)));
+ }
-/* Pop any previously-pushed arguments that have not been popped yet. */
+ /* The switch statement above can drop through to cause the function
+ to be called normally. */
-void
-do_pending_stack_adjust ()
-{
- if (inhibit_defer_pop == 0)
- {
- if (pending_stack_adjust != 0)
- adjust_stack (GEN_INT (pending_stack_adjust));
- pending_stack_adjust = 0;
- }
+ return expand_call (exp, target, ignore);
}
\f
-/* Expand conditional expressions. */
+/* Built-in functions to perform an untyped call and return. */
-/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
- LABEL is an rtx of code CODE_LABEL, in this function and all the
- functions here. */
+/* For each register that may be used for calling a function, this
+ gives a mode used to copy the register's value. VOIDmode indicates
+ the register is not used for calling a function. If the machine
+ has register windows, this gives only the outbound registers.
+ INCOMING_REGNO gives the corresponding inbound register. */
+static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
-void
-jumpifnot (exp, label)
- tree exp;
- rtx label;
-{
- do_jump (exp, label, NULL_RTX);
-}
+/* For each register that may be used for returning values, this gives
+ a mode used to copy the register's value. VOIDmode indicates the
+ register is not used for returning values. If the machine has
+ register windows, this gives only the outbound registers.
+ INCOMING_REGNO gives the corresponding inbound register. */
+static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
-/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
+/* For each register that may be used for calling a function, this
+ gives the offset of that register into the block returned by
+ __builtin_apply_args. 0 indicates that the register is not
+ used for calling a function. */
+static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
-void
-jumpif (exp, label)
- tree exp;
- rtx label;
-{
- do_jump (exp, NULL_RTX, label);
-}
+/* Return the offset of register REGNO into the block returned by
+ __builtin_apply_args. This is not declared static, since it is
+ needed in objc-act.c. */
-/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
- the result is zero, or IF_TRUE_LABEL if the result is one.
- Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
- meaning fall through in that case.
+int
+apply_args_register_offset (regno)
+ int regno;
+{
+ apply_args_size ();
- do_jump always does any pending stack adjust except when it does not
- actually perform a jump. An example where there is no jump
- is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
+ /* Arguments are always put in outgoing registers (in the argument
+ block) if such make sense. */
+#ifdef OUTGOING_REGNO
+ regno = OUTGOING_REGNO(regno);
+#endif
+ return apply_args_reg_offset[regno];
+}
- This function is responsible for optimizing cases such as
- &&, || and comparison operators in EXP. */
+/* Return the size required for the block returned by __builtin_apply_args,
+ and initialize apply_args_mode. */
-void
-do_jump (exp, if_false_label, if_true_label)
- tree exp;
- rtx if_false_label, if_true_label;
+static int
+apply_args_size ()
{
- register enum tree_code code = TREE_CODE (exp);
- /* Some cases need to create a label to jump to
- in order to properly fall through.
- These cases set DROP_THROUGH_LABEL nonzero. */
- rtx drop_through_label = 0;
- rtx temp;
- rtx comparison = 0;
- int i;
- tree type;
+ static int size = -1;
+ int align, regno;
enum machine_mode mode;
- emit_queue ();
-
- switch (code)
+ /* The values computed by this function never change. */
+ if (size < 0)
{
- case ERROR_MARK:
- break;
-
- case INTEGER_CST:
- temp = integer_zerop (exp) ? if_false_label : if_true_label;
- if (temp)
- emit_jump (temp);
- break;
-
-#if 0
- /* This is not true with #pragma weak */
- case ADDR_EXPR:
- /* The address of something can never be zero. */
- if (if_true_label)
- emit_jump (if_true_label);
- break;
-#endif
-
- case NOP_EXPR:
- if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
- || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
- || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
- goto normal;
- case CONVERT_EXPR:
- /* If we are narrowing the operand, we have to do the compare in the
- narrower mode. */
- if ((TYPE_PRECISION (TREE_TYPE (exp))
- < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- goto normal;
- case NON_LVALUE_EXPR:
- case REFERENCE_EXPR:
- case ABS_EXPR:
- case NEGATE_EXPR:
- case LROTATE_EXPR:
- case RROTATE_EXPR:
- /* These cannot change zero->non-zero or vice versa. */
- do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
- break;
-
-#if 0
- /* This is never less insns than evaluating the PLUS_EXPR followed by
- a test and can be longer if the test is eliminated. */
- case PLUS_EXPR:
- /* Reduce to minus. */
- exp = build (MINUS_EXPR, TREE_TYPE (exp),
- TREE_OPERAND (exp, 0),
- fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
- TREE_OPERAND (exp, 1))));
- /* Process as MINUS. */
-#endif
-
- case MINUS_EXPR:
- /* Non-zero iff operands of minus differ. */
- comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
- TREE_OPERAND (exp, 0),
- TREE_OPERAND (exp, 1)),
- NE, NE);
- break;
-
- case BIT_AND_EXPR:
- /* If we are AND'ing with a small constant, do this comparison in the
- smallest type that fits. If the machine doesn't have comparisons
- that small, it will be converted back to the wider comparison.
- This helps if we are testing the sign bit of a narrower object.
- combine can't do this for us because it can't know whether a
- ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
-
- if (! SLOW_BYTE_ACCESS
- && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
- && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
- && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
- && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
- && (type = type_for_mode (mode, 1)) != 0
- && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
- && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
- != CODE_FOR_nothing))
- {
- do_jump (convert (type, exp), if_false_label, if_true_label);
- break;
- }
- goto normal;
-
- case TRUTH_NOT_EXPR:
- do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
- break;
-
- case TRUTH_ANDIF_EXPR:
- if (if_false_label == 0)
- if_false_label = drop_through_label = gen_label_rtx ();
- do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
- start_cleanup_deferal ();
- do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
- end_cleanup_deferal ();
- break;
+ /* The first value is the incoming arg-pointer. */
+ size = GET_MODE_SIZE (Pmode);
- case TRUTH_ORIF_EXPR:
- if (if_true_label == 0)
- if_true_label = drop_through_label = gen_label_rtx ();
- do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
- start_cleanup_deferal ();
- do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
- end_cleanup_deferal ();
- break;
+ /* The second value is the structure value address unless this is
+ passed as an "invisible" first argument. */
+ if (struct_value_rtx)
+ size += GET_MODE_SIZE (Pmode);
- case COMPOUND_EXPR:
- push_temp_slots ();
- expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
- preserve_temp_slots (NULL_RTX);
- free_temp_slots ();
- pop_temp_slots ();
- emit_queue ();
- do_pending_stack_adjust ();
- do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
- break;
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if (FUNCTION_ARG_REGNO_P (regno))
+ {
+ /* Search for the proper mode for copying this register's
+ value. I'm not sure this is right, but it works so far. */
+ enum machine_mode best_mode = VOIDmode;
- case COMPONENT_REF:
- case BIT_FIELD_REF:
- case ARRAY_REF:
- {
- int bitsize, bitpos, unsignedp;
- enum machine_mode mode;
- tree type;
- tree offset;
- int volatilep = 0;
- int alignment;
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ if (HARD_REGNO_MODE_OK (regno, mode)
+ && HARD_REGNO_NREGS (regno, mode) == 1)
+ best_mode = mode;
- /* Get description of this reference. We don't actually care
- about the underlying object here. */
- get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep,
- &alignment);
+ if (best_mode == VOIDmode)
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ if (HARD_REGNO_MODE_OK (regno, mode)
+ && (mov_optab->handlers[(int) mode].insn_code
+ != CODE_FOR_nothing))
+ best_mode = mode;
- type = type_for_size (bitsize, unsignedp);
- if (! SLOW_BYTE_ACCESS
- && type != 0 && bitsize >= 0
- && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
- && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
- != CODE_FOR_nothing))
+ mode = best_mode;
+ if (mode == VOIDmode)
+ abort ();
+
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ apply_args_reg_offset[regno] = size;
+ size += GET_MODE_SIZE (mode);
+ apply_args_mode[regno] = mode;
+ }
+ else
{
- do_jump (convert (type, exp), if_false_label, if_true_label);
- break;
+ apply_args_mode[regno] = VOIDmode;
+ apply_args_reg_offset[regno] = 0;
}
- goto normal;
- }
+ }
+ return size;
+}
- case COND_EXPR:
- /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
- if (integer_onep (TREE_OPERAND (exp, 1))
- && integer_zerop (TREE_OPERAND (exp, 2)))
- do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
+/* Return the size required for the block returned by __builtin_apply,
+ and initialize apply_result_mode. */
- else if (integer_zerop (TREE_OPERAND (exp, 1))
- && integer_onep (TREE_OPERAND (exp, 2)))
- do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
+static int
+apply_result_size ()
+{
+ static int size = -1;
+ int align, regno;
+ enum machine_mode mode;
- else
- {
- register rtx label1 = gen_label_rtx ();
- drop_through_label = gen_label_rtx ();
+ /* The values computed by this function never change. */
+ if (size < 0)
+ {
+ size = 0;
- do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if (FUNCTION_VALUE_REGNO_P (regno))
+ {
+ /* Search for the proper mode for copying this register's
+ value. I'm not sure this is right, but it works so far. */
+ enum machine_mode best_mode = VOIDmode;
- start_cleanup_deferal ();
- /* Now the THEN-expression. */
- do_jump (TREE_OPERAND (exp, 1),
- if_false_label ? if_false_label : drop_through_label,
- if_true_label ? if_true_label : drop_through_label);
- /* In case the do_jump just above never jumps. */
- do_pending_stack_adjust ();
- emit_label (label1);
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ mode != TImode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ if (HARD_REGNO_MODE_OK (regno, mode))
+ best_mode = mode;
- /* Now the ELSE-expression. */
- do_jump (TREE_OPERAND (exp, 2),
- if_false_label ? if_false_label : drop_through_label,
- if_true_label ? if_true_label : drop_through_label);
- end_cleanup_deferal ();
- }
- break;
+ if (best_mode == VOIDmode)
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ if (HARD_REGNO_MODE_OK (regno, mode)
+ && (mov_optab->handlers[(int) mode].insn_code
+ != CODE_FOR_nothing))
+ best_mode = mode;
- case EQ_EXPR:
- {
- tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
+ mode = best_mode;
+ if (mode == VOIDmode)
+ abort ();
- if (integer_zerop (TREE_OPERAND (exp, 1)))
- do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
- else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
- || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
- do_jump
- (fold
- (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
- fold (build (EQ_EXPR, TREE_TYPE (exp),
- fold (build1 (REALPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 0))),
- fold (build1 (REALPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 1))))),
- fold (build (EQ_EXPR, TREE_TYPE (exp),
- fold (build1 (IMAGPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 0))),
- fold (build1 (IMAGPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 1))))))),
- if_false_label, if_true_label);
- else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
- && !can_compare_p (TYPE_MODE (inner_type)))
- do_jump_by_parts_equality (exp, if_false_label, if_true_label);
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ size += GET_MODE_SIZE (mode);
+ apply_result_mode[regno] = mode;
+ }
else
- comparison = compare (exp, EQ, EQ);
- break;
- }
+ apply_result_mode[regno] = VOIDmode;
- case NE_EXPR:
- {
- tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
+ /* Allow targets that use untyped_call and untyped_return to override
+ the size so that machine-specific information can be stored here. */
+#ifdef APPLY_RESULT_SIZE
+ size = APPLY_RESULT_SIZE;
+#endif
+ }
+ return size;
+}
- if (integer_zerop (TREE_OPERAND (exp, 1)))
- do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
- else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
- || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
- do_jump
- (fold
- (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
- fold (build (NE_EXPR, TREE_TYPE (exp),
- fold (build1 (REALPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 0))),
- fold (build1 (REALPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 1))))),
- fold (build (NE_EXPR, TREE_TYPE (exp),
- fold (build1 (IMAGPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 0))),
- fold (build1 (IMAGPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 1))))))),
- if_false_label, if_true_label);
- else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
- && !can_compare_p (TYPE_MODE (inner_type)))
- do_jump_by_parts_equality (exp, if_true_label, if_false_label);
- else
- comparison = compare (exp, NE, NE);
- break;
+#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
+/* Create a vector describing the result block RESULT. If SAVEP is true,
+ the result block is used to save the values; otherwise it is used to
+ restore the values. */
+
+static rtx
+result_vector (savep, result)
+ int savep;
+ rtx result;
+{
+ int regno, size, align, nelts;
+ enum machine_mode mode;
+ rtx reg, mem;
+ rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
+
+ size = nelts = 0;
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if ((mode = apply_result_mode[regno]) != VOIDmode)
+ {
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
+ mem = change_address (result, mode,
+ plus_constant (XEXP (result, 0), size));
+ savevec[nelts++] = (savep
+ ? gen_rtx_SET (VOIDmode, mem, reg)
+ : gen_rtx_SET (VOIDmode, reg, mem));
+ size += GET_MODE_SIZE (mode);
}
+ return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
+}
+#endif /* HAVE_untyped_call or HAVE_untyped_return */
- case LT_EXPR:
- if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
- == MODE_INT)
- && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
- else
- comparison = compare (exp, LT, LTU);
- break;
+/* Save the state required to perform an untyped call with the same
+ arguments as were passed to the current function. */
- case LE_EXPR:
- if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
- == MODE_INT)
- && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
- else
- comparison = compare (exp, LE, LEU);
- break;
+static rtx
+expand_builtin_apply_args ()
+{
+ rtx registers;
+ int size, align, regno;
+ enum machine_mode mode;
- case GT_EXPR:
- if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
- == MODE_INT)
- && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
- else
- comparison = compare (exp, GT, GTU);
- break;
+ /* Create a block where the arg-pointer, structure value address,
+ and argument registers can be saved. */
+ registers = assign_stack_local (BLKmode, apply_args_size (), -1);
- case GE_EXPR:
- if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
- == MODE_INT)
- && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
- else
- comparison = compare (exp, GE, GEU);
- break;
+ /* Walk past the arg-pointer and structure value address. */
+ size = GET_MODE_SIZE (Pmode);
+ if (struct_value_rtx)
+ size += GET_MODE_SIZE (Pmode);
- default:
- normal:
- temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
-#if 0
- /* This is not needed any more and causes poor code since it causes
- comparisons and tests from non-SI objects to have different code
- sequences. */
- /* Copy to register to avoid generating bad insns by cse
- from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
- if (!cse_not_expected && GET_CODE (temp) == MEM)
- temp = copy_to_reg (temp);
+ /* Save each register used in calling a function to the block. */
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if ((mode = apply_args_mode[regno]) != VOIDmode)
+ {
+ rtx tem;
+
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+
+ tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
+
+#ifdef STACK_REGS
+ /* For reg-stack.c's stack register household.
+ Compare with a similar piece of code in function.c. */
+
+ emit_insn (gen_rtx_USE (mode, tem));
#endif
- do_pending_stack_adjust ();
- if (GET_CODE (temp) == CONST_INT)
- comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
- else if (GET_CODE (temp) == LABEL_REF)
- comparison = const_true_rtx;
- else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
- && !can_compare_p (GET_MODE (temp)))
- /* Note swapping the labels gives us not-equal. */
- do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
- else if (GET_MODE (temp) != VOIDmode)
- comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
- NE, TREE_UNSIGNED (TREE_TYPE (exp)),
- GET_MODE (temp), NULL_RTX, 0);
- else
- abort ();
- }
- /* Do any postincrements in the expression that was tested. */
- emit_queue ();
+ emit_move_insn (change_address (registers, mode,
+ plus_constant (XEXP (registers, 0),
+ size)),
+ tem);
+ size += GET_MODE_SIZE (mode);
+ }
- /* If COMPARISON is nonzero here, it is an rtx that can be substituted
- straight into a conditional jump instruction as the jump condition.
- Otherwise, all the work has been done already. */
+ /* Save the arg pointer to the block. */
+ emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
+ copy_to_reg (virtual_incoming_args_rtx));
+ size = GET_MODE_SIZE (Pmode);
- if (comparison == const_true_rtx)
- {
- if (if_true_label)
- emit_jump (if_true_label);
- }
- else if (comparison == const0_rtx)
+ /* Save the structure value address unless this is passed as an
+ "invisible" first argument. */
+ if (struct_value_incoming_rtx)
{
- if (if_false_label)
- emit_jump (if_false_label);
+ emit_move_insn (change_address (registers, Pmode,
+ plus_constant (XEXP (registers, 0),
+ size)),
+ copy_to_reg (struct_value_incoming_rtx));
+ size += GET_MODE_SIZE (Pmode);
}
- else if (comparison)
- do_jump_for_compare (comparison, if_false_label, if_true_label);
- if (drop_through_label)
- {
- /* If do_jump produces code that might be jumped around,
- do any stack adjusts from that code, before the place
- where control merges in. */
- do_pending_stack_adjust ();
- emit_label (drop_through_label);
- }
+ /* Return the address of the block. */
+ return copy_addr_to_reg (XEXP (registers, 0));
}
-\f
-/* Given a comparison expression EXP for values too wide to be compared
- with one insn, test the comparison and jump to the appropriate label.
- The code of EXP is ignored; we always test GT if SWAP is 0,
- and LT if SWAP is 1. */
-static void
-do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
- tree exp;
- int swap;
- rtx if_false_label, if_true_label;
+/* Perform an untyped call and save the state required to perform an
+ untyped return of whatever value was returned by the given function. */
+
+static rtx
+expand_builtin_apply (function, arguments, argsize)
+ rtx function, arguments, argsize;
{
- rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
- rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
- int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
- rtx drop_through_label = 0;
- int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
- int i;
+ int size, align, regno;
+ enum machine_mode mode;
+ rtx incoming_args, result, reg, dest, call_insn;
+ rtx old_stack_level = 0;
+ rtx call_fusage = 0;
- if (! if_true_label || ! if_false_label)
- drop_through_label = gen_label_rtx ();
- if (! if_true_label)
- if_true_label = drop_through_label;
- if (! if_false_label)
- if_false_label = drop_through_label;
+ /* Create a block where the return registers can be saved. */
+ result = assign_stack_local (BLKmode, apply_result_size (), -1);
- /* Compare a word at a time, high order first. */
- for (i = 0; i < nwords; i++)
- {
- rtx comp;
- rtx op0_word, op1_word;
+ /* ??? The argsize value should be adjusted here. */
- if (WORDS_BIG_ENDIAN)
- {
- op0_word = operand_subword_force (op0, i, mode);
- op1_word = operand_subword_force (op1, i, mode);
- }
- else
- {
- op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
- op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
- }
+ /* Fetch the arg pointer from the ARGUMENTS block. */
+ incoming_args = gen_reg_rtx (Pmode);
+ emit_move_insn (incoming_args,
+ gen_rtx_MEM (Pmode, arguments));
+#ifndef STACK_GROWS_DOWNWARD
+ incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
+ incoming_args, 0, OPTAB_LIB_WIDEN);
+#endif
- /* All but high-order word must be compared as unsigned. */
- comp = compare_from_rtx (op0_word, op1_word,
- (unsignedp || i > 0) ? GTU : GT,
- unsignedp, word_mode, NULL_RTX, 0);
- if (comp == const_true_rtx)
- emit_jump (if_true_label);
- else if (comp != const0_rtx)
- do_jump_for_compare (comp, NULL_RTX, if_true_label);
+ /* Perform postincrements before actually calling the function. */
+ emit_queue ();
- /* Consider lower words only if these are equal. */
- comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
- NULL_RTX, 0);
- if (comp == const_true_rtx)
- emit_jump (if_false_label);
- else if (comp != const0_rtx)
- do_jump_for_compare (comp, NULL_RTX, if_false_label);
+ /* Push a new argument block and copy the arguments. */
+ do_pending_stack_adjust ();
+
+ /* Save the stack with nonlocal if available */
+#ifdef HAVE_save_stack_nonlocal
+ if (HAVE_save_stack_nonlocal)
+ emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
+ else
+#endif
+ emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
+
+ /* Push a block of memory onto the stack to store the memory arguments.
+ Save the address in a register, and copy the memory arguments. ??? I
+     haven't figured out how the calling convention macros affect this,
+ but it's likely that the source and/or destination addresses in
+ the block copy will need updating in machine specific ways. */
+ dest = allocate_dynamic_stack_space (argsize, 0, 0);
+ emit_block_move (gen_rtx_MEM (BLKmode, dest),
+ gen_rtx_MEM (BLKmode, incoming_args),
+ argsize,
+ PARM_BOUNDARY / BITS_PER_UNIT);
+
+ /* Refer to the argument block. */
+ apply_args_size ();
+ arguments = gen_rtx_MEM (BLKmode, arguments);
+
+ /* Walk past the arg-pointer and structure value address. */
+ size = GET_MODE_SIZE (Pmode);
+ if (struct_value_rtx)
+ size += GET_MODE_SIZE (Pmode);
+
+ /* Restore each of the registers previously saved. Make USE insns
+ for each of these registers for use in making the call. */
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if ((mode = apply_args_mode[regno]) != VOIDmode)
+ {
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ reg = gen_rtx_REG (mode, regno);
+ emit_move_insn (reg,
+ change_address (arguments, mode,
+ plus_constant (XEXP (arguments, 0),
+ size)));
+
+ use_reg (&call_fusage, reg);
+ size += GET_MODE_SIZE (mode);
+ }
+
+ /* Restore the structure value address unless this is passed as an
+ "invisible" first argument. */
+ size = GET_MODE_SIZE (Pmode);
+ if (struct_value_rtx)
+ {
+ rtx value = gen_reg_rtx (Pmode);
+ emit_move_insn (value,
+ change_address (arguments, Pmode,
+ plus_constant (XEXP (arguments, 0),
+ size)));
+ emit_move_insn (struct_value_rtx, value);
+ if (GET_CODE (struct_value_rtx) == REG)
+ use_reg (&call_fusage, struct_value_rtx);
+ size += GET_MODE_SIZE (Pmode);
}
- if (if_false_label)
- emit_jump (if_false_label);
- if (drop_through_label)
- emit_label (drop_through_label);
-}
+ /* All arguments and registers used for the call are set up by now! */
+ function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
-/* Compare OP0 with OP1, word at a time, in mode MODE.
- UNSIGNEDP says to do unsigned comparison.
- Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
+ /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
+ and we don't want to load it into a register as an optimization,
+ because prepare_call_address already did it if it should be done. */
+ if (GET_CODE (function) != SYMBOL_REF)
+ function = memory_address (FUNCTION_MODE, function);
-void
-do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
- enum machine_mode mode;
- int unsignedp;
- rtx op0, op1;
- rtx if_false_label, if_true_label;
-{
- int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
- rtx drop_through_label = 0;
- int i;
+ /* Generate the actual call instruction and save the return value. */
+#ifdef HAVE_untyped_call
+ if (HAVE_untyped_call)
+ emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
+ result, result_vector (1, result)));
+ else
+#endif
+#ifdef HAVE_call_value
+ if (HAVE_call_value)
+ {
+ rtx valreg = 0;
- if (! if_true_label || ! if_false_label)
- drop_through_label = gen_label_rtx ();
- if (! if_true_label)
- if_true_label = drop_through_label;
- if (! if_false_label)
- if_false_label = drop_through_label;
+ /* Locate the unique return register. It is not possible to
+ express a call that sets more than one return register using
+ call_value; use untyped_call for that. In fact, untyped_call
+ only needs to save the return registers in the given block. */
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if ((mode = apply_result_mode[regno]) != VOIDmode)
+ {
+ if (valreg)
+ abort (); /* HAVE_untyped_call required. */
+ valreg = gen_rtx_REG (mode, regno);
+ }
+
+ emit_call_insn (gen_call_value (valreg,
+ gen_rtx_MEM (FUNCTION_MODE, function),
+ const0_rtx, NULL_RTX, const0_rtx));
+
+ emit_move_insn (change_address (result, GET_MODE (valreg),
+ XEXP (result, 0)),
+ valreg);
+ }
+ else
+#endif
+ abort ();
+
+ /* Find the CALL insn we just emitted. */
+ for (call_insn = get_last_insn ();
+ call_insn && GET_CODE (call_insn) != CALL_INSN;
+ call_insn = PREV_INSN (call_insn))
+ ;
- /* Compare a word at a time, high order first. */
- for (i = 0; i < nwords; i++)
- {
- rtx comp;
- rtx op0_word, op1_word;
+ if (! call_insn)
+ abort ();
- if (WORDS_BIG_ENDIAN)
- {
- op0_word = operand_subword_force (op0, i, mode);
- op1_word = operand_subword_force (op1, i, mode);
- }
- else
- {
- op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
- op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
- }
+ /* Put the register usage information on the CALL. If there is already
+ some usage information, put ours at the end. */
+ if (CALL_INSN_FUNCTION_USAGE (call_insn))
+ {
+ rtx link;
- /* All but high-order word must be compared as unsigned. */
- comp = compare_from_rtx (op0_word, op1_word,
- (unsignedp || i > 0) ? GTU : GT,
- unsignedp, word_mode, NULL_RTX, 0);
- if (comp == const_true_rtx)
- emit_jump (if_true_label);
- else if (comp != const0_rtx)
- do_jump_for_compare (comp, NULL_RTX, if_true_label);
+ for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
+ link = XEXP (link, 1))
+ ;
- /* Consider lower words only if these are equal. */
- comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
- NULL_RTX, 0);
- if (comp == const_true_rtx)
- emit_jump (if_false_label);
- else if (comp != const0_rtx)
- do_jump_for_compare (comp, NULL_RTX, if_false_label);
+ XEXP (link, 1) = call_fusage;
}
+ else
+ CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
- if (if_false_label)
- emit_jump (if_false_label);
- if (drop_through_label)
- emit_label (drop_through_label);
+ /* Restore the stack. */
+#ifdef HAVE_save_stack_nonlocal
+ if (HAVE_save_stack_nonlocal)
+ emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
+ else
+#endif
+ emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
+
+ /* Return the address of the result block. */
+ return copy_addr_to_reg (XEXP (result, 0));
}
-/* Given an EQ_EXPR expression EXP for values too wide to be compared
- with one insn, test the comparison and jump to the appropriate label. */
+/* Perform an untyped return. */
static void
-do_jump_by_parts_equality (exp, if_false_label, if_true_label)
- tree exp;
- rtx if_false_label, if_true_label;
+expand_builtin_return (result)
+ rtx result;
{
- rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
- rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
- int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
- int i;
- rtx drop_through_label = 0;
+ int size, align, regno;
+ enum machine_mode mode;
+ rtx reg;
+ rtx call_fusage = 0;
- if (! if_false_label)
- drop_through_label = if_false_label = gen_label_rtx ();
+ apply_result_size ();
+ result = gen_rtx_MEM (BLKmode, result);
- for (i = 0; i < nwords; i++)
+#ifdef HAVE_untyped_return
+ if (HAVE_untyped_return)
{
- rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
- operand_subword_force (op1, i, mode),
- EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
- word_mode, NULL_RTX, 0);
- if (comp == const_true_rtx)
- emit_jump (if_false_label);
- else if (comp != const0_rtx)
- do_jump_for_compare (comp, if_false_label, NULL_RTX);
+ emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
+ emit_barrier ();
+ return;
}
+#endif
- if (if_true_label)
- emit_jump (if_true_label);
- if (drop_through_label)
- emit_label (drop_through_label);
+ /* Restore the return value and note that each value is used. */
+ size = 0;
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if ((mode = apply_result_mode[regno]) != VOIDmode)
+ {
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
+ emit_move_insn (reg,
+ change_address (result, mode,
+ plus_constant (XEXP (result, 0),
+ size)));
+
+ push_to_sequence (call_fusage);
+ emit_insn (gen_rtx_USE (VOIDmode, reg));
+ call_fusage = get_insns ();
+ end_sequence ();
+ size += GET_MODE_SIZE (mode);
+ }
+
+ /* Put the USE insns before the return. */
+ emit_insns (call_fusage);
+
+  /* Return whatever value was restored by jumping directly to the end
+ of the function. */
+ expand_null_return ();
}
\f
-/* Jump according to whether OP0 is 0.
- We assume that OP0 has an integer mode that is too wide
- for the available compare insns. */
+/* Expand code for a post- or pre- increment or decrement
+ and return the RTX for the result.
+ POST is 1 for postinc/decrements and 0 for preinc/decrements. */
-static void
-do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
- rtx op0;
- rtx if_false_label, if_true_label;
+static rtx
+expand_increment (exp, post, ignore)
+ register tree exp;
+ int post, ignore;
{
- int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
- rtx part;
- int i;
- rtx drop_through_label = 0;
+ register rtx op0, op1;
+ register rtx temp, value;
+ register tree incremented = TREE_OPERAND (exp, 0);
+ optab this_optab = add_optab;
+ int icode;
+ enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
+ int op0_is_copy = 0;
+ int single_insn = 0;
+ /* 1 means we can't store into OP0 directly,
+ because it is a subreg narrower than a word,
+ and we don't dare clobber the rest of the word. */
+ int bad_subreg = 0;
- /* The fastest way of doing this comparison on almost any machine is to
- "or" all the words and compare the result. If all have to be loaded
- from memory and this is a very wide item, it's possible this may
- be slower, but that's highly unlikely. */
+ /* Stabilize any component ref that might need to be
+ evaluated more than once below. */
+ if (!post
+ || TREE_CODE (incremented) == BIT_FIELD_REF
+ || (TREE_CODE (incremented) == COMPONENT_REF
+ && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
+ || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
+ incremented = stabilize_reference (incremented);
+ /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
+ ones into save exprs so that they don't accidentally get evaluated
+ more than once by the code below. */
+ if (TREE_CODE (incremented) == PREINCREMENT_EXPR
+ || TREE_CODE (incremented) == PREDECREMENT_EXPR)
+ incremented = save_expr (incremented);
- part = gen_reg_rtx (word_mode);
- emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
- for (i = 1; i < nwords && part != 0; i++)
- part = expand_binop (word_mode, ior_optab, part,
- operand_subword_force (op0, i, GET_MODE (op0)),
- part, 1, OPTAB_WIDEN);
+ /* Compute the operands as RTX.
+ Note whether OP0 is the actual lvalue or a copy of it:
+ I believe it is a copy iff it is a register or subreg
+ and insns were generated in computing it. */
- if (part != 0)
- {
- rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
- NULL_RTX, 0);
+ temp = get_last_insn ();
+ op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
- if (comp == const_true_rtx)
- emit_jump (if_false_label);
- else if (comp == const0_rtx)
- emit_jump (if_true_label);
- else
- do_jump_for_compare (comp, if_false_label, if_true_label);
+ /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
+ in place but instead must do sign- or zero-extension during assignment,
+ so we copy it into a new register and let the code below use it as
+ a copy.
- return;
+     Note that we can safely modify this SUBREG since it is known not to be
+ shared (it was made by the expand_expr call above). */
+
+ if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
+ {
+ if (post)
+ SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
+ else
+ bad_subreg = 1;
+ }
+ else if (GET_CODE (op0) == SUBREG
+ && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
+ {
+ /* We cannot increment this SUBREG in place. If we are
+ post-incrementing, get a copy of the old value. Otherwise,
+ just mark that we cannot increment in place. */
+ if (post)
+ op0 = copy_to_reg (op0);
+ else
+ bad_subreg = 1;
}
- /* If we couldn't do the "or" simply, do this with a series of compares. */
- if (! if_false_label)
- drop_through_label = if_false_label = gen_label_rtx ();
+ op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
+ && temp != get_last_insn ());
+ op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
+ EXPAND_MEMORY_USE_BAD);
- for (i = 0; i < nwords; i++)
+ /* Decide whether incrementing or decrementing. */
+ if (TREE_CODE (exp) == POSTDECREMENT_EXPR
+ || TREE_CODE (exp) == PREDECREMENT_EXPR)
+ this_optab = sub_optab;
+
+ /* Convert decrement by a constant into a negative increment. */
+ if (this_optab == sub_optab
+ && GET_CODE (op1) == CONST_INT)
{
- rtx comp = compare_from_rtx (operand_subword_force (op0, i,
- GET_MODE (op0)),
- const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
- if (comp == const_true_rtx)
- emit_jump (if_false_label);
- else if (comp != const0_rtx)
- do_jump_for_compare (comp, if_false_label, NULL_RTX);
+ op1 = GEN_INT (- INTVAL (op1));
+ this_optab = add_optab;
}
- if (if_true_label)
- emit_jump (if_true_label);
-
- if (drop_through_label)
- emit_label (drop_through_label);
-}
+ /* For a preincrement, see if we can do this with a single instruction. */
+ if (!post)
+ {
+ icode = (int) this_optab->handlers[(int) mode].insn_code;
+ if (icode != (int) CODE_FOR_nothing
+ /* Make sure that OP0 is valid for operands 0 and 1
+ of the insn we want to queue. */
+ && (*insn_operand_predicate[icode][0]) (op0, mode)
+ && (*insn_operand_predicate[icode][1]) (op0, mode)
+ && (*insn_operand_predicate[icode][2]) (op1, mode))
+ single_insn = 1;
+ }
-/* Given a comparison expression in rtl form, output conditional branches to
- IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
+ /* If OP0 is not the actual lvalue, but rather a copy in a register,
+ then we cannot just increment OP0. We must therefore contrive to
+ increment the original value. Then, for postincrement, we can return
+ OP0 since it is a copy of the old value. For preincrement, expand here
+ unless we can do it with a single insn.
-static void
-do_jump_for_compare (comparison, if_false_label, if_true_label)
- rtx comparison, if_false_label, if_true_label;
-{
- if (if_true_label)
+ Likewise if storing directly into OP0 would clobber high bits
+ we need to preserve (bad_subreg). */
+ if (op0_is_copy || (!post && !single_insn) || bad_subreg)
{
- if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
- emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
- else
- abort ();
+ /* This is the easiest way to increment the value wherever it is.
+ Problems with multiple evaluation of INCREMENTED are prevented
+ because either (1) it is a component_ref or preincrement,
+ in which case it was stabilized above, or (2) it is an array_ref
+ with constant index in an array in a register, which is
+ safe to reevaluate. */
+ tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
+ || TREE_CODE (exp) == PREDECREMENT_EXPR)
+ ? MINUS_EXPR : PLUS_EXPR),
+ TREE_TYPE (exp),
+ incremented,
+ TREE_OPERAND (exp, 1));
- if (if_false_label)
- emit_jump (if_false_label);
- }
- else if (if_false_label)
- {
- rtx insn;
- rtx prev = get_last_insn ();
- rtx branch = 0;
+ while (TREE_CODE (incremented) == NOP_EXPR
+ || TREE_CODE (incremented) == CONVERT_EXPR)
+ {
+ newexp = convert (TREE_TYPE (incremented), newexp);
+ incremented = TREE_OPERAND (incremented, 0);
+ }
- /* Output the branch with the opposite condition. Then try to invert
- what is generated. If more than one insn is a branch, or if the
- branch is not the last insn written, abort. If we can't invert
- the branch, emit make a true label, redirect this jump to that,
- emit a jump to the false label and define the true label. */
+ temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
+ return post ? op0 : temp;
+ }
- if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
- emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
- else
- abort ();
+ if (post)
+ {
+ /* We have a true reference to the value in OP0.
+ If there is an insn to add or subtract in this mode, queue it.
+ Queueing the increment insn avoids the register shuffling
+ that often results if we must increment now and first save
+ the old value for subsequent use. */
- /* Here we get the first insn that was just emitted. It used to be the
- case that, on some machines, emitting the branch would discard
- the previous compare insn and emit a replacement. This isn't
- done anymore, but abort if we see that PREV is deleted. */
+#if 0 /* Turned off to avoid making extra insn for indexed memref. */
+ op0 = stabilize (op0);
+#endif
- if (prev == 0)
- insn = get_insns ();
- else if (INSN_DELETED_P (prev))
- abort ();
- else
- insn = NEXT_INSN (prev);
+ icode = (int) this_optab->handlers[(int) mode].insn_code;
+ if (icode != (int) CODE_FOR_nothing
+ /* Make sure that OP0 is valid for operands 0 and 1
+ of the insn we want to queue. */
+ && (*insn_operand_predicate[icode][0]) (op0, mode)
+ && (*insn_operand_predicate[icode][1]) (op0, mode))
+ {
+ if (! (*insn_operand_predicate[icode][2]) (op1, mode))
+ op1 = force_reg (mode, op1);
- for (; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == JUMP_INSN)
- {
- if (branch)
- abort ();
- branch = insn;
- }
+ return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
+ }
+ if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
+ {
+ rtx addr = (general_operand (XEXP (op0, 0), mode)
+ ? force_reg (Pmode, XEXP (op0, 0))
+ : copy_to_reg (XEXP (op0, 0)));
+ rtx temp, result;
- if (branch != get_last_insn ())
- abort ();
+ op0 = change_address (op0, VOIDmode, addr);
+ temp = force_reg (GET_MODE (op0), op0);
+ if (! (*insn_operand_predicate[icode][2]) (op1, mode))
+ op1 = force_reg (mode, op1);
- JUMP_LABEL (branch) = if_false_label;
- if (! invert_jump (branch, if_false_label))
- {
- if_true_label = gen_label_rtx ();
- redirect_jump (branch, if_true_label);
- emit_jump (if_false_label);
- emit_label (if_true_label);
+ /* The increment queue is LIFO, thus we have to `queue'
+ the instructions in reverse order. */
+ enqueue_insn (op0, gen_move_insn (op0, temp));
+ result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
+ return result;
}
}
+
+ /* Preincrement, or we can't increment with one simple insn. */
+ if (post)
+ /* Save a copy of the value before inc or dec, to return it later. */
+ temp = value = copy_to_reg (op0);
+ else
+ /* Arrange to return the incremented value. */
+ /* Copy the rtx because expand_binop will protect from the queue,
+ and the results of that would be invalid for us to return
+ if our caller does emit_queue before using our result. */
+ temp = copy_rtx (value = op0);
+
+ /* Increment however we can. */
+ op1 = expand_binop (mode, this_optab, value, op1,
+ flag_check_memory_usage ? NULL_RTX : op0,
+ TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
+ /* Make sure the value is stored into OP0. */
+ if (op1 != op0)
+ emit_move_insn (op0, op1);
+
+ return temp;
}
\f
-/* Generate code for a comparison expression EXP
- (including code to compute the values to be compared)
- and set (CC0) according to the result.
- SIGNED_CODE should be the rtx operation for this comparison for
- signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
-
- We force a stack adjustment unless there are currently
- things pushed on the stack that aren't yet used. */
+/* Expand all function calls contained within EXP, innermost ones first.
+ But don't look within expressions that have sequence points.
+ For each CALL_EXPR, record the rtx for its value
+ in the CALL_EXPR_RTL field. */
-static rtx
-compare (exp, signed_code, unsigned_code)
- register tree exp;
- enum rtx_code signed_code, unsigned_code;
+static void
+preexpand_calls (exp)
+ tree exp;
{
- register rtx op0
- = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
- register rtx op1
- = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
- register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
- register enum machine_mode mode = TYPE_MODE (type);
- int unsignedp = TREE_UNSIGNED (type);
- enum rtx_code code = unsignedp ? unsigned_code : signed_code;
+ register int nops, i;
+ int type = TREE_CODE_CLASS (TREE_CODE (exp));
-#ifdef HAVE_canonicalize_funcptr_for_compare
- /* If function pointers need to be "canonicalized" before they can
- be reliably compared, then canonicalize them. */
- if (HAVE_canonicalize_funcptr_for_compare
- && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
- && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
- == FUNCTION_TYPE))
- {
- rtx new_op0 = gen_reg_rtx (mode);
+ if (! do_preexpand_calls)
+ return;
- emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
- op0 = new_op0;
- }
+ /* Only expressions and references can contain calls. */
- if (HAVE_canonicalize_funcptr_for_compare
- && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
- && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
- == FUNCTION_TYPE))
+ if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
+ return;
+
+ switch (TREE_CODE (exp))
{
- rtx new_op1 = gen_reg_rtx (mode);
+ case CALL_EXPR:
+ /* Do nothing if already expanded. */
+ if (CALL_EXPR_RTL (exp) != 0
+ /* Do nothing if the call returns a variable-sized object. */
+ || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
+ /* Do nothing to built-in functions. */
+ || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
+ && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
+ == FUNCTION_DECL)
+ && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
+ return;
- emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
- op1 = new_op1;
+ CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
+ return;
+
+ case COMPOUND_EXPR:
+ case COND_EXPR:
+ case TRUTH_ANDIF_EXPR:
+ case TRUTH_ORIF_EXPR:
+ /* If we find one of these, then we can be sure
+ the adjust will be done for it (since it makes jumps).
+ Do it now, so that if this is inside an argument
+ of a function, we don't get the stack adjustment
+ after some other args have already been pushed. */
+ do_pending_stack_adjust ();
+ return;
+
+ case BLOCK:
+ case RTL_EXPR:
+ case WITH_CLEANUP_EXPR:
+ case CLEANUP_POINT_EXPR:
+ case TRY_CATCH_EXPR:
+ return;
+
+ case SAVE_EXPR:
+ if (SAVE_EXPR_RTL (exp) != 0)
+ return;
+
+ default:
+ break;
}
-#endif
- return compare_from_rtx (op0, op1, code, unsignedp, mode,
- ((mode == BLKmode)
- ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
- TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
+ nops = tree_code_length[(int) TREE_CODE (exp)];
+ for (i = 0; i < nops; i++)
+ if (TREE_OPERAND (exp, i) != 0)
+ {
+ type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
+ if (type == 'e' || type == '<' || type == '1' || type == '2'
+ || type == 'r')
+ preexpand_calls (TREE_OPERAND (exp, i));
+ }
}
+\f
+/* At the start of a function, record that we have no previously-pushed
+ arguments waiting to be popped. */
-/* Like compare but expects the values to compare as two rtx's.
- The decision as to signed or unsigned comparison must be made by the caller.
+void
+init_pending_stack_adjust ()
+{
+ pending_stack_adjust = 0;
+}
- If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
- compared.
+/* When exiting from function, if safe, clear out any pending stack adjust
+ so the adjustment won't get done.
- If ALIGN is non-zero, it is the alignment of this type; if zero, the
- size of MODE should be used. */
+ Note, if the current function calls alloca, then it must have a
+ frame pointer regardless of the value of flag_omit_frame_pointer. */
-rtx
-compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
- register rtx op0, op1;
- enum rtx_code code;
- int unsignedp;
- enum machine_mode mode;
- rtx size;
- int align;
+void
+clear_pending_stack_adjust ()
{
- rtx tem;
-
- /* If one operand is constant, make it the second one. Only do this
- if the other operand is not constant as well. */
+#ifdef EXIT_IGNORE_STACK
+ if (optimize > 0
+ && (! flag_omit_frame_pointer || current_function_calls_alloca)
+ && EXIT_IGNORE_STACK
+ && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
+ && ! flag_inline_functions)
+ pending_stack_adjust = 0;
+#endif
+}
- if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
- || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
- {
- tem = op0;
- op0 = op1;
- op1 = tem;
- code = swap_condition (code);
- }
+/* Pop any previously-pushed arguments that have not been popped yet. */
- if (flag_force_mem)
+void
+do_pending_stack_adjust ()
+{
+ if (inhibit_defer_pop == 0)
{
- op0 = force_not_mem (op0);
- op1 = force_not_mem (op1);
+ if (pending_stack_adjust != 0)
+ adjust_stack (GEN_INT (pending_stack_adjust));
+ pending_stack_adjust = 0;
}
+}
+\f
+/* Expand conditional expressions. */
- do_pending_stack_adjust ();
-
- if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
- && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
- return tem;
+/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
+ LABEL is an rtx of code CODE_LABEL, in this function and all the
+ functions here. */
-#if 0
- /* There's no need to do this now that combine.c can eliminate lots of
- sign extensions. This can be less efficient in certain cases on other
- machines. */
+void
+jumpifnot (exp, label)
+ tree exp;
+ rtx label;
+{
+ do_jump (exp, label, NULL_RTX);
+}
- /* If this is a signed equality comparison, we can do it as an
- unsigned comparison since zero-extension is cheaper than sign
- extension and comparisons with zero are done as unsigned. This is
- the case even on machines that can do fast sign extension, since
- zero-extension is easier to combine with other operations than
- sign-extension is. If we are comparing against a constant, we must
- convert it to what it would look like unsigned. */
- if ((code == EQ || code == NE) && ! unsignedp
- && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
- {
- if (GET_CODE (op1) == CONST_INT
- && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
- op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
- unsignedp = 1;
- }
-#endif
-
- emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
+/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
- return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
+void
+jumpif (exp, label)
+ tree exp;
+ rtx label;
+{
+ do_jump (exp, NULL_RTX, label);
}
-\f
-/* Generate code to calculate EXP using a store-flag instruction
- and return an rtx for the result. EXP is either a comparison
- or a TRUTH_NOT_EXPR whose operand is a comparison.
-
- If TARGET is nonzero, store the result there if convenient.
- If ONLY_CHEAP is non-zero, only do this if it is likely to be very
- cheap.
+/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
+ the result is zero, or IF_TRUE_LABEL if the result is one.
+ Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
+ meaning fall through in that case.
- Return zero if there is no suitable set-flag instruction
- available on this machine.
+ do_jump always does any pending stack adjust except when it does not
+ actually perform a jump. An example where there is no jump
+ is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
- Once expand_expr has been called on the arguments of the comparison,
- we are committed to doing the store flag, since it is not safe to
- re-evaluate the expression. We emit the store-flag insn by calling
- emit_store_flag, but only expand the arguments if we have a reason
- to believe that emit_store_flag will be successful. If we think that
- it will, but it isn't, we have to simulate the store-flag with a
- set/jump/set sequence. */
+ This function is responsible for optimizing cases such as
+ &&, || and comparison operators in EXP. */
-static rtx
-do_store_flag (exp, target, mode, only_cheap)
+void
+do_jump (exp, if_false_label, if_true_label)
tree exp;
- rtx target;
- enum machine_mode mode;
- int only_cheap;
+ rtx if_false_label, if_true_label;
{
- enum rtx_code code;
- tree arg0, arg1, type;
- tree tem;
- enum machine_mode operand_mode;
- int invert = 0;
- int unsignedp;
- rtx op0, op1;
- enum insn_code icode;
- rtx subtarget = target;
- rtx result, label, pattern, jump_pat;
+ register enum tree_code code = TREE_CODE (exp);
+ /* Some cases need to create a label to jump to
+ in order to properly fall through.
+ These cases set DROP_THROUGH_LABEL nonzero. */
+ rtx drop_through_label = 0;
+ rtx temp;
+ rtx comparison = 0;
+ int i;
+ tree type;
+ enum machine_mode mode;
- /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
- result at the end. We can't simply invert the test since it would
- have already been inverted if it were valid. This case occurs for
- some floating-point comparisons. */
+ emit_queue ();
- if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
- invert = 1, exp = TREE_OPERAND (exp, 0);
+ switch (code)
+ {
+ case ERROR_MARK:
+ break;
- arg0 = TREE_OPERAND (exp, 0);
- arg1 = TREE_OPERAND (exp, 1);
- type = TREE_TYPE (arg0);
- operand_mode = TYPE_MODE (type);
- unsignedp = TREE_UNSIGNED (type);
+ case INTEGER_CST:
+ temp = integer_zerop (exp) ? if_false_label : if_true_label;
+ if (temp)
+ emit_jump (temp);
+ break;
- /* We won't bother with BLKmode store-flag operations because it would mean
- passing a lot of information to emit_store_flag. */
- if (operand_mode == BLKmode)
- return 0;
+#if 0
+ /* This is not true with #pragma weak */
+ case ADDR_EXPR:
+ /* The address of something can never be zero. */
+ if (if_true_label)
+ emit_jump (if_true_label);
+ break;
+#endif
- /* We won't bother with store-flag operations involving function pointers
- when function pointers must be canonicalized before comparisons. */
-#ifdef HAVE_canonicalize_funcptr_for_compare
- if (HAVE_canonicalize_funcptr_for_compare
- && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
- && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
- == FUNCTION_TYPE))
- || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
- && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
- == FUNCTION_TYPE))))
- return 0;
+ case NOP_EXPR:
+ if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
+ || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
+ || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
+ goto normal;
+ case CONVERT_EXPR:
+ /* If we are narrowing the operand, we have to do the compare in the
+ narrower mode. */
+ if ((TYPE_PRECISION (TREE_TYPE (exp))
+ < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ goto normal;
+ case NON_LVALUE_EXPR:
+ case REFERENCE_EXPR:
+ case ABS_EXPR:
+ case NEGATE_EXPR:
+ case LROTATE_EXPR:
+ case RROTATE_EXPR:
+ /* These cannot change zero->non-zero or vice versa. */
+ do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
+ break;
+
+#if 0
+ /* This is never less insns than evaluating the PLUS_EXPR followed by
+ a test and can be longer if the test is eliminated. */
+ case PLUS_EXPR:
+ /* Reduce to minus. */
+ exp = build (MINUS_EXPR, TREE_TYPE (exp),
+ TREE_OPERAND (exp, 0),
+ fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
+ TREE_OPERAND (exp, 1))));
+ /* Process as MINUS. */
#endif
- STRIP_NOPS (arg0);
- STRIP_NOPS (arg1);
+ case MINUS_EXPR:
+ /* Non-zero iff operands of minus differ. */
+ comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
+ TREE_OPERAND (exp, 0),
+ TREE_OPERAND (exp, 1)),
+ NE, NE);
+ break;
- /* Get the rtx comparison code to use. We know that EXP is a comparison
- operation of some type. Some comparisons against 1 and -1 can be
- converted to comparisons with zero. Do so here so that the tests
- below will be aware that we have a comparison with zero. These
- tests will not catch constants in the first operand, but constants
- are rarely passed as the first operand. */
+ case BIT_AND_EXPR:
+ /* If we are AND'ing with a small constant, do this comparison in the
+ smallest type that fits. If the machine doesn't have comparisons
+ that small, it will be converted back to the wider comparison.
+ This helps if we are testing the sign bit of a narrower object.
+ combine can't do this for us because it can't know whether a
+ ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
+
+ if (! SLOW_BYTE_ACCESS
+ && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
+ && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
+ && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
+ && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
+ && (type = type_for_mode (mode, 1)) != 0
+ && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
+ && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
+ != CODE_FOR_nothing))
+ {
+ do_jump (convert (type, exp), if_false_label, if_true_label);
+ break;
+ }
+ goto normal;
- switch (TREE_CODE (exp))
- {
- case EQ_EXPR:
- code = EQ;
- break;
- case NE_EXPR:
- code = NE;
- break;
- case LT_EXPR:
- if (integer_onep (arg1))
- arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
- else
- code = unsignedp ? LTU : LT;
+ case TRUTH_NOT_EXPR:
+ do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
break;
- case LE_EXPR:
- if (! unsignedp && integer_all_onesp (arg1))
- arg1 = integer_zero_node, code = LT;
- else
- code = unsignedp ? LEU : LE;
+
+ case TRUTH_ANDIF_EXPR:
+ if (if_false_label == 0)
+ if_false_label = drop_through_label = gen_label_rtx ();
+ do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
+ start_cleanup_deferral ();
+ do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
+ end_cleanup_deferral ();
break;
- case GT_EXPR:
- if (! unsignedp && integer_all_onesp (arg1))
- arg1 = integer_zero_node, code = GE;
- else
- code = unsignedp ? GTU : GT;
+
+ case TRUTH_ORIF_EXPR:
+ if (if_true_label == 0)
+ if_true_label = drop_through_label = gen_label_rtx ();
+ do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
+ start_cleanup_deferral ();
+ do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
+ end_cleanup_deferral ();
break;
- case GE_EXPR:
- if (integer_onep (arg1))
- arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
- else
- code = unsignedp ? GEU : GE;
+
+ case COMPOUND_EXPR:
+ push_temp_slots ();
+ expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
+ preserve_temp_slots (NULL_RTX);
+ free_temp_slots ();
+ pop_temp_slots ();
+ emit_queue ();
+ do_pending_stack_adjust ();
+ do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
break;
- default:
- abort ();
- }
- /* Put a constant second. */
- if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
- {
- tem = arg0; arg0 = arg1; arg1 = tem;
- code = swap_condition (code);
- }
+ case COMPONENT_REF:
+ case BIT_FIELD_REF:
+ case ARRAY_REF:
+ {
+ int bitsize, bitpos, unsignedp;
+ enum machine_mode mode;
+ tree type;
+ tree offset;
+ int volatilep = 0;
+ int alignment;
- /* If this is an equality or inequality test of a single bit, we can
- do this by shifting the bit being tested to the low-order bit and
- masking the result with the constant 1. If the condition was EQ,
- we xor it with 1. This does not require an scc insn and is faster
- than an scc insn even if we have it. */
+ /* Get description of this reference. We don't actually care
+ about the underlying object here. */
+ get_inner_reference (exp, &bitsize, &bitpos, &offset,
+ &mode, &unsignedp, &volatilep,
+ &alignment);
- if ((code == NE || code == EQ)
- && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
- && integer_pow2p (TREE_OPERAND (arg0, 1))
- && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
- {
- tree inner = TREE_OPERAND (arg0, 0);
- HOST_WIDE_INT tem;
- int bitnum;
- int ops_unsignedp;
+ type = type_for_size (bitsize, unsignedp);
+ if (! SLOW_BYTE_ACCESS
+ && type != 0 && bitsize >= 0
+ && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
+ && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
+ != CODE_FOR_nothing))
+ {
+ do_jump (convert (type, exp), if_false_label, if_true_label);
+ break;
+ }
+ goto normal;
+ }
- tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
- NULL_RTX, VOIDmode, 0));
- /* In this case, immed_double_const will sign extend the value to make
- it look the same on the host and target. We must remove the
- sign-extension before calling exact_log2, since exact_log2 will
- fail for negative values. */
- if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
- && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
- /* We don't use the obvious constant shift to generate the mask,
- because that generates compiler warnings when BITS_PER_WORD is
- greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
- code is unreachable in that case. */
- tem = tem & GET_MODE_MASK (word_mode);
- bitnum = exact_log2 (tem);
+ case COND_EXPR:
+ /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
+ if (integer_onep (TREE_OPERAND (exp, 1))
+ && integer_zerop (TREE_OPERAND (exp, 2)))
+ do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
- /* If INNER is a right shift of a constant and it plus BITNUM does
- not overflow, adjust BITNUM and INNER. */
+ else if (integer_zerop (TREE_OPERAND (exp, 1))
+ && integer_onep (TREE_OPERAND (exp, 2)))
+ do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
- if (TREE_CODE (inner) == RSHIFT_EXPR
- && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
- && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
- && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
- < TYPE_PRECISION (type)))
+ else
{
- bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
- inner = TREE_OPERAND (inner, 0);
- }
-
- /* If we are going to be able to omit the AND below, we must do our
- operations as unsigned. If we must use the AND, we have a choice.
- Normally unsigned is faster, but for some machines signed is. */
- ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
-#ifdef LOAD_EXTEND_OP
- : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
-#else
- : 1
-#endif
- );
-
- if (subtarget == 0 || GET_CODE (subtarget) != REG
- || GET_MODE (subtarget) != operand_mode
- || ! safe_from_p (subtarget, inner))
- subtarget = 0;
+ register rtx label1 = gen_label_rtx ();
+ drop_through_label = gen_label_rtx ();
- op0 = expand_expr (inner, subtarget, VOIDmode, 0);
+ do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
- if (bitnum != 0)
- op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
- size_int (bitnum), subtarget, ops_unsignedp);
+ start_cleanup_deferral ();
+ /* Now the THEN-expression. */
+ do_jump (TREE_OPERAND (exp, 1),
+ if_false_label ? if_false_label : drop_through_label,
+ if_true_label ? if_true_label : drop_through_label);
+ /* In case the do_jump just above never jumps. */
+ do_pending_stack_adjust ();
+ emit_label (label1);
- if (GET_MODE (op0) != mode)
- op0 = convert_to_mode (mode, op0, ops_unsignedp);
+ /* Now the ELSE-expression. */
+ do_jump (TREE_OPERAND (exp, 2),
+ if_false_label ? if_false_label : drop_through_label,
+ if_true_label ? if_true_label : drop_through_label);
+ end_cleanup_deferral ();
+ }
+ break;
- if ((code == EQ && ! invert) || (code == NE && invert))
- op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
- ops_unsignedp, OPTAB_LIB_WIDEN);
+ case EQ_EXPR:
+ {
+ tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
- /* Put the AND last so it can combine with more things. */
- if (bitnum != TYPE_PRECISION (type) - 1)
- op0 = expand_and (op0, const1_rtx, subtarget);
+ if (integer_zerop (TREE_OPERAND (exp, 1)))
+ do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
+ else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
+ || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
+ do_jump
+ (fold
+ (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
+ fold (build (EQ_EXPR, TREE_TYPE (exp),
+ fold (build1 (REALPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 0))),
+ fold (build1 (REALPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 1))))),
+ fold (build (EQ_EXPR, TREE_TYPE (exp),
+ fold (build1 (IMAGPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 0))),
+ fold (build1 (IMAGPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 1))))))),
+ if_false_label, if_true_label);
+ else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
+ && !can_compare_p (TYPE_MODE (inner_type)))
+ do_jump_by_parts_equality (exp, if_false_label, if_true_label);
+ else
+ comparison = compare (exp, EQ, EQ);
+ break;
+ }
- return op0;
- }
+ case NE_EXPR:
+ {
+ tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
- /* Now see if we are likely to be able to do this. Return if not. */
- if (! can_compare_p (operand_mode))
- return 0;
- icode = setcc_gen_code[(int) code];
- if (icode == CODE_FOR_nothing
- || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
- {
- /* We can only do this if it is one of the special cases that
- can be handled without an scc insn. */
- if ((code == LT && integer_zerop (arg1))
- || (! only_cheap && code == GE && integer_zerop (arg1)))
- ;
- else if (BRANCH_COST >= 0
- && ! only_cheap && (code == NE || code == EQ)
- && TREE_CODE (type) != REAL_TYPE
- && ((abs_optab->handlers[(int) operand_mode].insn_code
- != CODE_FOR_nothing)
- || (ffs_optab->handlers[(int) operand_mode].insn_code
- != CODE_FOR_nothing)))
- ;
- else
- return 0;
- }
-
- preexpand_calls (exp);
- if (subtarget == 0 || GET_CODE (subtarget) != REG
- || GET_MODE (subtarget) != operand_mode
- || ! safe_from_p (subtarget, arg1))
- subtarget = 0;
+ if (integer_zerop (TREE_OPERAND (exp, 1)))
+ do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
+ else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
+ || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
+ do_jump
+ (fold
+ (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
+ fold (build (NE_EXPR, TREE_TYPE (exp),
+ fold (build1 (REALPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 0))),
+ fold (build1 (REALPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 1))))),
+ fold (build (NE_EXPR, TREE_TYPE (exp),
+ fold (build1 (IMAGPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 0))),
+ fold (build1 (IMAGPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 1))))))),
+ if_false_label, if_true_label);
+ else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
+ && !can_compare_p (TYPE_MODE (inner_type)))
+ do_jump_by_parts_equality (exp, if_true_label, if_false_label);
+ else
+ comparison = compare (exp, NE, NE);
+ break;
+ }
- op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
- op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
+ case LT_EXPR:
+ if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ == MODE_INT)
+ && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
+ else
+ comparison = compare (exp, LT, LTU);
+ break;
- if (target == 0)
- target = gen_reg_rtx (mode);
+ case LE_EXPR:
+ if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ == MODE_INT)
+ && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
+ else
+ comparison = compare (exp, LE, LEU);
+ break;
- /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
- because, if the emit_store_flag does anything it will succeed and
- OP0 and OP1 will not be used subsequently. */
+ case GT_EXPR:
+ if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ == MODE_INT)
+ && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
+ else
+ comparison = compare (exp, GT, GTU);
+ break;
- result = emit_store_flag (target, code,
- queued_subexp_p (op0) ? copy_rtx (op0) : op0,
- queued_subexp_p (op1) ? copy_rtx (op1) : op1,
- operand_mode, unsignedp, 1);
+ case GE_EXPR:
+ if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ == MODE_INT)
+ && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
+ else
+ comparison = compare (exp, GE, GEU);
+ break;
- if (result)
- {
- if (invert)
- result = expand_binop (mode, xor_optab, result, const1_rtx,
- result, 0, OPTAB_LIB_WIDEN);
- return result;
+ default:
+ normal:
+ temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
+#if 0
+ /* This is not needed any more and causes poor code since it causes
+ comparisons and tests from non-SI objects to have different code
+ sequences. */
+ /* Copy to register to avoid generating bad insns by cse
+ from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
+ if (!cse_not_expected && GET_CODE (temp) == MEM)
+ temp = copy_to_reg (temp);
+#endif
+ do_pending_stack_adjust ();
+ if (GET_CODE (temp) == CONST_INT)
+ comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
+ else if (GET_CODE (temp) == LABEL_REF)
+ comparison = const_true_rtx;
+ else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
+ && !can_compare_p (GET_MODE (temp)))
+ /* Note swapping the labels gives us not-equal. */
+ do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
+ else if (GET_MODE (temp) != VOIDmode)
+ comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
+ NE, TREE_UNSIGNED (TREE_TYPE (exp)),
+ GET_MODE (temp), NULL_RTX, 0);
+ else
+ abort ();
}
- /* If this failed, we have to do this with set/compare/jump/set code. */
- if (GET_CODE (target) != REG
- || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
- target = gen_reg_rtx (GET_MODE (target));
-
- emit_move_insn (target, invert ? const0_rtx : const1_rtx);
- result = compare_from_rtx (op0, op1, code, unsignedp,
- operand_mode, NULL_RTX, 0);
- if (GET_CODE (result) == CONST_INT)
- return (((result == const0_rtx && ! invert)
- || (result != const0_rtx && invert))
- ? const0_rtx : const1_rtx);
+ /* Do any postincrements in the expression that was tested. */
+ emit_queue ();
- label = gen_label_rtx ();
- if (bcc_gen_fctn[(int) code] == 0)
- abort ();
+ /* If COMPARISON is nonzero here, it is an rtx that can be substituted
+ straight into a conditional jump instruction as the jump condition.
+ Otherwise, all the work has been done already. */
- emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
- emit_move_insn (target, invert ? const1_rtx : const0_rtx);
- emit_label (label);
+ if (comparison == const_true_rtx)
+ {
+ if (if_true_label)
+ emit_jump (if_true_label);
+ }
+ else if (comparison == const0_rtx)
+ {
+ if (if_false_label)
+ emit_jump (if_false_label);
+ }
+ else if (comparison)
+ do_jump_for_compare (comparison, if_false_label, if_true_label);
- return target;
+ if (drop_through_label)
+ {
+ /* If do_jump produces code that might be jumped around,
+ do any stack adjusts from that code, before the place
+ where control merges in. */
+ do_pending_stack_adjust ();
+ emit_label (drop_through_label);
+ }
}
\f
-/* Generate a tablejump instruction (used for switch statements). */
-
-#ifdef HAVE_tablejump
-
-/* INDEX is the value being switched on, with the lowest value
- in the table already subtracted.
- MODE is its expected mode (needed if INDEX is constant).
- RANGE is the length of the jump table.
- TABLE_LABEL is a CODE_LABEL rtx for the table itself.
-
- DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
- index value is out of range. */
+/* Given a comparison expression EXP for values too wide to be compared
+ with one insn, test the comparison and jump to the appropriate label.
+ The code of EXP is ignored; we always test GT if SWAP is 0,
+ and LT if SWAP is 1. */
-void
-do_tablejump (index, mode, range, table_label, default_label)
- rtx index, range, table_label, default_label;
- enum machine_mode mode;
+static void
+do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
+ tree exp;
+ int swap;
+ rtx if_false_label, if_true_label;
{
- register rtx temp, vector;
-
- /* Do an unsigned comparison (in the proper mode) between the index
- expression and the value which represents the length of the range.
- Since we just finished subtracting the lower bound of the range
- from the index expression, this comparison allows us to simultaneously
- check that the original index expression value is both greater than
- or equal to the minimum value of the range and less than or equal to
- the maximum value of the range. */
-
- emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
- emit_jump_insn (gen_bgtu (default_label));
-
- /* If index is in range, it must fit in Pmode.
- Convert to Pmode so we can index with it. */
- if (mode != Pmode)
- index = convert_to_mode (Pmode, index, 1);
-
- /* Don't let a MEM slip thru, because then INDEX that comes
- out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
- and break_out_memory_refs will go to work on it and mess it up. */
-#ifdef PIC_CASE_VECTOR_ADDRESS
- if (flag_pic && GET_CODE (index) != REG)
- index = copy_to_mode_reg (Pmode, index);
-#endif
-
- /* If flag_force_addr were to affect this address
- it could interfere with the tricky assumptions made
- about addresses that contain label-refs,
- which may be valid only very near the tablejump itself. */
- /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
- GET_MODE_SIZE, because this indicates how large insns are. The other
- uses should all be Pmode, because they are addresses. This code
- could fail if addresses and insns are not the same size. */
- index = gen_rtx (PLUS, Pmode,
- gen_rtx (MULT, Pmode, index,
- GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
- gen_rtx (LABEL_REF, Pmode, table_label));
-#ifdef PIC_CASE_VECTOR_ADDRESS
- if (flag_pic)
- index = PIC_CASE_VECTOR_ADDRESS (index);
- else
-#endif
- index = memory_address_noforce (CASE_VECTOR_MODE, index);
- temp = gen_reg_rtx (CASE_VECTOR_MODE);
- vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
- RTX_UNCHANGING_P (vector) = 1;
- convert_move (temp, vector, 0);
-
- emit_jump_insn (gen_tablejump (temp, table_label));
-
-#ifndef CASE_VECTOR_PC_RELATIVE
- /* If we are generating PIC code or if the table is PC-relative, the
- table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
- if (! flag_pic)
- emit_barrier ();
-#endif
-}
-
-#endif /* HAVE_tablejump */
-
-
-/* Emit a suitable bytecode to load a value from memory, assuming a pointer
- to that value is on the top of the stack. The resulting type is TYPE, and
- the source declaration is DECL. */
+ rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
+ rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
+ enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
+ rtx drop_through_label = 0;
+ int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ int i;
-void
-bc_load_memory (type, decl)
- tree type, decl;
-{
- enum bytecode_opcode opcode;
-
-
- /* Bit fields are special. We only know about signed and
- unsigned ints, and enums. The latter are treated as
- signed integers. */
-
- if (DECL_BIT_FIELD (decl))
- if (TREE_CODE (type) == ENUMERAL_TYPE
- || TREE_CODE (type) == INTEGER_TYPE)
- opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
- else
- abort ();
- else
- /* See corresponding comment in bc_store_memory. */
- if (TYPE_MODE (type) == BLKmode
- || TYPE_MODE (type) == VOIDmode)
- return;
- else
- opcode = mode_to_load_map [(int) TYPE_MODE (type)];
+ if (! if_true_label || ! if_false_label)
+ drop_through_label = gen_label_rtx ();
+ if (! if_true_label)
+ if_true_label = drop_through_label;
+ if (! if_false_label)
+ if_false_label = drop_through_label;
- if (opcode == neverneverland)
- abort ();
-
- bc_emit_bytecode (opcode);
-
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
-#endif
-}
+ /* Compare a word at a time, high order first. */
+ for (i = 0; i < nwords; i++)
+ {
+ rtx comp;
+ rtx op0_word, op1_word;
+ if (WORDS_BIG_ENDIAN)
+ {
+ op0_word = operand_subword_force (op0, i, mode);
+ op1_word = operand_subword_force (op1, i, mode);
+ }
+ else
+ {
+ op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
+ op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
+ }
-/* Store the contents of the second stack slot to the address in the
- top stack slot. DECL is the declaration of the destination and is used
- to determine whether we're dealing with a bitfield. */
+ /* All but high-order word must be compared as unsigned. */
+ comp = compare_from_rtx (op0_word, op1_word,
+ (unsignedp || i > 0) ? GTU : GT,
+ unsignedp, word_mode, NULL_RTX, 0);
+ if (comp == const_true_rtx)
+ emit_jump (if_true_label);
+ else if (comp != const0_rtx)
+ do_jump_for_compare (comp, NULL_RTX, if_true_label);
-void
-bc_store_memory (type, decl)
- tree type, decl;
-{
- enum bytecode_opcode opcode;
-
-
- if (DECL_BIT_FIELD (decl))
- {
- if (TREE_CODE (type) == ENUMERAL_TYPE
- || TREE_CODE (type) == INTEGER_TYPE)
- opcode = sstoreBI;
- else
- abort ();
+ /* Consider lower words only if these are equal. */
+ comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
+ NULL_RTX, 0);
+ if (comp == const_true_rtx)
+ emit_jump (if_false_label);
+ else if (comp != const0_rtx)
+ do_jump_for_compare (comp, NULL_RTX, if_false_label);
}
- else
- if (TYPE_MODE (type) == BLKmode)
- {
- /* Copy structure. This expands to a block copy instruction, storeBLK.
- In addition to the arguments expected by the other store instructions,
- it also expects a type size (SImode) on top of the stack, which is the
- structure size in size units (usually bytes). The two first arguments
- are already on the stack; so we just put the size on level 1. For some
- other languages, the size may be variable, this is why we don't encode
- it as a storeBLK literal, but rather treat it as a full-fledged expression. */
-
- bc_expand_expr (TYPE_SIZE (type));
- opcode = storeBLK;
- }
- else
- opcode = mode_to_store_map [(int) TYPE_MODE (type)];
-
- if (opcode == neverneverland)
- abort ();
- bc_emit_bytecode (opcode);
-
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
-#endif
+ if (if_false_label)
+ emit_jump (if_false_label);
+ if (drop_through_label)
+ emit_label (drop_through_label);
}
+/* Compare OP0 with OP1, word at a time, in mode MODE.
+ UNSIGNEDP says to do unsigned comparison.
+ Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
-/* Allocate local stack space sufficient to hold a value of the given
- SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
- integral power of 2. A special case is locals of type VOID, which
- have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
- remapped into the corresponding attribute of SI. */
-
-rtx
-bc_allocate_local (size, alignment)
- int size, alignment;
+void
+do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
+ enum machine_mode mode;
+ int unsignedp;
+ rtx op0, op1;
+ rtx if_false_label, if_true_label;
{
- rtx retval;
- int byte_alignment;
+ int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
+ rtx drop_through_label = 0;
+ int i;
- if (size < 0)
- abort ();
+ if (! if_true_label || ! if_false_label)
+ drop_through_label = gen_label_rtx ();
+ if (! if_true_label)
+ if_true_label = drop_through_label;
+ if (! if_false_label)
+ if_false_label = drop_through_label;
- /* Normalize size and alignment */
- if (!size)
- size = UNITS_PER_WORD;
+ /* Compare a word at a time, high order first. */
+ for (i = 0; i < nwords; i++)
+ {
+ rtx comp;
+ rtx op0_word, op1_word;
- if (alignment < BITS_PER_UNIT)
- byte_alignment = 1 << (INT_ALIGN - 1);
- else
- /* Align */
- byte_alignment = alignment / BITS_PER_UNIT;
+ if (WORDS_BIG_ENDIAN)
+ {
+ op0_word = operand_subword_force (op0, i, mode);
+ op1_word = operand_subword_force (op1, i, mode);
+ }
+ else
+ {
+ op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
+ op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
+ }
- if (local_vars_size & (byte_alignment - 1))
- local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
+ /* All but high-order word must be compared as unsigned. */
+ comp = compare_from_rtx (op0_word, op1_word,
+ (unsignedp || i > 0) ? GTU : GT,
+ unsignedp, word_mode, NULL_RTX, 0);
+ if (comp == const_true_rtx)
+ emit_jump (if_true_label);
+ else if (comp != const0_rtx)
+ do_jump_for_compare (comp, NULL_RTX, if_true_label);
- retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
- local_vars_size += size;
+ /* Consider lower words only if these are equal. */
+ comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
+ NULL_RTX, 0);
+ if (comp == const_true_rtx)
+ emit_jump (if_false_label);
+ else if (comp != const0_rtx)
+ do_jump_for_compare (comp, NULL_RTX, if_false_label);
+ }
- return retval;
+ if (if_false_label)
+ emit_jump (if_false_label);
+ if (drop_through_label)
+ emit_label (drop_through_label);
}
+/* Given an EQ_EXPR expression EXP for values too wide to be compared
+ with one insn, test the comparison and jump to the appropriate label. */
-/* Allocate variable-sized local array. Variable-sized arrays are
- actually pointers to the address in memory where they are stored. */
-
-rtx
-bc_allocate_variable_array (size)
- tree size;
+static void
+do_jump_by_parts_equality (exp, if_false_label, if_true_label)
+ tree exp;
+ rtx if_false_label, if_true_label;
{
- rtx retval;
- const int ptralign = (1 << (PTR_ALIGN - 1));
+ rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
+ rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
+ enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
+ int i;
+ rtx drop_through_label = 0;
- /* Align pointer */
- if (local_vars_size & ptralign)
- local_vars_size += ptralign - (local_vars_size & ptralign);
+ if (! if_false_label)
+ drop_through_label = if_false_label = gen_label_rtx ();
- /* Note down local space needed: pointer to block; also return
- dummy rtx */
+ for (i = 0; i < nwords; i++)
+ {
+ rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
+ operand_subword_force (op1, i, mode),
+ EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
+ word_mode, NULL_RTX, 0);
+ if (comp == const_true_rtx)
+ emit_jump (if_false_label);
+ else if (comp != const0_rtx)
+ do_jump_for_compare (comp, if_false_label, NULL_RTX);
+ }
- retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
- local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
- return retval;
+ if (if_true_label)
+ emit_jump (if_true_label);
+ if (drop_through_label)
+ emit_label (drop_through_label);
}
+\f
+/* Jump according to whether OP0 is 0.
+ We assume that OP0 has an integer mode that is too wide
+ for the available compare insns. */
-
-/* Push the machine address for the given external variable offset. */
-
-void
-bc_load_externaddr (externaddr)
- rtx externaddr;
+static void
+do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
+ rtx op0;
+ rtx if_false_label, if_true_label;
{
- bc_emit_bytecode (constP);
- bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
- BYTECODE_BC_LABEL (externaddr)->offset);
+ int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
+ rtx part;
+ int i;
+ rtx drop_through_label = 0;
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
-#endif
-}
+ /* The fastest way of doing this comparison on almost any machine is to
+ "or" all the words and compare the result. If all have to be loaded
+ from memory and this is a very wide item, it's possible this may
+ be slower, but that's highly unlikely. */
+ part = gen_reg_rtx (word_mode);
+ emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
+ for (i = 1; i < nwords && part != 0; i++)
+ part = expand_binop (word_mode, ior_optab, part,
+ operand_subword_force (op0, i, GET_MODE (op0)),
+ part, 1, OPTAB_WIDEN);
-/* Like above, but expects an IDENTIFIER. */
+ if (part != 0)
+ {
+ rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
+ NULL_RTX, 0);
-void
-bc_load_externaddr_id (id, offset)
- tree id;
- int offset;
-{
- if (!IDENTIFIER_POINTER (id))
- abort ();
+ if (comp == const_true_rtx)
+ emit_jump (if_false_label);
+ else if (comp == const0_rtx)
+ emit_jump (if_true_label);
+ else
+ do_jump_for_compare (comp, if_false_label, if_true_label);
- bc_emit_bytecode (constP);
- bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
+ return;
+ }
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
-#endif
-}
+ /* If we couldn't do the "or" simply, do this with a series of compares. */
+ if (! if_false_label)
+ drop_through_label = if_false_label = gen_label_rtx ();
+ for (i = 0; i < nwords; i++)
+ {
+ rtx comp = compare_from_rtx (operand_subword_force (op0, i,
+ GET_MODE (op0)),
+ const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
+ if (comp == const_true_rtx)
+ emit_jump (if_false_label);
+ else if (comp != const0_rtx)
+ do_jump_for_compare (comp, if_false_label, NULL_RTX);
+ }
-/* Push the machine address for the given local variable offset. */
+ if (if_true_label)
+ emit_jump (if_true_label);
-void
-bc_load_localaddr (localaddr)
- rtx localaddr;
-{
- bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
+ if (drop_through_label)
+ emit_label (drop_through_label);
}
+/* Given a comparison expression in rtl form, output conditional branches to
+ IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
-/* Push the machine address for the given parameter offset.
- NOTE: offset is in bits. */
-
-void
-bc_load_parmaddr (parmaddr)
- rtx parmaddr;
+static void
+do_jump_for_compare (comparison, if_false_label, if_true_label)
+ rtx comparison, if_false_label, if_true_label;
{
- bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
- / BITS_PER_UNIT));
-}
-
+ if (if_true_label)
+ {
+ if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
+ emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
+ else
+ abort ();
-/* Convert a[i] into *(a + i). */
+ if (if_false_label)
+ emit_jump (if_false_label);
+ }
+ else if (if_false_label)
+ {
+ rtx insn;
+ rtx prev = get_last_insn ();
+ rtx branch = 0;
-tree
-bc_canonicalize_array_ref (exp)
- tree exp;
-{
- tree type = TREE_TYPE (exp);
- tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
- TREE_OPERAND (exp, 0));
- tree index = TREE_OPERAND (exp, 1);
-
-
- /* Convert the integer argument to a type the same size as a pointer
- so the multiply won't overflow spuriously. */
-
- if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
- index = convert (type_for_size (POINTER_SIZE, 0), index);
-
- /* The array address isn't volatile even if the array is.
- (Of course this isn't terribly relevant since the bytecode
- translator treats nearly everything as volatile anyway.) */
- TREE_THIS_VOLATILE (array_adr) = 0;
-
- return build1 (INDIRECT_REF, type,
- fold (build (PLUS_EXPR,
- TYPE_POINTER_TO (type),
- array_adr,
- fold (build (MULT_EXPR,
- TYPE_POINTER_TO (type),
- index,
- size_in_bytes (type))))));
-}
+ /* Output the branch with the opposite condition. Then try to invert
+ what is generated. If more than one insn is a branch, or if the
+ branch is not the last insn written, abort. If we can't invert
+ the branch, make a true label, redirect this jump to that,
+ emit a jump to the false label and define the true label. */
+ if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
+ emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
+ else
+ abort ();
-/* Load the address of the component referenced by the given
- COMPONENT_REF expression.
+ /* Here we get the first insn that was just emitted. It used to be the
+ case that, on some machines, emitting the branch would discard
+ the previous compare insn and emit a replacement. This isn't
+ done anymore, but abort if we see that PREV is deleted. */
- Returns innermost lvalue. */
+ if (prev == 0)
+ insn = get_insns ();
+ else if (INSN_DELETED_P (prev))
+ abort ();
+ else
+ insn = NEXT_INSN (prev);
-tree
-bc_expand_component_address (exp)
- tree exp;
-{
- tree tem, chain;
- enum machine_mode mode;
- int bitpos = 0;
- HOST_WIDE_INT SIval;
+ for (; insn; insn = NEXT_INSN (insn))
+ if (GET_CODE (insn) == JUMP_INSN)
+ {
+ if (branch)
+ abort ();
+ branch = insn;
+ }
+ if (branch != get_last_insn ())
+ abort ();
- tem = TREE_OPERAND (exp, 1);
- mode = DECL_MODE (tem);
+ JUMP_LABEL (branch) = if_false_label;
+ if (! invert_jump (branch, if_false_label))
+ {
+ if_true_label = gen_label_rtx ();
+ redirect_jump (branch, if_true_label);
+ emit_jump (if_false_label);
+ emit_label (if_true_label);
+ }
+ }
+}
+\f
+/* Generate code for a comparison expression EXP
+ (including code to compute the values to be compared)
+ and set (CC0) according to the result.
+ SIGNED_CODE should be the rtx operation for this comparison for
+ signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
+ We force a stack adjustment unless there are currently
+ things pushed on the stack that aren't yet used. */
- /* Compute cumulative bit offset for nested component refs
- and array refs, and find the ultimate containing object. */
+static rtx
+compare (exp, signed_code, unsigned_code)
+ register tree exp;
+ enum rtx_code signed_code, unsigned_code;
+{
+ register rtx op0
+ = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
+ register rtx op1
+ = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
+ register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
+ register enum machine_mode mode = TYPE_MODE (type);
+ int unsignedp = TREE_UNSIGNED (type);
+ enum rtx_code code = unsignedp ? unsigned_code : signed_code;
- for (tem = exp;; tem = TREE_OPERAND (tem, 0))
+#ifdef HAVE_canonicalize_funcptr_for_compare
+ /* If function pointers need to be "canonicalized" before they can
+ be reliably compared, then canonicalize them. */
+ if (HAVE_canonicalize_funcptr_for_compare
+ && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
+ && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ == FUNCTION_TYPE))
{
- if (TREE_CODE (tem) == COMPONENT_REF)
- bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
- else
- if (TREE_CODE (tem) == ARRAY_REF
- && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
- && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
+ rtx new_op0 = gen_reg_rtx (mode);
- bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
- * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
- /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
- else
- break;
+ emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
+ op0 = new_op0;
}
- bc_expand_expr (tem);
-
-
- /* For bitfields also push their offset and size */
- if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
- bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
- else
- if (SIval = bitpos / BITS_PER_UNIT)
- bc_emit_instruction (addconstPSI, SIval);
-
- return (TREE_OPERAND (exp, 1));
-}
-
+ if (HAVE_canonicalize_funcptr_for_compare
+ && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
+ && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
+ == FUNCTION_TYPE))
+ {
+ rtx new_op1 = gen_reg_rtx (mode);
-/* Emit code to push two SI constants */
+ emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
+ op1 = new_op1;
+ }
+#endif
-void
-bc_push_offset_and_size (offset, size)
- HOST_WIDE_INT offset, size;
-{
- bc_emit_instruction (constSI, offset);
- bc_emit_instruction (constSI, size);
+ return compare_from_rtx (op0, op1, code, unsignedp, mode,
+ ((mode == BLKmode)
+ ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
+ TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
+/* Like compare but expects the values to compare as two rtx's.
+ The decision as to signed or unsigned comparison must be made by the caller.
-/* Emit byte code to push the address of the given lvalue expression to
- the stack. If it's a bit field, we also push offset and size info.
+ If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
+ compared.
- Returns innermost component, which allows us to determine not only
- its type, but also whether it's a bitfield. */
+ If ALIGN is non-zero, it is the alignment of this type; if zero, the
+ size of MODE should be used. */
-tree
-bc_expand_address (exp)
- tree exp;
+rtx
+compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
+ register rtx op0, op1;
+ enum rtx_code code;
+ int unsignedp;
+ enum machine_mode mode;
+ rtx size;
+ int align;
{
- /* Safeguard */
- if (!exp || TREE_CODE (exp) == ERROR_MARK)
- return (exp);
+ rtx tem;
+ /* If one operand is constant, make it the second one. Only do this
+ if the other operand is not constant as well. */
- switch (TREE_CODE (exp))
+ if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
+ || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
{
- case ARRAY_REF:
-
- return (bc_expand_address (bc_canonicalize_array_ref (exp)));
-
- case COMPONENT_REF:
-
- return (bc_expand_component_address (exp));
+ tem = op0;
+ op0 = op1;
+ op1 = tem;
+ code = swap_condition (code);
+ }
- case INDIRECT_REF:
+ if (flag_force_mem)
+ {
+ op0 = force_not_mem (op0);
+ op1 = force_not_mem (op1);
+ }
- bc_expand_expr (TREE_OPERAND (exp, 0));
+ do_pending_stack_adjust ();
- /* For variable-sized types: retrieve pointer. Sometimes the
- TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
- also make sure we have an operand, just in case... */
+ if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
+ && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
+ return tem;
- if (TREE_OPERAND (exp, 0)
- && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
- && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
- bc_emit_instruction (loadP);
+#if 0
+ /* There's no need to do this now that combine.c can eliminate lots of
+ sign extensions. This can be less efficient in certain cases on other
+ machines. */
- /* If packed, also return offset and size */
- if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
+ /* If this is a signed equality comparison, we can do it as an
+ unsigned comparison since zero-extension is cheaper than sign
+ extension and comparisons with zero are done as unsigned. This is
+ the case even on machines that can do fast sign extension, since
+ zero-extension is easier to combine with other operations than
+ sign-extension is. If we are comparing against a constant, we must
+ convert it to what it would look like unsigned. */
+ if ((code == EQ || code == NE) && ! unsignedp
+ && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
+ {
+ if (GET_CODE (op1) == CONST_INT
+ && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
+ op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
+ unsignedp = 1;
+ }
+#endif
- bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
- TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
-
- return (TREE_OPERAND (exp, 0));
+ emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
- case FUNCTION_DECL:
+ return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
+}
+\f
+/* Generate code to calculate EXP using a store-flag instruction
+ and return an rtx for the result. EXP is either a comparison
+ or a TRUTH_NOT_EXPR whose operand is a comparison.
- bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
- BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
- break;
+ If TARGET is nonzero, store the result there if convenient.
- case PARM_DECL:
+ If ONLY_CHEAP is non-zero, only do this if it is likely to be very
+ cheap.
- bc_load_parmaddr (DECL_RTL (exp));
+ Return zero if there is no suitable set-flag instruction
+ available on this machine.
- /* For variable-sized types: retrieve pointer */
- if (TYPE_SIZE (TREE_TYPE (exp))
- && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
- bc_emit_instruction (loadP);
+ Once expand_expr has been called on the arguments of the comparison,
+ we are committed to doing the store flag, since it is not safe to
+ re-evaluate the expression. We emit the store-flag insn by calling
+ emit_store_flag, but only expand the arguments if we have a reason
+ to believe that emit_store_flag will be successful. If we think that
+ it will, but it isn't, we have to simulate the store-flag with a
+ set/jump/set sequence. */
- /* If packed, also return offset and size */
- if (DECL_BIT_FIELD (exp))
- bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
- TREE_INT_CST_LOW (DECL_SIZE (exp)));
+static rtx
+do_store_flag (exp, target, mode, only_cheap)
+ tree exp;
+ rtx target;
+ enum machine_mode mode;
+ int only_cheap;
+{
+ enum rtx_code code;
+ tree arg0, arg1, type;
+ tree tem;
+ enum machine_mode operand_mode;
+ int invert = 0;
+ int unsignedp;
+ rtx op0, op1;
+ enum insn_code icode;
+ rtx subtarget = target;
+ rtx result, label;
- break;
+ /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
+ result at the end. We can't simply invert the test since it would
+ have already been inverted if it were valid. This case occurs for
+ some floating-point comparisons. */
- case RESULT_DECL:
+ if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
+ invert = 1, exp = TREE_OPERAND (exp, 0);
- bc_emit_instruction (returnP);
- break;
+ arg0 = TREE_OPERAND (exp, 0);
+ arg1 = TREE_OPERAND (exp, 1);
+ type = TREE_TYPE (arg0);
+ operand_mode = TYPE_MODE (type);
+ unsignedp = TREE_UNSIGNED (type);
- case VAR_DECL:
+ /* We won't bother with BLKmode store-flag operations because it would mean
+ passing a lot of information to emit_store_flag. */
+ if (operand_mode == BLKmode)
+ return 0;
-#if 0
- if (BYTECODE_LABEL (DECL_RTL (exp)))
- bc_load_externaddr (DECL_RTL (exp));
+ /* We won't bother with store-flag operations involving function pointers
+ when function pointers must be canonicalized before comparisons. */
+#ifdef HAVE_canonicalize_funcptr_for_compare
+ if (HAVE_canonicalize_funcptr_for_compare
+ && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
+ && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ == FUNCTION_TYPE))
+ || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
+ && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
+ == FUNCTION_TYPE))))
+ return 0;
#endif
- if (DECL_EXTERNAL (exp))
- bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
- (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
- else
- bc_load_localaddr (DECL_RTL (exp));
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
- /* For variable-sized types: retrieve pointer */
- if (TYPE_SIZE (TREE_TYPE (exp))
- && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
- bc_emit_instruction (loadP);
+ /* Get the rtx comparison code to use. We know that EXP is a comparison
+ operation of some type. Some comparisons against 1 and -1 can be
+ converted to comparisons with zero. Do so here so that the tests
+ below will be aware that we have a comparison with zero. These
+ tests will not catch constants in the first operand, but constants
+ are rarely passed as the first operand. */
- /* If packed, also return offset and size */
- if (DECL_BIT_FIELD (exp))
- bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
- TREE_INT_CST_LOW (DECL_SIZE (exp)));
-
+ switch (TREE_CODE (exp))
+ {
+ case EQ_EXPR:
+ code = EQ;
break;
-
- case STRING_CST:
- {
- rtx r;
-
- bc_emit_bytecode (constP);
- r = output_constant_def (exp);
- bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
-
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
-#endif
- }
+ case NE_EXPR:
+ code = NE;
break;
-
- default:
-
- abort();
+ case LT_EXPR:
+ if (integer_onep (arg1))
+ arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
+ else
+ code = unsignedp ? LTU : LT;
break;
- }
-
- /* Most lvalues don't have components. */
- return (exp);
-}
-
-
-/* Emit a type code to be used by the runtime support in handling
- parameter passing. The type code consists of the machine mode
- plus the minimal alignment shifted left 8 bits. */
-
-tree
-bc_runtime_type_code (type)
- tree type;
-{
- int val;
-
- switch (TREE_CODE (type))
- {
- case VOID_TYPE:
- case INTEGER_TYPE:
- case REAL_TYPE:
- case COMPLEX_TYPE:
- case ENUMERAL_TYPE:
- case POINTER_TYPE:
- case RECORD_TYPE:
-
- val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
+ case LE_EXPR:
+ if (! unsignedp && integer_all_onesp (arg1))
+ arg1 = integer_zero_node, code = LT;
+ else
+ code = unsignedp ? LEU : LE;
break;
-
- case ERROR_MARK:
-
- val = 0;
+ case GT_EXPR:
+ if (! unsignedp && integer_all_onesp (arg1))
+ arg1 = integer_zero_node, code = GE;
+ else
+ code = unsignedp ? GTU : GT;
+ break;
+ case GE_EXPR:
+ if (integer_onep (arg1))
+ arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
+ else
+ code = unsignedp ? GEU : GE;
break;
-
default:
-
abort ();
}
- return build_int_2 (val, 0);
-}
-
-/* Generate constructor label */
+ /* Put a constant second. */
+ if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
+ {
+ tem = arg0; arg0 = arg1; arg1 = tem;
+ code = swap_condition (code);
+ }
+
+ /* If this is an equality or inequality test of a single bit, we can
+ do this by shifting the bit being tested to the low-order bit and
+ masking the result with the constant 1. If the condition was EQ,
+ we xor it with 1. This does not require an scc insn and is faster
+ than an scc insn even if we have it. */
-char *
-bc_gen_constr_label ()
-{
- static int label_counter;
- static char label[20];
+ if ((code == NE || code == EQ)
+ && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
+ && integer_pow2p (TREE_OPERAND (arg0, 1)))
+ {
+ tree inner = TREE_OPERAND (arg0, 0);
+ int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
+ int ops_unsignedp;
- sprintf (label, "*LR%d", label_counter++);
+ /* If INNER is a right shift of a constant and it plus BITNUM does
+ not overflow, adjust BITNUM and INNER. */
- return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
-}
+ if (TREE_CODE (inner) == RSHIFT_EXPR
+ && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
+ && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
+ && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
+ < TYPE_PRECISION (type)))
+ {
+ bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
+ inner = TREE_OPERAND (inner, 0);
+ }
+ /* If we are going to be able to omit the AND below, we must do our
+ operations as unsigned. If we must use the AND, we have a choice.
+ Normally unsigned is faster, but for some machines signed is. */
+ ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
+#ifdef LOAD_EXTEND_OP
+ : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
+#else
+ : 1
+#endif
+ );
-/* Evaluate constructor CONSTR and return pointer to it on level one. We
- expand the constructor data as static data, and push a pointer to it.
- The pointer is put in the pointer table and is retrieved by a constP
- bytecode instruction. We then loop and store each constructor member in
- the corresponding component. Finally, we return the original pointer on
- the stack. */
+ if (subtarget == 0 || GET_CODE (subtarget) != REG
+ || GET_MODE (subtarget) != operand_mode
+ || ! safe_from_p (subtarget, inner))
+ subtarget = 0;
-void
-bc_expand_constructor (constr)
- tree constr;
-{
- char *l;
- HOST_WIDE_INT ptroffs;
- rtx constr_rtx;
+ op0 = expand_expr (inner, subtarget, VOIDmode, 0);
-
- /* Literal constructors are handled as constants, whereas
- non-literals are evaluated and stored element by element
- into the data segment. */
-
- /* Allocate space in proper segment and push pointer to space on stack.
- */
+ if (bitnum != 0)
+ op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
+ size_int (bitnum), subtarget, ops_unsignedp);
- l = bc_gen_constr_label ();
+ if (GET_MODE (op0) != mode)
+ op0 = convert_to_mode (mode, op0, ops_unsignedp);
- if (TREE_CONSTANT (constr))
- {
- text_section ();
+ if ((code == EQ && ! invert) || (code == NE && invert))
+ op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
+ ops_unsignedp, OPTAB_LIB_WIDEN);
- bc_emit_const_labeldef (l);
- bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
- }
- else
- {
- data_section ();
+ /* Put the AND last so it can combine with more things. */
+ if (bitnum != TYPE_PRECISION (type) - 1)
+ op0 = expand_and (op0, const1_rtx, subtarget);
- bc_emit_data_labeldef (l);
- bc_output_data_constructor (constr);
+ return op0;
}
-
- /* Add reference to pointer table and recall pointer to stack;
- this code is common for both types of constructors: literals
- and non-literals. */
-
- ptroffs = bc_define_pointer (l);
- bc_emit_instruction (constP, ptroffs);
-
- /* This is all that has to be done if it's a literal. */
- if (TREE_CONSTANT (constr))
- return;
-
-
- /* At this point, we have the pointer to the structure on top of the stack.
- Generate sequences of store_memory calls for the constructor. */
-
- /* constructor type is structure */
- if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
+ /* Now see if we are likely to be able to do this. Return if not. */
+ if (! can_compare_p (operand_mode))
+ return 0;
+ icode = setcc_gen_code[(int) code];
+ if (icode == CODE_FOR_nothing
+ || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
{
- register tree elt;
-
- /* If the constructor has fewer fields than the structure,
- clear the whole structure first. */
-
- if (list_length (CONSTRUCTOR_ELTS (constr))
- != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
- {
- bc_emit_instruction (duplicate);
- bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
- bc_emit_instruction (clearBLK);
- }
-
- /* Store each element of the constructor into the corresponding
- field of TARGET. */
-
- for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
- {
- register tree field = TREE_PURPOSE (elt);
- register enum machine_mode mode;
- int bitsize;
- int bitpos;
- int unsignedp;
-
- bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
- mode = DECL_MODE (field);
- unsignedp = TREE_UNSIGNED (field);
-
- bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
-
- bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
- /* The alignment of TARGET is
- at least what its type requires. */
- VOIDmode, 0,
- TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
- int_size_in_bytes (TREE_TYPE (constr)));
- }
+ /* We can only do this if it is one of the special cases that
+ can be handled without an scc insn. */
+ if ((code == LT && integer_zerop (arg1))
+ || (! only_cheap && code == GE && integer_zerop (arg1)))
+ ;
+ else if (BRANCH_COST >= 0
+ && ! only_cheap && (code == NE || code == EQ)
+ && TREE_CODE (type) != REAL_TYPE
+ && ((abs_optab->handlers[(int) operand_mode].insn_code
+ != CODE_FOR_nothing)
+ || (ffs_optab->handlers[(int) operand_mode].insn_code
+ != CODE_FOR_nothing)))
+ ;
+ else
+ return 0;
}
- else
-
- /* Constructor type is array */
- if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
- {
- register tree elt;
- register int i;
- tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
- int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
- int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
- tree elttype = TREE_TYPE (TREE_TYPE (constr));
-
- /* If the constructor has fewer fields than the structure,
- clear the whole structure first. */
-
- if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
- {
- bc_emit_instruction (duplicate);
- bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
- bc_emit_instruction (clearBLK);
- }
-
-
- /* Store each element of the constructor into the corresponding
- element of TARGET, determined by counting the elements. */
-
- for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
- elt;
- elt = TREE_CHAIN (elt), i++)
- {
- register enum machine_mode mode;
- int bitsize;
- int bitpos;
- int unsignedp;
-
- mode = TYPE_MODE (elttype);
- bitsize = GET_MODE_BITSIZE (mode);
- unsignedp = TREE_UNSIGNED (elttype);
-
- bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
- /* * TYPE_SIZE_UNIT (elttype) */ );
-
- bc_store_field (elt, bitsize, bitpos, mode,
- TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
- /* The alignment of TARGET is
- at least what its type requires. */
- VOIDmode, 0,
- TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
- int_size_in_bytes (TREE_TYPE (constr)));
- }
-
- }
-}
-
-
-/* Store the value of EXP (an expression tree) into member FIELD of
- structure at address on stack, which has type TYPE, mode MODE and
- occupies BITSIZE bits, starting BITPOS bits from the beginning of the
- structure.
-
- ALIGN is the alignment that TARGET is known to have, measured in bytes.
- TOTAL_SIZE is its size in bytes, or -1 if variable. */
+
+ preexpand_calls (exp);
+ if (subtarget == 0 || GET_CODE (subtarget) != REG
+ || GET_MODE (subtarget) != operand_mode
+ || ! safe_from_p (subtarget, arg1))
+ subtarget = 0;
-void
-bc_store_field (field, bitsize, bitpos, mode, exp, type,
- value_mode, unsignedp, align, total_size)
- int bitsize, bitpos;
- enum machine_mode mode;
- tree field, exp, type;
- enum machine_mode value_mode;
- int unsignedp;
- int align;
- int total_size;
-{
+ op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
+ op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
- /* Expand expression and copy pointer */
- bc_expand_expr (exp);
- bc_emit_instruction (over);
+ if (target == 0)
+ target = gen_reg_rtx (mode);
+ /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
+ because, if the emit_store_flag does anything it will succeed and
+ OP0 and OP1 will not be used subsequently. */
- /* If the component is a bit field, we cannot use addressing to access
- it. Use bit-field techniques to store in it. */
+ result = emit_store_flag (target, code,
+ queued_subexp_p (op0) ? copy_rtx (op0) : op0,
+ queued_subexp_p (op1) ? copy_rtx (op1) : op1,
+ operand_mode, unsignedp, 1);
- if (DECL_BIT_FIELD (field))
+ if (result)
{
- bc_store_bit_field (bitpos, bitsize, unsignedp);
- return;
+ if (invert)
+ result = expand_binop (mode, xor_optab, result, const1_rtx,
+ result, 0, OPTAB_LIB_WIDEN);
+ return result;
}
- else
- /* Not bit field */
- {
- HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
- /* Advance pointer to the desired member */
- if (offset)
- bc_emit_instruction (addconstPSI, offset);
-
- /* Store */
- bc_store_memory (type, field);
- }
-}
+ /* If this failed, we have to do this with set/compare/jump/set code. */
+ if (GET_CODE (target) != REG
+ || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
+ target = gen_reg_rtx (GET_MODE (target));
+ emit_move_insn (target, invert ? const0_rtx : const1_rtx);
+ result = compare_from_rtx (op0, op1, code, unsignedp,
+ operand_mode, NULL_RTX, 0);
+ if (GET_CODE (result) == CONST_INT)
+ return (((result == const0_rtx && ! invert)
+ || (result != const0_rtx && invert))
+ ? const0_rtx : const1_rtx);
-/* Store SI/SU in bitfield */
+ label = gen_label_rtx ();
+ if (bcc_gen_fctn[(int) code] == 0)
+ abort ();
-void
-bc_store_bit_field (offset, size, unsignedp)
- int offset, size, unsignedp;
-{
- /* Push bitfield offset and size */
- bc_push_offset_and_size (offset, size);
+ emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
+ emit_move_insn (target, invert ? const1_rtx : const0_rtx);
+ emit_label (label);
- /* Store */
- bc_emit_instruction (sstoreBI);
+ return target;
}
+\f
+/* Generate a tablejump instruction (used for switch statements). */
+
+#ifdef HAVE_tablejump
+/* INDEX is the value being switched on, with the lowest value
+ in the table already subtracted.
+ MODE is its expected mode (needed if INDEX is constant).
+ RANGE is the length of the jump table.
+ TABLE_LABEL is a CODE_LABEL rtx for the table itself.
-/* Load SI/SU from bitfield */
+ DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
+ index value is out of range. */
void
-bc_load_bit_field (offset, size, unsignedp)
- int offset, size, unsignedp;
+do_tablejump (index, mode, range, table_label, default_label)
+ rtx index, range, table_label, default_label;
+ enum machine_mode mode;
{
- /* Push bitfield offset and size */
- bc_push_offset_and_size (offset, size);
+ register rtx temp, vector;
- /* Load: sign-extend if signed, else zero-extend */
- bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
-}
+ /* Do an unsigned comparison (in the proper mode) between the index
+ expression and the value which represents the length of the range.
+ Since we just finished subtracting the lower bound of the range
+ from the index expression, this comparison allows us to simultaneously
+ check that the original index expression value is both greater than
+ or equal to the minimum value of the range and less than or equal to
+ the maximum value of the range. */
+ emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
+ emit_jump_insn (gen_bgtu (default_label));
-/* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
- (adjust stack pointer upwards), negative means add that number of
- levels (adjust the stack pointer downwards). Only positive values
- normally make sense. */
+ /* If index is in range, it must fit in Pmode.
+ Convert to Pmode so we can index with it. */
+ if (mode != Pmode)
+ index = convert_to_mode (Pmode, index, 1);
-void
-bc_adjust_stack (nlevels)
- int nlevels;
-{
- switch (nlevels)
- {
- case 0:
- break;
-
- case 2:
- bc_emit_instruction (drop);
-
- case 1:
- bc_emit_instruction (drop);
- break;
-
- default:
-
- bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
- stack_depth -= nlevels;
- }
+ /* Don't let a MEM slip through, because then INDEX that comes
+ out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
+ and break_out_memory_refs will go to work on it and mess it up. */
+#ifdef PIC_CASE_VECTOR_ADDRESS
+ if (flag_pic && GET_CODE (index) != REG)
+ index = copy_to_mode_reg (Pmode, index);
+#endif
-#if defined (VALIDATE_STACK_FOR_BC)
- VALIDATE_STACK_FOR_BC ();
+ /* If flag_force_addr were to affect this address
+ it could interfere with the tricky assumptions made
+ about addresses that contain label-refs,
+ which may be valid only very near the tablejump itself. */
+ /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
+ GET_MODE_SIZE, because this indicates how large insns are. The other
+ uses should all be Pmode, because they are addresses. This code
+ could fail if addresses and insns are not the same size. */
+ index = gen_rtx_PLUS (Pmode,
+ gen_rtx_MULT (Pmode, index,
+ GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
+ gen_rtx_LABEL_REF (Pmode, table_label));
+#ifdef PIC_CASE_VECTOR_ADDRESS
+ if (flag_pic)
+ index = PIC_CASE_VECTOR_ADDRESS (index);
+ else
#endif
+ index = memory_address_noforce (CASE_VECTOR_MODE, index);
+ temp = gen_reg_rtx (CASE_VECTOR_MODE);
+ vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
+ RTX_UNCHANGING_P (vector) = 1;
+ convert_move (temp, vector, 0);
+
+ emit_jump_insn (gen_tablejump (temp, table_label));
+
+ /* If we are generating PIC code or if the table is PC-relative, the
+ table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
+ if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
+ emit_barrier ();
}
+
+#endif /* HAVE_tablejump */