/* Convert tree expression to rtl instructions, for GNU compiler.
- Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
+ Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.
This file is part of GNU CC.
#include "config.h"
+#include <stdio.h>
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
+#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
-
-#include "bytecode.h"
-#include "bc-opcode.h"
-#include "bc-typecd.h"
-#include "bc-optab.h"
-#include "bc-emit.h"
-
+#include "defaults.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
/* Like STACK_BOUNDARY but in units of bytes, not bits. */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
+/* Assume that case vectors are not pc-relative. */
+#ifndef CASE_VECTOR_PC_RELATIVE
+#define CASE_VECTOR_PC_RELATIVE 0
+#endif
+
/* If this is nonzero, we do not bother generating VOLATILE
around volatile memory references, and we are willing to
output indirect addresses. If cse is to follow, we reject
and in other cases as well. */
int inhibit_defer_pop;
-/* A list of all cleanups which belong to the arguments of
- function calls being expanded by expand_call. */
-tree cleanups_this_call;
-
/* When temporaries are created by TARGET_EXPRs, they are created at
this level of temp_slot_level, so that they can remain allocated
until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
/* Similarly for __builtin_apply_args. */
static rtx apply_args_value;
+/* Don't check memory usage, since code is being emitted to check a memory
+ usage. Used when flag_check_memory_usage is true, to avoid infinite
+ recursion. */
+static int in_check_memory_usage;
+
/* This structure is used by move_by_pieces to describe the move to
be performed. */
-
struct move_by_pieces
{
rtx to;
int reverse;
};
-/* Used to generate bytecodes: keep track of size of local variables,
- as well as depth of arithmetic stack. (Notice that variables are
- stored on the machine's stack, not the arithmetic stack.) */
-
-extern int local_vars_size;
-extern int stack_depth;
-extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;
+static rtx get_push_address PROTO ((int));
+
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
-static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
+static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
-static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
+static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
enum machine_mode, int, int, int));
-static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
+static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
-void bc_expand_increment PROTO((struct increment_operator *, tree));
-rtx bc_allocate_local PROTO((int, int));
-void bc_store_memory PROTO((tree, tree));
-tree bc_expand_component_address PROTO((tree));
-tree bc_expand_address PROTO((tree));
-void bc_expand_constructor PROTO((tree));
-void bc_adjust_stack PROTO((int));
-tree bc_canonicalize_array_ref PROTO((tree));
-void bc_load_memory PROTO((tree, tree));
-void bc_load_externaddr PROTO((rtx));
-void bc_load_externaddr_id PROTO((tree, int));
-void bc_load_localaddr PROTO((rtx));
-void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
-static tree defer_cleanups_to PROTO((tree));
-extern void (*interim_eh_hook) PROTO((tree));
extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
/* This array records the insn_code of insns to perform block clears. */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];
-/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
+/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
-/* Maps used to convert modes to const, load, and store bytecodes. */
-enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
-enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
-enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
-
-/* Initialize maps used to convert modes to const, load, and store
- bytecodes. */
-void
-bc_init_mode_to_opcode_maps ()
-{
- int mode;
-
- for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
- mode_to_const_map[mode] =
- mode_to_load_map[mode] =
- mode_to_store_map[mode] = neverneverland;
-
-#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
- mode_to_const_map[(int) SYM] = CONST; \
- mode_to_load_map[(int) SYM] = LOAD; \
- mode_to_store_map[(int) SYM] = STORE;
-
-#include "modemap.def"
-#undef DEF_MODEMAP
-}
-\f
/* This is run once per compilation to set up which modes can be used
directly in memory and to initialize the block move optab. */
/* Try indexing by frame ptr and try by stack ptr.
It is known that on the Convex the stack ptr isn't a valid index.
With luck, one or the other is valid on any machine. */
- rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
- rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
+ rtx mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
+ rtx mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
start_sequence ();
- insn = emit_insn (gen_rtx (SET, 0, 0));
+ insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
pat = PATTERN (insn);
for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
if (! HARD_REGNO_MODE_OK (regno, mode))
continue;
- reg = gen_rtx (REG, mode, regno);
+ reg = gen_rtx_REG (mode, regno);
SET_SRC (pat) = mem;
SET_DEST (pat) = reg;
pending_stack_adjust = 0;
inhibit_defer_pop = 0;
- cleanups_this_call = 0;
saveregs_value = 0;
apply_args_value = 0;
forced_labels = 0;
p->pending_stack_adjust = pending_stack_adjust;
p->inhibit_defer_pop = inhibit_defer_pop;
- p->cleanups_this_call = cleanups_this_call;
p->saveregs_value = saveregs_value;
p->apply_args_value = apply_args_value;
p->forced_labels = forced_labels;
pending_stack_adjust = 0;
inhibit_defer_pop = 0;
- cleanups_this_call = 0;
saveregs_value = 0;
apply_args_value = 0;
forced_labels = 0;
{
pending_stack_adjust = p->pending_stack_adjust;
inhibit_defer_pop = p->inhibit_defer_pop;
- cleanups_this_call = p->cleanups_this_call;
saveregs_value = p->saveregs_value;
apply_args_value = p->apply_args_value;
forced_labels = p->forced_labels;
enqueue_insn (var, body)
rtx var, body;
{
- pending_chain = gen_rtx (QUEUED, GET_MODE (var),
- var, NULL_RTX, NULL_RTX, body, pending_chain);
+ pending_chain = gen_rtx_QUEUED (GET_MODE (var),
+ var, NULL_RTX, NULL_RTX, body,
+ pending_chain);
return pending_chain;
}
&& GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
{
register rtx y = XEXP (x, 0);
- register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
+ register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
case MULT:
case PLUS:
case MINUS:
- return queued_subexp_p (XEXP (x, 0))
- || queued_subexp_p (XEXP (x, 1));
+ return (queued_subexp_p (XEXP (x, 0))
+ || queued_subexp_p (XEXP (x, 1)));
+ default:
+ return 0;
}
- return 0;
}
/* Perform all the pending incrementations. */
emit_queue ()
{
register rtx p;
- while (p = pending_chain)
+ while ((p = pending_chain))
{
QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
pending_chain = QUEUED_NEXT (p);
{
rtx value;
-#ifdef HAVE_extendqfhf2
- if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
- {
- emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
- return;
- }
-#endif
-#ifdef HAVE_extendqfsf2
- if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
- {
- emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
- return;
- }
-#endif
-#ifdef HAVE_extendqfdf2
- if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
- {
- emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
- return;
- }
-#endif
-#ifdef HAVE_extendqfxf2
- if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
- {
- emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
- return;
- }
-#endif
-#ifdef HAVE_extendqftf2
- if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
- {
- emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
- return;
- }
-#endif
-
-#ifdef HAVE_extendhftqf2
- if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
- {
- emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
- return;
- }
-#endif
-
-#ifdef HAVE_extendhfsf2
- if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
- {
- emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
- return;
- }
-#endif
-#ifdef HAVE_extendhfdf2
- if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
- {
- emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
- return;
- }
-#endif
-#ifdef HAVE_extendhfxf2
- if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
- {
- emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
- return;
- }
-#endif
-#ifdef HAVE_extendhftf2
- if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
- {
- emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
- return;
- }
-#endif
-
-#ifdef HAVE_extendsfdf2
- if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
+ if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
{
- emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
- return;
- }
-#endif
-#ifdef HAVE_extendsfxf2
- if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
- {
- emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
- return;
- }
-#endif
-#ifdef HAVE_extendsftf2
- if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
- {
- emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
- return;
- }
-#endif
-#ifdef HAVE_extenddfxf2
- if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
- {
- emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
- return;
+ /* Try converting directly if the insn is supported. */
+ if ((code = can_extend_p (to_mode, from_mode, 0))
+ != CODE_FOR_nothing)
+ {
+ emit_unop_insn (code, to, from, UNKNOWN);
+ return;
+ }
}
-#endif
-#ifdef HAVE_extenddftf2
- if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
+
+#ifdef HAVE_trunchfqf2
+ if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
{
- emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
+ emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
return;
}
#endif
-
-#ifdef HAVE_trunchfqf2
- if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
+#ifdef HAVE_trunctqfqf2
+ if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
{
- emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
+ emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
return;
}
#endif
return;
}
#endif
+
+#ifdef HAVE_truncsftqf2
+ if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
+ {
+ emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_truncdftqf2
+ if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
+ {
+ emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_truncxftqf2
+ if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
+ {
+ emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+#ifdef HAVE_trunctftqf2
+ if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
+ {
+ emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
+ return;
+ }
+#endif
+
#ifdef HAVE_truncdfsf2
if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
{
case TFmode:
libcall = extendsftf2_libfunc;
break;
+
+ default:
+ break;
}
break;
case TFmode:
libcall = extenddftf2_libfunc;
break;
+
+ default:
+ break;
}
break;
case DFmode:
libcall = truncxfdf2_libfunc;
break;
+
+ default:
+ break;
}
break;
case DFmode:
libcall = trunctfdf2_libfunc;
break;
+
+ default:
+ break;
}
break;
+
+ default:
+ break;
}
if (libcall == (rtx) 0)
!= CODE_FOR_nothing))
{
if (GET_CODE (to) == REG)
- emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
convert_move (gen_lowpart (word_mode, to), from, unsignedp);
emit_unop_insn (code, to,
gen_lowpart (word_mode, to), equiv_code);
end_sequence ();
emit_no_conflict_block (insns, to, from, NULL_RTX,
- gen_rtx (equiv_code, to_mode, copy_rtx (from)));
+ gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
return;
}
if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
&& GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
- return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
+ {
+ HOST_WIDE_INT val = INTVAL (x);
+
+ if (oldmode != VOIDmode
+ && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
+ {
+ int width = GET_MODE_BITSIZE (oldmode);
+
+ /* We need to zero extend VAL. */
+ val &= ((HOST_WIDE_INT) 1 << width) - 1;
+ }
+
+ return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
+ }
/* We can do this with a gen_lowpart if both desired and current modes
are integer, and this is either a constant integer, a register, or a
}
/* The code above should have handled everything. */
- if (data.len != 0)
+ if (data.len > 0)
abort ();
}
static void
move_by_pieces_1 (genfun, mode, data)
- rtx (*genfun) ();
+ rtx (*genfun) PROTO ((rtx, ...));
enum machine_mode mode;
struct move_by_pieces *data;
{
if (data->reverse) data->offset -= size;
to1 = (data->autinc_to
- ? gen_rtx (MEM, mode, data->to_addr)
- : change_address (data->to, mode,
- plus_constant (data->to_addr, data->offset)));
+ ? gen_rtx_MEM (mode, data->to_addr)
+ : copy_rtx (change_address (data->to, mode,
+ plus_constant (data->to_addr,
+ data->offset))));
MEM_IN_STRUCT_P (to1) = data->to_struct;
- from1 =
- (data->autinc_from
- ? gen_rtx (MEM, mode, data->from_addr)
- : change_address (data->from, mode,
- plus_constant (data->from_addr, data->offset)));
+
+ from1
+ = (data->autinc_from
+ ? gen_rtx_MEM (mode, data->from_addr)
+ : copy_rtx (change_address (data->from, mode,
+ plus_constant (data->from_addr,
+ data->offset))));
MEM_IN_STRUCT_P (from1) = data->from_struct;
#ifdef HAVE_PRE_DECREMENT
with mode BLKmode.
SIZE is an rtx that says how long they are.
ALIGN is the maximum alignment we can assume they have,
- measured in bytes. */
+ measured in bytes.
-void
+ Return the address of the new block, if memcpy is called and returns it,
+ 0 otherwise. */
+
+rtx
emit_block_move (x, y, size, align)
rtx x, y;
rtx size;
int align;
{
+ rtx retval = 0;
+
if (GET_MODE (x) != BLKmode)
abort ();
if (pat)
{
emit_insn (pat);
- return;
+ return 0;
}
else
delete_insns_since (last);
}
#ifdef TARGET_MEM_FUNCTIONS
- emit_library_call (memcpy_libfunc, 0,
- VOIDmode, 3, XEXP (x, 0), Pmode,
- XEXP (y, 0), Pmode,
- convert_to_mode (TYPE_MODE (sizetype), size,
- TREE_UNSIGNED (sizetype)),
- TYPE_MODE (sizetype));
+ retval
+ = emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
+ ptr_mode, 3, XEXP (x, 0), Pmode,
+ XEXP (y, 0), Pmode,
+ convert_to_mode (TYPE_MODE (sizetype), size,
+ TREE_UNSIGNED (sizetype)),
+ TYPE_MODE (sizetype));
#else
emit_library_call (bcopy_libfunc, 0,
VOIDmode, 3, XEXP (y, 0), Pmode,
TYPE_MODE (integer_type_node));
#endif
}
+
+ return retval;
}
\f
/* Copy all or part of a value X into registers starting at REGNO.
enum machine_mode mode;
{
int i;
- rtx pat, last;
+#ifdef HAVE_load_multiple
+ rtx pat;
+ rtx last;
+#endif
if (nregs == 0)
return;
if (HAVE_load_multiple)
{
last = get_last_insn ();
- pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
+ pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
GEN_INT (nregs));
if (pat)
{
#endif
for (i = 0; i < nregs; i++)
- emit_move_insn (gen_rtx (REG, word_mode, regno + i),
+ emit_move_insn (gen_rtx_REG (word_mode, regno + i),
operand_subword_force (x, i, mode));
}
int size;
{
int i;
- rtx pat, last;
+#ifdef HAVE_store_multiple
+ rtx pat;
+ rtx last;
+#endif
+ enum machine_mode mode;
+ /* If SIZE is that of a mode no bigger than a word, just use that
+ mode's store operation. */
+ if (size <= UNITS_PER_WORD
+ && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
+ {
+ emit_move_insn (change_address (x, mode, NULL),
+ gen_rtx_REG (mode, regno));
+ return;
+ }
+
/* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
- to the left before storing to memory. */
+ to the left before storing to memory. Note that the previous test
+ doesn't handle all cases (e.g. SIZE == 3). */
if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
{
rtx tem = operand_subword (x, 0, 1, BLKmode);
abort ();
shift = expand_shift (LSHIFT_EXPR, word_mode,
- gen_rtx (REG, word_mode, regno),
+ gen_rtx_REG (word_mode, regno),
build_int_2 ((UNITS_PER_WORD - size)
* BITS_PER_UNIT, 0), NULL_RTX, 0);
emit_move_insn (tem, shift);
if (HAVE_store_multiple)
{
last = get_last_insn ();
- pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
+ pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
GEN_INT (nregs));
if (pat)
{
if (tem == 0)
abort ();
- emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
+ emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
}
}
{
if (GET_MODE (target_reg) == GET_MODE (y))
source = y;
+ /* Allow for the target_reg to be smaller than the input register
+ to allow for AIX with 4 DF arguments after a single SI arg. The
+ last DF argument will only load 1 word into the integer registers,
+ but load a DF value into the float registers. */
+ else if ((GET_MODE_SIZE (GET_MODE (target_reg))
+ <= GET_MODE_SIZE (GET_MODE (y)))
+ && GET_MODE (target_reg) == word_mode)
+ /* This might be a const_double, so we can't just use SUBREG. */
+ source = operand_subword (y, 0, 0, VOIDmode);
else if (GET_MODE_SIZE (GET_MODE (target_reg))
== GET_MODE_SIZE (GET_MODE (y)))
- source = gen_rtx (SUBREG, GET_MODE (target_reg), y, 0);
+ source = gen_lowpart (GET_MODE (target_reg), y);
else
abort ();
}
plus_constant (XEXP (x, 0),
INTVAL (XEXP (element, 1))));
else if (XEXP (element, 1) == const0_rtx)
- target = x;
+ {
+ target = x;
+ if (GET_MODE (target) != GET_MODE (source_reg))
+ target = gen_lowpart (GET_MODE (source_reg), target);
+ }
else
abort ();
abort();
*call_fusage
- = gen_rtx (EXPR_LIST, VOIDmode,
- gen_rtx (USE, VOIDmode, reg), *call_fusage);
+ = gen_rtx_EXPR_LIST (VOIDmode,
+ gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
abort ();
for (i = 0; i < nregs; i++)
- use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
+ use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
{
int i;
- /* Check for a NULL entry, used to indicate that the parameter goes
- both on the stack and in registers. */
- if (XEXP (XVECEXP (regs, 0, 0), 0))
- i = 0;
- else
- i = 1;
+ for (i = 0; i < XVECLEN (regs, 0); i++)
+ {
+ rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
- for (; i < XVECLEN (regs, 0); i++)
- use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
+ /* A NULL entry means the parameter goes both on the stack and in
+ registers. This can also be a MEM for targets that pass values
+ partially on the stack and partially in registers. */
+ if (reg != 0 && GET_CODE (reg) == REG)
+ use_reg (call_fusage, reg);
+ }
}
\f
/* Generate several move instructions to clear LEN bytes of block TO.
static void
clear_by_pieces_1 (genfun, mode, data)
- rtx (*genfun) ();
+ rtx (*genfun) PROTO ((rtx, ...));
enum machine_mode mode;
struct clear_by_pieces *data;
{
if (data->reverse) data->offset -= size;
to1 = (data->autinc_to
- ? gen_rtx (MEM, mode, data->to_addr)
- : change_address (data->to, mode,
- plus_constant (data->to_addr, data->offset)));
+ ? gen_rtx_MEM (mode, data->to_addr)
+ : copy_rtx (change_address (data->to, mode,
+ plus_constant (data->to_addr,
+ data->offset))));
MEM_IN_STRUCT_P (to1) = data->to_struct;
#ifdef HAVE_PRE_DECREMENT
\f
/* Write zeros through the storage of OBJECT.
If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
- the maximum alignment we can is has, measured in bytes. */
+ the maximum alignment we can is has, measured in bytes.
-void
+ If we call a function that returns the length of the block, return it. */
+
+rtx
clear_storage (object, size, align)
rtx object;
rtx size;
int align;
{
+ rtx retval = 0;
+
if (GET_MODE (object) == BLKmode)
{
object = protect_from_queue (object, 1);
if (pat)
{
emit_insn (pat);
- return;
+ return 0;
}
else
delete_insns_since (last);
#ifdef TARGET_MEM_FUNCTIONS
- emit_library_call (memset_libfunc, 0,
- VOIDmode, 3,
- XEXP (object, 0), Pmode,
- const0_rtx, TYPE_MODE (integer_type_node),
- convert_to_mode (TYPE_MODE (sizetype),
- size, TREE_UNSIGNED (sizetype)),
- TYPE_MODE (sizetype));
+ retval
+ = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
+ ptr_mode, 3,
+ XEXP (object, 0), Pmode,
+ const0_rtx,
+ TYPE_MODE (integer_type_node),
+ convert_to_mode
+ (TYPE_MODE (sizetype), size,
+ TREE_UNSIGNED (sizetype)),
+ TYPE_MODE (sizetype));
#else
emit_library_call (bzero_libfunc, 0,
VOIDmode, 2,
XEXP (object, 0), Pmode,
- convert_to_mode (TYPE_MODE (integer_type_node),
- size,
- TREE_UNSIGNED (integer_type_node)),
+ convert_to_mode
+ (TYPE_MODE (integer_type_node), size,
+ TREE_UNSIGNED (integer_type_node)),
TYPE_MODE (integer_type_node));
#endif
}
}
else
- emit_move_insn (object, const0_rtx);
+ emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
+
+ return retval;
}
/* Generate code to copy Y into X.
{
/* Don't split destination if it is a stack push. */
int stack = push_operand (x, GET_MODE (x));
- rtx insns;
/* If this is a stack, push the highpart first, so it
will be in the argument order.
regardless of machine's endianness. */
#ifdef STACK_GROWS_DOWNWARD
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx (MEM, submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, (XEXP (x, 0))),
gen_imagpart (submode, y)));
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx (MEM, submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, (XEXP (x, 0))),
gen_realpart (submode, y)));
#else
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx (MEM, submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, (XEXP (x, 0))),
gen_realpart (submode, y)));
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx (MEM, submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, (XEXP (x, 0))),
gen_imagpart (submode, y)));
#endif
}
else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
{
rtx last_insn = 0;
- rtx insns;
#ifdef PUSH_ROUNDING
/* Show the output dies here. */
if (x != y)
- emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
for (i = 0;
i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
temp = plus_constant (virtual_outgoing_args_rtx,
- INTVAL (size) - (below ? 0 : extra));
else if (extra != 0 && !below)
- temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
+ temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
negate_rtx (Pmode, plus_constant (size, extra)));
else
- temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
+ temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
negate_rtx (Pmode, size));
#endif
rtx
gen_push_operand ()
{
- return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
+ return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
+}
+
+/* Return an rtx for the address of the beginning of a as-if-it-was-pushed
+ block of SIZE bytes. */
+
+static rtx
+get_push_address (size)
+ int size;
+{
+ register rtx temp;
+
+ if (STACK_PUSH_CODE == POST_DEC)
+ temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
+ else if (STACK_PUSH_CODE == POST_INC)
+ temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
+ else
+ temp = stack_pointer_rtx;
+
+ return copy_to_reg (temp);
}
/* Generate code to push X onto the stack, assuming it has mode MODE and
Default is below for small data on big-endian machines; else above. */
enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
- /* If we're placing part of X into a register and part of X onto
- the stack, indicate that the entire register is clobbered to
- keep flow from thinking the unused part of the register is live. */
- if (partial > 0 && reg != 0)
- emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
-
/* Invert direction if stack is post-update. */
if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
if (where_pad != none)
&& where_pad != none && where_pad != stack_direction)
anti_adjust_stack (GEN_INT (extra));
- move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
+ move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
INTVAL (size) - used, align);
+
+ if (flag_check_memory_usage && ! in_check_memory_usage)
+ {
+ rtx temp;
+
+ in_check_memory_usage = 1;
+ temp = get_push_address (INTVAL(size) - used);
+ if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
+ emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
+ temp, ptr_mode,
+ XEXP (xinner, 0), ptr_mode,
+ GEN_INT (INTVAL(size) - used),
+ TYPE_MODE (sizetype));
+ else
+ emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
+ temp, ptr_mode,
+ GEN_INT (INTVAL(size) - used),
+ TYPE_MODE (sizetype),
+ GEN_INT (MEMORY_USE_RW),
+ TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
+ }
}
else
#endif /* PUSH_ROUNDING */
skip + INTVAL (args_so_far)));
else
temp = memory_address (BLKmode,
- plus_constant (gen_rtx (PLUS, Pmode,
- args_addr, args_so_far),
+ plus_constant (gen_rtx_PLUS (Pmode,
+ args_addr,
+ args_so_far),
skip));
+ if (flag_check_memory_usage && ! in_check_memory_usage)
+ {
+ rtx target;
+
+ in_check_memory_usage = 1;
+ target = copy_to_reg (temp);
+ if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
+ emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
+ target, ptr_mode,
+ XEXP (xinner, 0), ptr_mode,
+ size, TYPE_MODE (sizetype));
+ else
+ emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
+ target, ptr_mode,
+ size, TYPE_MODE (sizetype),
+ GEN_INT (MEMORY_USE_RW),
+ TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
+ }
/* TEMP is the address of the block. Copy the data there. */
if (GET_CODE (size) == CONST_INT
&& (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
< MOVE_RATIO))
{
- move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
+ move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
INTVAL (size), align);
goto ret;
}
#ifdef HAVE_movstrqi
if (HAVE_movstrqi
&& GET_CODE (size) == CONST_INT
- && ((unsigned) INTVAL (size)
- < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
+ && ((unsigned HOST_WIDE_INT) INTVAL (size)
+ <= GET_MODE_MASK (QImode)))
{
- rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
+ rtx pat = gen_movstrqi (gen_rtx_MEM (BLKmode, temp),
xinner, size, GEN_INT (align));
if (pat != 0)
{
#ifdef HAVE_movstrhi
if (HAVE_movstrhi
&& GET_CODE (size) == CONST_INT
- && ((unsigned) INTVAL (size)
- < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
+ && ((unsigned HOST_WIDE_INT) INTVAL (size)
+ <= GET_MODE_MASK (HImode)))
{
- rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
+ rtx pat = gen_movstrhi (gen_rtx_MEM (BLKmode, temp),
xinner, size, GEN_INT (align));
if (pat != 0)
{
#ifdef HAVE_movstrsi
if (HAVE_movstrsi)
{
- rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
+ rtx pat = gen_movstrsi (gen_rtx_MEM (BLKmode, temp),
xinner, size, GEN_INT (align));
if (pat != 0)
{
#ifdef HAVE_movstrdi
if (HAVE_movstrdi)
{
- rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
+ rtx pat = gen_movstrdi (gen_rtx_MEM (BLKmode, temp),
xinner, size, GEN_INT (align));
if (pat != 0)
{
}
}
#endif
+#ifdef HAVE_movstrti
+ if (HAVE_movstrti)
+ {
+ /* Use gen_rtx_MEM, matching the movstrqi/hi/si/di cases above;
+ the old-style gen_rtx (MEM, ...) form bypasses the specialized
+ MEM constructor this change introduces elsewhere. */
+ rtx pat = gen_movstrti (gen_rtx_MEM (BLKmode, temp),
+ xinner, size, GEN_INT (align));
+ if (pat != 0)
+ {
+ emit_insn (pat);
+ goto ret;
+ }
+ }
+#endif
#ifndef ACCUMULATE_OUTGOING_ARGS
/* If the source is referenced relative to the stack pointer,
else
{
rtx addr;
+ rtx target = NULL_RTX;
/* Push padding now if padding above and stack grows down,
or if padding below and stack grows up.
addr = gen_push_operand ();
else
#endif
- if (GET_CODE (args_so_far) == CONST_INT)
- addr
- = memory_address (mode,
- plus_constant (args_addr, INTVAL (args_so_far)));
- else
- addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
- args_so_far));
+ {
+ if (GET_CODE (args_so_far) == CONST_INT)
+ addr
+ = memory_address (mode,
+ plus_constant (args_addr,
+ INTVAL (args_so_far)));
+ else
+ addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
+ args_so_far));
+ target = addr;
+ }
- emit_move_insn (gen_rtx (MEM, mode, addr), x);
+ emit_move_insn (gen_rtx_MEM (mode, addr), x);
+
+ if (flag_check_memory_usage && ! in_check_memory_usage)
+ {
+ in_check_memory_usage = 1;
+ if (target == 0)
+ target = get_push_address (GET_MODE_SIZE (mode));
+
+ if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
+ emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
+ target, ptr_mode,
+ XEXP (x, 0), ptr_mode,
+ GEN_INT (GET_MODE_SIZE (mode)),
+ TYPE_MODE (sizetype));
+ else
+ emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
+ target, ptr_mode,
+ GEN_INT (GET_MODE_SIZE (mode)),
+ TYPE_MODE (sizetype),
+ GEN_INT (MEMORY_USE_RW),
+ TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
+ }
}
ret:
return want_value ? result : NULL_RTX;
}
- if (output_bytecode)
- {
- tree dest_innermost;
-
- bc_expand_expr (from);
- bc_emit_instruction (duplicate);
-
- dest_innermost = bc_expand_address (to);
-
- /* Can't deduce from TYPE that we're dealing with a bitfield, so
- take care of it here. */
-
- bc_store_memory (TREE_TYPE (to), dest_innermost);
- return NULL;
- }
-
/* Assignment of a structure component needs special treatment
if the structure component's rtx is not simply a MEM.
Assignment of an array element at a constant index, and assignment of
an array element in an unaligned packed structure field, has the same
problem. */
- if (TREE_CODE (to) == COMPONENT_REF
- || TREE_CODE (to) == BIT_FIELD_REF
- || (TREE_CODE (to) == ARRAY_REF
- && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
- && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
- || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
+ if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
+ || TREE_CODE (to) == ARRAY_REF)
{
enum machine_mode mode1;
int bitsize;
int alignment;
push_temp_slots ();
- tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
- &mode1, &unsignedp, &volatilep);
+ tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
+ &unsignedp, &volatilep, &alignment);
/* If we are going to use store_bit_field and extract_bit_field,
make sure to_rtx will be safe for multiple use. */
if (mode1 == VOIDmode && want_value)
tem = stabilize_reference (tem);
- alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
- to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
+ to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
if (offset != 0)
{
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
if (GET_CODE (to_rtx) != MEM)
abort ();
to_rtx = change_address (to_rtx, VOIDmode,
- gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
- force_reg (ptr_mode, offset_rtx)));
- /* If we have a variable offset, the known alignment
- is only that of the innermost structure containing the field.
- (Actually, we could sometimes do better by using the
- align of an element of the innermost array, but no need.) */
- if (TREE_CODE (to) == COMPONENT_REF
- || TREE_CODE (to) == BIT_FIELD_REF)
- alignment
- = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
+ gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
+ force_reg (ptr_mode, offset_rtx)));
}
if (volatilep)
{
structure we are storing into, and hence may be shared.
We must make a new MEM before setting the volatile bit. */
if (offset == 0)
- to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
+ to_rtx = copy_rtx (to_rtx);
+
MEM_VOLATILE_P (to_rtx) = 1;
}
#if 0 /* This was turned off because, when a field is volatile
#endif
}
+ if (TREE_CODE (to) == COMPONENT_REF
+ && TREE_READONLY (TREE_OPERAND (to, 1)))
+ {
+ if (offset == 0)
+ to_rtx = copy_rtx (to_rtx);
+
+ RTX_UNCHANGING_P (to_rtx) = 1;
+ }
+
+ /* Check the access. */
+ if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
+ {
+ rtx to_addr;
+ int size;
+ int best_mode_size;
+ enum machine_mode best_mode;
+
+ best_mode = get_best_mode (bitsize, bitpos,
+ TYPE_ALIGN (TREE_TYPE (tem)),
+ mode1, volatilep);
+ if (best_mode == VOIDmode)
+ best_mode = QImode;
+
+ best_mode_size = GET_MODE_BITSIZE (best_mode);
+ to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
+ size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
+ size *= GET_MODE_SIZE (best_mode);
+
+ /* Check the access right of the pointer. */
+ if (size)
+ emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ to_addr, ptr_mode,
+ GEN_INT (size), TYPE_MODE (sizetype),
+ GEN_INT (MEMORY_USE_WO),
+ TYPE_MODE (integer_type_node));
+ }
+
result = store_field (to_rtx, bitsize, bitpos, mode1, from,
(want_value
/* Spurious cast makes HPUX compiler happy. */
push_temp_slots ();
value = expand_expr (from, NULL_RTX, VOIDmode, 0);
if (to_rtx == 0)
- to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
+ to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
/* Handle calls that return values in multiple non-contiguous locations.
The Irix 6 ABI has examples of this. */
Don't re-expand if it was expanded already (in COMPONENT_REF case). */
if (to_rtx == 0)
- to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
+ to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
/* Don't move directly into a return register. */
if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
push_temp_slots ();
size = expr_size (from);
- from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
+ from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
+ EXPAND_MEMORY_USE_DONT);
+
+ /* Copy the rights of the bitmap. */
+ if (flag_check_memory_usage)
+ emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
+ XEXP (to_rtx, 0), ptr_mode,
+ XEXP (from_rtx, 0), ptr_mode,
+ convert_to_mode (TYPE_MODE (sizetype),
+ size, TREE_UNSIGNED (sizetype)),
+ TYPE_MODE (sizetype));
#ifdef TARGET_MEM_FUNCTIONS
emit_library_call (memcpy_libfunc, 0,
do_pending_stack_adjust ();
NO_DEFER_POP;
jumpifnot (TREE_OPERAND (exp, 0), lab1);
+ start_cleanup_deferral ();
store_expr (TREE_OPERAND (exp, 1), target, 0);
+ end_cleanup_deferral ();
emit_queue ();
emit_jump_insn (gen_jump (lab2));
emit_barrier ();
emit_label (lab1);
+ start_cleanup_deferral ();
store_expr (TREE_OPERAND (exp, 2), target, 0);
+ end_cleanup_deferral ();
emit_queue ();
emit_label (lab2);
OK_DEFER_POP;
+
return want_value ? target : NULL_RTX;
}
else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
/* If we don't want a value, we can do the conversion inside EXP,
which will often result in some optimizations. Do the conversion
in two steps: first change the signedness, if needed, then
- the extend. */
- if (! want_value)
+ the extend. But don't do this if the type of EXP is a subtype
+ of something else since then the conversion might involve
+ more than just converting modes. */
+ if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
+ && TREE_TYPE (TREE_TYPE (exp)) == 0)
{
if (TREE_UNSIGNED (TREE_TYPE (exp))
!= SUBREG_PROMOTED_UNSIGNED_P (target))
if (!(target && GET_CODE (target) == REG
&& REGNO (target) < FIRST_PSEUDO_REGISTER)
&& !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
- && temp != target
+ && ! rtx_equal_p (temp, target)
&& (CONSTANT_P (temp) || want_value))
dont_return_target = 1;
}
temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
temp, TREE_UNSIGNED (TREE_TYPE (exp)));
+ if (flag_check_memory_usage
+ && GET_CODE (target) == MEM
+ && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
+ {
+ if (GET_CODE (temp) == MEM)
+ emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
+ XEXP (target, 0), ptr_mode,
+ XEXP (temp, 0), ptr_mode,
+ expr_size (exp), TYPE_MODE (sizetype));
+ else
+ emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ XEXP (target, 0), ptr_mode,
+ expr_size (exp), TYPE_MODE (sizetype),
+ GEN_INT (MEMORY_USE_WO),
+ TYPE_MODE (integer_type_node));
+ }
+
/* If value was not generated in the target, store it there.
Convert the value to TARGET's type first if nec. */
- if (temp != target && TREE_CODE (exp) != ERROR_MARK)
+ if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
{
target = protect_from_queue (target, 1);
if (GET_MODE (temp) != GET_MODE (target)
if (size != const0_rtx)
{
+ /* Be sure we can write on ADDR. */
+ if (flag_check_memory_usage)
+ emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ addr, ptr_mode,
+ size, TYPE_MODE (sizetype),
+ GEN_INT (MEMORY_USE_WO),
+ TYPE_MODE (integer_type_node));
#ifdef TARGET_MEM_FUNCTIONS
emit_library_call (memset_libfunc, 0, VOIDmode, 3,
- addr, Pmode,
+ addr, ptr_mode,
const0_rtx, TYPE_MODE (integer_type_node),
convert_to_mode (TYPE_MODE (sizetype),
size,
TYPE_MODE (sizetype));
#else
emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
- addr, Pmode,
+ addr, ptr_mode,
convert_to_mode (TYPE_MODE (integer_type_node),
size,
TREE_UNSIGNED (integer_type_node)),
is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
case REAL_CST:
- return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
+ return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
case CONSTRUCTOR:
if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
return 0;
return 1;
+
+ default:
+ return 0;
}
-
- return 0;
}
/* Return 1 if EXP contains mostly (3/4) zeros. */
so the statistic will be somewhat inaccurate.
We do make a more accurate count in store_constructor itself,
so since this function is only used for nested array elements,
- this should be close enough. */
+ this should be close enough. */
if (mostly_zeros_p (TREE_VALUE (elt)))
zeros++;
elts++;
/* Store the value of constructor EXP into the rtx TARGET.
TARGET is either a REG or a MEM.
- CLEARED is true if TARGET is known to have been zero'd. */
+ CLEARED is true if TARGET is known to have been zero'd. */
static void
store_constructor (exp, target, cleared)
/* Inform later passes that the whole union value is dead. */
if (TREE_CODE (type) == UNION_TYPE
|| TREE_CODE (type) == QUAL_UNION_TYPE)
- emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
/* If we are building a static constructor into a register,
set the initial value as zero so we can fold the value into
&& GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
{
if (! cleared)
- emit_move_insn (target, const0_rtx);
+ emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
cleared = 1;
}
}
else
/* Inform later passes that the old value is dead. */
- emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
/* Store each element of the constructor into
the corresponding field of TARGET. */
if (contains_placeholder_p (offset))
offset = build (WITH_RECORD_EXPR, sizetype,
- offset, exp);
+ offset, make_tree (TREE_TYPE (exp), target));
offset = size_binop (FLOOR_DIV_EXPR, offset,
size_int (BITS_PER_UNIT));
to_rtx
= change_address (to_rtx, VOIDmode,
- gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
+ gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
force_reg (ptr_mode, offset_rtx)));
}
if (TREE_READONLY (field))
{
if (GET_CODE (to_rtx) == MEM)
- to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
- XEXP (to_rtx, 0));
+ to_rtx = copy_rtx (to_rtx);
+
RTX_UNCHANGING_P (to_rtx) = 1;
}
It is also needed to check for missing elements. */
for (elt = CONSTRUCTOR_ELTS (exp);
elt != NULL_TREE;
- elt = TREE_CHAIN (elt), i++)
+ elt = TREE_CHAIN (elt))
{
tree index = TREE_PURPOSE (elt);
HOST_WIDE_INT this_node_count;
zero_count += this_node_count;
}
/* Clear the entire array first if there are any missing elements,
- or if the incidence of zero elements is >= 75%. */
+ or if the incidence of zero elements is >= 75%. */
if (count < maxelt - minelt + 1
|| 4 * zero_count >= 3 * count)
need_to_clear = 1;
}
else
/* Inform later passes that the old value is dead. */
- emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
/* Store each element of the constructor into
the corresponding element of TARGET, determined
HOST_WIDE_INT lo, hi, count;
tree position;
- /* If the range is constant and "small", unroll the loop. */
+ /* If the range is constant and "small", unroll the loop. */
if (TREE_CODE (lo_index) == INTEGER_CST
&& TREE_CODE (hi_index) == INTEGER_CST
&& (lo = TREE_INT_CST_LOW (lo_index),
if (TREE_CODE (value) == SAVE_EXPR
&& SAVE_EXPR_RTL (value) == 0)
{
- /* Make sure value gets expanded once before the loop. */
+ /* Make sure value gets expanded once before the
+ loop. */
expand_expr (value, const0_rtx, VOIDmode, 0);
emit_queue ();
}
store_expr (lo_index, index_r, 0);
loop = expand_start_loop (0);
- /* Assign value to element index. */
+ /* Assign value to element index. */
position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
size_int (BITS_PER_UNIT));
position = size_binop (MULT_EXPR,
TYPE_MIN_VALUE (domain)),
position);
pos_rtx = expand_expr (position, 0, VOIDmode, 0);
- addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
+ addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
xtarget = change_address (target, mode, addr);
if (TREE_CODE (value) == CONSTRUCTOR)
store_constructor (value, xtarget, cleared);
/* Needed by stupid register allocation. to extend the
lifetime of pseudo-regs used by target past the end
of the loop. */
- emit_insn (gen_rtx (USE, GET_MODE (target), target));
+ emit_insn (gen_rtx_USE (GET_MODE (target), target));
}
}
else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
size_int (BITS_PER_UNIT));
position = size_binop (MULT_EXPR, index, position);
pos_rtx = expand_expr (position, 0, VOIDmode, 0);
- addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
+ addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
xtarget = change_address (target, mode, addr);
store_expr (value, xtarget, 0);
}
else if (TREE_CODE (type) == SET_TYPE)
{
tree elt = CONSTRUCTOR_ELTS (exp);
- rtx xtarget = XEXP (target, 0);
- int set_word_size = TYPE_ALIGN (type);
int nbytes = int_size_in_bytes (type), nbits;
tree domain = TYPE_DOMAIN (type);
tree domain_min, domain_max, bitlength;
probably better to set it using memset (if available) or bzero.
Also, if a large set has just a single range, it may also be
better to first clear the whole set (using
- bzero/memset), and set the bits we want. */
+ bzero/memset), and set the bits we want. */
- /* Check for all zeros. */
+ /* Check for all zeros. */
if (elt == NULL_TREE)
{
if (!cleared)
{
int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
- char *bit_buffer = (char*) alloca (nbits);
+ char *bit_buffer = (char *) alloca (nbits);
HOST_WIDE_INT word = 0;
int bit_pos = 0;
int ibit = 0;
- int offset = 0; /* In bytes from beginning of set. */
+ int offset = 0; /* In bytes from beginning of set. */
elt = get_set_constructor_bits (exp, bit_buffer, nbits);
for (;;)
{
{
rtx datum = GEN_INT (word);
rtx to_rtx;
- /* The assumption here is that it is safe to use XEXP if
- the set is multi-word, but not if it's single-word. */
+ /* The assumption here is that it is safe to use
+ XEXP if the set is multi-word, but not if
+ it's single-word. */
if (GET_CODE (target) == MEM)
{
to_rtx = plus_constant (XEXP (target, 0), offset);
}
else if (!cleared)
{
- /* Don't bother clearing storage if the set is all ones. */
+ /* Don't bother clearing storage if the set is all ones. */
if (TREE_CHAIN (elt) != NULL_TREE
|| (TREE_PURPOSE (elt) == NULL_TREE
? nbits != 1
tree startbit = TREE_PURPOSE (elt);
/* end of range of element, or element value */
tree endbit = TREE_VALUE (elt);
+#ifdef TARGET_MEM_FUNCTIONS
HOST_WIDE_INT startb, endb;
+#endif
rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
bitlength_rtx = expand_expr (bitlength,
#ifdef TARGET_MEM_FUNCTIONS
/* Optimization: If startbit and endbit are
constants divisible by BITS_PER_UNIT,
- call memset instead. */
+ call memset instead. */
if (TREE_CODE (startbit) == INTEGER_CST
&& TREE_CODE (endbit) == INTEGER_CST
&& (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
else
#endif
{
- emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
+ emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
bitlength_rtx, TYPE_MODE (sizetype),
startbit_rtx, TYPE_MODE (sizetype),
{
HOST_WIDE_INT width_mask = 0;
+ if (TREE_CODE (exp) == ERROR_MARK)
+ return const0_rtx;
+
if (bitsize < HOST_BITS_PER_WIDE_INT)
width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
{
rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
+ /* If BITSIZE is narrower than the size of the type of EXP
+ we will be narrowing TEMP. Normally, what's wanted are the
+ low-order bits. However, if EXP's type is a record and this is
+ big-endian machine, we want the upper BITSIZE bits. */
+ if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
+ && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
+ && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
+ temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
+ size_int (GET_MODE_BITSIZE (GET_MODE (temp))
+ - bitsize),
+ temp, 1);
+
/* Unless MODE is VOIDmode or BLKmode, convert TEMP to
MODE. */
if (mode != VOIDmode && mode != BLKmode
/* Now build a reference to just the desired component. */
- to_rtx = change_address (target, mode,
- plus_constant (addr, (bitpos / BITS_PER_UNIT)));
+ to_rtx = copy_rtx (change_address (target, mode,
+ plus_constant (addr,
+ (bitpos
+ / BITS_PER_UNIT))));
MEM_IN_STRUCT_P (to_rtx) = 1;
return store_expr (exp, to_rtx, value_mode != VOIDmode);
}
}
\f
-/* Return true if any object containing the innermost array is an unaligned
- packed structure field. */
-
-static int
-get_inner_unaligned_p (exp)
- tree exp;
-{
- int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
-
- while (1)
- {
- if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
- {
- if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
- < needed_alignment)
- return 1;
- }
- else if (TREE_CODE (exp) != ARRAY_REF
- && TREE_CODE (exp) != NON_LVALUE_EXPR
- && ! ((TREE_CODE (exp) == NOP_EXPR
- || TREE_CODE (exp) == CONVERT_EXPR)
- && (TYPE_MODE (TREE_TYPE (exp))
- == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
- break;
-
- exp = TREE_OPERAND (exp, 0);
- }
-
- return 0;
-}
-
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
ARRAY_REFs and find the ultimate containing object, which we return.
giving the variable offset (in units) in *POFFSET.
This offset is in addition to the bit position.
If the position is not variable, we store 0 in *POFFSET.
+ We set *PALIGNMENT to the alignment in bytes of the address that will be
+ computed. This is the alignment of the thing we return if *POFFSET
+ is zero, but can be less strictly aligned if *POFFSET is nonzero.
If any of the extraction expressions is volatile,
we store 1 in *PVOLATILEP. Otherwise we don't change that.
If the field describes a variable-sized object, *PMODE is set to
VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
- this case, but the address of the object can be found. */
+ this case, but the address of the object can be found. */
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
- punsignedp, pvolatilep)
+ punsignedp, pvolatilep, palignment)
tree exp;
int *pbitsize;
int *pbitpos;
enum machine_mode *pmode;
int *punsignedp;
int *pvolatilep;
+ int *palignment;
{
tree orig_exp = exp;
tree size_tree = 0;
enum machine_mode mode = VOIDmode;
tree offset = integer_zero_node;
+ int alignment = BIGGEST_ALIGNMENT;
if (TREE_CODE (exp) == COMPONENT_REF)
{
constant = pos, var = integer_zero_node;
*pbitpos += TREE_INT_CST_LOW (constant);
-
- if (var)
- offset = size_binop (PLUS_EXPR, offset,
- size_binop (EXACT_DIV_EXPR, var,
- size_int (BITS_PER_UNIT)));
+ offset = size_binop (PLUS_EXPR, offset,
+ size_binop (EXACT_DIV_EXPR, var,
+ size_int (BITS_PER_UNIT)));
}
else if (TREE_CODE (exp) == ARRAY_REF)
= domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
tree index_type = TREE_TYPE (index);
- if (! integer_zerop (low_bound))
- index = fold (build (MINUS_EXPR, index_type, index, low_bound));
-
if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
{
index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
index_type = TREE_TYPE (index);
}
- index = fold (build (MULT_EXPR, index_type, index,
- TYPE_SIZE (TREE_TYPE (exp))));
+ if (! integer_zerop (low_bound))
+ index = fold (build (MINUS_EXPR, index_type, index, low_bound));
+
+ if (TREE_CODE (index) == INTEGER_CST)
+ {
+ index = convert (sbitsizetype, index);
+ index_type = TREE_TYPE (index);
+ }
+
+ index = fold (build (MULT_EXPR, sbitsizetype, index,
+ convert (sbitsizetype,
+ TYPE_SIZE (TREE_TYPE (exp)))));
if (TREE_CODE (index) == INTEGER_CST
&& TREE_INT_CST_HIGH (index) == 0)
*pbitpos += TREE_INT_CST_LOW (index);
else
- offset = size_binop (PLUS_EXPR, offset,
- size_binop (FLOOR_DIV_EXPR, index,
- size_int (BITS_PER_UNIT)));
+ {
+ offset = size_binop (PLUS_EXPR, offset,
+ convert (sizetype,
+ size_binop (FLOOR_DIV_EXPR, index,
+ size_int (BITS_PER_UNIT))));
+ if (contains_placeholder_p (offset))
+ offset = build (WITH_RECORD_EXPR, sizetype, offset, exp);
+ }
}
else if (TREE_CODE (exp) != NON_LVALUE_EXPR
&& ! ((TREE_CODE (exp) == NOP_EXPR
/* If any reference in the chain is volatile, the effect is volatile. */
if (TREE_THIS_VOLATILE (exp))
*pvolatilep = 1;
+
+ /* If the offset is non-constant already, then we can't assume any
+ alignment more than the alignment here. */
+ if (! integer_zerop (offset))
+ alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
+
exp = TREE_OPERAND (exp, 0);
}
- /* If this was a bit-field, see if there is a mode that allows direct
- access in case EXP is in memory. */
- if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
- {
- mode = mode_for_size (*pbitsize,
- (TYPE_MODE (TREE_TYPE (orig_exp)) == BLKmode
- ? MODE_INT
- : GET_MODE_CLASS (TYPE_MODE
- (TREE_TYPE (orig_exp)))),
- 0);
- if (mode == BLKmode)
- mode = VOIDmode;
- }
+ if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
+ alignment = MIN (alignment, DECL_ALIGN (exp));
+ else if (TREE_TYPE (exp) != 0)
+ alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
if (integer_zerop (offset))
offset = 0;
*pmode = mode;
*poffset = offset;
+ *palignment = alignment / BITS_PER_UNIT;
return exp;
}
+
+
+/* Subroutine of expand_expr: map an expand MODIFIER onto the
+   memory_use_mode reported to the -fcheck-memory-usage runtime.
+   A normal expansion is a read (MEMORY_USE_RO); the explicit
+   EXPAND_MEMORY_USE_* modifiers map onto their corresponding modes;
+   the remaining modifiers want no usage check at all.
+   EXPAND_MEMORY_USE_BAD must never reach this point, so abort.  */
+static enum memory_use_mode
+get_memory_usage_from_modifier (modifier)
+     enum expand_modifier modifier;
+{
+  switch (modifier)
+    {
+    case EXPAND_NORMAL:
+      return MEMORY_USE_RO;
+    case EXPAND_MEMORY_USE_WO:
+      return MEMORY_USE_WO;
+    case EXPAND_MEMORY_USE_RW:
+      return MEMORY_USE_RW;
+    case EXPAND_INITIALIZER:
+    case EXPAND_MEMORY_USE_DONT:
+    case EXPAND_SUM:
+    case EXPAND_CONST_ADDRESS:
+      /* No checking wanted for these.  */
+      return MEMORY_USE_DONT;
+    case EXPAND_MEMORY_USE_BAD:
+    default:
+      abort ();
+    }
+}
\f
/* Given an rtx VALUE that may contain additions and multiplications,
return an equivalent value that just refers to a register or memory.
return safe_from_p (x, TREE_OPERAND (exp, 1));
case METHOD_CALL_EXPR:
- /* This takes a rtx argument, but shouldn't appear here. */
+ /* This takes a rtx argument, but shouldn't appear here. */
abort ();
+
+ default:
+ break;
}
/* If we have an rtx, we do not need to scan our operands. */
return 1;
return 0;
}
+
+/* Subroutine of expand_expr: return rtx if EXP is a
+ variable or parameter; else return 0. */
+
+static rtx
+var_rtx (exp)
+ tree exp;
+{
+ STRIP_NOPS (exp);
+ switch (TREE_CODE (exp))
+ {
+ case PARM_DECL:
+ case VAR_DECL:
+ return DECL_RTL (exp);
+ default:
+ return 0;
+ }
+}
\f
/* expand_expr: generate code for computing expression EXP.
An rtx for the computed value is returned. The value is never null.
/* Use subtarget as the target for operand 0 of a binary operation. */
rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
rtx original_target = target;
- /* Maybe defer this until sure not doing bytecode? */
int ignore = (target == const0_rtx
|| ((code == NON_LVALUE_EXPR || code == NOP_EXPR
|| code == CONVERT_EXPR || code == REFERENCE_EXPR
|| code == COND_EXPR)
&& TREE_CODE (type) == VOID_TYPE));
tree context;
+ /* Used by check-memory-usage to make modifier read only. */
+ enum expand_modifier ro_modifier;
-
- if (output_bytecode && modifier != EXPAND_INITIALIZER)
- {
- bc_expand_expr (exp);
- return NULL;
- }
+ /* Make a read-only version of the modifier. */
+ if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
+ || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
+ ro_modifier = modifier;
+ else
+ ro_modifier = EXPAND_NORMAL;
/* Don't use hard regs as subtargets, because the combiner
can only handle pseudo regs. */
&& TREE_CODE (exp) != FUNCTION_DECL
&& mode != VOIDmode && mode != BLKmode)
{
- temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
+ temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
if (GET_CODE (temp) == MEM)
temp = copy_to_reg (temp);
return const0_rtx;
if (TREE_CODE_CLASS (code) == '1')
return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
- VOIDmode, modifier);
+ VOIDmode, ro_modifier);
else if (TREE_CODE_CLASS (code) == '2'
|| TREE_CODE_CLASS (code) == '<')
{
- expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
- expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
+ expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
+ expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
return const0_rtx;
}
else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
/* If the second operand has no side effects, just evaluate
- the first. */
+ the first. */
return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
- VOIDmode, modifier);
+ VOIDmode, ro_modifier);
target = 0;
}
{
tree function = decl_function_context (exp);
/* Handle using a label in a containing function. */
- if (function != current_function_decl && function != 0)
+ if (function != current_function_decl
+ && function != inline_function_decl && function != 0)
{
struct function *p = find_function_data (function);
/* Allocate in the memory associated with the function
push_obstacks (p->function_obstack,
p->function_maybepermanent_obstack);
- p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
- label_rtx (exp), p->forced_labels);
+ p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
+ label_rtx (exp),
+ p->forced_labels);
pop_obstacks ();
}
else if (modifier == EXPAND_INITIALIZER)
- forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
- label_rtx (exp), forced_labels);
- temp = gen_rtx (MEM, FUNCTION_MODE,
- gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
- if (function != current_function_decl && function != 0)
+ forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
+ label_rtx (exp), forced_labels);
+ temp = gen_rtx_MEM (FUNCTION_MODE,
+ gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
+ if (function != current_function_decl
+ && function != inline_function_decl && function != 0)
LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
return temp;
}
return CONST0_RTX (mode);
}
- /* ... fall through ... */
+ /* ... fall through ... */
case VAR_DECL:
/* If a static var's type was incomplete when the decl was written,
pop_obstacks ();
}
- /* ... fall through ... */
+ /* Only check automatic variables. Currently, function arguments are
+ not checked (this can be done at compile-time with prototypes).
+ Aggregates are not checked. */
+ if (flag_check_memory_usage && code == VAR_DECL
+ && GET_CODE (DECL_RTL (exp)) == MEM
+ && DECL_CONTEXT (exp) != NULL_TREE
+ && ! TREE_STATIC (exp)
+ && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
+ {
+ enum memory_use_mode memory_usage;
+ memory_usage = get_memory_usage_from_modifier (modifier);
+
+ if (memory_usage != MEMORY_USE_DONT)
+ emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ XEXP (DECL_RTL (exp), 0), ptr_mode,
+ GEN_INT (int_size_in_bytes (type)),
+ TYPE_MODE (sizetype),
+ GEN_INT (memory_usage),
+ TYPE_MODE (integer_type_node));
+ }
+
+ /* ... fall through ... */
case FUNCTION_DECL:
case RESULT_DECL:
abort ();
addr = XEXP (DECL_RTL (exp), 0);
if (GET_CODE (addr) == MEM)
- addr = gen_rtx (MEM, Pmode,
- fix_lexical_addr (XEXP (addr, 0), exp));
+ addr = gen_rtx_MEM (Pmode,
+ fix_lexical_addr (XEXP (addr, 0), exp));
else
addr = fix_lexical_addr (addr, exp);
temp = change_address (DECL_RTL (exp), mode, addr);
!= promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
abort ();
- temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
+ temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
SUBREG_PROMOTED_VAR_P (temp) = 1;
SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
return temp;
mode);
case CONST_DECL:
- return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
+ return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
+ EXPAND_MEMORY_USE_BAD);
case REAL_CST:
/* If optimized, generate immediate CONST_DOUBLE
case SAVE_EXPR:
context = decl_function_context (exp);
+ /* If this SAVE_EXPR was at global context, assume we are an
+ initialization function and move it into our context. */
+ if (context == 0)
+ SAVE_EXPR_CONTEXT (exp) = current_function_decl;
+
/* We treat inline_function_decl as an alias for the current function
because that is the inline function whose vars, types, etc.
are being merged into the current function.
/* If this is non-local, handle it. */
if (context)
{
+ /* The following call just exists to abort if the context is
+ not of a containing function. */
+ find_function_data (context);
+
temp = SAVE_EXPR_RTL (exp);
if (temp && GET_CODE (temp) == REG)
{
SAVE_EXPR_RTL (exp) = temp;
if (!optimize && GET_CODE (temp) == REG)
- save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
- save_expr_regs);
+ save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
+ save_expr_regs);
/* If the mode of TEMP does not match that of the expression, it
must be a promoted value. We pass store_expr a SUBREG of the
if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
{
- temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
+ temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
SUBREG_PROMOTED_VAR_P (temp) = 1;
SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
}
if (temp == const0_rtx)
- expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
+ expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
+ EXPAND_MEMORY_USE_BAD);
else
store_expr (TREE_OPERAND (exp, 0), temp, 0);
}
/* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
must be a promoted value. We return a SUBREG of the wanted mode,
- but mark it so that we know that it was already extended. */
+ but mark it so that we know that it was already extended. */
if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
&& GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
{
/* Compute the signedness and make the proper SUBREG. */
promote_mode (type, mode, &unsignedp, 0);
- temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
+ temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
SUBREG_PROMOTED_VAR_P (temp) = 1;
SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
return temp;
}
case PLACEHOLDER_EXPR:
- /* If there is an object on the head of the placeholder list,
- see if some object in it's references is of type TYPE. For
- further information, see tree.def. */
- if (placeholder_list)
- {
- tree object;
- tree old_list = placeholder_list;
-
- for (object = TREE_PURPOSE (placeholder_list);
- (TYPE_MAIN_VARIANT (TREE_TYPE (object))
- != TYPE_MAIN_VARIANT (type))
- && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
- || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
- || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
- || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
- object = TREE_OPERAND (object, 0))
- ;
-
- if (object != 0
- && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
- == TYPE_MAIN_VARIANT (type)))
- {
- /* Expand this object skipping the list entries before
- it was found in case it is also a PLACEHOLDER_EXPR.
- In that case, we want to translate it using subsequent
- entries. */
- placeholder_list = TREE_CHAIN (placeholder_list);
- temp = expand_expr (object, original_target, tmode, modifier);
- placeholder_list = old_list;
- return temp;
- }
- }
+ {
+ tree placeholder_expr;
+
+ /* If there is an object on the head of the placeholder list,
+ see if some object in its references is of type TYPE. For
+ further information, see tree.def. */
+ for (placeholder_expr = placeholder_list;
+ placeholder_expr != 0;
+ placeholder_expr = TREE_CHAIN (placeholder_expr))
+ {
+ tree need_type = TYPE_MAIN_VARIANT (type);
+ tree object = 0;
+ tree old_list = placeholder_list;
+ tree elt;
+
+ /* See if the object is the type that we want. */
+ if ((TYPE_MAIN_VARIANT (TREE_TYPE
+ (TREE_PURPOSE (placeholder_expr)))
+ == need_type))
+ object = TREE_PURPOSE (placeholder_expr);
+
+ /* Find the outermost reference that is of the type we want. */
+ for (elt = TREE_PURPOSE (placeholder_expr);
+ elt != 0 && object == 0
+ && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
+ || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
+ || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
+ || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
+ elt = ((TREE_CODE (elt) == COMPOUND_EXPR
+ || TREE_CODE (elt) == COND_EXPR)
+ ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
+ if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
+ && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
+ == need_type))
+ object = TREE_OPERAND (elt, 0);
+
+ if (object != 0)
+ {
+ /* Expand this object skipping the list entries before
+ it was found in case it is also a PLACEHOLDER_EXPR.
+ In that case, we want to translate it using subsequent
+ entries. */
+ placeholder_list = TREE_CHAIN (placeholder_expr);
+ temp = expand_expr (object, original_target, tmode,
+ ro_modifier);
+ placeholder_list = old_list;
+ return temp;
+ }
+ }
+ }
/* We can't find the object or there was a missing WITH_RECORD_EXPR. */
abort ();
placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
placeholder_list);
target = expand_expr (TREE_OPERAND (exp, 0), original_target,
- tmode, modifier);
+ tmode, ro_modifier);
placeholder_list = TREE_CHAIN (placeholder_list);
return target;
int vars_need_expansion = 0;
/* Need to open a binding contour here because
- if there are any cleanups they most be contained here. */
+ if there are any cleanups they must be contained here. */
expand_start_bindings (0);
/* Mark the corresponding BLOCK for output in its proper place. */
vars = TREE_CHAIN (vars);
}
- temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
+ temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
}
case RTL_EXPR:
- if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
- abort ();
- emit_insns (RTL_EXPR_SEQUENCE (exp));
- RTL_EXPR_SEQUENCE (exp) = const0_rtx;
+ if (RTL_EXPR_SEQUENCE (exp))
+ {
+ if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
+ abort ();
+ emit_insns (RTL_EXPR_SEQUENCE (exp));
+ RTL_EXPR_SEQUENCE (exp) = const0_rtx;
+ }
preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
free_temps_for_rtl_expr (exp);
return RTL_EXPR_RTL (exp);
{
tree elt;
for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
- expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
+ expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
+ EXPAND_MEMORY_USE_BAD);
return const0_rtx;
}
else
{
- if (target == 0 || ! safe_from_p (target, exp))
+ /* Handle calls that pass values in multiple non-contiguous
+ locations. The Irix 6 ABI has examples of this. */
+ if (target == 0 || ! safe_from_p (target, exp)
+ || GET_CODE (target) == PARALLEL)
{
if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
if (TREE_READONLY (exp))
{
if (GET_CODE (target) == MEM)
- target = change_address (target, GET_MODE (target),
- XEXP (target, 0));
+ target = copy_rtx (target);
+
RTX_UNCHANGING_P (target) = 1;
}
{
tree exp1 = TREE_OPERAND (exp, 0);
tree exp2;
+ tree index;
+ tree string = string_constant (exp1, &index);
+ int i;
+
+ if (string
+ && TREE_CODE (string) == STRING_CST
+ && TREE_CODE (index) == INTEGER_CST
+ && !TREE_INT_CST_HIGH (index)
+ && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
+ && GET_MODE_CLASS (mode) == MODE_INT
+ && GET_MODE_SIZE (mode) == 1)
+ return GEN_INT (TREE_STRING_POINTER (string)[i]);
op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
op0 = memory_address (mode, op0);
- temp = gen_rtx (MEM, mode, op0);
+ if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
+ {
+ enum memory_use_mode memory_usage;
+ memory_usage = get_memory_usage_from_modifier (modifier);
+
+ if (memory_usage != MEMORY_USE_DONT)
+ {
+ in_check_memory_usage = 1;
+ emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ op0, ptr_mode,
+ GEN_INT (int_size_in_bytes (type)),
+ TYPE_MODE (sizetype),
+ GEN_INT (memory_usage),
+ TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
+ }
+ }
+
+ temp = gen_rtx_MEM (mode, op0);
/* If address was computed by addition,
mark this as an element of an aggregate. */
if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
tree index = TREE_OPERAND (exp, 1);
tree index_type = TREE_TYPE (index);
- int i;
-
- if (TREE_CODE (low_bound) != INTEGER_CST
- && contains_placeholder_p (low_bound))
- low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
+ HOST_WIDE_INT i;
/* Optimize the special-case of a zero lower bound.
index = fold (build (MINUS_EXPR, index_type, index,
convert (sizetype, low_bound)));
- if ((TREE_CODE (index) != INTEGER_CST
- || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
- && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
- {
- /* Nonconstant array index or nonconstant element size, and
- not an array in an unaligned (packed) structure field.
- Generate the tree for *(&array+index) and expand that,
- except do it in a language-independent way
- and don't complain about non-lvalue arrays.
- `mark_addressable' should already have been called
- for any array for which this case will be reached. */
-
- /* Don't forget the const or volatile flag from the array
- element. */
- tree variant_type = build_type_variant (type,
- TREE_READONLY (exp),
- TREE_THIS_VOLATILE (exp));
- tree array_adr = build1 (ADDR_EXPR,
- build_pointer_type (variant_type), array);
- tree elt;
- tree size = size_in_bytes (type);
-
- /* Convert the integer argument to a type the same size as sizetype
- so the multiply won't overflow spuriously. */
- if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
- index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
- index);
-
- if (TREE_CODE (size) != INTEGER_CST
- && contains_placeholder_p (size))
- size = build (WITH_RECORD_EXPR, sizetype, size, exp);
-
- /* Don't think the address has side effects
- just because the array does.
- (In some cases the address might have side effects,
- and we fail to record that fact here. However, it should not
- matter, since expand_expr should not care.) */
- TREE_SIDE_EFFECTS (array_adr) = 0;
-
- elt
- = build1
- (INDIRECT_REF, type,
- fold (build (PLUS_EXPR,
- TYPE_POINTER_TO (variant_type),
- array_adr,
- fold
- (build1
- (NOP_EXPR,
- TYPE_POINTER_TO (variant_type),
- fold (build (MULT_EXPR, TREE_TYPE (index),
- index,
- convert (TREE_TYPE (index),
- size))))))));;
-
- /* Volatility, etc., of new expression is same as old
- expression. */
- TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
- TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
- TREE_READONLY (elt) = TREE_READONLY (exp);
-
- return expand_expr (elt, target, tmode, modifier);
- }
-
/* Fold an expression like: "foo"[2].
This is not done in fold so it won't happen inside &.
Don't fold if this is for wide characters since it's too
elem = TREE_CHAIN (elem);
if (elem)
return expand_expr (fold (TREE_VALUE (elem)), target,
- tmode, modifier);
+ tmode, ro_modifier);
}
}
&& TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
&& TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
{
- if (TREE_CODE (index) == INTEGER_CST
- && TREE_INT_CST_HIGH (index) == 0)
+ if (TREE_CODE (index) == INTEGER_CST)
{
tree init = DECL_INITIAL (array);
elem = TREE_CHAIN (elem);
if (elem)
return expand_expr (fold (TREE_VALUE (elem)), target,
- tmode, modifier);
+ tmode, ro_modifier);
}
else if (TREE_CODE (init) == STRING_CST
- && i < TREE_STRING_LENGTH (init))
- return GEN_INT (TREE_STRING_POINTER (init)[i]);
+ && TREE_INT_CST_HIGH (index) == 0
+ && (TREE_INT_CST_LOW (index)
+ < TREE_STRING_LENGTH (init)))
+ return (GEN_INT
+ (TREE_STRING_POINTER
+ (init)[TREE_INT_CST_LOW (index)]));
}
}
}
- /* Treat array-ref with constant index as a component-ref. */
+ /* ... fall through ... */
case COMPONENT_REF:
case BIT_FIELD_REF:
for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
elt = TREE_CHAIN (elt))
- if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
- return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
+ if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
+ /* We can normally use the value of the field in the
+ CONSTRUCTOR. However, if this is a bitfield in
+ an integral mode that we can fit in a HOST_WIDE_INT,
+ we must mask only the number of bits in the bitfield,
+ since this is done implicitly by the constructor. If
+ the bitfield does not meet either of those conditions,
+ we can't do this optimization. */
+ && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
+ || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
+ == MODE_INT)
+ && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
+ <= HOST_BITS_PER_WIDE_INT))))
+ {
+ op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
+ if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
+ {
+ int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
+ enum machine_mode imode
+ = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
+
+ if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
+ {
+ op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
+ op0 = expand_and (op0, op1, target);
+ }
+ else
+ {
+ tree count
+ = build_int_2 (imode - bitsize, 0);
+
+ op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
+ target, 0);
+ op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
+ target, 0);
+ }
+ }
+
+ return op0;
+ }
}
{
int bitpos;
tree offset;
int volatilep = 0;
- tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode1, &unsignedp, &volatilep);
int alignment;
+ tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
+ &mode1, &unsignedp, &volatilep,
+ &alignment);
/* If we got back the original object, something is wrong. Perhaps
we are evaluating an expression too early. In any event, don't
op0 = validize_mem (force_const_mem (mode, op0));
}
- alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
if (offset != 0)
{
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
if (GET_CODE (op0) != MEM)
abort ();
+
+ if (GET_MODE (offset_rtx) != ptr_mode)
+#ifdef POINTERS_EXTEND_UNSIGNED
+ offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
+#else
+ offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
+#endif
+
op0 = change_address (op0, VOIDmode,
- gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
- force_reg (ptr_mode, offset_rtx)));
- /* If we have a variable offset, the known alignment
- is only that of the innermost structure containing the field.
- (Actually, we could sometimes do better by using the
- size of an element of the innermost array, but no need.) */
- if (TREE_CODE (exp) == COMPONENT_REF
- || TREE_CODE (exp) == BIT_FIELD_REF)
- alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
- / BITS_PER_UNIT);
+ gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
+ force_reg (ptr_mode, offset_rtx)));
}
/* Don't forget about volatility even if this is a bitfield. */
MEM_VOLATILE_P (op0) = 1;
}
+ /* Check the access. */
+ if (flag_check_memory_usage && GET_CODE (op0) == MEM)
+ {
+ enum memory_use_mode memory_usage;
+ memory_usage = get_memory_usage_from_modifier (modifier);
+
+ if (memory_usage != MEMORY_USE_DONT)
+ {
+ rtx to;
+ int size;
+
+ to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
+ size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
+
+ /* Check the access right of the pointer. */
+ if (size > BITS_PER_UNIT)
+ emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ to, ptr_mode,
+ GEN_INT (size / BITS_PER_UNIT),
+ TYPE_MODE (sizetype),
+ GEN_INT (memory_usage),
+ TYPE_MODE (integer_type_node));
+ }
+ }
+
/* In cases where an aligned union has an unaligned object
as a field, we might be extracting a BLKmode value from
an integer-mode (e.g., SImode) object. Handle this case
|| GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
|| (modifier != EXPAND_CONST_ADDRESS
&& modifier != EXPAND_INITIALIZER
- && ((mode1 != BLKmode && ! direct_load[(int) mode1])
+ && ((mode1 != BLKmode && ! direct_load[(int) mode1]
+ && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
+ && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
/* If the field isn't aligned enough to fetch as a memref,
fetch it as a bit field. */
|| (SLOW_UNALIGNED_ACCESS
unsignedp, target, ext_mode, ext_mode,
alignment,
int_size_in_bytes (TREE_TYPE (tem)));
+
+ /* If the result is a record type and BITSIZE is narrower than
+ the mode of OP0, an integral mode, and this is a big endian
+ machine, we must put the field into the high-order bits. */
+ if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
+ && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
+ && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
+ op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
+ size_int (GET_MODE_BITSIZE (GET_MODE (op0))
+ - bitsize),
+ op0, 1);
+
if (mode == BLKmode)
{
rtx new = assign_stack_temp (ext_mode,
/* Get a reference to just this component. */
if (modifier == EXPAND_CONST_ADDRESS
|| modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
- op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
- (bitpos / BITS_PER_UNIT)));
+ op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
+ (bitpos / BITS_PER_UNIT)));
else
op0 = change_address (op0, mode1,
plus_constant (XEXP (op0, 0),
MEM_IN_STRUCT_P (op0) = 1;
MEM_VOLATILE_P (op0) |= volatilep;
- if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
+ if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
+ || modifier == EXPAND_CONST_ADDRESS
+ || modifier == EXPAND_INITIALIZER)
return op0;
- if (target == 0)
+ else if (target == 0)
target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
+
convert_move (target, op0, unsignedp);
return target;
}
/* Extract the bit we want to examine */
bit = expand_shift (RSHIFT_EXPR, byte_mode,
- gen_rtx (MEM, byte_mode, addr),
+ gen_rtx_MEM (byte_mode, addr),
make_tree (TREE_TYPE (index), rem),
NULL_RTX, 1);
result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
if (RTL_EXPR_RTL (exp) == 0)
{
RTL_EXPR_RTL (exp)
- = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
- cleanups_this_call
- = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
+ = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
+ expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
+
/* That's it for this cleanup. */
TREE_OPERAND (exp, 2) = 0;
- (*interim_eh_hook) (NULL_TREE);
}
return RTL_EXPR_RTL (exp);
case CLEANUP_POINT_EXPR:
{
extern int temp_slot_level;
- tree old_cleanups = cleanups_this_call;
- int old_temp_level = target_temp_slot_level;
- push_temp_slots ();
+ /* Start a new binding layer that will keep track of all cleanup
+ actions to be performed. */
+ expand_start_bindings (0);
+
target_temp_slot_level = temp_slot_level;
- op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
+
+ op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
/* If we're going to use this value, load it up now. */
if (! ignore)
op0 = force_not_mem (op0);
- expand_cleanups_to (old_cleanups);
preserve_temp_slots (op0);
- free_temp_slots ();
- pop_temp_slots ();
- target_temp_slot_level = old_temp_level;
+ expand_end_bindings (NULL_TREE, 0, 0);
}
return op0;
if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
{
op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
- modifier);
+ ro_modifier);
/* If the signedness of the conversion differs and OP0 is
a promoted SUBREG, clear that indication since we now
op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
if (modifier == EXPAND_INITIALIZER)
- return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
+ return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
if (target == 0)
return
return target;
case PLUS_EXPR:
- /* We come here from MINUS_EXPR when the second operand is a constant. */
+ /* We come here from MINUS_EXPR when the second operand is a
+ constant. */
plus_expr:
this_optab = add_optab;
if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
subtarget = 0;
- op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
- op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
+ op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
+ op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
both_summands:
/* Make sure any term that's a sum with a constant comes last. */
op0 = temp;
/* Ensure that MULT comes first if there is one. */
else if (GET_CODE (op0) == MULT)
- op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
+ op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
else
- op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
+ op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
/* Let's also eliminate constants from op0 if possible. */
op0 = eliminate_constant_term (op0, &constant_term);
if (temp != 0)
op1 = temp;
else
- op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
+ op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
}
/* Put a constant term last and put a multiplication first. */
temp = op1, op1 = op0, op0 = temp;
temp = simplify_binary_operation (PLUS, mode, op0, op1);
- return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
+ return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
case MINUS_EXPR:
/* For initializers, we are allowed to return a MINUS of two
&& really_constant_p (TREE_OPERAND (exp, 1)))
{
rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
- VOIDmode, modifier);
+ VOIDmode, ro_modifier);
rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
- VOIDmode, modifier);
+ VOIDmode, ro_modifier);
/* If the last operand is a CONST_INT, use plus_constant of
the negated constant. Else make the MINUS. */
if (GET_CODE (op1) == CONST_INT)
return plus_constant (op0, - INTVAL (op1));
else
- return gen_rtx (MINUS, mode, op0, op1);
+ return gen_rtx_MINUS (mode, op0, op1);
}
/* Convert A - const to A + (-const). */
if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
return expand_expr (convert (type,
build (PLUS_EXPR, newtype,
newop0, newneg)),
- target, tmode, modifier);
+ target, tmode, ro_modifier);
}
else
{
&& TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
{
- op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
+ op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
+ EXPAND_SUM);
/* Apply distributive law if OP0 is x+c. */
if (GET_CODE (op0) == PLUS
&& GET_CODE (XEXP (op0, 1)) == CONST_INT)
- return gen_rtx (PLUS, mode,
- gen_rtx (MULT, mode, XEXP (op0, 0),
- GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
+ return gen_rtx_PLUS (mode,
+ gen_rtx_MULT (mode, XEXP (op0, 0),
+ GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
* INTVAL (XEXP (op0, 1))));
if (GET_CODE (op0) != REG)
op0 = copy_to_mode_reg (mode, op0);
- return gen_rtx (MULT, mode, op0,
- GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
+ return gen_rtx_MULT (mode, op0,
+ GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
}
if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
if (temp != 0)
return temp;
- /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
+ /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
&& original_target
&& GET_CODE (original_target) == REG
VOIDmode, 0);
case COND_EXPR:
- {
- rtx flag = NULL_RTX;
- tree left_cleanups = NULL_TREE;
- tree right_cleanups = NULL_TREE;
-
- /* Used to save a pointer to the place to put the setting of
- the flag that indicates if this side of the conditional was
- taken. We backpatch the code, if we find out later that we
- have any conditional cleanups that need to be performed. */
- rtx dest_right_flag = NULL_RTX;
- rtx dest_left_flag = NULL_RTX;
+ /* If we would have a "singleton" (see below) were it not for a
+ conversion in each arm, bring that conversion back out. */
+ if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
+ && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
+ && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
+ == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
+ {
+ tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
+ tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
+
+ if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
+ && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
+ || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
+ && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
+ || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
+ && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
+ || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
+ && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
+ return expand_expr (build1 (NOP_EXPR, type,
+ build (COND_EXPR, TREE_TYPE (true),
+ TREE_OPERAND (exp, 0),
+ true, false)),
+ target, tmode, modifier);
+ }
+ {
/* Note that COND_EXPRs whose type is a structure or union
are required to be constructed to contain assignments of
a temporary variable, so that we can evaluate them here
tree singleton = 0;
tree binary_op = 0, unary_op = 0;
- tree old_cleanups = cleanups_this_call;
/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
convert it to our mode, if necessary. */
if (ignore)
{
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
- modifier);
+ ro_modifier);
return const0_rtx;
}
- op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
+ op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
if (GET_MODE (op0) == mode)
return op0;
return target;
}
- /* If we are not to produce a result, we have no target. Otherwise,
- if a target was specified use it; it will not be used as an
- intermediate target unless it is safe. If no target, use a
- temporary. */
-
- if (ignore)
- temp = 0;
- else if (original_target
- && safe_from_p (original_target, TREE_OPERAND (exp, 0))
- && GET_MODE (original_target) == mode
- && ! (GET_CODE (original_target) == MEM
- && MEM_VOLATILE_P (original_target)))
- temp = original_target;
- else
- temp = assign_temp (type, 0, 0, 1);
-
- /* Check for X ? A + B : A. If we have this, we can copy
- A to the output and conditionally add B. Similarly for unary
- operations. Don't do this if X has side-effects because
- those side effects might affect A or B and the "?" operation is
- a sequence point in ANSI. (We test for side effects later.) */
+ /* Check for X ? A + B : A. If we have this, we can copy A to the
+ output and conditionally add B. Similarly for unary operations.
+ Don't do this if X has side-effects because those side effects
+ might affect A or B and the "?" operation is a sequence point in
+ ANSI. (operand_equal_p tests for side effects.) */
if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
&& operand_equal_p (TREE_OPERAND (exp, 2),
TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
- /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
- operation, do this as A + (X != 0). Similarly for other simple
- binary operators. */
+ /* If we are not to produce a result, we have no target. Otherwise,
+ if a target was specified use it; it will not be used as an
+ intermediate target unless it is safe. If no target, use a
+ temporary. */
+
+ if (ignore)
+ temp = 0;
+ else if (original_target
+ && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
+ || (singleton && GET_CODE (original_target) == REG
+ && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
+ && original_target == var_rtx (singleton)))
+ && GET_MODE (original_target) == mode
+ && ! (GET_CODE (original_target) == MEM
+ && MEM_VOLATILE_P (original_target)))
+ temp = original_target;
+ else if (TREE_ADDRESSABLE (type))
+ abort ();
+ else
+ temp = assign_temp (type, 0, 0, 1);
+
+ /* If we had X ? A + C : A, with C a constant power of 2, and we can
+ do the test of X as a store-flag operation, do this as
+ A + ((X != 0) << log C). Similarly for other simple binary
+ operators. Only do for C == 1 if BRANCH_COST is low. */
if (temp && singleton && binary_op
- && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
&& (TREE_CODE (binary_op) == PLUS_EXPR
|| TREE_CODE (binary_op) == MINUS_EXPR
|| TREE_CODE (binary_op) == BIT_IOR_EXPR
|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
- && integer_onep (TREE_OPERAND (binary_op, 1))
+ && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
+ : integer_onep (TREE_OPERAND (binary_op, 1)))
&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
{
rtx result;
? temp : NULL_RTX),
mode, BRANCH_COST <= 1);
+ if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
+ result = expand_shift (LSHIFT_EXPR, mode, result,
+ build_int_2 (tree_log2
+ (TREE_OPERAND
+ (binary_op, 1)),
+ 0),
+ (safe_from_p (temp, singleton)
+ ? temp : NULL_RTX), 0);
+
if (result)
{
op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
NO_DEFER_POP;
op0 = gen_label_rtx ();
- flag = gen_reg_rtx (word_mode);
if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
{
if (temp != 0)
else
expand_expr (singleton,
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
- dest_left_flag = get_last_insn ();
if (singleton == TREE_OPERAND (exp, 1))
jumpif (TREE_OPERAND (exp, 0), op0);
else
jumpifnot (TREE_OPERAND (exp, 0), op0);
- /* Allows cleanups up to here. */
- old_cleanups = cleanups_this_call;
+ start_cleanup_deferral ();
if (binary_op && temp == 0)
/* Just touch the other operand. */
expand_expr (TREE_OPERAND (binary_op, 1),
make_tree (type, temp)),
temp, 0);
op1 = op0;
- dest_right_flag = get_last_insn ();
- }
-#if 0
- /* This is now done in jump.c and is better done there because it
- produces shorter register lifetimes. */
-
- /* Check for both possibilities either constants or variables
- in registers (but not the same as the target!). If so, can
- save branches by assigning one, branching, and assigning the
- other. */
- else if (temp && GET_MODE (temp) != BLKmode
- && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
- || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
- || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
- && DECL_RTL (TREE_OPERAND (exp, 1))
- && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
- && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
- && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
- || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
- || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
- && DECL_RTL (TREE_OPERAND (exp, 2))
- && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
- && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
- {
- if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
- temp = gen_reg_rtx (mode);
- store_expr (TREE_OPERAND (exp, 2), temp, 0);
- dest_left_flag = get_last_insn ();
- jumpifnot (TREE_OPERAND (exp, 0), op0);
-
- /* Allows cleanups up to here. */
- old_cleanups = cleanups_this_call;
- store_expr (TREE_OPERAND (exp, 1), temp, 0);
- op1 = op0;
- dest_right_flag = get_last_insn ();
}
-#endif
/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
comparison operator. If we have one of these cases, set the
output to A, branch on A (cse will merge these two references),
&& integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
&& operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
TREE_OPERAND (exp, 1), 0)
- && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
+ && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
+ || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
&& safe_from_p (temp, TREE_OPERAND (exp, 2)))
{
if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
temp = gen_reg_rtx (mode);
store_expr (TREE_OPERAND (exp, 1), temp, 0);
- dest_left_flag = get_last_insn ();
jumpif (TREE_OPERAND (exp, 0), op0);
- /* Allows cleanups up to here. */
- old_cleanups = cleanups_this_call;
+ start_cleanup_deferral ();
store_expr (TREE_OPERAND (exp, 2), temp, 0);
op1 = op0;
- dest_right_flag = get_last_insn ();
}
else if (temp
&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
&& integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
&& operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
TREE_OPERAND (exp, 2), 0)
- && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
+ && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
+ || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
&& safe_from_p (temp, TREE_OPERAND (exp, 1)))
{
if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
temp = gen_reg_rtx (mode);
store_expr (TREE_OPERAND (exp, 2), temp, 0);
- dest_left_flag = get_last_insn ();
jumpifnot (TREE_OPERAND (exp, 0), op0);
- /* Allows cleanups up to here. */
- old_cleanups = cleanups_this_call;
+ start_cleanup_deferral ();
store_expr (TREE_OPERAND (exp, 1), temp, 0);
op1 = op0;
- dest_right_flag = get_last_insn ();
}
else
{
op1 = gen_label_rtx ();
jumpifnot (TREE_OPERAND (exp, 0), op0);
- /* Allows cleanups up to here. */
- old_cleanups = cleanups_this_call;
+ start_cleanup_deferral ();
if (temp != 0)
store_expr (TREE_OPERAND (exp, 1), temp, 0);
else
expand_expr (TREE_OPERAND (exp, 1),
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
- dest_left_flag = get_last_insn ();
-
- /* Handle conditional cleanups, if any. */
- left_cleanups = defer_cleanups_to (old_cleanups);
-
+ end_cleanup_deferral ();
emit_queue ();
emit_jump_insn (gen_jump (op1));
emit_barrier ();
emit_label (op0);
+ start_cleanup_deferral ();
if (temp != 0)
store_expr (TREE_OPERAND (exp, 2), temp, 0);
else
expand_expr (TREE_OPERAND (exp, 2),
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
- dest_right_flag = get_last_insn ();
}
- /* Handle conditional cleanups, if any. */
- right_cleanups = defer_cleanups_to (old_cleanups);
+ end_cleanup_deferral ();
emit_queue ();
emit_label (op1);
OK_DEFER_POP;
- /* Add back in, any conditional cleanups. */
- if (left_cleanups || right_cleanups)
- {
- tree new_cleanups;
- tree cond;
- rtx last;
-
- /* Now that we know that a flag is needed, go back and add in the
- setting of the flag. */
-
- /* Do the left side flag. */
- last = get_last_insn ();
- /* Flag left cleanups as needed. */
- emit_move_insn (flag, const1_rtx);
- /* ??? deprecated, use sequences instead. */
- reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
-
- /* Do the right side flag. */
- last = get_last_insn ();
- /* Flag left cleanups as needed. */
- emit_move_insn (flag, const0_rtx);
- /* ??? deprecated, use sequences instead. */
- reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
-
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- /* convert flag, which is an rtx, into a tree. */
- cond = make_node (RTL_EXPR);
- TREE_TYPE (cond) = integer_type_node;
- RTL_EXPR_RTL (cond) = flag;
- RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
- cond = save_expr (cond);
-
- if (! left_cleanups)
- left_cleanups = integer_zero_node;
- if (! right_cleanups)
- right_cleanups = integer_zero_node;
- new_cleanups = build (COND_EXPR, void_type_node,
- truthvalue_conversion (cond),
- left_cleanups, right_cleanups);
- new_cleanups = fold (new_cleanups);
-
- pop_obstacks ();
-
- /* Now add in the conditionalized cleanups. */
- cleanups_this_call
- = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
- (*interim_eh_hook) (NULL_TREE);
- }
return temp;
}
tree slot = TREE_OPERAND (exp, 0);
tree cleanups = NULL_TREE;
tree exp1;
- rtx temp;
if (TREE_CODE (slot) != VAR_DECL)
abort ();
}
else
{
- target = assign_temp (type, 2, 1, 1);
+ target = assign_temp (type, 2, 0, 1);
/* All temp slots at this level must not conflict. */
preserve_temp_slots (target);
DECL_RTL (slot) = target;
+ if (TREE_ADDRESSABLE (slot))
+ {
+ TREE_ADDRESSABLE (slot) = 0;
+ mark_addressable (slot);
+ }
/* Since SLOT is not known to the called function
to belong to its stack frame, we must build an explicit
if (TREE_OPERAND (exp, 1) == NULL_TREE)
return target;
}
-
- DECL_RTL (slot) = target;
+ else
+ {
+ DECL_RTL (slot) = target;
+ /* If we must have an addressable slot, then make sure that
+ the RTL that we just stored in slot is OK. */
+ if (TREE_ADDRESSABLE (slot))
+ {
+ TREE_ADDRESSABLE (slot) = 0;
+ mark_addressable (slot);
+ }
+ }
}
exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
store_expr (exp1, target, 0);
- if (cleanups)
- {
- cleanups_this_call = tree_cons (NULL_TREE,
- cleanups,
- cleanups_this_call);
- (*interim_eh_hook) (NULL_TREE);
- }
+ expand_decl_cleanup (NULL_TREE, cleanups);
return target;
}
if (TREE_CODE (lhs) != VAR_DECL
&& TREE_CODE (lhs) != RESULT_DECL
- && TREE_CODE (lhs) != PARM_DECL)
+ && TREE_CODE (lhs) != PARM_DECL
+ && ! (TREE_CODE (lhs) == INDIRECT_REF
+ && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
preexpand_calls (exp);
/* Check for |= or &= of a bitfield of size one into another bitfield
case ADDR_EXPR:
/* If nonzero, TEMP will be set to the address of something that might
- be a MEM corresponding to a stack slot. */
+ be a MEM corresponding to a stack slot. */
temp = 0;
/* Are we taking the address of a nested function? */
|| GET_CODE (op0) == CONCAT)
{
/* If this object is in a register, it must be not
- be BLKmode. */
+ be BLKmode. */
tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
rtx memloc = assign_temp (inner_type, 1, 1, 1);
return target;
}
+ case TRY_CATCH_EXPR:
+ {
+ tree handler = TREE_OPERAND (exp, 1);
+
+ expand_eh_region_start ();
+
+ op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
+
+ expand_eh_region_end (handler);
+
+ return op0;
+ }
+
+ case POPDCC_EXPR:
+ {
+ rtx dcc = get_dynamic_cleanup_chain ();
+ emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
+ return const0_rtx;
+ }
+
+ case POPDHC_EXPR:
+ {
+ rtx dhc = get_dynamic_handler_chain ();
+ emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
+ return const0_rtx;
+ }
+
case ERROR_MARK:
op0 = CONST0_RTX (tmode);
if (op0 != 0)
}
-/* Emit bytecode to evaluate the given expression EXP to the stack. */
-void
-bc_expand_expr (exp)
- tree exp;
+\f
+/* Return the alignment in bits of EXP, a pointer valued expression.
+ But don't return more than MAX_ALIGN no matter what.
+ The alignment returned is, by default, the alignment of the thing that
+ EXP points to (if it is not a POINTER_TYPE, 0 is returned).
+
+ Otherwise, look at the expression to see if we can do better, i.e., if the
+ expression is actually pointing at an object whose alignment is tighter. */
+
+static int
+get_pointer_alignment (exp, max_align)
+ tree exp;
+ unsigned max_align;
{
- enum tree_code code;
- tree type, arg0;
- rtx r;
- struct binary_operator *binoptab;
- struct unary_operator *unoptab;
- struct increment_operator *incroptab;
- struct bc_label *lab, *lab1;
- enum bytecode_opcode opcode;
-
-
- code = TREE_CODE (exp);
-
- switch (code)
+ unsigned align, inner;
+
+ if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
+ return 0;
+
+ align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
+ align = MIN (align, max_align);
+
+ while (1)
{
- case PARM_DECL:
-
- if (DECL_RTL (exp) == 0)
+ switch (TREE_CODE (exp))
{
- error_with_decl (exp, "prior parameter's size depends on `%s'");
- return;
- }
-
- bc_load_parmaddr (DECL_RTL (exp));
- bc_load_memory (TREE_TYPE (exp), exp);
-
- return;
-
- case VAR_DECL:
-
- if (DECL_RTL (exp) == 0)
- abort ();
-
-#if 0
- if (BYTECODE_LABEL (DECL_RTL (exp)))
- bc_load_externaddr (DECL_RTL (exp));
- else
- bc_load_localaddr (DECL_RTL (exp));
-#endif
- if (TREE_PUBLIC (exp))
- bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
- BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
- else
- bc_load_localaddr (DECL_RTL (exp));
-
- bc_load_memory (TREE_TYPE (exp), exp);
- return;
-
- case INTEGER_CST:
-
-#ifdef DEBUG_PRINT_CODE
- fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
-#endif
- bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
- ? SImode
- : TYPE_MODE (TREE_TYPE (exp)))],
- (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
- return;
-
- case REAL_CST:
-
-#if 0
-#ifdef DEBUG_PRINT_CODE
- fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
-#endif
- /* FIX THIS: find a better way to pass real_cst's. -bson */
- bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
- (double) TREE_REAL_CST (exp));
-#else
- abort ();
-#endif
-
- return;
-
- case CALL_EXPR:
-
- /* We build a call description vector describing the type of
- the return value and of the arguments; this call vector,
- together with a pointer to a location for the return value
- and the base of the argument list, is passed to the low
- level machine dependent call subroutine, which is responsible
- for putting the arguments wherever real functions expect
- them, as well as getting the return value back. */
- {
- tree calldesc = 0, arg;
- int nargs = 0, i;
- rtx retval;
-
- /* Push the evaluated args on the evaluation stack in reverse
- order. Also make an entry for each arg in the calldesc
- vector while we're at it. */
-
- TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
-
- for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
- {
- ++nargs;
- bc_expand_expr (TREE_VALUE (arg));
-
- calldesc = tree_cons ((tree) 0,
- size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
- calldesc);
- calldesc = tree_cons ((tree) 0,
- bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
- calldesc);
- }
-
- TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
-
- /* Allocate a location for the return value and push its
- address on the evaluation stack. Also make an entry
- at the front of the calldesc for the return value type. */
-
- type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
- retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
- bc_load_localaddr (retval);
-
- calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
- calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
-
- /* Prepend the argument count. */
- calldesc = tree_cons ((tree) 0,
- build_int_2 (nargs, 0),
- calldesc);
-
- /* Push the address of the call description vector on the stack. */
- calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
- TREE_TYPE (calldesc) = build_array_type (integer_type_node,
- build_index_type (build_int_2 (nargs * 2, 0)));
- r = output_constant_def (calldesc);
- bc_load_externaddr (r);
-
- /* Push the address of the function to be called. */
- bc_expand_expr (TREE_OPERAND (exp, 0));
-
- /* Call the function, popping its address and the calldesc vector
- address off the evaluation stack in the process. */
- bc_emit_instruction (call);
-
- /* Pop the arguments off the stack. */
- bc_adjust_stack (nargs);
-
- /* Load the return value onto the stack. */
- bc_load_localaddr (retval);
- bc_load_memory (type, TREE_OPERAND (exp, 0));
- }
- return;
-
- case SAVE_EXPR:
-
- if (!SAVE_EXPR_RTL (exp))
- {
- /* First time around: copy to local variable */
- SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
- TYPE_ALIGN (TREE_TYPE(exp)));
- bc_expand_expr (TREE_OPERAND (exp, 0));
- bc_emit_instruction (duplicate);
-
- bc_load_localaddr (SAVE_EXPR_RTL (exp));
- bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
- }
- else
- {
- /* Consecutive reference: use saved copy */
- bc_load_localaddr (SAVE_EXPR_RTL (exp));
- bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
- }
- return;
-
-#if 0
- /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
- how are they handled instead? */
- case LET_STMT:
-
- TREE_USED (exp) = 1;
- bc_expand_expr (STMT_BODY (exp));
- return;
-#endif
-
- case NOP_EXPR:
- case CONVERT_EXPR:
-
- bc_expand_expr (TREE_OPERAND (exp, 0));
- bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
- return;
-
- case MODIFY_EXPR:
-
- expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
- return;
-
- case ADDR_EXPR:
-
- bc_expand_address (TREE_OPERAND (exp, 0));
- return;
-
- case INDIRECT_REF:
-
- bc_expand_expr (TREE_OPERAND (exp, 0));
- bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
- return;
-
- case ARRAY_REF:
-
- bc_expand_expr (bc_canonicalize_array_ref (exp));
- return;
-
- case COMPONENT_REF:
-
- bc_expand_component_address (exp);
-
- /* If we have a bitfield, generate a proper load */
- bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
- return;
-
- case COMPOUND_EXPR:
-
- bc_expand_expr (TREE_OPERAND (exp, 0));
- bc_emit_instruction (drop);
- bc_expand_expr (TREE_OPERAND (exp, 1));
- return;
-
- case COND_EXPR:
-
- bc_expand_expr (TREE_OPERAND (exp, 0));
- bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
- lab = bc_get_bytecode_label ();
- bc_emit_bytecode (xjumpifnot);
- bc_emit_bytecode_labelref (lab);
-
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
-#endif
- bc_expand_expr (TREE_OPERAND (exp, 1));
- lab1 = bc_get_bytecode_label ();
- bc_emit_bytecode (jump);
- bc_emit_bytecode_labelref (lab1);
-
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
-#endif
-
- bc_emit_bytecode_labeldef (lab);
- bc_expand_expr (TREE_OPERAND (exp, 2));
- bc_emit_bytecode_labeldef (lab1);
- return;
-
- case TRUTH_ANDIF_EXPR:
-
- opcode = xjumpifnot;
- goto andorif;
-
- case TRUTH_ORIF_EXPR:
-
- opcode = xjumpif;
- goto andorif;
-
- case PLUS_EXPR:
-
- binoptab = optab_plus_expr;
- goto binop;
-
- case MINUS_EXPR:
-
- binoptab = optab_minus_expr;
- goto binop;
-
- case MULT_EXPR:
-
- binoptab = optab_mult_expr;
- goto binop;
-
- case TRUNC_DIV_EXPR:
- case FLOOR_DIV_EXPR:
- case CEIL_DIV_EXPR:
- case ROUND_DIV_EXPR:
- case EXACT_DIV_EXPR:
-
- binoptab = optab_trunc_div_expr;
- goto binop;
-
- case TRUNC_MOD_EXPR:
- case FLOOR_MOD_EXPR:
- case CEIL_MOD_EXPR:
- case ROUND_MOD_EXPR:
-
- binoptab = optab_trunc_mod_expr;
- goto binop;
-
- case FIX_ROUND_EXPR:
- case FIX_FLOOR_EXPR:
- case FIX_CEIL_EXPR:
- abort (); /* Not used for C. */
-
- case FIX_TRUNC_EXPR:
- case FLOAT_EXPR:
- case MAX_EXPR:
- case MIN_EXPR:
- case FFS_EXPR:
- case LROTATE_EXPR:
- case RROTATE_EXPR:
- abort (); /* FIXME */
-
- case RDIV_EXPR:
-
- binoptab = optab_rdiv_expr;
- goto binop;
-
- case BIT_AND_EXPR:
-
- binoptab = optab_bit_and_expr;
- goto binop;
-
- case BIT_IOR_EXPR:
-
- binoptab = optab_bit_ior_expr;
- goto binop;
-
- case BIT_XOR_EXPR:
-
- binoptab = optab_bit_xor_expr;
- goto binop;
-
- case LSHIFT_EXPR:
-
- binoptab = optab_lshift_expr;
- goto binop;
-
- case RSHIFT_EXPR:
-
- binoptab = optab_rshift_expr;
- goto binop;
-
- case TRUTH_AND_EXPR:
-
- binoptab = optab_truth_and_expr;
- goto binop;
-
- case TRUTH_OR_EXPR:
-
- binoptab = optab_truth_or_expr;
- goto binop;
-
- case LT_EXPR:
-
- binoptab = optab_lt_expr;
- goto binop;
-
- case LE_EXPR:
-
- binoptab = optab_le_expr;
- goto binop;
-
- case GE_EXPR:
-
- binoptab = optab_ge_expr;
- goto binop;
-
- case GT_EXPR:
-
- binoptab = optab_gt_expr;
- goto binop;
-
- case EQ_EXPR:
-
- binoptab = optab_eq_expr;
- goto binop;
-
- case NE_EXPR:
-
- binoptab = optab_ne_expr;
- goto binop;
-
- case NEGATE_EXPR:
-
- unoptab = optab_negate_expr;
- goto unop;
-
- case BIT_NOT_EXPR:
-
- unoptab = optab_bit_not_expr;
- goto unop;
-
- case TRUTH_NOT_EXPR:
-
- unoptab = optab_truth_not_expr;
- goto unop;
-
- case PREDECREMENT_EXPR:
-
- incroptab = optab_predecrement_expr;
- goto increment;
-
- case PREINCREMENT_EXPR:
-
- incroptab = optab_preincrement_expr;
- goto increment;
-
- case POSTDECREMENT_EXPR:
-
- incroptab = optab_postdecrement_expr;
- goto increment;
-
- case POSTINCREMENT_EXPR:
-
- incroptab = optab_postincrement_expr;
- goto increment;
-
- case CONSTRUCTOR:
-
- bc_expand_constructor (exp);
- return;
-
- case ERROR_MARK:
- case RTL_EXPR:
-
- return;
-
- case BIND_EXPR:
- {
- tree vars = TREE_OPERAND (exp, 0);
- int vars_need_expansion = 0;
-
- /* Need to open a binding contour here because
- if there are any cleanups they most be contained here. */
- expand_start_bindings (0);
-
- /* Mark the corresponding BLOCK for output. */
- if (TREE_OPERAND (exp, 2) != 0)
- TREE_USED (TREE_OPERAND (exp, 2)) = 1;
-
- /* If VARS have not yet been expanded, expand them now. */
- while (vars)
- {
- if (DECL_RTL (vars) == 0)
- {
- vars_need_expansion = 1;
- expand_decl (vars);
- }
- expand_decl_init (vars);
- vars = TREE_CHAIN (vars);
- }
-
- bc_expand_expr (TREE_OPERAND (exp, 1));
-
- expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
-
- return;
- }
- }
-
- abort ();
-
- binop:
-
- bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
- TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
- return;
-
-
- unop:
-
- bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
- return;
-
-
- andorif:
-
- bc_expand_expr (TREE_OPERAND (exp, 0));
- bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
- lab = bc_get_bytecode_label ();
-
- bc_emit_instruction (duplicate);
- bc_emit_bytecode (opcode);
- bc_emit_bytecode_labelref (lab);
-
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
-#endif
-
- bc_emit_instruction (drop);
-
- bc_expand_expr (TREE_OPERAND (exp, 1));
- bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
- bc_emit_bytecode_labeldef (lab);
- return;
-
-
- increment:
-
- type = TREE_TYPE (TREE_OPERAND (exp, 0));
-
- /* Push the quantum. */
- bc_expand_expr (TREE_OPERAND (exp, 1));
-
- /* Convert it to the lvalue's type. */
- bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
-
- /* Push the address of the lvalue */
- bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
-
- /* Perform actual increment */
- bc_expand_increment (incroptab, type);
- return;
-}
-\f
-/* Return the alignment in bits of EXP, a pointer valued expression.
- But don't return more than MAX_ALIGN no matter what.
- The alignment returned is, by default, the alignment of the thing that
- EXP points to (if it is not a POINTER_TYPE, 0 is returned).
-
- Otherwise, look at the expression to see if we can do better, i.e., if the
- expression is actually pointing at an object whose alignment is tighter. */
-
-static int
-get_pointer_alignment (exp, max_align)
- tree exp;
- unsigned max_align;
-{
- unsigned align, inner;
-
- if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
- return 0;
-
- align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
- align = MIN (align, max_align);
-
- while (1)
- {
- switch (TREE_CODE (exp))
- {
- case NOP_EXPR:
- case CONVERT_EXPR:
- case NON_LVALUE_EXPR:
- exp = TREE_OPERAND (exp, 0);
- if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
- return align;
- inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
- align = MIN (inner, max_align);
- break;
-
- case PLUS_EXPR:
- /* If sum of pointer + int, restrict our maximum alignment to that
- imposed by the integer. If not, we can't do any better than
- ALIGN. */
- if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
- return align;
-
- while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
- & (max_align - 1))
- != 0)
- max_align >>= 1;
-
- exp = TREE_OPERAND (exp, 0);
- break;
-
- case ADDR_EXPR:
- /* See what we are pointing at and look at its alignment. */
- exp = TREE_OPERAND (exp, 0);
- if (TREE_CODE (exp) == FUNCTION_DECL)
- align = FUNCTION_BOUNDARY;
- else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
- align = DECL_ALIGN (exp);
-#ifdef CONSTANT_ALIGNMENT
- else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
- align = CONSTANT_ALIGNMENT (exp, align);
-#endif
- return MIN (align, max_align);
-
- default:
- return align;
- }
- }
-}
-\f
-/* Return the tree node and offset if a given argument corresponds to
- a string constant. */
-
-static tree
-string_constant (arg, ptr_offset)
- tree arg;
- tree *ptr_offset;
-{
- STRIP_NOPS (arg);
-
- if (TREE_CODE (arg) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
- {
- *ptr_offset = integer_zero_node;
- return TREE_OPERAND (arg, 0);
- }
- else if (TREE_CODE (arg) == PLUS_EXPR)
- {
- tree arg0 = TREE_OPERAND (arg, 0);
- tree arg1 = TREE_OPERAND (arg, 1);
-
- STRIP_NOPS (arg0);
- STRIP_NOPS (arg1);
-
- if (TREE_CODE (arg0) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
- {
- *ptr_offset = arg1;
- return TREE_OPERAND (arg0, 0);
- }
- else if (TREE_CODE (arg1) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
- {
- *ptr_offset = arg0;
- return TREE_OPERAND (arg1, 0);
- }
- }
-
- return 0;
-}
-
-/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
- way, because it could contain a zero byte in the middle.
- TREE_STRING_LENGTH is the size of the character array, not the string.
-
- Unfortunately, string_constant can't access the values of const char
- arrays with initializers, so neither can we do so here. */
-
-static tree
-c_strlen (src)
- tree src;
-{
- tree offset_node;
- int offset, max;
- char *ptr;
-
- src = string_constant (src, &offset_node);
- if (src == 0)
- return 0;
- max = TREE_STRING_LENGTH (src);
- ptr = TREE_STRING_POINTER (src);
- if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
- {
- /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
- compute the offset to the following null if we don't know where to
- start searching for it. */
- int i;
- for (i = 0; i < max; i++)
- if (ptr[i] == 0)
- return 0;
- /* We don't know the starting offset, but we do know that the string
- has no internal zero bytes. We can assume that the offset falls
- within the bounds of the string; otherwise, the programmer deserves
- what he gets. Subtract the offset from the length of the string,
- and return that. */
- /* This would perhaps not be valid if we were dealing with named
- arrays in addition to literal string constants. */
- return size_binop (MINUS_EXPR, size_int (max), offset_node);
- }
-
- /* We have a known offset into the string. Start searching there for
- a null character. */
- if (offset_node == 0)
- offset = 0;
- else
- {
- /* Did we get a long long offset? If so, punt. */
- if (TREE_INT_CST_HIGH (offset_node) != 0)
- return 0;
- offset = TREE_INT_CST_LOW (offset_node);
- }
- /* If the offset is known to be out of bounds, warn, and call strlen at
- runtime. */
- if (offset < 0 || offset > max)
- {
- warning ("offset outside bounds of constant string");
- return 0;
- }
- /* Use strlen to search for the first zero byte. Since any strings
- constructed with build_string will have nulls appended, we win even
- if we get handed something like (char[4])"abcd".
-
- Since OFFSET is our starting index into the string, no further
- calculation is needed. */
- return size_int (strlen (ptr + offset));
-}
-
-rtx
-expand_builtin_return_addr (fndecl_code, count, tem)
- enum built_in_function fndecl_code;
- rtx tem;
- int count;
-{
- int i;
-
- /* Some machines need special handling before we can access
- arbitrary frames. For example, on the sparc, we must first flush
- all register windows to the stack. */
-#ifdef SETUP_FRAME_ADDRESSES
- SETUP_FRAME_ADDRESSES ();
-#endif
-
- /* On the sparc, the return address is not in the frame, it is in a
- register. There is no way to access it off of the current frame
- pointer, but it can be accessed off the previous frame pointer by
- reading the value from the register window save area. */
-#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
- if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
- count--;
-#endif
-
- /* Scan back COUNT frames to the specified frame. */
- for (i = 0; i < count; i++)
- {
- /* Assume the dynamic chain pointer is in the word that the
- frame address points to, unless otherwise specified. */
-#ifdef DYNAMIC_CHAIN_ADDRESS
- tem = DYNAMIC_CHAIN_ADDRESS (tem);
-#endif
- tem = memory_address (Pmode, tem);
- tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
- }
-
- /* For __builtin_frame_address, return what we've got. */
- if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
- return tem;
-
- /* For __builtin_return_address, Get the return address from that
- frame. */
-#ifdef RETURN_ADDR_RTX
- tem = RETURN_ADDR_RTX (count, tem);
-#else
- tem = memory_address (Pmode,
- plus_constant (tem, GET_MODE_SIZE (Pmode)));
- tem = gen_rtx (MEM, Pmode, tem);
-#endif
- return tem;
-}
-\f
-/* Expand an expression EXP that calls a built-in function,
- with result going to TARGET if that's convenient
- (and in mode MODE if that's convenient).
- SUBTARGET may be used as the target for computing one of EXP's operands.
- IGNORE is nonzero if the value is to be ignored. */
-
-#define CALLED_AS_BUILT_IN(NODE) \
- (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
-
-static rtx
-expand_builtin (exp, target, subtarget, mode, ignore)
- tree exp;
- rtx target;
- rtx subtarget;
- enum machine_mode mode;
- int ignore;
-{
- tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
- tree arglist = TREE_OPERAND (exp, 1);
- rtx op0;
- rtx lab1, insns;
- enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
- optab builtin_optab;
-
- switch (DECL_FUNCTION_CODE (fndecl))
- {
- case BUILT_IN_ABS:
- case BUILT_IN_LABS:
- case BUILT_IN_FABS:
- /* build_function_call changes these into ABS_EXPR. */
- abort ();
-
- case BUILT_IN_SIN:
- case BUILT_IN_COS:
- /* Treat these like sqrt, but only if the user asks for them. */
- if (! flag_fast_math)
- break;
- case BUILT_IN_FSQRT:
- /* If not optimizing, call the library function. */
- if (! optimize)
- break;
-
- if (arglist == 0
- /* Arg could be wrong type if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
- break;
-
- /* Stabilize and compute the argument. */
- if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
- && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
- {
- exp = copy_node (exp);
- arglist = copy_node (arglist);
- TREE_OPERAND (exp, 1) = arglist;
- TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
- }
- op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
-
- /* Make a suitable register to place result in. */
- target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
-
- emit_queue ();
- start_sequence ();
-
- switch (DECL_FUNCTION_CODE (fndecl))
- {
- case BUILT_IN_SIN:
- builtin_optab = sin_optab; break;
- case BUILT_IN_COS:
- builtin_optab = cos_optab; break;
- case BUILT_IN_FSQRT:
- builtin_optab = sqrt_optab; break;
- default:
- abort ();
- }
-
- /* Compute into TARGET.
- Set TARGET to wherever the result comes back. */
- target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
- builtin_optab, op0, target, 0);
-
- /* If we were unable to expand via the builtin, stop the
- sequence (without outputting the insns) and break, causing
- a call the the library function. */
- if (target == 0)
- {
- end_sequence ();
- break;
- }
-
- /* Check the results by default. But if flag_fast_math is turned on,
- then assume sqrt will always be called with valid arguments. */
-
- if (! flag_fast_math)
- {
- /* Don't define the builtin FP instructions
- if your machine is not IEEE. */
- if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
- abort ();
-
- lab1 = gen_label_rtx ();
-
- /* Test the result; if it is NaN, set errno=EDOM because
- the argument was not in the domain. */
- emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
- emit_jump_insn (gen_beq (lab1));
-
-#ifdef TARGET_EDOM
- {
-#ifdef GEN_ERRNO_RTX
- rtx errno_rtx = GEN_ERRNO_RTX;
-#else
- rtx errno_rtx
- = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
-#endif
-
- emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
- }
-#else
- /* We can't set errno=EDOM directly; let the library call do it.
- Pop the arguments right away in case the call gets deleted. */
- NO_DEFER_POP;
- expand_call (exp, target, 0);
- OK_DEFER_POP;
-#endif
-
- emit_label (lab1);
- }
-
- /* Output the entire sequence. */
- insns = get_insns ();
- end_sequence ();
- emit_insns (insns);
-
- return target;
-
- /* __builtin_apply_args returns block of memory allocated on
- the stack into which is stored the arg pointer, structure
- value address, static chain, and all the registers that might
- possibly be used in performing a function call. The code is
- moved to the start of the function so the incoming values are
- saved. */
- case BUILT_IN_APPLY_ARGS:
- /* Don't do __builtin_apply_args more than once in a function.
- Save the result of the first call and reuse it. */
- if (apply_args_value != 0)
- return apply_args_value;
- {
- /* When this function is called, it means that registers must be
- saved on entry to this function. So we migrate the
- call to the first insn of this function. */
- rtx temp;
- rtx seq;
-
- start_sequence ();
- temp = expand_builtin_apply_args ();
- seq = get_insns ();
- end_sequence ();
-
- apply_args_value = temp;
-
- /* Put the sequence after the NOTE that starts the function.
- If this is inside a SEQUENCE, make the outer-level insn
- chain current, so the code is placed at the start of the
- function. */
- push_topmost_sequence ();
- emit_insns_before (seq, NEXT_INSN (get_insns ()));
- pop_topmost_sequence ();
- return temp;
- }
-
- /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
- FUNCTION with a copy of the parameters described by
- ARGUMENTS, and ARGSIZE. It returns a block of memory
- allocated on the stack into which is stored all the registers
- that might possibly be used for returning the result of a
- function. ARGUMENTS is the value returned by
- __builtin_apply_args. ARGSIZE is the number of bytes of
- arguments that must be copied. ??? How should this value be
- computed? We'll also need a safe worst case value for varargs
- functions. */
- case BUILT_IN_APPLY:
- if (arglist == 0
- /* Arg could be non-pointer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
- || TREE_CHAIN (arglist) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
- || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
- return const0_rtx;
- else
- {
- int i;
- tree t;
- rtx ops[3];
-
- for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
- ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
-
- return expand_builtin_apply (ops[0], ops[1], ops[2]);
- }
-
- /* __builtin_return (RESULT) causes the function to return the
- value described by RESULT. RESULT is address of the block of
- memory returned by __builtin_apply. */
- case BUILT_IN_RETURN:
- if (arglist
- /* Arg could be non-pointer if user redeclared this fcn wrong. */
- && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
- expand_builtin_return (expand_expr (TREE_VALUE (arglist),
- NULL_RTX, VOIDmode, 0));
- return const0_rtx;
-
- case BUILT_IN_SAVEREGS:
- /* Don't do __builtin_saveregs more than once in a function.
- Save the result of the first call and reuse it. */
- if (saveregs_value != 0)
- return saveregs_value;
- {
- /* When this function is called, it means that registers must be
- saved on entry to this function. So we migrate the
- call to the first insn of this function. */
- rtx temp;
- rtx seq;
-
- /* Now really call the function. `expand_call' does not call
- expand_builtin, so there is no danger of infinite recursion here. */
- start_sequence ();
-
-#ifdef EXPAND_BUILTIN_SAVEREGS
- /* Do whatever the machine needs done in this case. */
- temp = EXPAND_BUILTIN_SAVEREGS (arglist);
-#else
- /* The register where the function returns its value
- is likely to have something else in it, such as an argument.
- So preserve that register around the call. */
-
- if (value_mode != VOIDmode)
- {
- rtx valreg = hard_libcall_value (value_mode);
- rtx saved_valreg = gen_reg_rtx (value_mode);
-
- emit_move_insn (saved_valreg, valreg);
- temp = expand_call (exp, target, ignore);
- emit_move_insn (valreg, saved_valreg);
- }
- else
- /* Generate the call, putting the value in a pseudo. */
- temp = expand_call (exp, target, ignore);
-#endif
-
- seq = get_insns ();
- end_sequence ();
-
- saveregs_value = temp;
-
- /* Put the sequence after the NOTE that starts the function.
- If this is inside a SEQUENCE, make the outer-level insn
- chain current, so the code is placed at the start of the
- function. */
- push_topmost_sequence ();
- emit_insns_before (seq, NEXT_INSN (get_insns ()));
- pop_topmost_sequence ();
- return temp;
- }
-
- /* __builtin_args_info (N) returns word N of the arg space info
- for the current function. The number and meanings of words
- is controlled by the definition of CUMULATIVE_ARGS. */
- case BUILT_IN_ARGS_INFO:
- {
- int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
- int i;
- int *word_ptr = (int *) ¤t_function_args_info;
- tree type, elts, result;
-
- if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
- fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
- __FILE__, __LINE__);
-
- if (arglist != 0)
- {
- tree arg = TREE_VALUE (arglist);
- if (TREE_CODE (arg) != INTEGER_CST)
- error ("argument of `__builtin_args_info' must be constant");
- else
- {
- int wordnum = TREE_INT_CST_LOW (arg);
-
- if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
- error ("argument of `__builtin_args_info' out of range");
- else
- return GEN_INT (word_ptr[wordnum]);
- }
- }
- else
- error ("missing argument in `__builtin_args_info'");
-
- return const0_rtx;
-
-#if 0
- for (i = 0; i < nwords; i++)
- elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
-
- type = build_array_type (integer_type_node,
- build_index_type (build_int_2 (nwords, 0)));
- result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
- TREE_CONSTANT (result) = 1;
- TREE_STATIC (result) = 1;
- result = build (INDIRECT_REF, build_pointer_type (type), result);
- TREE_CONSTANT (result) = 1;
- return expand_expr (result, NULL_RTX, VOIDmode, 0);
-#endif
- }
-
- /* Return the address of the first anonymous stack arg. */
- case BUILT_IN_NEXT_ARG:
- {
- tree fntype = TREE_TYPE (current_function_decl);
-
- if ((TYPE_ARG_TYPES (fntype) == 0
- || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
- == void_type_node))
- && ! current_function_varargs)
- {
- error ("`va_start' used in function with fixed args");
- return const0_rtx;
- }
-
- if (arglist)
- {
- tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
- tree arg = TREE_VALUE (arglist);
-
- /* Strip off all nops for the sake of the comparison. This
- is not quite the same as STRIP_NOPS. It does more.
- We must also strip off INDIRECT_EXPR for C++ reference
- parameters. */
- while (TREE_CODE (arg) == NOP_EXPR
- || TREE_CODE (arg) == CONVERT_EXPR
- || TREE_CODE (arg) == NON_LVALUE_EXPR
- || TREE_CODE (arg) == INDIRECT_REF)
- arg = TREE_OPERAND (arg, 0);
- if (arg != last_parm)
- warning ("second parameter of `va_start' not last named argument");
- }
- else if (! current_function_varargs)
- /* Evidently an out of date version of <stdarg.h>; can't validate
- va_start's second argument, but can still work as intended. */
- warning ("`__builtin_next_arg' called without an argument");
- }
-
- return expand_binop (Pmode, add_optab,
- current_function_internal_arg_pointer,
- current_function_arg_offset_rtx,
- NULL_RTX, 0, OPTAB_LIB_WIDEN);
-
- case BUILT_IN_CLASSIFY_TYPE:
- if (arglist != 0)
- {
- tree type = TREE_TYPE (TREE_VALUE (arglist));
- enum tree_code code = TREE_CODE (type);
- if (code == VOID_TYPE)
- return GEN_INT (void_type_class);
- if (code == INTEGER_TYPE)
- return GEN_INT (integer_type_class);
- if (code == CHAR_TYPE)
- return GEN_INT (char_type_class);
- if (code == ENUMERAL_TYPE)
- return GEN_INT (enumeral_type_class);
- if (code == BOOLEAN_TYPE)
- return GEN_INT (boolean_type_class);
- if (code == POINTER_TYPE)
- return GEN_INT (pointer_type_class);
- if (code == REFERENCE_TYPE)
- return GEN_INT (reference_type_class);
- if (code == OFFSET_TYPE)
- return GEN_INT (offset_type_class);
- if (code == REAL_TYPE)
- return GEN_INT (real_type_class);
- if (code == COMPLEX_TYPE)
- return GEN_INT (complex_type_class);
- if (code == FUNCTION_TYPE)
- return GEN_INT (function_type_class);
- if (code == METHOD_TYPE)
- return GEN_INT (method_type_class);
- if (code == RECORD_TYPE)
- return GEN_INT (record_type_class);
- if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
- return GEN_INT (union_type_class);
- if (code == ARRAY_TYPE)
- {
- if (TYPE_STRING_FLAG (type))
- return GEN_INT (string_type_class);
- else
- return GEN_INT (array_type_class);
- }
- if (code == SET_TYPE)
- return GEN_INT (set_type_class);
- if (code == FILE_TYPE)
- return GEN_INT (file_type_class);
- if (code == LANG_TYPE)
- return GEN_INT (lang_type_class);
- }
- return GEN_INT (no_type_class);
-
- case BUILT_IN_CONSTANT_P:
- if (arglist == 0)
- return const0_rtx;
- else
- {
- tree arg = TREE_VALUE (arglist);
-
- STRIP_NOPS (arg);
- return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
- || (TREE_CODE (arg) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
- ? const1_rtx : const0_rtx);
- }
-
- case BUILT_IN_FRAME_ADDRESS:
- /* The argument must be a nonnegative integer constant.
- It counts the number of frames to scan up the stack.
- The value is the address of that frame. */
- case BUILT_IN_RETURN_ADDRESS:
- /* The argument must be a nonnegative integer constant.
- It counts the number of frames to scan up the stack.
- The value is the return address saved in that frame. */
- if (arglist == 0)
- /* Warning about missing arg was already issued. */
- return const0_rtx;
- else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
- {
- error ("invalid arg to `__builtin_return_address'");
- return const0_rtx;
- }
- else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
- {
- error ("invalid arg to `__builtin_return_address'");
- return const0_rtx;
- }
- else
- {
- rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
- TREE_INT_CST_LOW (TREE_VALUE (arglist)),
- hard_frame_pointer_rtx);
-
- /* For __builtin_frame_address, return what we've got. */
- if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
- return tem;
-
- if (GET_CODE (tem) != REG)
- tem = copy_to_reg (tem);
- return tem;
- }
-
- case BUILT_IN_ALLOCA:
- if (arglist == 0
- /* Arg could be non-integer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
- break;
-
- /* Compute the argument. */
- op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
-
- /* Allocate the desired space. */
- return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
-
- case BUILT_IN_FFS:
- /* If not optimizing, call the library function. */
- if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
- break;
-
- if (arglist == 0
- /* Arg could be non-integer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
- break;
-
- /* Compute the argument. */
- op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
- /* Compute ffs, into TARGET if possible.
- Set TARGET to wherever the result comes back. */
- target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
- ffs_optab, op0, target, 1);
- if (target == 0)
- abort ();
- return target;
-
- case BUILT_IN_STRLEN:
- /* If not optimizing, call the library function. */
- if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
- break;
-
- if (arglist == 0
- /* Arg could be non-pointer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
- break;
- else
- {
- tree src = TREE_VALUE (arglist);
- tree len = c_strlen (src);
-
- int align
- = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
-
- rtx result, src_rtx, char_rtx;
- enum machine_mode insn_mode = value_mode, char_mode;
- enum insn_code icode;
-
- /* If the length is known, just return it. */
- if (len != 0)
- return expand_expr (len, target, mode, 0);
-
- /* If SRC is not a pointer type, don't do this operation inline. */
- if (align == 0)
- break;
-
- /* Call a function if we can't compute strlen in the right mode. */
-
- while (insn_mode != VOIDmode)
- {
- icode = strlen_optab->handlers[(int) insn_mode].insn_code;
- if (icode != CODE_FOR_nothing)
- break;
-
- insn_mode = GET_MODE_WIDER_MODE (insn_mode);
- }
- if (insn_mode == VOIDmode)
- break;
-
- /* Make a place to write the result of the instruction. */
- result = target;
- if (! (result != 0
- && GET_CODE (result) == REG
- && GET_MODE (result) == insn_mode
- && REGNO (result) >= FIRST_PSEUDO_REGISTER))
- result = gen_reg_rtx (insn_mode);
-
- /* Make sure the operands are acceptable to the predicates. */
-
- if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
- result = gen_reg_rtx (insn_mode);
-
- src_rtx = memory_address (BLKmode,
- expand_expr (src, NULL_RTX, ptr_mode,
- EXPAND_NORMAL));
- if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
- src_rtx = copy_to_mode_reg (Pmode, src_rtx);
-
- char_rtx = const0_rtx;
- char_mode = insn_operand_mode[(int)icode][2];
- if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
- char_rtx = copy_to_mode_reg (char_mode, char_rtx);
-
- emit_insn (GEN_FCN (icode) (result,
- gen_rtx (MEM, BLKmode, src_rtx),
- char_rtx, GEN_INT (align)));
-
- /* Return the value in the proper mode for this function. */
- if (GET_MODE (result) == value_mode)
- return result;
- else if (target != 0)
- {
- convert_move (target, result, 0);
- return target;
- }
- else
- return convert_to_mode (value_mode, result, 0);
- }
+ case NOP_EXPR:
+ case CONVERT_EXPR:
+ case NON_LVALUE_EXPR:
+ exp = TREE_OPERAND (exp, 0);
+ if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
+ return align;
+ inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
+ align = MIN (inner, max_align);
+ break;
- case BUILT_IN_STRCPY:
- /* If not optimizing, call the library function. */
- if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
- break;
+ case PLUS_EXPR:
+ /* If sum of pointer + int, restrict our maximum alignment to that
+ imposed by the integer. If not, we can't do any better than
+ ALIGN. */
+ if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
+ return align;
- if (arglist == 0
- /* Arg could be non-pointer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
- || TREE_CHAIN (arglist) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
- break;
- else
- {
- tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
+ while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
+ & (max_align - 1))
+ != 0)
+ max_align >>= 1;
- if (len == 0)
- break;
+ exp = TREE_OPERAND (exp, 0);
+ break;
- len = size_binop (PLUS_EXPR, len, integer_one_node);
+ case ADDR_EXPR:
+ /* See what we are pointing at and look at its alignment. */
+ exp = TREE_OPERAND (exp, 0);
+ if (TREE_CODE (exp) == FUNCTION_DECL)
+ align = FUNCTION_BOUNDARY;
+ else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
+ align = DECL_ALIGN (exp);
+#ifdef CONSTANT_ALIGNMENT
+ else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
+ align = CONSTANT_ALIGNMENT (exp, align);
+#endif
+ return MIN (align, max_align);
- chainon (arglist, build_tree_list (NULL_TREE, len));
+ default:
+ return align;
}
+ }
+}
+\f
+/* Return the tree node and offset if a given argument corresponds to
+ a string constant. */
- /* Drops in. */
- case BUILT_IN_MEMCPY:
- /* If not optimizing, call the library function. */
- if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
- break;
-
- if (arglist == 0
- /* Arg could be non-pointer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
- || TREE_CHAIN (arglist) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
- || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
- break;
- else
- {
- tree dest = TREE_VALUE (arglist);
- tree src = TREE_VALUE (TREE_CHAIN (arglist));
- tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
- tree type;
-
- int src_align
- = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- int dest_align
- = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- rtx dest_rtx, dest_mem, src_mem;
+static tree
+string_constant (arg, ptr_offset)
+ tree arg;
+ tree *ptr_offset;
+{
+ STRIP_NOPS (arg);
- /* If either SRC or DEST is not a pointer type, don't do
- this operation in-line. */
- if (src_align == 0 || dest_align == 0)
- {
- if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
- TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
- break;
- }
+ if (TREE_CODE (arg) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
+ {
+ *ptr_offset = integer_zero_node;
+ return TREE_OPERAND (arg, 0);
+ }
+ else if (TREE_CODE (arg) == PLUS_EXPR)
+ {
+ tree arg0 = TREE_OPERAND (arg, 0);
+ tree arg1 = TREE_OPERAND (arg, 1);
- dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
- dest_mem = gen_rtx (MEM, BLKmode,
- memory_address (BLKmode, dest_rtx));
- /* There could be a void* cast on top of the object. */
- while (TREE_CODE (dest) == NOP_EXPR)
- dest = TREE_OPERAND (dest, 0);
- type = TREE_TYPE (TREE_TYPE (dest));
- MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
- src_mem = gen_rtx (MEM, BLKmode,
- memory_address (BLKmode,
- expand_expr (src, NULL_RTX,
- ptr_mode,
- EXPAND_SUM)));
- /* There could be a void* cast on top of the object. */
- while (TREE_CODE (src) == NOP_EXPR)
- src = TREE_OPERAND (src, 0);
- type = TREE_TYPE (TREE_TYPE (src));
- MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
- /* Copy word part most expediently. */
- emit_block_move (dest_mem, src_mem,
- expand_expr (len, NULL_RTX, VOIDmode, 0),
- MIN (src_align, dest_align));
- return force_operand (dest_rtx, NULL_RTX);
+ if (TREE_CODE (arg0) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
+ {
+ *ptr_offset = arg1;
+ return TREE_OPERAND (arg0, 0);
+ }
+ else if (TREE_CODE (arg1) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
+ {
+ *ptr_offset = arg0;
+ return TREE_OPERAND (arg1, 0);
}
+ }
-/* These comparison functions need an instruction that returns an actual
- index. An ordinary compare that just sets the condition codes
- is not enough. */
-#ifdef HAVE_cmpstrsi
- case BUILT_IN_STRCMP:
- /* If not optimizing, call the library function. */
- if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
- break;
+ return 0;
+}
- if (arglist == 0
- /* Arg could be non-pointer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
- || TREE_CHAIN (arglist) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
- break;
- else if (!HAVE_cmpstrsi)
- break;
- {
- tree arg1 = TREE_VALUE (arglist);
- tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
- tree offset;
- tree len, len2;
+/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
+ way, because it could contain a zero byte in the middle.
+ TREE_STRING_LENGTH is the size of the character array, not the string.
- len = c_strlen (arg1);
- if (len)
- len = size_binop (PLUS_EXPR, integer_one_node, len);
- len2 = c_strlen (arg2);
- if (len2)
- len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
+ Unfortunately, string_constant can't access the values of const char
+ arrays with initializers, so neither can we do so here. */
- /* If we don't have a constant length for the first, use the length
- of the second, if we know it. We don't require a constant for
- this case; some cost analysis could be done if both are available
- but neither is constant. For now, assume they're equally cheap.
+static tree
+c_strlen (src)
+ tree src;
+{
+ tree offset_node;
+ int offset, max;
+ char *ptr;
- If both strings have constant lengths, use the smaller. This
- could arise if optimization results in strcpy being called with
- two fixed strings, or if the code was machine-generated. We should
- add some code to the `memcmp' handler below to deal with such
- situations, someday. */
- if (!len || TREE_CODE (len) != INTEGER_CST)
- {
- if (len2)
- len = len2;
- else if (len == 0)
- break;
- }
- else if (len2 && TREE_CODE (len2) == INTEGER_CST)
- {
- if (tree_int_cst_lt (len2, len))
- len = len2;
- }
+ src = string_constant (src, &offset_node);
+ if (src == 0)
+ return 0;
+ max = TREE_STRING_LENGTH (src);
+ ptr = TREE_STRING_POINTER (src);
+ if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
+ {
+ /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
+ compute the offset to the following null if we don't know where to
+ start searching for it. */
+ int i;
+ for (i = 0; i < max; i++)
+ if (ptr[i] == 0)
+ return 0;
+ /* We don't know the starting offset, but we do know that the string
+ has no internal zero bytes. We can assume that the offset falls
+ within the bounds of the string; otherwise, the programmer deserves
+ what he gets. Subtract the offset from the length of the string,
+ and return that. */
+ /* This would perhaps not be valid if we were dealing with named
+ arrays in addition to literal string constants. */
+ return size_binop (MINUS_EXPR, size_int (max), offset_node);
+ }
- chainon (arglist, build_tree_list (NULL_TREE, len));
- }
+ /* We have a known offset into the string. Start searching there for
+ a null character. */
+ if (offset_node == 0)
+ offset = 0;
+ else
+ {
+ /* Did we get a long long offset? If so, punt. */
+ if (TREE_INT_CST_HIGH (offset_node) != 0)
+ return 0;
+ offset = TREE_INT_CST_LOW (offset_node);
+ }
+ /* If the offset is known to be out of bounds, warn, and call strlen at
+ runtime. */
+ if (offset < 0 || offset > max)
+ {
+ warning ("offset outside bounds of constant string");
+ return 0;
+ }
+ /* Use strlen to search for the first zero byte. Since any strings
+ constructed with build_string will have nulls appended, we win even
+ if we get handed something like (char[4])"abcd".
- /* Drops in. */
- case BUILT_IN_MEMCMP:
- /* If not optimizing, call the library function. */
- if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
- break;
+ Since OFFSET is our starting index into the string, no further
+ calculation is needed. */
+ return size_int (strlen (ptr + offset));
+}
- if (arglist == 0
- /* Arg could be non-pointer if user redeclared this fcn wrong. */
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
- || TREE_CHAIN (arglist) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
- || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
- break;
- else if (!HAVE_cmpstrsi)
- break;
- {
- tree arg1 = TREE_VALUE (arglist);
- tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
- tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
- rtx result;
+rtx
+expand_builtin_return_addr (fndecl_code, count, tem)
+ enum built_in_function fndecl_code;
+ int count;
+ rtx tem;
+{
+ int i;
- int arg1_align
- = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- int arg2_align
- = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- enum machine_mode insn_mode
- = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
+ /* Some machines need special handling before we can access
+ arbitrary frames. For example, on the sparc, we must first flush
+ all register windows to the stack. */
+#ifdef SETUP_FRAME_ADDRESSES
+ if (count > 0)
+ SETUP_FRAME_ADDRESSES ();
+#endif
- /* If we don't have POINTER_TYPE, call the function. */
- if (arg1_align == 0 || arg2_align == 0)
- {
- if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
- TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
- break;
- }
+ /* On the sparc, the return address is not in the frame, it is in a
+ register. There is no way to access it off of the current frame
+ pointer, but it can be accessed off the previous frame pointer by
+ reading the value from the register window save area. */
+#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
+ if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
+ count--;
+#endif
- /* Make a place to write the result of the instruction. */
- result = target;
- if (! (result != 0
- && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
- && REGNO (result) >= FIRST_PSEUDO_REGISTER))
- result = gen_reg_rtx (insn_mode);
+ /* Scan back COUNT frames to the specified frame. */
+ for (i = 0; i < count; i++)
+ {
+ /* Assume the dynamic chain pointer is in the word that the
+ frame address points to, unless otherwise specified. */
+#ifdef DYNAMIC_CHAIN_ADDRESS
+ tem = DYNAMIC_CHAIN_ADDRESS (tem);
+#endif
+ tem = memory_address (Pmode, tem);
+ tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
+ }
- emit_insn (gen_cmpstrsi (result,
- gen_rtx (MEM, BLKmode,
- expand_expr (arg1, NULL_RTX,
- ptr_mode,
- EXPAND_NORMAL)),
- gen_rtx (MEM, BLKmode,
- expand_expr (arg2, NULL_RTX,
- ptr_mode,
- EXPAND_NORMAL)),
- expand_expr (len, NULL_RTX, VOIDmode, 0),
- GEN_INT (MIN (arg1_align, arg2_align))));
+ /* For __builtin_frame_address, return what we've got. */
+ if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
+ return tem;
- /* Return the value in the proper mode for this function. */
- mode = TYPE_MODE (TREE_TYPE (exp));
- if (GET_MODE (result) == mode)
- return result;
- else if (target != 0)
- {
- convert_move (target, result, 0);
- return target;
- }
- else
- return convert_to_mode (mode, result, 0);
- }
+ /* For __builtin_return_address, get the return address from that
+ frame. */
+#ifdef RETURN_ADDR_RTX
+ tem = RETURN_ADDR_RTX (count, tem);
#else
- case BUILT_IN_STRCMP:
- case BUILT_IN_MEMCMP:
- break;
+ tem = memory_address (Pmode,
+ plus_constant (tem, GET_MODE_SIZE (Pmode)));
+ tem = gen_rtx_MEM (Pmode, tem);
#endif
+ return tem;
+}
- /* __builtin_setjmp is passed a pointer to an array of five words
- (not all will be used on all machines). It operates similarly to
- the C library function of the same name, but is more efficient.
- Much of the code below (and for longjmp) is copied from the handling
- of non-local gotos.
+/* __builtin_setjmp is passed a pointer to an array of five words (not
+ all will be used on all machines). It operates similarly to the C
+ library function of the same name, but is more efficient. Much of
+ the code below (and for longjmp) is copied from the handling of
+ non-local gotos.
- NOTE: This is intended for use by GNAT and will only work in
- the method used by it. This code will likely NOT survive to
- the GCC 2.8.0 release. */
- case BUILT_IN_SETJMP:
- if (arglist == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
- break;
+ NOTE: This is intended for use by GNAT and the exception handling
+ scheme in the compiler and will only work in the method used by
+ them. */
- {
- rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
- VOIDmode, 0);
- rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
- enum machine_mode sa_mode = Pmode;
- rtx stack_save;
- int old_inhibit_defer_pop = inhibit_defer_pop;
- int return_pops = RETURN_POPS_ARGS (get_identifier ("__dummy"),
- get_identifier ("__dummy"), 0);
- rtx next_arg_reg;
- CUMULATIVE_ARGS args_so_far;
- int i;
+rtx
+expand_builtin_setjmp (buf_addr, target, first_label, next_label)
+ rtx buf_addr;
+ rtx target;
+ rtx first_label, next_label;
+{
+ rtx lab1 = gen_label_rtx ();
+ enum machine_mode sa_mode = Pmode, value_mode;
+ rtx stack_save;
+ int i;
+
+ value_mode = TYPE_MODE (integer_type_node);
#ifdef POINTERS_EXTEND_UNSIGNED
- buf_addr = convert_memory_address (Pmode, buf_addr);
+ buf_addr = convert_memory_address (Pmode, buf_addr);
#endif
- buf_addr = force_reg (Pmode, buf_addr);
+ buf_addr = force_reg (Pmode, buf_addr);
- if (target == 0 || GET_CODE (target) != REG
- || REGNO (target) < FIRST_PSEUDO_REGISTER)
- target = gen_reg_rtx (value_mode);
-
- emit_queue ();
+ if (target == 0 || GET_CODE (target) != REG
+ || REGNO (target) < FIRST_PSEUDO_REGISTER)
+ target = gen_reg_rtx (value_mode);
- CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
- current_function_calls_setjmp = 1;
+ emit_queue ();
- /* We store the frame pointer and the address of lab1 in the buffer
- and use the rest of it for the stack save area, which is
- machine-dependent. */
- emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
- virtual_stack_vars_rtx);
- emit_move_insn
- (validize_mem (gen_rtx (MEM, Pmode,
- plus_constant (buf_addr,
- GET_MODE_SIZE (Pmode)))),
- gen_rtx (LABEL_REF, Pmode, lab1));
+ /* We store the frame pointer and the address of lab1 in the buffer
+ and use the rest of it for the stack save area, which is
+ machine-dependent. */
+ emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
+ virtual_stack_vars_rtx);
+ emit_move_insn (validize_mem
+ (gen_rtx_MEM (Pmode,
+ plus_constant (buf_addr,
+ GET_MODE_SIZE (Pmode)))),
+ gen_rtx_LABEL_REF (Pmode, lab1));
#ifdef HAVE_save_stack_nonlocal
- if (HAVE_save_stack_nonlocal)
- sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
+ if (HAVE_save_stack_nonlocal)
+ sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
#endif
- stack_save = gen_rtx (MEM, sa_mode,
- plus_constant (buf_addr,
- 2 * GET_MODE_SIZE (Pmode)));
- emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
+ stack_save = gen_rtx_MEM (sa_mode,
+ plus_constant (buf_addr,
+ 2 * GET_MODE_SIZE (Pmode)));
+ emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
-#ifdef HAVE_setjmp
- if (HAVE_setjmp)
- emit_insn (gen_setjmp ());
+ /* If there is further processing to do, do it. */
+#ifdef HAVE_builtin_setjmp_setup
+ if (HAVE_builtin_setjmp_setup)
+ emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif
- /* Set TARGET to zero and branch around the other case. */
- emit_move_insn (target, const0_rtx);
- emit_jump_insn (gen_jump (lab2));
- emit_barrier ();
- emit_label (lab1);
+ /* Set TARGET to zero and branch to the first-time-through label. */
+ emit_move_insn (target, const0_rtx);
+ emit_jump_insn (gen_jump (first_label));
+ emit_barrier ();
+ emit_label (lab1);
+
+ /* Tell flow about the strange goings on. */
+ current_function_has_nonlocal_label = 1;
- /* Note that setjmp clobbers FP when we get here, so we have to
- make sure it's marked as used by this function. */
- emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
+ /* The FP is clobbered by setjmp when we get here, so we have to make
+ sure it's marked as used by this function. */
+ emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
- /* Mark the static chain as clobbered here so life information
- doesn't get messed up for it. */
- emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
+ /* Mark the static chain as clobbered here so life information
+ doesn't get messed up for it. */
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
- /* Now put in the code to restore the frame pointer, and argument
- pointer, if needed. The code below is from expand_end_bindings
- in stmt.c; see detailed documentation there. */
+ /* Now put in the code to restore the frame pointer, and argument
+ pointer, if needed. The code below is from expand_end_bindings
+ in stmt.c; see detailed documentation there. */
#ifdef HAVE_nonlocal_goto
- if (! HAVE_nonlocal_goto)
+ if (! HAVE_nonlocal_goto)
#endif
- emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
-
- current_function_has_nonlocal_goto = 1;
+ emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
- if (fixed_regs[ARG_POINTER_REGNUM])
- {
+ if (fixed_regs[ARG_POINTER_REGNUM])
+ {
#ifdef ELIMINABLE_REGS
- static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
+ static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
- for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
- if (elim_regs[i].from == ARG_POINTER_REGNUM
- && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
- break;
+ for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
+ if (elim_regs[i].from == ARG_POINTER_REGNUM
+ && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
+ break;
- if (i == sizeof elim_regs / sizeof elim_regs [0])
+ if (i == sizeof elim_regs / sizeof elim_regs [0])
#endif
- {
- /* Now restore our arg pointer from the address at which it
- was saved in our stack frame.
- If there hasn't be space allocated for it yet, make
- some now. */
- if (arg_pointer_save_area == 0)
- arg_pointer_save_area
- = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
- emit_move_insn (virtual_incoming_args_rtx,
- copy_to_reg (arg_pointer_save_area));
- }
- }
+ {
+ /* Now restore our arg pointer from the address at which it
+ was saved in our stack frame.
+ If there hasn't been space allocated for it yet, make
+ some now. */
+ if (arg_pointer_save_area == 0)
+ arg_pointer_save_area
+ = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
+ emit_move_insn (virtual_incoming_args_rtx,
+ copy_to_reg (arg_pointer_save_area));
+ }
+ }
#endif
- /* The static chain pointer contains the address of dummy function.
- We need to call it here to handle some PIC cases of restoring
- a global pointer. Then return 1. */
- op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
+#ifdef HAVE_builtin_setjmp_receiver
+ if (HAVE_builtin_setjmp_receiver)
+ emit_insn (gen_builtin_setjmp_receiver (lab1));
+ else
+#endif
+#ifdef HAVE_nonlocal_goto_receiver
+ if (HAVE_nonlocal_goto_receiver)
+ emit_insn (gen_nonlocal_goto_receiver ());
+ else
+#endif
+ ; /* Nothing */
- /* We can't actually call emit_library_call here, so do everything
- it does, which isn't much for a libfunc with no args. */
- op0 = memory_address (FUNCTION_MODE, op0);
+ /* Set TARGET, and branch to the next-time-through label. */
+ emit_move_insn (target, gen_lowpart (GET_MODE (target), static_chain_rtx));
+ emit_jump_insn (gen_jump (next_label));
+ emit_barrier ();
- INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
- gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
- next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
+ return target;
+}
-#ifndef ACCUMULATE_OUTGOING_ARGS
-#ifdef HAVE_call_pop
- if (HAVE_call_pop)
- emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
- const0_rtx, next_arg_reg,
- GEN_INT (return_pops)));
- else
-#endif
-#endif
+void
+expand_builtin_longjmp (buf_addr, value)
+ rtx buf_addr, value;
+{
+ rtx fp, lab, stack;
+ enum machine_mode sa_mode;
-#ifdef HAVE_call
- if (HAVE_call)
- emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
- const0_rtx, next_arg_reg, const0_rtx));
- else
+#ifdef POINTERS_EXTEND_UNSIGNED
+ buf_addr = convert_memory_address (Pmode, buf_addr);
#endif
- abort ();
+ buf_addr = force_reg (Pmode, buf_addr);
- emit_move_insn (target, const1_rtx);
- emit_label (lab2);
- return target;
- }
+ /* The value sent by longjmp is not allowed to be zero. Force it
+ to one if so. */
+ if (GET_CODE (value) == CONST_INT)
+ {
+ if (INTVAL (value) == 0)
+ value = const1_rtx;
+ }
+ else
+ {
+ lab = gen_label_rtx ();
- /* __builtin_longjmp is passed a pointer to an array of five words
- and a value, which is a dummy. It's similar to the C library longjmp
- function but works with __builtin_setjmp above. */
- case BUILT_IN_LONGJMP:
- if (arglist == 0 || TREE_CHAIN (arglist) == 0
- || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
- break;
+ emit_cmp_insn (value, const0_rtx, NE, NULL_RTX, GET_MODE (value), 0, 0);
+ emit_jump_insn (gen_bne (lab));
+ emit_move_insn (value, const1_rtx);
+ emit_label (lab);
+ }
- {
- tree dummy_id = get_identifier ("__dummy");
- tree dummy_type = build_function_type (void_type_node, NULL_TREE);
- tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
-#ifdef POINTERS_EXTEND_UNSIGNED
- rtx buf_addr
- = force_reg (Pmode,
- convert_memory_address
- (Pmode,
- expand_expr (TREE_VALUE (arglist),
- NULL_RTX, VOIDmode, 0)));
-#else
- rtx buf_addr
- = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
- NULL_RTX,
- VOIDmode, 0));
+ /* Make sure the value is in the right mode to be copied to the chain. */
+ if (GET_MODE (value) != VOIDmode)
+ value = gen_lowpart (GET_MODE (static_chain_rtx), value);
+
+#ifdef HAVE_builtin_longjmp
+ if (HAVE_builtin_longjmp)
+ {
+ /* Copy the "return value" to the static chain reg. */
+ emit_move_insn (static_chain_rtx, value);
+ emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
+ emit_insn (gen_builtin_longjmp (buf_addr));
+ }
+ else
#endif
- rtx fp = gen_rtx (MEM, Pmode, buf_addr);
- rtx lab = gen_rtx (MEM, Pmode,
- plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
- enum machine_mode sa_mode
+ {
+ fp = gen_rtx_MEM (Pmode, buf_addr);
+ lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
+ GET_MODE_SIZE (Pmode)));
+
#ifdef HAVE_save_stack_nonlocal
- = (HAVE_save_stack_nonlocal
- ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
- : Pmode);
+ sa_mode = (HAVE_save_stack_nonlocal
+ ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
+ : Pmode);
#else
- = Pmode;
+ sa_mode = Pmode;
#endif
- rtx stack = gen_rtx (MEM, sa_mode,
- plus_constant (buf_addr,
- 2 * GET_MODE_SIZE (Pmode)));
-
- DECL_EXTERNAL (dummy_decl) = 1;
- TREE_PUBLIC (dummy_decl) = 1;
- make_decl_rtl (dummy_decl, NULL_PTR, 1);
-
- /* Expand the second expression just for side-effects. */
- expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
- const0_rtx, VOIDmode, 0);
- assemble_external (dummy_decl);
+ stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
+ 2 * GET_MODE_SIZE (Pmode)));
- /* Pick up FP, label, and SP from the block and jump. This code is
- from expand_goto in stmt.c; see there for detailed comments. */
+ /* Pick up FP, label, and SP from the block and jump. This code is
+ from expand_goto in stmt.c; see there for detailed comments. */
#if HAVE_nonlocal_goto
- if (HAVE_nonlocal_goto)
- emit_insn (gen_nonlocal_goto (fp, lab, stack,
- XEXP (DECL_RTL (dummy_decl), 0)));
+ if (HAVE_nonlocal_goto)
+ emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
else
#endif
{
lab = copy_to_reg (lab);
+
+ /* Copy the "return value" to the static chain reg. */
+ emit_move_insn (static_chain_rtx, value);
+
emit_move_insn (hard_frame_pointer_rtx, fp);
emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
- /* Put in the static chain register the address of the dummy
- function. */
- emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
- emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
- emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
- emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
+ emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
+ emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
+ emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
emit_indirect_jump (lab);
}
-
- return const0_rtx;
- }
-
- default: /* just do library call, if unknown builtin */
- error ("built-in function `%s' not currently supported",
- IDENTIFIER_POINTER (DECL_NAME (fndecl)));
}
-
- /* The switch statement above can drop through to cause the function
- to be called normally. */
-
- return expand_call (exp, target, ignore);
}
-\f
-/* Built-in functions to perform an untyped call and return. */
-
-/* For each register that may be used for calling a function, this
- gives a mode used to copy the register's value. VOIDmode indicates
- the register is not used for calling a function. If the machine
- has register windows, this gives only the outbound registers.
- INCOMING_REGNO gives the corresponding inbound register. */
-static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
-
-/* For each register that may be used for returning values, this gives
- a mode used to copy the register's value. VOIDmode indicates the
- register is not used for returning values. If the machine has
- register windows, this gives only the outbound registers.
- INCOMING_REGNO gives the corresponding inbound register. */
-static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
-
-/* For each register that may be used for calling a function, this
- gives the offset of that register into the block returned by
- __builtin_apply_args. 0 indicates that the register is not
- used for calling a function. */
-static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
-
-/* Return the offset of register REGNO into the block returned by
- __builtin_apply_args. This is not declared static, since it is
- needed in objc-act.c. */
-
-int
-apply_args_register_offset (regno)
- int regno;
-{
- apply_args_size ();
- /* Arguments are always put in outgoing registers (in the argument
- block) if such make sense. */
-#ifdef OUTGOING_REGNO
- regno = OUTGOING_REGNO(regno);
-#endif
- return apply_args_reg_offset[regno];
-}
+\f
+/* Expand an expression EXP that calls a built-in function,
+ with result going to TARGET if that's convenient
+ (and in mode MODE if that's convenient).
+ SUBTARGET may be used as the target for computing one of EXP's operands.
+ IGNORE is nonzero if the value is to be ignored. */
-/* Return the size required for the block returned by __builtin_apply_args,
- and initialize apply_args_mode. */
+#define CALLED_AS_BUILT_IN(NODE) \
+ (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
-static int
-apply_args_size ()
+static rtx
+expand_builtin (exp, target, subtarget, mode, ignore)
+ tree exp;
+ rtx target;
+ rtx subtarget;
+ enum machine_mode mode;
+ int ignore;
{
- static int size = -1;
- int align, regno;
- enum machine_mode mode;
+ tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
+ tree arglist = TREE_OPERAND (exp, 1);
+ rtx op0;
+ rtx lab1, insns;
+ enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
+ optab builtin_optab;
- /* The values computed by this function never change. */
- if (size < 0)
+ switch (DECL_FUNCTION_CODE (fndecl))
{
- /* The first value is the incoming arg-pointer. */
- size = GET_MODE_SIZE (Pmode);
+ case BUILT_IN_ABS:
+ case BUILT_IN_LABS:
+ case BUILT_IN_FABS:
+ /* build_function_call changes these into ABS_EXPR. */
+ abort ();
- /* The second value is the structure value address unless this is
- passed as an "invisible" first argument. */
- if (struct_value_rtx)
- size += GET_MODE_SIZE (Pmode);
+ case BUILT_IN_SIN:
+ case BUILT_IN_COS:
+ /* Treat these like sqrt, but only if the user asks for them. */
+ if (! flag_fast_math)
+ break;
+ case BUILT_IN_FSQRT:
+ /* If not optimizing, call the library function. */
+ if (! optimize)
+ break;
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if (FUNCTION_ARG_REGNO_P (regno))
- {
- /* Search for the proper mode for copying this register's
- value. I'm not sure this is right, but it works so far. */
- enum machine_mode best_mode = VOIDmode;
+ if (arglist == 0
+ /* Arg could be wrong type if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
+ break;
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode)
- && HARD_REGNO_NREGS (regno, mode) == 1)
- best_mode = mode;
+ /* Stabilize and compute the argument. */
+ if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
+ && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
+ {
+ exp = copy_node (exp);
+ arglist = copy_node (arglist);
+ TREE_OPERAND (exp, 1) = arglist;
+ TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
+ }
+ op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
- if (best_mode == VOIDmode)
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode)
- && (mov_optab->handlers[(int) mode].insn_code
- != CODE_FOR_nothing))
- best_mode = mode;
+ /* Make a suitable register to place result in. */
+ target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
- mode = best_mode;
- if (mode == VOIDmode)
- abort ();
+ emit_queue ();
+ start_sequence ();
- align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
- if (size % align != 0)
- size = CEIL (size, align) * align;
- apply_args_reg_offset[regno] = size;
- size += GET_MODE_SIZE (mode);
- apply_args_mode[regno] = mode;
- }
- else
- {
- apply_args_mode[regno] = VOIDmode;
- apply_args_reg_offset[regno] = 0;
- }
- }
- return size;
-}
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ case BUILT_IN_SIN:
+ builtin_optab = sin_optab; break;
+ case BUILT_IN_COS:
+ builtin_optab = cos_optab; break;
+ case BUILT_IN_FSQRT:
+ builtin_optab = sqrt_optab; break;
+ default:
+ abort ();
+ }
-/* Return the size required for the block returned by __builtin_apply,
- and initialize apply_result_mode. */
+ /* Compute into TARGET.
+ Set TARGET to wherever the result comes back. */
+ target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
+ builtin_optab, op0, target, 0);
-static int
-apply_result_size ()
-{
- static int size = -1;
- int align, regno;
- enum machine_mode mode;
+ /* If we were unable to expand via the builtin, stop the
+ sequence (without outputting the insns) and break, causing
+ a call to the library function. */
+ if (target == 0)
+ {
+ end_sequence ();
+ break;
+ }
- /* The values computed by this function never change. */
- if (size < 0)
- {
- size = 0;
+ /* Check the results by default. But if flag_fast_math is turned on,
+ then assume sqrt will always be called with valid arguments. */
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if (FUNCTION_VALUE_REGNO_P (regno))
- {
- /* Search for the proper mode for copying this register's
- value. I'm not sure this is right, but it works so far. */
- enum machine_mode best_mode = VOIDmode;
+ if (! flag_fast_math)
+ {
+ /* Don't define the builtin FP instructions
+ if your machine is not IEEE. */
+ if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
+ abort ();
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- mode != TImode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode))
- best_mode = mode;
+ lab1 = gen_label_rtx ();
- if (best_mode == VOIDmode)
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode)
- && (mov_optab->handlers[(int) mode].insn_code
- != CODE_FOR_nothing))
- best_mode = mode;
+ /* Test the result; if it is NaN, set errno=EDOM because
+ the argument was not in the domain. */
+ emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
+ emit_jump_insn (gen_beq (lab1));
- mode = best_mode;
- if (mode == VOIDmode)
- abort ();
+#ifdef TARGET_EDOM
+ {
+#ifdef GEN_ERRNO_RTX
+ rtx errno_rtx = GEN_ERRNO_RTX;
+#else
+ rtx errno_rtx
+ = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
+#endif
- align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
- if (size % align != 0)
- size = CEIL (size, align) * align;
- size += GET_MODE_SIZE (mode);
- apply_result_mode[regno] = mode;
+ emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
}
- else
- apply_result_mode[regno] = VOIDmode;
-
- /* Allow targets that use untyped_call and untyped_return to override
- the size so that machine-specific information can be stored here. */
-#ifdef APPLY_RESULT_SIZE
- size = APPLY_RESULT_SIZE;
+#else
+ /* We can't set errno=EDOM directly; let the library call do it.
+ Pop the arguments right away in case the call gets deleted. */
+ NO_DEFER_POP;
+ expand_call (exp, target, 0);
+ OK_DEFER_POP;
#endif
- }
- return size;
-}
-#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
-/* Create a vector describing the result block RESULT. If SAVEP is true,
- the result block is used to save the values; otherwise it is used to
- restore the values. */
+ emit_label (lab1);
+ }
-static rtx
-result_vector (savep, result)
- int savep;
- rtx result;
-{
- int regno, size, align, nelts;
- enum machine_mode mode;
- rtx reg, mem;
- rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
-
- size = nelts = 0;
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if ((mode = apply_result_mode[regno]) != VOIDmode)
+ /* Output the entire sequence. */
+ insns = get_insns ();
+ end_sequence ();
+ emit_insns (insns);
+
+ return target;
+
+ case BUILT_IN_FMOD:
+ break;
+
+ /* __builtin_apply_args returns block of memory allocated on
+ the stack into which is stored the arg pointer, structure
+ value address, static chain, and all the registers that might
+ possibly be used in performing a function call. The code is
+ moved to the start of the function so the incoming values are
+ saved. */
+ case BUILT_IN_APPLY_ARGS:
+ /* Don't do __builtin_apply_args more than once in a function.
+ Save the result of the first call and reuse it. */
+ if (apply_args_value != 0)
+ return apply_args_value;
{
- align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
- if (size % align != 0)
- size = CEIL (size, align) * align;
- reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
- mem = change_address (result, mode,
- plus_constant (XEXP (result, 0), size));
- savevec[nelts++] = (savep
- ? gen_rtx (SET, VOIDmode, mem, reg)
- : gen_rtx (SET, VOIDmode, reg, mem));
- size += GET_MODE_SIZE (mode);
+ /* When this function is called, it means that registers must be
+ saved on entry to this function. So we migrate the
+ call to the first insn of this function. */
+ rtx temp;
+ rtx seq;
+
+ start_sequence ();
+ temp = expand_builtin_apply_args ();
+ seq = get_insns ();
+ end_sequence ();
+
+ apply_args_value = temp;
+
+ /* Put the sequence after the NOTE that starts the function.
+ If this is inside a SEQUENCE, make the outer-level insn
+ chain current, so the code is placed at the start of the
+ function. */
+ push_topmost_sequence ();
+ emit_insns_before (seq, NEXT_INSN (get_insns ()));
+ pop_topmost_sequence ();
+ return temp;
}
- return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
-}
-#endif /* HAVE_untyped_call or HAVE_untyped_return */
-/* Save the state required to perform an untyped call with the same
- arguments as were passed to the current function. */
+ /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
+ FUNCTION with a copy of the parameters described by
+ ARGUMENTS, and ARGSIZE. It returns a block of memory
+ allocated on the stack into which is stored all the registers
+ that might possibly be used for returning the result of a
+ function. ARGUMENTS is the value returned by
+ __builtin_apply_args. ARGSIZE is the number of bytes of
+ arguments that must be copied. ??? How should this value be
+ computed? We'll also need a safe worst case value for varargs
+ functions. */
+ case BUILT_IN_APPLY:
+ if (arglist == 0
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
+ || TREE_CHAIN (arglist) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
+ || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
+ return const0_rtx;
+ else
+ {
+ int i;
+ tree t;
+ rtx ops[3];
-static rtx
-expand_builtin_apply_args ()
-{
- rtx registers;
- int size, align, regno;
- enum machine_mode mode;
+ for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
+ ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
- /* Create a block where the arg-pointer, structure value address,
- and argument registers can be saved. */
- registers = assign_stack_local (BLKmode, apply_args_size (), -1);
+ return expand_builtin_apply (ops[0], ops[1], ops[2]);
+ }
- /* Walk past the arg-pointer and structure value address. */
- size = GET_MODE_SIZE (Pmode);
- if (struct_value_rtx)
- size += GET_MODE_SIZE (Pmode);
+ /* __builtin_return (RESULT) causes the function to return the
+ value described by RESULT. RESULT is address of the block of
+ memory returned by __builtin_apply. */
+ case BUILT_IN_RETURN:
+ if (arglist
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
+ expand_builtin_return (expand_expr (TREE_VALUE (arglist),
+ NULL_RTX, VOIDmode, 0));
+ return const0_rtx;
- /* Save each register used in calling a function to the block. */
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if ((mode = apply_args_mode[regno]) != VOIDmode)
+ case BUILT_IN_SAVEREGS:
+ /* Don't do __builtin_saveregs more than once in a function.
+ Save the result of the first call and reuse it. */
+ if (saveregs_value != 0)
+ return saveregs_value;
{
- rtx tem;
+ /* When this function is called, it means that registers must be
+ saved on entry to this function. So we migrate the
+ call to the first insn of this function. */
+ rtx temp;
+ rtx seq;
- align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
- if (size % align != 0)
- size = CEIL (size, align) * align;
+ /* Now really call the function. `expand_call' does not call
+ expand_builtin, so there is no danger of infinite recursion here. */
+ start_sequence ();
- tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
+#ifdef EXPAND_BUILTIN_SAVEREGS
+ /* Do whatever the machine needs done in this case. */
+ temp = EXPAND_BUILTIN_SAVEREGS (arglist);
+#else
+ /* The register where the function returns its value
+ is likely to have something else in it, such as an argument.
+ So preserve that register around the call. */
-#ifdef STACK_REGS
- /* For reg-stack.c's stack register household.
- Compare with a similar piece of code in function.c. */
+ if (value_mode != VOIDmode)
+ {
+ rtx valreg = hard_libcall_value (value_mode);
+ rtx saved_valreg = gen_reg_rtx (value_mode);
- emit_insn (gen_rtx (USE, mode, tem));
+ emit_move_insn (saved_valreg, valreg);
+ temp = expand_call (exp, target, ignore);
+ emit_move_insn (valreg, saved_valreg);
+ }
+ else
+ /* Generate the call, putting the value in a pseudo. */
+ temp = expand_call (exp, target, ignore);
#endif
- emit_move_insn (change_address (registers, mode,
- plus_constant (XEXP (registers, 0),
- size)),
- tem);
- size += GET_MODE_SIZE (mode);
- }
+ seq = get_insns ();
+ end_sequence ();
- /* Save the arg pointer to the block. */
- emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
- copy_to_reg (virtual_incoming_args_rtx));
- size = GET_MODE_SIZE (Pmode);
+ saveregs_value = temp;
- /* Save the structure value address unless this is passed as an
- "invisible" first argument. */
- if (struct_value_incoming_rtx)
- {
- emit_move_insn (change_address (registers, Pmode,
- plus_constant (XEXP (registers, 0),
- size)),
- copy_to_reg (struct_value_incoming_rtx));
- size += GET_MODE_SIZE (Pmode);
- }
+ /* Put the sequence after the NOTE that starts the function.
+ If this is inside a SEQUENCE, make the outer-level insn
+ chain current, so the code is placed at the start of the
+ function. */
+ push_topmost_sequence ();
+ emit_insns_before (seq, NEXT_INSN (get_insns ()));
+ pop_topmost_sequence ();
+ return temp;
+ }
- /* Return the address of the block. */
- return copy_addr_to_reg (XEXP (registers, 0));
-}
+ /* __builtin_args_info (N) returns word N of the arg space info
+ for the current function. The number and meanings of words
+ is controlled by the definition of CUMULATIVE_ARGS. */
+ case BUILT_IN_ARGS_INFO:
+ {
+ int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
+ int *word_ptr = (int *) &current_function_args_info;
+#if 0
+ /* These are used by the code below that is if 0'ed away */
+ int i;
+ tree type, elts, result;
+#endif
-/* Perform an untyped call and save the state required to perform an
- untyped return of whatever value was returned by the given function. */
+ if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
+ fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
+ __FILE__, __LINE__);
-static rtx
-expand_builtin_apply (function, arguments, argsize)
- rtx function, arguments, argsize;
-{
- int size, align, regno;
- enum machine_mode mode;
- rtx incoming_args, result, reg, dest, call_insn;
- rtx old_stack_level = 0;
- rtx call_fusage = 0;
+ if (arglist != 0)
+ {
+ tree arg = TREE_VALUE (arglist);
+ if (TREE_CODE (arg) != INTEGER_CST)
+ error ("argument of `__builtin_args_info' must be constant");
+ else
+ {
+ int wordnum = TREE_INT_CST_LOW (arg);
- /* Create a block where the return registers can be saved. */
- result = assign_stack_local (BLKmode, apply_result_size (), -1);
+ if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
+ error ("argument of `__builtin_args_info' out of range");
+ else
+ return GEN_INT (word_ptr[wordnum]);
+ }
+ }
+ else
+ error ("missing argument in `__builtin_args_info'");
- /* ??? The argsize value should be adjusted here. */
+ return const0_rtx;
- /* Fetch the arg pointer from the ARGUMENTS block. */
- incoming_args = gen_reg_rtx (Pmode);
- emit_move_insn (incoming_args,
- gen_rtx (MEM, Pmode, arguments));
-#ifndef STACK_GROWS_DOWNWARD
- incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
- incoming_args, 0, OPTAB_LIB_WIDEN);
+#if 0
+ for (i = 0; i < nwords; i++)
+ elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
+
+ type = build_array_type (integer_type_node,
+ build_index_type (build_int_2 (nwords, 0)));
+ result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
+ TREE_CONSTANT (result) = 1;
+ TREE_STATIC (result) = 1;
+ result = build (INDIRECT_REF, build_pointer_type (type), result);
+ TREE_CONSTANT (result) = 1;
+ return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
#endif
+ }
- /* Perform postincrements before actually calling the function. */
- emit_queue ();
+ /* Return the address of the first anonymous stack arg. */
+ case BUILT_IN_NEXT_ARG:
+ {
+ tree fntype = TREE_TYPE (current_function_decl);
- /* Push a new argument block and copy the arguments. */
- do_pending_stack_adjust ();
- emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
+ if ((TYPE_ARG_TYPES (fntype) == 0
+ || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
+ == void_type_node))
+ && ! current_function_varargs)
+ {
+ error ("`va_start' used in function with fixed args");
+ return const0_rtx;
+ }
- /* Push a block of memory onto the stack to store the memory arguments.
- Save the address in a register, and copy the memory arguments. ??? I
- haven't figured out how the calling convention macros effect this,
- but it's likely that the source and/or destination addresses in
- the block copy will need updating in machine specific ways. */
- dest = copy_addr_to_reg (push_block (argsize, 0, 0));
- emit_block_move (gen_rtx (MEM, BLKmode, dest),
- gen_rtx (MEM, BLKmode, incoming_args),
- argsize,
- PARM_BOUNDARY / BITS_PER_UNIT);
+ if (arglist)
+ {
+ tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
+ tree arg = TREE_VALUE (arglist);
- /* Refer to the argument block. */
- apply_args_size ();
- arguments = gen_rtx (MEM, BLKmode, arguments);
+ /* Strip off all nops for the sake of the comparison. This
+ is not quite the same as STRIP_NOPS. It does more.
+ We must also strip off INDIRECT_EXPR for C++ reference
+ parameters. */
+ while (TREE_CODE (arg) == NOP_EXPR
+ || TREE_CODE (arg) == CONVERT_EXPR
+ || TREE_CODE (arg) == NON_LVALUE_EXPR
+ || TREE_CODE (arg) == INDIRECT_REF)
+ arg = TREE_OPERAND (arg, 0);
+ if (arg != last_parm)
+ warning ("second parameter of `va_start' not last named argument");
+ }
+ else if (! current_function_varargs)
+ /* Evidently an out of date version of <stdarg.h>; can't validate
+ va_start's second argument, but can still work as intended. */
+ warning ("`__builtin_next_arg' called without an argument");
+ }
- /* Walk past the arg-pointer and structure value address. */
- size = GET_MODE_SIZE (Pmode);
- if (struct_value_rtx)
- size += GET_MODE_SIZE (Pmode);
+ return expand_binop (Pmode, add_optab,
+ current_function_internal_arg_pointer,
+ current_function_arg_offset_rtx,
+ NULL_RTX, 0, OPTAB_LIB_WIDEN);
- /* Restore each of the registers previously saved. Make USE insns
- for each of these registers for use in making the call. */
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if ((mode = apply_args_mode[regno]) != VOIDmode)
- {
- align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
- if (size % align != 0)
- size = CEIL (size, align) * align;
- reg = gen_rtx (REG, mode, regno);
- emit_move_insn (reg,
- change_address (arguments, mode,
- plus_constant (XEXP (arguments, 0),
- size)));
+ case BUILT_IN_CLASSIFY_TYPE:
+ if (arglist != 0)
+ {
+ tree type = TREE_TYPE (TREE_VALUE (arglist));
+ enum tree_code code = TREE_CODE (type);
+ if (code == VOID_TYPE)
+ return GEN_INT (void_type_class);
+ if (code == INTEGER_TYPE)
+ return GEN_INT (integer_type_class);
+ if (code == CHAR_TYPE)
+ return GEN_INT (char_type_class);
+ if (code == ENUMERAL_TYPE)
+ return GEN_INT (enumeral_type_class);
+ if (code == BOOLEAN_TYPE)
+ return GEN_INT (boolean_type_class);
+ if (code == POINTER_TYPE)
+ return GEN_INT (pointer_type_class);
+ if (code == REFERENCE_TYPE)
+ return GEN_INT (reference_type_class);
+ if (code == OFFSET_TYPE)
+ return GEN_INT (offset_type_class);
+ if (code == REAL_TYPE)
+ return GEN_INT (real_type_class);
+ if (code == COMPLEX_TYPE)
+ return GEN_INT (complex_type_class);
+ if (code == FUNCTION_TYPE)
+ return GEN_INT (function_type_class);
+ if (code == METHOD_TYPE)
+ return GEN_INT (method_type_class);
+ if (code == RECORD_TYPE)
+ return GEN_INT (record_type_class);
+ if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
+ return GEN_INT (union_type_class);
+ if (code == ARRAY_TYPE)
+ {
+ if (TYPE_STRING_FLAG (type))
+ return GEN_INT (string_type_class);
+ else
+ return GEN_INT (array_type_class);
+ }
+ if (code == SET_TYPE)
+ return GEN_INT (set_type_class);
+ if (code == FILE_TYPE)
+ return GEN_INT (file_type_class);
+ if (code == LANG_TYPE)
+ return GEN_INT (lang_type_class);
+ }
+ return GEN_INT (no_type_class);
- use_reg (&call_fusage, reg);
- size += GET_MODE_SIZE (mode);
- }
+ case BUILT_IN_CONSTANT_P:
+ if (arglist == 0)
+ return const0_rtx;
+ else
+ {
+ tree arg = TREE_VALUE (arglist);
- /* Restore the structure value address unless this is passed as an
- "invisible" first argument. */
- size = GET_MODE_SIZE (Pmode);
- if (struct_value_rtx)
- {
- rtx value = gen_reg_rtx (Pmode);
- emit_move_insn (value,
- change_address (arguments, Pmode,
- plus_constant (XEXP (arguments, 0),
- size)));
- emit_move_insn (struct_value_rtx, value);
- if (GET_CODE (struct_value_rtx) == REG)
- use_reg (&call_fusage, struct_value_rtx);
- size += GET_MODE_SIZE (Pmode);
- }
+ STRIP_NOPS (arg);
+ return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
+ || (TREE_CODE (arg) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
+ ? const1_rtx : const0_rtx);
+ }
- /* All arguments and registers used for the call are set up by now! */
- function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
+ case BUILT_IN_FRAME_ADDRESS:
+ /* The argument must be a nonnegative integer constant.
+ It counts the number of frames to scan up the stack.
+ The value is the address of that frame. */
+ case BUILT_IN_RETURN_ADDRESS:
+ /* The argument must be a nonnegative integer constant.
+ It counts the number of frames to scan up the stack.
+ The value is the return address saved in that frame. */
+ if (arglist == 0)
+ /* Warning about missing arg was already issued. */
+ return const0_rtx;
+ else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
+ || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
+ {
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
+ error ("invalid arg to `__builtin_frame_address'");
+ else
+ error ("invalid arg to `__builtin_return_address'");
+ return const0_rtx;
+ }
+ else
+ {
+ rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
+ TREE_INT_CST_LOW (TREE_VALUE (arglist)),
+ hard_frame_pointer_rtx);
- /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
- and we don't want to load it into a register as an optimization,
- because prepare_call_address already did it if it should be done. */
- if (GET_CODE (function) != SYMBOL_REF)
- function = memory_address (FUNCTION_MODE, function);
+ /* Some ports cannot access arbitrary stack frames. */
+ if (tem == NULL)
+ {
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
+ warning ("unsupported arg to `__builtin_frame_address'");
+ else
+ warning ("unsupported arg to `__builtin_return_address'");
+ return const0_rtx;
+ }
- /* Generate the actual call instruction and save the return value. */
-#ifdef HAVE_untyped_call
- if (HAVE_untyped_call)
- emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
- result, result_vector (1, result)));
- else
-#endif
-#ifdef HAVE_call_value
- if (HAVE_call_value)
- {
- rtx valreg = 0;
+ /* For __builtin_frame_address, return what we've got. */
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
+ return tem;
- /* Locate the unique return register. It is not possible to
- express a call that sets more than one return register using
- call_value; use untyped_call for that. In fact, untyped_call
- only needs to save the return registers in the given block. */
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if ((mode = apply_result_mode[regno]) != VOIDmode)
- {
- if (valreg)
- abort (); /* HAVE_untyped_call required. */
- valreg = gen_rtx (REG, mode, regno);
- }
+ if (GET_CODE (tem) != REG)
+ tem = copy_to_reg (tem);
+ return tem;
+ }
+
+ /* Returns the address of the area where the structure is returned.
+ 0 otherwise. */
+ case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
+ if (arglist != 0
+ || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
+ || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
+ return const0_rtx;
+ else
+ return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
- emit_call_insn (gen_call_value (valreg,
- gen_rtx (MEM, FUNCTION_MODE, function),
- const0_rtx, NULL_RTX, const0_rtx));
+ case BUILT_IN_ALLOCA:
+ if (arglist == 0
+ /* Arg could be non-integer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
+ break;
- emit_move_insn (change_address (result, GET_MODE (valreg),
- XEXP (result, 0)),
- valreg);
- }
- else
-#endif
- abort ();
+ /* Compute the argument. */
+ op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
- /* Find the CALL insn we just emitted. */
- for (call_insn = get_last_insn ();
- call_insn && GET_CODE (call_insn) != CALL_INSN;
- call_insn = PREV_INSN (call_insn))
- ;
+ /* Allocate the desired space. */
+ return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
- if (! call_insn)
- abort ();
+ case BUILT_IN_FFS:
+ /* If not optimizing, call the library function. */
+ if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
+ break;
- /* Put the register usage information on the CALL. If there is already
- some usage information, put ours at the end. */
- if (CALL_INSN_FUNCTION_USAGE (call_insn))
- {
- rtx link;
+ if (arglist == 0
+ /* Arg could be non-integer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
+ break;
- for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
- link = XEXP (link, 1))
- ;
+ /* Compute the argument. */
+ op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
+ /* Compute ffs, into TARGET if possible.
+ Set TARGET to wherever the result comes back. */
+ target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
+ ffs_optab, op0, target, 1);
+ if (target == 0)
+ abort ();
+ return target;
- XEXP (link, 1) = call_fusage;
- }
- else
- CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
+ case BUILT_IN_STRLEN:
+ /* If not optimizing, call the library function. */
+ if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
+ break;
- /* Restore the stack. */
- emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
+ if (arglist == 0
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
+ break;
+ else
+ {
+ tree src = TREE_VALUE (arglist);
+ tree len = c_strlen (src);
- /* Return the address of the result block. */
- return copy_addr_to_reg (XEXP (result, 0));
-}
+ int align
+ = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
-/* Perform an untyped return. */
+ rtx result, src_rtx, char_rtx;
+ enum machine_mode insn_mode = value_mode, char_mode;
+ enum insn_code icode;
-static void
-expand_builtin_return (result)
- rtx result;
-{
- int size, align, regno;
- enum machine_mode mode;
- rtx reg;
- rtx call_fusage = 0;
+ /* If the length is known, just return it. */
+ if (len != 0)
+ return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
- apply_result_size ();
- result = gen_rtx (MEM, BLKmode, result);
+ /* If SRC is not a pointer type, don't do this operation inline. */
+ if (align == 0)
+ break;
-#ifdef HAVE_untyped_return
- if (HAVE_untyped_return)
- {
- emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
- emit_barrier ();
- return;
- }
-#endif
+ /* Call a function if we can't compute strlen in the right mode. */
- /* Restore the return value and note that each value is used. */
- size = 0;
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if ((mode = apply_result_mode[regno]) != VOIDmode)
- {
- align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
- if (size % align != 0)
- size = CEIL (size, align) * align;
- reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
- emit_move_insn (reg,
- change_address (result, mode,
- plus_constant (XEXP (result, 0),
- size)));
+ while (insn_mode != VOIDmode)
+ {
+ icode = strlen_optab->handlers[(int) insn_mode].insn_code;
+ if (icode != CODE_FOR_nothing)
+ break;
- push_to_sequence (call_fusage);
- emit_insn (gen_rtx (USE, VOIDmode, reg));
- call_fusage = get_insns ();
- end_sequence ();
- size += GET_MODE_SIZE (mode);
- }
+ insn_mode = GET_MODE_WIDER_MODE (insn_mode);
+ }
+ if (insn_mode == VOIDmode)
+ break;
- /* Put the USE insns before the return. */
- emit_insns (call_fusage);
+ /* Make a place to write the result of the instruction. */
+ result = target;
+ if (! (result != 0
+ && GET_CODE (result) == REG
+ && GET_MODE (result) == insn_mode
+ && REGNO (result) >= FIRST_PSEUDO_REGISTER))
+ result = gen_reg_rtx (insn_mode);
- /* Return whatever values was restored by jumping directly to the end
- of the function. */
- expand_null_return ();
-}
-\f
-/* Expand code for a post- or pre- increment or decrement
- and return the RTX for the result.
- POST is 1 for postinc/decrements and 0 for preinc/decrements. */
+ /* Make sure the operands are acceptable to the predicates. */
-static rtx
-expand_increment (exp, post, ignore)
- register tree exp;
- int post, ignore;
-{
- register rtx op0, op1;
- register rtx temp, value;
- register tree incremented = TREE_OPERAND (exp, 0);
- optab this_optab = add_optab;
- int icode;
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
- int op0_is_copy = 0;
- int single_insn = 0;
- /* 1 means we can't store into OP0 directly,
- because it is a subreg narrower than a word,
- and we don't dare clobber the rest of the word. */
- int bad_subreg = 0;
+ if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
+ result = gen_reg_rtx (insn_mode);
+ src_rtx = memory_address (BLKmode,
+ expand_expr (src, NULL_RTX, ptr_mode,
+ EXPAND_NORMAL));
- if (output_bytecode)
- {
- bc_expand_expr (exp);
- return NULL_RTX;
- }
+ if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
+ src_rtx = copy_to_mode_reg (Pmode, src_rtx);
- /* Stabilize any component ref that might need to be
- evaluated more than once below. */
- if (!post
- || TREE_CODE (incremented) == BIT_FIELD_REF
- || (TREE_CODE (incremented) == COMPONENT_REF
- && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
- || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
- incremented = stabilize_reference (incremented);
- /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
- ones into save exprs so that they don't accidentally get evaluated
- more than once by the code below. */
- if (TREE_CODE (incremented) == PREINCREMENT_EXPR
- || TREE_CODE (incremented) == PREDECREMENT_EXPR)
- incremented = save_expr (incremented);
+ /* Check the string is readable and has an end. */
+ if (flag_check_memory_usage)
+ emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
+ src_rtx, ptr_mode,
+ GEN_INT (MEMORY_USE_RO),
+ TYPE_MODE (integer_type_node));
- /* Compute the operands as RTX.
- Note whether OP0 is the actual lvalue or a copy of it:
- I believe it is a copy iff it is a register or subreg
- and insns were generated in computing it. */
+ char_rtx = const0_rtx;
+ char_mode = insn_operand_mode[(int)icode][2];
+ if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
+ char_rtx = copy_to_mode_reg (char_mode, char_rtx);
- temp = get_last_insn ();
- op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
+ emit_insn (GEN_FCN (icode) (result,
+ gen_rtx_MEM (BLKmode, src_rtx),
+ char_rtx, GEN_INT (align)));
- /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
- in place but instead must do sign- or zero-extension during assignment,
- so we copy it into a new register and let the code below use it as
- a copy.
+ /* Return the value in the proper mode for this function. */
+ if (GET_MODE (result) == value_mode)
+ return result;
+ else if (target != 0)
+ {
+ convert_move (target, result, 0);
+ return target;
+ }
+ else
+ return convert_to_mode (value_mode, result, 0);
+ }
- Note that we can safely modify this SUBREG since it is know not to be
- shared (it was made by the expand_expr call above). */
+ case BUILT_IN_STRCPY:
+ /* If not optimizing, call the library function. */
+ if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
+ break;
- if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
- {
- if (post)
- SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
+ if (arglist == 0
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
+ || TREE_CHAIN (arglist) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
+ break;
else
- bad_subreg = 1;
- }
- else if (GET_CODE (op0) == SUBREG
- && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
- {
- /* We cannot increment this SUBREG in place. If we are
- post-incrementing, get a copy of the old value. Otherwise,
- just mark that we cannot increment in place. */
- if (post)
- op0 = copy_to_reg (op0);
+ {
+ tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
+
+ if (len == 0)
+ break;
+
+ len = size_binop (PLUS_EXPR, len, integer_one_node);
+
+ chainon (arglist, build_tree_list (NULL_TREE, len));
+ }
+
+ /* Drops in. */
+ case BUILT_IN_MEMCPY:
+ /* If not optimizing, call the library function. */
+ if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
+ break;
+
+ if (arglist == 0
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
+ || TREE_CHAIN (arglist) == 0
+ || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
+ != POINTER_TYPE)
+ || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
+ || (TREE_CODE (TREE_TYPE (TREE_VALUE
+ (TREE_CHAIN (TREE_CHAIN (arglist)))))
+ != INTEGER_TYPE))
+ break;
else
- bad_subreg = 1;
- }
+ {
+ tree dest = TREE_VALUE (arglist);
+ tree src = TREE_VALUE (TREE_CHAIN (arglist));
+ tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
+ tree type;
- op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
- && temp != get_last_insn ());
- op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
+ int src_align
+ = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ int dest_align
+ = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
- /* Decide whether incrementing or decrementing. */
- if (TREE_CODE (exp) == POSTDECREMENT_EXPR
- || TREE_CODE (exp) == PREDECREMENT_EXPR)
- this_optab = sub_optab;
+ /* If either SRC or DEST is not a pointer type, don't do
+ this operation in-line. */
+ if (src_align == 0 || dest_align == 0)
+ {
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
+ TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
+ break;
+ }
- /* Convert decrement by a constant into a negative increment. */
- if (this_optab == sub_optab
- && GET_CODE (op1) == CONST_INT)
- {
- op1 = GEN_INT (- INTVAL (op1));
- this_optab = add_optab;
- }
+ dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
+ dest_mem = gen_rtx_MEM (BLKmode,
+ memory_address (BLKmode, dest_rtx));
+ /* There could be a void* cast on top of the object. */
+ while (TREE_CODE (dest) == NOP_EXPR)
+ dest = TREE_OPERAND (dest, 0);
+ type = TREE_TYPE (TREE_TYPE (dest));
+ MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
+ src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
+ src_mem = gen_rtx_MEM (BLKmode,
+ memory_address (BLKmode, src_rtx));
+ len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
+
+ /* Just copy the rights of SRC to the rights of DEST. */
+ if (flag_check_memory_usage)
+ emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
+ dest_rtx, ptr_mode,
+ src_rtx, ptr_mode,
+ len_rtx, TYPE_MODE (sizetype));
- /* For a preincrement, see if we can do this with a single instruction. */
- if (!post)
- {
- icode = (int) this_optab->handlers[(int) mode].insn_code;
- if (icode != (int) CODE_FOR_nothing
- /* Make sure that OP0 is valid for operands 0 and 1
- of the insn we want to queue. */
- && (*insn_operand_predicate[icode][0]) (op0, mode)
- && (*insn_operand_predicate[icode][1]) (op0, mode)
- && (*insn_operand_predicate[icode][2]) (op1, mode))
- single_insn = 1;
- }
+ /* There could be a void* cast on top of the object. */
+ while (TREE_CODE (src) == NOP_EXPR)
+ src = TREE_OPERAND (src, 0);
+ type = TREE_TYPE (TREE_TYPE (src));
+ MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
- /* If OP0 is not the actual lvalue, but rather a copy in a register,
- then we cannot just increment OP0. We must therefore contrive to
- increment the original value. Then, for postincrement, we can return
- OP0 since it is a copy of the old value. For preincrement, expand here
- unless we can do it with a single insn.
+ /* Copy word part most expediently. */
+ dest_addr
+ = emit_block_move (dest_mem, src_mem, len_rtx,
+ MIN (src_align, dest_align));
- Likewise if storing directly into OP0 would clobber high bits
- we need to preserve (bad_subreg). */
- if (op0_is_copy || (!post && !single_insn) || bad_subreg)
- {
- /* This is the easiest way to increment the value wherever it is.
- Problems with multiple evaluation of INCREMENTED are prevented
- because either (1) it is a component_ref or preincrement,
- in which case it was stabilized above, or (2) it is an array_ref
- with constant index in an array in a register, which is
- safe to reevaluate. */
- tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
- || TREE_CODE (exp) == PREDECREMENT_EXPR)
- ? MINUS_EXPR : PLUS_EXPR),
- TREE_TYPE (exp),
- incremented,
- TREE_OPERAND (exp, 1));
+ if (dest_addr == 0)
+ dest_addr = force_operand (dest_rtx, NULL_RTX);
- while (TREE_CODE (incremented) == NOP_EXPR
- || TREE_CODE (incremented) == CONVERT_EXPR)
- {
- newexp = convert (TREE_TYPE (incremented), newexp);
- incremented = TREE_OPERAND (incremented, 0);
+ return dest_addr;
}
- temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
- return post ? op0 : temp;
- }
+ case BUILT_IN_MEMSET:
+ /* If not optimizing, call the library function. */
+ if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
+ break;
- if (post)
- {
- /* We have a true reference to the value in OP0.
- If there is an insn to add or subtract in this mode, queue it.
- Queueing the increment insn avoids the register shuffling
- that often results if we must increment now and first save
- the old value for subsequent use. */
+ if (arglist == 0
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
+ || TREE_CHAIN (arglist) == 0
+ || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
+ != INTEGER_TYPE)
+ || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
+ || (INTEGER_TYPE
+ != (TREE_CODE (TREE_TYPE
+ (TREE_VALUE
+ (TREE_CHAIN (TREE_CHAIN (arglist))))))))
+ break;
+ else
+ {
+ tree dest = TREE_VALUE (arglist);
+ tree val = TREE_VALUE (TREE_CHAIN (arglist));
+ tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
+ tree type;
-#if 0 /* Turned off to avoid making extra insn for indexed memref. */
- op0 = stabilize (op0);
-#endif
+ int dest_align
+ = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ rtx dest_rtx, dest_mem, dest_addr, len_rtx;
- icode = (int) this_optab->handlers[(int) mode].insn_code;
- if (icode != (int) CODE_FOR_nothing
- /* Make sure that OP0 is valid for operands 0 and 1
- of the insn we want to queue. */
- && (*insn_operand_predicate[icode][0]) (op0, mode)
- && (*insn_operand_predicate[icode][1]) (op0, mode))
- {
- if (! (*insn_operand_predicate[icode][2]) (op1, mode))
- op1 = force_reg (mode, op1);
+ /* If DEST is not a pointer type, don't do this
+ operation in-line. */
+ if (dest_align == 0)
+ break;
- return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
- }
- }
+ /* If VAL is not 0, don't do this operation in-line. */
+ if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
+ break;
- /* Preincrement, or we can't increment with one simple insn. */
- if (post)
- /* Save a copy of the value before inc or dec, to return it later. */
- temp = value = copy_to_reg (op0);
- else
- /* Arrange to return the incremented value. */
- /* Copy the rtx because expand_binop will protect from the queue,
- and the results of that would be invalid for us to return
- if our caller does emit_queue before using our result. */
- temp = copy_rtx (value = op0);
+ /* If LEN does not expand to a constant, don't do this
+ operation in-line. */
+ len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
+ if (GET_CODE (len_rtx) != CONST_INT)
+ break;
- /* Increment however we can. */
- op1 = expand_binop (mode, this_optab, value, op1, op0,
- TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
- /* Make sure the value is stored into OP0. */
- if (op1 != op0)
- emit_move_insn (op0, op1);
+ dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
+ dest_mem = gen_rtx_MEM (BLKmode,
+ memory_address (BLKmode, dest_rtx));
+
+ /* Just check DST is writable and mark it as readable. */
+ if (flag_check_memory_usage)
+ emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ dest_rtx, ptr_mode,
+ len_rtx, TYPE_MODE (sizetype),
+ GEN_INT (MEMORY_USE_WO),
+ TYPE_MODE (integer_type_node));
- return temp;
-}
-\f
-/* Expand all function calls contained within EXP, innermost ones first.
- But don't look within expressions that have sequence points.
- For each CALL_EXPR, record the rtx for its value
- in the CALL_EXPR_RTL field. */
+ /* There could be a void* cast on top of the object. */
+ while (TREE_CODE (dest) == NOP_EXPR)
+ dest = TREE_OPERAND (dest, 0);
+ type = TREE_TYPE (TREE_TYPE (dest));
+ MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
-static void
-preexpand_calls (exp)
- tree exp;
-{
- register int nops, i;
- int type = TREE_CODE_CLASS (TREE_CODE (exp));
+ dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
- if (! do_preexpand_calls)
- return;
+ if (dest_addr == 0)
+ dest_addr = force_operand (dest_rtx, NULL_RTX);
- /* Only expressions and references can contain calls. */
+ return dest_addr;
+ }
- if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
- return;
+/* These comparison functions need an instruction that returns an actual
+ index. An ordinary compare that just sets the condition codes
+ is not enough. */
+#ifdef HAVE_cmpstrsi
+ case BUILT_IN_STRCMP:
+ /* If not optimizing, call the library function. */
+ if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
+ break;
- switch (TREE_CODE (exp))
- {
- case CALL_EXPR:
- /* Do nothing if already expanded. */
- if (CALL_EXPR_RTL (exp) != 0
- /* Do nothing if the call returns a variable-sized object. */
- || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
- /* Do nothing to built-in functions. */
- || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
- && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
- == FUNCTION_DECL)
- && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
- return;
+ /* If we need to check memory accesses, call the library function. */
+ if (flag_check_memory_usage)
+ break;
- CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
- return;
+ if (arglist == 0
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
+ || TREE_CHAIN (arglist) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
+ break;
+ else if (!HAVE_cmpstrsi)
+ break;
+ {
+ tree arg1 = TREE_VALUE (arglist);
+ tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
+ tree len, len2;
- case COMPOUND_EXPR:
- case COND_EXPR:
- case TRUTH_ANDIF_EXPR:
- case TRUTH_ORIF_EXPR:
- /* If we find one of these, then we can be sure
- the adjust will be done for it (since it makes jumps).
- Do it now, so that if this is inside an argument
- of a function, we don't get the stack adjustment
- after some other args have already been pushed. */
- do_pending_stack_adjust ();
- return;
+ len = c_strlen (arg1);
+ if (len)
+ len = size_binop (PLUS_EXPR, integer_one_node, len);
+ len2 = c_strlen (arg2);
+ if (len2)
+ len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
- case BLOCK:
- case RTL_EXPR:
- case WITH_CLEANUP_EXPR:
- case CLEANUP_POINT_EXPR:
- return;
+ /* If we don't have a constant length for the first, use the length
+ of the second, if we know it. We don't require a constant for
+ this case; some cost analysis could be done if both are available
+ but neither is constant. For now, assume they're equally cheap.
+
+ If both strings have constant lengths, use the smaller. This
+ could arise if optimization results in strcpy being called with
+ two fixed strings, or if the code was machine-generated. We should
+ add some code to the `memcmp' handler below to deal with such
+ situations, someday. */
+ if (!len || TREE_CODE (len) != INTEGER_CST)
+ {
+ if (len2)
+ len = len2;
+ else if (len == 0)
+ break;
+ }
+ else if (len2 && TREE_CODE (len2) == INTEGER_CST)
+ {
+ if (tree_int_cst_lt (len2, len))
+ len = len2;
+ }
+
+ chainon (arglist, build_tree_list (NULL_TREE, len));
+ }
+
+ /* Drops in. */
+ case BUILT_IN_MEMCMP:
+ /* If not optimizing, call the library function. */
+ if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
+ break;
- case SAVE_EXPR:
- if (SAVE_EXPR_RTL (exp) != 0)
- return;
- }
+ /* If we need to check memory accesses, call the library function. */
+ if (flag_check_memory_usage)
+ break;
- nops = tree_code_length[(int) TREE_CODE (exp)];
- for (i = 0; i < nops; i++)
- if (TREE_OPERAND (exp, i) != 0)
+ if (arglist == 0
+ /* Arg could be non-pointer if user redeclared this fcn wrong. */
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
+ || TREE_CHAIN (arglist) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
+ || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
+ break;
+ else if (!HAVE_cmpstrsi)
+ break;
{
- type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
- if (type == 'e' || type == '<' || type == '1' || type == '2'
- || type == 'r')
- preexpand_calls (TREE_OPERAND (exp, i));
- }
-}
-\f
-/* At the start of a function, record that we have no previously-pushed
- arguments waiting to be popped. */
+ tree arg1 = TREE_VALUE (arglist);
+ tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
+ tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
+ rtx result;
-void
-init_pending_stack_adjust ()
-{
- pending_stack_adjust = 0;
-}
+ int arg1_align
+ = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ int arg2_align
+ = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ enum machine_mode insn_mode
+ = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
-/* When exiting from function, if safe, clear out any pending stack adjust
- so the adjustment won't get done. */
+ /* If we don't have POINTER_TYPE, call the function. */
+ if (arg1_align == 0 || arg2_align == 0)
+ {
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
+ TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
+ break;
+ }
-void
-clear_pending_stack_adjust ()
-{
-#ifdef EXIT_IGNORE_STACK
- if (optimize > 0
- && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
- && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
- && ! flag_inline_functions)
- pending_stack_adjust = 0;
+ /* Make a place to write the result of the instruction. */
+ result = target;
+ if (! (result != 0
+ && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
+ && REGNO (result) >= FIRST_PSEUDO_REGISTER))
+ result = gen_reg_rtx (insn_mode);
+
+ emit_insn (gen_cmpstrsi (result,
+ gen_rtx_MEM (BLKmode,
+ expand_expr (arg1, NULL_RTX,
+ ptr_mode,
+ EXPAND_NORMAL)),
+ gen_rtx_MEM (BLKmode,
+ expand_expr (arg2, NULL_RTX,
+ ptr_mode,
+ EXPAND_NORMAL)),
+ expand_expr (len, NULL_RTX, VOIDmode, 0),
+ GEN_INT (MIN (arg1_align, arg2_align))));
+
+ /* Return the value in the proper mode for this function. */
+ mode = TYPE_MODE (TREE_TYPE (exp));
+ if (GET_MODE (result) == mode)
+ return result;
+ else if (target != 0)
+ {
+ convert_move (target, result, 0);
+ return target;
+ }
+ else
+ return convert_to_mode (mode, result, 0);
+ }
+#else
+ case BUILT_IN_STRCMP:
+ case BUILT_IN_MEMCMP:
+ break;
#endif
-}
-/* Pop any previously-pushed arguments that have not been popped yet. */
+ case BUILT_IN_SETJMP:
+ if (arglist == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
+ break;
+ else
+ {
+ rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
+ VOIDmode, 0);
+ rtx lab = gen_label_rtx ();
+ rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
+ emit_label (lab);
+ return ret;
+ }
-void
-do_pending_stack_adjust ()
-{
- if (inhibit_defer_pop == 0)
- {
- if (pending_stack_adjust != 0)
- adjust_stack (GEN_INT (pending_stack_adjust));
- pending_stack_adjust = 0;
- }
-}
+ /* __builtin_longjmp is passed a pointer to an array of five words.
+ It's similar to the C library longjmp function but works with
+ __builtin_setjmp above. */
+ case BUILT_IN_LONGJMP:
+ if (arglist == 0 || TREE_CHAIN (arglist) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
+ break;
+ else
+ {
+ rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
+ VOIDmode, 0);
+ rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
+ const0_rtx, VOIDmode, 0);
+ expand_builtin_longjmp (buf_addr, value);
+ return const0_rtx;
+ }
-/* Defer the expansion all cleanups up to OLD_CLEANUPS.
- Returns the cleanups to be performed. */
+ /* Various hooks for the DWARF 2 __throw routine. */
+ case BUILT_IN_UNWIND_INIT:
+ expand_builtin_unwind_init ();
+ return const0_rtx;
+ case BUILT_IN_FP:
+ return frame_pointer_rtx;
+ case BUILT_IN_SP:
+ return stack_pointer_rtx;
+#ifdef DWARF2_UNWIND_INFO
+ case BUILT_IN_DWARF_FP_REGNUM:
+ return expand_builtin_dwarf_fp_regnum ();
+ case BUILT_IN_DWARF_REG_SIZE:
+ return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
+#endif
+ case BUILT_IN_FROB_RETURN_ADDR:
+ return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
+ case BUILT_IN_EXTRACT_RETURN_ADDR:
+ return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
+ case BUILT_IN_SET_RETURN_ADDR_REG:
+ expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
+ return const0_rtx;
+ case BUILT_IN_EH_STUB:
+ return expand_builtin_eh_stub ();
+ case BUILT_IN_SET_EH_REGS:
+ expand_builtin_set_eh_regs (TREE_VALUE (arglist),
+ TREE_VALUE (TREE_CHAIN (arglist)));
+ return const0_rtx;
-static tree
-defer_cleanups_to (old_cleanups)
- tree old_cleanups;
-{
- tree new_cleanups = NULL_TREE;
- tree cleanups = cleanups_this_call;
- tree last = NULL_TREE;
+ default: /* just do library call, if unknown builtin */
+ error ("built-in function `%s' not currently supported",
+ IDENTIFIER_POINTER (DECL_NAME (fndecl)));
+ }
- while (cleanups_this_call != old_cleanups)
- {
- (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
- last = cleanups_this_call;
- cleanups_this_call = TREE_CHAIN (cleanups_this_call);
- }
+ /* The switch statement above can drop through to cause the function
+ to be called normally. */
- if (last)
- {
- /* Remove the list from the chain of cleanups. */
- TREE_CHAIN (last) = NULL_TREE;
+ return expand_call (exp, target, ignore);
+}
+\f
+/* Built-in functions to perform an untyped call and return. */
- /* reverse them so that we can build them in the right order. */
- cleanups = nreverse (cleanups);
+/* For each register that may be used for calling a function, this
+ gives a mode used to copy the register's value. VOIDmode indicates
+ the register is not used for calling a function. If the machine
+ has register windows, this gives only the outbound registers.
+ INCOMING_REGNO gives the corresponding inbound register. */
+static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
+/* For each register that may be used for returning values, this gives
+ a mode used to copy the register's value. VOIDmode indicates the
+ register is not used for returning values. If the machine has
+ register windows, this gives only the outbound registers.
+ INCOMING_REGNO gives the corresponding inbound register. */
+static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
- while (cleanups)
- {
- if (new_cleanups)
- new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
- TREE_VALUE (cleanups), new_cleanups);
- else
- new_cleanups = TREE_VALUE (cleanups);
+/* For each register that may be used for calling a function, this
+ gives the offset of that register into the block returned by
+ __builtin_apply_args. 0 indicates that the register is not
+ used for calling a function. */
+static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
- cleanups = TREE_CHAIN (cleanups);
- }
+/* Return the offset of register REGNO into the block returned by
+ __builtin_apply_args. This is not declared static, since it is
+ needed in objc-act.c. */
- pop_obstacks ();
- }
+int
+apply_args_register_offset (regno)
+ int regno;
+{
+ apply_args_size ();
- return new_cleanups;
+ /* Arguments are always put in outgoing registers (in the argument
+ block) if such make sense. */
+#ifdef OUTGOING_REGNO
+ regno = OUTGOING_REGNO(regno);
+#endif
+ return apply_args_reg_offset[regno];
}
-/* Expand all cleanups up to OLD_CLEANUPS.
- Needed here, and also for language-dependent calls. */
+/* Return the size required for the block returned by __builtin_apply_args,
+ and initialize apply_args_mode. */
-void
-expand_cleanups_to (old_cleanups)
- tree old_cleanups;
+static int
+apply_args_size ()
{
- while (cleanups_this_call != old_cleanups)
+ static int size = -1;
+ int align, regno;
+ enum machine_mode mode;
+
+ /* The values computed by this function never change. */
+ if (size < 0)
{
- (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
- expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
- cleanups_this_call = TREE_CHAIN (cleanups_this_call);
- }
-}
-\f
-/* Expand conditional expressions. */
+ /* The first value is the incoming arg-pointer. */
+ size = GET_MODE_SIZE (Pmode);
-/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
- LABEL is an rtx of code CODE_LABEL, in this function and all the
- functions here. */
+ /* The second value is the structure value address unless this is
+ passed as an "invisible" first argument. */
+ if (struct_value_rtx)
+ size += GET_MODE_SIZE (Pmode);
-void
-jumpifnot (exp, label)
- tree exp;
- rtx label;
-{
- do_jump (exp, label, NULL_RTX);
-}
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if (FUNCTION_ARG_REGNO_P (regno))
+ {
+ /* Search for the proper mode for copying this register's
+ value. I'm not sure this is right, but it works so far. */
+ enum machine_mode best_mode = VOIDmode;
-/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ if (HARD_REGNO_MODE_OK (regno, mode)
+ && HARD_REGNO_NREGS (regno, mode) == 1)
+ best_mode = mode;
-void
-jumpif (exp, label)
- tree exp;
- rtx label;
-{
- do_jump (exp, NULL_RTX, label);
-}
+ if (best_mode == VOIDmode)
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ if (HARD_REGNO_MODE_OK (regno, mode)
+ && (mov_optab->handlers[(int) mode].insn_code
+ != CODE_FOR_nothing))
+ best_mode = mode;
-/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
- the result is zero, or IF_TRUE_LABEL if the result is one.
- Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
- meaning fall through in that case.
+ mode = best_mode;
+ if (mode == VOIDmode)
+ abort ();
- do_jump always does any pending stack adjust except when it does not
- actually perform a jump. An example where there is no jump
- is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ apply_args_reg_offset[regno] = size;
+ size += GET_MODE_SIZE (mode);
+ apply_args_mode[regno] = mode;
+ }
+ else
+ {
+ apply_args_mode[regno] = VOIDmode;
+ apply_args_reg_offset[regno] = 0;
+ }
+ }
+ return size;
+}
- This function is responsible for optimizing cases such as
- &&, || and comparison operators in EXP. */
+/* Return the size required for the block returned by __builtin_apply,
+ and initialize apply_result_mode. */
-void
-do_jump (exp, if_false_label, if_true_label)
- tree exp;
- rtx if_false_label, if_true_label;
+static int
+apply_result_size ()
{
- register enum tree_code code = TREE_CODE (exp);
- /* Some cases need to create a label to jump to
- in order to properly fall through.
- These cases set DROP_THROUGH_LABEL nonzero. */
- rtx drop_through_label = 0;
- rtx temp;
- rtx comparison = 0;
- int i;
- tree type;
+ static int size = -1;
+ int align, regno;
enum machine_mode mode;
- emit_queue ();
-
- switch (code)
+ /* The values computed by this function never change. */
+ if (size < 0)
{
- case ERROR_MARK:
- break;
-
- case INTEGER_CST:
- temp = integer_zerop (exp) ? if_false_label : if_true_label;
- if (temp)
- emit_jump (temp);
- break;
+ size = 0;
-#if 0
- /* This is not true with #pragma weak */
- case ADDR_EXPR:
- /* The address of something can never be zero. */
- if (if_true_label)
- emit_jump (if_true_label);
- break;
-#endif
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if (FUNCTION_VALUE_REGNO_P (regno))
+ {
+ /* Search for the proper mode for copying this register's
+ value. I'm not sure this is right, but it works so far. */
+ enum machine_mode best_mode = VOIDmode;
- case NOP_EXPR:
- if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
- || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
- || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
- goto normal;
- case CONVERT_EXPR:
- /* If we are narrowing the operand, we have to do the compare in the
- narrower mode. */
- if ((TYPE_PRECISION (TREE_TYPE (exp))
- < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- goto normal;
- case NON_LVALUE_EXPR:
- case REFERENCE_EXPR:
- case ABS_EXPR:
- case NEGATE_EXPR:
- case LROTATE_EXPR:
- case RROTATE_EXPR:
- /* These cannot change zero->non-zero or vice versa. */
- do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
- break;
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ mode != TImode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ if (HARD_REGNO_MODE_OK (regno, mode))
+ best_mode = mode;
-#if 0
- /* This is never less insns than evaluating the PLUS_EXPR followed by
- a test and can be longer if the test is eliminated. */
- case PLUS_EXPR:
- /* Reduce to minus. */
- exp = build (MINUS_EXPR, TREE_TYPE (exp),
- TREE_OPERAND (exp, 0),
- fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
- TREE_OPERAND (exp, 1))));
- /* Process as MINUS. */
-#endif
+ if (best_mode == VOIDmode)
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ if (HARD_REGNO_MODE_OK (regno, mode)
+ && (mov_optab->handlers[(int) mode].insn_code
+ != CODE_FOR_nothing))
+ best_mode = mode;
- case MINUS_EXPR:
- /* Non-zero iff operands of minus differ. */
- comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
- TREE_OPERAND (exp, 0),
- TREE_OPERAND (exp, 1)),
- NE, NE);
- break;
+ mode = best_mode;
+ if (mode == VOIDmode)
+ abort ();
- case BIT_AND_EXPR:
- /* If we are AND'ing with a small constant, do this comparison in the
- smallest type that fits. If the machine doesn't have comparisons
- that small, it will be converted back to the wider comparison.
- This helps if we are testing the sign bit of a narrower object.
- combine can't do this for us because it can't know whether a
- ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ size += GET_MODE_SIZE (mode);
+ apply_result_mode[regno] = mode;
+ }
+ else
+ apply_result_mode[regno] = VOIDmode;
- if (! SLOW_BYTE_ACCESS
- && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
- && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
- && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
- && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
- && (type = type_for_mode (mode, 1)) != 0
- && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
- && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
- != CODE_FOR_nothing))
- {
- do_jump (convert (type, exp), if_false_label, if_true_label);
- break;
- }
- goto normal;
+ /* Allow targets that use untyped_call and untyped_return to override
+ the size so that machine-specific information can be stored here. */
+#ifdef APPLY_RESULT_SIZE
+ size = APPLY_RESULT_SIZE;
+#endif
+ }
+ return size;
+}
- case TRUTH_NOT_EXPR:
- do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
- break;
+#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
+/* Create a vector describing the result block RESULT. If SAVEP is true,
+ the result block is used to save the values; otherwise it is used to
+ restore the values. */
- case TRUTH_ANDIF_EXPR:
+static rtx
+result_vector (savep, result)
+ int savep;
+ rtx result;
+{
+ int regno, size, align, nelts;
+ enum machine_mode mode;
+ rtx reg, mem;
+ rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
+
+ size = nelts = 0;
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if ((mode = apply_result_mode[regno]) != VOIDmode)
{
- rtx seq1, seq2;
- tree cleanups, old_cleanups;
-
- if (if_false_label == 0)
- if_false_label = drop_through_label = gen_label_rtx ();
- start_sequence ();
- do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
- seq1 = get_insns ();
- end_sequence ();
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
+ mem = change_address (result, mode,
+ plus_constant (XEXP (result, 0), size));
+ savevec[nelts++] = (savep
+ ? gen_rtx_SET (VOIDmode, mem, reg)
+ : gen_rtx_SET (VOIDmode, reg, mem));
+ size += GET_MODE_SIZE (mode);
+ }
+ return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
+}
+#endif /* HAVE_untyped_call or HAVE_untyped_return */
- old_cleanups = cleanups_this_call;
- start_sequence ();
- do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
- seq2 = get_insns ();
- end_sequence ();
+/* Save the state required to perform an untyped call with the same
+ arguments as were passed to the current function. */
- cleanups = defer_cleanups_to (old_cleanups);
- if (cleanups)
- {
- rtx flag = gen_reg_rtx (word_mode);
- tree new_cleanups;
- tree cond;
-
- /* Flag cleanups as not needed. */
- emit_move_insn (flag, const0_rtx);
- emit_insns (seq1);
-
- /* Flag cleanups as needed. */
- emit_move_insn (flag, const1_rtx);
- emit_insns (seq2);
-
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- /* convert flag, which is an rtx, into a tree. */
- cond = make_node (RTL_EXPR);
- TREE_TYPE (cond) = integer_type_node;
- RTL_EXPR_RTL (cond) = flag;
- RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
- cond = save_expr (cond);
-
- new_cleanups = build (COND_EXPR, void_type_node,
- truthvalue_conversion (cond),
- cleanups, integer_zero_node);
- new_cleanups = fold (new_cleanups);
+static rtx
+expand_builtin_apply_args ()
+{
+ rtx registers;
+ int size, align, regno;
+ enum machine_mode mode;
- pop_obstacks ();
+ /* Create a block where the arg-pointer, structure value address,
+ and argument registers can be saved. */
+ registers = assign_stack_local (BLKmode, apply_args_size (), -1);
- /* Now add in the conditionalized cleanups. */
- cleanups_this_call
- = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
- (*interim_eh_hook) (NULL_TREE);
- }
- else
- {
- emit_insns (seq1);
- emit_insns (seq2);
- }
- }
- break;
+ /* Walk past the arg-pointer and structure value address. */
+ size = GET_MODE_SIZE (Pmode);
+ if (struct_value_rtx)
+ size += GET_MODE_SIZE (Pmode);
- case TRUTH_ORIF_EXPR:
+ /* Save each register used in calling a function to the block. */
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if ((mode = apply_args_mode[regno]) != VOIDmode)
{
- rtx seq1, seq2;
- tree cleanups, old_cleanups;
+ rtx tem;
- if (if_true_label == 0)
- if_true_label = drop_through_label = gen_label_rtx ();
- start_sequence ();
- do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
- seq1 = get_insns ();
- end_sequence ();
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
- old_cleanups = cleanups_this_call;
- start_sequence ();
- do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
- seq2 = get_insns ();
- end_sequence ();
+ tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
- cleanups = defer_cleanups_to (old_cleanups);
- if (cleanups)
- {
- rtx flag = gen_reg_rtx (word_mode);
- tree new_cleanups;
- tree cond;
-
- /* Flag cleanups as not needed. */
- emit_move_insn (flag, const0_rtx);
- emit_insns (seq1);
-
- /* Flag cleanups as needed. */
- emit_move_insn (flag, const1_rtx);
- emit_insns (seq2);
-
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- /* convert flag, which is an rtx, into a tree. */
- cond = make_node (RTL_EXPR);
- TREE_TYPE (cond) = integer_type_node;
- RTL_EXPR_RTL (cond) = flag;
- RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
- cond = save_expr (cond);
-
- new_cleanups = build (COND_EXPR, void_type_node,
- truthvalue_conversion (cond),
- cleanups, integer_zero_node);
- new_cleanups = fold (new_cleanups);
+#ifdef STACK_REGS
+ /* For reg-stack.c's stack register household.
+ Compare with a similar piece of code in function.c. */
- pop_obstacks ();
+ emit_insn (gen_rtx_USE (mode, tem));
+#endif
- /* Now add in the conditionalized cleanups. */
- cleanups_this_call
- = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
- (*interim_eh_hook) (NULL_TREE);
- }
- else
- {
- emit_insns (seq1);
- emit_insns (seq2);
- }
+ emit_move_insn (change_address (registers, mode,
+ plus_constant (XEXP (registers, 0),
+ size)),
+ tem);
+ size += GET_MODE_SIZE (mode);
}
- break;
- case COMPOUND_EXPR:
- push_temp_slots ();
- expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
- preserve_temp_slots (NULL_RTX);
- free_temp_slots ();
- pop_temp_slots ();
- emit_queue ();
- do_pending_stack_adjust ();
- do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
- break;
+ /* Save the arg pointer to the block. */
+ emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
+ copy_to_reg (virtual_incoming_args_rtx));
+ size = GET_MODE_SIZE (Pmode);
+
+ /* Save the structure value address unless this is passed as an
+ "invisible" first argument. */
+ if (struct_value_incoming_rtx)
+ {
+ emit_move_insn (change_address (registers, Pmode,
+ plus_constant (XEXP (registers, 0),
+ size)),
+ copy_to_reg (struct_value_incoming_rtx));
+ size += GET_MODE_SIZE (Pmode);
+ }
+
+ /* Return the address of the block. */
+ return copy_addr_to_reg (XEXP (registers, 0));
+}
+
+/* Perform an untyped call and save the state required to perform an
+ untyped return of whatever value was returned by the given function. */
+
+static rtx
+expand_builtin_apply (function, arguments, argsize)
+ rtx function, arguments, argsize;
+{
+ int size, align, regno;
+ enum machine_mode mode;
+ rtx incoming_args, result, reg, dest, call_insn;
+ rtx old_stack_level = 0;
+ rtx call_fusage = 0;
- case COMPONENT_REF:
- case BIT_FIELD_REF:
- case ARRAY_REF:
- {
- int bitsize, bitpos, unsignedp;
- enum machine_mode mode;
- tree type;
- tree offset;
- int volatilep = 0;
+ /* Create a block where the return registers can be saved. */
+ result = assign_stack_local (BLKmode, apply_result_size (), -1);
- /* Get description of this reference. We don't actually care
- about the underlying object here. */
- get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep);
+ /* ??? The argsize value should be adjusted here. */
- type = type_for_size (bitsize, unsignedp);
- if (! SLOW_BYTE_ACCESS
- && type != 0 && bitsize >= 0
- && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
- && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
- != CODE_FOR_nothing))
- {
- do_jump (convert (type, exp), if_false_label, if_true_label);
- break;
- }
- goto normal;
- }
+ /* Fetch the arg pointer from the ARGUMENTS block. */
+ incoming_args = gen_reg_rtx (Pmode);
+ emit_move_insn (incoming_args,
+ gen_rtx_MEM (Pmode, arguments));
+#ifndef STACK_GROWS_DOWNWARD
+ incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
+ incoming_args, 0, OPTAB_LIB_WIDEN);
+#endif
- case COND_EXPR:
- /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
- if (integer_onep (TREE_OPERAND (exp, 1))
- && integer_zerop (TREE_OPERAND (exp, 2)))
- do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
+ /* Perform postincrements before actually calling the function. */
+ emit_queue ();
- else if (integer_zerop (TREE_OPERAND (exp, 1))
- && integer_onep (TREE_OPERAND (exp, 2)))
- do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
+ /* Push a new argument block and copy the arguments. */
+ do_pending_stack_adjust ();
- else
- {
- register rtx label1 = gen_label_rtx ();
- drop_through_label = gen_label_rtx ();
- do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
- /* Now the THEN-expression. */
- do_jump (TREE_OPERAND (exp, 1),
- if_false_label ? if_false_label : drop_through_label,
- if_true_label ? if_true_label : drop_through_label);
- /* In case the do_jump just above never jumps. */
- do_pending_stack_adjust ();
- emit_label (label1);
- /* Now the ELSE-expression. */
- do_jump (TREE_OPERAND (exp, 2),
- if_false_label ? if_false_label : drop_through_label,
- if_true_label ? if_true_label : drop_through_label);
- }
- break;
+ /* Save the stack with nonlocal if available. */
+#ifdef HAVE_save_stack_nonlocal
+ if (HAVE_save_stack_nonlocal)
+ emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
+ else
+#endif
+ emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
- case EQ_EXPR:
- {
- tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
+ /* Push a block of memory onto the stack to store the memory arguments.
+ Save the address in a register, and copy the memory arguments. ??? I
+ haven't figured out how the calling convention macros affect this,
+ but it's likely that the source and/or destination addresses in
+ the block copy will need updating in machine specific ways. */
+ dest = allocate_dynamic_stack_space (argsize, 0, 0);
+ emit_block_move (gen_rtx_MEM (BLKmode, dest),
+ gen_rtx_MEM (BLKmode, incoming_args),
+ argsize,
+ PARM_BOUNDARY / BITS_PER_UNIT);
- if (integer_zerop (TREE_OPERAND (exp, 1)))
- do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
- else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
- || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
- do_jump
- (fold
- (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
- fold (build (EQ_EXPR, TREE_TYPE (exp),
- fold (build1 (REALPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 0))),
- fold (build1 (REALPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 1))))),
- fold (build (EQ_EXPR, TREE_TYPE (exp),
- fold (build1 (IMAGPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 0))),
- fold (build1 (IMAGPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 1))))))),
- if_false_label, if_true_label);
- else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
- && !can_compare_p (TYPE_MODE (inner_type)))
- do_jump_by_parts_equality (exp, if_false_label, if_true_label);
- else
- comparison = compare (exp, EQ, EQ);
- break;
- }
+ /* Refer to the argument block. */
+ apply_args_size ();
+ arguments = gen_rtx_MEM (BLKmode, arguments);
- case NE_EXPR:
+ /* Walk past the arg-pointer and structure value address. */
+ size = GET_MODE_SIZE (Pmode);
+ if (struct_value_rtx)
+ size += GET_MODE_SIZE (Pmode);
+
+ /* Restore each of the registers previously saved. Make USE insns
+ for each of these registers for use in making the call. */
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if ((mode = apply_args_mode[regno]) != VOIDmode)
{
- tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ reg = gen_rtx_REG (mode, regno);
+ emit_move_insn (reg,
+ change_address (arguments, mode,
+ plus_constant (XEXP (arguments, 0),
+ size)));
- if (integer_zerop (TREE_OPERAND (exp, 1)))
- do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
- else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
- || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
- do_jump
- (fold
- (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
- fold (build (NE_EXPR, TREE_TYPE (exp),
- fold (build1 (REALPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 0))),
- fold (build1 (REALPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 1))))),
- fold (build (NE_EXPR, TREE_TYPE (exp),
- fold (build1 (IMAGPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 0))),
- fold (build1 (IMAGPART_EXPR,
- TREE_TYPE (inner_type),
- TREE_OPERAND (exp, 1))))))),
- if_false_label, if_true_label);
- else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
- && !can_compare_p (TYPE_MODE (inner_type)))
- do_jump_by_parts_equality (exp, if_true_label, if_false_label);
- else
- comparison = compare (exp, NE, NE);
- break;
+ use_reg (&call_fusage, reg);
+ size += GET_MODE_SIZE (mode);
}
- case LT_EXPR:
- if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
- == MODE_INT)
- && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
- else
- comparison = compare (exp, LT, LTU);
- break;
-
- case LE_EXPR:
- if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
- == MODE_INT)
- && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
- else
- comparison = compare (exp, LE, LEU);
- break;
+ /* Restore the structure value address unless this is passed as an
+ "invisible" first argument. */
+ size = GET_MODE_SIZE (Pmode);
+ if (struct_value_rtx)
+ {
+ rtx value = gen_reg_rtx (Pmode);
+ emit_move_insn (value,
+ change_address (arguments, Pmode,
+ plus_constant (XEXP (arguments, 0),
+ size)));
+ emit_move_insn (struct_value_rtx, value);
+ if (GET_CODE (struct_value_rtx) == REG)
+ use_reg (&call_fusage, struct_value_rtx);
+ size += GET_MODE_SIZE (Pmode);
+ }
- case GT_EXPR:
- if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
- == MODE_INT)
- && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
- else
- comparison = compare (exp, GT, GTU);
- break;
+ /* All arguments and registers used for the call are set up by now! */
+ function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
- case GE_EXPR:
- if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
- == MODE_INT)
- && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
- else
- comparison = compare (exp, GE, GEU);
- break;
+ /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
+ and we don't want to load it into a register as an optimization,
+ because prepare_call_address already did it if it should be done. */
+ if (GET_CODE (function) != SYMBOL_REF)
+ function = memory_address (FUNCTION_MODE, function);
- default:
- normal:
- temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
-#if 0
- /* This is not needed any more and causes poor code since it causes
- comparisons and tests from non-SI objects to have different code
- sequences. */
- /* Copy to register to avoid generating bad insns by cse
- from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
- if (!cse_not_expected && GET_CODE (temp) == MEM)
- temp = copy_to_reg (temp);
+ /* Generate the actual call instruction and save the return value. */
+#ifdef HAVE_untyped_call
+ if (HAVE_untyped_call)
+ emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
+ result, result_vector (1, result)));
+ else
#endif
- do_pending_stack_adjust ();
- if (GET_CODE (temp) == CONST_INT)
- comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
- else if (GET_CODE (temp) == LABEL_REF)
- comparison = const_true_rtx;
- else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
- && !can_compare_p (GET_MODE (temp)))
- /* Note swapping the labels gives us not-equal. */
- do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
- else if (GET_MODE (temp) != VOIDmode)
- comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
- NE, TREE_UNSIGNED (TREE_TYPE (exp)),
- GET_MODE (temp), NULL_RTX, 0);
- else
- abort ();
+#ifdef HAVE_call_value
+ if (HAVE_call_value)
+ {
+ rtx valreg = 0;
+
+ /* Locate the unique return register. It is not possible to
+ express a call that sets more than one return register using
+ call_value; use untyped_call for that. In fact, untyped_call
+ only needs to save the return registers in the given block. */
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if ((mode = apply_result_mode[regno]) != VOIDmode)
+ {
+ if (valreg)
+ abort (); /* HAVE_untyped_call required. */
+ valreg = gen_rtx_REG (mode, regno);
+ }
+
+ emit_call_insn (gen_call_value (valreg,
+ gen_rtx_MEM (FUNCTION_MODE, function),
+ const0_rtx, NULL_RTX, const0_rtx));
+
+ emit_move_insn (change_address (result, GET_MODE (valreg),
+ XEXP (result, 0)),
+ valreg);
}
+ else
+#endif
+ abort ();
- /* Do any postincrements in the expression that was tested. */
- emit_queue ();
+ /* Find the CALL insn we just emitted. */
+ for (call_insn = get_last_insn ();
+ call_insn && GET_CODE (call_insn) != CALL_INSN;
+ call_insn = PREV_INSN (call_insn))
+ ;
- /* If COMPARISON is nonzero here, it is an rtx that can be substituted
- straight into a conditional jump instruction as the jump condition.
- Otherwise, all the work has been done already. */
+ if (! call_insn)
+ abort ();
- if (comparison == const_true_rtx)
- {
- if (if_true_label)
- emit_jump (if_true_label);
- }
- else if (comparison == const0_rtx)
+ /* Put the register usage information on the CALL. If there is already
+ some usage information, put ours at the end. */
+ if (CALL_INSN_FUNCTION_USAGE (call_insn))
{
- if (if_false_label)
- emit_jump (if_false_label);
+ rtx link;
+
+ for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
+ link = XEXP (link, 1))
+ ;
+
+ XEXP (link, 1) = call_fusage;
}
- else if (comparison)
- do_jump_for_compare (comparison, if_false_label, if_true_label);
+ else
+ CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
- if (drop_through_label)
+ /* Restore the stack. */
+#ifdef HAVE_save_stack_nonlocal
+ if (HAVE_save_stack_nonlocal)
+ emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
+ else
+#endif
+ emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
+
+ /* Return the address of the result block. */
+ return copy_addr_to_reg (XEXP (result, 0));
+}
+
+/* Perform an untyped return. */
+
+static void
+expand_builtin_return (result)
+ rtx result;
+{
+ int size, align, regno;
+ enum machine_mode mode;
+ rtx reg;
+ rtx call_fusage = 0;
+
+ apply_result_size ();
+ result = gen_rtx_MEM (BLKmode, result);
+
+#ifdef HAVE_untyped_return
+ if (HAVE_untyped_return)
{
- /* If do_jump produces code that might be jumped around,
- do any stack adjusts from that code, before the place
- where control merges in. */
- do_pending_stack_adjust ();
- emit_label (drop_through_label);
+ emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
+ emit_barrier ();
+ return;
}
+#endif
+
+ /* Restore the return value and note that each value is used. */
+ size = 0;
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if ((mode = apply_result_mode[regno]) != VOIDmode)
+ {
+ align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
+ if (size % align != 0)
+ size = CEIL (size, align) * align;
+ reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
+ emit_move_insn (reg,
+ change_address (result, mode,
+ plus_constant (XEXP (result, 0),
+ size)));
+
+ push_to_sequence (call_fusage);
+ emit_insn (gen_rtx_USE (VOIDmode, reg));
+ call_fusage = get_insns ();
+ end_sequence ();
+ size += GET_MODE_SIZE (mode);
+ }
+
+ /* Put the USE insns before the return. */
+ emit_insns (call_fusage);
+
+ /* Return whatever value was restored by jumping directly to the end
+ of the function. */
+ expand_null_return ();
}
\f
-/* Given a comparison expression EXP for values too wide to be compared
- with one insn, test the comparison and jump to the appropriate label.
- The code of EXP is ignored; we always test GT if SWAP is 0,
- and LT if SWAP is 1. */
+/* Expand code for a post- or pre- increment or decrement
+ and return the RTX for the result.
+ POST is 1 for postinc/decrements and 0 for preinc/decrements. */
-static void
-do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
- tree exp;
- int swap;
- rtx if_false_label, if_true_label;
+static rtx
+expand_increment (exp, post, ignore)
+ register tree exp;
+ int post, ignore;
{
- rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
- rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
- int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
- rtx drop_through_label = 0;
- int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
- int i;
+ register rtx op0, op1;
+ register rtx temp, value;
+ register tree incremented = TREE_OPERAND (exp, 0);
+ optab this_optab = add_optab;
+ int icode;
+ enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
+ int op0_is_copy = 0;
+ int single_insn = 0;
+ /* 1 means we can't store into OP0 directly,
+ because it is a subreg narrower than a word,
+ and we don't dare clobber the rest of the word. */
+ int bad_subreg = 0;
- if (! if_true_label || ! if_false_label)
- drop_through_label = gen_label_rtx ();
- if (! if_true_label)
- if_true_label = drop_through_label;
- if (! if_false_label)
- if_false_label = drop_through_label;
+ /* Stabilize any component ref that might need to be
+ evaluated more than once below. */
+ if (!post
+ || TREE_CODE (incremented) == BIT_FIELD_REF
+ || (TREE_CODE (incremented) == COMPONENT_REF
+ && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
+ || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
+ incremented = stabilize_reference (incremented);
+ /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
+ ones into save exprs so that they don't accidentally get evaluated
+ more than once by the code below. */
+ if (TREE_CODE (incremented) == PREINCREMENT_EXPR
+ || TREE_CODE (incremented) == PREDECREMENT_EXPR)
+ incremented = save_expr (incremented);
- /* Compare a word at a time, high order first. */
- for (i = 0; i < nwords; i++)
- {
- rtx comp;
- rtx op0_word, op1_word;
+ /* Compute the operands as RTX.
+ Note whether OP0 is the actual lvalue or a copy of it:
+ I believe it is a copy iff it is a register or subreg
+ and insns were generated in computing it. */
- if (WORDS_BIG_ENDIAN)
- {
- op0_word = operand_subword_force (op0, i, mode);
- op1_word = operand_subword_force (op1, i, mode);
- }
- else
- {
- op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
- op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
- }
+ temp = get_last_insn ();
+ op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
- /* All but high-order word must be compared as unsigned. */
- comp = compare_from_rtx (op0_word, op1_word,
- (unsignedp || i > 0) ? GTU : GT,
- unsignedp, word_mode, NULL_RTX, 0);
- if (comp == const_true_rtx)
- emit_jump (if_true_label);
- else if (comp != const0_rtx)
- do_jump_for_compare (comp, NULL_RTX, if_true_label);
+ /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
+ in place but instead must do sign- or zero-extension during assignment,
+ so we copy it into a new register and let the code below use it as
+ a copy.
- /* Consider lower words only if these are equal. */
- comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
- NULL_RTX, 0);
- if (comp == const_true_rtx)
- emit_jump (if_false_label);
- else if (comp != const0_rtx)
- do_jump_for_compare (comp, NULL_RTX, if_false_label);
+ Note that we can safely modify this SUBREG since it is known not to be
+ shared (it was made by the expand_expr call above). */
+
+ if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
+ {
+ if (post)
+ SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
+ else
+ bad_subreg = 1;
+ }
+ else if (GET_CODE (op0) == SUBREG
+ && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
+ {
+ /* We cannot increment this SUBREG in place. If we are
+ post-incrementing, get a copy of the old value. Otherwise,
+ just mark that we cannot increment in place. */
+ if (post)
+ op0 = copy_to_reg (op0);
+ else
+ bad_subreg = 1;
}
- if (if_false_label)
- emit_jump (if_false_label);
- if (drop_through_label)
- emit_label (drop_through_label);
-}
+ op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
+ && temp != get_last_insn ());
+ op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
+ EXPAND_MEMORY_USE_BAD);
-/* Compare OP0 with OP1, word at a time, in mode MODE.
- UNSIGNEDP says to do unsigned comparison.
- Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
+ /* Decide whether incrementing or decrementing. */
+ if (TREE_CODE (exp) == POSTDECREMENT_EXPR
+ || TREE_CODE (exp) == PREDECREMENT_EXPR)
+ this_optab = sub_optab;
-void
-do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
- enum machine_mode mode;
- int unsignedp;
- rtx op0, op1;
- rtx if_false_label, if_true_label;
-{
- int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
- rtx drop_through_label = 0;
- int i;
+ /* Convert decrement by a constant into a negative increment. */
+ if (this_optab == sub_optab
+ && GET_CODE (op1) == CONST_INT)
+ {
+ op1 = GEN_INT (- INTVAL (op1));
+ this_optab = add_optab;
+ }
+
+ /* For a preincrement, see if we can do this with a single instruction. */
+ if (!post)
+ {
+ icode = (int) this_optab->handlers[(int) mode].insn_code;
+ if (icode != (int) CODE_FOR_nothing
+ /* Make sure that OP0 is valid for operands 0 and 1
+ of the insn we want to queue. */
+ && (*insn_operand_predicate[icode][0]) (op0, mode)
+ && (*insn_operand_predicate[icode][1]) (op0, mode)
+ && (*insn_operand_predicate[icode][2]) (op1, mode))
+ single_insn = 1;
+ }
- if (! if_true_label || ! if_false_label)
- drop_through_label = gen_label_rtx ();
- if (! if_true_label)
- if_true_label = drop_through_label;
- if (! if_false_label)
- if_false_label = drop_through_label;
+ /* If OP0 is not the actual lvalue, but rather a copy in a register,
+ then we cannot just increment OP0. We must therefore contrive to
+ increment the original value. Then, for postincrement, we can return
+ OP0 since it is a copy of the old value. For preincrement, expand here
+ unless we can do it with a single insn.
- /* Compare a word at a time, high order first. */
- for (i = 0; i < nwords; i++)
+ Likewise if storing directly into OP0 would clobber high bits
+ we need to preserve (bad_subreg). */
+ if (op0_is_copy || (!post && !single_insn) || bad_subreg)
{
- rtx comp;
- rtx op0_word, op1_word;
+ /* This is the easiest way to increment the value wherever it is.
+ Problems with multiple evaluation of INCREMENTED are prevented
+ because either (1) it is a component_ref or preincrement,
+ in which case it was stabilized above, or (2) it is an array_ref
+ with constant index in an array in a register, which is
+ safe to reevaluate. */
+ tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
+ || TREE_CODE (exp) == PREDECREMENT_EXPR)
+ ? MINUS_EXPR : PLUS_EXPR),
+ TREE_TYPE (exp),
+ incremented,
+ TREE_OPERAND (exp, 1));
- if (WORDS_BIG_ENDIAN)
- {
- op0_word = operand_subword_force (op0, i, mode);
- op1_word = operand_subword_force (op1, i, mode);
- }
- else
+ while (TREE_CODE (incremented) == NOP_EXPR
+ || TREE_CODE (incremented) == CONVERT_EXPR)
{
- op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
- op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
+ newexp = convert (TREE_TYPE (incremented), newexp);
+ incremented = TREE_OPERAND (incremented, 0);
}
- /* All but high-order word must be compared as unsigned. */
- comp = compare_from_rtx (op0_word, op1_word,
- (unsignedp || i > 0) ? GTU : GT,
- unsignedp, word_mode, NULL_RTX, 0);
- if (comp == const_true_rtx)
- emit_jump (if_true_label);
- else if (comp != const0_rtx)
- do_jump_for_compare (comp, NULL_RTX, if_true_label);
-
- /* Consider lower words only if these are equal. */
- comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
- NULL_RTX, 0);
- if (comp == const_true_rtx)
- emit_jump (if_false_label);
- else if (comp != const0_rtx)
- do_jump_for_compare (comp, NULL_RTX, if_false_label);
+ temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
+ return post ? op0 : temp;
}
- if (if_false_label)
- emit_jump (if_false_label);
- if (drop_through_label)
- emit_label (drop_through_label);
-}
-
-/* Given an EQ_EXPR expression EXP for values too wide to be compared
- with one insn, test the comparison and jump to the appropriate label. */
+ if (post)
+ {
+ /* We have a true reference to the value in OP0.
+ If there is an insn to add or subtract in this mode, queue it.
+ Queueing the increment insn avoids the register shuffling
+ that often results if we must increment now and first save
+ the old value for subsequent use. */
-static void
-do_jump_by_parts_equality (exp, if_false_label, if_true_label)
- tree exp;
- rtx if_false_label, if_true_label;
-{
- rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
- rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
- int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
- int i;
- rtx drop_through_label = 0;
+#if 0 /* Turned off to avoid making extra insn for indexed memref. */
+ op0 = stabilize (op0);
+#endif
- if (! if_false_label)
- drop_through_label = if_false_label = gen_label_rtx ();
+ icode = (int) this_optab->handlers[(int) mode].insn_code;
+ if (icode != (int) CODE_FOR_nothing
+ /* Make sure that OP0 is valid for operands 0 and 1
+ of the insn we want to queue. */
+ && (*insn_operand_predicate[icode][0]) (op0, mode)
+ && (*insn_operand_predicate[icode][1]) (op0, mode))
+ {
+ if (! (*insn_operand_predicate[icode][2]) (op1, mode))
+ op1 = force_reg (mode, op1);
- for (i = 0; i < nwords; i++)
- {
- rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
- operand_subword_force (op1, i, mode),
- EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
- word_mode, NULL_RTX, 0);
- if (comp == const_true_rtx)
- emit_jump (if_false_label);
- else if (comp != const0_rtx)
- do_jump_for_compare (comp, if_false_label, NULL_RTX);
- }
+ return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
+ }
+ if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
+ {
+ rtx addr = (general_operand (XEXP (op0, 0), mode)
+ ? force_reg (Pmode, XEXP (op0, 0))
+ : copy_to_reg (XEXP (op0, 0)));
+ rtx temp, result;
- if (if_true_label)
- emit_jump (if_true_label);
- if (drop_through_label)
- emit_label (drop_through_label);
-}
-\f
-/* Jump according to whether OP0 is 0.
- We assume that OP0 has an integer mode that is too wide
- for the available compare insns. */
+ op0 = change_address (op0, VOIDmode, addr);
+ temp = force_reg (GET_MODE (op0), op0);
+ if (! (*insn_operand_predicate[icode][2]) (op1, mode))
+ op1 = force_reg (mode, op1);
-static void
-do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
- rtx op0;
- rtx if_false_label, if_true_label;
-{
- int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
- int i;
- rtx drop_through_label = 0;
+ /* The increment queue is LIFO, thus we have to `queue'
+ the instructions in reverse order. */
+ enqueue_insn (op0, gen_move_insn (op0, temp));
+ result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
+ return result;
+ }
+ }
- if (! if_false_label)
- drop_through_label = if_false_label = gen_label_rtx ();
+ /* Preincrement, or we can't increment with one simple insn. */
+ if (post)
+ /* Save a copy of the value before inc or dec, to return it later. */
+ temp = value = copy_to_reg (op0);
+ else
+ /* Arrange to return the incremented value. */
+ /* Copy the rtx because expand_binop will protect from the queue,
+ and the results of that would be invalid for us to return
+ if our caller does emit_queue before using our result. */
+ temp = copy_rtx (value = op0);
- for (i = 0; i < nwords; i++)
- {
- rtx comp = compare_from_rtx (operand_subword_force (op0, i,
- GET_MODE (op0)),
- const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
- if (comp == const_true_rtx)
- emit_jump (if_false_label);
- else if (comp != const0_rtx)
- do_jump_for_compare (comp, if_false_label, NULL_RTX);
- }
+ /* Increment however we can. */
+ op1 = expand_binop (mode, this_optab, value, op1,
+ flag_check_memory_usage ? NULL_RTX : op0,
+ TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
+ /* Make sure the value is stored into OP0. */
+ if (op1 != op0)
+ emit_move_insn (op0, op1);
- if (if_true_label)
- emit_jump (if_true_label);
- if (drop_through_label)
- emit_label (drop_through_label);
+ return temp;
}
-
-/* Given a comparison expression in rtl form, output conditional branches to
- IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
+\f
+/* Expand all function calls contained within EXP, innermost ones first.
+ But don't look within expressions that have sequence points.
+ For each CALL_EXPR, record the rtx for its value
+ in the CALL_EXPR_RTL field. */
static void
-do_jump_for_compare (comparison, if_false_label, if_true_label)
- rtx comparison, if_false_label, if_true_label;
+preexpand_calls (exp)
+ tree exp;
{
- if (if_true_label)
- {
- if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
- emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
- else
- abort ();
+ register int nops, i;
+ int type = TREE_CODE_CLASS (TREE_CODE (exp));
- if (if_false_label)
- emit_jump (if_false_label);
- }
- else if (if_false_label)
- {
- rtx insn;
- rtx prev = get_last_insn ();
- rtx branch = 0;
+ if (! do_preexpand_calls)
+ return;
- /* Output the branch with the opposite condition. Then try to invert
- what is generated. If more than one insn is a branch, or if the
- branch is not the last insn written, abort. If we can't invert
- the branch, emit make a true label, redirect this jump to that,
- emit a jump to the false label and define the true label. */
+ /* Only expressions and references can contain calls. */
- if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
- emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
- else
- abort ();
+ if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
+ return;
- /* Here we get the first insn that was just emitted. It used to be the
- case that, on some machines, emitting the branch would discard
- the previous compare insn and emit a replacement. This isn't
- done anymore, but abort if we see that PREV is deleted. */
+ switch (TREE_CODE (exp))
+ {
+ case CALL_EXPR:
+ /* Do nothing if already expanded. */
+ if (CALL_EXPR_RTL (exp) != 0
+ /* Do nothing if the call returns a variable-sized object. */
+ || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
+ /* Do nothing to built-in functions. */
+ || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
+ && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
+ == FUNCTION_DECL)
+ && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
+ return;
- if (prev == 0)
- insn = get_insns ();
- else if (INSN_DELETED_P (prev))
- abort ();
- else
- insn = NEXT_INSN (prev);
+ CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
+ return;
- for (; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == JUMP_INSN)
- {
- if (branch)
- abort ();
- branch = insn;
- }
+ case COMPOUND_EXPR:
+ case COND_EXPR:
+ case TRUTH_ANDIF_EXPR:
+ case TRUTH_ORIF_EXPR:
+ /* If we find one of these, then we can be sure
+ the adjust will be done for it (since it makes jumps).
+ Do it now, so that if this is inside an argument
+ of a function, we don't get the stack adjustment
+ after some other args have already been pushed. */
+ do_pending_stack_adjust ();
+ return;
- if (branch != get_last_insn ())
- abort ();
+ case BLOCK:
+ case RTL_EXPR:
+ case WITH_CLEANUP_EXPR:
+ case CLEANUP_POINT_EXPR:
+ case TRY_CATCH_EXPR:
+ return;
- JUMP_LABEL (branch) = if_false_label;
- if (! invert_jump (branch, if_false_label))
- {
- if_true_label = gen_label_rtx ();
- redirect_jump (branch, if_true_label);
- emit_jump (if_false_label);
- emit_label (if_true_label);
- }
+ case SAVE_EXPR:
+ if (SAVE_EXPR_RTL (exp) != 0)
+ return;
+
+ default:
+ break;
}
+
+ nops = tree_code_length[(int) TREE_CODE (exp)];
+ for (i = 0; i < nops; i++)
+ if (TREE_OPERAND (exp, i) != 0)
+ {
+ type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
+ if (type == 'e' || type == '<' || type == '1' || type == '2'
+ || type == 'r')
+ preexpand_calls (TREE_OPERAND (exp, i));
+ }
}
\f
-/* Generate code for a comparison expression EXP
- (including code to compute the values to be compared)
- and set (CC0) according to the result.
- SIGNED_CODE should be the rtx operation for this comparison for
- signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
-
- We force a stack adjustment unless there are currently
- things pushed on the stack that aren't yet used. */
+/* At the start of a function, record that we have no previously-pushed
+ arguments waiting to be popped. */
-static rtx
-compare (exp, signed_code, unsigned_code)
- register tree exp;
- enum rtx_code signed_code, unsigned_code;
+void
+init_pending_stack_adjust ()
{
- register rtx op0
- = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
- register rtx op1
- = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
- register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
- register enum machine_mode mode = TYPE_MODE (type);
- int unsignedp = TREE_UNSIGNED (type);
- enum rtx_code code = unsignedp ? unsigned_code : signed_code;
-
- return compare_from_rtx (op0, op1, code, unsignedp, mode,
- ((mode == BLKmode)
- ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
- TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
+ pending_stack_adjust = 0;
}
-/* Like compare but expects the values to compare as two rtx's.
- The decision as to signed or unsigned comparison must be made by the caller.
-
- If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
- compared.
+/* When exiting from function, if safe, clear out any pending stack adjust
+ so the adjustment won't get done.
- If ALIGN is non-zero, it is the alignment of this type; if zero, the
- size of MODE should be used. */
+ Note, if the current function calls alloca, then it must have a
+ frame pointer regardless of the value of flag_omit_frame_pointer. */
-rtx
-compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
- register rtx op0, op1;
- enum rtx_code code;
- int unsignedp;
- enum machine_mode mode;
- rtx size;
- int align;
+void
+clear_pending_stack_adjust ()
{
- rtx tem;
-
- /* If one operand is constant, make it the second one. Only do this
- if the other operand is not constant as well. */
+#ifdef EXIT_IGNORE_STACK
+ if (optimize > 0
+ && (! flag_omit_frame_pointer || current_function_calls_alloca)
+ && EXIT_IGNORE_STACK
+ && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
+ && ! flag_inline_functions)
+ pending_stack_adjust = 0;
+#endif
+}
- if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
- || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
- {
- tem = op0;
- op0 = op1;
- op1 = tem;
- code = swap_condition (code);
- }
+/* Pop any previously-pushed arguments that have not been popped yet. */
- if (flag_force_mem)
+void
+do_pending_stack_adjust ()
+{
+ if (inhibit_defer_pop == 0)
{
- op0 = force_not_mem (op0);
- op1 = force_not_mem (op1);
+ if (pending_stack_adjust != 0)
+ adjust_stack (GEN_INT (pending_stack_adjust));
+ pending_stack_adjust = 0;
}
+}
+\f
+/* Expand conditional expressions. */
- do_pending_stack_adjust ();
-
- if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
- && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
- return tem;
+/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
+ LABEL is an rtx of code CODE_LABEL, in this function and all the
+ functions here. */
-#if 0
- /* There's no need to do this now that combine.c can eliminate lots of
- sign extensions. This can be less efficient in certain cases on other
- machines. */
+void
+jumpifnot (exp, label)
+ tree exp;
+ rtx label;
+{
+ do_jump (exp, label, NULL_RTX);
+}
- /* If this is a signed equality comparison, we can do it as an
- unsigned comparison since zero-extension is cheaper than sign
- extension and comparisons with zero are done as unsigned. This is
- the case even on machines that can do fast sign extension, since
- zero-extension is easier to combine with other operations than
- sign-extension is. If we are comparing against a constant, we must
- convert it to what it would look like unsigned. */
- if ((code == EQ || code == NE) && ! unsignedp
- && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
- {
- if (GET_CODE (op1) == CONST_INT
- && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
- op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
- unsignedp = 1;
- }
-#endif
-
- emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
+/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
- return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
+void
+jumpif (exp, label)
+ tree exp;
+ rtx label;
+{
+ do_jump (exp, NULL_RTX, label);
}
-\f
-/* Generate code to calculate EXP using a store-flag instruction
- and return an rtx for the result. EXP is either a comparison
- or a TRUTH_NOT_EXPR whose operand is a comparison.
-
- If TARGET is nonzero, store the result there if convenient.
- If ONLY_CHEAP is non-zero, only do this if it is likely to be very
- cheap.
+/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
+ the result is zero, or IF_TRUE_LABEL if the result is one.
+ Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
+ meaning fall through in that case.
- Return zero if there is no suitable set-flag instruction
- available on this machine.
+ do_jump always does any pending stack adjust except when it does not
+ actually perform a jump. An example where there is no jump
+ is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
- Once expand_expr has been called on the arguments of the comparison,
- we are committed to doing the store flag, since it is not safe to
- re-evaluate the expression. We emit the store-flag insn by calling
- emit_store_flag, but only expand the arguments if we have a reason
- to believe that emit_store_flag will be successful. If we think that
- it will, but it isn't, we have to simulate the store-flag with a
- set/jump/set sequence. */
+ This function is responsible for optimizing cases such as
+ &&, || and comparison operators in EXP. */
-static rtx
-do_store_flag (exp, target, mode, only_cheap)
+void
+do_jump (exp, if_false_label, if_true_label)
tree exp;
- rtx target;
- enum machine_mode mode;
- int only_cheap;
+ rtx if_false_label, if_true_label;
{
- enum rtx_code code;
- tree arg0, arg1, type;
- tree tem;
- enum machine_mode operand_mode;
- int invert = 0;
- int unsignedp;
- rtx op0, op1;
- enum insn_code icode;
- rtx subtarget = target;
- rtx result, label, pattern, jump_pat;
+ register enum tree_code code = TREE_CODE (exp);
+ /* Some cases need to create a label to jump to
+ in order to properly fall through.
+ These cases set DROP_THROUGH_LABEL nonzero. */
+ rtx drop_through_label = 0;
+ rtx temp;
+ rtx comparison = 0;
+ int i;
+ tree type;
+ enum machine_mode mode;
- /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
- result at the end. We can't simply invert the test since it would
- have already been inverted if it were valid. This case occurs for
- some floating-point comparisons. */
+ emit_queue ();
- if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
- invert = 1, exp = TREE_OPERAND (exp, 0);
+ switch (code)
+ {
+ case ERROR_MARK:
+ break;
+
+ case INTEGER_CST:
+ temp = integer_zerop (exp) ? if_false_label : if_true_label;
+ if (temp)
+ emit_jump (temp);
+ break;
+
+#if 0
+ /* This is not true with #pragma weak */
+ case ADDR_EXPR:
+ /* The address of something can never be zero. */
+ if (if_true_label)
+ emit_jump (if_true_label);
+ break;
+#endif
+
+ case NOP_EXPR:
+ if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
+ || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
+ || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
+ goto normal;
+ case CONVERT_EXPR:
+ /* If we are narrowing the operand, we have to do the compare in the
+ narrower mode. */
+ if ((TYPE_PRECISION (TREE_TYPE (exp))
+ < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ goto normal;
+ case NON_LVALUE_EXPR:
+ case REFERENCE_EXPR:
+ case ABS_EXPR:
+ case NEGATE_EXPR:
+ case LROTATE_EXPR:
+ case RROTATE_EXPR:
+ /* These cannot change zero->non-zero or vice versa. */
+ do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
+ break;
- arg0 = TREE_OPERAND (exp, 0);
- arg1 = TREE_OPERAND (exp, 1);
- type = TREE_TYPE (arg0);
- operand_mode = TYPE_MODE (type);
- unsignedp = TREE_UNSIGNED (type);
+#if 0
+ /* This is never less insns than evaluating the PLUS_EXPR followed by
+ a test and can be longer if the test is eliminated. */
+ case PLUS_EXPR:
+ /* Reduce to minus. */
+ exp = build (MINUS_EXPR, TREE_TYPE (exp),
+ TREE_OPERAND (exp, 0),
+ fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
+ TREE_OPERAND (exp, 1))));
+ /* Process as MINUS. */
+#endif
- /* We won't bother with BLKmode store-flag operations because it would mean
- passing a lot of information to emit_store_flag. */
- if (operand_mode == BLKmode)
- return 0;
+ case MINUS_EXPR:
+ /* Non-zero iff operands of minus differ. */
+ comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
+ TREE_OPERAND (exp, 0),
+ TREE_OPERAND (exp, 1)),
+ NE, NE);
+ break;
- STRIP_NOPS (arg0);
- STRIP_NOPS (arg1);
+ case BIT_AND_EXPR:
+ /* If we are AND'ing with a small constant, do this comparison in the
+ smallest type that fits. If the machine doesn't have comparisons
+ that small, it will be converted back to the wider comparison.
+ This helps if we are testing the sign bit of a narrower object.
+ combine can't do this for us because it can't know whether a
+ ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
- /* Get the rtx comparison code to use. We know that EXP is a comparison
- operation of some type. Some comparisons against 1 and -1 can be
- converted to comparisons with zero. Do so here so that the tests
- below will be aware that we have a comparison with zero. These
- tests will not catch constants in the first operand, but constants
- are rarely passed as the first operand. */
+ if (! SLOW_BYTE_ACCESS
+ && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
+ && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
+ && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
+ && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
+ && (type = type_for_mode (mode, 1)) != 0
+ && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
+ && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
+ != CODE_FOR_nothing))
+ {
+ do_jump (convert (type, exp), if_false_label, if_true_label);
+ break;
+ }
+ goto normal;
- switch (TREE_CODE (exp))
- {
- case EQ_EXPR:
- code = EQ;
- break;
- case NE_EXPR:
- code = NE;
- break;
- case LT_EXPR:
- if (integer_onep (arg1))
- arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
- else
- code = unsignedp ? LTU : LT;
+ case TRUTH_NOT_EXPR:
+ do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
break;
- case LE_EXPR:
- if (! unsignedp && integer_all_onesp (arg1))
- arg1 = integer_zero_node, code = LT;
- else
- code = unsignedp ? LEU : LE;
+
+ case TRUTH_ANDIF_EXPR:
+ if (if_false_label == 0)
+ if_false_label = drop_through_label = gen_label_rtx ();
+ do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
+ start_cleanup_deferral ();
+ do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
+ end_cleanup_deferral ();
break;
- case GT_EXPR:
- if (! unsignedp && integer_all_onesp (arg1))
- arg1 = integer_zero_node, code = GE;
- else
- code = unsignedp ? GTU : GT;
+
+ case TRUTH_ORIF_EXPR:
+ if (if_true_label == 0)
+ if_true_label = drop_through_label = gen_label_rtx ();
+ do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
+ start_cleanup_deferral ();
+ do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
+ end_cleanup_deferral ();
break;
- case GE_EXPR:
- if (integer_onep (arg1))
- arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
- else
- code = unsignedp ? GEU : GE;
+
+ case COMPOUND_EXPR:
+ push_temp_slots ();
+ expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
+ preserve_temp_slots (NULL_RTX);
+ free_temp_slots ();
+ pop_temp_slots ();
+ emit_queue ();
+ do_pending_stack_adjust ();
+ do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
break;
- default:
- abort ();
- }
- /* Put a constant second. */
- if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
- {
- tem = arg0; arg0 = arg1; arg1 = tem;
- code = swap_condition (code);
- }
+ case COMPONENT_REF:
+ case BIT_FIELD_REF:
+ case ARRAY_REF:
+ {
+ int bitsize, bitpos, unsignedp;
+ enum machine_mode mode;
+ tree type;
+ tree offset;
+ int volatilep = 0;
+ int alignment;
- /* If this is an equality or inequality test of a single bit, we can
- do this by shifting the bit being tested to the low-order bit and
- masking the result with the constant 1. If the condition was EQ,
- we xor it with 1. This does not require an scc insn and is faster
- than an scc insn even if we have it. */
+ /* Get description of this reference. We don't actually care
+ about the underlying object here. */
+ get_inner_reference (exp, &bitsize, &bitpos, &offset,
+ &mode, &unsignedp, &volatilep,
+ &alignment);
- if ((code == NE || code == EQ)
- && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
- && integer_pow2p (TREE_OPERAND (arg0, 1))
- && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
- {
- tree inner = TREE_OPERAND (arg0, 0);
- HOST_WIDE_INT tem;
- int bitnum;
- int ops_unsignedp;
+ type = type_for_size (bitsize, unsignedp);
+ if (! SLOW_BYTE_ACCESS
+ && type != 0 && bitsize >= 0
+ && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
+ && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
+ != CODE_FOR_nothing))
+ {
+ do_jump (convert (type, exp), if_false_label, if_true_label);
+ break;
+ }
+ goto normal;
+ }
- tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
- NULL_RTX, VOIDmode, 0));
- /* In this case, immed_double_const will sign extend the value to make
- it look the same on the host and target. We must remove the
- sign-extension before calling exact_log2, since exact_log2 will
- fail for negative values. */
- if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
- && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
- /* We don't use the obvious constant shift to generate the mask,
- because that generates compiler warnings when BITS_PER_WORD is
- greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
- code is unreachable in that case. */
- tem = tem & GET_MODE_MASK (word_mode);
- bitnum = exact_log2 (tem);
+ case COND_EXPR:
+ /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
+ if (integer_onep (TREE_OPERAND (exp, 1))
+ && integer_zerop (TREE_OPERAND (exp, 2)))
+ do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
- /* If INNER is a right shift of a constant and it plus BITNUM does
- not overflow, adjust BITNUM and INNER. */
+ else if (integer_zerop (TREE_OPERAND (exp, 1))
+ && integer_onep (TREE_OPERAND (exp, 2)))
+ do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
- if (TREE_CODE (inner) == RSHIFT_EXPR
- && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
- && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
- && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
- < TYPE_PRECISION (type)))
+ else
{
- bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
- inner = TREE_OPERAND (inner, 0);
+ register rtx label1 = gen_label_rtx ();
+ drop_through_label = gen_label_rtx ();
+
+ do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
+
+ start_cleanup_deferral ();
+ /* Now the THEN-expression. */
+ do_jump (TREE_OPERAND (exp, 1),
+ if_false_label ? if_false_label : drop_through_label,
+ if_true_label ? if_true_label : drop_through_label);
+ /* In case the do_jump just above never jumps. */
+ do_pending_stack_adjust ();
+ emit_label (label1);
+
+ /* Now the ELSE-expression. */
+ do_jump (TREE_OPERAND (exp, 2),
+ if_false_label ? if_false_label : drop_through_label,
+ if_true_label ? if_true_label : drop_through_label);
+ end_cleanup_deferral ();
}
+ break;
- /* If we are going to be able to omit the AND below, we must do our
- operations as unsigned. If we must use the AND, we have a choice.
- Normally unsigned is faster, but for some machines signed is. */
- ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
-#ifdef LOAD_EXTEND_OP
- : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
-#else
- : 1
-#endif
- );
+ case EQ_EXPR:
+ {
+ tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
+
+ if (integer_zerop (TREE_OPERAND (exp, 1)))
+ do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
+ else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
+ || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
+ do_jump
+ (fold
+ (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
+ fold (build (EQ_EXPR, TREE_TYPE (exp),
+ fold (build1 (REALPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 0))),
+ fold (build1 (REALPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 1))))),
+ fold (build (EQ_EXPR, TREE_TYPE (exp),
+ fold (build1 (IMAGPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 0))),
+ fold (build1 (IMAGPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 1))))))),
+ if_false_label, if_true_label);
+ else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
+ && !can_compare_p (TYPE_MODE (inner_type)))
+ do_jump_by_parts_equality (exp, if_false_label, if_true_label);
+ else
+ comparison = compare (exp, EQ, EQ);
+ break;
+ }
+
+ case NE_EXPR:
+ {
+ tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
+
+ if (integer_zerop (TREE_OPERAND (exp, 1)))
+ do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
+ else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
+ || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
+ do_jump
+ (fold
+ (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
+ fold (build (NE_EXPR, TREE_TYPE (exp),
+ fold (build1 (REALPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 0))),
+ fold (build1 (REALPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 1))))),
+ fold (build (NE_EXPR, TREE_TYPE (exp),
+ fold (build1 (IMAGPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 0))),
+ fold (build1 (IMAGPART_EXPR,
+ TREE_TYPE (inner_type),
+ TREE_OPERAND (exp, 1))))))),
+ if_false_label, if_true_label);
+ else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
+ && !can_compare_p (TYPE_MODE (inner_type)))
+ do_jump_by_parts_equality (exp, if_true_label, if_false_label);
+ else
+ comparison = compare (exp, NE, NE);
+ break;
+ }
- if (subtarget == 0 || GET_CODE (subtarget) != REG
- || GET_MODE (subtarget) != operand_mode
- || ! safe_from_p (subtarget, inner))
- subtarget = 0;
+ case LT_EXPR:
+ if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ == MODE_INT)
+ && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
+ else
+ comparison = compare (exp, LT, LTU);
+ break;
- op0 = expand_expr (inner, subtarget, VOIDmode, 0);
+ case LE_EXPR:
+ if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ == MODE_INT)
+ && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
+ else
+ comparison = compare (exp, LE, LEU);
+ break;
- if (bitnum != 0)
- op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
- size_int (bitnum), subtarget, ops_unsignedp);
+ case GT_EXPR:
+ if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ == MODE_INT)
+ && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
+ else
+ comparison = compare (exp, GT, GTU);
+ break;
- if (GET_MODE (op0) != mode)
- op0 = convert_to_mode (mode, op0, ops_unsignedp);
+ case GE_EXPR:
+ if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ == MODE_INT)
+ && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
+ else
+ comparison = compare (exp, GE, GEU);
+ break;
- if ((code == EQ && ! invert) || (code == NE && invert))
- op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
- ops_unsignedp, OPTAB_LIB_WIDEN);
+ default:
+ normal:
+ temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
+#if 0
+ /* This is not needed any more and causes poor code since it causes
+ comparisons and tests from non-SI objects to have different code
+ sequences. */
+ /* Copy to register to avoid generating bad insns by cse
+ from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
+ if (!cse_not_expected && GET_CODE (temp) == MEM)
+ temp = copy_to_reg (temp);
+#endif
+ do_pending_stack_adjust ();
+ if (GET_CODE (temp) == CONST_INT)
+ comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
+ else if (GET_CODE (temp) == LABEL_REF)
+ comparison = const_true_rtx;
+ else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
+ && !can_compare_p (GET_MODE (temp)))
+ /* Note swapping the labels gives us not-equal. */
+ do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
+ else if (GET_MODE (temp) != VOIDmode)
+ comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
+ NE, TREE_UNSIGNED (TREE_TYPE (exp)),
+ GET_MODE (temp), NULL_RTX, 0);
+ else
+ abort ();
+ }
- /* Put the AND last so it can combine with more things. */
- if (bitnum != TYPE_PRECISION (type) - 1)
- op0 = expand_and (op0, const1_rtx, subtarget);
+ /* Do any postincrements in the expression that was tested. */
+ emit_queue ();
- return op0;
- }
+ /* If COMPARISON is nonzero here, it is an rtx that can be substituted
+ straight into a conditional jump instruction as the jump condition.
+ Otherwise, all the work has been done already. */
- /* Now see if we are likely to be able to do this. Return if not. */
- if (! can_compare_p (operand_mode))
- return 0;
- icode = setcc_gen_code[(int) code];
- if (icode == CODE_FOR_nothing
- || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
+ if (comparison == const_true_rtx)
{
- /* We can only do this if it is one of the special cases that
- can be handled without an scc insn. */
- if ((code == LT && integer_zerop (arg1))
- || (! only_cheap && code == GE && integer_zerop (arg1)))
- ;
- else if (BRANCH_COST >= 0
- && ! only_cheap && (code == NE || code == EQ)
- && TREE_CODE (type) != REAL_TYPE
- && ((abs_optab->handlers[(int) operand_mode].insn_code
- != CODE_FOR_nothing)
- || (ffs_optab->handlers[(int) operand_mode].insn_code
- != CODE_FOR_nothing)))
- ;
- else
- return 0;
+ if (if_true_label)
+ emit_jump (if_true_label);
}
-
- preexpand_calls (exp);
- if (subtarget == 0 || GET_CODE (subtarget) != REG
- || GET_MODE (subtarget) != operand_mode
- || ! safe_from_p (subtarget, arg1))
- subtarget = 0;
-
- op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
- op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
+ else if (comparison == const0_rtx)
+ {
+ if (if_false_label)
+ emit_jump (if_false_label);
+ }
+ else if (comparison)
+ do_jump_for_compare (comparison, if_false_label, if_true_label);
- if (target == 0)
- target = gen_reg_rtx (mode);
+ if (drop_through_label)
+ {
+ /* If do_jump produces code that might be jumped around,
+ do any stack adjusts from that code, before the place
+ where control merges in. */
+ do_pending_stack_adjust ();
+ emit_label (drop_through_label);
+ }
+}
+\f
+/* Given a comparison expression EXP for values too wide to be compared
+ with one insn, test the comparison and jump to the appropriate label.
+ The code of EXP is ignored; we always test GT if SWAP is 0,
+ and LT if SWAP is 1. */
- /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
- because, if the emit_store_flag does anything it will succeed and
- OP0 and OP1 will not be used subsequently. */
+static void
+do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
+ tree exp;
+ int swap;
+ rtx if_false_label, if_true_label;
+{
+ rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
+ rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
+ enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
+ rtx drop_through_label = 0;
+ int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ int i;
- result = emit_store_flag (target, code,
- queued_subexp_p (op0) ? copy_rtx (op0) : op0,
- queued_subexp_p (op1) ? copy_rtx (op1) : op1,
- operand_mode, unsignedp, 1);
+ if (! if_true_label || ! if_false_label)
+ drop_through_label = gen_label_rtx ();
+ if (! if_true_label)
+ if_true_label = drop_through_label;
+ if (! if_false_label)
+ if_false_label = drop_through_label;
- if (result)
+ /* Compare a word at a time, high order first. */
+ for (i = 0; i < nwords; i++)
{
- if (invert)
- result = expand_binop (mode, xor_optab, result, const1_rtx,
- result, 0, OPTAB_LIB_WIDEN);
- return result;
- }
-
- /* If this failed, we have to do this with set/compare/jump/set code. */
- if (target == 0 || GET_CODE (target) != REG
- || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
- target = gen_reg_rtx (GET_MODE (target));
+ rtx comp;
+ rtx op0_word, op1_word;
- emit_move_insn (target, invert ? const0_rtx : const1_rtx);
- result = compare_from_rtx (op0, op1, code, unsignedp,
- operand_mode, NULL_RTX, 0);
- if (GET_CODE (result) == CONST_INT)
- return (((result == const0_rtx && ! invert)
- || (result != const0_rtx && invert))
- ? const0_rtx : const1_rtx);
+ if (WORDS_BIG_ENDIAN)
+ {
+ op0_word = operand_subword_force (op0, i, mode);
+ op1_word = operand_subword_force (op1, i, mode);
+ }
+ else
+ {
+ op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
+ op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
+ }
- label = gen_label_rtx ();
- if (bcc_gen_fctn[(int) code] == 0)
- abort ();
+ /* All but high-order word must be compared as unsigned. */
+ comp = compare_from_rtx (op0_word, op1_word,
+ (unsignedp || i > 0) ? GTU : GT,
+ unsignedp, word_mode, NULL_RTX, 0);
+ if (comp == const_true_rtx)
+ emit_jump (if_true_label);
+ else if (comp != const0_rtx)
+ do_jump_for_compare (comp, NULL_RTX, if_true_label);
- emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
- emit_move_insn (target, invert ? const1_rtx : const0_rtx);
- emit_label (label);
+ /* Consider lower words only if these are equal. */
+ comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
+ NULL_RTX, 0);
+ if (comp == const_true_rtx)
+ emit_jump (if_false_label);
+ else if (comp != const0_rtx)
+ do_jump_for_compare (comp, NULL_RTX, if_false_label);
+ }
- return target;
+ if (if_false_label)
+ emit_jump (if_false_label);
+ if (drop_through_label)
+ emit_label (drop_through_label);
}
-\f
-/* Generate a tablejump instruction (used for switch statements). */
-#ifdef HAVE_tablejump
+/* Compare OP0 with OP1, word at a time, in mode MODE.
+ UNSIGNEDP says to do unsigned comparison.
+ Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
+
+void
+do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
+ enum machine_mode mode;
+ int unsignedp;
+ rtx op0, op1;
+ rtx if_false_label, if_true_label;
+{
+ int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
+ rtx drop_through_label = 0;
+ int i;
+
+ if (! if_true_label || ! if_false_label)
+ drop_through_label = gen_label_rtx ();
+ if (! if_true_label)
+ if_true_label = drop_through_label;
+ if (! if_false_label)
+ if_false_label = drop_through_label;
-/* INDEX is the value being switched on, with the lowest value
- in the table already subtracted.
- MODE is its expected mode (needed if INDEX is constant).
- RANGE is the length of the jump table.
- TABLE_LABEL is a CODE_LABEL rtx for the table itself.
+ /* Compare a word at a time, high order first. */
+ for (i = 0; i < nwords; i++)
+ {
+ rtx comp;
+ rtx op0_word, op1_word;
- DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
- index value is out of range. */
+ if (WORDS_BIG_ENDIAN)
+ {
+ op0_word = operand_subword_force (op0, i, mode);
+ op1_word = operand_subword_force (op1, i, mode);
+ }
+ else
+ {
+ op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
+ op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
+ }
-void
-do_tablejump (index, mode, range, table_label, default_label)
- rtx index, range, table_label, default_label;
- enum machine_mode mode;
-{
- register rtx temp, vector;
+ /* All but high-order word must be compared as unsigned. */
+ comp = compare_from_rtx (op0_word, op1_word,
+ (unsignedp || i > 0) ? GTU : GT,
+ unsignedp, word_mode, NULL_RTX, 0);
+ if (comp == const_true_rtx)
+ emit_jump (if_true_label);
+ else if (comp != const0_rtx)
+ do_jump_for_compare (comp, NULL_RTX, if_true_label);
- /* Do an unsigned comparison (in the proper mode) between the index
- expression and the value which represents the length of the range.
- Since we just finished subtracting the lower bound of the range
- from the index expression, this comparison allows us to simultaneously
- check that the original index expression value is both greater than
- or equal to the minimum value of the range and less than or equal to
- the maximum value of the range. */
+ /* Consider lower words only if these are equal. */
+ comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
+ NULL_RTX, 0);
+ if (comp == const_true_rtx)
+ emit_jump (if_false_label);
+ else if (comp != const0_rtx)
+ do_jump_for_compare (comp, NULL_RTX, if_false_label);
+ }
- emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
- emit_jump_insn (gen_bgtu (default_label));
+ if (if_false_label)
+ emit_jump (if_false_label);
+ if (drop_through_label)
+ emit_label (drop_through_label);
+}
- /* If index is in range, it must fit in Pmode.
- Convert to Pmode so we can index with it. */
- if (mode != Pmode)
- index = convert_to_mode (Pmode, index, 1);
+/* Given an EQ_EXPR expression EXP for values too wide to be compared
+ with one insn, test the comparison and jump to the appropriate label. */
- /* Don't let a MEM slip thru, because then INDEX that comes
- out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
- and break_out_memory_refs will go to work on it and mess it up. */
-#ifdef PIC_CASE_VECTOR_ADDRESS
- if (flag_pic && GET_CODE (index) != REG)
- index = copy_to_mode_reg (Pmode, index);
-#endif
+static void
+do_jump_by_parts_equality (exp, if_false_label, if_true_label)
+ tree exp;
+ rtx if_false_label, if_true_label;
+{
+ rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
+ rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
+ enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
+ int i;
+ rtx drop_through_label = 0;
- /* If flag_force_addr were to affect this address
- it could interfere with the tricky assumptions made
- about addresses that contain label-refs,
- which may be valid only very near the tablejump itself. */
- /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
- GET_MODE_SIZE, because this indicates how large insns are. The other
- uses should all be Pmode, because they are addresses. This code
- could fail if addresses and insns are not the same size. */
- index = gen_rtx (PLUS, Pmode,
- gen_rtx (MULT, Pmode, index,
- GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
- gen_rtx (LABEL_REF, Pmode, table_label));
-#ifdef PIC_CASE_VECTOR_ADDRESS
- if (flag_pic)
- index = PIC_CASE_VECTOR_ADDRESS (index);
- else
-#endif
- index = memory_address_noforce (CASE_VECTOR_MODE, index);
- temp = gen_reg_rtx (CASE_VECTOR_MODE);
- vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
- RTX_UNCHANGING_P (vector) = 1;
- convert_move (temp, vector, 0);
+ if (! if_false_label)
+ drop_through_label = if_false_label = gen_label_rtx ();
- emit_jump_insn (gen_tablejump (temp, table_label));
+ for (i = 0; i < nwords; i++)
+ {
+ rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
+ operand_subword_force (op1, i, mode),
+ EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
+ word_mode, NULL_RTX, 0);
+ if (comp == const_true_rtx)
+ emit_jump (if_false_label);
+ else if (comp != const0_rtx)
+ do_jump_for_compare (comp, if_false_label, NULL_RTX);
+ }
-#ifndef CASE_VECTOR_PC_RELATIVE
- /* If we are generating PIC code or if the table is PC-relative, the
- table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
- if (! flag_pic)
- emit_barrier ();
-#endif
+ if (if_true_label)
+ emit_jump (if_true_label);
+ if (drop_through_label)
+ emit_label (drop_through_label);
}
+\f
+/* Jump according to whether OP0 is 0.
+ We assume that OP0 has an integer mode that is too wide
+ for the available compare insns. */
-#endif /* HAVE_tablejump */
+static void
+do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
+ rtx op0;
+ rtx if_false_label, if_true_label;
+{
+ int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
+ rtx part;
+ int i;
+ rtx drop_through_label = 0;
+ /* The fastest way of doing this comparison on almost any machine is to
+ "or" all the words and compare the result. If all have to be loaded
+ from memory and this is a very wide item, it's possible this may
+ be slower, but that's highly unlikely. */
-/* Emit a suitable bytecode to load a value from memory, assuming a pointer
- to that value is on the top of the stack. The resulting type is TYPE, and
- the source declaration is DECL. */
+ part = gen_reg_rtx (word_mode);
+ emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
+ for (i = 1; i < nwords && part != 0; i++)
+ part = expand_binop (word_mode, ior_optab, part,
+ operand_subword_force (op0, i, GET_MODE (op0)),
+ part, 1, OPTAB_WIDEN);
-void
-bc_load_memory (type, decl)
- tree type, decl;
-{
- enum bytecode_opcode opcode;
-
-
- /* Bit fields are special. We only know about signed and
- unsigned ints, and enums. The latter are treated as
- signed integers. */
-
- if (DECL_BIT_FIELD (decl))
- if (TREE_CODE (type) == ENUMERAL_TYPE
- || TREE_CODE (type) == INTEGER_TYPE)
- opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
- else
- abort ();
- else
- /* See corresponding comment in bc_store_memory(). */
- if (TYPE_MODE (type) == BLKmode
- || TYPE_MODE (type) == VOIDmode)
- return;
- else
- opcode = mode_to_load_map [(int) TYPE_MODE (type)];
+ if (part != 0)
+ {
+ rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
+ NULL_RTX, 0);
- if (opcode == neverneverland)
- abort ();
-
- bc_emit_bytecode (opcode);
-
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
-#endif
-}
+ if (comp == const_true_rtx)
+ emit_jump (if_false_label);
+ else if (comp == const0_rtx)
+ emit_jump (if_true_label);
+ else
+ do_jump_for_compare (comp, if_false_label, if_true_label);
+ return;
+ }
-/* Store the contents of the second stack slot to the address in the
- top stack slot. DECL is the declaration of the destination and is used
- to determine whether we're dealing with a bitfield. */
+ /* If we couldn't do the "or" simply, do this with a series of compares. */
+ if (! if_false_label)
+ drop_through_label = if_false_label = gen_label_rtx ();
-void
-bc_store_memory (type, decl)
- tree type, decl;
-{
- enum bytecode_opcode opcode;
-
-
- if (DECL_BIT_FIELD (decl))
+ for (i = 0; i < nwords; i++)
{
- if (TREE_CODE (type) == ENUMERAL_TYPE
- || TREE_CODE (type) == INTEGER_TYPE)
- opcode = sstoreBI;
- else
- abort ();
+ rtx comp = compare_from_rtx (operand_subword_force (op0, i,
+ GET_MODE (op0)),
+ const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
+ if (comp == const_true_rtx)
+ emit_jump (if_false_label);
+ else if (comp != const0_rtx)
+ do_jump_for_compare (comp, if_false_label, NULL_RTX);
}
- else
- if (TYPE_MODE (type) == BLKmode)
- {
- /* Copy structure. This expands to a block copy instruction, storeBLK.
- In addition to the arguments expected by the other store instructions,
- it also expects a type size (SImode) on top of the stack, which is the
- structure size in size units (usually bytes). The two first arguments
- are already on the stack; so we just put the size on level 1. For some
- other languages, the size may be variable, this is why we don't encode
- it as a storeBLK literal, but rather treat it as a full-fledged expression. */
-
- bc_expand_expr (TYPE_SIZE (type));
- opcode = storeBLK;
- }
- else
- opcode = mode_to_store_map [(int) TYPE_MODE (type)];
- if (opcode == neverneverland)
- abort ();
+ if (if_true_label)
+ emit_jump (if_true_label);
- bc_emit_bytecode (opcode);
-
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
-#endif
+ if (drop_through_label)
+ emit_label (drop_through_label);
}
+/* Given a comparison expression in rtl form, output conditional branches to
+ IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
-/* Allocate local stack space sufficient to hold a value of the given
- SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
- integral power of 2. A special case is locals of type VOID, which
- have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
- remapped into the corresponding attribute of SI. */
-
-rtx
-bc_allocate_local (size, alignment)
- int size, alignment;
+static void
+do_jump_for_compare (comparison, if_false_label, if_true_label)
+ rtx comparison, if_false_label, if_true_label;
{
- rtx retval;
- int byte_alignment;
-
- if (size < 0)
- abort ();
-
- /* Normalize size and alignment */
- if (!size)
- size = UNITS_PER_WORD;
-
- if (alignment < BITS_PER_UNIT)
- byte_alignment = 1 << (INT_ALIGN - 1);
- else
- /* Align */
- byte_alignment = alignment / BITS_PER_UNIT;
-
- if (local_vars_size & (byte_alignment - 1))
- local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
+ if (if_true_label)
+ {
+ if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
+ emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
+ else
+ abort ();
- retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
- local_vars_size += size;
+ if (if_false_label)
+ emit_jump (if_false_label);
+ }
+ else if (if_false_label)
+ {
+ rtx insn;
+ rtx prev = get_last_insn ();
+ rtx branch = 0;
- return retval;
-}
+ /* Output the branch with the opposite condition. Then try to invert
+ what is generated. If more than one insn is a branch, or if the
+ branch is not the last insn written, abort. If we can't invert
+ the branch, make a true label, redirect this jump to that,
+ emit a jump to the false label and define the true label. */
+ if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
+ emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
+ else
+ abort ();
-/* Allocate variable-sized local array. Variable-sized arrays are
- actually pointers to the address in memory where they are stored. */
+ /* Here we get the first insn that was just emitted. It used to be the
+ case that, on some machines, emitting the branch would discard
+ the previous compare insn and emit a replacement. This isn't
+ done anymore, but abort if we see that PREV is deleted. */
-rtx
-bc_allocate_variable_array (size)
- tree size;
-{
- rtx retval;
- const int ptralign = (1 << (PTR_ALIGN - 1));
+ if (prev == 0)
+ insn = get_insns ();
+ else if (INSN_DELETED_P (prev))
+ abort ();
+ else
+ insn = NEXT_INSN (prev);
- /* Align pointer */
- if (local_vars_size & ptralign)
- local_vars_size += ptralign - (local_vars_size & ptralign);
+ for (; insn; insn = NEXT_INSN (insn))
+ if (GET_CODE (insn) == JUMP_INSN)
+ {
+ if (branch)
+ abort ();
+ branch = insn;
+ }
- /* Note down local space needed: pointer to block; also return
- dummy rtx */
+ if (branch != get_last_insn ())
+ abort ();
- retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
- local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
- return retval;
+ JUMP_LABEL (branch) = if_false_label;
+ if (! invert_jump (branch, if_false_label))
+ {
+ if_true_label = gen_label_rtx ();
+ redirect_jump (branch, if_true_label);
+ emit_jump (if_false_label);
+ emit_label (if_true_label);
+ }
+ }
}
+\f
+/* Generate code for a comparison expression EXP
+ (including code to compute the values to be compared)
+ and set (CC0) according to the result.
+ SIGNED_CODE should be the rtx operation for this comparison for
+ signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
+ We force a stack adjustment unless there are currently
+ things pushed on the stack that aren't yet used. */
-/* Push the machine address for the given external variable offset. */
-void
-bc_load_externaddr (externaddr)
- rtx externaddr;
+static rtx
+compare (exp, signed_code, unsigned_code)
+ register tree exp;
+ enum rtx_code signed_code, unsigned_code;
{
- bc_emit_bytecode (constP);
- bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
- BYTECODE_BC_LABEL (externaddr)->offset);
-
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
-#endif
-}
+ register rtx op0
+ = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
+ register rtx op1
+ = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
+ register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
+ register enum machine_mode mode = TYPE_MODE (type);
+ int unsignedp = TREE_UNSIGNED (type);
+ enum rtx_code code = unsignedp ? unsigned_code : signed_code;
+#ifdef HAVE_canonicalize_funcptr_for_compare
+ /* If function pointers need to be "canonicalized" before they can
+ be reliably compared, then canonicalize them. */
+ if (HAVE_canonicalize_funcptr_for_compare
+ && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
+ && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ == FUNCTION_TYPE))
+ {
+ rtx new_op0 = gen_reg_rtx (mode);
-/* Like above, but expects an IDENTIFIER. */
-void
-bc_load_externaddr_id (id, offset)
- tree id;
- int offset;
-{
- if (!IDENTIFIER_POINTER (id))
- abort ();
+ emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
+ op0 = new_op0;
+ }
- bc_emit_bytecode (constP);
- bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
+ if (HAVE_canonicalize_funcptr_for_compare
+ && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
+ && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
+ == FUNCTION_TYPE))
+ {
+ rtx new_op1 = gen_reg_rtx (mode);
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
+ emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
+ op1 = new_op1;
+ }
#endif
-}
-
-/* Push the machine address for the given local variable offset. */
-void
-bc_load_localaddr (localaddr)
- rtx localaddr;
-{
- bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
+ return compare_from_rtx (op0, op1, code, unsignedp, mode,
+ ((mode == BLKmode)
+ ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
+ TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
+/* Like compare but expects the values to compare as two rtx's.
+ The decision as to signed or unsigned comparison must be made by the caller.
-/* Push the machine address for the given parameter offset.
- NOTE: offset is in bits. */
-void
-bc_load_parmaddr (parmaddr)
- rtx parmaddr;
-{
- bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
- / BITS_PER_UNIT));
-}
+ If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
+ compared.
+ If ALIGN is non-zero, it is the alignment of this type; if zero, the
+ size of MODE should be used. */
-/* Convert a[i] into *(a + i). */
-tree
-bc_canonicalize_array_ref (exp)
- tree exp;
+rtx
+compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
+ register rtx op0, op1;
+ enum rtx_code code;
+ int unsignedp;
+ enum machine_mode mode;
+ rtx size;
+ int align;
{
- tree type = TREE_TYPE (exp);
- tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
- TREE_OPERAND (exp, 0));
- tree index = TREE_OPERAND (exp, 1);
-
-
- /* Convert the integer argument to a type the same size as a pointer
- so the multiply won't overflow spuriously. */
-
- if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
- index = convert (type_for_size (POINTER_SIZE, 0), index);
-
- /* The array address isn't volatile even if the array is.
- (Of course this isn't terribly relevant since the bytecode
- translator treats nearly everything as volatile anyway.) */
- TREE_THIS_VOLATILE (array_adr) = 0;
-
- return build1 (INDIRECT_REF, type,
- fold (build (PLUS_EXPR,
- TYPE_POINTER_TO (type),
- array_adr,
- fold (build (MULT_EXPR,
- TYPE_POINTER_TO (type),
- index,
- size_in_bytes (type))))));
-}
-
-
-/* Load the address of the component referenced by the given
- COMPONENT_REF expression.
+ rtx tem;
- Returns innermost lvalue. */
+ /* If one operand is constant, make it the second one. Only do this
+ if the other operand is not constant as well. */
-tree
-bc_expand_component_address (exp)
- tree exp;
-{
- tree tem, chain;
- enum machine_mode mode;
- int bitpos = 0;
- HOST_WIDE_INT SIval;
+ if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
+ || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
+ {
+ tem = op0;
+ op0 = op1;
+ op1 = tem;
+ code = swap_condition (code);
+ }
+ if (flag_force_mem)
+ {
+ op0 = force_not_mem (op0);
+ op1 = force_not_mem (op1);
+ }
- tem = TREE_OPERAND (exp, 1);
- mode = DECL_MODE (tem);
+ do_pending_stack_adjust ();
+ if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
+ && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
+ return tem;
- /* Compute cumulative bit offset for nested component refs
- and array refs, and find the ultimate containing object. */
+#if 0
+ /* There's no need to do this now that combine.c can eliminate lots of
+ sign extensions. This can be less efficient in certain cases on other
+ machines. */
- for (tem = exp;; tem = TREE_OPERAND (tem, 0))
+ /* If this is a signed equality comparison, we can do it as an
+ unsigned comparison since zero-extension is cheaper than sign
+ extension and comparisons with zero are done as unsigned. This is
+ the case even on machines that can do fast sign extension, since
+ zero-extension is easier to combine with other operations than
+ sign-extension is. If we are comparing against a constant, we must
+ convert it to what it would look like unsigned. */
+ if ((code == EQ || code == NE) && ! unsignedp
+ && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
{
- if (TREE_CODE (tem) == COMPONENT_REF)
- bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
- else
- if (TREE_CODE (tem) == ARRAY_REF
- && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
- && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
-
- bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
- * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
- /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
- else
- break;
+ if (GET_CODE (op1) == CONST_INT
+ && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
+ op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
+ unsignedp = 1;
}
+#endif
+
+ emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
- bc_expand_expr (tem);
-
-
- /* For bitfields also push their offset and size */
- if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
- bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
- else
- if (SIval = bitpos / BITS_PER_UNIT)
- bc_emit_instruction (addconstPSI, SIval);
-
- return (TREE_OPERAND (exp, 1));
+ return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
+\f
+/* Generate code to calculate EXP using a store-flag instruction
+ and return an rtx for the result. EXP is either a comparison
+ or a TRUTH_NOT_EXPR whose operand is a comparison.
+ If TARGET is nonzero, store the result there if convenient.
-/* Emit code to push two SI constants */
-void
-bc_push_offset_and_size (offset, size)
- HOST_WIDE_INT offset, size;
-{
- bc_emit_instruction (constSI, offset);
- bc_emit_instruction (constSI, size);
-}
-
+ If ONLY_CHEAP is non-zero, only do this if it is likely to be very
+ cheap.
-/* Emit byte code to push the address of the given lvalue expression to
- the stack. If it's a bit field, we also push offset and size info.
+ Return zero if there is no suitable set-flag instruction
+ available on this machine.
- Returns innermost component, which allows us to determine not only
- its type, but also whether it's a bitfield. */
+ Once expand_expr has been called on the arguments of the comparison,
+ we are committed to doing the store flag, since it is not safe to
+ re-evaluate the expression. We emit the store-flag insn by calling
+ emit_store_flag, but only expand the arguments if we have a reason
+ to believe that emit_store_flag will be successful. If we think that
+ it will, but it isn't, we have to simulate the store-flag with a
+ set/jump/set sequence. */
-tree
-bc_expand_address (exp)
+static rtx
+do_store_flag (exp, target, mode, only_cheap)
tree exp;
+ rtx target;
+ enum machine_mode mode;
+ int only_cheap;
{
- /* Safeguard */
- if (!exp || TREE_CODE (exp) == ERROR_MARK)
- return (exp);
-
-
- switch (TREE_CODE (exp))
- {
- case ARRAY_REF:
-
- return (bc_expand_address (bc_canonicalize_array_ref (exp)));
-
- case COMPONENT_REF:
-
- return (bc_expand_component_address (exp));
+ enum rtx_code code;
+ tree arg0, arg1, type;
+ tree tem;
+ enum machine_mode operand_mode;
+ int invert = 0;
+ int unsignedp;
+ rtx op0, op1;
+ enum insn_code icode;
+ rtx subtarget = target;
+ rtx result, label;
- case INDIRECT_REF:
+ /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
+ result at the end. We can't simply invert the test since it would
+ have already been inverted if it were valid. This case occurs for
+ some floating-point comparisons. */
- bc_expand_expr (TREE_OPERAND (exp, 0));
+ if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
+ invert = 1, exp = TREE_OPERAND (exp, 0);
- /* For variable-sized types: retrieve pointer. Sometimes the
- TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
- also make sure we have an operand, just in case... */
+ arg0 = TREE_OPERAND (exp, 0);
+ arg1 = TREE_OPERAND (exp, 1);
+ type = TREE_TYPE (arg0);
+ operand_mode = TYPE_MODE (type);
+ unsignedp = TREE_UNSIGNED (type);
- if (TREE_OPERAND (exp, 0)
- && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
- && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
- bc_emit_instruction (loadP);
+ /* We won't bother with BLKmode store-flag operations because it would mean
+ passing a lot of information to emit_store_flag. */
+ if (operand_mode == BLKmode)
+ return 0;
- /* If packed, also return offset and size */
- if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
-
- bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
- TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
+ /* We won't bother with store-flag operations involving function pointers
+ when function pointers must be canonicalized before comparisons. */
+#ifdef HAVE_canonicalize_funcptr_for_compare
+ if (HAVE_canonicalize_funcptr_for_compare
+ && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
+ && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ == FUNCTION_TYPE))
+ || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
+ && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
+ == FUNCTION_TYPE))))
+ return 0;
+#endif
- return (TREE_OPERAND (exp, 0));
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
- case FUNCTION_DECL:
+ /* Get the rtx comparison code to use. We know that EXP is a comparison
+ operation of some type. Some comparisons against 1 and -1 can be
+ converted to comparisons with zero. Do so here so that the tests
+ below will be aware that we have a comparison with zero. These
+ tests will not catch constants in the first operand, but constants
+ are rarely passed as the first operand. */
- bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
- BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
+ switch (TREE_CODE (exp))
+ {
+ case EQ_EXPR:
+ code = EQ;
break;
-
- case PARM_DECL:
-
- bc_load_parmaddr (DECL_RTL (exp));
-
- /* For variable-sized types: retrieve pointer */
- if (TYPE_SIZE (TREE_TYPE (exp))
- && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
- bc_emit_instruction (loadP);
-
- /* If packed, also return offset and size */
- if (DECL_BIT_FIELD (exp))
- bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
- TREE_INT_CST_LOW (DECL_SIZE (exp)));
-
+ case NE_EXPR:
+ code = NE;
break;
-
- case RESULT_DECL:
-
- bc_emit_instruction (returnP);
+ case LT_EXPR:
+ if (integer_onep (arg1))
+ arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
+ else
+ code = unsignedp ? LTU : LT;
break;
-
- case VAR_DECL:
-
-#if 0
- if (BYTECODE_LABEL (DECL_RTL (exp)))
- bc_load_externaddr (DECL_RTL (exp));
-#endif
-
- if (DECL_EXTERNAL (exp))
- bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
- (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
+ case LE_EXPR:
+ if (! unsignedp && integer_all_onesp (arg1))
+ arg1 = integer_zero_node, code = LT;
else
- bc_load_localaddr (DECL_RTL (exp));
-
- /* For variable-sized types: retrieve pointer */
- if (TYPE_SIZE (TREE_TYPE (exp))
- && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
- bc_emit_instruction (loadP);
-
- /* If packed, also return offset and size */
- if (DECL_BIT_FIELD (exp))
- bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
- TREE_INT_CST_LOW (DECL_SIZE (exp)));
-
+ code = unsignedp ? LEU : LE;
break;
-
- case STRING_CST:
- {
- rtx r;
-
- bc_emit_bytecode (constP);
- r = output_constant_def (exp);
- bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
-
-#ifdef DEBUG_PRINT_CODE
- fputc ('\n', stderr);
-#endif
- }
+ case GT_EXPR:
+ if (! unsignedp && integer_all_onesp (arg1))
+ arg1 = integer_zero_node, code = GE;
+ else
+ code = unsignedp ? GTU : GT;
break;
-
- default:
-
- abort();
+ case GE_EXPR:
+ if (integer_onep (arg1))
+ arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
+ else
+ code = unsignedp ? GEU : GE;
break;
+ default:
+ abort ();
}
- /* Most lvalues don't have components. */
- return (exp);
-}
-
-
-/* Emit a type code to be used by the runtime support in handling
- parameter passing. The type code consists of the machine mode
- plus the minimal alignment shifted left 8 bits. */
+ /* Put a constant second. */
+ if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
+ {
+ tem = arg0; arg0 = arg1; arg1 = tem;
+ code = swap_condition (code);
+ }
-tree
-bc_runtime_type_code (type)
- tree type;
-{
- int val;
+ /* If this is an equality or inequality test of a single bit, we can
+ do this by shifting the bit being tested to the low-order bit and
+ masking the result with the constant 1. If the condition was EQ,
+ we xor it with 1. This does not require an scc insn and is faster
+ than an scc insn even if we have it. */
- switch (TREE_CODE (type))
+ if ((code == NE || code == EQ)
+ && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
+ && integer_pow2p (TREE_OPERAND (arg0, 1)))
{
- case VOID_TYPE:
- case INTEGER_TYPE:
- case REAL_TYPE:
- case COMPLEX_TYPE:
- case ENUMERAL_TYPE:
- case POINTER_TYPE:
- case RECORD_TYPE:
+ tree inner = TREE_OPERAND (arg0, 0);
+ int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
+ int ops_unsignedp;
- val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
- break;
+ /* If INNER is a right shift of a constant and it plus BITNUM does
+ not overflow, adjust BITNUM and INNER. */
- case ERROR_MARK:
+ if (TREE_CODE (inner) == RSHIFT_EXPR
+ && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
+ && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
+ && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
+ < TYPE_PRECISION (type)))
+ {
+ bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
+ inner = TREE_OPERAND (inner, 0);
+ }
- val = 0;
- break;
+ /* If we are going to be able to omit the AND below, we must do our
+ operations as unsigned. If we must use the AND, we have a choice.
+ Normally unsigned is faster, but for some machines signed is. */
+ ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
+#ifdef LOAD_EXTEND_OP
+ : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
+#else
+ : 1
+#endif
+ );
- default:
+ if (subtarget == 0 || GET_CODE (subtarget) != REG
+ || GET_MODE (subtarget) != operand_mode
+ || ! safe_from_p (subtarget, inner))
+ subtarget = 0;
- abort ();
- }
- return build_int_2 (val, 0);
-}
+ op0 = expand_expr (inner, subtarget, VOIDmode, 0);
+ if (bitnum != 0)
+ op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
+ size_int (bitnum), subtarget, ops_unsignedp);
-/* Generate constructor label */
-char *
-bc_gen_constr_label ()
-{
- static int label_counter;
- static char label[20];
+ if (GET_MODE (op0) != mode)
+ op0 = convert_to_mode (mode, op0, ops_unsignedp);
- sprintf (label, "*LR%d", label_counter++);
+ if ((code == EQ && ! invert) || (code == NE && invert))
+ op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
+ ops_unsignedp, OPTAB_LIB_WIDEN);
- return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
-}
+ /* Put the AND last so it can combine with more things. */
+ if (bitnum != TYPE_PRECISION (type) - 1)
+ op0 = expand_and (op0, const1_rtx, subtarget);
+ return op0;
+ }
-/* Evaluate constructor CONSTR and return pointer to it on level one. We
- expand the constructor data as static data, and push a pointer to it.
- The pointer is put in the pointer table and is retrieved by a constP
- bytecode instruction. We then loop and store each constructor member in
- the corresponding component. Finally, we return the original pointer on
- the stack. */
+ /* Now see if we are likely to be able to do this. Return if not. */
+ if (! can_compare_p (operand_mode))
+ return 0;
+ icode = setcc_gen_code[(int) code];
+ if (icode == CODE_FOR_nothing
+ || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
+ {
+ /* We can only do this if it is one of the special cases that
+ can be handled without an scc insn. */
+ if ((code == LT && integer_zerop (arg1))
+ || (! only_cheap && code == GE && integer_zerop (arg1)))
+ ;
+ else if (BRANCH_COST >= 0
+ && ! only_cheap && (code == NE || code == EQ)
+ && TREE_CODE (type) != REAL_TYPE
+ && ((abs_optab->handlers[(int) operand_mode].insn_code
+ != CODE_FOR_nothing)
+ || (ffs_optab->handlers[(int) operand_mode].insn_code
+ != CODE_FOR_nothing)))
+ ;
+ else
+ return 0;
+ }
+
+ preexpand_calls (exp);
+ if (subtarget == 0 || GET_CODE (subtarget) != REG
+ || GET_MODE (subtarget) != operand_mode
+ || ! safe_from_p (subtarget, arg1))
+ subtarget = 0;
-void
-bc_expand_constructor (constr)
- tree constr;
-{
- char *l;
- HOST_WIDE_INT ptroffs;
- rtx constr_rtx;
+ op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
+ op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
-
- /* Literal constructors are handled as constants, whereas
- non-literals are evaluated and stored element by element
- into the data segment. */
-
- /* Allocate space in proper segment and push pointer to space on stack.
- */
+ if (target == 0)
+ target = gen_reg_rtx (mode);
- l = bc_gen_constr_label ();
+ /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
+ because, if the emit_store_flag does anything it will succeed and
+ OP0 and OP1 will not be used subsequently. */
- if (TREE_CONSTANT (constr))
- {
- text_section ();
+ result = emit_store_flag (target, code,
+ queued_subexp_p (op0) ? copy_rtx (op0) : op0,
+ queued_subexp_p (op1) ? copy_rtx (op1) : op1,
+ operand_mode, unsignedp, 1);
- bc_emit_const_labeldef (l);
- bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
- }
- else
+ if (result)
{
- data_section ();
-
- bc_emit_data_labeldef (l);
- bc_output_data_constructor (constr);
+ if (invert)
+ result = expand_binop (mode, xor_optab, result, const1_rtx,
+ result, 0, OPTAB_LIB_WIDEN);
+ return result;
}
-
- /* Add reference to pointer table and recall pointer to stack;
- this code is common for both types of constructors: literals
- and non-literals. */
-
- ptroffs = bc_define_pointer (l);
- bc_emit_instruction (constP, ptroffs);
+ /* If this failed, we have to do this with set/compare/jump/set code. */
+ if (GET_CODE (target) != REG
+ || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
+ target = gen_reg_rtx (GET_MODE (target));
- /* This is all that has to be done if it's a literal. */
- if (TREE_CONSTANT (constr))
- return;
+ emit_move_insn (target, invert ? const0_rtx : const1_rtx);
+ result = compare_from_rtx (op0, op1, code, unsignedp,
+ operand_mode, NULL_RTX, 0);
+ if (GET_CODE (result) == CONST_INT)
+ return (((result == const0_rtx && ! invert)
+ || (result != const0_rtx && invert))
+ ? const0_rtx : const1_rtx);
+ label = gen_label_rtx ();
+ if (bcc_gen_fctn[(int) code] == 0)
+ abort ();
- /* At this point, we have the pointer to the structure on top of the stack.
- Generate sequences of store_memory calls for the constructor. */
-
- /* constructor type is structure */
- if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
- {
- register tree elt;
-
- /* If the constructor has fewer fields than the structure,
- clear the whole structure first. */
-
- if (list_length (CONSTRUCTOR_ELTS (constr))
- != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
- {
- bc_emit_instruction (duplicate);
- bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
- bc_emit_instruction (clearBLK);
- }
-
- /* Store each element of the constructor into the corresponding
- field of TARGET. */
-
- for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
- {
- register tree field = TREE_PURPOSE (elt);
- register enum machine_mode mode;
- int bitsize;
- int bitpos;
- int unsignedp;
-
- bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
- mode = DECL_MODE (field);
- unsignedp = TREE_UNSIGNED (field);
+ emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
+ emit_move_insn (target, invert ? const1_rtx : const0_rtx);
+ emit_label (label);
- bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
-
- bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
- /* The alignment of TARGET is
- at least what its type requires. */
- VOIDmode, 0,
- TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
- int_size_in_bytes (TREE_TYPE (constr)));
- }
- }
- else
-
- /* Constructor type is array */
- if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
- {
- register tree elt;
- register int i;
- tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
- int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
- int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
- tree elttype = TREE_TYPE (TREE_TYPE (constr));
-
- /* If the constructor has fewer fields than the structure,
- clear the whole structure first. */
-
- if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
- {
- bc_emit_instruction (duplicate);
- bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
- bc_emit_instruction (clearBLK);
- }
-
-
- /* Store each element of the constructor into the corresponding
- element of TARGET, determined by counting the elements. */
-
- for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
- elt;
- elt = TREE_CHAIN (elt), i++)
- {
- register enum machine_mode mode;
- int bitsize;
- int bitpos;
- int unsignedp;
-
- mode = TYPE_MODE (elttype);
- bitsize = GET_MODE_BITSIZE (mode);
- unsignedp = TREE_UNSIGNED (elttype);
-
- bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
- /* * TYPE_SIZE_UNIT (elttype) */ );
-
- bc_store_field (elt, bitsize, bitpos, mode,
- TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
- /* The alignment of TARGET is
- at least what its type requires. */
- VOIDmode, 0,
- TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
- int_size_in_bytes (TREE_TYPE (constr)));
- }
-
- }
+ return target;
}
+\f
+/* Generate a tablejump instruction (used for switch statements). */
+#ifdef HAVE_tablejump
-/* Store the value of EXP (an expression tree) into member FIELD of
- structure at address on stack, which has type TYPE, mode MODE and
- occupies BITSIZE bits, starting BITPOS bits from the beginning of the
- structure.
+/* INDEX is the value being switched on, with the lowest value
+ in the table already subtracted.
+ MODE is its expected mode (needed if INDEX is constant).
+ RANGE is the length of the jump table.
+ TABLE_LABEL is a CODE_LABEL rtx for the table itself.
- ALIGN is the alignment that TARGET is known to have, measured in bytes.
- TOTAL_SIZE is its size in bytes, or -1 if variable. */
+ DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
+ index value is out of range. */
void
-bc_store_field (field, bitsize, bitpos, mode, exp, type,
- value_mode, unsignedp, align, total_size)
- int bitsize, bitpos;
+do_tablejump (index, mode, range, table_label, default_label)
+ rtx index, range, table_label, default_label;
enum machine_mode mode;
- tree field, exp, type;
- enum machine_mode value_mode;
- int unsignedp;
- int align;
- int total_size;
{
+ register rtx temp, vector;
- /* Expand expression and copy pointer */
- bc_expand_expr (exp);
- bc_emit_instruction (over);
-
-
- /* If the component is a bit field, we cannot use addressing to access
- it. Use bit-field techniques to store in it. */
+ /* Do an unsigned comparison (in the proper mode) between the index
+ expression and the value which represents the length of the range.
+ Since we just finished subtracting the lower bound of the range
+ from the index expression, this comparison allows us to simultaneously
+ check that the original index expression value is both greater than
+ or equal to the minimum value of the range and less than or equal to
+ the maximum value of the range. */
- if (DECL_BIT_FIELD (field))
- {
- bc_store_bit_field (bitpos, bitsize, unsignedp);
- return;
- }
- else
- /* Not bit field */
- {
- HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
+ emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
+ emit_jump_insn (gen_bgtu (default_label));
- /* Advance pointer to the desired member */
- if (offset)
- bc_emit_instruction (addconstPSI, offset);
+ /* If index is in range, it must fit in Pmode.
+ Convert to Pmode so we can index with it. */
+ if (mode != Pmode)
+ index = convert_to_mode (Pmode, index, 1);
- /* Store */
- bc_store_memory (type, field);
- }
-}
+ /* Don't let a MEM slip thru, because then INDEX that comes
+ out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
+ and break_out_memory_refs will go to work on it and mess it up. */
+#ifdef PIC_CASE_VECTOR_ADDRESS
+ if (flag_pic && GET_CODE (index) != REG)
+ index = copy_to_mode_reg (Pmode, index);
+#endif
+ /* If flag_force_addr were to affect this address
+ it could interfere with the tricky assumptions made
+ about addresses that contain label-refs,
+ which may be valid only very near the tablejump itself. */
+ /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
+ GET_MODE_SIZE, because this indicates how large insns are. The other
+ uses should all be Pmode, because they are addresses. This code
+ could fail if addresses and insns are not the same size. */
+ index = gen_rtx_PLUS (Pmode,
+ gen_rtx_MULT (Pmode, index,
+ GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
+ gen_rtx_LABEL_REF (Pmode, table_label));
+#ifdef PIC_CASE_VECTOR_ADDRESS
+ if (flag_pic)
+ index = PIC_CASE_VECTOR_ADDRESS (index);
+ else
+#endif
+ index = memory_address_noforce (CASE_VECTOR_MODE, index);
+ temp = gen_reg_rtx (CASE_VECTOR_MODE);
+ vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
+ RTX_UNCHANGING_P (vector) = 1;
+ convert_move (temp, vector, 0);
-/* Store SI/SU in bitfield */
-void
-bc_store_bit_field (offset, size, unsignedp)
- int offset, size, unsignedp;
-{
- /* Push bitfield offset and size */
- bc_push_offset_and_size (offset, size);
+ emit_jump_insn (gen_tablejump (temp, table_label));
- /* Store */
- bc_emit_instruction (sstoreBI);
+ /* If we are generating PIC code or if the table is PC-relative, the
+ table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
+ if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
+ emit_barrier ();
}
-
-/* Load SI/SU from bitfield */
-void
-bc_load_bit_field (offset, size, unsignedp)
- int offset, size, unsignedp;
-{
- /* Push bitfield offset and size */
- bc_push_offset_and_size (offset, size);
-
- /* Load: sign-extend if signed, else zero-extend */
- bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
-}
-
-
-/* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
- (adjust stack pointer upwards), negative means add that number of
- levels (adjust the stack pointer downwards). Only positive values
- normally make sense. */
-
-void
-bc_adjust_stack (nlevels)
- int nlevels;
-{
- switch (nlevels)
- {
- case 0:
- break;
-
- case 2:
- bc_emit_instruction (drop);
-
- case 1:
- bc_emit_instruction (drop);
- break;
-
- default:
-
- bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
- stack_depth -= nlevels;
- }
-
-#if defined (VALIDATE_STACK_FOR_BC)
- VALIDATE_STACK_FOR_BC ();
-#endif
-}
+#endif /* HAVE_tablejump */