/* Expands front end tree to back end RTL for GCC
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
- 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
+ 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
This file is part of GCC.
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
-
-/* Assume that case vectors are not pc-relative. */
-#ifndef CASE_VECTOR_PC_RELATIVE
-#define CASE_VECTOR_PC_RELATIVE 0
-#endif
+#include "target.h"
+#include "regs.h"
\f
/* Functions and data structures for expanding case statements. */
conditional branch points. */
rtx last_unconditional_cleanup;
} GTY ((tag ("BLOCK_NESTING"))) block;
- /* For switch (C) or case (Pascal) statements,
- and also for dummies (see `expand_start_case_dummy'). */
+ /* For switch (C) or case (Pascal) statements. */
struct nesting_case
{
/* The insn after which the case dispatch should finally
record the expr's type and its RTL value here. */
tree x_last_expr_type;
rtx x_last_expr_value;
+ rtx x_last_expr_alt_rtl;
/* Nonzero if within a ({...}) grouping, in which case we must
always compute a value for each expr-stmt in case it is the last one. */
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
+#define last_expr_alt_rtl (cfun->stmt->x_last_expr_alt_rtl)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_locus (cfun->stmt->x_emit_locus)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
static int using_eh_for_cleanups_p = 0;
static int n_occurrences (int, const char *);
-static bool parse_input_constraint (const char **, int, int, int, int,
- const char * const *, bool *, bool *);
static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
static void expand_goto_internal (tree, rtx, rtx);
static int expand_fixup (tree, rtx, rtx);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (rtx);
static enum br_predictor return_prediction (rtx);
+static rtx shift_return_value (rtx);
static void expand_value_return (rtx);
static int tail_recursion_args (tree, tree);
static void expand_cleanups (tree, int, int);
void
init_stmt_for_function (void)
{
- cfun->stmt =ggc_alloc (sizeof (struct stmt_status));
-
- /* We are not currently within any block, conditional, loop or case. */
- block_stack = 0;
- stack_block_stack = 0;
- loop_stack = 0;
- case_stack = 0;
- cond_stack = 0;
- nesting_stack = 0;
- nesting_depth = 0;
-
- current_block_start_count = 0;
-
- /* No gotos have been expanded yet. */
- goto_fixup_chain = 0;
-
- /* We are not processing a ({...}) grouping. */
- expr_stmts_for_value = 0;
- clear_last_expr ();
+ /* ggc_alloc_cleared returns zero-initialized storage, so every field
+ of struct stmt_status starts out 0/NULL.  This subsumes all of the
+ explicit per-field initialization deleted above (including the
+ clear_last_expr call, which only reset last_expr_* to null).  */
+ cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
}
\f
/* Record the current file and line. Called from emit_line_note. */
{
rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
-#ifdef POINTERS_EXTEND_UNSIGNED
- if (GET_MODE (x) != Pmode)
- x = convert_memory_address (Pmode, x);
-#endif
+ x = convert_memory_address (Pmode, x);
emit_queue ();
{
cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
cfun->computed_goto_common_label = gen_label_rtx ();
- emit_label (cfun->computed_goto_common_label);
do_pending_stack_adjust ();
+ emit_label (cfun->computed_goto_common_label);
emit_indirect_jump (cfun->computed_goto_common_reg);
current_function_has_computed_jump = 1;
else
#endif
{
+ emit_insn (gen_rtx_CLOBBER (VOIDmode,
+ gen_rtx_MEM (BLKmode,
+ gen_rtx_SCRATCH (VOIDmode))));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode,
+ gen_rtx_MEM (BLKmode,
+ hard_frame_pointer_rtx)));
+
/* Restore frame pointer for containing function.
This sets the actual hard register used for the frame pointer
to the location of the function's incoming static chain info.
&& INSN_UID (first_insn) > INSN_UID (f->before_jump)
&& ! DECL_ERROR_ISSUED (f->target))
{
- error ("%Hlabel '%D' used before containing binding contour",
- &DECL_SOURCE_LOCATION (f->target), f->target);
+ error ("%Jlabel '%D' used before containing binding contour",
+ f->target, f->target);
/* Prevent multiple errors for one label. */
DECL_ERROR_ISSUED (f->target) = 1;
}
break;
}
+ if (*is_inout && !*allows_reg)
+ warning ("read-write constraint does not allow a register");
+
return true;
}
/* Similar, but for input constraints. */
-static bool
+bool
parse_input_constraint (const char **constraint_p, int input_num,
int ninputs, int noutputs, int ninout,
const char * const * constraints,
const char *orig_constraint = constraint;
size_t c_len = strlen (constraint);
size_t j;
+ bool saw_match = false;
/* Assume the constraint doesn't allow the use of either
a register or memory. */
char *end;
unsigned long match;
+ saw_match = true;
+
match = strtoul (constraint + j, &end, 10);
if (match >= (unsigned long) noutputs)
{
break;
}
+ if (saw_match && !*allows_reg)
+ warning ("matching constraint does not allow a register");
+
return true;
}
for (regno = REGNO (reg);
regno < (REGNO (reg)
- + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
+ + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
regno++)
if (TEST_HARD_REG_BIT (clobbered_regs, regno))
{
void
expand_asm_operands (tree string, tree outputs, tree inputs,
- tree clobbers, int vol, const char *filename, int line)
+ tree clobbers, int vol, location_t locus)
{
rtvec argvec, constraintvec;
rtx body;
if (! check_operand_nalternatives (outputs, inputs))
return;
- if (! check_unique_operand_names (outputs, inputs))
- return;
-
string = resolve_asm_operand_names (string, outputs, inputs);
/* Collect constraints. */
for (t = inputs; t ; t = TREE_CHAIN (t), i++)
constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
-#ifdef MD_ASM_CLOBBERS
/* Sometimes we wish to automatically clobber registers across an asm.
Case in point is when the i386 backend moved from cc0 to a hard reg --
maintaining source-level compatibility means automatically clobbering
the flags register. */
- MD_ASM_CLOBBERS (clobbers);
-#endif
+ clobbers = targetm.md_asm_clobbers (clobbers);
/* Count the number of meaningful clobbered registers, ignoring what
we would ignore later. */
: GET_MODE (output_rtx[0])),
TREE_STRING_POINTER (string),
empty_string, 0, argvec, constraintvec,
- filename, line);
+ locus.file, locus.line);
MEM_VOLATILE_P (body) = vol;
if (CONSTANT_P (op))
{
- op = force_const_mem (TYPE_MODE (type), op);
- op = validize_mem (op);
+ rtx mem = force_const_mem (TYPE_MODE (type), op);
+ if (mem)
+ op = validize_mem (mem);
+ else
+ op = force_reg (TYPE_MODE (type), op);
}
- else if (GET_CODE (op) == REG
- || GET_CODE (op) == SUBREG
- || GET_CODE (op) == ADDRESSOF
- || GET_CODE (op) == CONCAT)
+ if (GET_CODE (op) == REG
+ || GET_CODE (op) == SUBREG
+ || GET_CODE (op) == ADDRESSOF
+ || GET_CODE (op) == CONCAT)
{
tree qual_type = build_qualified_type (type,
(TYPE_QUALS (type)
(GET_MODE (output_rtx[i]),
TREE_STRING_POINTER (string),
constraints[i], i, argvec, constraintvec,
- filename, line));
+ locus.file, locus.line));
MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
}
const char *c;
tree t;
+ check_unique_operand_names (outputs, inputs);
+
/* Substitute [<name>] in input constraint strings. There should be no
named operands in output constraints. */
for (t = inputs; t ; t = TREE_CHAIN (t))
{
rtx value;
tree type;
+ rtx alt_rtl = NULL;
if (want_value == -1)
want_value = expr_stmts_for_value != 0;
except for last statement in ({...}) where they may be useful. */
if (! want_value
&& (expr_stmts_for_value == 0 || ! maybe_last)
- && exp != error_mark_node)
+ && exp != error_mark_node
+ && warn_unused_value)
{
- if (! TREE_SIDE_EFFECTS (exp))
- {
- if (warn_unused_value
- && !(TREE_CODE (exp) == CONVERT_EXPR
- && VOID_TYPE_P (TREE_TYPE (exp))))
- warning ("%Hstatement with no effect", &emit_locus);
- }
- else if (warn_unused_value)
+ if (TREE_SIDE_EFFECTS (exp))
warn_if_unused_value (exp);
+ else if (!VOID_TYPE_P (TREE_TYPE (exp)))
+ warning ("%Hstatement with no effect", &emit_locus);
}
/* If EXP is of function type and we are expanding statements for
/* The call to `expand_expr' could cause last_expr_type and
last_expr_value to get reset. Therefore, we set last_expr_value
and last_expr_type *after* calling expand_expr. */
- value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
- VOIDmode, 0);
+ value = expand_expr_real (exp, want_value ? NULL_RTX : const0_rtx,
+ VOIDmode, 0, &alt_rtl);
type = TREE_TYPE (exp);
/* If all we do is reference a volatile value in memory,
if (want_value)
{
last_expr_value = value;
+ last_expr_alt_rtl = alt_rtl;
last_expr_type = type;
}
{
last_expr_type = NULL_TREE;
last_expr_value = NULL_RTX;
+ last_expr_alt_rtl = NULL_RTX;
}
/* Begin a statement-expression, i.e., a series of statements which
if (! last_expr_value || ! last_expr_type)
{
last_expr_value = const0_rtx;
+ last_expr_alt_rtl = NULL_RTX;
last_expr_type = void_type_node;
}
else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
TREE_TYPE (t) = last_expr_type;
RTL_EXPR_RTL (t) = last_expr_value;
+ RTL_EXPR_ALT_RTL (t) = last_expr_alt_rtl;
RTL_EXPR_SEQUENCE (t) = get_insns ();
rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
expand_null_return_1 (last_insn);
}
+/* Generate RTL to return directly from the current function.
+   (That is, we bypass any return value.)  */
+
+void
+expand_naked_return (void)
+{
+  rtx last_insn, end_label;
+
+  last_insn = get_last_insn ();
+  end_label = naked_return_label;
+
+  /* Discard any pending stack adjustment rather than emitting it:
+     a naked return bypasses the normal value/cleanup epilogue path.  */
+  clear_pending_stack_adjust ();
+  do_pending_stack_adjust ();
+  clear_last_expr ();
+
+  /* Lazily create the shared naked-return label on first use; all
+     naked returns in the function jump to this single label.  */
+  if (end_label == 0)
+    end_label = naked_return_label = gen_label_rtx ();
+  expand_goto_internal (NULL_TREE, end_label, last_insn);
+}
+
/* Try to guess whether the value of return means error code. */
static enum br_predictor
return_prediction (rtx val)
return PRED_NO_PREDICTION;
}
+
+/* If the current function returns values in the most significant part
+   of a register, shift return value VAL appropriately.  The mode of
+   the function's return type is known not to be BLKmode.  */
+
+static rtx
+shift_return_value (rtx val)
+{
+  tree type;
+
+  type = TREE_TYPE (DECL_RESULT (current_function_decl));
+  /* Only ABIs whose return_in_msb hook says so need the shift.  */
+  if (targetm.calls.return_in_msb (type))
+    {
+      rtx target;
+      HOST_WIDE_INT shift;
+
+      target = DECL_RTL (DECL_RESULT (current_function_decl));
+      /* Number of unused (padding) bits in the return register:
+         register width minus the value's width in bits.  */
+      shift = (GET_MODE_BITSIZE (GET_MODE (target))
+	       - BITS_PER_UNIT * int_size_in_bytes (type));
+      /* Shift the value up into the most significant end, computing
+         into TARGET (the return register) when possible.  */
+      if (shift > 0)
+	val = expand_binop (GET_MODE (target), ashl_optab,
+			    gen_lowpart (GET_MODE (target), val),
+			    GEN_INT (shift), target, 1, OPTAB_WIDEN);
+    }
+  return val;
+}
+
+
/* Generate RTL to return from the current function, with value VAL. */
static void
if (return_reg != val)
{
tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
-#ifdef PROMOTE_FUNCTION_RETURN
- int unsignedp = TREE_UNSIGNED (type);
- enum machine_mode old_mode
- = DECL_MODE (DECL_RESULT (current_function_decl));
- enum machine_mode mode
- = promote_mode (type, old_mode, &unsignedp, 1);
-
- if (mode != old_mode)
- val = convert_modes (mode, old_mode, val, unsignedp);
-#endif
+ if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
+ {
+ int unsignedp = TREE_UNSIGNED (type);
+ enum machine_mode old_mode
+ = DECL_MODE (DECL_RESULT (current_function_decl));
+ enum machine_mode mode
+ = promote_mode (type, old_mode, &unsignedp, 1);
+
+ if (mode != old_mode)
+ val = convert_modes (mode, old_mode, val, unsignedp);
+ }
if (GET_CODE (return_reg) == PARALLEL)
emit_group_load (return_reg, val, type, int_size_in_bytes (type));
else
{
int i;
unsigned HOST_WIDE_INT bitpos, xbitpos;
- unsigned HOST_WIDE_INT big_endian_correction = 0;
+ unsigned HOST_WIDE_INT padding_correction = 0;
unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (TREE_TYPE (retval_rhs));
int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
return;
}
- /* Structures whose size is not a multiple of a word are aligned
- to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
- machine, this means we must skip the empty high order bytes when
- calculating the bit offset. */
- if (BYTES_BIG_ENDIAN
- && bytes % UNITS_PER_WORD)
- big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
- * BITS_PER_UNIT));
+ /* If the structure doesn't take up a whole number of words, see
+ whether the register value should be padded on the left or on
+ the right. Set PADDING_CORRECTION to the number of padding
+ bits needed on the left side.
+
+ In most ABIs, the structure will be returned at the least end of
+ the register, which translates to right padding on little-endian
+ targets and left padding on big-endian targets. The opposite
+ holds if the structure is returned at the most significant
+ end of the register. */
+ if (bytes % UNITS_PER_WORD != 0
+ && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
+ ? !BYTES_BIG_ENDIAN
+ : BYTES_BIG_ENDIAN))
+ padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
+ * BITS_PER_UNIT));
/* Copy the structure BITSIZE bits at a time. */
- for (bitpos = 0, xbitpos = big_endian_correction;
+ for (bitpos = 0, xbitpos = padding_correction;
bitpos < bytes * BITS_PER_UNIT;
bitpos += bitsize, xbitpos += bitsize)
{
/* We need a new destination pseudo each time xbitpos is
- on a word boundary and when xbitpos == big_endian_correction
+ on a word boundary and when xbitpos == padding_correction
(the first time through). */
if (xbitpos % BITS_PER_WORD == 0
- || xbitpos == big_endian_correction)
+ || xbitpos == padding_correction)
{
/* Generate an appropriate register. */
dst = gen_reg_rtx (word_mode);
BITS_PER_WORD);
}
- /* Find the smallest integer mode large enough to hold the
- entire structure and use that mode instead of BLKmode
- on the USE insn for the return register. */
- for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- tmpmode != VOIDmode;
- tmpmode = GET_MODE_WIDER_MODE (tmpmode))
- /* Have we found a large enough mode? */
- if (GET_MODE_SIZE (tmpmode) >= bytes)
- break;
+ tmpmode = GET_MODE (result_rtl);
+ if (tmpmode == BLKmode)
+ {
+ /* Find the smallest integer mode large enough to hold the
+ entire structure and use that mode instead of BLKmode
+ on the USE insn for the return register. */
+ for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ tmpmode != VOIDmode;
+ tmpmode = GET_MODE_WIDER_MODE (tmpmode))
+ /* Have we found a large enough mode? */
+ if (GET_MODE_SIZE (tmpmode) >= bytes)
+ break;
- /* No suitable mode found. */
- if (tmpmode == VOIDmode)
- abort ();
+ /* No suitable mode found. */
+ if (tmpmode == VOIDmode)
+ abort ();
- PUT_MODE (result_rtl, tmpmode);
+ PUT_MODE (result_rtl, tmpmode);
+ }
if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
result_reg_mode = word_mode;
val = force_not_mem (val);
emit_queue ();
/* Return the calculated value, doing cleanups first. */
- expand_value_return (val);
+ expand_value_return (shift_return_value (val));
}
else
{
static void
expand_nl_goto_receiver (void)
{
+ /* Clobber the FP when we get here, so we have to make sure it's
+ marked as used by this function. */
+ emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
+
+ /* Mark the static chain as clobbered here so life information
+ doesn't get messed up for it. */
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
+
#ifdef HAVE_nonlocal_goto
if (! HAVE_nonlocal_goto)
#endif
if (HAVE_nonlocal_goto_receiver)
emit_insn (gen_nonlocal_goto_receiver ());
#endif
+
+ /* @@@ This is a kludge. Not all machine descriptions define a blockage
+ insn, but we must not allow the code we just generated to be reordered
+ by scheduling. Specifically, the update of the frame pointer must
+ happen immediately, not later. So emit an ASM_INPUT to act as blockage
+ insn. */
+ emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
/* Make handlers for nonlocal gotos taking place in the function calls in
&& ! TREE_USED (decl)
&& ! DECL_IN_SYSTEM_HEADER (decl)
&& DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
- warning ("%Hunused variable '%D'", &DECL_SOURCE_LOCATION (decl), decl);
+ warning ("%Junused variable '%D'", decl, decl);
}
/* Generate RTL code to terminate a binding contour.
that must be an error, because gotos without fixups
come from outside all saved stack-levels. */
if (TREE_ADDRESSABLE (chain->label))
- error ("%Hlabel '%D' used before containing binding contour",
- &DECL_SOURCE_LOCATION (chain->label), chain->label);
+ error ("%Jlabel '%D' used before containing binding contour",
+ chain->label, chain->label);
}
}
/* Don't let cleanups affect ({...}) constructs. */
int old_expr_stmts_for_value = expr_stmts_for_value;
rtx old_last_expr_value = last_expr_value;
+ rtx old_last_expr_alt_rtl = last_expr_alt_rtl;
tree old_last_expr_type = last_expr_type;
expr_stmts_for_value = 0;
expr_stmts_for_value = old_expr_stmts_for_value;
last_expr_value = old_last_expr_value;
+ last_expr_alt_rtl = old_last_expr_alt_rtl;
last_expr_type = old_last_expr_type;
/* Restore the stack level. */
start_cleanup_deferral ();
}
-
-/* Start a "dummy case statement" within which case labels are invalid
- and are not connected to any larger real case statement.
- This can be used if you don't want to let a case statement jump
- into the middle of certain kinds of constructs. */
-
-void
-expand_start_case_dummy (void)
-{
- struct nesting *thiscase = ALLOC_NESTING ();
-
- /* Make an entry on case_stack for the dummy. */
-
- thiscase->desc = CASE_NESTING;
- thiscase->next = case_stack;
- thiscase->all = nesting_stack;
- thiscase->depth = ++nesting_depth;
- thiscase->exit_label = 0;
- thiscase->data.case_stmt.case_list = 0;
- thiscase->data.case_stmt.start = 0;
- thiscase->data.case_stmt.nominal_type = 0;
- thiscase->data.case_stmt.default_label = 0;
- case_stack = thiscase;
- nesting_stack = thiscase;
- start_cleanup_deferral ();
-}
\f
static void
check_seenlabel (void)
emit_jump (default_label);
}
+#ifndef HAVE_casesi
+#define HAVE_casesi 0
+#endif
+
+#ifndef HAVE_tablejump
+#define HAVE_tablejump 0
+#endif
+
/* Terminate a case (Pascal) or switch (C) statement
in which ORIG_INDEX is the expression to be tested.
If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
because we can optimize it. */
else if (count < case_values_threshold ()
- || compare_tree_int (range, 10 * count) > 0
+ || compare_tree_int (range,
+ (optimize_size ? 3 : 10) * count) > 0
/* RANGE may be signed, and really large ranges will show up
as negative numbers. */
|| compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
|| flag_pic
#endif
- || TREE_CONSTANT (index_expr))
+ || TREE_CONSTANT (index_expr)
+ /* If neither casesi or tablejump is available, we can
+ only go this way. */
+ || (!HAVE_casesi && !HAVE_tablejump))
{
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);