/* Expands front end tree to back end RTL for GNU C-Compiler
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
- 1998, 1999, 2000 Free Software Foundation, Inc.
+ 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
-any later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING. If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA. */
/* This file handles the generation of rtl code from tree structure
above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
#include "flags.h"
#include "except.h"
#include "function.h"
-#include "insn-flags.h"
#include "insn-config.h"
-#include "insn-codes.h"
#include "expr.h"
+#include "libfuncs.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
/* This must be a signed type, and non-ANSI compilers lack signed char. */
static short cost_table_[129];
-static short *cost_table;
static int use_cost_table;
+static int cost_table_initialized;
+
+/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
+ is unsigned. */
+#define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT)((I) + 1)]
\f
/* Stack of control and binding constructs we are currently inside.
/* Non-zero if we are using EH to handle cleanups. */
static int using_eh_for_cleanups_p = 0;
-/* Character strings, each containing a single decimal digit. */
-static const char *digit_strings[10];
-
static int n_occurrences PARAMS ((int, const char *));
static void expand_goto_internal PARAMS ((tree, rtx, rtx));
static int expand_fixup PARAMS ((tree, rtx, rtx));
static void expand_nl_goto_receivers PARAMS ((struct nesting *));
static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
rtx, int));
-static void expand_null_return_1 PARAMS ((rtx, int));
+static void expand_null_return_1 PARAMS ((rtx));
static void expand_value_return PARAMS ((rtx));
static int tail_recursion_args PARAMS ((tree, tree));
static void expand_cleanups PARAMS ((tree, tree, int, int));
/* One-time initialization for statement expansion: set up the obstack
   (stmt_obstack) on which statement-nesting data is allocated.  */
void
init_stmt ()
{
- int i;
- char buf[2];
-
gcc_obstack_init (&stmt_obstack);
-
- buf[1] = 0;
- for (i = 0; i < 10; i++)
- {
- buf[0] = '0' + i;
- digit_strings[i] = ggc_alloc_string (buf, 1);
- }
- ggc_add_string_root (digit_strings, 10);
}
void
if (TREE_CODE (label) != LABEL_DECL)
abort ();
- if (DECL_RTL (label))
- return DECL_RTL (label);
+ if (!DECL_RTL_SET_P (label))
+ SET_DECL_RTL (label, gen_label_rtx ());
- return DECL_RTL (label) = gen_label_rtx ();
+ return DECL_RTL (label);
}
+
/* Add an unconditional jump to LABEL as the next sequential instruction. */
void
{
struct function *p = find_function_data (context);
rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
- rtx handler_slot, static_chain, save_area;
+ rtx handler_slot, static_chain, save_area, insn;
tree link;
/* Find the corresponding handler slot for this label. */
emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
emit_indirect_jump (handler_slot);
}
+
+ /* Search backwards to the jump insn and mark it as a
+ non-local goto. */
+ for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
+ {
+ if (GET_CODE (insn) == JUMP_INSN)
+ {
+ REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
+ const0_rtx, REG_NOTES (insn));
+ break;
+ }
+ else if (GET_CODE (insn) == CALL_INSN)
+ break;
+ }
}
else
expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
start_sequence ();
- start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
+ start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
if (cfun->x_whole_function_mode_p)
NOTE_BLOCK (start) = block;
- fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
- end = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
+ fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
+ end = emit_note (NULL, NOTE_INSN_BLOCK_END);
if (cfun->x_whole_function_mode_p)
NOTE_BLOCK (end) = block;
fixup->context = block;
expand_fixups (first_insn)
rtx first_insn;
{
- fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
+ fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
last_expr_type = 0;
}
+/* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
+ OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
+ inputs and NOUTPUTS outputs to this extended-asm. Upon return,
+ *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
+ memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
+ constraint allows the use of a register operand. And, *IS_INOUT
+ will be true if the operand is read-write, i.e., if it is used as
+ an input as well as an output. If *CONSTRAINT_P is not in
+ canonical form, it will be made canonical. (Note that `+' will be
+ replaced with `=' as part of this process.)
+
+ Returns TRUE if all went well; FALSE if an error occurred. */
+
+bool
+parse_output_constraint (constraint_p,
+ operand_num,
+ ninputs,
+ noutputs,
+ allows_mem,
+ allows_reg,
+ is_inout)
+ const char **constraint_p;
+ int operand_num;
+ int ninputs;
+ int noutputs;
+ bool *allows_mem;
+ bool *allows_reg;
+ bool *is_inout;
+{
+ const char *constraint = *constraint_p;
+ const char *p;
+
+ /* Assume the constraint doesn't allow the use of either a register
+ or memory. */
+ *allows_mem = false;
+ *allows_reg = false;
+
+ /* Allow the `=' or `+' to not be at the beginning of the string,
+ since it wasn't explicitly documented that way, and there is a
+ large body of code that puts it last. Swap the character to
+ the front, so as not to uglify any place else. */
+ p = strchr (constraint, '=');
+ if (!p)
+ p = strchr (constraint, '+');
+
+ /* If the string doesn't contain an `=', issue an error
+ message. */
+ if (!p)
+ {
+ error ("output operand constraint lacks `='");
+ return false;
+ }
+
+ /* If the constraint begins with `+', then the operand is both read
+ from and written to. */
+ *is_inout = (*p == '+');
+
+ /* Make sure we can specify the matching operand. */
+ if (*is_inout && operand_num > 9)
+ {
+ error ("output operand constraint %d contains `+'",
+ operand_num);
+ return false;
+ }
+
+ /* Canonicalize the output constraint so that it begins with `='.
+ Note that we test *IS_INOUT, not the pointer IS_INOUT itself:
+ the pointer is always non-null, so testing it would make this
+ branch unconditional and reallocate already-canonical strings. */
+ if (p != constraint || *is_inout)
+ {
+ char *buf;
+ size_t c_len = strlen (constraint);
+
+ if (p != constraint)
+ warning ("output constraint `%c' for operand %d is not at the beginning",
+ *p, operand_num);
+
+ /* Make a copy of the constraint. */
+ buf = alloca (c_len + 1);
+ strcpy (buf, constraint);
+ /* Swap the first character and the `=' or `+'. */
+ buf[p - constraint] = buf[0];
+ /* Make sure the first character is an `='. (Until we do this,
+ it might be a `+'.) */
+ buf[0] = '=';
+ /* Replace the constraint with the canonicalized string. */
+ *constraint_p = ggc_alloc_string (buf, c_len);
+ constraint = *constraint_p;
+ }
+
+ /* Loop through the constraint string. */
+ for (p = constraint + 1; *p; ++p)
+ switch (*p)
+ {
+ case '+':
+ case '=':
+ error ("operand constraint contains '+' or '=' at illegal position.");
+ return false;
+
+ case '%':
+ if (operand_num + 1 == ninputs + noutputs)
+ {
+ error ("`%%' constraint used with last operand");
+ return false;
+ }
+ break;
+
+ case 'V': case 'm': case 'o':
+ *allows_mem = true;
+ break;
+
+ case '?': case '!': case '*': case '&': case '#':
+ case 'E': case 'F': case 'G': case 'H':
+ case 's': case 'i': case 'n':
+ case 'I': case 'J': case 'K': case 'L': case 'M':
+ case 'N': case 'O': case 'P': case ',':
+ break;
+
+ case '0': case '1': case '2': case '3': case '4':
+ case '5': case '6': case '7': case '8': case '9':
+ error ("matching constraint not valid in output operand");
+ return false;
+
+ case '<': case '>':
+ /* ??? Before flow, auto inc/dec insns are not supposed to exist,
+ excepting those that expand_call created. So match memory
+ and hope. */
+ *allows_mem = true;
+ break;
+
+ case 'g': case 'X':
+ *allows_reg = true;
+ *allows_mem = true;
+ break;
+
+ case 'p': case 'r':
+ *allows_reg = true;
+ break;
+
+ default:
+ if (!ISALPHA (*p))
+ break;
+ if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
+ *allows_reg = true;
+#ifdef EXTRA_CONSTRAINT
+ else
+ {
+ /* Otherwise we can't assume anything about the nature of
+ the constraint except that it isn't purely registers.
+ Treat it like "g" and hope for the best. */
+ *allows_reg = true;
+ *allows_mem = true;
+ }
+#endif
+ break;
+ }
+
+ return true;
+}
+
+
/* Generate RTL for an asm statement with arguments.
STRING is the instruction template.
OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
enum machine_mode *inout_mode
= (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
+ const char **output_constraints
+ = alloca (noutputs * sizeof (const char *));
/* The insn we have emitted. */
rtx insn;
int old_generating_concat_p = generating_concat_p;
tree val = TREE_VALUE (tail);
tree type = TREE_TYPE (val);
const char *constraint;
- char *p;
- int c_len;
- int j;
- int is_inout = 0;
- int allows_reg = 0;
- int allows_mem = 0;
+ bool is_inout;
+ bool allows_reg;
+ bool allows_mem;
/* If there's an erroneous arg, emit no insn. */
- if (TREE_TYPE (val) == error_mark_node)
+ if (type == error_mark_node)
return;
/* Make sure constraint has `=' and does not have `+'. Also, see
message. */
constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
- c_len = strlen (constraint);
-
- /* Allow the `=' or `+' to not be at the beginning of the string,
- since it wasn't explicitly documented that way, and there is a
- large body of code that puts it last. Swap the character to
- the front, so as not to uglify any place else. */
- switch (c_len)
- {
- default:
- if ((p = strchr (constraint, '=')) != NULL)
- break;
- if ((p = strchr (constraint, '+')) != NULL)
- break;
- case 0:
- error ("output operand constraint lacks `='");
- return;
- }
- j = p - constraint;
- is_inout = *p == '+';
-
- if (j || is_inout)
- {
- /* Have to throw away this constraint string and get a new one. */
- char *buf = alloca (c_len + 1);
- buf[0] = '=';
- if (j)
- memcpy (buf + 1, constraint, j);
- memcpy (buf + 1 + j, p + 1, c_len - j); /* not -j-1 - copy null */
- constraint = ggc_alloc_string (buf, c_len);
-
- if (j)
- warning (
- "output constraint `%c' for operand %d is not at the beginning",
- *p, i);
- }
-
- /* Make sure we can specify the matching operand. */
- if (is_inout && i > 9)
- {
- error ("output operand constraint %d contains `+'", i);
- return;
- }
-
- for (j = 1; j < c_len; j++)
- switch (constraint[j])
- {
- case '+':
- case '=':
- error ("operand constraint contains '+' or '=' at illegal position.");
- return;
-
- case '%':
- if (i + 1 == ninputs + noutputs)
- {
- error ("`%%' constraint used with last operand");
- return;
- }
- break;
-
- case '?': case '!': case '*': case '&': case '#':
- case 'E': case 'F': case 'G': case 'H':
- case 's': case 'i': case 'n':
- case 'I': case 'J': case 'K': case 'L': case 'M':
- case 'N': case 'O': case 'P': case ',':
- break;
-
- case '0': case '1': case '2': case '3': case '4':
- case '5': case '6': case '7': case '8': case '9':
- error ("matching constraint not valid in output operand");
- break;
-
- case 'V': case 'm': case 'o':
- allows_mem = 1;
- break;
-
- case '<': case '>':
- /* ??? Before flow, auto inc/dec insns are not supposed to exist,
- excepting those that expand_call created. So match memory
- and hope. */
- allows_mem = 1;
- break;
-
- case 'g': case 'X':
- allows_reg = 1;
- allows_mem = 1;
- break;
-
- case 'p': case 'r':
- allows_reg = 1;
- break;
-
- default:
- if (! ISALPHA (constraint[j]))
- {
- error ("invalid punctuation `%c' in constraint",
- constraint[j]);
- return;
- }
- if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
- allows_reg = 1;
-#ifdef EXTRA_CONSTRAINT
- else
- {
- /* Otherwise we can't assume anything about the nature of
- the constraint except that it isn't purely registers.
- Treat it like "g" and hope for the best. */
- allows_reg = 1;
- allows_mem = 1;
- }
-#endif
- break;
- }
+ output_constraints[i] = constraint;
+
+ /* Try to parse the output constraint. If that fails, there's
+ no point in going further. */
+ if (!parse_output_constraint (&output_constraints[i],
+ i,
+ ninputs,
+ noutputs,
+ &allows_mem,
+ &allows_reg,
+ &is_inout))
+ return;
/* If an output operand is not a decl or indirect ref and our constraint
allows a register, make a temporary to act as an intermediate.
op);
else if (GET_CODE (op) == REG
|| GET_CODE (op) == SUBREG
+ || GET_CODE (op) == ADDRESSOF
|| GET_CODE (op) == CONCAT)
{
tree type = TREE_TYPE (TREE_VALUE (tail));
- rtx memloc = assign_temp (type, 1, 1, 1);
+ tree qual_type = build_qualified_type (type,
+ (TYPE_QUALS (type)
+ | TYPE_QUAL_CONST));
+ rtx memloc = assign_temp (qual_type, 1, 1, 1);
emit_move_insn (memloc, op);
op = memloc;
ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
= output_rtx[j];
ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
- = gen_rtx_ASM_INPUT (inout_mode[i], digit_strings[j]);
+ = gen_rtx_ASM_INPUT (inout_mode[i], digit_string (j));
}
generating_concat_p = old_generating_concat_p;
if (noutputs == 1 && nclobbers == 0)
{
ASM_OPERANDS_OUTPUT_CONSTRAINT (body)
- = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
+ = output_constraints[0];
insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
}
gen_rtx_ASM_OPERANDS
(GET_MODE (output_rtx[i]),
TREE_STRING_POINTER (string),
- TREE_STRING_POINTER (TREE_PURPOSE (tail)),
+ output_constraints[i],
i, argvec, constraints,
filename, line));
if (VOID_TYPE_P (TREE_TYPE (exp)))
return 0;
+ /* If this is an expression with side effects, don't warn. */
+ if (TREE_SIDE_EFFECTS (exp))
+ return 0;
+
switch (TREE_CODE (exp))
{
case PREINCREMENT_EXPR:
&& TREE_THIS_VOLATILE (exp))
return 0;
- /* If this is an expression with side effects, don't warn. */
- if (TREE_SIDE_EFFECTS (exp))
- return 0;
-
/* If this is an expression which has no operands, there is no value
to be unused. There are no such language-independent codes,
but front ends may define such. */
do_pending_stack_adjust ();
emit_queue ();
- emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
+ emit_note (NULL, NOTE_INSN_LOOP_BEG);
emit_label (thisloop->data.loop.start_label);
return thisloop;
expand_loop_continue_here ()
{
do_pending_stack_adjust ();
- emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
+ emit_note (NULL, NOTE_INSN_LOOP_CONT);
emit_label (loop_stack->data.loop.continue_label);
}
if (needs_end_jump)
{
emit_jump (start_label);
- emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
+ emit_note (NULL, NOTE_INSN_LOOP_END);
}
emit_label (loop_stack->data.loop.end_label);
void
expand_null_return ()
{
- struct nesting *block = block_stack;
rtx last_insn = get_last_insn ();
/* If this function was declared to return a value, but we
propagated live to the rest of the function. */
clobber_return_register ();
- /* Does any pending block have cleanups? */
- while (block && block->data.block.cleanups == 0)
- block = block->next;
-
- /* If yes, use a goto to return, since that runs cleanups. */
-
- expand_null_return_1 (last_insn, block != 0);
+ expand_null_return_1 (last_insn);
}
/* Generate RTL to return from the current function, with value VAL. */
expand_value_return (val)
rtx val;
{
- struct nesting *block = block_stack;
rtx last_insn = get_last_insn ();
rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
emit_move_insn (return_reg, val);
}
- /* Does any pending block have cleanups? */
-
- while (block && block->data.block.cleanups == 0)
- block = block->next;
-
- /* If yes, use a goto to return, since that runs cleanups.
- Use LAST_INSN to put cleanups *before* the move insn emitted above. */
-
- expand_null_return_1 (last_insn, block != 0);
+ expand_null_return_1 (last_insn);
}
/* Output a return with no value. If LAST_INSN is nonzero,
- pretend that the return takes place after LAST_INSN.
- If USE_GOTO is nonzero then don't use a return instruction;
- go to the return label instead. This causes any cleanups
- of pending blocks to be executed normally. */
+ pretend that the return takes place after LAST_INSN. */
static void
-expand_null_return_1 (last_insn, use_goto)
+expand_null_return_1 (last_insn)
rtx last_insn;
- int use_goto;
{
/* Prefer the cleanup label when one exists, so pending cleanups run
   before reaching the epilogue; otherwise use the plain return label.  */
rtx end_label = cleanup_label ? cleanup_label : return_label;
do_pending_stack_adjust ();
last_expr_type = 0;
- /* PCC-struct return always uses an epilogue. */
- if (current_function_returns_pcc_struct || use_goto)
- {
- if (end_label == 0)
- end_label = return_label = gen_label_rtx ();
- expand_goto_internal (NULL_TREE, end_label, last_insn);
- return;
- }
-
- /* Otherwise output a simple return-insn if one is available,
- unless it won't do the job. */
-#ifdef HAVE_return
- if (HAVE_return && use_goto == 0 && cleanup_label == 0)
- {
- emit_jump_insn (gen_return ());
- emit_barrier ();
- return;
- }
-#endif
-
- /* Otherwise jump to the epilogue. */
+ if (end_label == 0)
+ end_label = return_label = gen_label_rtx ();
expand_goto_internal (NULL_TREE, end_label, last_insn);
}
\f
run destructors on variables that might be used in the subsequent
computation of the return value. */
rtx last_insn = 0;
- rtx result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
+ rtx result_rtl;
register rtx val = 0;
tree retval_rhs;
- int cleanups;
/* If function wants no value, give it none. */
if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
return;
}
- /* Are any cleanups needed? E.g. C++ destructors to be run? */
- /* This is not sufficient. We also need to watch for cleanups of the
- expression we are about to expand. Unfortunately, we cannot know
- if it has cleanups until we expand it, and we want to change how we
- expand it depending upon if we need cleanups. We can't win. */
-#if 0
- cleanups = any_pending_cleanups (1);
-#else
- cleanups = 1;
-#endif
-
if (retval == error_mark_node)
- retval_rhs = NULL_TREE;
+ {
+ /* Treat this like a return of no value from a function that
+ returns a value. */
+ expand_null_return ();
+ return;
+ }
else if (TREE_CODE (retval) == RESULT_DECL)
retval_rhs = retval;
else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
else
retval_rhs = NULL_TREE;
- /* Only use `last_insn' if there are cleanups which must be run. */
- if (cleanups || cleanup_label != 0)
- last_insn = get_last_insn ();
+ last_insn = get_last_insn ();
/* Distribute return down conditional expr if either of the sides
may involve tail recursion (see test below). This enhances the number
return;
}
+ result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
+
/* If the result is an aggregate that is being returned in one (or more)
registers, load the registers here. The compiler currently can't handle
copying a BLKmode value into registers. We could put this code in a
rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
enum machine_mode tmpmode, result_reg_mode;
+ if (bytes == 0)
+ {
+ expand_null_return ();
+ return;
+ }
+
/* Structures whose size is not a multiple of a word are aligned
to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
machine, this means we must skip the empty high order bytes when
/* Find the smallest integer mode large enough to hold the
entire structure and use that mode instead of BLKmode
on the USE insn for the return register. */
- bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
tmpmode != VOIDmode;
tmpmode = GET_MODE_WIDER_MODE (tmpmode))
- {
- /* Have we found a large enough mode? */
- if (GET_MODE_SIZE (tmpmode) >= bytes)
- break;
- }
+ /* Have we found a large enough mode? */
+ if (GET_MODE_SIZE (tmpmode) >= bytes)
+ break;
/* No suitable mode found. */
if (tmpmode == VOIDmode)
expand_value_return (result_reg);
}
- else if (cleanups
- && retval_rhs != 0
- && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
- && (GET_CODE (result_rtl) == REG
- || (GET_CODE (result_rtl) == PARALLEL)))
+ else if (retval_rhs != 0
+ && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
+ && (GET_CODE (result_rtl) == REG
+ || (GET_CODE (result_rtl) == PARALLEL)))
{
/* Calculate the return value into a temporary (usually a pseudo
reg). */
- val = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)),
- 0, 0, 1);
+ tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
+ tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
+
+ val = assign_temp (nt, 0, 0, 1);
val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
val = force_not_mem (val);
emit_queue ();
/* Create a note to mark the beginning of the block. */
if (block_flag)
{
- note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
+ note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
NOTE_BLOCK (note) = block;
}
else
- note = emit_note (NULL_PTR, NOTE_INSN_DELETED);
+ note = emit_note (NULL, NOTE_INSN_DELETED);
/* Make an entry on block_stack for the block we are entering. */
fix this is to just insert another instruction here, so that the
instructions inserted after the last unconditional cleanup are
never the last instruction. */
- emit_note (NULL_PTR, NOTE_INSN_DELETED);
+ emit_note (NULL, NOTE_INSN_DELETED);
thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
if (block_stack
return 0;
}
-/* Mark top block of block_stack as an implicit binding for an
- exception region. This is used to prevent infinite recursion when
- ending a binding with expand_end_bindings. It is only ever called
- by expand_eh_region_start, as that it the only way to create a
- block stack for a exception region. */
-
-void
-mark_block_as_eh_region ()
-{
- block_stack->data.block.exception_region = 1;
- if (block_stack->next
- && block_stack->next->data.block.conditional_code)
- {
- block_stack->data.block.conditional_code
- = block_stack->next->data.block.conditional_code;
- block_stack->data.block.last_unconditional_cleanup
- = block_stack->next->data.block.last_unconditional_cleanup;
- block_stack->data.block.cleanup_ptr
- = block_stack->next->data.block.cleanup_ptr;
- }
-}
-
/* True if we are currently emitting insns in an area of output code
that is controlled by a conditional expression. This is used by
the cleanup handling code to generate conditional cleanup actions. */
return block_stack && block_stack->data.block.conditional_code;
}
-/* Mark top block of block_stack as not for an implicit binding for an
- exception region. This is only ever done by expand_eh_region_end
- to let expand_end_bindings know that it is being called explicitly
- to end the binding layer for just the binding layer associated with
- the exception region, otherwise expand_end_bindings would try and
- end all implicit binding layers for exceptions regions, and then
- one normal binding layer. */
+/* Return an opaque pointer to the current nesting level, so frontend code
+ can check its own sanity. */
-void
-mark_block_as_not_eh_region ()
-{
- block_stack->data.block.exception_region = 0;
-}
-
-/* True if the top block of block_stack was marked as for an exception
- region by mark_block_as_eh_region. */
-
-int
-is_eh_region ()
+struct nesting *
+current_nesting_level ()
{
- return cfun && block_stack && block_stack->data.block.exception_region;
+ return cfun ? block_stack : 0;
}
/* Emit a handler label for a nonlocal goto handler.
rtx insns;
rtx handler_label = gen_label_rtx ();
- /* Don't let jump_optimize delete the handler. */
+ /* Don't let cleanup_cfg delete the handler. */
LABEL_PRESERVE_P (handler_label) = 1;
start_sequence ();
#endif
{
/* Now restore our arg pointer from the address at which it
- was saved in our stack frame.
- If there hasn't be space allocated for it yet, make
- some now. */
- if (arg_pointer_save_area == 0)
- arg_pointer_save_area
- = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
+ was saved in our stack frame. */
emit_move_insn (virtual_incoming_args_rtx,
- /* We need a pseudo here, or else
- instantiate_virtual_regs_1 complains. */
- copy_to_reg (arg_pointer_save_area));
+ copy_to_reg (get_arg_pointer_save_area (cfun)));
}
}
#endif
int mark_ends;
int dont_jump_in;
{
- register struct nesting *thisblock;
-
- while (block_stack->data.block.exception_region)
- {
- /* Because we don't need or want a new temporary level and
- because we didn't create one in expand_eh_region_start,
- create a fake one now to avoid removing one in
- expand_end_bindings. */
- push_temp_slots ();
-
- block_stack->data.block.exception_region = 0;
-
- expand_end_bindings (NULL_TREE, 0, 0);
- }
-
- /* Since expand_eh_region_start does an expand_start_bindings, we
- have to first end all the bindings that were created by
- expand_eh_region_start. */
-
- thisblock = block_stack;
+ register struct nesting *thisblock = block_stack;
/* If any of the variables in this scope were not used, warn the
user. */
if (mark_ends)
{
- rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
+ rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
}
else
type = TREE_TYPE (decl);
- /* Only automatic variables need any expansion done.
- Static and external variables, and external functions,
- will be handled by `assemble_variable' (called from finish_decl).
- TYPE_DECL and CONST_DECL require nothing.
- PARM_DECLs are handled in `assign_parms'. */
+ /* For a CONST_DECL, set mode, alignment, and sizes from those of the
+ type in case this node is used in a reference. */
+ if (TREE_CODE (decl) == CONST_DECL)
+ {
+ DECL_MODE (decl) = TYPE_MODE (type);
+ DECL_ALIGN (decl) = TYPE_ALIGN (type);
+ DECL_SIZE (decl) = TYPE_SIZE (type);
+ DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
+ return;
+ }
+ /* Otherwise, only automatic variables need any expansion done. Static and
+ external variables, and external functions, will be handled by
+ `assemble_variable' (called from finish_decl). TYPE_DECL requires
+ nothing. PARM_DECLs are handled in `assign_parms'. */
if (TREE_CODE (decl) != VAR_DECL)
return;
+
if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
return;
/* Create the RTL representation for the variable. */
if (type == error_mark_node)
- DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
+ SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
+
else if (DECL_SIZE (decl) == 0)
/* Variable with incomplete type. */
{
+ rtx x;
if (DECL_INITIAL (decl) == 0)
/* Error message was already done; now avoid a crash. */
- DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
+ x = gen_rtx_MEM (BLKmode, const0_rtx);
else
/* An initializer is going to decide the size of this array.
Until we know the size, represent its address with a reg. */
- DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
+ x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
- set_mem_attributes (DECL_RTL (decl), decl, 1);
+ set_mem_attributes (x, decl, 1);
+ SET_DECL_RTL (decl, x);
}
else if (DECL_MODE (decl) != BLKmode
/* If -ffloat-store, don't put explicit float vars
enum machine_mode reg_mode
= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
- DECL_RTL (decl) = gen_reg_rtx (reg_mode);
+ SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
mark_user_reg (DECL_RTL (decl));
if (POINTER_TYPE_P (type))
whose size was determined by the initializer.
The old address was a register; set that register now
to the proper address. */
- if (DECL_RTL (decl) != 0)
+ if (DECL_RTL_SET_P (decl))
{
if (GET_CODE (DECL_RTL (decl)) != MEM
|| GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
oldaddr = XEXP (DECL_RTL (decl), 0);
}
- DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
+ SET_DECL_RTL (decl,
+ assign_temp (TREE_TYPE (decl), 1, 1, 1));
/* Set alignment we actually gave this decl. */
DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
else
/* Dynamic-size object: must push space on the stack. */
{
- rtx address, size;
+ rtx address, size, x;
/* Record the stack pointer on entry to block, if have
not already done so. */
TYPE_ALIGN (TREE_TYPE (decl)));
/* Reference the variable indirect through that rtx. */
- DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
+ x = gen_rtx_MEM (DECL_MODE (decl), address);
+ set_mem_attributes (x, decl, 1);
+ SET_DECL_RTL (decl, x);
- set_mem_attributes (DECL_RTL (decl), decl, 1);
/* Indicate the alignment we actually gave this variable. */
#ifdef STACK_BOUNDARY
emit_move_insn (flag, const1_rtx);
cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
- DECL_RTL (cond) = flag;
+ SET_DECL_RTL (cond, flag);
/* Conditionalize the cleanup. */
cleanup = build (COND_EXPR, void_type_node,
start_sequence ();
}
- /* If this was optimized so that there is no exception region for the
- cleanup, then mark the TREE_LIST node, so that we can later tell
- if we need to call expand_eh_region_end. */
- if (! using_eh_for_cleanups_p
- || expand_eh_region_start_tree (decl, cleanup))
+ if (! using_eh_for_cleanups_p)
TREE_ADDRESSABLE (t) = 1;
- /* If that started a new EH region, we're in a new block. */
- thisblock = block_stack;
+ else
+ expand_eh_region_start ();
if (cond_context)
{
fix this is to just insert another instruction here, so that the
instructions inserted after the last unconditional cleanup are
never the last instruction. */
- emit_note (NULL_PTR, NOTE_INSN_DELETED);
+ emit_note (NULL, NOTE_INSN_DELETED);
thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
}
}
return 1;
}
-
-/* Like expand_decl_cleanup, but suppress generating an exception handler
- to perform the cleanup. */
-
-#if 0
-int
-expand_decl_cleanup_no_eh (decl, cleanup)
- tree decl, cleanup;
-{
- int save_eh = using_eh_for_cleanups_p;
- int result;
-
- using_eh_for_cleanups_p = 0;
- result = expand_decl_cleanup (decl, cleanup);
- using_eh_for_cleanups_p = save_eh;
-
- return result;
-}
-#endif
-
-/* Arrange for the top element of the dynamic cleanup chain to be
- popped if we exit the current binding contour. DECL is the
- associated declaration, if any, otherwise NULL_TREE. If the
- current contour is left via an exception, then __sjthrow will pop
- the top element off the dynamic cleanup chain. The code that
- avoids doing the action we push into the cleanup chain in the
- exceptional case is contained in expand_cleanups.
-
- This routine is only used by expand_eh_region_start, and that is
- the only way in which an exception region should be started. This
- routine is only used when using the setjmp/longjmp codegen method
- for exception handling. */
-
-int
-expand_dcc_cleanup (decl)
- tree decl;
-{
- struct nesting *thisblock;
- tree cleanup;
-
- /* Error if we are not in any block. */
- if (cfun == 0 || block_stack == 0)
- return 0;
- thisblock = block_stack;
-
- /* Record the cleanup for the dynamic handler chain. */
-
- cleanup = make_node (POPDCC_EXPR);
-
- /* Add the cleanup in a manner similar to expand_decl_cleanup. */
- thisblock->data.block.cleanups
- = tree_cons (decl, cleanup, thisblock->data.block.cleanups);
-
- /* If this block has a cleanup, it belongs in stack_block_stack. */
- stack_block_stack = thisblock;
- return 1;
-}
-
-/* Arrange for the top element of the dynamic handler chain to be
- popped if we exit the current binding contour. DECL is the
- associated declaration, if any, otherwise NULL_TREE. If the current
- contour is left via an exception, then __sjthrow will pop the top
- element off the dynamic handler chain. The code that avoids doing
- the action we push into the handler chain in the exceptional case
- is contained in expand_cleanups.
-
- This routine is only used by expand_eh_region_start, and that is
- the only way in which an exception region should be started. This
- routine is only used when using the setjmp/longjmp codegen method
- for exception handling. */
-
-int
-expand_dhc_cleanup (decl)
- tree decl;
-{
- struct nesting *thisblock;
- tree cleanup;
-
- /* Error if we are not in any block. */
- if (cfun == 0 || block_stack == 0)
- return 0;
- thisblock = block_stack;
-
- /* Record the cleanup for the dynamic handler chain. */
-
- cleanup = make_node (POPDHC_EXPR);
-
- /* Add the cleanup in a manner similar to expand_decl_cleanup. */
- thisblock->data.block.cleanups
- = tree_cons (decl, cleanup, thisblock->data.block.cleanups);
-
- /* If this block has a cleanup, it belongs in stack_block_stack. */
- stack_block_stack = thisblock;
- return 1;
-}
\f
/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
DECL_ELTS is the list of elements that belong to DECL's type.
if (GET_CODE (x) == MEM)
{
if (mode == GET_MODE (x))
- DECL_RTL (decl_elt) = x;
+ SET_DECL_RTL (decl_elt, x);
else
- {
- DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
- MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
- }
+ SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
}
else if (GET_CODE (x) == REG)
{
if (mode == GET_MODE (x))
- DECL_RTL (decl_elt) = x;
+ SET_DECL_RTL (decl_elt, x);
else
- DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
+ SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
}
else
abort ();
expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
else
{
- if (! in_fixup)
- {
- tree cleanup = TREE_VALUE (tail);
-
- /* See expand_d{h,c}c_cleanup for why we avoid this. */
- if (TREE_CODE (cleanup) != POPDHC_EXPR
- && TREE_CODE (cleanup) != POPDCC_EXPR
- /* See expand_eh_region_start_tree for this case. */
- && ! TREE_ADDRESSABLE (tail))
- {
- cleanup = protect_with_terminate (cleanup);
- expand_eh_region_end (cleanup);
- }
- }
+ if (! in_fixup && using_eh_for_cleanups_p)
+ expand_eh_region_end_cleanup (TREE_VALUE (tail));
if (reachable)
{
times, the control paths are non-overlapping so the
cleanups will not be executed twice. */
- /* We may need to protect fixups with rethrow regions. */
- int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
+ /* We may need to protect from outer cleanups. */
+ if (in_fixup && using_eh_for_cleanups_p)
+ {
+ expand_eh_region_start ();
+
+ expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
- if (protect)
- expand_fixup_region_start ();
+ expand_eh_region_end_fixup (TREE_VALUE (tail));
+ }
+ else
+ expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
- /* The cleanup might contain try-blocks, so we have to
- preserve our current queue. */
- push_ehqueue ();
- expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
- pop_ehqueue ();
- if (protect)
- expand_fixup_region_end (TREE_VALUE (tail));
free_temp_slots ();
}
}
/* Make sure case_stmt.start points to something that won't
need any transformation before expand_end_case. */
if (GET_CODE (get_last_insn ()) != NOTE)
- emit_note (NULL_PTR, NOTE_INSN_DELETED);
+ emit_note (NULL, NOTE_INSN_DELETED);
thiscase->data.case_stmt.start = get_last_insn ();
}
}
- /* Add this label to the chain, and succeed.
- Copy LOW, HIGH so they are on temporary rather than momentary
- obstack and will thus survive till the end of the case statement. */
+ /* Add this label to the chain, and succeed. */
r = (struct case_node *) xmalloc (sizeof (struct case_node));
- r->low = copy_node (low);
+ r->low = low;
/* If the bounds are equal, turn this into the one-value case. */
if (tree_int_cst_equal (low, high))
r->high = r->low;
else
- r->high = copy_node (high);
+ r->high = high;
r->code_label = label;
expand_label (label);
{
register struct case_node *n;
register tree chain;
-#if 0 /* variable used by 'if 0'ed code below. */
- register struct case_node **l;
- int all_values = 1;
-#endif
/* True iff the selector type is a numbered set mode. */
int sparseness = 0;
/* We deliberately use calloc here, not cmalloc, so that we can suppress
this optimization if we don't have enough memory rather than
aborting, as xmalloc would do. */
- && (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
+ && (cases_seen =
+ (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
{
HOST_WIDE_INT i;
tree v = TYPE_VALUES (type);
}
}
}
-
-#if 0
- /* ??? This optimization is disabled because it causes valid programs to
- fail. ANSI C does not guarantee that an expression with enum type
- will have a value that is the same as one of the enumeration literals. */
-
- /* If all values were found as case labels, make one of them the default
- label. Thus, this switch will never fall through. We arbitrarily pick
- the last one to make the default since this is likely the most
- efficient choice. */
-
- if (all_values)
- {
- for (l = &case_stack->data.case_stmt.case_list;
- (*l)->right != 0;
- l = &(*l)->right)
- ;
-
- case_stack->data.case_stmt.default_label = (*l)->code_label;
- *l = 0;
- }
-#endif /* 0 */
}
/* Free CN, and its children. */
}
\f
+
/* Terminate a case (Pascal) or switch (C) statement
in which ORIG_INDEX is the expression to be tested.
Generate the code to test it and jump to the right place. */
int ncases;
rtx *labelvec;
register int i;
- rtx before_case;
+ rtx before_case, end;
register struct nesting *thiscase = case_stack;
tree index_expr, index_type;
int unsignedp;
If the switch-index is a constant, do it this way
because we can optimize it. */
-#ifndef CASE_VALUES_THRESHOLD
-#ifdef HAVE_casesi
-#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
-#else
- /* If machine does not have a case insn that compares the
- bounds, this means extra overhead for dispatch tables
- which raises the threshold for using them. */
-#define CASE_VALUES_THRESHOLD 5
-#endif /* HAVE_casesi */
-#endif /* CASE_VALUES_THRESHOLD */
-
- else if (count < CASE_VALUES_THRESHOLD
+ else if (count < case_values_threshold ()
|| compare_tree_int (range, 10 * count) > 0
/* RANGE may be signed, and really large ranges will show up
as negative numbers. */
|| flag_pic
#endif
|| TREE_CODE (index_expr) == INTEGER_CST
- /* These will reduce to a constant. */
- || (TREE_CODE (index_expr) == CALL_EXPR
- && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
- && DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_NORMAL
- && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
|| (TREE_CODE (index_expr) == COMPOUND_EXPR
&& TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
{
generate the conversion. */
if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
- && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
- == CODE_FOR_nothing))
+ && ! have_insn_for (COMPARE, GET_MODE (index)))
{
enum machine_mode wider_mode;
for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
wider_mode = GET_MODE_WIDER_MODE (wider_mode))
- if (cmp_optab->handlers[(int) wider_mode].insn_code
- != CODE_FOR_nothing)
+ if (have_insn_for (COMPARE, wider_mode))
{
index = convert_to_mode (wider_mode, index, unsignedp);
break;
use_cost_table
= (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
&& estimate_case_costs (thiscase->data.case_stmt.case_list));
- balance_case_nodes (&thiscase->data.case_stmt.case_list,
- NULL_PTR);
+ balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
emit_case_nodes (index, thiscase->data.case_stmt.case_list,
default_label, index_type);
emit_jump_if_reachable (default_label);
}
else
{
- int win = 0;
-#ifdef HAVE_casesi
- if (HAVE_casesi)
- {
- enum machine_mode index_mode = SImode;
- int index_bits = GET_MODE_BITSIZE (index_mode);
- rtx op1, op2;
- enum machine_mode op_mode;
-
- /* Convert the index to SImode. */
- if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
- > GET_MODE_BITSIZE (index_mode))
- {
- enum machine_mode omode = TYPE_MODE (index_type);
- rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
-
- /* We must handle the endpoints in the original mode. */
- index_expr = build (MINUS_EXPR, index_type,
- index_expr, minval);
- minval = integer_zero_node;
- index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
- emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
- omode, 1, 0, default_label);
- /* Now we can safely truncate. */
- index = convert_to_mode (index_mode, index, 0);
- }
- else
- {
- if (TYPE_MODE (index_type) != index_mode)
- {
- index_expr = convert (type_for_size (index_bits, 0),
- index_expr);
- index_type = TREE_TYPE (index_expr);
- }
-
- index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
- }
- emit_queue ();
- index = protect_from_queue (index, 0);
- do_pending_stack_adjust ();
-
- op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
- if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
- (index, op_mode))
- index = copy_to_mode_reg (op_mode, index);
-
- op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
-
- op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
- if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
- (op1, op_mode))
- op1 = copy_to_mode_reg (op_mode, op1);
-
- op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
-
- op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
- if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
- (op2, op_mode))
- op2 = copy_to_mode_reg (op_mode, op2);
-
- emit_jump_insn (gen_casesi (index, op1, op2,
- table_label, default_label));
- win = 1;
- }
-#endif
-#ifdef HAVE_tablejump
- if (! win && HAVE_tablejump)
+ if (! try_casesi (index_type, index_expr, minval, range,
+ table_label, default_label))
{
index_type = thiscase->data.case_stmt.nominal_type;
- index_expr = fold (build (MINUS_EXPR, index_type,
- convert (index_type, index_expr),
- convert (index_type, minval)));
- index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
- emit_queue ();
- index = protect_from_queue (index, 0);
- do_pending_stack_adjust ();
-
- do_tablejump (index, TYPE_MODE (index_type),
- expand_expr (range, NULL_RTX, VOIDmode, 0),
- table_label, default_label);
- win = 1;
+ if (! try_tablejump (index_type, index_expr, minval, range,
+ table_label, default_label))
+ abort ();
}
-#endif
- if (! win)
- abort ();
-
+
/* Get table of labels to jump to, in order of case index. */
ncases = TREE_INT_CST_LOW (range) + 1;
#endif
}
- before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
- reorder_insns (before_case, get_last_insn (),
+ before_case = NEXT_INSN (before_case);
+ end = get_last_insn ();
+ squeeze_notes (&before_case, &end);
+ reorder_insns (before_case, end,
thiscase->data.case_stmt.start);
}
else
estimate_case_costs (node)
case_node_ptr node;
{
- tree min_ascii = build_int_2 (-1, -1);
+ tree min_ascii = integer_minus_one_node;
tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
case_node_ptr n;
int i;
/* If we haven't already made the cost table, make it now. Note that the
lower bound of the table is -1, not zero. */
- if (cost_table == NULL)
+ if (! cost_table_initialized)
{
- cost_table = cost_table_ + 1;
+ cost_table_initialized = 1;
for (i = 0; i < 128; i++)
{
if (ISALNUM (i))
- cost_table[i] = 16;
+ COST_TABLE (i) = 16;
else if (ISPUNCT (i))
- cost_table[i] = 8;
+ COST_TABLE (i) = 8;
else if (ISCNTRL (i))
- cost_table[i] = -1;
+ COST_TABLE (i) = -1;
}
- cost_table[' '] = 8;
- cost_table['\t'] = 4;
- cost_table['\0'] = 4;
- cost_table['\n'] = 2;
- cost_table['\f'] = 1;
- cost_table['\v'] = 1;
- cost_table['\b'] = 1;
+ COST_TABLE (' ') = 8;
+ COST_TABLE ('\t') = 4;
+ COST_TABLE ('\0') = 4;
+ COST_TABLE ('\n') = 2;
+ COST_TABLE ('\f') = 1;
+ COST_TABLE ('\v') = 1;
+ COST_TABLE ('\b') = 1;
}
/* See if all the case expressions look like text. It is text if the
for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
- if (cost_table[i] < 0)
+ if (COST_TABLE (i) < 0)
return 0;
}
{
ranges++;
if (use_cost_table)
- cost += cost_table[TREE_INT_CST_LOW (np->high)];
+ cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
}
if (use_cost_table)
- cost += cost_table[TREE_INT_CST_LOW (np->low)];
+ cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
i++;
np = np->right;
{
/* Skip nodes while their cost does not reach that amount. */
if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
- i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
- i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
+ i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
+ i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
if (i <= 0)
break;
npp = &(*npp)->right;
/* If INDEX has an unsigned type, we must make unsigned branches. */
int unsignedp = TREE_UNSIGNED (index_type);
enum machine_mode mode = GET_MODE (index);
+ enum machine_mode imode = TYPE_MODE (index_type);
/* See if our parents have already tested everything for us.
If they have, emit an unconditional jump for this node. */
/* Node is single valued. First see if the index expression matches
this node and then check our children, if any. */
- do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
+ do_jump_if_equal (index,
+ convert_modes (mode, imode,
+ expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
label_rtx (node->code_label), unsignedp);
if (node->right != 0 && node->left != 0)
if (node_is_bounded (node->right, index_type))
{
emit_cmp_and_jump_insns (index,
- expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ convert_modes
+ (mode, imode,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
GT, NULL_RTX, mode, unsignedp, 0,
label_rtx (node->right->code_label));
emit_case_nodes (index, node->left, default_label, index_type);
else if (node_is_bounded (node->left, index_type))
{
emit_cmp_and_jump_insns (index,
- expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ convert_modes
+ (mode, imode,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
LT, NULL_RTX, mode, unsignedp, 0,
label_rtx (node->left->code_label));
emit_case_nodes (index, node->right, default_label, index_type);
/* See if the value is on the right. */
emit_cmp_and_jump_insns (index,
- expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ convert_modes
+ (mode, imode,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
GT, NULL_RTX, mode, unsignedp, 0,
label_rtx (test_label));
if (!node_has_low_bound (node, index_type))
{
emit_cmp_and_jump_insns (index,
- expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ convert_modes
+ (mode, imode,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
LT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
since we haven't ruled out the numbers less than
this node's value. So handle node->right explicitly. */
do_jump_if_equal (index,
- expand_expr (node->right->low, NULL_RTX,
- VOIDmode, 0),
+ convert_modes
+ (mode, imode,
+ expand_expr (node->right->low, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
label_rtx (node->right->code_label), unsignedp);
}
else if (node->right == 0 && node->left != 0)
{
/* Just one subtree, on the left. */
-
-#if 0 /* The following code and comment were formerly part
- of the condition here, but they didn't work
- and I don't understand what the idea was. -- rms. */
- /* If our "most probable entry" is less probable
- than the default label, emit a jump to
- the default label using condition codes
- already lying around. With no right branch,
- a branch-greater-than will get us to the default
- label correctly. */
- if (use_cost_table
- && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
- ;
-#endif /* 0 */
if (node->left->left || node->left->right
|| !tree_int_cst_equal (node->left->low, node->left->high))
{
if (!node_has_high_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high,
- NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ convert_modes
+ (mode, imode,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
GT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
since we haven't ruled out the numbers less than
this node's value. So handle node->left explicitly. */
do_jump_if_equal (index,
- expand_expr (node->left->low, NULL_RTX,
- VOIDmode, 0),
+ convert_modes
+ (mode, imode,
+ expand_expr (node->left->low, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
label_rtx (node->left->code_label), unsignedp);
}
}
if (node_is_bounded (node->right, index_type))
/* Right hand node is fully bounded so we can eliminate any
testing and branch directly to the target code. */
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ convert_modes
+ (mode, imode,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
GT, NULL_RTX, mode, unsignedp, 0,
label_rtx (node->right->code_label));
else
test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
emit_cmp_and_jump_insns (index,
- expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ convert_modes
+ (mode, imode,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
GT, NULL_RTX, mode, unsignedp, 0,
label_rtx (test_label));
}
/* Value belongs to this node or to the left-hand subtree. */
- emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ convert_modes
+ (mode, imode,
+ expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
GE, NULL_RTX, mode, unsignedp, 0,
label_rtx (node->code_label));
if (!node_has_low_bound (node, index_type))
{
emit_cmp_and_jump_insns (index,
- expand_expr (node->low, NULL_RTX,
- VOIDmode, 0),
+ convert_modes
+ (mode, imode,
+ expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
LT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
/* Value belongs to this node or to the right-hand subtree. */
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ convert_modes
+ (mode, imode,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
LE, NULL_RTX, mode, unsignedp, 0,
label_rtx (node->code_label));
if (!node_has_high_bound (node, index_type))
{
emit_cmp_and_jump_insns (index,
- expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ convert_modes
+ (mode, imode,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
GT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
/* Value belongs to this node or to the left-hand subtree. */
emit_cmp_and_jump_insns (index,
- expand_expr (node->low, NULL_RTX,
- VOIDmode, 0),
+ convert_modes
+ (mode, imode,
+ expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
GE, NULL_RTX, mode, unsignedp, 0,
label_rtx (node->code_label));
/* Node has no children so we check low and high bounds to remove
redundant tests. Only one of the bounds can exist,
since otherwise this node is bounded--a case tested already. */
+ int high_bound = node_has_high_bound (node, index_type);
+ int low_bound = node_has_low_bound (node, index_type);
- if (!node_has_high_bound (node, index_type))
+ if (!high_bound && low_bound)
{
emit_cmp_and_jump_insns (index,
- expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ convert_modes
+ (mode, imode,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
GT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
- if (!node_has_low_bound (node, index_type))
+ else if (!low_bound && high_bound)
{
emit_cmp_and_jump_insns (index,
- expand_expr (node->low, NULL_RTX,
- VOIDmode, 0),
+ convert_modes
+ (mode, imode,
+ expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
+ unsignedp),
LT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
+ else if (!low_bound && !high_bound)
+ {
+ /* Widen LOW and HIGH to the same width as INDEX. */
+ tree type = type_for_mode (mode, unsignedp);
+ tree low = build1 (CONVERT_EXPR, type, node->low);
+ tree high = build1 (CONVERT_EXPR, type, node->high);
+ rtx low_rtx, new_index, new_bound;
+
+ /* Instead of doing two branches, emit one unsigned branch for
+ (index-low) > (high-low). */
+ low_rtx = expand_expr (low, NULL_RTX, mode, 0);
+ new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
+ NULL_RTX, unsignedp,
+ OPTAB_WIDEN);
+ new_bound = expand_expr (fold (build (MINUS_EXPR, type,
+ high, low)),
+ NULL_RTX, mode, 0);
+
+ emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
+ mode, 1, 0, default_label);
+ }
emit_jump (label_rtx (node->code_label));
}