/* Expands front end tree to back end RTL for GCC
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
- 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
+ Free Software Foundation, Inc.
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
/* This file handles the generation of rtl code from tree structure
above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
- It also creates the rtl expressions for parameters and auto variables
- and has full responsibility for allocating stack slots.
-
The functions whose names start with `expand_' are called by the
- parser to generate RTL instructions for various kinds of constructs.
-
- Some control and binding constructs require calling several such
- functions at different times. For example, a simple if-then
- is expanded by calling `expand_start_cond' (with the condition-expression
- as argument) before parsing the then-clause and calling `expand_end_cond'
- after parsing the then-clause. */
+ expander to generate RTL instructions for various kinds of constructs. */
#include "config.h"
#include "system.h"
#include "tm.h"
#include "rtl.h"
+#include "hard-reg-set.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
-#include "hard-reg-set.h"
-#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
is unsigned. */
#define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
\f
-/* Stack of control and binding constructs we are currently inside.
-
- These constructs begin when you call `expand_start_WHATEVER'
- and end when you call `expand_end_WHATEVER'. This stack records
- info about how the construct began that tells the end-function
- what to do. It also may provide information about the construct
- to alter the behavior of other constructs within the body.
- For example, they may affect the behavior of C `break' and `continue'.
-
- Each construct gets one `struct nesting' object.
- All of these objects are chained through the `all' field.
- `nesting_stack' points to the first object (innermost construct).
- The position of an entry on `nesting_stack' is in its `depth' field.
-
- Each type of construct has its own individual stack.
- For example, loops have `cond_stack'. Each object points to the
- next object of the same type through the `next' field.
-
- Some constructs are visible to `break' exit-statements and others
- are not. Which constructs are visible depends on the language.
- Therefore, the data structure allows each construct to be visible
- or not, according to the args given when the construct is started.
- The construct is visible if the `exit_label' field is non-null.
- In that case, the value should be a CODE_LABEL rtx. */
-
-struct nesting GTY(())
-{
- struct nesting *all;
- struct nesting *next;
- int depth;
- rtx exit_label;
- enum nesting_desc {
- COND_NESTING,
- BLOCK_NESTING,
- CASE_NESTING
- } desc;
- union nesting_u
- {
- /* For conds (if-then and if-then-else statements). */
- struct nesting_cond
- {
- /* Label for the end of the if construct.
- There is none if EXITFLAG was not set
- and no `else' has been seen yet. */
- rtx endif_label;
- /* Label for the end of this alternative.
- This may be the end of the if or the next else/elseif. */
- rtx next_label;
- } GTY ((tag ("COND_NESTING"))) cond;
- /* For variable binding contours. */
- struct nesting_block
- {
- /* Sequence number of this binding contour within the function,
- in order of entry. */
- int block_start_count;
- /* The NOTE that starts this contour.
- Used by expand_goto to check whether the destination
- is within each contour or not. */
- rtx first_insn;
- /* The saved target_temp_slot_level from our outer block.
- We may reset target_temp_slot_level to be the level of
- this block, if that is done, target_temp_slot_level
- reverts to the saved target_temp_slot_level at the very
- end of the block. */
- int block_target_temp_slot_level;
- } GTY ((tag ("BLOCK_NESTING"))) block;
- /* For switch (C) or case (Pascal) statements. */
- struct nesting_case
- {
- /* The insn after which the case dispatch should finally
- be emitted. Zero for a dummy. */
- rtx start;
- /* A list of case labels; it is first built as an AVL tree.
- During expand_end_case, this is converted to a list, and may be
- rearranged into a nearly balanced binary tree. */
- struct case_node *case_list;
- /* Label to jump to if no case matches. */
- tree default_label;
- /* The expression to be dispatched on. */
- tree index_expr;
- } GTY ((tag ("CASE_NESTING"))) case_stmt;
- } GTY ((desc ("%1.desc"))) data;
-};
-
-/* Allocate and return a new `struct nesting'. */
-
-#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))
-
-/* Pop the nesting stack element by element until we pop off
- the element which is at the top of STACK.
- Update all the other stacks, popping off elements from them
- as we pop them from nesting_stack. */
-
-#define POPSTACK(STACK) \
-do { struct nesting *target = STACK; \
- struct nesting *this; \
- do { this = nesting_stack; \
- if (cond_stack == this) \
- cond_stack = cond_stack->next; \
- if (block_stack == this) \
- block_stack = block_stack->next; \
- if (case_stack == this) \
- case_stack = case_stack->next; \
- nesting_depth = nesting_stack->depth - 1; \
- nesting_stack = this->all; } \
- while (this != target); } while (0)
-\f
-
-struct stmt_status GTY(())
-{
- /* Chain of all pending binding contours. */
- struct nesting * x_block_stack;
-
- /* If any new stacks are added here, add them to POPSTACKS too. */
-
- /* Chain of all pending conditional statements. */
- struct nesting * x_cond_stack;
-
- /* Chain of all pending case or switch statements. */
- struct nesting * x_case_stack;
-
- /* Separate chain including all of the above,
- chained through the `all' field. */
- struct nesting * x_nesting_stack;
-
- /* Number of entries on nesting_stack now. */
- int x_nesting_depth;
-
- /* Number of binding contours started so far in this function. */
- int x_block_start_count;
-
- /* Location of last line-number note, whether we actually
- emitted it or not. */
- location_t x_emit_locus;
-};
-
-#define block_stack (cfun->stmt->x_block_stack)
-#define cond_stack (cfun->stmt->x_cond_stack)
-#define case_stack (cfun->stmt->x_case_stack)
-#define nesting_stack (cfun->stmt->x_nesting_stack)
-#define nesting_depth (cfun->stmt->x_nesting_depth)
-#define current_block_start_count (cfun->stmt->x_block_start_count)
-#define emit_locus (cfun->stmt->x_emit_locus)
-
static int n_occurrences (int, const char *);
static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
static void expand_nl_goto_receiver (void);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (void);
-static enum br_predictor return_prediction (rtx);
-static rtx shift_return_value (rtx);
static void expand_value_return (rtx);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
-static bool same_case_target_p (rtx, rtx);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
-\f
-void
-init_stmt_for_function (void)
-{
- cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
-}
-\f
-/* Record the current file and line. Called from emit_line_note. */
-
-void
-set_file_and_line_for_stmt (location_t location)
-{
- /* If we're outputting an inline function, and we add a line note,
- there may be no CFUN->STMT information. So, there's no need to
- update it. */
- if (cfun->stmt)
- emit_locus = location;
-}
+static struct case_node *add_case_node (struct case_node *, tree,
+ tree, tree, tree);
-/* Emit a no-op instruction. */
-
-void
-emit_nop (void)
-{
- rtx last_insn;
-
- last_insn = get_last_insn ();
- if (!optimize
- && (LABEL_P (last_insn)
- || (NOTE_P (last_insn)
- && prev_real_insn (last_insn) == 0)))
- emit_insn (gen_nop ());
-}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
creating it if necessary. */
rtx
label_rtx (tree label)
{
- if (TREE_CODE (label) != LABEL_DECL)
- abort ();
+ gcc_assert (TREE_CODE (label) == LABEL_DECL);
if (!DECL_RTL_SET_P (label))
{
tree function = decl_function_context (label);
struct function *p;
- if (!function)
- abort ();
+ gcc_assert (function);
if (function != current_function_decl)
p = find_function_data (function);
if (FORCED_LABEL (label))
forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
-
+
if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
maybe_set_first_label_num (label_r);
}
/* Check for a nonlocal goto to a containing function. Should have
gotten translated to __builtin_nonlocal_goto. */
tree context = decl_function_context (label);
- if (context != 0 && context != current_function_decl)
- abort ();
+ gcc_assert (!context || context == current_function_decl);
#endif
emit_jump (label_rtx (label));
or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
insn is volatile; don't optimize it. */
-void
+static void
expand_asm (tree string, int vol)
{
rtx body;
if (TREE_CODE (string) == ADDR_EXPR)
string = TREE_OPERAND (string, 0);
- body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
+ body = gen_rtx_ASM_INPUT (VOIDmode,
+ ggc_strdup (TREE_STRING_POINTER (string)));
MEM_VOLATILE_P (body) = vol;
message. */
if (!p)
{
- error ("output operand constraint lacks `='");
+ error ("output operand constraint lacks %<=%>");
return false;
}
*is_inout = (*p == '+');
/* Canonicalize the output constraint so that it begins with `='. */
- if (p != constraint || is_inout)
+ if (p != constraint || *is_inout)
{
char *buf;
size_t c_len = strlen (constraint);
if (p != constraint)
- warning ("output constraint `%c' for operand %d is not at the beginning",
+ warning (0, "output constraint %qc for operand %d "
+ "is not at the beginning",
*p, operand_num);
/* Make a copy of the constraint. */
{
case '+':
case '=':
- error ("operand constraint contains incorrectly positioned '+' or '='");
+ error ("operand constraint contains incorrectly positioned "
+ "%<+%> or %<=%>");
return false;
case '%':
if (operand_num + 1 == ninputs + noutputs)
{
- error ("`%%' constraint used with last operand");
+ error ("%<%%%> constraint used with last operand");
return false;
}
break;
case '+': case '=': case '&':
if (constraint == orig_constraint)
{
- error ("input operand constraint contains `%c'", constraint[j]);
+ error ("input operand constraint contains %qc", constraint[j]);
return false;
}
break;
if (constraint == orig_constraint
&& input_num + 1 == ninputs - ninout)
{
- error ("`%%' constraint used with last operand");
+ error ("%<%%%> constraint used with last operand");
return false;
}
break;
default:
if (! ISALPHA (constraint[j]))
{
- error ("invalid punctuation `%c' in constraint", constraint[j]);
+ error ("invalid punctuation %qc in constraint", constraint[j]);
return false;
}
if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
}
if (saw_match && !*allows_reg)
- warning ("matching constraint does not allow a register");
+ warning (0, "matching constraint does not allow a register");
return true;
}
-/* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
- if it is an operand which must be passed in memory (i.e. an "m"
- constraint), false otherwise. */
+/* Return true iff there's an overlap between REGS and DECL, where DECL
+ can be an asm-declared register. */
bool
-asm_op_is_mem_input (tree input, tree expr)
+decl_overlaps_hard_reg_set_p (tree decl, const HARD_REG_SET regs)
{
- const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
- tree outputs = ASM_OUTPUTS (expr);
- int noutputs = list_length (outputs);
- const char **constraints
- = (const char **) alloca ((noutputs) * sizeof (const char *));
- int i = 0;
- bool allows_mem, allows_reg;
- tree t;
+ if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
+ && DECL_REGISTER (decl)
+ && REG_P (DECL_RTL (decl))
+ && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
+ {
+ rtx reg = DECL_RTL (decl);
+ unsigned int regno;
- /* Collect output constraints. */
- for (t = outputs; t ; t = TREE_CHAIN (t), i++)
- constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
+ for (regno = REGNO (reg);
+ regno < (REGNO (reg)
+ + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
+ regno++)
+ if (TEST_HARD_REG_BIT (regs, regno))
+ return true;
+ }
- /* We pass 0 for input_num, ninputs and ninout; they are only used for
- error checking which will be done at expand time. */
- parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
- &allows_mem, &allows_reg);
- return (!allows_reg && allows_mem);
+ return false;
}
+
/* Check for overlap between registers marked in CLOBBERED_REGS and
anything inappropriate in DECL. Emit error and return TRUE for error,
FALSE for ok. */
{
/* Conflicts between asm-declared register variables and the clobber
list are not allowed. */
- if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
- && DECL_REGISTER (decl)
- && REG_P (DECL_RTL (decl))
- && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
+ if (decl_overlaps_hard_reg_set_p (decl, clobbered_regs))
{
- rtx reg = DECL_RTL (decl);
- unsigned int regno;
+ error ("asm-specifier for variable %qs conflicts with asm clobber list",
+ IDENTIFIER_POINTER (DECL_NAME (decl)));
- for (regno = REGNO (reg);
- regno < (REGNO (reg)
- + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
- regno++)
- if (TEST_HARD_REG_BIT (clobbered_regs, regno))
- {
- error ("asm-specifier for variable `%s' conflicts with asm clobber list",
- IDENTIFIER_POINTER (DECL_NAME (decl)));
-
- /* Reset registerness to stop multiple errors emitted for a
- single variable. */
- DECL_REGISTER (decl) = 0;
- return true;
- }
+ /* Reset registerness to stop multiple errors emitted for a single
+ variable. */
+ DECL_REGISTER (decl) = 0;
+ return true;
}
+
return false;
}
VOL nonzero means the insn is volatile; don't optimize it. */
-void
+static void
expand_asm_operands (tree string, tree outputs, tree inputs,
tree clobbers, int vol, location_t locus)
{
Case in point is when the i386 backend moved from cc0 to a hard reg --
maintaining source-level compatibility means automatically clobbering
the flags register. */
- clobbers = targetm.md_asm_clobbers (clobbers);
+ clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
/* Count the number of meaningful clobbered registers, ignoring what
we would ignore later. */
if (i >= 0 || i == -4)
++nclobbers;
else if (i == -2)
- error ("unknown register name `%s' in `asm'", regname);
+ error ("unknown register name %qs in %<asm%>", regname);
/* Mark clobbered registers. */
if (i >= 0)
{
- /* Clobbering the PIC register is an error */
+ /* Clobbering the PIC register is an error. */
if (i == (int) PIC_OFFSET_TABLE_REGNUM)
{
- error ("PIC register `%s' clobbered in `asm'", regname);
+ error ("PIC register %qs clobbered in %<asm%>", regname);
return;
}
ninputs += ninout;
if (ninputs + noutputs > MAX_RECOG_OPERANDS)
{
- error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
+ error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
return;
}
bool allows_reg;
bool allows_mem;
rtx op;
+ bool ok;
- if (!parse_output_constraint (&constraints[i], i, ninputs,
+ ok = parse_output_constraint (&constraints[i], i, ninputs,
noutputs, &allows_mem, &allows_reg,
- &is_inout))
- abort ();
+ &is_inout);
+ gcc_assert (ok);
/* If an output operand is not a decl or indirect ref and our constraint
allows a register, make a temporary to act as an intermediate.
body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
: GET_MODE (output_rtx[0])),
- TREE_STRING_POINTER (string),
+ ggc_strdup (TREE_STRING_POINTER (string)),
empty_string, 0, argvec, constraintvec,
locus);
const char *constraint;
tree val, type;
rtx op;
+ bool ok;
constraint = constraints[i + noutputs];
- if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
- constraints, &allows_mem, &allows_reg))
- abort ();
+ ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
+ constraints, &allows_mem, &allows_reg);
+ gcc_assert (ok);
generating_concat_p = 0;
if (asm_operand_ok (op, constraint) <= 0)
{
- if (allows_reg)
+ if (allows_reg && TYPE_MODE (type) != BLKmode)
op = force_reg (TYPE_MODE (type), op);
else if (!allows_mem)
- warning ("asm operand %d probably doesn't match constraints",
+ warning (0, "asm operand %d probably doesn%'t match constraints",
i + noutputs);
else if (MEM_P (op))
{
}
else
{
- warning ("use of memory input without lvalue in "
+ warning (0, "use of memory input without lvalue in "
"asm operand %d is deprecated", i + noutputs);
if (CONSTANT_P (op))
ASM_OPERANDS_INPUT (body, i) = op;
ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
- = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
+ = gen_rtx_ASM_INPUT (TYPE_MODE (type),
+ ggc_strdup (constraints[i + noutputs]));
if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
clobber_conflict_found = 1;
if (noutputs == 1 && nclobbers == 0)
{
- ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
+ ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
}
output_rtx[i],
gen_rtx_ASM_OPERANDS
(GET_MODE (output_rtx[i]),
- TREE_STRING_POINTER (string),
- constraints[i], i, argvec, constraintvec,
- locus));
+ ggc_strdup (TREE_STRING_POINTER (string)),
+ ggc_strdup (constraints[i]),
+ i, argvec, constraintvec, locus));
MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
}
{
if (o[i] != TREE_VALUE (tail))
{
- expand_assignment (o[i], TREE_VALUE (tail), 0);
+ expand_assignment (o[i], TREE_VALUE (tail));
free_temp_slots ();
/* Restore the original value so that it's correct the next
if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
{
- error ("too many alternatives in `asm'");
+ error ("too many alternatives in %<asm%>");
return false;
}
if (n_occurrences (',', constraint) != nalternatives)
{
- error ("operand constraints for `asm' differ in number of alternatives");
+ error ("operand constraints for %<asm%> differ "
+ "in number of alternatives");
return false;
}
return true;
failure:
- error ("duplicate asm operand name '%s'",
+ error ("duplicate asm operand name %qs",
TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
return false;
}
than 999 operands. */
buffer = xstrdup (TREE_STRING_POINTER (string));
p = buffer + (c - TREE_STRING_POINTER (string));
-
+
while ((p = strchr (p, '%')) != NULL)
{
if (p[1] == '[')
}
*q = '\0';
- error ("undefined named operand '%s'", p + 1);
+ error ("undefined named operand %qs", p + 1);
op = 0;
found:
p = strchr (p, '\0');
/* Verify the no extra buffer space assumption. */
- if (p > q)
- abort ();
+ gcc_assert (p <= q);
/* Shift the rest of the buffer down to fill the gap. */
memmove (p, q + 1, strlen (q + 1) + 1);
}
/* Warn if EXP contains any computations whose results are not used.
- Return 1 if a warning is printed; 0 otherwise. LOCUS is the
+ Return 1 if a warning is printed; 0 otherwise. LOCUS is the
(potential) location of the expression. */
int
default:
/* Referencing a volatile value is a side effect, so don't warn. */
- if ((DECL_P (exp)
- || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
+ if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
&& TREE_THIS_VOLATILE (exp))
return 0;
/* If this is an expression which has no operands, there is no value
to be unused. There are no such language-independent codes,
but front ends may define such. */
- if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
- && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
+ if (EXPRESSION_CLASS_P (exp) && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
return 0;
maybe_warn:
if (TREE_SIDE_EFFECTS (exp))
return 0;
- warning ("%Hvalue computed is not used", &locus);
+ warning (0, "%Hvalue computed is not used", &locus);
return 1;
}
}
-\f
-/* Generate RTL for the start of an if-then. COND is the expression
- whose truth should be tested.
-
- If EXITFLAG is nonzero, this conditional is visible to
- `exit_something'. */
-
-void
-expand_start_cond (tree cond, int exitflag)
-{
- struct nesting *thiscond = ALLOC_NESTING ();
-
- /* Make an entry on cond_stack for the cond we are entering. */
-
- thiscond->desc = COND_NESTING;
- thiscond->next = cond_stack;
- thiscond->all = nesting_stack;
- thiscond->depth = ++nesting_depth;
- thiscond->data.cond.next_label = gen_label_rtx ();
- /* Before we encounter an `else', we don't need a separate exit label
- unless there are supposed to be exit statements
- to exit this conditional. */
- thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
- thiscond->data.cond.endif_label = thiscond->exit_label;
- cond_stack = thiscond;
- nesting_stack = thiscond;
-
- do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
-}
-
-/* Generate RTL between then-clause and the elseif-clause
- of an if-then-elseif-.... */
-
-void
-expand_start_elseif (tree cond)
-{
- if (cond_stack->data.cond.endif_label == 0)
- cond_stack->data.cond.endif_label = gen_label_rtx ();
- emit_jump (cond_stack->data.cond.endif_label);
- emit_label (cond_stack->data.cond.next_label);
- cond_stack->data.cond.next_label = gen_label_rtx ();
- do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
-}
-
-/* Generate RTL between the then-clause and the else-clause
- of an if-then-else. */
-
-void
-expand_start_else (void)
-{
- if (cond_stack->data.cond.endif_label == 0)
- cond_stack->data.cond.endif_label = gen_label_rtx ();
-
- emit_jump (cond_stack->data.cond.endif_label);
- emit_label (cond_stack->data.cond.next_label);
- cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
-}
-
-/* After calling expand_start_else, turn this "else" into an "else if"
- by providing another condition. */
-
-void
-expand_elseif (tree cond)
-{
- cond_stack->data.cond.next_label = gen_label_rtx ();
- do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
-}
-
-/* Generate RTL for the end of an if-then.
- Pop the record for it off of cond_stack. */
-
-void
-expand_end_cond (void)
-{
- struct nesting *thiscond = cond_stack;
-
- do_pending_stack_adjust ();
- if (thiscond->data.cond.next_label)
- emit_label (thiscond->data.cond.next_label);
- if (thiscond->data.cond.endif_label)
- emit_label (thiscond->data.cond.endif_label);
-
- POPSTACK (cond_stack);
-}
-\f
-/* Return nonzero if we should preserve sub-expressions as separate
- pseudos. We never do so if we aren't optimizing. We always do so
- if -fexpensive-optimizations. */
-
-int
-preserve_subexpressions_p (void)
-{
- if (flag_expensive_optimizations)
- return 1;
-
- if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
- return 0;
-
- return 1;
-}
\f
/* Generate RTL to return from the current function, with no value.
emit_jump (end_label);
}
-/* Try to guess whether the value of return means error code. */
-static enum br_predictor
-return_prediction (rtx val)
-{
- /* Different heuristics for pointers and scalars. */
- if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
- {
- /* NULL is usually not returned. */
- if (val == const0_rtx)
- return PRED_NULL_RETURN;
- }
- else
- {
- /* Negative return values are often used to indicate
- errors. */
- if (GET_CODE (val) == CONST_INT
- && INTVAL (val) < 0)
- return PRED_NEGATIVE_RETURN;
- /* Constant return values are also usually erors,
- zero/one often mean booleans so exclude them from the
- heuristics. */
- if (CONSTANT_P (val)
- && (val != const0_rtx && val != const1_rtx))
- return PRED_CONST_RETURN;
- }
- return PRED_NO_PREDICTION;
-}
-
-
-/* If the current function returns values in the most significant part
- of a register, shift return value VAL appropriately. The mode of
- the function's return type is known not to be BLKmode. */
-
-static rtx
-shift_return_value (rtx val)
-{
- tree type;
-
- type = TREE_TYPE (DECL_RESULT (current_function_decl));
- if (targetm.calls.return_in_msb (type))
- {
- rtx target;
- HOST_WIDE_INT shift;
-
- target = DECL_RTL (DECL_RESULT (current_function_decl));
- shift = (GET_MODE_BITSIZE (GET_MODE (target))
- - BITS_PER_UNIT * int_size_in_bytes (type));
- if (shift > 0)
- val = expand_shift (LSHIFT_EXPR, GET_MODE (target),
- gen_lowpart (GET_MODE (target), val),
- build_int_2 (shift, 0), target, 1);
- }
- return val;
-}
-
-
/* Generate RTL to return from the current function, with value VAL. */
static void
expand_value_return (rtx val)
{
- rtx return_reg;
- enum br_predictor pred;
-
- if (flag_guess_branch_prob
- && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
- {
- /* Emit information for branch prediction. */
- rtx note;
-
- note = emit_note (NOTE_INSN_PREDICTION);
-
- NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
-
- }
-
- return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
-
/* Copy the value to the return location
unless it's already there. */
+ rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
if (return_reg != val)
{
tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
static void
expand_null_return_1 (void)
{
- rtx end_label;
-
clear_pending_stack_adjust ();
do_pending_stack_adjust ();
-
- end_label = return_label;
- if (end_label == 0)
- end_label = return_label = gen_label_rtx ();
- emit_jump (end_label);
+ emit_jump (return_label);
}
\f
/* Generate RTL to evaluate the expression RETVAL and return it
expand_null_return ();
return;
}
- else if (TREE_CODE (retval) == RESULT_DECL)
- retval_rhs = retval;
else if ((TREE_CODE (retval) == MODIFY_EXPR
|| TREE_CODE (retval) == INIT_EXPR)
&& TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
+ /* If we are returning the RESULT_DECL, then the value has already
+ been stored into it, so we don't have to do anything special. */
+ if (TREE_CODE (retval_rhs) == RESULT_DECL)
+ expand_value_return (result_rtl);
+
/* If the result is an aggregate that is being returned in one (or more)
registers, load the registers here. The compiler currently can't handle
copying a BLKmode value into registers. We could put this code in a
call/return), but until this feature is generally usable it is kept here
(and in expand_call). */
- if (retval_rhs != 0
- && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
- && REG_P (result_rtl))
+ else if (retval_rhs != 0
+ && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
+ && REG_P (result_rtl))
{
int i;
unsigned HOST_WIDE_INT bitpos, xbitpos;
if (GET_MODE_SIZE (tmpmode) >= bytes)
break;
- /* No suitable mode found. */
- if (tmpmode == VOIDmode)
- abort ();
+ /* A suitable mode should have been found. */
+ gcc_assert (tmpmode != VOIDmode);
PUT_MODE (result_rtl, tmpmode);
}
val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
val = force_not_mem (val);
/* Return the calculated value. */
- expand_value_return (shift_return_value (val));
+ expand_value_return (val);
}
else
{
}
}
\f
-/* Generate the RTL code for entering a binding contour.
- The variables are declared one by one, by calls to `expand_decl'.
-
- FLAGS is a bitwise or of the following flags:
-
- 1 - Nonzero if this construct should be visible to
- `exit_something'.
-
- 2 - Nonzero if this contour does not require a
- NOTE_INSN_BLOCK_BEG note. Virtually all calls from
- language-independent code should set this flag because they
- will not create corresponding BLOCK nodes. (There should be
- a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
- and BLOCKs.) If this flag is set, MARK_ENDS should be zero
- when expand_end_bindings is called.
-
- If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
- optionally be supplied. If so, it becomes the NOTE_BLOCK for the
- note. */
-
-void
-expand_start_bindings_and_block (int flags, tree block)
-{
- struct nesting *thisblock = ALLOC_NESTING ();
- rtx note;
- int exit_flag = ((flags & 1) != 0);
- int block_flag = ((flags & 2) == 0);
-
- /* If a BLOCK is supplied, then the caller should be requesting a
- NOTE_INSN_BLOCK_BEG note. */
- if (!block_flag && block)
- abort ();
-
- /* Create a note to mark the beginning of the block. */
- note = emit_note (NOTE_INSN_DELETED);
-
- /* Make an entry on block_stack for the block we are entering. */
-
- thisblock->desc = BLOCK_NESTING;
- thisblock->next = block_stack;
- thisblock->all = nesting_stack;
- thisblock->depth = ++nesting_depth;
- thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
-
- /* When we insert instructions after the last unconditional cleanup,
- we don't adjust last_insn. That means that a later add_insn will
- clobber the instructions we've just added. The easiest way to
- fix this is to just insert another instruction here, so that the
- instructions inserted after the last unconditional cleanup are
- never the last instruction. */
- emit_note (NOTE_INSN_DELETED);
-
- thisblock->data.block.first_insn = note;
- thisblock->data.block.block_start_count = ++current_block_start_count;
- thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
- block_stack = thisblock;
- nesting_stack = thisblock;
-
- /* Make a new level for allocating stack slots. */
- push_temp_slots ();
-}
-
-/* Specify the scope of temporaries created by TARGET_EXPRs. Similar
- to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
- expand_expr are made. After we end the region, we know that all
- space for all temporaries that were created by TARGET_EXPRs will be
- destroyed and their space freed for reuse. */
-
-void
-expand_start_target_temps (void)
-{
- /* This is so that even if the result is preserved, the space
- allocated will be freed, as we know that it is no longer in use. */
- push_temp_slots ();
-
- /* Start a new binding layer that will keep track of all cleanup
- actions to be performed. */
- expand_start_bindings (2);
-
- target_temp_slot_level = temp_slot_level;
-}
-
-void
-expand_end_target_temps (void)
-{
- expand_end_bindings (NULL_TREE, 0, 0);
-
- /* This is so that even if the result is preserved, the space
- allocated will be freed, as we know that it is no longer in use. */
- pop_temp_slots ();
-}
-
/* Given a pointer to a BLOCK node return nonzero if (and only if) the node
in question represents the outermost pair of curly braces (i.e. the "body
block") of a function or method.
return 0;
}
-/* Return an opaque pointer to the current nesting level, so frontend code
- can check its own sanity. */
-
-struct nesting *
-current_nesting_level (void)
-{
- return cfun ? block_stack : 0;
-}
-
/* Emit code to restore vital registers at the beginning of a nonlocal goto
handler. */
static void
insn. */
emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
-
-/* Warn about any unused VARS (which may contain nodes other than
- VAR_DECLs, but such nodes are ignored). The nodes are connected
- via the TREE_CHAIN field. */
-
-void
-warn_about_unused_variables (tree vars)
-{
- tree decl;
-
- if (warn_unused_variable)
- for (decl = vars; decl; decl = TREE_CHAIN (decl))
- if (TREE_CODE (decl) == VAR_DECL
- && ! TREE_USED (decl)
- && ! DECL_IN_SYSTEM_HEADER (decl)
- && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
- warning ("%Junused variable '%D'", decl, decl);
-}
-
-/* Generate RTL code to terminate a binding contour.
-
- VARS is the chain of VAR_DECL nodes for the variables bound in this
- contour. There may actually be other nodes in this chain, but any
- nodes other than VAR_DECLS are ignored.
-
- MARK_ENDS is nonzero if we should put a note at the beginning
- and end of this binding contour.
-
- DONT_JUMP_IN is positive if it is not valid to jump into this contour,
- zero if we can jump into this contour only if it does not have a saved
- stack level, and negative if we are not to check for invalid use of
- labels (because the front end does that). */
-
-void
-expand_end_bindings (tree vars, int mark_ends ATTRIBUTE_UNUSED,
- int dont_jump_in ATTRIBUTE_UNUSED)
-{
- struct nesting *thisblock = block_stack;
-
- /* If any of the variables in this scope were not used, warn the
- user. */
- warn_about_unused_variables (vars);
-
- if (thisblock->exit_label)
- {
- do_pending_stack_adjust ();
- emit_label (thisblock->exit_label);
- }
-
- /* Mark the beginning and end of the scope if requested. */
-
- /* Get rid of the beginning-mark if we don't make an end-mark. */
- NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
-
- /* Restore the temporary level of TARGET_EXPRs. */
- target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
-
- /* Restore block_stack level for containing block. */
-
- POPSTACK (block_stack);
-
- /* Pop the stack slot nesting and free any slots at this level. */
- pop_temp_slots ();
-}
\f
/* Generate RTL for the automatic variable declaration DECL.
(Other kinds of declarations are simply ignored if seen here.) */
mark_reg_pointer (DECL_RTL (decl),
TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
}
-
- maybe_set_unchanging (DECL_RTL (decl), decl);
}
else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
to the proper address. */
if (DECL_RTL_SET_P (decl))
{
- if (!MEM_P (DECL_RTL (decl))
- || !REG_P (XEXP (DECL_RTL (decl), 0)))
- abort ();
+ gcc_assert (MEM_P (DECL_RTL (decl)));
+ gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
oldaddr = XEXP (DECL_RTL (decl), 0);
}
}
}
\f
-/* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
-void
-expand_stack_alloc (tree alloc, tree t_size)
-{
- rtx address, dest, size;
- tree var, type;
-
- if (TREE_CODE (alloc) != ADDR_EXPR)
- abort ();
- var = TREE_OPERAND (alloc, 0);
- if (TREE_CODE (var) != VAR_DECL)
- abort ();
-
- type = TREE_TYPE (var);
-
- /* Compute the variable's size, in bytes. */
- size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
- free_temp_slots ();
-
- /* Allocate space on the stack for the variable. */
- address = XEXP (DECL_RTL (var), 0);
- dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
- if (dest != address)
- emit_move_insn (address, dest);
-
- /* Indicate the alignment we actually gave this variable. */
-#ifdef STACK_BOUNDARY
- DECL_ALIGN (var) = STACK_BOUNDARY;
-#else
- DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
-#endif
- DECL_USER_ALIGN (var) = 0;
-}
-
/* Emit code to save the current value of stack. */
rtx
expand_stack_save (void)
emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
}
\f
-/* Emit code to perform the initialization of a declaration DECL. */
-
-void
-expand_decl_init (tree decl)
-{
- int was_used = TREE_USED (decl);
-
- /* If this is a CONST_DECL, we don't have to generate any code. Likewise
- for static decls. */
- if (TREE_CODE (decl) == CONST_DECL
- || TREE_STATIC (decl))
- return;
-
- /* Compute and store the initial value now. */
-
- push_temp_slots ();
-
- if (DECL_INITIAL (decl) == error_mark_node)
- {
- enum tree_code code = TREE_CODE (TREE_TYPE (decl));
-
- if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
- || code == POINTER_TYPE || code == REFERENCE_TYPE)
- expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
- 0);
- }
- else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
- {
- emit_line_note (DECL_SOURCE_LOCATION (decl));
- expand_assignment (decl, DECL_INITIAL (decl), 0);
- }
-
- /* Don't let the initialization count as "using" the variable. */
- TREE_USED (decl) = was_used;
-
- /* Free any temporaries we made while initializing the decl. */
- preserve_temp_slots (NULL_RTX);
- free_temp_slots ();
- pop_temp_slots ();
-}
-
-\f
/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
DECL_ELTS is the list of elements that belong to DECL's type.
In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
{
tree decl_elt = TREE_VALUE (t);
enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
+ rtx decl_rtl;
/* If any of the elements are addressable, so is the entire
union. */
DECL_MODE (decl_elt) = mode
= mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
- /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
- instead create a new MEM rtx with the proper mode. */
- if (MEM_P (x))
- {
- if (mode == GET_MODE (x))
- SET_DECL_RTL (decl_elt, x);
- else
- SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
- }
- else if (REG_P (x))
+ if (mode == GET_MODE (x))
+ decl_rtl = x;
+ else if (MEM_P (x))
+ /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
+ instead create a new MEM rtx with the proper mode. */
+ decl_rtl = adjust_address_nv (x, mode, 0);
+ else
{
- if (mode == GET_MODE (x))
- SET_DECL_RTL (decl_elt, x);
- else
- SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
+ gcc_assert (REG_P (x));
+ decl_rtl = gen_lowpart_SUBREG (mode, x);
}
- else
- abort ();
+ SET_DECL_RTL (decl_elt, decl_rtl);
}
}
\f
-/* Enter a case (Pascal) or switch (C) statement.
- Push a block onto case_stack and nesting_stack
- to accumulate the case-labels that are seen
- and to record the labels generated for the statement.
-
- EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
- Otherwise, this construct is transparent for `exit_something'.
-
- EXPR is the index-expression to be dispatched on.
- TYPE is its nominal type. We could simply convert EXPR to this type,
- but instead we take short cuts. */
-
-void
-expand_start_case (tree index_expr)
-{
- struct nesting *thiscase = ALLOC_NESTING ();
-
- /* Make an entry on case_stack for the case we are entering. */
-
- thiscase->desc = CASE_NESTING;
- thiscase->next = case_stack;
- thiscase->all = nesting_stack;
- thiscase->depth = ++nesting_depth;
- thiscase->exit_label = 0;
- thiscase->data.case_stmt.case_list = 0;
- thiscase->data.case_stmt.index_expr = index_expr;
- thiscase->data.case_stmt.default_label = 0;
- case_stack = thiscase;
- nesting_stack = thiscase;
-
- do_pending_stack_adjust ();
-
- /* Make sure case_stmt.start points to something that won't
- need any transformation before expand_end_case. */
- if (!NOTE_P (get_last_insn ()))
- emit_note (NOTE_INSN_DELETED);
-
- thiscase->data.case_stmt.start = get_last_insn ();
-}
-
-/* Do the insertion of a case label into
- case_stack->data.case_stmt.case_list. The labels are fed to us
- in descending order from the sorted vector of case labels used
+/* Do the insertion of a case label into case_list. The labels are
+ fed to us in descending order from the sorted vector of case labels used
in the tree part of the middle end. So the list we construct is
- sorted in ascending order. */
+ sorted in ascending order. The bounds on the case range, LOW and HIGH,
+ are converted to the case's index type TYPE. */
-void
-add_case_node (tree low, tree high, tree label)
+static struct case_node *
+add_case_node (struct case_node *head, tree type, tree low, tree high,
+ tree label)
{
+ tree min_value, max_value;
struct case_node *r;
+ gcc_assert (TREE_CODE (low) == INTEGER_CST);
+ gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);
+
+ min_value = TYPE_MIN_VALUE (type);
+ max_value = TYPE_MAX_VALUE (type);
+
/* If there's no HIGH value, then this is not a case range; it's
just a simple case label. But that's just a degenerate case
range.
If the bounds are equal, turn this into the one-value case. */
if (!high || tree_int_cst_equal (low, high))
- high = low;
-
- /* Handle default labels specially. */
- if (!high && !low)
{
-#ifdef ENABLE_CHECKING
- if (case_stack->data.case_stmt.default_label != 0)
- abort ();
-#endif
- case_stack->data.case_stmt.default_label = label;
- return;
+ /* If the simple case value is unreachable, ignore it. */
+ if ((TREE_CODE (min_value) == INTEGER_CST
+ && tree_int_cst_compare (low, min_value) < 0)
+ || (TREE_CODE (max_value) == INTEGER_CST
+ && tree_int_cst_compare (low, max_value) > 0))
+ return head;
+ low = fold_convert (type, low);
+ high = low;
+ }
+ else
+ {
+ /* If the entire case range is unreachable, ignore it. */
+ if ((TREE_CODE (min_value) == INTEGER_CST
+ && tree_int_cst_compare (high, min_value) < 0)
+ || (TREE_CODE (max_value) == INTEGER_CST
+ && tree_int_cst_compare (low, max_value) > 0))
+ return head;
+
+ /* If the lower bound is less than the index type's minimum
+ value, truncate the range bounds. */
+ if (TREE_CODE (min_value) == INTEGER_CST
+ && tree_int_cst_compare (low, min_value) < 0)
+ low = min_value;
+ low = fold_convert (type, low);
+
+ /* If the upper bound is greater than the index type's maximum
+ value, truncate the range bounds. */
+ if (TREE_CODE (max_value) == INTEGER_CST
+ && tree_int_cst_compare (high, max_value) > 0)
+ high = max_value;
+ high = fold_convert (type, high);
}
+
/* Add this label to the chain. */
r = ggc_alloc (sizeof (struct case_node));
r->low = low;
r->high = high;
r->code_label = label;
r->parent = r->left = NULL;
- r->right = case_stack->data.case_stmt.case_list;
- case_stack->data.case_stmt.case_list = r;
+ r->right = head;
+ return r;
}
\f
/* Maximum number of case bit tests. */
{
label = label_rtx (n->code_label);
for (i = 0; i < count; i++)
- if (same_case_target_p (label, test[i].label))
+ if (label == test[i].label)
break;
if (i == count)
{
- if (count >= MAX_CASE_BIT_TESTS)
- abort ();
- test[i].hi = 0;
- test[i].lo = 0;
+ gcc_assert (count < MAX_CASE_BIT_TESTS);
+ test[i].hi = 0;
+ test[i].lo = 0;
test[i].label = label;
test[i].bits = 1;
count++;
else
test[i].bits++;
- lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
- n->low, minval)), 1);
- hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
- n->high, minval)), 1);
+ lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
+ n->low, minval), 1);
+ hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
+ n->high, minval), 1);
for (j = lo; j <= hi; j++)
if (j >= HOST_BITS_PER_WIDE_INT)
test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
qsort (test, count, sizeof(*test), case_bit_test_cmp);
- index_expr = fold (build (MINUS_EXPR, index_type,
- convert (index_type, index_expr),
- convert (index_type, minval)));
+ index_expr = fold_build2 (MINUS_EXPR, index_type,
+ fold_convert (index_type, index_expr),
+ fold_convert (index_type, minval));
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
do_pending_stack_adjust ();
Generate the code to test it and jump to the right place. */
void
-expand_end_case_type (tree orig_index, tree orig_type)
+expand_case (tree exp)
{
tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
rtx default_label = 0;
- struct case_node *n, *m;
+ struct case_node *n;
unsigned int count, uniq;
rtx index;
rtx table_label;
int ncases;
rtx *labelvec;
- int i;
+ int i, fail;
rtx before_case, end, lab;
- struct nesting *thiscase = case_stack;
- tree index_expr, index_type;
- bool exit_done = false;
- int unsignedp;
- /* Don't crash due to previous errors. */
- if (thiscase == NULL)
- return;
+ tree vec = SWITCH_LABELS (exp);
+ tree orig_type = TREE_TYPE (exp);
+ tree index_expr = SWITCH_COND (exp);
+ tree index_type = TREE_TYPE (index_expr);
+ int unsignedp = TYPE_UNSIGNED (index_type);
+
+ /* The insn after which the case dispatch should finally
+ be emitted. Zero for a dummy. */
+ rtx start;
+
+ /* A list of case labels; it is first built as a list and it may then
+ be rearranged into a nearly balanced binary tree. */
+ struct case_node *case_list = 0;
- index_expr = thiscase->data.case_stmt.index_expr;
- index_type = TREE_TYPE (index_expr);
- unsignedp = TYPE_UNSIGNED (index_type);
- if (orig_type == NULL)
- orig_type = TREE_TYPE (orig_index);
+ /* Label to jump to if no case matches. */
+ tree default_label_decl;
+
+ /* The switch body is lowered in gimplify.c; we should never have
+ switches with a non-NULL SWITCH_BODY here. */
+ gcc_assert (!SWITCH_BODY (exp));
+ gcc_assert (SWITCH_LABELS (exp));
do_pending_stack_adjust ();
/* An ERROR_MARK occurs for various reasons including invalid data type. */
if (index_type != error_mark_node)
{
- /* If we don't have a default-label, create one here,
- after the body of the switch. */
- if (thiscase->data.case_stmt.default_label == 0)
+ tree elt;
+ bitmap label_bitmap;
+
+ /* cleanup_tree_cfg removes all SWITCH_EXPRs whose index
+ expressions are INTEGER_CSTs. */
+ gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);
+
+ /* The default case is at the end of TREE_VEC. */
+ elt = TREE_VEC_ELT (vec, TREE_VEC_LENGTH (vec) - 1);
+ gcc_assert (!CASE_HIGH (elt));
+ gcc_assert (!CASE_LOW (elt));
+ default_label_decl = CASE_LABEL (elt);
+
+ for (i = TREE_VEC_LENGTH (vec) - 1; --i >= 0; )
{
- thiscase->data.case_stmt.default_label
- = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
- /* Share the exit label if possible. */
- if (thiscase->exit_label)
- {
- SET_DECL_RTL (thiscase->data.case_stmt.default_label,
- thiscase->exit_label);
- exit_done = true;
- }
- expand_label (thiscase->data.case_stmt.default_label);
+ elt = TREE_VEC_ELT (vec, i);
+ gcc_assert (CASE_LOW (elt));
+ case_list = add_case_node (case_list, index_type,
+ CASE_LOW (elt), CASE_HIGH (elt),
+ CASE_LABEL (elt));
+ }
+
+
+ /* Make sure start points to something that won't need any
+ transformation before the end of this function. */
+ start = get_last_insn ();
+ if (! NOTE_P (start))
+ {
+ emit_note (NOTE_INSN_DELETED);
+ start = get_last_insn ();
}
- default_label = label_rtx (thiscase->data.case_stmt.default_label);
+
+ default_label = label_rtx (default_label_decl);
before_case = get_last_insn ();
- /* Get upper and lower bounds of case values.
- Also convert all the case values to the index expr's data type. */
+ /* Get upper and lower bounds of case values. */
uniq = 0;
count = 0;
- for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
+ label_bitmap = BITMAP_ALLOC (NULL);
+ for (n = case_list; n; n = n->right)
{
- /* Check low and high label values are integers. */
- if (TREE_CODE (n->low) != INTEGER_CST)
- abort ();
- if (TREE_CODE (n->high) != INTEGER_CST)
- abort ();
-
- n->low = convert (index_type, n->low);
- n->high = convert (index_type, n->high);
-
/* Count the elements and track the largest and smallest
of them (treating them as signed even if they are not). */
if (count++ == 0)
if (! tree_int_cst_equal (n->low, n->high))
count++;
- /* Count the number of unique case node targets. */
- uniq++;
+ /* If we have not seen this label yet, then increase the
+ number of unique case node targets seen. */
lab = label_rtx (n->code_label);
- for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
- if (same_case_target_p (label_rtx (m->code_label), lab))
- {
- uniq--;
- break;
- }
+ if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
+ {
+ bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
+ uniq++;
+ }
}
- /* Compute span of values. */
- if (count != 0)
- range = fold (build (MINUS_EXPR, index_type, maxval, minval));
+ BITMAP_FREE (label_bitmap);
+ /* cleanup_tree_cfg removes any SWITCH_EXPR with a single
+ destination, such as one with a default case only. However,
+ it doesn't remove cases that are out of range for the switch
+ type, so we may still get a zero here. */
if (count == 0)
{
- expand_expr (index_expr, const0_rtx, VOIDmode, 0);
emit_jump (default_label);
+ return;
}
+ /* Compute span of values. */
+ range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);
+
/* Try implementing this switch statement by a short sequence of
bit-wise comparisons. However, we let the binary-tree case
below handle constant index expressions. */
- else if (CASE_USE_BIT_TESTS
- && ! TREE_CONSTANT (index_expr)
- && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
- && compare_tree_int (range, 0) > 0
- && lshift_cheap_p ()
- && ((uniq == 1 && count >= 3)
- || (uniq == 2 && count >= 5)
- || (uniq == 3 && count >= 6)))
+ if (CASE_USE_BIT_TESTS
+ && ! TREE_CONSTANT (index_expr)
+ && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
+ && compare_tree_int (range, 0) > 0
+ && lshift_cheap_p ()
+ && ((uniq == 1 && count >= 3)
+ || (uniq == 2 && count >= 5)
+ || (uniq == 3 && count >= 6)))
{
/* Optimize the case where all the case values fit in a
word without having to subtract MINVAL. In this case,
if (compare_tree_int (minval, 0) > 0
&& compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
{
- minval = integer_zero_node;
+ minval = build_int_cst (index_type, 0);
range = maxval;
}
emit_case_bit_tests (index_type, index_expr, minval, range,
- thiscase->data.case_stmt.case_list,
- default_label);
+ case_list, default_label);
}
/* If range of values is much bigger than number of values,
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
|| flag_pic
#endif
+ || !flag_jump_tables
|| TREE_CONSTANT (index_expr)
/* If neither casesi or tablejump is available, we can
only go this way. */
if (MEM_P (index))
index = copy_to_reg (index);
- if (GET_CODE (index) == CONST_INT
- || TREE_CODE (index_expr) == INTEGER_CST)
- {
- /* Make a tree node with the proper constant value
- if we don't already have one. */
- if (TREE_CODE (index_expr) != INTEGER_CST)
- {
- index_expr
- = build_int_2 (INTVAL (index),
- unsignedp || INTVAL (index) >= 0 ? 0 : -1);
- index_expr = convert (index_type, index_expr);
- }
- /* For constant index expressions we need only
- issue an unconditional branch to the appropriate
- target code. The job of removing any unreachable
- code is left to the optimization phase if the
- "-O" option is specified. */
- for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
- if (! tree_int_cst_lt (index_expr, n->low)
- && ! tree_int_cst_lt (n->high, index_expr))
- break;
-
- if (n)
- emit_jump (label_rtx (n->code_label));
- else
- emit_jump (default_label);
- }
- else
- {
- /* If the index expression is not constant we generate
- a binary decision tree to select the appropriate
- target code. This is done as follows:
-
- The list of cases is rearranged into a binary tree,
- nearly optimal assuming equal probability for each case.
-
- The tree is transformed into RTL, eliminating
- redundant test conditions at the same time.
-
- If program flow could reach the end of the
- decision tree an unconditional jump to the
- default code is emitted. */
-
- use_cost_table
- = (TREE_CODE (orig_type) != ENUMERAL_TYPE
- && estimate_case_costs (thiscase->data.case_stmt.case_list));
- balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
- emit_case_nodes (index, thiscase->data.case_stmt.case_list,
- default_label, index_type);
- emit_jump (default_label);
- }
+ /* We generate a binary decision tree to select the
+ appropriate target code. This is done as follows:
+
+ The list of cases is rearranged into a binary tree,
+ nearly optimal assuming equal probability for each case.
+
+ The tree is transformed into RTL, eliminating
+ redundant test conditions at the same time.
+
+ If program flow could reach the end of the
+ decision tree an unconditional jump to the
+ default code is emitted. */
+
+ use_cost_table
+ = (TREE_CODE (orig_type) != ENUMERAL_TYPE
+ && estimate_case_costs (case_list));
+ balance_case_nodes (&case_list, NULL);
+ emit_case_nodes (index, case_list, default_label, index_type);
+ emit_jump (default_label);
}
else
{
if (! try_casesi (index_type, index_expr, minval, range,
table_label, default_label))
{
- index_type = integer_type_node;
+ bool ok;
/* Index jumptables from zero for suitable values of
minval to avoid a subtraction. */
&& compare_tree_int (minval, 0) > 0
&& compare_tree_int (minval, 3) < 0)
{
- minval = integer_zero_node;
+ minval = build_int_cst (index_type, 0);
range = maxval;
}
- if (! try_tablejump (index_type, index_expr, minval, range,
- table_label, default_label))
- abort ();
+ ok = try_tablejump (index_type, index_expr, minval, range,
+ table_label, default_label);
+ gcc_assert (ok);
}
/* Get table of labels to jump to, in order of case index. */
labelvec = alloca (ncases * sizeof (rtx));
memset (labelvec, 0, ncases * sizeof (rtx));
- for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
+ for (n = case_list; n; n = n->right)
{
/* Compute the low and high bounds relative to the minimum
value since that should fit in a HOST_WIDE_INT while the
actual values may not. */
HOST_WIDE_INT i_low
- = tree_low_cst (fold (build (MINUS_EXPR, index_type,
- n->low, minval)), 1);
+ = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
+ n->low, minval), 1);
HOST_WIDE_INT i_high
- = tree_low_cst (fold (build (MINUS_EXPR, index_type,
- n->high, minval)), 1);
+ = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
+ n->high, minval), 1);
HOST_WIDE_INT i;
for (i = i_low; i <= i_high; i ++)
emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
gen_rtvec_v (ncases, labelvec)));
- /* If the case insn drops through the table,
- after the table we must jump to the default-label.
- Otherwise record no drop-through after the table. */
-#ifdef CASE_DROPS_THROUGH
- emit_jump (default_label);
-#else
+ /* Record no drop-through after the table. */
emit_barrier ();
-#endif
}
before_case = NEXT_INSN (before_case);
end = get_last_insn ();
- if (squeeze_notes (&before_case, &end))
- abort ();
- reorder_insns (before_case, end,
- thiscase->data.case_stmt.start);
+ fail = squeeze_notes (&before_case, &end);
+ gcc_assert (!fail);
+ reorder_insns (before_case, end, start);
}
- if (thiscase->exit_label && !exit_done)
- emit_label (thiscase->exit_label);
-
- POPSTACK (case_stack);
-
free_temp_slots ();
}
estimate_case_costs (case_node_ptr node)
{
tree min_ascii = integer_minus_one_node;
- tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
+ tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
case_node_ptr n;
int i;
return 1;
}
-/* Determine whether two case labels branch to the same target.
- Since we now do tree optimizations, just comparing labels is
- good enough. */
-
-static bool
-same_case_target_p (rtx l1, rtx l2)
-{
- return l1 == l2;
-}
-
/* Take an ordered list of case nodes
and transform them into a near optimal binary tree,
on the assumption that any target code selection value is as
if (node->left)
return 0;
- low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
- node->low, integer_one_node));
+ low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
+ node->low,
+ build_int_cst (TREE_TYPE (node->low), 1));
/* If the subtraction above overflowed, we can't verify anything.
Otherwise, look for a parent that tests our value - 1. */
if (node->right)
return 0;
- high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
- node->high, integer_one_node));
+ high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
+ node->high,
+ build_int_cst (TREE_TYPE (node->high), 1));
/* If the addition above overflowed, we can't verify anything.
Otherwise, look for a parent that tests our value + 1. */
else if (node->right != 0 && node->left == 0)
{
- /* Here we have a right child but no left so we issue conditional
+ /* Here we have a right child but no left so we issue a conditional
branch to default and process the right child.
- Omit the conditional branch to default if we it avoid only one
- right child; it costs too much space to save so little time. */
+ Omit the conditional branch to default if the right child
+ does not have any children and is single valued; it would
+ cost too much space to save so little time. */
if (node->right->right || node->right->left
|| !tree_int_cst_equal (node->right->low, node->right->high))
new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
NULL_RTX, unsignedp,
OPTAB_WIDEN);
- new_bound = expand_expr (fold (build (MINUS_EXPR, type,
- high, low)),
+ new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
+ high, low),
NULL_RTX, mode, 0);
emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
}
}
}
-
-#include "gt-stmt.h"