the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA. */
-
/* This file handles the generation of rtl code from tree structure
above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
It also creates the rtl expressions for parameters and auto variables
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
-
\f
/* Functions and data structures for expanding case statements. */
tree index_expr;
/* Type that INDEX_EXPR should be converted to. */
tree nominal_type;
- /* Number of range exprs in case statement. */
- int num_ranges;
/* Name of this kind of statement, for warnings. */
const char *printname;
/* Used to save no_line_numbers till we see the first case label.
/* Character strings, each containing a single decimal digit. */
static char *digit_strings[10];
-
static int n_occurrences PARAMS ((int, const char *));
static void expand_goto_internal PARAMS ((tree, rtx, rtx));
static int expand_fixup PARAMS ((tree, rtx, rtx));
static int node_is_bounded PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable PARAMS ((rtx));
static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
-static int add_case_node PARAMS ((tree, tree, tree, tree *));
static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
static void mark_cond_nesting PARAMS ((struct nesting *));
static void mark_loop_nesting PARAMS ((struct nesting *));
emit_queue ();
/* Be sure the function is executable. */
if (current_function_check_memory_usage)
- emit_library_call (chkr_check_exec_libfunc, 1,
+ emit_library_call (chkr_check_exec_libfunc, LCT_CONST_MAKE_BLOCK,
VOIDmode, 1, x, ptr_mode);
do_pending_stack_adjust ();
emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
emit_indirect_jump (handler_slot);
}
- }
+ }
else
expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
insert_block (block);
else
{
- BLOCK_CHAIN (block)
+ BLOCK_CHAIN (block)
= BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
= block;
return block != 0;
}
-
-
\f
/* Expand any needed fixups in the outputmost binding level of the
function. FIRST_INSN is the first insn in the function. */
&& ! (f->target_rtl == return_label
&& ((TREE_CODE (TREE_TYPE (current_function_decl))
== FUNCTION_TYPE)
- && (TYPE_RETURNS_STACK_DEPRESSED
+ && (TYPE_RETURNS_STACK_DEPRESSED
(TREE_TYPE (current_function_decl))))))
emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
end_sequence ();
emit_insns_after (cleanup_insns, f->before_jump);
-
f->before_jump = 0;
}
}
= (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
/* The insn we have emitted. */
rtx insn;
+ int old_generating_concat_p = generating_concat_p;
/* An ASM with no outputs needs to be treated as volatile, for now. */
if (noutputs == 0)
error ("too many alternatives in `asm'");
return;
}
-
+
tmp = outputs;
while (tmp)
{
Make the asm insn write into that, then our caller will copy it to
the real output operand. Likewise for promoted variables. */
+ generating_concat_p = 0;
+
real_output_rtx[i] = NULL_RTX;
if ((TREE_CODE (val) == INDIRECT_REF
&& allows_mem)
if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
error ("output number %d not directly addressable", i);
- if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
+ if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
+ || GET_CODE (output_rtx[i]) == CONCAT)
{
real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
}
+ generating_concat_p = old_generating_concat_p;
+
if (is_inout)
{
inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
argvec = rtvec_alloc (ninputs);
constraints = rtvec_alloc (ninputs);
- body = gen_rtx_ASM_OPERANDS (VOIDmode, TREE_STRING_POINTER (string),
- empty_string, 0, argvec, constraints,
+ body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
+ : GET_MODE (output_rtx[0])),
+ TREE_STRING_POINTER (string),
+ empty_string, 0, argvec, constraints,
filename, line);
MEM_VOLATILE_P (body) = vol;
for (j = constraint[j] - '0'; j > 0; --j)
o = TREE_CHAIN (o);
-
+
c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (o)));
constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
j = 0;
break;
}
- /* ... fall through ... */
+ /* Fall through. */
case 'p': case 'r':
allows_reg = 1;
op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
+ /* Never pass a CONCAT to an ASM. */
+ generating_concat_p = 0;
+ if (GET_CODE (op) == CONCAT)
+ op = force_reg (GET_MODE (op), op);
+
if (asm_operand_ok (op, constraint) <= 0)
{
if (allows_reg)
not satisfied. */
warning ("asm operand %d probably doesn't match constraints", i);
}
- XVECEXP (body, 3, i) = op;
+ generating_concat_p = old_generating_concat_p;
+ ASM_OPERANDS_INPUT (body, i) = op;
- XVECEXP (body, 4, i) /* constraints */
+ ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
orig_constraint);
i++;
/* Protect all the operands from the queue now that they have all been
evaluated. */
+ generating_concat_p = 0;
+
for (i = 0; i < ninputs - ninout; i++)
- XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
+ ASM_OPERANDS_INPUT (body, i)
+ = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
for (i = 0; i < noutputs; i++)
output_rtx[i] = protect_from_queue (output_rtx[i], 1);
- /* For in-out operands, copy output rtx to input rtx. */
+ /* For in-out operands, copy output rtx to input rtx. */
for (i = 0; i < ninout; i++)
{
int j = inout_opnum[i];
- XVECEXP (body, 3, ninputs - ninout + i) /* argvec */
+ ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
= output_rtx[j];
- XVECEXP (body, 4, ninputs - ninout + i) /* constraints */
+ ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
= gen_rtx_ASM_INPUT (inout_mode[i], digit_strings[j]);
}
+ generating_concat_p = old_generating_concat_p;
+
/* Now, for each output, construct an rtx
(set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
ARGVEC CONSTRAINTS))
if (noutputs == 1 && nclobbers == 0)
{
- XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
+ ASM_OPERANDS_OUTPUT_CONSTRAINT (body)
+ = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
}
= gen_rtx_SET (VOIDmode,
output_rtx[i],
gen_rtx_ASM_OPERANDS
- (VOIDmode,
+ (GET_MODE (output_rtx[i]),
TREE_STRING_POINTER (string),
TREE_STRING_POINTER (TREE_PURPOSE (tail)),
i, argvec, constraints,
if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
- last_expr_type = TREE_TYPE (exp);
+ /* The call to `expand_expr' could cause last_expr_type and
+ last_expr_value to get reset. Therefore, we set last_expr_value
+ and last_expr_type *after* calling expand_expr. */
last_expr_value = expand_expr (exp,
(expr_stmts_for_value
? NULL_RTX : const0_rtx),
VOIDmode, 0);
+ last_expr_type = TREE_TYPE (exp);
/* If all we do is reference a volatile value in memory,
copy it to a register to be sure it is actually touched. */
else
{
rtx lab = gen_label_rtx ();
-
+
/* Compare the value with itself to reference it. */
emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
expand_expr (TYPE_SIZE (last_expr_type),
the user cannot control it. */
if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
return warn_if_unused_value (TREE_OPERAND (exp, 0));
- /* ... fall through ... */
-
+ /* Fall through. */
+
default:
/* Referencing a volatile value is a side effect, so don't warn. */
if ((DECL_P (exp)
POPSTACK (cond_stack);
last_expr_type = 0;
}
-
-
\f
/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
loop should be exited by `exit_something'. This is a loop for which
insn = PREV_INSN (label);
reorder_insns (label, label, start_label);
- for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
- {
+ for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
+ {
/* We ignore line number notes, but if we see any other note,
in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
NOTE_INSN_LOOP_*, we disable this optimization. */
body;
goto start_label;
end_label:
-
+
transform it to look like:
goto start_label;
for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == NOTE)
+ if (GET_CODE (insn) == NOTE)
{
if (optimize < 2
&& (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
{
--eh_regions;
- if (eh_regions < 0)
+ if (eh_regions < 0)
/* We've come to the end of an EH region, but
never saw the beginning of that region. That
means that an EH region begins before the top
if (last_test_insn && num_insns > 30)
break;
- if (eh_regions > 0)
+ if (eh_regions > 0)
/* We don't want to move a partial EH region. Consider:
while ( ( { try {
- if (cond ()) 0;
+ if (cond ()) 0;
else {
bar();
1;
}
- } catch (...) {
+ } catch (...) {
1;
} )) {
body;
- }
+ }
This isn't legal C++, but here's what it's supposed to
mean: if cond() is true, stop looping. Otherwise,
call bar, and keep looping. In addition, if cond
throws an exception, catch it and keep looping. Such
- constructs are certainy legal in LISP.
+ constructs are certainly legal in LISP.
We should not move the `if (cond()) 0' test since then
the EH-region for the try-block would be broken up.
(In this case we would the EH_BEG note for the `try'
and `if cond()' but not the call to bar() or the
- EH_END note.)
+ EH_END note.)
So we don't look for tests within an EH region. */
continue;
- if (GET_CODE (insn) == JUMP_INSN
+ if (GET_CODE (insn) == JUMP_INSN
&& GET_CODE (PATTERN (insn)) == SET
&& SET_DEST (PATTERN (insn)) == pc_rtx)
{
do {
if (dest1 && GET_CODE (dest1) == LABEL_REF
- && ((XEXP (dest1, 0)
+ && ((XEXP (dest1, 0)
== loop_stack->data.loop.alt_end_label)
- || (XEXP (dest1, 0)
+ || (XEXP (dest1, 0)
== loop_stack->data.loop.end_label)))
{
last_test_insn = potential_last_test;
around a unconditional branch to exit the loop. If fixups are
necessary, they go before the unconditional branch. */
-
do_jump (cond, NULL_RTX, label);
last_insn = get_last_insn ();
if (GET_CODE (last_insn) == CODE_LABEL)
struct nesting *block = block_stack;
rtx last_insn = get_last_insn ();
- /* If this function was declared to return a value, but we
+ /* If this function was declared to return a value, but we
didn't, clobber the return registers so that they are not
propogated live to the rest of the function. */
clobber_return_register ();
big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
* BITS_PER_UNIT));
- /* Copy the structure BITSIZE bits at a time. */
+ /* Copy the structure BITSIZE bits at a time. */
for (bitpos = 0, xbitpos = big_endian_correction;
bitpos < bytes * BITS_PER_UNIT;
bitpos += bitsize, xbitpos += bitsize)
\f
/* Attempt to optimize a potential tail recursion call into a goto.
ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
- where to place the jump to the tail recursion label.
-
+ where to place the jump to the tail recursion label.
+
Return TRUE if the call was optimized into a goto. */
int
register int j;
for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
- { copy = 1; break; }
+ {
+ copy = 1;
+ break;
+ }
if (copy)
argvec[i] = copy_to_reg (argvec[i]);
}
will not create corresponding BLOCK nodes. (There should be
a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
and BLOCKs.) If this flag is set, MARK_ENDS should be zero
- when expand_end_bindings is called.
+ when expand_end_bindings is called.
If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
optionally be supplied. If so, it becomes the NOTE_BLOCK for the
rtx note;
int exit_flag = ((flags & 1) != 0);
int block_flag = ((flags & 2) == 0);
-
+
/* If a BLOCK is supplied, then the caller should be requesting a
NOTE_INSN_BLOCK_BEG note. */
if (!block_flag && block)
}
else
note = emit_note (NULL_PTR, NOTE_INSN_DELETED);
-
+
/* Make an entry on block_stack for the block we are entering. */
thisblock->next = block_stack;
expand_end_target_temps ()
{
expand_end_bindings (NULL_TREE, 0, 0);
-
+
/* This is so that even if the result is preserved, the space
allocated will be freed, as we know that it is no longer in use. */
pop_temp_slots ();
BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
represents the outermost (function) scope for the function or method (i.e.
the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
- *that* node in turn will point to the relevant FUNCTION_DECL node. */
+ *that* node in turn will point to the relevant FUNCTION_DECL node. */
int
is_body_block (stmt)
if (warn_unused_variable)
for (decl = vars; decl; decl = TREE_CHAIN (decl))
- if (TREE_CODE (decl) == VAR_DECL
+ if (TREE_CODE (decl) == VAR_DECL
&& ! TREE_USED (decl)
&& ! DECL_IN_SYSTEM_HEADER (decl)
- && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
+ && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
warning_with_decl (decl, "unused variable `%s'");
}
/* Since expand_eh_region_start does an expand_start_bindings, we
have to first end all the bindings that were created by
expand_eh_region_start. */
-
+
thisblock = block_stack;
/* If any of the variables in this scope were not used, warn the
if (GET_CODE (insn) == NOTE)
insn = prev_nonnote_insn (insn);
reachable = (! insn || GET_CODE (insn) != BARRIER);
-
+
/* Do the cleanups. */
expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
if (reachable)
free_temp_slots ();
/* Allocate space on the stack for the variable. Note that
- DECL_ALIGN says how the variable is to be aligned and we
+ DECL_ALIGN says how the variable is to be aligned and we
cannot use it to conclude anything about the alignment of
the size. */
address = allocate_dynamic_stack_space (size, NULL_RTX,
TREE_ADDRESSABLE (decl) = 1;
break;
}
-
+
expand_decl (decl);
expand_decl_cleanup (decl, cleanup);
x = DECL_RTL (decl);
thiscase->data.case_stmt.index_expr = expr;
thiscase->data.case_stmt.nominal_type = type;
thiscase->data.case_stmt.default_label = 0;
- thiscase->data.case_stmt.num_ranges = 0;
thiscase->data.case_stmt.printname = printname;
thiscase->data.case_stmt.line_number_status = force_line_numbers ();
case_stack = thiscase;
start_cleanup_deferral ();
}
-
/* Start a "dummy case statement" within which case labels are invalid
and are not connected to any larger real case statement.
This can be used if you don't want to let a case statement jump
thiscase->data.case_stmt.start = 0;
thiscase->data.case_stmt.nominal_type = 0;
thiscase->data.case_stmt.default_label = 0;
- thiscase->data.case_stmt.num_ranges = 0;
case_stack = thiscase;
nesting_stack = thiscase;
start_cleanup_deferral ();
/* If insn is zero, then there must have been a syntax error. */
if (insn)
- warning_with_file_and_line (NOTE_SOURCE_FILE(insn),
- NOTE_LINE_NUMBER(insn),
+ warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
+ NOTE_LINE_NUMBER (insn),
"unreachable code at beginning of %s",
case_stack->data.case_stmt.printname);
break;
|| ! int_fits_type_p (value, index_type)))
return 3;
- /* Fail if this is a duplicate or overlaps another entry. */
- if (value == 0)
- {
- if (case_stack->data.case_stmt.default_label != 0)
- {
- *duplicate = case_stack->data.case_stmt.default_label;
- return 2;
- }
- case_stack->data.case_stmt.default_label = label;
- }
- else
- return add_case_node (value, value, label, duplicate);
-
- expand_label (label);
- return 0;
+ return add_case_node (value, value, label, duplicate);
}
/* Like pushcase but this case applies to all values between VALUE1 and
if (value2 != 0 && tree_int_cst_lt (value2, value1))
return 4;
- /* If the max was unbounded, use the max of the nominal_type we are
+ /* If the max was unbounded, use the max of the nominal_type we are
converting to. Do this after the < check above to suppress false
positives. */
if (value2 == 0)
into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
slowdown for large switch statements. */
-static int
+int
add_case_node (low, high, label, duplicate)
tree low, high;
tree label;
{
struct case_node *p, **q, *r;
+ /* If there's no HIGH value, then this is not a case range; it's
+ just a simple case label. But that's just a degenerate case
+ range. */
+ if (!high)
+ high = low;
+
+ /* Handle default labels specially. */
+ if (!high && !low)
+ {
+ if (case_stack->data.case_stmt.default_label != 0)
+ {
+ *duplicate = case_stack->data.case_stmt.default_label;
+ return 2;
+ }
+ case_stack->data.case_stmt.default_label = label;
+ expand_label (label);
+ return 0;
+ }
+
q = &case_stack->data.case_stmt.case_list;
p = *q;
r->low = copy_node (low);
/* If the bounds are equal, turn this into the one-value case. */
-
if (tree_int_cst_equal (low, high))
r->high = r->low;
else
- {
- r->high = copy_node (high);
- case_stack->data.case_stmt.num_ranges++;
- }
+ r->high = copy_node (high);
r->code_label = label;
expand_label (label);
return 0;
}
-
\f
/* Returns the number of possible values of TYPE.
Returns -1 if the number is unknown, variable, or if the number does not
TREE_TYPE (val) = type;
if (! root)
- ; /* Do nothing */
+ /* Do nothing. */
+ ;
else if (sparseness == 2)
{
tree t;
duplicate case values (multiple enum constants
with the same value). */
TREE_TYPE (val) = TREE_TYPE (root->low);
- for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
+ for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
t = TREE_CHAIN (t), xlo++)
{
TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
/* Keep going past elements distinctly greater than VAL. */
if (tree_int_cst_lt (val, n->low))
n = n->left;
-
+
/* or distinctly less than VAL. */
else if (tree_int_cst_lt (n->high, val))
n = n->right;
-
+
else
{
/* We have found a matching range. */
{
TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
- while ( ! tree_int_cst_lt (n->high, val))
+ while (! tree_int_cst_lt (n->high, val))
{
/* Calculate (into xlo) the "offset" of the integer (val).
The element with lowest value has offset 0, the next smallest
TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
&xlo, &xhi);
}
-
+
if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
BITARRAY_SET (cases_seen, xlo);
{
register struct case_node *n;
register tree chain;
-#if 0 /* variable used by 'if 0'ed code below. */
+#if 0 /* variable used by 'if 0'ed code below. */
register struct case_node **l;
int all_values = 1;
#endif
if (size > 0 && size < 600000
/* We deliberately use calloc here, not cmalloc, so that we can suppress
- this optimization if we don't have enough memory rather than
+ this optimization if we don't have enough memory rather than
aborting, as xmalloc would do. */
&& (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
{
mark_seen_cases (type, cases_seen, size, sparseness);
for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
- if (BITARRAY_TEST(cases_seen, i) == 0)
+ if (BITARRAY_TEST (cases_seen, i) == 0)
warning ("enumeration value `%s' not handled in switch",
IDENTIFIER_POINTER (TREE_PURPOSE (v)));
if (thiscase->data.case_stmt.case_list
&& thiscase->data.case_stmt.case_list->left)
thiscase->data.case_stmt.case_list
- = case_tree2list(thiscase->data.case_stmt.case_list, 0);
+ = case_tree2list (thiscase->data.case_stmt.case_list, 0);
/* Simplify the case-list before we count it. */
group_case_nodes (thiscase->data.case_stmt.case_list);
generate the conversion. */
if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
- && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
+ && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
== CODE_FOR_nothing))
{
enum machine_mode wider_mode;
use_cost_table
= (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
&& estimate_case_costs (thiscase->data.case_stmt.case_list));
- balance_case_nodes (&thiscase->data.case_stmt.case_list,
+ balance_case_nodes (&thiscase->data.case_stmt.case_list,
NULL_PTR);
emit_case_nodes (index, thiscase->data.case_stmt.case_list,
default_label, index_type);
index = protect_from_queue (index, 0);
do_pending_stack_adjust ();
- op_mode = insn_data[(int)CODE_FOR_casesi].operand[0].mode;
- if (! (*insn_data[(int)CODE_FOR_casesi].operand[0].predicate)
+ op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
+ if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
(index, op_mode))
index = copy_to_mode_reg (op_mode, index);
op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
- op_mode = insn_data[(int)CODE_FOR_casesi].operand[1].mode;
- if (! (*insn_data[(int)CODE_FOR_casesi].operand[1].predicate)
+ op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
+ if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
(op1, op_mode))
op1 = copy_to_mode_reg (op_mode, op1);
op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
- op_mode = insn_data[(int)CODE_FOR_casesi].operand[2].mode;
- if (! (*insn_data[(int)CODE_FOR_casesi].operand[2].predicate)
+ op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
+ if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
(op2, op_mode))
op2 = copy_to_mode_reg (op_mode, op2);
{
index_type = thiscase->data.case_stmt.nominal_type;
index_expr = fold (build (MINUS_EXPR, index_type,
- convert (index_type, index_expr),
- convert (index_type, minval)));
+ convert (index_type, index_expr),
+ convert (index_type, minval)));
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
emit_queue ();
index = protect_from_queue (index, 0);
emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
gen_rtx_LABEL_REF (Pmode, table_label),
gen_rtvec_v (ncases, labelvec),
- const0_rtx, const0_rtx));
+ const0_rtx, const0_rtx));
else
emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
gen_rtvec_v (ncases, labelvec)));
if (node_is_bounded (node->right, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
- GT, NULL_RTX, mode, unsignedp, 0,
- label_rtx (node->right->code_label));
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ GT, NULL_RTX, mode, unsignedp, 0,
+ label_rtx (node->right->code_label));
emit_case_nodes (index, node->left, default_label, index_type);
}
else if (node_is_bounded (node->left, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
LT, NULL_RTX, mode, unsignedp, 0,
label_rtx (node->left->code_label));
emit_case_nodes (index, node->right, default_label, index_type);
/* Neither node is bounded. First distinguish the two sides;
then emit the code for one side at a time. */
- tree test_label
- = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
+ tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
/* See if the value is on the right. */
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
GT, NULL_RTX, mode, unsignedp, 0,
label_rtx (test_label));
{
if (!node_has_low_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high,
- NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
LT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
a branch-greater-than will get us to the default
label correctly. */
if (use_cost_table
- && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
+ && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
;
#endif /* 0 */
- if (node->left->left || node->left->right
+ if (node->left->left || node->left->right
|| !tree_int_cst_equal (node->left->low, node->left->high))
{
if (!node_has_high_bound (node, index_type))
then handle the two subtrees. */
tree test_label = 0;
-
if (node_is_bounded (node->right, index_type))
/* Right hand node is fully bounded so we can eliminate any
testing and branch directly to the target code. */
Branch to a label where we will handle it later. */
test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
GT, NULL_RTX, mode, unsignedp, 0,
label_rtx (test_label));
}
if they are possible. */
if (!node_has_low_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
LT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
if they are possible. */
if (!node_has_high_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
GT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
/* Value belongs to this node or to the left-hand subtree. */
- emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
GE, NULL_RTX, mode, unsignedp, 0,
label_rtx (node->code_label));
if (!node_has_high_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
GT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
if (!node_has_low_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
LT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
}
}
}
-\f