the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA. */
-
/* This file handles the generation of rtl code from tree structure
above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
It also creates the rtl expressions for parameters and auto variables
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
-
\f
/* Functions and data structures for expanding case statements. */
/* This must be a signed type, and non-ANSI compilers lack signed char. */
static short cost_table_[129];
-static short *cost_table;
static int use_cost_table;
+static int cost_table_initialized;
+
+/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
+ is unsigned. */
+#define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT)((I) + 1)]
\f
/* Stack of control and binding constructs we are currently inside.
tree index_expr;
/* Type that INDEX_EXPR should be converted to. */
tree nominal_type;
- /* Number of range exprs in case statement. */
- int num_ranges;
/* Name of this kind of statement, for warnings. */
const char *printname;
/* Used to save no_line_numbers till we see the first case label.
/* Non-zero if we are using EH to handle cleanus. */
static int using_eh_for_cleanups_p = 0;
-/* Character strings, each containing a single decimal digit. */
-static char *digit_strings[10];
-
-
static int n_occurrences PARAMS ((int, const char *));
static void expand_goto_internal PARAMS ((tree, rtx, rtx));
static int expand_fixup PARAMS ((tree, rtx, rtx));
static int node_is_bounded PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable PARAMS ((rtx));
static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
-static int add_case_node PARAMS ((tree, tree, tree, tree *));
static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
static void mark_cond_nesting PARAMS ((struct nesting *));
static void mark_loop_nesting PARAMS ((struct nesting *));
static void mark_case_nesting PARAMS ((struct nesting *));
static void mark_case_node PARAMS ((struct case_node *));
static void mark_goto_fixup PARAMS ((struct goto_fixup *));
-
+static void free_case_nodes PARAMS ((case_node_ptr));
\f
void
using_eh_for_cleanups ()
ggc_mark_tree (n->data.block.cleanups);
ggc_mark_tree (n->data.block.outer_cleanups);
- for (l = n->data.block.label_chain; l != NULL; l = l->next)
- ggc_mark_tree (l->label);
+ for (l = n->data.block.label_chain; l != NULL; l = l->next)
+ {
+ ggc_mark (l);
+ ggc_mark_tree (l->label);
+ }
ggc_mark_rtx (n->data.block.last_unconditional_cleanup);
void
init_stmt ()
{
- int i;
-
+ /* All control/binding nesting bookkeeping is allocated on this
+    obstack.  The per-digit constraint strings formerly built here
+    are now produced on demand via digit_string () (see the in-out
+    ASM_OPERANDS handling), so no GC roots are registered here.  */
 gcc_obstack_init (&stmt_obstack);
-
- for (i = 0; i < 10; i++)
- {
- digit_strings[i] = ggc_alloc_string (NULL, 1);
- digit_strings[i][0] = '0' + i;
- }
- ggc_add_string_root (digit_strings, 10);
}
void
emit_queue ();
/* Be sure the function is executable. */
if (current_function_check_memory_usage)
- emit_library_call (chkr_check_exec_libfunc, 1,
+ emit_library_call (chkr_check_exec_libfunc, LCT_CONST_MAKE_BLOCK,
VOIDmode, 1, x, ptr_mode);
do_pending_stack_adjust ();
if (stack_block_stack != 0)
{
- p = (struct label_chain *) oballoc (sizeof (struct label_chain));
+ p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
p->next = stack_block_stack->data.block.label_chain;
stack_block_stack->data.block.label_chain = p;
p->label = label;
{
struct function *p = find_function_data (context);
rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
- rtx handler_slot, static_chain, save_area;
+ rtx handler_slot, static_chain, save_area, insn;
tree link;
/* Find the corresponding handler slot for this label. */
emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
emit_indirect_jump (handler_slot);
}
- }
+
+ /* Search backwards to the jump insn and mark it as a
+ non-local goto. */
+ for (insn = get_last_insn ();
+ GET_CODE (insn) != JUMP_INSN;
+ insn = PREV_INSN (insn))
+ continue;
+ REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
+ REG_NOTES (insn));
+ }
else
expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
insert_block (block);
else
{
- BLOCK_CHAIN (block)
+ BLOCK_CHAIN (block)
= BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
= block;
return block != 0;
}
-
-
\f
/* Expand any needed fixups in the outputmost binding level of the
function. FIRST_INSN is the first insn in the function. */
&& ! (f->target_rtl == return_label
&& ((TREE_CODE (TREE_TYPE (current_function_decl))
== FUNCTION_TYPE)
- && (TYPE_RETURNS_STACK_DEPRESSED
+ && (TYPE_RETURNS_STACK_DEPRESSED
(TREE_TYPE (current_function_decl))))))
emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
end_sequence ();
emit_insns_after (cleanup_insns, f->before_jump);
-
f->before_jump = 0;
}
}
= (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
/* The insn we have emitted. */
rtx insn;
+ int old_generating_concat_p = generating_concat_p;
/* An ASM with no outputs needs to be treated as volatile, for now. */
if (noutputs == 0)
error ("too many alternatives in `asm'");
return;
}
-
+
tmp = outputs;
while (tmp)
{
{
tree val = TREE_VALUE (tail);
tree type = TREE_TYPE (val);
- char *constraint;
+ const char *constraint;
char *p;
int c_len;
int j;
the worst that happens if we get it wrong is we issue an error
message. */
- c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (tail)));
constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
+ c_len = strlen (constraint);
/* Allow the `=' or `+' to not be at the beginning of the string,
since it wasn't explicitly documented that way, and there is a
error ("output operand constraint lacks `='");
return;
}
+ j = p - constraint;
+ is_inout = *p == '+';
- if (p != constraint)
+ if (j || is_inout)
{
- j = *p;
- bcopy (constraint, constraint+1, p-constraint);
- *constraint = j;
-
- warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
+ /* Have to throw away this constraint string and get a new one. */
+ char *buf = alloca (c_len + 1);
+ buf[0] = '=';
+ if (j)
+ memcpy (buf + 1, constraint, j);
+ memcpy (buf + 1 + j, p + 1, c_len - j); /* not -j-1 - copy null */
+ constraint = ggc_alloc_string (buf, c_len);
+
+ if (j)
+ warning (
+ "output constraint `%c' for operand %d is not at the beginning",
+ *p, i);
}
- is_inout = constraint[0] == '+';
- /* Replace '+' with '='. */
- constraint[0] = '=';
/* Make sure we can specify the matching operand. */
if (is_inout && i > 9)
{
}
break;
- case '?': case '!': case '*': case '&':
+ case '?': case '!': case '*': case '&': case '#':
case 'E': case 'F': case 'G': case 'H':
case 's': case 'i': case 'n':
case 'I': case 'J': case 'K': case 'L': case 'M':
case 'N': case 'O': case 'P': case ',':
-#ifdef EXTRA_CONSTRAINT
- case 'Q': case 'R': case 'S': case 'T': case 'U':
-#endif
break;
case '0': case '1': case '2': case '3': case '4':
break;
case 'p': case 'r':
- default:
allows_reg = 1;
break;
+
+ default:
+ if (! ISALPHA (constraint[j]))
+ {
+ error ("invalid punctuation `%c' in constraint",
+ constraint[j]);
+ return;
+ }
+ if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
+ allows_reg = 1;
+#ifdef EXTRA_CONSTRAINT
+ else
+ {
+ /* Otherwise we can't assume anything about the nature of
+ the constraint except that it isn't purely registers.
+ Treat it like "g" and hope for the best. */
+ allows_reg = 1;
+ allows_mem = 1;
+ }
+#endif
+ break;
}
/* If an output operand is not a decl or indirect ref and our constraint
Make the asm insn write into that, then our caller will copy it to
the real output operand. Likewise for promoted variables. */
+ generating_concat_p = 0;
+
real_output_rtx[i] = NULL_RTX;
if ((TREE_CODE (val) == INDIRECT_REF
&& allows_mem)
if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
error ("output number %d not directly addressable", i);
- if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
+ if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
+ || GET_CODE (output_rtx[i]) == CONCAT)
{
real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
}
+ generating_concat_p = old_generating_concat_p;
+
if (is_inout)
{
inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
argvec = rtvec_alloc (ninputs);
constraints = rtvec_alloc (ninputs);
- body = gen_rtx_ASM_OPERANDS (VOIDmode, TREE_STRING_POINTER (string),
- empty_string, 0, argvec, constraints,
+ body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
+ : GET_MODE (output_rtx[0])),
+ TREE_STRING_POINTER (string),
+ empty_string, 0, argvec, constraints,
filename, line);
MEM_VOLATILE_P (body) = vol;
{
int j;
int allows_reg = 0, allows_mem = 0;
- char *constraint, *orig_constraint;
+ const char *constraint, *orig_constraint;
int c_len;
rtx op;
return;
}
- c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (tail)));
constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
+ c_len = strlen (constraint);
orig_constraint = constraint;
/* Make sure constraint has neither `=', `+', nor '&'. */
break;
case '<': case '>':
- case '?': case '!': case '*':
- case 'E': case 'F': case 'G': case 'H': case 'X':
+ case '?': case '!': case '*': case '#':
+ case 'E': case 'F': case 'G': case 'H':
case 's': case 'i': case 'n':
case 'I': case 'J': case 'K': case 'L': case 'M':
case 'N': case 'O': case 'P': case ',':
-#ifdef EXTRA_CONSTRAINT
- case 'Q': case 'R': case 'S': case 'T': case 'U':
-#endif
break;
/* Whether or not a numeric constraint allows a register is
for (j = constraint[j] - '0'; j > 0; --j)
o = TREE_CHAIN (o);
-
- c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (o)));
+
constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
+ c_len = strlen (constraint);
j = 0;
break;
}
- /* ... fall through ... */
+ /* Fall through. */
case 'p': case 'r':
- default:
allows_reg = 1;
break;
- case 'g':
+ case 'g': case 'X':
allows_reg = 1;
allows_mem = 1;
break;
+
+ default:
+ if (! ISALPHA (constraint[j]))
+ {
+ error ("invalid punctuation `%c' in constraint",
+ constraint[j]);
+ return;
+ }
+ if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
+ allows_reg = 1;
+#ifdef EXTRA_CONSTRAINT
+ else
+ {
+ /* Otherwise we can't assume anything about the nature of
+ the constraint except that it isn't purely registers.
+ Treat it like "g" and hope for the best. */
+ allows_reg = 1;
+ allows_mem = 1;
+ }
+#endif
+ break;
}
if (! allows_reg && allows_mem)
op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
+ /* Never pass a CONCAT to an ASM. */
+ generating_concat_p = 0;
+ if (GET_CODE (op) == CONCAT)
+ op = force_reg (GET_MODE (op), op);
+
if (asm_operand_ok (op, constraint) <= 0)
{
if (allows_reg)
|| GET_CODE (op) == CONCAT)
{
tree type = TREE_TYPE (TREE_VALUE (tail));
- rtx memloc = assign_temp (type, 1, 1, 1);
+ tree qual_type = build_qualified_type (type,
+ (TYPE_QUALS (type)
+ | TYPE_QUAL_CONST));
+ rtx memloc = assign_temp (qual_type, 1, 1, 1);
emit_move_insn (memloc, op);
op = memloc;
not satisfied. */
warning ("asm operand %d probably doesn't match constraints", i);
}
- XVECEXP (body, 3, i) = op;
+ generating_concat_p = old_generating_concat_p;
+ ASM_OPERANDS_INPUT (body, i) = op;
- XVECEXP (body, 4, i) /* constraints */
+ ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
orig_constraint);
i++;
/* Protect all the operands from the queue now that they have all been
evaluated. */
+ generating_concat_p = 0;
+
for (i = 0; i < ninputs - ninout; i++)
- XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
+ ASM_OPERANDS_INPUT (body, i)
+ = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
for (i = 0; i < noutputs; i++)
output_rtx[i] = protect_from_queue (output_rtx[i], 1);
- /* For in-out operands, copy output rtx to input rtx. */
+ /* For in-out operands, copy output rtx to input rtx. */
for (i = 0; i < ninout; i++)
{
int j = inout_opnum[i];
- XVECEXP (body, 3, ninputs - ninout + i) /* argvec */
+ ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
= output_rtx[j];
- XVECEXP (body, 4, ninputs - ninout + i) /* constraints */
- = gen_rtx_ASM_INPUT (inout_mode[i], digit_strings[j]);
+ ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
+ = gen_rtx_ASM_INPUT (inout_mode[i], digit_string (j));
}
+ generating_concat_p = old_generating_concat_p;
+
/* Now, for each output, construct an rtx
(set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
ARGVEC CONSTRAINTS))
if (noutputs == 1 && nclobbers == 0)
{
- XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
+ ASM_OPERANDS_OUTPUT_CONSTRAINT (body)
+ = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
}
= gen_rtx_SET (VOIDmode,
output_rtx[i],
gen_rtx_ASM_OPERANDS
- (VOIDmode,
+ (GET_MODE (output_rtx[i]),
TREE_STRING_POINTER (string),
TREE_STRING_POINTER (TREE_PURPOSE (tail)),
i, argvec, constraints,
except inside a ({...}) where they may be useful. */
if (expr_stmts_for_value == 0 && exp != error_mark_node)
{
- if (! TREE_SIDE_EFFECTS (exp)
- && (extra_warnings || warn_unused_value)
- && !(TREE_CODE (exp) == CONVERT_EXPR
- && VOID_TYPE_P (TREE_TYPE (exp))))
- warning_with_file_and_line (emit_filename, emit_lineno,
- "statement with no effect");
+ if (! TREE_SIDE_EFFECTS (exp))
+ {
+ if ((extra_warnings || warn_unused_value)
+ && !(TREE_CODE (exp) == CONVERT_EXPR
+ && VOID_TYPE_P (TREE_TYPE (exp))))
+ warning_with_file_and_line (emit_filename, emit_lineno,
+ "statement with no effect");
+ }
else if (warn_unused_value)
warn_if_unused_value (exp);
}
if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
- last_expr_type = TREE_TYPE (exp);
+ /* The call to `expand_expr' could cause last_expr_type and
+ last_expr_value to get reset. Therefore, we set last_expr_value
+ and last_expr_type *after* calling expand_expr. */
last_expr_value = expand_expr (exp,
(expr_stmts_for_value
? NULL_RTX : const0_rtx),
VOIDmode, 0);
+ last_expr_type = TREE_TYPE (exp);
/* If all we do is reference a volatile value in memory,
copy it to a register to be sure it is actually touched. */
else
{
rtx lab = gen_label_rtx ();
-
+
/* Compare the value with itself to reference it. */
emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
expand_expr (TYPE_SIZE (last_expr_type),
if (TREE_USED (exp))
return 0;
+ /* Don't warn about void constructs. This includes casting to void,
+ void function calls, and statement expressions with a final cast
+ to void. */
+ if (VOID_TYPE_P (TREE_TYPE (exp)))
+ return 0;
+
+ /* If this is an expression with side effects, don't warn. */
+ if (TREE_SIDE_EFFECTS (exp))
+ return 0;
+
switch (TREE_CODE (exp))
{
case PREINCREMENT_EXPR:
case TRY_CATCH_EXPR:
case WITH_CLEANUP_EXPR:
case EXIT_EXPR:
- /* We don't warn about COND_EXPR because it may be a useful
- construct if either arm contains a side effect. */
- case COND_EXPR:
return 0;
case BIND_EXPR:
case NOP_EXPR:
case CONVERT_EXPR:
case NON_LVALUE_EXPR:
- /* Don't warn about values cast to void. */
- if (VOID_TYPE_P (TREE_TYPE (exp)))
- return 0;
/* Don't warn about conversions not explicit in the user's program. */
if (TREE_NO_UNUSED_WARNING (exp))
return 0;
the user cannot control it. */
if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
return warn_if_unused_value (TREE_OPERAND (exp, 0));
- /* ... fall through ... */
-
+ /* Fall through. */
+
default:
/* Referencing a volatile value is a side effect, so don't warn. */
if ((DECL_P (exp)
tree
expand_start_stmt_expr ()
{
- int momentary;
tree t;
/* Make the RTL_EXPR node temporary, not momentary,
so that rtl_expr_chain doesn't become garbage. */
- momentary = suspend_momentary ();
t = make_node (RTL_EXPR);
- resume_momentary (momentary);
do_pending_stack_adjust ();
start_sequence_for_rtl_expr (t);
NO_DEFER_POP;
POPSTACK (cond_stack);
last_expr_type = 0;
}
-
-
\f
/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
loop should be exited by `exit_something'. This is a loop for which
return thisloop;
}
+/* Begin a null, aka do { } while (0) "loop". But since the contents
+ of said loop can still contain a break, we must frob the loop nest. */
+
+struct nesting *
+expand_start_null_loop ()
+{
+ register struct nesting *thisloop = ALLOC_NESTING ();
+
+ /* Make an entry on loop_stack for the loop we are entering. */
+
+ thisloop->next = loop_stack;
+ thisloop->all = nesting_stack;
+ thisloop->depth = ++nesting_depth;
+ /* No code is emitted for the head of a null loop; a deleted note
+    stands in for the start label.  */
+ thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
+ thisloop->data.loop.end_label = gen_label_rtx ();
+ thisloop->data.loop.alt_end_label = NULL_RTX;
+ /* Both `continue' and `break' in the null body must simply jump
+    past it, so continue and exit share the end label.  */
+ thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
+ thisloop->exit_label = thisloop->data.loop.end_label;
+ loop_stack = thisloop;
+ nesting_stack = thisloop;
+
+ return thisloop;
+}
+
/* Specify the continuation point for a loop started with
expand_start_loop_continue_elsewhere.
Use this at the point in the code to which a continue statement
insn = PREV_INSN (label);
reorder_insns (label, label, start_label);
- for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
- {
+ for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
+ {
/* We ignore line number notes, but if we see any other note,
in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
NOTE_INSN_LOOP_*, we disable this optimization. */
body;
goto start_label;
end_label:
-
+
transform it to look like:
goto start_label;
for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == NOTE)
+ if (GET_CODE (insn) == NOTE)
{
if (optimize < 2
&& (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
{
--eh_regions;
- if (eh_regions < 0)
+ if (eh_regions < 0)
/* We've come to the end of an EH region, but
never saw the beginning of that region. That
means that an EH region begins before the top
if (last_test_insn && num_insns > 30)
break;
- if (eh_regions > 0)
+ if (eh_regions > 0)
/* We don't want to move a partial EH region. Consider:
while ( ( { try {
- if (cond ()) 0;
+ if (cond ()) 0;
else {
bar();
1;
}
- } catch (...) {
+ } catch (...) {
1;
} )) {
body;
- }
+ }
This isn't legal C++, but here's what it's supposed to
mean: if cond() is true, stop looping. Otherwise,
call bar, and keep looping. In addition, if cond
throws an exception, catch it and keep looping. Such
- constructs are certainy legal in LISP.
+ constructs are certainly legal in LISP.
We should not move the `if (cond()) 0' test since then
the EH-region for the try-block would be broken up.
(In this case we would the EH_BEG note for the `try'
and `if cond()' but not the call to bar() or the
- EH_END note.)
+ EH_END note.)
So we don't look for tests within an EH region. */
continue;
- if (GET_CODE (insn) == JUMP_INSN
+ if (GET_CODE (insn) == JUMP_INSN
&& GET_CODE (PATTERN (insn)) == SET
&& SET_DEST (PATTERN (insn)) == pc_rtx)
{
do {
if (dest1 && GET_CODE (dest1) == LABEL_REF
- && ((XEXP (dest1, 0)
+ && ((XEXP (dest1, 0)
== loop_stack->data.loop.alt_end_label)
- || (XEXP (dest1, 0)
+ || (XEXP (dest1, 0)
== loop_stack->data.loop.end_label)))
{
last_test_insn = potential_last_test;
last_expr_type = 0;
}
+/* Finish a null loop, aka do { } while (0). */
+
+void
+expand_end_null_loop ()
+{
+ do_pending_stack_adjust ();
+ /* Any `break' (and, since they share a label, any `continue')
+    inside the null body lands here.  */
+ emit_label (loop_stack->data.loop.end_label);
+
+ /* Restore the enclosing loop nest set up by expand_start_null_loop.  */
+ POPSTACK (loop_stack);
+
+ last_expr_type = 0;
+}
+
/* Generate a jump to the current loop's continue-point.
This is usually the top of the loop, but may be specified
explicitly elsewhere. If not currently inside a loop,
around a unconditional branch to exit the loop. If fixups are
necessary, they go before the unconditional branch. */
-
do_jump (cond, NULL_RTX, label);
last_insn = get_last_insn ();
if (GET_CODE (last_insn) == CODE_LABEL)
struct nesting *block = block_stack;
rtx last_insn = get_last_insn ();
- /* If this function was declared to return a value, but we
+ /* If this function was declared to return a value, but we
didn't, clobber the return registers so that they are not
propogated live to the rest of the function. */
clobber_return_register ();
rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
enum machine_mode tmpmode, result_reg_mode;
+ if (bytes == 0)
+ {
+ expand_null_return ();
+ return;
+ }
+
/* Structures whose size is not a multiple of a word are aligned
to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
machine, this means we must skip the empty high order bytes when
big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
* BITS_PER_UNIT));
- /* Copy the structure BITSIZE bits at a time. */
+ /* Copy the structure BITSIZE bits at a time. */
for (bitpos = 0, xbitpos = big_endian_correction;
bitpos < bytes * BITS_PER_UNIT;
bitpos += bitsize, xbitpos += bitsize)
/* Find the smallest integer mode large enough to hold the
entire structure and use that mode instead of BLKmode
on the USE insn for the return register. */
- bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
tmpmode != VOIDmode;
tmpmode = GET_MODE_WIDER_MODE (tmpmode))
- {
- /* Have we found a large enough mode? */
- if (GET_MODE_SIZE (tmpmode) >= bytes)
- break;
- }
+ /* Have we found a large enough mode? */
+ if (GET_MODE_SIZE (tmpmode) >= bytes)
+ break;
/* No suitable mode found. */
if (tmpmode == VOIDmode)
{
/* Calculate the return value into a temporary (usually a pseudo
reg). */
- val = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)),
- 0, 0, 1);
+ tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
+ tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
+
+ val = assign_temp (nt, 0, 0, 1);
val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
val = force_not_mem (val);
emit_queue ();
\f
/* Attempt to optimize a potential tail recursion call into a goto.
ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
- where to place the jump to the tail recursion label.
-
+ where to place the jump to the tail recursion label.
+
Return TRUE if the call was optimized into a goto. */
int
register int j;
for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
- { copy = 1; break; }
+ {
+ copy = 1;
+ break;
+ }
if (copy)
argvec[i] = copy_to_reg (argvec[i]);
}
will not create corresponding BLOCK nodes. (There should be
a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
and BLOCKs.) If this flag is set, MARK_ENDS should be zero
- when expand_end_bindings is called.
+ when expand_end_bindings is called.
If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
optionally be supplied. If so, it becomes the NOTE_BLOCK for the
rtx note;
int exit_flag = ((flags & 1) != 0);
int block_flag = ((flags & 2) == 0);
-
+
/* If a BLOCK is supplied, then the caller should be requesting a
NOTE_INSN_BLOCK_BEG note. */
if (!block_flag && block)
}
else
note = emit_note (NULL_PTR, NOTE_INSN_DELETED);
-
+
/* Make an entry on block_stack for the block we are entering. */
thisblock->next = block_stack;
expand_end_target_temps ()
{
expand_end_bindings (NULL_TREE, 0, 0);
-
+
/* This is so that even if the result is preserved, the space
allocated will be freed, as we know that it is no longer in use. */
pop_temp_slots ();
BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
represents the outermost (function) scope for the function or method (i.e.
the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
- *that* node in turn will point to the relevant FUNCTION_DECL node. */
+ *that* node in turn will point to the relevant FUNCTION_DECL node. */
int
is_body_block (stmt)
if (warn_unused_variable)
for (decl = vars; decl; decl = TREE_CHAIN (decl))
- if (TREE_CODE (decl) == VAR_DECL
+ if (TREE_CODE (decl) == VAR_DECL
&& ! TREE_USED (decl)
&& ! DECL_IN_SYSTEM_HEADER (decl)
- && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
+ && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
warning_with_decl (decl, "unused variable `%s'");
}
/* Since expand_eh_region_start does an expand_start_bindings, we
have to first end all the bindings that were created by
expand_eh_region_start. */
-
+
thisblock = block_stack;
/* If any of the variables in this scope were not used, warn the
if (GET_CODE (insn) == NOTE)
insn = prev_nonnote_insn (insn);
reachable = (! insn || GET_CODE (insn) != BARRIER);
-
+
/* Do the cleanups. */
expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
if (reachable)
if (type == error_mark_node)
DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
+
else if (DECL_SIZE (decl) == 0)
/* Variable with incomplete type. */
{
if (DECL_INITIAL (decl) == 0)
/* Error message was already done; now avoid a crash. */
- DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
+ DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
else
/* An initializer is going to decide the size of this array.
Until we know the size, represent its address with a reg. */
&& !(flag_float_store
&& TREE_CODE (type) == REAL_TYPE)
&& ! TREE_THIS_VOLATILE (decl)
- && ! TREE_ADDRESSABLE (decl)
&& (DECL_REGISTER (decl) || optimize)
/* if -fcheck-memory-usage, check all variables. */
&& ! current_function_check_memory_usage)
TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
maybe_set_unchanging (DECL_RTL (decl), decl);
+
+ /* If something wants our address, try to use ADDRESSOF. */
+ if (TREE_ADDRESSABLE (decl))
+ put_var_into_stack (decl);
}
else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
free_temp_slots ();
/* Allocate space on the stack for the variable. Note that
- DECL_ALIGN says how the variable is to be aligned and we
+ DECL_ALIGN says how the variable is to be aligned and we
cannot use it to conclude anything about the alignment of
the size. */
address = allocate_dynamic_stack_space (size, NULL_RTX,
emit_move_insn (flag, const1_rtx);
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
DECL_RTL (cond) = flag;
cleanup, integer_zero_node);
cleanup = fold (cleanup);
- pop_obstacks ();
-
cleanups = thisblock->data.block.cleanup_ptr;
}
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
cleanup = unsave_expr (cleanup);
- pop_obstacks ();
- t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
+ t = *cleanups = tree_cons (decl, cleanup, *cleanups);
if (! cond_context)
/* If this block has a cleanup, it belongs in stack_block_stack. */
/* Record the cleanup for the dynamic handler chain. */
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
cleanup = make_node (POPDCC_EXPR);
- pop_obstacks ();
/* Add the cleanup in a manner similar to expand_decl_cleanup. */
thisblock->data.block.cleanups
- = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
+ = tree_cons (decl, cleanup, thisblock->data.block.cleanups);
/* If this block has a cleanup, it belongs in stack_block_stack. */
stack_block_stack = thisblock;
/* Record the cleanup for the dynamic handler chain. */
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
cleanup = make_node (POPDHC_EXPR);
- pop_obstacks ();
/* Add the cleanup in a manner similar to expand_decl_cleanup. */
thisblock->data.block.cleanups
- = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
+ = tree_cons (decl, cleanup, thisblock->data.block.cleanups);
/* If this block has a cleanup, it belongs in stack_block_stack. */
stack_block_stack = thisblock;
TREE_ADDRESSABLE (decl) = 1;
break;
}
-
+
expand_decl (decl);
expand_decl_cleanup (decl, cleanup);
x = DECL_RTL (decl);
if (cleanup != 0)
thisblock->data.block.cleanups
- = temp_tree_cons (decl_elt, cleanup_elt,
- thisblock->data.block.cleanups);
+ = tree_cons (decl_elt, cleanup_elt,
+ thisblock->data.block.cleanups);
}
}
\f
thiscase->data.case_stmt.index_expr = expr;
thiscase->data.case_stmt.nominal_type = type;
thiscase->data.case_stmt.default_label = 0;
- thiscase->data.case_stmt.num_ranges = 0;
thiscase->data.case_stmt.printname = printname;
thiscase->data.case_stmt.line_number_status = force_line_numbers ();
case_stack = thiscase;
start_cleanup_deferral ();
}
-
/* Start a "dummy case statement" within which case labels are invalid
and are not connected to any larger real case statement.
This can be used if you don't want to let a case statement jump
thiscase->data.case_stmt.start = 0;
thiscase->data.case_stmt.nominal_type = 0;
thiscase->data.case_stmt.default_label = 0;
- thiscase->data.case_stmt.num_ranges = 0;
case_stack = thiscase;
nesting_stack = thiscase;
start_cleanup_deferral ();
/* If insn is zero, then there must have been a syntax error. */
if (insn)
- warning_with_file_and_line (NOTE_SOURCE_FILE(insn),
- NOTE_LINE_NUMBER(insn),
+ warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
+ NOTE_LINE_NUMBER (insn),
"unreachable code at beginning of %s",
case_stack->data.case_stmt.printname);
break;
|| ! int_fits_type_p (value, index_type)))
return 3;
- /* Fail if this is a duplicate or overlaps another entry. */
- if (value == 0)
- {
- if (case_stack->data.case_stmt.default_label != 0)
- {
- *duplicate = case_stack->data.case_stmt.default_label;
- return 2;
- }
- case_stack->data.case_stmt.default_label = label;
- }
- else
- return add_case_node (value, value, label, duplicate);
-
- expand_label (label);
- return 0;
+ return add_case_node (value, value, label, duplicate);
}
/* Like pushcase but this case applies to all values between VALUE1 and
if (value2 != 0 && tree_int_cst_lt (value2, value1))
return 4;
- /* If the max was unbounded, use the max of the nominal_type we are
+ /* If the max was unbounded, use the max of the nominal_type we are
converting to. Do this after the < check above to suppress false
positives. */
if (value2 == 0)
into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
slowdown for large switch statements. */
-static int
+int
add_case_node (low, high, label, duplicate)
tree low, high;
tree label;
{
struct case_node *p, **q, *r;
+ /* If there's no HIGH value, then this is not a case range; it's
+ just a simple case label. But that's just a degenerate case
+ range. */
+ if (!high)
+ high = low;
+
+ /* Handle default labels specially. */
+ if (!high && !low)
+ {
+ if (case_stack->data.case_stmt.default_label != 0)
+ {
+ *duplicate = case_stack->data.case_stmt.default_label;
+ return 2;
+ }
+ case_stack->data.case_stmt.default_label = label;
+ expand_label (label);
+ return 0;
+ }
+
q = &case_stack->data.case_stmt.case_list;
p = *q;
}
}
- /* Add this label to the chain, and succeed.
- Copy LOW, HIGH so they are on temporary rather than momentary
- obstack and will thus survive till the end of the case statement. */
+ /* Add this label to the chain, and succeed. */
- r = (struct case_node *) oballoc (sizeof (struct case_node));
- r->low = copy_node (low);
+ r = (struct case_node *) xmalloc (sizeof (struct case_node));
+ r->low = low;
/* If the bounds are equal, turn this into the one-value case. */
-
if (tree_int_cst_equal (low, high))
r->high = r->low;
else
- {
- r->high = copy_node (high);
- case_stack->data.case_stmt.num_ranges++;
- }
+ r->high = high;
r->code_label = label;
expand_label (label);
return 0;
}
-
\f
/* Returns the number of possible values of TYPE.
Returns -1 if the number is unknown, variable, or if the number does not
TREE_TYPE (val) = type;
if (! root)
- ; /* Do nothing */
+ /* Do nothing. */
+ ;
else if (sparseness == 2)
{
tree t;
duplicate case values (multiple enum constants
with the same value). */
TREE_TYPE (val) = TREE_TYPE (root->low);
- for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
+ for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
t = TREE_CHAIN (t), xlo++)
{
TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
/* Keep going past elements distinctly greater than VAL. */
if (tree_int_cst_lt (val, n->low))
n = n->left;
-
+
/* or distinctly less than VAL. */
else if (tree_int_cst_lt (n->high, val))
n = n->right;
-
+
else
{
/* We have found a matching range. */
{
TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
- while ( ! tree_int_cst_lt (n->high, val))
+ while (! tree_int_cst_lt (n->high, val))
{
/* Calculate (into xlo) the "offset" of the integer (val).
The element with lowest value has offset 0, the next smallest
TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
&xlo, &xhi);
}
-
+
if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
BITARRAY_SET (cases_seen, xlo);
{
register struct case_node *n;
register tree chain;
-#if 0 /* variable used by 'if 0'ed code below. */
+#if 0 /* variable used by 'if 0'ed code below. */
register struct case_node **l;
int all_values = 1;
#endif
if (size > 0 && size < 600000
/* We deliberately use calloc here, not cmalloc, so that we can suppress
- this optimization if we don't have enough memory rather than
+ this optimization if we don't have enough memory rather than
aborting, as xmalloc would do. */
&& (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
{
mark_seen_cases (type, cases_seen, size, sparseness);
for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
- if (BITARRAY_TEST(cases_seen, i) == 0)
+ if (BITARRAY_TEST (cases_seen, i) == 0)
warning ("enumeration value `%s' not handled in switch",
IDENTIFIER_POINTER (TREE_PURPOSE (v)));
#endif /* 0 */
}
+/* Recursively free the case_node tree rooted at CN: both subtrees,
+ then CN itself. A null CN is a no-op. Used after expand_end_case
+ now that case nodes are xmalloc'd instead of obstack-allocated. */
+
+static void
+free_case_nodes (cn)
+ case_node_ptr cn;
+{
+ if (cn)
+ {
+ /* Post-order: children are released before the parent node. */
+ free_case_nodes (cn->left);
+ free_case_nodes (cn->right);
+ free (cn);
+ }
+}
+
\f
/* Terminate a case (Pascal) or switch (C) statement
in which ORIG_INDEX is the expression to be tested.
if (thiscase->data.case_stmt.case_list
&& thiscase->data.case_stmt.case_list->left)
thiscase->data.case_stmt.case_list
- = case_tree2list(thiscase->data.case_stmt.case_list, 0);
+ = case_tree2list (thiscase->data.case_stmt.case_list, 0);
/* Simplify the case-list before we count it. */
group_case_nodes (thiscase->data.case_stmt.case_list);
generate the conversion. */
if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
- && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
+ && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
== CODE_FOR_nothing))
{
enum machine_mode wider_mode;
use_cost_table
= (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
&& estimate_case_costs (thiscase->data.case_stmt.case_list));
- balance_case_nodes (&thiscase->data.case_stmt.case_list,
+ balance_case_nodes (&thiscase->data.case_stmt.case_list,
NULL_PTR);
emit_case_nodes (index, thiscase->data.case_stmt.case_list,
default_label, index_type);
index = protect_from_queue (index, 0);
do_pending_stack_adjust ();
- op_mode = insn_data[(int)CODE_FOR_casesi].operand[0].mode;
- if (! (*insn_data[(int)CODE_FOR_casesi].operand[0].predicate)
+ op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
+ if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
(index, op_mode))
index = copy_to_mode_reg (op_mode, index);
op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
- op_mode = insn_data[(int)CODE_FOR_casesi].operand[1].mode;
- if (! (*insn_data[(int)CODE_FOR_casesi].operand[1].predicate)
+ op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
+ if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
(op1, op_mode))
op1 = copy_to_mode_reg (op_mode, op1);
op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
- op_mode = insn_data[(int)CODE_FOR_casesi].operand[2].mode;
- if (! (*insn_data[(int)CODE_FOR_casesi].operand[2].predicate)
+ op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
+ if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
(op2, op_mode))
op2 = copy_to_mode_reg (op_mode, op2);
{
index_type = thiscase->data.case_stmt.nominal_type;
index_expr = fold (build (MINUS_EXPR, index_type,
- convert (index_type, index_expr),
- convert (index_type, minval)));
+ convert (index_type, index_expr),
+ convert (index_type, minval)));
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
emit_queue ();
index = protect_from_queue (index, 0);
ncases = TREE_INT_CST_LOW (range) + 1;
labelvec = (rtx *) alloca (ncases * sizeof (rtx));
- bzero ((char *) labelvec, ncases * sizeof (rtx));
+ memset ((char *) labelvec, 0, ncases * sizeof (rtx));
for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
{
emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
gen_rtx_LABEL_REF (Pmode, table_label),
gen_rtvec_v (ncases, labelvec),
- const0_rtx, const0_rtx));
+ const0_rtx, const0_rtx));
else
emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
gen_rtvec_v (ncases, labelvec)));
if (thiscase->exit_label)
emit_label (thiscase->exit_label);
+ free_case_nodes (case_stack->data.case_stmt.case_list);
POPSTACK (case_stack);
free_temp_slots ();
estimate_case_costs (node)
case_node_ptr node;
{
- tree min_ascii = build_int_2 (-1, -1);
+ tree min_ascii = integer_minus_one_node;
tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
case_node_ptr n;
int i;
/* If we haven't already made the cost table, make it now. Note that the
lower bound of the table is -1, not zero. */
- if (cost_table == NULL)
+ if (! cost_table_initialized)
{
- cost_table = cost_table_ + 1;
+ cost_table_initialized = 1;
for (i = 0; i < 128; i++)
{
if (ISALNUM (i))
- cost_table[i] = 16;
+ COST_TABLE (i) = 16;
else if (ISPUNCT (i))
- cost_table[i] = 8;
+ COST_TABLE (i) = 8;
else if (ISCNTRL (i))
- cost_table[i] = -1;
+ COST_TABLE (i) = -1;
}
- cost_table[' '] = 8;
- cost_table['\t'] = 4;
- cost_table['\0'] = 4;
- cost_table['\n'] = 2;
- cost_table['\f'] = 1;
- cost_table['\v'] = 1;
- cost_table['\b'] = 1;
+ COST_TABLE (' ') = 8;
+ COST_TABLE ('\t') = 4;
+ COST_TABLE ('\0') = 4;
+ COST_TABLE ('\n') = 2;
+ COST_TABLE ('\f') = 1;
+ COST_TABLE ('\v') = 1;
+ COST_TABLE ('\b') = 1;
}
/* See if all the case expressions look like text. It is text if the
for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
- if (cost_table[i] < 0)
+ if (COST_TABLE (i) < 0)
return 0;
}
{
ranges++;
if (use_cost_table)
- cost += cost_table[TREE_INT_CST_LOW (np->high)];
+ cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
}
if (use_cost_table)
- cost += cost_table[TREE_INT_CST_LOW (np->low)];
+ cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
i++;
np = np->right;
{
/* Skip nodes while their cost does not reach that amount. */
if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
- i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
- i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
+ i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
+ i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
if (i <= 0)
break;
npp = &(*npp)->right;
if (node_is_bounded (node->right, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
- GT, NULL_RTX, mode, unsignedp, 0,
- label_rtx (node->right->code_label));
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ GT, NULL_RTX, mode, unsignedp, 0,
+ label_rtx (node->right->code_label));
emit_case_nodes (index, node->left, default_label, index_type);
}
else if (node_is_bounded (node->left, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
LT, NULL_RTX, mode, unsignedp, 0,
label_rtx (node->left->code_label));
emit_case_nodes (index, node->right, default_label, index_type);
/* Neither node is bounded. First distinguish the two sides;
then emit the code for one side at a time. */
- tree test_label
- = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
+ tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
/* See if the value is on the right. */
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
GT, NULL_RTX, mode, unsignedp, 0,
label_rtx (test_label));
{
if (!node_has_low_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high,
- NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
LT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
a branch-greater-than will get us to the default
label correctly. */
if (use_cost_table
- && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
+ && COST_TABLE (TREE_INT_CST_LOW (node->high)) < 12)
;
#endif /* 0 */
- if (node->left->left || node->left->right
+ if (node->left->left || node->left->right
|| !tree_int_cst_equal (node->left->low, node->left->high))
{
if (!node_has_high_bound (node, index_type))
then handle the two subtrees. */
tree test_label = 0;
-
if (node_is_bounded (node->right, index_type))
/* Right hand node is fully bounded so we can eliminate any
testing and branch directly to the target code. */
Branch to a label where we will handle it later. */
test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
GT, NULL_RTX, mode, unsignedp, 0,
label_rtx (test_label));
}
if they are possible. */
if (!node_has_low_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
LT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
if they are possible. */
if (!node_has_high_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
GT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
/* Value belongs to this node or to the left-hand subtree. */
- emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
GE, NULL_RTX, mode, unsignedp, 0,
label_rtx (node->code_label));
if (!node_has_high_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
GT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
if (!node_has_low_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
LT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
}
}
}
-\f