/* Expands front end tree to back end RTL for GNU C-Compiler
- Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
+ Copyright (C) 1987, 88, 89, 92, 93, 94, 1995 Free Software Foundation, Inc.
This file is part of GNU CC.
cleanup list whenever an empty list is required. */
static tree empty_cleanup_list;
#endif
+
+extern void (*interim_eh_hook) PROTO((tree));
\f
/* Functions and data structures for expanding case statements. */
rtx start_label;
/* Label at the end of the whole construct. */
rtx end_label;
+ /* Label before a jump that branches to the end of the whole
+ construct. This is where destructors go, if any. */
+ rtx alt_end_label;
/* Label for `continue' statement to jump to;
this is in front of the stepper of the loop. */
rtx continue_label;
rtx, int));
static void bc_fixup_gotos PROTO((struct nesting *, int, tree,
rtx, int));
-static int warn_if_unused_value PROTO((tree));
static void bc_expand_start_cond PROTO((tree, int));
static void bc_expand_end_cond PROTO((void));
static void bc_expand_start_else PROTO((void));
static void bc_expand_variable_local_init PROTO((tree));
static void bc_expand_decl_init PROTO((tree));
static void expand_null_return_1 PROTO((rtx, int));
+static void expand_value_return PROTO((rtx));
static int tail_recursion_args PROTO((tree, tree));
-static void expand_cleanups PROTO((tree, tree));
+static void expand_cleanups PROTO((tree, tree, int, int));
static void bc_expand_start_case PROTO((struct nesting *, tree,
tree, char *));
static int bc_pushcase PROTO((tree, tree));
/* Execute the cleanups for blocks we are exiting. */
if (block->data.block.cleanups != 0)
{
- expand_cleanups (block->data.block.cleanups, NULL_TREE);
+ expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
do_pending_stack_adjust ();
}
}
/* Execute the cleanups for blocks we are exiting. */
if (block->data.block.cleanups != 0)
{
- expand_cleanups (block->data.block.cleanups, NULL_TREE);
+ expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
do_pending_stack_adjust ();
}
}
if (TREE_ADDRESSABLE (lists)
&& TREE_VALUE (lists) != 0)
{
- expand_cleanups (TREE_VALUE (lists), 0);
+ expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
/* Pop any pushes done in the cleanups,
in case function is about to return. */
do_pending_stack_adjust ();
}
}
- /* Mark the cleanups of exited blocks so that they are executed
- by the code above. */
+ /* For any still-undefined labels, do the cleanups for this block now.
+ We must do this now since items in the cleanup list may go out
+ of scope when the block ends. */
for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
if (f->before_jump != 0
&& PREV_INSN (f->target_rtl) == 0
/* Label has still not appeared. If we are exiting a block with
a stack level to restore, that started before the fixup,
mark this stack level as needing restoration
- when the fixup is later finalized.
- Also mark the cleanup_list_list element for F
- that corresponds to this block, so that ultimately
- this block's cleanups will be executed by the code above. */
+ when the fixup is later finalized. */
&& thisblock != 0
- /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
- it means the label is undefined. That's erroneous, but possible. */
+ /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
+ means the label is undefined. That's erroneous, but possible. */
&& (thisblock->data.block.block_start_count
<= f->block_start_count))
{
tree lists = f->cleanup_list_list;
+ rtx cleanup_insns;
+
for (; lists; lists = TREE_CHAIN (lists))
/* If the following elt. corresponds to our containing block
then the elt. must be for this block. */
if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
- TREE_ADDRESSABLE (lists) = 1;
+ {
+ start_sequence ();
+ pushlevel (0);
+ set_block (f->context);
+ expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
+ do_pending_stack_adjust ();
+ cleanup_insns = get_insns ();
+ poplevel (1, 0, 0);
+ end_sequence ();
+ f->before_jump
+ = emit_insns_after (cleanup_insns, f->before_jump);
+
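+ /* Zap the list entry so these cleanups are not
+ expanded again later for this fixup. */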
+ TREE_VALUE (lists) = 0;
+ }
if (stack_level)
f->stack_level = stack_level;
{
if (output_bytecode)
{
- error ("`asm' is illegal when generating bytecode");
+ error ("`asm' is invalid when generating bytecode");
return;
}
if (output_bytecode)
{
- error ("`asm' is illegal when generating bytecode");
+ error ("`asm' is invalid when generating bytecode");
return;
}
i = decode_reg_name (regname);
if (i >= 0 || i == -4)
++nclobbers;
+ else if (i == -2)
+ error ("unknown register name `%s' in `asm'", regname);
}
last_expr_type = 0;
for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
{
tree val = TREE_VALUE (tail);
+ tree type = TREE_TYPE (val);
tree val1;
int j;
int found_equal;
return;
}
- /* If an output operand is not a variable or indirect ref,
- or a part of one,
- create a SAVE_EXPR which is a pseudo-reg
- to act as an intermediate temporary.
- Make the asm insn write into that, then copy it to
- the real output operand. */
-
- while (TREE_CODE (val) == COMPONENT_REF
- || TREE_CODE (val) == ARRAY_REF)
- val = TREE_OPERAND (val, 0);
+ /* If an output operand is not a decl or indirect ref,
+ make a temporary to act as an intermediate. Make the asm insn
+ write into that, then our caller will copy it to the real output
+ operand. Likewise for promoted variables. */
- if (TREE_CODE (val) != VAR_DECL
- && TREE_CODE (val) != PARM_DECL
- && TREE_CODE (val) != INDIRECT_REF)
+ if (TREE_CODE (val) == INDIRECT_REF
+ || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
+ && ! (GET_CODE (DECL_RTL (val)) == REG
+ && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
+ output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
+ else
{
- TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
- /* If it's a constant, print error now so don't crash later. */
- if (TREE_CODE (TREE_VALUE (tail)) != SAVE_EXPR)
+ if (TYPE_MODE (type) == BLKmode)
{
- error ("invalid output in `asm'");
- return;
+ output_rtx[i] = assign_stack_temp (BLKmode,
+ int_size_in_bytes (type), 0);
+ MEM_IN_STRUCT_P (output_rtx[i]) = AGGREGATE_TYPE_P (type);
}
- }
+ else
+ output_rtx[i] = gen_reg_rtx (TYPE_MODE (type));
- output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
+ TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
+ }
}
if (ninputs + noutputs > MAX_RECOG_OPERANDS)
XVECEXP (body, 3, i) /* argvec */
= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
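+ /* A constant that is not a valid operand in this mode cannot
+ be matched by the asm's constraints; copy it into a
+ register first. */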
+ if (CONSTANT_P (XVECEXP (body, 3, i))
+ && ! general_operand (XVECEXP (body, 3, i),
+ TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
+ XVECEXP (body, 3, i)
+ = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
+ XVECEXP (body, 3, i));
XVECEXP (body, 4, i) /* constraints */
= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
TREE_STRING_POINTER (TREE_PURPOSE (tail)));
{
XVECEXP (body, 0, i++)
= gen_rtx (CLOBBER, VOIDmode,
- gen_rtx (MEM, QImode,
+ gen_rtx (MEM, BLKmode,
gen_rtx (SCRATCH, VOIDmode, 0)));
continue;
}
- error ("unknown register name `%s' in `asm'", regname);
- return;
+ /* Ignore unknown register, error already signalled. */
+ continue;
}
/* Use QImode since that's guaranteed to clobber just one reg. */
else if (warn_unused)
warn_if_unused_value (exp);
}
+
+ /* If EXP is of function type and we are expanding statements for
+ value, convert it to pointer-to-function. */
+ if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
+ exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
+
last_expr_type = TREE_TYPE (exp);
if (! flag_syntax_only)
last_expr_value = expand_expr (exp,
/* Warn if EXP contains any computations whose results are not used.
Return 1 if a warning is printed; 0 otherwise. */
-static int
+int
warn_if_unused_value (exp)
tree exp;
{
/* For a binding, warn if no side effect within it. */
return warn_if_unused_value (TREE_OPERAND (exp, 1));
+ case SAVE_EXPR:
+ return warn_if_unused_value (TREE_OPERAND (exp, 0));
+
case TRUTH_ORIF_EXPR:
case TRUTH_ANDIF_EXPR:
/* In && or ||, warn if 2nd operand has no side effect. */
return warn_if_unused_value (TREE_OPERAND (exp, 1));
case COMPOUND_EXPR:
+ if (TREE_NO_UNUSED_WARNING (exp))
+ return 0;
if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
return 1;
/* Let people do `(foo (), 0)' without a warning. */
while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
tem = TREE_OPERAND (tem, 0);
- if (TREE_CODE (tem) == MODIFY_EXPR)
+ if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
+ || TREE_CODE (tem) == CALL_EXPR)
return 0;
}
- /* ... fall through ... */
+ goto warn;
+ case INDIRECT_REF:
+ /* Don't warn about automatic dereferencing of references, since
+ the user cannot control it. */
+ if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
+ return warn_if_unused_value (TREE_OPERAND (exp, 0));
+ /* ... fall through ... */
+
default:
/* Referencing a volatile value is a side effect, so don't warn. */
if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
|| TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
&& TREE_THIS_VOLATILE (exp))
return 0;
+ warn:
warning_with_file_and_line (emit_filename, emit_lineno,
"value computed is not used");
return 1;
cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
}
+/* After calling expand_start_else, turn this "else" into an "else if"
+ by providing another condition. */
+
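+/* For example, `if (a) s1; else if (b) s2;' can be expanded roughly as:
+ expand_start_cond (a, 0); ...s1...; expand_start_else ();
+ expand_elseif (b); ...s2...; expand_end_cond (). */
+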
+void
+expand_elseif (cond)
+ tree cond;
+{
+ cond_stack->data.cond.next_label = gen_label_rtx ();
+ do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
+}
+
/* Generate RTL for the end of an if-then.
Pop the record for it off of cond_stack. */
thisloop->depth = ++nesting_depth;
thisloop->data.loop.start_label = gen_label_rtx ();
thisloop->data.loop.end_label = gen_label_rtx ();
+ thisloop->data.loop.alt_end_label = 0;
thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
loop_stack = thisloop;
&& SET_DEST (PATTERN (insn)) == pc_rtx
&& GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
&& ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
- && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
- == loop_stack->data.loop.end_label))
+ && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
+ == loop_stack->data.loop.end_label)
+ || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
+ == loop_stack->data.loop.alt_end_label)))
|| (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
- && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
- == loop_stack->data.loop.end_label))))
+ && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
+ == loop_stack->data.loop.end_label)
+ || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
+ == loop_stack->data.loop.alt_end_label)))))
last_test_insn = insn;
if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
&& GET_CODE (PATTERN (insn)) == SET
&& SET_DEST (PATTERN (insn)) == pc_rtx
&& GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
- && (XEXP (SET_SRC (PATTERN (insn)), 0)
- == loop_stack->data.loop.end_label))
+ && ((XEXP (SET_SRC (PATTERN (insn)), 0)
+ == loop_stack->data.loop.end_label)
+ || (XEXP (SET_SRC (PATTERN (insn)), 0)
+ == loop_stack->data.loop.alt_end_label)))
/* Include BARRIER. */
last_test_insn = NEXT_INSN (insn);
}
NULL_TREE);
}
else
- do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
+ {
+ /* In order to handle fixups, we actually create a conditional jump
+ around an unconditional branch to exit the loop. If fixups are
+ necessary, they go before the unconditional branch. */
+
+ rtx label = gen_label_rtx ();
+ rtx last_insn;
+
+ do_jump (cond, NULL_RTX, label);
+ last_insn = get_last_insn ();
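+ /* If do_jump ended by emitting a label, that label now sits just
+ before the unconditional branch to end_label below, so a jump
+ to it also exits the loop; record it as alt_end_label. */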
+ if (GET_CODE (last_insn) == CODE_LABEL)
+ whichloop->data.loop.alt_end_label = last_insn;
+ expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
+ NULL_RTX);
+ emit_label (label);
+ }
return 1;
}
/* Generate RTL to return from the current function, with value VAL. */
-void
+static void
expand_value_return (val)
rtx val;
{
}
/* Are any cleanups needed? E.g. C++ destructors to be run? */
+ /* This is not sufficient. We also need to watch for cleanups of the
+ expression we are about to expand. Unfortunately, we cannot know
+ if it has cleanups until we expand it, and we want to change how we
+ expand it depending on whether we need cleanups. We can't win. */
+#if 0
cleanups = any_pending_cleanups (1);
+#else
+ cleanups = 1;
+#endif
if (TREE_CODE (retval) == RESULT_DECL)
retval_rhs = retval;
}
#endif /* HAVE_return */
- if (cleanups
+ /* If the result is an aggregate that is being returned in one (or more)
+ registers, load the registers here. The compiler currently can't handle
+ copying a BLKmode value into registers. We could put this code in a
+ more general area (for use by everyone instead of just function
+ call/return), but until this feature is generally usable it is kept here
+ (and in expand_call). */
+
+ if (retval_rhs != 0
+ && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
+ && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
+ {
+ int i;
+ int big_endian_correction = 0;
+ int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
+ int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
+ rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
+ rtx result_reg = DECL_RTL (DECL_RESULT (current_function_decl));
+ rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
+ enum machine_mode tmpmode;
+
+ /* Structures smaller than a word are aligned to the least significant
+ byte (to the right). On a BYTES_BIG_ENDIAN machine, this means we
+ must skip the empty high order bytes when calculating the bit
+ offset. */
+ if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
+ big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
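+ /* For example, on a 32-bit big-endian target a 3-byte structure
+ occupies the low-order 24 bits of a word, so the correction
+ here is 32 - 24 = 8 bits. */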
+
+ for (i = 0; i < n_regs; i++)
+ {
+ rtx reg = gen_reg_rtx (word_mode);
+ rtx word = operand_subword_force (result_val, i, BLKmode);
+ int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
+ int bitpos;
+
+ result_pseudos[i] = reg;
+
+ /* Clobber REG and move each partword into it. Ensure we don't
+ go past the end of the structure. Note that the loop below
+ works because we've already verified that padding and
+ endianness are compatible. */
+ emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
+
+ for (bitpos = 0;
+ bitpos < BITS_PER_WORD && bytes > 0;
+ bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
+ {
+ int xbitpos = bitpos + big_endian_correction;
+
+ store_bit_field (reg, bitsize, xbitpos, word_mode,
+ extract_bit_field (word, bitsize, bitpos, 1,
+ NULL_RTX, word_mode,
+ word_mode,
+ bitsize / BITS_PER_UNIT,
+ BITS_PER_WORD),
+ bitsize / BITS_PER_UNIT, BITS_PER_WORD);
+ }
+ }
+
+ /* Now that the value is in pseudos, copy it to the result reg(s). */
+ emit_queue ();
+ free_temp_slots ();
+ for (i = 0; i < n_regs; i++)
+ emit_move_insn (gen_rtx (REG, word_mode, REGNO (result_reg) + i),
+ result_pseudos[i]);
+
+ /* Find the smallest integer mode large enough to hold the
+ entire structure and use that mode instead of BLKmode
+ on the USE insn for the return register. */
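+ /* For instance, a 3-byte structure would get SImode on a
+ typical 32-bit target. */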
+ bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
+ for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ tmpmode != MAX_MACHINE_MODE;
+ tmpmode = GET_MODE_WIDER_MODE (tmpmode))
+ {
+ /* Have we found a large enough mode? */
+ if (GET_MODE_SIZE (tmpmode) >= bytes)
+ break;
+ }
+
+ /* No suitable mode found. */
+ if (tmpmode == MAX_MACHINE_MODE)
+ abort ();
+
+ PUT_MODE (result_reg, tmpmode);
+
+ expand_value_return (result_reg);
+ }
+ else if (cleanups
&& retval_rhs != 0
&& TREE_TYPE (retval_rhs) != void_type_node
&& GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
the original assignment true.
So the following insn will actually be
decrementing fp by STARTING_FRAME_OFFSET. */
- emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
+ emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
if (fixed_regs[ARG_POINTER_REGNUM])
if (thisblock->data.block.stack_level != 0
|| thisblock->data.block.cleanups != 0)
{
+ /* Only clean up here if this point can actually be reached. */
+ int reachable = GET_CODE (get_last_insn ()) != BARRIER;
+
/* Don't let cleanups affect ({...}) constructs. */
int old_expr_stmts_for_value = expr_stmts_for_value;
rtx old_last_expr_value = last_expr_value;
expr_stmts_for_value = 0;
/* Do the cleanups. */
- expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
- do_pending_stack_adjust ();
+ expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
+ if (reachable)
+ do_pending_stack_adjust ();
expr_stmts_for_value = old_expr_stmts_for_value;
last_expr_value = old_last_expr_value;
/* Restore the stack level. */
- if (thisblock->data.block.stack_level != 0)
+ if (reachable && thisblock->data.block.stack_level != 0)
{
emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
thisblock->data.block.stack_level, NULL_RTX);
/* An initializer is going to decide the size of this array.
Until we know the size, represent its address with a reg. */
DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
+ MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
}
else if (DECL_MODE (decl) != BLKmode
/* If -ffloat-store, don't put explicit float vars
+ BITS_PER_UNIT - 1)
/ BITS_PER_UNIT),
1);
+ MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
/* Set alignment we actually gave this decl. */
DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
= temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
/* If this block has a cleanup, it belongs in stack_block_stack. */
stack_block_stack = thisblock;
+ (*interim_eh_hook) (NULL_TREE);
}
return 1;
}
tree cleanup_elt = TREE_PURPOSE (decl_elts);
enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
+ /* Propagate the union's alignment to the elements. */
+ DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
+
+ /* If the element has BLKmode and the union doesn't, the union is
+ aligned such that the element doesn't need to have BLKmode, so
+ change the element's mode to the appropriate one for its size. */
+ if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
+ DECL_MODE (decl_elt) = mode
+ = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
+ MODE_INT, 1);
+
/* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
instead create a new MEM rtx with the proper mode. */
if (GET_CODE (x) == MEM)
If DONT_DO is nonnull, then any list-element
whose TREE_PURPOSE matches DONT_DO is omitted.
This is sometimes used to avoid a cleanup associated with
- a value that is being returned out of the scope. */
+ a value that is being returned out of the scope.
+
+ If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
+ goto and handle protection regions specially in that case.
+
+ If REACHABLE is nonzero, we emit code; otherwise we just inform the
+ exception handling code about this finalization. */
static void
-expand_cleanups (list, dont_do)
+expand_cleanups (list, dont_do, in_fixup, reachable)
tree list;
tree dont_do;
+ int in_fixup;
+ int reachable;
{
tree tail;
for (tail = list; tail; tail = TREE_CHAIN (tail))
if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
{
if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
- expand_cleanups (TREE_VALUE (tail), dont_do);
+ expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
else
{
- /* Cleanups may be run multiple times. For example,
- when exiting a binding contour, we expand the
- cleanups associated with that contour. When a goto
- within that binding contour has a target outside that
- contour, it will expand all cleanups from its scope to
- the target. Though the cleanups are expanded multiple
- times, the control paths are non-overlapping so the
- cleanups will not be executed twice. */
- expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
- free_temp_slots ();
+ if (! in_fixup)
+ (*interim_eh_hook) (TREE_VALUE (tail));
+
+ if (reachable)
+ {
+ /* Cleanups may be run multiple times. For example,
+ when exiting a binding contour, we expand the
+ cleanups associated with that contour. When a goto
+ within that binding contour has a target outside that
+ contour, it will expand all cleanups from its scope to
+ the target. Though the cleanups are expanded multiple
+ times, the control paths are non-overlapping so the
+ cleanups will not be executed twice. */
+ expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
+ free_temp_slots ();
+ }
}
}
}
return 0;
}
\f
+/* Returns the number of possible values of TYPE.
+ Returns -1 if the number is unknown or variable.
+ Returns -2 if the number does not fit in a HOST_WIDE_INT.
+ Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
+ do not increase monotonically (there may be duplicates);
+ to 1 if the values increase monotonically, but not always by 1;
+ otherwise sets it to 0. */
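+
+/* For example, `enum e { A = 1, B = 3 }' (TYPE_MIN_VALUE 1) gives a
+ count of 2 with *SPARSENESS set to 1: the values increase, but not
+ always by 1. */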
+
+HOST_WIDE_INT
+all_cases_count (type, sparseness)
+ tree type;
+ int *sparseness;
+{
+ HOST_WIDE_INT count, count_high = 0;
+ *sparseness = 0;
+
+ switch (TREE_CODE (type))
+ {
+ tree t;
+ case BOOLEAN_TYPE:
+ count = 2;
+ break;
+ case CHAR_TYPE:
+ count = 1 << BITS_PER_UNIT;
+ break;
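+ /* All other types are handled like INTEGER_TYPE. */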
+ default:
+ case INTEGER_TYPE:
+ if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
+ || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
+ return -1;
+ else
+ {
+ /* count
+ = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
+ - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
+ but with overflow checking. */
+ tree mint = TYPE_MIN_VALUE (type);
+ tree maxt = TYPE_MAX_VALUE (type);
+ HOST_WIDE_INT lo, hi;
+ neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
+ &lo, &hi);
+ add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
+ lo, hi, &lo, &hi);
+ add_double (lo, hi, 1, 0, &lo, &hi);
+ if (hi != 0 || lo < 0)
+ return -2;
+ count = lo;
+ }
+ break;
+ case ENUMERAL_TYPE:
+ count = 0;
+ for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
+ {
+ if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
+ || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
+ || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
+ != TREE_INT_CST_LOW (TREE_VALUE (t)))
+ *sparseness = 1;
+ count++;
+ }
+ if (*sparseness == 1)
+ {
+ tree prev = TREE_VALUE (TYPE_VALUES (type));
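+ /* Scan adjacent pairs; any value not strictly greater than
+ its predecessor means a duplicate or a decrease. */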
+ for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
+ {
+ if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
+ {
+ *sparseness = 2;
+ break;
+ }
+ prev = TREE_VALUE (t);
+ }
+
+ }
+ }
+ return count;
+}
+
+
+#define BITARRAY_TEST(ARRAY, INDEX) \
+ ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
+ & (1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR)))
+#define BITARRAY_SET(ARRAY, INDEX) \
+ ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
+ |= 1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR))
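+
+/* For example, BITARRAY_SET (cases_seen, 5) records offset 5, after
+ which BITARRAY_TEST (cases_seen, 5) is nonzero. */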
+
+/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
+ with the case values we have seen, assuming the case expression
+ has the given TYPE.
+ SPARSENESS is as determined by all_cases_count.
+
+ The time needed is proportional to COUNT, unless
+ SPARSENESS is 2, in which case quadratic time is needed. */
+
+void
+mark_seen_cases (type, cases_seen, count, sparseness)
+ tree type;
+ unsigned char *cases_seen;
+ long count;
+ int sparseness;
+{
+ long i;
+
+ tree next_node_to_try = NULL_TREE;
+ long next_node_offset = 0;
+
+ register struct case_node *n;
+ tree val = make_node (INTEGER_CST);
+ TREE_TYPE (val) = type;
+ for (n = case_stack->data.case_stmt.case_list; n;
+ n = n->right)
+ {
+ TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
+ TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
+ while ( ! tree_int_cst_lt (n->high, val))
+ {
+ /* Calculate (into xlo) the "offset" of the integer (val).
+ The element with lowest value has offset 0, the next smallest
+ element has offset 1, etc. */
+
+ HOST_WIDE_INT xlo, xhi;
+ tree t;
+ if (sparseness == 2)
+ {
+ /* This less efficient loop is only needed to handle
+ duplicate case values (multiple enum constants
+ with the same value). */
+ for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
+ t = TREE_CHAIN (t), xlo++)
+ {
+ if (tree_int_cst_equal (val, TREE_VALUE (t)))
+ BITARRAY_SET (cases_seen, xlo);
+ }
+ }
+ else
+ {
+ if (sparseness && TYPE_VALUES (type) != NULL_TREE)
+ {
+ /* The TYPE_VALUES will be in increasing order, so
+ start searching where we last ended. */
+ t = next_node_to_try;
+ xlo = next_node_offset;
+ xhi = 0;
+ for (;;)
+ {
+ if (t == NULL_TREE)
+ {
+ t = TYPE_VALUES (type);
+ xlo = 0;
+ }
+ if (tree_int_cst_equal (val, TREE_VALUE (t)))
+ {
+ next_node_to_try = TREE_CHAIN (t);
+ next_node_offset = xlo + 1;
+ break;
+ }
+ xlo++;
+ t = TREE_CHAIN (t);
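+ /* We have wrapped all the way around without
+ finding VAL; give up. */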
+ if (t == next_node_to_try)
+ break;
+ }
+ }
+ else
+ {
+ t = TYPE_MIN_VALUE (type);
+ if (t)
+ neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
+ &xlo, &xhi);
+ else
+ xlo = xhi = 0;
+ add_double (xlo, xhi,
+ TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
+ &xlo, &xhi);
+ }
+
+ if (xhi == 0 && xlo >= 0 && xlo < count)
+ BITARRAY_SET (cases_seen, xlo);
+ }
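+ /* Step VAL to the next value in the range N->low .. N->high. */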
+ add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
+ 1, 0,
+ &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
+ }
+ }
+}
+
/* Called when the index of a switch statement is an enumerated type
and there is no default label.
register tree chain;
int all_values = 1;
+ /* Sparseness of the selector type's values, as computed
+ by all_cases_count. */
+ int sparseness = 0;
+
+ /* The number of possible selector values. */
+ HOST_WIDE_INT size;
+
+ /* For each possible selector value, a 1 iff it has been matched
+ by a case value alternative. */
+ unsigned char *cases_seen;
+
+ /* The allocated size of cases_seen, in chars. */
+ long bytes_needed;
+ tree t;
+
if (output_bytecode)
{
bc_check_for_full_enumeration_handling (type);
return;
}
- /* The time complexity of this loop is currently O(N * M), with
- N being the number of members in the enumerated type, and
- M being the number of case expressions in the switch. */
+ if (! warn_switch)
+ return;
+
+ size = all_cases_count (type, &sparseness);
+ bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
- for (chain = TYPE_VALUES (type);
- chain;
- chain = TREE_CHAIN (chain))
+ if (size > 0 && size < 600000
+ /* We deliberately use malloc here - not xmalloc. */
+ && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
{
- /* Find a match between enumeral and case expression, if possible.
- Quit looking when we've gone too far (since case expressions
- are kept sorted in ascending order). Warn about enumerators not
- handled in the switch statement case expression list. */
-
- for (n = case_stack->data.case_stmt.case_list;
- n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
- n = n->right)
- ;
+ long i;
+ tree v = TYPE_VALUES (type);
+ bzero (cases_seen, bytes_needed);
- if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
+ /* The time complexity of this code is normally O(N), where
+ N is the number of members in the enumerated type.
+ However, if TYPE is an ENUMERAL_TYPE whose values do not
+ increase monotonically, quadratic time may be needed. */
+
+ mark_seen_cases (type, cases_seen, size, sparseness);
+
+ for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
{
- if (warn_switch)
+ if (BITARRAY_TEST (cases_seen, i) == 0)
warning ("enumeration value `%s' not handled in switch",
- IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
- all_values = 0;
+ IDENTIFIER_POINTER (TREE_PURPOSE (v)));
}
+
+ free (cases_seen);
}
/* Now we go the other way around; we warn if there are case
register int i;
rtx before_case;
register struct nesting *thiscase = case_stack;
- tree index_expr;
+ tree index_expr, index_type;
int unsignedp;
if (output_bytecode)
table_label = gen_label_rtx ();
index_expr = thiscase->data.case_stmt.index_expr;
- unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
+ index_type = TREE_TYPE (index_expr);
+ unsignedp = TREE_UNSIGNED (index_type);
do_pending_stack_adjust ();
/* An ERROR_MARK occurs for various reasons including invalid data type. */
- if (TREE_TYPE (index_expr) != error_mark_node)
+ if (index_type != error_mark_node)
{
/* If switch expression was an enumerated type, check that all
enumeration literals are covered by the cases.
if (TREE_CODE (n->high) != INTEGER_CST)
abort ();
- n->low = convert (TREE_TYPE (index_expr), n->low);
- n->high = convert (TREE_TYPE (index_expr), n->high);
+ n->low = convert (index_type, n->low);
+ n->high = convert (index_type, n->high);
/* Count the elements and track the largest and smallest
of them (treating them as signed even if they are not). */
/* Compute span of values. */
if (count != 0)
- range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
- maxval, minval));
+ range = fold (build (MINUS_EXPR, index_type, maxval, minval));
- if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
+ if (count == 0)
{
expand_expr (index_expr, const0_rtx, VOIDmode, 0);
emit_queue ();
index_expr
= build_int_2 (INTVAL (index),
unsignedp || INTVAL (index) >= 0 ? 0 : -1);
- index_expr = convert (TREE_TYPE (index_expr), index_expr);
+ index_expr = convert (index_type, index_expr);
}
/* For constant index expressions we need only
target code. The job of removing any unreachable
code is left to the optimisation phase if the
"-O" option is specified. */
- for (n = thiscase->data.case_stmt.case_list;
- n;
- n = n->right)
- {
- if (! tree_int_cst_lt (index_expr, n->low)
- && ! tree_int_cst_lt (n->high, index_expr))
- break;
- }
+ for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
+ if (! tree_int_cst_lt (index_expr, n->low)
+ && ! tree_int_cst_lt (n->high, index_expr))
+ break;
+
if (n)
emit_jump (label_rtx (n->code_label));
else
balance_case_nodes (&thiscase->data.case_stmt.case_list,
NULL_PTR);
emit_case_nodes (index, thiscase->data.case_stmt.case_list,
- default_label, TREE_TYPE (index_expr));
+ default_label, index_type);
emit_jump_if_reachable (default_label);
}
}
enum machine_mode op_mode;
/* Convert the index to SImode. */
- if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
+ if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
> GET_MODE_BITSIZE (index_mode))
{
- enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
+ enum machine_mode omode = TYPE_MODE (index_type);
rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
/* We must handle the endpoints in the original mode. */
- index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
+ index_expr = build (MINUS_EXPR, index_type,
index_expr, minval);
minval = integer_zero_node;
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
}
else
{
- if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
- index_expr = convert (type_for_size (index_bits, 0),
- index_expr);
+ if (TYPE_MODE (index_type) != index_mode)
+ {
+ index_expr = convert (type_for_size (index_bits, 0),
+ index_expr);
+ index_type = TREE_TYPE (index_expr);
+ }
+
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
}
emit_queue ();
if (! win && HAVE_tablejump)
{
index_expr = convert (thiscase->data.case_stmt.nominal_type,
- fold (build (MINUS_EXPR,
- TREE_TYPE (index_expr),
+ fold (build (MINUS_EXPR, index_type,
index_expr, minval)));
+ index_type = TREE_TYPE (index_expr);
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
emit_queue ();
index = protect_from_queue (index, 0);
do_pending_stack_adjust ();
- do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
+ do_tablejump (index, TYPE_MODE (index_type),
expand_expr (range, NULL_RTX, VOIDmode, 0),
table_label, default_label);
win = 1;
ncases = TREE_INT_CST_LOW (range) + 1;
labelvec = (rtx *) alloca (ncases * sizeof (rtx));
- bzero (labelvec, ncases * sizeof (rtx));
+ bzero ((char *) labelvec, ncases * sizeof (rtx));
for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
{
reorder_insns (before_case, get_last_insn (),
thiscase->data.case_stmt.start);
}
+
if (thiscase->exit_label)
emit_label (thiscase->exit_label);
if (cost_table == NULL)
{
cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
- bzero (cost_table - 1, 129 * sizeof (short));
+ bzero ((char *) (cost_table - 1), 129 * sizeof (short));
for (i = 0; i < 128; i++)
{