/* Handle verification of bytecoded methods for the GNU compiler for
the Java(TM) language.
- Copyright (C) 1997 Free Software Foundation, Inc.
+ Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
+ Free Software Foundation, Inc.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
+GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
-GNU CC is distributed in the hope that it will be useful,
+GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
+along with GCC; see the file COPYING. If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
of Sun Microsystems, Inc. in the United States and other countries.
The Free Software Foundation is independent of Sun Microsystems, Inc. */
+/* This bytecode verifier is an implementation of the bytecode
+verification process described in section 4.9 of "The Java(TM) Virtual
+Machine Specification", Second Edition, by Tim Lindholm and Frank Yellin,
+published by Addison-Wesley in 1999. */
+
#include "config.h"
#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
#include "tree.h"
#include "java-tree.h"
#include "javaop.h"
#include "java-except.h"
#include "toplev.h"
+static void push_pending_label (tree);
+static tree merge_types (tree, tree);
+static const char *check_pending_block (tree);
+static void type_stack_dup (int, int);
+static int start_pc_cmp (const void *, const void *);
+static char *pop_argument_types (tree);
+
extern int stack_pointer;
/* During verification, start of the current subroutine (jsr target). */
/* Append TARGET_LABEL to the pending_block stack unless already in it. */
-void
-push_pending_label (target_label)
- tree target_label;
+static void
+push_pending_label (tree target_label)
{
if (! LABEL_CHANGED (target_label))
{
/* Note that TARGET_LABEL is a possible successor instruction.
Merge the type state etc.
- Return NULL on sucess, or an error message on failure. */
+ Return NULL on success, or an error message on failure. */
-static char *
-check_pending_block (target_label)
- tree target_label;
+static const char *
+check_pending_block (tree target_label)
{
int changed = merge_type_state (target_label);
push_pending_label (target_label);
}
- if (current_subr == NULL)
+ if (current_subr == NULL_TREE)
{
if (LABEL_IN_SUBR (target_label))
return "might transfer control into subroutine";
return NULL;
}
+/* Count the number of nested jsr calls needed to reach LABEL. */
+
+static int
+subroutine_nesting (tree label)
+{
+ int nesting = 0;
+ while (label != NULL_TREE && LABEL_IN_SUBR (label))
+ {
+ if (! LABEL_IS_SUBR_START (label))
+ label = LABEL_SUBR_START (label);
+ label = LABEL_SUBR_CONTEXT (label);
+ nesting++;
+ }
+ return nesting;
+}
+
/* Return the "merged" types of TYPE1 and TYPE2.
If either is primitive, the other must match (after promotion to int).
For reference types, return the common super-class.
Return TYPE_UNKNOWN if the types cannot be merged. */
-tree
-merge_types (type1, type2)
- tree type1, type2;
+static tree
+merge_types (tree type1, tree type2)
{
if (type1 == type2)
return type1;
if (type2 == ptr_type_node || type1 == object_ptr_type_node)
return type1;
- tt1 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type1));
- tt2 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type2));
+ tt1 = TREE_TYPE (type1);
+ tt2 = TREE_TYPE (type2);
+
+ /* If tt{1,2} haven't been properly loaded, now is a good time
+ to do it. */
+ if (!TYPE_SIZE (tt1))
+ {
+ load_class (tt1, 1);
+ safe_layout_class (tt1);
+ }
+
+ if (!TYPE_SIZE (tt2))
+ {
+ load_class (tt2, 1);
+ safe_layout_class (tt2);
+ }
if (TYPE_ARRAY_P (tt1) || TYPE_ARRAY_P (tt2))
{
}
return object_ptr_type_node;
}
+
+ if (CLASS_INTERFACE (TYPE_NAME (tt1)))
+ {
+ /* FIXME: should see if two interfaces have a common
+ superinterface. */
+ if (CLASS_INTERFACE (TYPE_NAME (tt2)))
+ {
+ /* This is a kludge, but matches what Sun's verifier does.
+ It can be tricked, but is safe as long as type errors
+ (i.e. interface method calls) are caught at run-time. */
+ return object_ptr_type_node;
+ }
+ else
+ {
+ if (can_widen_reference_to (tt2, tt1))
+ return type1;
+ else
+ return object_ptr_type_node;
+ }
+ }
+ else if (CLASS_INTERFACE (TYPE_NAME (tt2)))
+ {
+ if (can_widen_reference_to (tt1, tt2))
+ return type2;
+ else
+ return object_ptr_type_node;
+ }
+
type1 = tt1;
type2 = tt2;
depth1 = class_depth (type1);
depth2 = class_depth (type2);
for ( ; depth1 > depth2; depth1--)
- type1 = TYPE_BINFO_BASETYPE (type1, 0);
+ type1 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (type1), 0));
for ( ; depth2 > depth1; depth2--)
- type2 = TYPE_BINFO_BASETYPE (type2, 0);
+ type2 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (type2), 0));
while (type1 != type2)
{
- type1 = TYPE_BINFO_BASETYPE (type1, 0);
- type2 = TYPE_BINFO_BASETYPE (type2, 0);
+ type1 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (type1), 0));
+ type2 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (type2), 0));
}
return promote_type (type1);
}
}
/* Merge the current type state with that at LABEL.
- Return -1 the the states are incompatible (i.e. on error),
+ Return -1 if the states are incompatible (i.e. on error),
0 if there was no change, and 1 if there was a change. */
int
-merge_type_state (label)
- tree label;
+merge_type_state (tree label)
{
- int nlocals = DECL_MAX_LOCALS(current_function_decl);
+ int nlocals = DECL_MAX_LOCALS (current_function_decl);
int cur_length = stack_pointer + nlocals;
tree vec = LABEL_TYPE_STATE (label);
tree return_map;
{
vec = make_tree_vec (cur_length);
LABEL_TYPE_STATE (label) = vec;
+
while (--cur_length >= 0)
- TREE_VEC_ELT (vec, cur_length) = type_map [cur_length];
+ TREE_VEC_ELT (vec, cur_length) = type_map[cur_length];
return 1;
}
else
for (i = 0; i < cur_length; i++)
{
tree old_type = TREE_VEC_ELT (vec, i);
- tree new_type = merge_types (old_type, type_map [i]);
+ tree new_type = merge_types (old_type, type_map[i]);
if (TREE_VEC_ELT (vec, i) != new_type)
{
/* If there has been a change, note that since we must re-verify.
However, if the label is the start of a subroutine,
we don't care about local variables that are neither
- set nor used in the sub-routine. */
+ set nor used in the subroutine. */
if (return_map == NULL_TREE || i >= nlocals
|| TREE_VEC_ELT (return_map, i) != TYPE_UNUSED
|| (TYPE_IS_WIDE (new_type)
/* Handle dup-like operations. */
static void
-type_stack_dup (size, offset)
- int size, offset;
+type_stack_dup (int size, int offset)
{
tree type[4];
int index;
- if (size + offset > stack_pointer)
- error ("stack underflow - dup* operation");
for (index = 0; index < size + offset; index++)
{
type[index] = stack_type_map[stack_pointer - 1];
index++;
type[index] = stack_type_map[stack_pointer - 2];
if (! TYPE_IS_WIDE (type[index]))
- fatal ("internal error - dup operation");
+ abort ();
if (index == size || index == size + offset)
- fatal ("dup operation splits 64-bit number");
+ /* Dup operation splits 64-bit number. */
+ abort ();
}
pop_type (type[index]);
}
}
}
+/* This keeps track of a start PC and corresponding initial index. */
+struct pc_index
+{
+ int start_pc;
+ int index;
+};
+
+/* A helper that is used when sorting exception ranges. */
+static int
+start_pc_cmp (const void *xp, const void *yp)
+{
+ const struct pc_index *x = (const struct pc_index *) xp;
+ const struct pc_index *y = (const struct pc_index *) yp;
+ return x->start_pc - y->start_pc;
+}
+
/* This causes the next iteration to ignore the next instruction
and look for some other unhandled instruction. */
#define INVALIDATE_PC (prevpc = -1, oldpc = PC, PC = INVALID_PC)
#define VERIFICATION_ERROR(MESSAGE) \
do { message = MESSAGE; goto verify_error; } while (0)
+#define VERIFICATION_ERROR_WITH_INDEX(MESSAGE) \
+ do { message = MESSAGE; goto error_with_index; } while (0)
+
+/* Recursive helper function to pop argument types during verification.
+ ARG_TYPES is the list of formal parameter types.
+ Return NULL on success and a freshly malloc'd error message on failure. */
+
+static char *
+pop_argument_types (tree arg_types)
+{
+ if (arg_types == end_params_node)
+ return NULL;
+ if (TREE_CODE (arg_types) == TREE_LIST)
+ {
+ char *message = pop_argument_types (TREE_CHAIN (arg_types));
+ if (message == NULL)
+ pop_type_0 (TREE_VALUE (arg_types), &message);
+ return message;
+ }
+ abort ();
+}
+
+#define POP_TYPE(TYPE, MESSAGE) \
+ do { pmessage = NULL; pop_type_0 (TYPE, &pmessage); \
+ if (pmessage != NULL) goto pop_type_error; \
+ } while (0)
+
+#define POP_TYPE_CONV(TYPE, POPPED_TYPE, MESSAGE) \
+ do { pmessage = NULL; POPPED_TYPE = pop_type_0 (TYPE, &pmessage); \
+ if (pmessage != NULL) goto pop_type_error; \
+ } while (0)
+
+#define PUSH_TYPE(TYPE) \
+ do { if (! push_type_0 (TYPE)) { goto stack_overflow; }} while (0)
+
#define PUSH_PENDING(LABEL) \
- do { if ((message = check_pending_block (LABEL)) != NULL) \
- goto verify_error; } while (0)
+ do { tree tmplab = LABEL; \
+ if ((message = check_pending_block (tmplab)) != NULL) \
+ { oldpc = LABEL_PC (tmplab); goto verify_error; }} while (0)
#ifdef __GNUC__
-#define CHECK_PC_IN_RANGE(PC) ({if (PC < 0 || PC > length) goto bad_pc; 1;})
+#define CHECK_PC_IN_RANGE(PC) __extension__ \
+ ({if (PC < 0 || PC > length) goto bad_pc; (void)1;})
#else
-#define CHECK_PC_IN_RANGE(PC) (PC < 0 || PC > length ? \
- (fatal("Bad byte codes.\n"), 0) : 1)
+#define CHECK_PC_IN_RANGE(PC) (PC < 0 || PC > length ? (abort (), 0) : 1)
#endif
#define BCODE byte_ops
-/* Verify the bytecodes of the current method.
- Return 1 on sucess, 0 on failure. */
+\f
+/* Verify the bytecodes of the current method, with the instructions
+ starting at BYTE_OPS and LENGTH in number, from the class file pointed to
+ by JCF.
+ Return 1 on success, 0 on failure. */
int
-verify_jvm_instructions (jcf, byte_ops, length)
- JCF* jcf;
- unsigned char* byte_ops;
- long length;
+verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length)
{
tree label;
int wide = 0;
int op_code;
int PC;
- int oldpc; /* PC of start of instruction. */
- int prevpc; /* If >= 0, PC of previous instruction. */
- char *message;
+ int oldpc = 0; /* PC of start of instruction. */
+ int prevpc = 0; /* If >= 0, PC of previous instruction. */
+ const char *message = 0;
+ char *pmessage;
int i;
- register unsigned char *p;
+ int index;
+ unsigned char *p;
struct eh_range *prev_eh_ranges = NULL_EH_RANGE;
struct eh_range *eh_ranges;
+ tree return_type = TREE_TYPE (TREE_TYPE (current_function_decl));
+ struct pc_index *starts;
+ int eh_count;
jint int_value = -1;
pending_blocks = NULL_TREE;
- /* Handle the exception table. */
+ current_subr = NULL_TREE;
+
+ /* Handle the exception table. */
method_init_exceptions ();
JCF_SEEK (jcf, DECL_CODE_OFFSET (current_function_decl) + length);
- i = JCF_readu2 (jcf);
+ eh_count = JCF_readu2 (jcf);
- /* We read the exception backwards. */
- p = jcf->read_ptr + 8 * i;
- while (--i >= 0)
+ /* We read the exception handlers in order of increasing start PC.
+ To do this we first read and sort the start PCs. */
+ starts = xmalloc (eh_count * sizeof (struct pc_index));
+ for (i = 0; i < eh_count; ++i)
{
- int start_pc = GET_u2 (p-8);
- int end_pc = GET_u2 (p-6);
- int handler_pc = GET_u2 (p-4);
- int catch_type = GET_u2 (p-2);
- p -= 8;
+ starts[i].start_pc = GET_u2 (jcf->read_ptr + 8 * i);
+ starts[i].index = i;
+ }
+ qsort (starts, eh_count, sizeof (struct pc_index), start_pc_cmp);
+
+ for (i = 0; i < eh_count; ++i)
+ {
+ int start_pc, end_pc, handler_pc, catch_type;
+
+ p = jcf->read_ptr + 8 * starts[i].index;
+
+ start_pc = GET_u2 (p);
+ end_pc = GET_u2 (p+2);
+ handler_pc = GET_u2 (p+4);
+ catch_type = GET_u2 (p+6);
if (start_pc < 0 || start_pc >= length
|| end_pc < 0 || end_pc > length || start_pc >= end_pc
|| handler_pc < 0 || handler_pc >= length
- || (handler_pc >= start_pc && handler_pc < end_pc)
- || ! (instruction_bits [start_pc] & BCODE_INSTRUCTION_START)
- || ! (instruction_bits [end_pc] & BCODE_INSTRUCTION_START)
- || ! (instruction_bits [handler_pc] & BCODE_INSTRUCTION_START))
+ || ! (instruction_bits[start_pc] & BCODE_INSTRUCTION_START)
+ || (end_pc < length &&
+ ! (instruction_bits[end_pc] & BCODE_INSTRUCTION_START))
+ || ! (instruction_bits[handler_pc] & BCODE_INSTRUCTION_START))
{
error ("bad pc in exception_table");
+ free (starts);
return 0;
}
- if (! add_handler (start_pc, end_pc,
- lookup_label (handler_pc),
- catch_type == 0 ? NULL_TREE
- : get_class_constant (jcf, catch_type)))
- {
- error ("overlapping exception ranges are not supported");
- return 0;
- }
+ add_handler (start_pc, end_pc,
+ lookup_label (handler_pc),
+ catch_type == 0 ? NULL_TREE
+ : get_class_constant (jcf, catch_type));
- instruction_bits [handler_pc] |= BCODE_EXCEPTION_TARGET;
+ instruction_bits[handler_pc] |= BCODE_EXCEPTION_TARGET;
}
+ free (starts);
+ handle_nested_ranges ();
+
for (PC = 0;;)
{
- int index;
tree type, tmp;
+
if (((PC != INVALID_PC
- && instruction_bits [PC] & BCODE_TARGET) != 0)
+ && instruction_bits[PC] & BCODE_TARGET) != 0)
|| PC == 0)
{
PUSH_PENDING (lookup_label (PC));
INVALIDATE_PC;
}
+
+ /* Check if there are any more pending blocks in the current
+ subroutine. Because we push pending blocks in a
+ last-in-first-out order, and because we don't push anything
+ from our caller until we are done with this subroutine or
+ anything nested in it, we are done if the top of the
+ pending_blocks stack is not in a subroutine, or it is in our
+ caller. */
+ if (current_subr && PC == INVALID_PC)
+ {
+ if (pending_blocks == NULL_TREE
+ || (subroutine_nesting (pending_blocks)
+ < subroutine_nesting (current_subr)))
+ {
+ int size
+ = DECL_MAX_LOCALS (current_function_decl) + stack_pointer;
+
+ tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
+ tmp = LABEL_RETURN_LABELS (current_subr);
+
+ /* FIXME: If we exit a subroutine via a throw, we might
+ have returned to an earlier caller. Obviously a
+ "ret" can only return one level, but a throw may
+ return many levels. */
+ current_subr = LABEL_SUBR_CONTEXT (current_subr);
+
+ if (RETURN_MAP_ADJUSTED (ret_map))
+ {
+ /* Since we are done with this subroutine, set up
+ the (so far known) return address as pending -
+ with the merged type state. */
+ for ( ; tmp != NULL_TREE; tmp = TREE_CHAIN (tmp))
+ {
+ tree return_label = TREE_VALUE (tmp);
+ tree return_state = LABEL_TYPE_STATE (return_label);
+ if (return_state == NULL_TREE)
+ {
+ /* This means we had not verified the subroutine
+ earlier, so this is the first jsr to call it.
+ In this case, the type_map of the return
+ address is just the current type_map - and that
+ is handled by the following PUSH_PENDING. */
+ }
+ else
+ {
+ /* In this case we have to do a merge. But first
+ restore the type_map for unused slots to those
+ that were in effect at the jsr. */
+ for (index = size; --index >= 0; )
+ {
+ type_map[index]
+ = TREE_VEC_ELT (ret_map, index);
+
+ if (type_map[index] == TYPE_UNUSED)
+ type_map[index]
+ = TREE_VEC_ELT (return_state, index);
+ }
+ }
+ PUSH_PENDING (return_label);
+ }
+ }
+ }
+ }
+
if (PC == INVALID_PC)
{
label = pending_blocks;
+
if (label == NULL_TREE)
break; /* We're done! */
+
pending_blocks = LABEL_PENDING_CHAIN (label);
LABEL_CHANGED (label) = 0;
/* Restore type_map and stack_pointer from
LABEL_TYPE_STATE (label), and continue
- compiling from there. */
+ compiling from there. */
load_type_state (label);
+
PC = LABEL_PC (label);
}
else if (PC >= length)
- VERIFICATION_ERROR ("falling through end of method");
+ VERIFICATION_ERROR ("falling through the end of the method");
+
oldpc = PC;
- if (!(instruction_bits [PC] & BCODE_INSTRUCTION_START) && ! wide)
+ if (! (instruction_bits[PC] & BCODE_INSTRUCTION_START) && ! wide)
VERIFICATION_ERROR ("PC not at instruction start");
instruction_bits[PC] |= BCODE_VERIFIED;
switch (op_code)
{
int is_static, is_putting;
+
case OPCODE_nop:
break;
+
case OPCODE_iconst_m1:
case OPCODE_iconst_0: case OPCODE_iconst_1: case OPCODE_iconst_2:
case OPCODE_iconst_3: case OPCODE_iconst_4: case OPCODE_iconst_5:
goto push_int;
push_int:
if (byte_ops[PC] == OPCODE_newarray
- || byte_ops[PC] == OPCODE_newarray)
+ || byte_ops[PC] == OPCODE_anewarray)
int_value = i;
- push_type (int_type_node); break;
+ PUSH_TYPE (int_type_node); break;
+
case OPCODE_lconst_0: case OPCODE_lconst_1:
- push_type (long_type_node); break;
+ PUSH_TYPE (long_type_node); break;
+
case OPCODE_fconst_0: case OPCODE_fconst_1: case OPCODE_fconst_2:
- push_type (float_type_node); break;
+ PUSH_TYPE (float_type_node); break;
+
case OPCODE_dconst_0: case OPCODE_dconst_1:
- push_type (double_type_node); break;
+ PUSH_TYPE (double_type_node); break;
+
case OPCODE_bipush:
i = IMMEDIATE_s1;
goto push_int;
+
case OPCODE_sipush:
i = IMMEDIATE_s2;
goto push_int;
+
case OPCODE_iload: type = int_type_node; goto general_load;
case OPCODE_lload: type = long_type_node; goto general_load;
case OPCODE_fload: type = float_type_node; goto general_load;
if (index < 0
|| (index + TYPE_IS_WIDE (type)
>= DECL_MAX_LOCALS (current_function_decl)))
- VERIFICATION_ERROR ("invalid local variable index in load");
+ VERIFICATION_ERROR_WITH_INDEX
+ ("invalid local variable index %d in load");
tmp = type_map[index];
- if (tmp == TYPE_UNKNOWN || tmp == TYPE_SECOND
+ if (tmp == TYPE_UNKNOWN)
+ VERIFICATION_ERROR_WITH_INDEX
+ ("loading local variable %d which has unknown type");
+ else if (tmp == TYPE_SECOND
|| (TYPE_IS_WIDE (type)
&& type_map[index+1] != void_type_node)
|| (type == ptr_type_node
: type == int_type_node
? (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
: type != tmp))
- VERIFICATION_ERROR("invalid local variable type in load");
- push_type (tmp);
+ VERIFICATION_ERROR_WITH_INDEX
+ ("loading local variable %d which has invalid type");
+ PUSH_TYPE (tmp);
goto note_used;
case OPCODE_istore: type = int_type_node; goto general_store;
case OPCODE_lstore: type = long_type_node; goto general_store;
case OPCODE_fstore: type = float_type_node; goto general_store;
case OPCODE_dstore: type = double_type_node; goto general_store;
- case OPCODE_astore: type = ptr_type_node; goto general_store;
+ case OPCODE_astore: type = object_ptr_type_node; goto general_store;
general_store:
index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
wide = 0;
|| (index + TYPE_IS_WIDE (type)
>= DECL_MAX_LOCALS (current_function_decl)))
{
- VERIFICATION_ERROR ("invalid local variable index in store");
+ VERIFICATION_ERROR_WITH_INDEX
+ ("invalid local variable index %d in store");
return 0;
}
- type = pop_type (type);
+ POP_TYPE_CONV (type, type, NULL);
type_map[index] = type;
- /* If local variable changed, we need to reconsider eh handlers. */
+ /* If a local variable has changed, we need to reconsider exception
+ handlers. */
prev_eh_ranges = NULL_EH_RANGE;
- /* Allocate decl and rtx for this variable now, so if we're not
- optmizing, we get a temporary that survives the whole method. */
+ /* Allocate decl for this variable now, so we get a temporary
+ that survives the whole method. */
find_local_variable (index, type, oldpc);
if (TYPE_IS_WIDE (type))
type_map[index+1] = TYPE_SECOND;
+
/* ... fall through to note_used ... */
note_used:
/* For store or load, note that local variable INDEX is used.
- This is needed to verify try-finally sub-routines. */
+ This is needed to verify try-finally subroutines. */
if (current_subr)
{
tree vec = LABEL_RETURN_TYPE_STATE (current_subr);
type = double_type_node; goto binop;
case OPCODE_dneg:
type = double_type_node; goto unop;
+
unop:
pop_type (type);
- push_type (type);
+ PUSH_TYPE (type);
break;
+
binop:
pop_type (type);
pop_type (type);
- push_type (type);
+ PUSH_TYPE (type);
break;
+
case OPCODE_lshl:
case OPCODE_lshr:
case OPCODE_lushr:
pop_type (int_type_node);
pop_type (long_type_node);
- push_type (long_type_node);
+ PUSH_TYPE (long_type_node);
break;
+
case OPCODE_iinc:
index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
PC += wide + 1;
if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl))
VERIFICATION_ERROR ("invalid local variable index in iinc");
tmp = type_map[index];
- if (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
+ if (tmp == NULL_TREE
+ || ! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
VERIFICATION_ERROR ("invalid local variable type in iinc");
break;
+
case OPCODE_i2l:
- pop_type (int_type_node); push_type (long_type_node); break;
+ pop_type (int_type_node); PUSH_TYPE (long_type_node); break;
case OPCODE_i2f:
- pop_type (int_type_node); push_type (float_type_node); break;
+ pop_type (int_type_node); PUSH_TYPE (float_type_node); break;
case OPCODE_i2d:
- pop_type (int_type_node); push_type (double_type_node); break;
+ pop_type (int_type_node); PUSH_TYPE (double_type_node); break;
case OPCODE_l2i:
- pop_type (long_type_node); push_type (int_type_node); break;
+ pop_type (long_type_node); PUSH_TYPE (int_type_node); break;
case OPCODE_l2f:
- pop_type (long_type_node); push_type (float_type_node); break;
+ pop_type (long_type_node); PUSH_TYPE (float_type_node); break;
case OPCODE_l2d:
- pop_type (long_type_node); push_type (double_type_node); break;
+ pop_type (long_type_node); PUSH_TYPE (double_type_node); break;
case OPCODE_f2i:
- pop_type (float_type_node); push_type (int_type_node); break;
+ pop_type (float_type_node); PUSH_TYPE (int_type_node); break;
case OPCODE_f2l:
- pop_type (float_type_node); push_type (long_type_node); break;
+ pop_type (float_type_node); PUSH_TYPE (long_type_node); break;
case OPCODE_f2d:
- pop_type (float_type_node); push_type (double_type_node); break;
+ pop_type (float_type_node); PUSH_TYPE (double_type_node); break;
case OPCODE_d2i:
- pop_type (double_type_node); push_type (int_type_node); break;
+ pop_type (double_type_node); PUSH_TYPE (int_type_node); break;
case OPCODE_d2l:
- pop_type (double_type_node); push_type (long_type_node); break;
+ pop_type (double_type_node); PUSH_TYPE (long_type_node); break;
case OPCODE_d2f:
- pop_type (double_type_node); push_type (float_type_node); break;
+ pop_type (double_type_node); PUSH_TYPE (float_type_node); break;
+
case OPCODE_lcmp:
type = long_type_node; goto compare;
case OPCODE_fcmpl:
type = double_type_node; goto compare;
compare:
pop_type (type); pop_type (type);
- push_type (int_type_node); break;
+ PUSH_TYPE (int_type_node); break;
+
case OPCODE_ifeq:
case OPCODE_ifne:
case OPCODE_iflt:
case OPCODE_if_acmpne:
pop_type (object_ptr_type_node); pop_type (object_ptr_type_node);
goto cond;
+
cond:
PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
break;
+
case OPCODE_goto:
PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
INVALIDATE_PC;
break;
+
case OPCODE_wide:
switch (byte_ops[PC])
{
VERIFICATION_ERROR ("invalid use of wide instruction");
}
break;
- case OPCODE_ireturn: type = int_type_node; goto ret;
+
+ case OPCODE_return: type = void_type_node; goto ret;
+ case OPCODE_ireturn:
+ if ((TREE_CODE (return_type) == BOOLEAN_TYPE
+ || TREE_CODE (return_type) == CHAR_TYPE
+ || TREE_CODE (return_type) == INTEGER_TYPE)
+ && TYPE_PRECISION (return_type) <= 32)
+ type = return_type;
+ else
+ type = NULL_TREE;
+ goto ret;
case OPCODE_lreturn: type = long_type_node; goto ret;
case OPCODE_freturn: type = float_type_node; goto ret;
case OPCODE_dreturn: type = double_type_node; goto ret;
- case OPCODE_areturn: type = ptr_type_node; goto ret;
+ case OPCODE_areturn:
+ if (TREE_CODE (return_type) == POINTER_TYPE)
+ type = return_type;
+ else
+ type = NULL_TREE;
+ goto ret;
+
ret:
- pop_type (type);
- /* ... fall through ... */
- case OPCODE_return:
+ if (type != return_type)
+ VERIFICATION_ERROR ("incorrect ?return opcode");
+ if (type != void_type_node)
+ POP_TYPE (type, "return value has wrong type");
INVALIDATE_PC;
break;
+
case OPCODE_getstatic: is_putting = 0; is_static = 1; goto field;
case OPCODE_putstatic: is_putting = 1; is_static = 1; goto field;
case OPCODE_getfield: is_putting = 0; is_static = 0; goto field;
case OPCODE_putfield: is_putting = 1; is_static = 0; goto field;
field:
{
- int index = IMMEDIATE_u2;
- tree self_type = get_class_constant
- (jcf, COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool, index));
- tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, index);
- tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index);
- tree field_type = get_type_from_signature (field_signature);
+ tree field_signature, field_type;
+ index = IMMEDIATE_u2;
+
+ if (index <= 0 || index >= JPOOL_SIZE (current_jcf))
+ VERIFICATION_ERROR_WITH_INDEX ("bad constant pool index %d");
+
+ if (JPOOL_TAG (current_jcf, index) != CONSTANT_Fieldref)
+ VERIFICATION_ERROR
+ ("field instruction does not reference a Fieldref");
+
+ field_signature
+ = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index);
+
+ field_type = get_type_from_signature (field_signature);
+
if (is_putting)
- pop_type (field_type);
+ POP_TYPE (field_type, "incorrect type for field");
+
if (! is_static)
{
+ int clindex
+ = COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool, index);
+
+ tree self_type = get_class_constant (current_jcf, clindex);
+
/* Defer actual checking until next pass. */
- pop_type (ptr_type_node);
+ POP_TYPE (self_type, "incorrect type for field reference");
}
+
if (! is_putting)
- push_type (field_type);
+ PUSH_TYPE (field_type);
break;
}
+
case OPCODE_new:
- push_type (get_class_constant (jcf, IMMEDIATE_u2));
+ PUSH_TYPE (get_class_constant (jcf, IMMEDIATE_u2));
+ break;
+
+ case OPCODE_dup: wide = 1; index = 0; goto dup;
+ case OPCODE_dup_x1: wide = 1; index = 1; goto dup;
+ case OPCODE_dup_x2: wide = 1; index = 2; goto dup;
+ case OPCODE_dup2: wide = 2; index = 0; goto dup;
+ case OPCODE_dup2_x1: wide = 2; index = 1; goto dup;
+ case OPCODE_dup2_x2: wide = 2; index = 2; goto dup;
+
+ dup:
+ if (wide + index > stack_pointer)
+ VERIFICATION_ERROR ("stack underflow - dup* operation");
+ type_stack_dup (wide, index);
+ wide = 0;
break;
- case OPCODE_dup: type_stack_dup (1, 0); break;
- case OPCODE_dup_x1: type_stack_dup (1, 1); break;
- case OPCODE_dup_x2: type_stack_dup (1, 2); break;
- case OPCODE_dup2: type_stack_dup (2, 0); break;
- case OPCODE_dup2_x1: type_stack_dup (2, 1); break;
- case OPCODE_dup2_x2: type_stack_dup (2, 2); break;
+
case OPCODE_pop: index = 1; goto pop;
case OPCODE_pop2: index = 2; goto pop;
+
pop:
if (stack_pointer < index)
VERIFICATION_ERROR ("stack underflow");
stack_pointer -= index;
break;
+
case OPCODE_swap:
if (stack_pointer < 2)
VERIFICATION_ERROR ("stack underflow (in swap)");
{
tree type1 = stack_type_map[stack_pointer - 1];
tree type2 = stack_type_map[stack_pointer - 2];
+
if (type1 == void_type_node || type2 == void_type_node)
VERIFICATION_ERROR ("verifier (swap): double or long value");
+
stack_type_map[stack_pointer - 2] = type1;
stack_type_map[stack_pointer - 1] = type2;
}
break;
+
case OPCODE_ldc: index = IMMEDIATE_u1; goto ldc;
case OPCODE_ldc2_w:
case OPCODE_ldc_w:
index = IMMEDIATE_u2; goto ldc;
+
ldc:
- if (index <= 0 || index >= JPOOL_SIZE(current_jcf))
- VERIFICATION_ERROR ("bad constant pool index in ldc");
+ if (index <= 0 || index >= JPOOL_SIZE (current_jcf))
+ VERIFICATION_ERROR_WITH_INDEX ("bad constant pool index %d in ldc");
+
int_value = -1;
switch (JPOOL_TAG (current_jcf, index) & ~CONSTANT_ResolvedFlag)
{
break;
/* ... else fall through ... */
default:
- bad_ldc:
VERIFICATION_ERROR ("bad constant pool tag in ldc");
}
if (type == int_type_node)
i = TREE_INT_CST_LOW (get_constant (current_jcf, index));
goto push_int;
}
- push_type (type);
+ PUSH_TYPE (type);
break;
case OPCODE_invokevirtual:
case OPCODE_invokestatic:
case OPCODE_invokeinterface:
{
- int index = IMMEDIATE_u2;
- tree sig = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index);
- tree self_type = get_class_constant
- (current_jcf, COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
- index));
- tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool, index);
- tree method_type;
- method_type = parse_signature_string (IDENTIFIER_POINTER (sig),
+ tree sig, method_name, method_type, self_type;
+ int self_is_interface, tag;
+ index = IMMEDIATE_u2;
+
+ if (index <= 0 || index >= JPOOL_SIZE (current_jcf))
+ VERIFICATION_ERROR_WITH_INDEX
+ ("bad constant pool index %d for invoke");
+
+ tag = JPOOL_TAG (current_jcf, index);
+
+ if (op_code == OPCODE_invokeinterface)
+ {
+ if (tag != CONSTANT_InterfaceMethodref)
+ VERIFICATION_ERROR
+ ("invokeinterface does not reference an InterfaceMethodref");
+ }
+ else
+ {
+ if (tag != CONSTANT_Methodref)
+ VERIFICATION_ERROR ("invoke does not reference a Methodref");
+ }
+
+ sig = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index);
+
+ self_type
+ = get_class_constant (current_jcf,
+ COMPONENT_REF_CLASS_INDEX
+ (&current_jcf->cpool, index));
+
+ if (! CLASS_LOADED_P (self_type))
+ load_class (self_type, 1);
+
+ self_is_interface = CLASS_INTERFACE (TYPE_NAME (self_type));
+ method_name = COMPONENT_REF_NAME (&current_jcf->cpool, index);
+ method_type = parse_signature_string ((const unsigned char *) IDENTIFIER_POINTER (sig),
IDENTIFIER_LENGTH (sig));
+
if (TREE_CODE (method_type) != FUNCTION_TYPE)
VERIFICATION_ERROR ("bad method signature");
- pop_argument_types (TYPE_ARG_TYPES (method_type));
- /* Can't invoke <clinit> */
- if (method_name == clinit_identifier_node)
+ pmessage = pop_argument_types (TYPE_ARG_TYPES (method_type));
+ if (pmessage != NULL)
+ {
+ message = "invalid argument type";
+ goto pop_type_error;
+ }
+
+ /* Can't invoke <clinit>. */
+ if (ID_CLINIT_P (method_name))
VERIFICATION_ERROR ("invoke opcode can't invoke <clinit>");
- /* Apart invokespecial, can't invoke <init> */
- if (op_code != OPCODE_invokespecial
- && method_name == init_identifier_node)
+
+ /* Apart from invokespecial, can't invoke <init>. */
+ if (op_code != OPCODE_invokespecial && ID_INIT_P (method_name))
VERIFICATION_ERROR ("invoke opcode can't invoke <init>");
if (op_code != OPCODE_invokestatic)
- pop_type (self_type);
+ POP_TYPE (self_type,
+ "stack type not subclass of invoked method's class");
switch (op_code)
{
if (!nargs || notZero)
VERIFICATION_ERROR
("invalid argument number in invokeinterface");
- break;
+
+ /* If we verify/resolve the constant pool, as we should,
+ this test (and the one just following) are redundant. */
+ if (! self_is_interface)
+ VERIFICATION_ERROR
+ ("invokeinterface calls method not in interface");
+ break;
+
+ default:
+ if (self_is_interface)
+ VERIFICATION_ERROR ("method in interface called");
}
}
if (TREE_TYPE (method_type) != void_type_node)
- push_type (TREE_TYPE (method_type));
+ PUSH_TYPE (TREE_TYPE (method_type));
break;
}
case OPCODE_arraylength:
- /* Type checking actually made during code generation */
- pop_type( ptr_type_node );
- push_type( int_type_node );
+ /* Type checking actually made during code generation. */
+ pop_type (ptr_type_node);
+ PUSH_TYPE (int_type_node);
break;
/* Q&D verification *or* more checking done during code generation
case OPCODE_bastore: type = int_type_node; goto astore;
case OPCODE_castore: type = int_type_node; goto astore;
case OPCODE_sastore: type = int_type_node; goto astore;
+
astore:
- /* FIXME - need better verification here */
+ /* FIXME - need better verification here. */
pop_type (type); /* new value */
pop_type (int_type_node); /* index */
pop_type (ptr_type_node); /* array */
case OPCODE_baload: type = promote_type (byte_type_node); goto aload;
case OPCODE_caload: type = promote_type (char_type_node); goto aload;
case OPCODE_saload: type = promote_type (short_type_node); goto aload;
+
aload:
pop_type (int_type_node);
- type = pop_type (ptr_type_node);
- if (! is_array_type_p (type))
+ tmp = pop_type (ptr_type_node);
+ if (is_array_type_p (tmp))
+ type = TYPE_ARRAY_ELEMENT (TREE_TYPE (tmp));
+ else if (tmp != TYPE_NULL)
VERIFICATION_ERROR ("array load from non-array type");
- push_type (TYPE_ARRAY_ELEMENT (TREE_TYPE (type)));
+ PUSH_TYPE (type);
break;
case OPCODE_anewarray:
newarray:
if (int_value >= 0 && prevpc >= 0)
{
- /* If previous instruction pushed int constant,
+ /* If the previous instruction pushed an int constant,
we want to use it. */
switch (byte_ops[prevpc])
{
}
else
int_value = -1;
+
type = build_java_array_type (type, int_value);
pop_type (int_type_node);
- push_type (type);
+ PUSH_TYPE (type);
break;
case OPCODE_multianewarray:
index = IMMEDIATE_u2;
ndim = IMMEDIATE_u1;
- if( ndim < 1 )
- VERIFICATION_ERROR ("number of dimension lower that 1 in multianewarray" );
+ if (ndim < 1)
+ VERIFICATION_ERROR
+ ("number of dimension lower that 1 in multianewarray" );
- for( i = 0; i < ndim; i++ )
+ for (i = 0; i < ndim; i++)
pop_type (int_type_node);
- push_type (get_class_constant (current_jcf, index));
+
+ PUSH_TYPE (get_class_constant (current_jcf, index));
break;
}
case OPCODE_aconst_null:
- push_type (ptr_type_node);
+ PUSH_TYPE (ptr_type_node);
break;
case OPCODE_athrow:
- pop_type (throwable_type_node);
+ /* FIXME: athrow also empties the stack. */
+ POP_TYPE (throwable_type_node, "missing throwable at athrow" );
INVALIDATE_PC;
break;
case OPCODE_checkcast:
- pop_type (ptr_type_node);
+ POP_TYPE (object_ptr_type_node,
+ "checkcast operand is not a pointer");
type = get_class_constant (current_jcf, IMMEDIATE_u2);
- push_type (type);
+ PUSH_TYPE (type);
break;
+
case OPCODE_instanceof:
- pop_type (ptr_type_node);
+ POP_TYPE (object_ptr_type_node,
+ "instanceof operand is not a pointer");
get_class_constant (current_jcf, IMMEDIATE_u2);
- push_type (integer_type_node);
+ PUSH_TYPE (int_type_node);
break;
case OPCODE_tableswitch:
{
- jint default_val, low, high;
+ jint low, high;
+
+ POP_TYPE (int_type_node, "missing int for tableswitch");
- pop_type (integer_type_node);
while (PC%4)
{
if (byte_ops[PC++])
VERIFICATION_ERROR ("bad alignment in tableswitch pad");
}
- PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
+
+ PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
low = IMMEDIATE_s4;
high = IMMEDIATE_s4;
while (low++ <= high)
PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
+
+ INVALIDATE_PC;
break;
}
case OPCODE_lookupswitch:
{
- jint npairs, last, not_registered = 1;
+ jint npairs, last = 0, not_registered = 1;
+
+ POP_TYPE (int_type_node, "missing int for lookupswitch");
- pop_type (integer_type_node);
while (PC%4)
{
if (byte_ops[PC++])
VERIFICATION_ERROR ("bad alignment in lookupswitch pad");
}
- PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
+ PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
npairs = IMMEDIATE_s4;
if (npairs < 0)
while (npairs--)
{
int match = IMMEDIATE_s4;
+
if (not_registered)
not_registered = 0;
else if (last >= match)
last = match;
PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
}
+ INVALIDATE_PC;
break;
}
{
tree target = lookup_label (oldpc + IMMEDIATE_s2);
tree return_label = lookup_label (PC);
- push_type (return_address_type_node);
- if (! LABEL_VERIFIED (target))
+ PUSH_TYPE (return_address_type_node);
+ /* The return label chain will be null if this is the first
+ time we've seen this jsr target. */
+ if (LABEL_RETURN_LABEL (target) == NULL_TREE)
{
- /* first time seen */
tree return_type_map;
int nlocals = DECL_MAX_LOCALS (current_function_decl);
index = nlocals + DECL_MAX_STACK (current_function_decl);
return_type_map = make_tree_vec (index);
- while (--index >= nlocals)
- TREE_VEC_ELT (return_type_map, index) = TYPE_UNKNOWN;
- while (--index >= 0)
- TREE_VEC_ELT (return_type_map, index) = TYPE_UNUSED;
+
+ while (index > nlocals)
+ TREE_VEC_ELT (return_type_map, --index) = TYPE_UNKNOWN;
+
+ while (index > 0)
+ TREE_VEC_ELT (return_type_map, --index) = TYPE_UNUSED;
+
LABEL_RETURN_LABEL (target)
= build_decl (LABEL_DECL, NULL_TREE, TREE_TYPE (target));
- LABEL_PC (LABEL_RETURN_LABEL (target)) = -1;
+ LABEL_PC (LABEL_RETURN_LABEL (target)) = INVALID_PC;
LABEL_RETURN_TYPE_STATE (target) = return_type_map;
LABEL_IS_SUBR_START (target) = 1;
LABEL_IN_SUBR (target) = 1;
type_map[len] = TREE_VEC_ELT (return_map, len);
}
current_subr = LABEL_SUBR_CONTEXT (target);
- PUSH_PENDING (return_label);
+ if (RETURN_MAP_ADJUSTED (return_map))
+ PUSH_PENDING (return_label);
}
INVALIDATE_PC;
}
break;
+
case OPCODE_ret:
- if (current_subr == NULL)
+ if (current_subr == NULL_TREE)
VERIFICATION_ERROR ("ret instruction not in a jsr subroutine");
else
{
tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
- tree caller = LABEL_SUBR_CONTEXT (current_subr);
- int size = DECL_MAX_LOCALS(current_function_decl)+stack_pointer;
+ int size
+ = DECL_MAX_LOCALS (current_function_decl) + stack_pointer;
index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
wide = 0;
INVALIDATE_PC;
VERIFICATION_ERROR ("invalid ret index");
/* The next chunk of code is similar to an inlined version of
- * merge_type_state (LABEL_RETURN_LABEL (current_subr)).
- * The main differences are that LABEL_RETURN_LABEL is
- * pre-allocated by the jsr (but we don't know the size then);
- * and that we have to handle TYPE_UNUSED. */
+ merge_type_state (LABEL_RETURN_LABEL (current_subr)).
+ The main differences are that LABEL_RETURN_LABEL is
+ pre-allocated by the jsr (but we don't know the size then);
+ and that we have to handle TYPE_UNUSED. */
if (! RETURN_MAP_ADJUSTED (ret_map))
- { /* First return from this subroutine - fix stack pointer. */
+ {
+ /* First return from this subroutine - fix stack
+ pointer. */
TREE_VEC_LENGTH (ret_map) = size;
for (index = size; --index >= 0; )
{
tree type = TREE_VEC_ELT (ret_map, index);
if (type != TYPE_UNUSED)
{
- type = merge_types (type, type_map [index]);
+ type = merge_types (type, type_map[index]);
TREE_VEC_ELT (ret_map, index) = type;
if (type == TYPE_UNKNOWN)
{
}
}
}
+ }
+ break;
- /* Check if there are any more pending blocks in this subroutine.
- Because we push pending blocks in a last-in-first-out order,
- and because we don't push anything from our caller until we
- are done with this subroutine or anything nested in it,
- then we are done if the top of the pending_blocks stack is
- not in a subroutine, or it is in our caller. */
- if (pending_blocks == NULL_TREE
- || ! LABEL_IN_SUBR (pending_blocks)
- || LABEL_SUBR_START (pending_blocks) == caller)
- {
- /* Since we are done with this subroutine (i.e. this is the
- last ret from it), set up the (so far known) return
- address as pending - with the merged type state. */
- tmp = LABEL_RETURN_LABELS (current_subr);
- current_subr = caller;
- for ( ; tmp != NULL_TREE; tmp = TREE_CHAIN (tmp))
- {
- tree return_label = TREE_VALUE (tmp);
- tree return_state = LABEL_TYPE_STATE (return_label);
- if (return_state == NULL_TREE)
- {
- /* This means means we had not verified the
- subroutine earlier, so this is the first jsr to
- call it. In this case, the type_map of the return
- address is just the current type_map - and that
- is handled by the following PUSH_PENDING. */
- }
- else
- {
- /* In this case we have to do a merge. But first
- restore the type_map for unused slots to those
- that were in effect at the jsr. */
- for (index = size; --index >= 0; )
- {
- type_map[index] = TREE_VEC_ELT (ret_map, index);
- if (type_map[index] == TYPE_UNUSED)
- type_map[index]
- = TREE_VEC_ELT (return_state, index);
- }
- }
- PUSH_PENDING (return_label);
- }
- }
- }
- break;
- case OPCODE_jsr_w:
- case OPCODE_ret_w:
+ case OPCODE_jsr_w:
+ case OPCODE_ret_w:
default:
- error ("unknown opcode %d@pc=%d during verification", op_code, PC-1);
- return 0;
- }
+ error ("unknown opcode %d@pc=%d during verification", op_code, PC-1);
+ return 0;
+ }
prevpc = oldpc;
/* The following test is true if we have entered or exited an exception
handler range *or* we have done a store to a local variable.
In either case we need to consider any exception handlers that
- might "follow" this instruction. */
+ might "follow" this instruction. */
if (eh_ranges != prev_eh_ranges)
{
tree save_current_subr = current_subr;
struct eh_range *ranges = find_handler (oldpc);
stack_pointer = 1;
- for (; ranges != NULL_EH_RANGE; ranges = ranges->outer)
+
+ for ( ; ranges != NULL_EH_RANGE; ranges = ranges->outer)
{
tree chain = ranges->handlers;
have that the current_subr is entirely within the catch range.
In that case we can assume if that if a caller (the jsr) of
a subroutine is within the catch range, then the handler is
- *not* part of the subroutine, and vice versa. */
+ *not* part of the subroutine, and vice versa. */
current_subr = save_current_subr;
for ( ; current_subr != NULL_TREE;
{
tree return_labels = LABEL_RETURN_LABELS (current_subr);
/* There could be multiple return_labels, but
- we only need to check one. */
+ we only need to check one. */
int return_pc = LABEL_PC (TREE_VALUE (return_labels));
if (return_pc <= ranges->start_pc
|| return_pc > ranges->end_pc)
break;
}
- for ( ; chain != NULL_TREE; chain = TREE_CHAIN (chain))
+ for ( ; chain != NULL_TREE; chain = TREE_CHAIN (chain))
{
tree handler = TREE_VALUE (chain);
tree type = TREE_PURPOSE (chain);
+
if (type == NULL_TREE) /* a finally handler */
type = throwable_type_node;
+
type_map[index] = promote_type (type);
PUSH_PENDING (handler);
prev_eh_ranges = eh_ranges;
}
}
+
return 1;
+
+ pop_type_error:
+ error ("verification error at PC=%d", oldpc);
+ if (message != NULL)
+ error ("%s", message);
+ error ("%s", pmessage);
+ free (pmessage);
+ return 0;
+
+ stack_overflow:
+ message = "stack overflow";
+ goto verify_error;
+
bad_pc:
message = "program counter out of range";
goto verify_error;
+
+ error_with_index:
+ error ("verification error at PC=%d", oldpc);
+ error (message, index);
+ return 0;
+
verify_error:
- error ("verification error at PC=%d: %s", oldpc, message);
+ error ("verification error at PC=%d", oldpc);
+ error ("%s", message);
return 0;
}