/* Handle verification of bytecoded methods for the GNU compiler for
the Java(TM) language.
- Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
+ Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
Free Software Foundation, Inc.
This file is part of GCC.
of Sun Microsystems, Inc. in the United States and other countries.
The Free Software Foundation is independent of Sun Microsystems, Inc. */
+/* This bytecode verifier is an implementation of the bytecode
+verification process described in section 4.9 of "The Java(TM) Virtual
+Machine Specification", Second Edition, by Tim Lindholm and Frank Yellin,
+published by Addison-Wesley in 1999. */
+
#include "config.h"
#include "system.h"
#include "coretypes.h"
push_pending_label (target_label);
}
- if (current_subr == NULL)
+ if (current_subr == NULL_TREE)
{
if (LABEL_IN_SUBR (target_label))
return "might transfer control into subroutine";
int nesting = 0;
while (label != NULL_TREE && LABEL_IN_SUBR (label))
{
- if (! LABEL_IS_SUBR_START(label))
+ if (! LABEL_IS_SUBR_START (label))
label = LABEL_SUBR_START (label);
label = LABEL_SUBR_CONTEXT (label);
nesting++;
depth1 = class_depth (type1);
depth2 = class_depth (type2);
for ( ; depth1 > depth2; depth1--)
- type1 = TYPE_BINFO_BASETYPE (type1, 0);
+ type1 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (type1), 0));
for ( ; depth2 > depth1; depth2--)
- type2 = TYPE_BINFO_BASETYPE (type2, 0);
+ type2 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (type2), 0));
while (type1 != type2)
{
- type1 = TYPE_BINFO_BASETYPE (type1, 0);
- type2 = TYPE_BINFO_BASETYPE (type2, 0);
+ type1 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (type1), 0));
+ type2 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (type2), 0));
}
return promote_type (type1);
}
}
/* Merge the current type state with that at LABEL.
- Return -1 the the states are incompatible (i.e. on error),
+ Return -1 if the states are incompatible (i.e. on error),
0 if there was no change, and 1 if there was a change. */
int
tree return_map;
if (vec == NULL_TREE)
{
- if (!vec)
- {
- vec = make_tree_vec (cur_length);
- LABEL_TYPE_STATE (label) = vec;
- }
+ vec = make_tree_vec (cur_length);
+ LABEL_TYPE_STATE (label) = vec;
+
while (--cur_length >= 0)
- TREE_VEC_ELT (vec, cur_length) = type_map [cur_length];
+ TREE_VEC_ELT (vec, cur_length) = type_map[cur_length];
return 1;
}
else
for (i = 0; i < cur_length; i++)
{
tree old_type = TREE_VEC_ELT (vec, i);
- tree new_type = merge_types (old_type, type_map [i]);
+ tree new_type = merge_types (old_type, type_map[i]);
if (TREE_VEC_ELT (vec, i) != new_type)
{
/* If there has been a change, note that since we must re-verify.
However, if the label is the start of a subroutine,
we don't care about local variables that are neither
- set nor used in the sub-routine. */
+ set nor used in the subroutine. */
if (return_map == NULL_TREE || i >= nlocals
|| TREE_VEC_ELT (return_map, i) != TYPE_UNUSED
|| (TYPE_IS_WIDE (new_type)
{ oldpc = LABEL_PC (tmplab); goto verify_error; }} while (0)
#ifdef __GNUC__
-#define CHECK_PC_IN_RANGE(PC) ({if (PC < 0 || PC > length) goto bad_pc; (void)1;})
+#define CHECK_PC_IN_RANGE(PC) __extension__ \
+ ({if (PC < 0 || PC > length) goto bad_pc; (void)1;})
#else
#define CHECK_PC_IN_RANGE(PC) (PC < 0 || PC > length ? (abort (), 0) : 1)
#endif
#define BCODE byte_ops
-/* Verify the bytecodes of the current method.
- Return 1 on success, 0 on failure. */
+\f
+/* Verify the bytecodes of the current method, with the instructions
+ starting at BYTE_OPS and LENGTH in number, from the class file pointed to
+ by JCF.
+ Return 1 on success, 0 on failure. */
int
verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length)
{
char *pmessage;
int i;
int index;
- register unsigned char *p;
+ unsigned char *p;
struct eh_range *prev_eh_ranges = NULL_EH_RANGE;
struct eh_range *eh_ranges;
tree return_type = TREE_TYPE (TREE_TYPE (current_function_decl));
pending_blocks = NULL_TREE;
- /* Handle the exception table. */
+ current_subr = NULL_TREE;
+
+ /* Handle the exception table. */
method_init_exceptions ();
JCF_SEEK (jcf, DECL_CODE_OFFSET (current_function_decl) + length);
eh_count = JCF_readu2 (jcf);
if (start_pc < 0 || start_pc >= length
|| end_pc < 0 || end_pc > length || start_pc >= end_pc
|| handler_pc < 0 || handler_pc >= length
- || ! (instruction_bits [start_pc] & BCODE_INSTRUCTION_START)
+ || ! (instruction_bits[start_pc] & BCODE_INSTRUCTION_START)
|| (end_pc < length &&
- ! (instruction_bits [end_pc] & BCODE_INSTRUCTION_START))
- || ! (instruction_bits [handler_pc] & BCODE_INSTRUCTION_START))
+ ! (instruction_bits[end_pc] & BCODE_INSTRUCTION_START))
+ || ! (instruction_bits[handler_pc] & BCODE_INSTRUCTION_START))
{
error ("bad pc in exception_table");
free (starts);
return 0;
}
- if (handler_pc >= start_pc && handler_pc < end_pc)
- warning ("exception handler inside code that is being protected");
-
add_handler (start_pc, end_pc,
lookup_label (handler_pc),
catch_type == 0 ? NULL_TREE
: get_class_constant (jcf, catch_type));
- instruction_bits [handler_pc] |= BCODE_EXCEPTION_TARGET;
+ instruction_bits[handler_pc] |= BCODE_EXCEPTION_TARGET;
}
free (starts);
for (PC = 0;;)
{
tree type, tmp;
+
if (((PC != INVALID_PC
- && instruction_bits [PC] & BCODE_TARGET) != 0)
+ && instruction_bits[PC] & BCODE_TARGET) != 0)
|| PC == 0)
{
PUSH_PENDING (lookup_label (PC));
INVALIDATE_PC;
}
+
/* Check if there are any more pending blocks in the current
subroutine. Because we push pending blocks in a
last-in-first-out order, and because we don't push anything
from our caller until we are done with this subroutine or
- anything nested in it, then we are done if the top of the
+ anything nested in it, we are done if the top of the
pending_blocks stack is not in a subroutine, or it is in our
caller. */
- if (current_subr
- && PC == INVALID_PC)
+ if (current_subr && PC == INVALID_PC)
{
if (pending_blocks == NULL_TREE
|| (subroutine_nesting (pending_blocks)
< subroutine_nesting (current_subr)))
{
- int size = DECL_MAX_LOCALS(current_function_decl)+stack_pointer;
+ int size
+ = DECL_MAX_LOCALS (current_function_decl) + stack_pointer;
+
tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
tmp = LABEL_RETURN_LABELS (current_subr);
/* FIXME: If we exit a subroutine via a throw, we might
have returned to an earlier caller. Obviously a
"ret" can only return one level, but a throw may
- return many levels.*/
+ return many levels. */
current_subr = LABEL_SUBR_CONTEXT (current_subr);
if (RETURN_MAP_ADJUSTED (ret_map))
{
- /* Since we are done with this subroutine , set up
+ /* Since we are done with this subroutine, set up
the (so far known) return address as pending -
- with the merged type state. */
+ with the merged type state. */
for ( ; tmp != NULL_TREE; tmp = TREE_CHAIN (tmp))
{
tree return_label = TREE_VALUE (tmp);
tree return_state = LABEL_TYPE_STATE (return_label);
if (return_state == NULL_TREE)
{
- /* This means means we had not verified the
- subroutine earlier, so this is the first jsr to
- call it. In this case, the type_map of the return
+ /* This means we had not verified the subroutine
+ earlier, so this is the first jsr to call it.
+ In this case, the type_map of the return
address is just the current type_map - and that
- is handled by the following PUSH_PENDING. */
+ is handled by the following PUSH_PENDING. */
}
else
{
/* In this case we have to do a merge. But first
restore the type_map for unused slots to those
- that were in effect at the jsr. */
- for (index = size; --index >= 0; )
+ that were in effect at the jsr. */
+ for (index = size; --index >= 0; )
{
- type_map[index] = TREE_VEC_ELT (ret_map, index);
+ type_map[index]
+ = TREE_VEC_ELT (ret_map, index);
+
if (type_map[index] == TYPE_UNUSED)
type_map[index]
= TREE_VEC_ELT (return_state, index);
}
}
}
+
if (PC == INVALID_PC)
{
label = pending_blocks;
+
if (label == NULL_TREE)
break; /* We're done! */
+
pending_blocks = LABEL_PENDING_CHAIN (label);
LABEL_CHANGED (label) = 0;
/* Restore type_map and stack_pointer from
LABEL_TYPE_STATE (label), and continue
- compiling from there. */
+ compiling from there. */
load_type_state (label);
+
PC = LABEL_PC (label);
}
else if (PC >= length)
- VERIFICATION_ERROR ("falling through end of method");
+ VERIFICATION_ERROR ("falling through the end of the method");
- /* fprintf (stderr, "** %d\n", PC); */
oldpc = PC;
- if (!(instruction_bits [PC] & BCODE_INSTRUCTION_START) && ! wide)
+ if (! (instruction_bits[PC] & BCODE_INSTRUCTION_START) && ! wide)
VERIFICATION_ERROR ("PC not at instruction start");
instruction_bits[PC] |= BCODE_VERIFIED;
switch (op_code)
{
int is_static, is_putting;
+
case OPCODE_nop:
break;
+
case OPCODE_iconst_m1:
case OPCODE_iconst_0: case OPCODE_iconst_1: case OPCODE_iconst_2:
case OPCODE_iconst_3: case OPCODE_iconst_4: case OPCODE_iconst_5:
|| byte_ops[PC] == OPCODE_anewarray)
int_value = i;
PUSH_TYPE (int_type_node); break;
+
case OPCODE_lconst_0: case OPCODE_lconst_1:
PUSH_TYPE (long_type_node); break;
+
case OPCODE_fconst_0: case OPCODE_fconst_1: case OPCODE_fconst_2:
PUSH_TYPE (float_type_node); break;
+
case OPCODE_dconst_0: case OPCODE_dconst_1:
PUSH_TYPE (double_type_node); break;
+
case OPCODE_bipush:
i = IMMEDIATE_s1;
goto push_int;
+
case OPCODE_sipush:
i = IMMEDIATE_s2;
goto push_int;
+
case OPCODE_iload: type = int_type_node; goto general_load;
case OPCODE_lload: type = long_type_node; goto general_load;
case OPCODE_fload: type = float_type_node; goto general_load;
POP_TYPE_CONV (type, type, NULL);
type_map[index] = type;
- /* If local variable changed, we need to reconsider eh handlers. */
+ /* If a local variable has changed, we need to reconsider exception
+ handlers. */
prev_eh_ranges = NULL_EH_RANGE;
- /* Allocate decl and rtx for this variable now, so if we're not
- optimizing, we get a temporary that survives the whole method. */
+ /* Allocate decl for this variable now, so we get a temporary
+   that survives the whole method. */
find_local_variable (index, type, oldpc);
if (TYPE_IS_WIDE (type))
type_map[index+1] = TYPE_SECOND;
+
/* ... fall through to note_used ... */
note_used:
/* For store or load, note that local variable INDEX is used.
- This is needed to verify try-finally sub-routines. */
+ This is needed to verify try-finally subroutines. */
if (current_subr)
{
tree vec = LABEL_RETURN_TYPE_STATE (current_subr);
type = double_type_node; goto binop;
case OPCODE_dneg:
type = double_type_node; goto unop;
+
unop:
pop_type (type);
PUSH_TYPE (type);
break;
+
binop:
pop_type (type);
pop_type (type);
PUSH_TYPE (type);
break;
+
case OPCODE_lshl:
case OPCODE_lshr:
case OPCODE_lushr:
pop_type (long_type_node);
PUSH_TYPE (long_type_node);
break;
+
case OPCODE_iinc:
index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
PC += wide + 1;
|| ! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
VERIFICATION_ERROR ("invalid local variable type in iinc");
break;
+
case OPCODE_i2l:
pop_type (int_type_node); PUSH_TYPE (long_type_node); break;
case OPCODE_i2f:
pop_type (double_type_node); PUSH_TYPE (long_type_node); break;
case OPCODE_d2f:
pop_type (double_type_node); PUSH_TYPE (float_type_node); break;
+
case OPCODE_lcmp:
type = long_type_node; goto compare;
case OPCODE_fcmpl:
compare:
pop_type (type); pop_type (type);
PUSH_TYPE (int_type_node); break;
+
case OPCODE_ifeq:
case OPCODE_ifne:
case OPCODE_iflt:
case OPCODE_if_acmpne:
pop_type (object_ptr_type_node); pop_type (object_ptr_type_node);
goto cond;
+
cond:
PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
break;
+
case OPCODE_goto:
PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
INVALIDATE_PC;
break;
+
case OPCODE_wide:
switch (byte_ops[PC])
{
VERIFICATION_ERROR ("invalid use of wide instruction");
}
break;
+
case OPCODE_return: type = void_type_node; goto ret;
case OPCODE_ireturn:
if ((TREE_CODE (return_type) == BOOLEAN_TYPE
else
type = NULL_TREE;
goto ret;
+
ret:
if (type != return_type)
VERIFICATION_ERROR ("incorrect ?return opcode");
if (type != void_type_node)
- POP_TYPE(type, "return value has wrong type");
+ POP_TYPE (type, "return value has wrong type");
INVALIDATE_PC;
break;
+
case OPCODE_getstatic: is_putting = 0; is_static = 1; goto field;
case OPCODE_putstatic: is_putting = 1; is_static = 1; goto field;
case OPCODE_getfield: is_putting = 0; is_static = 0; goto field;
{
tree field_signature, field_type;
index = IMMEDIATE_u2;
- if (index <= 0 || index >= JPOOL_SIZE(current_jcf))
+
+ if (index <= 0 || index >= JPOOL_SIZE (current_jcf))
VERIFICATION_ERROR_WITH_INDEX ("bad constant pool index %d");
+
if (JPOOL_TAG (current_jcf, index) != CONSTANT_Fieldref)
VERIFICATION_ERROR
("field instruction does not reference a Fieldref");
- field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index);
+
+ field_signature
+ = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index);
+
field_type = get_type_from_signature (field_signature);
+
if (is_putting)
POP_TYPE (field_type, "incorrect type for field");
+
if (! is_static)
{
- int clindex = COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
- index);
+ int clindex
+ = COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool, index);
+
tree self_type = get_class_constant (current_jcf, clindex);
+
/* Defer actual checking until next pass. */
- POP_TYPE(self_type, "incorrect type for field reference");
+ POP_TYPE (self_type, "incorrect type for field reference");
}
+
if (! is_putting)
PUSH_TYPE (field_type);
break;
}
+
case OPCODE_new:
PUSH_TYPE (get_class_constant (jcf, IMMEDIATE_u2));
break;
+
case OPCODE_dup: wide = 1; index = 0; goto dup;
case OPCODE_dup_x1: wide = 1; index = 1; goto dup;
case OPCODE_dup_x2: wide = 1; index = 2; goto dup;
case OPCODE_dup2: wide = 2; index = 0; goto dup;
case OPCODE_dup2_x1: wide = 2; index = 1; goto dup;
case OPCODE_dup2_x2: wide = 2; index = 2; goto dup;
+
dup:
if (wide + index > stack_pointer)
VERIFICATION_ERROR ("stack underflow - dup* operation");
type_stack_dup (wide, index);
wide = 0;
break;
+
case OPCODE_pop: index = 1; goto pop;
case OPCODE_pop2: index = 2; goto pop;
+
pop:
if (stack_pointer < index)
VERIFICATION_ERROR ("stack underflow");
stack_pointer -= index;
break;
+
case OPCODE_swap:
if (stack_pointer < 2)
VERIFICATION_ERROR ("stack underflow (in swap)");
{
tree type1 = stack_type_map[stack_pointer - 1];
tree type2 = stack_type_map[stack_pointer - 2];
+
if (type1 == void_type_node || type2 == void_type_node)
VERIFICATION_ERROR ("verifier (swap): double or long value");
+
stack_type_map[stack_pointer - 2] = type1;
stack_type_map[stack_pointer - 1] = type2;
}
break;
+
case OPCODE_ldc: index = IMMEDIATE_u1; goto ldc;
case OPCODE_ldc2_w:
case OPCODE_ldc_w:
index = IMMEDIATE_u2; goto ldc;
+
ldc:
- if (index <= 0 || index >= JPOOL_SIZE(current_jcf))
+ if (index <= 0 || index >= JPOOL_SIZE (current_jcf))
VERIFICATION_ERROR_WITH_INDEX ("bad constant pool index %d in ldc");
+
int_value = -1;
switch (JPOOL_TAG (current_jcf, index) & ~CONSTANT_ResolvedFlag)
{
tree sig, method_name, method_type, self_type;
int self_is_interface, tag;
index = IMMEDIATE_u2;
- if (index <= 0 || index >= JPOOL_SIZE(current_jcf))
+
+ if (index <= 0 || index >= JPOOL_SIZE (current_jcf))
VERIFICATION_ERROR_WITH_INDEX
("bad constant pool index %d for invoke");
+
tag = JPOOL_TAG (current_jcf, index);
+
if (op_code == OPCODE_invokeinterface)
{
if (tag != CONSTANT_InterfaceMethodref)
if (tag != CONSTANT_Methodref)
VERIFICATION_ERROR ("invoke does not reference a Methodref");
}
+
sig = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index);
- self_type = get_class_constant
- (current_jcf, COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool,
- index));
+
+ self_type
+ = get_class_constant (current_jcf,
+ COMPONENT_REF_CLASS_INDEX
+ (&current_jcf->cpool, index));
+
if (! CLASS_LOADED_P (self_type))
load_class (self_type, 1);
+
self_is_interface = CLASS_INTERFACE (TYPE_NAME (self_type));
method_name = COMPONENT_REF_NAME (&current_jcf->cpool, index);
- method_type = parse_signature_string (IDENTIFIER_POINTER (sig),
+ method_type = parse_signature_string ((const unsigned char *) IDENTIFIER_POINTER (sig),
IDENTIFIER_LENGTH (sig));
+
if (TREE_CODE (method_type) != FUNCTION_TYPE)
VERIFICATION_ERROR ("bad method signature");
+
pmessage = pop_argument_types (TYPE_ARG_TYPES (method_type));
if (pmessage != NULL)
{
goto pop_type_error;
}
- /* Can't invoke <clinit> */
+ /* Can't invoke <clinit>. */
if (ID_CLINIT_P (method_name))
VERIFICATION_ERROR ("invoke opcode can't invoke <clinit>");
- /* Apart invokespecial, can't invoke <init> */
+
+ /* Apart from invokespecial, can't invoke <init>. */
if (op_code != OPCODE_invokespecial && ID_INIT_P (method_name))
VERIFICATION_ERROR ("invoke opcode can't invoke <init>");
if (!nargs || notZero)
VERIFICATION_ERROR
("invalid argument number in invokeinterface");
+
/* If we verify/resolve the constant pool, as we should,
this test (and the one just following) are redundant. */
if (! self_is_interface)
- VERIFICATION_ERROR ("invokeinterface calls method not in interface");
+ VERIFICATION_ERROR
+ ("invokeinterface calls method not in interface");
break;
+
default:
if (self_is_interface)
VERIFICATION_ERROR ("method in interface called");
}
case OPCODE_arraylength:
- /* Type checking actually made during code generation */
- pop_type( ptr_type_node );
- PUSH_TYPE( int_type_node );
+ /* Type checking actually made during code generation. */
+ pop_type (ptr_type_node);
+ PUSH_TYPE (int_type_node);
break;
/* Q&D verification *or* more checking done during code generation
case OPCODE_bastore: type = int_type_node; goto astore;
case OPCODE_castore: type = int_type_node; goto astore;
case OPCODE_sastore: type = int_type_node; goto astore;
+
astore:
- /* FIXME - need better verification here */
+ /* FIXME - need better verification here. */
pop_type (type); /* new value */
pop_type (int_type_node); /* index */
pop_type (ptr_type_node); /* array */
case OPCODE_baload: type = promote_type (byte_type_node); goto aload;
case OPCODE_caload: type = promote_type (char_type_node); goto aload;
case OPCODE_saload: type = promote_type (short_type_node); goto aload;
+
aload:
pop_type (int_type_node);
tmp = pop_type (ptr_type_node);
newarray:
if (int_value >= 0 && prevpc >= 0)
{
- /* If previous instruction pushed int constant,
+ /* If the previous instruction pushed an int constant,
we want to use it. */
switch (byte_ops[prevpc])
{
}
else
int_value = -1;
+
type = build_java_array_type (type, int_value);
pop_type (int_type_node);
PUSH_TYPE (type);
index = IMMEDIATE_u2;
ndim = IMMEDIATE_u1;
- if( ndim < 1 )
- VERIFICATION_ERROR ("number of dimension lower that 1 in multianewarray" );
+ if (ndim < 1)
+ VERIFICATION_ERROR
+ ("number of dimension lower that 1 in multianewarray" );
- for( i = 0; i < ndim; i++ )
+ for (i = 0; i < ndim; i++)
pop_type (int_type_node);
+
PUSH_TYPE (get_class_constant (current_jcf, index));
break;
}
break;
case OPCODE_athrow:
- /* FIXME: athrow also empties the stack. */
+ /* FIXME: athrow also empties the stack. */
POP_TYPE (throwable_type_node, "missing throwable at athrow" );
INVALIDATE_PC;
break;
type = get_class_constant (current_jcf, IMMEDIATE_u2);
PUSH_TYPE (type);
break;
+
case OPCODE_instanceof:
POP_TYPE (object_ptr_type_node,
"instanceof operand is not a pointer");
jint low, high;
POP_TYPE (int_type_node, "missing int for tableswitch");
+
while (PC%4)
{
if (byte_ops[PC++])
VERIFICATION_ERROR ("bad alignment in tableswitch pad");
}
- PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
+
+ PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
low = IMMEDIATE_s4;
high = IMMEDIATE_s4;
while (low++ <= high)
PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
+
INVALIDATE_PC;
break;
}
jint npairs, last = 0, not_registered = 1;
POP_TYPE (int_type_node, "missing int for lookupswitch");
+
while (PC%4)
{
if (byte_ops[PC++])
VERIFICATION_ERROR ("bad alignment in lookupswitch pad");
}
- PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
+ PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
npairs = IMMEDIATE_s4;
if (npairs < 0)
while (npairs--)
{
int match = IMMEDIATE_s4;
+
if (not_registered)
not_registered = 0;
else if (last >= match)
int nlocals = DECL_MAX_LOCALS (current_function_decl);
index = nlocals + DECL_MAX_STACK (current_function_decl);
return_type_map = make_tree_vec (index);
+
while (index > nlocals)
TREE_VEC_ELT (return_type_map, --index) = TYPE_UNKNOWN;
+
while (index > 0)
TREE_VEC_ELT (return_type_map, --index) = TYPE_UNUSED;
+
LABEL_RETURN_LABEL (target)
= build_decl (LABEL_DECL, NULL_TREE, TREE_TYPE (target));
- LABEL_PC (LABEL_RETURN_LABEL (target)) = -1;
+ LABEL_PC (LABEL_RETURN_LABEL (target)) = INVALID_PC;
LABEL_RETURN_TYPE_STATE (target) = return_type_map;
LABEL_IS_SUBR_START (target) = 1;
LABEL_IN_SUBR (target) = 1;
INVALIDATE_PC;
}
break;
+
case OPCODE_ret:
- if (current_subr == NULL)
+ if (current_subr == NULL_TREE)
VERIFICATION_ERROR ("ret instruction not in a jsr subroutine");
else
{
tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
- int size = DECL_MAX_LOCALS(current_function_decl)+stack_pointer;
+ int size
+ = DECL_MAX_LOCALS (current_function_decl) + stack_pointer;
index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
wide = 0;
INVALIDATE_PC;
VERIFICATION_ERROR ("invalid ret index");
/* The next chunk of code is similar to an inlined version of
- * merge_type_state (LABEL_RETURN_LABEL (current_subr)).
- * The main differences are that LABEL_RETURN_LABEL is
- * pre-allocated by the jsr (but we don't know the size then);
- * and that we have to handle TYPE_UNUSED. */
+ merge_type_state (LABEL_RETURN_LABEL (current_subr)).
+ The main differences are that LABEL_RETURN_LABEL is
+ pre-allocated by the jsr (but we don't know the size then);
+ and that we have to handle TYPE_UNUSED. */
if (! RETURN_MAP_ADJUSTED (ret_map))
- { /* First return from this subroutine - fix stack pointer. */
+ {
+ /* First return from this subroutine - fix stack
+ pointer. */
TREE_VEC_LENGTH (ret_map) = size;
for (index = size; --index >= 0; )
{
tree type = TREE_VEC_ELT (ret_map, index);
if (type != TYPE_UNUSED)
{
- type = merge_types (type, type_map [index]);
+ type = merge_types (type, type_map[index]);
TREE_VEC_ELT (ret_map, index) = type;
if (type == TYPE_UNKNOWN)
{
}
}
}
-
-
}
break;
+
case OPCODE_jsr_w:
case OPCODE_ret_w:
default:
/* The following test is true if we have entered or exited an exception
handler range *or* we have done a store to a local variable.
In either case we need to consider any exception handlers that
- might "follow" this instruction. */
+ might "follow" this instruction. */
if (eh_ranges != prev_eh_ranges)
{
tree save_current_subr = current_subr;
struct eh_range *ranges = find_handler (oldpc);
stack_pointer = 1;
- for (; ranges != NULL_EH_RANGE; ranges = ranges->outer)
+
+ for ( ; ranges != NULL_EH_RANGE; ranges = ranges->outer)
{
tree chain = ranges->handlers;
have that the current_subr is entirely within the catch range.
In that case we can assume if that if a caller (the jsr) of
a subroutine is within the catch range, then the handler is
- *not* part of the subroutine, and vice versa. */
+ *not* part of the subroutine, and vice versa. */
current_subr = save_current_subr;
for ( ; current_subr != NULL_TREE;
{
tree return_labels = LABEL_RETURN_LABELS (current_subr);
/* There could be multiple return_labels, but
- we only need to check one. */
+ we only need to check one. */
int return_pc = LABEL_PC (TREE_VALUE (return_labels));
if (return_pc <= ranges->start_pc
|| return_pc > ranges->end_pc)
break;
}
- for ( ; chain != NULL_TREE; chain = TREE_CHAIN (chain))
+ for ( ; chain != NULL_TREE; chain = TREE_CHAIN (chain))
{
tree handler = TREE_VALUE (chain);
tree type = TREE_PURPOSE (chain);
+
if (type == NULL_TREE) /* a finally handler */
type = throwable_type_node;
+
type_map[index] = promote_type (type);
PUSH_PENDING (handler);
prev_eh_ranges = eh_ranges;
}
}
+
return 1;
+
pop_type_error:
error ("verification error at PC=%d", oldpc);
if (message != NULL)
error ("%s", pmessage);
free (pmessage);
return 0;
+
stack_overflow:
message = "stack overflow";
goto verify_error;
+
bad_pc:
message = "program counter out of range";
goto verify_error;
+
error_with_index:
error ("verification error at PC=%d", oldpc);
error (message, index);
return 0;
+
verify_error:
error ("verification error at PC=%d", oldpc);
error ("%s", message);