1 /* Handle verification of bytecoded methods for the GNU compiler for
3 Copyright (C) 1997, 1998, 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA.
22 Java and all Java-based marks are trademarks or registered trademarks
23 of Sun Microsystems, Inc. in the United States and other countries.
24 The Free Software Foundation is independent of Sun Microsystems, Inc. */
29 #include "java-tree.h"
31 #include "java-opcodes.h"
33 #include "java-except.h"
36 static void push_pending_label PARAMS ((tree));
37 static tree merge_types PARAMS ((tree, tree));
38 static const char *check_pending_block PARAMS ((tree));
39 static void type_stack_dup PARAMS ((int, int));
40 static int start_pc_cmp PARAMS ((const PTR, const PTR));
42 extern int stack_pointer;
44 /* During verification, start of the current subroutine (jsr target). */
47 /* A list of pending blocks, chained using LABEL_PENDING_CHAIN.
48 A pending block is one that has LABEL_CHANGED set, which means
49 it requires (re-) verification. */
52 /* Append TARGET_LABEL to the pending_block stack unless already in it. */
55 push_pending_label (target_label)
58 if (! LABEL_CHANGED (target_label))
60 LABEL_PENDING_CHAIN (target_label) = pending_blocks;
61 pending_blocks = target_label;
62 LABEL_CHANGED (target_label) = 1;
66 /* Note that TARGET_LABEL is a possible successor instruction.
67 Merge the type state etc.
68 Return NULL on sucess, or an error message on failure. */
71 check_pending_block (target_label)
74 int changed = merge_type_state (target_label);
79 return "types could not be merged";
80 push_pending_label (target_label);
83 if (current_subr == NULL)
85 if (LABEL_IN_SUBR (target_label))
86 return "might transfer control into subroutine";
90 if (LABEL_IN_SUBR (target_label))
92 if (LABEL_SUBR_START (target_label) != current_subr)
93 return "transfer out of subroutine";
95 else if (! LABEL_VERIFIED (target_label))
97 LABEL_IN_SUBR (target_label) = 1;
98 LABEL_SUBR_START (target_label) = current_subr;
101 return "transfer out of subroutine";
106 /* Return the "merged" types of TYPE1 and TYPE2.
107 If either is primitive, the other must match (after promotion to int).
108 For reference types, return the common super-class.
109 Return TYPE_UNKNOWN if the types cannot be merged. */
112 merge_types (type1, type2)
117 if (type1 == TYPE_UNKNOWN || type2 == TYPE_UNKNOWN
118 || type1 == TYPE_RETURN_ADDR || type2 == TYPE_RETURN_ADDR)
120 if (TREE_CODE (type1) == POINTER_TYPE && TREE_CODE (type2) == POINTER_TYPE)
124 /* ptr_type_node is only used for a null reference,
125 which is compatible with any reference type. */
126 if (type1 == ptr_type_node || type2 == object_ptr_type_node)
128 if (type2 == ptr_type_node || type1 == object_ptr_type_node)
131 tt1 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type1));
132 tt2 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type2));
134 /* If tt{1,2} haven't been properly loaded, now is a good time
136 if (!TYPE_SIZE (tt1))
139 safe_layout_class (tt1);
142 if (!TYPE_SIZE (tt2))
145 safe_layout_class (tt2);
148 if (TYPE_ARRAY_P (tt1) || TYPE_ARRAY_P (tt2))
150 if (TYPE_ARRAY_P (tt1) == TYPE_ARRAY_P (tt2))
152 tree el_type1 = TYPE_ARRAY_ELEMENT (tt1);
153 tree el_type2 = TYPE_ARRAY_ELEMENT (tt2);
154 tree el_type = NULL_TREE;
155 if (el_type1 == el_type2)
157 else if (TREE_CODE (el_type1) == POINTER_TYPE
158 && TREE_CODE (el_type2) == POINTER_TYPE)
159 el_type = merge_types (el_type1, el_type2);
160 if (el_type != NULL_TREE)
162 HOST_WIDE_INT len1 = java_array_type_length (tt1);
163 HOST_WIDE_INT len2 = java_array_type_length (tt2);
166 else if (el_type1 == el_type2)
168 return promote_type (build_java_array_type (el_type, len1));
171 return object_ptr_type_node;
174 if (CLASS_INTERFACE (TYPE_NAME (tt1)))
176 /* FIXME: should see if two interfaces have a common
178 if (CLASS_INTERFACE (TYPE_NAME (tt2)))
180 /* This is a kludge, but matches what Sun's verifier does.
181 It can be tricked, but is safe as long as type errors
182 (i.e. interface method calls) are caught at run-time. */
183 return object_ptr_type_node;
187 if (can_widen_reference_to (tt2, tt1))
190 return object_ptr_type_node;
193 else if (CLASS_INTERFACE (TYPE_NAME (tt2)))
195 if (can_widen_reference_to (tt1, tt2))
198 return object_ptr_type_node;
204 depth1 = class_depth (type1);
205 depth2 = class_depth (type2);
206 for ( ; depth1 > depth2; depth1--)
207 type1 = TYPE_BINFO_BASETYPE (type1, 0);
208 for ( ; depth2 > depth1; depth2--)
209 type2 = TYPE_BINFO_BASETYPE (type2, 0);
210 while (type1 != type2)
212 type1 = TYPE_BINFO_BASETYPE (type1, 0);
213 type2 = TYPE_BINFO_BASETYPE (type2, 0);
215 return promote_type (type1);
217 if (INTEGRAL_TYPE_P (type1) && INTEGRAL_TYPE_P (type2)
218 && TYPE_PRECISION (type1) <= 32 && TYPE_PRECISION (type2) <= 32)
219 return int_type_node;
223 /* Merge the current type state with that at LABEL.
224 Return -1 the the states are incompatible (i.e. on error),
225 0 if there was no change, and 1 if there was a change. */
228 merge_type_state (label)
231 int nlocals = DECL_MAX_LOCALS (current_function_decl);
232 int cur_length = stack_pointer + nlocals;
233 tree vec = LABEL_TYPE_STATE (label);
235 if (vec == NULL_TREE)
239 vec = make_tree_vec (cur_length);
240 LABEL_TYPE_STATE (label) = vec;
242 while (--cur_length >= 0)
243 TREE_VEC_ELT (vec, cur_length) = type_map [cur_length];
250 if (LABEL_IS_SUBR_START (label) && LABEL_VERIFIED (label)
251 && current_subr != label)
252 return_map = LABEL_RETURN_TYPE_STATE (label);
254 return_map = NULL_TREE;
255 if (TREE_VEC_LENGTH (vec) != cur_length)
259 for (i = 0; i < cur_length; i++)
261 tree old_type = TREE_VEC_ELT (vec, i);
262 tree new_type = merge_types (old_type, type_map [i]);
263 if (TREE_VEC_ELT (vec, i) != new_type)
265 /* If there has been a change, note that since we must re-verify.
266 However, if the label is the start of a subroutine,
267 we don't care about local variables that are neither
268 set nor used in the sub-routine. */
269 if (return_map == NULL_TREE || i >= nlocals
270 || TREE_VEC_ELT (return_map, i) != TYPE_UNUSED
271 || (TYPE_IS_WIDE (new_type)
272 && TREE_VEC_ELT (return_map, i+1) != TYPE_UNUSED))
275 TREE_VEC_ELT (vec, i) = new_type;
276 if (new_type == TYPE_UNKNOWN)
281 else if (TYPE_IS_WIDE (new_type))
288 /* Handle dup-like operations. */
291 type_stack_dup (size, offset)
296 if (size + offset > stack_pointer)
297 error ("stack underflow - dup* operation");
298 for (index = 0; index < size + offset; index++)
300 type[index] = stack_type_map[stack_pointer - 1];
301 if (type[index] == void_type_node)
304 type[index] = stack_type_map[stack_pointer - 2];
305 if (! TYPE_IS_WIDE (type[index]))
306 fatal ("internal error - dup operation");
307 if (index == size || index == size + offset)
308 fatal ("dup operation splits 64-bit number");
310 pop_type (type[index]);
312 for (index = size; --index >= 0; )
314 if (type[index] != void_type_node)
315 push_type (type[index]);
318 for (index = size + offset; --index >= 0; )
320 if (type[index] != void_type_node)
321 push_type (type[index]);
325 /* This keeps track of a start PC and corresponding initial index. */
332 /* A helper that is used when sorting exception ranges. */
334 start_pc_cmp (xp, yp)
338 const struct pc_index *x = (const struct pc_index *) xp;
339 const struct pc_index *y = (const struct pc_index *) yp;
340 return x->start_pc - y->start_pc;
343 /* This causes the next iteration to ignore the next instruction
344 and look for some other unhandled instruction. */
345 #define INVALIDATE_PC (prevpc = -1, oldpc = PC, PC = INVALID_PC)
346 #define INVALID_PC (-1)
348 #define VERIFICATION_ERROR(MESSAGE) \
349 do { message = MESSAGE; goto verify_error; } while (0)
351 /* Recursive helper function to pop argument types during verifiation.
352 ARG_TYPES is the list of formal parameter types.
353 Return NULL on success and a freshly malloc'd error message on failure. */
356 pop_argument_types (arg_types)
359 if (arg_types == end_params_node)
361 if (TREE_CODE (arg_types) == TREE_LIST)
363 char *message = pop_argument_types (TREE_CHAIN (arg_types));
365 pop_type_0 (TREE_VALUE (arg_types), &message);
371 #define POP_TYPE(TYPE, MESSAGE) \
372 do { pmessage = NULL; pop_type_0 (TYPE, &pmessage); \
373 if (pmessage != NULL) goto pop_type_error; \
376 #define POP_TYPE_CONV(TYPE, POPPED_TYPE, MESSAGE) \
377 do { pmessage = NULL; POPPED_TYPE = pop_type_0 (TYPE, &pmessage); \
378 if (pmessage != NULL) goto pop_type_error; \
381 #define PUSH_TYPE(TYPE) \
382 do { if (! push_type_0 (TYPE)) { goto stack_overflow; }} while (0)
384 #define PUSH_PENDING(LABEL) \
385 do { tree tmplab = LABEL; \
386 if ((message = check_pending_block (tmplab)) != NULL) \
387 { oldpc = LABEL_PC (tmplab); goto verify_error; }} while (0)
390 #define CHECK_PC_IN_RANGE(PC) ({if (PC < 0 || PC > length) goto bad_pc; (void)1;})
392 #define CHECK_PC_IN_RANGE(PC) (PC < 0 || PC > length ? \
393 (fatal("Bad byte codes.\n"), 0) : 1)
396 #define BCODE byte_ops
398 /* Verify the bytecodes of the current method.
399 Return 1 on sucess, 0 on failure. */
401 verify_jvm_instructions (jcf, byte_ops, length)
403 const unsigned char *byte_ops;
410 int oldpc = 0; /* PC of start of instruction. */
411 int prevpc = 0; /* If >= 0, PC of previous instruction. */
415 register unsigned char *p;
416 struct eh_range *prev_eh_ranges = NULL_EH_RANGE;
417 struct eh_range *eh_ranges;
418 tree return_type = TREE_TYPE (TREE_TYPE (current_function_decl));
419 struct pc_index *starts;
424 pending_blocks = NULL_TREE;
426 /* Handle the exception table. */
427 method_init_exceptions ();
428 JCF_SEEK (jcf, DECL_CODE_OFFSET (current_function_decl) + length);
429 eh_count = JCF_readu2 (jcf);
431 /* We read the exception handlers in order of increasing start PC.
432 To do this we first read and sort the start PCs. */
433 starts = (struct pc_index *) xmalloc (eh_count * sizeof (struct pc_index));
434 for (i = 0; i < eh_count; ++i)
436 starts[i].start_pc = GET_u2 (jcf->read_ptr + 8 * i);
439 qsort (starts, eh_count, sizeof (struct pc_index), start_pc_cmp);
441 for (i = 0; i < eh_count; ++i)
443 int start_pc, end_pc, handler_pc, catch_type;
445 p = jcf->read_ptr + 8 * starts[i].index;
447 start_pc = GET_u2 (p);
448 end_pc = GET_u2 (p+2);
449 handler_pc = GET_u2 (p+4);
450 catch_type = GET_u2 (p+6);
452 if (start_pc < 0 || start_pc >= length
453 || end_pc < 0 || end_pc > length || start_pc >= end_pc
454 || handler_pc < 0 || handler_pc >= length
455 || (handler_pc >= start_pc && handler_pc < end_pc)
456 || ! (instruction_bits [start_pc] & BCODE_INSTRUCTION_START)
457 || (end_pc < length &&
458 ! (instruction_bits [end_pc] & BCODE_INSTRUCTION_START))
459 || ! (instruction_bits [handler_pc] & BCODE_INSTRUCTION_START))
461 error ("bad pc in exception_table");
466 add_handler (start_pc, end_pc,
467 lookup_label (handler_pc),
468 catch_type == 0 ? NULL_TREE
469 : get_class_constant (jcf, catch_type));
471 instruction_bits [handler_pc] |= BCODE_EXCEPTION_TARGET;
475 handle_nested_ranges ();
481 if (((PC != INVALID_PC
482 && instruction_bits [PC] & BCODE_TARGET) != 0)
485 PUSH_PENDING (lookup_label (PC));
488 /* Check if there are any more pending blocks in the current
489 subroutine. Because we push pending blocks in a
490 last-in-first-out order, and because we don't push anything
491 from our caller until we are done with this subroutine or
492 anything nested in it, then we are done if the top of the
493 pending_blocks stack is not in a subroutine, or it is in our
498 tree caller = LABEL_SUBR_CONTEXT (current_subr);
500 if (pending_blocks == NULL_TREE
501 || ! LABEL_IN_SUBR (pending_blocks)
502 || LABEL_SUBR_START (pending_blocks) == caller)
504 int size = DECL_MAX_LOCALS(current_function_decl)+stack_pointer;
505 tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
506 tmp = LABEL_RETURN_LABELS (current_subr);
508 /* FIXME: If we exit a subroutine via a throw, we might
509 have returned to an earlier caller. Obviously a
510 "ret" can only return one level, but a throw may
511 return many levels.*/
512 current_subr = caller;
514 if (RETURN_MAP_ADJUSTED (ret_map))
516 /* Since we are done with this subroutine , set up
517 the (so far known) return address as pending -
518 with the merged type state. */
519 for ( ; tmp != NULL_TREE; tmp = TREE_CHAIN (tmp))
521 tree return_label = TREE_VALUE (tmp);
522 tree return_state = LABEL_TYPE_STATE (return_label);
523 if (return_state == NULL_TREE)
525 /* This means means we had not verified the
526 subroutine earlier, so this is the first jsr to
527 call it. In this case, the type_map of the return
528 address is just the current type_map - and that
529 is handled by the following PUSH_PENDING. */
533 /* In this case we have to do a merge. But first
534 restore the type_map for unused slots to those
535 that were in effect at the jsr. */
536 for (index = size; --index >= 0; )
538 type_map[index] = TREE_VEC_ELT (ret_map, index);
539 if (type_map[index] == TYPE_UNUSED)
541 = TREE_VEC_ELT (return_state, index);
544 PUSH_PENDING (return_label);
549 if (PC == INVALID_PC)
551 label = pending_blocks;
552 if (label == NULL_TREE)
553 break; /* We're done! */
554 pending_blocks = LABEL_PENDING_CHAIN (label);
555 LABEL_CHANGED (label) = 0;
557 if (LABEL_IN_SUBR (label))
558 current_subr = LABEL_SUBR_START (label);
560 current_subr = NULL_TREE;
562 /* Restore type_map and stack_pointer from
563 LABEL_TYPE_STATE (label), and continue
564 compiling from there. */
565 load_type_state (label);
566 PC = LABEL_PC (label);
568 else if (PC >= length)
569 VERIFICATION_ERROR ("falling through end of method");
571 /* fprintf (stderr, "** %d\n", PC); */
575 if (!(instruction_bits [PC] & BCODE_INSTRUCTION_START) && ! wide)
576 VERIFICATION_ERROR ("PC not at instruction start");
578 instruction_bits[PC] |= BCODE_VERIFIED;
580 eh_ranges = find_handler (oldpc);
582 op_code = byte_ops[PC++];
585 int is_static, is_putting;
588 case OPCODE_iconst_m1:
589 case OPCODE_iconst_0: case OPCODE_iconst_1: case OPCODE_iconst_2:
590 case OPCODE_iconst_3: case OPCODE_iconst_4: case OPCODE_iconst_5:
591 i = op_code - OPCODE_iconst_0;
594 if (byte_ops[PC] == OPCODE_newarray
595 || byte_ops[PC] == OPCODE_newarray)
597 PUSH_TYPE (int_type_node); break;
598 case OPCODE_lconst_0: case OPCODE_lconst_1:
599 PUSH_TYPE (long_type_node); break;
600 case OPCODE_fconst_0: case OPCODE_fconst_1: case OPCODE_fconst_2:
601 PUSH_TYPE (float_type_node); break;
602 case OPCODE_dconst_0: case OPCODE_dconst_1:
603 PUSH_TYPE (double_type_node); break;
610 case OPCODE_iload: type = int_type_node; goto general_load;
611 case OPCODE_lload: type = long_type_node; goto general_load;
612 case OPCODE_fload: type = float_type_node; goto general_load;
613 case OPCODE_dload: type = double_type_node; goto general_load;
614 case OPCODE_aload: type = ptr_type_node; goto general_load;
616 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
619 case OPCODE_iload_0: type = int_type_node; index = 0; goto load;
620 case OPCODE_iload_1: type = int_type_node; index = 1; goto load;
621 case OPCODE_iload_2: type = int_type_node; index = 2; goto load;
622 case OPCODE_iload_3: type = int_type_node; index = 3; goto load;
623 case OPCODE_lload_0: type = long_type_node; index = 0; goto load;
624 case OPCODE_lload_1: type = long_type_node; index = 1; goto load;
625 case OPCODE_lload_2: type = long_type_node; index = 2; goto load;
626 case OPCODE_lload_3: type = long_type_node; index = 3; goto load;
627 case OPCODE_fload_0: type = float_type_node; index = 0; goto load;
628 case OPCODE_fload_1: type = float_type_node; index = 1; goto load;
629 case OPCODE_fload_2: type = float_type_node; index = 2; goto load;
630 case OPCODE_fload_3: type = float_type_node; index = 3; goto load;
631 case OPCODE_dload_0: type = double_type_node; index = 0; goto load;
632 case OPCODE_dload_1: type = double_type_node; index = 1; goto load;
633 case OPCODE_dload_2: type = double_type_node; index = 2; goto load;
634 case OPCODE_dload_3: type = double_type_node; index = 3; goto load;
635 case OPCODE_aload_0: type = ptr_type_node; index = 0; goto load;
636 case OPCODE_aload_1: type = ptr_type_node; index = 1; goto load;
637 case OPCODE_aload_2: type = ptr_type_node; index = 2; goto load;
638 case OPCODE_aload_3: type = ptr_type_node; index = 3; goto load;
641 || (index + TYPE_IS_WIDE (type)
642 >= DECL_MAX_LOCALS (current_function_decl)))
643 VERIFICATION_ERROR ("invalid local variable index in load");
644 tmp = type_map[index];
645 if (tmp == TYPE_UNKNOWN || tmp == TYPE_SECOND
646 || (TYPE_IS_WIDE (type)
647 && type_map[index+1] != void_type_node)
648 || (type == ptr_type_node
649 ? TREE_CODE (tmp) != POINTER_TYPE
650 : type == int_type_node
651 ? (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
653 VERIFICATION_ERROR("invalid local variable type in load");
656 case OPCODE_istore: type = int_type_node; goto general_store;
657 case OPCODE_lstore: type = long_type_node; goto general_store;
658 case OPCODE_fstore: type = float_type_node; goto general_store;
659 case OPCODE_dstore: type = double_type_node; goto general_store;
660 case OPCODE_astore: type = object_ptr_type_node; goto general_store;
662 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
665 case OPCODE_istore_0: type = int_type_node; index = 0; goto store;
666 case OPCODE_istore_1: type = int_type_node; index = 1; goto store;
667 case OPCODE_istore_2: type = int_type_node; index = 2; goto store;
668 case OPCODE_istore_3: type = int_type_node; index = 3; goto store;
669 case OPCODE_lstore_0: type = long_type_node; index=0; goto store;
670 case OPCODE_lstore_1: type = long_type_node; index=1; goto store;
671 case OPCODE_lstore_2: type = long_type_node; index=2; goto store;
672 case OPCODE_lstore_3: type = long_type_node; index=3; goto store;
673 case OPCODE_fstore_0: type=float_type_node; index=0; goto store;
674 case OPCODE_fstore_1: type=float_type_node; index=1; goto store;
675 case OPCODE_fstore_2: type=float_type_node; index=2; goto store;
676 case OPCODE_fstore_3: type=float_type_node; index=3; goto store;
677 case OPCODE_dstore_0: type=double_type_node; index=0; goto store;
678 case OPCODE_dstore_1: type=double_type_node; index=1; goto store;
679 case OPCODE_dstore_2: type=double_type_node; index=2; goto store;
680 case OPCODE_dstore_3: type=double_type_node; index=3; goto store;
681 case OPCODE_astore_0: type = ptr_type_node; index = 0; goto store;
682 case OPCODE_astore_1: type = ptr_type_node; index = 1; goto store;
683 case OPCODE_astore_2: type = ptr_type_node; index = 2; goto store;
684 case OPCODE_astore_3: type = ptr_type_node; index = 3; goto store;
687 || (index + TYPE_IS_WIDE (type)
688 >= DECL_MAX_LOCALS (current_function_decl)))
690 VERIFICATION_ERROR ("invalid local variable index in store");
693 POP_TYPE_CONV (type, type, NULL);
694 type_map[index] = type;
696 /* If local variable changed, we need to reconsider eh handlers. */
697 prev_eh_ranges = NULL_EH_RANGE;
699 /* Allocate decl and rtx for this variable now, so if we're not
700 optmizing, we get a temporary that survives the whole method. */
701 find_local_variable (index, type, oldpc);
703 if (TYPE_IS_WIDE (type))
704 type_map[index+1] = TYPE_SECOND;
705 /* ... fall through to note_used ... */
707 /* For store or load, note that local variable INDEX is used.
708 This is needed to verify try-finally sub-routines. */
711 tree vec = LABEL_RETURN_TYPE_STATE (current_subr);
712 tree subr_vec = LABEL_TYPE_STATE (current_subr);
713 int len = 1 + TYPE_IS_WIDE (type);
716 if (TREE_VEC_ELT (vec, index) == TYPE_UNUSED)
717 TREE_VEC_ELT (vec, index) = TREE_VEC_ELT (subr_vec, index);
732 type = int_type_node; goto binop;
737 type = int_type_node; goto unop;
746 type = long_type_node; goto binop;
748 type = long_type_node; goto unop;
749 case OPCODE_fadd: case OPCODE_fsub:
750 case OPCODE_fmul: case OPCODE_fdiv: case OPCODE_frem:
751 type = float_type_node; goto binop;
753 type = float_type_node; goto unop;
754 case OPCODE_dadd: case OPCODE_dsub:
755 case OPCODE_dmul: case OPCODE_ddiv: case OPCODE_drem:
756 type = double_type_node; goto binop;
758 type = double_type_node; goto unop;
771 pop_type (int_type_node);
772 pop_type (long_type_node);
773 PUSH_TYPE (long_type_node);
776 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
779 if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl))
780 VERIFICATION_ERROR ("invalid local variable index in iinc");
781 tmp = type_map[index];
783 || ! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
784 VERIFICATION_ERROR ("invalid local variable type in iinc");
787 pop_type (int_type_node); PUSH_TYPE (long_type_node); break;
789 pop_type (int_type_node); PUSH_TYPE (float_type_node); break;
791 pop_type (int_type_node); PUSH_TYPE (double_type_node); break;
793 pop_type (long_type_node); PUSH_TYPE (int_type_node); break;
795 pop_type (long_type_node); PUSH_TYPE (float_type_node); break;
797 pop_type (long_type_node); PUSH_TYPE (double_type_node); break;
799 pop_type (float_type_node); PUSH_TYPE (int_type_node); break;
801 pop_type (float_type_node); PUSH_TYPE (long_type_node); break;
803 pop_type (float_type_node); PUSH_TYPE (double_type_node); break;
805 pop_type (double_type_node); PUSH_TYPE (int_type_node); break;
807 pop_type (double_type_node); PUSH_TYPE (long_type_node); break;
809 pop_type (double_type_node); PUSH_TYPE (float_type_node); break;
811 type = long_type_node; goto compare;
814 type = float_type_node; goto compare;
817 type = double_type_node; goto compare;
819 pop_type (type); pop_type (type);
820 PUSH_TYPE (int_type_node); break;
827 pop_type (int_type_node); goto cond;
829 case OPCODE_ifnonnull:
830 pop_type (ptr_type_node ); goto cond;
831 case OPCODE_if_icmpeq:
832 case OPCODE_if_icmpne:
833 case OPCODE_if_icmplt:
834 case OPCODE_if_icmpge:
835 case OPCODE_if_icmpgt:
836 case OPCODE_if_icmple:
837 pop_type (int_type_node); pop_type (int_type_node); goto cond;
838 case OPCODE_if_acmpeq:
839 case OPCODE_if_acmpne:
840 pop_type (object_ptr_type_node); pop_type (object_ptr_type_node);
843 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
846 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
850 switch (byte_ops[PC])
852 case OPCODE_iload: case OPCODE_lload:
853 case OPCODE_fload: case OPCODE_dload: case OPCODE_aload:
854 case OPCODE_istore: case OPCODE_lstore:
855 case OPCODE_fstore: case OPCODE_dstore: case OPCODE_astore:
861 VERIFICATION_ERROR ("invalid use of wide instruction");
864 case OPCODE_return: type = void_type_node; goto ret;
866 if ((TREE_CODE (return_type) == BOOLEAN_TYPE
867 || TREE_CODE (return_type) == CHAR_TYPE
868 || TREE_CODE (return_type) == INTEGER_TYPE)
869 && TYPE_PRECISION (return_type) <= 32)
874 case OPCODE_lreturn: type = long_type_node; goto ret;
875 case OPCODE_freturn: type = float_type_node; goto ret;
876 case OPCODE_dreturn: type = double_type_node; goto ret;
878 if (TREE_CODE (return_type) == POINTER_TYPE)
884 if (type != return_type)
885 VERIFICATION_ERROR ("incorrect ?return opcode");
886 if (type != void_type_node)
887 POP_TYPE(type, "return value has wrong type");
890 case OPCODE_getstatic: is_putting = 0; is_static = 1; goto field;
891 case OPCODE_putstatic: is_putting = 1; is_static = 1; goto field;
892 case OPCODE_getfield: is_putting = 0; is_static = 0; goto field;
893 case OPCODE_putfield: is_putting = 1; is_static = 0; goto field;
896 int index = IMMEDIATE_u2;
897 tree field_signature = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool, index);
898 tree field_type = get_type_from_signature (field_signature);
900 POP_TYPE (field_type, "incorrect type for field");
903 int clindex = COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool,
905 tree self_type = get_class_constant (current_jcf, clindex);
906 /* Defer actual checking until next pass. */
907 POP_TYPE(self_type, "incorrect type for field reference");
910 PUSH_TYPE (field_type);
914 PUSH_TYPE (get_class_constant (jcf, IMMEDIATE_u2));
916 case OPCODE_dup: type_stack_dup (1, 0); break;
917 case OPCODE_dup_x1: type_stack_dup (1, 1); break;
918 case OPCODE_dup_x2: type_stack_dup (1, 2); break;
919 case OPCODE_dup2: type_stack_dup (2, 0); break;
920 case OPCODE_dup2_x1: type_stack_dup (2, 1); break;
921 case OPCODE_dup2_x2: type_stack_dup (2, 2); break;
922 case OPCODE_pop: index = 1; goto pop;
923 case OPCODE_pop2: index = 2; goto pop;
925 if (stack_pointer < index)
926 VERIFICATION_ERROR ("stack underflow");
927 stack_pointer -= index;
930 if (stack_pointer < 2)
931 VERIFICATION_ERROR ("stack underflow (in swap)");
934 tree type1 = stack_type_map[stack_pointer - 1];
935 tree type2 = stack_type_map[stack_pointer - 2];
936 if (type1 == void_type_node || type2 == void_type_node)
937 VERIFICATION_ERROR ("verifier (swap): double or long value");
938 stack_type_map[stack_pointer - 2] = type1;
939 stack_type_map[stack_pointer - 1] = type2;
942 case OPCODE_ldc: index = IMMEDIATE_u1; goto ldc;
945 index = IMMEDIATE_u2; goto ldc;
947 if (index <= 0 || index >= JPOOL_SIZE(current_jcf))
948 VERIFICATION_ERROR ("bad constant pool index in ldc");
950 switch (JPOOL_TAG (current_jcf, index) & ~CONSTANT_ResolvedFlag)
952 case CONSTANT_Integer: type = int_type_node; goto check_ldc;
953 case CONSTANT_Float: type = float_type_node; goto check_ldc;
954 case CONSTANT_String: type = string_type_node; goto check_ldc;
955 case CONSTANT_Long: type = long_type_node; goto check_ldc;
956 case CONSTANT_Double: type = double_type_node; goto check_ldc;
958 if (TYPE_IS_WIDE (type) == (op_code == OPCODE_ldc2_w))
960 /* ... else fall through ... */
962 VERIFICATION_ERROR ("bad constant pool tag in ldc");
964 if (type == int_type_node)
966 i = TREE_INT_CST_LOW (get_constant (current_jcf, index));
972 case OPCODE_invokevirtual:
973 case OPCODE_invokespecial:
974 case OPCODE_invokestatic:
975 case OPCODE_invokeinterface:
977 int index = IMMEDIATE_u2;
978 tree sig = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool, index);
979 tree self_type = get_class_constant
980 (current_jcf, COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool,
982 tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool, index);
984 method_type = parse_signature_string (IDENTIFIER_POINTER (sig),
985 IDENTIFIER_LENGTH (sig));
986 if (TREE_CODE (method_type) != FUNCTION_TYPE)
987 VERIFICATION_ERROR ("bad method signature");
988 pmessage = pop_argument_types (TYPE_ARG_TYPES (method_type));
989 if (pmessage != NULL)
991 message = "invalid argument type";
995 /* Can't invoke <clinit> */
996 if (ID_CLINIT_P (method_name))
997 VERIFICATION_ERROR ("invoke opcode can't invoke <clinit>");
998 /* Apart invokespecial, can't invoke <init> */
999 if (op_code != OPCODE_invokespecial && ID_INIT_P (method_name))
1000 VERIFICATION_ERROR ("invoke opcode can't invoke <init>");
1002 if (op_code != OPCODE_invokestatic)
1003 POP_TYPE (self_type,
1004 "stack type not subclass of invoked method's class");
1008 case OPCODE_invokeinterface:
1010 int nargs = IMMEDIATE_u1;
1011 int notZero = IMMEDIATE_u1;
1013 if (!nargs || notZero)
1015 ("invalid argument number in invokeinterface");
1020 if (TREE_TYPE (method_type) != void_type_node)
1021 PUSH_TYPE (TREE_TYPE (method_type));
1025 case OPCODE_arraylength:
1026 /* Type checking actually made during code generation */
1027 pop_type( ptr_type_node );
1028 PUSH_TYPE( int_type_node );
1031 /* Q&D verification *or* more checking done during code generation
1032 for byte/boolean/char/short, the value popped is a int coerced
1033 into the right type before being stored. */
1034 case OPCODE_iastore: type = int_type_node; goto astore;
1035 case OPCODE_lastore: type = long_type_node; goto astore;
1036 case OPCODE_fastore: type = float_type_node; goto astore;
1037 case OPCODE_dastore: type = double_type_node; goto astore;
1038 case OPCODE_aastore: type = ptr_type_node; goto astore;
1039 case OPCODE_bastore: type = int_type_node; goto astore;
1040 case OPCODE_castore: type = int_type_node; goto astore;
1041 case OPCODE_sastore: type = int_type_node; goto astore;
1043 /* FIXME - need better verification here */
1044 pop_type (type); /* new value */
1045 pop_type (int_type_node); /* index */
1046 pop_type (ptr_type_node); /* array */
1049 /* Q&D verification *or* more checking done during code generation
1050 for byte/boolean/char/short, the value pushed is a int. */
1051 case OPCODE_iaload: type = int_type_node; goto aload;
1052 case OPCODE_laload: type = long_type_node; goto aload;
1053 case OPCODE_faload: type = float_type_node; goto aload;
1054 case OPCODE_daload: type = double_type_node; goto aload;
1055 case OPCODE_aaload: type = ptr_type_node; goto aload;
1056 case OPCODE_baload: type = promote_type (byte_type_node); goto aload;
1057 case OPCODE_caload: type = promote_type (char_type_node); goto aload;
1058 case OPCODE_saload: type = promote_type (short_type_node); goto aload;
1060 pop_type (int_type_node);
1061 tmp = pop_type (ptr_type_node);
1062 if (is_array_type_p (tmp))
1063 type = TYPE_ARRAY_ELEMENT (TREE_TYPE (tmp));
1064 else if (tmp != TYPE_NULL)
1065 VERIFICATION_ERROR ("array load from non-array type");
1069 case OPCODE_anewarray:
1070 type = get_class_constant (current_jcf, IMMEDIATE_u2);
1071 type = promote_type (type);
1074 case OPCODE_newarray:
1075 index = IMMEDIATE_u1;
1076 type = decode_newarray_type (index);
1077 if (type == NULL_TREE)
1078 VERIFICATION_ERROR ("invalid type code in newarray opcode");
1082 if (int_value >= 0 && prevpc >= 0)
1084 /* If previous instruction pushed int constant,
1085 we want to use it. */
1086 switch (byte_ops[prevpc])
1088 case OPCODE_iconst_0: case OPCODE_iconst_1:
1089 case OPCODE_iconst_2: case OPCODE_iconst_3:
1090 case OPCODE_iconst_4: case OPCODE_iconst_5:
1091 case OPCODE_bipush: case OPCODE_sipush:
1092 case OPCODE_ldc: case OPCODE_ldc_w:
1100 type = build_java_array_type (type, int_value);
1101 pop_type (int_type_node);
1105 case OPCODE_multianewarray:
1108 index = IMMEDIATE_u2;
1109 ndim = IMMEDIATE_u1;
1112 VERIFICATION_ERROR ("number of dimension lower that 1 in multianewarray" );
1114 for( i = 0; i < ndim; i++ )
1115 pop_type (int_type_node);
1116 PUSH_TYPE (get_class_constant (current_jcf, index));
1120 case OPCODE_aconst_null:
1121 PUSH_TYPE (ptr_type_node);
1125 /* FIXME: athrow also empties the stack. */
1126 pop_type (throwable_type_node);
1130 case OPCODE_checkcast:
1131 pop_type (ptr_type_node);
1132 type = get_class_constant (current_jcf, IMMEDIATE_u2);
1135 case OPCODE_instanceof:
1136 pop_type (ptr_type_node);
1137 get_class_constant (current_jcf, IMMEDIATE_u2);
1138 PUSH_TYPE (int_type_node);
1141 case OPCODE_tableswitch:
1145 pop_type (int_type_node);
1149 VERIFICATION_ERROR ("bad alignment in tableswitch pad");
1151 PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
1153 high = IMMEDIATE_s4;
1156 VERIFICATION_ERROR ("unsorted low/high value in tableswitch");
1158 while (low++ <= high)
1159 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1164 case OPCODE_lookupswitch:
1166 jint npairs, last = 0, not_registered = 1;
1168 pop_type (int_type_node);
1172 VERIFICATION_ERROR ("bad alignment in lookupswitch pad");
1175 PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
1176 npairs = IMMEDIATE_s4;
1179 VERIFICATION_ERROR ("invalid number of targets in lookupswitch");
1183 int match = IMMEDIATE_s4;
1186 else if (last >= match)
1187 VERIFICATION_ERROR ("unsorted match value in lookupswitch");
1190 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1196 case OPCODE_monitorenter:
1198 case OPCODE_monitorexit:
1199 pop_type (ptr_type_node);
1203 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1209 tree target = lookup_label (oldpc + IMMEDIATE_s2);
1210 tree return_label = lookup_label (PC);
1211 PUSH_TYPE (return_address_type_node);
1212 /* The return label chain will be null if this is the first
1213 time we've seen this jsr target. */
1214 if (LABEL_RETURN_LABEL (target) == NULL_TREE)
1216 tree return_type_map;
1217 int nlocals = DECL_MAX_LOCALS (current_function_decl);
1218 index = nlocals + DECL_MAX_STACK (current_function_decl);
1219 return_type_map = make_tree_vec (index);
1220 while (index > nlocals)
1221 TREE_VEC_ELT (return_type_map, --index) = TYPE_UNKNOWN;
1223 TREE_VEC_ELT (return_type_map, --index) = TYPE_UNUSED;
1224 LABEL_RETURN_LABEL (target)
1225 = build_decl (LABEL_DECL, NULL_TREE, TREE_TYPE (target));
1226 LABEL_PC (LABEL_RETURN_LABEL (target)) = -1;
1227 LABEL_RETURN_TYPE_STATE (target) = return_type_map;
1228 LABEL_IS_SUBR_START (target) = 1;
1229 LABEL_IN_SUBR (target) = 1;
1230 LABEL_SUBR_START (target) = target;
1231 LABEL_SUBR_CONTEXT (target) = current_subr;
1233 else if (! LABEL_IS_SUBR_START (target)
1234 || LABEL_SUBR_CONTEXT (target) != current_subr)
1235 VERIFICATION_ERROR ("label part of different subroutines");
1237 i = merge_type_state (target);
1241 VERIFICATION_ERROR ("types could not be merged at jsr");
1242 push_pending_label (target);
1244 current_subr = target;
1246 /* Chain return_pc onto LABEL_RETURN_LABELS (target) if needed. */
1247 if (! value_member (return_label, LABEL_RETURN_LABELS (target)))
1249 LABEL_RETURN_LABELS (target)
1250 = tree_cons (NULL_TREE, return_label,
1251 LABEL_RETURN_LABELS (target));
1254 if (LABEL_VERIFIED (target))
1256 tree return_map = LABEL_RETURN_TYPE_STATE (target);
1257 int len = TREE_VEC_LENGTH (return_map);
1258 stack_pointer = len - DECL_MAX_LOCALS (current_function_decl);
1261 if (TREE_VEC_ELT (return_map, len) != TYPE_UNUSED)
1262 type_map[len] = TREE_VEC_ELT (return_map, len);
1264 current_subr = LABEL_SUBR_CONTEXT (target);
1265 PUSH_PENDING (return_label);
1272 if (current_subr == NULL)
1273 VERIFICATION_ERROR ("ret instruction not in a jsr subroutine");
1276 tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
1277 int size = DECL_MAX_LOCALS(current_function_decl)+stack_pointer;
1278 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
1281 if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl)
1282 || type_map[index] != TYPE_RETURN_ADDR)
1283 VERIFICATION_ERROR ("invalid ret index");
1285 /* The next chunk of code is similar to an inlined version of
1286 * merge_type_state (LABEL_RETURN_LABEL (current_subr)).
1287 * The main differences are that LABEL_RETURN_LABEL is
1288 * pre-allocated by the jsr (but we don't know the size then);
1289 * and that we have to handle TYPE_UNUSED. */
1291 if (! RETURN_MAP_ADJUSTED (ret_map))
1292 { /* First return from this subroutine - fix stack pointer. */
1293 TREE_VEC_LENGTH (ret_map) = size;
1294 for (index = size; --index >= 0; )
1296 if (TREE_VEC_ELT (ret_map, index) != TYPE_UNUSED)
1297 TREE_VEC_ELT (ret_map, index) = type_map[index];
1299 RETURN_MAP_ADJUSTED (ret_map) = 1;
1303 if (TREE_VEC_LENGTH (ret_map) != size)
1304 VERIFICATION_ERROR ("inconsistent stack size on ret");
1305 for (index = 0; index < size; index++)
1307 tree type = TREE_VEC_ELT (ret_map, index);
1308 if (type != TYPE_UNUSED)
1310 type = merge_types (type, type_map [index]);
1311 TREE_VEC_ELT (ret_map, index) = type;
1312 if (type == TYPE_UNKNOWN)
1314 if (index >= size - stack_pointer)
1316 ("inconsistent types on ret from jsr");
1318 else if (TYPE_IS_WIDE (type))
1330 error ("unknown opcode %d@pc=%d during verification", op_code, PC-1);
1336 /* The following test is true if we have entered or exited an exception
1337 handler range *or* we have done a store to a local variable.
1338 In either case we need to consider any exception handlers that
1339 might "follow" this instruction. */
1341 if (eh_ranges != prev_eh_ranges)
1343 int save_stack_pointer = stack_pointer;
1344 int index = DECL_MAX_LOCALS (current_function_decl);
1345 tree save_type = type_map[index];
1346 tree save_current_subr = current_subr;
1347 struct eh_range *ranges = find_handler (oldpc);
1349 for (; ranges != NULL_EH_RANGE; ranges = ranges->outer)
1351 tree chain = ranges->handlers;
1353 /* We need to determine if the handler is part of current_subr.
1354 There are two cases: (1) The exception catch range
1355 is entirely within current_subr. In that case the handler
1356 is also part of current_subr.
1357 (2) Some of the catch range is not in current_subr.
1358 In that case, the handler is *not* part of current_subr.
1360 Figuring out which is the case is not necessarily obvious,
1361 in the presence of clever code generators (and obfuscators).
1362 We make a simplifying assumption that in case (2) we
1363 have that the current_subr is entirely within the catch range.
1364 In that case we can assume if that if a caller (the jsr) of
1365 a subroutine is within the catch range, then the handler is
1366 *not* part of the subroutine, and vice versa. */
1368 current_subr = save_current_subr;
1369 for ( ; current_subr != NULL_TREE;
1370 current_subr = LABEL_SUBR_CONTEXT (current_subr))
1372 tree return_labels = LABEL_RETURN_LABELS (current_subr);
1373 /* There could be multiple return_labels, but
1374 we only need to check one. */
1375 int return_pc = LABEL_PC (TREE_VALUE (return_labels));
1376 if (return_pc <= ranges->start_pc
1377 || return_pc > ranges->end_pc)
1381 for ( ; chain != NULL_TREE; chain = TREE_CHAIN (chain))
1383 tree handler = TREE_VALUE (chain);
1384 tree type = TREE_PURPOSE (chain);
1385 if (type == NULL_TREE) /* a finally handler */
1386 type = throwable_type_node;
1387 type_map[index] = promote_type (type);
1389 PUSH_PENDING (handler);
1392 stack_pointer = save_stack_pointer;
1393 current_subr = save_current_subr;
1394 type_map[index] = save_type;
1395 prev_eh_ranges = eh_ranges;
1400 error ("verification error at PC=%d", oldpc);
1401 if (message != NULL)
1402 error ("%s", message);
1403 error ("%s", pmessage);
1407 message = "stack overflow";
1410 message = "program counter out of range";
1413 error ("verification error at PC=%d", oldpc);
1414 error ("%s", message);