1 /* Handle verification of bytecoded methods for the GNU compiler for
3 Copyright (C) 1997, 1998, 1999 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA.
22 Java and all Java-based marks are trademarks or registered trademarks
23 of Sun Microsystems, Inc. in the United States and other countries.
24 The Free Software Foundation is independent of Sun Microsystems, Inc. */
29 #include "java-tree.h"
31 #include "java-opcodes.h"
33 #include "java-except.h"
36 static void push_pending_label PROTO ((tree));
37 static tree merge_types PROTO ((tree, tree));
39 extern int stack_pointer;
41 /* During verification, start of the current subroutine (jsr target). */
44 /* A list of pending blocks, chained using LABEL_PENDING_CHAIN.
45 A pending block is one that has LABEL_CHANGED set, which means
46 it requires (re-) verification. */
49 /* Append TARGET_LABEL to the pending_block stack unless already in it. */
/* NOTE(review): this listing elides lines (gaps in the embedded numbering);
   the return type (static void, per the PROTO above), the K&R parameter
   declaration, and the braces are among the elided lines.  */
52 push_pending_label (target_label)
/* LABEL_CHANGED doubles as the "already pending" flag, so each label is
   queued at most once; it is cleared when the block is taken off the
   pending list for (re-)verification.  */
55 if (! LABEL_CHANGED (target_label))
/* Push onto the front of the pending_blocks list (LIFO), chained
   through LABEL_PENDING_CHAIN.  */
57 LABEL_PENDING_CHAIN (target_label) = pending_blocks;
58 pending_blocks = target_label;
59 LABEL_CHANGED (target_label) = 1;
63 /* Note that TARGET_LABEL is a possible successor instruction.
64 Merge the type state etc.
65 Return NULL on success, or an error message on failure. */
/* NOTE(review): lines are elided here; presumably the elided code tests
   merge_type_state's result (-1 => error, 1 => changed, per its comment
   below) before the two statements that follow — confirm against the
   full source.  */
68 check_pending_block (target_label)
71 int changed = merge_type_state (target_label);
76 return "types could not be merged";
77 push_pending_label (target_label);
/* Outside any subroutine: control must not jump into the middle of a
   jsr subroutine.  */
80 if (current_subr == NULL)
82 if (LABEL_IN_SUBR (target_label))
83 return "might transfer control into subroutine";
/* Inside a subroutine: the target must belong to the *same* subroutine
   (or, if not yet verified, it is claimed for the current one).  */
87 if (LABEL_IN_SUBR (target_label))
89 if (LABEL_SUBR_START (target_label) != current_subr)
90 return "transfer out of subroutine";
92 else if (! LABEL_VERIFIED (target_label))
/* First time we reach this label from within a subroutine:
   record its owning subroutine.  */
94 LABEL_IN_SUBR (target_label) = 1;
95 LABEL_SUBR_START (target_label) = current_subr;
98 return "transfer out of subroutine";
103 /* Return the "merged" types of TYPE1 and TYPE2.
104 If either is primitive, the other must match (after promotion to int).
105 For reference types, return the common super-class.
106 Return TYPE_UNKNOWN if the types cannot be merged. */
/* NOTE(review): many lines (return statements, braces, declarations) are
   elided from this listing — the bodies of several `if`s below are not
   shown.  Comments describe only what the visible code establishes.  */
109 merge_types (type1, type2)
/* TYPE_UNKNOWN and TYPE_RETURN_ADDR never merge with anything
   (elided body presumably returns TYPE_UNKNOWN — confirm).  */
114 if (type1 == TYPE_UNKNOWN || type2 == TYPE_UNKNOWN
115 || type1 == TYPE_RETURN_ADDR || type2 == TYPE_RETURN_ADDR)
117 if (TREE_CODE (type1) == POINTER_TYPE && TREE_CODE (type2) == POINTER_TYPE)
121 /* ptr_type_node is only used for a null reference,
122 which is compatible with any reference type. */
123 if (type1 == ptr_type_node || type2 == object_ptr_type_node)
125 if (type2 == ptr_type_node || type1 == object_ptr_type_node)
/* Strip the handle indirection to get at the underlying class types.  */
128 tt1 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type1));
129 tt2 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type2));
/* Array case: two arrays merge element-wise (recursively for arrays of
   references); an array and a non-array merge to java.lang.Object.  */
131 if (TYPE_ARRAY_P (tt1) || TYPE_ARRAY_P (tt2))
133 if (TYPE_ARRAY_P (tt1) == TYPE_ARRAY_P (tt2))
135 tree el_type1 = TYPE_ARRAY_ELEMENT (tt1);
136 tree el_type2 = TYPE_ARRAY_ELEMENT (tt2);
137 tree el_type = NULL_TREE;
138 if (el_type1 == el_type2)
140 else if (TREE_CODE (el_type1) == POINTER_TYPE
141 && TREE_CODE (el_type2) == POINTER_TYPE)
142 el_type = merge_types (el_type1, el_type2);
143 if (el_type != NULL_TREE)
145 HOST_WIDE_INT len1 = java_array_type_length (tt1);
146 HOST_WIDE_INT len2 = java_array_type_length (tt2);
/* NOTE(review): the use of len2 and the test guarding the return below
   are elided; presumably len1 is clamped/zeroed when lengths differ.  */
149 else if (el_type1 == el_type2)
151 return promote_type (build_java_array_type (el_type, len1));
154 return object_ptr_type_node;
/* Interface kludge: any two interfaces "merge" to Object.  */
157 if (CLASS_INTERFACE (TYPE_NAME (tt1)))
159 if (CLASS_INTERFACE (TYPE_NAME (tt2)))
161 /* This is a kludge, but matches what Sun's verifier does.
162 It can be tricked, but is safe as long as type errors
163 (i.e. interface method calls) are caught at run-time. */
164 return object_ptr_type_node;
168 if (can_widen_reference_to (tt2, tt1))
174 else if (CLASS_INTERFACE (TYPE_NAME (tt2)))
176 if (can_widen_reference_to (tt1, tt2))
/* General class case: walk both inheritance chains up to equal depth,
   then in lock-step until they meet at the common superclass.  */
185 depth1 = class_depth (type1);
186 depth2 = class_depth (type2);
187 for ( ; depth1 > depth2; depth1--)
188 type1 = TYPE_BINFO_BASETYPE (type1, 0);
189 for ( ; depth2 > depth1; depth2--)
190 type2 = TYPE_BINFO_BASETYPE (type2, 0);
191 while (type1 != type2)
193 type1 = TYPE_BINFO_BASETYPE (type1, 0);
194 type2 = TYPE_BINFO_BASETYPE (type2, 0);
196 return promote_type (type1);
/* Primitive case: any two <=32-bit integral types merge to int
   (boolean/byte/char/short are all int on the JVM stack).  */
198 if (INTEGRAL_TYPE_P (type1) && INTEGRAL_TYPE_P (type2)
199 && TYPE_PRECISION (type1) <= 32 && TYPE_PRECISION (type2) <= 32)
200 return int_type_node;
204 /* Merge the current type state with that at LABEL.
205 Return -1 if the states are incompatible (i.e. on error),
206 0 if there was no change, and 1 if there was a change. */
/* NOTE(review): elided lines include the declarations of return_map and
   i, several braces, and the returns for the early paths.  */
209 merge_type_state (label)
/* The per-label state vector covers all locals plus the current
   operand stack: nlocals entries followed by stack_pointer entries.  */
212 int nlocals = DECL_MAX_LOCALS(current_function_decl);
213 int cur_length = stack_pointer + nlocals;
214 tree vec = LABEL_TYPE_STATE (label);
/* First visit: snapshot the current type_map wholesale; by definition
   that is a "change" (elided code presumably returns 1 — confirm).  */
216 if (vec == NULL_TREE)
218 vec = make_tree_vec (cur_length);
219 LABEL_TYPE_STATE (label) = vec;
220 while (--cur_length >= 0)
221 TREE_VEC_ELT (vec, cur_length) = type_map [cur_length];
/* If LABEL starts an already-verified subroutine (other than the one
   being verified), its return map tells us which locals the subroutine
   actually touches, so untouched locals need not force re-verification.  */
228 if (LABEL_IS_SUBR_START (label) && LABEL_VERIFIED (label)
229 && current_subr != label)
230 return_map = LABEL_RETURN_TYPE_STATE (label);
232 return_map = NULL_TREE;
/* Incompatible stack depths can never merge (elided: return -1).  */
233 if (TREE_VEC_LENGTH (vec) != cur_length)
237 for (i = 0; i < cur_length; i++)
239 tree old_type = TREE_VEC_ELT (vec, i);
240 tree new_type = merge_types (old_type, type_map [i]);
241 if (TREE_VEC_ELT (vec, i) != new_type)
243 /* If there has been a change, note that since we must re-verify.
244 However, if the label is the start of a subroutine,
245 we don't care about local variables that are neither
246 set nor used in the sub-routine. */
247 if (return_map == NULL_TREE || i >= nlocals
248 || TREE_VEC_ELT (return_map, i) != TYPE_UNUSED
249 || (TYPE_IS_WIDE (new_type)
250 && TREE_VEC_ELT (return_map, i+1) != TYPE_UNUSED))
253 TREE_VEC_ELT (vec, i) = new_type;
/* A merge to TYPE_UNKNOWN in a live slot is the incompatible case
   (elided handling; wide types additionally occupy slot i+1).  */
254 if (new_type == TYPE_UNKNOWN)
259 else if (TYPE_IS_WIDE (new_type))
266 /* Handle dup-like operations. */
/* Implements dup, dup_x1, dup_x2, dup2, dup2_x1, dup2_x2: pop
   SIZE+OFFSET stack slots, then push the top SIZE slots twice (the
   second copy re-inserted beneath the OFFSET slots).  NOTE(review):
   elided lines include the declarations of index and type[], loop
   braces, and the stack_pointer adjustments inside the first loop.  */
269 type_stack_dup (size, offset)
274 if (size + offset > stack_pointer)
275 error ("stack underflow - dup* operation");
/* Record (and pop) the affected slots, topmost first.  */
276 for (index = 0; index < size + offset; index++)
278 type[index] = stack_type_map[stack_pointer - 1];
/* void_type_node marks the second half of a long/double; fetch the
   real (wide) type from the slot below it.  */
279 if (type[index] == void_type_node)
282 type[index] = stack_type_map[stack_pointer - 2];
283 if (! TYPE_IS_WIDE (type[index]))
284 fatal ("internal error - dup operation");
/* A 64-bit value must not straddle the dup/insert boundary.  */
285 if (index == size || index == size + offset)
286 fatal ("dup operation splits 64-bit number");
288 pop_type (type[index]);
/* Push the duplicated top SIZE slots...  */
290 for (index = size; --index >= 0; )
292 if (type[index] != void_type_node)
293 push_type (type[index]);
/* ...then re-push everything that was popped, restoring order.  */
296 for (index = size + offset; --index >= 0; )
298 if (type[index] != void_type_node)
299 push_type (type[index]);
303 /* This keeps track of a start PC and corresponding initial index. */
310 /* A helper that is used when sorting exception ranges. */
/* qsort comparator ordering struct pc_index entries by ascending
   start_pc.  The subtraction cannot overflow here: start_pc values are
   read with GET_u2 below, so they fit in 0..65535 — but confirm the
   field type in the (elided) struct pc_index definition.  */
312 start_pc_cmp (xp, yp)
313 const GENERIC_PTR xp;
314 const GENERIC_PTR yp;
316 struct pc_index *x = (struct pc_index *) xp;
317 struct pc_index *y = (struct pc_index *) yp;
318 return x->start_pc - y->start_pc;
321 /* This causes the next iteration to ignore the next instruction
322 and look for some other unhandled instruction. */
323 #define INVALIDATE_PC (prevpc = -1, oldpc = PC, PC = INVALID_PC)
324 #define INVALID_PC (-1)
/* Record MESSAGE and jump to the common error handler in
   verify_jvm_instructions.  Relies on `message` and the verify_error
   label being in scope at every expansion site.  */
326 #define VERIFICATION_ERROR(MESSAGE) \
327 do { message = MESSAGE; goto verify_error; } while (0)
/* Queue LABEL for (re-)verification; bail out on a merge failure.  */
329 #define PUSH_PENDING(LABEL) \
330 do { if ((message = check_pending_block (LABEL)) != NULL) \
331 goto verify_error; } while (0)
/* NOTE(review): CHECK_PC_IN_RANGE is defined twice below.  The first
   uses a GNU C statement expression; presumably the two are guarded by
   elided #ifdef __GNUC__ / #else / #endif lines — confirm against the
   full source.  Both evaluate PC more than once, so arguments with
   side effects would misbehave.  */
334 #define CHECK_PC_IN_RANGE(PC) ({if (PC < 0 || PC > length) goto bad_pc; 1;})
336 #define CHECK_PC_IN_RANGE(PC) (PC < 0 || PC > length ? \
337 (fatal("Bad byte codes.\n"), 0) : 1)
/* Shorthand required by the IMMEDIATE_u1/u2/s2/s4 operand-fetch macros.  */
340 #define BCODE byte_ops
342 /* Verify the bytecodes of the current method.
343 Return 1 on sucess, 0 on failure. */
345 verify_jvm_instructions (jcf, byte_ops, length)
347 unsigned char* byte_ops;
354 int oldpc; /* PC of start of instruction. */
355 int prevpc; /* If >= 0, PC of previous instruction. */
358 register unsigned char *p;
359 struct eh_range *prev_eh_ranges = NULL_EH_RANGE;
360 struct eh_range *eh_ranges;
361 tree return_type = TREE_TYPE (TREE_TYPE (current_function_decl));
362 struct pc_index *starts;
367 pending_blocks = NULL_TREE;
369 /* Handle the exception table. */
370 method_init_exceptions ();
371 JCF_SEEK (jcf, DECL_CODE_OFFSET (current_function_decl) + length);
372 eh_count = JCF_readu2 (jcf);
374 /* We read the exception handlers in order of increasing start PC.
375 To do this we first read and sort the start PCs. */
376 starts = (struct pc_index *) xmalloc (eh_count * sizeof (struct pc_index));
377 for (i = 0; i < eh_count; ++i)
379 starts[i].start_pc = GET_u2 (jcf->read_ptr + 8 * i);
382 qsort (starts, eh_count, sizeof (struct pc_index), start_pc_cmp);
384 for (i = 0; i < eh_count; ++i)
386 int start_pc, end_pc, handler_pc, catch_type;
388 p = jcf->read_ptr + 8 * starts[i].index;
390 start_pc = GET_u2 (p);
391 end_pc = GET_u2 (p+2);
392 handler_pc = GET_u2 (p+4);
393 catch_type = GET_u2 (p+6);
395 if (start_pc < 0 || start_pc >= length
396 || end_pc < 0 || end_pc > length || start_pc >= end_pc
397 || handler_pc < 0 || handler_pc >= length
398 || (handler_pc >= start_pc && handler_pc < end_pc)
399 || ! (instruction_bits [start_pc] & BCODE_INSTRUCTION_START)
400 || ! (instruction_bits [end_pc] & BCODE_INSTRUCTION_START)
401 || ! (instruction_bits [handler_pc] & BCODE_INSTRUCTION_START))
403 error ("bad pc in exception_table");
408 add_handler (start_pc, end_pc,
409 lookup_label (handler_pc),
410 catch_type == 0 ? NULL_TREE
411 : get_class_constant (jcf, catch_type));
413 instruction_bits [handler_pc] |= BCODE_EXCEPTION_TARGET;
417 handle_nested_ranges ();
423 if (((PC != INVALID_PC
424 && instruction_bits [PC] & BCODE_TARGET) != 0)
427 PUSH_PENDING (lookup_label (PC));
430 if (PC == INVALID_PC)
432 label = pending_blocks;
433 if (label == NULL_TREE)
434 break; /* We're done! */
435 pending_blocks = LABEL_PENDING_CHAIN (label);
436 LABEL_CHANGED (label) = 0;
438 if (LABEL_IN_SUBR (label))
439 current_subr = LABEL_SUBR_START (label);
441 current_subr = NULL_TREE;
443 /* Restore type_map and stack_pointer from
444 LABEL_TYPE_STATE (label), and continue
445 compiling from there. */
446 load_type_state (label);
447 PC = LABEL_PC (label);
449 else if (PC >= length)
450 VERIFICATION_ERROR ("falling through end of method");
454 if (!(instruction_bits [PC] & BCODE_INSTRUCTION_START) && ! wide)
455 VERIFICATION_ERROR ("PC not at instruction start");
457 instruction_bits[PC] |= BCODE_VERIFIED;
459 eh_ranges = find_handler (oldpc);
461 op_code = byte_ops[PC++];
464 int is_static, is_putting;
467 case OPCODE_iconst_m1:
468 case OPCODE_iconst_0: case OPCODE_iconst_1: case OPCODE_iconst_2:
469 case OPCODE_iconst_3: case OPCODE_iconst_4: case OPCODE_iconst_5:
470 i = op_code - OPCODE_iconst_0;
473 if (byte_ops[PC] == OPCODE_newarray
474 || byte_ops[PC] == OPCODE_newarray)
476 push_type (int_type_node); break;
477 case OPCODE_lconst_0: case OPCODE_lconst_1:
478 push_type (long_type_node); break;
479 case OPCODE_fconst_0: case OPCODE_fconst_1: case OPCODE_fconst_2:
480 push_type (float_type_node); break;
481 case OPCODE_dconst_0: case OPCODE_dconst_1:
482 push_type (double_type_node); break;
489 case OPCODE_iload: type = int_type_node; goto general_load;
490 case OPCODE_lload: type = long_type_node; goto general_load;
491 case OPCODE_fload: type = float_type_node; goto general_load;
492 case OPCODE_dload: type = double_type_node; goto general_load;
493 case OPCODE_aload: type = ptr_type_node; goto general_load;
495 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
498 case OPCODE_iload_0: type = int_type_node; index = 0; goto load;
499 case OPCODE_iload_1: type = int_type_node; index = 1; goto load;
500 case OPCODE_iload_2: type = int_type_node; index = 2; goto load;
501 case OPCODE_iload_3: type = int_type_node; index = 3; goto load;
502 case OPCODE_lload_0: type = long_type_node; index = 0; goto load;
503 case OPCODE_lload_1: type = long_type_node; index = 1; goto load;
504 case OPCODE_lload_2: type = long_type_node; index = 2; goto load;
505 case OPCODE_lload_3: type = long_type_node; index = 3; goto load;
506 case OPCODE_fload_0: type = float_type_node; index = 0; goto load;
507 case OPCODE_fload_1: type = float_type_node; index = 1; goto load;
508 case OPCODE_fload_2: type = float_type_node; index = 2; goto load;
509 case OPCODE_fload_3: type = float_type_node; index = 3; goto load;
510 case OPCODE_dload_0: type = double_type_node; index = 0; goto load;
511 case OPCODE_dload_1: type = double_type_node; index = 1; goto load;
512 case OPCODE_dload_2: type = double_type_node; index = 2; goto load;
513 case OPCODE_dload_3: type = double_type_node; index = 3; goto load;
514 case OPCODE_aload_0: type = ptr_type_node; index = 0; goto load;
515 case OPCODE_aload_1: type = ptr_type_node; index = 1; goto load;
516 case OPCODE_aload_2: type = ptr_type_node; index = 2; goto load;
517 case OPCODE_aload_3: type = ptr_type_node; index = 3; goto load;
520 || (index + TYPE_IS_WIDE (type)
521 >= DECL_MAX_LOCALS (current_function_decl)))
522 VERIFICATION_ERROR ("invalid local variable index in load");
523 tmp = type_map[index];
524 if (tmp == TYPE_UNKNOWN || tmp == TYPE_SECOND
525 || (TYPE_IS_WIDE (type)
526 && type_map[index+1] != void_type_node)
527 || (type == ptr_type_node
528 ? TREE_CODE (tmp) != POINTER_TYPE
529 : type == int_type_node
530 ? (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
532 VERIFICATION_ERROR("invalid local variable type in load");
535 case OPCODE_istore: type = int_type_node; goto general_store;
536 case OPCODE_lstore: type = long_type_node; goto general_store;
537 case OPCODE_fstore: type = float_type_node; goto general_store;
538 case OPCODE_dstore: type = double_type_node; goto general_store;
539 case OPCODE_astore: type = ptr_type_node; goto general_store;
541 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
544 case OPCODE_istore_0: type = int_type_node; index = 0; goto store;
545 case OPCODE_istore_1: type = int_type_node; index = 1; goto store;
546 case OPCODE_istore_2: type = int_type_node; index = 2; goto store;
547 case OPCODE_istore_3: type = int_type_node; index = 3; goto store;
548 case OPCODE_lstore_0: type = long_type_node; index=0; goto store;
549 case OPCODE_lstore_1: type = long_type_node; index=1; goto store;
550 case OPCODE_lstore_2: type = long_type_node; index=2; goto store;
551 case OPCODE_lstore_3: type = long_type_node; index=3; goto store;
552 case OPCODE_fstore_0: type=float_type_node; index=0; goto store;
553 case OPCODE_fstore_1: type=float_type_node; index=1; goto store;
554 case OPCODE_fstore_2: type=float_type_node; index=2; goto store;
555 case OPCODE_fstore_3: type=float_type_node; index=3; goto store;
556 case OPCODE_dstore_0: type=double_type_node; index=0; goto store;
557 case OPCODE_dstore_1: type=double_type_node; index=1; goto store;
558 case OPCODE_dstore_2: type=double_type_node; index=2; goto store;
559 case OPCODE_dstore_3: type=double_type_node; index=3; goto store;
560 case OPCODE_astore_0: type = ptr_type_node; index = 0; goto store;
561 case OPCODE_astore_1: type = ptr_type_node; index = 1; goto store;
562 case OPCODE_astore_2: type = ptr_type_node; index = 2; goto store;
563 case OPCODE_astore_3: type = ptr_type_node; index = 3; goto store;
566 || (index + TYPE_IS_WIDE (type)
567 >= DECL_MAX_LOCALS (current_function_decl)))
569 VERIFICATION_ERROR ("invalid local variable index in store");
572 type = pop_type (type);
573 type_map[index] = type;
575 /* If local variable changed, we need to reconsider eh handlers. */
576 prev_eh_ranges = NULL_EH_RANGE;
578 /* Allocate decl and rtx for this variable now, so if we're not
579 optmizing, we get a temporary that survives the whole method. */
580 find_local_variable (index, type, oldpc);
582 if (TYPE_IS_WIDE (type))
583 type_map[index+1] = TYPE_SECOND;
584 /* ... fall through to note_used ... */
586 /* For store or load, note that local variable INDEX is used.
587 This is needed to verify try-finally sub-routines. */
590 tree vec = LABEL_RETURN_TYPE_STATE (current_subr);
591 tree subr_vec = LABEL_TYPE_STATE (current_subr);
592 int len = 1 + TYPE_IS_WIDE (type);
595 if (TREE_VEC_ELT (vec, index) == TYPE_UNUSED)
596 TREE_VEC_ELT (vec, index) = TREE_VEC_ELT (subr_vec, index);
611 type = int_type_node; goto binop;
616 type = int_type_node; goto unop;
625 type = long_type_node; goto binop;
627 type = long_type_node; goto unop;
628 case OPCODE_fadd: case OPCODE_fsub:
629 case OPCODE_fmul: case OPCODE_fdiv: case OPCODE_frem:
630 type = float_type_node; goto binop;
632 type = float_type_node; goto unop;
633 case OPCODE_dadd: case OPCODE_dsub:
634 case OPCODE_dmul: case OPCODE_ddiv: case OPCODE_drem:
635 type = double_type_node; goto binop;
637 type = double_type_node; goto unop;
650 pop_type (int_type_node);
651 pop_type (long_type_node);
652 push_type (long_type_node);
655 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
658 if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl))
659 VERIFICATION_ERROR ("invalid local variable index in iinc");
660 tmp = type_map[index];
661 if (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
662 VERIFICATION_ERROR ("invalid local variable type in iinc");
665 pop_type (int_type_node); push_type (long_type_node); break;
667 pop_type (int_type_node); push_type (float_type_node); break;
669 pop_type (int_type_node); push_type (double_type_node); break;
671 pop_type (long_type_node); push_type (int_type_node); break;
673 pop_type (long_type_node); push_type (float_type_node); break;
675 pop_type (long_type_node); push_type (double_type_node); break;
677 pop_type (float_type_node); push_type (int_type_node); break;
679 pop_type (float_type_node); push_type (long_type_node); break;
681 pop_type (float_type_node); push_type (double_type_node); break;
683 pop_type (double_type_node); push_type (int_type_node); break;
685 pop_type (double_type_node); push_type (long_type_node); break;
687 pop_type (double_type_node); push_type (float_type_node); break;
689 type = long_type_node; goto compare;
692 type = float_type_node; goto compare;
695 type = double_type_node; goto compare;
697 pop_type (type); pop_type (type);
698 push_type (int_type_node); break;
705 pop_type (int_type_node); goto cond;
707 case OPCODE_ifnonnull:
708 pop_type (ptr_type_node ); goto cond;
709 case OPCODE_if_icmpeq:
710 case OPCODE_if_icmpne:
711 case OPCODE_if_icmplt:
712 case OPCODE_if_icmpge:
713 case OPCODE_if_icmpgt:
714 case OPCODE_if_icmple:
715 pop_type (int_type_node); pop_type (int_type_node); goto cond;
716 case OPCODE_if_acmpeq:
717 case OPCODE_if_acmpne:
718 pop_type (object_ptr_type_node); pop_type (object_ptr_type_node);
721 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
724 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
728 switch (byte_ops[PC])
730 case OPCODE_iload: case OPCODE_lload:
731 case OPCODE_fload: case OPCODE_dload: case OPCODE_aload:
732 case OPCODE_istore: case OPCODE_lstore:
733 case OPCODE_fstore: case OPCODE_dstore: case OPCODE_astore:
739 VERIFICATION_ERROR ("invalid use of wide instruction");
742 case OPCODE_return: type = void_type_node; goto ret;
744 if ((TREE_CODE (return_type) == BOOLEAN_TYPE
745 || TREE_CODE (return_type) == CHAR_TYPE
746 || TREE_CODE (return_type) == INTEGER_TYPE)
747 && TYPE_PRECISION (return_type) <= 32)
752 case OPCODE_lreturn: type = long_type_node; goto ret;
753 case OPCODE_freturn: type = float_type_node; goto ret;
754 case OPCODE_dreturn: type = double_type_node; goto ret;
756 if (TREE_CODE (return_type) == POINTER_TYPE)
762 if (type != return_type)
763 VERIFICATION_ERROR ("incorrect ?return opcode");
764 if (type != void_type_node)
766 if (pop_type_0 (type) == NULL_TREE)
767 VERIFICATION_ERROR ("return value has wrong type");
771 case OPCODE_getstatic: is_putting = 0; is_static = 1; goto field;
772 case OPCODE_putstatic: is_putting = 1; is_static = 1; goto field;
773 case OPCODE_getfield: is_putting = 0; is_static = 0; goto field;
774 case OPCODE_putfield: is_putting = 1; is_static = 0; goto field;
777 int index = IMMEDIATE_u2;
778 tree field_signature = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool, index);
779 tree field_type = get_type_from_signature (field_signature);
781 pop_type (field_type);
784 int clindex = COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool,
786 tree self_type = get_class_constant (current_jcf, clindex);
787 /* Defer actual checking until next pass. */
788 if (pop_type_0 (self_type) == NULL_TREE)
789 VERIFICATION_ERROR ("incorrect type for field reference");
792 push_type (field_type);
796 push_type (get_class_constant (jcf, IMMEDIATE_u2));
798 case OPCODE_dup: type_stack_dup (1, 0); break;
799 case OPCODE_dup_x1: type_stack_dup (1, 1); break;
800 case OPCODE_dup_x2: type_stack_dup (1, 2); break;
801 case OPCODE_dup2: type_stack_dup (2, 0); break;
802 case OPCODE_dup2_x1: type_stack_dup (2, 1); break;
803 case OPCODE_dup2_x2: type_stack_dup (2, 2); break;
804 case OPCODE_pop: index = 1; goto pop;
805 case OPCODE_pop2: index = 2; goto pop;
807 if (stack_pointer < index)
808 VERIFICATION_ERROR ("stack underflow");
809 stack_pointer -= index;
812 if (stack_pointer < 2)
813 VERIFICATION_ERROR ("stack underflow (in swap)");
816 tree type1 = stack_type_map[stack_pointer - 1];
817 tree type2 = stack_type_map[stack_pointer - 2];
818 if (type1 == void_type_node || type2 == void_type_node)
819 VERIFICATION_ERROR ("verifier (swap): double or long value");
820 stack_type_map[stack_pointer - 2] = type1;
821 stack_type_map[stack_pointer - 1] = type2;
824 case OPCODE_ldc: index = IMMEDIATE_u1; goto ldc;
827 index = IMMEDIATE_u2; goto ldc;
829 if (index <= 0 || index >= JPOOL_SIZE(current_jcf))
830 VERIFICATION_ERROR ("bad constant pool index in ldc");
832 switch (JPOOL_TAG (current_jcf, index) & ~CONSTANT_ResolvedFlag)
834 case CONSTANT_Integer: type = int_type_node; goto check_ldc;
835 case CONSTANT_Float: type = float_type_node; goto check_ldc;
836 case CONSTANT_String: type = string_type_node; goto check_ldc;
837 case CONSTANT_Long: type = long_type_node; goto check_ldc;
838 case CONSTANT_Double: type = double_type_node; goto check_ldc;
840 if (TYPE_IS_WIDE (type) == (op_code == OPCODE_ldc2_w))
842 /* ... else fall through ... */
844 VERIFICATION_ERROR ("bad constant pool tag in ldc");
846 if (type == int_type_node)
848 i = TREE_INT_CST_LOW (get_constant (current_jcf, index));
854 case OPCODE_invokevirtual:
855 case OPCODE_invokespecial:
856 case OPCODE_invokestatic:
857 case OPCODE_invokeinterface:
859 int index = IMMEDIATE_u2;
860 tree sig = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool, index);
861 tree self_type = get_class_constant
862 (current_jcf, COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool,
864 tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool, index);
866 method_type = parse_signature_string (IDENTIFIER_POINTER (sig),
867 IDENTIFIER_LENGTH (sig));
868 if (TREE_CODE (method_type) != FUNCTION_TYPE)
869 VERIFICATION_ERROR ("bad method signature");
870 pop_argument_types (TYPE_ARG_TYPES (method_type));
872 /* Can't invoke <clinit> */
873 if (method_name == clinit_identifier_node)
874 VERIFICATION_ERROR ("invoke opcode can't invoke <clinit>");
875 /* Apart invokespecial, can't invoke <init> */
876 if (op_code != OPCODE_invokespecial
877 && method_name == init_identifier_node)
878 VERIFICATION_ERROR ("invoke opcode can't invoke <init>");
880 if (op_code != OPCODE_invokestatic)
881 pop_type (self_type);
885 case OPCODE_invokeinterface:
887 int nargs = IMMEDIATE_u1;
888 int notZero = IMMEDIATE_u1;
890 if (!nargs || notZero)
892 ("invalid argument number in invokeinterface");
897 if (TREE_TYPE (method_type) != void_type_node)
898 push_type (TREE_TYPE (method_type));
902 case OPCODE_arraylength:
903 /* Type checking actually made during code generation */
904 pop_type( ptr_type_node );
905 push_type( int_type_node );
908 /* Q&D verification *or* more checking done during code generation
909 for byte/boolean/char/short, the value popped is a int coerced
910 into the right type before being stored. */
911 case OPCODE_iastore: type = int_type_node; goto astore;
912 case OPCODE_lastore: type = long_type_node; goto astore;
913 case OPCODE_fastore: type = float_type_node; goto astore;
914 case OPCODE_dastore: type = double_type_node; goto astore;
915 case OPCODE_aastore: type = ptr_type_node; goto astore;
916 case OPCODE_bastore: type = int_type_node; goto astore;
917 case OPCODE_castore: type = int_type_node; goto astore;
918 case OPCODE_sastore: type = int_type_node; goto astore;
920 /* FIXME - need better verification here */
921 pop_type (type); /* new value */
922 pop_type (int_type_node); /* index */
923 pop_type (ptr_type_node); /* array */
926 /* Q&D verification *or* more checking done during code generation
927 for byte/boolean/char/short, the value pushed is a int. */
928 case OPCODE_iaload: type = int_type_node; goto aload;
929 case OPCODE_laload: type = long_type_node; goto aload;
930 case OPCODE_faload: type = float_type_node; goto aload;
931 case OPCODE_daload: type = double_type_node; goto aload;
932 case OPCODE_aaload: type = ptr_type_node; goto aload;
933 case OPCODE_baload: type = promote_type (byte_type_node); goto aload;
934 case OPCODE_caload: type = promote_type (char_type_node); goto aload;
935 case OPCODE_saload: type = promote_type (short_type_node); goto aload;
937 pop_type (int_type_node);
938 tmp = pop_type (ptr_type_node);
939 if (is_array_type_p (tmp))
940 type = TYPE_ARRAY_ELEMENT (TREE_TYPE (tmp));
941 else if (tmp != TYPE_NULL)
942 VERIFICATION_ERROR ("array load from non-array type");
946 case OPCODE_anewarray:
947 type = get_class_constant (current_jcf, IMMEDIATE_u2);
948 type = promote_type (type);
951 case OPCODE_newarray:
952 index = IMMEDIATE_u1;
953 type = decode_newarray_type (index);
954 if (type == NULL_TREE)
955 VERIFICATION_ERROR ("invalid type code in newarray opcode");
959 if (int_value >= 0 && prevpc >= 0)
961 /* If previous instruction pushed int constant,
962 we want to use it. */
963 switch (byte_ops[prevpc])
965 case OPCODE_iconst_0: case OPCODE_iconst_1:
966 case OPCODE_iconst_2: case OPCODE_iconst_3:
967 case OPCODE_iconst_4: case OPCODE_iconst_5:
968 case OPCODE_bipush: case OPCODE_sipush:
969 case OPCODE_ldc: case OPCODE_ldc_w:
977 type = build_java_array_type (type, int_value);
978 pop_type (int_type_node);
982 case OPCODE_multianewarray:
985 index = IMMEDIATE_u2;
989 VERIFICATION_ERROR ("number of dimension lower that 1 in multianewarray" );
991 for( i = 0; i < ndim; i++ )
992 pop_type (int_type_node);
993 push_type (get_class_constant (current_jcf, index));
997 case OPCODE_aconst_null:
998 push_type (ptr_type_node);
1002 pop_type (throwable_type_node);
1006 case OPCODE_checkcast:
1007 pop_type (ptr_type_node);
1008 type = get_class_constant (current_jcf, IMMEDIATE_u2);
1011 case OPCODE_instanceof:
1012 pop_type (ptr_type_node);
1013 get_class_constant (current_jcf, IMMEDIATE_u2);
1014 push_type (int_type_node);
1017 case OPCODE_tableswitch:
1021 pop_type (int_type_node);
1025 VERIFICATION_ERROR ("bad alignment in tableswitch pad");
1027 PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
1029 high = IMMEDIATE_s4;
1032 VERIFICATION_ERROR ("unsorted low/high value in tableswitch");
1034 while (low++ <= high)
1035 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1040 case OPCODE_lookupswitch:
1042 jint npairs, last, not_registered = 1;
1044 pop_type (int_type_node);
1048 VERIFICATION_ERROR ("bad alignment in lookupswitch pad");
1051 PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
1052 npairs = IMMEDIATE_s4;
1055 VERIFICATION_ERROR ("invalid number of targets in lookupswitch");
1059 int match = IMMEDIATE_s4;
1062 else if (last >= match)
1063 VERIFICATION_ERROR ("unsorted match value in lookupswitch");
1066 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1072 case OPCODE_monitorenter:
1074 case OPCODE_monitorexit:
1075 pop_type (ptr_type_node);
1079 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1085 tree target = lookup_label (oldpc + IMMEDIATE_s2);
1086 tree return_label = lookup_label (PC);
1087 push_type (return_address_type_node);
1088 if (! LABEL_VERIFIED (target))
1090 /* first time seen */
1091 tree return_type_map;
1092 int nlocals = DECL_MAX_LOCALS (current_function_decl);
1093 index = nlocals + DECL_MAX_STACK (current_function_decl);
1094 return_type_map = make_tree_vec (index);
1095 while (index > nlocals)
1096 TREE_VEC_ELT (return_type_map, --index) = TYPE_UNKNOWN;
1098 TREE_VEC_ELT (return_type_map, --index) = TYPE_UNUSED;
1099 LABEL_RETURN_LABEL (target)
1100 = build_decl (LABEL_DECL, NULL_TREE, TREE_TYPE (target));
1101 LABEL_PC (LABEL_RETURN_LABEL (target)) = -1;
1102 LABEL_RETURN_TYPE_STATE (target) = return_type_map;
1103 LABEL_IS_SUBR_START (target) = 1;
1104 LABEL_IN_SUBR (target) = 1;
1105 LABEL_SUBR_START (target) = target;
1106 LABEL_SUBR_CONTEXT (target) = current_subr;
1108 else if (! LABEL_IS_SUBR_START (target)
1109 || LABEL_SUBR_CONTEXT (target) != current_subr)
1110 VERIFICATION_ERROR ("label part of different subroutines");
1112 i = merge_type_state (target);
1116 VERIFICATION_ERROR ("types could not be merged at jsr");
1117 push_pending_label (target);
1119 current_subr = target;
1121 /* Chain return_pc onto LABEL_RETURN_LABELS (target) if needed. */
1122 if (! value_member (return_label, LABEL_RETURN_LABELS (target)))
1124 LABEL_RETURN_LABELS (target)
1125 = tree_cons (NULL_TREE, return_label,
1126 LABEL_RETURN_LABELS (target));
1129 if (LABEL_VERIFIED (target))
1131 tree return_map = LABEL_RETURN_TYPE_STATE (target);
1132 int len = TREE_VEC_LENGTH (return_map);
1133 stack_pointer = len - DECL_MAX_LOCALS (current_function_decl);
1136 if (TREE_VEC_ELT (return_map, len) != TYPE_UNUSED)
1137 type_map[len] = TREE_VEC_ELT (return_map, len);
1139 current_subr = LABEL_SUBR_CONTEXT (target);
1140 PUSH_PENDING (return_label);
1147 if (current_subr == NULL)
1148 VERIFICATION_ERROR ("ret instruction not in a jsr subroutine");
1151 tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
1152 tree caller = LABEL_SUBR_CONTEXT (current_subr);
1153 int size = DECL_MAX_LOCALS(current_function_decl)+stack_pointer;
1154 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
1157 if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl)
1158 || type_map[index] != TYPE_RETURN_ADDR)
1159 VERIFICATION_ERROR ("invalid ret index");
1161 /* The next chunk of code is similar to an inlined version of
1162 * merge_type_state (LABEL_RETURN_LABEL (current_subr)).
1163 * The main differences are that LABEL_RETURN_LABEL is
1164 * pre-allocated by the jsr (but we don't know the size then);
1165 * and that we have to handle TYPE_UNUSED. */
1167 if (! RETURN_MAP_ADJUSTED (ret_map))
1168 { /* First return from this subroutine - fix stack pointer. */
1169 TREE_VEC_LENGTH (ret_map) = size;
1170 for (index = size; --index >= 0; )
1172 if (TREE_VEC_ELT (ret_map, index) != TYPE_UNUSED)
1173 TREE_VEC_ELT (ret_map, index) = type_map[index];
1175 RETURN_MAP_ADJUSTED (ret_map) = 1;
1179 if (TREE_VEC_LENGTH (ret_map) != size)
1180 VERIFICATION_ERROR ("inconsistent stack size on ret");
1181 for (index = 0; index < size; index++)
1183 tree type = TREE_VEC_ELT (ret_map, index);
1184 if (type != TYPE_UNUSED)
1186 type = merge_types (type, type_map [index]);
1187 TREE_VEC_ELT (ret_map, index) = type;
1188 if (type == TYPE_UNKNOWN)
1190 if (index >= size - stack_pointer)
1192 ("inconsistent types on ret from jsr");
1194 else if (TYPE_IS_WIDE (type))
1200 /* Check if there are any more pending blocks in this subroutine.
1201 Because we push pending blocks in a last-in-first-out order,
1202 and because we don't push anything from our caller until we
1203 are done with this subroutine or anything nested in it,
1204 then we are done if the top of the pending_blocks stack is
1205 not in a subroutine, or it is in our caller. */
1206 if (pending_blocks == NULL_TREE
1207 || ! LABEL_IN_SUBR (pending_blocks)
1208 || LABEL_SUBR_START (pending_blocks) == caller)
1210 /* Since we are done with this subroutine (i.e. this is the
1211 last ret from it), set up the (so far known) return
1212 address as pending - with the merged type state. */
1213 tmp = LABEL_RETURN_LABELS (current_subr);
1214 current_subr = caller;
1215 for ( ; tmp != NULL_TREE; tmp = TREE_CHAIN (tmp))
1217 tree return_label = TREE_VALUE (tmp);
1218 tree return_state = LABEL_TYPE_STATE (return_label);
1219 if (return_state == NULL_TREE)
1221 /* This means we had not verified the
1222 subroutine earlier, so this is the first jsr to
1223 call it. In this case, the type_map of the return
1224 address is just the current type_map - and that
1225 is handled by the following PUSH_PENDING. */
1229 /* In this case we have to do a merge. But first
1230 restore the type_map for unused slots to those
1231 that were in effect at the jsr. */
1232 for (index = size; --index >= 0; )
1234 type_map[index] = TREE_VEC_ELT (ret_map, index);
1235 if (type_map[index] == TYPE_UNUSED)
1237 = TREE_VEC_ELT (return_state, index);
1240 PUSH_PENDING (return_label);
1248 error ("unknown opcode %d@pc=%d during verification", op_code, PC-1);
1254 /* The following test is true if we have entered or exited an exception
1255 handler range *or* we have done a store to a local variable.
1256 In either case we need to consider any exception handlers that
1257 might "follow" this instruction. */
1259 if (eh_ranges != prev_eh_ranges)
1261 int save_stack_pointer = stack_pointer;
1262 int index = DECL_MAX_LOCALS (current_function_decl);
1263 tree save_type = type_map[index];
1264 tree save_current_subr = current_subr;
1265 struct eh_range *ranges = find_handler (oldpc);
1267 for (; ranges != NULL_EH_RANGE; ranges = ranges->outer)
1269 tree chain = ranges->handlers;
1271 /* We need to determine if the handler is part of current_subr.
1272 There are two cases: (1) The exception catch range
1273 is entirely within current_subr. In that case the handler
1274 is also part of current_subr.
1275 (2) Some of the catch range is not in current_subr.
1276 In that case, the handler is *not* part of current_subr.
1278 Figuring out which is the case is not necessarily obvious,
1279 in the presence of clever code generators (and obfuscators).
1280 We make a simplifying assumption that in case (2) we
1281 have that the current_subr is entirely within the catch range.
1282 In that case we can assume that if a caller (the jsr) of
1283 a subroutine is within the catch range, then the handler is
1284 *not* part of the subroutine, and vice versa. */
1286 current_subr = save_current_subr;
1287 for ( ; current_subr != NULL_TREE;
1288 current_subr = LABEL_SUBR_CONTEXT (current_subr))
1290 tree return_labels = LABEL_RETURN_LABELS (current_subr);
1291 /* There could be multiple return_labels, but
1292 we only need to check one. */
1293 int return_pc = LABEL_PC (TREE_VALUE (return_labels));
1294 if (return_pc <= ranges->start_pc
1295 || return_pc > ranges->end_pc)
1299 for ( ; chain != NULL_TREE; chain = TREE_CHAIN (chain))
1301 tree handler = TREE_VALUE (chain);
1302 tree type = TREE_PURPOSE (chain);
1303 if (type == NULL_TREE) /* a finally handler */
1304 type = throwable_type_node;
1305 type_map[index] = promote_type (type);
1307 PUSH_PENDING (handler);
1310 stack_pointer = save_stack_pointer;
1311 current_subr = save_current_subr;
1312 type_map[index] = save_type;
1313 prev_eh_ranges = eh_ranges;
1318 message = "program counter out of range";
1321 error ("verification error at PC=%d", oldpc);