1 /* Handle verification of bytecoded methods for the GNU compiler for
3 Copyright (C) 1997, 1998, 1999 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA.
22 Java and all Java-based marks are trademarks or registered trademarks
23 of Sun Microsystems, Inc. in the United States and other countries.
24 The Free Software Foundation is independent of Sun Microsystems, Inc. */
29 #include "java-tree.h"
31 #include "java-opcodes.h"
33 #include "java-except.h"
36 static void push_pending_label PROTO ((tree));
37 static tree merge_types PROTO ((tree, tree));
39 extern int stack_pointer;
41 /* During verification, start of the current subroutine (jsr target). */
44 /* A list of pending blocks, chained using LABEL_PENDING_CHAIN.
45 A pending block is one that has LABEL_CHANGED set, which means
46 it requires (re-) verification. */
49 /* Append TARGET_LABEL to the pending_block stack unless already in it. */
52 push_pending_label (target_label)
/* Push TARGET_LABEL onto the pending_blocks stack unless it is already
   there.  LABEL_CHANGED doubles as the "already pending" flag; the stack
   is threaded through LABEL_PENDING_CHAIN.
   NOTE(review): this extraction is missing lines (the parameter
   declaration, braces, and any trailing statements are absent), so the
   comments describe only the visible logic.  */
55 if (! LABEL_CHANGED (target_label))
/* Not yet pending: link onto the stack and mark for (re-)verification.  */
57 LABEL_PENDING_CHAIN (target_label) = pending_blocks;
58 pending_blocks = target_label;
59 LABEL_CHANGED (target_label) = 1;
63 /* Note that TARGET_LABEL is a possible successor instruction.
64 Merge the type state etc.
65 Return NULL on success, or an error message on failure. */
68 check_pending_block (target_label)
/* Note that TARGET_LABEL is a possible successor of the current
   instruction: merge the current type state into it and, if anything
   changed, queue it for (re-)verification.  Returns NULL on success or
   a static error-message string on failure.
   NOTE(review): lossy extraction -- the parameter declaration, braces
   and some statements are missing from this view; comments below
   describe only the visible logic.  */
71 int changed = merge_type_state (target_label);
/* merge_type_state signalled -1: the two type states are incompatible.  */
76 return "types could not be merged";
77 push_pending_label (target_label);
/* Not currently inside a jsr subroutine: control must not flow into one.  */
80 if (current_subr == NULL)
82 if (LABEL_IN_SUBR (target_label))
83 return "might transfer control into subroutine";
/* Inside a subroutine: the target must belong to this same subroutine,
   or, if it has not been verified yet, it is claimed for the current one.  */
87 if (LABEL_IN_SUBR (target_label))
89 if (LABEL_SUBR_START (target_label) != current_subr)
90 return "transfer out of subroutine";
92 else if (! LABEL_VERIFIED (target_label))
94 LABEL_IN_SUBR (target_label) = 1;
95 LABEL_SUBR_START (target_label) = current_subr;
98 return "transfer out of subroutine";
103 /* Return the "merged" types of TYPE1 and TYPE2.
104 If either is primitive, the other must match (after promotion to int).
105 For reference types, return the common super-class.
106 Return TYPE_UNKNOWN if the types cannot be merged. */
109 merge_types (type1, type2)
/* Compute the verifier "merge" of TYPE1 and TYPE2: for two reference
   types the common supertype, for small integral types int, and
   TYPE_UNKNOWN when no merge exists.
   NOTE(review): lossy extraction -- parameter declarations, braces,
   several returns and intermediate statements are missing from this
   view; comments describe only the visible logic.  */
114 if (type1 == TYPE_UNKNOWN || type2 == TYPE_UNKNOWN
115 || type1 == TYPE_RETURN_ADDR || type2 == TYPE_RETURN_ADDR)
117 if (TREE_CODE (type1) == POINTER_TYPE && TREE_CODE (type2) == POINTER_TYPE)
121 /* ptr_type_node is only used for a null reference,
122 which is compatible with any reference type. */
123 if (type1 == ptr_type_node || type2 == object_ptr_type_node)
125 if (type2 == ptr_type_node || type1 == object_ptr_type_node)
128 tt1 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type1));
129 tt2 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type2));
/* Array types: merge element types recursively when both are arrays;
   an array merged with a non-array reference falls back to Object.  */
131 if (TYPE_ARRAY_P (tt1) || TYPE_ARRAY_P (tt2))
133 if (TYPE_ARRAY_P (tt1) == TYPE_ARRAY_P (tt2))
135 tree el_type1 = TYPE_ARRAY_ELEMENT (tt1);
136 tree el_type2 = TYPE_ARRAY_ELEMENT (tt2);
137 tree el_type = NULL_TREE;
138 if (el_type1 == el_type2)
140 else if (TREE_CODE (el_type1) == POINTER_TYPE
141 && TREE_CODE (el_type2) == POINTER_TYPE)
142 el_type = merge_types (el_type1, el_type2);
143 if (el_type != NULL_TREE)
145 HOST_WIDE_INT len1 = java_array_type_length (tt1);
146 HOST_WIDE_INT len2 = java_array_type_length (tt2);
149 else if (el_type1 == el_type2)
151 return promote_type (build_java_array_type (el_type, len1));
154 return object_ptr_type_node;
/* Interface handling: any two interfaces merge to Object (matching
   Sun's verifier); an interface and a class merge via widening checks.  */
157 if (CLASS_INTERFACE (TYPE_NAME (tt1)))
159 if (CLASS_INTERFACE (TYPE_NAME (tt2)))
161 /* This is a kludge, but matches what Sun's verifier does.
162 It can be tricked, but is safe as long as type errors
163 (i.e. interface method calls) are caught at run-time. */
164 return object_ptr_type_node;
168 if (can_widen_reference_to (tt2, tt1))
174 else if (CLASS_INTERFACE (TYPE_NAME (tt2)))
176 if (can_widen_reference_to (tt1, tt2))
/* Ordinary classes: walk the deeper class up its superclass chain
   until both are at the same depth, then climb in lockstep until they
   meet at the common superclass.  */
185 depth1 = class_depth (type1);
186 depth2 = class_depth (type2);
187 for ( ; depth1 > depth2; depth1--)
188 type1 = TYPE_BINFO_BASETYPE (type1, 0);
189 for ( ; depth2 > depth1; depth2--)
190 type2 = TYPE_BINFO_BASETYPE (type2, 0);
191 while (type1 != type2)
193 type1 = TYPE_BINFO_BASETYPE (type1, 0);
194 type2 = TYPE_BINFO_BASETYPE (type2, 0);
196 return promote_type (type1);
/* Small integral types (precision <= 32) all merge to int, matching
   JVM stack-word promotion.  */
198 if (INTEGRAL_TYPE_P (type1) && INTEGRAL_TYPE_P (type2)
199 && TYPE_PRECISION (type1) <= 32 && TYPE_PRECISION (type2) <= 32)
200 return int_type_node;
204 /* Merge the current type state with that at LABEL.
205 Return -1 if the states are incompatible (i.e. on error),
206 0 if there was no change, and 1 if there was a change. */
209 merge_type_state (label)
/* Merge the current type state (type_map: locals followed by stack)
   into the state stored at LABEL.  Returns -1 on incompatibility,
   0 for no change, 1 if something changed (per the comment above).
   NOTE(review): lossy extraction -- the parameter declaration, braces,
   several declarations (i, return_map, changed) and the final
   bookkeeping/return statements are missing from this view.  */
212 int nlocals = DECL_MAX_LOCALS(current_function_decl);
213 int cur_length = stack_pointer + nlocals;
214 tree vec = LABEL_TYPE_STATE (label);
/* First arrival at LABEL: snapshot the current type map wholesale.  */
216 if (vec == NULL_TREE)
218 vec = make_tree_vec (cur_length);
219 LABEL_TYPE_STATE (label) = vec;
220 while (--cur_length >= 0)
221 TREE_VEC_ELT (vec, cur_length) = type_map [cur_length];
/* A verified subroutine start (other than the current subroutine)
   carries a return-type-state map recording which local slots the
   subroutine leaves untouched (TYPE_UNUSED).  */
228 if (LABEL_IS_SUBR_START (label) && LABEL_VERIFIED (label)
229 && current_subr != label)
230 return_map = LABEL_RETURN_TYPE_STATE (label);
232 return_map = NULL_TREE;
/* Different stack depths can never be merged.  */
233 if (TREE_VEC_LENGTH (vec) != cur_length)
237 for (i = 0; i < cur_length; i++)
239 tree old_type = TREE_VEC_ELT (vec, i);
240 tree new_type = merge_types (old_type, type_map [i]);
241 if (TREE_VEC_ELT (vec, i) != new_type)
243 /* If there has been a change, note that since we must re-verify.
244 However, if the label is the start of a subroutine,
245 we don't care about local variables that are neither
246 set nor used in the sub-routine. */
247 if (return_map == NULL_TREE || i >= nlocals
248 || TREE_VEC_ELT (return_map, i) != TYPE_UNUSED
249 || (TYPE_IS_WIDE (new_type)
250 && TREE_VEC_ELT (return_map, i+1) != TYPE_UNUSED))
253 TREE_VEC_ELT (vec, i) = new_type;
/* TYPE_UNKNOWN from merge_types means the slot types are incompatible.  */
254 if (new_type == TYPE_UNKNOWN)
259 else if (TYPE_IS_WIDE (new_type))
266 /* Handle dup-like operations. */
269 type_stack_dup (size, offset)
/* Implement the dup/dup_x/dup2/dup2_x family on the verifier's type
   stack: pop SIZE + OFFSET stack slots, then push the top SIZE slots
   (the duplicate) followed by all SIZE + OFFSET original slots.
   NOTE(review): lossy extraction -- parameter declarations, braces and
   the declaration of type[]/index are missing from this view.  */
274 if (size + offset > stack_pointer)
275 error ("stack underflow - dup* operation");
/* Pop size+offset slots into type[], top of stack first.  A
   void_type_node slot marks the second half of a 64-bit (wide) value;
   in that case take the real type from the slot below, and refuse to
   split a long/double across the dup boundary.  */
276 for (index = 0; index < size + offset; index++)
278 type[index] = stack_type_map[stack_pointer - 1];
279 if (type[index] == void_type_node)
282 type[index] = stack_type_map[stack_pointer - 2];
283 if (! TYPE_IS_WIDE (type[index]))
284 fatal ("internal error - dup operation");
285 if (index == size || index == size + offset)
286 fatal ("dup operation splits 64-bit number");
288 pop_type (type[index]);
/* Push the duplicated top SIZE slots ...  */
290 for (index = size; --index >= 0; )
292 if (type[index] != void_type_node)
293 push_type (type[index]);
/* ... then push back everything that was popped.  */
296 for (index = size + offset; --index >= 0; )
298 if (type[index] != void_type_node)
299 push_type (type[index]);
303 /* This causes the next iteration to ignore the next instruction
304 and look for some other unhandled instruction. */
305 #define INVALIDATE_PC (prevpc = -1, oldpc = PC, PC = INVALID_PC)
306 #define INVALID_PC (-1)
/* Record MESSAGE and jump to the verifier loop's common error exit.  */
308 #define VERIFICATION_ERROR(MESSAGE) \
309 do { message = MESSAGE; goto verify_error; } while (0)
/* Merge current state into LABEL; on failure propagate its message.  */
311 #define PUSH_PENDING(LABEL) \
312 do { if ((message = check_pending_block (LABEL)) != NULL) \
313 goto verify_error; } while (0)
/* NOTE(review): two definitions of CHECK_PC_IN_RANGE appear below;
   presumably the original wrapped them in #if __GNUC__ / #else (GCC
   statement-expression form vs. portable conditional form) and those
   preprocessor lines were lost in extraction -- confirm against the
   original source before editing.  */
316 #define CHECK_PC_IN_RANGE(PC) ({if (PC < 0 || PC > length) goto bad_pc; 1;})
318 #define CHECK_PC_IN_RANGE(PC) (PC < 0 || PC > length ? \
319 (fatal("Bad byte codes.\n"), 0) : 1)
/* Shorthand for the bytecode array inside the verifier loop.  */
322 #define BCODE byte_ops
324 /* Verify the bytecodes of the current method.
325 Return 1 on success, 0 on failure. */
327 verify_jvm_instructions (jcf, byte_ops, length)
329 unsigned char* byte_ops;
336 int oldpc; /* PC of start of instruction. */
337 int prevpc; /* If >= 0, PC of previous instruction. */
340 register unsigned char *p;
341 struct eh_range *prev_eh_ranges = NULL_EH_RANGE;
342 struct eh_range *eh_ranges;
346 pending_blocks = NULL_TREE;
348 /* Handle the exception table. */
349 method_init_exceptions ();
350 JCF_SEEK (jcf, DECL_CODE_OFFSET (current_function_decl) + length);
351 i = JCF_readu2 (jcf);
353 /* We read the exception backwards. */
354 p = jcf->read_ptr + 8 * i;
357 int start_pc = GET_u2 (p-8);
358 int end_pc = GET_u2 (p-6);
359 int handler_pc = GET_u2 (p-4);
360 int catch_type = GET_u2 (p-2);
363 if (start_pc < 0 || start_pc >= length
364 || end_pc < 0 || end_pc > length || start_pc >= end_pc
365 || handler_pc < 0 || handler_pc >= length
366 || (handler_pc >= start_pc && handler_pc < end_pc)
367 || ! (instruction_bits [start_pc] & BCODE_INSTRUCTION_START)
368 || ! (instruction_bits [end_pc] & BCODE_INSTRUCTION_START)
369 || ! (instruction_bits [handler_pc] & BCODE_INSTRUCTION_START))
371 error ("bad pc in exception_table");
375 if (! add_handler (start_pc, end_pc,
376 lookup_label (handler_pc),
377 catch_type == 0 ? NULL_TREE
378 : get_class_constant (jcf, catch_type)))
380 error ("overlapping exception ranges are not supported");
384 instruction_bits [handler_pc] |= BCODE_EXCEPTION_TARGET;
391 if (((PC != INVALID_PC
392 && instruction_bits [PC] & BCODE_TARGET) != 0)
395 PUSH_PENDING (lookup_label (PC));
398 if (PC == INVALID_PC)
400 label = pending_blocks;
401 if (label == NULL_TREE)
402 break; /* We're done! */
403 pending_blocks = LABEL_PENDING_CHAIN (label);
404 LABEL_CHANGED (label) = 0;
406 if (LABEL_IN_SUBR (label))
407 current_subr = LABEL_SUBR_START (label);
409 current_subr = NULL_TREE;
411 /* Restore type_map and stack_pointer from
412 LABEL_TYPE_STATE (label), and continue
413 compiling from there. */
414 load_type_state (label);
415 PC = LABEL_PC (label);
417 else if (PC >= length)
418 VERIFICATION_ERROR ("falling through end of method");
422 if (!(instruction_bits [PC] & BCODE_INSTRUCTION_START) && ! wide)
423 VERIFICATION_ERROR ("PC not at instruction start");
425 instruction_bits[PC] |= BCODE_VERIFIED;
427 eh_ranges = find_handler (oldpc);
429 op_code = byte_ops[PC++];
432 int is_static, is_putting;
435 case OPCODE_iconst_m1:
436 case OPCODE_iconst_0: case OPCODE_iconst_1: case OPCODE_iconst_2:
437 case OPCODE_iconst_3: case OPCODE_iconst_4: case OPCODE_iconst_5:
438 i = op_code - OPCODE_iconst_0;
441 if (byte_ops[PC] == OPCODE_newarray
442 || byte_ops[PC] == OPCODE_newarray)
444 push_type (int_type_node); break;
445 case OPCODE_lconst_0: case OPCODE_lconst_1:
446 push_type (long_type_node); break;
447 case OPCODE_fconst_0: case OPCODE_fconst_1: case OPCODE_fconst_2:
448 push_type (float_type_node); break;
449 case OPCODE_dconst_0: case OPCODE_dconst_1:
450 push_type (double_type_node); break;
457 case OPCODE_iload: type = int_type_node; goto general_load;
458 case OPCODE_lload: type = long_type_node; goto general_load;
459 case OPCODE_fload: type = float_type_node; goto general_load;
460 case OPCODE_dload: type = double_type_node; goto general_load;
461 case OPCODE_aload: type = ptr_type_node; goto general_load;
463 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
466 case OPCODE_iload_0: type = int_type_node; index = 0; goto load;
467 case OPCODE_iload_1: type = int_type_node; index = 1; goto load;
468 case OPCODE_iload_2: type = int_type_node; index = 2; goto load;
469 case OPCODE_iload_3: type = int_type_node; index = 3; goto load;
470 case OPCODE_lload_0: type = long_type_node; index = 0; goto load;
471 case OPCODE_lload_1: type = long_type_node; index = 1; goto load;
472 case OPCODE_lload_2: type = long_type_node; index = 2; goto load;
473 case OPCODE_lload_3: type = long_type_node; index = 3; goto load;
474 case OPCODE_fload_0: type = float_type_node; index = 0; goto load;
475 case OPCODE_fload_1: type = float_type_node; index = 1; goto load;
476 case OPCODE_fload_2: type = float_type_node; index = 2; goto load;
477 case OPCODE_fload_3: type = float_type_node; index = 3; goto load;
478 case OPCODE_dload_0: type = double_type_node; index = 0; goto load;
479 case OPCODE_dload_1: type = double_type_node; index = 1; goto load;
480 case OPCODE_dload_2: type = double_type_node; index = 2; goto load;
481 case OPCODE_dload_3: type = double_type_node; index = 3; goto load;
482 case OPCODE_aload_0: type = ptr_type_node; index = 0; goto load;
483 case OPCODE_aload_1: type = ptr_type_node; index = 1; goto load;
484 case OPCODE_aload_2: type = ptr_type_node; index = 2; goto load;
485 case OPCODE_aload_3: type = ptr_type_node; index = 3; goto load;
488 || (index + TYPE_IS_WIDE (type)
489 >= DECL_MAX_LOCALS (current_function_decl)))
490 VERIFICATION_ERROR ("invalid local variable index in load");
491 tmp = type_map[index];
492 if (tmp == TYPE_UNKNOWN || tmp == TYPE_SECOND
493 || (TYPE_IS_WIDE (type)
494 && type_map[index+1] != void_type_node)
495 || (type == ptr_type_node
496 ? TREE_CODE (tmp) != POINTER_TYPE
497 : type == int_type_node
498 ? (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
500 VERIFICATION_ERROR("invalid local variable type in load");
503 case OPCODE_istore: type = int_type_node; goto general_store;
504 case OPCODE_lstore: type = long_type_node; goto general_store;
505 case OPCODE_fstore: type = float_type_node; goto general_store;
506 case OPCODE_dstore: type = double_type_node; goto general_store;
507 case OPCODE_astore: type = ptr_type_node; goto general_store;
509 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
512 case OPCODE_istore_0: type = int_type_node; index = 0; goto store;
513 case OPCODE_istore_1: type = int_type_node; index = 1; goto store;
514 case OPCODE_istore_2: type = int_type_node; index = 2; goto store;
515 case OPCODE_istore_3: type = int_type_node; index = 3; goto store;
516 case OPCODE_lstore_0: type = long_type_node; index=0; goto store;
517 case OPCODE_lstore_1: type = long_type_node; index=1; goto store;
518 case OPCODE_lstore_2: type = long_type_node; index=2; goto store;
519 case OPCODE_lstore_3: type = long_type_node; index=3; goto store;
520 case OPCODE_fstore_0: type=float_type_node; index=0; goto store;
521 case OPCODE_fstore_1: type=float_type_node; index=1; goto store;
522 case OPCODE_fstore_2: type=float_type_node; index=2; goto store;
523 case OPCODE_fstore_3: type=float_type_node; index=3; goto store;
524 case OPCODE_dstore_0: type=double_type_node; index=0; goto store;
525 case OPCODE_dstore_1: type=double_type_node; index=1; goto store;
526 case OPCODE_dstore_2: type=double_type_node; index=2; goto store;
527 case OPCODE_dstore_3: type=double_type_node; index=3; goto store;
528 case OPCODE_astore_0: type = ptr_type_node; index = 0; goto store;
529 case OPCODE_astore_1: type = ptr_type_node; index = 1; goto store;
530 case OPCODE_astore_2: type = ptr_type_node; index = 2; goto store;
531 case OPCODE_astore_3: type = ptr_type_node; index = 3; goto store;
534 || (index + TYPE_IS_WIDE (type)
535 >= DECL_MAX_LOCALS (current_function_decl)))
537 VERIFICATION_ERROR ("invalid local variable index in store");
540 type = pop_type (type);
541 type_map[index] = type;
543 /* If local variable changed, we need to reconsider eh handlers. */
544 prev_eh_ranges = NULL_EH_RANGE;
546 /* Allocate decl and rtx for this variable now, so if we're not
547 optimizing, we get a temporary that survives the whole method. */
548 find_local_variable (index, type, oldpc);
550 if (TYPE_IS_WIDE (type))
551 type_map[index+1] = TYPE_SECOND;
552 /* ... fall through to note_used ... */
554 /* For store or load, note that local variable INDEX is used.
555 This is needed to verify try-finally sub-routines. */
558 tree vec = LABEL_RETURN_TYPE_STATE (current_subr);
559 tree subr_vec = LABEL_TYPE_STATE (current_subr);
560 int len = 1 + TYPE_IS_WIDE (type);
563 if (TREE_VEC_ELT (vec, index) == TYPE_UNUSED)
564 TREE_VEC_ELT (vec, index) = TREE_VEC_ELT (subr_vec, index);
579 type = int_type_node; goto binop;
584 type = int_type_node; goto unop;
593 type = long_type_node; goto binop;
595 type = long_type_node; goto unop;
596 case OPCODE_fadd: case OPCODE_fsub:
597 case OPCODE_fmul: case OPCODE_fdiv: case OPCODE_frem:
598 type = float_type_node; goto binop;
600 type = float_type_node; goto unop;
601 case OPCODE_dadd: case OPCODE_dsub:
602 case OPCODE_dmul: case OPCODE_ddiv: case OPCODE_drem:
603 type = double_type_node; goto binop;
605 type = double_type_node; goto unop;
618 pop_type (int_type_node);
619 pop_type (long_type_node);
620 push_type (long_type_node);
623 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
626 if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl))
627 VERIFICATION_ERROR ("invalid local variable index in iinc");
628 tmp = type_map[index];
629 if (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
630 VERIFICATION_ERROR ("invalid local variable type in iinc");
633 pop_type (int_type_node); push_type (long_type_node); break;
635 pop_type (int_type_node); push_type (float_type_node); break;
637 pop_type (int_type_node); push_type (double_type_node); break;
639 pop_type (long_type_node); push_type (int_type_node); break;
641 pop_type (long_type_node); push_type (float_type_node); break;
643 pop_type (long_type_node); push_type (double_type_node); break;
645 pop_type (float_type_node); push_type (int_type_node); break;
647 pop_type (float_type_node); push_type (long_type_node); break;
649 pop_type (float_type_node); push_type (double_type_node); break;
651 pop_type (double_type_node); push_type (int_type_node); break;
653 pop_type (double_type_node); push_type (long_type_node); break;
655 pop_type (double_type_node); push_type (float_type_node); break;
657 type = long_type_node; goto compare;
660 type = float_type_node; goto compare;
663 type = double_type_node; goto compare;
665 pop_type (type); pop_type (type);
666 push_type (int_type_node); break;
673 pop_type (int_type_node); goto cond;
675 case OPCODE_ifnonnull:
676 pop_type (ptr_type_node ); goto cond;
677 case OPCODE_if_icmpeq:
678 case OPCODE_if_icmpne:
679 case OPCODE_if_icmplt:
680 case OPCODE_if_icmpge:
681 case OPCODE_if_icmpgt:
682 case OPCODE_if_icmple:
683 pop_type (int_type_node); pop_type (int_type_node); goto cond;
684 case OPCODE_if_acmpeq:
685 case OPCODE_if_acmpne:
686 pop_type (object_ptr_type_node); pop_type (object_ptr_type_node);
689 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
692 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
696 switch (byte_ops[PC])
698 case OPCODE_iload: case OPCODE_lload:
699 case OPCODE_fload: case OPCODE_dload: case OPCODE_aload:
700 case OPCODE_istore: case OPCODE_lstore:
701 case OPCODE_fstore: case OPCODE_dstore: case OPCODE_astore:
707 VERIFICATION_ERROR ("invalid use of wide instruction");
710 case OPCODE_ireturn: type = int_type_node; goto ret;
711 case OPCODE_lreturn: type = long_type_node; goto ret;
712 case OPCODE_freturn: type = float_type_node; goto ret;
713 case OPCODE_dreturn: type = double_type_node; goto ret;
714 case OPCODE_areturn: type = ptr_type_node; goto ret;
717 /* ... fall through ... */
721 case OPCODE_getstatic: is_putting = 0; is_static = 1; goto field;
722 case OPCODE_putstatic: is_putting = 1; is_static = 1; goto field;
723 case OPCODE_getfield: is_putting = 0; is_static = 0; goto field;
724 case OPCODE_putfield: is_putting = 1; is_static = 0; goto field;
727 int index = IMMEDIATE_u2;
728 tree self_type = get_class_constant
729 (jcf, COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool, index));
730 tree field_signature = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool, index);
731 tree field_type = get_type_from_signature (field_signature);
733 pop_type (field_type);
736 /* Defer actual checking until next pass. */
737 pop_type (ptr_type_node);
740 push_type (field_type);
744 push_type (get_class_constant (jcf, IMMEDIATE_u2));
746 case OPCODE_dup: type_stack_dup (1, 0); break;
747 case OPCODE_dup_x1: type_stack_dup (1, 1); break;
748 case OPCODE_dup_x2: type_stack_dup (1, 2); break;
749 case OPCODE_dup2: type_stack_dup (2, 0); break;
750 case OPCODE_dup2_x1: type_stack_dup (2, 1); break;
751 case OPCODE_dup2_x2: type_stack_dup (2, 2); break;
752 case OPCODE_pop: index = 1; goto pop;
753 case OPCODE_pop2: index = 2; goto pop;
755 if (stack_pointer < index)
756 VERIFICATION_ERROR ("stack underflow");
757 stack_pointer -= index;
760 if (stack_pointer < 2)
761 VERIFICATION_ERROR ("stack underflow (in swap)");
764 tree type1 = stack_type_map[stack_pointer - 1];
765 tree type2 = stack_type_map[stack_pointer - 2];
766 if (type1 == void_type_node || type2 == void_type_node)
767 VERIFICATION_ERROR ("verifier (swap): double or long value");
768 stack_type_map[stack_pointer - 2] = type1;
769 stack_type_map[stack_pointer - 1] = type2;
772 case OPCODE_ldc: index = IMMEDIATE_u1; goto ldc;
775 index = IMMEDIATE_u2; goto ldc;
777 if (index <= 0 || index >= JPOOL_SIZE(current_jcf))
778 VERIFICATION_ERROR ("bad constant pool index in ldc");
780 switch (JPOOL_TAG (current_jcf, index) & ~CONSTANT_ResolvedFlag)
782 case CONSTANT_Integer: type = int_type_node; goto check_ldc;
783 case CONSTANT_Float: type = float_type_node; goto check_ldc;
784 case CONSTANT_String: type = string_type_node; goto check_ldc;
785 case CONSTANT_Long: type = long_type_node; goto check_ldc;
786 case CONSTANT_Double: type = double_type_node; goto check_ldc;
788 if (TYPE_IS_WIDE (type) == (op_code == OPCODE_ldc2_w))
790 /* ... else fall through ... */
792 VERIFICATION_ERROR ("bad constant pool tag in ldc");
794 if (type == int_type_node)
796 i = TREE_INT_CST_LOW (get_constant (current_jcf, index));
802 case OPCODE_invokevirtual:
803 case OPCODE_invokespecial:
804 case OPCODE_invokestatic:
805 case OPCODE_invokeinterface:
807 int index = IMMEDIATE_u2;
808 tree sig = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool, index);
809 tree self_type = get_class_constant
810 (current_jcf, COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool,
812 tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool, index);
814 method_type = parse_signature_string (IDENTIFIER_POINTER (sig),
815 IDENTIFIER_LENGTH (sig));
816 if (TREE_CODE (method_type) != FUNCTION_TYPE)
817 VERIFICATION_ERROR ("bad method signature");
818 pop_argument_types (TYPE_ARG_TYPES (method_type));
820 /* Can't invoke <clinit> */
821 if (method_name == clinit_identifier_node)
822 VERIFICATION_ERROR ("invoke opcode can't invoke <clinit>");
823 /* Apart invokespecial, can't invoke <init> */
824 if (op_code != OPCODE_invokespecial
825 && method_name == init_identifier_node)
826 VERIFICATION_ERROR ("invoke opcode can't invoke <init>");
828 if (op_code != OPCODE_invokestatic)
829 pop_type (self_type);
833 case OPCODE_invokeinterface:
835 int nargs = IMMEDIATE_u1;
836 int notZero = IMMEDIATE_u1;
838 if (!nargs || notZero)
840 ("invalid argument number in invokeinterface");
845 if (TREE_TYPE (method_type) != void_type_node)
846 push_type (TREE_TYPE (method_type));
850 case OPCODE_arraylength:
851 /* Type checking actually made during code generation */
852 pop_type( ptr_type_node );
853 push_type( int_type_node );
856 /* Q&D verification *or* more checking done during code generation
857 for byte/boolean/char/short, the value popped is a int coerced
858 into the right type before being stored. */
859 case OPCODE_iastore: type = int_type_node; goto astore;
860 case OPCODE_lastore: type = long_type_node; goto astore;
861 case OPCODE_fastore: type = float_type_node; goto astore;
862 case OPCODE_dastore: type = double_type_node; goto astore;
863 case OPCODE_aastore: type = ptr_type_node; goto astore;
864 case OPCODE_bastore: type = int_type_node; goto astore;
865 case OPCODE_castore: type = int_type_node; goto astore;
866 case OPCODE_sastore: type = int_type_node; goto astore;
868 /* FIXME - need better verification here */
869 pop_type (type); /* new value */
870 pop_type (int_type_node); /* index */
871 pop_type (ptr_type_node); /* array */
874 /* Q&D verification *or* more checking done during code generation
875 for byte/boolean/char/short, the value pushed is a int. */
876 case OPCODE_iaload: type = int_type_node; goto aload;
877 case OPCODE_laload: type = long_type_node; goto aload;
878 case OPCODE_faload: type = float_type_node; goto aload;
879 case OPCODE_daload: type = double_type_node; goto aload;
880 case OPCODE_aaload: type = ptr_type_node; goto aload;
881 case OPCODE_baload: type = promote_type (byte_type_node); goto aload;
882 case OPCODE_caload: type = promote_type (char_type_node); goto aload;
883 case OPCODE_saload: type = promote_type (short_type_node); goto aload;
885 pop_type (int_type_node);
886 tmp = pop_type (ptr_type_node);
887 if (is_array_type_p (tmp))
888 type = TYPE_ARRAY_ELEMENT (TREE_TYPE (tmp));
889 else if (tmp != TYPE_NULL)
890 VERIFICATION_ERROR ("array load from non-array type");
894 case OPCODE_anewarray:
895 type = get_class_constant (current_jcf, IMMEDIATE_u2);
896 type = promote_type (type);
899 case OPCODE_newarray:
900 index = IMMEDIATE_u1;
901 type = decode_newarray_type (index);
902 if (type == NULL_TREE)
903 VERIFICATION_ERROR ("invalid type code in newarray opcode");
907 if (int_value >= 0 && prevpc >= 0)
909 /* If previous instruction pushed int constant,
910 we want to use it. */
911 switch (byte_ops[prevpc])
913 case OPCODE_iconst_0: case OPCODE_iconst_1:
914 case OPCODE_iconst_2: case OPCODE_iconst_3:
915 case OPCODE_iconst_4: case OPCODE_iconst_5:
916 case OPCODE_bipush: case OPCODE_sipush:
917 case OPCODE_ldc: case OPCODE_ldc_w:
925 type = build_java_array_type (type, int_value);
926 pop_type (int_type_node);
930 case OPCODE_multianewarray:
933 index = IMMEDIATE_u2;
937 VERIFICATION_ERROR ("number of dimension lower that 1 in multianewarray" );
939 for( i = 0; i < ndim; i++ )
940 pop_type (int_type_node);
941 push_type (get_class_constant (current_jcf, index));
945 case OPCODE_aconst_null:
946 push_type (ptr_type_node);
950 pop_type (throwable_type_node);
954 case OPCODE_checkcast:
955 pop_type (ptr_type_node);
956 type = get_class_constant (current_jcf, IMMEDIATE_u2);
959 case OPCODE_instanceof:
960 pop_type (ptr_type_node);
961 get_class_constant (current_jcf, IMMEDIATE_u2);
962 push_type (int_type_node);
965 case OPCODE_tableswitch:
969 pop_type (int_type_node);
973 VERIFICATION_ERROR ("bad alignment in tableswitch pad");
975 PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
980 VERIFICATION_ERROR ("unsorted low/high value in tableswitch");
982 while (low++ <= high)
983 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
988 case OPCODE_lookupswitch:
990 jint npairs, last, not_registered = 1;
992 pop_type (int_type_node);
996 VERIFICATION_ERROR ("bad alignment in lookupswitch pad");
999 PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
1000 npairs = IMMEDIATE_s4;
1003 VERIFICATION_ERROR ("invalid number of targets in lookupswitch");
1007 int match = IMMEDIATE_s4;
1010 else if (last >= match)
1011 VERIFICATION_ERROR ("unsorted match value in lookupswitch");
1014 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1020 case OPCODE_monitorenter:
1022 case OPCODE_monitorexit:
1023 pop_type (ptr_type_node);
1027 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1033 tree target = lookup_label (oldpc + IMMEDIATE_s2);
1034 tree return_label = lookup_label (PC);
1035 push_type (return_address_type_node);
1036 if (! LABEL_VERIFIED (target))
1038 /* first time seen */
1039 tree return_type_map;
1040 int nlocals = DECL_MAX_LOCALS (current_function_decl);
1041 index = nlocals + DECL_MAX_STACK (current_function_decl);
1042 return_type_map = make_tree_vec (index);
1043 while (index > nlocals)
1044 TREE_VEC_ELT (return_type_map, --index) = TYPE_UNKNOWN;
1046 TREE_VEC_ELT (return_type_map, --index) = TYPE_UNUSED;
1047 LABEL_RETURN_LABEL (target)
1048 = build_decl (LABEL_DECL, NULL_TREE, TREE_TYPE (target));
1049 LABEL_PC (LABEL_RETURN_LABEL (target)) = -1;
1050 LABEL_RETURN_TYPE_STATE (target) = return_type_map;
1051 LABEL_IS_SUBR_START (target) = 1;
1052 LABEL_IN_SUBR (target) = 1;
1053 LABEL_SUBR_START (target) = target;
1054 LABEL_SUBR_CONTEXT (target) = current_subr;
1056 else if (! LABEL_IS_SUBR_START (target)
1057 || LABEL_SUBR_CONTEXT (target) != current_subr)
1058 VERIFICATION_ERROR ("label part of different subroutines");
1060 i = merge_type_state (target);
1064 VERIFICATION_ERROR ("types could not be merged at jsr");
1065 push_pending_label (target);
1067 current_subr = target;
1069 /* Chain return_pc onto LABEL_RETURN_LABELS (target) if needed. */
1070 if (! value_member (return_label, LABEL_RETURN_LABELS (target)))
1072 LABEL_RETURN_LABELS (target)
1073 = tree_cons (NULL_TREE, return_label,
1074 LABEL_RETURN_LABELS (target));
1077 if (LABEL_VERIFIED (target))
1079 tree return_map = LABEL_RETURN_TYPE_STATE (target);
1080 int len = TREE_VEC_LENGTH (return_map);
1081 stack_pointer = len - DECL_MAX_LOCALS (current_function_decl);
1084 if (TREE_VEC_ELT (return_map, len) != TYPE_UNUSED)
1085 type_map[len] = TREE_VEC_ELT (return_map, len);
1087 current_subr = LABEL_SUBR_CONTEXT (target);
1088 PUSH_PENDING (return_label);
1095 if (current_subr == NULL)
1096 VERIFICATION_ERROR ("ret instruction not in a jsr subroutine");
1099 tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
1100 tree caller = LABEL_SUBR_CONTEXT (current_subr);
1101 int size = DECL_MAX_LOCALS(current_function_decl)+stack_pointer;
1102 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
1105 if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl)
1106 || type_map[index] != TYPE_RETURN_ADDR)
1107 VERIFICATION_ERROR ("invalid ret index");
1109 /* The next chunk of code is similar to an inlined version of
1110 * merge_type_state (LABEL_RETURN_LABEL (current_subr)).
1111 * The main differences are that LABEL_RETURN_LABEL is
1112 * pre-allocated by the jsr (but we don't know the size then);
1113 * and that we have to handle TYPE_UNUSED. */
1115 if (! RETURN_MAP_ADJUSTED (ret_map))
1116 { /* First return from this subroutine - fix stack pointer. */
1117 TREE_VEC_LENGTH (ret_map) = size;
1118 for (index = size; --index >= 0; )
1120 if (TREE_VEC_ELT (ret_map, index) != TYPE_UNUSED)
1121 TREE_VEC_ELT (ret_map, index) = type_map[index];
1123 RETURN_MAP_ADJUSTED (ret_map) = 1;
1127 if (TREE_VEC_LENGTH (ret_map) != size)
1128 VERIFICATION_ERROR ("inconsistent stack size on ret");
1129 for (index = 0; index < size; index++)
1131 tree type = TREE_VEC_ELT (ret_map, index);
1132 if (type != TYPE_UNUSED)
1134 type = merge_types (type, type_map [index]);
1135 TREE_VEC_ELT (ret_map, index) = type;
1136 if (type == TYPE_UNKNOWN)
1138 if (index >= size - stack_pointer)
1140 ("inconsistent types on ret from jsr");
1142 else if (TYPE_IS_WIDE (type))
1148 /* Check if there are any more pending blocks in this subroutine.
1149 Because we push pending blocks in a last-in-first-out order,
1150 and because we don't push anything from our caller until we
1151 are done with this subroutine or anything nested in it,
1152 then we are done if the top of the pending_blocks stack is
1153 not in a subroutine, or it is in our caller. */
1154 if (pending_blocks == NULL_TREE
1155 || ! LABEL_IN_SUBR (pending_blocks)
1156 || LABEL_SUBR_START (pending_blocks) == caller)
1158 /* Since we are done with this subroutine (i.e. this is the
1159 last ret from it), set up the (so far known) return
1160 address as pending - with the merged type state. */
1161 tmp = LABEL_RETURN_LABELS (current_subr);
1162 current_subr = caller;
1163 for ( ; tmp != NULL_TREE; tmp = TREE_CHAIN (tmp))
1165 tree return_label = TREE_VALUE (tmp);
1166 tree return_state = LABEL_TYPE_STATE (return_label);
1167 if (return_state == NULL_TREE)
1169 /* This means we had not verified the
1170 subroutine earlier, so this is the first jsr to
1171 call it. In this case, the type_map of the return
1172 address is just the current type_map - and that
1173 is handled by the following PUSH_PENDING. */
1177 /* In this case we have to do a merge. But first
1178 restore the type_map for unused slots to those
1179 that were in effect at the jsr. */
1180 for (index = size; --index >= 0; )
1182 type_map[index] = TREE_VEC_ELT (ret_map, index);
1183 if (type_map[index] == TYPE_UNUSED)
1185 = TREE_VEC_ELT (return_state, index);
1188 PUSH_PENDING (return_label);
1196 error ("unknown opcode %d@pc=%d during verification", op_code, PC-1);
1202 /* The following test is true if we have entered or exited an exception
1203 handler range *or* we have done a store to a local variable.
1204 In either case we need to consider any exception handlers that
1205 might "follow" this instruction. */
1207 if (eh_ranges != prev_eh_ranges)
1209 int save_stack_pointer = stack_pointer;
1210 int index = DECL_MAX_LOCALS (current_function_decl);
1211 tree save_type = type_map[index];
1212 tree save_current_subr = current_subr;
1213 struct eh_range *ranges = find_handler (oldpc);
1215 for (; ranges != NULL_EH_RANGE; ranges = ranges->outer)
1217 tree chain = ranges->handlers;
1219 /* We need to determine if the handler is part of current_subr.
1220 There are two cases: (1) The exception catch range
1221 is entirely within current_subr. In that case the handler
1222 is also part of current_subr.
1223 (2) Some of the catch range is not in current_subr.
1224 In that case, the handler is *not* part of current_subr.
1226 Figuring out which is the case is not necessarily obvious,
1227 in the presence of clever code generators (and obfuscators).
1228 We make a simplifying assumption that in case (2) we
1229 have that the current_subr is entirely within the catch range.
1230 In that case we can assume that if a caller (the jsr) of
1231 a subroutine is within the catch range, then the handler is
1232 *not* part of the subroutine, and vice versa. */
1234 current_subr = save_current_subr;
1235 for ( ; current_subr != NULL_TREE;
1236 current_subr = LABEL_SUBR_CONTEXT (current_subr))
1238 tree return_labels = LABEL_RETURN_LABELS (current_subr);
1239 /* There could be multiple return_labels, but
1240 we only need to check one. */
1241 int return_pc = LABEL_PC (TREE_VALUE (return_labels));
1242 if (return_pc <= ranges->start_pc
1243 || return_pc > ranges->end_pc)
1247 for ( ; chain != NULL_TREE; chain = TREE_CHAIN (chain))
1249 tree handler = TREE_VALUE (chain);
1250 tree type = TREE_PURPOSE (chain);
1251 if (type == NULL_TREE) /* a finally handler */
1252 type = throwable_type_node;
1253 type_map[index] = promote_type (type);
1255 PUSH_PENDING (handler);
1258 stack_pointer = save_stack_pointer;
1259 current_subr = save_current_subr;
1260 type_map[index] = save_type;
1261 prev_eh_ranges = eh_ranges;
1266 message = "program counter out of range";
1269 error ("verification error at PC=%d: %s", oldpc, message);