1 /* Handle verification of bytecoded methods for the GNU compiler for
3 Copyright (C) 1997, 1998, 1999 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA.
22 Java and all Java-based marks are trademarks or registered trademarks
23 of Sun Microsystems, Inc. in the United States and other countries.
24 The Free Software Foundation is independent of Sun Microsystems, Inc. */
29 #include "java-tree.h"
31 #include "java-opcodes.h"
33 #include "java-except.h"
36 static void push_pending_label PROTO ((tree));
37 static tree merge_types PROTO ((tree, tree));
39 extern int stack_pointer;
41 /* During verification, start of the current subroutine (jsr target). */
44 /* A list of pending blocks, chained using LABEL_PENDING_CHAIN.
45 A pending block is one that has LABEL_CHANGED set, which means
46 it requires (re-) verification. */
49 /* Append TARGET_LABEL to the pending_block stack unless already in it. */
/* NOTE(review): fragmentary listing -- the `static void` line, the
   `tree target_label;` parameter declaration and the braces fall in the
   gaps between the numbered lines.  Confirm against the full source. */
52 push_pending_label (target_label)
/* LABEL_CHANGED doubles as the "already queued" flag: a label is pushed
   at most once until it has been re-verified and the flag cleared. */
55   if (! LABEL_CHANGED (target_label))
/* Push onto the singly-linked pending_blocks stack, chained through
   LABEL_PENDING_CHAIN. */
57       LABEL_PENDING_CHAIN (target_label) = pending_blocks;
58       pending_blocks = target_label;
59       LABEL_CHANGED (target_label) = 1;
63 /* Note that TARGET_LABEL is a possible successor instruction.
64    Merge the type state etc.
65    Return NULL on success, or an error message on failure. */
/* NOTE(review): fragmentary listing -- return type, braces and some
   statements are in the gaps; the control flow below is only partially
   visible. */
68 check_pending_block (target_label)
/* Merge the verifier's current type state into the target; a negative
   result (checked in a missing line) means the states are incompatible. */
71   int changed = merge_type_state (target_label);
76     return "types could not be merged";
/* If the merge changed anything, the target must be (re-)verified. */
77     push_pending_label (target_label);
/* Subroutine (jsr/ret) bookkeeping: control may not cross a subroutine
   boundary except through jsr/ret. */
80   if (current_subr == NULL)
82       if (LABEL_IN_SUBR (target_label))
83 	return "might transfer control into subroutine";
87       if (LABEL_IN_SUBR (target_label))
89 	  if (LABEL_SUBR_START (target_label) != current_subr)
90 	    return "transfer out of subroutine";
/* First time this label is reached from inside a subroutine: record
   which subroutine it belongs to. */
92       else if (! LABEL_VERIFIED (target_label))
94 	  LABEL_IN_SUBR (target_label) = 1;
95 	  LABEL_SUBR_START (target_label) = current_subr;
98 	return "transfer out of subroutine";
103 /* Return the "merged" types of TYPE1 and TYPE2.
104    If either is primitive, the other must match (after promotion to int).
105    For reference types, return the common super-class.
106    Return TYPE_UNKNOWN if the types cannot be merged. */
/* NOTE(review): fragmentary listing -- return type, parameter
   declarations, braces and several return statements fall in the gaps. */
109 merge_types (type1, type2)
/* Unknown or return-address slots never merge with anything else;
   the (missing) branch body presumably returns TYPE_UNKNOWN. */
114   if (type1 == TYPE_UNKNOWN || type2 == TYPE_UNKNOWN
115       || type1 == TYPE_RETURN_ADDR || type2 == TYPE_RETURN_ADDR)
117   if (TREE_CODE (type1) == POINTER_TYPE && TREE_CODE (type2) == POINTER_TYPE)
121       /* ptr_type_node is only used for a null reference,
122 	 which is compatible with any reference type. */
123       if (type1 == ptr_type_node || type2 == object_ptr_type_node)
125       if (type2 == ptr_type_node || type1 == object_ptr_type_node)
128       tt1 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type1));
129       tt2 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type2));
/* Array case: merge element types recursively when both are arrays of
   references; otherwise fall back to Object. */
131       if (TYPE_ARRAY_P (tt1) || TYPE_ARRAY_P (tt2))
133 	  if (TYPE_ARRAY_P (tt1) == TYPE_ARRAY_P (tt2))
135 	      tree el_type1 = TYPE_ARRAY_ELEMENT (tt1);
136 	      tree el_type2 = TYPE_ARRAY_ELEMENT (tt2);
137 	      tree el_type = NULL_TREE;
138 	      if (el_type1 == el_type2)
140 	      else if (TREE_CODE (el_type1) == POINTER_TYPE
141 		       && TREE_CODE (el_type2) == POINTER_TYPE)
142 		el_type = merge_types (el_type1, el_type2);
143 	      if (el_type != NULL_TREE)
145 		  HOST_WIDE_INT len1 = java_array_type_length (tt1);
146 		  HOST_WIDE_INT len2 = java_array_type_length (tt2);
149 		  else if (el_type1 == el_type2)
151 		  return promote_type (build_java_array_type (el_type, len1));
154 	  return object_ptr_type_node;
/* Interface handling below follows Sun's relaxed rule. */
157       if (CLASS_INTERFACE (TYPE_NAME (tt1)))
159 	  if (CLASS_INTERFACE (TYPE_NAME (tt2)))
161 	      /* This is a kludge, but matches what Sun's verifier does.
162 		 It can be tricked, but is safe as long as type errors
163 		 (i.e. interface method calls) are caught at run-time. */
164 	      return object_ptr_type_node;
168 	      if (can_widen_reference_to (tt2, tt1))
174       else if (CLASS_INTERFACE (TYPE_NAME (tt2)))
176 	  if (can_widen_reference_to (tt1, tt2))
/* Neither is an interface: walk both inheritance chains up to equal
   depth, then in lock-step until a common superclass is found. */
185       depth1 = class_depth (type1);
186       depth2 = class_depth (type2);
187       for ( ; depth1 > depth2;  depth1--)
188 	type1 = TYPE_BINFO_BASETYPE (type1, 0);
189       for ( ; depth2 > depth1;  depth2--)
190 	type2 = TYPE_BINFO_BASETYPE (type2, 0);
191       while (type1 != type2)
193 	  type1 = TYPE_BINFO_BASETYPE (type1, 0);
194 	  type2 = TYPE_BINFO_BASETYPE (type2, 0);
196       return promote_type (type1);
/* Primitive case: any two <=32-bit integral types merge to int. */
198   if (INTEGRAL_TYPE_P (type1) && INTEGRAL_TYPE_P (type2)
199       && TYPE_PRECISION (type1) <= 32 && TYPE_PRECISION (type2) <= 32)
200     return int_type_node;
204 /* Merge the current type state with that at LABEL.
205    Return -1 if the states are incompatible (i.e. on error),
206    0 if there was no change, and 1 if there was a change. */
/* NOTE(review): fragmentary listing -- return type, declarations of i /
   return_map, braces and the final return statements are in the gaps. */
209 merge_type_state (label)
/* The type state is a single vector: nlocals local slots followed by
   stack_pointer operand-stack slots. */
212   int nlocals = DECL_MAX_LOCALS(current_function_decl);
213   int cur_length = stack_pointer + nlocals;
214   tree vec = LABEL_TYPE_STATE (label);
/* First visit: snapshot the whole current type_map; this always counts
   as a change. */
216   if (vec == NULL_TREE)
218       vec = make_tree_vec (cur_length);
219       LABEL_TYPE_STATE (label) = vec;
220       while (--cur_length >= 0)
221 	TREE_VEC_ELT (vec, cur_length) = type_map [cur_length];
/* For a verified subroutine start (other than the one currently being
   processed), the return map tells which locals the subroutine touches. */
228       if (LABEL_IS_SUBR_START (label) && LABEL_VERIFIED (label)
229 	  && current_subr != label)
230 	return_map = LABEL_RETURN_TYPE_STATE (label);
232 	return_map = NULL_TREE;
/* Stack depth mismatch at a join point is a verification error. */
233       if (TREE_VEC_LENGTH (vec) != cur_length)
237       for (i = 0; i < cur_length; i++)
239 	  tree old_type = TREE_VEC_ELT (vec, i);
240 	  tree new_type = merge_types (old_type, type_map [i]);
241 	  if (TREE_VEC_ELT (vec, i) != new_type)
243 	      /* If there has been a change, note that since we must re-verify.
244 		 However, if the label is the start of a subroutine,
245 		 we don't care about local variables that are neither
246 		 set nor used in the sub-routine. */
247 	      if (return_map == NULL_TREE || i >= nlocals
248 		  || TREE_VEC_ELT (return_map, i) != TYPE_UNUSED
249 		  || (TYPE_IS_WIDE (new_type)
250 		      && TREE_VEC_ELT (return_map, i+1) != TYPE_UNUSED))
253 	  TREE_VEC_ELT (vec, i) = new_type;
/* TYPE_UNKNOWN is only an error for live slots; the handling branch is
   in a listing gap. */
254 	  if (new_type == TYPE_UNKNOWN)
259 	  else if (TYPE_IS_WIDE (new_type))
266 /* Handle dup-like operations. */
/* Implements dup, dup_x1, dup_x2, dup2, dup2_x1, dup2_x2 generically:
   pop SIZE+OFFSET stack-map entries into type[], then push back the top
   SIZE entries twice around the OFFSET entries.  void_type_node marks
   the second half of a 64-bit (wide) value.
   NOTE(review): fragmentary listing -- declarations of type[] and the
   function's storage class/braces are in the gaps. */
269 type_stack_dup (size, offset)
274   if (size + offset > stack_pointer)
275     error ("stack underflow - dup* operation");
276   for (index = 0;  index < size + offset; index++)
278       type[index] = stack_type_map[stack_pointer - 1];
279       if (type[index] == void_type_node)
/* Hit the second word of a wide value: record its first word instead,
   and make sure no dup boundary splits the pair. */
282 	  type[index] = stack_type_map[stack_pointer - 2];
283 	  if (! TYPE_IS_WIDE (type[index]))
284 	    fatal ("internal error - dup operation");
285 	  if (index == size || index == size + offset)
286 	    fatal ("dup operation splits 64-bit number");
288       pop_type (type[index]);
/* Re-push the duplicated top SIZE entries first ... */
290   for (index = size;  --index >= 0; )
292       if (type[index] != void_type_node)
293 	push_type (type[index]);
/* ... then everything that was popped, restoring the original order. */
296   for (index = size + offset;  --index >= 0; )
298       if (type[index] != void_type_node)
299 	push_type (type[index]);
303 /* This causes the next iteration to ignore the next instruction
304    and look for some other unhandled instruction. */
305 #define INVALIDATE_PC (prevpc = -1, oldpc = PC, PC = INVALID_PC)
306 #define INVALID_PC (-1)
/* Report a verification failure: stash the message and jump to the
   shared verify_error label in verify_jvm_instructions. */
308 #define VERIFICATION_ERROR(MESSAGE) \
309   do { message = MESSAGE; goto verify_error; } while (0)
/* Queue LABEL as a pending successor; bail out on a merge error. */
311 #define PUSH_PENDING(LABEL) \
312   do { if ((message = check_pending_block (LABEL)) != NULL) \
313          goto verify_error; } while (0)
/* NOTE(review): two definitions of CHECK_PC_IN_RANGE follow.  The first
   uses a GCC statement expression; the #if/#else selecting between them
   (presumably on __GNUC__) falls in a listing gap -- confirm against
   the full source. */
316 #define CHECK_PC_IN_RANGE(PC) ({if (PC < 0 || PC > length) goto bad_pc; 1;})
318 #define CHECK_PC_IN_RANGE(PC) (PC < 0 || PC > length ? \
319                                (fatal("Bad byte codes.\n"), 0) : 1)
322 #define BCODE byte_ops
324 /* Verify the bytecodes of the current method.
325    Return 1 on success, 0 on failure. */
/* NOTE(review): fragmentary listing -- the return type, most local
   declarations, braces, many case labels, break statements and the
   main loop header are in the gaps between numbered lines.  Comments
   below describe only what the visible lines establish. */
327 verify_jvm_instructions (jcf, byte_ops, length)
329      unsigned char* byte_ops;
336   int oldpc; /* PC of start of instruction. */
337   int prevpc;  /* If >= 0, PC of previous instruction. */
340   register unsigned char *p;
341   struct eh_range *prev_eh_ranges = NULL_EH_RANGE;
342   struct eh_range *eh_ranges;
346   pending_blocks = NULL_TREE;
348   /* Handle the exception table. */
349   method_init_exceptions ();
350   JCF_SEEK (jcf, DECL_CODE_OFFSET (current_function_decl) + length);
351   i = JCF_readu2 (jcf);
353   /* We read the exception table backwards. */
354   p = jcf->read_ptr + 8 * i;
/* Each exception_table entry is four u2 values: start/end/handler pc
   and catch-type constant-pool index. */
357       int start_pc = GET_u2 (p-8);
358       int end_pc = GET_u2 (p-6);
359       int handler_pc = GET_u2 (p-4);
360       int catch_type = GET_u2 (p-2);
/* Sanity-check the entry: pcs in range, start < end, handler outside
   its own protected range, and all three on instruction boundaries. */
363       if (start_pc < 0 || start_pc >= length
364 	  || end_pc < 0 || end_pc > length || start_pc >= end_pc
365 	  || handler_pc < 0 || handler_pc >= length
366 	  || (handler_pc >= start_pc && handler_pc < end_pc)
367 	  || ! (instruction_bits [start_pc] & BCODE_INSTRUCTION_START)
368 	  || ! (instruction_bits [end_pc] & BCODE_INSTRUCTION_START)
369 	  || ! (instruction_bits [handler_pc] & BCODE_INSTRUCTION_START))
371 	  error ("bad pc in exception_table");
375       if (! add_handler (start_pc, end_pc,
376 			 lookup_label (handler_pc),
377 			 catch_type == 0 ? NULL_TREE
378 			 : get_class_constant (jcf, catch_type)))
380 	  error ("overlapping exception ranges are not supported");
384       instruction_bits [handler_pc] |= BCODE_EXCEPTION_TARGET;
/* Main verification loop (header in a listing gap): if the current PC
   is a branch target, merge state into it. */
391       if (((PC != INVALID_PC
392 	    && instruction_bits [PC] & BCODE_TARGET) != 0)
395 	  PUSH_PENDING (lookup_label (PC));
/* Dead end reached: pull the next pending block off the work stack, or
   finish if none remain. */
398       if (PC == INVALID_PC)
400 	  label = pending_blocks;
401 	  if (label == NULL_TREE)
402 	    break;  /* We're done! */
403 	  pending_blocks = LABEL_PENDING_CHAIN (label);
404 	  LABEL_CHANGED (label) = 0;
406 	  if (LABEL_IN_SUBR (label))
407 	    current_subr = LABEL_SUBR_START (label);
409 	    current_subr = NULL_TREE;
411 	  /* Restore type_map and stack_pointer from
412 	     LABEL_TYPE_STATE (label), and continue
413 	     compiling from there. */
414 	  load_type_state (label);
415 	  PC = LABEL_PC (label);
417       else if (PC >= length)
418 	VERIFICATION_ERROR ("falling through end of method");
422       if (!(instruction_bits [PC] & BCODE_INSTRUCTION_START) && ! wide)
423 	VERIFICATION_ERROR ("PC not at instruction start");
425       instruction_bits[PC] |= BCODE_VERIFIED;
427       eh_ranges = find_handler (oldpc);
429       op_code = byte_ops[PC++];
432 	int is_static, is_putting;
/* --- Constant-push opcodes --- */
435 	case OPCODE_iconst_m1:
436 	case OPCODE_iconst_0: case OPCODE_iconst_1: case OPCODE_iconst_2:
437 	case OPCODE_iconst_3: case OPCODE_iconst_4: case OPCODE_iconst_5:
438 	  i = op_code - OPCODE_iconst_0;
/* NOTE(review): both arms of this test check OPCODE_newarray; the
   second was presumably meant to be OPCODE_anewarray -- confirm against
   the full source before relying on this peephole. */
441 	  if (byte_ops[PC] == OPCODE_newarray
442 	      || byte_ops[PC] == OPCODE_newarray)
444 	  push_type (int_type_node);  break;
445 	case OPCODE_lconst_0:   case OPCODE_lconst_1:
446 	  push_type (long_type_node);  break;
447 	case OPCODE_fconst_0:   case OPCODE_fconst_1:   case OPCODE_fconst_2:
448 	  push_type (float_type_node);  break;
449 	case OPCODE_dconst_0:   case OPCODE_dconst_1:
450 	  push_type (double_type_node);  break;
/* --- Local-variable loads: general (indexed) forms ... --- */
457 	case OPCODE_iload:  type = int_type_node;  goto general_load;
458 	case OPCODE_lload:  type = long_type_node;  goto general_load;
459 	case OPCODE_fload:  type = float_type_node;  goto general_load;
460 	case OPCODE_dload:  type = double_type_node;  goto general_load;
461 	case OPCODE_aload:  type = ptr_type_node;  goto general_load;
463 	  index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
/* ... and the short forms with the slot index encoded in the opcode. */
466 	case OPCODE_iload_0:  type = int_type_node;  index = 0; goto load;
467 	case OPCODE_iload_1:  type = int_type_node;  index = 1; goto load;
468 	case OPCODE_iload_2:  type = int_type_node;  index = 2; goto load;
469 	case OPCODE_iload_3:  type = int_type_node;  index = 3; goto load;
470 	case OPCODE_lload_0:  type = long_type_node; index = 0; goto load;
471 	case OPCODE_lload_1:  type = long_type_node; index = 1; goto load;
472 	case OPCODE_lload_2:  type = long_type_node; index = 2; goto load;
473 	case OPCODE_lload_3:  type = long_type_node; index = 3; goto load;
474 	case OPCODE_fload_0:  type = float_type_node; index = 0; goto load;
475 	case OPCODE_fload_1:  type = float_type_node; index = 1; goto load;
476 	case OPCODE_fload_2:  type = float_type_node; index = 2; goto load;
477 	case OPCODE_fload_3:  type = float_type_node; index = 3; goto load;
478 	case OPCODE_dload_0:  type = double_type_node; index = 0; goto load;
479 	case OPCODE_dload_1:  type = double_type_node; index = 1; goto load;
480 	case OPCODE_dload_2:  type = double_type_node; index = 2; goto load;
481 	case OPCODE_dload_3:  type = double_type_node; index = 3; goto load;
482 	case OPCODE_aload_0:  type = ptr_type_node;  index = 0;  goto load;
483 	case OPCODE_aload_1:  type = ptr_type_node;  index = 1;  goto load;
484 	case OPCODE_aload_2:  type = ptr_type_node;  index = 2;  goto load;
485 	case OPCODE_aload_3:  type = ptr_type_node;  index = 3;  goto load;
/* Shared load checks: index in range (wide values occupy two slots)
   and the slot's recorded type compatible with the opcode's type. */
488 	      || (index + TYPE_IS_WIDE (type)
489 		  >= DECL_MAX_LOCALS (current_function_decl)))
490 	    VERIFICATION_ERROR ("invalid local variable index in load");
491 	  tmp = type_map[index];
492 	  if (tmp == TYPE_UNKNOWN || tmp == TYPE_SECOND
493 	      || (TYPE_IS_WIDE (type)
494 		  && type_map[index+1] != void_type_node)
495 	      || (type == ptr_type_node
496 		  ? TREE_CODE (tmp) != POINTER_TYPE
497 		  : type == int_type_node
498 		  ? (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
500 	    VERIFICATION_ERROR("invalid local variable type in load");
/* --- Local-variable stores, mirroring the loads above --- */
503 	case OPCODE_istore:  type = int_type_node;  goto general_store;
504 	case OPCODE_lstore:  type = long_type_node;  goto general_store;
505 	case OPCODE_fstore:  type = float_type_node;  goto general_store;
506 	case OPCODE_dstore:  type = double_type_node;  goto general_store;
507 	case OPCODE_astore:  type = ptr_type_node;  goto general_store;
509 	  index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
512 	case OPCODE_istore_0:  type = int_type_node; index = 0; goto store;
513 	case OPCODE_istore_1:  type = int_type_node; index = 1; goto store;
514 	case OPCODE_istore_2:  type = int_type_node; index = 2; goto store;
515 	case OPCODE_istore_3:  type = int_type_node; index = 3; goto store;
516 	case OPCODE_lstore_0:  type = long_type_node; index=0; goto store;
517 	case OPCODE_lstore_1:  type = long_type_node; index=1; goto store;
518 	case OPCODE_lstore_2:  type = long_type_node; index=2; goto store;
519 	case OPCODE_lstore_3:  type = long_type_node; index=3; goto store;
520 	case OPCODE_fstore_0:  type=float_type_node; index=0; goto store;
521 	case OPCODE_fstore_1:  type=float_type_node; index=1; goto store;
522 	case OPCODE_fstore_2:  type=float_type_node; index=2; goto store;
523 	case OPCODE_fstore_3:  type=float_type_node; index=3; goto store;
524 	case OPCODE_dstore_0:  type=double_type_node; index=0; goto store;
525 	case OPCODE_dstore_1:  type=double_type_node; index=1; goto store;
526 	case OPCODE_dstore_2:  type=double_type_node; index=2; goto store;
527 	case OPCODE_dstore_3:  type=double_type_node; index=3; goto store;
528 	case OPCODE_astore_0:  type = ptr_type_node; index = 0; goto store;
529 	case OPCODE_astore_1:  type = ptr_type_node; index = 1; goto store;
530 	case OPCODE_astore_2:  type = ptr_type_node; index = 2; goto store;
531 	case OPCODE_astore_3:  type = ptr_type_node; index = 3; goto store;
534 	      || (index + TYPE_IS_WIDE (type)
535 		  >= DECL_MAX_LOCALS (current_function_decl)))
537 	    VERIFICATION_ERROR ("invalid local variable index in store");
540 	  type = pop_type (type);
541 	  type_map[index] = type;
543 	  /* If local variable changed, we need to reconsider eh handlers. */
544 	  prev_eh_ranges = NULL_EH_RANGE;
546 	  /* Allocate decl and rtx for this variable now, so if we're not
547 	     optimizing, we get a temporary that survives the whole method. */
548 	  find_local_variable (index, type, oldpc);
550           if (TYPE_IS_WIDE (type))
551             type_map[index+1] = TYPE_SECOND;
552 	  /* ... fall through to note_used ... */
554 	  /* For store or load, note that local variable INDEX is used.
555 	     This is needed to verify try-finally sub-routines. */
558 	      tree vec = LABEL_RETURN_TYPE_STATE (current_subr);
559 	      tree subr_vec = LABEL_TYPE_STATE (current_subr);
560 	      int len = 1 + TYPE_IS_WIDE (type);
/* Mark the slot used-by-subroutine by copying its type at subroutine
   entry into the return map. */
563 		  if (TREE_VEC_ELT (vec, index) == TYPE_UNUSED)
564 		    TREE_VEC_ELT (vec, index) = TREE_VEC_ELT (subr_vec, index);
/* --- Arithmetic: shared binop/unop tails (case labels in gaps) --- */
579 	  type = int_type_node;  goto binop;
584 	  type = int_type_node;  goto unop;
593 	  type = long_type_node;  goto binop;
595 	  type = long_type_node;  goto unop;
596 	case OPCODE_fadd:	case OPCODE_fsub:
597 	case OPCODE_fmul:	case OPCODE_fdiv:	case OPCODE_frem:
598 	  type = float_type_node;  goto binop;
600 	  type = float_type_node;  goto unop;
601 	case OPCODE_dadd:	case OPCODE_dsub:
602 	case OPCODE_dmul:	case OPCODE_ddiv:	case OPCODE_drem:
603 	  type = double_type_node;  goto binop;
605 	  type = double_type_node;  goto unop;
/* Long shifts take an int shift count over a long value. */
618 	  pop_type (int_type_node);
619 	  pop_type (long_type_node);
620 	  push_type (long_type_node);
/* --- iinc: operates directly on a local, stack untouched --- */
623 	  index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
626 	  if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl))
627 	    VERIFICATION_ERROR ("invalid local variable index in iinc");
628 	  tmp = type_map[index];
629 	  if (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
630 	    VERIFICATION_ERROR ("invalid local variable type in iinc");
/* --- Primitive conversions (i2l .. d2f; case labels in gaps) --- */
633 	  pop_type (int_type_node);    push_type (long_type_node);   break;
635 	  pop_type (int_type_node);    push_type (float_type_node);  break;
637 	  pop_type (int_type_node);    push_type (double_type_node); break;
639 	  pop_type (long_type_node);   push_type (int_type_node);    break;
641 	  pop_type (long_type_node);   push_type (float_type_node);  break;
643 	  pop_type (long_type_node);   push_type (double_type_node); break;
645 	  pop_type (float_type_node);  push_type (int_type_node);    break;
647 	  pop_type (float_type_node);  push_type (long_type_node);   break;
649 	  pop_type (float_type_node);  push_type (double_type_node); break;
651 	  pop_type (double_type_node); push_type (int_type_node);    break;
653 	  pop_type (double_type_node); push_type (long_type_node);   break;
655 	  pop_type (double_type_node); push_type (float_type_node);  break;
/* --- Comparisons: pop two operands, push an int result --- */
657 	  type = long_type_node;  goto compare;
660 	  type = float_type_node;  goto compare;
663 	  type = double_type_node;  goto compare;
665 	  pop_type (type);  pop_type (type);
666 	  push_type (int_type_node);  break;
/* --- Conditional branches; the shared `cond` tail is in a gap --- */
673 	  pop_type (int_type_node);  goto cond;
675 	case OPCODE_ifnonnull:
676 	  pop_type (ptr_type_node ); goto cond;
677 	case OPCODE_if_icmpeq:
678 	case OPCODE_if_icmpne:
679 	case OPCODE_if_icmplt:
680 	case OPCODE_if_icmpge:
681 	case OPCODE_if_icmpgt:
682 	case OPCODE_if_icmple:
683 	  pop_type (int_type_node);  pop_type (int_type_node);  goto cond;
684 	case OPCODE_if_acmpeq:
685 	case OPCODE_if_acmpne:
686 	  pop_type (object_ptr_type_node);  pop_type (object_ptr_type_node);
/* Branch targets become pending blocks. */
689 	  PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
692 	  PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
/* --- wide prefix: only valid before the indexed load/store family --- */
696 	  switch (byte_ops[PC])
698 	    case OPCODE_iload:  case OPCODE_lload:
699 	    case OPCODE_fload:  case OPCODE_dload:  case OPCODE_aload:
700 	    case OPCODE_istore:  case OPCODE_lstore:
701 	    case OPCODE_fstore:  case OPCODE_dstore:  case OPCODE_astore:
707 	      VERIFICATION_ERROR ("invalid use of wide instruction");
/* --- Returns; the shared `ret` tail is in a gap --- */
710 	case OPCODE_ireturn:  type = int_type_node;    goto ret;
711 	case OPCODE_lreturn:  type = long_type_node;   goto ret;
712 	case OPCODE_freturn:  type = float_type_node;  goto ret;
713 	case OPCODE_dreturn:  type = double_type_node; goto ret;
714 	case OPCODE_areturn:  type = ptr_type_node;    goto ret;
717 	  /* ... fall through ... */
/* --- Field access --- */
721 	case OPCODE_getstatic: is_putting = 0;  is_static = 1;  goto field;
722 	case OPCODE_putstatic: is_putting = 1;  is_static = 1;  goto field;
723 	case OPCODE_getfield: is_putting = 0;  is_static = 0;  goto field;
724 	case OPCODE_putfield: is_putting = 1;  is_static = 0;  goto field;
727 	    int index = IMMEDIATE_u2;
/* NOTE(review): '¤t_jcf' here (and in the invoke case below) looks
   like an encoding artifact of '&current_jcf' -- confirm against the
   original source. */
728 	    tree field_signature = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool, index);
729 	    tree field_type = get_type_from_signature (field_signature);
731 	      pop_type (field_type);
734 		/* Defer actual checking until next pass. */
735 		pop_type (ptr_type_node);
738 	      push_type (field_type);
742 	  push_type (get_class_constant (jcf, IMMEDIATE_u2));
/* --- Stack-manipulation opcodes, all via type_stack_dup --- */
744 	case OPCODE_dup:     type_stack_dup (1, 0);  break;
745 	case OPCODE_dup_x1:  type_stack_dup (1, 1);  break;
746 	case OPCODE_dup_x2:  type_stack_dup (1, 2);  break;
747 	case OPCODE_dup2:    type_stack_dup (2, 0);  break;
748 	case OPCODE_dup2_x1: type_stack_dup (2, 1);  break;
749 	case OPCODE_dup2_x2: type_stack_dup (2, 2);  break;
750 	case OPCODE_pop:  index = 1;  goto pop;
751 	case OPCODE_pop2: index = 2;  goto pop;
753 	  if (stack_pointer < index)
754 	    VERIFICATION_ERROR ("stack underflow");
755 	  stack_pointer -= index;
758 	  if (stack_pointer < 2)
759 	    VERIFICATION_ERROR ("stack underflow (in swap)");
/* swap may not touch halves of a wide (long/double) value. */
762 	      tree type1 = stack_type_map[stack_pointer - 1];
763 	      tree type2 = stack_type_map[stack_pointer - 2];
764 	      if (type1 == void_type_node || type2 == void_type_node)
765 		VERIFICATION_ERROR ("verifier (swap): double or long value");
766 	      stack_type_map[stack_pointer - 2] = type1;
767 	      stack_type_map[stack_pointer - 1] = type2;
/* --- ldc family: validate the constant-pool entry's tag --- */
770 	case OPCODE_ldc:  index = IMMEDIATE_u1;  goto ldc;
773 	  index = IMMEDIATE_u2;  goto ldc;
775 	  if (index <= 0 || index >= JPOOL_SIZE(current_jcf))
776 	    VERIFICATION_ERROR ("bad constant pool index in ldc");
778 	  switch (JPOOL_TAG (current_jcf, index) & ~CONSTANT_ResolvedFlag)
780 	    case CONSTANT_Integer:  type = int_type_node;  goto check_ldc;
781 	    case CONSTANT_Float:    type = float_type_node;  goto check_ldc;
782 	    case CONSTANT_String:   type = string_type_node; goto check_ldc;
783 	    case CONSTANT_Long:    type = long_type_node;    goto check_ldc;
784 	    case CONSTANT_Double:  type = double_type_node;  goto check_ldc;
/* Wide constants require ldc2_w; narrow ones require ldc/ldc_w. */
786 	      if (TYPE_IS_WIDE (type) == (op_code == OPCODE_ldc2_w))
788 	      /* ... else fall through ... */
790 	      VERIFICATION_ERROR ("bad constant pool tag in ldc");
792 	  if (type == int_type_node)
794 	      i = TREE_INT_CST_LOW (get_constant (current_jcf, index));
/* --- Method invocation --- */
800 	case OPCODE_invokevirtual:
801 	case OPCODE_invokespecial:
802 	case OPCODE_invokestatic:
803 	case OPCODE_invokeinterface:
805 	    int index = IMMEDIATE_u2;
806 	    tree sig = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool, index);
807 	    tree self_type = get_class_constant
808 	      (current_jcf, COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool,
810 	    tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool, index);
812 	    method_type = parse_signature_string (IDENTIFIER_POINTER (sig),
813 						  IDENTIFIER_LENGTH (sig));
814 	    if (TREE_CODE (method_type) != FUNCTION_TYPE)
815 	      VERIFICATION_ERROR ("bad method signature");
816 	    pop_argument_types (TYPE_ARG_TYPES (method_type));
818 	    /* Can't invoke <clinit> */
819 	    if (method_name == clinit_identifier_node)
820 	      VERIFICATION_ERROR ("invoke opcode can't invoke <clinit>");
821 	    /* Apart from invokespecial, can't invoke <init> */
822 	    if (op_code != OPCODE_invokespecial
823 		&& method_name == init_identifier_node)
824 	      VERIFICATION_ERROR ("invoke opcode can't invoke <init>");
/* Non-static invokes also pop the receiver. */
826 	    if (op_code != OPCODE_invokestatic)
827 	      pop_type (self_type);
/* invokeinterface carries an explicit arg count and a must-be-zero byte. */
831 	      case OPCODE_invokeinterface:
833 	          int nargs = IMMEDIATE_u1;
834 		  int notZero = IMMEDIATE_u1;
836 		  if (!nargs || notZero)
838 		      ("invalid argument number in invokeinterface");
843 	    if (TREE_TYPE (method_type) != void_type_node)
844 	      push_type (TREE_TYPE (method_type));
848 	case OPCODE_arraylength:
849 	    /* Type checking actually made during code generation */
850 	    pop_type( ptr_type_node );
851 	    push_type( int_type_node );
854         /* Q&D verification *or* more checking done during code generation
855 	   for byte/boolean/char/short, the value popped is a int coerced
856 	   into the right type before being stored.  */
857 	case OPCODE_iastore: type = int_type_node;     goto astore;
858 	case OPCODE_lastore: type = long_type_node;    goto astore;
859 	case OPCODE_fastore: type = float_type_node;   goto astore;
860 	case OPCODE_dastore: type = double_type_node;  goto astore;
861 	case OPCODE_aastore: type = ptr_type_node;     goto astore;
862 	case OPCODE_bastore: type = int_type_node; goto astore;
863 	case OPCODE_castore: type = int_type_node; goto astore;
864 	case OPCODE_sastore: type = int_type_node; goto astore;
866 	  /* FIXME - need better verification here */
867 	  pop_type (type);	     /* new value */
868 	  pop_type (int_type_node);  /* index */
869 	  pop_type (ptr_type_node);  /* array */
872         /* Q&D verification *or* more checking done during code generation
873 	   for byte/boolean/char/short, the value pushed is a int. */
874 	case OPCODE_iaload: type = int_type_node;     goto aload;
875 	case OPCODE_laload: type = long_type_node;    goto aload;
876 	case OPCODE_faload: type = float_type_node;   goto aload;
877 	case OPCODE_daload: type = double_type_node;  goto aload;
878 	case OPCODE_aaload: type = ptr_type_node;     goto aload;
879 	case OPCODE_baload: type = promote_type (byte_type_node);  goto aload;
880 	case OPCODE_caload: type = promote_type (char_type_node);  goto aload;
881 	case OPCODE_saload: type = promote_type (short_type_node); goto aload;
883 	  pop_type (int_type_node);
884 	  tmp = pop_type (ptr_type_node);
/* Prefer the actual element type when the array's static type is known;
   a null (TYPE_NULL) array reference is also tolerated here. */
885 	  if (is_array_type_p (tmp))
886 	    type = TYPE_ARRAY_ELEMENT (TREE_TYPE (tmp));
887 	  else if (tmp != TYPE_NULL)
888 	    VERIFICATION_ERROR ("array load from non-array type");
/* --- Array creation --- */
892 	case OPCODE_anewarray:
893 	  type = get_class_constant (current_jcf, IMMEDIATE_u2);
894 	  type = promote_type (type);
897 	case OPCODE_newarray:
898 	  index = IMMEDIATE_u1;
899 	  type = decode_newarray_type (index);
900 	  if (type == NULL_TREE)
901 	    VERIFICATION_ERROR ("invalid type code in newarray opcode");
/* Peephole: if the previous instruction pushed a known non-negative int
   constant, bake the length into the array type. */
905 	  if (int_value >= 0 && prevpc >= 0)
907 	      /* If previous instruction pushed int constant,
908 		 we want to use it. */
909 	      switch (byte_ops[prevpc])
911 		case OPCODE_iconst_0: case OPCODE_iconst_1:
912 		case OPCODE_iconst_2: case OPCODE_iconst_3:
913 		case OPCODE_iconst_4: case OPCODE_iconst_5:
914 		case OPCODE_bipush:  case OPCODE_sipush:
915 		case OPCODE_ldc: case OPCODE_ldc_w:
923 	    type = build_java_array_type (type, int_value);
924 	  pop_type (int_type_node);
928 	case OPCODE_multianewarray:
931 	    index = IMMEDIATE_u2;
935 	      VERIFICATION_ERROR ("number of dimension lower that 1 in multianewarray" );
/* Pop one int length per dimension. */
937 	    for( i = 0; i < ndim; i++ )
938 	      pop_type (int_type_node);
939 	    push_type (get_class_constant (current_jcf, index));
943 	case OPCODE_aconst_null:
944 	  push_type (ptr_type_node);
948 	  pop_type (throwable_type_node);
952 	case OPCODE_checkcast:
953 	  pop_type (ptr_type_node);
954 	  type = get_class_constant (current_jcf, IMMEDIATE_u2);
957 	case OPCODE_instanceof:
958 	  pop_type (ptr_type_node);
959 	  get_class_constant (current_jcf, IMMEDIATE_u2);
960 	  push_type (int_type_node);
/* --- tableswitch / lookupswitch: queue every target as pending --- */
963 	case OPCODE_tableswitch:
967 	    pop_type (int_type_node);
971 	      VERIFICATION_ERROR ("bad alignment in tableswitch pad");
973 	    PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
978 	      VERIFICATION_ERROR ("unsorted low/high value in tableswitch");
980 	    while (low++ <= high)
981 	      PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
986 	case OPCODE_lookupswitch:
988 	    jint npairs, last, not_registered = 1;
990 	    pop_type (int_type_node);
994 	      VERIFICATION_ERROR ("bad alignment in lookupswitch pad");
997 	    PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
998 	    npairs = IMMEDIATE_s4;
1001 	      VERIFICATION_ERROR ("invalid number of targets in lookupswitch");
/* Match values must be strictly ascending. */
1005 		int match = IMMEDIATE_s4;
1008 		else if (last >= match)
1009 		  VERIFICATION_ERROR ("unsorted match value in lookupswitch");
1012 		PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1018 	case OPCODE_monitorenter:
1020 	case OPCODE_monitorexit:
1021 	  pop_type (ptr_type_node);
1025 	  PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
/* --- jsr: enter a (possibly nested) subroutine --- */
1031 	    tree target = lookup_label (oldpc + IMMEDIATE_s2);
1032 	    tree return_label = lookup_label (PC);
1033 	    push_type (return_address_type_node);
1034 	    if (! LABEL_VERIFIED (target))
1036 		/* first time seen */
1037 		tree return_type_map;
1038 		int nlocals = DECL_MAX_LOCALS (current_function_decl);
1039 		index = nlocals + DECL_MAX_STACK (current_function_decl);
/* Initialize the return map: stack slots unknown, locals unused (so we
   can later tell which locals the subroutine actually touches). */
1040 		return_type_map = make_tree_vec (index);
1041 		while (index > nlocals)
1042 		  TREE_VEC_ELT (return_type_map, --index) = TYPE_UNKNOWN;
1044 		  TREE_VEC_ELT (return_type_map, --index) = TYPE_UNUSED;
1045 		LABEL_RETURN_LABEL (target)
1046 		  = build_decl (LABEL_DECL, NULL_TREE, TREE_TYPE (target));
1047 		LABEL_PC (LABEL_RETURN_LABEL (target)) = -1;
1048 		LABEL_RETURN_TYPE_STATE (target) = return_type_map;
1049 		LABEL_IS_SUBR_START (target) = 1;
1050 		LABEL_IN_SUBR (target) = 1;
1051 		LABEL_SUBR_START (target) = target;
1052 		LABEL_SUBR_CONTEXT (target) = current_subr;
1054 	    else if (! LABEL_IS_SUBR_START (target)
1055 		     || LABEL_SUBR_CONTEXT (target) != current_subr)
1056 	      VERIFICATION_ERROR ("label part of different subroutines");
1058 	    i = merge_type_state (target);
1062 	      VERIFICATION_ERROR ("types could not be merged at jsr");
1063 	      push_pending_label (target);
1065 	    current_subr = target;
1067 	    /* Chain return_pc onto LABEL_RETURN_LABELS (target) if needed. */
1068 	    if (! value_member (return_label, LABEL_RETURN_LABELS (target)))
1070 		LABEL_RETURN_LABELS (target)
1071 		  = tree_cons (NULL_TREE, return_label,
1072 			       LABEL_RETURN_LABELS (target));
/* If the subroutine was already fully verified, we can compute the
   post-return state immediately from its return map. */
1075 	    if (LABEL_VERIFIED (target))
1077 		tree return_map = LABEL_RETURN_TYPE_STATE (target);
1078 		int len = TREE_VEC_LENGTH (return_map);
1079 		stack_pointer = len - DECL_MAX_LOCALS (current_function_decl);
1082 		    if (TREE_VEC_ELT (return_map, len) != TYPE_UNUSED)
1083 		      type_map[len] = TREE_VEC_ELT (return_map, len);
1085 		current_subr = LABEL_SUBR_CONTEXT (target);
1086 		PUSH_PENDING (return_label);
/* --- ret: leave the current subroutine --- */
1093 	  if (current_subr == NULL)
1094 	    VERIFICATION_ERROR ("ret instruction not in a jsr subroutine");
1097 	      tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
1098 	      tree caller = LABEL_SUBR_CONTEXT (current_subr);
1099 	      int size = DECL_MAX_LOCALS(current_function_decl)+stack_pointer;
1100 	      index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
1103 	      if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl)
1104 		  || type_map[index] != TYPE_RETURN_ADDR)
1105 		VERIFICATION_ERROR ("invalid ret index");
1107 	      /* The next chunk of code is similar to an inlined version of
1108 	       *     merge_type_state (LABEL_RETURN_LABEL (current_subr)).
1109 	       * The main differences are that LABEL_RETURN_LABEL is
1110 	       * pre-allocated by the jsr (but we don't know the size then);
1111 	       * and that we have to handle TYPE_UNUSED.  */
1113 	      if (! RETURN_MAP_ADJUSTED (ret_map))
1114 		{ /* First return from this subroutine - fix stack pointer. */
1115 		  TREE_VEC_LENGTH (ret_map) = size;
1116 		  for (index = size;  --index >= 0; )
1118 		      if (TREE_VEC_ELT (ret_map, index) != TYPE_UNUSED)
1119 			TREE_VEC_ELT (ret_map, index) = type_map[index];
1121 		  RETURN_MAP_ADJUSTED (ret_map) = 1;
1125 		  if (TREE_VEC_LENGTH (ret_map) != size)
1126 		    VERIFICATION_ERROR ("inconsistent stack size on ret");
1127 		  for (index = 0;  index < size;  index++)
1129 		      tree type = TREE_VEC_ELT (ret_map, index);
1130 		      if (type != TYPE_UNUSED)
1132 			  type = merge_types (type, type_map [index]);
1133 			  TREE_VEC_ELT (ret_map, index) = type;
1134 			  if (type == TYPE_UNKNOWN)
1136 			      if (index >= size - stack_pointer)
1138 				  ("inconsistent types on ret from jsr");
1140 			  else if (TYPE_IS_WIDE (type))
1146 	      /* Check if there are any more pending blocks in this subroutine.
1147 		 Because we push pending blocks in a last-in-first-out order,
1148 		 and because we don't push anything from our caller until we
1149 		 are done with this subroutine or anything nested in it,
1150 		 then we are done if the top of the pending_blocks stack is
1151 		 not in a subroutine, or it is in our caller. */
1152 	      if (pending_blocks == NULL_TREE
1153 		  || ! LABEL_IN_SUBR (pending_blocks)
1154 		  || LABEL_SUBR_START (pending_blocks) == caller)
1156 		  /* Since we are done with this subroutine (i.e. this is the
1157 		     last ret from it), set up the (so far known) return
1158 		     address as pending - with the merged type state. */
1159 		  tmp = LABEL_RETURN_LABELS (current_subr);
1160 		  current_subr = caller;
1161 		  for ( ; tmp != NULL_TREE;  tmp = TREE_CHAIN (tmp))
1163 		      tree return_label = TREE_VALUE (tmp);
1164 		      tree return_state = LABEL_TYPE_STATE (return_label);
1165 		      if (return_state == NULL_TREE)
1167 			  /* This means we had not verified the
1168 			     subroutine earlier, so this is the first jsr to
1169 			     call it.  In this case, the type_map of the return
1170 			     address is just the current type_map - and that
1171 			     is handled by the following PUSH_PENDING. */
1175 			  /* In this case we have to do a merge.  But first
1176 			     restore the type_map for unused slots to those
1177 			     that were in effect at the jsr. */
1178 			  for (index = size;  --index >= 0; )
1180 			      type_map[index] = TREE_VEC_ELT (ret_map, index);
1181 			      if (type_map[index] == TYPE_UNUSED)
1183 				  = TREE_VEC_ELT (return_state, index);
1186 		      PUSH_PENDING (return_label);
1194 	  error ("unknown opcode %d@pc=%d during verification", op_code, PC-1);
1200       /* The following test is true if we have entered or exited an exception
1201 	 handler range *or* we have done a store to a local variable.
1202 	 In either case we need to consider any exception handlers that
1203 	 might "follow" this instruction. */
1205       if (eh_ranges != prev_eh_ranges)
1207 	  int save_stack_pointer = stack_pointer;
1208 	  int index = DECL_MAX_LOCALS (current_function_decl);
1209 	  tree save_type = type_map[index];
1210 	  tree save_current_subr = current_subr;
1211 	  struct eh_range *ranges = find_handler (oldpc);
/* Simulate the stack state at handler entry for every enclosing range. */
1213 	  for ( ;  ranges != NULL_EH_RANGE;  ranges = ranges->outer)
1215 	      tree chain = ranges->handlers;
1217 	      /* We need to determine if the handler is part of current_subr.
1218 		 There are two cases:  (1) The exception catch range
1219 		 is entirely within current_subr.  In that case the handler
1220 		 is also part of current_subr.
1221 		 (2) Some of the catch range is not in current_subr.
1222 		 In that case, the handler is *not* part of current_subr.
1224 		 Figuring out which is the case is not necessarily obvious,
1225 		 in the presence of clever code generators (and obfuscators).
1226 		 We make a simplifying assumption that in case (2) we
1227 		 have that the current_subr is entirely within the catch range.
1228 		 In that case we can assume that if a caller (the jsr) of
1229 		 a subroutine is within the catch range, then the handler is
1230 		 *not* part of the subroutine, and vice versa. */
1232 	      current_subr = save_current_subr;
1233 	      for ( ; current_subr != NULL_TREE;
1234 		    current_subr = LABEL_SUBR_CONTEXT (current_subr))
1236 		  tree return_labels = LABEL_RETURN_LABELS (current_subr);
1237 		  /* There could be multiple return_labels, but
1238 		     we only need to check one. */
1239 		  int return_pc = LABEL_PC (TREE_VALUE (return_labels));
1240 		  if (return_pc <= ranges->start_pc
1241 		      || return_pc > ranges->end_pc)
1245 	      for ( ;  chain != NULL_TREE;  chain = TREE_CHAIN (chain))
1247 		  tree handler = TREE_VALUE (chain);
1248 		  tree type = TREE_PURPOSE (chain);
1249 		  if (type == NULL_TREE)  /* a finally handler */
1250 		    type = throwable_type_node;
/* At handler entry the operand stack holds only the thrown object;
   it is modeled via type_map[index] (the slot past the locals). */
1251 		  type_map[index] = promote_type (type);
1253 		  PUSH_PENDING (handler);
1256 	  stack_pointer = save_stack_pointer;
1257 	  current_subr = save_current_subr;
1258 	  type_map[index] = save_type;
1259       prev_eh_ranges = eh_ranges;
/* Shared error exits (labels bad_pc / verify_error are in gaps). */
1264   message = "program counter out of range";
1267   error ("verification error at PC=%d: %s", oldpc, message);