1 /* Handle verification of bytecoded methods for the GNU compiler for
3 Copyright (C) 1997, 1998, 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA.
22 Java and all Java-based marks are trademarks or registered trademarks
23 of Sun Microsystems, Inc. in the United States and other countries.
24 The Free Software Foundation is independent of Sun Microsystems, Inc. */
29 #include "java-tree.h"
31 #include "java-opcodes.h"
33 #include "java-except.h"
36 static void push_pending_label PARAMS ((tree));
37 static tree merge_types PARAMS ((tree, tree));
38 static const char *check_pending_block PARAMS ((tree));
39 static void type_stack_dup PARAMS ((int, int));
40 static int start_pc_cmp PARAMS ((const PTR, const PTR));
42 extern int stack_pointer;
44 /* During verification, start of the current subroutine (jsr target). */
47 /* A list of pending blocks, chained using LABEL_PENDING_CHAIN.
48 A pending block is one that has LABEL_CHANGED set, which means
49 it requires (re-) verification. */
52 /* Append TARGET_LABEL to the pending_block stack unless already in it. */
/* Queue TARGET_LABEL on the pending_blocks work-list unless it is
   already queued.  LABEL_CHANGED doubles as the "is queued" flag, so a
   label can appear on the list at most once; the list itself is chained
   through LABEL_PENDING_CHAIN.
   NOTE(review): this listing is a numbered partial dump — the parameter
   declaration and braces (original lines 56-57, 59, 63+) are not
   visible here.  */
55 push_pending_label (target_label)
58   if (! LABEL_CHANGED (target_label))
/* Link the label onto the head of the stack and mark it queued.  */
60       LABEL_PENDING_CHAIN (target_label) = pending_blocks;
61       pending_blocks = target_label;
62       LABEL_CHANGED (target_label) = 1;
66 /* Note that TARGET_LABEL is a possible successor instruction.
67 Merge the type state etc.
68    Return NULL on success, or an error message on failure. */
/* Merge the current type state into TARGET_LABEL and, when the merge
   changed anything, re-queue the label for verification.  Also enforce
   the subroutine (jsr/ret) control-transfer rules.  Returns NULL on
   success, or a static error-message string on failure.
   NOTE(review): numbered partial dump — several original lines
   (braces, the `if (changed ...)` guard, etc.) are missing from view,
   so the exact control structure must be confirmed against the full
   file.  */
71 check_pending_block (target_label)
74   int changed = merge_type_state (target_label);
/* merge_type_state returns -1 on an incompatible merge.  */
79     return "types could not be merged";
80   push_pending_label (target_label);
/* Outside any subroutine: jumping *into* one is illegal.  */
83   if (current_subr == NULL)
85       if (LABEL_IN_SUBR (target_label))
86 return "might transfer control into subroutine";
/* Inside a subroutine: the target must belong to the same one.  */
90       if (LABEL_IN_SUBR (target_label))
92 	  if (LABEL_SUBR_START (target_label) != current_subr)
93 	    return "transfer out of subroutine";
/* First time we reach this label from inside the subroutine: claim it.  */
95       else if (! LABEL_VERIFIED (target_label))
97 	  LABEL_IN_SUBR (target_label) = 1;
98 	  LABEL_SUBR_START (target_label) = current_subr;
101 	return "transfer out of subroutine";
106 /* Return the "merged" types of TYPE1 and TYPE2.
107 If either is primitive, the other must match (after promotion to int).
108 For reference types, return the common super-class.
109 Return TYPE_UNKNOWN if the types cannot be merged. */
/* Compute the verification-time "merge" of TYPE1 and TYPE2:
   - TYPE_UNKNOWN / TYPE_RETURN_ADDR poison the merge (handled first);
   - for two reference types, the result is the most specific common
     reference type (null ref is compatible with anything; arrays merge
     element-wise; interfaces fall back to java.lang.Object, matching
     Sun's verifier);
   - for two small integral types, the result is int;
   - otherwise the merge fails (TYPE_UNKNOWN per the header comment).
   NOTE(review): numbered partial dump — the declarations of tt1/tt2/
   depth1/depth2, several braces, and some comment terminators are not
   visible; confirm structure against the full file.  */
112 merge_types (type1, type2)
117   if (type1 == TYPE_UNKNOWN || type2 == TYPE_UNKNOWN
118       || type1 == TYPE_RETURN_ADDR || type2 == TYPE_RETURN_ADDR)
120   if (TREE_CODE (type1) == POINTER_TYPE && TREE_CODE (type2) == POINTER_TYPE)
124       /* ptr_type_node is only used for a null reference,
125 	 which is compatible with any reference type. */
126       if (type1 == ptr_type_node || type2 == object_ptr_type_node)
128       if (type2 == ptr_type_node || type1 == object_ptr_type_node)
/* Strip the handle indirection to get at the underlying class types.  */
131       tt1 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type1));
132       tt2 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type2));
134       /* If tt{1,2} haven't been properly loaded, now is a good time
136       if (!TYPE_SIZE (tt1))
139 	  safe_layout_class (tt1);
142       if (!TYPE_SIZE (tt2))
145 	  safe_layout_class (tt2);
/* Array/array merges recurse on the element type; array/non-array
   merges fall back to java.lang.Object.  */
148       if (TYPE_ARRAY_P (tt1) || TYPE_ARRAY_P (tt2))
150 	  if (TYPE_ARRAY_P (tt1) == TYPE_ARRAY_P (tt2))
152 	      tree el_type1 = TYPE_ARRAY_ELEMENT (tt1);
153 	      tree el_type2 = TYPE_ARRAY_ELEMENT (tt2);
154 	      tree el_type = NULL_TREE;
155 	      if (el_type1 == el_type2)
157 	      else if (TREE_CODE (el_type1) == POINTER_TYPE
158 		       && TREE_CODE (el_type2) == POINTER_TYPE)
159 		el_type = merge_types (el_type1, el_type2);
160 	      if (el_type != NULL_TREE)
162 		  HOST_WIDE_INT len1 = java_array_type_length (tt1);
163 		  HOST_WIDE_INT len2 = java_array_type_length (tt2);
166 		  else if (el_type1 == el_type2)
168 		  return promote_type (build_java_array_type (el_type, len1));
171 	  return object_ptr_type_node;
174       if (CLASS_INTERFACE (TYPE_NAME (tt1)))
176 	  /* FIXME: should see if two interfaces have a common
178 	  if (CLASS_INTERFACE (TYPE_NAME (tt2)))
180 	      /* This is a kludge, but matches what Sun's verifier does.
181 		 It can be tricked, but is safe as long as type errors
182 		 (i.e. interface method calls) are caught at run-time. */
183 	      return object_ptr_type_node;
187 	      if (can_widen_reference_to (tt2, tt1))
190 		return object_ptr_type_node;
193       else if (CLASS_INTERFACE (TYPE_NAME (tt2)))
195 	  if (can_widen_reference_to (tt1, tt2))
198 	    return object_ptr_type_node;
/* Neither is an interface: walk both up the superclass chain to a
   common depth, then in lock-step until they meet at the common
   ancestor.  */
204       depth1 = class_depth (type1);
205       depth2 = class_depth (type2);
206       for ( ; depth1 > depth2; depth1--)
207 	type1 = TYPE_BINFO_BASETYPE (type1, 0);
208       for ( ; depth2 > depth1; depth2--)
209 	type2 = TYPE_BINFO_BASETYPE (type2, 0);
210       while (type1 != type2)
212 	  type1 = TYPE_BINFO_BASETYPE (type1, 0);
213 	  type2 = TYPE_BINFO_BASETYPE (type2, 0);
215       return promote_type (type1);
/* Non-reference case: any two <=32-bit integral types merge to int.  */
217   if (INTEGRAL_TYPE_P (type1) && INTEGRAL_TYPE_P (type2)
218       && TYPE_PRECISION (type1) <= 32 && TYPE_PRECISION (type2) <= 32)
219     return int_type_node;
223 /* Merge the current type state with that at LABEL.
224    Return -1 if the states are incompatible (i.e. on error),
225 0 if there was no change, and 1 if there was a change. */
/* Merge the current type_map/stack state into LABEL's saved state
   (LABEL_TYPE_STATE).  First visit simply copies the current state;
   subsequent visits merge element-wise via merge_types.  Per the header
   comment this returns -1 on incompatibility, 0 for no change, 1 for a
   change.  For a subroutine start label, slots the subroutine neither
   sets nor uses (TYPE_UNUSED in its return map) do not count as
   changes.
   NOTE(review): numbered partial dump — declarations of i/return_map,
   the return statements, and several braces are not visible; the
   cur_length/TREE_VEC_LENGTH comparison at original line 255 relies on
   code not shown here.  */
228 merge_type_state (label)
231   int nlocals = DECL_MAX_LOCALS (current_function_decl);
/* Combined length: locals followed by the operand stack.  */
232   int cur_length = stack_pointer + nlocals;
233   tree vec = LABEL_TYPE_STATE (label);
/* First time at this label: snapshot the whole current state.  */
235   if (vec == NULL_TREE || !LABEL_VERIFIED (label))
239       vec = make_tree_vec (cur_length);
240       LABEL_TYPE_STATE (label) = vec;
242       while (--cur_length >= 0)
243 	TREE_VEC_ELT (vec, cur_length) = type_map [cur_length];
/* Re-visit: if LABEL starts a subroutine we are not currently in,
   consult its return map so unused locals can be ignored below.  */
250       if (LABEL_IS_SUBR_START (label) && LABEL_VERIFIED (label)
251 	  && current_subr != label)
252 	return_map = LABEL_RETURN_TYPE_STATE (label);
254 	return_map = NULL_TREE;
/* A stack-depth mismatch between the two states is an error.  */
255       if (TREE_VEC_LENGTH (vec) != cur_length)
259       for (i = 0; i < cur_length; i++)
261 	  tree old_type = TREE_VEC_ELT (vec, i);
262 	  tree new_type = merge_types (old_type, type_map [i]);
263 	  if (TREE_VEC_ELT (vec, i) != new_type)
265 	      /* If there has been a change, note that since we must re-verify.
266 		 However, if the label is the start of a subroutine,
267 		 we don't care about local variables that are neither
268 		 set nor used in the sub-routine. */
269 	      if (return_map == NULL_TREE || i >= nlocals
270 		  || TREE_VEC_ELT (return_map, i) != TYPE_UNUSED
271 		  || (TYPE_IS_WIDE (new_type)
272 		      && TREE_VEC_ELT (return_map, i+1) != TYPE_UNUSED))
275 	  TREE_VEC_ELT (vec, i) = new_type;
/* TYPE_UNKNOWN from merge_types signals an incompatible merge;
   wide (long/double) entries occupy a second slot checked next.  */
276 	  if (new_type == TYPE_UNKNOWN)
281 	  else if (TYPE_IS_WIDE (new_type))
288 /* Handle dup-like operations. */
/* Implement the dup family (dup, dup_x1, dup_x2, dup2, dup2_x1,
   dup2_x2) on the verifier's type stack: pop SIZE+OFFSET entries,
   then push the top SIZE entries followed by all SIZE+OFFSET entries,
   duplicating the top SIZE.  void_type_node marks the second slot of a
   wide (long/double) value; a dup boundary may not fall inside such a
   pair.
   NOTE(review): numbered partial dump — the declarations of index and
   the type[] scratch array, plus several braces, are not visible
   here.  */
291 type_stack_dup (size, offset)
296   if (size + offset > stack_pointer)
297     error ("stack underflow - dup* operation");
/* Pop the affected entries into type[], top of stack first.  */
298   for (index = 0; index < size + offset; index++)
300       type[index] = stack_type_map[stack_pointer - 1];
/* void_type_node = second half of a wide value: fetch the real
   type from the slot below and ensure the pair is not split.  */
301       if (type[index] == void_type_node)
304 	  type[index] = stack_type_map[stack_pointer - 2];
305 	  if (! TYPE_IS_WIDE (type[index]))
306 	    fatal ("internal error - dup operation");
307 	  if (index == size || index == size + offset)
308 	    fatal ("dup operation splits 64-bit number");
310       pop_type (type[index]);
/* Push the duplicated top SIZE entries...  */
312   for (index = size; --index >= 0; )
314       if (type[index] != void_type_node)
315 	push_type (type[index]);
/* ...then re-push the original SIZE+OFFSET entries.  */
318   for (index = size + offset; --index >= 0; )
320       if (type[index] != void_type_node)
321 	push_type (type[index]);
325 /* This keeps track of a start PC and corresponding initial index. */
332 /* A helper that is used when sorting exception ranges. */
/* qsort comparator for struct pc_index: orders exception-table entries
   by ascending start_pc.  start_pc values are u2 bytecode offsets
   (0..65535), so the subtraction cannot overflow int.
   NOTE(review): numbered partial dump — the PARAMS-style parameter
   declarations and braces are not visible here.  */
334 start_pc_cmp (xp, yp)
338   const struct pc_index *x = (const struct pc_index *) xp;
339   const struct pc_index *y = (const struct pc_index *) yp;
340   return x->start_pc - y->start_pc;
343 /* This causes the next iteration to ignore the next instruction
344 and look for some other unhandled instruction. */
345 #define INVALIDATE_PC (prevpc = -1, oldpc = PC, PC = INVALID_PC)
346 #define INVALID_PC (-1)
348 #define VERIFICATION_ERROR(MESSAGE) \
349 do { message = MESSAGE; goto verify_error; } while (0)
351 #define PUSH_PENDING(LABEL) \
352 do { if ((message = check_pending_block (LABEL)) != NULL) \
353 goto verify_error; } while (0)
356 #define CHECK_PC_IN_RANGE(PC) ({if (PC < 0 || PC > length) goto bad_pc; (void)1;})
358 #define CHECK_PC_IN_RANGE(PC) (PC < 0 || PC > length ? \
359 (fatal("Bad byte codes.\n"), 0) : 1)
362 #define BCODE byte_ops
364 /* Verify the bytecodes of the current method.
365    Return 1 on success, 0 on failure. */
367 verify_jvm_instructions (jcf, byte_ops, length)
369 const unsigned char *byte_ops;
376 int oldpc = 0; /* PC of start of instruction. */
377 int prevpc = 0; /* If >= 0, PC of previous instruction. */
380 register unsigned char *p;
381 struct eh_range *prev_eh_ranges = NULL_EH_RANGE;
382 struct eh_range *eh_ranges;
383 tree return_type = TREE_TYPE (TREE_TYPE (current_function_decl));
384 struct pc_index *starts;
389 pending_blocks = NULL_TREE;
391 /* Handle the exception table. */
392 method_init_exceptions ();
393 JCF_SEEK (jcf, DECL_CODE_OFFSET (current_function_decl) + length);
394 eh_count = JCF_readu2 (jcf);
396 /* We read the exception handlers in order of increasing start PC.
397 To do this we first read and sort the start PCs. */
398 starts = (struct pc_index *) xmalloc (eh_count * sizeof (struct pc_index));
399 for (i = 0; i < eh_count; ++i)
401 starts[i].start_pc = GET_u2 (jcf->read_ptr + 8 * i);
404 qsort (starts, eh_count, sizeof (struct pc_index), start_pc_cmp);
406 for (i = 0; i < eh_count; ++i)
408 int start_pc, end_pc, handler_pc, catch_type;
410 p = jcf->read_ptr + 8 * starts[i].index;
412 start_pc = GET_u2 (p);
413 end_pc = GET_u2 (p+2);
414 handler_pc = GET_u2 (p+4);
415 catch_type = GET_u2 (p+6);
417 if (start_pc < 0 || start_pc >= length
418 || end_pc < 0 || end_pc > length || start_pc >= end_pc
419 || handler_pc < 0 || handler_pc >= length
420 || (handler_pc >= start_pc && handler_pc < end_pc)
421 || ! (instruction_bits [start_pc] & BCODE_INSTRUCTION_START)
422 || (end_pc < length &&
423 ! (instruction_bits [end_pc] & BCODE_INSTRUCTION_START))
424 || ! (instruction_bits [handler_pc] & BCODE_INSTRUCTION_START))
426 error ("bad pc in exception_table");
431 add_handler (start_pc, end_pc,
432 lookup_label (handler_pc),
433 catch_type == 0 ? NULL_TREE
434 : get_class_constant (jcf, catch_type));
436 instruction_bits [handler_pc] |= BCODE_EXCEPTION_TARGET;
440 handle_nested_ranges ();
446 if (((PC != INVALID_PC
447 && instruction_bits [PC] & BCODE_TARGET) != 0)
450 PUSH_PENDING (lookup_label (PC));
453 /* Check if there are any more pending blocks in the current
454 subroutine. Because we push pending blocks in a
455 last-in-first-out order, and because we don't push anything
456 from our caller until we are done with this subroutine or
457 anything nested in it, then we are done if the top of the
458 pending_blocks stack is not in a subroutine, or it is in our
463 tree caller = LABEL_SUBR_CONTEXT (current_subr);
465 if (pending_blocks == NULL_TREE
466 || ! LABEL_IN_SUBR (pending_blocks)
467 || LABEL_SUBR_START (pending_blocks) == caller)
469 int size = DECL_MAX_LOCALS(current_function_decl)+stack_pointer;
470 tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
471 tmp = LABEL_RETURN_LABELS (current_subr);
473 /* FIXME: If we exit a subroutine via a throw, we might
474 have returned to an earlier caller. Obviously a
475 "ret" can only return one level, but a throw may
476 return many levels.*/
477 current_subr = caller;
479 if (RETURN_MAP_ADJUSTED (ret_map))
481 /* Since we are done with this subroutine , set up
482 the (so far known) return address as pending -
483 with the merged type state. */
484 for ( ; tmp != NULL_TREE; tmp = TREE_CHAIN (tmp))
486 tree return_label = TREE_VALUE (tmp);
487 tree return_state = LABEL_TYPE_STATE (return_label);
488 if (return_state == NULL_TREE)
490 /* This means means we had not verified the
491 subroutine earlier, so this is the first jsr to
492 call it. In this case, the type_map of the return
493 address is just the current type_map - and that
494 is handled by the following PUSH_PENDING. */
498 /* In this case we have to do a merge. But first
499 restore the type_map for unused slots to those
500 that were in effect at the jsr. */
501 for (index = size; --index >= 0; )
503 type_map[index] = TREE_VEC_ELT (ret_map, index);
504 if (type_map[index] == TYPE_UNUSED)
506 = TREE_VEC_ELT (return_state, index);
509 PUSH_PENDING (return_label);
514 if (PC == INVALID_PC)
516 label = pending_blocks;
517 if (label == NULL_TREE)
518 break; /* We're done! */
519 pending_blocks = LABEL_PENDING_CHAIN (label);
520 LABEL_CHANGED (label) = 0;
522 if (LABEL_IN_SUBR (label))
523 current_subr = LABEL_SUBR_START (label);
525 current_subr = NULL_TREE;
527 /* Restore type_map and stack_pointer from
528 LABEL_TYPE_STATE (label), and continue
529 compiling from there. */
530 load_type_state (label);
531 PC = LABEL_PC (label);
533 else if (PC >= length)
534 VERIFICATION_ERROR ("falling through end of method");
536 /* fprintf (stderr, "** %d\n", PC); */
540 if (!(instruction_bits [PC] & BCODE_INSTRUCTION_START) && ! wide)
541 VERIFICATION_ERROR ("PC not at instruction start");
543 instruction_bits[PC] |= BCODE_VERIFIED;
545 eh_ranges = find_handler (oldpc);
547 op_code = byte_ops[PC++];
550 int is_static, is_putting;
553 case OPCODE_iconst_m1:
554 case OPCODE_iconst_0: case OPCODE_iconst_1: case OPCODE_iconst_2:
555 case OPCODE_iconst_3: case OPCODE_iconst_4: case OPCODE_iconst_5:
556 i = op_code - OPCODE_iconst_0;
559 if (byte_ops[PC] == OPCODE_newarray
560 || byte_ops[PC] == OPCODE_newarray)
562 push_type (int_type_node); break;
563 case OPCODE_lconst_0: case OPCODE_lconst_1:
564 push_type (long_type_node); break;
565 case OPCODE_fconst_0: case OPCODE_fconst_1: case OPCODE_fconst_2:
566 push_type (float_type_node); break;
567 case OPCODE_dconst_0: case OPCODE_dconst_1:
568 push_type (double_type_node); break;
575 case OPCODE_iload: type = int_type_node; goto general_load;
576 case OPCODE_lload: type = long_type_node; goto general_load;
577 case OPCODE_fload: type = float_type_node; goto general_load;
578 case OPCODE_dload: type = double_type_node; goto general_load;
579 case OPCODE_aload: type = ptr_type_node; goto general_load;
581 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
584 case OPCODE_iload_0: type = int_type_node; index = 0; goto load;
585 case OPCODE_iload_1: type = int_type_node; index = 1; goto load;
586 case OPCODE_iload_2: type = int_type_node; index = 2; goto load;
587 case OPCODE_iload_3: type = int_type_node; index = 3; goto load;
588 case OPCODE_lload_0: type = long_type_node; index = 0; goto load;
589 case OPCODE_lload_1: type = long_type_node; index = 1; goto load;
590 case OPCODE_lload_2: type = long_type_node; index = 2; goto load;
591 case OPCODE_lload_3: type = long_type_node; index = 3; goto load;
592 case OPCODE_fload_0: type = float_type_node; index = 0; goto load;
593 case OPCODE_fload_1: type = float_type_node; index = 1; goto load;
594 case OPCODE_fload_2: type = float_type_node; index = 2; goto load;
595 case OPCODE_fload_3: type = float_type_node; index = 3; goto load;
596 case OPCODE_dload_0: type = double_type_node; index = 0; goto load;
597 case OPCODE_dload_1: type = double_type_node; index = 1; goto load;
598 case OPCODE_dload_2: type = double_type_node; index = 2; goto load;
599 case OPCODE_dload_3: type = double_type_node; index = 3; goto load;
600 case OPCODE_aload_0: type = ptr_type_node; index = 0; goto load;
601 case OPCODE_aload_1: type = ptr_type_node; index = 1; goto load;
602 case OPCODE_aload_2: type = ptr_type_node; index = 2; goto load;
603 case OPCODE_aload_3: type = ptr_type_node; index = 3; goto load;
606 || (index + TYPE_IS_WIDE (type)
607 >= DECL_MAX_LOCALS (current_function_decl)))
608 VERIFICATION_ERROR ("invalid local variable index in load");
609 tmp = type_map[index];
610 if (tmp == TYPE_UNKNOWN || tmp == TYPE_SECOND
611 || (TYPE_IS_WIDE (type)
612 && type_map[index+1] != void_type_node)
613 || (type == ptr_type_node
614 ? TREE_CODE (tmp) != POINTER_TYPE
615 : type == int_type_node
616 ? (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
618 VERIFICATION_ERROR("invalid local variable type in load");
621 case OPCODE_istore: type = int_type_node; goto general_store;
622 case OPCODE_lstore: type = long_type_node; goto general_store;
623 case OPCODE_fstore: type = float_type_node; goto general_store;
624 case OPCODE_dstore: type = double_type_node; goto general_store;
625 case OPCODE_astore: type = ptr_type_node; goto general_store;
627 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
630 case OPCODE_istore_0: type = int_type_node; index = 0; goto store;
631 case OPCODE_istore_1: type = int_type_node; index = 1; goto store;
632 case OPCODE_istore_2: type = int_type_node; index = 2; goto store;
633 case OPCODE_istore_3: type = int_type_node; index = 3; goto store;
634 case OPCODE_lstore_0: type = long_type_node; index=0; goto store;
635 case OPCODE_lstore_1: type = long_type_node; index=1; goto store;
636 case OPCODE_lstore_2: type = long_type_node; index=2; goto store;
637 case OPCODE_lstore_3: type = long_type_node; index=3; goto store;
638 case OPCODE_fstore_0: type=float_type_node; index=0; goto store;
639 case OPCODE_fstore_1: type=float_type_node; index=1; goto store;
640 case OPCODE_fstore_2: type=float_type_node; index=2; goto store;
641 case OPCODE_fstore_3: type=float_type_node; index=3; goto store;
642 case OPCODE_dstore_0: type=double_type_node; index=0; goto store;
643 case OPCODE_dstore_1: type=double_type_node; index=1; goto store;
644 case OPCODE_dstore_2: type=double_type_node; index=2; goto store;
645 case OPCODE_dstore_3: type=double_type_node; index=3; goto store;
646 case OPCODE_astore_0: type = ptr_type_node; index = 0; goto store;
647 case OPCODE_astore_1: type = ptr_type_node; index = 1; goto store;
648 case OPCODE_astore_2: type = ptr_type_node; index = 2; goto store;
649 case OPCODE_astore_3: type = ptr_type_node; index = 3; goto store;
652 || (index + TYPE_IS_WIDE (type)
653 >= DECL_MAX_LOCALS (current_function_decl)))
655 VERIFICATION_ERROR ("invalid local variable index in store");
658 type = pop_type (type);
659 type_map[index] = type;
661 /* If local variable changed, we need to reconsider eh handlers. */
662 prev_eh_ranges = NULL_EH_RANGE;
664 /* Allocate decl and rtx for this variable now, so if we're not
665 optmizing, we get a temporary that survives the whole method. */
666 find_local_variable (index, type, oldpc);
668 if (TYPE_IS_WIDE (type))
669 type_map[index+1] = TYPE_SECOND;
670 /* ... fall through to note_used ... */
672 /* For store or load, note that local variable INDEX is used.
673 This is needed to verify try-finally sub-routines. */
676 tree vec = LABEL_RETURN_TYPE_STATE (current_subr);
677 tree subr_vec = LABEL_TYPE_STATE (current_subr);
678 int len = 1 + TYPE_IS_WIDE (type);
681 if (TREE_VEC_ELT (vec, index) == TYPE_UNUSED)
682 TREE_VEC_ELT (vec, index) = TREE_VEC_ELT (subr_vec, index);
697 type = int_type_node; goto binop;
702 type = int_type_node; goto unop;
711 type = long_type_node; goto binop;
713 type = long_type_node; goto unop;
714 case OPCODE_fadd: case OPCODE_fsub:
715 case OPCODE_fmul: case OPCODE_fdiv: case OPCODE_frem:
716 type = float_type_node; goto binop;
718 type = float_type_node; goto unop;
719 case OPCODE_dadd: case OPCODE_dsub:
720 case OPCODE_dmul: case OPCODE_ddiv: case OPCODE_drem:
721 type = double_type_node; goto binop;
723 type = double_type_node; goto unop;
736 pop_type (int_type_node);
737 pop_type (long_type_node);
738 push_type (long_type_node);
741 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
744 if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl))
745 VERIFICATION_ERROR ("invalid local variable index in iinc");
746 tmp = type_map[index];
747 if (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
748 VERIFICATION_ERROR ("invalid local variable type in iinc");
751 pop_type (int_type_node); push_type (long_type_node); break;
753 pop_type (int_type_node); push_type (float_type_node); break;
755 pop_type (int_type_node); push_type (double_type_node); break;
757 pop_type (long_type_node); push_type (int_type_node); break;
759 pop_type (long_type_node); push_type (float_type_node); break;
761 pop_type (long_type_node); push_type (double_type_node); break;
763 pop_type (float_type_node); push_type (int_type_node); break;
765 pop_type (float_type_node); push_type (long_type_node); break;
767 pop_type (float_type_node); push_type (double_type_node); break;
769 pop_type (double_type_node); push_type (int_type_node); break;
771 pop_type (double_type_node); push_type (long_type_node); break;
773 pop_type (double_type_node); push_type (float_type_node); break;
775 type = long_type_node; goto compare;
778 type = float_type_node; goto compare;
781 type = double_type_node; goto compare;
783 pop_type (type); pop_type (type);
784 push_type (int_type_node); break;
791 pop_type (int_type_node); goto cond;
793 case OPCODE_ifnonnull:
794 pop_type (ptr_type_node ); goto cond;
795 case OPCODE_if_icmpeq:
796 case OPCODE_if_icmpne:
797 case OPCODE_if_icmplt:
798 case OPCODE_if_icmpge:
799 case OPCODE_if_icmpgt:
800 case OPCODE_if_icmple:
801 pop_type (int_type_node); pop_type (int_type_node); goto cond;
802 case OPCODE_if_acmpeq:
803 case OPCODE_if_acmpne:
804 pop_type (object_ptr_type_node); pop_type (object_ptr_type_node);
807 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
810 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
814 switch (byte_ops[PC])
816 case OPCODE_iload: case OPCODE_lload:
817 case OPCODE_fload: case OPCODE_dload: case OPCODE_aload:
818 case OPCODE_istore: case OPCODE_lstore:
819 case OPCODE_fstore: case OPCODE_dstore: case OPCODE_astore:
825 VERIFICATION_ERROR ("invalid use of wide instruction");
828 case OPCODE_return: type = void_type_node; goto ret;
830 if ((TREE_CODE (return_type) == BOOLEAN_TYPE
831 || TREE_CODE (return_type) == CHAR_TYPE
832 || TREE_CODE (return_type) == INTEGER_TYPE)
833 && TYPE_PRECISION (return_type) <= 32)
838 case OPCODE_lreturn: type = long_type_node; goto ret;
839 case OPCODE_freturn: type = float_type_node; goto ret;
840 case OPCODE_dreturn: type = double_type_node; goto ret;
842 if (TREE_CODE (return_type) == POINTER_TYPE)
848 if (type != return_type)
849 VERIFICATION_ERROR ("incorrect ?return opcode");
850 if (type != void_type_node)
852 if (pop_type_0 (type) == NULL_TREE)
853 VERIFICATION_ERROR ("return value has wrong type");
857 case OPCODE_getstatic: is_putting = 0; is_static = 1; goto field;
858 case OPCODE_putstatic: is_putting = 1; is_static = 1; goto field;
859 case OPCODE_getfield: is_putting = 0; is_static = 0; goto field;
860 case OPCODE_putfield: is_putting = 1; is_static = 0; goto field;
863 int index = IMMEDIATE_u2;
864 tree field_signature = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool, index);
865 tree field_type = get_type_from_signature (field_signature);
867 pop_type (field_type);
870 int clindex = COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool,
872 tree self_type = get_class_constant (current_jcf, clindex);
873 /* Defer actual checking until next pass. */
874 if (pop_type_0 (self_type) == NULL_TREE)
875 VERIFICATION_ERROR ("incorrect type for field reference");
878 push_type (field_type);
882 push_type (get_class_constant (jcf, IMMEDIATE_u2));
884 case OPCODE_dup: type_stack_dup (1, 0); break;
885 case OPCODE_dup_x1: type_stack_dup (1, 1); break;
886 case OPCODE_dup_x2: type_stack_dup (1, 2); break;
887 case OPCODE_dup2: type_stack_dup (2, 0); break;
888 case OPCODE_dup2_x1: type_stack_dup (2, 1); break;
889 case OPCODE_dup2_x2: type_stack_dup (2, 2); break;
890 case OPCODE_pop: index = 1; goto pop;
891 case OPCODE_pop2: index = 2; goto pop;
893 if (stack_pointer < index)
894 VERIFICATION_ERROR ("stack underflow");
895 stack_pointer -= index;
898 if (stack_pointer < 2)
899 VERIFICATION_ERROR ("stack underflow (in swap)");
902 tree type1 = stack_type_map[stack_pointer - 1];
903 tree type2 = stack_type_map[stack_pointer - 2];
904 if (type1 == void_type_node || type2 == void_type_node)
905 VERIFICATION_ERROR ("verifier (swap): double or long value");
906 stack_type_map[stack_pointer - 2] = type1;
907 stack_type_map[stack_pointer - 1] = type2;
910 case OPCODE_ldc: index = IMMEDIATE_u1; goto ldc;
913 index = IMMEDIATE_u2; goto ldc;
915 if (index <= 0 || index >= JPOOL_SIZE(current_jcf))
916 VERIFICATION_ERROR ("bad constant pool index in ldc");
918 switch (JPOOL_TAG (current_jcf, index) & ~CONSTANT_ResolvedFlag)
920 case CONSTANT_Integer: type = int_type_node; goto check_ldc;
921 case CONSTANT_Float: type = float_type_node; goto check_ldc;
922 case CONSTANT_String: type = string_type_node; goto check_ldc;
923 case CONSTANT_Long: type = long_type_node; goto check_ldc;
924 case CONSTANT_Double: type = double_type_node; goto check_ldc;
926 if (TYPE_IS_WIDE (type) == (op_code == OPCODE_ldc2_w))
928 /* ... else fall through ... */
930 VERIFICATION_ERROR ("bad constant pool tag in ldc");
932 if (type == int_type_node)
934 i = TREE_INT_CST_LOW (get_constant (current_jcf, index));
940 case OPCODE_invokevirtual:
941 case OPCODE_invokespecial:
942 case OPCODE_invokestatic:
943 case OPCODE_invokeinterface:
945 int index = IMMEDIATE_u2;
946 tree sig = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool, index);
947 tree self_type = get_class_constant
948 (current_jcf, COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool,
950 tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool, index);
952 method_type = parse_signature_string (IDENTIFIER_POINTER (sig),
953 IDENTIFIER_LENGTH (sig));
954 if (TREE_CODE (method_type) != FUNCTION_TYPE)
955 VERIFICATION_ERROR ("bad method signature");
956 pop_argument_types (TYPE_ARG_TYPES (method_type));
958 /* Can't invoke <clinit> */
959 if (ID_CLINIT_P (method_name))
960 VERIFICATION_ERROR ("invoke opcode can't invoke <clinit>");
961 /* Apart invokespecial, can't invoke <init> */
962 if (op_code != OPCODE_invokespecial && ID_INIT_P (method_name))
963 VERIFICATION_ERROR ("invoke opcode can't invoke <init>");
965 if (op_code != OPCODE_invokestatic)
966 pop_type (self_type);
970 case OPCODE_invokeinterface:
972 int nargs = IMMEDIATE_u1;
973 int notZero = IMMEDIATE_u1;
975 if (!nargs || notZero)
977 ("invalid argument number in invokeinterface");
982 if (TREE_TYPE (method_type) != void_type_node)
983 push_type (TREE_TYPE (method_type));
987 case OPCODE_arraylength:
988 /* Type checking actually made during code generation */
989 pop_type( ptr_type_node );
990 push_type( int_type_node );
993 /* Q&D verification *or* more checking done during code generation
994 for byte/boolean/char/short, the value popped is a int coerced
995 into the right type before being stored. */
996 case OPCODE_iastore: type = int_type_node; goto astore;
997 case OPCODE_lastore: type = long_type_node; goto astore;
998 case OPCODE_fastore: type = float_type_node; goto astore;
999 case OPCODE_dastore: type = double_type_node; goto astore;
1000 case OPCODE_aastore: type = ptr_type_node; goto astore;
1001 case OPCODE_bastore: type = int_type_node; goto astore;
1002 case OPCODE_castore: type = int_type_node; goto astore;
1003 case OPCODE_sastore: type = int_type_node; goto astore;
1005 /* FIXME - need better verification here */
1006 pop_type (type); /* new value */
1007 pop_type (int_type_node); /* index */
1008 pop_type (ptr_type_node); /* array */
1011 /* Q&D verification *or* more checking done during code generation
1012 for byte/boolean/char/short, the value pushed is a int. */
1013 case OPCODE_iaload: type = int_type_node; goto aload;
1014 case OPCODE_laload: type = long_type_node; goto aload;
1015 case OPCODE_faload: type = float_type_node; goto aload;
1016 case OPCODE_daload: type = double_type_node; goto aload;
1017 case OPCODE_aaload: type = ptr_type_node; goto aload;
1018 case OPCODE_baload: type = promote_type (byte_type_node); goto aload;
1019 case OPCODE_caload: type = promote_type (char_type_node); goto aload;
1020 case OPCODE_saload: type = promote_type (short_type_node); goto aload;
1022 pop_type (int_type_node);
1023 tmp = pop_type (ptr_type_node);
1024 if (is_array_type_p (tmp))
1025 type = TYPE_ARRAY_ELEMENT (TREE_TYPE (tmp));
1026 else if (tmp != TYPE_NULL)
1027 VERIFICATION_ERROR ("array load from non-array type");
1031 case OPCODE_anewarray:
1032 type = get_class_constant (current_jcf, IMMEDIATE_u2);
1033 type = promote_type (type);
1036 case OPCODE_newarray:
1037 index = IMMEDIATE_u1;
1038 type = decode_newarray_type (index);
1039 if (type == NULL_TREE)
1040 VERIFICATION_ERROR ("invalid type code in newarray opcode");
1044 if (int_value >= 0 && prevpc >= 0)
1046 /* If previous instruction pushed int constant,
1047 we want to use it. */
1048 switch (byte_ops[prevpc])
1050 case OPCODE_iconst_0: case OPCODE_iconst_1:
1051 case OPCODE_iconst_2: case OPCODE_iconst_3:
1052 case OPCODE_iconst_4: case OPCODE_iconst_5:
1053 case OPCODE_bipush: case OPCODE_sipush:
1054 case OPCODE_ldc: case OPCODE_ldc_w:
1062 type = build_java_array_type (type, int_value);
1063 pop_type (int_type_node);
1067 case OPCODE_multianewarray:
1070 index = IMMEDIATE_u2;
1071 ndim = IMMEDIATE_u1;
1074 VERIFICATION_ERROR ("number of dimension lower that 1 in multianewarray" );
1076 for( i = 0; i < ndim; i++ )
1077 pop_type (int_type_node);
1078 push_type (get_class_constant (current_jcf, index));
1082 case OPCODE_aconst_null:
1083 push_type (ptr_type_node);
1087 /* FIXME: athrow also empties the stack. */
1088 pop_type (throwable_type_node);
1092 case OPCODE_checkcast:
1093 pop_type (ptr_type_node);
1094 type = get_class_constant (current_jcf, IMMEDIATE_u2);
1097 case OPCODE_instanceof:
1098 pop_type (ptr_type_node);
1099 get_class_constant (current_jcf, IMMEDIATE_u2);
1100 push_type (int_type_node);
1103 case OPCODE_tableswitch:
1107 pop_type (int_type_node);
1111 VERIFICATION_ERROR ("bad alignment in tableswitch pad");
1113 PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
1115 high = IMMEDIATE_s4;
1118 VERIFICATION_ERROR ("unsorted low/high value in tableswitch");
1120 while (low++ <= high)
1121 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1126 case OPCODE_lookupswitch:
1128 jint npairs, last = 0, not_registered = 1;
1130 pop_type (int_type_node);
1134 VERIFICATION_ERROR ("bad alignment in lookupswitch pad");
1137 PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
1138 npairs = IMMEDIATE_s4;
1141 VERIFICATION_ERROR ("invalid number of targets in lookupswitch");
1145 int match = IMMEDIATE_s4;
1148 else if (last >= match)
1149 VERIFICATION_ERROR ("unsorted match value in lookupswitch");
1152 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1158 case OPCODE_monitorenter:
1160 case OPCODE_monitorexit:
1161 pop_type (ptr_type_node);
1165 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1171 tree target = lookup_label (oldpc + IMMEDIATE_s2);
1172 tree return_label = lookup_label (PC);
1173 push_type (return_address_type_node);
1174 /* The return label chain will be null if this is the first
1175 time we've seen this jsr target. */
1176 if (LABEL_RETURN_LABEL (target) == NULL_TREE)
1178 tree return_type_map;
1179 int nlocals = DECL_MAX_LOCALS (current_function_decl);
1180 index = nlocals + DECL_MAX_STACK (current_function_decl);
1181 return_type_map = make_tree_vec (index);
1182 while (index > nlocals)
1183 TREE_VEC_ELT (return_type_map, --index) = TYPE_UNKNOWN;
1185 TREE_VEC_ELT (return_type_map, --index) = TYPE_UNUSED;
1186 LABEL_RETURN_LABEL (target)
1187 = build_decl (LABEL_DECL, NULL_TREE, TREE_TYPE (target));
1188 LABEL_PC (LABEL_RETURN_LABEL (target)) = -1;
1189 LABEL_RETURN_TYPE_STATE (target) = return_type_map;
1190 LABEL_IS_SUBR_START (target) = 1;
1191 LABEL_IN_SUBR (target) = 1;
1192 LABEL_SUBR_START (target) = target;
1193 LABEL_SUBR_CONTEXT (target) = current_subr;
1195 else if (! LABEL_IS_SUBR_START (target)
1196 || LABEL_SUBR_CONTEXT (target) != current_subr)
1197 VERIFICATION_ERROR ("label part of different subroutines");
1199 i = merge_type_state (target);
1203 VERIFICATION_ERROR ("types could not be merged at jsr");
1204 push_pending_label (target);
1206 current_subr = target;
1208 /* Chain return_pc onto LABEL_RETURN_LABELS (target) if needed. */
1209 if (! value_member (return_label, LABEL_RETURN_LABELS (target)))
1211 LABEL_RETURN_LABELS (target)
1212 = tree_cons (NULL_TREE, return_label,
1213 LABEL_RETURN_LABELS (target));
1216 if (LABEL_VERIFIED (target))
1218 tree return_map = LABEL_RETURN_TYPE_STATE (target);
1219 int len = TREE_VEC_LENGTH (return_map);
1220 stack_pointer = len - DECL_MAX_LOCALS (current_function_decl);
1223 if (TREE_VEC_ELT (return_map, len) != TYPE_UNUSED)
1224 type_map[len] = TREE_VEC_ELT (return_map, len);
1226 current_subr = LABEL_SUBR_CONTEXT (target);
1227 PUSH_PENDING (return_label);
1234 if (current_subr == NULL)
1235 VERIFICATION_ERROR ("ret instruction not in a jsr subroutine");
1238 tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
1239 int size = DECL_MAX_LOCALS(current_function_decl)+stack_pointer;
1240 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
1243 if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl)
1244 || type_map[index] != TYPE_RETURN_ADDR)
1245 VERIFICATION_ERROR ("invalid ret index");
1247 /* The next chunk of code is similar to an inlined version of
1248 * merge_type_state (LABEL_RETURN_LABEL (current_subr)).
1249 * The main differences are that LABEL_RETURN_LABEL is
1250 * pre-allocated by the jsr (but we don't know the size then);
1251 * and that we have to handle TYPE_UNUSED. */
1253 if (! RETURN_MAP_ADJUSTED (ret_map))
1254 { /* First return from this subroutine - fix stack pointer. */
1255 TREE_VEC_LENGTH (ret_map) = size;
1256 for (index = size; --index >= 0; )
1258 if (TREE_VEC_ELT (ret_map, index) != TYPE_UNUSED)
1259 TREE_VEC_ELT (ret_map, index) = type_map[index];
1261 RETURN_MAP_ADJUSTED (ret_map) = 1;
1265 if (TREE_VEC_LENGTH (ret_map) != size)
1266 VERIFICATION_ERROR ("inconsistent stack size on ret");
1267 for (index = 0; index < size; index++)
1269 tree type = TREE_VEC_ELT (ret_map, index);
1270 if (type != TYPE_UNUSED)
1272 type = merge_types (type, type_map [index]);
1273 TREE_VEC_ELT (ret_map, index) = type;
1274 if (type == TYPE_UNKNOWN)
1276 if (index >= size - stack_pointer)
1278 ("inconsistent types on ret from jsr");
1280 else if (TYPE_IS_WIDE (type))
1292 error ("unknown opcode %d@pc=%d during verification", op_code, PC-1);
1298 /* The following test is true if we have entered or exited an exception
1299 handler range *or* we have done a store to a local variable.
1300 In either case we need to consider any exception handlers that
1301 might "follow" this instruction. */
1303 if (eh_ranges != prev_eh_ranges)
1305 int save_stack_pointer = stack_pointer;
1306 int index = DECL_MAX_LOCALS (current_function_decl);
1307 tree save_type = type_map[index];
1308 tree save_current_subr = current_subr;
1309 struct eh_range *ranges = find_handler (oldpc);
1311 for (; ranges != NULL_EH_RANGE; ranges = ranges->outer)
1313 tree chain = ranges->handlers;
1315 /* We need to determine if the handler is part of current_subr.
1316 There are two cases: (1) The exception catch range
1317 is entirely within current_subr. In that case the handler
1318 is also part of current_subr.
1319 (2) Some of the catch range is not in current_subr.
1320 In that case, the handler is *not* part of current_subr.
1322 Figuring out which is the case is not necessarily obvious,
1323 in the presence of clever code generators (and obfuscators).
1324 We make a simplifying assumption that in case (2) we
1325 have that the current_subr is entirely within the catch range.
1326 In that case we can assume that if a caller (the jsr) of
1327 a subroutine is within the catch range, then the handler is
1328 *not* part of the subroutine, and vice versa. */
1330 current_subr = save_current_subr;
1331 for ( ; current_subr != NULL_TREE;
1332 current_subr = LABEL_SUBR_CONTEXT (current_subr))
1334 tree return_labels = LABEL_RETURN_LABELS (current_subr);
1335 /* There could be multiple return_labels, but
1336 we only need to check one. */
1337 int return_pc = LABEL_PC (TREE_VALUE (return_labels));
1338 if (return_pc <= ranges->start_pc
1339 || return_pc > ranges->end_pc)
1343 for ( ; chain != NULL_TREE; chain = TREE_CHAIN (chain))
1345 tree handler = TREE_VALUE (chain);
1346 tree type = TREE_PURPOSE (chain);
1347 if (type == NULL_TREE) /* a finally handler */
1348 type = throwable_type_node;
1349 type_map[index] = promote_type (type);
1351 PUSH_PENDING (handler);
1354 stack_pointer = save_stack_pointer;
1355 current_subr = save_current_subr;
1356 type_map[index] = save_type;
1357 prev_eh_ranges = eh_ranges;
1362 message = "program counter out of range";
1365 error ("verification error at PC=%d", oldpc);
1366 error ("%s", message);