1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008, 2010 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
25 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
29 #include "coretypes.h"
32 #include "java-tree.h"
34 #include "java-opcodes.h"
36 #include "java-except.h"
40 #include "tree-iterator.h"
/* Forward declarations for the file-local (static) helpers defined below.  */
43 static void flush_quick_stack (void);
44 static void push_value (tree);
45 static tree pop_value (tree);
46 static void java_stack_swap (void);
47 static void java_stack_dup (int, int);
48 static void build_java_athrow (tree);
49 static void build_java_jsr (int, int);
50 static void build_java_ret (tree);
51 static void expand_java_multianewarray (tree, int);
52 static void expand_java_arraystore (tree);
53 static void expand_java_arrayload (tree);
54 static void expand_java_array_length (void);
55 static tree build_java_monitor (tree, tree);
56 static void expand_java_pushc (int, tree);
57 static void expand_java_return (tree);
58 static void expand_load_internal (int, tree, int);
59 static void expand_java_NEW (tree);
60 static void expand_java_INSTANCEOF (tree);
61 static void expand_java_CHECKCAST (tree);
62 static void expand_iinc (unsigned int, int, int);
63 static void expand_java_binop (tree, enum tree_code);
64 static void note_label (int, int);
65 static void expand_compare (enum tree_code, tree, tree, int);
66 static void expand_test (enum tree_code, tree, int);
67 static void expand_cond (enum tree_code, tree, int);
68 static void expand_java_goto (int);
69 static tree expand_java_switch (tree, int);
70 static void expand_java_add_case (tree, int, int);
71 static VEC(tree,gc) *pop_arguments (tree);
72 static void expand_invoke (int, int, int);
73 static void expand_java_field_op (int, int, int);
74 static void java_push_constant_from_pool (struct JCF *, int);
75 static void java_stack_pop (int);
76 static tree build_java_throw_out_of_bounds_exception (tree);
77 static tree build_java_check_indexed_type (tree, tree);
78 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
79 static void promote_arguments (void);
80 static void cache_cpool_data_ref (void);
/* Operand types indexed by opcode number; entries 21-25 (loads) and
   54-58 (stores) are filled in by init_expr_processing below.  */
82 static GTY(()) tree operand_type[59];
/* Cached identifiers; presumably used when building class metadata
   references elsewhere in this file -- not visible in this chunk.  */
84 static GTY(()) tree methods_ident;
85 static GTY(()) tree ncode_ident;
86 tree dtable_ident = NULL_TREE;
88 /* Set to nonzero value in order to emit class initialization code
89 before static field references. */
90 int always_initialize_class_p = 0;
92 /* We store the stack state in two places:
93 Within a basic block, we use the quick_stack, which is a
94 pushdown list (TREE_LISTs) of expression nodes.
95 This is the top part of the stack; below that we use find_stack_slot.
96 At the end of a basic block, the quick_stack must be flushed
97 to the stack slot array (as handled by find_stack_slot).
98 Using quick_stack generates better code (especially when
99 compiled without optimization), because we do not have to
100 explicitly store and load trees to temporary variables.
102 If a variable is on the quick stack, it means the value of variable
103 when the quick stack was last flushed. Conceptually, flush_quick_stack
104 saves all the quick_stack elements in parallel. However, that is
105 complicated, so it actually saves them (i.e. copies each stack value
106 to its home virtual register) from low indexes. This allows a quick_stack
107 element at index i (counting from the bottom of the stack) to reference
108 slot virtual registers that are >= i, but not those that are deeper.
109 This convention makes most operations easier. For example iadd works
110 even when the stack contains (reg[0], reg[1]): It results in the
111 stack containing (reg[0]+reg[1]), which is OK. However, some stack
112 operations are more complicated. For example dup given a stack
113 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
114 the convention, since stack value 1 would refer to a register with
115 lower index (reg[0]), which flush_quick_stack does not safely handle.
116 So dup cannot just add an extra element to the quick_stack, but iadd can.
/* The in-basic-block expression stack described in the commentary above;
   flushed to stack-slot decls by flush_quick_stack.  */
119 static GTY(()) tree quick_stack;
121 /* A free-list of unused permanent TREE_LIST nodes. */
122 static GTY((deletable)) tree tree_list_free_list;
124 /* The physical memory page size used in this computer. See
125 build_field_ref(). */
126 static GTY(()) tree page_size;
128 /* The stack pointer of the Java virtual machine.
129 This does include the size of the quick_stack. */
/* Raw line-number table of the method being expanded -- presumably the
   bytes of the class file's LineNumberTable attribute; confirm against
   the bytecode reader.  */
133 const unsigned char *linenumber_table;
134 int linenumber_count;
136 /* Largest pc so far in this method that has been passed to lookup_label. */
137 int highest_label_pc_this_method = -1;
139 /* Base value for this method to add to pc to get generated label. */
140 int start_label_pc_this_method = 0;
/* One-time setup: record the operand types for the single-slot load
   opcodes (21-25: iload..aload) and the matching store opcodes
   (54-58: istore..astore) in operand_type[].  */
143 init_expr_processing (void)
145 operand_type[21] = operand_type[54] = int_type_node;
146 operand_type[22] = operand_type[55] = long_type_node;
147 operand_type[23] = operand_type[56] = float_type_node;
148 operand_type[24] = operand_type[57] = double_type_node;
149 operand_type[25] = operand_type[58] = ptr_type_node;
/* Convert EXPR to a boolean-valued tree suitable for use as a truth
   value.  Comparison and TRUTH_* nodes pass through; constants fold
   to boolean_true_node/boolean_false_node; other expressions are
   compared against boolean_false_node.  */
153 java_truthvalue_conversion (tree expr)
155 /* It is simpler and generates better code to have only TRUTH_*_EXPR
156 or comparison expressions as truth values at this level.
158 This function should normally be identity for Java. */
160 switch (TREE_CODE (expr))
162 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
163 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
164 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
165 case ORDERED_EXPR: case UNORDERED_EXPR:
166 case TRUTH_ANDIF_EXPR:
167 case TRUTH_ORIF_EXPR:
/* Integer and real constants fold directly to a boolean node.  */
176 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
179 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
181 /* are these legal? XXX JH */
185 /* These don't change whether an object is nonzero or zero. */
186 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
189 /* Distribute the conversion into the arms of a COND_EXPR. */
190 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
191 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
192 java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
195 /* If this is widening the argument, we can ignore it. */
196 if (TYPE_PRECISION (TREE_TYPE (expr))
197 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
198 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
199 /* fall through to default */
/* Default: compare against `false' to force a boolean result.  */
202 return fold_build2 (NE_EXPR, boolean_type_node,
203 expr, boolean_false_node);
207 /* Save any stack slots that happen to be in the quick_stack into their
208 home virtual register slots.
210 The copy order is from low stack index to high, to support the invariant
211 that the expression for a slot may contain decls for stack slots with
212 higher (or the same) index, but not lower. */
215 flush_quick_stack (void)
217 int stack_index = stack_pointer;
218 tree prev, cur, next;
220 /* First reverse the quick_stack, and count the number of slots it has. */
221 for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next)
223 next = TREE_CHAIN (cur);
224 TREE_CHAIN (cur) = prev;
/* Wide (two-slot) types occupy an extra stack index.  */
226 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (TREE_VALUE (cur)));
/* Emit one assignment per entry, recycling each TREE_LIST cell onto
   tree_list_free_list as we go.  */
230 while (quick_stack != NULL_TREE)
233 tree node = quick_stack, type;
234 quick_stack = TREE_CHAIN (node);
235 TREE_CHAIN (node) = tree_list_free_list;
236 tree_list_free_list = node;
237 node = TREE_VALUE (node);
238 type = TREE_TYPE (node);
240 decl = find_stack_slot (stack_index, type);
242 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (node), decl, node));
243 stack_index += 1 + TYPE_IS_WIDE (type);
247 /* Push TYPE on the type stack.
248 Return true on success, 0 on overflow. */
251 push_type_0 (tree type)
/* Sub-int integral types are tracked as their promoted form.  */
254 type = promote_type (type);
255 n_words = 1 + TYPE_IS_WIDE (type);
/* Reject a push that would exceed the method's declared max stack.  */
256 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
258 /* Allocate decl for this variable now, so we get a temporary that
259 survives the whole method. */
260 find_stack_slot (stack_pointer, type);
261 stack_type_map[stack_pointer++] = type;
/* The second slot of a wide value is marked with TYPE_SECOND.  */
263 while (--n_words >= 0)
264 stack_type_map[stack_pointer++] = TYPE_SECOND;
/* Push TYPE on the type stack; overflow is a hard error here
   (unlike push_type_0, which reports it to the caller).  */
269 push_type (tree type)
271 int r = push_type_0 (type);
/* Push the expression VALUE onto the quick stack, promoting sub-int
   integral values to int first.  Recycles TREE_LIST cells from
   tree_list_free_list when available.  */
276 push_value (tree value)
278 tree type = TREE_TYPE (value);
279 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
281 type = promote_type (type);
282 value = convert (type, value);
285 if (tree_list_free_list == NULL_TREE)
286 quick_stack = tree_cons (NULL_TREE, value, quick_stack);
/* Reuse a cell from the free list instead of allocating.  */
289 tree node = tree_list_free_list;
290 tree_list_free_list = TREE_CHAIN (tree_list_free_list);
291 TREE_VALUE (node) = value;
292 TREE_CHAIN (node) = quick_stack;
295 /* If the value has a side effect, then we need to evaluate it
296 whether or not the result is used. If the value ends up on the
297 quick stack and is then popped, this won't happen -- so we flush
298 the quick stack. It is safest to simply always flush, though,
299 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
300 the latter we may need to strip conversions. */
301 flush_quick_stack ();
304 /* Pop a type from the type stack.
305 TYPE is the expected type. Return the actual type, which must be
307 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
310 pop_type_0 (tree type, char **messagep)
315 if (TREE_CODE (type) == RECORD_TYPE)
316 type = promote_type (type);
317 n_words = 1 + TYPE_IS_WIDE (type);
318 if (stack_pointer < n_words)
320 *messagep = xstrdup ("stack underflow");
/* A wide value's upper slot must be the void_type_node filler.  */
323 while (--n_words > 0)
325 if (stack_type_map[--stack_pointer] != void_type_node)
327 *messagep = xstrdup ("Invalid multi-word value on type stack");
331 t = stack_type_map[--stack_pointer];
/* Exact match (or no expectation) is trivially OK.  */
332 if (type == NULL_TREE || t == type)
334 if (TREE_CODE (t) == TREE_LIST)
338 tree tt = TREE_PURPOSE (t);
339 if (! can_widen_reference_to (tt, type))
/* Small integral types are interchangeable on the JVM stack.  */
349 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
350 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
352 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
354 /* If the expected type we've been passed is object or ptr
355 (i.e. void*), the caller needs to know the real type. */
356 if (type == ptr_type_node || type == object_ptr_type_node)
359 /* Since the verifier has already run, we know that any
360 types we see will be compatible. In BC mode, this fact
361 may be checked at runtime, but if that is so then we can
362 assume its truth here as well. So, we always succeed
363 here, with the expected type. */
367 if (! flag_verify_invocations && flag_indirect_dispatch
368 && t == object_ptr_type_node)
370 if (type != ptr_type_node)
371 warning (0, "need to insert runtime check for %s",
372 xstrdup (lang_printable_name (type, 0)));
376 /* lang_printable_name uses a static buffer, so we must save the result
377 from calling it the first time. */
380 char *temp = xstrdup (lang_printable_name (type, 0));
381 /* If the stack contains a multi-word type, keep popping the stack until
382 the real type is found. */
383 while (t == void_type_node)
384 t = stack_type_map[--stack_pointer];
385 *messagep = concat ("expected type '", temp,
386 "' but stack contains '", lang_printable_name (t, 0),
393 /* Pop a type from the type stack.
394 TYPE is the expected type. Return the actual type, which must be
395 convertible to TYPE, otherwise call error. */
400 char *message = NULL;
401 type = pop_type_0 (type, &message);
/* Report (and presumably leak -- matches pop_type_0's malloc'd
   contract) any mismatch message.  */
404 error ("%s", message);
411 /* Return true if two type assertions are equal. */
414 type_assertion_eq (const void * k1_p, const void * k2_p)
416 const type_assertion k1 = *(const type_assertion *)k1_p;
417 const type_assertion k2 = *(const type_assertion *)k2_p;
/* Equality requires the same opcode and identical operand trees.  */
418 return (k1.assertion_code == k2.assertion_code
420 && k1.op2 == k2.op2);
423 /* Hash a type assertion. */
426 type_assertion_hash (const void *p)
428 const type_assertion *k_p = (const type_assertion *) p;
/* Start from the assertion opcode, then mix in the operands relevant
   to that opcode (TYPE_UIDs are stable per-type identifiers).  */
429 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
430 k_p->assertion_code, 0);
432 switch (k_p->assertion_code)
434 case JV_ASSERT_TYPES_COMPATIBLE:
435 hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2),
439 case JV_ASSERT_IS_INSTANTIABLE:
440 hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1),
444 case JV_ASSERT_END_OF_TABLE:
454 /* Add an entry to the type assertion table for the given class.
455 KLASS is the class for which this assertion will be evaluated by the
456 runtime during loading/initialization.
457 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
458 OP1 and OP2 are the operands. The tree type of these arguments may be
459 specific to each assertion_code. */
462 add_type_assertion (tree klass, int assertion_code, tree op1, tree op2)
464 htab_t assertions_htab;
468 assertions_htab = TYPE_ASSERTIONS (klass);
469 if (assertions_htab == NULL)
471 assertions_htab = htab_create_ggc (7, type_assertion_hash,
472 type_assertion_eq, NULL);
/* NOTE(review): the table was looked up on KLASS above but is stored
   back on current_class here.  Confirm callers always pass
   klass == current_class; otherwise a freshly created table is
   attached to the wrong class.  */
473 TYPE_ASSERTIONS (current_class) = assertions_htab;
476 as.assertion_code = assertion_code;
/* INSERT either finds the existing slot or reserves a new one.  */
480 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
482 /* Don't add the same assertion twice. */
486 *as_pp = ggc_alloc (sizeof (type_assertion));
487 **(type_assertion **)as_pp = as;
491 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
492 Handles array types and interfaces. */
495 can_widen_reference_to (tree source_type, tree target_type)
497 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
500 /* Get rid of pointers */
501 if (TREE_CODE (source_type) == POINTER_TYPE)
502 source_type = TREE_TYPE (source_type);
503 if (TREE_CODE (target_type) == POINTER_TYPE)
504 target_type = TREE_TYPE (target_type);
506 if (source_type == target_type)
509 /* FIXME: This is very pessimistic, in that it checks everything,
510 even if we already know that the types are compatible. If we're
511 to support full Java class loader semantics, we need this.
512 However, we could do something more optimal. */
513 if (! flag_verify_invocations)
515 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
516 source_type, target_type);
519 warning (0, "assert: %s is assign compatible with %s",
520 xstrdup (lang_printable_name (target_type, 0)),
521 xstrdup (lang_printable_name (source_type, 0)));
522 /* Punt everything to runtime. */
/* Dummy (not-yet-loaded) types cannot be decided at compile time.  */
526 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
532 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
534 HOST_WIDE_INT source_length, target_length;
535 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
537 /* An array implements Cloneable and Serializable. */
538 tree name = DECL_NAME (TYPE_NAME (target_type));
539 return (name == java_lang_cloneable_identifier_node
540 || name == java_io_serializable_identifier_node);
542 target_length = java_array_type_length (target_type);
543 if (target_length >= 0)
545 source_length = java_array_type_length (source_type);
546 if (source_length != target_length)
/* Recurse on the element types for array-to-array widening.  */
549 source_type = TYPE_ARRAY_ELEMENT (source_type);
550 target_type = TYPE_ARRAY_ELEMENT (target_type);
551 if (source_type == target_type)
553 if (TREE_CODE (source_type) != POINTER_TYPE
554 || TREE_CODE (target_type) != POINTER_TYPE)
556 return can_widen_reference_to (source_type, target_type);
560 int source_depth = class_depth (source_type);
561 int target_depth = class_depth (target_type);
563 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
566 warning (0, "assert: %s is assign compatible with %s",
567 xstrdup (lang_printable_name (target_type, 0)),
568 xstrdup (lang_printable_name (source_type, 0)));
572 /* class_depth can return a negative depth if an error occurred */
573 if (source_depth < 0 || target_depth < 0)
576 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
578 /* target_type is OK if source_type or source_type ancestors
579 implement target_type. We handle multiple sub-interfaces */
580 tree binfo, base_binfo;
583 for (binfo = TYPE_BINFO (source_type), i = 0;
584 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
585 if (can_widen_reference_to
586 (BINFO_TYPE (base_binfo), target_type))
/* Walk up the superclass chain until the depths match, then the
   types must be identical for the widening to be valid.  */
593 for ( ; source_depth > target_depth; source_depth--)
596 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
598 return source_type == target_type;
/* Pop a value of the given (expected) TYPE.  If the top of stack is
   still on the quick stack, return the saved expression and recycle
   its TREE_LIST cell; otherwise read the flushed stack-slot decl.  */
604 pop_value (tree type)
606 type = pop_type (type);
609 tree node = quick_stack;
610 quick_stack = TREE_CHAIN (quick_stack);
611 TREE_CHAIN (node) = tree_list_free_list;
612 tree_list_free_list = node;
613 node = TREE_VALUE (node);
617 return find_stack_slot (stack_pointer, promote_type (type));
621 /* Pop and discard the top COUNT stack slots. */
624 java_stack_pop (int count)
630 gcc_assert (stack_pointer != 0);
632 type = stack_type_map[stack_pointer - 1];
/* A TYPE_SECOND filler means the top slot is the upper half of a
   wide value; pop using the real type one slot below.  */
633 if (type == TYPE_SECOND)
636 gcc_assert (stack_pointer != 1 && count > 0);
638 type = stack_type_map[stack_pointer - 2];
645 /* Implement the 'swap' operator (to swap two top stack slots). */
648 java_stack_swap (void)
/* swap is only defined for two single-slot (category 1) values.  */
654 if (stack_pointer < 2
655 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
656 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
657 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
658 /* Bad stack swap. */
660 /* Bad stack swap. */
/* Exchange through a fresh temporary so neither slot is clobbered
   before it is read.  */
662 flush_quick_stack ();
663 decl1 = find_stack_slot (stack_pointer - 1, type1);
664 decl2 = find_stack_slot (stack_pointer - 2, type2);
665 temp = build_decl (input_location, VAR_DECL, NULL_TREE, type1);
666 java_add_local_var (temp);
667 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
668 java_add_stmt (build2 (MODIFY_EXPR, type2,
669 find_stack_slot (stack_pointer - 1, type2),
671 java_add_stmt (build2 (MODIFY_EXPR, type1,
672 find_stack_slot (stack_pointer - 2, type1),
674 stack_type_map[stack_pointer - 1] = type2;
675 stack_type_map[stack_pointer - 2] = type1;
/* Implement the dup family of opcodes: duplicate the top SIZE stack
   slots, inserting the copy OFFSET slots further down (dup, dup_x1,
   dup_x2, dup2, dup2_x1, dup2_x2).  */
679 java_stack_dup (int size, int offset)
681 int low_index = stack_pointer - size - offset;
684 error ("stack underflow - dup* operation");
686 flush_quick_stack ();
688 stack_pointer += size;
689 dst_index = stack_pointer;
/* Copy downward from the (new) top so sources are read before they
   are overwritten.  */
691 for (dst_index = stack_pointer; --dst_index >= low_index; )
694 int src_index = dst_index - size;
695 if (src_index < low_index)
696 src_index = dst_index + size + offset;
697 type = stack_type_map [src_index];
698 if (type == TYPE_SECOND)
700 /* Dup operation splits 64-bit number. */
701 gcc_assert (src_index > low_index);
703 stack_type_map[dst_index] = type;
704 src_index--; dst_index--;
705 type = stack_type_map[src_index];
706 gcc_assert (TYPE_IS_WIDE (type));
709 gcc_assert (! TYPE_IS_WIDE (type));
711 if (src_index != dst_index)
713 tree src_decl = find_stack_slot (src_index, type);
714 tree dst_decl = find_stack_slot (dst_index, type);
717 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
718 stack_type_map[dst_index] = type;
723 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
727 build_java_athrow (tree node)
731 call = build_call_nary (void_type_node,
732 build_address_of (throw_node),
734 TREE_SIDE_EFFECTS (call) = 1;
735 java_add_stmt (call);
/* The throw never returns; the whole operand stack is dead.  */
736 java_stack_pop (stack_pointer);
739 /* Implementation for jsr/ret */
742 build_java_jsr (int target_pc, int return_pc)
744 tree where = lookup_label (target_pc);
745 tree ret = lookup_label (return_pc);
/* Push the return address as a first-class value, then jump to the
   subroutine.  */
746 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
747 push_value (ret_label);
748 flush_quick_stack ();
749 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
751 /* Do not need to emit the label here. We noted the existence of the
752 label as a jump target in note_instructions; we'll emit the label
753 for real at the beginning of the expand_byte_code loop. */
/* Implement the `ret' opcode: an indirect jump to the return address
   stored in LOCATION (a local variable; see build_java_jsr).  */
757 build_java_ret (tree location)
759 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
762 /* Implementation of operations on array: new, load, store, length */
/* Map an OPCODE_newarray type code (4-11, per the JVM spec) to the
   corresponding primitive type node, or NULL_TREE if out of range.  */
765 decode_newarray_type (int atype)
769 case 4: return boolean_type_node;
770 case 5: return char_type_node;
771 case 6: return float_type_node;
772 case 7: return double_type_node;
773 case 8: return byte_type_node;
774 case 9: return short_type_node;
775 case 10: return int_type_node;
776 case 11: return long_type_node;
777 default: return NULL_TREE;
781 /* Map primitive type to the code used by OPCODE_newarray. */
/* Inverse of decode_newarray_type.  */
784 encode_newarray_type (tree type)
786 if (type == boolean_type_node)
788 else if (type == char_type_node)
790 else if (type == float_type_node)
792 else if (type == double_type_node)
794 else if (type == byte_type_node)
796 else if (type == short_type_node)
798 else if (type == int_type_node)
800 else if (type == long_type_node)
806 /* Build a call to _Jv_ThrowBadArrayIndex(), the
807 ArrayIndexOfBoundsException exception handler. */
810 build_java_throw_out_of_bounds_exception (tree index)
814 /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
815 has void return type. We cannot just set the type of the CALL_EXPR below
816 to int_type_node because we would lose it during gimplification. */
817 gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node))));
818 node = build_call_nary (void_type_node,
819 build_address_of (soft_badarrayindex_node),
821 TREE_SIDE_EFFECTS (node) = 1;
/* Wrap as (call, 0) so the result is int-typed for use in the
   TRUTH_ANDIF_EXPR built by the bounds check.  */
823 node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node);
824 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
829 /* Return the length of an array. Doesn't perform any checking on the nature
830 or value of the array NODE. May be used to implement some bytecodes. */
833 build_java_array_length_access (tree node)
835 tree type = TREE_TYPE (node);
836 tree array_type = TREE_TYPE (type);
837 HOST_WIDE_INT length;
839 if (!is_array_type_p (type))
841 /* With the new verifier, we will see an ordinary pointer type
842 here. In this case, we just use an arbitrary array type. */
843 array_type = build_java_array_type (object_ptr_type_node, -1);
844 type = promote_type (array_type);
/* A statically known length folds to a constant.  */
847 length = java_array_type_length (type);
849 return build_int_cst (NULL_TREE, length);
/* Otherwise read the `length' field of the array object, with an
   optional null check via build_java_indirect_ref.  */
851 node = build3 (COMPONENT_REF, int_type_node,
852 build_java_indirect_ref (array_type, node,
853 flag_check_references),
854 lookup_field (&array_type, get_identifier ("length")),
856 IS_ARRAY_LENGTH_ACCESS (node) = 1;
860 /* Optionally checks a reference against the NULL pointer. ARG1: the
861 expr, ARG2: we should check the reference. Don't generate extra
862 checks if we're not generating code. */
865 java_check_reference (tree expr, int check)
867 if (!flag_syntax_only && check)
/* save_expr: EXPR is evaluated once, then tested and reused.  */
869 expr = save_expr (expr);
870 expr = build3 (COND_EXPR, TREE_TYPE (expr),
871 build2 (EQ_EXPR, boolean_type_node,
872 expr, null_pointer_node),
873 build_call_nary (void_type_node,
874 build_address_of (soft_nullpointer_node),
882 /* Reference an object: just like an INDIRECT_REF, but with checking. */
885 build_java_indirect_ref (tree type, tree expr, int check)
888 t = java_check_reference (expr, check);
889 t = convert (build_pointer_type (type), t);
890 return build1 (INDIRECT_REF, type, t);
893 /* Implement array indexing (either as l-value or r-value).
894 Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
895 Optionally performs bounds checking and/or test to NULL.
896 At this point, ARRAY should have been verified as an array. */
899 build_java_arrayaccess (tree array, tree type, tree index)
901 tree node, throw_expr = NULL_TREE;
904 tree array_type = TREE_TYPE (TREE_TYPE (array));
905 tree size_exp = fold_convert (sizetype, size_in_bytes (type));
907 if (!is_array_type_p (TREE_TYPE (array)))
909 /* With the new verifier, we will see an ordinary pointer type
910 here. In this case, we just use the correct array type. */
911 array_type = build_java_array_type (type, -1);
914 if (flag_bounds_check)
917 * (unsigned jint) INDEX >= (unsigned jint) LEN
918 * && throw ArrayIndexOutOfBoundsException.
919 * Note this is equivalent to and more efficient than:
920 * INDEX < 0 || INDEX >= LEN && throw ... */
/* The unsigned compare folds the negative-index case into the
   too-large case.  */
922 tree len = convert (unsigned_int_type_node,
923 build_java_array_length_access (array));
924 test = fold_build2 (GE_EXPR, boolean_type_node,
925 convert (unsigned_int_type_node, index),
927 if (! integer_zerop (test))
930 = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
931 build_java_throw_out_of_bounds_exception (index));
932 /* allows expansion within COMPOUND */
933 TREE_SIDE_EFFECTS( throw_expr ) = 1;
937 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
938 to have the bounds check evaluated first. */
939 if (throw_expr != NULL_TREE)
940 index = build2 (COMPOUND_EXPR, int_type_node, throw_expr, index);
942 data_field = lookup_field (&array_type, get_identifier ("data"));
944 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
945 build_java_indirect_ref (array_type, array,
946 flag_check_references),
947 data_field, NULL_TREE);
949 /* Take the address of the data field and convert it to a pointer to
951 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
953 /* Multiply the index by the size of an element to obtain a byte
954 offset. Convert the result to a pointer to the element type. */
955 index = build2 (MULT_EXPR, sizetype,
956 fold_convert (sizetype, index),
959 /* Sum the byte offset and the address of the data field. */
960 node = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (node), node, index);
/* The resulting access is:
964 *((&array->data) + index*size_exp)
   i.e. a dereference of the computed element address.  */
967 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
970 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
971 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
972 determine that no check is required. */
975 build_java_arraystore_check (tree array, tree object)
977 tree check, element_type, source;
978 tree array_type_p = TREE_TYPE (array);
979 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
981 if (! flag_verify_invocations)
983 /* With the new verifier, we don't track precise types. FIXME:
984 performance regression here. */
985 element_type = TYPE_NAME (object_type_node);
989 gcc_assert (is_array_type_p (array_type_p));
991 /* Get the TYPE_DECL for ARRAY's element type. */
993 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
996 gcc_assert (TREE_CODE (element_type) == TYPE_DECL
997 && TREE_CODE (object_type) == TYPE_DECL);
999 if (!flag_store_check)
1000 return build1 (NOP_EXPR, array_type_p, array);
1002 /* No check is needed if the element type is final. Also check that
1003 element_type matches object_type, since in the bytecode
1004 compilation case element_type may be the actual element type of
1005 the array rather than its declared type. However, if we're doing
1006 indirect dispatch, we can't do the `final' optimization. */
1007 if (element_type == object_type
1008 && ! flag_indirect_dispatch
1009 && CLASS_FINAL (element_type))
1010 return build1 (NOP_EXPR, array_type_p, array);
1012 /* OBJECT might be wrapped by a SAVE_EXPR. */
1013 if (TREE_CODE (object) == SAVE_EXPR)
1014 source = TREE_OPERAND (object, 0);
1018 /* Avoid the check if OBJECT was just loaded from the same array. */
1019 if (TREE_CODE (source) == ARRAY_REF)
/* Strip the access down to the array object itself so it can be
   compared against the store target.  */
1022 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
1023 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
1024 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1025 if (TREE_CODE (source) == SAVE_EXPR)
1026 source = TREE_OPERAND (source, 0);
1029 if (TREE_CODE (target) == SAVE_EXPR)
1030 target = TREE_OPERAND (target, 0);
1032 if (source == target)
1033 return build1 (NOP_EXPR, array_type_p, array);
1036 /* Build an invocation of _Jv_CheckArrayStore */
1037 check = build_call_nary (void_type_node,
1038 build_address_of (soft_checkarraystore_node),
1040 TREE_SIDE_EFFECTS (check) = 1;
1045 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1046 ARRAY_NODE. This function is used to retrieve something less vague than
1047 a pointer type when indexing the first dimension of something like [[<t>.
1048 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
1049 returned unchanged. */
1052 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1055 /* We used to check to see if ARRAY_NODE really had array type.
1056 However, with the new verifier, this is not necessary, as we know
1057 that the object will be an array of the appropriate type. */
1059 return indexed_type;
1062 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1063 called with an integer code (the type of array to create), and the length
1064 of the array to create. */
1067 build_newarray (int atype_value, tree length)
1071 tree prim_type = decode_newarray_type (atype_value);
1073 = build_java_array_type (prim_type,
1074 host_integerp (length, 0) == INTEGER_CST
1075 ? tree_low_cst (length, 0) : -1);
1077 /* Pass a reference to the primitive type class and save the runtime
1079 type_arg = build_class_ref (prim_type);
1081 return build_call_nary (promote_type (type),
1082 build_address_of (soft_newarray_node),
1083 2, type_arg, length);
1086 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1087 of the dimension. */
1090 build_anewarray (tree class_type, tree length)
/* Record a statically-known length in the array type when LENGTH is a
   host-representable constant; -1 means unknown.  */
1093 = build_java_array_type (class_type,
1094 host_integerp (length, 0)
1095 ? tree_low_cst (length, 0) : -1);
1097 return build_call_nary (promote_type (type),
1098 build_address_of (soft_anewarray_node),
1101 build_class_ref (class_type),
1105 /* Return a node the evaluates 'new TYPE[LENGTH]'. */
/* Dispatch on the element type: primitive arrays go through
   _Jv_NewPrimArray, reference arrays through _Jv_NewObjectArray.  */
1108 build_new_array (tree type, tree length)
1110 if (JPRIMITIVE_TYPE_P (type))
1111 return build_newarray (encode_newarray_type (type), length);
1113 return build_anewarray (TREE_TYPE (type), length);
1116 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1117 class pointer, a number of dimensions and the matching number of
1118 dimensions. The argument list is NULL terminated. */
1121 expand_java_multianewarray (tree class_type, int ndim)
1124 VEC(tree,gc) *args = NULL;
/* Layout: [class, ndim, dim_0 .. dim_{ndim-1}, NULL terminator].  */
1126 VEC_safe_grow (tree, gc, args, 3 + ndim);
1128 VEC_replace (tree, args, 0, build_class_ref (class_type));
1129 VEC_replace (tree, args, 1, build_int_cst (NULL_TREE, ndim));
/* Dimensions are popped innermost-first, so fill slots backwards.  */
1131 for(i = ndim - 1; i >= 0; i-- )
1132 VEC_replace (tree, args, (unsigned)(2 + i), pop_value (int_type_node));
1134 VEC_replace (tree, args, 2 + ndim, null_pointer_node);
1136 push_value (build_call_vec (promote_type (class_type),
1137 build_address_of (soft_multianewarray_node),
1141 /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
1142 ARRAY is an array type. May expand some bound checking and NULL
1143 pointer checking. RHS_TYPE_NODE we are going to store. In the case
1144 of the CHAR/BYTE/BOOLEAN SHORT, the type popped of the stack is an
1145 INT. In those cases, we make the conversion.
1147 if ARRAy is a reference type, the assignment is checked at run-time
1148 to make sure that the RHS can be assigned to the array element
1149 type. It is not necessary to generate this code if ARRAY is final. */
1152 expand_java_arraystore (tree rhs_type_node)
/* Sub-int values live on the stack as int; pop them as such.  */
1154 tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
1155 && TYPE_PRECISION (rhs_type_node) <= 32) ?
1156 int_type_node : rhs_type_node);
1157 tree index = pop_value (int_type_node);
1158 tree array_type, array, temp, access;
1160 /* If we're processing an `aaload' we might as well just pick
1162 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1164 array_type = build_java_array_type (object_ptr_type_node, -1);
1165 rhs_type_node = object_ptr_type_node;
1168 array_type = build_java_array_type (rhs_type_node, -1);
1170 array = pop_value (array_type);
1171 array = build1 (NOP_EXPR, promote_type (array_type), array);
1173 rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
1175 flush_quick_stack ();
1177 index = save_expr (index);
1178 array = save_expr (array);
1180 /* We want to perform the bounds check (done by
1181 build_java_arrayaccess) before the type check (done by
1182 build_java_arraystore_check). So, we call build_java_arrayaccess
1183 -- which returns an ARRAY_REF lvalue -- and we then generate code
1184 to stash the address of that lvalue in a temp. Then we call
1185 build_java_arraystore_check, and finally we generate a
1186 MODIFY_EXPR to set the array element. */
1188 access = build_java_arrayaccess (array, rhs_type_node, index);
1189 temp = build_decl (input_location, VAR_DECL, NULL_TREE,
1190 build_pointer_type (TREE_TYPE (access)));
1191 java_add_local_var (temp);
1192 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
1194 build_fold_addr_expr (access)));
/* Reference stores additionally need the runtime assignability
   check (_Jv_CheckArrayStore).  */
1196 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1198 tree check = build_java_arraystore_check (array, rhs_node);
1199 java_add_stmt (check);
1202 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
1203 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
1207 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1208 sure that LHS is an array type. May expand some bound checking and NULL
1210 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1211 BOOLEAN/SHORT, we push a promoted type back to the stack.
/* NOTE(review): the declarations of array_type/array_node/load_node are
   missing from this extraction. */
1215 expand_java_arrayload (tree lhs_type_node)
1218 tree index_node = pop_value (int_type_node);
1222 /* If we're processing an `aaload' we might as well just pick
1224 if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1226 array_type = build_java_array_type (object_ptr_type_node, -1);
1227 lhs_type_node = object_ptr_type_node;
1230 array_type = build_java_array_type (lhs_type_node, -1);
1231 array_node = pop_value (array_type);
1232 array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
1234 index_node = save_expr (index_node);
1235 array_node = save_expr (array_node);
1237 lhs_type_node = build_java_check_indexed_type (array_node,
1239 load_node = build_java_arrayaccess (array_node,
/* Promote sub-int loads to int, matching the JVM operand stack. */
1242 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1243 load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1244 push_value (load_node);
1247 /* Expands .length. Makes sure that we deal with an array and may expand
1248 a NULL check on the array object.  Pops the array reference and pushes
   its length. */
1251 expand_java_array_length (void)
1253 tree array = pop_value (ptr_type_node);
1254 tree length = build_java_array_length_access (array);
1256 push_value (length);
1259 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1260 either soft_monitorenter_node or soft_monitorexit_node.
   OBJECT is the object whose monitor is (un)locked.
   NOTE(review): the trailing arguments of the call are missing from
   this extraction. */
1263 build_java_monitor (tree call, tree object)
1265 return build_call_nary (void_type_node,
1266 build_address_of (call),
1270 /* Emit code for one of the PUSHC instructions: push the constant IVAL
   of the given TYPE onto the quick stack.  TYPE selects the constant
   representation: null pointer, integer, or real. */
1273 expand_java_pushc (int ival, tree type)
1276 if (type == ptr_type_node && ival == 0)
1277 value = null_pointer_node;
1278 else if (type == int_type_node || type == long_type_node)
1279 value = build_int_cst (type, ival);
1280 else if (type == float_type_node || type == double_type_node)
/* Convert the integer constant into a real of the target mode. */
1283 REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1284 value = build_real (type, x);
/* Emit a RETURN_EXPR for the current method.  TYPE is the declared
   return type; for non-void returns the value is popped from the
   quick stack and assigned to DECL_RESULT first. */
1293 expand_java_return (tree type)
1295 if (type == void_type_node)
1296 java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1299 tree retval = pop_value (type);
1300 tree res = DECL_RESULT (current_function_decl);
1301 retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1303 /* Handle the situation where the native integer type is smaller
1304 than the JVM integer. It can happen for many cross compilers.
1305 The whole if expression just goes away if INT_TYPE_SIZE < 32
1307 if (INT_TYPE_SIZE < 32
1308 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1309 < GET_MODE_SIZE (TYPE_MODE (type))))
1310 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1312 TREE_SIDE_EFFECTS (retval) = 1;
1313 java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
/* Load local variable INDEX of the given TYPE at bytecode offset PC
   and push it on the quick stack, via a fresh temporary so later
   assignments to the local cannot clobber the pushed value.
   NOTE(review): the declaration of `copy' and the final push_value are
   missing from this extraction. */
1318 expand_load_internal (int index, tree type, int pc)
1321 tree var = find_local_variable (index, type, pc);
1323 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1324 on the stack. If there is an assignment to this VAR_DECL between
1325 the stack push and the use, then the wrong code could be
1326 generated. To avoid this we create a new local and copy our
1327 value into it. Then we push this new local on the stack.
1328 Hopefully this all gets optimized out. */
1329 copy = build_decl (input_location, VAR_DECL, NULL_TREE, type);
1330 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1331 && TREE_TYPE (copy) != TREE_TYPE (var))
1332 var = convert (type, var);
1333 java_add_local_var (copy);
1334 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
/* Return an ADDR_EXPR taking the address of VALUE, typed as a pointer
   to VALUE's type. */
1340 build_address_of (tree value)
1342 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
/* Return true if TYPE, or any of its superclasses, declares a
   finalizer.  Recurses up the superclass chain. */
1346 class_has_finalize_method (tree type)
1348 tree super = CLASSTYPE_SUPER (type);
1350 if (super == NULL_TREE)
1351 return false; /* Every class with a real finalizer inherits */
1352 /* from java.lang.Object. */
1354 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
/* Build a call allocating a new instance of TYPE, choosing the
   finalizer-aware allocator only when the class (or a superclass)
   actually has a finalizer. */
1358 java_create_object (tree type)
1360 tree alloc_node = (class_has_finalize_method (type)
1362 : alloc_no_finalizer_node);
1364 return build_call_nary (promote_type (type),
1365 build_address_of (alloc_node),
1366 1, build_class_ref (type));
/* Expand the `new' bytecode for TYPE: lay the class out if needed,
   call the appropriate allocator and push the new object. */
1370 expand_java_NEW (tree type)
1374 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1375 : alloc_no_finalizer_node);
1376 if (! CLASS_LOADED_P (type))
1377 load_class (type, 1);
1378 safe_layout_class (type);
1379 push_value (build_call_nary (promote_type (type),
1380 build_address_of (alloc_node),
1381 1, build_class_ref (type)));
1384 /* This returns an expression which will extract the class of an
   object VALUE, by dereferencing its vtable pointer and reading the
   vtable's `class' field. */
1388 build_get_class (tree value)
1390 tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1391 tree vtable_field = lookup_field (&object_type_node,
1392 get_identifier ("vtable"));
1393 tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1394 build_java_indirect_ref (object_type_node, value,
1395 flag_check_references),
1396 vtable_field, NULL_TREE);
1397 return build3 (COMPONENT_REF, class_ptr_type,
1398 build1 (INDIRECT_REF, dtable_type, tmp),
1399 class_field, NULL_TREE);
1402 /* This builds the tree representation of the `instanceof' operator.
1403 It tries various tricks to optimize this in cases where types are
   known statically, falling back to a call to the runtime helper
   soft_instanceof_node. */
1407 build_instanceof (tree value, tree type)
1410 tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1411 tree valtype = TREE_TYPE (TREE_TYPE (value));
1412 tree valclass = TYPE_NAME (valtype);
1415 /* When compiling from bytecode, we need to ensure that TYPE has
1417 if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1419 load_class (type, 1);
1420 safe_layout_class (type);
1421 if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1422 return error_mark_node;
1424 klass = TYPE_NAME (type);
1426 if (type == object_type_node || inherits_from_p (valtype, type))
1428 /* Anything except `null' is an instance of Object. Likewise,
1429 if the object is known to be an instance of the class, then
1430 we only need to check for `null'. */
1431 expr = build2 (NE_EXPR, itype, value, null_pointer_node);
1433 else if (flag_verify_invocations
1434 && ! TYPE_ARRAY_P (type)
1435 && ! TYPE_ARRAY_P (valtype)
1436 && DECL_P (klass) && DECL_P (valclass)
1437 && ! CLASS_INTERFACE (valclass)
1438 && ! CLASS_INTERFACE (klass)
1439 && ! inherits_from_p (type, valtype)
1440 && (CLASS_FINAL (klass)
1441 || ! inherits_from_p (valtype, type)))
1443 /* The classes are from different branches of the derivation
1444 tree, so we immediately know the answer. */
1445 expr = boolean_false_node;
1447 else if (DECL_P (klass) && CLASS_FINAL (klass))
/* TYPE is final: a direct class-pointer comparison suffices. */
1449 tree save = save_expr (value);
1450 expr = build3 (COND_EXPR, itype,
1451 build2 (NE_EXPR, boolean_type_node,
1452 save, null_pointer_node),
1453 build2 (EQ_EXPR, itype,
1454 build_get_class (save),
1455 build_class_ref (type)),
1456 boolean_false_node);
/* General case: defer to the runtime helper. */
1460 expr = build_call_nary (itype,
1461 build_address_of (soft_instanceof_node),
1462 2, value, build_class_ref (type));
1464 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
/* Expand the `instanceof' bytecode: pop the object, build the test
   against TYPE.  NOTE(review): the final push of the result is missing
   from this extraction. */
1469 expand_java_INSTANCEOF (tree type)
1471 tree value = pop_value (object_ptr_type_node);
1472 value = build_instanceof (value, type);
/* Expand the `checkcast' bytecode: pop the object and build a call to
   the runtime cast checker (soft_checkcast_node) for TYPE.
   NOTE(review): the final push of the result is missing from this
   extraction. */
1477 expand_java_CHECKCAST (tree type)
1479 tree value = pop_value (ptr_type_node);
1480 value = build_call_nary (promote_type (type),
1481 build_address_of (soft_checkcast_node),
1482 2, build_class_ref (type), value);
/* Expand the `iinc' bytecode: add the immediate IVAL to int local
   variable LOCAL_VAR_INDEX at bytecode offset PC. */
1487 expand_iinc (unsigned int local_var_index, int ival, int pc)
1489 tree local_var, res;
1490 tree constant_value;
1492 flush_quick_stack ();
1493 local_var = find_local_variable (local_var_index, int_type_node, pc);
1494 constant_value = build_int_cst (NULL_TREE, ival);
1495 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1496 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
/* Build a call to the runtime software divide/modulo helper for OP
   (TRUNC_DIV_EXPR or TRUNC_MOD_EXPR) on int or long operands OP1/OP2.
   Used so division by zero raises ArithmeticException via the runtime.
   NOTE(review): the switch braces, break statements and default cases
   are missing from this extraction. */
1501 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1504 tree arg1 = convert (type, op1);
1505 tree arg2 = convert (type, op2);
1507 if (type == int_type_node)
1511 case TRUNC_DIV_EXPR:
1512 call = soft_idiv_node;
1514 case TRUNC_MOD_EXPR:
1515 call = soft_irem_node;
1521 else if (type == long_type_node)
1525 case TRUNC_DIV_EXPR:
1526 call = soft_ldiv_node;
1528 case TRUNC_MOD_EXPR:
1529 call = soft_lrem_node;
1537 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
/* Build a tree for the binary operation OP of the given TYPE on
   ARG1/ARG2, mapping JVM semantics (unsigned shift, shift-count
   masking, three-way compares, fmod, software div/rem) onto GENERIC.
   NOTE(review): several case labels and closing lines are missing from
   this extraction. */
1542 build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
/* Unsigned right shift: do the shift in the unsigned type, then
   convert back. */
1549 tree u_type = unsigned_type_for (type);
1550 arg1 = convert (u_type, arg1);
1551 arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1552 return convert (type, arg1);
/* JVM shifts use only the low bits of the count; mask to width-1. */
1556 mask = build_int_cst (NULL_TREE,
1557 TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
1558 arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
1561 case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
1562 case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
1563 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1565 tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1566 boolean_type_node, arg1, arg2);
1567 tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
1568 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1569 ifexp2, integer_zero_node,
1570 op == COMPARE_L_EXPR
1571 ? integer_minus_one_node
1572 : integer_one_node);
1573 return fold_build3 (COND_EXPR, int_type_node, ifexp1,
1574 op == COMPARE_L_EXPR ? integer_one_node
1575 : integer_minus_one_node,
/* Plain COMPARE: -1/0/1 via two nested conditionals. */
1579 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1581 tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
1582 tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
1583 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1584 ifexp2, integer_one_node,
1586 return fold_build3 (COND_EXPR, int_type_node,
1587 ifexp1, integer_minus_one_node, second_compare);
1589 case TRUNC_DIV_EXPR:
1590 case TRUNC_MOD_EXPR:
1591 if (TREE_CODE (type) == REAL_TYPE
1592 && op == TRUNC_MOD_EXPR)
/* Floating remainder: call fmod in double precision. */
1595 if (type != double_type_node)
1597 arg1 = convert (double_type_node, arg1);
1598 arg2 = convert (double_type_node, arg2);
1600 call = build_call_nary (double_type_node,
1601 build_address_of (soft_fmod_node),
1603 if (type != double_type_node)
1604 call = convert (type, call);
/* Integer div/rem may go through the runtime helper so that
   divide-by-zero traps correctly. */
1608 if (TREE_CODE (type) == INTEGER_TYPE
1609 && flag_use_divide_subroutine
1610 && ! flag_syntax_only)
1611 return build_java_soft_divmod (op, type, arg1, arg2);
1616 return fold_build2 (op, type, arg1, arg2);
/* Pop two operands, apply OP via build_java_binop and push the
   result.  NOTE(review): the declarations of ltype/rtype/larg/rarg and
   the type-selection logic are missing from this extraction. */
1620 expand_java_binop (tree type, enum tree_code op)
1630 rtype = int_type_node;
1631 rarg = pop_value (rtype);
1634 rarg = pop_value (rtype);
1636 larg = pop_value (ltype);
1637 push_value (build_java_binop (op, type, larg, rarg));
1640 /* Lookup the field named NAME in *TYPEP or its super classes.
1641 If not found, return NULL_TREE.
1642 (If the *TYPEP is not found, or if the field reference is
1643 ambiguous, return error_mark_node.)
1644 If found, return the FIELD_DECL, and set *TYPEP to the
1645 class containing the field. */
/* NOTE(review): loop braces and several statements are missing from
   this extraction. */
1648 lookup_field (tree *typep, tree name)
1650 if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1652 load_class (*typep, 1);
1653 safe_layout_class (*typep);
1654 if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1655 return error_mark_node;
1659 tree field, binfo, base_binfo;
/* First search the fields declared directly in *TYPEP. */
1663 for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field))
1664 if (DECL_NAME (field) == name)
1667 /* Process implemented interfaces. */
1668 save_field = NULL_TREE;
1669 for (binfo = TYPE_BINFO (*typep), i = 0;
1670 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1672 tree t = BINFO_TYPE (base_binfo);
1673 if ((field = lookup_field (&t, name)))
1675 if (save_field == field)
1677 if (save_field == NULL_TREE)
/* Same name found in two unrelated interfaces: ambiguous. */
1681 tree i1 = DECL_CONTEXT (save_field);
1682 tree i2 = DECL_CONTEXT (field);
1683 error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1684 IDENTIFIER_POINTER (name),
1685 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1686 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1687 return error_mark_node;
1692 if (save_field != NULL_TREE)
/* Not here: continue the search in the superclass. */
1695 *typep = CLASSTYPE_SUPER (*typep);
1700 /* Look up the field named NAME in object SELF_VALUE,
1701 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1702 SELF_VALUE is NULL_TREE if looking for a static field. */
1705 build_field_ref (tree self_value, tree self_class, tree name)
1707 tree base_class = self_class;
1708 tree field_decl = lookup_field (&base_class, name);
1709 if (field_decl == NULL_TREE)
1711 error ("field %qs not found", IDENTIFIER_POINTER (name));
1712 return error_mark_node;
1714 if (self_value == NULL_TREE)
1716 return build_static_field_ref (field_decl);
1720 tree base_type = promote_type (base_class);
1722 /* CHECK is true if self_value is not the this pointer. */
1723 int check = (! (DECL_P (self_value)
1724 && DECL_NAME (self_value) == this_identifier_node));
1726 /* Determine whether a field offset from NULL will lie within
1727 Page 0: this is necessary on those GNU/Linux/BSD systems that
1728 trap SEGV to generate NullPointerExceptions.
1730 We assume that Page 0 will be mapped with NOPERM, and that
1731 memory may be allocated from any other page, so only field
1732 offsets < pagesize are guaranteed to trap. We also assume
1733 the smallest page size we'll encounter is 4k bytes. */
1734 if (! flag_syntax_only && check && ! flag_check_references
1735 && ! flag_indirect_dispatch)
1737 tree field_offset = byte_position (field_decl);
1739 page_size = size_int (4096);
1740 check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
1743 if (base_type != TREE_TYPE (self_value))
1744 self_value = fold_build1 (NOP_EXPR, base_type, self_value);
/* Indirect dispatch: fetch the field offset from the otable at
   run time instead of using a compile-time offset. */
1745 if (! flag_syntax_only && flag_indirect_dispatch)
1748 = build_int_cst (NULL_TREE, get_symbol_table_index
1749 (field_decl, NULL_TREE,
1750 &TYPE_OTABLE_METHODS (output_class)));
1752 = build4 (ARRAY_REF, integer_type_node,
1753 TYPE_OTABLE_DECL (output_class), otable_index,
1754 NULL_TREE, NULL_TREE);
/* A zero otable entry for a foreign field means "no such field":
   emit a run-time check that calls the nosuchfield helper. */
1757 if (DECL_CONTEXT (field_decl) != output_class)
1759 = build3 (COND_EXPR, TREE_TYPE (field_offset),
1760 build2 (EQ_EXPR, boolean_type_node,
1761 field_offset, integer_zero_node),
1762 build_call_nary (void_type_node,
1763 build_address_of (soft_nosuchfield_node),
1767 field_offset = fold (convert (sizetype, field_offset));
1768 self_value = java_check_reference (self_value, check);
1770 = fold_build2 (POINTER_PLUS_EXPR,
1771 TREE_TYPE (self_value),
1772 self_value, field_offset);
1773 address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
1775 return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
/* Direct dispatch: a plain COMPONENT_REF, with an optional null
   check folded into the indirect ref. */
1778 self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1780 return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1781 self_value, field_decl, NULL_TREE);
/* Return the LABEL_DECL for bytecode offset PC, creating and
   registering it on first use.  Labels are keyed by a generated
   assembler-style identifier.  NOTE(review): the declarations of
   buf/name are missing from this extraction. */
1786 lookup_label (int pc)
1790 if (pc > highest_label_pc_this_method)
1791 highest_label_pc_this_method = pc;
1792 ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc);
1793 name = get_identifier (buf);
1794 if (IDENTIFIER_LOCAL_VALUE (name))
1795 return IDENTIFIER_LOCAL_VALUE (name);
1798 /* The type of the address of a label is return_address_type_node. */
1799 tree decl = create_label_decl (name);
1800 return pushdecl (decl);
1804 /* Generate a unique name for the purpose of loops and switches
1805 labels, and try-catch-finally blocks label or temporary variables.
   Uses a monotonically increasing counter, so each call yields a
   fresh identifier. */
1808 generate_name (void)
1810 static int l_number = 0;
1812 ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
1814 return get_identifier (buff);
/* Build a LABEL_DECL named NAME in the current function, hidden from
   debug output. */
1818 create_label_decl (tree name)
1821 decl = build_decl (input_location, LABEL_DECL, name,
1822 TREE_TYPE (return_address_type_node));
1823 DECL_CONTEXT (decl) = current_function_decl;
1824 DECL_IGNORED_P (decl) = 1;
1828 /* This maps a bytecode offset (PC) to various flags,
   e.g. BCODE_JUMP_TARGET (see note_label below). */
1829 char *instruction_bits;
1831 /* This is a vector of type states for the current method. It is
1832 indexed by PC. Each element is a tree vector holding the type
1833 state at that PC. We only note type states at basic block
1835 VEC(tree, gc) *type_states;
/* Record that TARGET_PC is a jump target: make sure its label exists
   and mark it in instruction_bits. */
1838 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1840 lookup_label (target_pc);
1841 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1844 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1845 where CONDITION is one of one the compare operators.  Emits a
   COND_EXPR whose then-branch is a GOTO to the target label. */
1848 expand_compare (enum tree_code condition, tree value1, tree value2,
1851 tree target = lookup_label (target_pc);
1852 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1854 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1855 build1 (GOTO_EXPR, void_type_node, target),
1856 build_java_empty_stmt ()));
1859 /* Emit code for a TEST-type opcode: compare the popped value against
   zero (or the null pointer, for reference types) and branch to
   TARGET_PC if CONDITION holds. */
1862 expand_test (enum tree_code condition, tree type, int target_pc)
1864 tree value1, value2;
1865 flush_quick_stack ();
1866 value1 = pop_value (type);
1867 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1868 expand_compare (condition, value1, value2, target_pc);
1871 /* Emit code for a COND-type opcode: pop two values and branch to
   TARGET_PC if CONDITION holds between them. */
1874 expand_cond (enum tree_code condition, tree type, int target_pc)
1876 tree value1, value2;
1877 flush_quick_stack ();
1878 /* note: pop values in opposite order */
1879 value2 = pop_value (type);
1880 value1 = pop_value (type);
1881 /* Maybe should check value1 and value2 for type compatibility ??? */
1882 expand_compare (condition, value1, value2, target_pc);
/* Expand an unconditional `goto' bytecode to TARGET_PC. */
1886 expand_java_goto (int target_pc)
1888 tree target_label = lookup_label (target_pc);
1889 flush_quick_stack ();
1890 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
/* Build a SWITCH_EXPR on SELECTOR whose default case jumps to
   DEFAULT_PC; cases are appended later by expand_java_add_case.
   NOTE(review): the return of switch_expr is missing from this
   extraction. */
1894 expand_java_switch (tree selector, int default_pc)
1896 tree switch_expr, x;
1898 flush_quick_stack ();
1899 switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1900 NULL_TREE, NULL_TREE);
1901 java_add_stmt (switch_expr);
/* Default case: an artificial label followed by a goto to the
   default target. */
1903 x = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, NULL_TREE,
1904 create_artificial_label (input_location));
1905 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1907 x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1908 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
/* Append one `case MATCH: goto TARGET_PC;' arm to SWITCH_EXPR
   (previously built by expand_java_switch). */
1914 expand_java_add_case (tree switch_expr, int match, int target_pc)
1918 value = build_int_cst (TREE_TYPE (switch_expr), match);
1920 x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE,
1921 create_artificial_label (input_location));
1922 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1924 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1925 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
/* Pop the arguments of a call with signature METHOD_TYPE off the
   quick stack, in declaration order, coercing each to its declared
   (possibly promoted) type.  Returns the argument vector.
   NOTE(review): loop structure and the final return are missing from
   this extraction. */
1928 static VEC(tree,gc) *
1929 pop_arguments (tree method_type)
1931 function_args_iterator fnai;
1933 VEC(tree,gc) *args = NULL;
/* First pass: collect the declared parameter types. */
1936 FOREACH_FUNCTION_ARGS (method_type, type, fnai)
1938 /* XXX: leaky abstraction. */
1939 if (type == void_type_node)
1942 VEC_safe_push (tree, gc, args, type);
1945 arity = VEC_length (tree, args);
/* Second pass (reverse): pop each argument and coerce it. */
1949 tree arg = pop_value (VEC_index (tree, args, arity));
1951 /* We simply cast each argument to its proper type. This is
1952 needed since we lose type information coming out of the
1953 verifier. We also have to do this when we pop an integer
1954 type that must be promoted for the function call. */
1955 if (TREE_CODE (type) == POINTER_TYPE)
1956 arg = build1 (NOP_EXPR, type, arg);
1957 else if (targetm.calls.promote_prototypes (type)
1958 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1959 && INTEGRAL_TYPE_P (type))
1960 arg = convert (integer_type_node, arg);
1962 VEC_replace (tree, args, arity, arg);
1968 /* Attach to PTR (a block) the declaration found in ENTRY: splice the
   init-test flag variable into the block's variable list and prepend
   its DECL_EXPR to the block's body.  Hash-table traversal callback. */
1971 attach_init_test_initialization_flags (void **entry, void *ptr)
1973 tree block = (tree)ptr;
1974 struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
1976 if (block != error_mark_node)
1978 if (TREE_CODE (block) == BIND_EXPR)
1980 tree body = BIND_EXPR_BODY (block);
1981 TREE_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1982 BIND_EXPR_VARS (block) = ite->value;
1983 body = build2 (COMPOUND_EXPR, void_type_node,
1984 build1 (DECL_EXPR, void_type_node, ite->value), body);
1985 BIND_EXPR_BODY (block) = body;
/* Same splice for a BLOCK node instead of a BIND_EXPR. */
1989 tree body = BLOCK_SUBBLOCKS (block);
1990 TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1991 BLOCK_EXPR_DECLS (block) = ite->value;
1992 body = build2 (COMPOUND_EXPR, void_type_node,
1993 build1 (DECL_EXPR, void_type_node, ite->value), body);
1994 BLOCK_SUBBLOCKS (block) = body;
2001 /* Build an expression to initialize the class CLAS.
2002 if EXPR is non-NULL, returns an expression to first call the initializer
2003 (if it is needed) and then calls EXPR. */
2006 build_class_init (tree clas, tree expr)
2010 /* An optimization: if CLAS is a superclass of the class we're
2011 compiling, we don't need to initialize it. However, if CLAS is
2012 an interface, it won't necessarily be initialized, even if we
2014 if ((! CLASS_INTERFACE (TYPE_NAME (clas))
2015 && inherits_from_p (current_class, clas))
2016 || current_class == clas)
2019 if (always_initialize_class_p)
/* Unconditional call to the runtime initializer. */
2021 init = build_call_nary (void_type_node,
2022 build_address_of (soft_initclass_node),
2023 1, build_class_ref (clas));
2024 TREE_SIDE_EFFECTS (init) = 1;
/* Otherwise guard the call with a per-function boolean flag so the
   class is initialized at most once per method. */
2028 tree *init_test_decl;
2030 init_test_decl = java_treetreehash_new
2031 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
2033 if (*init_test_decl == NULL)
2035 /* Build a declaration and mark it as a flag used to track
2036 static class initializations. */
2037 decl = build_decl (input_location, VAR_DECL, NULL_TREE,
2039 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
2040 DECL_CONTEXT (decl) = current_function_decl;
2041 DECL_INITIAL (decl) = boolean_false_node;
2042 /* Don't emit any symbolic debugging info for this decl. */
2043 DECL_IGNORED_P (decl) = 1;
2044 *init_test_decl = decl;
2047 init = build_call_nary (void_type_node,
2048 build_address_of (soft_initclass_node),
2049 1, build_class_ref (clas));
2050 TREE_SIDE_EFFECTS (init) = 1;
2051 init = build3 (COND_EXPR, void_type_node,
2052 build2 (EQ_EXPR, boolean_type_node,
2053 *init_test_decl, boolean_false_node),
2054 init, integer_zero_node);
2055 TREE_SIDE_EFFECTS (init) = 1;
/* After initializing, set the flag so later uses skip the call. */
2056 init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2057 build2 (MODIFY_EXPR, boolean_type_node,
2058 *init_test_decl, boolean_true_node));
2059 TREE_SIDE_EFFECTS (init) = 1;
2062 if (expr != NULL_TREE)
2064 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2065 TREE_SIDE_EFFECTS (expr) = 1;
2073 /* Rewrite expensive calls that require stack unwinding at runtime to
2074 cheaper alternatives. The logic here performs these
2077 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2078 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
/* Fields of a rewrite rule: the matched class/method/signature and
   the replacement target, plus an optional hook that appends extra
   arguments (see rules[] below). */
2084 const char *classname;
2086 const char *signature;
2087 const char *new_classname;
2088 const char *new_signature;
2090 void (*rewrite_arglist) (VEC(tree,gc) **);
2093 /* Add __builtin_return_address(0) to the end of an arglist, so the
   callee can identify its caller without unwinding. */
2097 rewrite_arglist_getcaller (VEC(tree,gc) **arglist)
2100 = build_call_expr (built_in_decls[BUILT_IN_RETURN_ADDRESS],
2101 1, integer_zero_node);
/* Inlining would change the return address we just captured. */
2103 DECL_UNINLINABLE (current_function_decl) = 1;
2105 VEC_safe_push (tree, gc, *arglist, retaddr);
2108 /* Add this.class (a reference to the class being compiled) to the end
   of an arglist. */
2111 rewrite_arglist_getclass (VEC(tree,gc) **arglist)
2113 VEC_safe_push (tree, gc, *arglist, build_class_ref (output_class));
/* The rewrite table consulted by maybe_rewrite_invocation; terminated
   by an all-NULL sentinel entry. */
2116 static rewrite_rule rules[] =
2117 {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2118 "java.lang.Class", "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2119 ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2121 {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2122 "java.lang.Class", "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2123 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2125 {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2126 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2127 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2129 {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2130 "()Ljava/lang/ClassLoader;",
2131 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2132 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2134 {"gnu.java.lang.VMCPStringBuilder", "toString", "([CII)Ljava/lang/String;",
2135 "java.lang.String", "([CII)Ljava/lang/String;",
2136 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, NULL},
2138 {NULL, NULL, NULL, NULL, NULL, 0, NULL}};
2140 /* True if this method is special, i.e. it's a private method that
2141 should be exported from a DSO.  A method is special when its class
   and name appear in the rules[] rewrite table. */
2144 special_method_p (tree candidate_method)
2146 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2147 tree method = DECL_NAME (candidate_method);
2150 for (p = rules; p->classname; p++)
2152 if (get_identifier (p->classname) == context
2153 && get_identifier (p->method) == method)
2159 /* Scan the rules list for replacements for *METHOD_P and replace the
2160 args accordingly. If the rewrite results in an access to a private
2161 method, update SPECIAL.*/
2164 maybe_rewrite_invocation (tree *method_p, VEC(tree,gc) **arg_list_p,
2165 tree *method_signature_p, tree *special)
2167 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2169 *special = NULL_TREE;
2171 for (p = rules; p->classname; p++)
2173 if (get_identifier (p->classname) == context)
2175 tree method = DECL_NAME (*method_p);
2176 if (get_identifier (p->method) == method
2177 && get_identifier (p->signature) == *method_signature_p)
/* Matched: redirect the call to the replacement method. */
2180 tree destination_class
2181 = lookup_class (get_identifier (p->new_classname));
2182 gcc_assert (destination_class);
2184 = lookup_java_method (destination_class,
2186 get_identifier (p->new_signature));
/* If the replacement isn't declared, synthesize an external
   method decl for it. */
2187 if (! maybe_method && ! flag_verify_invocations)
2190 = add_method (destination_class, p->flags,
2191 method, get_identifier (p->new_signature));
2192 DECL_EXTERNAL (maybe_method) = 1;
2194 *method_p = maybe_method;
2195 gcc_assert (*method_p);
2196 if (p->rewrite_arglist)
2197 p->rewrite_arglist (arg_list_p);
2198 *method_signature_p = get_identifier (p->new_signature);
2199 *special = integer_one_node;
/* Build an expression yielding the code address of METHOD of
   SELF_TYPE for a statically-bound call, either directly, via the
   atable under indirect dispatch, or by searching the class's method
   table at run time when the class is not compiled here. */
2210 build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2211 tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2212 VEC(tree,gc) *arg_list ATTRIBUTE_UNUSED, tree special)
2215 if (is_compiled_class (self_type))
2217 /* With indirect dispatch we have to use indirect calls for all
2218 publicly visible methods or gcc will use PLT indirections
2219 to reach them. We also have to use indirect dispatch for all
2220 external methods. */
2221 if (! flag_indirect_dispatch
2222 || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
2224 func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
/* Indirect dispatch: load the address from the atable. */
2230 = build_int_cst (NULL_TREE,
2231 (get_symbol_table_index
2233 &TYPE_ATABLE_METHODS (output_class))));
2235 = build4 (ARRAY_REF,
2236 TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2237 TYPE_ATABLE_DECL (output_class), table_index,
2238 NULL_TREE, NULL_TREE);
2240 func = convert (method_ptr_type_node, func);
2244 /* We don't know whether the method has been (statically) compiled.
2245 Compile this code to get a reference to the method's code:
2247 SELF_TYPE->methods[METHOD_INDEX].ncode
2251 int method_index = 0;
2254 /* The method might actually be declared in some superclass, so
2255 we have to use its class context, not the caller's notion of
2256 where the method is. */
2257 self_type = DECL_CONTEXT (method);
2258 ref = build_class_ref (self_type);
2259 ref = build1 (INDIRECT_REF, class_type_node, ref);
2260 if (ncode_ident == NULL_TREE)
2261 ncode_ident = get_identifier ("ncode");
2262 if (methods_ident == NULL_TREE)
2263 methods_ident = get_identifier ("methods");
2264 ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2265 lookup_field (&class_type_node, methods_ident),
/* Scan the class's method list for METHOD to find its index. */
2267 for (meth = TYPE_METHODS (self_type);
2268 ; meth = TREE_CHAIN (meth))
2272 if (meth == NULL_TREE)
2273 fatal_error ("method '%s' not found in class",
2274 IDENTIFIER_POINTER (DECL_NAME (method)));
2277 method_index *= int_size_in_bytes (method_type_node);
2278 ref = fold_build2 (POINTER_PLUS_EXPR, method_ptr_type_node,
2279 ref, size_int (method_index));
2280 ref = build1 (INDIRECT_REF, method_type_node, ref);
2281 func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2282 ref, lookup_field (&method_type_node, ncode_ident),
/* Build an expression fetching the vtable (dispatch table) of the
   receiver, which is argument 0 of ARG_LIST.  IS_INVOKE_INTERFACE
   selects Object's table for array receivers. */
2289 invoke_build_dtable (int is_invoke_interface, VEC(tree,gc) *arg_list)
2291 tree dtable, objectref;
2292 tree saved = save_expr (VEC_index (tree, arg_list, 0));
2294 VEC_replace (tree, arg_list, 0, saved);
2296 /* If we're dealing with interfaces and if the objectref
2297 argument is an array then get the dispatch table of the class
2298 Object rather than the one from the objectref. */
2299 objectref = (is_invoke_interface
2300 && is_array_type_p (TREE_TYPE (saved))
2301 ? build_class_ref (object_type_node) : saved);
2303 if (dtable_ident == NULL_TREE)
2304 dtable_ident = get_identifier ("vtable");
2305 dtable = build_java_indirect_ref (object_type_node, objectref,
2306 flag_check_references);
2307 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2308 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2313 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2314 T. If this decl has not been seen before, it will be added to the
2315 [oa]table_methods. If it has, the existing table slot will be
   reused.  Entries are keyed on the (SPECIAL, T) pair.
   NOTE(review): index bookkeeping and the return statement are
   missing from this extraction. */
2319 get_symbol_table_index (tree t, tree special, tree *symbol_table)
2324 if (*symbol_table == NULL_TREE)
2326 *symbol_table = build_tree_list (special, t);
2330 method_list = *symbol_table;
/* Linear search of the existing list for a matching entry. */
2334 tree value = TREE_VALUE (method_list);
2335 tree purpose = TREE_PURPOSE (method_list);
2336 if (value == t && purpose == special)
2339 if (TREE_CHAIN (method_list) == NULL_TREE)
2342 method_list = TREE_CHAIN (method_list);
/* Not found: append a new entry at the tail. */
2345 TREE_CHAIN (method_list) = build_tree_list (special, t);
/* Build an expression computing the native-code address for an
   invokevirtual of METHOD through DTABLE (the receiver's vtable).
   With -findirect-dispatch the index comes from the otable; otherwise
   DECL_VINDEX gives the byte offset into the vtable.
   NOTE(review): lines are elided in this listing (declarations of
   method_index/otable_index/func, braces, else, final return).  */
2350 build_invokevirtual (tree dtable, tree method, tree special)
2353 tree nativecode_ptr_ptr_type_node
2354 = build_pointer_type (nativecode_ptr_type_node);
2358 if (flag_indirect_dispatch)
/* Indirect dispatch never applies to interface methods.  */
2360 gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
/* Look the method up in the otable and load the runtime-filled index.  */
2363 = build_int_cst (NULL_TREE, get_symbol_table_index
2365 &TYPE_OTABLE_METHODS (output_class)));
2366 method_index = build4 (ARRAY_REF, integer_type_node,
2367 TYPE_OTABLE_DECL (output_class),
2368 otable_index, NULL_TREE, NULL_TREE);
2372 /* We fetch the DECL_VINDEX field directly here, rather than
2373 using get_method_index(). DECL_VINDEX is the true offset
2374 from the vtable base to a method, regardless of any extra
2375 words inserted at the start of the vtable. */
2376 method_index = DECL_VINDEX (method);
/* Scale the slot number to a byte offset; descriptor-based vtables
   (ia64-style) use TARGET_VTABLE_USES_DESCRIPTORS words per slot.  */
2377 method_index = size_binop (MULT_EXPR, method_index,
2378 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
2379 if (TARGET_VTABLE_USES_DESCRIPTORS)
2380 method_index = size_binop (MULT_EXPR, method_index,
2381 size_int (TARGET_VTABLE_USES_DESCRIPTORS));
2384 func = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dtable), dtable,
2385 convert (sizetype, method_index));
2387 if (TARGET_VTABLE_USES_DESCRIPTORS)
2388 func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
/* Plain vtable: load the code pointer out of the slot.  */
2391 func = fold_convert (nativecode_ptr_ptr_type_node, func);
2392 func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
/* Cached identifier for the vtable's "class" field, interned on first use.  */
2398 static GTY(()) tree class_ident;
/* Build an expression computing the target function for an
   invokeinterface of METHOD, given DTABLE (the receiver's vtable).
   With -findirect-dispatch the itable is consulted; otherwise the
   runtime helper _Jv_LookupInterfaceMethodIdx-style lookup
   (soft_lookupinterfacemethod_node) resolves it.
   NOTE(review): this listing elides lines (declarations of interface/
   idx/itable_index, braces, else) — comments are hedged accordingly.  */
2400 build_invokeinterface (tree dtable, tree method)
2405 /* We expand invokeinterface here. */
2407 if (class_ident == NULL_TREE)
2408 class_ident = get_identifier ("class");
/* Fetch the java.lang.Class pointer out of the vtable (with an
   optional reference check).  */
2410 dtable = build_java_indirect_ref (dtable_type, dtable,
2411 flag_check_references);
2412 dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2413 lookup_field (&dtable_type, class_ident), NULL_TREE);
2415 interface = DECL_CONTEXT (method);
2416 gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2417 layout_class_methods (interface);
2419 if (flag_indirect_dispatch)
/* itable entries come in (interface, index) pairs, hence the *2 and
   the two adjacent ARRAY_REFs at itable_index-1 and itable_index.  */
2422 = 2 * (get_symbol_table_index
2423 (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2425 = build4 (ARRAY_REF,
2426 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2427 TYPE_ITABLE_DECL (output_class),
2428 build_int_cst (NULL_TREE, itable_index-1),
2429 NULL_TREE, NULL_TREE);
2431 = build4 (ARRAY_REF,
2432 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2433 TYPE_ITABLE_DECL (output_class),
2434 build_int_cst (NULL_TREE, itable_index),
2435 NULL_TREE, NULL_TREE);
2436 interface = convert (class_ptr_type, interface);
2437 idx = convert (integer_type_node, idx);
/* Non-indirect path: ask the runtime helper, passing the method's
   position within its declaring interface.  */
2441 idx = build_int_cst (NULL_TREE,
2442 get_interface_method_index (method, interface));
2443 interface = build_class_ref (interface);
2446 return build_call_nary (ptr_type_node,
2447 build_address_of (soft_lookupinterfacemethod_node),
2448 3, dtable, interface, idx);
2451 /* Expand one of the invoke_* opcodes.
2452 OPCODE is the specific opcode.
2453 METHOD_REF_INDEX is an index into the constant pool.
2454 NARGS is the number of arguments, or -1 if not specified. */
/* Expand one of the invoke* bytecodes (invokestatic, invokespecial,
   invokevirtual, invokeinterface) into a GENERIC call expression,
   popping arguments from the quick stack and pushing the result.
   NOTE(review): "¤t_jcf" below is mojibake for "&current_jcf"
   (the HTML entity &curren;) — fix at the character level in the real
   file.  This listing also elides many lines (braces, else branches,
   several statement continuations).  */
2457 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
2459 tree method_signature
2460 = COMPONENT_REF_SIGNATURE(¤t_jcf->cpool, method_ref_index);
2461 tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool,
2464 = get_class_constant (current_jcf,
2465 COMPONENT_REF_CLASS_INDEX(¤t_jcf->cpool,
2467 const char *const self_name
2468 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2469 tree call, func, method, method_type;
2470 VEC(tree,gc) *arg_list;
/* CHECK, when set, is a null-receiver check evaluated before the call.  */
2471 tree check = NULL_TREE;
2473 tree special = NULL_TREE;
/* Make sure the declaring class is loaded and laid out before lookup.  */
2475 if (! CLASS_LOADED_P (self_type))
2477 load_class (self_type, 1);
2478 safe_layout_class (self_type);
2479 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2480 fatal_error ("failed to find class '%s'", self_name);
2482 layout_class_methods (self_type);
/* <init> gets constructor lookup; everything else a plain method lookup.  */
2484 if (ID_INIT_P (method_name))
2485 method = lookup_java_constructor (self_type, method_signature);
2487 method = lookup_java_method (self_type, method_name, method_signature);
2489 /* We've found a method in a class other than the one in which it
2490 was wanted. This can happen if, for instance, we're trying to
2491 compile invokespecial super.equals().
2492 FIXME: This is a kludge. Rather than nullifying the result, we
2493 should change lookup_java_method() so that it doesn't search the
2494 superclass chain when we're BC-compiling. */
2495 if (! flag_verify_invocations
2497 && ! TYPE_ARRAY_P (self_type)
2498 && self_type != DECL_CONTEXT (method))
2501 /* We've found a method in an interface, but this isn't an interface
2503 if (opcode != OPCODE_invokeinterface
2505 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2508 /* We've found a non-interface method but we are making an
2509 interface call. This can happen if the interface overrides a
2510 method in Object. */
2511 if (! flag_verify_invocations
2512 && opcode == OPCODE_invokeinterface
2514 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
/* Lookup failed: either report an error, or (when BC-compiling
   without verification) fabricate a dummy method decl so code
   generation can proceed.  */
2517 if (method == NULL_TREE)
2519 if (flag_verify_invocations || ! flag_indirect_dispatch)
2521 error ("class '%s' has no method named '%s' matching signature '%s'",
2523 IDENTIFIER_POINTER (method_name),
2524 IDENTIFIER_POINTER (method_signature));
2528 int flags = ACC_PUBLIC;
2529 if (opcode == OPCODE_invokestatic)
2530 flags |= ACC_STATIC;
2531 if (opcode == OPCODE_invokeinterface)
2533 flags |= ACC_INTERFACE | ACC_ABSTRACT;
2534 CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2536 method = add_method (self_type, flags, method_name,
2538 DECL_ARTIFICIAL (method) = 1;
2539 METHOD_DUMMY (method) = 1;
2540 layout_class_method (self_type, NULL,
2545 /* Invoke static can't invoke static/abstract method */
2546 if (method != NULL_TREE)
2548 if (opcode == OPCODE_invokestatic)
2550 if (!METHOD_STATIC (method))
2552 error ("invokestatic on non static method");
2555 else if (METHOD_ABSTRACT (method))
2557 error ("invokestatic on abstract method");
2563 if (METHOD_STATIC (method))
2565 error ("invoke[non-static] on static method");
2571 if (method == NULL_TREE)
2573 /* If we got here, we emitted an error message above. So we
2574 just pop the arguments, push a properly-typed zero, and
2576 method_type = get_type_from_signature (method_signature);
2577 pop_arguments (method_type);
2578 if (opcode != OPCODE_invokestatic)
2579 pop_type (self_type);
2580 method_type = promote_type (TREE_TYPE (method_type));
2581 push_value (convert (method_type, integer_zero_node));
2585 method_type = TREE_TYPE (method);
2586 arg_list = pop_arguments (method_type);
2587 flush_quick_stack ();
/* Give the front end a chance to rewrite well-known invocations
   (builtin substitutions).  */
2589 maybe_rewrite_invocation (&method, &arg_list, &method_signature,
/* Choose the dispatch strategy: direct for static/special/effectively-
   final virtual calls, vtable/itable otherwise.  */
2593 if (opcode == OPCODE_invokestatic)
2594 func = build_known_method_ref (method, method_type, self_type,
2595 method_signature, arg_list, special);
2596 else if (opcode == OPCODE_invokespecial
2597 || (opcode == OPCODE_invokevirtual
2598 && (METHOD_PRIVATE (method)
2599 || METHOD_FINAL (method)
2600 || CLASS_FINAL (TYPE_NAME (self_type)))))
2602 /* If the object for the method call is null, we throw an
2603 exception. We don't do this if the object is the current
2604 method's `this'. In other cases we just rely on an
2605 optimization pass to eliminate redundant checks. FIXME:
2606 Unfortunately there doesn't seem to be a way to determine
2607 what the current method is right now.
2608 We do omit the check if we're calling <init>. */
2609 /* We use a SAVE_EXPR here to make sure we only evaluate
2610 the new `self' expression once. */
2611 tree save_arg = save_expr (VEC_index (tree, arg_list, 0));
2612 VEC_replace (tree, arg_list, 0, save_arg);
2613 check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2614 func = build_known_method_ref (method, method_type, self_type,
2615 method_signature, arg_list, special);
2619 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2621 if (opcode == OPCODE_invokevirtual)
2622 func = build_invokevirtual (dtable, method, special);
2624 func = build_invokeinterface (dtable, method);
/* Retype the function expression to the concrete method pointer type.  */
2627 if (TREE_CODE (func) == ADDR_EXPR)
2628 TREE_TYPE (func) = build_pointer_type (method_type);
2630 func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2632 call = build_call_vec (TREE_TYPE (method_type), func, arg_list);
2633 TREE_SIDE_EFFECTS (call) = 1;
2634 call = check_for_builtin (method, call);
/* Prepend the null-receiver check, if any, via a COMPOUND_EXPR.  */
2636 if (check != NULL_TREE)
2638 call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2639 TREE_SIDE_EFFECTS (call) = 1;
/* void calls are emitted as statements; value-returning calls are
   (presumably, in the elided else branch) pushed on the quick stack.  */
2642 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2643 java_add_stmt (call);
2647 flush_quick_stack ();
2651 /* Create a stub which will be put into the vtable but which will call
/* Build the body of a stub for native METHOD: the stub acquires a JNI
   environment, looks up the real JNI function (caching the pointer in a
   static "meth" variable), forwards all arguments (prepending JNIEnv*
   and, for static methods, the class), unwraps weak-reference results,
   pops the JNI frame, and returns.
   NOTE(review): this listing elides numerous lines (declarations such as
   jniarg0/meth_var/args_size/bind, braces, #endif lines, argument lists
   of several calls) — comments below are hedged where the elisions
   matter.  */
2655 build_jni_stub (tree method)
2657 tree jnifunc, call, body, method_sig, arg_types;
2658 tree jniarg0, jniarg1, jniarg2, jniarg3;
2659 tree jni_func_type, tem;
2660 tree env_var, res_var = NULL_TREE, block;
2664 VEC(tree,gc) *args = NULL;
2667 tree klass = DECL_CONTEXT (method);
2668 klass = build_class_ref (klass);
2670 gcc_assert (METHOD_NATIVE (method) && flag_jni);
2672 DECL_ARTIFICIAL (method) = 1;
2673 DECL_EXTERNAL (method) = 0;
/* Local holding the JNIEnv* for this frame.  */
2675 env_var = build_decl (input_location,
2676 VAR_DECL, get_identifier ("env"), ptr_type_node);
2677 DECL_CONTEXT (env_var) = method;
/* Non-void methods get a "res" local to capture the JNI call's result.  */
2679 if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2681 res_var = build_decl (input_location, VAR_DECL, get_identifier ("res"),
2682 TREE_TYPE (TREE_TYPE (method)));
2683 DECL_CONTEXT (res_var) = method;
2684 TREE_CHAIN (env_var) = res_var;
2687 method_args = DECL_ARGUMENTS (method);
2688 block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2689 TREE_SIDE_EFFECTS (block) = 1;
2690 TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));
2692 /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2693 body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2694 build_call_nary (ptr_type_node,
2695 build_address_of (soft_getjnienvnewframe_node),
2698 /* The JNIEnv structure is the first argument to the JNI function. */
2699 args_size += int_size_in_bytes (TREE_TYPE (env_var));
2700 VEC_safe_push (tree, gc, args, env_var);
2702 /* For a static method the second argument is the class. For a
2703 non-static method the second argument is `this'; that is already
2704 available in the argument list. */
2705 if (METHOD_STATIC (method))
2707 args_size += int_size_in_bytes (TREE_TYPE (klass));
2708 VEC_safe_push (tree, gc, args, klass);
2711 /* All the arguments to this method become arguments to the
2712 underlying JNI function. If we had to wrap object arguments in a
2713 special way, we would do that here. */
2714 for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem))
2716 int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2717 #ifdef PARM_BOUNDARY
/* Round each argument's size up to the parameter-passing boundary so
   args_size matches the runtime's expectation.  */
2718 arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2721 args_size += (arg_bits / BITS_PER_UNIT);
2723 VEC_safe_push (tree, gc, args, tem);
2725 arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2727 /* Argument types for static methods and the JNIEnv structure.
2728 FIXME: Write and use build_function_type_vec to avoid this. */
2729 if (METHOD_STATIC (method))
2730 arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2731 arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2733 /* We call _Jv_LookupJNIMethod to find the actual underlying
2734 function pointer. _Jv_LookupJNIMethod will throw the appropriate
2735 exception if this function is not found at runtime. */
2736 method_sig = build_java_signature (TREE_TYPE (method));
2738 jniarg1 = build_utf8_ref (DECL_NAME (method));
2739 jniarg2 = build_utf8_ref (unmangle_classname
2740 (IDENTIFIER_POINTER (method_sig),
2741 IDENTIFIER_LENGTH (method_sig)));
2742 jniarg3 = build_int_cst (NULL_TREE, args_size);
2744 tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2746 #ifdef MODIFY_JNI_METHOD_CALL
2747 tem = MODIFY_JNI_METHOD_CALL (tem);
2750 jni_func_type = build_pointer_type (tem);
2752 /* Use the actual function type, rather than a generic pointer type,
2753 such that this decl keeps the actual pointer type from being
2754 garbage-collected. If it is, we end up using canonical types
2755 with different uids for equivalent function types, and this in
2756 turn causes utf8 identifiers and output order to vary. */
2757 meth_var = build_decl (input_location,
2758 VAR_DECL, get_identifier ("meth"), jni_func_type);
2759 TREE_STATIC (meth_var) = 1;
2760 TREE_PUBLIC (meth_var) = 0;
2761 DECL_EXTERNAL (meth_var) = 0;
2762 DECL_CONTEXT (meth_var) = method;
2763 DECL_ARTIFICIAL (meth_var) = 1;
2764 DECL_INITIAL (meth_var) = null_pointer_node;
2765 TREE_USED (meth_var) = 1;
2766 chainon (env_var, meth_var);
2767 build_result_decl (method);
/* meth_var caches the looked-up function pointer: use it if non-null,
   otherwise call the lookup helper and store the result.  */
2769 jnifunc = build3 (COND_EXPR, jni_func_type,
2770 build2 (NE_EXPR, boolean_type_node,
2771 meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
2773 build2 (MODIFY_EXPR, jni_func_type, meth_var,
2775 (NOP_EXPR, jni_func_type,
2776 build_call_nary (ptr_type_node,
2778 (soft_lookupjnimethod_node),
2781 jniarg2, jniarg3))));
2783 /* Now we make the actual JNI call via the resulting function
2785 call = build_call_vec (TREE_TYPE (TREE_TYPE (method)), jnifunc, args);
2787 /* If the JNI call returned a result, capture it here. If we had to
2788 unwrap JNI object results, we would do that here. */
2789 if (res_var != NULL_TREE)
2791 /* If the call returns an object, it may return a JNI weak
2792 reference, in which case we must unwrap it. */
2793 if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2794 call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2795 build_address_of (soft_unwrapjni_node),
2797 call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2801 TREE_SIDE_EFFECTS (call) = 1;
2803 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2804 TREE_SIDE_EFFECTS (body) = 1;
2806 /* Now free the environment we allocated. */
2807 call = build_call_nary (ptr_type_node,
2808 build_address_of (soft_jnipopsystemframe_node),
2810 TREE_SIDE_EFFECTS (call) = 1;
2811 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2812 TREE_SIDE_EFFECTS (body) = 1;
2814 /* Finally, do the return. */
2815 if (res_var != NULL_TREE)
2818 gcc_assert (DECL_RESULT (method));
2819 /* Make sure we copy the result variable to the actual
2820 result. We use the type of the DECL_RESULT because it
2821 might be different from the return type of the function:
2822 it might be promoted. */
2823 drt = TREE_TYPE (DECL_RESULT (method));
2824 if (drt != TREE_TYPE (res_var))
2825 res_var = build1 (CONVERT_EXPR, drt, res_var);
2826 res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2827 TREE_SIDE_EFFECTS (res_var) = 1;
2830 body = build2 (COMPOUND_EXPR, void_type_node, body,
2831 build1 (RETURN_EXPR, void_type_node, res_var));
2832 TREE_SIDE_EFFECTS (body) = 1;
2834 /* Prepend class initialization for static methods reachable from
2836 if (METHOD_STATIC (method)
2837 && (! METHOD_PRIVATE (method)
2838 || INNER_CLASS_P (DECL_CONTEXT (method))))
2840 tree init = build_call_expr (soft_initclass_node, 1,
2842 body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2843 TREE_SIDE_EFFECTS (body) = 1;
/* Wrap everything in a BIND_EXPR over the block's variables.  */
2846 bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2852 /* Given lvalue EXP, return a volatile expression that references the
/* Given lvalue EXP, rebuild it as a volatile-qualified access: take its
   address, cast to pointer-to-volatile, and re-dereference.
   NOTE(review): elided in this listing — the opening brace, the
   declaration line for v_type, and the final "return exp;".  */
2856 java_modify_addr_for_volatile (tree exp)
2858 tree exp_type = TREE_TYPE (exp);
/* v_type: same type as EXP but with TYPE_QUAL_VOLATILE added.  */
2860 = build_qualified_type (exp_type,
2861 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2862 tree addr = build_fold_addr_expr (exp);
2863 v_type = build_pointer_type (v_type);
2864 addr = fold_convert (v_type, addr);
2865 exp = build_fold_indirect_ref (addr);
2870 /* Expand an operation to extract from or store into a field.
2871 IS_STATIC is 1 iff the field is static.
2872 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2873 FIELD_REF_INDEX is an index into the constant pool. */
/* Expand a getfield/putfield/getstatic/putstatic bytecode.
   IS_STATIC is 1 for the static forms, IS_PUTTING is 1 for the put
   forms, FIELD_REF_INDEX indexes the constant pool.
   NOTE(review): "¤t_jcf" is mojibake for "&current_jcf"
   (HTML entity &curren;); this listing also elides lines (braces,
   else branches, some statement heads).  */
2876 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
2879 = get_class_constant (current_jcf,
2880 COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool,
2882 const char *self_name
2883 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2884 tree field_name = COMPONENT_REF_NAME (¤t_jcf->cpool, field_ref_index);
2885 tree field_signature = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool,
2887 tree field_type = get_type_from_signature (field_signature);
/* For a put, the new value is already on the quick stack.  */
2888 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2891 tree original_self_type = self_type;
2895 if (! CLASS_LOADED_P (self_type))
2896 load_class (self_type, 1);
2897 field_decl = lookup_field (&self_type, field_name);
2898 if (field_decl == error_mark_node)
/* Field not found: when not verifying, fabricate an artificial field
   decl so BC compilation can continue; otherwise report an error.  */
2902 else if (field_decl == NULL_TREE)
2904 if (! flag_verify_invocations)
2906 int flags = ACC_PUBLIC;
2908 flags |= ACC_STATIC;
2909 self_type = original_self_type;
2910 field_decl = add_field (original_self_type, field_name,
2912 DECL_ARTIFICIAL (field_decl) = 1;
2913 DECL_IGNORED_P (field_decl) = 1;
2915 /* FIXME: We should be pessimistic about volatility. We
2916 don't know one way or another, but this is safe.
2917 However, doing this has bad effects on code quality. We
2918 need to look at better ways to do this. */
2919 TREE_THIS_VOLATILE (field_decl) = 1;
2924 error ("missing field '%s' in '%s'",
2925 IDENTIFIER_POINTER (field_name), self_name);
2929 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2931 error ("mismatching signature for field '%s' in '%s'",
2932 IDENTIFIER_POINTER (field_name), self_name);
/* Instance accesses pop the object reference off the quick stack.  */
2935 field_ref = is_static ? NULL_TREE : pop_value (self_type);
2939 push_value (convert (field_type, integer_zero_node));
2940 flush_quick_stack ();
2944 field_ref = build_field_ref (field_ref, self_type, field_name);
/* Static accesses may need to trigger class initialization first
   (unless indirect dispatch handles that at runtime).  */
2946 && ! flag_indirect_dispatch)
2948 tree context = DECL_CONTEXT (field_ref);
2949 if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2950 field_ref = build_class_init (context, field_ref);
2952 field_ref = build_class_init (self_type, field_ref);
2956 flush_quick_stack ();
2957 if (FIELD_FINAL (field_decl))
2959 if (DECL_CONTEXT (field_decl) != current_class)
2960 error ("assignment to final field %q+D not in field's class",
2962 /* We used to check for assignments to final fields not
2963 occurring in the class initializer or in a constructor
2964 here. However, this constraint doesn't seem to be
2965 enforced by the JVM. */
/* Volatile put: access through a volatile-qualified lvalue and emit a
   memory barrier (__sync_synchronize) around the store.  */
2968 if (TREE_THIS_VOLATILE (field_decl))
2969 field_ref = java_modify_addr_for_volatile (field_ref);
2971 modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2972 field_ref, new_value);
2974 if (TREE_THIS_VOLATILE (field_decl))
2976 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2978 java_add_stmt (modify_expr);
/* Get path: copy the field into a temporary local, with the same
   volatile handling, and (presumably in elided code) push it.  */
2982 tree temp = build_decl (input_location,
2983 VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2984 java_add_local_var (temp);
2986 if (TREE_THIS_VOLATILE (field_decl))
2987 field_ref = java_modify_addr_for_volatile (field_ref);
2990 = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2991 java_add_stmt (modify_expr);
2993 if (TREE_THIS_VOLATILE (field_decl))
2995 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2999 TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
/* Restore the verifier-recorded type state for bytecode offset PC:
   reset stack_pointer and refill type_map[] from the saved TREE_VEC.
   NOTE(review): the opening brace and the declaration of `i' are
   elided in this listing.  */
3003 load_type_state (int pc)
3006 tree vec = VEC_index (tree, type_states, pc);
3007 int cur_length = TREE_VEC_LENGTH (vec);
/* The vector holds locals followed by stack slots; subtracting the
   locals count yields the operand-stack depth.  */
3008 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
3009 for (i = 0; i < cur_length; i++)
3010 type_map [i] = TREE_VEC_ELT (vec, i);
3013 /* Go over METHOD's bytecode and note instruction starts in
3014 instruction_bits[]. */
/* Pre-scan METHOD's bytecode, setting BCODE_INSTRUCTION_START bits in
   instruction_bits[] and noting every branch target via NOTE_LABEL.
   The switch body is generated by expanding javaop.def with the PRE_*
   macros below, which merely consume immediates and record labels.
   NOTE(review): several lines are elided in this listing (e.g. macro
   continuation lines, #undef/#define bookkeeping after the #include).  */
3017 note_instructions (JCF *jcf, tree method)
3020 unsigned char* byte_ops;
3021 long length = DECL_CODE_LENGTH (method);
/* RET clashes with a target macro from config/i386/i386.h.  */
3026 #undef RET /* Defined by config/i386/i386.h */
3028 #define BCODE byte_ops
3029 #define BYTE_type_node byte_type_node
3030 #define SHORT_type_node short_type_node
3031 #define INT_type_node int_type_node
3032 #define LONG_type_node long_type_node
3033 #define CHAR_type_node char_type_node
3034 #define PTR_type_node ptr_type_node
3035 #define FLOAT_type_node float_type_node
3036 #define DOUBLE_type_node double_type_node
3037 #define VOID_type_node void_type_node
3038 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3039 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3040 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3041 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3043 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
3045 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3046 byte_ops = jcf->read_ptr;
/* (Re)allocate the per-PC bit array and the type-state vector; +1 so
   offset `length' (one past the end) is addressable.  */
3047 instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
3048 memset (instruction_bits, 0, length + 1);
3049 type_states = VEC_alloc (tree, gc, length + 1);
3050 VEC_safe_grow_cleared (tree, gc, type_states, length + 1);
3052 /* This pass figures out which PC can be the targets of jumps. */
3053 for (PC = 0; PC < length;)
3055 int oldpc = PC; /* PC at instruction start. */
3056 instruction_bits [PC] |= BCODE_INSTRUCTION_START;
3057 switch (byte_ops[PC++])
3059 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3061 PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3064 #define NOTE_LABEL(PC) note_label(oldpc, PC)
3066 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3067 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3068 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3069 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3070 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3071 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3072 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3073 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3075 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3076 PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3077 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3078 ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3079 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3080 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3081 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3082 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3084 /* two forms of wide instructions */
3085 #define PRE_SPECIAL_WIDE(IGNORE) \
3087 int modified_opcode = IMMEDIATE_u1; \
3088 if (modified_opcode == OPCODE_iinc) \
3090 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3091 (void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
3095 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3099 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3101 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3103 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3104 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3105 PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3106 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
3107 #define PRE_ARRAY_STORE(TYPE) /* nothing */
3108 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3109 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3110 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3111 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3112 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
3114 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3115 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3116 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3117 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3118 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3119 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3120 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3122 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3124 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)
/* Switch payloads are 4-byte aligned relative to the method start.  */
3126 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3127 PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3129 #define PRE_LOOKUP_SWITCH \
3130 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3131 NOTE_LABEL (default_offset+oldpc); \
3133 while (--npairs >= 0) { \
3134 jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
3135 jint offset = IMMEDIATE_s4; \
3136 NOTE_LABEL (offset+oldpc); } \
3139 #define PRE_TABLE_SWITCH \
3140 { jint default_offset = IMMEDIATE_s4; \
3141 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3142 NOTE_LABEL (default_offset+oldpc); \
3144 while (low++ <= high) { \
3145 jint offset = IMMEDIATE_s4; \
3146 NOTE_LABEL (offset+oldpc); } \
3149 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3150 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3151 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3152 (void)(IMMEDIATE_u2); \
3153 PC += 2 * IS_INTERFACE /* for invokeinterface */;
3155 #include "javaop.def"
/* Translate METHOD's bytecode to GENERIC: record line-number info,
   verify, then walk the instructions, emitting labels at branch targets,
   nopping out unverified (dead) regions, and dispatching each opcode
   through process_jvm_instruction.
   NOTE(review): this listing elides lines (braces, `int i/PC`
   declarations, the pc-range check around the warning, the nop store,
   some loop bodies) — hedged comments below mark those spots.  */
3162 expand_byte_code (JCF *jcf, tree method)
3166 const unsigned char *linenumber_pointer;
3167 int dead_code_index = -1;
3168 unsigned char* byte_ops;
3169 long length = DECL_CODE_LENGTH (method);
3170 location_t max_location = input_location;
3173 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3174 byte_ops = jcf->read_ptr;
3176 /* We make an initial pass of the line number table, to note
3177 which instructions have associated line number entries. */
3178 linenumber_pointer = linenumber_table;
3179 for (i = 0; i < linenumber_count; i++)
3181 int pc = GET_u2 (linenumber_pointer);
/* Each table entry is 4 bytes: u2 pc, u2 line.  */
3182 linenumber_pointer += 4;
3184 warning (0, "invalid PC in line number table");
3187 if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3188 instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3189 instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
/* Bail out (presumably, in elided code) if verification fails.  */
3193 if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3196 promote_arguments ();
3197 cache_this_class_ref (method);
3198 cache_cpool_data_ref ();
3200 /* Translate bytecodes. */
3201 linenumber_pointer = linenumber_table;
3202 for (PC = 0; PC < length;)
3204 if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3206 tree label = lookup_label (PC);
3207 flush_quick_stack ();
3208 if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3209 java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
3210 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3211 load_type_state (PC);
3214 if (! (instruction_bits [PC] & BCODE_VERIFIED))
3216 if (dead_code_index == -1)
3218 /* This is the start of a region of unreachable bytecodes.
3219 They still need to be processed in order for EH ranges
3220 to get handled correctly. However, we can simply
3221 replace these bytecodes with nops. */
3222 dead_code_index = PC;
3225 /* Turn this bytecode into a nop. */
3230 if (dead_code_index != -1)
3232 /* We've just reached the end of a region of dead code. */
3234 warning (0, "unreachable bytecode from %d to before %d",
3235 dead_code_index, PC);
3236 dead_code_index = -1;
3240 /* Handle possible line number entry for this PC.
3242 This code handles out-of-order and multiple linenumbers per PC,
3243 but is optimized for the case of line numbers increasing
3244 monotonically with PC. */
3245 if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
3247 if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3248 || GET_u2 (linenumber_pointer) != PC)
3249 linenumber_pointer = linenumber_table;
3250 while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3252 int pc = GET_u2 (linenumber_pointer);
3253 linenumber_pointer += 4;
3256 int line = GET_u2 (linenumber_pointer - 2);
/* Track the highest source location seen for LAST_LINE below.  */
3257 input_location = linemap_line_start (line_table, line, 1);
3258 if (input_location > max_location)
3259 max_location = input_location;
3260 if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
3265 maybe_pushlevels (PC);
3266 PC = process_jvm_instruction (PC, byte_ops, length);
3267 maybe_poplevels (PC);
3270 uncache_this_class_ref (method);
3272 if (dead_code_index != -1)
3274 /* We've just reached the end of a region of dead code. */
3276 warning (0, "unreachable bytecode from %d to the end of the method",
3280 DECL_FUNCTION_LAST_LINE (method) = max_location;
/* Push constant-pool entry INDEX of JCF onto the quick stack:
   String entries become interned string references, Class entries
   become class references, anything else goes through get_constant.
   NOTE(review): this listing elides the braces, the declarations of
   `c'/`name', and the final push_value call.  */
3284 java_push_constant_from_pool (JCF *jcf, int index)
3287 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
/* Re-intern the string so identical literals share one constant.  */
3290 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3291 index = alloc_name_constant (CONSTANT_String, name);
3292 c = build_ref_from_constant_pool (index);
3293 c = convert (promote_type (string_type_node), c);
3295 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3296 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3298 tree record = get_class_constant (jcf, index);
3299 c = build_class_ref (record);
3302 c = get_constant (jcf, index);
3307 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3308 long length ATTRIBUTE_UNUSED)
3310 const char *opname; /* Temporary ??? */
3311 int oldpc = PC; /* PC at instruction start. */
3313 /* If the instruction is at the beginning of an exception handler,
3314 replace the top of the stack with the thrown object reference. */
3315 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3317 /* Note that the verifier will not emit a type map at all for
3318 dead exception handlers. In this case we just ignore the
3320 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3322 tree type = pop_type (promote_type (throwable_type_node));
3323 push_value (build_exception_object_ref (type));
3327 switch (byte_ops[PC++])
3329 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3332 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3335 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3337 int saw_index = 0; \
3338 int index = OPERAND_VALUE; \
3339 (void) saw_index; /* Avoid set but not used warning. */ \
3341 (find_local_variable (index, return_address_type_node, oldpc)); \
3344 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3346 /* OPERAND_VALUE may have side-effects on PC */ \
3347 int opvalue = OPERAND_VALUE; \
3348 build_java_jsr (oldpc + opvalue, PC); \
3351 /* Push a constant onto the stack. */
3352 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3353 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3354 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3355 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3357 /* internal macro added for use by the WIDE case */
3358 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3359 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3361 /* Push local variable onto the opcode stack. */
3362 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3364 /* have to do this since OPERAND_VALUE may have side-effects */ \
3365 int opvalue = OPERAND_VALUE; \
3366 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3369 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3370 expand_java_return (OPERAND_TYPE##_type_node)
3372 #define REM_EXPR TRUNC_MOD_EXPR
3373 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3374 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3376 #define FIELD(IS_STATIC, IS_PUT) \
3377 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3379 #define TEST(OPERAND_TYPE, CONDITION) \
3380 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3382 #define COND(OPERAND_TYPE, CONDITION) \
3383 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3385 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3386 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3388 #define BRANCH_GOTO(OPERAND_VALUE) \
3389 expand_java_goto (oldpc + OPERAND_VALUE)
3391 #define BRANCH_CALL(OPERAND_VALUE) \
3392 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3395 #define BRANCH_RETURN(OPERAND_VALUE) \
3397 tree type = OPERAND_TYPE##_type_node; \
3398 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3399 expand_java_ret (value); \
3403 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3404 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3405 fprintf (stderr, "(not implemented)\n")
/* One-operand variant of NOT_IMPL for javaop.def entries.  */
3406 #define NOT_IMPL1(OPERAND_VALUE) \
3407 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3408 fprintf (stderr, "(not implemented)\n")
/* NOTE(review): BRANCH_RETURN is redefined here; presumably the two
   definitions are selected by conditional compilation that is not
   visible in this view -- confirm before editing.  */
3410 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
/* Stack-manipulation opcodes dispatch on SUBOP (POP/SWAP/DUP...).  */
3412 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3414 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3416 #define STACK_SWAP(COUNT) java_stack_swap()
/* dup/dup2 and the _x1/_x2 forms: second argument is the depth the
   duplicated words are inserted at.  */
3418 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3419 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3420 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
/* tableswitch/lookupswitch: first round PC up to a 4-byte boundary to
   skip the alignment padding, then expand the selected variant.  */
3422 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3423 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
/* lookupswitch: default offset, pair count, then npairs of
   (match, offset); each case target is relative to oldpc.  */
3425 #define LOOKUP_SWITCH \
3426 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3427 tree selector = pop_value (INT_type_node); \
3428 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3429 while (--npairs >= 0) \
3431 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3432 expand_java_add_case (switch_expr, match, oldpc + offset); \
3436 #define TABLE_SWITCH \
3437 { jint default_offset = IMMEDIATE_s4; \
3438 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3439 tree selector = pop_value (INT_type_node); \
3440 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3441 for (; low <= high; low++) \
3443 jint offset = IMMEDIATE_s4; \
3444 expand_java_add_case (switch_expr, low, oldpc + offset); \
3448 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3449 { int opcode = byte_ops[PC-1]; \
3450 int method_ref_index = IMMEDIATE_u2; \
3452 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3454 expand_invoke (opcode, method_ref_index, nargs); \
3457 /* Handle new, checkcast, instanceof */
3458 #define OBJECT(TYPE, OP) \
3459 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
/* Array opcodes dispatch on SUBOP: LOAD, STORE, LENGTH, NEW.  */
3461 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3463 #define ARRAY_LOAD(OPERAND_TYPE) \
3465 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3468 #define ARRAY_STORE(OPERAND_TYPE) \
3470 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3473 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
/* newarray/anewarray/multianewarray are selected via OPERAND_TYPE.  */
3474 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3475 #define ARRAY_NEW_PTR() \
3476 push_value (build_anewarray (get_class_constant (current_jcf, \
3478 pop_value (int_type_node)));
3479 #define ARRAY_NEW_NUM() \
3481 int atype = IMMEDIATE_u1; \
3482 push_value (build_newarray (atype, pop_value (int_type_node)));\
3484 #define ARRAY_NEW_MULTI() \
3486 tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3487 int ndims = IMMEDIATE_u1; \
3488 expand_java_multianewarray( klass, ndims ); \
/* ineg/lneg/fneg/dneg: arithmetic negation of the top stack value.  */ \
3491 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3492 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3493 pop_value (OPERAND_TYPE##_type_node)));
/* i2b/i2c/i2s style conversions: convert to the narrow type, then
   re-present the result as int (NOP_EXPR wrapper).  */
3495 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3497 push_value (build1 (NOP_EXPR, int_type_node, \
3498 (convert (TO_TYPE##_type_node, \
3499 pop_value (FROM_TYPE##_type_node))))); \
/* Plain numeric conversion between stack types.  */ \
3502 #define CONVERT(FROM_TYPE, TO_TYPE) \
3504 push_value (convert (TO_TYPE##_type_node, \
3505 pop_value (FROM_TYPE##_type_node))); \
3508 /* internal macro added for use by the WIDE case
3509 Added TREE_TYPE (decl) assignment, apbianco */
3510 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3513 int index = OPVALUE; \
3514 tree type = OPTYPE; \
3515 value = pop_value (type); \
/* Use the popped value's own type so the decl gets the precise stored type.  */ \
3516 type = TREE_TYPE (value); \
3517 decl = find_local_variable (index, type, oldpc); \
3518 set_local_type (index, type); \
3519 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
/* Pop the stack into a local; OPERAND_VALUE is evaluated exactly once.  */ \
3522 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3524 /* have to do this since OPERAND_VALUE may have side-effects */ \
3525 int opvalue = OPERAND_VALUE; \
3526 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3529 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3530 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
/* monitorenter/monitorexit expand to the soft (runtime) monitor hooks.  */
3532 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3533 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
/* Pop the object reference and emit a call to the given monitor
   routine; the quick stack is flushed because the call has side
   effects that must be ordered with pending stack values.  */
3535 #define MONITOR_OPERATION(call) \
3537 tree o = pop_value (ptr_type_node); \
3539 flush_quick_stack (); \
3540 c = build_java_monitor (call, o); \
3541 TREE_SIDE_EFFECTS (c) = 1; \
3542 java_add_stmt (c); \
/* iinc: add the signed s1 immediate to local #u1; stack untouched.  */ \
3545 #define SPECIAL_IINC(IGNORED) \
3547 unsigned int local_var_index = IMMEDIATE_u1; \
3548 int ival = IMMEDIATE_s1; \
3549 expand_iinc(local_var_index, ival, oldpc); \
/* `wide' prefix: the modified opcode that follows takes a 16-bit
   local-variable index (and, for iinc, a 16-bit signed immediate)
   instead of the usual 8-bit operands.  Fix: the default-case error
   message read "unrecogized"; corrected to "unrecognized".  */
#define SPECIAL_WIDE(IGNORED) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    unsigned int local_var_index = IMMEDIATE_u2; \
    switch (modified_opcode) \
      { \
      case OPCODE_iinc: \
	{ \
	  int ival = IMMEDIATE_s2; \
	  expand_iinc (local_var_index, ival, oldpc); \
	} \
	break; \
      case OPCODE_iload: \
      case OPCODE_lload: \
      case OPCODE_fload: \
      case OPCODE_dload: \
      case OPCODE_aload: \
	{ \
	  /* duplicate code from LOAD macro */ \
	  LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
	} \
	break; \
      case OPCODE_istore: \
      case OPCODE_lstore: \
      case OPCODE_fstore: \
      case OPCODE_dstore: \
      case OPCODE_astore: \
	{ \
	  STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
	} \
	break; \
      default: \
	error ("unrecognized wide sub-instruction"); \
      } \
  }
3588 #define SPECIAL_THROW(IGNORED) \
3589 build_java_athrow (pop_value (throwable_type_node))
/* breakpoint and the generic IMPL entries fall back to the
   not-implemented stubs above.  */
3591 #define SPECIAL_BREAK NOT_IMPL1
3592 #define IMPL NOT_IMPL
/* The per-opcode cases of the dispatch switch are generated here.  */
3594 #include "javaop.def"
/* Default case of the opcode dispatch (enclosing switch is not
   visible in this view).  */
3597 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3602 /* Return the opcode at PC in the code section pointed to by
3605 static unsigned char
3606 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3608 unsigned char opcode;
/* Remember the current read position so it can be restored below;
   this function must not move the JCF read pointer.  */
3609 long absolute_offset = (long)JCF_TELL (jcf);
3611 JCF_SEEK (jcf, code_offset);
3612 opcode = jcf->read_ptr [pc];
/* Restore the caller's position before returning the peeked byte.  */
3613 JCF_SEEK (jcf, absolute_offset);
3617 /* Some bytecode compilers are emitting accurate LocalVariableTable
3618 attributes. Here's an example:
3623 Attribute "LocalVariableTable"
3624 slot #<n>: ... (PC: PC+1 length: L)
3626 This is accurate because the local in slot <n> really exists after
3627 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3629 This procedure recognizes this situation and extends the live range
3630 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3631 length of the store instruction.)
3633 This function is used by `give_name_to_locals' so that a local's
3634 DECL features a DECL_LOCAL_START_PC such that the first related
3635 store operation will use DECL as a destination, not an unrelated
3636 temporary created for the occasion.
3638 This function uses a global (instruction_bits) that `note_instructions' should
3639 have allocated and filled properly. */
3642 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3643 int start_pc, int slot)
3645 int first, index, opcode;
3654 /* Find last previous instruction and remember it */
3655 for (pc = start_pc-1; pc; pc--)
3656 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3660 /* Retrieve the instruction, handle `wide'. */
3661 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3662 if (opcode == OPCODE_wide)
/* The modified opcode follows the `wide' prefix byte.  */
3665 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
/* For the compact <t>store_<n> forms the slot number is encoded in
   the opcode itself; FIRST records the base opcode of each family so
   that `opcode - first' recovers the slot number.  */
3670 case OPCODE_astore_0:
3671 case OPCODE_astore_1:
3672 case OPCODE_astore_2:
3673 case OPCODE_astore_3:
3674 first = OPCODE_astore_0;
3677 case OPCODE_istore_0:
3678 case OPCODE_istore_1:
3679 case OPCODE_istore_2:
3680 case OPCODE_istore_3:
3681 first = OPCODE_istore_0;
3684 case OPCODE_lstore_0:
3685 case OPCODE_lstore_1:
3686 case OPCODE_lstore_2:
3687 case OPCODE_lstore_3:
3688 first = OPCODE_lstore_0;
3691 case OPCODE_fstore_0:
3692 case OPCODE_fstore_1:
3693 case OPCODE_fstore_2:
3694 case OPCODE_fstore_3:
3695 first = OPCODE_fstore_0;
3698 case OPCODE_dstore_0:
3699 case OPCODE_dstore_1:
3700 case OPCODE_dstore_2:
3701 case OPCODE_dstore_3:
3702 first = OPCODE_dstore_0;
/* Otherwise the store carries an explicit index operand; a second
   operand byte is folded in for the `wide' form.  NOTE(review): the
   second byte becomes the high byte here -- verify against the JVM's
   big-endian operand order.  */
3710 index = peek_opcode_at_pc (jcf, code_offset, pc);
3713 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3714 index = (other << 8) + index;
3719 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3720 means we have a <t>store. */
3721 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3727 /* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
3728 order, as specified by Java Language Specification.
3730 The problem is that while expand_expr will evaluate its sub-operands in
3731 left-to-right order, for variables it will just return an rtx (i.e.
3732 an lvalue) for the variable (rather than an rvalue). So it is possible
3733 that a later sub-operand will change the register, and when the
3734 actual operation is done, it will use the new value, when it should
3735 have used the original value.
3737 We fix this by using save_expr. This forces the sub-operand to be
3738 copied into a fresh virtual register.
3740 For method invocation, we modify the arguments so that a
3741 left-to-right order evaluation is performed. Saved expressions
3742 will, in CALL_EXPR order, be reused when the call will be expanded.
3744 We also promote outgoing args if needed. */
3747 force_evaluation_order (tree node)
3749 if (flag_syntax_only)
/* Only calls (possibly wrapped in a COMPOUND_EXPR by constructor
   handling) need their argument evaluation order pinned down.  */
3751 if (TREE_CODE (node) == CALL_EXPR
3752 || (TREE_CODE (node) == COMPOUND_EXPR
3753 && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
3754 && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
3759 /* Account for wrapped around ctors. */
3760 if (TREE_CODE (node) == COMPOUND_EXPR)
3761 call = TREE_OPERAND (node, 0);
3765 nargs = call_expr_nargs (call);
3767 /* This reverses the evaluation order. This is a desired effect. */
3768 for (i = 0, cmp = NULL_TREE; i < nargs; i++)
3770 tree arg = CALL_EXPR_ARG (call, i);
3771 /* Promote types smaller than integer. This is required by
3773 tree type = TREE_TYPE (arg);
3775 if (targetm.calls.promote_prototypes (type)
3776 && INTEGRAL_TYPE_P (type)
3777 && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
3778 TYPE_SIZE (integer_type_node)))
3779 arg = fold_convert (integer_type_node, arg);
/* Save the (recursively ordered) argument and chain it onto CMP so
   every argument is evaluated before the call itself; the CALL_EXPR
   then reuses the SAVE_EXPRs.  */
3781 saved = save_expr (force_evaluation_order (arg));
3782 cmp = (cmp == NULL_TREE ? saved :
3783 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));
3785 CALL_EXPR_ARG (call, i) = saved;
3788 if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
3789 TREE_SIDE_EFFECTS (cmp) = 1;
/* Prepend the evaluation chain to the original node.  */
3793 cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
3794 if (TREE_TYPE (cmp) != void_type_node)
3795 cmp = save_expr (cmp);
3796 TREE_SIDE_EFFECTS (cmp) = 1;
3803 /* Build a node to represent empty statements and blocks. */
3806 build_java_empty_stmt (void)
/* Delegate to the generic empty-statement builder at the current
   input location.  */
3808 tree t = build_empty_stmt (input_location);
3812 /* Promote all args of integral type before generating any code. */
3815 promote_arguments (void)
3819 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3820 arg != NULL_TREE; arg = TREE_CHAIN (arg), i++)
3822 tree arg_type = TREE_TYPE (arg);
/* Sub-int integral parameters are widened into an int local slot.  */
3823 if (INTEGRAL_TYPE_P (arg_type)
3824 && TYPE_PRECISION (arg_type) < 32)
3826 tree copy = find_local_variable (i, integer_type_node, -1);
3827 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3829 fold_convert (integer_type_node, arg)));
/* NOTE(review): wide types (long/double) presumably consume an extra
   local-variable slot here -- the adjustment itself is not visible in
   this view; confirm against the full source.  */
3831 if (TYPE_IS_WIDE (arg_type))
3836 /* Create a local variable that points to the constant pool. */
3839 cache_cpool_data_ref (void)
3844 tree d = build_constant_data_ref (flag_indirect_classes);
3845 tree cpool_ptr = build_decl (input_location, VAR_DECL, NULL_TREE,
3846 build_pointer_type (TREE_TYPE (d)));
3847 java_add_local_var (cpool_ptr);
/* The pointer never changes after this single initialization.  */
3848 TREE_CONSTANT (cpool_ptr) = 1;
3850 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3851 cpool_ptr, build_address_of (d)));
/* Dereferences of the cached pointer are marked non-trapping.  */
3852 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3853 TREE_THIS_NOTRAP (cpool) = 1;
3854 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3858 #include "gt-java-expr.h"