1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008, 2010 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
25 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
29 #include "coretypes.h"
36 #include "java-tree.h"
38 #include "java-opcodes.h"
40 #include "java-except.h"
46 #include "tree-iterator.h"
/* Forward declarations for the file-local helpers that expand individual
   JVM bytecodes into GCC trees.  All are static: this is the private
   machinery of the bytecode->tree expander.  */
50 static void flush_quick_stack (void);
51 static void push_value (tree);
52 static tree pop_value (tree);
53 static void java_stack_swap (void);
54 static void java_stack_dup (int, int);
55 static void build_java_athrow (tree);
56 static void build_java_jsr (int, int);
57 static void build_java_ret (tree);
58 static void expand_java_multianewarray (tree, int);
59 static void expand_java_arraystore (tree);
60 static void expand_java_arrayload (tree);
61 static void expand_java_array_length (void);
62 static tree build_java_monitor (tree, tree);
63 static void expand_java_pushc (int, tree);
64 static void expand_java_return (tree);
65 static void expand_load_internal (int, tree, int);
66 static void expand_java_NEW (tree);
67 static void expand_java_INSTANCEOF (tree);
68 static void expand_java_CHECKCAST (tree);
69 static void expand_iinc (unsigned int, int, int);
70 static void expand_java_binop (tree, enum tree_code);
71 static void note_label (int, int);
72 static void expand_compare (enum tree_code, tree, tree, int);
73 static void expand_test (enum tree_code, tree, int);
74 static void expand_cond (enum tree_code, tree, int);
75 static void expand_java_goto (int);
76 static tree expand_java_switch (tree, int);
77 static void expand_java_add_case (tree, int, int);
78 static VEC(tree,gc) *pop_arguments (tree);
79 static void expand_invoke (int, int, int);
80 static void expand_java_field_op (int, int, int);
81 static void java_push_constant_from_pool (struct JCF *, int);
82 static void java_stack_pop (int);
83 static tree build_java_throw_out_of_bounds_exception (tree);
84 static tree build_java_check_indexed_type (tree, tree);
85 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
86 static void promote_arguments (void);
87 static void cache_cpool_data_ref (void);
/* operand_type[opcode] maps a bytecode opcode number to the tree type its
   operand has; only the load/store families (indexes 21-25 and 54-58) are
   populated, by init_expr_processing.  GTY(()) marks these as GC roots.  */
89 static GTY(()) tree operand_type[59];
/* Identifier nodes used when looking up the "methods", "ncode" and
   "dtable" fields of runtime class objects.  */
91 static GTY(()) tree methods_ident;
92 static GTY(()) tree ncode_ident;
93 tree dtable_ident = NULL_TREE;
95 /* Set to nonzero value in order to emit class initialization code
96 before static field references. */
97 int always_initialize_class_p = 0;
99 /* We store the stack state in two places:
100 Within a basic block, we use the quick_stack, which is a
101 pushdown list (TREE_LISTs) of expression nodes.
102 This is the top part of the stack; below that we use find_stack_slot.
103 At the end of a basic block, the quick_stack must be flushed
104 to the stack slot array (as handled by find_stack_slot).
105 Using quick_stack generates better code (especially when
106 compiled without optimization), because we do not have to
107 explicitly store and load trees to temporary variables.
109 If a variable is on the quick stack, it means the value of variable
110 when the quick stack was last flushed. Conceptually, flush_quick_stack
111 saves all the quick_stack elements in parallel. However, that is
112 complicated, so it actually saves them (i.e. copies each stack value
113 to its home virtual register) from low indexes.  This allows a quick_stack
114 element at index i (counting from the bottom of the stack) to reference
115 stack slot virtual registers that are >= i, but not those that are deeper.
116 This convention makes most operations easier. For example iadd works
117 even when the stack contains (reg[0], reg[1]): It results in the
118 stack containing (reg[0]+reg[1]), which is OK. However, some stack
119 operations are more complicated. For example dup given a stack
120 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
121 the convention, since stack value 1 would refer to a register with
122 lower index (reg[0]), which flush_quick_stack does not safely handle.
123 So dup cannot just add an extra element to the quick_stack, but iadd can.
/* The in-basic-block portion of the modeled JVM operand stack: a pushdown
   list of TREE_LIST nodes holding expression trees.  See the long comment
   above for the flushing invariant.  */
126 static GTY(()) tree quick_stack;
128 /* A free-list of unused permanent TREE_LIST nodes. */
129 static GTY((deletable)) tree tree_list_free_list;
131 /* The physical memory page size used in this computer. See
132 build_field_ref(). */
133 static GTY(()) tree page_size;
135 /* The stack pointer of the Java virtual machine.
136 This does include the size of the quick_stack. */
/* Per-method line-number table, as read from the class file.  */
140 const unsigned char *linenumber_table;
141 int linenumber_count;
143 /* Largest pc so far in this method that has been passed to lookup_label. */
144 int highest_label_pc_this_method = -1;
146 /* Base value for this method to add to pc to get generated label. */
147 int start_label_pc_this_method = 0;
/* One-time setup: seed operand_type[] with the stack types of the
   iload..aload (21-25) and istore..astore (54-58) opcode pairs.  */
150 init_expr_processing (void)
152 operand_type[21] = operand_type[54] = int_type_node;
153 operand_type[22] = operand_type[55] = long_type_node;
154 operand_type[23] = operand_type[56] = float_type_node;
155 operand_type[24] = operand_type[57] = double_type_node;
156 operand_type[25] = operand_type[58] = ptr_type_node;
/* Convert EXPR to a boolean-valued tree.  Comparison and TRUTH_* nodes
   are returned unchanged; constants fold to boolean_{false,true}_node;
   everything else becomes EXPR != false.  */
160 java_truthvalue_conversion (tree expr)
162 /* It is simpler and generates better code to have only TRUTH_*_EXPR
163 or comparison expressions as truth values at this level.
165 This function should normally be identity for Java. */
167 switch (TREE_CODE (expr))
169 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
170 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
171 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
172 case ORDERED_EXPR: case UNORDERED_EXPR:
173 case TRUTH_ANDIF_EXPR:
174 case TRUTH_ORIF_EXPR:
/* Integer and real constants fold directly to a boolean constant.  */
183 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
186 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
188 /* are these legal? XXX JH */
192 /* These don't change whether an object is nonzero or zero. */
193 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
196 /* Distribute the conversion into the arms of a COND_EXPR. */
197 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
198 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
199 java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
202 /* If this is widening the argument, we can ignore it. */
203 if (TYPE_PRECISION (TREE_TYPE (expr))
204 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
205 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
206 /* fall through to default */
/* Default: compare against boolean false.  */
209 return fold_build2 (NE_EXPR, boolean_type_node,
210 expr, boolean_false_node);
214 /* Save any stack slots that happen to be in the quick_stack into their
215 home virtual register slots.
217 The copy order is from low stack index to high, to support the invariant
218 that the expression for a slot may contain decls for stack slots with
219 higher (or the same) index, but not lower. */
222 flush_quick_stack (void)
224 int stack_index = stack_pointer;
225 tree prev, cur, next;
227 /* First reverse the quick_stack, and count the number of slots it has. */
228 for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next)
230 next = TREE_CHAIN (cur);
231 TREE_CHAIN (cur) = prev;
/* Wide (long/double) values occupy two JVM stack slots.  */
233 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (TREE_VALUE (cur)));
/* Now pop each reversed entry, recycle its TREE_LIST cell onto the
   free list, and emit an assignment into its home stack-slot decl.  */
237 while (quick_stack != NULL_TREE)
240 tree node = quick_stack, type;
241 quick_stack = TREE_CHAIN (node);
242 TREE_CHAIN (node) = tree_list_free_list;
243 tree_list_free_list = node;
244 node = TREE_VALUE (node);
245 type = TREE_TYPE (node);
247 decl = find_stack_slot (stack_index, type);
249 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (node), decl, node));
250 stack_index += 1 + TYPE_IS_WIDE (type);
254 /* Push TYPE on the type stack.
255 Return nonzero on success, 0 on overflow of the declared max stack. */
258 push_type_0 (tree type)
/* Sub-int integral types are tracked as their promoted (int) form.  */
261 type = promote_type (type);
262 n_words = 1 + TYPE_IS_WIDE (type);
263 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
265 /* Allocate decl for this variable now, so we get a temporary that
266 survives the whole method. */
267 find_stack_slot (stack_pointer, type);
268 stack_type_map[stack_pointer++] = type;
/* The second word of a wide value is marked TYPE_SECOND.  */
270 while (--n_words >= 0)
271 stack_type_map[stack_pointer++] = TYPE_SECOND;
/* Like push_type_0, but overflow is a hard (internal) error rather than
   a reported failure.  */
276 push_type (tree type)
278 int r = push_type_0 (type);
/* Push the expression VALUE onto the quick stack (promoting sub-int
   integral values to int first), recycling a TREE_LIST cell from the
   free list when one is available.  */
283 push_value (tree value)
285 tree type = TREE_TYPE (value);
286 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
288 type = promote_type (type);
289 value = convert (type, value);
292 if (tree_list_free_list == NULL_TREE)
293 quick_stack = tree_cons (NULL_TREE, value, quick_stack);
296 tree node = tree_list_free_list;
297 tree_list_free_list = TREE_CHAIN (tree_list_free_list);
298 TREE_VALUE (node) = value;
299 TREE_CHAIN (node) = quick_stack;
302 /* If the value has a side effect, then we need to evaluate it
303 whether or not the result is used. If the value ends up on the
304 quick stack and is then popped, this won't happen -- so we flush
305 the quick stack. It is safest to simply always flush, though,
306 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
307 the latter we may need to strip conversions. */
308 flush_quick_stack ();
311 /* Pop a type from the type stack.
312 TYPE is the expected type. Return the actual type, which must be
314 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
317 pop_type_0 (tree type, char **messagep)
322 if (TREE_CODE (type) == RECORD_TYPE)
323 type = promote_type (type);
324 n_words = 1 + TYPE_IS_WIDE (type);
325 if (stack_pointer < n_words)
327 *messagep = xstrdup ("stack underflow");
/* A wide value's upper word must be the TYPE_SECOND/void marker.  */
330 while (--n_words > 0)
332 if (stack_type_map[--stack_pointer] != void_type_node)
334 *messagep = xstrdup ("Invalid multi-word value on type stack");
338 t = stack_type_map[--stack_pointer];
339 if (type == NULL_TREE || t == type)
/* A TREE_LIST on the type stack encodes a set of possible types
   (from merged control-flow paths); check each is widenable.  */
341 if (TREE_CODE (t) == TREE_LIST)
345 tree tt = TREE_PURPOSE (t);
346 if (! can_widen_reference_to (tt, type))
/* Any two sub-32-bit integral types are interchangeable here.  */
356 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
357 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
359 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
361 /* If the expected type we've been passed is object or ptr
362 (i.e. void*), the caller needs to know the real type. */
363 if (type == ptr_type_node || type == object_ptr_type_node)
366 /* Since the verifier has already run, we know that any
367 types we see will be compatible. In BC mode, this fact
368 may be checked at runtime, but if that is so then we can
369 assume its truth here as well. So, we always succeed
370 here, with the expected type. */
374 if (! flag_verify_invocations && flag_indirect_dispatch
375 && t == object_ptr_type_node)
377 if (type != ptr_type_node)
378 warning (0, "need to insert runtime check for %s",
379 xstrdup (lang_printable_name (type, 0)));
383 /* lang_printable_name uses a static buffer, so we must save the result
384 from calling it the first time. */
387 char *temp = xstrdup (lang_printable_name (type, 0));
388 /* If the stack contains a multi-word type, keep popping the stack until
389 the real type is found. */
390 while (t == void_type_node)
391 t = stack_type_map[--stack_pointer];
392 *messagep = concat ("expected type '", temp,
393 "' but stack contains '", lang_printable_name (t, 0),
400 /* Pop a type from the type stack.
401 TYPE is the expected type. Return the actual type, which must be
402 convertible to TYPE, otherwise call error. */
407 char *message = NULL;
408 type = pop_type_0 (type, &message);
/* pop_type_0 reports failure via a malloc'd message; surface it.  */
411 error ("%s", message);
418 /* Return true if two type assertions are equal. */
421 type_assertion_eq (const void * k1_p, const void * k2_p)
423 const type_assertion k1 = *(const type_assertion *)k1_p;
424 const type_assertion k2 = *(const type_assertion *)k2_p;
/* Equality is field-wise: same opcode and same operand trees.  */
425 return (k1.assertion_code == k2.assertion_code
427 && k1.op2 == k2.op2);
430 /* Hash a type assertion. */
433 type_assertion_hash (const void *p)
435 const type_assertion *k_p = (const type_assertion *) p;
436 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
437 k_p->assertion_code, 0);
/* Mix in the operands relevant to each assertion kind; TYPE_UIDs are
   stable per-compilation identifiers for the operand types.  */
439 switch (k_p->assertion_code)
441 case JV_ASSERT_TYPES_COMPATIBLE:
442 hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2),
446 case JV_ASSERT_IS_INSTANTIABLE:
447 hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1),
451 case JV_ASSERT_END_OF_TABLE:
461 /* Add an entry to the type assertion table for the given class.
462 KLASS is the class for which this assertion will be evaluated by the
463 runtime during loading/initialization.
464 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
465 OP1 and OP2 are the operands. The tree type of these arguments may be
466 specific to each assertion_code. */
469 add_type_assertion (tree klass, int assertion_code, tree op1, tree op2)
471 htab_t assertions_htab;
475 assertions_htab = TYPE_ASSERTIONS (klass);
476 if (assertions_htab == NULL)
478 assertions_htab = htab_create_ggc (7, type_assertion_hash,
479 type_assertion_eq, NULL);
/* NOTE(review): the table is read from KLASS but stored into
   current_class -- only correct if callers always pass
   current_class as KLASS; verify at the call sites.  */
480 TYPE_ASSERTIONS (current_class) = assertions_htab;
483 as.assertion_code = assertion_code;
487 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
489 /* Don't add the same assertion twice. */
493 *as_pp = ggc_alloc (sizeof (type_assertion));
494 **(type_assertion **)as_pp = as;
498 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
499 Handles array types and interfaces. */
502 can_widen_reference_to (tree source_type, tree target_type)
/* Widening to Object, or from the untyped null pointer, is always OK.  */
504 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
507 /* Get rid of pointers */
508 if (TREE_CODE (source_type) == POINTER_TYPE)
509 source_type = TREE_TYPE (source_type);
510 if (TREE_CODE (target_type) == POINTER_TYPE)
511 target_type = TREE_TYPE (target_type);
513 if (source_type == target_type)
516 /* FIXME: This is very pessimistic, in that it checks everything,
517 even if we already know that the types are compatible. If we're
518 to support full Java class loader semantics, we need this.
519 However, we could do something more optimal. */
520 if (! flag_verify_invocations)
/* Without compile-time verification, record a runtime assertion
   instead of deciding here.  */
522 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
523 source_type, target_type);
526 warning (0, "assert: %s is assign compatible with %s",
527 xstrdup (lang_printable_name (target_type, 0)),
528 xstrdup (lang_printable_name (source_type, 0)));
529 /* Punt everything to runtime. */
533 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
539 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
541 HOST_WIDE_INT source_length, target_length;
542 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
544 /* An array implements Cloneable and Serializable. */
545 tree name = DECL_NAME (TYPE_NAME (target_type));
546 return (name == java_lang_cloneable_identifier_node
547 || name == java_io_serializable_identifier_node);
/* For array-to-array widening, known lengths must agree, and the
   element types must themselves be widenable references.  */
549 target_length = java_array_type_length (target_type);
550 if (target_length >= 0)
552 source_length = java_array_type_length (source_type);
553 if (source_length != target_length)
556 source_type = TYPE_ARRAY_ELEMENT (source_type);
557 target_type = TYPE_ARRAY_ELEMENT (target_type);
558 if (source_type == target_type)
560 if (TREE_CODE (source_type) != POINTER_TYPE
561 || TREE_CODE (target_type) != POINTER_TYPE)
563 return can_widen_reference_to (source_type, target_type);
567 int source_depth = class_depth (source_type);
568 int target_depth = class_depth (target_type);
570 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
573 warning (0, "assert: %s is assign compatible with %s",
574 xstrdup (lang_printable_name (target_type, 0)),
575 xstrdup (lang_printable_name (source_type, 0)));
579 /* class_depth can return a negative depth if an error occurred */
580 if (source_depth < 0 || target_depth < 0)
583 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
585 /* target_type is OK if source_type or source_type ancestors
586 implement target_type. We handle multiple sub-interfaces */
587 tree binfo, base_binfo;
590 for (binfo = TYPE_BINFO (source_type), i = 0;
591 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
592 if (can_widen_reference_to
593 (BINFO_TYPE (base_binfo), target_type))
/* Plain class case: walk source's superclass chain up to the
   target's depth, then require an exact match.  */
600 for ( ; source_depth > target_depth; source_depth--)
603 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
605 return source_type == target_type;
/* Pop the top stack value as an expression tree.  If it is on the quick
   stack, return the saved expression (recycling the TREE_LIST cell);
   otherwise read it back from its home stack-slot decl.  */
611 pop_value (tree type)
613 type = pop_type (type);
616 tree node = quick_stack;
617 quick_stack = TREE_CHAIN (quick_stack);
618 TREE_CHAIN (node) = tree_list_free_list;
619 tree_list_free_list = node;
620 node = TREE_VALUE (node);
624 return find_stack_slot (stack_pointer, promote_type (type));
628 /* Pop and discard the top COUNT stack slots. */
631 java_stack_pop (int count)
637 gcc_assert (stack_pointer != 0);
639 type = stack_type_map[stack_pointer - 1];
/* A TYPE_SECOND marker means the top slot is the upper half of a wide
   value; the real type lives one slot down and both are discarded.  */
640 if (type == TYPE_SECOND)
643 gcc_assert (stack_pointer != 1 && count > 0);
645 type = stack_type_map[stack_pointer - 2];
652 /* Implement the 'swap' operator (to swap two top stack slots). */
655 java_stack_swap (void)
/* swap is only defined for two single-word values on top of stack.  */
661 if (stack_pointer < 2
662 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
663 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
664 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
665 /* Bad stack swap. */
667 /* Bad stack swap. */
/* Exchange via a fresh temporary, then swap the recorded types.  */
669 flush_quick_stack ();
670 decl1 = find_stack_slot (stack_pointer - 1, type1);
671 decl2 = find_stack_slot (stack_pointer - 2, type2);
672 temp = build_decl (input_location, VAR_DECL, NULL_TREE, type1);
673 java_add_local_var (temp);
674 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
675 java_add_stmt (build2 (MODIFY_EXPR, type2,
676 find_stack_slot (stack_pointer - 1, type2),
678 java_add_stmt (build2 (MODIFY_EXPR, type1,
679 find_stack_slot (stack_pointer - 2, type1),
681 stack_type_map[stack_pointer - 1] = type2;
682 stack_type_map[stack_pointer - 2] = type1;
/* Implement the dup family of opcodes: duplicate the top SIZE stack
   words, inserting the copy OFFSET words below the original.  Covers
   dup, dup_x1, dup_x2, dup2, dup2_x1, dup2_x2 by SIZE/OFFSET choice.  */
686 java_stack_dup (int size, int offset)
688 int low_index = stack_pointer - size - offset;
691 error ("stack underflow - dup* operation");
693 flush_quick_stack ();
695 stack_pointer += size;
696 dst_index = stack_pointer;
/* Copy downward so sources are read before they are overwritten.  */
698 for (dst_index = stack_pointer; --dst_index >= low_index; )
701 int src_index = dst_index - size;
702 if (src_index < low_index)
703 src_index = dst_index + size + offset;
704 type = stack_type_map [src_index];
705 if (type == TYPE_SECOND)
707 /* Dup operation splits 64-bit number. */
708 gcc_assert (src_index > low_index);
710 stack_type_map[dst_index] = type;
711 src_index--; dst_index--;
712 type = stack_type_map[src_index];
713 gcc_assert (TYPE_IS_WIDE (type));
716 gcc_assert (! TYPE_IS_WIDE (type));
718 if (src_index != dst_index)
720 tree src_decl = find_stack_slot (src_index, type);
721 tree dst_decl = find_stack_slot (dst_index, type);
724 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
725 stack_type_map[dst_index] = type;
730 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
734 build_java_athrow (tree node)
738 call = build_call_nary (void_type_node,
739 build_address_of (throw_node),
741 TREE_SIDE_EFFECTS (call) = 1;
742 java_add_stmt (call);
/* The throw never returns, so the modeled operand stack is dead.  */
743 java_stack_pop (stack_pointer);
746 /* Implementation for jsr/ret */
749 build_java_jsr (int target_pc, int return_pc)
751 tree where = lookup_label (target_pc);
752 tree ret = lookup_label (return_pc);
/* jsr pushes the return address, then jumps to the subroutine.  */
753 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
754 push_value (ret_label);
755 flush_quick_stack ();
756 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
758 /* Do not need to emit the label here. We noted the existence of the
759 label as a jump target in note_instructions; we'll emit the label
760 for real at the beginning of the expand_byte_code loop. */
/* Implement the 'ret' opcode: computed jump to the return address held
   in LOCATION.  */
764 build_java_ret (tree location)
766 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
769 /* Implementation of operations on array: new, load, store, length */
/* Map an OPCODE_newarray type code (T_BOOLEAN=4 .. T_LONG=11, per the
   JVM spec) to the corresponding primitive type node, or NULL_TREE for
   an invalid code.  */
772 decode_newarray_type (int atype)
776 case 4: return boolean_type_node;
777 case 5: return char_type_node;
778 case 6: return float_type_node;
779 case 7: return double_type_node;
780 case 8: return byte_type_node;
781 case 9: return short_type_node;
782 case 10: return int_type_node;
783 case 11: return long_type_node;
784 default: return NULL_TREE;
788 /* Map primitive type to the code used by OPCODE_newarray. */
/* Inverse of decode_newarray_type.  */
791 encode_newarray_type (tree type)
793 if (type == boolean_type_node)
795 else if (type == char_type_node)
797 else if (type == float_type_node)
799 else if (type == double_type_node)
801 else if (type == byte_type_node)
803 else if (type == short_type_node)
805 else if (type == int_type_node)
807 else if (type == long_type_node)
813 /* Build a call to _Jv_ThrowBadArrayIndex(), the
814 ArrayIndexOfBoundsException exception handler. */
817 build_java_throw_out_of_bounds_exception (tree index)
821 /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
822 has void return type. We cannot just set the type of the CALL_EXPR below
823 to int_type_node because we would lose it during gimplification. */
824 gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node))));
825 node = build_call_nary (void_type_node,
826 build_address_of (soft_badarrayindex_node),
828 TREE_SIDE_EFFECTS (node) = 1;
/* Wrap as (call, 0) so the result is usable in an int context.  */
830 node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node);
831 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
836 /* Return the length of an array. Doesn't perform any checking on the nature
837 or value of the array NODE. May be used to implement some bytecodes. */
840 build_java_array_length_access (tree node)
842 tree type = TREE_TYPE (node);
843 tree array_type = TREE_TYPE (type);
844 HOST_WIDE_INT length;
846 if (!is_array_type_p (type))
848 /* With the new verifier, we will see an ordinary pointer type
849 here. In this case, we just use an arbitrary array type. */
850 array_type = build_java_array_type (object_ptr_type_node, -1);
851 type = promote_type (array_type);
/* If the length is known at compile time, fold it to a constant.  */
854 length = java_array_type_length (type);
856 return build_int_cst (NULL_TREE, length);
/* Otherwise read the runtime "length" field of the array header,
   with an optional null-pointer check.  */
858 node = build3 (COMPONENT_REF, int_type_node,
859 build_java_indirect_ref (array_type, node,
860 flag_check_references),
861 lookup_field (&array_type, get_identifier ("length")),
/* Mark so later passes can recognize and optimize length accesses.  */
863 IS_ARRAY_LENGTH_ACCESS (node) = 1;
867 /* Optionally checks a reference against the NULL pointer. ARG1: the
868 expr, ARG2: we should check the reference. Don't generate extra
869 checks if we're not generating code. */
872 java_check_reference (tree expr, int check)
874 if (!flag_syntax_only && check)
/* SAVE_EXPR so EXPR is evaluated once despite appearing twice.  */
876 expr = save_expr (expr);
877 expr = build3 (COND_EXPR, TREE_TYPE (expr),
878 build2 (EQ_EXPR, boolean_type_node,
879 expr, null_pointer_node),
880 build_call_nary (void_type_node,
881 build_address_of (soft_nullpointer_node),
889 /* Reference an object: just like an INDIRECT_REF, but with checking. */
892 build_java_indirect_ref (tree type, tree expr, int check)
895 t = java_check_reference (expr, check);
/* Force the pointer to the exact TYPE before dereferencing.  */
896 t = convert (build_pointer_type (type), t);
897 return build1 (INDIRECT_REF, type, t);
900 /* Implement array indexing (either as l-value or r-value).
901 Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
902 Optionally performs bounds checking and/or test to NULL.
903 At this point, ARRAY should have been verified as an array. */
906 build_java_arrayaccess (tree array, tree type, tree index)
908 tree node, throw_expr = NULL_TREE;
911 tree array_type = TREE_TYPE (TREE_TYPE (array));
912 tree size_exp = fold_convert (sizetype, size_in_bytes (type));
914 if (!is_array_type_p (TREE_TYPE (array)))
916 /* With the new verifier, we will see an ordinary pointer type
917 here. In this case, we just use the correct array type. */
918 array_type = build_java_array_type (type, -1);
921 if (flag_bounds_check)
924 * (unsigned jint) INDEX >= (unsigned jint) LEN
925 * && throw ArrayIndexOutOfBoundsException.
926 * Note this is equivalent to and more efficient than:
927 * INDEX < 0 || INDEX >= LEN && throw ... */
929 tree len = convert (unsigned_int_type_node,
930 build_java_array_length_access (array));
931 test = fold_build2 (GE_EXPR, boolean_type_node,
932 convert (unsigned_int_type_node, index),
/* Skip the check entirely when folding proved it can't trip.  */
934 if (! integer_zerop (test))
937 = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
938 build_java_throw_out_of_bounds_exception (index));
939 /* allows expansion within COMPOUND */
940 TREE_SIDE_EFFECTS( throw_expr ) = 1;
944 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
945 to have the bounds check evaluated first. */
946 if (throw_expr != NULL_TREE)
947 index = build2 (COMPOUND_EXPR, int_type_node, throw_expr, index);
949 data_field = lookup_field (&array_type, get_identifier ("data"));
951 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
952 build_java_indirect_ref (array_type, array,
953 flag_check_references),
954 data_field, NULL_TREE);
956 /* Take the address of the data field and convert it to a pointer to
958 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
960 /* Multiply the index by the size of an element to obtain a byte
961 offset. Convert the result to a pointer to the element type. */
962 index = build2 (MULT_EXPR, sizetype,
963 fold_convert (sizetype, index),
966 /* Sum the byte offset and the address of the data field. */
967 node = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (node), node, index);
971 *((&array->data) + index*size_exp)
974 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
977 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
978 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
979 determine that no check is required. */
982 build_java_arraystore_check (tree array, tree object)
984 tree check, element_type, source;
985 tree array_type_p = TREE_TYPE (array);
986 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
988 if (! flag_verify_invocations)
990 /* With the new verifier, we don't track precise types. FIXME:
991 performance regression here. */
992 element_type = TYPE_NAME (object_type_node);
996 gcc_assert (is_array_type_p (array_type_p));
998 /* Get the TYPE_DECL for ARRAY's element type. */
1000 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
1003 gcc_assert (TREE_CODE (element_type) == TYPE_DECL
1004 && TREE_CODE (object_type) == TYPE_DECL);
1006 if (!flag_store_check)
1007 return build1 (NOP_EXPR, array_type_p, array);
1009 /* No check is needed if the element type is final. Also check that
1010 element_type matches object_type, since in the bytecode
1011 compilation case element_type may be the actual element type of
1012 the array rather than its declared type. However, if we're doing
1013 indirect dispatch, we can't do the `final' optimization. */
1014 if (element_type == object_type
1015 && ! flag_indirect_dispatch
1016 && CLASS_FINAL (element_type))
1017 return build1 (NOP_EXPR, array_type_p, array);
1019 /* OBJECT might be wrapped by a SAVE_EXPR. */
1020 if (TREE_CODE (object) == SAVE_EXPR)
1021 source = TREE_OPERAND (object, 0);
1025 /* Avoid the check if OBJECT was just loaded from the same array. */
1026 if (TREE_CODE (source) == ARRAY_REF)
/* Strip ARRAY_REF -> COMPONENT_REF -> INDIRECT_REF down to the
   source array's declaration so it can be compared with ARRAY.  */
1029 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
1030 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
1031 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1032 if (TREE_CODE (source) == SAVE_EXPR)
1033 source = TREE_OPERAND (source, 0);
1036 if (TREE_CODE (target) == SAVE_EXPR)
1037 target = TREE_OPERAND (target, 0);
1039 if (source == target)
1040 return build1 (NOP_EXPR, array_type_p, array);
1043 /* Build an invocation of _Jv_CheckArrayStore */
1044 check = build_call_nary (void_type_node,
1045 build_address_of (soft_checkarraystore_node),
1047 TREE_SIDE_EFFECTS (check) = 1;
1052 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1053 ARRAY_NODE. This function is used to retrieve something less vague than
1054 a pointer type when indexing the first dimension of something like [[<t>.
1055 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
1056 return unchanged. */
1059 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1062 /* We used to check to see if ARRAY_NODE really had array type.
1063 However, with the new verifier, this is not necessary, as we know
1064 that the object will be an array of the appropriate type. */
/* So this is now a pass-through kept for interface compatibility.  */
1066 return indexed_type;
1069 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1070 called with an integer code (the type of array to create), and the length
1071 of the array to create. */
1074 build_newarray (int atype_value, tree length)
1078 tree prim_type = decode_newarray_type (atype_value);
1080 = build_java_array_type (prim_type,
1081 host_integerp (length, 0) == INTEGER_CST
1082 ? tree_low_cst (length, 0) : -1);
1084 /* Pass a reference to the primitive type class and save the runtime
1086 type_arg = build_class_ref (prim_type);
1088 return build_call_nary (promote_type (type),
1089 build_address_of (soft_newarray_node),
1090 2, type_arg, length);
1093 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1094 of the dimension. */
1097 build_anewarray (tree class_type, tree length)
/* Compile-time length only when LENGTH is a known integer constant.  */
1100 = build_java_array_type (class_type,
1101 host_integerp (length, 0)
1102 ? tree_low_cst (length, 0) : -1);
1104 return build_call_nary (promote_type (type),
1105 build_address_of (soft_anewarray_node),
1108 build_class_ref (class_type),
1112 /* Return a node that evaluates 'new TYPE[LENGTH]': primitive element
   types go through _Jv_NewPrimArray, reference types through
   _Jv_NewObjectArray. */
1115 build_new_array (tree type, tree length)
1117 if (JPRIMITIVE_TYPE_P (type))
1118 return build_newarray (encode_newarray_type (type), length);
1120 return build_anewarray (TREE_TYPE (type), length);
1123 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1124 class pointer, a number of dimensions and the matching number of
1125 dimensions. The argument list is NULL terminated. */
1128 expand_java_multianewarray (tree class_type, int ndim)
1131 VEC(tree,gc) *args = NULL;
1133 VEC_safe_grow (tree, gc, args, 3 + ndim);
1135 VEC_replace (tree, args, 0, build_class_ref (class_type));
1136 VEC_replace (tree, args, 1, build_int_cst (NULL_TREE, ndim));
/* Dimensions are popped innermost-first, so fill slots backwards to
   keep the argument order outermost-to-innermost.  */
1138 for(i = ndim - 1; i >= 0; i-- )
1139 VEC_replace (tree, args, (unsigned)(2 + i), pop_value (int_type_node));
/* NULL terminator required by the varargs runtime entry point.  */
1141 VEC_replace (tree, args, 2 + ndim, null_pointer_node);
1143 push_value (build_call_vec (promote_type (class_type),
1144 build_address_of (soft_multianewarray_node),
1148 /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
1149 ARRAY is an array type. May expand some bound checking and NULL
1150 pointer checking. RHS_TYPE_NODE we are going to store. In the case
1151 of the CHAR/BYTE/BOOLEAN SHORT, the type popped of the stack is an
1152 INT. In those cases, we make the conversion.
1154 If ARRAY is a reference type, the assignment is checked at run-time
1155 to make sure that the RHS can be assigned to the array element
1156 type. It is not necessary to generate this code if ARRAY is final. */
1159 expand_java_arraystore (tree rhs_type_node)
1161 tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
1162 && TYPE_PRECISION (rhs_type_node) <= 32) ?
1163 int_type_node : rhs_type_node);
1164 tree index = pop_value (int_type_node);
1165 tree array_type, array, temp, access;
1167 /* If we're processing an `aaload' we might as well just pick
1169 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1171 array_type = build_java_array_type (object_ptr_type_node, -1);
1172 rhs_type_node = object_ptr_type_node;
1175 array_type = build_java_array_type (rhs_type_node, -1);
1177 array = pop_value (array_type);
1178 array = build1 (NOP_EXPR, promote_type (array_type), array);
1180 rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
1182 flush_quick_stack ();
/* SAVE_EXPRs: both operands are used more than once below.  */
1184 index = save_expr (index);
1185 array = save_expr (array);
1187 /* We want to perform the bounds check (done by
1188 build_java_arrayaccess) before the type check (done by
1189 build_java_arraystore_check). So, we call build_java_arrayaccess
1190 -- which returns an ARRAY_REF lvalue -- and we then generate code
1191 to stash the address of that lvalue in a temp. Then we call
1192 build_java_arraystore_check, and finally we generate a
1193 MODIFY_EXPR to set the array element. */
1195 access = build_java_arrayaccess (array, rhs_type_node, index);
1196 temp = build_decl (input_location, VAR_DECL, NULL_TREE,
1197 build_pointer_type (TREE_TYPE (access)));
1198 java_add_local_var (temp);
1199 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
1201 build_fold_addr_expr (access)));
/* Reference stores get a runtime ArrayStoreException check.  */
1203 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1205 tree check = build_java_arraystore_check (array, rhs_node);
1206 java_add_stmt (check);
1209 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
1210 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
1214 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1215 sure that LHS is an array type. May expand some bound checking and NULL
1217 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1218 BOOLEAN/SHORT, we push a promoted type back to the stack.
/* NOTE(review): extract is missing lines (return type, braces and the
   declarations of ARRAY_TYPE, ARRAY_NODE, LOAD_NODE); restore from the
   original before editing.  */
1222 expand_java_arrayload (tree lhs_type_node)
1225 tree index_node = pop_value (int_type_node);
/* For reference loads (aaload) treat the element as Object*.  */
1229 /* If we're processing an `aaload' we might as well just pick
1231 if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1233 array_type = build_java_array_type (object_ptr_type_node, -1);
1234 lhs_type_node = object_ptr_type_node;
1237 array_type = build_java_array_type (lhs_type_node, -1);
1238 array_node = pop_value (array_type);
1239 array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
1241 index_node = save_expr (index_node);
1242 array_node = save_expr (array_node);
1244 lhs_type_node = build_java_check_indexed_type (array_node,
1246 load_node = build_java_arrayaccess (array_node,
/* Sub-int integral results are pushed as int, matching JVM stack rules.  */
1249 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1250 load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1251 push_value (load_node);
1254 /* Expands .length. Makes sure that we deal with an array and may expand
1255 a NULL check on the array object. */
/* Pops the array reference and pushes its length field access.  */
1258 expand_java_array_length (void)
1260 tree array = pop_value (ptr_type_node);
1261 tree length = build_java_array_length_access (array);
1263 push_value (length);
1266 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1267 either soft_monitorenter_node or soft_monitorexit_node. */
/* Returns a void call expression taking OBJECT as the sole argument.
   NOTE(review): the call's argument list line is missing from this
   extract.  */
1270 build_java_monitor (tree call, tree object)
1272 return build_call_nary (void_type_node,
1273 build_address_of (call),
1277 /* Emit code for one of the PUSHC instructions. */
/* Pushes the constant IVAL (interpreted per TYPE) onto the quick stack:
   null for a zero pointer constant, an integer constant for int/long,
   or a real constant for float/double.
   NOTE(review): extract is missing lines (return type, braces, the
   declarations of VALUE and X, and the final push_value call).  */
1280 expand_java_pushc (int ival, tree type)
1283 if (type == ptr_type_node && ival == 0)
1284 value = null_pointer_node;
1285 else if (type == int_type_node || type == long_type_node)
1286 value = build_int_cst (type, ival);
1287 else if (type == float_type_node || type == double_type_node)
1290 REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1291 value = build_real (type, x);
/* Emit a RETURN_EXPR for a method returning TYPE.  For a non-void
   return, pops the value, assigns it to DECL_RESULT and returns that.
   NOTE(review): extract is missing lines (return type, braces).  */
1300 expand_java_return (tree type)
1302 if (type == void_type_node)
1303 java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1306 tree retval = pop_value (type);
1307 tree res = DECL_RESULT (current_function_decl);
1308 retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1310 /* Handle the situation where the native integer type is smaller
1311 than the JVM integer. It can happen for many cross compilers.
1312 The whole if expression just goes away if INT_TYPE_SIZE < 32
1314 if (INT_TYPE_SIZE < 32
1315 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1316 < GET_MODE_SIZE (TYPE_MODE (type))))
1317 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1319 TREE_SIDE_EFFECTS (retval) = 1;
1320 java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
/* Load local variable INDEX of TYPE at bytecode position PC onto the
   quick stack, via a fresh temporary (see comment below).
   NOTE(review): extract is missing lines (return type, braces, the
   declaration of COPY, and the final push of COPY).  */
1325 expand_load_internal (int index, tree type, int pc)
1328 tree var = find_local_variable (index, type, pc);
1330 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1331 on the stack. If there is an assignment to this VAR_DECL between
1332 the stack push and the use, then the wrong code could be
1333 generated. To avoid this we create a new local and copy our
1334 value into it. Then we push this new local on the stack.
1335 Hopefully this all gets optimized out. */
1336 copy = build_decl (input_location, VAR_DECL, NULL_TREE, type);
/* The local slot's declared type may differ from TYPE for integral or
   pointer values; convert before copying.  */
1337 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1338 && TREE_TYPE (copy) != TREE_TYPE (var))
1339 var = convert (type, var);
1340 java_add_local_var (copy);
1341 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
/* Return an ADDR_EXPR taking the address of VALUE, typed as a pointer
   to VALUE's type.  */
1347 build_address_of (tree value)
1349 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
/* True if TYPE or any of its superclasses declares a finalizer.
   Recurses up the superclass chain; java.lang.Object (the class with
   no superclass) terminates the recursion.  */
1353 class_has_finalize_method (tree type)
1355 tree super = CLASSTYPE_SUPER (type);
1357 if (super == NULL_TREE)
1358 return false; /* Every class with a real finalizer inherits */
1359 /* from java.lang.Object. */
1361 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
/* Build a call allocating a new instance of TYPE, choosing the
   finalizer-aware allocator only when the class needs finalization.
   NOTE(review): the line selecting alloc_node's true branch is missing
   from this extract.  */
1365 java_create_object (tree type)
1367 tree alloc_node = (class_has_finalize_method (type)
1369 : alloc_no_finalizer_node);
1371 return build_call_nary (promote_type (type),
1372 build_address_of (alloc_node),
1373 1, build_class_ref (type));
/* Expand the 'new' bytecode: ensure TYPE is loaded and laid out, then
   push a call to the appropriate allocator (finalizer-aware only when
   the class has a finalizer).  */
1377 expand_java_NEW (tree type)
1381 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1382 : alloc_no_finalizer_node);
1383 if (! CLASS_LOADED_P (type))
1384 load_class (type, 1);
1385 safe_layout_class (type);
1386 push_value (build_call_nary (promote_type (type),
1387 build_address_of (alloc_node),
1388 1, build_class_ref (type)));
1391 /* This returns an expression which will extract the class of an
/* Reads VALUE->vtable->class: first the vtable pointer out of the
   object (with an optional reference check), then the class field out
   of the dispatch table.  */
1395 build_get_class (tree value)
1397 tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1398 tree vtable_field = lookup_field (&object_type_node,
1399 get_identifier ("vtable"));
1400 tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1401 build_java_indirect_ref (object_type_node, value,
1402 flag_check_references),
1403 vtable_field, NULL_TREE);
1404 return build3 (COMPONENT_REF, class_ptr_type,
1405 build1 (INDIRECT_REF, dtable_type, tmp),
1406 class_field, NULL_TREE);
1409 /* This builds the tree representation of the `instanceof' operator.
1410 It tries various tricks to optimize this in cases where types are
/* Fast paths: a known-superclass relation reduces to a null check, a
   provably-unrelated pair folds to false, a final target class needs
   only a vtable comparison; otherwise fall back to the runtime
   soft_instanceof helper.
   NOTE(review): extract is missing lines (return type, braces, and the
   declarations of EXPR and KLASS).  */
1414 build_instanceof (tree value, tree type)
1417 tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1418 tree valtype = TREE_TYPE (TREE_TYPE (value));
1419 tree valclass = TYPE_NAME (valtype);
1422 /* When compiling from bytecode, we need to ensure that TYPE has
1424 if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1426 load_class (type, 1);
1427 safe_layout_class (type);
1428 if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1429 return error_mark_node;
1431 klass = TYPE_NAME (type);
1433 if (type == object_type_node || inherits_from_p (valtype, type))
1435 /* Anything except `null' is an instance of Object. Likewise,
1436 if the object is known to be an instance of the class, then
1437 we only need to check for `null'. */
1438 expr = build2 (NE_EXPR, itype, value, null_pointer_node);
/* Disjointness can only be proven for verified, non-array,
   non-interface classes with no inheritance in either direction
   (or a final target class with no path from VALTYPE).  */
1440 else if (flag_verify_invocations
1441 && ! TYPE_ARRAY_P (type)
1442 && ! TYPE_ARRAY_P (valtype)
1443 && DECL_P (klass) && DECL_P (valclass)
1444 && ! CLASS_INTERFACE (valclass)
1445 && ! CLASS_INTERFACE (klass)
1446 && ! inherits_from_p (type, valtype)
1447 && (CLASS_FINAL (klass)
1448 || ! inherits_from_p (valtype, type)))
1450 /* The classes are from different branches of the derivation
1451 tree, so we immediately know the answer. */
1452 expr = boolean_false_node;
1454 else if (DECL_P (klass) && CLASS_FINAL (klass))
/* Final class: instanceof holds iff non-null and exact class match.  */
1456 tree save = save_expr (value);
1457 expr = build3 (COND_EXPR, itype,
1458 build2 (NE_EXPR, boolean_type_node,
1459 save, null_pointer_node),
1460 build2 (EQ_EXPR, itype,
1461 build_get_class (save),
1462 build_class_ref (type)),
1463 boolean_false_node);
/* General case: defer to the runtime helper.  */
1467 expr = build_call_nary (itype,
1468 build_address_of (soft_instanceof_node),
1469 2, value, build_class_ref (type));
1471 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
/* Expand the 'instanceof' bytecode: pop the object, build the test.
   NOTE(review): the push of the result is missing from this extract.  */
1476 expand_java_INSTANCEOF (tree type)
1478 tree value = pop_value (object_ptr_type_node);
1479 value = build_instanceof (value, type);
/* Expand the 'checkcast' bytecode: pop the object and wrap it in a
   call to the runtime cast-check helper (soft_checkcast_node).
   NOTE(review): the push of the result is missing from this extract.  */
1484 expand_java_CHECKCAST (tree type)
1486 tree value = pop_value (ptr_type_node);
1487 value = build_call_nary (promote_type (type),
1488 build_address_of (soft_checkcast_node),
1489 2, build_class_ref (type), value);
/* Expand the 'iinc' bytecode: add the constant IVAL to int local
   variable LOCAL_VAR_INDEX at bytecode position PC.  */
1494 expand_iinc (unsigned int local_var_index, int ival, int pc)
1496 tree local_var, res;
1497 tree constant_value;
/* Flush first: the increment must not be reordered against stacked
   expressions that may read this local.  */
1499 flush_quick_stack ();
1500 local_var = find_local_variable (local_var_index, int_type_node, pc);
1501 constant_value = build_int_cst (NULL_TREE, ival);
1502 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1503 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
/* Build a call to the runtime software divide/remainder helper for OP
   (TRUNC_DIV_EXPR or TRUNC_MOD_EXPR) on int or long operands OP1/OP2.
   NOTE(review): extract is missing lines (return type, braces, switch
   statements, break/default cases, and the declaration of CALL).  */
1508 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1511 tree arg1 = convert (type, op1);
1512 tree arg2 = convert (type, op2);
1514 if (type == int_type_node)
1518 case TRUNC_DIV_EXPR:
1519 call = soft_idiv_node;
1521 case TRUNC_MOD_EXPR:
1522 call = soft_irem_node;
1528 else if (type == long_type_node)
1532 case TRUNC_DIV_EXPR:
1533 call = soft_ldiv_node;
1535 case TRUNC_MOD_EXPR:
1536 call = soft_lrem_node;
1544 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
/* Build a binary operation OP of TYPE on ARG1/ARG2, handling the
   Java-specific cases: unsigned right shift, shift-count masking,
   three-way compares (lcmp/fcmpl/fcmpg), floating-point remainder via
   the runtime fmod helper, and optional software integer div/mod.
   Falls through to a plain fold_build2 for ordinary operators.
   NOTE(review): extract is missing lines (return type, braces, case
   labels, breaks, and several declarations); restore from the original
   before editing.  */
1549 build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
/* Unsigned (>>>) shift: perform the shift in the unsigned type and
   convert back.  */
1556 tree u_type = unsigned_type_for (type);
1557 arg1 = convert (u_type, arg1);
1558 arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1559 return convert (type, arg1);
/* JVM semantics: shift count is masked to the operand width - 1.  */
1563 mask = build_int_cst (NULL_TREE,
1564 TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
1565 arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
1568 case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
1569 case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
1570 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1572 tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1573 boolean_type_node, arg1, arg2);
1574 tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
1575 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1576 ifexp2, integer_zero_node,
1577 op == COMPARE_L_EXPR
1578 ? integer_minus_one_node
1579 : integer_one_node);
1580 return fold_build3 (COND_EXPR, int_type_node, ifexp1,
1581 op == COMPARE_L_EXPR ? integer_one_node
1582 : integer_minus_one_node,
/* Plain three-way compare (lcmp): -1 / 0 / 1.  */
1586 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1588 tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
1589 tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
1590 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1591 ifexp2, integer_one_node,
1593 return fold_build3 (COND_EXPR, int_type_node,
1594 ifexp1, integer_minus_one_node, second_compare);
1596 case TRUNC_DIV_EXPR:
1597 case TRUNC_MOD_EXPR:
/* Floating-point remainder goes through the runtime fmod helper,
   widening to double and narrowing back as needed.  */
1598 if (TREE_CODE (type) == REAL_TYPE
1599 && op == TRUNC_MOD_EXPR)
1602 if (type != double_type_node)
1604 arg1 = convert (double_type_node, arg1);
1605 arg2 = convert (double_type_node, arg2);
1607 call = build_call_nary (double_type_node,
1608 build_address_of (soft_fmod_node),
1610 if (type != double_type_node)
1611 call = convert (type, call);
/* Integer div/mod may be routed through the software helper so the
   runtime can raise ArithmeticException on divide-by-zero.  */
1615 if (TREE_CODE (type) == INTEGER_TYPE
1616 && flag_use_divide_subroutine
1617 && ! flag_syntax_only)
1618 return build_java_soft_divmod (op, type, arg1, arg2);
1623 return fold_build2 (op, type, arg1, arg2);
/* Pop the two operands for a binary opcode (right operand first) and
   push the result of build_java_binop.
   NOTE(review): extract is missing lines (return type, braces, the
   declarations of ltype/rtype/larg/rarg, and the switch selecting
   shift-count typing); restore from the original before editing.  */
1627 expand_java_binop (tree type, enum tree_code op)
1637 rtype = int_type_node;
1638 rarg = pop_value (rtype);
1641 rarg = pop_value (rtype);
1643 larg = pop_value (ltype);
1644 push_value (build_java_binop (op, type, larg, rarg));
1647 /* Lookup the field named NAME in *TYPEP or its super classes.
1648 If not found, return NULL_TREE.
1649 (If the *TYPEP is not found, or if the field reference is
1650 ambiguous, return error_mark_node.)
1651 If found, return the FIELD_DECL, and set *TYPEP to the
1652 class containing the field. */
/* NOTE(review): extract is missing lines (return type, braces, loop
   structure, several declarations, and return statements); restore
   from the original before editing.  */
1655 lookup_field (tree *typep, tree name)
1657 if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1659 load_class (*typep, 1);
1660 safe_layout_class (*typep);
1661 if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1662 return error_mark_node;
1666 tree field, binfo, base_binfo;
/* Direct fields of the current class first.  */
1670 for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field))
1671 if (DECL_NAME (field) == name)
1674 /* Process implemented interfaces. */
1675 save_field = NULL_TREE;
1676 for (binfo = TYPE_BINFO (*typep), i = 0;
1677 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1679 tree t = BINFO_TYPE (base_binfo);
/* Recurse into each interface; the same FIELD_DECL found twice is
   fine, two different decls with the same name is an ambiguity.  */
1680 if ((field = lookup_field (&t, name)))
1682 if (save_field == field)
1684 if (save_field == NULL_TREE)
1688 tree i1 = DECL_CONTEXT (save_field);
1689 tree i2 = DECL_CONTEXT (field);
1690 error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1691 IDENTIFIER_POINTER (name),
1692 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1693 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1694 return error_mark_node;
1699 if (save_field != NULL_TREE)
/* Not found here; continue the walk in the superclass.  */
1702 *typep = CLASSTYPE_SUPER (*typep);
1707 /* Look up the field named NAME in object SELF_VALUE,
1708 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1709 SELF_VALUE is NULL_TREE if looking for a static field. */
/* NOTE(review): extract is missing lines (return type, braces, and
   several declarations such as FIELD_OFFSET, PAGE_SIZE, OTABLE_INDEX
   and ADDRESS); restore from the original before editing.  */
1712 build_field_ref (tree self_value, tree self_class, tree name)
1714 tree base_class = self_class;
1715 tree field_decl = lookup_field (&base_class, name);
1716 if (field_decl == NULL_TREE)
1718 error ("field %qs not found", IDENTIFIER_POINTER (name));
1719 return error_mark_node;
1721 if (self_value == NULL_TREE)
1723 return build_static_field_ref (field_decl);
1727 tree base_type = promote_type (base_class);
1729 /* CHECK is true if self_value is not the this pointer. */
1730 int check = (! (DECL_P (self_value)
1731 && DECL_NAME (self_value) == this_identifier_node));
1733 /* Determine whether a field offset from NULL will lie within
1734 Page 0: this is necessary on those GNU/Linux/BSD systems that
1735 trap SEGV to generate NullPointerExceptions.
1737 We assume that Page 0 will be mapped with NOPERM, and that
1738 memory may be allocated from any other page, so only field
1739 offsets < pagesize are guaranteed to trap. We also assume
1740 the smallest page size we'll encounter is 4k bytes. */
1741 if (! flag_syntax_only && check && ! flag_check_references
1742 && ! flag_indirect_dispatch)
1744 tree field_offset = byte_position (field_decl);
1746 page_size = size_int (4096);
1747 check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
1750 if (base_type != TREE_TYPE (self_value))
1751 self_value = fold_build1 (NOP_EXPR, base_type, self_value);
/* Indirect dispatch: read the field offset out of the otable at run
   time instead of using a compile-time byte position.  */
1752 if (! flag_syntax_only && flag_indirect_dispatch)
1755 = build_int_cst (NULL_TREE, get_symbol_table_index
1756 (field_decl, NULL_TREE,
1757 &TYPE_OTABLE_METHODS (output_class)));
1759 = build4 (ARRAY_REF, integer_type_node,
1760 TYPE_OTABLE_DECL (output_class), otable_index,
1761 NULL_TREE, NULL_TREE);
/* A zero offset for a field from another class means it could not
   be resolved; trap via the nosuchfield helper.  */
1764 if (DECL_CONTEXT (field_decl) != output_class)
1766 = build3 (COND_EXPR, TREE_TYPE (field_offset),
1767 build2 (EQ_EXPR, boolean_type_node,
1768 field_offset, integer_zero_node),
1769 build_call_nary (void_type_node,
1770 build_address_of (soft_nosuchfield_node),
1774 field_offset = fold (convert (sizetype, field_offset));
1775 self_value = java_check_reference (self_value, check);
1777 = fold_build2 (POINTER_PLUS_EXPR,
1778 TREE_TYPE (self_value),
1779 self_value, field_offset);
1780 address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
1782 return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
/* Direct dispatch: an ordinary COMPONENT_REF through a (possibly
   checked) indirect ref.  */
1785 self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1787 return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1788 self_value, field_decl, NULL_TREE);
/* Return (creating if necessary) the LABEL_DECL for bytecode offset
   PC.  Labels are keyed by a generated assembler-style identifier so
   repeated lookups for the same PC return the same decl.
   NOTE(review): extract is missing lines (return type, braces, and the
   declarations of BUF and NAME).  */
1793 lookup_label (int pc)
1797 if (pc > highest_label_pc_this_method)
1798 highest_label_pc_this_method = pc;
1799 ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc);
1800 name = get_identifier (buf);
1801 if (IDENTIFIER_LOCAL_VALUE (name))
1802 return IDENTIFIER_LOCAL_VALUE (name);
1805 /* The type of the address of a label is return_address_type_node. */
1806 tree decl = create_label_decl (name);
1807 return pushdecl (decl);
1811 /* Generate a unique name for the purpose of loops and switches
1812 labels, and try-catch-finally blocks label or temporary variables. */
/* Uses a monotonically increasing counter, so each call yields a fresh
   identifier.  NOTE(review): the declaration of BUFF and the counter
   increment line are missing from this extract.  */
1815 generate_name (void)
1817 static int l_number = 0;
1819 ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
1821 return get_identifier (buff);
/* Build a LABEL_DECL named NAME in the current function, hidden from
   debug output.  NOTE(review): the return statement is missing from
   this extract.  */
1825 create_label_decl (tree name)
1828 decl = build_decl (input_location, LABEL_DECL, name,
1829 TREE_TYPE (return_address_type_node));
1830 DECL_CONTEXT (decl) = current_function_decl;
1831 DECL_IGNORED_P (decl) = 1;
1835 /* This maps a bytecode offset (PC) to various flags. */
1836 char *instruction_bits;
1838 /* This is a vector of type states for the current method. It is
1839 indexed by PC. Each element is a tree vector holding the type
1840 state at that PC. We only note type states at basic block
/* (Comment truncated in this extract; original continues "...
   boundaries".)  */
1842 VEC(tree, gc) *type_states;
/* Record that TARGET_PC is a jump target: ensure its label exists and
   mark the instruction-flags byte.  CURRENT_PC is unused.  */
1845 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1847 lookup_label (target_pc);
1848 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1851 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1852 where CONDITION is one of one the compare operators. */
/* Emits a COND_EXPR whose taken branch is a GOTO to the target label
   and whose other branch is an empty statement.  */
1855 expand_compare (enum tree_code condition, tree value1, tree value2,
1858 tree target = lookup_label (target_pc);
1859 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1861 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1862 build1 (GOTO_EXPR, void_type_node, target),
1863 build_java_empty_stmt ()));
1866 /* Emit code for a TEST-type opcode. */
/* Single-operand conditional branch: pops one value and compares it
   against null (for pointers) or zero (for integers).  */
1869 expand_test (enum tree_code condition, tree type, int target_pc)
1871 tree value1, value2;
1872 flush_quick_stack ();
1873 value1 = pop_value (type);
1874 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1875 expand_compare (condition, value1, value2, target_pc);
1878 /* Emit code for a COND-type opcode. */
/* Two-operand conditional branch: pops both operands (second operand
   first, per JVM stack order) and branches on the comparison.  */
1881 expand_cond (enum tree_code condition, tree type, int target_pc)
1883 tree value1, value2;
1884 flush_quick_stack ();
1885 /* note: pop values in opposite order */
1886 value2 = pop_value (type);
1887 value1 = pop_value (type);
1888 /* Maybe should check value1 and value2 for type compatibility ??? */
1889 expand_compare (condition, value1, value2, target_pc);
/* Emit an unconditional jump to the label for TARGET_PC.  */
1893 expand_java_goto (int target_pc)
1895 tree target_label = lookup_label (target_pc);
1896 flush_quick_stack ();
1897 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
/* Start expanding a tableswitch/lookupswitch on SELECTOR: emit the
   SWITCH_EXPR with a default case that jumps to DEFAULT_PC, and return
   it so callers can append cases via expand_java_add_case.
   NOTE(review): extract is missing lines (return type, braces, and the
   final return of SWITCH_EXPR).  */
1901 expand_java_switch (tree selector, int default_pc)
1903 tree switch_expr, x;
1905 flush_quick_stack ();
1906 switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1907 NULL_TREE, NULL_TREE);
1908 java_add_stmt (switch_expr);
/* Default case: an artificial label followed by a goto to the
   default target.  */
1910 x = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, NULL_TREE,
1911 create_artificial_label (input_location));
1912 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1914 x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1915 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
/* Append one case to SWITCH_EXPR: 'case MATCH:' followed by a goto to
   the label for TARGET_PC.
   NOTE(review): the declarations of VALUE and X are missing from this
   extract.  */
1921 expand_java_add_case (tree switch_expr, int match, int target_pc)
1925 value = build_int_cst (TREE_TYPE (switch_expr), match);
1927 x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE,
1928 create_artificial_label (input_location));
1929 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1931 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1932 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
/* Pop the arguments for a call to a method of METHOD_TYPE off the
   quick stack, returning them in call order.  The parameter types are
   first collected in order; values are then popped last-to-first and
   coerced to their declared (possibly promoted) types.
   NOTE(review): extract is missing lines (braces, the declarations of
   TYPE and ARITY, the loop over ARITY, and the final return).  */
1935 static VEC(tree,gc) *
1936 pop_arguments (tree method_type)
1938 function_args_iterator fnai;
1940 VEC(tree,gc) *args = NULL;
1943 FOREACH_FUNCTION_ARGS (method_type, type, fnai)
1945 /* XXX: leaky abstraction. */
1946 if (type == void_type_node)
1949 VEC_safe_push (tree, gc, args, type);
1952 arity = VEC_length (tree, args);
1956 tree arg = pop_value (VEC_index (tree, args, arity));
1958 /* We simply cast each argument to its proper type. This is
1959 needed since we lose type information coming out of the
1960 verifier. We also have to do this when we pop an integer
1961 type that must be promoted for the function call. */
1962 if (TREE_CODE (type) == POINTER_TYPE)
1963 arg = build1 (NOP_EXPR, type, arg);
1964 else if (targetm.calls.promote_prototypes (type)
1965 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1966 && INTEGRAL_TYPE_P (type))
1967 arg = convert (integer_type_node, arg);
1969 VEC_replace (tree, args, arity, arg);
1975 /* Attach to PTR (a block) the declaration found in ENTRY. */
/* Hash-table traversal callback: splices the init-test flag decl in
   ENTRY into the variable list of the block PTR and prepends a
   DECL_EXPR to its body.  Handles both BIND_EXPR and BLOCK forms.
   NOTE(review): extract is missing lines (return type, braces, and the
   else branch header); restore from the original before editing.  */
1978 attach_init_test_initialization_flags (void **entry, void *ptr)
1980 tree block = (tree)ptr;
1981 struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
1983 if (block != error_mark_node)
1985 if (TREE_CODE (block) == BIND_EXPR)
1987 tree body = BIND_EXPR_BODY (block);
1988 TREE_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1989 BIND_EXPR_VARS (block) = ite->value;
1990 body = build2 (COMPOUND_EXPR, void_type_node,
1991 build1 (DECL_EXPR, void_type_node, ite->value), body);
1992 BIND_EXPR_BODY (block) = body;
1996 tree body = BLOCK_SUBBLOCKS (block);
1997 TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1998 BLOCK_EXPR_DECLS (block) = ite->value;
1999 body = build2 (COMPOUND_EXPR, void_type_node,
2000 build1 (DECL_EXPR, void_type_node, ite->value), body);
2001 BLOCK_SUBBLOCKS (block) = body;
2008 /* Build an expression to initialize the class CLAS.
2009 if EXPR is non-NULL, returns an expression to first call the initializer
2010 (if it is needed) and then calls EXPR. */
/* Two strategies: with always_initialize_class_p, an unconditional
   call to the runtime initializer; otherwise a per-function boolean
   flag decl guards the call so each class is initialized at most once
   per function.
   NOTE(review): extract is missing lines (return type, braces, the
   declarations of INIT and DECL, returns, and the else header).  */
2013 build_class_init (tree clas, tree expr)
2017 /* An optimization: if CLAS is a superclass of the class we're
2018 compiling, we don't need to initialize it. However, if CLAS is
2019 an interface, it won't necessarily be initialized, even if we
2021 if ((! CLASS_INTERFACE (TYPE_NAME (clas))
2022 && inherits_from_p (current_class, clas))
2023 || current_class == clas)
2026 if (always_initialize_class_p)
2028 init = build_call_nary (void_type_node,
2029 build_address_of (soft_initclass_node),
2030 1, build_class_ref (clas));
2031 TREE_SIDE_EFFECTS (init) = 1;
2035 tree *init_test_decl;
2037 init_test_decl = java_treetreehash_new
2038 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
2040 if (*init_test_decl == NULL)
2042 /* Build a declaration and mark it as a flag used to track
2043 static class initializations. */
2044 decl = build_decl (input_location, VAR_DECL, NULL_TREE,
2046 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
2047 DECL_CONTEXT (decl) = current_function_decl;
2048 DECL_INITIAL (decl) = boolean_false_node;
2049 /* Don't emit any symbolic debugging info for this decl. */
2050 DECL_IGNORED_P (decl) = 1;
2051 *init_test_decl = decl;
/* Guarded form: if (!flag) _Jv_InitClass (clas); flag = true;  */
2054 init = build_call_nary (void_type_node,
2055 build_address_of (soft_initclass_node),
2056 1, build_class_ref (clas));
2057 TREE_SIDE_EFFECTS (init) = 1;
2058 init = build3 (COND_EXPR, void_type_node,
2059 build2 (EQ_EXPR, boolean_type_node,
2060 *init_test_decl, boolean_false_node),
2061 init, integer_zero_node);
2062 TREE_SIDE_EFFECTS (init) = 1;
2063 init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2064 build2 (MODIFY_EXPR, boolean_type_node,
2065 *init_test_decl, boolean_true_node));
2066 TREE_SIDE_EFFECTS (init) = 1;
/* Sequence the initializer before EXPR when one was supplied.  */
2069 if (expr != NULL_TREE)
2071 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2072 TREE_SIDE_EFFECTS (expr) = 1;
2080 /* Rewrite expensive calls that require stack unwinding at runtime to
2081 cheaper alternatives. The logic here performs these
2084 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2085 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
/* Rewrite-rule record: matches a (class, method, signature) triple and
   supplies the replacement class/signature plus an optional hook that
   appends the extra argument.
   NOTE(review): extract is missing lines (struct header, the method
   and flags members, and the typedef/closing brace).  */
2091 const char *classname;
2093 const char *signature;
2094 const char *new_classname;
2095 const char *new_signature;
2097 void (*rewrite_arglist) (VEC(tree,gc) **);
2100 /* Add __builtin_return_address(0) to the end of an arglist. */
/* Also marks the current function uninlinable, since inlining would
   change which frame the return address refers to.
   NOTE(review): the function header lines and the declaration of
   RETADDR are missing from this extract.  */
2104 rewrite_arglist_getcaller (VEC(tree,gc) **arglist)
2107 = build_call_expr (built_in_decls[BUILT_IN_RETURN_ADDRESS],
2108 1, integer_zero_node);
2110 DECL_UNINLINABLE (current_function_decl) = 1;
2112 VEC_safe_push (tree, gc, *arglist, retaddr);
2115 /* Add this.class to the end of an arglist. */
/* Appends a reference to the class currently being output.  */
2118 rewrite_arglist_getclass (VEC(tree,gc) **arglist)
2120 VEC_safe_push (tree, gc, *arglist, build_class_ref (output_class));
/* The table of call rewrites, NULL-terminated.  Each entry: matched
   class/method/signature, replacement class/signature, access flags
   for a synthesized replacement method, and the arglist hook.
   NOTE(review): the method-name member column appears to be missing
   from some entries in this extract.  */
2123 static rewrite_rule rules[] =
2124 {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2125 "java.lang.Class", "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2126 ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2128 {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2129 "java.lang.Class", "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2130 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2132 {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2133 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2134 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2136 {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2137 "()Ljava/lang/ClassLoader;",
2138 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2139 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2141 {"gnu.java.lang.VMCPStringBuilder", "toString", "([CII)Ljava/lang/String;",
2142 "java.lang.String", "([CII)Ljava/lang/String;",
2143 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, NULL},
2145 {NULL, NULL, NULL, NULL, NULL, 0, NULL}};
2147 /* True if this method is special, i.e. it's a private method that
2148 should be exported from a DSO. */
/* A method is special if its (class, name) pair appears anywhere in
   the rewrite rules table.
   NOTE(review): extract is missing lines (return type, braces, the
   declaration of P, and the return statements).  */
2151 special_method_p (tree candidate_method)
2153 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2154 tree method = DECL_NAME (candidate_method);
2157 for (p = rules; p->classname; p++)
2159 if (get_identifier (p->classname) == context
2160 && get_identifier (p->method) == method)
2166 /* Scan the rules list for replacements for *METHOD_P and replace the
2167 args accordingly. If the rewrite results in an access to a private
2168 method, update SPECIAL.*/
/* On a match: resolve (or, when not verifying, synthesize) the
   replacement method, rewrite the argument list via the rule's hook,
   and swap in the new signature.  SPECIAL is set to integer_one_node
   when a rewrite happened, NULL_TREE otherwise.
   NOTE(review): extract is missing lines (return type, braces, the
   declarations of P and MAYBE_METHOD, and control-flow lines).  */
2171 maybe_rewrite_invocation (tree *method_p, VEC(tree,gc) **arg_list_p,
2172 tree *method_signature_p, tree *special)
2174 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2176 *special = NULL_TREE;
2178 for (p = rules; p->classname; p++)
2180 if (get_identifier (p->classname) == context)
2182 tree method = DECL_NAME (*method_p);
2183 if (get_identifier (p->method) == method
2184 && get_identifier (p->signature) == *method_signature_p)
2187 tree destination_class
2188 = lookup_class (get_identifier (p->new_classname));
2189 gcc_assert (destination_class);
2191 = lookup_java_method (destination_class,
2193 get_identifier (p->new_signature));
/* Without verification the replacement may not be visible yet;
   declare it externally so the call can still be emitted.  */
2194 if (! maybe_method && ! flag_verify_invocations)
2197 = add_method (destination_class, p->flags,
2198 method, get_identifier (p->new_signature));
2199 DECL_EXTERNAL (maybe_method) = 1;
2201 *method_p = maybe_method;
2202 gcc_assert (*method_p);
2203 if (p->rewrite_arglist)
2204 p->rewrite_arglist (arg_list_p);
2205 *method_signature_p = get_identifier (p->new_signature);
2206 *special = integer_one_node;
/* Build a reference to the code of METHOD in SELF_TYPE for a direct
   (statically bound) call.  If the class is compiled, take the
   method's address directly (or through the atable under indirect
   dispatch); otherwise compute SELF_TYPE->methods[i].ncode at run
   time.
   NOTE(review): extract is missing many lines (return type, braces,
   declarations of FUNC/TABLE_INDEX/REF/METH and loop internals);
   restore from the original before editing.  */
2217 build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2218 tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2219 VEC(tree,gc) *arg_list ATTRIBUTE_UNUSED, tree special)
2222 if (is_compiled_class (self_type))
2224 /* With indirect dispatch we have to use indirect calls for all
2225 publicly visible methods or gcc will use PLT indirections
2226 to reach them. We also have to use indirect dispatch for all
2227 external methods. */
2228 if (! flag_indirect_dispatch
2229 || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
2231 func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
/* Indirect dispatch path: index into the atable.  */
2237 = build_int_cst (NULL_TREE,
2238 (get_symbol_table_index
2240 &TYPE_ATABLE_METHODS (output_class))));
2242 = build4 (ARRAY_REF,
2243 TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2244 TYPE_ATABLE_DECL (output_class), table_index,
2245 NULL_TREE, NULL_TREE);
2247 func = convert (method_ptr_type_node, func);
2251 /* We don't know whether the method has been (statically) compiled.
2252 Compile this code to get a reference to the method's code:
2254 SELF_TYPE->methods[METHOD_INDEX].ncode
2258 int method_index = 0;
2261 /* The method might actually be declared in some superclass, so
2262 we have to use its class context, not the caller's notion of
2263 where the method is. */
2264 self_type = DECL_CONTEXT (method);
2265 ref = build_class_ref (self_type);
2266 ref = build1 (INDIRECT_REF, class_type_node, ref);
/* Lazily interned field identifiers, cached across calls.  */
2267 if (ncode_ident == NULL_TREE)
2268 ncode_ident = get_identifier ("ncode");
2269 if (methods_ident == NULL_TREE)
2270 methods_ident = get_identifier ("methods");
2271 ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2272 lookup_field (&class_type_node, methods_ident),
/* Linear search of the class's method list to find METHOD's index.  */
2274 for (meth = TYPE_METHODS (self_type);
2275 ; meth = TREE_CHAIN (meth))
2279 if (meth == NULL_TREE)
2280 fatal_error ("method '%s' not found in class",
2281 IDENTIFIER_POINTER (DECL_NAME (method)));
2284 method_index *= int_size_in_bytes (method_type_node);
2285 ref = fold_build2 (POINTER_PLUS_EXPR, method_ptr_type_node,
2286 ref, size_int (method_index));
2287 ref = build1 (INDIRECT_REF, method_type_node, ref);
2288 func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2289 ref, lookup_field (&method_type_node, ncode_ident),
/* Build an expression reading the dispatch table (vtable) of the
   receiver, which is argument 0 of ARG_LIST.  The receiver is wrapped
   in a save_expr since it is also used by the call itself.
   NOTE(review): extract is missing lines (return type, braces, and the
   final return of DTABLE).  */
2296 invoke_build_dtable (int is_invoke_interface, VEC(tree,gc) *arg_list)
2298 tree dtable, objectref;
2299 tree saved = save_expr (VEC_index (tree, arg_list, 0));
2301 VEC_replace (tree, arg_list, 0, saved);
2303 /* If we're dealing with interfaces and if the objectref
2304 argument is an array then get the dispatch table of the class
2305 Object rather than the one from the objectref. */
2306 objectref = (is_invoke_interface
2307 && is_array_type_p (TREE_TYPE (saved))
2308 ? build_class_ref (object_type_node) : saved);
2310 if (dtable_ident == NULL_TREE)
2311 dtable_ident = get_identifier ("vtable");
2312 dtable = build_java_indirect_ref (object_type_node, objectref,
2313 flag_check_references);
2314 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2315 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2320 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2321 T. If this decl has not been seen before, it will be added to the
2322 [oa]table_methods. If it has, the existing table slot will be
/* The table is a TREE_LIST keyed by (special, decl); a linear walk
   finds an existing slot, otherwise the pair is appended.
   NOTE(review): extract is missing lines (return type, braces, the
   index counter declaration/increment, and the return statements).  */
2326 get_symbol_table_index (tree t, tree special, tree *symbol_table)
2331 if (*symbol_table == NULL_TREE)
2333 *symbol_table = build_tree_list (special, t);
2337 method_list = *symbol_table;
2341 tree value = TREE_VALUE (method_list);
2342 tree purpose = TREE_PURPOSE (method_list);
2343 if (value == t && purpose == special)
2346 if (TREE_CHAIN (method_list) == NULL_TREE)
2349 method_list = TREE_CHAIN (method_list);
2352 TREE_CHAIN (method_list) = build_tree_list (special, t);
2357 build_invokevirtual (tree dtable, tree method, tree special)
2360 tree nativecode_ptr_ptr_type_node
2361 = build_pointer_type (nativecode_ptr_type_node);
2365 if (flag_indirect_dispatch)
2367 gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
2370 = build_int_cst (NULL_TREE, get_symbol_table_index
2372 &TYPE_OTABLE_METHODS (output_class)));
2373 method_index = build4 (ARRAY_REF, integer_type_node,
2374 TYPE_OTABLE_DECL (output_class),
2375 otable_index, NULL_TREE, NULL_TREE);
2379 /* We fetch the DECL_VINDEX field directly here, rather than
2380 using get_method_index(). DECL_VINDEX is the true offset
2381 from the vtable base to a method, regrdless of any extra
2382 words inserted at the start of the vtable. */
2383 method_index = DECL_VINDEX (method);
2384 method_index = size_binop (MULT_EXPR, method_index,
2385 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
2386 if (TARGET_VTABLE_USES_DESCRIPTORS)
2387 method_index = size_binop (MULT_EXPR, method_index,
2388 size_int (TARGET_VTABLE_USES_DESCRIPTORS));
2391 func = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dtable), dtable,
2392 convert (sizetype, method_index));
2394 if (TARGET_VTABLE_USES_DESCRIPTORS)
2395 func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
2398 func = fold_convert (nativecode_ptr_ptr_type_node, func);
2399 func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
2405 static GTY(()) tree class_ident;
2407 build_invokeinterface (tree dtable, tree method)
2412 /* We expand invokeinterface here. */
2414 if (class_ident == NULL_TREE)
2415 class_ident = get_identifier ("class");
2417 dtable = build_java_indirect_ref (dtable_type, dtable,
2418 flag_check_references);
2419 dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2420 lookup_field (&dtable_type, class_ident), NULL_TREE);
2422 interface = DECL_CONTEXT (method);
2423 gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2424 layout_class_methods (interface);
2426 if (flag_indirect_dispatch)
2429 = 2 * (get_symbol_table_index
2430 (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2432 = build4 (ARRAY_REF,
2433 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2434 TYPE_ITABLE_DECL (output_class),
2435 build_int_cst (NULL_TREE, itable_index-1),
2436 NULL_TREE, NULL_TREE);
2438 = build4 (ARRAY_REF,
2439 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2440 TYPE_ITABLE_DECL (output_class),
2441 build_int_cst (NULL_TREE, itable_index),
2442 NULL_TREE, NULL_TREE);
2443 interface = convert (class_ptr_type, interface);
2444 idx = convert (integer_type_node, idx);
2448 idx = build_int_cst (NULL_TREE,
2449 get_interface_method_index (method, interface));
2450 interface = build_class_ref (interface);
2453 return build_call_nary (ptr_type_node,
2454 build_address_of (soft_lookupinterfacemethod_node),
2455 3, dtable, interface, idx);
2458 /* Expand one of the invoke_* opcodes.
2459 OPCODE is the specific opcode.
2460 METHOD_REF_INDEX is an index into the constant pool.
2461 NARGS is the number of arguments, or -1 if not specified. */
2464 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
2466 tree method_signature
2467 = COMPONENT_REF_SIGNATURE(¤t_jcf->cpool, method_ref_index);
2468 tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool,
2471 = get_class_constant (current_jcf,
2472 COMPONENT_REF_CLASS_INDEX(¤t_jcf->cpool,
2474 const char *const self_name
2475 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2476 tree call, func, method, method_type;
2477 VEC(tree,gc) *arg_list;
2478 tree check = NULL_TREE;
2480 tree special = NULL_TREE;
2482 if (! CLASS_LOADED_P (self_type))
2484 load_class (self_type, 1);
2485 safe_layout_class (self_type);
2486 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2487 fatal_error ("failed to find class '%s'", self_name);
2489 layout_class_methods (self_type);
2491 if (ID_INIT_P (method_name))
2492 method = lookup_java_constructor (self_type, method_signature);
2494 method = lookup_java_method (self_type, method_name, method_signature);
2496 /* We've found a method in a class other than the one in which it
2497 was wanted. This can happen if, for instance, we're trying to
2498 compile invokespecial super.equals().
2499 FIXME: This is a kludge. Rather than nullifying the result, we
2500 should change lookup_java_method() so that it doesn't search the
2501 superclass chain when we're BC-compiling. */
2502 if (! flag_verify_invocations
2504 && ! TYPE_ARRAY_P (self_type)
2505 && self_type != DECL_CONTEXT (method))
2508 /* We've found a method in an interface, but this isn't an interface
2510 if (opcode != OPCODE_invokeinterface
2512 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2515 /* We've found a non-interface method but we are making an
2516 interface call. This can happen if the interface overrides a
2517 method in Object. */
2518 if (! flag_verify_invocations
2519 && opcode == OPCODE_invokeinterface
2521 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
2524 if (method == NULL_TREE)
2526 if (flag_verify_invocations || ! flag_indirect_dispatch)
2528 error ("class '%s' has no method named '%s' matching signature '%s'",
2530 IDENTIFIER_POINTER (method_name),
2531 IDENTIFIER_POINTER (method_signature));
2535 int flags = ACC_PUBLIC;
2536 if (opcode == OPCODE_invokestatic)
2537 flags |= ACC_STATIC;
2538 if (opcode == OPCODE_invokeinterface)
2540 flags |= ACC_INTERFACE | ACC_ABSTRACT;
2541 CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2543 method = add_method (self_type, flags, method_name,
2545 DECL_ARTIFICIAL (method) = 1;
2546 METHOD_DUMMY (method) = 1;
2547 layout_class_method (self_type, NULL,
2552 /* Invoke static can't invoke static/abstract method */
2553 if (method != NULL_TREE)
2555 if (opcode == OPCODE_invokestatic)
2557 if (!METHOD_STATIC (method))
2559 error ("invokestatic on non static method");
2562 else if (METHOD_ABSTRACT (method))
2564 error ("invokestatic on abstract method");
2570 if (METHOD_STATIC (method))
2572 error ("invoke[non-static] on static method");
2578 if (method == NULL_TREE)
2580 /* If we got here, we emitted an error message above. So we
2581 just pop the arguments, push a properly-typed zero, and
2583 method_type = get_type_from_signature (method_signature);
2584 pop_arguments (method_type);
2585 if (opcode != OPCODE_invokestatic)
2586 pop_type (self_type);
2587 method_type = promote_type (TREE_TYPE (method_type));
2588 push_value (convert (method_type, integer_zero_node));
2592 method_type = TREE_TYPE (method);
2593 arg_list = pop_arguments (method_type);
2594 flush_quick_stack ();
2596 maybe_rewrite_invocation (&method, &arg_list, &method_signature,
2600 if (opcode == OPCODE_invokestatic)
2601 func = build_known_method_ref (method, method_type, self_type,
2602 method_signature, arg_list, special);
2603 else if (opcode == OPCODE_invokespecial
2604 || (opcode == OPCODE_invokevirtual
2605 && (METHOD_PRIVATE (method)
2606 || METHOD_FINAL (method)
2607 || CLASS_FINAL (TYPE_NAME (self_type)))))
2609 /* If the object for the method call is null, we throw an
2610 exception. We don't do this if the object is the current
2611 method's `this'. In other cases we just rely on an
2612 optimization pass to eliminate redundant checks. FIXME:
2613 Unfortunately there doesn't seem to be a way to determine
2614 what the current method is right now.
2615 We do omit the check if we're calling <init>. */
2616 /* We use a SAVE_EXPR here to make sure we only evaluate
2617 the new `self' expression once. */
2618 tree save_arg = save_expr (VEC_index (tree, arg_list, 0));
2619 VEC_replace (tree, arg_list, 0, save_arg);
2620 check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2621 func = build_known_method_ref (method, method_type, self_type,
2622 method_signature, arg_list, special);
2626 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2628 if (opcode == OPCODE_invokevirtual)
2629 func = build_invokevirtual (dtable, method, special);
2631 func = build_invokeinterface (dtable, method);
2634 if (TREE_CODE (func) == ADDR_EXPR)
2635 TREE_TYPE (func) = build_pointer_type (method_type);
2637 func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2639 call = build_call_vec (TREE_TYPE (method_type), func, arg_list);
2640 TREE_SIDE_EFFECTS (call) = 1;
2641 call = check_for_builtin (method, call);
2643 if (check != NULL_TREE)
2645 call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2646 TREE_SIDE_EFFECTS (call) = 1;
2649 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2650 java_add_stmt (call);
2654 flush_quick_stack ();
2658 /* Create a stub which will be put into the vtable but which will call
2662 build_jni_stub (tree method)
2664 tree jnifunc, call, body, method_sig, arg_types;
2665 tree jniarg0, jniarg1, jniarg2, jniarg3;
2666 tree jni_func_type, tem;
2667 tree env_var, res_var = NULL_TREE, block;
2671 VEC(tree,gc) *args = NULL;
2674 tree klass = DECL_CONTEXT (method);
2675 klass = build_class_ref (klass);
2677 gcc_assert (METHOD_NATIVE (method) && flag_jni);
2679 DECL_ARTIFICIAL (method) = 1;
2680 DECL_EXTERNAL (method) = 0;
2682 env_var = build_decl (input_location,
2683 VAR_DECL, get_identifier ("env"), ptr_type_node);
2684 DECL_CONTEXT (env_var) = method;
2686 if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2688 res_var = build_decl (input_location, VAR_DECL, get_identifier ("res"),
2689 TREE_TYPE (TREE_TYPE (method)));
2690 DECL_CONTEXT (res_var) = method;
2691 TREE_CHAIN (env_var) = res_var;
2694 method_args = DECL_ARGUMENTS (method);
2695 block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2696 TREE_SIDE_EFFECTS (block) = 1;
2697 TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));
2699 /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2700 body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2701 build_call_nary (ptr_type_node,
2702 build_address_of (soft_getjnienvnewframe_node),
2705 /* The JNIEnv structure is the first argument to the JNI function. */
2706 args_size += int_size_in_bytes (TREE_TYPE (env_var));
2707 VEC_safe_push (tree, gc, args, env_var);
2709 /* For a static method the second argument is the class. For a
2710 non-static method the second argument is `this'; that is already
2711 available in the argument list. */
2712 if (METHOD_STATIC (method))
2714 args_size += int_size_in_bytes (TREE_TYPE (klass));
2715 VEC_safe_push (tree, gc, args, klass);
2718 /* All the arguments to this method become arguments to the
2719 underlying JNI function. If we had to wrap object arguments in a
2720 special way, we would do that here. */
2721 for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem))
2723 int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2724 #ifdef PARM_BOUNDARY
2725 arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2728 args_size += (arg_bits / BITS_PER_UNIT);
2730 VEC_safe_push (tree, gc, args, tem);
2732 arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2734 /* Argument types for static methods and the JNIEnv structure.
2735 FIXME: Write and use build_function_type_vec to avoid this. */
2736 if (METHOD_STATIC (method))
2737 arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2738 arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2740 /* We call _Jv_LookupJNIMethod to find the actual underlying
2741 function pointer. _Jv_LookupJNIMethod will throw the appropriate
2742 exception if this function is not found at runtime. */
2743 method_sig = build_java_signature (TREE_TYPE (method));
2745 jniarg1 = build_utf8_ref (DECL_NAME (method));
2746 jniarg2 = build_utf8_ref (unmangle_classname
2747 (IDENTIFIER_POINTER (method_sig),
2748 IDENTIFIER_LENGTH (method_sig)));
2749 jniarg3 = build_int_cst (NULL_TREE, args_size);
2751 tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2753 #ifdef MODIFY_JNI_METHOD_CALL
2754 tem = MODIFY_JNI_METHOD_CALL (tem);
2757 jni_func_type = build_pointer_type (tem);
2759 /* Use the actual function type, rather than a generic pointer type,
2760 such that this decl keeps the actual pointer type from being
2761 garbage-collected. If it is, we end up using canonical types
2762 with different uids for equivalent function types, and this in
2763 turn causes utf8 identifiers and output order to vary. */
2764 meth_var = build_decl (input_location,
2765 VAR_DECL, get_identifier ("meth"), jni_func_type);
2766 TREE_STATIC (meth_var) = 1;
2767 TREE_PUBLIC (meth_var) = 0;
2768 DECL_EXTERNAL (meth_var) = 0;
2769 DECL_CONTEXT (meth_var) = method;
2770 DECL_ARTIFICIAL (meth_var) = 1;
2771 DECL_INITIAL (meth_var) = null_pointer_node;
2772 TREE_USED (meth_var) = 1;
2773 chainon (env_var, meth_var);
2774 build_result_decl (method);
2776 jnifunc = build3 (COND_EXPR, jni_func_type,
2777 build2 (NE_EXPR, boolean_type_node,
2778 meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
2780 build2 (MODIFY_EXPR, jni_func_type, meth_var,
2782 (NOP_EXPR, jni_func_type,
2783 build_call_nary (ptr_type_node,
2785 (soft_lookupjnimethod_node),
2788 jniarg2, jniarg3))));
2790 /* Now we make the actual JNI call via the resulting function
2792 call = build_call_vec (TREE_TYPE (TREE_TYPE (method)), jnifunc, args);
2794 /* If the JNI call returned a result, capture it here. If we had to
2795 unwrap JNI object results, we would do that here. */
2796 if (res_var != NULL_TREE)
2798 /* If the call returns an object, it may return a JNI weak
2799 reference, in which case we must unwrap it. */
2800 if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2801 call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2802 build_address_of (soft_unwrapjni_node),
2804 call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2808 TREE_SIDE_EFFECTS (call) = 1;
2810 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2811 TREE_SIDE_EFFECTS (body) = 1;
2813 /* Now free the environment we allocated. */
2814 call = build_call_nary (ptr_type_node,
2815 build_address_of (soft_jnipopsystemframe_node),
2817 TREE_SIDE_EFFECTS (call) = 1;
2818 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2819 TREE_SIDE_EFFECTS (body) = 1;
2821 /* Finally, do the return. */
2822 if (res_var != NULL_TREE)
2825 gcc_assert (DECL_RESULT (method));
2826 /* Make sure we copy the result variable to the actual
2827 result. We use the type of the DECL_RESULT because it
2828 might be different from the return type of the function:
2829 it might be promoted. */
2830 drt = TREE_TYPE (DECL_RESULT (method));
2831 if (drt != TREE_TYPE (res_var))
2832 res_var = build1 (CONVERT_EXPR, drt, res_var);
2833 res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2834 TREE_SIDE_EFFECTS (res_var) = 1;
2837 body = build2 (COMPOUND_EXPR, void_type_node, body,
2838 build1 (RETURN_EXPR, void_type_node, res_var));
2839 TREE_SIDE_EFFECTS (body) = 1;
2841 /* Prepend class initialization for static methods reachable from
2843 if (METHOD_STATIC (method)
2844 && (! METHOD_PRIVATE (method)
2845 || INNER_CLASS_P (DECL_CONTEXT (method))))
2847 tree init = build_call_expr (soft_initclass_node, 1,
2849 body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2850 TREE_SIDE_EFFECTS (body) = 1;
2853 bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2859 /* Given lvalue EXP, return a volatile expression that references the
2863 java_modify_addr_for_volatile (tree exp)
2865 tree exp_type = TREE_TYPE (exp);
2867 = build_qualified_type (exp_type,
2868 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2869 tree addr = build_fold_addr_expr (exp);
2870 v_type = build_pointer_type (v_type);
2871 addr = fold_convert (v_type, addr);
2872 exp = build_fold_indirect_ref (addr);
2877 /* Expand an operation to extract from or store into a field.
2878 IS_STATIC is 1 iff the field is static.
2879 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2880 FIELD_REF_INDEX is an index into the constant pool. */
2883 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
2886 = get_class_constant (current_jcf,
2887 COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool,
2889 const char *self_name
2890 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2891 tree field_name = COMPONENT_REF_NAME (¤t_jcf->cpool, field_ref_index);
2892 tree field_signature = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool,
2894 tree field_type = get_type_from_signature (field_signature);
2895 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2898 tree original_self_type = self_type;
2902 if (! CLASS_LOADED_P (self_type))
2903 load_class (self_type, 1);
2904 field_decl = lookup_field (&self_type, field_name);
2905 if (field_decl == error_mark_node)
2909 else if (field_decl == NULL_TREE)
2911 if (! flag_verify_invocations)
2913 int flags = ACC_PUBLIC;
2915 flags |= ACC_STATIC;
2916 self_type = original_self_type;
2917 field_decl = add_field (original_self_type, field_name,
2919 DECL_ARTIFICIAL (field_decl) = 1;
2920 DECL_IGNORED_P (field_decl) = 1;
2922 /* FIXME: We should be pessimistic about volatility. We
2923 don't know one way or another, but this is safe.
2924 However, doing this has bad effects on code quality. We
2925 need to look at better ways to do this. */
2926 TREE_THIS_VOLATILE (field_decl) = 1;
2931 error ("missing field '%s' in '%s'",
2932 IDENTIFIER_POINTER (field_name), self_name);
2936 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2938 error ("mismatching signature for field '%s' in '%s'",
2939 IDENTIFIER_POINTER (field_name), self_name);
2942 field_ref = is_static ? NULL_TREE : pop_value (self_type);
2946 push_value (convert (field_type, integer_zero_node));
2947 flush_quick_stack ();
2951 field_ref = build_field_ref (field_ref, self_type, field_name);
2953 && ! flag_indirect_dispatch)
2955 tree context = DECL_CONTEXT (field_ref);
2956 if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2957 field_ref = build_class_init (context, field_ref);
2959 field_ref = build_class_init (self_type, field_ref);
2963 flush_quick_stack ();
2964 if (FIELD_FINAL (field_decl))
2966 if (DECL_CONTEXT (field_decl) != current_class)
2967 error ("assignment to final field %q+D not in field's class",
2969 /* We used to check for assignments to final fields not
2970 occurring in the class initializer or in a constructor
2971 here. However, this constraint doesn't seem to be
2972 enforced by the JVM. */
2975 if (TREE_THIS_VOLATILE (field_decl))
2976 field_ref = java_modify_addr_for_volatile (field_ref);
2978 modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2979 field_ref, new_value);
2981 if (TREE_THIS_VOLATILE (field_decl))
2983 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2985 java_add_stmt (modify_expr);
2989 tree temp = build_decl (input_location,
2990 VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2991 java_add_local_var (temp);
2993 if (TREE_THIS_VOLATILE (field_decl))
2994 field_ref = java_modify_addr_for_volatile (field_ref);
2997 = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2998 java_add_stmt (modify_expr);
3000 if (TREE_THIS_VOLATILE (field_decl))
3002 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
3006 TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
3010 load_type_state (int pc)
3013 tree vec = VEC_index (tree, type_states, pc);
3014 int cur_length = TREE_VEC_LENGTH (vec);
3015 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
3016 for (i = 0; i < cur_length; i++)
3017 type_map [i] = TREE_VEC_ELT (vec, i);
3020 /* Go over METHOD's bytecode and note instruction starts in
3021 instruction_bits[]. */
3024 note_instructions (JCF *jcf, tree method)
3027 unsigned char* byte_ops;
3028 long length = DECL_CODE_LENGTH (method);
3033 #undef RET /* Defined by config/i386/i386.h */
3035 #define BCODE byte_ops
3036 #define BYTE_type_node byte_type_node
3037 #define SHORT_type_node short_type_node
3038 #define INT_type_node int_type_node
3039 #define LONG_type_node long_type_node
3040 #define CHAR_type_node char_type_node
3041 #define PTR_type_node ptr_type_node
3042 #define FLOAT_type_node float_type_node
3043 #define DOUBLE_type_node double_type_node
3044 #define VOID_type_node void_type_node
3045 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3046 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3047 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3048 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3050 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
3052 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3053 byte_ops = jcf->read_ptr;
3054 instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
3055 memset (instruction_bits, 0, length + 1);
3056 type_states = VEC_alloc (tree, gc, length + 1);
3057 VEC_safe_grow_cleared (tree, gc, type_states, length + 1);
3059 /* This pass figures out which PC can be the targets of jumps. */
3060 for (PC = 0; PC < length;)
3062 int oldpc = PC; /* PC at instruction start. */
3063 instruction_bits [PC] |= BCODE_INSTRUCTION_START;
3064 switch (byte_ops[PC++])
3066 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3068 PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3071 #define NOTE_LABEL(PC) note_label(oldpc, PC)
3073 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3074 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3075 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3076 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3077 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3078 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3079 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3080 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3082 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3083 PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3084 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3085 ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3086 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3087 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3088 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3089 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3091 /* two forms of wide instructions */
3092 #define PRE_SPECIAL_WIDE(IGNORE) \
3094 int modified_opcode = IMMEDIATE_u1; \
3095 if (modified_opcode == OPCODE_iinc) \
3097 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3098 (void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
3102 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3106 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3108 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3110 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3111 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3112 PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3113 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
3114 #define PRE_ARRAY_STORE(TYPE) /* nothing */
3115 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3116 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3117 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3118 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3119 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
3121 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3122 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3123 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3124 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3125 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3126 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3127 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3129 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3131 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)
3133 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3134 PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3136 #define PRE_LOOKUP_SWITCH \
3137 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3138 NOTE_LABEL (default_offset+oldpc); \
3140 while (--npairs >= 0) { \
3141 jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
3142 jint offset = IMMEDIATE_s4; \
3143 NOTE_LABEL (offset+oldpc); } \
3146 #define PRE_TABLE_SWITCH \
3147 { jint default_offset = IMMEDIATE_s4; \
3148 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3149 NOTE_LABEL (default_offset+oldpc); \
3151 while (low++ <= high) { \
3152 jint offset = IMMEDIATE_s4; \
3153 NOTE_LABEL (offset+oldpc); } \
3156 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3157 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3158 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3159 (void)(IMMEDIATE_u2); \
3160 PC += 2 * IS_INTERFACE /* for invokeinterface */;
3162 #include "javaop.def"
3169 expand_byte_code (JCF *jcf, tree method)
3173 const unsigned char *linenumber_pointer;
3174 int dead_code_index = -1;
3175 unsigned char* byte_ops;
3176 long length = DECL_CODE_LENGTH (method);
3177 location_t max_location = input_location;
3180 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3181 byte_ops = jcf->read_ptr;
3183 /* We make an initial pass of the line number table, to note
3184 which instructions have associated line number entries. */
3185 linenumber_pointer = linenumber_table;
3186 for (i = 0; i < linenumber_count; i++)
3188 int pc = GET_u2 (linenumber_pointer);
3189 linenumber_pointer += 4;
3191 warning (0, "invalid PC in line number table");
3194 if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3195 instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3196 instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
3200 if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3203 promote_arguments ();
3204 cache_this_class_ref (method);
3205 cache_cpool_data_ref ();
3207 /* Translate bytecodes. */
3208 linenumber_pointer = linenumber_table;
3209 for (PC = 0; PC < length;)
3211 if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3213 tree label = lookup_label (PC);
3214 flush_quick_stack ();
3215 if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3216 java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
3217 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3218 load_type_state (PC);
3221 if (! (instruction_bits [PC] & BCODE_VERIFIED))
3223 if (dead_code_index == -1)
3225 /* This is the start of a region of unreachable bytecodes.
3226 They still need to be processed in order for EH ranges
3227 to get handled correctly. However, we can simply
3228 replace these bytecodes with nops. */
3229 dead_code_index = PC;
3232 /* Turn this bytecode into a nop. */
3237 if (dead_code_index != -1)
3239 /* We've just reached the end of a region of dead code. */
3241 warning (0, "unreachable bytecode from %d to before %d",
3242 dead_code_index, PC);
3243 dead_code_index = -1;
3247 /* Handle possible line number entry for this PC.
3249 This code handles out-of-order and multiple linenumbers per PC,
3250 but is optimized for the case of line numbers increasing
3251 monotonically with PC. */
3252 if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
3254 if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3255 || GET_u2 (linenumber_pointer) != PC)
3256 linenumber_pointer = linenumber_table;
3257 while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3259 int pc = GET_u2 (linenumber_pointer);
3260 linenumber_pointer += 4;
3263 int line = GET_u2 (linenumber_pointer - 2);
3264 input_location = linemap_line_start (line_table, line, 1);
3265 if (input_location > max_location)
3266 max_location = input_location;
3267 if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
3272 maybe_pushlevels (PC);
3273 PC = process_jvm_instruction (PC, byte_ops, length);
3274 maybe_poplevels (PC);
3277 uncache_this_class_ref (method);
3279 if (dead_code_index != -1)
3281 /* We've just reached the end of a region of dead code. */
3283 warning (0, "unreachable bytecode from %d to the end of the method",
3287 DECL_FUNCTION_LAST_LINE (method) = max_location;
3291 java_push_constant_from_pool (JCF *jcf, int index)
3294 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3297 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3298 index = alloc_name_constant (CONSTANT_String, name);
3299 c = build_ref_from_constant_pool (index);
3300 c = convert (promote_type (string_type_node), c);
3302 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3303 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3305 tree record = get_class_constant (jcf, index);
3306 c = build_class_ref (record);
3309 c = get_constant (jcf, index);
3314 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3315 long length ATTRIBUTE_UNUSED)
3317 const char *opname; /* Temporary ??? */
3318 int oldpc = PC; /* PC at instruction start. */
3320 /* If the instruction is at the beginning of an exception handler,
3321 replace the top of the stack with the thrown object reference. */
3322 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3324 /* Note that the verifier will not emit a type map at all for
3325 dead exception handlers. In this case we just ignore the
3327 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3329 tree type = pop_type (promote_type (throwable_type_node));
3330 push_value (build_exception_object_ref (type));
3334 switch (byte_ops[PC++])
3336 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3339 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3342 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3344 int saw_index = 0; \
3345 int index = OPERAND_VALUE; \
3346 (void) saw_index; /* Avoid set but not used warning. */ \
3348 (find_local_variable (index, return_address_type_node, oldpc)); \
3351 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3353 /* OPERAND_VALUE may have side-effects on PC */ \
3354 int opvalue = OPERAND_VALUE; \
3355 build_java_jsr (oldpc + opvalue, PC); \
3358 /* Push a constant onto the stack. */
3359 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3360 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3361 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3362 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3364 /* internal macro added for use by the WIDE case */
3365 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3366 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3368 /* Push local variable onto the opcode stack. */
3369 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3371 /* have to do this since OPERAND_VALUE may have side-effects */ \
3372 int opvalue = OPERAND_VALUE; \
3373 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3376 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3377 expand_java_return (OPERAND_TYPE##_type_node)
3379 #define REM_EXPR TRUNC_MOD_EXPR
/* Binary arithmetic/logic: OPERAND_VALUE##_EXPR pastes the operand name
   into a tree code (REM is aliased to TRUNC_MOD_EXPR just above).  */
3380 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3381 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
/* getfield/putfield/getstatic/putstatic: the u2 immediate is the field
   reference index in the constant pool.  */
3383 #define FIELD(IS_STATIC, IS_PUT) \
3384 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
/* Conditional branches: the s2 immediate is a signed offset taken
   relative to oldpc.  */
3386 #define TEST(OPERAND_TYPE, CONDITION) \
3387 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3389 #define COND(OPERAND_TYPE, CONDITION) \
3390 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
/* Unconditional control transfer; dispatches to BRANCH_GOTO,
   BRANCH_CALL (jsr) or BRANCH_RETURN (ret) below.  */
3392 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3393 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3395 #define BRANCH_GOTO(OPERAND_VALUE) \
3396 expand_java_goto (oldpc + OPERAND_VALUE)
3398 #define BRANCH_CALL(OPERAND_VALUE) \
3399 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
/* NOTE(review): this BRANCH_RETURN references OPERAND_TYPE, which is not
   one of its parameters, and it is redefined unconditionally at 3417
   below.  Presumably the original source guards this first definition
   with an #ifdef that is not visible in this excerpt -- confirm.  */
3402 #define BRANCH_RETURN(OPERAND_VALUE) \
3404 tree type = OPERAND_TYPE##_type_node; \
3405 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3406 expand_java_ret (value); \
3410 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3411 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3412 fprintf (stderr, "(not implemented)\n")
3413 #define NOT_IMPL1(OPERAND_VALUE) \
3414 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3415 fprintf (stderr, "(not implemented)\n")
/* Active definition: `ret' is reported as not implemented.  */
3417 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
/* Stack-shuffling opcodes (pop, swap, dup and variants).  */
3419 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3421 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3423 #define STACK_SWAP(COUNT) java_stack_swap()
3425 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3426 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3427 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
/* tableswitch/lookupswitch: round PC up to the next 4-byte boundary
   (the class-file format pads the switch operands), then expand the
   matching *_SWITCH macro below.  */
3429 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3430 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
/* lookupswitch reads a default offset and NPAIRS (match, offset) pairs;
   tableswitch reads a default offset, low/high bounds, and one offset
   per value in [low, high].  All branch targets are oldpc-relative, and
   every IMMEDIATE_s4 read advances PC.  */
3432 #define LOOKUP_SWITCH \
3433 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3434 tree selector = pop_value (INT_type_node); \
3435 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3436 while (--npairs >= 0) \
3438 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3439 expand_java_add_case (switch_expr, match, oldpc + offset); \
3443 #define TABLE_SWITCH \
3444 { jint default_offset = IMMEDIATE_s4; \
3445 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3446 tree selector = pop_value (INT_type_node); \
3447 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3448 for (; low <= high; low++) \
3450 jint offset = IMMEDIATE_s4; \
3451 expand_java_add_case (switch_expr, low, oldpc + offset); \
/* invoke{virtual,special,static,interface}.  The opcode byte itself is
   re-read from byte_ops[PC-1].  invokeinterface carries an extra count
   byte (kept as nargs) and a byte that is read and discarded.  */
3455 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3456 { int opcode = byte_ops[PC-1]; \
3457 int method_ref_index = IMMEDIATE_u2; \
3459 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3461 expand_invoke (opcode, method_ref_index, nargs); \
3464 /* Handle new, checkcast, instanceof */
3465 #define OBJECT(TYPE, OP) \
3466 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
/* Array opcodes: dispatch on SUBOP (LOAD, STORE, LENGTH, NEW, ...).  */
3468 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3470 #define ARRAY_LOAD(OPERAND_TYPE) \
3472 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3475 #define ARRAY_STORE(OPERAND_TYPE) \
3477 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3480 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
/* Array creation: PTR = anewarray (class operand), NUM = newarray
   (primitive element type), MULTI = multianewarray.  */
3481 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3482 #define ARRAY_NEW_PTR() \
3483 push_value (build_anewarray (get_class_constant (current_jcf, \
3485 pop_value (int_type_node)));
/* newarray: the u1 immediate encodes the primitive element type; the
   popped int is the element count.  */
3486 #define ARRAY_NEW_NUM() \
3488 int atype = IMMEDIATE_u1; \
3489 push_value (build_newarray (atype, pop_value (int_type_node)));\
3491 #define ARRAY_NEW_MULTI() \
3493 tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3494 int ndims = IMMEDIATE_u1; \
3495 expand_java_multianewarray( klass, ndims ); \
3498 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3499 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3500 pop_value (OPERAND_TYPE##_type_node)));
/* Narrowing conversions (e.g. i2b): convert to the narrow type, then
   wrap in a NOP_EXPR back to int, since the value occupies an int-sized
   stack slot.  CONVERT is the plain (non-rewrapped) form.  */
3502 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3504 push_value (build1 (NOP_EXPR, int_type_node, \
3505 (convert (TO_TYPE##_type_node, \
3506 pop_value (FROM_TYPE##_type_node))))); \
3509 #define CONVERT(FROM_TYPE, TO_TYPE) \
3511 push_value (convert (TO_TYPE##_type_node, \
3512 pop_value (FROM_TYPE##_type_node))); \
3515 /* internal macro added for use by the WIDE case
3516 Added TREE_TYPE (decl) assignment, apbianco */
/* Pop a value and store it into local variable slot OPVALUE.  The
   slot's recorded type is refined to the popped value's actual type
   (TREE_TYPE (value)) before the decl is looked up, so the MODIFY_EXPR
   assigns between matching types.  */
3517 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3520 int index = OPVALUE; \
3521 tree type = OPTYPE; \
3522 value = pop_value (type); \
3523 type = TREE_TYPE (value); \
3524 decl = find_local_variable (index, type, oldpc); \
3525 set_local_type (index, type); \
3526 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
3529 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3531 /* have to do this since OPERAND_VALUE may have side-effects */ \
3532 int opvalue = OPERAND_VALUE; \
3533 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
/* `Special' opcodes (monitorenter/monitorexit, iinc, wide, athrow, ...)
   dispatch by pasting the instruction name onto SPECIAL_.  */
3536 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3537 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3539 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3540 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
/* Pop the object reference and emit a call to the runtime monitor
   routine CALL.  The quick stack is flushed first and the call is
   marked as having side effects so it is not optimized away.  */
3542 #define MONITOR_OPERATION(call) \
3544 tree o = pop_value (ptr_type_node); \
3546 flush_quick_stack (); \
3547 c = build_java_monitor (call, o); \
3548 TREE_SIDE_EFFECTS (c) = 1; \
3549 java_add_stmt (c); \
3552 #define SPECIAL_IINC(IGNORED) \
3554 unsigned int local_var_index = IMMEDIATE_u1; \
3555 int ival = IMMEDIATE_s1; \
3556 expand_iinc(local_var_index, ival, oldpc); \
3559 #define SPECIAL_WIDE(IGNORED) \
3561 int modified_opcode = IMMEDIATE_u1; \
3562 unsigned int local_var_index = IMMEDIATE_u2; \
3563 switch (modified_opcode) \
3567 int ival = IMMEDIATE_s2; \
3568 expand_iinc (local_var_index, ival, oldpc); \
3571 case OPCODE_iload: \
3572 case OPCODE_lload: \
3573 case OPCODE_fload: \
3574 case OPCODE_dload: \
3575 case OPCODE_aload: \
3577 /* duplicate code from LOAD macro */ \
3578 LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3581 case OPCODE_istore: \
3582 case OPCODE_lstore: \
3583 case OPCODE_fstore: \
3584 case OPCODE_dstore: \
3585 case OPCODE_astore: \
3587 STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3591 error ("unrecogized wide sub-instruction"); \
/* athrow: pop the exception object and expand the throw.  */
3595 #define SPECIAL_THROW(IGNORED) \
3596 build_java_athrow (pop_value (throwable_type_node))
3598 #define SPECIAL_BREAK NOT_IMPL1
3599 #define IMPL NOT_IMPL
/* javaop.def expands to one case per bytecode using the macros above.  */
3601 #include "javaop.def"
/* Default arm of the opcode dispatch: report an unknown opcode.
   NOTE(review): the enclosing switch/function is not visible in this
   excerpt.  */
3604 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3609 /* Return the opcode at PC in the code section pointed to by
3612 static unsigned char
3613 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3615 unsigned char opcode;
/* Remember the current read position so it can be restored afterwards:
   peeking must not disturb the class-file reader's state.  */
3616 long absolute_offset = (long)JCF_TELL (jcf);
3618 JCF_SEEK (jcf, code_offset);
3619 opcode = jcf->read_ptr [pc];
/* Restore the saved position before returning the byte.  */
3620 JCF_SEEK (jcf, absolute_offset);
3624 /* Some bytecode compilers are emitting accurate LocalVariableTable
3625 attributes. Here's an example:
3630 Attribute "LocalVariableTable"
3631 slot #<n>: ... (PC: PC+1 length: L)
3633 This is accurate because the local in slot <n> really exists after
3634 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3636 This procedure recognizes this situation and extends the live range
3637 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3638 length of the store instruction.)
3640 This function is used by `give_name_to_locals' so that a local's
3641 DECL features a DECL_LOCAL_START_PC such that the first related
3642 store operation will use DECL as a destination, not an unrelated
3643 temporary created for the occasion.
3645 This function uses a global (instruction_bits) that `note_instructions'
3646 should have allocated and filled in properly. */
3649 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3650 int start_pc, int slot)
3652 int first, index, opcode;
3661 /* Find last previous instruction and remember it */
3662 for (pc = start_pc-1; pc; pc--)
3663 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3667 /* Retrieve the instruction, handle `wide'. */
3668 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3669 if (opcode == OPCODE_wide)
3672 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
/* Map each implicit-slot <t>store_<n> group to its base opcode so the
   slot number can later be recovered as (opcode - first).  */
3677 case OPCODE_astore_0:
3678 case OPCODE_astore_1:
3679 case OPCODE_astore_2:
3680 case OPCODE_astore_3:
3681 first = OPCODE_astore_0;
3684 case OPCODE_istore_0:
3685 case OPCODE_istore_1:
3686 case OPCODE_istore_2:
3687 case OPCODE_istore_3:
3688 first = OPCODE_istore_0;
3691 case OPCODE_lstore_0:
3692 case OPCODE_lstore_1:
3693 case OPCODE_lstore_2:
3694 case OPCODE_lstore_3:
3695 first = OPCODE_lstore_0;
3698 case OPCODE_fstore_0:
3699 case OPCODE_fstore_1:
3700 case OPCODE_fstore_2:
3701 case OPCODE_fstore_3:
3702 first = OPCODE_fstore_0;
3705 case OPCODE_dstore_0:
3706 case OPCODE_dstore_1:
3707 case OPCODE_dstore_2:
3708 case OPCODE_dstore_3:
3709 first = OPCODE_dstore_0;
/* Explicit-index <t>store forms: read the index operand; after `wide'
   the index is two bytes.  NOTE(review): the JVM spec stores the wide
   index big-endian (first byte is the high byte), but this combines
   the bytes as (second << 8) + first -- verify against the class-file
   specification and the rest of the reader.  */
3717 index = peek_opcode_at_pc (jcf, code_offset, pc);
3720 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3721 index = (other << 8) + index;
/* NOTE(review): `first' and `index' are presumably pre-initialized to
   sentinel values in lines elided from this excerpt -- confirm.  */
3726 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3727 means we have a <t>store. */
3728 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3734 /* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
3735 order, as specified by Java Language Specification.
3737 The problem is that while expand_expr will evaluate its sub-operands in
3738 left-to-right order, for variables it will just return an rtx (i.e.
3739 an lvalue) for the variable (rather than an rvalue). So it is possible
3740 that a later sub-operand will change the register, and when the
3741 actual operation is done, it will use the new value, when it should
3742 have used the original value.
3744 We fix this by using save_expr. This forces the sub-operand to be
3745 copied into a fresh virtual register,
3747 For method invocation, we modify the arguments so that a
3748 left-to-right order evaluation is performed. Saved expressions
3749 will, in CALL_EXPR order, be reused when the call will be expanded.
3751 We also promote outgoing args if needed. */
3754 force_evaluation_order (tree node)
3756 if (flag_syntax_only)
3758 if (TREE_CODE (node) == CALL_EXPR
3759 || (TREE_CODE (node) == COMPOUND_EXPR
3760 && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
3761 && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
3766 /* Account for wrapped around ctors. */
3767 if (TREE_CODE (node) == COMPOUND_EXPR)
3768 call = TREE_OPERAND (node, 0);
3772 nargs = call_expr_nargs (call);
3774 /* This reverses the evaluation order. This is a desired effect. */
3775 for (i = 0, cmp = NULL_TREE; i < nargs; i++)
3777 tree arg = CALL_EXPR_ARG (call, i);
3778 /* Promote types smaller than integer. This is required by
3780 tree type = TREE_TYPE (arg);
/* Widen sub-int integral arguments when the target promotes
   prototypes, so the outgoing value matches what the callee expects.  */
3782 if (targetm.calls.promote_prototypes (type)
3783 && INTEGRAL_TYPE_P (type)
3784 && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
3785 TYPE_SIZE (integer_type_node)))
3786 arg = fold_convert (integer_type_node, arg);
/* Recursively order the argument's own sub-expressions, then wrap the
   result in a SAVE_EXPR so the later CALL_EXPR-order evaluation reuses
   the value computed here.  */
3788 saved = save_expr (force_evaluation_order (arg));
3789 cmp = (cmp == NULL_TREE ? saved :
3790 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));
3792 CALL_EXPR_ARG (call, i) = saved;
3795 if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
3796 TREE_SIDE_EFFECTS (cmp) = 1;
/* Sequence the saved-argument evaluations in front of NODE itself.  */
3800 cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
3801 if (TREE_TYPE (cmp) != void_type_node)
3802 cmp = save_expr (cmp);
3803 TREE_SIDE_EFFECTS (cmp) = 1;
3810 /* Build a node to represent empty statements and blocks. */
/* Thin wrapper: delegates to the generic empty-statement builder at
   the current input location.  */
3813 build_java_empty_stmt (void)
3815 tree t = build_empty_stmt (input_location);
3819 /* Promote all args of integral type before generating any code. */
3822 promote_arguments (void)
/* Walk the current function's parameters, tracking the JVM local slot
   index I in parallel.  */
3826 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3827 arg != NULL_TREE; arg = TREE_CHAIN (arg), i++)
3829 tree arg_type = TREE_TYPE (arg);
/* Sub-int integral parameters are widened into an int-typed local
   copy for slot I, assigned up front.  */
3830 if (INTEGRAL_TYPE_P (arg_type)
3831 && TYPE_PRECISION (arg_type) < 32)
3833 tree copy = find_local_variable (i, integer_type_node, -1);
3834 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3836 fold_convert (integer_type_node, arg)));
/* Wide (two-slot) types presumably advance the slot index an extra
   step; the increment itself is elided from this excerpt -- confirm.  */
3838 if (TYPE_IS_WIDE (arg_type))
3843 /* Create a local variable that points to the constant pool. */
3846 cache_cpool_data_ref (void)
3851 tree d = build_constant_data_ref (flag_indirect_classes);
3852 tree cpool_ptr = build_decl (input_location, VAR_DECL, NULL_TREE,
3853 build_pointer_type (TREE_TYPE (d)));
3854 java_add_local_var (cpool_ptr);
/* The pointer is initialized once and never changes; marking it
   TREE_CONSTANT lets later references be treated as invariant.  */
3855 TREE_CONSTANT (cpool_ptr) = 1;
3857 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3858 cpool_ptr, build_address_of (d)));
/* Cache the dereference on the class being emitted; it cannot trap
   since the pointer always refers to valid constant-pool data.  */
3859 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3860 TREE_THIS_NOTRAP (cpool) = 1;
3861 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3865 #include "gt-java-expr.h"