1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
25 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
29 #include "coretypes.h"
36 #include "java-tree.h"
38 #include "java-opcodes.h"
40 #include "java-except.h"
45 #include "tree-gimple.h"
/* Forward declarations for the static helpers defined below; they
   implement expansion of the individual JVM bytecodes.  */
48 static void flush_quick_stack (void);
49 static void push_value (tree);
50 static tree pop_value (tree);
51 static void java_stack_swap (void);
52 static void java_stack_dup (int, int);
53 static void build_java_athrow (tree);
54 static void build_java_jsr (int, int);
55 static void build_java_ret (tree);
56 static void expand_java_multianewarray (tree, int);
57 static void expand_java_arraystore (tree);
58 static void expand_java_arrayload (tree);
59 static void expand_java_array_length (void);
60 static tree build_java_monitor (tree, tree);
61 static void expand_java_pushc (int, tree);
62 static void expand_java_return (tree);
63 static void expand_load_internal (int, tree, int);
64 static void expand_java_NEW (tree);
65 static void expand_java_INSTANCEOF (tree);
66 static void expand_java_CHECKCAST (tree);
67 static void expand_iinc (unsigned int, int, int);
68 static void expand_java_binop (tree, enum tree_code);
69 static void note_label (int, int);
70 static void expand_compare (enum tree_code, tree, tree, int);
71 static void expand_test (enum tree_code, tree, int);
72 static void expand_cond (enum tree_code, tree, int);
73 static void expand_java_goto (int);
74 static tree expand_java_switch (tree, int);
75 static void expand_java_add_case (tree, int, int);
76 static tree pop_arguments (tree);
77 static void expand_invoke (int, int, int);
78 static void expand_java_field_op (int, int, int);
79 static void java_push_constant_from_pool (struct JCF *, int);
80 static void java_stack_pop (int);
81 static tree build_java_throw_out_of_bounds_exception (tree);
82 static tree build_java_check_indexed_type (tree, tree);
83 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
84 static void promote_arguments (void);
85 static void cache_cpool_data_ref (void);
/* Operand type for each bytecode opcode, indexed by opcode number.
   Only the single-slot load (21-25) and store (54-58) opcodes are
   initialized; see init_expr_processing below.  */
87 static GTY(()) tree operand_type[59];
/* Cached identifier nodes used when building field/method references.  */
89 static GTY(()) tree methods_ident;
90 static GTY(()) tree ncode_ident;
91 tree dtable_ident = NULL_TREE;
93 /* Set to nonzero value in order to emit class initialization code
94 before static field references. */
95 int always_initialize_class_p = 0;
97 /* We store the stack state in two places:
98 Within a basic block, we use the quick_stack, which is a
99 pushdown list (TREE_LISTs) of expression nodes.
100 This is the top part of the stack; below that we use find_stack_slot.
101 At the end of a basic block, the quick_stack must be flushed
102 to the stack slot array (as handled by find_stack_slot).
103 Using quick_stack generates better code (especially when
104 compiled without optimization), because we do not have to
105 explicitly store and load trees to temporary variables.
107 If a variable is on the quick stack, it means the value of variable
108 when the quick stack was last flushed. Conceptually, flush_quick_stack
109 saves all the quick_stack elements in parallel. However, that is
110 complicated, so it actually saves them (i.e. copies each stack value
111 to its home virtual register) from low indexes. This allows a quick_stack
112 element at index i (counting from the bottom of the stack) to reference
113 slot virtuals for registers that are >= i, but not those that are deeper.
114 This convention makes most operations easier. For example iadd works
115 even when the stack contains (reg[0], reg[1]): It results in the
116 stack containing (reg[0]+reg[1]), which is OK. However, some stack
117 operations are more complicated. For example dup given a stack
118 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
119 the convention, since stack value 1 would refer to a register with
120 lower index (reg[0]), which flush_quick_stack does not safely handle.
121 So dup cannot just add an extra element to the quick_stack, but iadd can.
/* The quick stack: a pushdown list of TREE_LIST nodes holding the
   not-yet-flushed stack values of the current basic block (see the
   long comment above).  */
124 static GTY(()) tree quick_stack;
126 /* A free-list of unused permanent TREE_LIST nodes. */
127 static GTY((deletable)) tree tree_list_free_list;
129 /* The physical memory page size used in this computer. See
130 build_field_ref(). */
131 static GTY(()) tree page_size;
133 /* The stack pointer of the Java virtual machine.
134 This does include the size of the quick_stack. */
/* Line-number table of the current method, as read from the class file.  */
138 const unsigned char *linenumber_table;
139 int linenumber_count;
141 /* Largest pc so far in this method that has been passed to lookup_label. */
142 int highest_label_pc_this_method = -1;
144 /* Base value for this method to add to pc to get generated label. */
145 int start_label_pc_this_method = 0;
/* One-time initialization: record the operand type of the one-slot
   load (opcodes 21-25) and store (opcodes 54-58) bytecodes.  */
148 init_expr_processing (void)
150 operand_type[21] = operand_type[54] = int_type_node;
151 operand_type[22] = operand_type[55] = long_type_node;
152 operand_type[23] = operand_type[56] = float_type_node;
153 operand_type[24] = operand_type[57] = double_type_node;
154 operand_type[25] = operand_type[58] = ptr_type_node;
/* Convert EXPR to a boolean-valued truth expression.  Comparison and
   TRUTH_* codes are already truth values and pass through; constants
   fold to boolean_{false,true}_node; other codes are wrapped in a
   compare against boolean_false_node.  */
158 java_truthvalue_conversion (tree expr)
160 /* It is simpler and generates better code to have only TRUTH_*_EXPR
161 or comparison expressions as truth values at this level.
163 This function should normally be identity for Java. */
165 switch (TREE_CODE (expr))
167 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
168 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
169 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
170 case ORDERED_EXPR: case UNORDERED_EXPR:
171 case TRUTH_ANDIF_EXPR:
172 case TRUTH_ORIF_EXPR:
/* Integer constant: nonzero means true.  */
181 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
/* Real constant: nonzero means true.  */
184 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
186 /* are these legal? XXX JH */
190 /* These don't change whether an object is nonzero or zero. */
191 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
194 /* Distribute the conversion into the arms of a COND_EXPR. */
195 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
196 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
197 java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
200 /* If this is widening the argument, we can ignore it. */
201 if (TYPE_PRECISION (TREE_TYPE (expr))
202 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
203 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
204 /* fall through to default */
207 return fold_build2 (NE_EXPR, boolean_type_node,
208 expr, boolean_false_node);
212 /* Save any stack slots that happen to be in the quick_stack into their
213 home virtual register slots.
215 The copy order is from low stack index to high, to support the invariant
216 that the expression for a slot may contain decls for stack slots with
217 higher (or the same) index, but not lower. */
220 flush_quick_stack (void)
222 int stack_index = stack_pointer;
223 tree prev, cur, next;
225 /* First reverse the quick_stack, and count the number of slots it has. */
226 for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next)
228 next = TREE_CHAIN (cur);
229 TREE_CHAIN (cur) = prev;
/* Wide (two-slot) values occupy two stack indexes.  */
231 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (TREE_VALUE (cur)));
235 while (quick_stack != NULL_TREE)
238 tree node = quick_stack, type;
239 quick_stack = TREE_CHAIN (node);
/* Recycle the TREE_LIST cell onto the free list before using its value.  */
240 TREE_CHAIN (node) = tree_list_free_list;
241 tree_list_free_list = node;
242 node = TREE_VALUE (node);
243 type = TREE_TYPE (node);
/* Store the value into its home stack-slot decl.  */
245 decl = find_stack_slot (stack_index, type);
247 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (node), decl, node));
248 stack_index += 1 + TYPE_IS_WIDE (type);
252 /* Push TYPE on the type stack.
253 Return nonzero on success, zero on overflow of the method's
declared maximum stack depth. */
256 push_type_0 (tree type)
259 type = promote_type (type);
260 n_words = 1 + TYPE_IS_WIDE (type);
/* Refuse to exceed the stack depth declared for this method.  */
261 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
263 /* Allocate decl for this variable now, so we get a temporary that
264 survives the whole method. */
265 find_stack_slot (stack_pointer, type);
266 stack_type_map[stack_pointer++] = type;
/* Mark the second slot of a wide value with the TYPE_SECOND sentinel.  */
268 while (--n_words >= 0)
269 stack_type_map[stack_pointer++] = TYPE_SECOND;
/* Push TYPE on the type stack; overflow is a fatal internal error
   (see push_type_0 for the checking variant).  */
274 push_type (tree type)
276 int r = push_type_0 (type);
/* Push the tree VALUE on the quick stack, promoting sub-int integral
   values to int first, and recycling TREE_LIST cells from the
   free list when possible.  */
281 push_value (tree value)
283 tree type = TREE_TYPE (value);
/* JVM operand stack holds no types narrower than int.  */
284 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
286 type = promote_type (type);
287 value = convert (type, value);
290 if (tree_list_free_list == NULL_TREE)
291 quick_stack = tree_cons (NULL_TREE, value, quick_stack);
/* Reuse a cell from the free list instead of allocating.  */
294 tree node = tree_list_free_list;
295 tree_list_free_list = TREE_CHAIN (tree_list_free_list);
296 TREE_VALUE (node) = value;
297 TREE_CHAIN (node) = quick_stack;
300 /* If the value has a side effect, then we need to evaluate it
301 whether or not the result is used. If the value ends up on the
302 quick stack and is then popped, this won't happen -- so we flush
303 the quick stack. It is safest to simply always flush, though,
304 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
305 the latter we may need to strip conversions. */
306 flush_quick_stack ();
309 /* Pop a type from the type stack.
310 TYPE is the expected type. Return the actual type, which must be
convertible to TYPE.
312 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
315 pop_type_0 (tree type, char **messagep)
320 if (TREE_CODE (type) == RECORD_TYPE)
321 type = promote_type (type);
322 n_words = 1 + TYPE_IS_WIDE (type);
323 if (stack_pointer < n_words)
325 *messagep = xstrdup ("stack underflow");
/* The extra slot of a wide value must carry the TYPE_SECOND sentinel
   (void_type_node); anything else means the stack is corrupt.  */
328 while (--n_words > 0)
330 if (stack_type_map[--stack_pointer] != void_type_node)
332 *messagep = xstrdup ("Invalid multi-word value on type stack");
336 t = stack_type_map[--stack_pointer];
337 if (type == NULL_TREE || t == type)
/* A TREE_LIST on the type stack encodes a set of possible types;
   TREE_PURPOSE holds the candidate to check.  */
339 if (TREE_CODE (t) == TREE_LIST)
343 tree tt = TREE_PURPOSE (t);
344 if (! can_widen_reference_to (tt, type))
/* Any two small (<= 32 bit) integral types are interchangeable here.  */
354 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
355 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
357 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
359 /* If the expected type we've been passed is object or ptr
360 (i.e. void*), the caller needs to know the real type. */
361 if (type == ptr_type_node || type == object_ptr_type_node)
364 /* Since the verifier has already run, we know that any
365 types we see will be compatible. In BC mode, this fact
366 may be checked at runtime, but if that is so then we can
367 assume its truth here as well. So, we always succeed
368 here, with the expected type. */
372 if (! flag_verify_invocations && flag_indirect_dispatch
373 && t == object_ptr_type_node)
375 if (type != ptr_type_node)
376 warning (0, "need to insert runtime check for %s",
377 xstrdup (lang_printable_name (type, 0)));
381 /* lang_printable_name uses a static buffer, so we must save the result
382 from calling it the first time. */
385 char *temp = xstrdup (lang_printable_name (type, 0));
386 /* If the stack contains a multi-word type, keep popping the stack until
387 the real type is found. */
388 while (t == void_type_node)
389 t = stack_type_map[--stack_pointer];
390 *messagep = concat ("expected type '", temp,
391 "' but stack contains '", lang_printable_name (t, 0),
398 /* Pop a type from the type stack.
399 TYPE is the expected type. Return the actual type, which must be
400 convertible to TYPE, otherwise call error. */
405 char *message = NULL;
406 type = pop_type_0 (type, &message);
/* pop_type_0 reports failure through MESSAGE; turn it into an error.  */
409 error ("%s", message);
416 /* Return true if two type assertions are equal. */
419 type_assertion_eq (const void * k1_p, const void * k2_p)
421 const type_assertion k1 = *(const type_assertion *)k1_p;
422 const type_assertion k2 = *(const type_assertion *)k2_p;
/* NOTE(review): only assertion_code and op2 are compared in this
   extract; an op1 comparison is expected as well (type_assertion_hash
   below hashes op1) — confirm against the complete source.  */
423 return (k1.assertion_code == k2.assertion_code
425 && k1.op2 == k2.op2);
428 /* Hash a type assertion. */
431 type_assertion_hash (const void *p)
433 const type_assertion *k_p = p;
/* Fold all three fields into the hash so it is consistent with
   type_assertion_eq.  */
434 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
435 k_p->assertion_code, 0);
436 hash = iterative_hash (&k_p->op1, sizeof k_p->op1, hash);
437 return iterative_hash (&k_p->op2, sizeof k_p->op2, hash);
440 /* Add an entry to the type assertion table for the given class.
441 CLASS is the class for which this assertion will be evaluated by the
442 runtime during loading/initialization.
443 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
444 OP1 and OP2 are the operands. The tree type of these arguments may be
445 specific to each assertion_code. */
448 add_type_assertion (tree class, int assertion_code, tree op1, tree op2)
450 htab_t assertions_htab;
454 assertions_htab = TYPE_ASSERTIONS (class);
455 if (assertions_htab == NULL)
457 assertions_htab = htab_create_ggc (7, type_assertion_hash,
458 type_assertion_eq, NULL);
/* NOTE(review): the table is read from TYPE_ASSERTIONS (class) above
   but stored into TYPE_ASSERTIONS (current_class) here; these agree
   only when class == current_class — verify this is intentional.  */
459 TYPE_ASSERTIONS (current_class) = assertions_htab;
462 as.assertion_code = assertion_code;
466 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
468 /* Don't add the same assertion twice. */
472 *as_pp = ggc_alloc (sizeof (type_assertion));
473 **(type_assertion **)as_pp = as;
477 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
478 Handles array types and interfaces. */
481 can_widen_reference_to (tree source_type, tree target_type)
/* Everything widens to Object; the null/void* type widens to anything.  */
483 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
486 /* Get rid of pointers */
487 if (TREE_CODE (source_type) == POINTER_TYPE)
488 source_type = TREE_TYPE (source_type);
489 if (TREE_CODE (target_type) == POINTER_TYPE)
490 target_type = TREE_TYPE (target_type);
492 if (source_type == target_type)
495 /* FIXME: This is very pessimistic, in that it checks everything,
496 even if we already know that the types are compatible. If we're
497 to support full Java class loader semantics, we need this.
498 However, we could do something more optimal. */
499 if (! flag_verify_invocations)
/* BC mode: record an assertion for the runtime to check at load time.  */
501 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
502 source_type, target_type);
505 warning (0, "assert: %s is assign compatible with %s",
506 xstrdup (lang_printable_name (target_type, 0)),
507 xstrdup (lang_printable_name (source_type, 0)));
508 /* Punt everything to runtime. */
512 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
518 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
520 HOST_WIDE_INT source_length, target_length;
521 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
523 /* An array implements Cloneable and Serializable. */
524 tree name = DECL_NAME (TYPE_NAME (target_type));
525 return (name == java_lang_cloneable_identifier_node
526 || name == java_io_serializable_identifier_node);
528 target_length = java_array_type_length (target_type);
529 if (target_length >= 0)
531 source_length = java_array_type_length (source_type);
532 if (source_length != target_length)
/* Both are arrays: compare element types, recursing for
   reference elements.  */
535 source_type = TYPE_ARRAY_ELEMENT (source_type);
536 target_type = TYPE_ARRAY_ELEMENT (target_type);
537 if (source_type == target_type)
539 if (TREE_CODE (source_type) != POINTER_TYPE
540 || TREE_CODE (target_type) != POINTER_TYPE)
542 return can_widen_reference_to (source_type, target_type);
546 int source_depth = class_depth (source_type);
547 int target_depth = class_depth (target_type);
549 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
552 warning (0, "assert: %s is assign compatible with %s",
553 xstrdup (lang_printable_name (target_type, 0)),
554 xstrdup (lang_printable_name (source_type, 0)));
558 /* class_depth can return a negative depth if an error occurred */
559 if (source_depth < 0 || target_depth < 0)
562 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
564 /* target_type is OK if source_type or source_type ancestors
565 implement target_type. We handle multiple sub-interfaces */
566 tree binfo, base_binfo;
569 for (binfo = TYPE_BINFO (source_type), i = 0;
570 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
571 if (can_widen_reference_to
572 (BINFO_TYPE (base_binfo), target_type))
/* Walk up the superclass chain until the depths match, then the
   classes must be identical for the widening to be valid.  */
579 for ( ; source_depth > target_depth; source_depth--)
582 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
584 return source_type == target_type;
/* Pop a value of (expected) TYPE.  If it is still on the quick stack,
   return the saved expression and recycle its TREE_LIST cell;
   otherwise return the home stack-slot decl.  */
590 pop_value (tree type)
592 type = pop_type (type);
595 tree node = quick_stack;
596 quick_stack = TREE_CHAIN (quick_stack);
597 TREE_CHAIN (node) = tree_list_free_list;
598 tree_list_free_list = node;
599 node = TREE_VALUE (node);
/* Not on the quick stack: read it from its home slot.  */
603 return find_stack_slot (stack_pointer, promote_type (type));
607 /* Pop and discard the top COUNT stack slots. */
610 java_stack_pop (int count)
616 gcc_assert (stack_pointer != 0);
618 type = stack_type_map[stack_pointer - 1];
/* A TYPE_SECOND sentinel means the top slot is the second half of a
   wide value; pop using the real type one slot down.  */
619 if (type == TYPE_SECOND)
622 gcc_assert (stack_pointer != 1 && count > 0);
624 type = stack_type_map[stack_pointer - 2];
626 val = pop_value (type);
631 /* Implement the 'swap' operator (to swap two top stack slots). */
634 java_stack_swap (void)
/* swap is only defined for two single-slot, known-type values.  */
640 if (stack_pointer < 2
641 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_UNKNOWN
642 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_UNKNOWN
643 || type1 == TYPE_SECOND || type2 == TYPE_SECOND
644 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
645 /* Bad stack swap. */
649 flush_quick_stack ();
650 decl1 = find_stack_slot (stack_pointer - 1, type1);
651 decl2 = find_stack_slot (stack_pointer - 2, type2);
/* Three-way exchange through a fresh temporary.  */
652 temp = build_decl (VAR_DECL, NULL_TREE, type1);
653 java_add_local_var (temp);
654 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
655 java_add_stmt (build2 (MODIFY_EXPR, type2,
656 find_stack_slot (stack_pointer - 1, type2),
658 java_add_stmt (build2 (MODIFY_EXPR, type1,
659 find_stack_slot (stack_pointer - 2, type1),
661 stack_type_map[stack_pointer - 1] = type2;
662 stack_type_map[stack_pointer - 2] = type1;
/* Implement the dup/dup_x1/dup_x2/dup2/dup2_x1/dup2_x2 family:
   duplicate the top SIZE stack slots, inserting the copy OFFSET
   slots below the original.  */
666 java_stack_dup (int size, int offset)
668 int low_index = stack_pointer - size - offset;
671 error ("stack underflow - dup* operation");
673 flush_quick_stack ();
675 stack_pointer += size;
676 dst_index = stack_pointer;
/* Shift/copy slots downward from the top so sources are read before
   they are overwritten.  */
678 for (dst_index = stack_pointer; --dst_index >= low_index; )
681 int src_index = dst_index - size;
/* Below the shifted region, copy from the (wrapped) duplicated slots.  */
682 if (src_index < low_index)
683 src_index = dst_index + size + offset;
684 type = stack_type_map [src_index];
685 if (type == TYPE_SECOND)
687 /* Dup operation splits 64-bit number. */
688 gcc_assert (src_index > low_index);
690 stack_type_map[dst_index] = type;
/* Move both halves of the wide value together.  */
691 src_index--; dst_index--;
692 type = stack_type_map[src_index];
693 gcc_assert (TYPE_IS_WIDE (type));
696 gcc_assert (! TYPE_IS_WIDE (type));
698 if (src_index != dst_index)
700 tree src_decl = find_stack_slot (src_index, type);
701 tree dst_decl = find_stack_slot (dst_index, type);
704 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
705 stack_type_map[dst_index] = type;
710 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
stack afterwards, since control does not return.  */
714 build_java_athrow (tree node)
718 call = build_call_nary (void_type_node,
719 build_address_of (throw_node),
721 TREE_SIDE_EFFECTS (call) = 1;
722 java_add_stmt (call);
/* The throw never returns; the whole operand stack is dead.  */
723 java_stack_pop (stack_pointer);
726 /* Implementation for jsr/ret */
729 build_java_jsr (int target_pc, int return_pc)
731 tree where = lookup_label (target_pc);
732 tree ret = lookup_label (return_pc);
/* Push the return address so the subroutine's `ret' can find it.  */
733 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
734 push_value (ret_label);
735 flush_quick_stack ();
736 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
738 /* Do not need to emit the label here. We noted the existence of the
739 label as a jump target in note_instructions; we'll emit the label
740 for real at the beginning of the expand_byte_code loop. */
744 build_java_ret (tree location)
746 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
749 /* Implementation of operations on array: new, load, store, length */
752 decode_newarray_type (int atype)
756 case 4: return boolean_type_node;
757 case 5: return char_type_node;
758 case 6: return float_type_node;
759 case 7: return double_type_node;
760 case 8: return byte_type_node;
761 case 9: return short_type_node;
762 case 10: return int_type_node;
763 case 11: return long_type_node;
764 default: return NULL_TREE;
768 /* Map primitive type to the code used by OPCODE_newarray. */
771 encode_newarray_type (tree type)
773 if (type == boolean_type_node)
775 else if (type == char_type_node)
777 else if (type == float_type_node)
779 else if (type == double_type_node)
781 else if (type == byte_type_node)
783 else if (type == short_type_node)
785 else if (type == int_type_node)
787 else if (type == long_type_node)
793 /* Build a call to _Jv_ThrowBadArrayIndex(), the
794 ArrayIndexOfBoundsException exception handler. */
797 build_java_throw_out_of_bounds_exception (tree index)
799 tree node = build_call_nary (int_type_node,
800 build_address_of (soft_badarrayindex_node),
802 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
806 /* Return the length of an array. Doesn't perform any checking on the nature
807 or value of the array NODE. May be used to implement some bytecodes. */
810 build_java_array_length_access (tree node)
812 tree type = TREE_TYPE (node);
813 tree array_type = TREE_TYPE (type);
814 HOST_WIDE_INT length;
816 if (!is_array_type_p (type))
818 /* With the new verifier, we will see an ordinary pointer type
819 here. In this case, we just use an arbitrary array type. */
820 array_type = build_java_array_type (object_ptr_type_node, -1);
821 type = promote_type (array_type);
/* If the length is a known compile-time constant, emit it directly.  */
824 length = java_array_type_length (type);
826 return build_int_cst (NULL_TREE, length);
/* Otherwise read the `length' field of the array object, with an
   optional null-reference check.  */
828 node = build3 (COMPONENT_REF, int_type_node,
829 build_java_indirect_ref (array_type, node,
830 flag_check_references),
831 lookup_field (&array_type, get_identifier ("length")),
833 IS_ARRAY_LENGTH_ACCESS (node) = 1;
837 /* Optionally checks a reference against the NULL pointer. ARG1: the
838 expr, ARG2: we should check the reference. Don't generate extra
839 checks if we're not generating code. */
842 java_check_reference (tree expr, int check)
844 if (!flag_syntax_only && check)
/* SAVE_EXPR so the reference is evaluated once for both the test
   and the use.  */
846 expr = save_expr (expr);
847 expr = build3 (COND_EXPR, TREE_TYPE (expr),
848 build2 (EQ_EXPR, boolean_type_node,
849 expr, null_pointer_node),
/* On null: call the _Jv_ThrowNullPointerException trampoline.  */
850 build_call_nary (void_type_node,
851 build_address_of (soft_nullpointer_node),
859 /* Reference an object: just like an INDIRECT_REF, but with checking. */
862 build_java_indirect_ref (tree type, tree expr, int check)
865 t = java_check_reference (expr, check);
866 t = convert (build_pointer_type (type), t);
867 return build1 (INDIRECT_REF, type, t);
870 /* Implement array indexing (either as l-value or r-value).
871 Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
872 Optionally performs bounds checking and/or test to NULL.
873 At this point, ARRAY should have been verified as an array. */
876 build_java_arrayaccess (tree array, tree type, tree index)
878 tree node, throw = NULL_TREE;
881 tree array_type = TREE_TYPE (TREE_TYPE (array));
882 tree size_exp = fold_convert (sizetype, size_in_bytes (type));
884 if (!is_array_type_p (TREE_TYPE (array)))
886 /* With the new verifier, we will see an ordinary pointer type
887 here. In this case, we just use the correct array type. */
888 array_type = build_java_array_type (type, -1);
891 if (flag_bounds_check)
/* Bounds check as a single unsigned compare:
894 * (unsigned jint) INDEX >= (unsigned jint) LEN
895 * && throw ArrayIndexOutOfBoundsException.
896 * Note this is equivalent to and more efficient than:
897 * INDEX < 0 || INDEX >= LEN && throw ... */
899 tree len = convert (unsigned_int_type_node,
900 build_java_array_length_access (array));
901 test = fold_build2 (GE_EXPR, boolean_type_node,
902 convert (unsigned_int_type_node, index),
/* If folding proved the test always false, omit the throw.  */
904 if (! integer_zerop (test))
906 throw = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
907 build_java_throw_out_of_bounds_exception (index));
908 /* allows expansion within COMPOUND */
909 TREE_SIDE_EFFECTS( throw ) = 1;
913 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
914 to have the bounds check evaluated first. */
915 if (throw != NULL_TREE)
916 index = build2 (COMPOUND_EXPR, int_type_node, throw, index);
918 data_field = lookup_field (&array_type, get_identifier ("data"));
920 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
921 build_java_indirect_ref (array_type, array,
922 flag_check_references),
923 data_field, NULL_TREE);
925 /* Take the address of the data field and convert it to a pointer to
the element type. */
927 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
929 /* Multiply the index by the size of an element to obtain a byte
930 offset. Convert the result to a pointer to the element type. */
931 index = build2 (MULT_EXPR, sizetype,
932 fold_convert (sizetype, index),
935 /* Sum the byte offset and the address of the data field. */
936 node = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (node), node, index);
/* The result dereferences the computed element address, i.e.
940 *((&array->data) + index*size_exp)
*/
943 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
946 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
947 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
948 determine that no check is required. */
951 build_java_arraystore_check (tree array, tree object)
953 tree check, element_type, source;
954 tree array_type_p = TREE_TYPE (array);
955 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
957 if (! flag_verify_invocations)
959 /* With the new verifier, we don't track precise types. FIXME:
960 performance regression here. */
961 element_type = TYPE_NAME (object_type_node);
965 gcc_assert (is_array_type_p (array_type_p));
967 /* Get the TYPE_DECL for ARRAY's element type. */
969 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
972 gcc_assert (TREE_CODE (element_type) == TYPE_DECL
973 && TREE_CODE (object_type) == TYPE_DECL);
/* Store checks disabled: emit a no-op wrapper.  */
975 if (!flag_store_check)
976 return build1 (NOP_EXPR, array_type_p, array);
978 /* No check is needed if the element type is final. Also check that
979 element_type matches object_type, since in the bytecode
980 compilation case element_type may be the actual element type of
981 the array rather than its declared type. However, if we're doing
982 indirect dispatch, we can't do the `final' optimization. */
983 if (element_type == object_type
984 && ! flag_indirect_dispatch
985 && CLASS_FINAL (element_type))
986 return build1 (NOP_EXPR, array_type_p, array);
988 /* OBJECT might be wrapped by a SAVE_EXPR. */
989 if (TREE_CODE (object) == SAVE_EXPR)
990 source = TREE_OPERAND (object, 0);
994 /* Avoid the check if OBJECT was just loaded from the same array. */
995 if (TREE_CODE (source) == ARRAY_REF)
/* Peel back the array-access tree built by build_java_arrayaccess
   to reach the underlying array decl.  */
998 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
999 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
1000 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1001 if (TREE_CODE (source) == SAVE_EXPR)
1002 source = TREE_OPERAND (source, 0);
1005 if (TREE_CODE (target) == SAVE_EXPR)
1006 target = TREE_OPERAND (target, 0);
1008 if (source == target)
1009 return build1 (NOP_EXPR, array_type_p, array);
1012 /* Build an invocation of _Jv_CheckArrayStore */
1013 check = build_call_nary (void_type_node,
1014 build_address_of (soft_checkarraystore_node),
1016 TREE_SIDE_EFFECTS (check) = 1;
1021 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1022 ARRAY_NODE. This function is used to retrieve something less vague than
1023 a pointer type when indexing the first dimension of something like [[<t>.
1024 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
1025 return unchanged. */
1028 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1031 /* We used to check to see if ARRAY_NODE really had array type.
1032 However, with the new verifier, this is not necessary, as we know
1033 that the object will be an array of the appropriate type. */
1035 return indexed_type;
1038 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1039 called with an integer code (the type of array to create), and the length
1040 of the array to create. */
1043 build_newarray (int atype_value, tree length)
1047 tree prim_type = decode_newarray_type (atype_value);
1049 = build_java_array_type (prim_type,
1050 host_integerp (length, 0) == INTEGER_CST
1051 ? tree_low_cst (length, 0) : -1);
1053 /* Pass a reference to the primitive type class and save the runtime
1055 type_arg = build_class_ref (prim_type);
1057 return build_call_nary (promote_type (type),
1058 build_address_of (soft_newarray_node),
1059 2, type_arg, length);
1062 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1063 of the dimension. */
1066 build_anewarray (tree class_type, tree length)
/* Use the constant length for the array type when known, else -1.  */
1069 = build_java_array_type (class_type,
1070 host_integerp (length, 0)
1071 ? tree_low_cst (length, 0) : -1);
/* Call _Jv_NewObjectArray (class, length, ...) at runtime.  */
1073 return build_call_nary (promote_type (type),
1074 build_address_of (soft_anewarray_node),
1077 build_class_ref (class_type),
1081 /* Return a node the evaluates 'new TYPE[LENGTH]'. */
1084 build_new_array (tree type, tree length)
1086 if (JPRIMITIVE_TYPE_P (type))
1087 return build_newarray (encode_newarray_type (type), length);
1089 return build_anewarray (TREE_TYPE (type), length);
1092 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1093 class pointer, a number of dimensions and the matching number of
1094 dimensions. The argument list is NULL terminated. */
1097 expand_java_multianewarray (tree class_type, int ndim)
/* Trailing NULL terminates the variadic dimension list.  */
1100 tree args = build_tree_list( NULL_TREE, null_pointer_node );
/* Pop the NDIM dimension sizes off the operand stack (innermost
   dimension was pushed last, so the list ends up in declaration order).  */
1102 for( i = 0; i < ndim; i++ )
1103 args = tree_cons (NULL_TREE, pop_value (int_type_node), args);
1105 args = tree_cons (NULL_TREE,
1106 build_class_ref (class_type),
1107 tree_cons (NULL_TREE,
1108 build_int_cst (NULL_TREE, ndim),
/* Push the resulting array reference back on the operand stack.  */
1111 push_value (build_call_list (promote_type (class_type),
1112 build_address_of (soft_multianewarray_node),
1116 /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
1117 ARRAY is an array type. May expand some bound checking and NULL
1118 pointer checking. RHS_TYPE_NODE we are going to store. In the case
1119 of the CHAR/BYTE/BOOLEAN SHORT, the type popped of the stack is an
1120 INT. In those cases, we make the conversion.
1122 If ARRAY is a reference type, the assignment is checked at run-time
1123 to make sure that the RHS can be assigned to the array element
1124 type. It is not necessary to generate this code if ARRAY is final. */
1127 expand_java_arraystore (tree rhs_type_node)
/* Sub-int integral values live on the JVM stack as int.  */
1129 tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
1130 && TYPE_PRECISION (rhs_type_node) <= 32) ?
1131 int_type_node : rhs_type_node);
1132 tree index = pop_value (int_type_node);
1133 tree array_type, array, temp, access;
1135 /* If we're processing an `aaload' we might as well just pick
Object as the element type. */
1137 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1139 array_type = build_java_array_type (object_ptr_type_node, -1);
1140 rhs_type_node = object_ptr_type_node;
1143 array_type = build_java_array_type (rhs_type_node, -1);
1145 array = pop_value (array_type);
1146 array = build1 (NOP_EXPR, promote_type (array_type), array);
1148 rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
1150 flush_quick_stack ();
/* SAVE_EXPRs: both are referenced more than once below.  */
1152 index = save_expr (index);
1153 array = save_expr (array);
1155 /* We want to perform the bounds check (done by
1156 build_java_arrayaccess) before the type check (done by
1157 build_java_arraystore_check). So, we call build_java_arrayaccess
1158 -- which returns an ARRAY_REF lvalue -- and we then generate code
1159 to stash the address of that lvalue in a temp. Then we call
1160 build_java_arraystore_check, and finally we generate a
1161 MODIFY_EXPR to set the array element. */
1163 access = build_java_arrayaccess (array, rhs_type_node, index);
1164 temp = build_decl (VAR_DECL, NULL_TREE,
1165 build_pointer_type (TREE_TYPE (access)));
1166 java_add_local_var (temp);
1167 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
1169 build_fold_addr_expr (access)));
/* Reference stores need the runtime ArrayStoreException check.  */
1171 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1173 tree check = build_java_arraystore_check (array, rhs_node);
1174 java_add_stmt (check);
/* Finally store through the stashed element address.  */
1177 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
1178 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
1182 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1183 sure that LHS is an array type. May expand some bound checking and NULL
1185 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1186 BOOLEAN/SHORT, we push a promoted type back to the stack.
1190 expand_java_arrayload (tree lhs_type_node)
/* Operand order on the JVM stack is array then index, so the index is
   popped first.  */
1193 tree index_node = pop_value (int_type_node);
1197 /* If we're processing an `aaload' we might as well just pick
1199 if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1201 array_type = build_java_array_type (object_ptr_type_node, -1);
1202 lhs_type_node = object_ptr_type_node;
1205 array_type = build_java_array_type (lhs_type_node, -1);
1206 array_node = pop_value (array_type);
1207 array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
/* Both operands may be evaluated more than once by the generated
   bounds/null checks; force single evaluation.  */
1209 index_node = save_expr (index_node);
1210 array_node = save_expr (array_node);
1212 lhs_type_node = build_java_check_indexed_type (array_node,
1214 load_node = build_java_arrayaccess (array_node,
/* Sub-int elements are widened to int before being pushed, per the
   comment above.  */
1217 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1218 load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1219 push_value (load_node);
1222 /* Expands .length. Makes sure that we deal with and array and may expand
1223 a NULL check on the array object. */
1226 expand_java_array_length (void)
/* Pop the array reference, build the (possibly null-checked) length
   access, and push the result back on the operand stack.  */
1228 tree array = pop_value (ptr_type_node);
1229 tree length = build_java_array_length_access (array);
1231 push_value (length);
1234 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1235 either soft_monitorenter_node or soft_monitorexit_node. */
1238 build_java_monitor (tree call, tree object)
/* Build a void call to the chosen runtime monitor routine, passing
   OBJECT (the excerpt omits the trailing argument line).  */
1240 return build_call_nary (void_type_node,
1241 build_address_of (call),
1245 /* Emit code for one of the PUSHC instructions. */
1248 expand_java_pushc (int ival, tree type)
/* Translate the constant IVAL into a tree constant of TYPE:
   aconst_null, iconst/lconst, or fconst/dconst respectively.  */
1251 if (type == ptr_type_node && ival == 0)
1252 value = null_pointer_node;
1253 else if (type == int_type_node || type == long_type_node)
1254 value = build_int_cst (type, ival);
1255 else if (type == float_type_node || type == double_type_node)
/* Convert the integer constant to a real of the requested mode.  */
1258 REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1259 value = build_real (type, x);
/* Emit a RETURN_EXPR for a method returning TYPE; void methods get a
   bare return, others pop the value and assign it to DECL_RESULT.  */
1268 expand_java_return (tree type)
1270 if (type == void_type_node)
1271 java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1274 tree retval = pop_value (type);
1275 tree res = DECL_RESULT (current_function_decl);
1276 retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1278 /* Handle the situation where the native integer type is smaller
1279 than the JVM integer. It can happen for many cross compilers.
1280 The whole if expression just goes away if INT_TYPE_SIZE < 32
1282 if (INT_TYPE_SIZE < 32
1283 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1284 < GET_MODE_SIZE (TYPE_MODE (type))))
1285 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1287 TREE_SIDE_EFFECTS (retval) = 1;
1288 java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
/* Push local variable slot INDEX (of TYPE, at bytecode offset PC)
   onto the quick stack.  */
1293 expand_load_internal (int index, tree type, int pc)
1296 tree var = find_local_variable (index, type, pc);
1298 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1299 on the stack. If there is an assignment to this VAR_DECL between
1300 the stack push and the use, then the wrong code could be
1301 generated. To avoid this we create a new local and copy our
1302 value into it. Then we push this new local on the stack.
1303 Hopefully this all gets optimized out. */
1304 copy = build_decl (VAR_DECL, NULL_TREE, type);
/* Coerce VAR when the slot's declared type and the requested TYPE
   disagree (e.g. differently-typed reuse of the same slot).  */
1305 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1306 && TREE_TYPE (copy) != TREE_TYPE (var))
1307 var = convert (type, var);
1308 java_add_local_var (copy);
1309 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
/* Return &VALUE as an ADDR_EXPR of pointer-to-TREE_TYPE(VALUE).  */
1315 build_address_of (tree value)
1317 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
/* True if TYPE or any of its superclasses declares a finalizer.
   Recurses up the superclass chain; java.lang.Object terminates it.  */
1321 class_has_finalize_method (tree type)
1323 tree super = CLASSTYPE_SUPER (type);
1325 if (super == NULL_TREE)
1326 return false; /* Every class with a real finalizer inherits */
1327 /* from java.lang.Object. */
1329 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
/* Build an allocation call for a new instance of TYPE, choosing the
   finalizer-aware allocator only when the class needs it.  */
1333 java_create_object (tree type)
1335 tree alloc_node = (class_has_finalize_method (type)
1337 : alloc_no_finalizer_node);
1339 return build_call_nary (promote_type (type),
1340 build_address_of (alloc_node),
1341 1, build_class_ref (type));
/* Expand the `new' opcode: load/layout TYPE if needed, then push the
   allocation-call expression onto the operand stack.  */
1345 expand_java_NEW (tree type)
1349 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1350 : alloc_no_finalizer_node);
1351 if (! CLASS_LOADED_P (type))
1352 load_class (type, 1);
1353 safe_layout_class (type);
1354 push_value (build_call_nary (promote_type (type),
1355 build_address_of (alloc_node),
1356 1, build_class_ref (type)));
1359 /* This returns an expression which will extract the class of an
/* ... object VALUE, i.e. VALUE->vtable->class, with an optional
   null-reference check on VALUE.  */
1363 build_get_class (tree value)
1365 tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1366 tree vtable_field = lookup_field (&object_type_node,
1367 get_identifier ("vtable"));
/* tmp = VALUE->vtable (through a possibly-checked indirect ref).  */
1368 tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1369 build_java_indirect_ref (object_type_node, value,
1370 flag_check_references),
1371 vtable_field, NULL_TREE);
/* Result: tmp->class.  */
1372 return build3 (COMPONENT_REF, class_ptr_type,
1373 build1 (INDIRECT_REF, dtable_type, tmp),
1374 class_field, NULL_TREE);
1377 /* This builds the tree representation of the `instanceof' operator.
1378 It tries various tricks to optimize this in cases where types are
1382 build_instanceof (tree value, tree type)
1385 tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1386 tree valtype = TREE_TYPE (TREE_TYPE (value));
1387 tree valclass = TYPE_NAME (valtype);
1390 /* When compiling from bytecode, we need to ensure that TYPE has
1392 if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1394 load_class (type, 1);
1395 safe_layout_class (type);
1396 if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1397 return error_mark_node;
1399 klass = TYPE_NAME (type);
/* Case 1: statically known to succeed for any non-null value.  */
1401 if (type == object_type_node || inherits_from_p (valtype, type))
1403 /* Anything except `null' is an instance of Object. Likewise,
1404 if the object is known to be an instance of the class, then
1405 we only need to check for `null'. */
1406 expr = build2 (NE_EXPR, itype, value, null_pointer_node);
/* Case 2: statically known to fail — the two non-interface,
   non-array classes lie on unrelated branches.  */
1408 else if (flag_verify_invocations
1409 && ! TYPE_ARRAY_P (type)
1410 && ! TYPE_ARRAY_P (valtype)
1411 && DECL_P (klass) && DECL_P (valclass)
1412 && ! CLASS_INTERFACE (valclass)
1413 && ! CLASS_INTERFACE (klass)
1414 && ! inherits_from_p (type, valtype)
1415 && (CLASS_FINAL (klass)
1416 || ! inherits_from_p (valtype, type)))
1418 /* The classes are from different branches of the derivation
1419 tree, so we immediately know the answer. */
1420 expr = boolean_false_node;
/* Case 3: TYPE is final, so a direct class-pointer comparison
   suffices (guarded by a null check).  */
1422 else if (DECL_P (klass) && CLASS_FINAL (klass))
1424 tree save = save_expr (value);
1425 expr = build3 (COND_EXPR, itype,
1426 build2 (NE_EXPR, boolean_type_node,
1427 save, null_pointer_node),
1428 build2 (EQ_EXPR, itype,
1429 build_get_class (save),
1430 build_class_ref (type)),
1431 boolean_false_node);
/* General case: defer to the runtime _Jv_IsInstanceOf helper.  */
1435 expr = build_call_nary (itype,
1436 build_address_of (soft_instanceof_node),
1437 2, value, build_class_ref (type));
1439 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
/* Expand the `instanceof' opcode: pop the object reference, build the
   test, and (in the omitted trailing line) push the result.  */
1444 expand_java_INSTANCEOF (tree type)
1446 tree value = pop_value (object_ptr_type_node);
1447 value = build_instanceof (value, type);
/* Expand the `checkcast' opcode as a call to the _Jv_CheckCast
   runtime helper, which throws on failure.  */
1452 expand_java_CHECKCAST (tree type)
1454 tree value = pop_value (ptr_type_node);
1455 value = build_call_nary (promote_type (type),
1456 build_address_of (soft_checkcast_node),
1457 2, build_class_ref (type), value);
/* Expand the `iinc' opcode: add the immediate IVAL to int local slot
   LOCAL_VAR_INDEX at bytecode offset PC.  */
1462 expand_iinc (unsigned int local_var_index, int ival, int pc)
1464 tree local_var, res;
1465 tree constant_value;
/* Flush first: the local's value may be cached on the quick stack.  */
1467 flush_quick_stack ();
1468 local_var = find_local_variable (local_var_index, int_type_node, pc);
1469 constant_value = build_int_cst (NULL_TREE, ival);
1470 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1471 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
/* Build a call to the runtime soft divide/modulo helper for OP
   (TRUNC_DIV_EXPR or TRUNC_MOD_EXPR) on int or long operands; the
   helpers implement Java's divide-by-zero/overflow semantics.  */
1476 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1479 tree arg1 = convert (type, op1);
1480 tree arg2 = convert (type, op2);
1482 if (type == int_type_node)
1486 case TRUNC_DIV_EXPR:
1487 call = soft_idiv_node;
1489 case TRUNC_MOD_EXPR:
1490 call = soft_irem_node;
1496 else if (type == long_type_node)
1500 case TRUNC_DIV_EXPR:
1501 call = soft_ldiv_node;
1503 case TRUNC_MOD_EXPR:
1504 call = soft_lrem_node;
1512 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
/* Build a tree for the binary operation OP on ARG1/ARG2 of TYPE,
   handling the Java-specific cases (ushr, shift masking, fcmp/lcmp,
   frem, and soft div/mod) before falling through to a plain
   fold_build2.  NOTE(review): the case labels for the first two arms
   (URSHIFT_EXPR and the shift-count masking) are missing from this
   excerpt.  */
1517 build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
/* Unsigned right shift: perform the shift in the unsigned variant of
   TYPE and convert back.  */
1524 tree u_type = unsigned_type_for (type);
1525 arg1 = convert (u_type, arg1);
1526 arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1527 return convert (type, arg1);
/* JVM shifts use only the low log2(width) bits of the count.  */
1531 mask = build_int_cst (NULL_TREE,
1532 TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
1533 arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
1536 case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
1537 case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
1538 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1540 tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1541 boolean_type_node, arg1, arg2);
1542 tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
1543 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1544 ifexp2, integer_zero_node,
1545 op == COMPARE_L_EXPR
1546 ? integer_minus_one_node
1547 : integer_one_node);
1548 return fold_build3 (COND_EXPR, int_type_node, ifexp1,
1549 op == COMPARE_L_EXPR ? integer_one_node
1550 : integer_minus_one_node,
/* lcmp-style three-way compare: -1 / 0 / 1.  */
1554 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1556 tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
1557 tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
1558 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1559 ifexp2, integer_one_node,
1561 return fold_build3 (COND_EXPR, int_type_node,
1562 ifexp1, integer_minus_one_node, second_compare);
1564 case TRUNC_DIV_EXPR:
1565 case TRUNC_MOD_EXPR:
/* Floating-point remainder goes through fmod (always computed in
   double, converting in and out as needed).  */
1566 if (TREE_CODE (type) == REAL_TYPE
1567 && op == TRUNC_MOD_EXPR)
1570 if (type != double_type_node)
1572 arg1 = convert (double_type_node, arg1);
1573 arg2 = convert (double_type_node, arg2);
1575 call = build_call_nary (double_type_node,
1576 build_address_of (soft_fmod_node),
1578 if (type != double_type_node)
1579 call = convert (type, call);
/* Integer div/mod may need the runtime helper for Java semantics.  */
1583 if (TREE_CODE (type) == INTEGER_TYPE
1584 && flag_use_divide_subroutine
1585 && ! flag_syntax_only)
1586 return build_java_soft_divmod (op, type, arg1, arg2);
1591 return fold_build2 (op, type, arg1, arg2);
/* Expand a binary opcode of TYPE: pop both operands (right first) and
   push the combined expression.  NOTE(review): the lines selecting
   ltype/rtype for the shift-count special case are missing here.  */
1595 expand_java_binop (tree type, enum tree_code op)
1605 rtype = int_type_node;
1606 rarg = pop_value (rtype);
1609 rarg = pop_value (rtype);
1611 larg = pop_value (ltype);
1612 push_value (build_java_binop (op, type, larg, rarg));
1615 /* Lookup the field named NAME in *TYPEP or its super classes.
1616 If not found, return NULL_TREE.
1617 (If the *TYPEP is not found, or if the field reference is
1618 ambiguous, return error_mark_node.)
1619 If found, return the FIELD_DECL, and set *TYPEP to the
1620 class containing the field. */
1623 lookup_field (tree *typep, tree name)
1625 if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1627 load_class (*typep, 1);
1628 safe_layout_class (*typep);
1629 if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1630 return error_mark_node;
1634 tree field, binfo, base_binfo;
/* First look for a directly declared field in *TYPEP.  */
1638 for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field))
1639 if (DECL_NAME (field) == name)
1642 /* Process implemented interfaces. */
1643 save_field = NULL_TREE;
1644 for (binfo = TYPE_BINFO (*typep), i = 0;
1645 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1647 tree t = BINFO_TYPE (base_binfo);
1648 if ((field = lookup_field (&t, name)))
/* The same FIELD_DECL found twice (diamond) is not ambiguous;
   two distinct decls from different interfaces are.  */
1650 if (save_field == field)
1652 if (save_field == NULL_TREE)
1656 tree i1 = DECL_CONTEXT (save_field);
1657 tree i2 = DECL_CONTEXT (field);
1658 error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1659 IDENTIFIER_POINTER (name),
1660 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1661 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1662 return error_mark_node;
1667 if (save_field != NULL_TREE)
/* Not found here; continue the search in the superclass.  */
1670 *typep = CLASSTYPE_SUPER (*typep);
1675 /* Look up the field named NAME in object SELF_VALUE,
1676 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1677 SELF_VALUE is NULL_TREE if looking for a static field. */
1680 build_field_ref (tree self_value, tree self_class, tree name)
1682 tree base_class = self_class;
1683 tree field_decl = lookup_field (&base_class, name);
1684 if (field_decl == NULL_TREE)
1686 error ("field %qs not found", IDENTIFIER_POINTER (name));
1687 return error_mark_node;
1689 if (self_value == NULL_TREE)
/* Static field: no object involved.  */
1691 return build_static_field_ref (field_decl);
1695 tree base_type = promote_type (base_class);
1697 /* CHECK is true if self_value is not the this pointer. */
1698 int check = (! (DECL_P (self_value)
1699 && DECL_NAME (self_value) == this_identifier_node));
1701 /* Determine whether a field offset from NULL will lie within
1702 Page 0: this is necessary on those GNU/Linux/BSD systems that
1703 trap SEGV to generate NullPointerExceptions.
1705 We assume that Page 0 will be mapped with NOPERM, and that
1706 memory may be allocated from any other page, so only field
1707 offsets < pagesize are guaranteed to trap. We also assume
1708 the smallest page size we'll encounter is 4k bytes. */
1709 if (! flag_syntax_only && check && ! flag_check_references
1710 && ! flag_indirect_dispatch)
1712 tree field_offset = byte_position (field_decl);
1714 page_size = size_int (4096);
/* Offsets below the page size will fault on a null base, so the
   explicit software check can be dropped.  */
1715 check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
1718 if (base_type != TREE_TYPE (self_value))
1719 self_value = fold_build1 (NOP_EXPR, base_type, self_value);
1720 if (! flag_syntax_only && flag_indirect_dispatch)
/* Indirect dispatch: fetch the field offset from the otable at
   run time instead of hard-coding it.  */
1723 = build_int_cst (NULL_TREE, get_symbol_table_index
1724 (field_decl, NULL_TREE,
1725 &TYPE_OTABLE_METHODS (output_class)));
1727 = build4 (ARRAY_REF, integer_type_node,
1728 TYPE_OTABLE_DECL (output_class), otable_index,
1729 NULL_TREE, NULL_TREE);
/* For fields from other classes the otable slot may still be 0,
   meaning "no such field": trap via the runtime helper.  */
1732 if (DECL_CONTEXT (field_decl) != output_class)
1734 = build3 (COND_EXPR, TREE_TYPE (field_offset),
1735 build2 (EQ_EXPR, boolean_type_node,
1736 field_offset, integer_zero_node),
1737 build_call_nary (void_type_node,
1738 build_address_of (soft_nosuchfield_node),
1742 field_offset = fold (convert (sizetype, field_offset));
1743 self_value = java_check_reference (self_value, check);
/* Address = self + offset, then dereference as the field's type.  */
1745 = fold_build2 (POINTER_PLUS_EXPR,
1746 TREE_TYPE (self_value),
1747 self_value, field_offset);
1748 address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
1750 return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
/* Direct dispatch: an ordinary COMPONENT_REF through a (possibly
   null-checked) indirect reference.  */
1753 self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1755 return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1756 self_value, field_decl, NULL_TREE);
/* Return (creating if necessary) the LABEL_DECL for bytecode offset
   PC; labels are memoized through IDENTIFIER_LOCAL_VALUE.  */
1761 lookup_label (int pc)
1765 if (pc > highest_label_pc_this_method)
1766 highest_label_pc_this_method = pc;
1767 ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc);
1768 name = get_identifier (buf);
1769 if (IDENTIFIER_LOCAL_VALUE (name))
1770 return IDENTIFIER_LOCAL_VALUE (name);
1773 /* The type of the address of a label is return_address_type_node. */
1774 tree decl = create_label_decl (name);
1775 LABEL_PC (decl) = pc;
1776 return pushdecl (decl);
1780 /* Generate a unique name for the purpose of loops and switches
1781 labels, and try-catch-finally blocks label or temporary variables. */
1784 generate_name (void)
/* Monotonic counter makes each generated identifier unique within
   the compilation.  */
1786 static int l_number = 0;
1788 ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
1790 return get_identifier (buff);
/* Build a LABEL_DECL named NAME in the current function; marked
   DECL_IGNORED_P so no debug info is emitted for it.  */
1794 create_label_decl (tree name)
1797 decl = build_decl (LABEL_DECL, name,
1798 TREE_TYPE (return_address_type_node));
1799 DECL_CONTEXT (decl) = current_function_decl;
1800 DECL_IGNORED_P (decl) = 1;
1804 /* This maps a bytecode offset (PC) to various flags. */
1805 char *instruction_bits;
/* Record that TARGET_PC is a jump target: create its label and set
   the BCODE_JUMP_TARGET flag in instruction_bits.  */
1808 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1810 lookup_label (target_pc);
1811 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1814 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1815 where CONDITION is one of one the compare operators. */
1818 expand_compare (enum tree_code condition, tree value1, tree value2,
1821 tree target = lookup_label (target_pc);
1822 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
/* Emit: if (cond) goto target; else empty.  */
1824 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1825 build1 (GOTO_EXPR, void_type_node, target),
1826 build_java_empty_stmt ()));
1829 /* Emit code for a TEST-type opcode. */
1832 expand_test (enum tree_code condition, tree type, int target_pc)
1834 tree value1, value2;
1835 flush_quick_stack ();
1836 value1 = pop_value (type);
/* TEST opcodes compare against an implicit zero/null.  */
1837 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1838 expand_compare (condition, value1, value2, target_pc);
1841 /* Emit code for a COND-type opcode. */
1844 expand_cond (enum tree_code condition, tree type, int target_pc)
1846 tree value1, value2;
1847 flush_quick_stack ();
1848 /* note: pop values in opposite order */
1849 value2 = pop_value (type);
1850 value1 = pop_value (type);
1851 /* Maybe should check value1 and value2 for type compatibility ??? */
1852 expand_compare (condition, value1, value2, target_pc);
/* Expand an unconditional `goto' to bytecode offset TARGET_PC.  */
1856 expand_java_goto (int target_pc)
1858 tree target_label = lookup_label (target_pc);
1859 flush_quick_stack ();
1860 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
/* Begin a tableswitch/lookupswitch on SELECTOR whose default branch
   goes to DEFAULT_PC; returns the SWITCH_EXPR (in an omitted trailing
   line) so cases can be appended by expand_java_add_case.  */
1864 expand_java_switch (tree selector, int default_pc)
1866 tree switch_expr, x;
1868 flush_quick_stack ();
1869 switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1870 NULL_TREE, NULL_TREE);
1871 java_add_stmt (switch_expr);
/* The default case is a fresh label followed by a goto to the
   default bytecode target.  */
1873 x = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, NULL_TREE,
1874 create_artificial_label ());
1875 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1877 x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1878 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
/* Append `case MATCH: goto label(TARGET_PC);' to SWITCH_EXPR's body.  */
1884 expand_java_add_case (tree switch_expr, int match, int target_pc)
1888 value = build_int_cst (TREE_TYPE (switch_expr), match);
1890 x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE,
1891 create_artificial_label ());
1892 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1894 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1895 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
/* Pop a call's arguments off the operand stack according to
   ARG_TYPES, recursing first so the last-pushed argument ends up
   last in the returned TREE_LIST.  */
1899 pop_arguments (tree arg_types)
1901 if (arg_types == end_params_node)
1903 if (TREE_CODE (arg_types) == TREE_LIST)
1905 tree tail = pop_arguments (TREE_CHAIN (arg_types));
1906 tree type = TREE_VALUE (arg_types);
1907 tree arg = pop_value (type);
1909 /* We simply cast each argument to its proper type. This is
1910 needed since we lose type information coming out of the
1911 verifier. We also have to do this when we pop an integer
1912 type that must be promoted for the function call. */
1913 if (TREE_CODE (type) == POINTER_TYPE)
1914 arg = build1 (NOP_EXPR, type, arg);
1915 else if (targetm.calls.promote_prototypes (type)
1916 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1917 && INTEGRAL_TYPE_P (type))
1918 arg = convert (integer_type_node, arg);
1919 return tree_cons (NULL_TREE, arg, tail);
1924 /* Attach to PTR (a block) the declaration found in ENTRY. */
1927 attach_init_test_initialization_flags (void **entry, void *ptr)
1929 tree block = (tree)ptr;
1930 struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
1932 if (block != error_mark_node)
/* Chain the init-test flag decl onto the block's variable list and
   prepend a DECL_EXPR so it is properly declared; BIND_EXPR and
   BLOCK nodes store these in different fields.  */
1934 if (TREE_CODE (block) == BIND_EXPR)
1936 tree body = BIND_EXPR_BODY (block);
1937 TREE_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1938 BIND_EXPR_VARS (block) = ite->value;
1939 body = build2 (COMPOUND_EXPR, void_type_node,
1940 build1 (DECL_EXPR, void_type_node, ite->value), body);
1941 BIND_EXPR_BODY (block) = body;
/* Otherwise BLOCK-style: same idea via BLOCK_EXPR_DECLS and
   BLOCK_SUBBLOCKS.  */
1945 tree body = BLOCK_SUBBLOCKS (block);
1946 TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1947 BLOCK_EXPR_DECLS (block) = ite->value;
1948 body = build2 (COMPOUND_EXPR, void_type_node,
1949 build1 (DECL_EXPR, void_type_node, ite->value), body);
1950 BLOCK_SUBBLOCKS (block) = body;
1957 /* Build an expression to initialize the class CLAS.
1958 if EXPR is non-NULL, returns an expression to first call the initializer
1959 (if it is needed) and then calls EXPR. */
1962 build_class_init (tree clas, tree expr)
1966 /* An optimization: if CLAS is a superclass of the class we're
1967 compiling, we don't need to initialize it. However, if CLAS is
1968 an interface, it won't necessarily be initialized, even if we
1970 if ((! CLASS_INTERFACE (TYPE_NAME (clas))
1971 && inherits_from_p (current_class, clas))
1972 || current_class == clas)
/* Unconditional initialization: always call _Jv_InitClass.  */
1975 if (always_initialize_class_p)
1977 init = build_call_nary (void_type_node,
1978 build_address_of (soft_initclass_node),
1979 1, build_class_ref (clas));
1980 TREE_SIDE_EFFECTS (init) = 1;
/* Otherwise guard the call with a per-function, per-class boolean
   flag so each class is initialized at most once per method.  */
1984 tree *init_test_decl;
1986 init_test_decl = java_treetreehash_new
1987 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
1989 if (*init_test_decl == NULL)
1991 /* Build a declaration and mark it as a flag used to track
1992 static class initializations. */
1993 decl = build_decl (VAR_DECL, NULL_TREE,
1995 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
1996 LOCAL_CLASS_INITIALIZATION_FLAG (decl) = 1;
1997 DECL_CONTEXT (decl) = current_function_decl;
1998 DECL_FUNCTION_INIT_TEST_CLASS (decl) = clas;
1999 /* Tell the check-init code to ignore this decl when not
2000 optimizing class initialization. */
2001 if (!STATIC_CLASS_INIT_OPT_P ())
2002 DECL_BIT_INDEX (decl) = -1;
2003 DECL_INITIAL (decl) = boolean_false_node;
2004 /* Don't emit any symbolic debugging info for this decl. */
2005 DECL_IGNORED_P (decl) = 1;
2006 *init_test_decl = decl;
/* init = (flag == false) ? _Jv_InitClass (clas) : 0; flag = true.  */
2009 init = build_call_nary (void_type_node,
2010 build_address_of (soft_initclass_node),
2011 1, build_class_ref (clas));
2012 TREE_SIDE_EFFECTS (init) = 1;
2013 init = build3 (COND_EXPR, void_type_node,
2014 build2 (EQ_EXPR, boolean_type_node,
2015 *init_test_decl, boolean_false_node),
2016 init, integer_zero_node);
2017 TREE_SIDE_EFFECTS (init) = 1;
2018 init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2019 build2 (MODIFY_EXPR, boolean_type_node,
2020 *init_test_decl, boolean_true_node));
2021 TREE_SIDE_EFFECTS (init) = 1;
/* Sequence the initialization before EXPR when one was supplied.  */
2024 if (expr != NULL_TREE)
2026 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2027 TREE_SIDE_EFFECTS (expr) = 1;
2035 /* Rewrite expensive calls that require stack unwinding at runtime to
2036 cheaper alternatives. The logic here performs these
2039 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2040 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
/* One rewrite rule: match on class name, method name and signature;
   replace with NEW_SIGNATURE and transform the argument list with
   REWRITE_ARGLIST.  NOTE(review): the struct header and some member
   lines are missing from this excerpt.  */
2046 const char *classname;
2048 const char *signature;
2049 const char *new_signature;
2051 tree (*rewrite_arglist) (tree arglist);
2054 /* Add __builtin_return_address(0) to the end of an arglist. */
2058 rewrite_arglist_getcaller (tree arglist)
2061 = build_call_expr (built_in_decls[BUILT_IN_RETURN_ADDRESS],
2062 1, integer_zero_node);
/* __builtin_return_address is only reliable in a non-inlined frame,
   so inlining of the current function must be suppressed.  */
2064 DECL_INLINE (current_function_decl) = 0;
2066 return chainon (arglist,
2067 tree_cons (NULL_TREE, retaddr,
2071 /* Add this.class to the end of an arglist. */
2074 rewrite_arglist_getclass (tree arglist)
2076 return chainon (arglist,
2077 tree_cons (NULL_TREE, build_class_ref (output_class),
/* Table of call-rewrite rules, terminated by an all-NULL sentinel.
   Each entry maps a (class, method, signature) triple to the cheaper
   private variant plus the arglist transformation to apply.  */
2081 static rewrite_rule rules[] =
2082 {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2083 "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2084 ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2085 {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2086 "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2087 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2088 {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2089 "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2090 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2091 {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2092 "()Ljava/lang/ClassLoader;",
2093 "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2094 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2096 {NULL, NULL, NULL, NULL, 0, NULL}};
2098 /* True if this method is special, i.e. it's a private method that
2099 should be exported from a DSO. */
2102 special_method_p (tree candidate_method)
2104 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2105 tree method = DECL_NAME (candidate_method);
/* A method is special iff its (class, name) pair appears in the
   rewrite-rules table above.  */
2108 for (p = rules; p->classname; p++)
2110 if (get_identifier (p->classname) == context
2111 && get_identifier (p->method) == method)
2117 /* Scan the rules list for replacements for *METHOD_P and replace the
2118 args accordingly. If the rewrite results in an access to a private
2119 method, update SPECIAL.*/
2122 maybe_rewrite_invocation (tree *method_p, tree *arg_list_p,
2123 tree *method_signature_p, tree *special)
2125 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2127 *special = NULL_TREE;
2129 for (p = rules; p->classname; p++)
2131 if (get_identifier (p->classname) == context)
2133 tree method = DECL_NAME (*method_p);
2134 if (get_identifier (p->method) == method
2135 && get_identifier (p->signature) == *method_signature_p)
/* Find (or, without verification, synthesize) the replacement
   method with the new signature.  */
2138 = lookup_java_method (DECL_CONTEXT (*method_p),
2140 get_identifier (p->new_signature));
2141 if (! maybe_method && ! flag_verify_invocations)
2144 = add_method (DECL_CONTEXT (*method_p), p->flags,
2145 method, get_identifier (p->new_signature));
2146 DECL_EXTERNAL (maybe_method) = 1;
2148 *method_p = maybe_method;
2149 gcc_assert (*method_p);
/* Rewrite the arguments and signature, and flag the call as
   special for the caller.  */
2150 *arg_list_p = p->rewrite_arglist (*arg_list_p);
2151 *method_signature_p = get_identifier (p->new_signature);
2152 *special = integer_one_node;
/* Build a reference to the code of METHOD, which is statically known
   at compile time (invokespecial/invokestatic style dispatch).  */
2163 build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2164 tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2165 tree arg_list ATTRIBUTE_UNUSED, tree special)
2168 if (is_compiled_class (self_type))
2170 /* With indirect dispatch we have to use indirect calls for all
2171 publicly visible methods or gcc will use PLT indirections
2172 to reach them. We also have to use indirect dispatch for all
2173 external methods. */
2174 if (! flag_indirect_dispatch
2175 || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
/* Direct address of the compiled method.  */
2177 func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
/* Indirect: fetch the address from the atable slot assigned to
   this method.  */
2183 = build_int_cst (NULL_TREE,
2184 (get_symbol_table_index
2186 &TYPE_ATABLE_METHODS (output_class))));
2188 = build4 (ARRAY_REF,
2189 TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2190 TYPE_ATABLE_DECL (output_class), table_index,
2191 NULL_TREE, NULL_TREE);
2193 func = convert (method_ptr_type_node, func);
2197 /* We don't know whether the method has been (statically) compiled.
2198 Compile this code to get a reference to the method's code:
2200 SELF_TYPE->methods[METHOD_INDEX].ncode
2204 int method_index = 0;
2207 /* The method might actually be declared in some superclass, so
2208 we have to use its class context, not the caller's notion of
2209 where the method is. */
2210 self_type = DECL_CONTEXT (method);
2211 ref = build_class_ref (self_type);
2212 ref = build1 (INDIRECT_REF, class_type_node, ref);
2213 if (ncode_ident == NULL_TREE)
2214 ncode_ident = get_identifier ("ncode");
2215 if (methods_ident == NULL_TREE)
2216 methods_ident = get_identifier ("methods");
2217 ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2218 lookup_field (&class_type_node, methods_ident),
/* Scan the class's method list to find METHOD's index; a miss is
   an internal error.  */
2220 for (meth = TYPE_METHODS (self_type);
2221 ; meth = TREE_CHAIN (meth))
2225 if (meth == NULL_TREE)
2226 fatal_error ("method '%s' not found in class",
2227 IDENTIFIER_POINTER (DECL_NAME (method)));
/* Index into the methods array by byte offset, then take the
   ncode field of the selected _Jv_Method.  */
2230 method_index *= int_size_in_bytes (method_type_node);
2231 ref = fold_build2 (POINTER_PLUS_EXPR, method_ptr_type_node,
2232 ref, size_int (method_index));
2233 ref = build1 (INDIRECT_REF, method_type_node, ref);
2234 func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2235 ref, lookup_field (&method_type_node, ncode_ident),
/* Build an expression for the dispatch table (vtable) of the receiver
   object, which is the first entry of ARG_LIST.  */
2242 invoke_build_dtable (int is_invoke_interface, tree arg_list)
2244 tree dtable, objectref;
/* The receiver is referenced twice (here and in the actual call);
   make its evaluation shareable.  */
2246 TREE_VALUE (arg_list) = save_expr (TREE_VALUE (arg_list));
2248 /* If we're dealing with interfaces and if the objectref
2249 argument is an array then get the dispatch table of the class
2250 Object rather than the one from the objectref. */
2251 objectref = (is_invoke_interface
2252 && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list)))
2253 ? build_class_ref (object_type_node) : TREE_VALUE (arg_list));
2255 if (dtable_ident == NULL_TREE)
2256 dtable_ident = get_identifier ("vtable");
2257 dtable = build_java_indirect_ref (object_type_node, objectref,
2258 flag_check_references);
2259 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2260 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2265 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2266 T. If this decl has not been seen before, it will be added to the
2267 [oa]table_methods. If it has, the existing table slot will be
2271 get_symbol_table_index (tree t, tree special, tree *symbol_table)
2276 if (*symbol_table == NULL_TREE)
2278 *symbol_table = build_tree_list (special, t)
2282 method_list = *symbol_table;
/* Linear scan for an existing (T, SPECIAL) entry; fall off the end
   holding the last node so a new entry can be appended.  */
2286 tree value = TREE_VALUE (method_list);
2287 tree purpose = TREE_PURPOSE (method_list);
2288 if (value == t && purpose == special)
2291 if (TREE_CHAIN (method_list) == NULL_TREE)
2294 method_list = TREE_CHAIN (method_list);
2297 TREE_CHAIN (method_list) = build_tree_list (special, t);
/* Build the function pointer for an invokevirtual of METHOD through
   DTABLE (the receiver's vtable expression).  */
2302 build_invokevirtual (tree dtable, tree method, tree special)
2305 tree nativecode_ptr_ptr_type_node
2306 = build_pointer_type (nativecode_ptr_type_node);
2310 if (flag_indirect_dispatch)
/* Indirect dispatch: the vtable offset comes from the otable at
   run time.  Interfaces never go through invokevirtual.  */
2312 gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
2315 = build_int_cst (NULL_TREE, get_symbol_table_index
2317 &TYPE_OTABLE_METHODS (output_class)));
2318 method_index = build4 (ARRAY_REF, integer_type_node,
2319 TYPE_OTABLE_DECL (output_class),
2320 otable_index, NULL_TREE, NULL_TREE);
2324 /* We fetch the DECL_VINDEX field directly here, rather than
2325 using get_method_index(). DECL_VINDEX is the true offset
2326 from the vtable base to a method, regrdless of any extra
2327 words inserted at the start of the vtable. */
2328 method_index = DECL_VINDEX (method);
2329 method_index = size_binop (MULT_EXPR, method_index,
2330 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
2331 if (TARGET_VTABLE_USES_DESCRIPTORS)
2332 method_index = size_binop (MULT_EXPR, method_index,
2333 size_int (TARGET_VTABLE_USES_DESCRIPTORS));
/* func = *(dtable + method_index), or the descriptor address on
   descriptor-based vtable targets.  */
2336 func = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dtable), dtable,
2337 convert (sizetype, method_index));
2339 if (TARGET_VTABLE_USES_DESCRIPTORS)
2340 func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
2343 func = fold_convert (nativecode_ptr_ptr_type_node, func);
2344 func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
/* Cached identifier for the vtable's "class" field; GTY so the GC
   roots it across collections.  */
2350 static GTY(()) tree class_ident;
/* Build a tree that resolves interface METHOD for the object whose
   vtable pointer is DTABLE.  Emits a call to the runtime helper
   _Jv_LookupInterfaceMethod(Idx) with (class, interface, index).
   NOTE(review): gapped extraction — the return type, local
   declarations (interface, idx, itable_index) and branch braces are
   in missing lines.  */
2352 build_invokeinterface (tree dtable, tree method)
2357 /* We expand invokeinterface here. */
2359 if (class_ident == NULL_TREE)
2360 class_ident = get_identifier ("class");
/* Fetch the object's class from its vtable (null-checked when
   flag_check_references is set).  */
2362 dtable = build_java_indirect_ref (dtable_type, dtable,
2363 flag_check_references);
2364 dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2365 lookup_field (&dtable_type, class_ident), NULL_TREE);
2367 interface = DECL_CONTEXT (method);
2368 gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2369 layout_class_methods (interface);
2371 if (flag_indirect_dispatch)
/* Each itable entry occupies two slots: interface class at
   itable_index-1, method index at itable_index.  */
2374 = 2 * (get_symbol_table_index
2375 (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2377 = build4 (ARRAY_REF,
2378 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2379 TYPE_ITABLE_DECL (output_class),
2380 build_int_cst (NULL_TREE, itable_index-1),
2381 NULL_TREE, NULL_TREE);
2383 = build4 (ARRAY_REF,
2384 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2385 TYPE_ITABLE_DECL (output_class),
2386 build_int_cst (NULL_TREE, itable_index),
2387 NULL_TREE, NULL_TREE);
2388 interface = convert (class_ptr_type, interface);
2389 idx = convert (integer_type_node, idx);
/* Direct dispatch: the method index is known at compile time.  */
2393 idx = build_int_cst (NULL_TREE,
2394 get_interface_method_index (method, interface));
2395 interface = build_class_ref (interface);
2398 return build_call_nary (ptr_type_node,
2399 build_address_of (soft_lookupinterfacemethod_node),
2400 3, dtable, interface, idx);
2403 /* Expand one of the invoke_* opcodes.
2404 OPCODE is the specific opcode.
2405 METHOD_REF_INDEX is an index into the constant pool.
2406 NARGS is the number of arguments, or -1 if not specified. */
/* Expand one of the invoke* bytecodes: look up the target method from
   the constant pool, pop its arguments from the quick stack, build the
   call (direct, vtable or itable dispatch), and push any result.
   NOTE(review): gapped extraction — many intermediate lines (braces,
   else branches, the result push at the end) are missing.
   NOTE(review): "¤t_jcf" below appears to be mojibake for
   "&current_jcf" (an "&curren" sequence mis-decoded as the HTML
   entity); confirm against upstream expr.c before relying on it.  */
2409 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
2411 tree method_signature
2412 = COMPONENT_REF_SIGNATURE(¤t_jcf->cpool, method_ref_index);
2413 tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool,
2416 = get_class_constant (current_jcf,
2417 COMPONENT_REF_CLASS_INDEX(¤t_jcf->cpool,
2419 const char *const self_name
2420 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2421 tree call, func, method, arg_list, method_type;
2422 tree check = NULL_TREE;
2424 tree special = NULL_TREE;
/* Make sure the receiver class is loaded and laid out before we try
   to look methods up in it.  */
2426 if (! CLASS_LOADED_P (self_type))
2428 load_class (self_type, 1);
2429 safe_layout_class (self_type);
2430 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2431 fatal_error ("failed to find class '%s'", self_name);
2433 layout_class_methods (self_type);
/* Constructors are looked up by signature only; other methods by
   name and signature.  */
2435 if (ID_INIT_P (method_name))
2436 method = lookup_java_constructor (self_type, method_signature);
2438 method = lookup_java_method (self_type, method_name, method_signature);
2440 /* We've found a method in a class other than the one in which it
2441 was wanted. This can happen if, for instance, we're trying to
2442 compile invokespecial super.equals().
2443 FIXME: This is a kludge. Rather than nullifying the result, we
2444 should change lookup_java_method() so that it doesn't search the
2445 superclass chain when we're BC-compiling. */
2446 if (! flag_verify_invocations
2448 && ! TYPE_ARRAY_P (self_type)
2449 && self_type != DECL_CONTEXT (method))
2452 /* We've found a method in an interface, but this isn't an interface
2454 if (opcode != OPCODE_invokeinterface
2456 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2459 /* We've found a non-interface method but we are making an
2460 interface call. This can happen if the interface overrides a
2461 method in Object. */
2462 if (! flag_verify_invocations
2463 && opcode == OPCODE_invokeinterface
2465 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
2468 if (method == NULL_TREE)
2470 if (flag_verify_invocations || ! flag_indirect_dispatch)
2472 error ("class '%s' has no method named '%s' matching signature '%s'",
2474 IDENTIFIER_POINTER (method_name),
2475 IDENTIFIER_POINTER (method_signature));
/* BC-compiling without verification: fabricate a dummy method decl so
   code generation can proceed; resolution happens at run time.  */
2479 int flags = ACC_PUBLIC;
2480 if (opcode == OPCODE_invokestatic)
2481 flags |= ACC_STATIC;
2482 if (opcode == OPCODE_invokeinterface)
2484 flags |= ACC_INTERFACE | ACC_ABSTRACT;
2485 CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2487 method = add_method (self_type, flags, method_name,
2489 DECL_ARTIFICIAL (method) = 1;
2490 METHOD_DUMMY (method) = 1;
2491 layout_class_method (self_type, NULL,
2496 /* Invoke static can't invoke static/abstract method */
2497 if (method != NULL_TREE)
2499 if (opcode == OPCODE_invokestatic)
2501 if (!METHOD_STATIC (method))
2503 error ("invokestatic on non static method");
2506 else if (METHOD_ABSTRACT (method))
2508 error ("invokestatic on abstract method");
2514 if (METHOD_STATIC (method))
2516 error ("invoke[non-static] on static method");
2522 if (method == NULL_TREE)
2524 /* If we got here, we emitted an error message above. So we
2525 just pop the arguments, push a properly-typed zero, and
2527 method_type = get_type_from_signature (method_signature);
2528 pop_arguments (TYPE_ARG_TYPES (method_type));
2529 if (opcode != OPCODE_invokestatic)
2530 pop_type (self_type);
2531 method_type = promote_type (TREE_TYPE (method_type));
2532 push_value (convert (method_type, integer_zero_node));
2536 method_type = TREE_TYPE (method);
2537 arg_list = pop_arguments (TYPE_ARG_TYPES (method_type));
2538 flush_quick_stack ();
/* Give the back end a chance to rewrite the call (e.g. builtins).  */
2540 maybe_rewrite_invocation (&method, &arg_list, &method_signature,
/* Choose the dispatch strategy: static and effectively-final
   virtual calls bind directly; others go through the vtable.  */
2544 if (opcode == OPCODE_invokestatic)
2545 func = build_known_method_ref (method, method_type, self_type,
2546 method_signature, arg_list, special);
2547 else if (opcode == OPCODE_invokespecial
2548 || (opcode == OPCODE_invokevirtual
2549 && (METHOD_PRIVATE (method)
2550 || METHOD_FINAL (method)
2551 || CLASS_FINAL (TYPE_NAME (self_type)))))
2553 /* If the object for the method call is null, we throw an
2554 exception. We don't do this if the object is the current
2555 method's `this'. In other cases we just rely on an
2556 optimization pass to eliminate redundant checks. FIXME:
2557 Unfortunately there doesn't seem to be a way to determine
2558 what the current method is right now.
2559 We do omit the check if we're calling <init>. */
2560 /* We use a SAVE_EXPR here to make sure we only evaluate
2561 the new `self' expression once. */
2562 tree save_arg = save_expr (TREE_VALUE (arg_list));
2563 TREE_VALUE (arg_list) = save_arg;
2564 check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2565 func = build_known_method_ref (method, method_type, self_type,
2566 method_signature, arg_list, special);
2570 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2572 if (opcode == OPCODE_invokevirtual)
2573 func = build_invokevirtual (dtable, method, special);
2575 func = build_invokeinterface (dtable, method);
2578 if (TREE_CODE (func) == ADDR_EXPR)
2579 TREE_TYPE (func) = build_pointer_type (method_type);
2581 func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2583 call = build_call_list (TREE_TYPE (method_type), func, arg_list);
2584 TREE_SIDE_EFFECTS (call) = 1;
2585 call = check_for_builtin (method, call);
/* Sequence the null check (if any) before the call itself.  */
2587 if (check != NULL_TREE)
2589 call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2590 TREE_SIDE_EFFECTS (call) = 1;
2593 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2594 java_add_stmt (call);
2598 flush_quick_stack ();
2602 /* Create a stub which will be put into the vtable but which will call
/* Build the body of a stub for native METHOD: the stub acquires a
   JNIEnv frame, looks up the JNI implementation via
   _Jv_LookupJNIMethod (caching the pointer in a static `meth'
   variable), makes the call, unwraps weak references for object
   results, pops the JNI frame, and returns the (possibly promoted)
   result.
   NOTE(review): gapped extraction — several declarations (meth_var,
   args_size, jniarg0, bind, drt), brace structure and argument lists
   fall in missing lines.  */
2606 build_jni_stub (tree method)
2608 tree jnifunc, call, args, body, method_sig, arg_types;
2609 tree jniarg0, jniarg1, jniarg2, jniarg3;
2610 tree jni_func_type, tem;
2611 tree env_var, res_var = NULL_TREE, block;
2612 tree method_args, res_type;
2618 tree klass = DECL_CONTEXT (method);
2619 int from_class = ! CLASS_FROM_SOURCE_P (klass);
2620 klass = build_class_ref (klass);
2622 gcc_assert (METHOD_NATIVE (method) && flag_jni);
2624 DECL_ARTIFICIAL (method) = 1;
2625 DECL_EXTERNAL (method) = 0;
/* Local `env' holds the per-call JNIEnv pointer.  */
2627 env_var = build_decl (VAR_DECL, get_identifier ("env"), ptr_type_node);
2628 DECL_CONTEXT (env_var) = method;
/* Non-void methods also get a `res' local to hold the JNI result.  */
2630 if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2632 res_var = build_decl (VAR_DECL, get_identifier ("res"),
2633 TREE_TYPE (TREE_TYPE (method)));
2634 DECL_CONTEXT (res_var) = method;
2635 TREE_CHAIN (env_var) = res_var;
/* Static `meth' caches the looked-up JNI function pointer so the
   lookup only runs on the first call.  */
2638 meth_var = build_decl (VAR_DECL, get_identifier ("meth"), ptr_type_node);
2639 TREE_STATIC (meth_var) = 1;
2640 TREE_PUBLIC (meth_var) = 0;
2641 DECL_EXTERNAL (meth_var) = 0;
2642 DECL_CONTEXT (meth_var) = method;
2643 DECL_ARTIFICIAL (meth_var) = 1;
2644 DECL_INITIAL (meth_var) = null_pointer_node;
2645 TREE_USED (meth_var) = 1;
2646 chainon (env_var, meth_var);
2647 build_result_decl (method);
2649 /* One strange way that the front ends are different is that they
2650 store arguments differently. */
2652 method_args = DECL_ARGUMENTS (method);
2654 method_args = BLOCK_EXPR_DECLS (DECL_FUNCTION_BODY (method));
2655 block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2656 TREE_SIDE_EFFECTS (block) = 1;
2657 /* When compiling from source we don't set the type of the block,
2658 because that will prevent patch_return from ever being run. */
2660 TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));
2662 /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2663 body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2664 build_call_nary (ptr_type_node,
2665 build_address_of (soft_getjnienvnewframe_node),
2667 CAN_COMPLETE_NORMALLY (body) = 1;
2669 /* All the arguments to this method become arguments to the
2670 underlying JNI function. If we had to wrap object arguments in a
2671 special way, we would do that here. */
2673 for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem))
/* Accumulate the argument block size (rounded to PARM_BOUNDARY when
   the target defines one) — passed to the runtime lookup below.  */
2675 int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2676 #ifdef PARM_BOUNDARY
2677 arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2680 args_size += (arg_bits / BITS_PER_UNIT);
2682 args = tree_cons (NULL_TREE, tem, args);
2684 args = nreverse (args);
2685 arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2687 /* For a static method the second argument is the class. For a
2688 non-static method the second argument is `this'; that is already
2689 available in the argument list. */
2690 if (METHOD_STATIC (method))
2692 args_size += int_size_in_bytes (TREE_TYPE (klass));
2693 args = tree_cons (NULL_TREE, klass, args);
2694 arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2697 /* The JNIEnv structure is the first argument to the JNI function. */
2698 args_size += int_size_in_bytes (TREE_TYPE (env_var));
2699 args = tree_cons (NULL_TREE, env_var, args);
2700 arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2702 /* We call _Jv_LookupJNIMethod to find the actual underlying
2703 function pointer. _Jv_LookupJNIMethod will throw the appropriate
2704 exception if this function is not found at runtime. */
2705 method_sig = build_java_signature (TREE_TYPE (method));
2707 jniarg1 = build_utf8_ref (DECL_NAME (method));
2708 jniarg2 = build_utf8_ref (unmangle_classname
2709 (IDENTIFIER_POINTER (method_sig),
2710 IDENTIFIER_LENGTH (method_sig)));
2711 jniarg3 = build_int_cst (NULL_TREE, args_size);
2713 tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2715 #ifdef MODIFY_JNI_METHOD_CALL
2716 tem = MODIFY_JNI_METHOD_CALL (tem);
2719 jni_func_type = build_pointer_type (tem);
/* jnifunc = (meth != NULL) ? meth : (meth = _Jv_LookupJNIMethod(...))
   — a lazy, cached lookup.  */
2721 jnifunc = build3 (COND_EXPR, ptr_type_node,
2722 build2 (NE_EXPR, boolean_type_node,
2723 meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
2725 build2 (MODIFY_EXPR, ptr_type_node, meth_var,
2726 build_call_nary (ptr_type_node,
2728 (soft_lookupjnimethod_node),
2731 jniarg2, jniarg3)));
2733 /* Now we make the actual JNI call via the resulting function
2735 call = build_call_list (TREE_TYPE (TREE_TYPE (method)),
2736 build1 (NOP_EXPR, jni_func_type, jnifunc),
2739 /* If the JNI call returned a result, capture it here. If we had to
2740 unwrap JNI object results, we would do that here. */
2741 if (res_var != NULL_TREE)
2743 /* If the call returns an object, it may return a JNI weak
2744 reference, in which case we must unwrap it. */
2745 if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2746 call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2747 build_address_of (soft_unwrapjni_node),
2749 call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2753 TREE_SIDE_EFFECTS (call) = 1;
2754 CAN_COMPLETE_NORMALLY (call) = 1;
2756 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2757 TREE_SIDE_EFFECTS (body) = 1;
2759 /* Now free the environment we allocated. */
2760 call = build_call_nary (ptr_type_node,
2761 build_address_of (soft_jnipopsystemframe_node),
2763 TREE_SIDE_EFFECTS (call) = 1;
2764 CAN_COMPLETE_NORMALLY (call) = 1;
2765 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2766 TREE_SIDE_EFFECTS (body) = 1;
2768 /* Finally, do the return. */
2769 res_type = void_type_node;
2770 if (res_var != NULL_TREE)
2773 gcc_assert (DECL_RESULT (method));
2774 /* Make sure we copy the result variable to the actual
2775 result. We use the type of the DECL_RESULT because it
2776 might be different from the return type of the function:
2777 it might be promoted. */
2778 drt = TREE_TYPE (DECL_RESULT (method));
2779 if (drt != TREE_TYPE (res_var))
2780 res_var = build1 (CONVERT_EXPR, drt, res_var);
2781 res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2782 TREE_SIDE_EFFECTS (res_var) = 1;
2785 body = build2 (COMPOUND_EXPR, void_type_node, body,
2786 build1 (RETURN_EXPR, void_type_node, res_var));
2787 TREE_SIDE_EFFECTS (body) = 1;
2789 /* Prepend class initialization for static methods reachable from
2791 if (METHOD_STATIC (method)
2792 && (! METHOD_PRIVATE (method)
2793 || INNER_CLASS_P (DECL_CONTEXT (method))))
2795 tree init = build_call_expr (soft_initclass_node, 1,
2797 body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2798 TREE_SIDE_EFFECTS (body) = 1;
2801 bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2807 /* Given lvalue EXP, return a volatile expression that references the
/* Given lvalue EXP, return an equivalent reference made through a
   pointer to the volatile-qualified version of EXP's type, so loads
   and stores through it are treated as volatile accesses.
   NOTE(review): gapped extraction — the `tree v_type' declaration
   line and the final `return exp;' are in missing lines.  */
2811 java_modify_addr_for_volatile (tree exp)
2813 tree exp_type = TREE_TYPE (exp);
/* Volatile-qualified variant of the expression's type.  */
2815 = build_qualified_type (exp_type,
2816 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2817 tree addr = build_fold_addr_expr (exp);
/* Re-point the address through `volatile T *' and dereference.  */
2818 v_type = build_pointer_type (v_type);
2819 addr = fold_convert (v_type, addr);
2820 exp = build_fold_indirect_ref (addr);
2825 /* Expand an operation to extract from or store into a field.
2826 IS_STATIC is 1 iff the field is static.
2827 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2828 FIELD_REF_INDEX is an index into the constant pool. */
/* Expand a getfield/putfield/getstatic/putstatic bytecode.
   IS_STATIC is 1 for the static forms; IS_PUTTING is 1 for stores;
   FIELD_REF_INDEX indexes the constant pool.  Volatile fields are
   accessed through java_modify_addr_for_volatile and bracketed with
   __sync_synchronize barriers.
   NOTE(review): gapped extraction — braces, else branches and the
   result push are in missing lines.  "¤t_jcf" appears to be mojibake
   for "&current_jcf"; confirm against upstream expr.c.  */
2831 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
2834 = get_class_constant (current_jcf,
2835 COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool,
2837 const char *self_name
2838 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2839 tree field_name = COMPONENT_REF_NAME (¤t_jcf->cpool, field_ref_index);
2840 tree field_signature = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool,
2842 tree field_type = get_type_from_signature (field_signature);
/* For a store, the new value is on top of the stack.  */
2843 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2846 tree original_self_type = self_type;
2850 if (! CLASS_LOADED_P (self_type))
2851 load_class (self_type, 1);
2852 field_decl = lookup_field (&self_type, field_name);
2853 if (field_decl == error_mark_node)
2857 else if (field_decl == NULL_TREE)
/* BC-compiling without verification: fabricate a dummy field decl
   so code generation can continue.  */
2859 if (! flag_verify_invocations)
2861 int flags = ACC_PUBLIC;
2863 flags |= ACC_STATIC;
2864 self_type = original_self_type;
2865 field_decl = add_field (original_self_type, field_name,
2867 DECL_ARTIFICIAL (field_decl) = 1;
2868 DECL_IGNORED_P (field_decl) = 1;
2870 /* FIXME: We should be pessimistic about volatility. We
2871 don't know one way or another, but this is safe.
2872 However, doing this has bad effects on code quality. We
2873 need to look at better ways to do this. */
2874 TREE_THIS_VOLATILE (field_decl) = 1;
2879 error ("missing field '%s' in '%s'",
2880 IDENTIFIER_POINTER (field_name), self_name);
2884 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2886 error ("mismatching signature for field '%s' in '%s'",
2887 IDENTIFIER_POINTER (field_name), self_name);
/* For an instance access, pop the object reference.  */
2890 field_ref = is_static ? NULL_TREE : pop_value (self_type);
2894 push_value (convert (field_type, integer_zero_node));
2895 flush_quick_stack ();
2899 field_ref = build_field_ref (field_ref, self_type, field_name);
/* Static access may require a class-initialization check first.  */
2901 && ! flag_indirect_dispatch)
2903 tree context = DECL_CONTEXT (field_ref);
2904 if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2905 field_ref = build_class_init (context, field_ref);
2907 field_ref = build_class_init (self_type, field_ref);
2911 flush_quick_stack ();
2912 if (FIELD_FINAL (field_decl))
2914 if (DECL_CONTEXT (field_decl) != current_class)
2915 error ("assignment to final field %q+D not in field's class",
2917 /* We used to check for assignments to final fields not
2918 occurring in the class initializer or in a constructor
2919 here. However, this constraint doesn't seem to be
2920 enforced by the JVM. */
/* Volatile store: access through a volatile lvalue and emit a
   memory barrier around the assignment.  */
2923 if (TREE_THIS_VOLATILE (field_decl))
2924 field_ref = java_modify_addr_for_volatile (field_ref);
2926 modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2927 field_ref, new_value);
2929 if (TREE_THIS_VOLATILE (field_decl))
2931 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2933 java_add_stmt (modify_expr);
/* Load path: copy the field into a temporary local so the value is
   fixed at this point, barriers around volatile reads.  */
2937 tree temp = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2938 java_add_local_var (temp);
2940 if (TREE_THIS_VOLATILE (field_decl))
2941 field_ref = java_modify_addr_for_volatile (field_ref);
2944 = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2945 java_add_stmt (modify_expr);
2947 if (TREE_THIS_VOLATILE (field_decl))
2949 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2953 TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
/* Restore the verifier's recorded type state at LABEL: reset the
   simulated stack pointer and reload type_map[] from the TREE_VEC
   stored in LABEL_TYPE_STATE.
   NOTE(review): gapped extraction — return type, braces and the `i'
   declaration are in missing lines.  */
2957 load_type_state (tree label)
2960 tree vec = LABEL_TYPE_STATE (label);
2961 int cur_length = TREE_VEC_LENGTH (vec);
/* The vector holds locals followed by stack slots; the stack depth
   is the total length minus the method's max locals.  */
2962 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2963 for (i = 0; i < cur_length; i++)
2964 type_map [i] = TREE_VEC_ELT (vec, i);
2967 /* Go over METHOD's bytecode and note instruction starts in
2968 instruction_bits[]. */
/* Pre-scan METHOD's bytecode in JCF: mark every instruction start in
   instruction_bits[] and record every branch/switch target via
   note_label, by running the opcode table in javaop.def with PRE_*
   expansions that only consume immediates and note labels.
   NOTE(review): gapped extraction — several declarations (PC,
   saw_index, INT_temp) and closing braces are in missing lines; do
   not insert anything inside the `\'-continued macro definitions.  */
2971 note_instructions (JCF *jcf, tree method)
2974 unsigned char* byte_ops;
2975 long length = DECL_CODE_LENGTH (method);
2980 #undef RET /* Defined by config/i386/i386.h */
2982 #define BCODE byte_ops
2983 #define BYTE_type_node byte_type_node
2984 #define SHORT_type_node short_type_node
2985 #define INT_type_node int_type_node
2986 #define LONG_type_node long_type_node
2987 #define CHAR_type_node char_type_node
2988 #define PTR_type_node ptr_type_node
2989 #define FLOAT_type_node float_type_node
2990 #define DOUBLE_type_node double_type_node
2991 #define VOID_type_node void_type_node
2992 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
2993 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
2994 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
2995 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
2997 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
/* Position the reader at the method's code attribute and size the
   per-PC bit array (one byte per bytecode position).  */
2999 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3000 byte_ops = jcf->read_ptr;
3001 instruction_bits = xrealloc (instruction_bits, length + 1);
3002 memset (instruction_bits, 0, length + 1);
3004 /* This pass figures out which PC can be the targets of jumps. */
3005 for (PC = 0; PC < length;)
3007 int oldpc = PC; /* PC at instruction start. */
3008 instruction_bits [PC] |= BCODE_INSTRUCTION_START;
3009 switch (byte_ops[PC++])
3011 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3013 PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3016 #define NOTE_LABEL(PC) note_label(oldpc, PC)
/* The PRE_* kinds below skip immediates without generating code.  */
3018 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3019 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3020 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3021 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3022 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3023 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3024 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3025 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3027 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3028 PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3029 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3030 ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3031 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3032 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3033 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3034 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3036 /* two forms of wide instructions */
3037 #define PRE_SPECIAL_WIDE(IGNORE) \
3039 int modified_opcode = IMMEDIATE_u1; \
3040 if (modified_opcode == OPCODE_iinc) \
3042 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3043 (void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
3047 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3051 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3053 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3055 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3056 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3057 PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3058 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
3059 #define PRE_ARRAY_STORE(TYPE) /* nothing */
3060 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3061 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3062 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3063 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3064 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
/* Branch-like kinds record their targets relative to the opcode.  */
3066 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3067 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3068 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3069 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3070 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3071 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3072 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3074 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3076 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)
/* Switch payloads start at the next 4-byte-aligned PC.  */
3078 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3079 PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3081 #define PRE_LOOKUP_SWITCH \
3082 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3083 NOTE_LABEL (default_offset+oldpc); \
3085 while (--npairs >= 0) { \
3086 jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
3087 jint offset = IMMEDIATE_s4; \
3088 NOTE_LABEL (offset+oldpc); } \
3091 #define PRE_TABLE_SWITCH \
3092 { jint default_offset = IMMEDIATE_s4; \
3093 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3094 NOTE_LABEL (default_offset+oldpc); \
3096 while (low++ <= high) { \
3097 jint offset = IMMEDIATE_s4; \
3098 NOTE_LABEL (offset+oldpc); } \
3101 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3102 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3103 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3104 (void)(IMMEDIATE_u2); \
3105 PC += 2 * IS_INTERFACE /* for invokeinterface */;
/* Pull in the opcode table; JAVAOP expands to the switch cases.  */
3107 #include "javaop.def"
/* Translate METHOD's verified bytecode to GENERIC trees: mark line
   numbers, run the new verifier, then walk the bytecodes emitting
   labels, type-state reloads and statements via
   process_jvm_instruction, nop-ing out unreachable regions.
   NOTE(review): gapped extraction — braces, `continue'/`else' lines
   and some declarations (i, PC) are in missing lines.  */
3114 expand_byte_code (JCF *jcf, tree method)
3118 const unsigned char *linenumber_pointer;
3119 int dead_code_index = -1;
3120 unsigned char* byte_ops;
3121 long length = DECL_CODE_LENGTH (method);
3124 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3125 byte_ops = jcf->read_ptr;
3127 /* We make an initial pass of the line number table, to note
3128 which instructions have associated line number entries. */
3129 linenumber_pointer = linenumber_table;
3130 for (i = 0; i < linenumber_count; i++)
3132 int pc = GET_u2 (linenumber_pointer);
/* Each table entry is 4 bytes: u2 start_pc, u2 line_number.  */
3133 linenumber_pointer += 4;
3135 warning (0, "invalid PC in line number table");
3138 if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3139 instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3140 instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
/* Abort expansion if the bytecode fails verification.  */
3144 if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3147 promote_arguments ();
3148 cache_this_class_ref (method);
3149 cache_cpool_data_ref ();
3151 /* Translate bytecodes. */
3152 linenumber_pointer = linenumber_table;
3153 for (PC = 0; PC < length;)
3155 if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3157 tree label = lookup_label (PC);
3158 flush_quick_stack ();
3159 if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3160 java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
/* At a verified merge point, reload the verifier's type state.  */
3161 if (LABEL_VERIFIED (label) || PC == 0)
3162 load_type_state (label);
3165 if (! (instruction_bits [PC] & BCODE_VERIFIED))
3167 if (dead_code_index == -1)
3169 /* This is the start of a region of unreachable bytecodes.
3170 They still need to be processed in order for EH ranges
3171 to get handled correctly. However, we can simply
3172 replace these bytecodes with nops. */
3173 dead_code_index = PC;
3176 /* Turn this bytecode into a nop. */
3181 if (dead_code_index != -1)
3183 /* We've just reached the end of a region of dead code. */
3185 warning (0, "unreachable bytecode from %d to before %d",
3186 dead_code_index, PC);
3187 dead_code_index = -1;
3191 /* Handle possible line number entry for this PC.
3193 This code handles out-of-order and multiple linenumbers per PC,
3194 but is optimized for the case of line numbers increasing
3195 monotonically with PC. */
3196 if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
3198 if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3199 || GET_u2 (linenumber_pointer) != PC)
3200 linenumber_pointer = linenumber_table;
3201 while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3203 int pc = GET_u2 (linenumber_pointer);
3204 linenumber_pointer += 4;
3207 int line = GET_u2 (linenumber_pointer - 2);
3208 #ifdef USE_MAPPED_LOCATION
3209 input_location = linemap_line_start (&line_table, line, 1);
3211 input_location.line = line;
3213 if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
/* Emit the statement(s) for this instruction and advance PC.  */
3218 maybe_pushlevels (PC);
3219 PC = process_jvm_instruction (PC, byte_ops, length);
3220 maybe_poplevels (PC);
3223 uncache_this_class_ref (method);
3225 if (dead_code_index != -1)
3227 /* We've just reached the end of a region of dead code. */
3229 warning (0, "unreachable bytecode from %d to the end of the method",
/* Push constant-pool entry INDEX of JCF onto the quick stack:
   CONSTANT_String becomes a reference to an interned String object,
   CONSTANT_Class/ResolvedClass becomes a class reference, anything
   else is fetched via get_constant.
   NOTE(review): gapped extraction — return type, braces, the `c' and
   `name' declarations and the final push_value are in missing
   lines.  */
3235 java_push_constant_from_pool (JCF *jcf, int index)
3238 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
/* Intern the string and build a reference to the pool entry.  */
3241 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3242 index = alloc_name_constant (CONSTANT_String, name);
3243 c = build_ref_from_constant_pool (index);
3244 c = convert (promote_type (string_type_node), c);
3246 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3247 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3249 tree record = get_class_constant (jcf, index);
3250 c = build_class_ref (record);
/* Numeric and other constants come straight from the pool.  */
3253 c = get_constant (jcf, index);
3258 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3259 long length ATTRIBUTE_UNUSED)
3261 const char *opname; /* Temporary ??? */
3262 int oldpc = PC; /* PC at instruction start. */
3264 /* If the instruction is at the beginning of an exception handler,
3265 replace the top of the stack with the thrown object reference. */
3266 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3268 /* Note that the verifier will not emit a type map at all for
3269 dead exception handlers. In this case we just ignore the
3271 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3273 tree type = pop_type (promote_type (throwable_type_node));
3274 push_value (build_exception_object_ref (type));
3278 switch (byte_ops[PC++])
3280 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3283 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3286 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3288 int saw_index = 0; \
3289 int index = OPERAND_VALUE; \
3291 (find_local_variable (index, return_address_type_node, oldpc)); \
3294 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3296 /* OPERAND_VALUE may have side-effects on PC */ \
3297 int opvalue = OPERAND_VALUE; \
3298 build_java_jsr (oldpc + opvalue, PC); \
3301 /* Push a constant onto the stack. */
3302 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3303 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3304 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3305 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3307 /* internal macro added for use by the WIDE case */
3308 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3309 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3311 /* Push local variable onto the opcode stack. */
3312 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3314 /* have to do this since OPERAND_VALUE may have side-effects */ \
3315 int opvalue = OPERAND_VALUE; \
3316 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3319 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3320 expand_java_return (OPERAND_TYPE##_type_node)
3322 #define REM_EXPR TRUNC_MOD_EXPR
3323 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3324 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3326 #define FIELD(IS_STATIC, IS_PUT) \
3327 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3329 #define TEST(OPERAND_TYPE, CONDITION) \
3330 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3332 #define COND(OPERAND_TYPE, CONDITION) \
3333 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3335 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3336 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3338 #define BRANCH_GOTO(OPERAND_VALUE) \
3339 expand_java_goto (oldpc + OPERAND_VALUE)
3341 #define BRANCH_CALL(OPERAND_VALUE) \
3342 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3345 #define BRANCH_RETURN(OPERAND_VALUE) \
3347 tree type = OPERAND_TYPE##_type_node; \
3348 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3349 expand_java_ret (value); \
3353 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3354 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3355 fprintf (stderr, "(not implemented)\n")
3356 #define NOT_IMPL1(OPERAND_VALUE) \
3357 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3358 fprintf (stderr, "(not implemented)\n")
3360 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3362 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3364 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3366 #define STACK_SWAP(COUNT) java_stack_swap()
3368 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3369 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3370 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3372 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3373 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3375 #define LOOKUP_SWITCH \
3376 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3377 tree selector = pop_value (INT_type_node); \
3378 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3379 while (--npairs >= 0) \
3381 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3382 expand_java_add_case (switch_expr, match, oldpc + offset); \
3386 #define TABLE_SWITCH \
3387 { jint default_offset = IMMEDIATE_s4; \
3388 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3389 tree selector = pop_value (INT_type_node); \
3390 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3391 for (; low <= high; low++) \
3393 jint offset = IMMEDIATE_s4; \
3394 expand_java_add_case (switch_expr, low, oldpc + offset); \
3398 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3399 { int opcode = byte_ops[PC-1]; \
3400 int method_ref_index = IMMEDIATE_u2; \
3402 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3404 expand_invoke (opcode, method_ref_index, nargs); \
3407 /* Handle new, checkcast, instanceof */
/* Read a constant-pool class index and hand it to the matching
   expand_java_<OP> helper (NEW, CHECKCAST, INSTANCEOF).  */
3408 #define OBJECT(TYPE, OP) \
3409 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
/* Array opcodes dispatch on SUBOP: LOAD, STORE, LENGTH, NEW.  */
3411 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3413 #define ARRAY_LOAD(OPERAND_TYPE) \
3415 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3418 #define ARRAY_STORE(OPERAND_TYPE) \
3420 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3423 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
/* newarray/anewarray/multianewarray are selected by OPERAND_TYPE
   (NUM, PTR, MULTI respectively).  */
3424 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3425 #define ARRAY_NEW_PTR() \
3426 push_value (build_anewarray (get_class_constant (current_jcf, \
3428 pop_value (int_type_node)));
/* newarray of a primitive type: ATYPE encodes the element type.  */
3429 #define ARRAY_NEW_NUM() \
3431 int atype = IMMEDIATE_u1; \
3432 push_value (build_newarray (atype, pop_value (int_type_node)));\
3434 #define ARRAY_NEW_MULTI() \
3436 tree class = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3437 int ndims = IMMEDIATE_u1; \
3438 expand_java_multianewarray( class, ndims ); \
3441 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3442 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3443 pop_value (OPERAND_TYPE##_type_node)));
/* Narrowing conversions (i2b/i2c/i2s style): convert to the target
   type, then re-widen the result to int via a NOP_EXPR, since such
   values are held as int on the operand stack.  */
3445 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3447 push_value (build1 (NOP_EXPR, int_type_node, \
3448 (convert (TO_TYPE##_type_node, \
3449 pop_value (FROM_TYPE##_type_node))))); \
3452 #define CONVERT(FROM_TYPE, TO_TYPE) \
3454 push_value (convert (TO_TYPE##_type_node, \
3455 pop_value (FROM_TYPE##_type_node))); \
3458 /* internal macro added for use by the WIDE case
3459 Added TREE_TYPE (decl) assignment, apbianco */
/* Pop a value of (nominal) type OPTYPE and store it into local slot
   OPVALUE, refining the slot's recorded type from the value actually
   popped.  */
3460 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3463 int index = OPVALUE; \
3464 tree type = OPTYPE; \
3465 value = pop_value (type); \
3466 type = TREE_TYPE (value); \
3467 decl = find_local_variable (index, type, oldpc); \
3468 set_local_type (index, type); \
3469 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
3472 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3474 /* have to do this since OPERAND_VALUE may have side-effects */ \
3475 int opvalue = OPERAND_VALUE; \
3476 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3479 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3480 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
/* monitorenter/monitorexit map to the soft (runtime) monitor hooks.  */
3482 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3483 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
/* Pop the object reference, flush the quick stack, and emit a
   side-effecting call to the given monitor routine.  */
3485 #define MONITOR_OPERATION(call) \
3487 tree o = pop_value (ptr_type_node); \
3489 flush_quick_stack (); \
3490 c = build_java_monitor (call, o); \
3491 TREE_SIDE_EFFECTS (c) = 1; \
3492 java_add_stmt (c); \
3495 #define SPECIAL_IINC(IGNORED) \
3497 unsigned int local_var_index = IMMEDIATE_u1; \
3498 int ival = IMMEDIATE_s1; \
3499 expand_iinc(local_var_index, ival, oldpc); \
/* `wide' prefix instruction: re-read the real opcode, then a 16-bit
   local-variable index, and dispatch to the iinc/load/store
   expansion.  Fixes the misspelled diagnostic ("unrecogized") and
   restores the brace/break structure.  */
#define SPECIAL_WIDE(IGNORED) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    unsigned int local_var_index = IMMEDIATE_u2; \
    switch (modified_opcode) \
      { \
      case OPCODE_iinc: \
	{ \
	  int ival = IMMEDIATE_s2; \
	  expand_iinc (local_var_index, ival, oldpc); \
	  break; \
	} \
      case OPCODE_iload: \
      case OPCODE_lload: \
      case OPCODE_fload: \
      case OPCODE_dload: \
      case OPCODE_aload: \
	{ \
	  /* duplicate code from LOAD macro */ \
	  LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
	  break; \
	} \
      case OPCODE_istore: \
      case OPCODE_lstore: \
      case OPCODE_fstore: \
      case OPCODE_dstore: \
      case OPCODE_astore: \
	{ \
	  STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
	  break; \
	} \
      default: \
	error ("unrecognized wide sub-instruction"); \
      } \
  }
3538 #define SPECIAL_THROW(IGNORED) \
3539 build_java_athrow (pop_value (throwable_type_node))
/* SPECIAL_BREAK and the generic IMPL both fall back to the
   "(not implemented)" stubs defined above.  */
3541 #define SPECIAL_BREAK NOT_IMPL1
3542 #define IMPL NOT_IMPL
3544 #include "javaop.def"
3547 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3552 /* Return the opcode at PC in the code section pointed to by
3555 static unsigned char
3556 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3558 unsigned char opcode;
3559 long absolute_offset = (long)JCF_TELL (jcf);
3561 JCF_SEEK (jcf, code_offset);
3562 opcode = jcf->read_ptr [pc];
3563 JCF_SEEK (jcf, absolute_offset);
3567 /* Some bytecode compilers are emitting accurate LocalVariableTable
3568 attributes. Here's an example:
3573 Attribute "LocalVariableTable"
3574 slot #<n>: ... (PC: PC+1 length: L)
3576 This is accurate because the local in slot <n> really exists after
3577 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3579 This procedure recognizes this situation and extends the live range
3580 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3581 length of the store instruction.)
3583 This function is used by `give_name_to_locals' so that a local's
3584 DECL features a DECL_LOCAL_START_PC such that the first related
3585 store operation will use DECL as a destination, not an unrelated
3586 temporary created for the occasion.
3588 This function uses a global (instruction_bits) that `note_instructions'
3589 should have allocated and filled properly. */
3592 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3593 int start_pc, int slot)
/* FIRST is set to the base opcode of a matched <t>store_<n> family;
   INDEX receives the explicit slot operand of a plain <t>store;
   OPCODE is the instruction found just before START_PC.  */
3595 int first, index, opcode;
3604 /* Find last previous instruction and remember it */
3605 for (pc = start_pc-1; pc; pc--)
3606 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3610 /* Retrieve the instruction, handle `wide'. */
/* A `wide' prefix means the real opcode follows; skip the prefix
   byte and read again.  */
3611 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3612 if (opcode == OPCODE_wide)
3615 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
/* Map each <t>store_<n> group to its base opcode so the implicit
   slot number can later be recovered as OPCODE - FIRST.  */
3620 case OPCODE_astore_0:
3621 case OPCODE_astore_1:
3622 case OPCODE_astore_2:
3623 case OPCODE_astore_3:
3624 first = OPCODE_astore_0;
3627 case OPCODE_istore_0:
3628 case OPCODE_istore_1:
3629 case OPCODE_istore_2:
3630 case OPCODE_istore_3:
3631 first = OPCODE_istore_0:
3634 case OPCODE_lstore_0:
3635 case OPCODE_lstore_1:
3636 case OPCODE_lstore_2:
3637 case OPCODE_lstore_3:
3638 first = OPCODE_lstore_0;
3641 case OPCODE_fstore_0:
3642 case OPCODE_fstore_1:
3643 case OPCODE_fstore_2:
3644 case OPCODE_fstore_3:
3645 first = OPCODE_fstore_0;
3648 case OPCODE_dstore_0:
3649 case OPCODE_dstore_1:
3650 case OPCODE_dstore_2:
3651 case OPCODE_dstore_3:
3652 first = OPCODE_dstore_0;
/* Explicit-operand <t>store: read the slot index byte; the `wide'
   form carries a second operand byte, combined below.  */
3660 index = peek_opcode_at_pc (jcf, code_offset, pc);
3663 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3664 index = (other << 8) + index;
3669 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3670 means we have a <t>store. */
3671 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
/* NOTE(review): this listing omits the body of the `if' above;
   presumably it rewinds the local's DECL_LOCAL_START_PC as described
   in the comment preceding this function — confirm against the full
   source.  */
3677 /* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
3678 order, as specified by Java Language Specification.
3680 The problem is that while expand_expr will evaluate its sub-operands in
3681 left-to-right order, for variables it will just return an rtx (i.e.
3682 an lvalue) for the variable (rather than an rvalue). So it is possible
3683 that a later sub-operand will change the register, and when the
3684 actual operation is done, it will use the new value, when it should
3685 have used the original value.
3687 We fix this by using save_expr. This forces the sub-operand to be
3688 copied into a fresh virtual register,
3690 For method invocation, we modify the arguments so that a
3691 left-to-right order evaluation is performed. Saved expressions
3692 will, in CALL_EXPR order, be reused when the call will be expanded.
3694 We also promote outgoing args if needed. */
3697 force_evaluation_order (tree node)
/* Syntax-only compilation generates no code, so nothing to do.  */
3699 if (flag_syntax_only)
/* Only calls (possibly a call wrapped in a COMPOUND_EXPR with a
   SAVE_EXPR, as produced for ctors) have their arguments forced.  */
3701 if (TREE_CODE (node) == CALL_EXPR
3702 || (TREE_CODE (node) == COMPOUND_EXPR
3703 && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
3704 && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
3709 /* Account for wrapped around ctors. */
3710 if (TREE_CODE (node) == COMPOUND_EXPR)
3711 call = TREE_OPERAND (node, 0);
3715 nargs = call_expr_nargs (call);
3717 /* This reverses the evaluation order. This is a desired effect. */
3718 for (i = 0, cmp = NULL_TREE; i < nargs; i++)
3720 tree arg = CALL_EXPR_ARG (call, i);
3721 /* Promote types smaller than integer. This is required by
3723 tree type = TREE_TYPE (arg);
3725 if (targetm.calls.promote_prototypes (type)
3726 && INTEGRAL_TYPE_P (type)
3727 && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
3728 TYPE_SIZE (integer_type_node)))
3729 arg = fold_convert (integer_type_node, arg);
/* Wrap the (recursively processed) argument in a SAVE_EXPR and chain
   it onto CMP, so every argument is evaluated before the call and
   its saved value is reused inside the CALL_EXPR.  */
3731 saved = save_expr (force_evaluation_order (arg));
3732 cmp = (cmp == NULL_TREE ? saved :
3733 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));
3735 CALL_EXPR_ARG (call, i) = saved;
3738 if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
3739 TREE_SIDE_EFFECTS (cmp) = 1;
/* Sequence the argument chain first, then the call itself;
   propagate completion and side-effect flags from NODE.  */
3743 cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
3744 if (TREE_TYPE (cmp) != void_type_node)
3745 cmp = save_expr (cmp);
3746 CAN_COMPLETE_NORMALLY (cmp) = CAN_COMPLETE_NORMALLY (node);
3747 TREE_SIDE_EFFECTS (cmp) = 1;
3754 /* Build a node to represent empty statements and blocks. */
3757 build_java_empty_stmt (void)
3759 tree t = build_empty_stmt ();
3760 CAN_COMPLETE_NORMALLY (t) = 1;
3764 /* Promote all args of integral type before generating any code. */
3767 promote_arguments (void)
/* I tracks the local-variable slot number of the current argument.  */
3771 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3772 arg != NULL_TREE; arg = TREE_CHAIN (arg), i++)
3774 tree arg_type = TREE_TYPE (arg);
/* Sub-int integral arguments are widened: assign the incoming value,
   converted to int, to the int-typed local for this slot.  */
3775 if (INTEGRAL_TYPE_P (arg_type)
3776 && TYPE_PRECISION (arg_type) < 32)
3778 tree copy = find_local_variable (i, integer_type_node, -1);
3779 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3781 fold_convert (integer_type_node, arg)));
/* NOTE(review): wide (long/double) arguments occupy two slots;
   presumably the (omitted) body of this `if' bumps I once more —
   confirm against the full source.  */
3783 if (TYPE_IS_WIDE (arg_type))
3788 /* Create a local variable that points to the constant pool. */
3791 cache_cpool_data_ref (void)
3796 tree d = build_constant_data_ref (flag_indirect_classes);
/* Declare an anonymous local holding a pointer to the constant-pool
   data and register it with the current function.  */
3797 tree cpool_ptr = build_decl (VAR_DECL, NULL_TREE,
3798 build_pointer_type (TREE_TYPE (d)));
3799 java_add_local_var (cpool_ptr);
/* The pointer is assigned once below; mark it invariant/constant
   for the optimizers.  */
3800 TREE_INVARIANT (cpool_ptr) = 1;
3801 TREE_CONSTANT (cpool_ptr) = 1;
/* cpool_ptr = &<constant data>; then cache *cpool_ptr as the
   canonical constant-pool reference for OUTPUT_CLASS.  */
3803 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3804 cpool_ptr, build_address_of (d)));
3805 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
/* Dereferencing this cached pointer is marked as unable to trap.  */
3806 TREE_THIS_NOTRAP (cpool) = 1;
3807 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3811 #include "gt-java-expr.h"