1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
25 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
29 #include "coretypes.h"
36 #include "java-tree.h"
38 #include "java-opcodes.h"
40 #include "java-except.h"
45 #include "tree-gimple.h"
/* Forward declarations for the bytecode-expansion helpers local to
   this translation unit.  */
48 static void flush_quick_stack (void);
49 static void push_value (tree);
50 static tree pop_value (tree);
51 static void java_stack_swap (void);
52 static void java_stack_dup (int, int);
53 static void build_java_athrow (tree);
54 static void build_java_jsr (int, int);
55 static void build_java_ret (tree);
56 static void expand_java_multianewarray (tree, int);
57 static void expand_java_arraystore (tree);
58 static void expand_java_arrayload (tree);
59 static void expand_java_array_length (void);
60 static tree build_java_monitor (tree, tree);
61 static void expand_java_pushc (int, tree);
62 static void expand_java_return (tree);
63 static void expand_load_internal (int, tree, int);
64 static void expand_java_NEW (tree);
65 static void expand_java_INSTANCEOF (tree);
66 static void expand_java_CHECKCAST (tree);
67 static void expand_iinc (unsigned int, int, int);
68 static void expand_java_binop (tree, enum tree_code);
69 static void note_label (int, int);
70 static void expand_compare (enum tree_code, tree, tree, int);
71 static void expand_test (enum tree_code, tree, int);
72 static void expand_cond (enum tree_code, tree, int);
73 static void expand_java_goto (int);
74 static tree expand_java_switch (tree, int);
75 static void expand_java_add_case (tree, int, int);
76 static tree pop_arguments (tree);
77 static void expand_invoke (int, int, int);
78 static void expand_java_field_op (int, int, int);
79 static void java_push_constant_from_pool (struct JCF *, int);
80 static void java_stack_pop (int);
81 static tree build_java_throw_out_of_bounds_exception (tree);
82 static tree build_java_check_indexed_type (tree, tree);
83 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
84 static void promote_arguments (void);
85 static void cache_cpool_data_ref (void);
/* Operand type, indexed by JVM opcode number; entries are filled in
   by init_expr_processing for the load/store opcode families.  */
87 static GTY(()) tree operand_type[59];
/* Lazily-created identifier nodes shared across this file.  */
89 static GTY(()) tree methods_ident;
90 static GTY(()) tree ncode_ident;
91 tree dtable_ident = NULL_TREE;
93 /* Set to nonzero value in order to emit class initialization code
94 before static field references. */
95 int always_initialize_class_p = 0;
97 /* We store the stack state in two places:
98 Within a basic block, we use the quick_stack, which is a
99 pushdown list (TREE_LISTs) of expression nodes.
100 This is the top part of the stack; below that we use find_stack_slot.
101 At the end of a basic block, the quick_stack must be flushed
102 to the stack slot array (as handled by find_stack_slot).
103 Using quick_stack generates better code (especially when
104 compiled without optimization), because we do not have to
105 explicitly store and load trees to temporary variables.
107 If a variable is on the quick stack, it means the value of variable
108 when the quick stack was last flushed. Conceptually, flush_quick_stack
109 saves all the quick_stack elements in parallel. However, that is
110 complicated, so it actually saves them (i.e. copies each stack value
111 to its home virtual register) from low indexes. This allows a quick_stack
112 element at index i (counting from the bottom of the stack) to reference
113 slot virtual registers that are >= i, but not those that are deeper.
114 This convention makes most operations easier. For example iadd works
115 even when the stack contains (reg[0], reg[1]): It results in the
116 stack containing (reg[0]+reg[1]), which is OK. However, some stack
117 operations are more complicated. For example dup given a stack
118 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
119 the convention, since stack value 1 would refer to a register with
120 lower index (reg[0]), which flush_quick_stack does not safely handle.
121 So dup cannot just add an extra element to the quick_stack, but iadd can.
/* The quick stack itself: a TREE_LIST pushdown list (see the design
   comment above).  */
124 static GTY(()) tree quick_stack;
126 /* A free-list of unused permanent TREE_LIST nodes. */
127 static GTY((deletable)) tree tree_list_free_list;
129 /* The physical memory page size used in this computer. See
130 build_field_ref(). */
131 static GTY(()) tree page_size;
133 /* The stack pointer of the Java virtual machine.
134 This does include the size of the quick_stack. */
/* NOTE(review): the comment above describes `stack_pointer', whose
   declaration is elsewhere.  The table below holds the raw
   LineNumberTable bytes of the method being compiled.  */
138 const unsigned char *linenumber_table;
139 int linenumber_count;
141 /* Largest pc so far in this method that has been passed to lookup_label. */
142 int highest_label_pc_this_method = -1;
144 /* Base value for this method to add to pc to get generated label. */
145 int start_label_pc_this_method = 0;
/* One-time initialization of the operand_type[] table: indexes 21-25
   are the JVM load opcodes (iload, lload, fload, dload, aload) and
   54-58 the matching store opcodes (istore..astore).  */
148 init_expr_processing (void)
150 operand_type[21] = operand_type[54] = int_type_node;
151 operand_type[22] = operand_type[55] = long_type_node;
152 operand_type[23] = operand_type[56] = float_type_node;
153 operand_type[24] = operand_type[57] = double_type_node;
154 operand_type[25] = operand_type[58] = ptr_type_node;
/* Convert EXPR to a boolean (truth-value) expression.  */
158 java_truthvalue_conversion (tree expr)
160 /* It is simpler and generates better code to have only TRUTH_*_EXPR
161 or comparison expressions as truth values at this level.
163 This function should normally be identity for Java. */
165 switch (TREE_CODE (expr))
167 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
168 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
169 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
170 case ORDERED_EXPR: case UNORDERED_EXPR:
171 case TRUTH_ANDIF_EXPR:
172 case TRUTH_ORIF_EXPR:
/* Integer constant: fold to a boolean constant node.  */
181 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
/* Real constant: likewise.  */
184 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
186 /* are these legal? XXX JH */
190 /* These don't change whether an object is nonzero or zero. */
191 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
194 /* Distribute the conversion into the arms of a COND_EXPR. */
195 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
196 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
197 java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
200 /* If this is widening the argument, we can ignore it. */
201 if (TYPE_PRECISION (TREE_TYPE (expr))
202 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
203 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
204 /* fall through to default */
/* Default: explicit comparison against Java `false'.  */
207 return fold_build2 (NE_EXPR, boolean_type_node,
208 expr, boolean_false_node);
212 /* Save any stack slots that happen to be in the quick_stack into their
213 home virtual register slots.
215 The copy order is from low stack index to high, to support the invariant
216 that the expression for a slot may contain decls for stack slots with
217 higher (or the same) index, but not lower. */
220 flush_quick_stack (void)
222 int stack_index = stack_pointer;
223 tree prev, cur, next;
225 /* First reverse the quick_stack, and count the number of slots it has. */
226 for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next)
228 next = TREE_CHAIN (cur);
229 TREE_CHAIN (cur) = prev;
/* Each entry occupies one slot, or two for wide (long/double) types.  */
231 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (TREE_VALUE (cur)));
/* Now pop each value: recycle its TREE_LIST node onto the free list,
   then store the value into its home stack-slot decl.  */
235 while (quick_stack != NULL_TREE)
238 tree node = quick_stack, type;
239 quick_stack = TREE_CHAIN (node);
240 TREE_CHAIN (node) = tree_list_free_list;
241 tree_list_free_list = node;
242 node = TREE_VALUE (node);
243 type = TREE_TYPE (node);
245 decl = find_stack_slot (stack_index, type);
247 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (node), decl, node));
248 stack_index += 1 + TYPE_IS_WIDE (type);
252 /* Push TYPE on the type stack.
253 Return 1 on success, 0 on overflow. */
256 push_type_0 (tree type)
259 type = promote_type (type);
260 n_words = 1 + TYPE_IS_WIDE (type);
/* NOTE(review): the overflow bail-out (returning 0) is elided in this
   view; confirm it precedes the slot allocation below.  */
261 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
263 /* Allocate decl for this variable now, so we get a temporary that
264 survives the whole method. */
265 find_stack_slot (stack_pointer, type);
266 stack_type_map[stack_pointer++] = type;
/* Wide types occupy a second slot, marked TYPE_SECOND.  */
268 while (--n_words >= 0)
269 stack_type_map[stack_pointer++] = TYPE_SECOND;
/* Push TYPE on the type stack; overflow is not expected here.
   NOTE(review): the check of R is elided in this view.  */
274 push_type (tree type)
276 int r = push_type_0 (type);
/* Push VALUE onto the quick stack, first promoting narrow integral
   types (sub-int) to int, as the JVM operand stack requires.  */
281 push_value (tree value)
283 tree type = TREE_TYPE (value);
284 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
286 type = promote_type (type);
287 value = convert (type, value);
/* Reuse a TREE_LIST node from the free list when available.  */
290 if (tree_list_free_list == NULL_TREE)
291 quick_stack = tree_cons (NULL_TREE, value, quick_stack);
294 tree node = tree_list_free_list;
295 tree_list_free_list = TREE_CHAIN (tree_list_free_list);
296 TREE_VALUE (node) = value;
297 TREE_CHAIN (node) = quick_stack;
300 /* If the value has a side effect, then we need to evaluate it
301 whether or not the result is used. If the value ends up on the
302 quick stack and is then popped, this won't happen -- so we flush
303 the quick stack. It is safest to simply always flush, though,
304 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
305 the latter we may need to strip conversions. */
306 flush_quick_stack ();
309 /* Pop a type from the type stack.
310 TYPE is the expected type. Return the actual type, which must be
312 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
315 pop_type_0 (tree type, char **messagep)
320 if (TREE_CODE (type) == RECORD_TYPE)
321 type = promote_type (type);
322 n_words = 1 + TYPE_IS_WIDE (type);
323 if (stack_pointer < n_words)
325 *messagep = xstrdup ("stack underflow");
/* A wide type's second slot must be the TYPE_SECOND filler
   (void_type_node here).  */
328 while (--n_words > 0)
330 if (stack_type_map[--stack_pointer] != void_type_node)
332 *messagep = xstrdup ("Invalid multi-word value on type stack");
336 t = stack_type_map[--stack_pointer];
/* Exact match, or caller does not care about the type.  */
337 if (type == NULL_TREE || t == type)
339 if (TREE_CODE (t) == TREE_LIST)
343 tree tt = TREE_PURPOSE (t);
344 if (! can_widen_reference_to (tt, type))
/* Narrow integral types are interchangeable on the JVM stack.  */
354 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
355 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
357 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
359 /* If the expected type we've been passed is object or ptr
360 (i.e. void*), the caller needs to know the real type. */
361 if (type == ptr_type_node || type == object_ptr_type_node)
364 /* Since the verifier has already run, we know that any
365 types we see will be compatible. In BC mode, this fact
366 may be checked at runtime, but if that is so then we can
367 assume its truth here as well. So, we always succeed
368 here, with the expected type. */
372 if (! flag_verify_invocations && flag_indirect_dispatch
373 && t == object_ptr_type_node)
375 if (type != ptr_type_node)
376 warning (0, "need to insert runtime check for %s",
377 xstrdup (lang_printable_name (type, 0)));
/* Fall-through failure path: build the diagnostic message.  */
381 /* lang_printable_name uses a static buffer, so we must save the result
382 from calling it the first time. */
385 char *temp = xstrdup (lang_printable_name (type, 0));
386 /* If the stack contains a multi-word type, keep popping the stack until
387 the real type is found. */
388 while (t == void_type_node)
389 t = stack_type_map[--stack_pointer];
390 *messagep = concat ("expected type '", temp,
391 "' but stack contains '", lang_printable_name (t, 0),
398 /* Pop a type from the type stack.
399 TYPE is the expected type. Return the actual type, which must be
400 convertible to TYPE, otherwise call error. */
405 char *message = NULL;
406 type = pop_type_0 (type, &message);
/* A non-NULL message means pop_type_0 failed; report it.  */
409 error ("%s", message);
416 /* Return true if two type assertions are equal. */
419 type_assertion_eq (const void * k1_p, const void * k2_p)
421 const type_assertion k1 = *(const type_assertion *)k1_p;
422 const type_assertion k2 = *(const type_assertion *)k2_p;
/* NOTE(review): the comparison of op1 appears to be elided in this
   view -- confirm both operands are compared.  */
423 return (k1.assertion_code == k2.assertion_code
425 && k1.op2 == k2.op2);
428 /* Hash a type assertion. */
431 type_assertion_hash (const void *p)
433 const type_assertion *k_p = p;
434 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
435 k_p->assertion_code, 0);
/* Mix in the TYPE_UIDs of the operands that are relevant for the
   given assertion code.  */
437 switch (k_p->assertion_code)
439 case JV_ASSERT_TYPES_COMPATIBLE:
440 hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2),
444 case JV_ASSERT_IS_INSTANTIABLE:
445 hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1),
449 case JV_ASSERT_END_OF_TABLE:
459 /* Add an entry to the type assertion table for the given class.
460 CLASS is the class for which this assertion will be evaluated by the
461 runtime during loading/initialization.
462 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
463 OP1 and OP2 are the operands. The tree type of these arguments may be
464 specific to each assertion_code. */
467 add_type_assertion (tree class, int assertion_code, tree op1, tree op2)
469 htab_t assertions_htab;
473 assertions_htab = TYPE_ASSERTIONS (class);
474 if (assertions_htab == NULL)
476 assertions_htab = htab_create_ggc (7, type_assertion_hash,
477 type_assertion_eq, NULL);
478 TYPE_ASSERTIONS (current_class) = assertions_htab;
481 as.assertion_code = assertion_code;
485 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
487 /* Don't add the same assertion twice. */
491 *as_pp = ggc_alloc (sizeof (type_assertion));
492 **(type_assertion **)as_pp = as;
496 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
497 Handles array types and interfaces. */
500 can_widen_reference_to (tree source_type, tree target_type)
/* Anything widens to `Object' or the untyped pointer.  */
502 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
505 /* Get rid of pointers */
506 if (TREE_CODE (source_type) == POINTER_TYPE)
507 source_type = TREE_TYPE (source_type);
508 if (TREE_CODE (target_type) == POINTER_TYPE)
509 target_type = TREE_TYPE (target_type);
511 if (source_type == target_type)
514 /* FIXME: This is very pessimistic, in that it checks everything,
515 even if we already know that the types are compatible. If we're
516 to support full Java class loader semantics, we need this.
517 However, we could do something more optimal. */
518 if (! flag_verify_invocations)
520 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
521 source_type, target_type);
524 warning (0, "assert: %s is assign compatible with %s",
525 xstrdup (lang_printable_name (target_type, 0)),
526 xstrdup (lang_printable_name (source_type, 0)));
527 /* Punt everything to runtime. */
/* Dummy (not-yet-loaded) types cannot be decided at compile time.  */
531 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
537 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
539 HOST_WIDE_INT source_length, target_length;
540 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
542 /* An array implements Cloneable and Serializable. */
543 tree name = DECL_NAME (TYPE_NAME (target_type));
544 return (name == java_lang_cloneable_identifier_node
545 || name == java_io_serializable_identifier_node);
547 target_length = java_array_type_length (target_type);
548 if (target_length >= 0)
550 source_length = java_array_type_length (source_type);
551 if (source_length != target_length)
/* Recurse on the element types.  */
554 source_type = TYPE_ARRAY_ELEMENT (source_type);
555 target_type = TYPE_ARRAY_ELEMENT (target_type);
556 if (source_type == target_type)
558 if (TREE_CODE (source_type) != POINTER_TYPE
559 || TREE_CODE (target_type) != POINTER_TYPE)
561 return can_widen_reference_to (source_type, target_type);
565 int source_depth = class_depth (source_type);
566 int target_depth = class_depth (target_type);
568 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
571 warning (0, "assert: %s is assign compatible with %s",
572 xstrdup (lang_printable_name (target_type, 0)),
573 xstrdup (lang_printable_name (source_type, 0)));
577 /* class_depth can return a negative depth if an error occurred */
578 if (source_depth < 0 || target_depth < 0)
581 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
583 /* target_type is OK if source_type or source_type ancestors
584 implement target_type. We handle multiple sub-interfaces */
585 tree binfo, base_binfo;
588 for (binfo = TYPE_BINFO (source_type), i = 0;
589 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
590 if (can_widen_reference_to
591 (BINFO_TYPE (base_binfo), target_type))
/* Plain class: walk up SOURCE_TYPE's superclass chain to the
   depth of TARGET_TYPE and compare.  */
598 for ( ; source_depth > target_depth; source_depth--)
601 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
603 return source_type == target_type;
/* Pop a value of (expected) TYPE: take it from the quick stack when
   non-empty, otherwise load it from its home stack-slot decl.
   NOTE(review): the if/else structure is partly elided in this view.  */
609 pop_value (tree type)
611 type = pop_type (type);
614 tree node = quick_stack;
615 quick_stack = TREE_CHAIN (quick_stack);
/* Recycle the TREE_LIST cell onto the free list.  */
616 TREE_CHAIN (node) = tree_list_free_list;
617 tree_list_free_list = node;
618 node = TREE_VALUE (node);
622 return find_stack_slot (stack_pointer, promote_type (type));
626 /* Pop and discard the top COUNT stack slots. */
629 java_stack_pop (int count)
635 gcc_assert (stack_pointer != 0);
637 type = stack_type_map[stack_pointer - 1];
/* A TYPE_SECOND marker means the top slot is the second half of a
   wide (long/double) value; pop both halves together.  */
638 if (type == TYPE_SECOND)
641 gcc_assert (stack_pointer != 1 && count > 0);
643 type = stack_type_map[stack_pointer - 2];
645 val = pop_value (type);
650 /* Implement the 'swap' operator (to swap two top stack slots). */
653 java_stack_swap (void)
/* Only two single-word values may be swapped.  */
659 if (stack_pointer < 2
660 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
661 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
662 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
663 /* Bad stack swap. */
665 /* Bad stack swap. */
/* Swap via a temporary local so the stores do not clobber each other.  */
667 flush_quick_stack ();
668 decl1 = find_stack_slot (stack_pointer - 1, type1);
669 decl2 = find_stack_slot (stack_pointer - 2, type2);
670 temp = build_decl (VAR_DECL, NULL_TREE, type1);
671 java_add_local_var (temp);
672 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
673 java_add_stmt (build2 (MODIFY_EXPR, type2,
674 find_stack_slot (stack_pointer - 1, type2),
676 java_add_stmt (build2 (MODIFY_EXPR, type1,
677 find_stack_slot (stack_pointer - 2, type1),
679 stack_type_map[stack_pointer - 1] = type2;
680 stack_type_map[stack_pointer - 2] = type1;
/* Implement the dup/dup_x1/dup_x2/dup2... family: duplicate the top
   SIZE stack words, inserting the copy OFFSET words down.  */
684 java_stack_dup (int size, int offset)
686 int low_index = stack_pointer - size - offset;
689 error ("stack underflow - dup* operation");
691 flush_quick_stack ();
693 stack_pointer += size;
694 dst_index = stack_pointer;
/* Copy slots downward; a source index below LOW_INDEX wraps to the
   region being duplicated.  */
696 for (dst_index = stack_pointer; --dst_index >= low_index; )
699 int src_index = dst_index - size;
700 if (src_index < low_index)
701 src_index = dst_index + size + offset;
702 type = stack_type_map [src_index];
703 if (type == TYPE_SECOND)
705 /* Dup operation splits 64-bit number. */
706 gcc_assert (src_index > low_index);
708 stack_type_map[dst_index] = type;
/* Move both halves of the wide value together.  */
709 src_index--; dst_index--;
710 type = stack_type_map[src_index];
711 gcc_assert (TYPE_IS_WIDE (type));
714 gcc_assert (! TYPE_IS_WIDE (type));
716 if (src_index != dst_index)
718 tree src_decl = find_stack_slot (src_index, type);
719 tree dst_decl = find_stack_slot (dst_index, type);
722 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
723 stack_type_map[dst_index] = type;
728 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
732 build_java_athrow (tree node)
736 call = build_call_nary (void_type_node,
737 build_address_of (throw_node),
739 TREE_SIDE_EFFECTS (call) = 1;
740 java_add_stmt (call);
/* The throw never returns, so the whole operand stack is dead.  */
741 java_stack_pop (stack_pointer);
744 /* Implementation for jsr/ret */
747 build_java_jsr (int target_pc, int return_pc)
749 tree where = lookup_label (target_pc);
750 tree ret = lookup_label (return_pc);
/* Push the return address so the subroutine's `ret' can use it.  */
751 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
752 push_value (ret_label);
753 flush_quick_stack ();
754 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
756 /* Do not need to emit the label here. We noted the existence of the
757 label as a jump target in note_instructions; we'll emit the label
758 for real at the beginning of the expand_byte_code loop. */
/* Implement `ret': a computed jump to the return address in LOCATION.  */
762 build_java_ret (tree location)
764 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
767 /* Implementation of operations on array: new, load, store, length */
/* Map an OPCODE_newarray type code to the corresponding primitive
   type node, or NULL_TREE for an unknown code.  */
770 decode_newarray_type (int atype)
774 case 4: return boolean_type_node;
775 case 5: return char_type_node;
776 case 6: return float_type_node;
777 case 7: return double_type_node;
778 case 8: return byte_type_node;
779 case 9: return short_type_node;
780 case 10: return int_type_node;
781 case 11: return long_type_node;
782 default: return NULL_TREE;
786 /* Map primitive type to the code used by OPCODE_newarray.
   (Inverse of decode_newarray_type.  NOTE(review): the returned codes
   are elided in this view -- presumably 4..11 matching the decoder.) */
789 encode_newarray_type (tree type)
791 if (type == boolean_type_node)
793 else if (type == char_type_node)
795 else if (type == float_type_node)
797 else if (type == double_type_node)
799 else if (type == byte_type_node)
801 else if (type == short_type_node)
803 else if (type == int_type_node)
805 else if (type == long_type_node)
811 /* Build a call to _Jv_ThrowBadArrayIndex(), the
812 ArrayIndexOutOfBoundsException exception handler. */
815 build_java_throw_out_of_bounds_exception (tree index)
817 tree node = build_call_nary (int_type_node,
818 build_address_of (soft_badarrayindex_node),
820 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
824 /* Return the length of an array. Doesn't perform any checking on the nature
825 or value of the array NODE. May be used to implement some bytecodes. */
828 build_java_array_length_access (tree node)
830 tree type = TREE_TYPE (node);
831 tree array_type = TREE_TYPE (type);
832 HOST_WIDE_INT length;
834 if (!is_array_type_p (type))
836 /* With the new verifier, we will see an ordinary pointer type
837 here. In this case, we just use an arbitrary array type. */
838 array_type = build_java_array_type (object_ptr_type_node, -1);
839 type = promote_type (array_type);
/* If the length is statically known, emit it as a constant.  */
842 length = java_array_type_length (type);
844 return build_int_cst (NULL_TREE, length);
/* Otherwise load the `length' field of the array object.  */
846 node = build3 (COMPONENT_REF, int_type_node,
847 build_java_indirect_ref (array_type, node,
848 flag_check_references),
849 lookup_field (&array_type, get_identifier ("length")),
851 IS_ARRAY_LENGTH_ACCESS (node) = 1;
855 /* Optionally checks a reference against the NULL pointer. ARG1: the
856 expr, ARG2: we should check the reference. Don't generate extra
857 checks if we're not generating code. */
860 java_check_reference (tree expr, int check)
862 if (!flag_syntax_only && check)
/* Wrap EXPR as: (EXPR == NULL ? throw NullPointerException : EXPR).
   NOTE(review): the COND_EXPR's third operand is elided in this view.  */
864 expr = save_expr (expr);
865 expr = build3 (COND_EXPR, TREE_TYPE (expr),
866 build2 (EQ_EXPR, boolean_type_node,
867 expr, null_pointer_node),
868 build_call_nary (void_type_node,
869 build_address_of (soft_nullpointer_node),
877 /* Reference an object: just like an INDIRECT_REF, but with checking. */
880 build_java_indirect_ref (tree type, tree expr, int check)
883 t = java_check_reference (expr, check);
884 t = convert (build_pointer_type (type), t);
885 return build1 (INDIRECT_REF, type, t);
888 /* Implement array indexing (either as l-value or r-value).
889 Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
890 Optionally performs bounds checking and/or test to NULL.
891 At this point, ARRAY should have been verified as an array. */
894 build_java_arrayaccess (tree array, tree type, tree index)
896 tree node, throw = NULL_TREE;
899 tree array_type = TREE_TYPE (TREE_TYPE (array));
900 tree size_exp = fold_convert (sizetype, size_in_bytes (type));
902 if (!is_array_type_p (TREE_TYPE (array)))
904 /* With the new verifier, we will see an ordinary pointer type
905 here. In this case, we just use the correct array type. */
906 array_type = build_java_array_type (type, -1);
909 if (flag_bounds_check)
912 * (unsigned jint) INDEX >= (unsigned jint) LEN
913 * && throw ArrayIndexOutOfBoundsException.
914 * Note this is equivalent to and more efficient than:
915 * INDEX < 0 || INDEX >= LEN && throw ... */
917 tree len = convert (unsigned_int_type_node,
918 build_java_array_length_access (array));
919 test = fold_build2 (GE_EXPR, boolean_type_node,
920 convert (unsigned_int_type_node, index),
/* Folding may have proved the index in range; only emit the throw
   when the test did not fold to constant false.  */
922 if (! integer_zerop (test))
924 throw = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
925 build_java_throw_out_of_bounds_exception (index));
926 /* allows expansion within COMPOUND */
927 TREE_SIDE_EFFECTS( throw ) = 1;
931 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
932 to have the bounds check evaluated first. */
933 if (throw != NULL_TREE)
934 index = build2 (COMPOUND_EXPR, int_type_node, throw, index);
936 data_field = lookup_field (&array_type, get_identifier ("data"));
938 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
939 build_java_indirect_ref (array_type, array,
940 flag_check_references),
941 data_field, NULL_TREE);
943 /* Take the address of the data field and convert it to a pointer to
945 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
947 /* Multiply the index by the size of an element to obtain a byte
948 offset. Convert the result to a pointer to the element type. */
949 index = build2 (MULT_EXPR, sizetype,
950 fold_convert (sizetype, index),
953 /* Sum the byte offset and the address of the data field. */
954 node = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (node), node, index);
958 *((&array->data) + index*size_exp)
961 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
964 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
965 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
966 determine that no check is required. */
969 build_java_arraystore_check (tree array, tree object)
971 tree check, element_type, source;
972 tree array_type_p = TREE_TYPE (array);
973 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
975 if (! flag_verify_invocations)
977 /* With the new verifier, we don't track precise types. FIXME:
978 performance regression here. */
979 element_type = TYPE_NAME (object_type_node);
983 gcc_assert (is_array_type_p (array_type_p));
985 /* Get the TYPE_DECL for ARRAY's element type. */
987 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
990 gcc_assert (TREE_CODE (element_type) == TYPE_DECL
991 && TREE_CODE (object_type) == TYPE_DECL);
/* Store checks disabled: emit a no-op.  */
993 if (!flag_store_check)
994 return build1 (NOP_EXPR, array_type_p, array);
996 /* No check is needed if the element type is final. Also check that
997 element_type matches object_type, since in the bytecode
998 compilation case element_type may be the actual element type of
999 the array rather than its declared type. However, if we're doing
1000 indirect dispatch, we can't do the `final' optimization. */
1001 if (element_type == object_type
1002 && ! flag_indirect_dispatch
1003 && CLASS_FINAL (element_type))
1004 return build1 (NOP_EXPR, array_type_p, array);
1006 /* OBJECT might be wrapped by a SAVE_EXPR. */
1007 if (TREE_CODE (object) == SAVE_EXPR)
1008 source = TREE_OPERAND (object, 0);
1012 /* Avoid the check if OBJECT was just loaded from the same array. */
1013 if (TREE_CODE (source) == ARRAY_REF)
1016 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
1017 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
1018 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1019 if (TREE_CODE (source) == SAVE_EXPR)
1020 source = TREE_OPERAND (source, 0);
1023 if (TREE_CODE (target) == SAVE_EXPR)
1024 target = TREE_OPERAND (target, 0);
/* Same array on both sides: the element is trivially assignable.  */
1026 if (source == target)
1027 return build1 (NOP_EXPR, array_type_p, array);
1030 /* Build an invocation of _Jv_CheckArrayStore */
1031 check = build_call_nary (void_type_node,
1032 build_address_of (soft_checkarraystore_node),
1034 TREE_SIDE_EFFECTS (check) = 1;
1039 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1040 ARRAY_NODE. This function is used to retrieve something less vague than
1041 a pointer type when indexing the first dimension of something like [[<t>.
1042 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
1043 returned unchanged. */
1046 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1049 /* We used to check to see if ARRAY_NODE really had array type.
1050 However, with the new verifier, this is not necessary, as we know
1051 that the object will be an array of the appropriate type. */
1053 return indexed_type;
1056 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1057 called with an integer code (the type of array to create), and the length
1058 of the array to create. */
1061 build_newarray (int atype_value, tree length)
1065 tree prim_type = decode_newarray_type (atype_value);
1067 = build_java_array_type (prim_type,
1068 host_integerp (length, 0) == INTEGER_CST
1069 ? tree_low_cst (length, 0) : -1);
1071 /* Pass a reference to the primitive type class and save the runtime
1073 type_arg = build_class_ref (prim_type);
1075 return build_call_nary (promote_type (type),
1076 build_address_of (soft_newarray_node),
1077 2, type_arg, length);
1080 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1081 of the dimension. */
1084 build_anewarray (tree class_type, tree length)
/* Use the constant length when statically known, else -1 (unknown).  */
1087 = build_java_array_type (class_type,
1088 host_integerp (length, 0)
1089 ? tree_low_cst (length, 0) : -1);
1091 return build_call_nary (promote_type (type),
1092 build_address_of (soft_anewarray_node),
1095 build_class_ref (class_type),
1099 /* Return a node the evaluates 'new TYPE[LENGTH]'. */
1102 build_new_array (tree type, tree length)
1104 if (JPRIMITIVE_TYPE_P (type))
1105 return build_newarray (encode_newarray_type (type), length);
1107 return build_anewarray (TREE_TYPE (type), length);
1110 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1111 class pointer, a number of dimensions and the matching number of
1112 dimensions. The argument list is NULL terminated. */
1115 expand_java_multianewarray (tree class_type, int ndim)
1118 tree args = build_tree_list( NULL_TREE, null_pointer_node );
/* Pop the NDIM dimension sizes off the operand stack (innermost
   dimension first, hence the cons order).  */
1120 for( i = 0; i < ndim; i++ )
1121 args = tree_cons (NULL_TREE, pop_value (int_type_node), args);
1123 args = tree_cons (NULL_TREE,
1124 build_class_ref (class_type),
1125 tree_cons (NULL_TREE,
1126 build_int_cst (NULL_TREE, ndim),
1129 push_value (build_call_list (promote_type (class_type),
1130 build_address_of (soft_multianewarray_node),
1134 /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
1135 ARRAY is an array type. May expand some bound checking and NULL
1136 pointer checking. RHS_TYPE_NODE we are going to store. In the case
1137 of the CHAR/BYTE/BOOLEAN/SHORT, the type popped off the stack is an
1138 INT. In those cases, we make the conversion.
1140 if ARRAY is a reference type, the assignment is checked at run-time
1141 to make sure that the RHS can be assigned to the array element
1142 type. It is not necessary to generate this code if ARRAY is final. */
1145 expand_java_arraystore (tree rhs_type_node)
1147 tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
1148 && TYPE_PRECISION (rhs_type_node) <= 32) ?
1149 int_type_node : rhs_type_node);
1150 tree index = pop_value (int_type_node);
1151 tree array_type, array, temp, access;
1153 /* If we're processing an `aaload' we might as well just pick
1155 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1157 array_type = build_java_array_type (object_ptr_type_node, -1);
1158 rhs_type_node = object_ptr_type_node;
1161 array_type = build_java_array_type (rhs_type_node, -1);
1163 array = pop_value (array_type);
1164 array = build1 (NOP_EXPR, promote_type (array_type), array);
1166 rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
1168 flush_quick_stack ();
/* INDEX and ARRAY are evaluated more than once below.  */
1170 index = save_expr (index);
1171 array = save_expr (array);
1173 /* We want to perform the bounds check (done by
1174 build_java_arrayaccess) before the type check (done by
1175 build_java_arraystore_check). So, we call build_java_arrayaccess
1176 -- which returns an ARRAY_REF lvalue -- and we then generate code
1177 to stash the address of that lvalue in a temp. Then we call
1178 build_java_arraystore_check, and finally we generate a
1179 MODIFY_EXPR to set the array element. */
1181 access = build_java_arrayaccess (array, rhs_type_node, index);
1182 temp = build_decl (VAR_DECL, NULL_TREE,
1183 build_pointer_type (TREE_TYPE (access)));
1184 java_add_local_var (temp);
1185 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
1187 build_fold_addr_expr (access)));
/* Reference stores need the runtime assignability check.  */
1189 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1191 tree check = build_java_arraystore_check (array, rhs_node);
1192 java_add_stmt (check);
1195 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
1196 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
1200 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1201    sure that LHS is an array type. May expand some bound checking and NULL
1203    LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1204    BOOLEAN/SHORT, we push a promoted type back to the stack.
/* NOTE(review): listing is elided -- some original source lines are not
   shown here.  */
1208 expand_java_arrayload (tree lhs_type_node)
1211 tree index_node = pop_value (int_type_node);
1215 /* If we're processing an `aaload' we might as well just pick
1217 if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1219 array_type = build_java_array_type (object_ptr_type_node, -1);
1220 lhs_type_node = object_ptr_type_node;
1223 array_type = build_java_array_type (lhs_type_node, -1);
1224 array_node = pop_value (array_type);
1225 array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
1227 index_node = save_expr (index_node);
1228 array_node = save_expr (array_node);
1230 lhs_type_node = build_java_check_indexed_type (array_node,
1232 load_node = build_java_arrayaccess (array_node,
/* Sub-int integral results are widened to int before being pushed, matching
   the JVM operand-stack model.  */
1235 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1236 load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1237 push_value (load_node);
1240 /* Expands .length. Makes sure that we deal with an array and may expand
1241    a NULL check on the array object. */
/* NOTE(review): pops the array reference and pushes its length.  Listing is
   elided -- some original source lines are not shown here.  */
1244 expand_java_array_length (void)
1246 tree array = pop_value (ptr_type_node);
1247 tree length = build_java_array_length_access (array);
1249 push_value (length);
1252 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1253    either soft_monitorenter_node or soft_monitorexit_node. */
/* NOTE(review): returns a void call expression on OBJECT.  Listing is
   elided -- some original source lines are not shown here.  */
1256 build_java_monitor (tree call, tree object)
1258 return build_call_nary (void_type_node,
1259 build_address_of (call),
1263 /* Emit code for one of the PUSHC instructions. */
/* NOTE(review): builds the constant VALUE for IVAL according to TYPE
   (null pointer, integer constant, or real constant).  Listing is elided --
   some original source lines (including the final push) are not shown.  */
1266 expand_java_pushc (int ival, tree type)
1269 if (type == ptr_type_node && ival == 0)
1270 value = null_pointer_node;
1271 else if (type == int_type_node || type == long_type_node)
1272 value = build_int_cst (type, ival);
1273 else if (type == float_type_node || type == double_type_node)
/* Convert the integer constant to the target's floating representation.  */
1276 REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1277 value = build_real (type, x);
/* NOTE(review): emit a RETURN_EXPR for the current method; for non-void
   TYPE the return value is popped from the quick stack.  Listing is elided --
   some original source lines are not shown here.  */
1286 expand_java_return (tree type)
1288 if (type == void_type_node)
1289 java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1292 tree retval = pop_value (type);
1293 tree res = DECL_RESULT (current_function_decl);
1294 retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1296 /* Handle the situation where the native integer type is smaller
1297 than the JVM integer. It can happen for many cross compilers.
1298 The whole if expression just goes away if INT_TYPE_SIZE < 32
1300 if (INT_TYPE_SIZE < 32
1301 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1302 < GET_MODE_SIZE (TYPE_MODE (type))))
1303 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1305 TREE_SIDE_EFFECTS (retval) = 1;
1306 java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
/* NOTE(review): expand a load of local variable INDEX with TYPE at PC onto
   the quick stack, via a fresh copy to defeat later aliasing (see comment
   below).  Listing is elided -- some original source lines are not shown.  */
1311 expand_load_internal (int index, tree type, int pc)
1314 tree var = find_local_variable (index, type, pc);
1316 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1317 on the stack. If there is an assignment to this VAR_DECL between
1318 the stack push and the use, then the wrong code could be
1319 generated. To avoid this we create a new local and copy our
1320 value into it. Then we push this new local on the stack.
1321 Hopefully this all gets optimized out. */
1322 copy = build_decl (VAR_DECL, NULL_TREE, type);
/* Reconcile the declared slot type with the requested TYPE where the
   verifier's notion and ours differ.  */
1323 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1324 && TREE_TYPE (copy) != TREE_TYPE (var))
1325 var = convert (type, var);
1326 java_add_local_var (copy);
1327 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
/* Take the address of VALUE, with the matching pointer type.  */
1333 build_address_of (tree value)
1335 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
/* True if TYPE or any of its superclasses declares a finalizer; used to
   choose between the finalizing and non-finalizing allocators.  */
1339 class_has_finalize_method (tree type)
1341 tree super = CLASSTYPE_SUPER (type);
1343 if (super == NULL_TREE)
1344 return false; /* Every class with a real finalizer inherits */
1345 /* from java.lang.Object. */
1347 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
/* Build a call allocating a new instance of TYPE, picking the allocator
   based on whether TYPE has a finalizer.  */
1351 java_create_object (tree type)
1353 tree alloc_node = (class_has_finalize_method (type)
1355 : alloc_no_finalizer_node);
1357 return build_call_nary (promote_type (type),
1358 build_address_of (alloc_node),
1359 1, build_class_ref (type));
/* Expand the `new' opcode: load/lay out TYPE if necessary, then push the
   allocation call.  NOTE(review): listing is elided -- some original source
   lines are not shown here.  */
1363 expand_java_NEW (tree type)
1367 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1368 : alloc_no_finalizer_node);
1369 if (! CLASS_LOADED_P (type))
1370 load_class (type, 1);
1371 safe_layout_class (type);
1372 push_value (build_call_nary (promote_type (type),
1373 build_address_of (alloc_node),
1374 1, build_class_ref (type)));
1377 /* This returns an expression which will extract the class of an
/* NOTE(review): reads VALUE->vtable->class, i.e. the run-time class of the
   object.  Listing is elided -- some original source lines are not shown.  */
1381 build_get_class (tree value)
1383 tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1384 tree vtable_field = lookup_field (&object_type_node,
1385 get_identifier ("vtable"));
/* TMP is the object's vtable pointer, with an optional null check.  */
1386 tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1387 build_java_indirect_ref (object_type_node, value,
1388 flag_check_references),
1389 vtable_field, NULL_TREE);
1390 return build3 (COMPONENT_REF, class_ptr_type,
1391 build1 (INDIRECT_REF, dtable_type, tmp),
1392 class_field, NULL_TREE);
1395 /* This builds the tree representation of the `instanceof' operator.
1396    It tries various tricks to optimize this in cases where types are
/* NOTE(review): listing is elided -- some original source lines are not
   shown here.  */
1400 build_instanceof (tree value, tree type)
1403 tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1404 tree valtype = TREE_TYPE (TREE_TYPE (value));
1405 tree valclass = TYPE_NAME (valtype);
1408 /* When compiling from bytecode, we need to ensure that TYPE has
1410 if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1412 load_class (type, 1);
1413 safe_layout_class (type);
1414 if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1415 return error_mark_node;
1417 klass = TYPE_NAME (type);
1419 if (type == object_type_node || inherits_from_p (valtype, type))
1421 /* Anything except `null' is an instance of Object. Likewise,
1422 if the object is known to be an instance of the class, then
1423 we only need to check for `null'. */
1424 expr = build2 (NE_EXPR, itype, value, null_pointer_node);
/* Statically-disjoint non-interface classes: result is known false.  */
1426 else if (flag_verify_invocations
1427 && ! TYPE_ARRAY_P (type)
1428 && ! TYPE_ARRAY_P (valtype)
1429 && DECL_P (klass) && DECL_P (valclass)
1430 && ! CLASS_INTERFACE (valclass)
1431 && ! CLASS_INTERFACE (klass)
1432 && ! inherits_from_p (type, valtype)
1433 && (CLASS_FINAL (klass)
1434 || ! inherits_from_p (valtype, type)))
1436 /* The classes are from different branches of the derivation
1437 tree, so we immediately know the answer. */
1438 expr = boolean_false_node;
/* Final target class: an exact run-time class comparison suffices.  */
1440 else if (DECL_P (klass) && CLASS_FINAL (klass))
1442 tree save = save_expr (value);
1443 expr = build3 (COND_EXPR, itype,
1444 build2 (NE_EXPR, boolean_type_node,
1445 save, null_pointer_node),
1446 build2 (EQ_EXPR, itype,
1447 build_get_class (save),
1448 build_class_ref (type)),
1449 boolean_false_node);
/* General case: defer to the _Jv_IsInstanceOf runtime helper.  */
1453 expr = build_call_nary (itype,
1454 build_address_of (soft_instanceof_node),
1455 2, value, build_class_ref (type));
1457 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
/* Expand the `instanceof' opcode: pop the object, push the test result.
   NOTE(review): listing is elided -- the final push is not shown here.  */
1462 expand_java_INSTANCEOF (tree type)
1464 tree value = pop_value (object_ptr_type_node);
1465 value = build_instanceof (value, type);
/* Expand the `checkcast' opcode via the _Jv_CheckCast runtime helper,
   which (per the JVM spec) passes the value through or throws.
   NOTE(review): listing is elided -- the final push is not shown here.  */
1470 expand_java_CHECKCAST (tree type)
1472 tree value = pop_value (ptr_type_node);
1473 value = build_call_nary (promote_type (type),
1474 build_address_of (soft_checkcast_node),
1475 2, build_class_ref (type), value);
/* Expand the `iinc' opcode: add the immediate IVAL to int local
   LOCAL_VAR_INDEX at PC.  NOTE(review): listing is elided -- some original
   source lines are not shown here.  */
1480 expand_iinc (unsigned int local_var_index, int ival, int pc)
1482 tree local_var, res;
1483 tree constant_value;
/* Flush first: pending stack entries may reference the local we mutate.  */
1485 flush_quick_stack ();
1486 local_var = find_local_variable (local_var_index, int_type_node, pc);
1487 constant_value = build_int_cst (NULL_TREE, ival);
1488 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1489 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
/* Build a call to the runtime soft div/rem helper for OP (TRUNC_DIV_EXPR or
   TRUNC_MOD_EXPR) on int or long operands OP1/OP2, converted to TYPE.
   NOTE(review): listing is elided -- some original source lines (switch
   braces, defaults, return) are not shown here.  */
1494 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1497 tree arg1 = convert (type, op1);
1498 tree arg2 = convert (type, op2);
1500 if (type == int_type_node)
1504 case TRUNC_DIV_EXPR:
1505 call = soft_idiv_node;
1507 case TRUNC_MOD_EXPR:
1508 call = soft_irem_node;
1514 else if (type == long_type_node)
1518 case TRUNC_DIV_EXPR:
1519 call = soft_ldiv_node;
1521 case TRUNC_MOD_EXPR:
1522 call = soft_lrem_node;
1530 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
/* Build a binary operation OP on ARG1/ARG2 of TYPE, handling the Java
   special cases: unsigned right shift, shift-count masking, the three-way
   compare opcodes, float remainder via fmod, and soft div/mod.
   NOTE(review): listing is elided -- some original source lines (case
   labels, braces) are not shown here.  */
1535 build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
/* Unsigned (logical) right shift: do the shift in the unsigned type.  */
1542 tree u_type = unsigned_type_for (type);
1543 arg1 = convert (u_type, arg1);
1544 arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1545 return convert (type, arg1);
/* Java masks shift counts to the operand width (JLS 15.19).  */
1549 mask = build_int_cst (NULL_TREE,
1550 TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
1551 arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
1554 case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
1555 case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
1556 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1558 tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1559 boolean_type_node, arg1, arg2);
1560 tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
1561 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1562 ifexp2, integer_zero_node,
1563 op == COMPARE_L_EXPR
1564 ? integer_minus_one_node
1565 : integer_one_node);
1566 return fold_build3 (COND_EXPR, int_type_node, ifexp1,
1567 op == COMPARE_L_EXPR ? integer_one_node
1568 : integer_minus_one_node,
/* Plain three-way compare (lcmp): -1 / 0 / 1.  */
1572 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1574 tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
1575 tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
1576 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1577 ifexp2, integer_one_node,
1579 return fold_build3 (COND_EXPR, int_type_node,
1580 ifexp1, integer_minus_one_node, second_compare);
1582 case TRUNC_DIV_EXPR:
1583 case TRUNC_MOD_EXPR:
/* Floating-point remainder is implemented by calling fmod in double.  */
1584 if (TREE_CODE (type) == REAL_TYPE
1585 && op == TRUNC_MOD_EXPR)
1588 if (type != double_type_node)
1590 arg1 = convert (double_type_node, arg1);
1591 arg2 = convert (double_type_node, arg2);
1593 call = build_call_nary (double_type_node,
1594 build_address_of (soft_fmod_node),
1596 if (type != double_type_node)
1597 call = convert (type, call);
/* Integer div/mod may go through the runtime helper (handles the
   divide-by-zero exception) when enabled.  */
1601 if (TREE_CODE (type) == INTEGER_TYPE
1602 && flag_use_divide_subroutine
1603 && ! flag_syntax_only)
1604 return build_java_soft_divmod (op, type, arg1, arg2);
1609 return fold_build2 (op, type, arg1, arg2);
/* Expand a binary opcode: pop the two operands (right first) and push the
   result of build_java_binop.  NOTE(review): listing is elided -- some
   original source lines (operand-type selection) are not shown here.  */
1613 expand_java_binop (tree type, enum tree_code op)
/* Shift counts are always int regardless of the operand type.  */
1623 rtype = int_type_node;
1624 rarg = pop_value (rtype);
1627 rarg = pop_value (rtype);
1629 larg = pop_value (ltype);
1630 push_value (build_java_binop (op, type, larg, rarg));
1633 /* Lookup the field named NAME in *TYPEP or its super classes.
1634    If not found, return NULL_TREE.
1635    (If the *TYPEP is not found, or if the field reference is
1636    ambiguous, return error_mark_node.)
1637    If found, return the FIELD_DECL, and set *TYPEP to the
1638    class containing the field. */
/* NOTE(review): listing is elided -- some original source lines (loop
   braces, returns) are not shown here.  */
1641 lookup_field (tree *typep, tree name)
1643 if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1645 load_class (*typep, 1);
1646 safe_layout_class (*typep);
1647 if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1648 return error_mark_node;
1652 tree field, binfo, base_binfo;
/* First try the class's own fields.  */
1656 for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field))
1657 if (DECL_NAME (field) == name)
1660 /* Process implemented interfaces. */
1661 save_field = NULL_TREE;
1662 for (binfo = TYPE_BINFO (*typep), i = 0;
1663 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1665 tree t = BINFO_TYPE (base_binfo);
1666 if ((field = lookup_field (&t, name)))
1668 if (save_field == field)
1670 if (save_field == NULL_TREE)
/* Same name found in two distinct interfaces: ambiguous reference.  */
1674 tree i1 = DECL_CONTEXT (save_field);
1675 tree i2 = DECL_CONTEXT (field);
1676 error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1677 IDENTIFIER_POINTER (name),
1678 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1679 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1680 return error_mark_node;
1685 if (save_field != NULL_TREE)
/* Not here: continue the walk in the superclass.  */
1688 *typep = CLASSTYPE_SUPER (*typep);
1693 /* Look up the field named NAME in object SELF_VALUE,
1694    which has class SELF_CLASS (a non-handle RECORD_TYPE).
1695    SELF_VALUE is NULL_TREE if looking for a static field. */
/* NOTE(review): listing is elided -- some original source lines are not
   shown here.  */
1698 build_field_ref (tree self_value, tree self_class, tree name)
1700 tree base_class = self_class;
1701 tree field_decl = lookup_field (&base_class, name);
1702 if (field_decl == NULL_TREE)
1704 error ("field %qs not found", IDENTIFIER_POINTER (name));
1705 return error_mark_node;
1707 if (self_value == NULL_TREE)
1709 return build_static_field_ref (field_decl);
1713 tree base_type = promote_type (base_class);
1715 /* CHECK is true if self_value is not the this pointer. */
1716 int check = (! (DECL_P (self_value)
1717 && DECL_NAME (self_value) == this_identifier_node));
1719 /* Determine whether a field offset from NULL will lie within
1720 Page 0: this is necessary on those GNU/Linux/BSD systems that
1721 trap SEGV to generate NullPointerExceptions.
1723 We assume that Page 0 will be mapped with NOPERM, and that
1724 memory may be allocated from any other page, so only field
1725 offsets < pagesize are guaranteed to trap. We also assume
1726 the smallest page size we'll encounter is 4k bytes. */
1727 if (! flag_syntax_only && check && ! flag_check_references
1728 && ! flag_indirect_dispatch)
1730 tree field_offset = byte_position (field_decl);
1732 page_size = size_int (4096);
1733 check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
1736 if (base_type != TREE_TYPE (self_value))
1737 self_value = fold_build1 (NOP_EXPR, base_type, self_value);
/* Indirect dispatch: field offset comes from the otable at run time.  */
1738 if (! flag_syntax_only && flag_indirect_dispatch)
1741 = build_int_cst (NULL_TREE, get_symbol_table_index
1742 (field_decl, NULL_TREE,
1743 &TYPE_OTABLE_METHODS (output_class)));
1745 = build4 (ARRAY_REF, integer_type_node,
1746 TYPE_OTABLE_DECL (output_class), otable_index,
1747 NULL_TREE, NULL_TREE);
/* A zero otable entry for a foreign field means "no such field".  */
1750 if (DECL_CONTEXT (field_decl) != output_class)
1752 = build3 (COND_EXPR, TREE_TYPE (field_offset),
1753 build2 (EQ_EXPR, boolean_type_node,
1754 field_offset, integer_zero_node),
1755 build_call_nary (void_type_node,
1756 build_address_of (soft_nosuchfield_node),
1760 field_offset = fold (convert (sizetype, field_offset));
1761 self_value = java_check_reference (self_value, check);
1763 = fold_build2 (POINTER_PLUS_EXPR,
1764 TREE_TYPE (self_value),
1765 self_value, field_offset);
1766 address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
1768 return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
/* Direct dispatch: an ordinary COMPONENT_REF with an optional null check.  */
1771 self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1773 return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1774 self_value, field_decl, NULL_TREE);
/* Return (creating if necessary) the LABEL_DECL for bytecode offset PC.
   NOTE(review): listing is elided -- some original source lines are not
   shown here.  */
1779 lookup_label (int pc)
1783 if (pc > highest_label_pc_this_method)
1784 highest_label_pc_this_method = pc;
/* Label names encode the PC so re-lookup finds the same decl.  */
1785 ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc);
1786 name = get_identifier (buf);
1787 if (IDENTIFIER_LOCAL_VALUE (name))
1788 return IDENTIFIER_LOCAL_VALUE (name);
1791 /* The type of the address of a label is return_address_type_node. */
1792 tree decl = create_label_decl (name);
1793 return pushdecl (decl);
1797 /* Generate a unique name for the purpose of loops and switches
1798    labels, and try-catch-finally blocks label or temporary variables. */
1801 generate_name (void)
1803 static int l_number = 0;
1805 ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
1807 return get_identifier (buff);
/* Build a LABEL_DECL named NAME in the current function, hidden from the
   debugger.  */
1811 create_label_decl (tree name)
1814 decl = build_decl (LABEL_DECL, name,
1815 TREE_TYPE (return_address_type_node));
1816 DECL_CONTEXT (decl) = current_function_decl;
1817 DECL_IGNORED_P (decl) = 1;
1821 /* This maps a bytecode offset (PC) to various flags. */
1822 char *instruction_bits;
1824 /* This is a vector of type states for the current method. It is
1825    indexed by PC. Each element is a tree vector holding the type
1826    state at that PC. We only note type states at basic block
1828 VEC(tree, gc) *type_states;
/* Record that TARGET_PC is a jump target: make sure its label exists and
   flag it in instruction_bits.  */
1831 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1833 lookup_label (target_pc);
1834 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1837 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1838    where CONDITION is one of one the compare operators. */
/* NOTE(review): listing is elided -- some original source lines are not
   shown here.  */
1841 expand_compare (enum tree_code condition, tree value1, tree value2,
1844 tree target = lookup_label (target_pc);
1845 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1847 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1848 build1 (GOTO_EXPR, void_type_node, target),
1849 build_java_empty_stmt ()));
1852 /* Emit code for a TEST-type opcode. */
1855 expand_test (enum tree_code condition, tree type, int target_pc)
1857 tree value1, value2;
1858 flush_quick_stack ();
1859 value1 = pop_value (type);
/* TEST opcodes compare against zero (or null for references).  */
1860 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1861 expand_compare (condition, value1, value2, target_pc);
1864 /* Emit code for a COND-type opcode. */
1867 expand_cond (enum tree_code condition, tree type, int target_pc)
1869 tree value1, value2;
1870 flush_quick_stack ();
1871 /* note: pop values in opposite order */
1872 value2 = pop_value (type);
1873 value1 = pop_value (type);
1874 /* Maybe should check value1 and value2 for type compatibility ??? */
1875 expand_compare (condition, value1, value2, target_pc);
/* Emit an unconditional goto to the label for TARGET_PC.  */
1879 expand_java_goto (int target_pc)
1881 tree target_label = lookup_label (target_pc);
1882 flush_quick_stack ();
1883 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
/* Emit a SWITCH_EXPR on SELECTOR with a default case jumping to
   DEFAULT_PC; returns the switch so callers can add cases.
   NOTE(review): listing is elided -- some original source lines are not
   shown here.  */
1887 expand_java_switch (tree selector, int default_pc)
1889 tree switch_expr, x;
1891 flush_quick_stack ();
1892 switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1893 NULL_TREE, NULL_TREE);
1894 java_add_stmt (switch_expr);
/* NULL_TREE case value = the default label.  */
1896 x = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, NULL_TREE,
1897 create_artificial_label ());
1898 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1900 x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1901 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
/* Append to SWITCH_EXPR a case labeled MATCH that jumps to TARGET_PC.  */
1907 expand_java_add_case (tree switch_expr, int match, int target_pc)
1911 value = build_int_cst (TREE_TYPE (switch_expr), match);
1913 x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE,
1914 create_artificial_label ());
1915 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1917 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1918 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
/* Pop the call arguments described by ARG_TYPES (a TREE_LIST ending in
   end_params_node) off the quick stack, recursing so arguments come back
   in declaration order.  NOTE(review): listing is elided -- some original
   source lines are not shown here.  */
1922 pop_arguments (tree arg_types)
1924 if (arg_types == end_params_node)
1926 if (TREE_CODE (arg_types) == TREE_LIST)
/* Recurse first: the last argument is on top of the stack.  */
1928 tree tail = pop_arguments (TREE_CHAIN (arg_types));
1929 tree type = TREE_VALUE (arg_types);
1930 tree arg = pop_value (type);
1932 /* We simply cast each argument to its proper type. This is
1933 needed since we lose type information coming out of the
1934 verifier. We also have to do this when we pop an integer
1935 type that must be promoted for the function call. */
1936 if (TREE_CODE (type) == POINTER_TYPE)
1937 arg = build1 (NOP_EXPR, type, arg);
1938 else if (targetm.calls.promote_prototypes (type)
1939 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1940 && INTEGRAL_TYPE_P (type))
1941 arg = convert (integer_type_node, arg);
1942 return tree_cons (NULL_TREE, arg, tail);
1947 /* Attach to PTR (a block) the declaration found in ENTRY. */
/* NOTE(review): hash-table traversal callback; threads the init-test
   VAR_DECL into the block's declaration chain and prepends its DECL_EXPR.
   Listing is elided -- some original source lines are not shown here.  */
1950 attach_init_test_initialization_flags (void **entry, void *ptr)
1952 tree block = (tree)ptr;
1953 struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
1955 if (block != error_mark_node)
1957 if (TREE_CODE (block) == BIND_EXPR)
1959 tree body = BIND_EXPR_BODY (block);
1960 TREE_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1961 BIND_EXPR_VARS (block) = ite->value;
1962 body = build2 (COMPOUND_EXPR, void_type_node,
1963 build1 (DECL_EXPR, void_type_node, ite->value), body);
1964 BIND_EXPR_BODY (block) = body;
/* Otherwise BLOCK is a BLOCK node; same threading via its own accessors.  */
1968 tree body = BLOCK_SUBBLOCKS (block);
1969 TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1970 BLOCK_EXPR_DECLS (block) = ite->value;
1971 body = build2 (COMPOUND_EXPR, void_type_node,
1972 build1 (DECL_EXPR, void_type_node, ite->value), body);
1973 BLOCK_SUBBLOCKS (block) = body;
1980 /* Build an expression to initialize the class CLAS.
1981    if EXPR is non-NULL, returns an expression to first call the initializer
1982    (if it is needed) and then calls EXPR. */
/* NOTE(review): listing is elided -- some original source lines are not
   shown here.  */
1985 build_class_init (tree clas, tree expr)
1989 /* An optimization: if CLAS is a superclass of the class we're
1990 compiling, we don't need to initialize it. However, if CLAS is
1991 an interface, it won't necessarily be initialized, even if we
1993 if ((! CLASS_INTERFACE (TYPE_NAME (clas))
1994 && inherits_from_p (current_class, clas))
1995 || current_class == clas)
/* Unconditional mode: always emit the _Jv_InitClass call.  */
1998 if (always_initialize_class_p)
2000 init = build_call_nary (void_type_node,
2001 build_address_of (soft_initclass_node),
2002 1, build_class_ref (clas));
2003 TREE_SIDE_EFFECTS (init) = 1;
2007 tree *init_test_decl;
/* Guarded mode: use a per-function boolean flag so each class is
   initialized at most once per method.  */
2009 init_test_decl = java_treetreehash_new
2010 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
2012 if (*init_test_decl == NULL)
2014 /* Build a declaration and mark it as a flag used to track
2015 static class initializations. */
2016 decl = build_decl (VAR_DECL, NULL_TREE,
2018 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
2019 DECL_CONTEXT (decl) = current_function_decl;
2020 DECL_INITIAL (decl) = boolean_false_node;
2021 /* Don't emit any symbolic debugging info for this decl. */
2022 DECL_IGNORED_P (decl) = 1;
2023 *init_test_decl = decl;
/* if (!flag) _Jv_InitClass (clas); flag = true;  */
2026 init = build_call_nary (void_type_node,
2027 build_address_of (soft_initclass_node),
2028 1, build_class_ref (clas));
2029 TREE_SIDE_EFFECTS (init) = 1;
2030 init = build3 (COND_EXPR, void_type_node,
2031 build2 (EQ_EXPR, boolean_type_node,
2032 *init_test_decl, boolean_false_node),
2033 init, integer_zero_node);
2034 TREE_SIDE_EFFECTS (init) = 1;
2035 init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2036 build2 (MODIFY_EXPR, boolean_type_node,
2037 *init_test_decl, boolean_true_node));
2038 TREE_SIDE_EFFECTS (init) = 1;
2041 if (expr != NULL_TREE)
2043 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2044 TREE_SIDE_EFFECTS (expr) = 1;
2052 /* Rewrite expensive calls that require stack unwinding at runtime to
2053    cheaper alternatives. The logic here performs these
2056    java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2057    java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
/* Rewrite-rule descriptor: matches a (class, method, signature) triple and
   supplies the replacement signature plus an arglist-rewriting callback.
   NOTE(review): listing is elided -- some struct fields and braces are not
   shown here.  */
2063 const char *classname;
2065 const char *signature;
2066 const char *new_signature;
2068 tree (*rewrite_arglist) (tree arglist);
2071 /* Add __builtin_return_address(0) to the end of an arglist. */
2075 rewrite_arglist_getcaller (tree arglist)
2078 = build_call_expr (built_in_decls[BUILT_IN_RETURN_ADDRESS],
2079 1, integer_zero_node);
/* __builtin_return_address is only reliable when the caller is not
   inlined, so inhibit inlining of the current function.  */
2081 DECL_INLINE (current_function_decl) = 0;
2083 return chainon (arglist,
2084 tree_cons (NULL_TREE, retaddr,
2088 /* Add this.class to the end of an arglist. */
2091 rewrite_arglist_getclass (tree arglist)
2093 return chainon (arglist,
2094 tree_cons (NULL_TREE, build_class_ref (output_class),
2096 /* The table of rewrites, terminated by an all-NULL sentinel entry.  */
2098 static rewrite_rule rules[] =
2099 {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2100 "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2101 ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2102 {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2103 "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2104 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2105 {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2106 "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2107 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2108 {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2109 "()Ljava/lang/ClassLoader;",
2110 "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2111 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2113 {NULL, NULL, NULL, NULL, 0, NULL}};
2115 /* True if this method is special, i.e. it's a private method that
2116    should be exported from a DSO. */
/* NOTE(review): a method is "special" when its (class, name) pair appears
   in the rules[] rewrite table above.  Listing is elided -- some original
   source lines (braces, returns) are not shown here.  */
2119 special_method_p (tree candidate_method)
2121 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2122 tree method = DECL_NAME (candidate_method);
2125 for (p = rules; p->classname; p++)
2127 if (get_identifier (p->classname) == context
2128 && get_identifier (p->method) == method)
2134 /* Scan the rules list for replacements for *METHOD_P and replace the
2135    args accordingly. If the rewrite results in an access to a private
2136    method, update SPECIAL.*/
/* NOTE(review): listing is elided -- some original source lines are not
   shown here.  */
2139 maybe_rewrite_invocation (tree *method_p, tree *arg_list_p,
2140 tree *method_signature_p, tree *special)
2142 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2144 *special = NULL_TREE;
2146 for (p = rules; p->classname; p++)
2148 if (get_identifier (p->classname) == context)
2150 tree method = DECL_NAME (*method_p);
2151 if (get_identifier (p->method) == method
2152 && get_identifier (p->signature) == *method_signature_p)
/* Find (or, without verification, synthesize) the replacement method
   with the new signature.  */
2155 = lookup_java_method (DECL_CONTEXT (*method_p),
2157 get_identifier (p->new_signature));
2158 if (! maybe_method && ! flag_verify_invocations)
2161 = add_method (DECL_CONTEXT (*method_p), p->flags,
2162 method, get_identifier (p->new_signature));
2163 DECL_EXTERNAL (maybe_method) = 1;
2165 *method_p = maybe_method;
2166 gcc_assert (*method_p);
2167 *arg_list_p = p->rewrite_arglist (*arg_list_p);
2168 *method_signature_p = get_identifier (p->new_signature);
2169 *special = integer_one_node;
/* Build a reference to the native code of METHOD, known at compile time to
   be the call target.  NOTE(review): listing is elided -- some original
   source lines (declarations, braces, the index search loop body) are not
   shown here.  */
2180 build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2181 tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2182 tree arg_list ATTRIBUTE_UNUSED, tree special)
2185 if (is_compiled_class (self_type))
2187 /* With indirect dispatch we have to use indirect calls for all
2188 publicly visible methods or gcc will use PLT indirections
2189 to reach them. We also have to use indirect dispatch for all
2190 external methods. */
2191 if (! flag_indirect_dispatch
2192 || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
2194 func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
/* Indirect dispatch: fetch the code pointer from the atable.  */
2200 = build_int_cst (NULL_TREE,
2201 (get_symbol_table_index
2203 &TYPE_ATABLE_METHODS (output_class))));
2205 = build4 (ARRAY_REF,
2206 TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2207 TYPE_ATABLE_DECL (output_class), table_index,
2208 NULL_TREE, NULL_TREE);
2210 func = convert (method_ptr_type_node, func);
2214 /* We don't know whether the method has been (statically) compiled.
2215 Compile this code to get a reference to the method's code:
2217 SELF_TYPE->methods[METHOD_INDEX].ncode
2221 int method_index = 0;
2224 /* The method might actually be declared in some superclass, so
2225 we have to use its class context, not the caller's notion of
2226 where the method is. */
2227 self_type = DECL_CONTEXT (method);
2228 ref = build_class_ref (self_type);
2229 ref = build1 (INDIRECT_REF, class_type_node, ref);
2230 if (ncode_ident == NULL_TREE)
2231 ncode_ident = get_identifier ("ncode");
2232 if (methods_ident == NULL_TREE)
2233 methods_ident = get_identifier ("methods");
2234 ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2235 lookup_field (&class_type_node, methods_ident),
/* Scan the class's method list to compute METHOD's index.  */
2237 for (meth = TYPE_METHODS (self_type);
2238 ; meth = TREE_CHAIN (meth))
2242 if (meth == NULL_TREE)
2243 fatal_error ("method '%s' not found in class",
2244 IDENTIFIER_POINTER (DECL_NAME (method)));
/* Index into the methods array by byte offset, then read .ncode.  */
2247 method_index *= int_size_in_bytes (method_type_node);
2248 ref = fold_build2 (POINTER_PLUS_EXPR, method_ptr_type_node,
2249 ref, size_int (method_index));
2250 ref = build1 (INDIRECT_REF, method_type_node, ref);
2251 func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2252 ref, lookup_field (&method_type_node, ncode_ident),
/* Build an expression for the dispatch (vtable) pointer of the receiver,
   which is the first entry of ARG_LIST.  NOTE(review): listing is elided --
   some original source lines are not shown here.  */
2259 invoke_build_dtable (int is_invoke_interface, tree arg_list)
2261 tree dtable, objectref;
/* The receiver is referenced more than once; protect with SAVE_EXPR.  */
2263 TREE_VALUE (arg_list) = save_expr (TREE_VALUE (arg_list));
2265 /* If we're dealing with interfaces and if the objectref
2266 argument is an array then get the dispatch table of the class
2267 Object rather than the one from the objectref. */
2268 objectref = (is_invoke_interface
2269 && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list)))
2270 ? build_class_ref (object_type_node) : TREE_VALUE (arg_list));
2272 if (dtable_ident == NULL_TREE)
2273 dtable_ident = get_identifier ("vtable");
2274 dtable = build_java_indirect_ref (object_type_node, objectref,
2275 flag_check_references);
2276 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2277 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2282 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2283    T. If this decl has not been seen before, it will be added to the
2284    [oa]table_methods. If it has, the existing table slot will be
/* NOTE(review): entries are keyed by the (T, SPECIAL) pair.  Listing is
   elided -- the index counter and returns are not shown here.  */
2288 get_symbol_table_index (tree t, tree special, tree *symbol_table)
2293 if (*symbol_table == NULL_TREE)
2295 *symbol_table = build_tree_list (special, t)
2299 method_list = *symbol_table;
/* Linear scan for an existing entry; append at the tail if absent.  */
2303 tree value = TREE_VALUE (method_list);
2304 tree purpose = TREE_PURPOSE (method_list);
2305 if (value == t && purpose == special)
2308 if (TREE_CHAIN (method_list) == NULL_TREE)
2311 method_list = TREE_CHAIN (method_list);
2314 TREE_CHAIN (method_list) = build_tree_list (special, t);
/* Build an expression fetching METHOD's code pointer out of the vtable
   DTABLE for an invokevirtual call.  NOTE(review): listing is elided --
   some original source lines (declarations, braces) are not shown here.  */
2319 build_invokevirtual (tree dtable, tree method, tree special)
2322 tree nativecode_ptr_ptr_type_node
2323 = build_pointer_type (nativecode_ptr_type_node);
2327 if (flag_indirect_dispatch)
/* Indirect dispatch: the vtable offset comes from the otable.  */
2329 gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
2332 = build_int_cst (NULL_TREE, get_symbol_table_index
2334 &TYPE_OTABLE_METHODS (output_class)));
2335 method_index = build4 (ARRAY_REF, integer_type_node,
2336 TYPE_OTABLE_DECL (output_class),
2337 otable_index, NULL_TREE, NULL_TREE);
2341 /* We fetch the DECL_VINDEX field directly here, rather than
2342 using get_method_index(). DECL_VINDEX is the true offset
2343 from the vtable base to a method, regrdless of any extra
2344 words inserted at the start of the vtable. */
2345 method_index = DECL_VINDEX (method);
2346 method_index = size_binop (MULT_EXPR, method_index,
2347 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
/* Descriptor-based vtables use several words per slot.  */
2348 if (TARGET_VTABLE_USES_DESCRIPTORS)
2349 method_index = size_binop (MULT_EXPR, method_index,
2350 size_int (TARGET_VTABLE_USES_DESCRIPTORS));
2353 func = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dtable), dtable,
2354 convert (sizetype, method_index));
2356 if (TARGET_VTABLE_USES_DESCRIPTORS)
2357 func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
2360 func = fold_convert (nativecode_ptr_ptr_type_node, func);
2361 func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
/* Cached identifier for the `class' field of the vtable; GC-rooted via GTY.  */
2367 static GTY(()) tree class_ident;
/* Build an expression that resolves METHOD (an interface method) for the
   receiver whose vtable is DTABLE.  With -findirect-dispatch the lookup
   goes through the itable; otherwise it falls back to the runtime helper
   _Jv_LookupInterfaceMethodIdx (soft_lookupinterfacemethod_node).  */
2369 build_invokeinterface (tree dtable, tree method)
2374 /* We expand invokeinterface here. */
2376 if (class_ident == NULL_TREE)
2377 class_ident = get_identifier ("class");
/* Fetch the receiver's Class object out of its vtable.  */
2379 dtable = build_java_indirect_ref (dtable_type, dtable,
2380 flag_check_references);
2381 dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2382 lookup_field (&dtable_type, class_ident), NULL_TREE);
2384 interface = DECL_CONTEXT (method);
2385 gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2386 layout_class_methods (interface);
2388 if (flag_indirect_dispatch)
/* itable entries come in (interface, index) pairs, hence the factor 2;
   slot itable_index-1 holds the interface, slot itable_index the index.  */
2391 = 2 * (get_symbol_table_index
2392 (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2394 = build4 (ARRAY_REF,
2395 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2396 TYPE_ITABLE_DECL (output_class),
2397 build_int_cst (NULL_TREE, itable_index-1),
2398 NULL_TREE, NULL_TREE);
2400 = build4 (ARRAY_REF,
2401 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2402 TYPE_ITABLE_DECL (output_class),
2403 build_int_cst (NULL_TREE, itable_index),
2404 NULL_TREE, NULL_TREE);
2405 interface = convert (class_ptr_type, interface);
2406 idx = convert (integer_type_node, idx);
/* Direct (non-BC) compilation: the method's position within its
   interface is a compile-time constant.  */
2410 idx = build_int_cst (NULL_TREE,
2411 get_interface_method_index (method, interface));
2412 interface = build_class_ref (interface);
/* Call the runtime lookup helper with (class, interface, index).  */
2415 return build_call_nary (ptr_type_node,
2416 build_address_of (soft_lookupinterfacemethod_node),
2417 3, dtable, interface, idx);
2420 /* Expand one of the invoke_* opcodes.
2421 OPCODE is the specific opcode.
2422 METHOD_REF_INDEX is an index into the constant pool.
2423 NARGS is the number of arguments, or -1 if not specified. */
2426 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
/* NOTE(review): the `¤t_jcf' tokens below look like a character-set
   mangling of `&current_jcf' -- confirm against the pristine source.  */
2428 tree method_signature
2429 = COMPONENT_REF_SIGNATURE(¤t_jcf->cpool, method_ref_index);
2430 tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool,
2433 = get_class_constant (current_jcf,
2434 COMPONENT_REF_CLASS_INDEX(¤t_jcf->cpool,
2436 const char *const self_name
2437 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2438 tree call, func, method, arg_list, method_type;
2439 tree check = NULL_TREE;
2441 tree special = NULL_TREE;
/* Make sure the declaring class is loaded and laid out before lookup.  */
2443 if (! CLASS_LOADED_P (self_type))
2445 load_class (self_type, 1);
2446 safe_layout_class (self_type);
2447 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2448 fatal_error ("failed to find class '%s'", self_name);
2450 layout_class_methods (self_type);
/* Constructors (<init>) are looked up by signature only.  */
2452 if (ID_INIT_P (method_name))
2453 method = lookup_java_constructor (self_type, method_signature);
2455 method = lookup_java_method (self_type, method_name, method_signature);
2457 /* We've found a method in a class other than the one in which it
2458 was wanted. This can happen if, for instance, we're trying to
2459 compile invokespecial super.equals().
2460 FIXME: This is a kludge. Rather than nullifying the result, we
2461 should change lookup_java_method() so that it doesn't search the
2462 superclass chain when we're BC-compiling. */
2463 if (! flag_verify_invocations
2465 && ! TYPE_ARRAY_P (self_type)
2466 && self_type != DECL_CONTEXT (method))
2469 /* We've found a method in an interface, but this isn't an interface call.  */
2471 if (opcode != OPCODE_invokeinterface
2473 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2476 /* We've found a non-interface method but we are making an
2477 interface call. This can happen if the interface overrides a
2478 method in Object. */
2479 if (! flag_verify_invocations
2480 && opcode == OPCODE_invokeinterface
2482 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
2485 if (method == NULL_TREE)
2487 if (flag_verify_invocations || ! flag_indirect_dispatch)
2489 error ("class '%s' has no method named '%s' matching signature '%s'",
2491 IDENTIFIER_POINTER (method_name),
2492 IDENTIFIER_POINTER (method_signature));
/* BC compilation without verification: fabricate a dummy method decl
   so that code generation can proceed; it is resolved at run time.  */
2496 int flags = ACC_PUBLIC;
2497 if (opcode == OPCODE_invokestatic)
2498 flags |= ACC_STATIC;
2499 if (opcode == OPCODE_invokeinterface)
2501 flags |= ACC_INTERFACE | ACC_ABSTRACT;
2502 CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2504 method = add_method (self_type, flags, method_name,
2506 DECL_ARTIFICIAL (method) = 1;
2507 METHOD_DUMMY (method) = 1;
2508 layout_class_method (self_type, NULL,
2513 /* Invoke static can't invoke static/abstract method */
2514 if (method != NULL_TREE)
2516 if (opcode == OPCODE_invokestatic)
2518 if (!METHOD_STATIC (method))
2520 error ("invokestatic on non static method");
2523 else if (METHOD_ABSTRACT (method))
2525 error ("invokestatic on abstract method");
2531 if (METHOD_STATIC (method))
2533 error ("invoke[non-static] on static method");
2539 if (method == NULL_TREE)
2541 /* If we got here, we emitted an error message above. So we
2542 just pop the arguments, push a properly-typed zero, and continue.  */
2544 method_type = get_type_from_signature (method_signature);
2545 pop_arguments (TYPE_ARG_TYPES (method_type));
2546 if (opcode != OPCODE_invokestatic)
2547 pop_type (self_type);
2548 method_type = promote_type (TREE_TYPE (method_type));
2549 push_value (convert (method_type, integer_zero_node));
2553 method_type = TREE_TYPE (method);
2554 arg_list = pop_arguments (TYPE_ARG_TYPES (method_type));
2555 flush_quick_stack ();
/* Give builtin rewrites (e.g. Math intrinsics) a chance to replace
   the call before dispatch is chosen.  */
2557 maybe_rewrite_invocation (&method, &arg_list, &method_signature,
/* Choose the dispatch mechanism: direct call for static methods and
   for effectively-final virtual calls; vtable/itable otherwise.  */
2561 if (opcode == OPCODE_invokestatic)
2562 func = build_known_method_ref (method, method_type, self_type,
2563 method_signature, arg_list, special);
2564 else if (opcode == OPCODE_invokespecial
2565 || (opcode == OPCODE_invokevirtual
2566 && (METHOD_PRIVATE (method)
2567 || METHOD_FINAL (method)
2568 || CLASS_FINAL (TYPE_NAME (self_type)))))
2570 /* If the object for the method call is null, we throw an
2571 exception. We don't do this if the object is the current
2572 method's `this'. In other cases we just rely on an
2573 optimization pass to eliminate redundant checks. FIXME:
2574 Unfortunately there doesn't seem to be a way to determine
2575 what the current method is right now.
2576 We do omit the check if we're calling <init>. */
2577 /* We use a SAVE_EXPR here to make sure we only evaluate
2578 the new `self' expression once. */
2579 tree save_arg = save_expr (TREE_VALUE (arg_list));
2580 TREE_VALUE (arg_list) = save_arg;
2581 check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2582 func = build_known_method_ref (method, method_type, self_type,
2583 method_signature, arg_list, special);
2587 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2589 if (opcode == OPCODE_invokevirtual)
2590 func = build_invokevirtual (dtable, method, special);
2592 func = build_invokeinterface (dtable, method);
2595 if (TREE_CODE (func) == ADDR_EXPR)
2596 TREE_TYPE (func) = build_pointer_type (method_type);
2598 func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2600 call = build_call_list (TREE_TYPE (method_type), func, arg_list);
2601 TREE_SIDE_EFFECTS (call) = 1;
2602 call = check_for_builtin (method, call);
/* Sequence the null-reference check (if any) before the call itself.  */
2604 if (check != NULL_TREE)
2606 call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2607 TREE_SIDE_EFFECTS (call) = 1;
/* void calls become statements; value-returning calls go back on the
   quick stack (handled by code not visible in this extract).  */
2610 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2611 java_add_stmt (call);
2615 flush_quick_stack ();
2619 /* Create a stub which will be put into the vtable but which will call
   the native implementation through the JNI calling convention.  The
   stub obtains a JNIEnv frame, looks up the target function with
   _Jv_LookupJNIMethod (caching it in a static `meth' variable), makes
   the call, pops the JNI frame, and returns the (possibly unwrapped)
   result.  */
2623 build_jni_stub (tree method)
2625 tree jnifunc, call, args, body, method_sig, arg_types;
2626 tree jniarg0, jniarg1, jniarg2, jniarg3;
2627 tree jni_func_type, tem;
2628 tree env_var, res_var = NULL_TREE, block;
2629 tree method_args, res_type;
2635 tree klass = DECL_CONTEXT (method);
2636 klass = build_class_ref (klass);
2638 gcc_assert (METHOD_NATIVE (method) && flag_jni);
/* The stub body is generated here, so the decl is no longer external.  */
2640 DECL_ARTIFICIAL (method) = 1;
2641 DECL_EXTERNAL (method) = 0;
/* Local `env' holds the JNIEnv pointer for the duration of the call.  */
2643 env_var = build_decl (VAR_DECL, get_identifier ("env"), ptr_type_node);
2644 DECL_CONTEXT (env_var) = method;
/* Non-void methods also need a local `res' to hold the JNI result.  */
2646 if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2648 res_var = build_decl (VAR_DECL, get_identifier ("res"),
2649 TREE_TYPE (TREE_TYPE (method)));
2650 DECL_CONTEXT (res_var) = method;
2651 TREE_CHAIN (env_var) = res_var;
2654 method_args = DECL_ARGUMENTS (method);
2655 block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2656 TREE_SIDE_EFFECTS (block) = 1;
2657 TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));
2659 /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2660 body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2661 build_call_nary (ptr_type_node,
2662 build_address_of (soft_getjnienvnewframe_node),
2665 /* All the arguments to this method become arguments to the
2666 underlying JNI function. If we had to wrap object arguments in a
2667 special way, we would do that here. */
2669 for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem))
2671 int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2672 #ifdef PARM_BOUNDARY
/* Round each argument's size up to the target's parameter boundary
   when accumulating args_size (used for stdcall-style mangling).  */
2673 arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2676 args_size += (arg_bits / BITS_PER_UNIT);
2678 args = tree_cons (NULL_TREE, tem, args);
2680 args = nreverse (args);
2681 arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2683 /* For a static method the second argument is the class. For a
2684 non-static method the second argument is `this'; that is already
2685 available in the argument list. */
2686 if (METHOD_STATIC (method))
2688 args_size += int_size_in_bytes (TREE_TYPE (klass));
2689 args = tree_cons (NULL_TREE, klass, args);
2690 arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2693 /* The JNIEnv structure is the first argument to the JNI function. */
2694 args_size += int_size_in_bytes (TREE_TYPE (env_var));
2695 args = tree_cons (NULL_TREE, env_var, args);
2696 arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2698 /* We call _Jv_LookupJNIMethod to find the actual underlying
2699 function pointer. _Jv_LookupJNIMethod will throw the appropriate
2700 exception if this function is not found at runtime. */
2701 method_sig = build_java_signature (TREE_TYPE (method));
2703 jniarg1 = build_utf8_ref (DECL_NAME (method));
2704 jniarg2 = build_utf8_ref (unmangle_classname
2705 (IDENTIFIER_POINTER (method_sig),
2706 IDENTIFIER_LENGTH (method_sig)));
2707 jniarg3 = build_int_cst (NULL_TREE, args_size);
2709 tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2711 #ifdef MODIFY_JNI_METHOD_CALL
/* Targets (e.g. Windows) may adjust the call ABI of JNI functions.  */
2712 tem = MODIFY_JNI_METHOD_CALL (tem);
2715 jni_func_type = build_pointer_type (tem);
2717 /* Use the actual function type, rather than a generic pointer type,
2718 such that this decl keeps the actual pointer type from being
2719 garbage-collected. If it is, we end up using canonical types
2720 with different uids for equivalent function types, and this in
2721 turn causes utf8 identifiers and output order to vary. */
2722 meth_var = build_decl (VAR_DECL, get_identifier ("meth"), jni_func_type);
2723 TREE_STATIC (meth_var) = 1;
2724 TREE_PUBLIC (meth_var) = 0;
2725 DECL_EXTERNAL (meth_var) = 0;
2726 DECL_CONTEXT (meth_var) = method;
2727 DECL_ARTIFICIAL (meth_var) = 1;
2728 DECL_INITIAL (meth_var) = null_pointer_node;
2729 TREE_USED (meth_var) = 1;
2730 chainon (env_var, meth_var);
2731 build_result_decl (method);
/* meth != NULL ? meth : (meth = _Jv_LookupJNIMethod (...)) -- lazily
   resolve and cache the JNI function pointer.  */
2733 jnifunc = build3 (COND_EXPR, jni_func_type,
2734 build2 (NE_EXPR, boolean_type_node,
2735 meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
2737 build2 (MODIFY_EXPR, jni_func_type, meth_var,
2739 (NOP_EXPR, jni_func_type,
2740 build_call_nary (ptr_type_node,
2742 (soft_lookupjnimethod_node),
2745 jniarg2, jniarg3))));
2747 /* Now we make the actual JNI call via the resulting function pointer.  */
2749 call = build_call_list (TREE_TYPE (TREE_TYPE (method)),
2752 /* If the JNI call returned a result, capture it here. If we had to
2753 unwrap JNI object results, we would do that here. */
2754 if (res_var != NULL_TREE)
2756 /* If the call returns an object, it may return a JNI weak
2757 reference, in which case we must unwrap it. */
2758 if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2759 call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2760 build_address_of (soft_unwrapjni_node),
2762 call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2766 TREE_SIDE_EFFECTS (call) = 1;
2768 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2769 TREE_SIDE_EFFECTS (body) = 1;
2771 /* Now free the environment we allocated. */
2772 call = build_call_nary (ptr_type_node,
2773 build_address_of (soft_jnipopsystemframe_node),
2775 TREE_SIDE_EFFECTS (call) = 1;
2776 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2777 TREE_SIDE_EFFECTS (body) = 1;
2779 /* Finally, do the return. */
2780 res_type = void_type_node;
2781 if (res_var != NULL_TREE)
2784 gcc_assert (DECL_RESULT (method));
2785 /* Make sure we copy the result variable to the actual
2786 result. We use the type of the DECL_RESULT because it
2787 might be different from the return type of the function:
2788 it might be promoted. */
2789 drt = TREE_TYPE (DECL_RESULT (method));
2790 if (drt != TREE_TYPE (res_var))
2791 res_var = build1 (CONVERT_EXPR, drt, res_var);
2792 res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2793 TREE_SIDE_EFFECTS (res_var) = 1;
2796 body = build2 (COMPOUND_EXPR, void_type_node, body,
2797 build1 (RETURN_EXPR, void_type_node, res_var));
2798 TREE_SIDE_EFFECTS (body) = 1;
2800 /* Prepend class initialization for static methods reachable from
   other classes.  */
2802 if (METHOD_STATIC (method)
2803 && (! METHOD_PRIVATE (method)
2804 || INNER_CLASS_P (DECL_CONTEXT (method))))
2806 tree init = build_call_expr (soft_initclass_node, 1,
2808 body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2809 TREE_SIDE_EFFECTS (body) = 1;
/* Wrap everything in a BIND_EXPR exposing the stub's locals.  */
2812 bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2818 /* Given lvalue EXP, return a volatile expression that references the
   same object.  Achieved by taking EXP's address, casting it to a
   pointer-to-volatile of the same base type, and re-dereferencing.  */
2822 java_modify_addr_for_volatile (tree exp)
2824 tree exp_type = TREE_TYPE (exp);
/* Build the volatile-qualified variant of EXP's type.  */
2826 = build_qualified_type (exp_type,
2827 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2828 tree addr = build_fold_addr_expr (exp);
2829 v_type = build_pointer_type (v_type);
2830 addr = fold_convert (v_type, addr);
/* *(volatile T *)&exp -- the access itself is now volatile.  */
2831 exp = build_fold_indirect_ref (addr);
2836 /* Expand an operation to extract from or store into a field.
2837 IS_STATIC is 1 iff the field is static.
2838 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2839 FIELD_REF_INDEX is an index into the constant pool. */
2842 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
/* NOTE(review): the `¤t_jcf' tokens below look like a character-set
   mangling of `&current_jcf' -- confirm against the pristine source.  */
2845 = get_class_constant (current_jcf,
2846 COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool,
2848 const char *self_name
2849 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2850 tree field_name = COMPONENT_REF_NAME (¤t_jcf->cpool, field_ref_index);
2851 tree field_signature = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool,
2853 tree field_type = get_type_from_signature (field_signature);
/* For putfield/putstatic the new value is already on the stack.  */
2854 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2857 tree original_self_type = self_type;
2861 if (! CLASS_LOADED_P (self_type))
2862 load_class (self_type, 1);
2863 field_decl = lookup_field (&self_type, field_name);
2864 if (field_decl == error_mark_node)
2868 else if (field_decl == NULL_TREE)
/* Unverified BC compilation: fabricate a dummy field decl so code
   generation can continue; resolution happens at run time.  */
2870 if (! flag_verify_invocations)
2872 int flags = ACC_PUBLIC;
2874 flags |= ACC_STATIC;
2875 self_type = original_self_type;
2876 field_decl = add_field (original_self_type, field_name,
2878 DECL_ARTIFICIAL (field_decl) = 1;
2879 DECL_IGNORED_P (field_decl) = 1;
2881 /* FIXME: We should be pessimistic about volatility. We
2882 don't know one way or another, but this is safe.
2883 However, doing this has bad effects on code quality. We
2884 need to look at better ways to do this. */
2885 TREE_THIS_VOLATILE (field_decl) = 1;
2890 error ("missing field '%s' in '%s'",
2891 IDENTIFIER_POINTER (field_name), self_name);
2895 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2897 error ("mismatching signature for field '%s' in '%s'",
2898 IDENTIFIER_POINTER (field_name), self_name);
/* Instance field access: the object reference is below the value.  */
2901 field_ref = is_static ? NULL_TREE : pop_value (self_type);
/* Error recovery: push a zero of the right type and carry on.  */
2905 push_value (convert (field_type, integer_zero_node));
2906 flush_quick_stack ();
2910 field_ref = build_field_ref (field_ref, self_type, field_name);
2912 && ! flag_indirect_dispatch)
/* Static field access may need to trigger class initialization of
   the class that actually declares the field.  */
2914 tree context = DECL_CONTEXT (field_ref);
2915 if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2916 field_ref = build_class_init (context, field_ref);
2918 field_ref = build_class_init (self_type, field_ref);
2922 flush_quick_stack ();
2923 if (FIELD_FINAL (field_decl))
2925 if (DECL_CONTEXT (field_decl) != current_class)
2926 error ("assignment to final field %q+D not in field's class",
2928 /* We used to check for assignments to final fields not
2929 occurring in the class initializer or in a constructor
2930 here. However, this constraint doesn't seem to be
2931 enforced by the JVM. */
/* Volatile store: make the access volatile and emit a full barrier
   (__sync_synchronize) around it, per the Java memory model.  */
2934 if (TREE_THIS_VOLATILE (field_decl))
2935 field_ref = java_modify_addr_for_volatile (field_ref);
2937 modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2938 field_ref, new_value);
2940 if (TREE_THIS_VOLATILE (field_decl))
2942 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2944 java_add_stmt (modify_expr);
/* Volatile load: read through a temporary so the access happens
   exactly once, with a barrier.  */
2948 tree temp = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2949 java_add_local_var (temp);
2951 if (TREE_THIS_VOLATILE (field_decl))
2952 field_ref = java_modify_addr_for_volatile (field_ref);
2955 = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2956 java_add_stmt (modify_expr);
2958 if (TREE_THIS_VOLATILE (field_decl))
2960 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2964 TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
/* Restore the verifier's recorded type state (locals + stack types)
   for bytecode offset PC into the global type_map, and recompute
   stack_pointer from the vector length minus the method's locals.  */
2968 load_type_state (int pc)
2971 tree vec = VEC_index (tree, type_states, pc);
2972 int cur_length = TREE_VEC_LENGTH (vec);
/* The vector stores locals first, then stack slots.  */
2973 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2974 for (i = 0; i < cur_length; i++)
2975 type_map [i] = TREE_VEC_ELT (vec, i);
2978 /* Go over METHOD's bytecode and note instruction starts in
2979 instruction_bits[].  This is a pre-pass: it records which PCs begin
   an instruction and which PCs are branch/switch/jsr targets (via
   NOTE_LABEL), and sizes the instruction_bits and type_states tables.
   The opcode dispatch itself is generated by including javaop.def with
   the PRE_* macro definitions below.  */
2982 note_instructions (JCF *jcf, tree method)
2985 unsigned char* byte_ops;
2986 long length = DECL_CODE_LENGTH (method);
/* RET clashes with a target macro from config/i386/i386.h.  */
2991 #undef RET /* Defined by config/i386/i386.h */
2993 #define BCODE byte_ops
2994 #define BYTE_type_node byte_type_node
2995 #define SHORT_type_node short_type_node
2996 #define INT_type_node int_type_node
2997 #define LONG_type_node long_type_node
2998 #define CHAR_type_node char_type_node
2999 #define PTR_type_node ptr_type_node
3000 #define FLOAT_type_node float_type_node
3001 #define DOUBLE_type_node double_type_node
3002 #define VOID_type_node void_type_node
/* The *_INDEX macros also record that an index operand was seen.  */
3003 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3004 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3005 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3006 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3008 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
/* Size the per-PC bookkeeping tables for this method's code.  */
3010 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3011 byte_ops = jcf->read_ptr;
3012 instruction_bits = xrealloc (instruction_bits, length + 1);
3013 memset (instruction_bits, 0, length + 1);
3014 type_states = VEC_alloc (tree, gc, length + 1);
3015 VEC_safe_grow_cleared (tree, gc, type_states, length + 1);
3017 /* This pass figures out which PC can be the targets of jumps. */
3018 for (PC = 0; PC < length;)
3020 int oldpc = PC; /* PC at instruction start. */
3021 instruction_bits [PC] |= BCODE_INSTRUCTION_START;
3022 switch (byte_ops[PC++])
/* In this pre-pass each opcode only consumes its operands (advancing
   PC) and marks branch targets; no trees are built.  */
3024 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3026 PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3029 #define NOTE_LABEL(PC) note_label(oldpc, PC)
3031 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3032 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3033 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3034 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3035 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3036 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3037 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3038 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3040 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3041 PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3042 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3043 ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3044 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3045 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3046 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3047 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3049 /* two forms of wide instructions */
3050 #define PRE_SPECIAL_WIDE(IGNORE) \
3052 int modified_opcode = IMMEDIATE_u1; \
3053 if (modified_opcode == OPCODE_iinc) \
3055 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3056 (void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
3060 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3064 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3066 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3068 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3069 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3070 PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3071 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
3072 #define PRE_ARRAY_STORE(TYPE) /* nothing */
3073 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3074 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3075 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3076 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3077 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
/* Branch-like opcodes register their (relative) targets as labels.  */
3079 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3080 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3081 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3082 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3083 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3084 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3085 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3087 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3089 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)
/* Switch payloads are 4-byte aligned relative to the method start.  */
3091 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3092 PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3094 #define PRE_LOOKUP_SWITCH \
3095 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3096 NOTE_LABEL (default_offset+oldpc); \
3098 while (--npairs >= 0) { \
3099 jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
3100 jint offset = IMMEDIATE_s4; \
3101 NOTE_LABEL (offset+oldpc); } \
3104 #define PRE_TABLE_SWITCH \
3105 { jint default_offset = IMMEDIATE_s4; \
3106 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3107 NOTE_LABEL (default_offset+oldpc); \
3109 while (low++ <= high) { \
3110 jint offset = IMMEDIATE_s4; \
3111 NOTE_LABEL (offset+oldpc); } \
3114 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3115 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3116 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3117 (void)(IMMEDIATE_u2); \
3118 PC += 2 * IS_INTERFACE /* for invokeinterface */;
/* javaop.def expands to the full opcode switch using the macros above.  */
3120 #include "javaop.def"
/* Translate METHOD's JVM bytecode into GENERIC trees.  First annotates
   instruction_bits[] with line-number info, runs the verifier, then
   walks the bytecode emitting statements via process_jvm_instruction,
   replacing unverified (dead) regions with nops.  */
3127 expand_byte_code (JCF *jcf, tree method)
3131 const unsigned char *linenumber_pointer;
3132 int dead_code_index = -1;
3133 unsigned char* byte_ops;
3134 long length = DECL_CODE_LENGTH (method);
3137 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3138 byte_ops = jcf->read_ptr;
3140 /* We make an initial pass of the line number table, to note
3141 which instructions have associated line number entries. */
3142 linenumber_pointer = linenumber_table;
3143 for (i = 0; i < linenumber_count; i++)
3145 int pc = GET_u2 (linenumber_pointer);
/* Each line table entry is 4 bytes: u2 start_pc, u2 line_number.  */
3146 linenumber_pointer += 4;
3148 warning (0, "invalid PC in line number table");
/* A second entry for the same PC marks it as multi-line.  */
3151 if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3152 instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3153 instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
/* Bail out of translation entirely if verification fails.  */
3157 if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3160 promote_arguments ();
3161 cache_this_class_ref (method);
3162 cache_cpool_data_ref ();
3164 /* Translate bytecodes. */
3165 linenumber_pointer = linenumber_table;
3166 for (PC = 0; PC < length;)
/* Emit a label (and reload the verifier's type state) at every PC
   that can be reached by a jump, plus the method entry.  */
3168 if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3170 tree label = lookup_label (PC);
3171 flush_quick_stack ();
3172 if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3173 java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
3174 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3175 load_type_state (PC);
3178 if (! (instruction_bits [PC] & BCODE_VERIFIED))
3180 if (dead_code_index == -1)
3182 /* This is the start of a region of unreachable bytecodes.
3183 They still need to be processed in order for EH ranges
3184 to get handled correctly. However, we can simply
3185 replace these bytecodes with nops. */
3186 dead_code_index = PC;
3189 /* Turn this bytecode into a nop. */
3194 if (dead_code_index != -1)
3196 /* We've just reached the end of a region of dead code. */
3198 warning (0, "unreachable bytecode from %d to before %d",
3199 dead_code_index, PC);
3200 dead_code_index = -1;
3204 /* Handle possible line number entry for this PC.
3206 This code handles out-of-order and multiple linenumbers per PC,
3207 but is optimized for the case of line numbers increasing
3208 monotonically with PC. */
3209 if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
3211 if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3212 || GET_u2 (linenumber_pointer) != PC)
3213 linenumber_pointer = linenumber_table;
3214 while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3216 int pc = GET_u2 (linenumber_pointer);
3217 linenumber_pointer += 4;
3220 int line = GET_u2 (linenumber_pointer - 2);
3221 input_location = linemap_line_start (line_table, line, 1);
3222 if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
/* Local-variable scopes open/close at instruction boundaries.  */
3227 maybe_pushlevels (PC);
3228 PC = process_jvm_instruction (PC, byte_ops, length);
3229 maybe_poplevels (PC);
3232 uncache_this_class_ref (method);
/* Dead code may extend to the very end of the method.  */
3234 if (dead_code_index != -1)
3236 /* We've just reached the end of a region of dead code. */
3238 warning (0, "unreachable bytecode from %d to the end of the method",
/* Expand the `ldc'/`ldc_w' family: push constant pool entry INDEX of
   JCF onto the quick stack.  Strings become interned String references,
   Class entries become class references; everything else (int, float,
   long, double) goes through get_constant.  */
3244 java_push_constant_from_pool (JCF *jcf, int index)
3247 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3250 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3251 index = alloc_name_constant (CONSTANT_String, name);
3252 c = build_ref_from_constant_pool (index);
3253 c = convert (promote_type (string_type_node), c);
3255 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3256 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3258 tree record = get_class_constant (jcf, index);
3259 c = build_class_ref (record);
/* Primitive constants: int/long/float/double.  */
3262 c = get_constant (jcf, index);
3267 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3268 long length ATTRIBUTE_UNUSED)
3270 const char *opname; /* Temporary ??? */
3271 int oldpc = PC; /* PC at instruction start. */
3273 /* If the instruction is at the beginning of an exception handler,
3274 replace the top of the stack with the thrown object reference. */
3275 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3277 /* Note that the verifier will not emit a type map at all for
3278 dead exception handlers. In this case we just ignore the
3280 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3282 tree type = pop_type (promote_type (throwable_type_node));
3283 push_value (build_exception_object_ref (type));
3287 switch (byte_ops[PC++])
3289 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3292 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3295 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3297 int saw_index = 0; \
3298 int index = OPERAND_VALUE; \
3300 (find_local_variable (index, return_address_type_node, oldpc)); \
3303 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3305 /* OPERAND_VALUE may have side-effects on PC */ \
3306 int opvalue = OPERAND_VALUE; \
3307 build_java_jsr (oldpc + opvalue, PC); \
3310 /* Push a constant onto the stack. */
3311 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3312 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3313 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3314 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3316 /* internal macro added for use by the WIDE case */
3317 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3318 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3320 /* Push local variable onto the opcode stack. */
3321 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3323 /* have to do this since OPERAND_VALUE may have side-effects */ \
3324 int opvalue = OPERAND_VALUE; \
3325 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3328 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3329 expand_java_return (OPERAND_TYPE##_type_node)
3331 #define REM_EXPR TRUNC_MOD_EXPR
3332 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3333 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3335 #define FIELD(IS_STATIC, IS_PUT) \
3336 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3338 #define TEST(OPERAND_TYPE, CONDITION) \
3339 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3341 #define COND(OPERAND_TYPE, CONDITION) \
3342 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3344 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3345 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3347 #define BRANCH_GOTO(OPERAND_VALUE) \
3348 expand_java_goto (oldpc + OPERAND_VALUE)
3350 #define BRANCH_CALL(OPERAND_VALUE) \
3351 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3354 #define BRANCH_RETURN(OPERAND_VALUE) \
3356 tree type = OPERAND_TYPE##_type_node; \
3357 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3358 expand_java_ret (value); \
3362 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3363 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3364 fprintf (stderr, "(not implemented)\n")
3365 #define NOT_IMPL1(OPERAND_VALUE) \
3366 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3367 fprintf (stderr, "(not implemented)\n")
3369 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3371 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3373 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3375 #define STACK_SWAP(COUNT) java_stack_swap()
3377 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3378 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3379 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3381 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3382 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3384 #define LOOKUP_SWITCH \
3385 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3386 tree selector = pop_value (INT_type_node); \
3387 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3388 while (--npairs >= 0) \
3390 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3391 expand_java_add_case (switch_expr, match, oldpc + offset); \
3395 #define TABLE_SWITCH \
3396 { jint default_offset = IMMEDIATE_s4; \
3397 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3398 tree selector = pop_value (INT_type_node); \
3399 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3400 for (; low <= high; low++) \
3402 jint offset = IMMEDIATE_s4; \
3403 expand_java_add_case (switch_expr, low, oldpc + offset); \
/* invoke* opcodes: read the method's constant-pool index; \
   invokeinterface additionally carries a count byte and an ignored \
   byte, both consumed here.  */ \
3407 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3408 { int opcode = byte_ops[PC-1]; \
3409 int method_ref_index = IMMEDIATE_u2; \
3411 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3413 expand_invoke (opcode, method_ref_index, nargs); \
3416 /* Handle new, checkcast, instanceof */
3417 #define OBJECT(TYPE, OP) \
3418 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
/* Array opcodes dispatch on SUBOP (LOAD, STORE, LENGTH, NEW).  */
3420 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3422 #define ARRAY_LOAD(OPERAND_TYPE) \
3424 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3427 #define ARRAY_STORE(OPERAND_TYPE) \
3429 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3432 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
/* Array creation: the ARRAY_NEW_* variant is selected by suffix.
   anewarray (PTR): element class from the constant pool, count popped
   from the stack.  */
3433 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3434 #define ARRAY_NEW_PTR() \
3435 push_value (build_anewarray (get_class_constant (current_jcf, \
3437 pop_value (int_type_node)));
/* newarray (NUM): one-byte element-type code, count popped.  */
3438 #define ARRAY_NEW_NUM() \
3440 int atype = IMMEDIATE_u1; \
3441 push_value (build_newarray (atype, pop_value (int_type_node)));\
/* multianewarray (MULTI): class-constant index plus dimension count.  */ \
3443 #define ARRAY_NEW_MULTI() \
3445 tree class = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3446 int ndims = IMMEDIATE_u1; \
3447 expand_java_multianewarray( class, ndims ); \
/* ineg/lneg/fneg/dneg: negate the value on top of the stack.  */ \
3450 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3451 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3452 pop_value (OPERAND_TYPE##_type_node)));
/* Narrowing conversions: convert, then wrap the result back up as an
   int via NOP_EXPR, matching the JVM's int-typed operand stack.  */
3454 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3456 push_value (build1 (NOP_EXPR, int_type_node, \
3457 (convert (TO_TYPE##_type_node, \
3458 pop_value (FROM_TYPE##_type_node))))); \
/* Plain conversion between stack types.  */ \
3461 #define CONVERT(FROM_TYPE, TO_TYPE) \
3463 push_value (convert (TO_TYPE##_type_node, \
3464 pop_value (FROM_TYPE##_type_node))); \
3467 /* internal macro added for use by the WIDE case
3468 Added TREE_TYPE (decl) assignment, apbianco */
/* Pop a value and store it into local-variable slot OPVALUE, finding
   the matching VAR_DECL and recording the slot's type.  */
3469 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3472 int index = OPVALUE; \
3473 tree type = OPTYPE; \
3474 value = pop_value (type); \
/* Record the type of the value actually popped (see note above).  */ \
3475 type = TREE_TYPE (value); \
3476 decl = find_local_variable (index, type, oldpc); \
3477 set_local_type (index, type); \
3478 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
/* <t>store: evaluate OPERAND_VALUE exactly once before expanding.  */ \
3481 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3483 /* have to do this since OPERAND_VALUE may have side-effects */ \
3484 int opvalue = OPERAND_VALUE; \
3485 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
/* Dispatch "special" opcodes to the matching SPECIAL_* macro.  */ \
3488 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3489 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
/* monitorenter/monitorexit expand to calls to the runtime's
   soft_monitorenter/soft_monitorexit routines.  */
3491 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3492 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
/* Pop the object operand, flush the quick stack, and emit the
   monitor call with side effects marked.  */
3494 #define MONITOR_OPERATION(call) \
3496 tree o = pop_value (ptr_type_node); \
3498 flush_quick_stack (); \
3499 c = build_java_monitor (call, o); \
3500 TREE_SIDE_EFFECTS (c) = 1; \
3501 java_add_stmt (c); \
/* iinc: unsigned 8-bit local slot plus signed 8-bit increment.  */ \
3504 #define SPECIAL_IINC(IGNORED) \
3506 unsigned int local_var_index = IMMEDIATE_u1; \
3507 int ival = IMMEDIATE_s1; \
3508 expand_iinc(local_var_index, ival, oldpc); \
/* Expand a `wide' prefix: the modified opcode that follows takes a
   16-bit local-variable index (and, for iinc, a signed 16-bit
   increment) instead of the usual 8-bit operands.  Fixes the spelling
   of the diagnostic ("unrecogized" -> "unrecognized").  */
#define SPECIAL_WIDE(IGNORED) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    unsigned int local_var_index = IMMEDIATE_u2; \
    switch (modified_opcode) \
      { \
      case OPCODE_iinc: \
	{ \
	  int ival = IMMEDIATE_s2; \
	  expand_iinc (local_var_index, ival, oldpc); \
	  break; \
	} \
      case OPCODE_iload: \
      case OPCODE_lload: \
      case OPCODE_fload: \
      case OPCODE_dload: \
      case OPCODE_aload: \
	{ \
	  /* duplicate code from LOAD macro */ \
	  LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
	  break; \
	} \
      case OPCODE_istore: \
      case OPCODE_lstore: \
      case OPCODE_fstore: \
      case OPCODE_dstore: \
      case OPCODE_astore: \
	{ \
	  STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
	  break; \
	} \
      default: \
	error ("unrecognized wide sub-instruction"); \
      } \
  }
3547 #define SPECIAL_THROW(IGNORED) \
3548 build_java_athrow (pop_value (throwable_type_node))
/* Remaining opcode kinds fall back to the not-implemented stubs.  */
3550 #define SPECIAL_BREAK NOT_IMPL1
3551 #define IMPL NOT_IMPL
/* Generate the per-opcode switch cases from the opcode definitions.  */
3553 #include "javaop.def"
3556 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3561 /* Return the opcode at PC in the code section pointed to by
3564 static unsigned char
3565 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3567 unsigned char opcode;
/* Save the current read position so it can be restored below.  */
3568 long absolute_offset = (long)JCF_TELL (jcf);
3570 JCF_SEEK (jcf, code_offset);
3571 opcode = jcf->read_ptr [pc];
/* Restore the caller's read position before returning.  */
3572 JCF_SEEK (jcf, absolute_offset);
3576 /* Some bytecode compilers are emitting accurate LocalVariableTable
3577 attributes. Here's an example:
3582 Attribute "LocalVariableTable"
3583 slot #<n>: ... (PC: PC+1 length: L)
3585 This is accurate because the local in slot <n> really exists after
3586 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3588 This procedure recognizes this situation and extends the live range
3589 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3590 length of the store instruction.)
3592 This function is used by `give_name_to_locals' so that a local's
3593 DECL features a DECL_LOCAL_START_PC such that the first related
3594 store operation will use DECL as a destination, not an unrelated
3595 temporary created for the occasion.
3597 This function uses a global (instruction_bits) that `note_instructions' should
3598 have allocated and filled properly. */
3601 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3602 int start_pc, int slot)
3604 int first, index, opcode;
3613 /* Find last previous instruction and remember it */
3614 for (pc = start_pc-1; pc; pc--)
3615 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3619 /* Retrieve the instruction, handle `wide'. */
3620 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3621 if (opcode == OPCODE_wide)
3624 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
/* Match the one-byte <t>store_<n> forms; FIRST records the base opcode
   of the matched family so that (opcode - first) below yields <n>.  */
3629 case OPCODE_astore_0:
3630 case OPCODE_astore_1:
3631 case OPCODE_astore_2:
3632 case OPCODE_astore_3:
3633 first = OPCODE_astore_0;
3636 case OPCODE_istore_0:
3637 case OPCODE_istore_1:
3638 case OPCODE_istore_2:
3639 case OPCODE_istore_3:
3640 first = OPCODE_istore_0;
3643 case OPCODE_lstore_0:
3644 case OPCODE_lstore_1:
3645 case OPCODE_lstore_2:
3646 case OPCODE_lstore_3:
3647 first = OPCODE_lstore_0;
3650 case OPCODE_fstore_0:
3651 case OPCODE_fstore_1:
3652 case OPCODE_fstore_2:
3653 case OPCODE_fstore_3:
3654 first = OPCODE_fstore_0;
3657 case OPCODE_dstore_0:
3658 case OPCODE_dstore_1:
3659 case OPCODE_dstore_2:
3660 case OPCODE_dstore_3:
3661 first = OPCODE_dstore_0;
/* Otherwise read the store's explicit slot operand; a second operand
   byte is combined in for the `wide' form.  */
3669 index = peek_opcode_at_pc (jcf, code_offset, pc);
3672 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3673 index = (other << 8) + index;
3678 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3679 means we have a <t>store. */
3680 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3686 /* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
3687 order, as specified by Java Language Specification.
3689 The problem is that while expand_expr will evaluate its sub-operands in
3690 left-to-right order, for variables it will just return an rtx (i.e.
3691 an lvalue) for the variable (rather than an rvalue). So it is possible
3692 that a later sub-operand will change the register, and when the
3693 actual operation is done, it will use the new value, when it should
3694 have used the original value.
3696 We fix this by using save_expr. This forces the sub-operand to be
3697 copied into a fresh virtual register,
3699 For method invocation, we modify the arguments so that a
3700 left-to-right order evaluation is performed. Saved expressions
3701 will, in CALL_EXPR order, be reused when the call will be expanded.
3703 We also promote outgoing args if needed. */
3706 force_evaluation_order (tree node)
/* Nothing to do when only checking syntax.  */
3708 if (flag_syntax_only)
/* Only calls (possibly wrapped in a COMPOUND_EXPR by a constructor
   transformation) need their arguments forced.  */
3710 if (TREE_CODE (node) == CALL_EXPR
3711 || (TREE_CODE (node) == COMPOUND_EXPR
3712 && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
3713 && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
3718 /* Account for wrapped around ctors. */
3719 if (TREE_CODE (node) == COMPOUND_EXPR)
3720 call = TREE_OPERAND (node, 0);
3724 nargs = call_expr_nargs (call);
3726 /* This reverses the evaluation order. This is a desired effect. */
3727 for (i = 0, cmp = NULL_TREE; i < nargs; i++)
3729 tree arg = CALL_EXPR_ARG (call, i);
3730 /* Promote types smaller than integer. This is required by
3732 tree type = TREE_TYPE (arg);
3734 if (targetm.calls.promote_prototypes (type)
3735 && INTEGRAL_TYPE_P (type)
3736 && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
3737 TYPE_SIZE (integer_type_node)))
3738 arg = fold_convert (integer_type_node, arg);
/* Save each (recursively forced) argument and chain the SAVE_EXPRs
   together with COMPOUND_EXPRs; the saved value is reused in the
   call's argument list.  */
3740 saved = save_expr (force_evaluation_order (arg));
3741 cmp = (cmp == NULL_TREE ? saved :
3742 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));
3744 CALL_EXPR_ARG (call, i) = saved;
3747 if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
3748 TREE_SIDE_EFFECTS (cmp) = 1;
/* Prepend the chained argument evaluations to NODE so they happen
   before the call itself.  */
3752 cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
3753 if (TREE_TYPE (cmp) != void_type_node)
3754 cmp = save_expr (cmp);
3755 TREE_SIDE_EFFECTS (cmp) = 1;
3762 /* Build a node to represent empty statements and blocks. */
3765 build_java_empty_stmt (void)
/* Wrap the generic empty-statement constructor so Java code has a
   single entry point for empty statements and blocks.  */
3767 tree t = build_empty_stmt ();
3771 /* Promote all args of integral type before generating any code. */
3774 promote_arguments (void)
/* Walk the current function's parameters, tracking the local-variable
   slot index I alongside each PARM_DECL.  */
3778 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3779 arg != NULL_TREE; arg = TREE_CHAIN (arg), i++)
3781 tree arg_type = TREE_TYPE (arg);
/* Integral parameters narrower than 32 bits are widened: copy each
   into the int-typed local for the same slot.  */
3782 if (INTEGRAL_TYPE_P (arg_type)
3783 && TYPE_PRECISION (arg_type) < 32)
3785 tree copy = find_local_variable (i, integer_type_node, -1);
3786 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3788 fold_convert (integer_type_node, arg)));
/* 64-bit (wide) types occupy two local-variable slots.  */
3790 if (TYPE_IS_WIDE (arg_type))
3795 /* Create a local variable that points to the constant pool. */
3798 cache_cpool_data_ref (void)
/* Make a local VAR_DECL holding a pointer to the constant-pool data.  */
3803 tree d = build_constant_data_ref (flag_indirect_classes);
3804 tree cpool_ptr = build_decl (VAR_DECL, NULL_TREE,
3805 build_pointer_type (TREE_TYPE (d)));
3806 java_add_local_var (cpool_ptr);
/* The pointer never changes once assigned; tell the optimizers.  */
3807 TREE_INVARIANT (cpool_ptr) = 1;
3808 TREE_CONSTANT (cpool_ptr) = 1;
3810 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3811 cpool_ptr, build_address_of (d)));
/* Dereferences of the cached pointer cannot trap; record the
   INDIRECT_REF as the class's constant-pool data reference.  */
3812 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3813 TREE_THIS_NOTRAP (cpool) = 1;
3814 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3818 #include "gt-java-expr.h"