1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA.
22 Java and all Java-based marks are trademarks or registered trademarks
23 of Sun Microsystems, Inc. in the United States and other countries.
24 The Free Software Foundation is independent of Sun Microsystems, Inc. */
26 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
30 #include "coretypes.h"
37 #include "java-tree.h"
39 #include "java-opcodes.h"
41 #include "java-except.h"
46 #include "tree-gimple.h"
/* Forward declarations for the file-local bytecode-expansion helpers
   defined later in this file.  */
49 static void flush_quick_stack (void);
50 static void push_value (tree);
51 static tree pop_value (tree);
52 static void java_stack_swap (void);
53 static void java_stack_dup (int, int);
54 static void build_java_athrow (tree);
55 static void build_java_jsr (int, int);
56 static void build_java_ret (tree);
57 static void expand_java_multianewarray (tree, int);
58 static void expand_java_arraystore (tree);
59 static void expand_java_arrayload (tree);
60 static void expand_java_array_length (void);
61 static tree build_java_monitor (tree, tree);
62 static void expand_java_pushc (int, tree);
63 static void expand_java_return (tree);
64 static void expand_load_internal (int, tree, int);
65 static void expand_java_NEW (tree);
66 static void expand_java_INSTANCEOF (tree);
67 static void expand_java_CHECKCAST (tree);
68 static void expand_iinc (unsigned int, int, int);
69 static void expand_java_binop (tree, enum tree_code);
70 static void note_label (int, int);
71 static void expand_compare (enum tree_code, tree, tree, int);
72 static void expand_test (enum tree_code, tree, int);
73 static void expand_cond (enum tree_code, tree, int);
74 static void expand_java_goto (int);
75 static tree expand_java_switch (tree, int);
76 static void expand_java_add_case (tree, int, int);
77 static tree pop_arguments (tree);
78 static void expand_invoke (int, int, int);
79 static void expand_java_field_op (int, int, int);
80 static void java_push_constant_from_pool (struct JCF *, int);
81 static void java_stack_pop (int);
82 static tree build_java_throw_out_of_bounds_exception (tree);
83 static tree build_java_check_indexed_type (tree, tree);
84 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
85 static void promote_arguments (void);
86 static void cache_cpool_data_ref (void);
/* Operand types for typed bytecode opcodes, indexed by opcode number;
   filled in by init_expr_processing below.  */
88 static GTY(()) tree operand_type[59];
/* Cached identifier nodes used when building references into class
   metadata (presumably the `methods' and `ncode' fields -- confirm
   against their uses, which are outside this chunk).  */
90 static GTY(()) tree methods_ident;
91 static GTY(()) tree ncode_ident;
92 tree dtable_ident = NULL_TREE;
94 /* Set to nonzero value in order to emit class initialization code
95 before static field references. */
96 int always_initialize_class_p = 0;
98 /* We store the stack state in two places:
99 Within a basic block, we use the quick_stack, which is a
100 pushdown list (TREE_LISTs) of expression nodes.
101 This is the top part of the stack; below that we use find_stack_slot.
102 At the end of a basic block, the quick_stack must be flushed
103 to the stack slot array (as handled by find_stack_slot).
104 Using quick_stack generates better code (especially when
105 compiled without optimization), because we do not have to
106 explicitly store and load trees to temporary variables.
108 If a variable is on the quick stack, it means the value of variable
109 when the quick stack was last flushed. Conceptually, flush_quick_stack
110 saves all the quick_stack elements in parallel. However, that is
111 complicated, so it actually saves them (i.e. copies each stack value
112 to its home virtual register) from low indexes. This allows a quick_stack
113 element at index i (counting from the bottom of the stack) to reference
114 slot virtuals for registers that are >= i, but not those that are deeper.
115 This convention makes most operations easier. For example iadd works
116 even when the stack contains (reg[0], reg[1]): It results in the
117 stack containing (reg[0]+reg[1]), which is OK. However, some stack
118 operations are more complicated. For example dup given a stack
119 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
120 the convention, since stack value 1 would refer to a register with
121 lower index (reg[0]), which flush_quick_stack does not safely handle.
122 So dup cannot just add an extra element to the quick_stack, but iadd can.
/* The per-basic-block pushdown list of TREE_LIST expression nodes
   described in the comment above.  */
125 static GTY(()) tree quick_stack;
127 /* A free-list of unused permanent TREE_LIST nodes. */
128 static GTY((deletable)) tree tree_list_free_list;
130 /* The physical memory page size used in this computer. See
131 build_field_ref(). */
132 static GTY(()) tree page_size;
134 /* The stack pointer of the Java virtual machine.
135 This does include the size of the quick_stack. */
/* NOTE(review): the declaration of the stack pointer itself appears to
   have been dropped from this listing; the two variables below hold the
   method's line-number table as read from the bytecode.  */
139 const unsigned char *linenumber_table;
140 int linenumber_count;
142 /* Largest pc so far in this method that has been passed to lookup_label. */
143 int highest_label_pc_this_method = -1;
145 /* Base value for this method to add to pc to get generated label. */
146 int start_label_pc_this_method = 0;
/* One-time setup: record the operand types for the typed load opcodes
   (indexes 21..25) and the matching store opcodes (indexes 54..58).
   NOTE(review): the return-type line of this definition appears to have
   been dropped from this listing.  */
149 init_expr_processing (void)
151 operand_type[21] = operand_type[54] = int_type_node;
152 operand_type[22] = operand_type[55] = long_type_node;
153 operand_type[23] = operand_type[56] = float_type_node;
154 operand_type[24] = operand_type[57] = double_type_node;
155 operand_type[25] = operand_type[58] = ptr_type_node;
/* Convert EXPR to a boolean-valued expression suitable for use as a
   truth value.  NOTE(review): several case labels and braces of the
   switch appear to have been dropped from this listing; the surviving
   cases are annotated below.  */
159 java_truthvalue_conversion (tree expr)
161 /* It is simpler and generates better code to have only TRUTH_*_EXPR
162 or comparison expressions as truth values at this level.
164 This function should normally be identity for Java. */
166 switch (TREE_CODE (expr))
168 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
169 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
170 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
171 case ORDERED_EXPR: case UNORDERED_EXPR:
172 case TRUTH_ANDIF_EXPR:
173 case TRUTH_ORIF_EXPR:
/* Integer constant: fold to a boolean constant directly.  */
182 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
/* Real constant: likewise.  */
185 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
187 /* are these legal? XXX JH */
191 /* These don't change whether an object is nonzero or zero. */
192 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
195 /* Distribute the conversion into the arms of a COND_EXPR. */
196 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
197 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
198 java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
201 /* If this is widening the argument, we can ignore it. */
202 if (TYPE_PRECISION (TREE_TYPE (expr))
203 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
204 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
205 /* fall through to default */
/* Default: compare against boolean false to force a boolean result.  */
208 return fold_build2 (NE_EXPR, boolean_type_node,
209 expr, boolean_false_node);
213 /* Save any stack slots that happen to be in the quick_stack into their
214 home virtual register slots.
216 The copy order is from low stack index to high, to support the invariant
217 that the expression for a slot may contain decls for stack slots with
218 higher (or the same) index, but not lower. */
/* Flush every pending value on quick_stack into its home virtual
   register (stack slot), low stack index first, preserving the
   invariant documented at the top of this file.  */
221 flush_quick_stack (void)
223 int stack_index = stack_pointer;
224 tree prev, cur, next;
226 /* First reverse the quick_stack, and count the number of slots it has. */
227 for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next)
229 next = TREE_CHAIN (cur);
230 TREE_CHAIN (cur) = prev;
/* Wide (two-word) values occupy two stack slots.  */
232 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (TREE_VALUE (cur)));
/* Now pop each value off the reversed list, emit a store into its
   stack-slot decl, and recycle the TREE_LIST node onto the free list.  */
236 while (quick_stack != NULL_TREE)
239 tree node = quick_stack, type;
240 quick_stack = TREE_CHAIN (node);
241 TREE_CHAIN (node) = tree_list_free_list;
242 tree_list_free_list = node;
243 node = TREE_VALUE (node);
244 type = TREE_TYPE (node);
246 decl = find_stack_slot (stack_index, type);
248 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (node), decl, node));
249 stack_index += 1 + TYPE_IS_WIDE (type);
253 /* Push TYPE on the type stack.
254 Return nonzero on success, 0 on overflow. */
/* Push TYPE onto the type stack, after promotion; wide types take two
   slots, with TYPE_SECOND marking the extra word.  Fails (the overflow
   branch; its return appears dropped from this listing) when the
   method's declared max stack depth would be exceeded.  */
257 push_type_0 (tree type)
260 type = promote_type (type);
261 n_words = 1 + TYPE_IS_WIDE (type);
262 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
264 /* Allocate decl for this variable now, so we get a temporary that
265 survives the whole method. */
266 find_stack_slot (stack_pointer, type);
267 stack_type_map[stack_pointer++] = type;
/* Mark the second word of a wide value.  */
269 while (--n_words >= 0)
270 stack_type_map[stack_pointer++] = TYPE_SECOND;
/* Push TYPE on the type stack; unlike push_type_0, overflow here is a
   hard error (the check on R appears dropped from this listing).  */
275 push_type (tree type)
277 int r = push_type_0 (type);
/* Push the tree node VALUE onto quick_stack, promoting sub-int
   integral values to int first.  Recycles a TREE_LIST node from
   tree_list_free_list when one is available.  */
282 push_value (tree value)
284 tree type = TREE_TYPE (value);
285 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
287 type = promote_type (type);
288 value = convert (type, value);
291 if (tree_list_free_list == NULL_TREE)
292 quick_stack = tree_cons (NULL_TREE, value, quick_stack);
295 tree node = tree_list_free_list;
296 tree_list_free_list = TREE_CHAIN (tree_list_free_list);
297 TREE_VALUE (node) = value;
298 TREE_CHAIN (node) = quick_stack;
301 /* If the value has a side effect, then we need to evaluate it
302 whether or not the result is used. If the value ends up on the
303 quick stack and is then popped, this won't happen -- so we flush
304 the quick stack. It is safest to simply always flush, though,
305 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
306 the latter we may need to strip conversions. */
307 flush_quick_stack ();
310 /* Pop a type from the type stack.
311 TYPE is the expected type. Return the actual type, which must be
313 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
/* Pop a type from the type stack; TYPE is the expected type.  Returns
   the actual popped type when it is assignable to TYPE (several return
   statements appear dropped from this listing); on error sets
   *MESSAGEP to a freshly malloc'd message.  */
316 pop_type_0 (tree type, char **messagep)
321 if (TREE_CODE (type) == RECORD_TYPE)
322 type = promote_type (type);
323 n_words = 1 + TYPE_IS_WIDE (type);
324 if (stack_pointer < n_words)
326 *messagep = xstrdup ("stack underflow");
/* The extra word(s) of a wide value must be TYPE_SECOND markers --
   NOTE(review): this listing shows void_type_node being checked;
   confirm TYPE_SECOND is defined as void_type_node.  */
329 while (--n_words > 0)
331 if (stack_type_map[--stack_pointer] != void_type_node)
333 *messagep = xstrdup ("Invalid multi-word value on type stack");
337 t = stack_type_map[--stack_pointer];
338 if (type == NULL_TREE || t == type)
/* A TREE_LIST on the type stack encodes an unresolved reference
   type; check it can widen to the expected type.  */
340 if (TREE_CODE (t) == TREE_LIST)
344 tree tt = TREE_PURPOSE (t);
345 if (! can_widen_reference_to (tt, type))
355 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
356 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
358 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
360 /* If the expected type we've been passed is object or ptr
361 (i.e. void*), the caller needs to know the real type. */
362 if (type == ptr_type_node || type == object_ptr_type_node)
365 /* Since the verifier has already run, we know that any
366 types we see will be compatible. In BC mode, this fact
367 may be checked at runtime, but if that is so then we can
368 assume its truth here as well. So, we always succeed
369 here, with the expected type. */
373 if (! flag_verify_invocations && flag_indirect_dispatch
374 && t == object_ptr_type_node)
376 if (type != ptr_type_node)
377 warning (0, "need to insert runtime check for %s",
378 xstrdup (lang_printable_name (type, 0)));
382 /* lang_printable_name uses a static buffer, so we must save the result
383 from calling it the first time. */
386 char *temp = xstrdup (lang_printable_name (type, 0));
387 /* If the stack contains a multi-word type, keep popping the stack until
388 the real type is found. */
389 while (t == void_type_node)
390 t = stack_type_map[--stack_pointer];
391 *messagep = concat ("expected type '", temp,
392 "' but stack contains '", lang_printable_name (t, 0),
399 /* Pop a type from the type stack.
400 TYPE is the expected type. Return the actual type, which must be
401 convertible to TYPE, otherwise call error. */
/* NOTE(review): the signature line of this wrapper (pop_type) appears
   to have been dropped from this listing; it simply forwards to
   pop_type_0 and reports any failure message via error ().  */
406 char *message = NULL;
407 type = pop_type_0 (type, &message);
410 error ("%s", message);
417 /* Return true if two type assertions are equal. */
/* Hash-table equality callback: return nonzero if the two type
   assertions are equal.  NOTE(review): the comparison of the op1
   fields (original line 425) appears dropped from this listing.  */
420 type_assertion_eq (const void * k1_p, const void * k2_p)
422 type_assertion k1 = *(type_assertion *)k1_p;
423 type_assertion k2 = *(type_assertion *)k2_p;
424 return (k1.assertion_code == k2.assertion_code
426 && k1.op2 == k2.op2);
429 /* Hash a type assertion. */
/* Hash-table hash callback: combine the assertion code and both
   operand pointers into a single hash value.  */
432 type_assertion_hash (const void *p)
434 const type_assertion *k_p = p;
435 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
436 k_p->assertion_code, 0);
437 hash = iterative_hash (&k_p->op1, sizeof k_p->op1, hash);
438 return iterative_hash (&k_p->op2, sizeof k_p->op2, hash);
441 /* Add an entry to the type assertion table for the given class.
442 CLASS is the class for which this assertion will be evaluated by the
443 runtime during loading/initialization.
444 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
445 OP1 and OP2 are the operands. The tree type of these arguments may be
446 specific to each assertion_code. */
449 add_type_assertion (tree class, int assertion_code, tree op1, tree op2)
451 htab_t assertions_htab;
455 assertions_htab = TYPE_ASSERTIONS (class);
456 if (assertions_htab == NULL)
458 assertions_htab = htab_create_ggc (7, type_assertion_hash,
459 type_assertion_eq, NULL);
460 TYPE_ASSERTIONS (current_class) = assertions_htab;
463 as.assertion_code = assertion_code;
467 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
469 /* Don't add the same assertion twice. */
473 *as_pp = ggc_alloc (sizeof (type_assertion));
474 **(type_assertion **)as_pp = as;
478 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
479 Handles array types and interfaces. */
/* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE;
   handles array covariance and interface implementation.  Several
   return statements and braces appear dropped from this listing.  */
482 can_widen_reference_to (tree source_type, tree target_type)
484 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
487 /* Get rid of pointers */
488 if (TREE_CODE (source_type) == POINTER_TYPE)
489 source_type = TREE_TYPE (source_type);
490 if (TREE_CODE (target_type) == POINTER_TYPE)
491 target_type = TREE_TYPE (target_type);
493 if (source_type == target_type)
496 /* FIXME: This is very pessimistic, in that it checks everything,
497 even if we already know that the types are compatible. If we're
498 to support full Java class loader semantics, we need this.
499 However, we could do something more optimal. */
500 if (! flag_verify_invocations)
502 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
503 source_type, target_type);
506 warning (0, "assert: %s is assign compatible with %s",
507 xstrdup (lang_printable_name (target_type, 0)),
508 xstrdup (lang_printable_name (source_type, 0)));
509 /* Punt everything to runtime. */
/* Dummy (not-yet-loaded) types cannot be decided at compile time.  */
513 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
519 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
521 HOST_WIDE_INT source_length, target_length;
522 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
524 /* An array implements Cloneable and Serializable. */
525 tree name = DECL_NAME (TYPE_NAME (target_type));
526 return (name == java_lang_cloneable_identifier_node
527 || name == java_io_serializable_identifier_node);
/* Fixed-length array types must agree on their length.  */
529 target_length = java_array_type_length (target_type);
530 if (target_length >= 0)
532 source_length = java_array_type_length (source_type);
533 if (source_length != target_length)
/* Recurse on the element types (arrays of references only).  */
536 source_type = TYPE_ARRAY_ELEMENT (source_type);
537 target_type = TYPE_ARRAY_ELEMENT (target_type);
538 if (source_type == target_type)
540 if (TREE_CODE (source_type) != POINTER_TYPE
541 || TREE_CODE (target_type) != POINTER_TYPE)
543 return can_widen_reference_to (source_type, target_type);
547 int source_depth = class_depth (source_type);
548 int target_depth = class_depth (target_type);
550 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
553 warning (0, "assert: %s is assign compatible with %s",
554 xstrdup (lang_printable_name (target_type, 0)),
555 xstrdup (lang_printable_name (source_type, 0)));
559 /* class_depth can return a negative depth if an error occurred */
560 if (source_depth < 0 || target_depth < 0)
563 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
565 /* target_type is OK if source_type or source_type ancestors
566 implement target_type. We handle multiple sub-interfaces */
567 tree binfo, base_binfo;
570 for (binfo = TYPE_BINFO (source_type), i = 0;
571 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
572 if (can_widen_reference_to
573 (BINFO_TYPE (base_binfo), target_type))
/* Walk up the superclass chain until the depths match, then the
   classes must be identical for the widening to be valid.  */
580 for ( ; source_depth > target_depth; source_depth--)
583 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
585 return source_type == target_type;
/* Pop a value of the expected TYPE: take it from quick_stack when one
   is pending (recycling the TREE_LIST node), otherwise load it from
   its home stack slot.  */
591 pop_value (tree type)
593 type = pop_type (type);
596 tree node = quick_stack;
597 quick_stack = TREE_CHAIN (quick_stack);
598 TREE_CHAIN (node) = tree_list_free_list;
599 tree_list_free_list = node;
600 node = TREE_VALUE (node);
604 return find_stack_slot (stack_pointer, promote_type (type));
608 /* Pop and discard the top COUNT stack slots. */
/* Pop and discard the top COUNT stack slots; a TYPE_SECOND marker
   means the slot is the upper half of a wide value, so the real type
   is one slot further down.  */
611 java_stack_pop (int count)
617 gcc_assert (stack_pointer != 0);
619 type = stack_type_map[stack_pointer - 1];
620 if (type == TYPE_SECOND)
623 gcc_assert (stack_pointer != 1 && count > 0);
625 type = stack_type_map[stack_pointer - 2];
627 val = pop_value (type);
632 /* Implement the 'swap' operator (to swap two top stack slots). */
/* Implement the 'swap' operator (to swap two top stack slots): flush
   pending values, then exchange the two single-word slots through a
   fresh temporary, updating the type map to match.  */
635 java_stack_swap (void)
641 if (stack_pointer < 2
642 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_UNKNOWN
643 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_UNKNOWN
644 || type1 == TYPE_SECOND || type2 == TYPE_SECOND
645 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
646 /* Bad stack swap. */
648 /* Bad stack swap. */
650 flush_quick_stack ();
651 decl1 = find_stack_slot (stack_pointer - 1, type1);
652 decl2 = find_stack_slot (stack_pointer - 2, type2);
653 temp = build_decl (VAR_DECL, NULL_TREE, type1);
654 java_add_local_var (temp);
655 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
656 java_add_stmt (build2 (MODIFY_EXPR, type2,
657 find_stack_slot (stack_pointer - 1, type2),
659 java_add_stmt (build2 (MODIFY_EXPR, type1,
660 find_stack_slot (stack_pointer - 2, type1),
662 stack_type_map[stack_pointer - 1] = type2;
663 stack_type_map[stack_pointer - 2] = type1;
/* Implement the dup/dup_x1/dup2/... family: duplicate the top SIZE
   stack words, inserting the copy OFFSET words further down.  Works on
   flushed stack slots (not quick_stack) to preserve the invariant
   documented at the top of this file.  */
667 java_stack_dup (int size, int offset)
669 int low_index = stack_pointer - size - offset;
672 error ("stack underflow - dup* operation");
674 flush_quick_stack ();
676 stack_pointer += size;
677 dst_index = stack_pointer;
/* Copy slots downward from the top so sources are read before they
   are overwritten.  */
679 for (dst_index = stack_pointer; --dst_index >= low_index; )
682 int src_index = dst_index - size;
683 if (src_index < low_index)
684 src_index = dst_index + size + offset;
685 type = stack_type_map [src_index];
686 if (type == TYPE_SECOND)
688 /* Dup operation splits 64-bit number. */
689 gcc_assert (src_index > low_index);
691 stack_type_map[dst_index] = type;
692 src_index--; dst_index--;
693 type = stack_type_map[src_index];
694 gcc_assert (TYPE_IS_WIDE (type));
697 gcc_assert (! TYPE_IS_WIDE (type));
699 if (src_index != dst_index)
701 tree src_decl = find_stack_slot (src_index, type);
702 tree dst_decl = find_stack_slot (dst_index, type);
705 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
706 stack_type_map[dst_index] = type;
711 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
/* Emit a call to the runtime throw routine for exception object NODE,
   then discard the entire operand stack (an athrow empties it).  */
715 build_java_athrow (tree node)
719 call = build_call_nary (void_type_node,
720 build_address_of (throw_node),
722 TREE_SIDE_EFFECTS (call) = 1;
723 java_add_stmt (call);
724 java_stack_pop (stack_pointer);
727 /* Implementation for jsr/ret */
/* Expand a jsr: push the return address (label of RETURN_PC) and
   branch to TARGET_PC.  */
730 build_java_jsr (int target_pc, int return_pc)
732 tree where = lookup_label (target_pc);
733 tree ret = lookup_label (return_pc);
734 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
735 push_value (ret_label);
736 flush_quick_stack ();
737 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
739 /* Do not need to emit the label here. We noted the existence of the
740 label as a jump target in note_instructions; we'll emit the label
741 for real at the beginning of the expand_byte_code loop. */
/* Expand a ret: a computed goto to the return address in LOCATION.  */
745 build_java_ret (tree location)
747 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
750 /* Implementation of operations on array: new, load, store, length */
/* Map an OPCODE_newarray type code (ATYPE) to the corresponding
   primitive type node, or NULL_TREE for an invalid code.  Inverse of
   encode_newarray_type below.  */
753 decode_newarray_type (int atype)
757 case 4: return boolean_type_node;
758 case 5: return char_type_node;
759 case 6: return float_type_node;
760 case 7: return double_type_node;
761 case 8: return byte_type_node;
762 case 9: return short_type_node;
763 case 10: return int_type_node;
764 case 11: return long_type_node;
765 default: return NULL_TREE;
769 /* Map primitive type to the code used by OPCODE_newarray. */
/* Map primitive TYPE to the code used by OPCODE_newarray; inverse of
   decode_newarray_type (boolean=4 ... long=11).  NOTE(review): the
   return statements between the branches appear dropped from this
   listing.  */
772 encode_newarray_type (tree type)
774 if (type == boolean_type_node)
776 else if (type == char_type_node)
778 else if (type == float_type_node)
780 else if (type == double_type_node)
782 else if (type == byte_type_node)
784 else if (type == short_type_node)
786 else if (type == int_type_node)
788 else if (type == long_type_node)
794 /* Build a call to _Jv_ThrowBadArrayIndex(), the
795 ArrayIndexOfBoundsException exception handler. */
/* Build a call to _Jv_ThrowBadArrayIndex with the offending INDEX,
   for use inside a bounds-check conditional.  */
798 build_java_throw_out_of_bounds_exception (tree index)
800 tree node = build_call_nary (int_type_node,
801 build_address_of (soft_badarrayindex_node),
803 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
807 /* Return the length of an array. Doesn't perform any checking on the nature
808 or value of the array NODE. May be used to implement some bytecodes. */
/* Return an expression for the length of array NODE: a constant when
   the array type records a fixed length, otherwise a (possibly
   null-checked) load of the array's `length' field.  */
811 build_java_array_length_access (tree node)
813 tree type = TREE_TYPE (node);
814 tree array_type = TREE_TYPE (type);
815 HOST_WIDE_INT length;
817 if (!is_array_type_p (type))
819 /* With the new verifier, we will see an ordinary pointer type
820 here. In this case, we just use an arbitrary array type. */
821 array_type = build_java_array_type (object_ptr_type_node, -1);
822 type = promote_type (array_type);
/* A nonnegative recorded length means it is known at compile time.  */
825 length = java_array_type_length (type);
827 return build_int_cst (NULL_TREE, length);
829 node = build3 (COMPONENT_REF, int_type_node,
830 build_java_indirect_ref (array_type, node,
831 flag_check_references),
832 lookup_field (&array_type, get_identifier ("length")),
834 IS_ARRAY_LENGTH_ACCESS (node) = 1;
838 /* Optionally checks a reference against the NULL pointer. ARG1: the
839 expr, ARG2: we should check the reference. Don't generate extra
840 checks if we're not generating code. */
/* Wrap EXPR in a null-pointer check when CHECK is set (and we are
   generating code): if EXPR == null, call the runtime null-pointer
   handler, else yield EXPR.  */
843 java_check_reference (tree expr, int check)
845 if (!flag_syntax_only && check)
847 expr = save_expr (expr);
848 expr = build3 (COND_EXPR, TREE_TYPE (expr),
849 build2 (EQ_EXPR, boolean_type_node,
850 expr, null_pointer_node),
851 build_call_nary (void_type_node,
852 build_address_of (soft_nullpointer_node),
860 /* Reference an object: just like an INDIRECT_REF, but with checking. */
/* Dereference EXPR as a pointer to TYPE, optionally inserting a null
   check first (see java_check_reference).  */
863 build_java_indirect_ref (tree type, tree expr, int check)
866 t = java_check_reference (expr, check);
867 t = convert (build_pointer_type (type), t);
868 return build1 (INDIRECT_REF, type, t);
871 /* Implement array indexing (either as l-value or r-value).
872 Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
873 Optionally performs bounds checking and/or test to NULL.
874 At this point, ARRAY should have been verified as an array. */
/* Build ARRAY[INDEX] with TYPE as the element type, optionally
   emitting a bounds check (flag_bounds_check) and a null check
   (flag_check_references).  The result is an lvalue formed by pointer
   arithmetic from the array's `data' field.  */
877 build_java_arrayaccess (tree array, tree type, tree index)
879 tree node, throw = NULL_TREE;
882 tree array_type = TREE_TYPE (TREE_TYPE (array));
883 tree size_exp = fold_convert (sizetype, size_in_bytes (type));
885 if (!is_array_type_p (TREE_TYPE (array)))
887 /* With the new verifier, we will see an ordinary pointer type
888 here. In this case, we just use the correct array type. */
889 array_type = build_java_array_type (type, -1);
892 if (flag_bounds_check)
895 * (unsigned jint) INDEX >= (unsigned jint) LEN
896 * && throw ArrayIndexOutOfBoundsException.
897 * Note this is equivalent to and more efficient than:
898 * INDEX < 0 || INDEX >= LEN && throw ... */
900 tree len = convert (unsigned_int_type_node,
901 build_java_array_length_access (array));
902 test = fold_build2 (GE_EXPR, boolean_type_node,
903 convert (unsigned_int_type_node, index),
/* A test that folds to constant zero needs no runtime check.  */
905 if (! integer_zerop (test))
907 throw = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
908 build_java_throw_out_of_bounds_exception (index));
909 /* allows expansion within COMPOUND */
910 TREE_SIDE_EFFECTS( throw ) = 1;
914 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
915 to have the bounds check evaluated first. */
916 if (throw != NULL_TREE)
917 index = build2 (COMPOUND_EXPR, int_type_node, throw, index);
919 data_field = lookup_field (&array_type, get_identifier ("data"));
921 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
922 build_java_indirect_ref (array_type, array,
923 flag_check_references),
924 data_field, NULL_TREE);
926 /* Take the address of the data field and convert it to a pointer to
928 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
930 /* Multiply the index by the size of an element to obtain a byte
931 offset. Convert the result to a pointer to the element type. */
932 index = build2 (MULT_EXPR, sizetype,
933 fold_convert (sizetype, index),
936 /* Sum the byte offset and the address of the data field. */
937 node = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (node), node, index);
941 *((&array->data) + index*size_exp)
944 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
947 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
948 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
949 determine that no check is required. */
/* Generate code to throw an ArrayStoreException if OBJECT is not
   assignable (at runtime) to an element of ARRAY; returns a NOP_EXPR
   when the check can be proven unnecessary (final element type, store
   of an element just loaded from the same array, or -fno-store-check).  */
952 build_java_arraystore_check (tree array, tree object)
954 tree check, element_type, source;
955 tree array_type_p = TREE_TYPE (array);
956 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
958 if (! flag_verify_invocations)
960 /* With the new verifier, we don't track precise types. FIXME:
961 performance regression here. */
962 element_type = TYPE_NAME (object_type_node);
966 gcc_assert (is_array_type_p (array_type_p));
968 /* Get the TYPE_DECL for ARRAY's element type. */
970 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
973 gcc_assert (TREE_CODE (element_type) == TYPE_DECL
974 && TREE_CODE (object_type) == TYPE_DECL);
976 if (!flag_store_check)
977 return build1 (NOP_EXPR, array_type_p, array);
979 /* No check is needed if the element type is final. Also check that
980 element_type matches object_type, since in the bytecode
981 compilation case element_type may be the actual element type of
982 the array rather than its declared type. However, if we're doing
983 indirect dispatch, we can't do the `final' optimization. */
984 if (element_type == object_type
985 && ! flag_indirect_dispatch
986 && CLASS_FINAL (element_type))
987 return build1 (NOP_EXPR, array_type_p, array);
989 /* OBJECT might be wrapped by a SAVE_EXPR. */
990 if (TREE_CODE (object) == SAVE_EXPR)
991 source = TREE_OPERAND (object, 0);
995 /* Avoid the check if OBJECT was just loaded from the same array. */
996 if (TREE_CODE (source) == ARRAY_REF)
/* Peel back the ARRAY_REF -> COMPONENT_REF -> INDIRECT_REF nesting
   produced by build_java_arrayaccess to reach the source array.  */
999 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
1000 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
1001 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1002 if (TREE_CODE (source) == SAVE_EXPR)
1003 source = TREE_OPERAND (source, 0);
1006 if (TREE_CODE (target) == SAVE_EXPR)
1007 target = TREE_OPERAND (target, 0);
1009 if (source == target)
1010 return build1 (NOP_EXPR, array_type_p, array);
1013 /* Build an invocation of _Jv_CheckArrayStore */
1014 check = build_call_nary (void_type_node,
1015 build_address_of (soft_checkarraystore_node),
1017 TREE_SIDE_EFFECTS (check) = 1;
1022 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1023 ARRAY_NODE. This function is used to retrieve something less vague than
1024 a pointer type when indexing the first dimension of something like [[<t>.
1025 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
1026 returned unchanged. */
/* Return INDEXED_TYPE for ARRAY_NODE; with the new verifier no
   correction is ever needed, so this is now the identity on
   INDEXED_TYPE.  */
1029 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1032 /* We used to check to see if ARRAY_NODE really had array type.
1033 However, with the new verifier, this is not necessary, as we know
1034 that the object will be an array of the appropriate type. */
1036 return indexed_type;
1039 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1040 called with an integer code (the type of array to create), and the length
1041 of the array to create. */
1044 build_newarray (int atype_value, tree length)
1048 tree prim_type = decode_newarray_type (atype_value);
1050 = build_java_array_type (prim_type,
1051 host_integerp (length, 0) == INTEGER_CST
1052 ? tree_low_cst (length, 0) : -1);
1054 /* Pass a reference to the primitive type class and save the runtime
1056 type_arg = build_class_ref (prim_type);
1058 return build_call_nary (promote_type (type),
1059 build_address_of (soft_newarray_node),
1060 2, type_arg, length);
1063 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1064 of the dimension. */
/* Build a call to _Jv_NewObjectArray (anewarray) for an array of
   CLASS_TYPE elements of the given LENGTH; a constant length is
   recorded in the array type, otherwise -1 (unknown).  */
1067 build_anewarray (tree class_type, tree length)
1070 = build_java_array_type (class_type,
1071 host_integerp (length, 0)
1072 ? tree_low_cst (length, 0) : -1);
1074 return build_call_nary (promote_type (type),
1075 build_address_of (soft_anewarray_node),
1078 build_class_ref (class_type),
1082 /* Return a node that evaluates 'new TYPE[LENGTH]'. */
/* Return a node that evaluates 'new TYPE[LENGTH]', dispatching to the
   primitive (newarray) or reference (anewarray) builder.  */
1085 build_new_array (tree type, tree length)
1087 if (JPRIMITIVE_TYPE_P (type))
1088 return build_newarray (encode_newarray_type (type), length);
1090 return build_anewarray (TREE_TYPE (type), length);
1093 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1094 class pointer, a number of dimensions and the matching number of
1095 dimensions. The argument list is NULL terminated. */
/* Expand multianewarray: pop NDIM dimension sizes off the stack, build
   the NULL-terminated argument list (class ref, ndim, dims..., NULL)
   and push the result of calling _Jv_NewMultiArray.  */
1098 expand_java_multianewarray (tree class_type, int ndim)
1101 tree args = build_tree_list( NULL_TREE, null_pointer_node );
/* Dimensions are popped innermost-first, so consing builds the list
   in the outer-to-inner order the runtime expects.  */
1103 for( i = 0; i < ndim; i++ )
1104 args = tree_cons (NULL_TREE, pop_value (int_type_node), args);
1106 args = tree_cons (NULL_TREE,
1107 build_class_ref (class_type),
1108 tree_cons (NULL_TREE,
1109 build_int_cst (NULL_TREE, ndim),
1112 push_value (build_call_list (promote_type (class_type),
1113 build_address_of (soft_multianewarray_node),
1117 /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
1118 ARRAY is an array type. May expand some bound checking and NULL
1119 pointer checking. RHS_TYPE_NODE we are going to store. In the case
1120 of the CHAR/BYTE/BOOLEAN SHORT, the type popped of the stack is an
1121 INT. In those cases, we make the conversion.
1123 If ARRAY is a reference type, the assignment is checked at run-time
1124 to make sure that the RHS can be assigned to the array element
1125 type. It is not necessary to generate this code if ARRAY is final. */
/* Expand an array store (iastore/aastore/...): pop value, index and
   array, emit bounds/null checks, the runtime assignability check for
   reference stores, and finally the element assignment.  */
1128 expand_java_arraystore (tree rhs_type_node)
1130 tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
1131 && TYPE_PRECISION (rhs_type_node) <= 32) ?
1132 int_type_node : rhs_type_node);
1133 tree index = pop_value (int_type_node);
1134 tree array_type, array, temp, access;
1136 /* If we're processing an `aaload' we might as well just pick
1138 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1140 array_type = build_java_array_type (object_ptr_type_node, -1);
1141 rhs_type_node = object_ptr_type_node;
1144 array_type = build_java_array_type (rhs_type_node, -1);
1146 array = pop_value (array_type);
1147 array = build1 (NOP_EXPR, promote_type (array_type), array);
1149 rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
1151 flush_quick_stack ();
1153 index = save_expr (index);
1154 array = save_expr (array);
1156 /* We want to perform the bounds check (done by
1157 build_java_arrayaccess) before the type check (done by
1158 build_java_arraystore_check). So, we call build_java_arrayaccess
1159 -- which returns an ARRAY_REF lvalue -- and we then generate code
1160 to stash the address of that lvalue in a temp. Then we call
1161 build_java_arraystore_check, and finally we generate a
1162 MODIFY_EXPR to set the array element. */
1164 access = build_java_arrayaccess (array, rhs_type_node, index);
1165 temp = build_decl (VAR_DECL, NULL_TREE,
1166 build_pointer_type (TREE_TYPE (access)));
1167 java_add_local_var (temp);
1168 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
1170 build_fold_addr_expr (access)));
/* Only reference stores need the runtime assignability check.  */
1172 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1174 tree check = build_java_arraystore_check (array, rhs_node);
1175 java_add_stmt (check);
1178 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
1179 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
1183 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1184 sure that LHS is an array type. May expand some bound checking and NULL
1186 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1187 BOOLEAN/SHORT, we push a promoted type back to the stack.
1191 expand_java_arrayload (tree lhs_type_node)
1194 tree index_node = pop_value (int_type_node);
1198 /* If we're processing an `aaload' we might as well just pick
1200 if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1202 array_type = build_java_array_type (object_ptr_type_node, -1);
1203 lhs_type_node = object_ptr_type_node;
1206 array_type = build_java_array_type (lhs_type_node, -1);
1207 array_node = pop_value (array_type);
1208 array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
1210 index_node = save_expr (index_node);
1211 array_node = save_expr (array_node);
1213 lhs_type_node = build_java_check_indexed_type (array_node,
1215 load_node = build_java_arrayaccess (array_node,
1218 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1219 load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1220 push_value (load_node);
1223 /* Expands .length. Makes sure that we deal with and array and may expand
1224 a NULL check on the array object. */
1227 expand_java_array_length (void)
1229 tree array = pop_value (ptr_type_node);
1230 tree length = build_java_array_length_access (array);
1232 push_value (length);
1235 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1236 either soft_monitorenter_node or soft_monitorexit_node. */
1239 build_java_monitor (tree call, tree object)
1241 return build_call_nary (void_type_node,
1242 build_address_of (call),
1246 /* Emit code for one of the PUSHC instructions. */
1249 expand_java_pushc (int ival, tree type)
1252 if (type == ptr_type_node && ival == 0)
1253 value = null_pointer_node;
1254 else if (type == int_type_node || type == long_type_node)
1255 value = build_int_cst (type, ival);
1256 else if (type == float_type_node || type == double_type_node)
1259 REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1260 value = build_real (type, x);
1269 expand_java_return (tree type)
1271 if (type == void_type_node)
1272 java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1275 tree retval = pop_value (type);
1276 tree res = DECL_RESULT (current_function_decl);
1277 retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1279 /* Handle the situation where the native integer type is smaller
1280 than the JVM integer. It can happen for many cross compilers.
1281 The whole if expression just goes away if INT_TYPE_SIZE < 32
1283 if (INT_TYPE_SIZE < 32
1284 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1285 < GET_MODE_SIZE (TYPE_MODE (type))))
1286 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1288 TREE_SIDE_EFFECTS (retval) = 1;
1289 java_add_stmt (build1 (RETURN_EXPR, TREE_TYPE (retval), retval));
1294 expand_load_internal (int index, tree type, int pc)
1297 tree var = find_local_variable (index, type, pc);
1299 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1300 on the stack. If there is an assignment to this VAR_DECL between
1301 the stack push and the use, then the wrong code could be
1302 generated. To avoid this we create a new local and copy our
1303 value into it. Then we push this new local on the stack.
1304 Hopefully this all gets optimized out. */
1305 copy = build_decl (VAR_DECL, NULL_TREE, type);
1306 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1307 && TREE_TYPE (copy) != TREE_TYPE (var))
1308 var = convert (type, var);
1309 java_add_local_var (copy);
1310 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
1316 build_address_of (tree value)
1318 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
1322 class_has_finalize_method (tree type)
1324 tree super = CLASSTYPE_SUPER (type);
1326 if (super == NULL_TREE)
1327 return false; /* Every class with a real finalizer inherits */
1328 /* from java.lang.Object. */
1330 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
1334 java_create_object (tree type)
1336 tree alloc_node = (class_has_finalize_method (type)
1338 : alloc_no_finalizer_node);
1340 return build_call_nary (promote_type (type),
1341 build_address_of (alloc_node),
1342 1, build_class_ref (type));
1346 expand_java_NEW (tree type)
1350 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1351 : alloc_no_finalizer_node);
1352 if (! CLASS_LOADED_P (type))
1353 load_class (type, 1);
1354 safe_layout_class (type);
1355 push_value (build_call_nary (promote_type (type),
1356 build_address_of (alloc_node),
1357 1, build_class_ref (type)));
1360 /* This returns an expression which will extract the class of an
1364 build_get_class (tree value)
1366 tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1367 tree vtable_field = lookup_field (&object_type_node,
1368 get_identifier ("vtable"));
1369 tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1370 build_java_indirect_ref (object_type_node, value,
1371 flag_check_references),
1372 vtable_field, NULL_TREE);
1373 return build3 (COMPONENT_REF, class_ptr_type,
1374 build1 (INDIRECT_REF, dtable_type, tmp),
1375 class_field, NULL_TREE);
1378 /* This builds the tree representation of the `instanceof' operator.
1379 It tries various tricks to optimize this in cases where types are
1383 build_instanceof (tree value, tree type)
1386 tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1387 tree valtype = TREE_TYPE (TREE_TYPE (value));
1388 tree valclass = TYPE_NAME (valtype);
1391 /* When compiling from bytecode, we need to ensure that TYPE has
1393 if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1395 load_class (type, 1);
1396 safe_layout_class (type);
1397 if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1398 return error_mark_node;
1400 klass = TYPE_NAME (type);
1402 if (type == object_type_node || inherits_from_p (valtype, type))
1404 /* Anything except `null' is an instance of Object. Likewise,
1405 if the object is known to be an instance of the class, then
1406 we only need to check for `null'. */
1407 expr = build2 (NE_EXPR, itype, value, null_pointer_node);
1409 else if (flag_verify_invocations
1410 && ! TYPE_ARRAY_P (type)
1411 && ! TYPE_ARRAY_P (valtype)
1412 && DECL_P (klass) && DECL_P (valclass)
1413 && ! CLASS_INTERFACE (valclass)
1414 && ! CLASS_INTERFACE (klass)
1415 && ! inherits_from_p (type, valtype)
1416 && (CLASS_FINAL (klass)
1417 || ! inherits_from_p (valtype, type)))
1419 /* The classes are from different branches of the derivation
1420 tree, so we immediately know the answer. */
1421 expr = boolean_false_node;
1423 else if (DECL_P (klass) && CLASS_FINAL (klass))
1425 tree save = save_expr (value);
1426 expr = build3 (COND_EXPR, itype,
1427 build2 (NE_EXPR, boolean_type_node,
1428 save, null_pointer_node),
1429 build2 (EQ_EXPR, itype,
1430 build_get_class (save),
1431 build_class_ref (type)),
1432 boolean_false_node);
1436 expr = build_call_nary (itype,
1437 build_address_of (soft_instanceof_node),
1438 2, value, build_class_ref (type));
1440 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
1445 expand_java_INSTANCEOF (tree type)
1447 tree value = pop_value (object_ptr_type_node);
1448 value = build_instanceof (value, type);
1453 expand_java_CHECKCAST (tree type)
1455 tree value = pop_value (ptr_type_node);
1456 value = build_call_nary (promote_type (type),
1457 build_address_of (soft_checkcast_node),
1458 2, build_class_ref (type), value);
1463 expand_iinc (unsigned int local_var_index, int ival, int pc)
1465 tree local_var, res;
1466 tree constant_value;
1468 flush_quick_stack ();
1469 local_var = find_local_variable (local_var_index, int_type_node, pc);
1470 constant_value = build_int_cst (NULL_TREE, ival);
1471 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1472 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
1477 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1480 tree arg1 = convert (type, op1);
1481 tree arg2 = convert (type, op2);
1483 if (type == int_type_node)
1487 case TRUNC_DIV_EXPR:
1488 call = soft_idiv_node;
1490 case TRUNC_MOD_EXPR:
1491 call = soft_irem_node;
1497 else if (type == long_type_node)
1501 case TRUNC_DIV_EXPR:
1502 call = soft_ldiv_node;
1504 case TRUNC_MOD_EXPR:
1505 call = soft_lrem_node;
1513 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
1518 build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
1525 tree u_type = unsigned_type_for (type);
1526 arg1 = convert (u_type, arg1);
1527 arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1528 return convert (type, arg1);
1532 mask = build_int_cst (NULL_TREE,
1533 TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
1534 arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
1537 case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
1538 case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
1539 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1541 tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1542 boolean_type_node, arg1, arg2);
1543 tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
1544 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1545 ifexp2, integer_zero_node,
1546 op == COMPARE_L_EXPR
1547 ? integer_minus_one_node
1548 : integer_one_node);
1549 return fold_build3 (COND_EXPR, int_type_node, ifexp1,
1550 op == COMPARE_L_EXPR ? integer_one_node
1551 : integer_minus_one_node,
1555 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1557 tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
1558 tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
1559 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1560 ifexp2, integer_one_node,
1562 return fold_build3 (COND_EXPR, int_type_node,
1563 ifexp1, integer_minus_one_node, second_compare);
1565 case TRUNC_DIV_EXPR:
1566 case TRUNC_MOD_EXPR:
1567 if (TREE_CODE (type) == REAL_TYPE
1568 && op == TRUNC_MOD_EXPR)
1571 if (type != double_type_node)
1573 arg1 = convert (double_type_node, arg1);
1574 arg2 = convert (double_type_node, arg2);
1576 call = build_call_nary (double_type_node,
1577 build_address_of (soft_fmod_node),
1579 if (type != double_type_node)
1580 call = convert (type, call);
1584 if (TREE_CODE (type) == INTEGER_TYPE
1585 && flag_use_divide_subroutine
1586 && ! flag_syntax_only)
1587 return build_java_soft_divmod (op, type, arg1, arg2);
1592 return fold_build2 (op, type, arg1, arg2);
1596 expand_java_binop (tree type, enum tree_code op)
1606 rtype = int_type_node;
1607 rarg = pop_value (rtype);
1610 rarg = pop_value (rtype);
1612 larg = pop_value (ltype);
1613 push_value (build_java_binop (op, type, larg, rarg));
1616 /* Lookup the field named NAME in *TYPEP or its super classes.
1617 If not found, return NULL_TREE.
1618 (If the *TYPEP is not found, or if the field reference is
1619 ambiguous, return error_mark_node.)
1620 If found, return the FIELD_DECL, and set *TYPEP to the
1621 class containing the field. */
1624 lookup_field (tree *typep, tree name)
1626 if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1628 load_class (*typep, 1);
1629 safe_layout_class (*typep);
1630 if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1631 return error_mark_node;
1635 tree field, binfo, base_binfo;
1639 for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field))
1640 if (DECL_NAME (field) == name)
1643 /* Process implemented interfaces. */
1644 save_field = NULL_TREE;
1645 for (binfo = TYPE_BINFO (*typep), i = 0;
1646 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1648 tree t = BINFO_TYPE (base_binfo);
1649 if ((field = lookup_field (&t, name)))
1651 if (save_field == field)
1653 if (save_field == NULL_TREE)
1657 tree i1 = DECL_CONTEXT (save_field);
1658 tree i2 = DECL_CONTEXT (field);
1659 error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1660 IDENTIFIER_POINTER (name),
1661 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1662 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1663 return error_mark_node;
1668 if (save_field != NULL_TREE)
1671 *typep = CLASSTYPE_SUPER (*typep);
1676 /* Look up the field named NAME in object SELF_VALUE,
1677 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1678 SELF_VALUE is NULL_TREE if looking for a static field. */
1681 build_field_ref (tree self_value, tree self_class, tree name)
1683 tree base_class = self_class;
1684 tree field_decl = lookup_field (&base_class, name);
1685 if (field_decl == NULL_TREE)
1687 error ("field %qs not found", IDENTIFIER_POINTER (name));
1688 return error_mark_node;
1690 if (self_value == NULL_TREE)
1692 return build_static_field_ref (field_decl);
1696 tree base_type = promote_type (base_class);
1698 /* CHECK is true if self_value is not the this pointer. */
1699 int check = (! (DECL_P (self_value)
1700 && DECL_NAME (self_value) == this_identifier_node));
1702 /* Determine whether a field offset from NULL will lie within
1703 Page 0: this is necessary on those GNU/Linux/BSD systems that
1704 trap SEGV to generate NullPointerExceptions.
1706 We assume that Page 0 will be mapped with NOPERM, and that
1707 memory may be allocated from any other page, so only field
1708 offsets < pagesize are guaranteed to trap. We also assume
1709 the smallest page size we'll encounter is 4k bytes. */
1710 if (! flag_syntax_only && check && ! flag_check_references
1711 && ! flag_indirect_dispatch)
1713 tree field_offset = byte_position (field_decl);
1715 page_size = size_int (4096);
1716 check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
1719 if (base_type != TREE_TYPE (self_value))
1720 self_value = fold_build1 (NOP_EXPR, base_type, self_value);
1721 if (! flag_syntax_only && flag_indirect_dispatch)
1724 = build_int_cst (NULL_TREE, get_symbol_table_index
1725 (field_decl, NULL_TREE,
1726 &TYPE_OTABLE_METHODS (output_class)));
1728 = build4 (ARRAY_REF, integer_type_node,
1729 TYPE_OTABLE_DECL (output_class), otable_index,
1730 NULL_TREE, NULL_TREE);
1733 if (DECL_CONTEXT (field_decl) != output_class)
1735 = build3 (COND_EXPR, TREE_TYPE (field_offset),
1736 build2 (EQ_EXPR, boolean_type_node,
1737 field_offset, integer_zero_node),
1738 build_call_nary (void_type_node,
1739 build_address_of (soft_nosuchfield_node),
1743 field_offset = fold (convert (sizetype, field_offset));
1744 self_value = java_check_reference (self_value, check);
1746 = fold_build2 (POINTER_PLUS_EXPR,
1747 build_pointer_type (TREE_TYPE (field_decl)),
1748 self_value, field_offset);
1749 return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
1752 self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1754 return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1755 self_value, field_decl, NULL_TREE);
1760 lookup_label (int pc)
1764 if (pc > highest_label_pc_this_method)
1765 highest_label_pc_this_method = pc;
1766 ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc);
1767 name = get_identifier (buf);
1768 if (IDENTIFIER_LOCAL_VALUE (name))
1769 return IDENTIFIER_LOCAL_VALUE (name);
1772 /* The type of the address of a label is return_address_type_node. */
1773 tree decl = create_label_decl (name);
1774 LABEL_PC (decl) = pc;
1775 return pushdecl (decl);
1779 /* Generate a unique name for the purpose of loops and switches
1780 labels, and try-catch-finally blocks label or temporary variables. */
1783 generate_name (void)
1785 static int l_number = 0;
1787 ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
1789 return get_identifier (buff);
1793 create_label_decl (tree name)
1796 decl = build_decl (LABEL_DECL, name,
1797 TREE_TYPE (return_address_type_node));
1798 DECL_CONTEXT (decl) = current_function_decl;
1799 DECL_IGNORED_P (decl) = 1;
1803 /* This maps a bytecode offset (PC) to various flags. */
1804 char *instruction_bits;
1807 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1809 lookup_label (target_pc);
1810 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1813 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1814 where CONDITION is one of one the compare operators. */
1817 expand_compare (enum tree_code condition, tree value1, tree value2,
1820 tree target = lookup_label (target_pc);
1821 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1823 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1824 build1 (GOTO_EXPR, void_type_node, target),
1825 build_java_empty_stmt ()));
1828 /* Emit code for a TEST-type opcode. */
1831 expand_test (enum tree_code condition, tree type, int target_pc)
1833 tree value1, value2;
1834 flush_quick_stack ();
1835 value1 = pop_value (type);
1836 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1837 expand_compare (condition, value1, value2, target_pc);
1840 /* Emit code for a COND-type opcode. */
1843 expand_cond (enum tree_code condition, tree type, int target_pc)
1845 tree value1, value2;
1846 flush_quick_stack ();
1847 /* note: pop values in opposite order */
1848 value2 = pop_value (type);
1849 value1 = pop_value (type);
1850 /* Maybe should check value1 and value2 for type compatibility ??? */
1851 expand_compare (condition, value1, value2, target_pc);
1855 expand_java_goto (int target_pc)
1857 tree target_label = lookup_label (target_pc);
1858 flush_quick_stack ();
1859 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
1863 expand_java_switch (tree selector, int default_pc)
1865 tree switch_expr, x;
1867 flush_quick_stack ();
1868 switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1869 NULL_TREE, NULL_TREE);
1870 java_add_stmt (switch_expr);
1872 x = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, NULL_TREE,
1873 create_artificial_label ());
1874 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1876 x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1877 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1883 expand_java_add_case (tree switch_expr, int match, int target_pc)
1887 value = build_int_cst (TREE_TYPE (switch_expr), match);
1889 x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE,
1890 create_artificial_label ());
1891 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1893 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1894 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1898 pop_arguments (tree arg_types)
1900 if (arg_types == end_params_node)
1902 if (TREE_CODE (arg_types) == TREE_LIST)
1904 tree tail = pop_arguments (TREE_CHAIN (arg_types));
1905 tree type = TREE_VALUE (arg_types);
1906 tree arg = pop_value (type);
1908 /* We simply cast each argument to its proper type. This is
1909 needed since we lose type information coming out of the
1910 verifier. We also have to do this when we pop an integer
1911 type that must be promoted for the function call. */
1912 if (TREE_CODE (type) == POINTER_TYPE)
1913 arg = build1 (NOP_EXPR, type, arg);
1914 else if (targetm.calls.promote_prototypes (type)
1915 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1916 && INTEGRAL_TYPE_P (type))
1917 arg = convert (integer_type_node, arg);
1918 return tree_cons (NULL_TREE, arg, tail);
1923 /* Attach to PTR (a block) the declaration found in ENTRY. */
1926 attach_init_test_initialization_flags (void **entry, void *ptr)
1928 tree block = (tree)ptr;
1929 struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
1931 if (block != error_mark_node)
1933 if (TREE_CODE (block) == BIND_EXPR)
1935 tree body = BIND_EXPR_BODY (block);
1936 TREE_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1937 BIND_EXPR_VARS (block) = ite->value;
1938 body = build2 (COMPOUND_EXPR, void_type_node,
1939 build1 (DECL_EXPR, void_type_node, ite->value), body);
1940 BIND_EXPR_BODY (block) = body;
1944 tree body = BLOCK_SUBBLOCKS (block);
1945 TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1946 BLOCK_EXPR_DECLS (block) = ite->value;
1947 body = build2 (COMPOUND_EXPR, void_type_node,
1948 build1 (DECL_EXPR, void_type_node, ite->value), body);
1949 BLOCK_SUBBLOCKS (block) = body;
1956 /* Build an expression to initialize the class CLAS.
1957 if EXPR is non-NULL, returns an expression to first call the initializer
1958 (if it is needed) and then calls EXPR. */
1961 build_class_init (tree clas, tree expr)
1965 /* An optimization: if CLAS is a superclass of the class we're
1966 compiling, we don't need to initialize it. However, if CLAS is
1967 an interface, it won't necessarily be initialized, even if we
1969 if ((! CLASS_INTERFACE (TYPE_NAME (clas))
1970 && inherits_from_p (current_class, clas))
1971 || current_class == clas)
1974 if (always_initialize_class_p)
1976 init = build_call_nary (void_type_node,
1977 build_address_of (soft_initclass_node),
1978 1, build_class_ref (clas));
1979 TREE_SIDE_EFFECTS (init) = 1;
1983 tree *init_test_decl;
1985 init_test_decl = java_treetreehash_new
1986 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
1988 if (*init_test_decl == NULL)
1990 /* Build a declaration and mark it as a flag used to track
1991 static class initializations. */
1992 decl = build_decl (VAR_DECL, NULL_TREE,
1994 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
1995 LOCAL_CLASS_INITIALIZATION_FLAG (decl) = 1;
1996 DECL_CONTEXT (decl) = current_function_decl;
1997 DECL_FUNCTION_INIT_TEST_CLASS (decl) = clas;
1998 /* Tell the check-init code to ignore this decl when not
1999 optimizing class initialization. */
2000 if (!STATIC_CLASS_INIT_OPT_P ())
2001 DECL_BIT_INDEX (decl) = -1;
2002 DECL_INITIAL (decl) = boolean_false_node;
2003 /* Don't emit any symbolic debugging info for this decl. */
2004 DECL_IGNORED_P (decl) = 1;
2005 *init_test_decl = decl;
2008 init = build_call_nary (void_type_node,
2009 build_address_of (soft_initclass_node),
2010 1, build_class_ref (clas));
2011 TREE_SIDE_EFFECTS (init) = 1;
2012 init = build3 (COND_EXPR, void_type_node,
2013 build2 (EQ_EXPR, boolean_type_node,
2014 *init_test_decl, boolean_false_node),
2015 init, integer_zero_node);
2016 TREE_SIDE_EFFECTS (init) = 1;
2017 init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2018 build2 (MODIFY_EXPR, boolean_type_node,
2019 *init_test_decl, boolean_true_node));
2020 TREE_SIDE_EFFECTS (init) = 1;
2023 if (expr != NULL_TREE)
2025 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2026 TREE_SIDE_EFFECTS (expr) = 1;
2034 /* Rewrite expensive calls that require stack unwinding at runtime to
2035 cheaper alternatives. The logic here performs these
2038 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2039 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
2045 const char *classname;
2047 const char *signature;
2048 const char *new_signature;
2050 tree (*rewrite_arglist) (tree arglist);
2053 /* Add __builtin_return_address(0) to the end of an arglist. */
2057 rewrite_arglist_getcaller (tree arglist)
2060 = build_call_expr (built_in_decls[BUILT_IN_RETURN_ADDRESS],
2061 1, integer_zero_node);
2063 DECL_INLINE (current_function_decl) = 0;
2065 return chainon (arglist,
2066 tree_cons (NULL_TREE, retaddr,
2070 /* Add this.class to the end of an arglist. */
2073 rewrite_arglist_getclass (tree arglist)
2075 return chainon (arglist,
2076 tree_cons (NULL_TREE, build_class_ref (output_class),
2080 static rewrite_rule rules[] =
2081 {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2082 "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2083 ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2084 {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2085 "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2086 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2087 {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2088 "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2089 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2090 {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2091 "()Ljava/lang/ClassLoader;",
2092 "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2093 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2095 {NULL, NULL, NULL, NULL, 0, NULL}};
2097 /* True if this method is special, i.e. it's a private method that
2098 should be exported from a DSO. */
2101 special_method_p (tree candidate_method)
2103 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2104 tree method = DECL_NAME (candidate_method);
2107 for (p = rules; p->classname; p++)
2109 if (get_identifier (p->classname) == context
2110 && get_identifier (p->method) == method)
2116 /* Scan the rules list for replacements for *METHOD_P and replace the
2117 args accordingly. If the rewrite results in an access to a private
2118 method, update SPECIAL.*/
2121 maybe_rewrite_invocation (tree *method_p, tree *arg_list_p,
2122 tree *method_signature_p, tree *special)
2124 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2126 *special = NULL_TREE;
2128 for (p = rules; p->classname; p++)
2130 if (get_identifier (p->classname) == context)
2132 tree method = DECL_NAME (*method_p);
2133 if (get_identifier (p->method) == method
2134 && get_identifier (p->signature) == *method_signature_p)
2137 = lookup_java_method (DECL_CONTEXT (*method_p),
2139 get_identifier (p->new_signature));
2140 if (! maybe_method && ! flag_verify_invocations)
2143 = add_method (DECL_CONTEXT (*method_p), p->flags,
2144 method, get_identifier (p->new_signature));
2145 DECL_EXTERNAL (maybe_method) = 1;
2147 *method_p = maybe_method;
2148 gcc_assert (*method_p);
2149 *arg_list_p = p->rewrite_arglist (*arg_list_p);
2150 *method_signature_p = get_identifier (p->new_signature);
2151 *special = integer_one_node;
2162 build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2163 tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2164 tree arg_list ATTRIBUTE_UNUSED, tree special)
2167 if (is_compiled_class (self_type))
2169 /* With indirect dispatch we have to use indirect calls for all
2170 publicly visible methods or gcc will use PLT indirections
2171 to reach them. We also have to use indirect dispatch for all
2172 external methods. */
2173 if (! flag_indirect_dispatch
2174 || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
2176 func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
2182 = build_int_cst (NULL_TREE,
2183 (get_symbol_table_index
2185 &TYPE_ATABLE_METHODS (output_class))));
2187 = build4 (ARRAY_REF,
2188 TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2189 TYPE_ATABLE_DECL (output_class), table_index,
2190 NULL_TREE, NULL_TREE);
2192 func = convert (method_ptr_type_node, func);
2196 /* We don't know whether the method has been (statically) compiled.
2197 Compile this code to get a reference to the method's code:
2199 SELF_TYPE->methods[METHOD_INDEX].ncode
2203 int method_index = 0;
2206 /* The method might actually be declared in some superclass, so
2207 we have to use its class context, not the caller's notion of
2208 where the method is. */
2209 self_type = DECL_CONTEXT (method);
2210 ref = build_class_ref (self_type);
2211 ref = build1 (INDIRECT_REF, class_type_node, ref);
2212 if (ncode_ident == NULL_TREE)
2213 ncode_ident = get_identifier ("ncode");
2214 if (methods_ident == NULL_TREE)
2215 methods_ident = get_identifier ("methods");
2216 ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2217 lookup_field (&class_type_node, methods_ident),
2219 for (meth = TYPE_METHODS (self_type);
2220 ; meth = TREE_CHAIN (meth))
2224 if (meth == NULL_TREE)
2225 fatal_error ("method '%s' not found in class",
2226 IDENTIFIER_POINTER (DECL_NAME (method)));
2229 method_index *= int_size_in_bytes (method_type_node);
2230 ref = fold_build2 (POINTER_PLUS_EXPR, method_ptr_type_node,
2231 ref, size_int (method_index));
2232 ref = build1 (INDIRECT_REF, method_type_node, ref);
2233 func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2234 ref, lookup_field (&method_type_node, ncode_ident),
2241 invoke_build_dtable (int is_invoke_interface, tree arg_list)
2243 tree dtable, objectref;
2245 TREE_VALUE (arg_list) = save_expr (TREE_VALUE (arg_list));
2247 /* If we're dealing with interfaces and if the objectref
2248 argument is an array then get the dispatch table of the class
2249 Object rather than the one from the objectref. */
2250 objectref = (is_invoke_interface
2251 && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list)))
2252 ? build_class_ref (object_type_node) : TREE_VALUE (arg_list));
2254 if (dtable_ident == NULL_TREE)
2255 dtable_ident = get_identifier ("vtable");
2256 dtable = build_java_indirect_ref (object_type_node, objectref,
2257 flag_check_references);
2258 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2259 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2264 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2265 T. If this decl has not been seen before, it will be added to the
2266 [oa]table_methods. If it has, the existing table slot will be
2270 get_symbol_table_index (tree t, tree special, tree *symbol_table)
2275 if (*symbol_table == NULL_TREE)
2277 *symbol_table = build_tree_list (special, t);
2281 method_list = *symbol_table;
2285 tree value = TREE_VALUE (method_list);
2286 tree purpose = TREE_PURPOSE (method_list);
2287 if (value == t && purpose == special)
2290 if (TREE_CHAIN (method_list) == NULL_TREE)
2293 method_list = TREE_CHAIN (method_list);
2296 TREE_CHAIN (method_list) = build_tree_list (special, t);
/* Build a tree for the function pointer used by an invokevirtual call
   of METHOD through the vtable expression DTABLE.  SPECIAL is passed
   through to get_symbol_table_index when -findirect-dispatch is on.  */
2301 build_invokevirtual (tree dtable, tree method, tree special)
2304 tree nativecode_ptr_ptr_type_node
2305 = build_pointer_type (nativecode_ptr_type_node);
/* BC-compiled (indirect dispatch) case: the vtable offset is not known
   at compile time, so read it from the otable at runtime.  */
2309 if (flag_indirect_dispatch)
2311 gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
2314 = build_int_cst (NULL_TREE, get_symbol_table_index
2316 &TYPE_OTABLE_METHODS (output_class)));
2317 method_index = build4 (ARRAY_REF, integer_type_node,
2318 TYPE_OTABLE_DECL (output_class),
2319 otable_index, NULL_TREE, NULL_TREE);
2323 /* We fetch the DECL_VINDEX field directly here, rather than
2324 using get_method_index(). DECL_VINDEX is the true offset
2325 from the vtable base to a method, regardless of any extra
2326 words inserted at the start of the vtable. */
2327 method_index = DECL_VINDEX (method);
/* Scale the slot index to a byte offset; descriptor-based vtables use
   TARGET_VTABLE_USES_DESCRIPTORS words per entry.  */
2328 method_index = size_binop (MULT_EXPR, method_index,
2329 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
2330 if (TARGET_VTABLE_USES_DESCRIPTORS)
2331 method_index = size_binop (MULT_EXPR, method_index,
2332 size_int (TARGET_VTABLE_USES_DESCRIPTORS));
2335 func = fold_build2 (POINTER_PLUS_EXPR, nativecode_ptr_ptr_type_node, dtable,
2336 convert (sizetype, method_index));
/* With descriptors the slot address itself is the callable entity;
   otherwise load the function pointer stored in the slot.  */
2338 if (TARGET_VTABLE_USES_DESCRIPTORS)
2339 func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
2341 func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
/* Cached identifier for the `class' field of the dtable; GC-rooted.  */
2346 static GTY(()) tree class_ident;
/* Build a tree computing the target function pointer for an
   invokeinterface call of METHOD through the vtable expression DTABLE.  */
2348 build_invokeinterface (tree dtable, tree method)
2353 /* We expand invokeinterface here. */
2355 if (class_ident == NULL_TREE)
2356 class_ident = get_identifier ("class");
/* Fetch the receiver's Class object out of its vtable.  */
2358 dtable = build_java_indirect_ref (dtable_type, dtable,
2359 flag_check_references);
2360 dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2361 lookup_field (&dtable_type, class_ident), NULL_TREE);
2363 interface = DECL_CONTEXT (method);
2364 gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2365 layout_class_methods (interface);
/* BC-compiled case: the interface and the method index are read from
   adjacent slots of the itable at runtime.  */
2367 if (flag_indirect_dispatch)
2370 = 2 * (get_symbol_table_index
2371 (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2373 = build4 (ARRAY_REF,
2374 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2375 TYPE_ITABLE_DECL (output_class),
2376 build_int_cst (NULL_TREE, itable_index-1),
2377 NULL_TREE, NULL_TREE);
2379 = build4 (ARRAY_REF,
2380 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2381 TYPE_ITABLE_DECL (output_class),
2382 build_int_cst (NULL_TREE, itable_index),
2383 NULL_TREE, NULL_TREE);
2384 interface = convert (class_ptr_type, interface);
2385 idx = convert (integer_type_node, idx);
/* Direct case: the index within the interface is known at compile time.  */
2389 idx = build_int_cst (NULL_TREE,
2390 get_interface_method_index (method, interface));
2391 interface = build_class_ref (interface);
/* Resolve the concrete implementation at runtime via the libgcj
   helper _Jv_LookupInterfaceMethodIdx.  */
2394 return build_call_nary (ptr_type_node,
2395 build_address_of (soft_lookupinterfacemethod_node),
2396 3, dtable, interface, idx);
2399 /* Expand one of the invoke_* opcodes.
2400 OPCODE is the specific opcode.
2401 METHOD_REF_INDEX is an index into the constant pool.
2402 NARGS is the number of arguments, or -1 if not specified. */
2405 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
/* Fixed mojibake: "&current_jcf" had been corrupted to "\xa4t_jcf" by an
   HTML-entity (&curren;) round trip; restored on the three lines below.  */
2407 tree method_signature
2408 = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
2409 tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool,
2412 = get_class_constant (current_jcf,
2413 COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool,
2415 const char *const self_name
2416 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2417 tree call, func, method, arg_list, method_type;
2418 tree check = NULL_TREE;
2420 tree special = NULL_TREE;
/* Make sure the referenced class is loaded and laid out before we
   search it for the method.  */
2422 if (! CLASS_LOADED_P (self_type))
2424 load_class (self_type, 1);
2425 safe_layout_class (self_type);
2426 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2427 fatal_error ("failed to find class '%s'", self_name);
2429 layout_class_methods (self_type);
/* Constructors (<init>) are looked up by signature only.  */
2431 if (ID_INIT_P (method_name))
2432 method = lookup_java_constructor (self_type, method_signature);
2434 method = lookup_java_method (self_type, method_name, method_signature);
2436 /* We've found a method in a class other than the one in which it
2437 was wanted. This can happen if, for instance, we're trying to
2438 compile invokespecial super.equals().
2439 FIXME: This is a kludge. Rather than nullifying the result, we
2440 should change lookup_java_method() so that it doesn't search the
2441 superclass chain when we're BC-compiling. */
2442 if (! flag_verify_invocations
2444 && ! TYPE_ARRAY_P (self_type)
2445 && self_type != DECL_CONTEXT (method))
2448 /* We've found a method in an interface, but this isn't an interface
2450 if (opcode != OPCODE_invokeinterface
2452 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2455 /* We've found a non-interface method but we are making an
2456 interface call. This can happen if the interface overrides a
2457 method in Object. */
2458 if (! flag_verify_invocations
2459 && opcode == OPCODE_invokeinterface
2461 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
2464 if (method == NULL_TREE)
2466 if (flag_verify_invocations || ! flag_indirect_dispatch)
2468 error ("class '%s' has no method named '%s' matching signature '%s'",
2470 IDENTIFIER_POINTER (method_name),
2471 IDENTIFIER_POINTER (method_signature));
/* BC-compiling without verification: synthesize a dummy method decl
   so that code generation can proceed; resolution happens at runtime.  */
2475 int flags = ACC_PUBLIC;
2476 if (opcode == OPCODE_invokestatic)
2477 flags |= ACC_STATIC;
2478 if (opcode == OPCODE_invokeinterface)
2480 flags |= ACC_INTERFACE | ACC_ABSTRACT;
2481 CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2483 method = add_method (self_type, flags, method_name,
2485 DECL_ARTIFICIAL (method) = 1;
2486 METHOD_DUMMY (method) = 1;
2487 layout_class_method (self_type, NULL,
2492 /* Invoke static can't invoke static/abstract method */
2493 if (method != NULL_TREE)
2495 if (opcode == OPCODE_invokestatic)
2497 if (!METHOD_STATIC (method))
2499 error ("invokestatic on non static method");
2502 else if (METHOD_ABSTRACT (method))
2504 error ("invokestatic on abstract method");
2510 if (METHOD_STATIC (method))
2512 error ("invoke[non-static] on static method");
2518 if (method == NULL_TREE)
2520 /* If we got here, we emitted an error message above. So we
2521 just pop the arguments, push a properly-typed zero, and
2523 method_type = get_type_from_signature (method_signature);
2524 pop_arguments (TYPE_ARG_TYPES (method_type));
2525 if (opcode != OPCODE_invokestatic)
2526 pop_type (self_type);
2527 method_type = promote_type (TREE_TYPE (method_type));
2528 push_value (convert (method_type, integer_zero_node));
2532 method_type = TREE_TYPE (method);
2533 arg_list = pop_arguments (TYPE_ARG_TYPES (method_type));
2534 flush_quick_stack ();
2536 maybe_rewrite_invocation (&method, &arg_list, &method_signature,
/* Choose the dispatch strategy: direct call for static methods and for
   virtual calls that are provably non-polymorphic (private/final).  */
2540 if (opcode == OPCODE_invokestatic)
2541 func = build_known_method_ref (method, method_type, self_type,
2542 method_signature, arg_list, special);
2543 else if (opcode == OPCODE_invokespecial
2544 || (opcode == OPCODE_invokevirtual
2545 && (METHOD_PRIVATE (method)
2546 || METHOD_FINAL (method)
2547 || CLASS_FINAL (TYPE_NAME (self_type)))))
2549 /* If the object for the method call is null, we throw an
2550 exception. We don't do this if the object is the current
2551 method's `this'. In other cases we just rely on an
2552 optimization pass to eliminate redundant checks. FIXME:
2553 Unfortunately there doesn't seem to be a way to determine
2554 what the current method is right now.
2555 We do omit the check if we're calling <init>. */
2556 /* We use a SAVE_EXPR here to make sure we only evaluate
2557 the new `self' expression once. */
2558 tree save_arg = save_expr (TREE_VALUE (arg_list));
2559 TREE_VALUE (arg_list) = save_arg;
2560 check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2561 func = build_known_method_ref (method, method_type, self_type,
2562 method_signature, arg_list, special);
/* True virtual/interface dispatch: go through the receiver's vtable.  */
2566 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2568 if (opcode == OPCODE_invokevirtual)
2569 func = build_invokevirtual (dtable, method, special);
2571 func = build_invokeinterface (dtable, method);
2574 if (TREE_CODE (func) == ADDR_EXPR)
2575 TREE_TYPE (func) = build_pointer_type (method_type);
2577 func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2579 call = build_call_list (TREE_TYPE (method_type), func, arg_list);
2580 TREE_SIDE_EFFECTS (call) = 1;
2581 call = check_for_builtin (method, call);
/* Sequence the null-receiver check (if any) before the call itself.  */
2583 if (check != NULL_TREE)
2585 call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2586 TREE_SIDE_EFFECTS (call) = 1;
/* void results are emitted as statements; non-void results go onto the
   quick stack for the next bytecode to consume.  */
2589 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2590 java_add_stmt (call);
2594 flush_quick_stack ();
2598 /* Create a stub which will be put into the vtable but which will call
/* Builds the body of METHOD (a native method compiled with -fjni) as a
   stub that: allocates a JNI frame, looks up the target JNI function,
   calls it, pops the frame, and returns the (possibly unwrapped) result.  */
2602 build_jni_stub (tree method)
2604 tree jnifunc, call, args, body, method_sig, arg_types;
2605 tree jniarg0, jniarg1, jniarg2, jniarg3;
2606 tree jni_func_type, tem;
2607 tree env_var, res_var = NULL_TREE, block;
2608 tree method_args, res_type;
2614 tree klass = DECL_CONTEXT (method);
2615 int from_class = ! CLASS_FROM_SOURCE_P (klass);
2616 klass = build_class_ref (klass);
2618 gcc_assert (METHOD_NATIVE (method) && flag_jni);
2620 DECL_ARTIFICIAL (method) = 1;
2621 DECL_EXTERNAL (method) = 0;
/* Local `env' holds the JNIEnv* for the duration of the call.  */
2623 env_var = build_decl (VAR_DECL, get_identifier ("env"), ptr_type_node);
2624 DECL_CONTEXT (env_var) = method;
/* Non-void methods also need a `res' local to capture the JNI result.  */
2626 if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2628 res_var = build_decl (VAR_DECL, get_identifier ("res"),
2629 TREE_TYPE (TREE_TYPE (method)));
2630 DECL_CONTEXT (res_var) = method;
2631 TREE_CHAIN (env_var) = res_var;
/* `meth' is a function-static cache of the looked-up JNI function
   pointer, so the lookup only happens on the first call.  */
2634 meth_var = build_decl (VAR_DECL, get_identifier ("meth"), ptr_type_node);
2635 TREE_STATIC (meth_var) = 1;
2636 TREE_PUBLIC (meth_var) = 0;
2637 DECL_EXTERNAL (meth_var) = 0;
2638 DECL_CONTEXT (meth_var) = method;
2639 DECL_ARTIFICIAL (meth_var) = 1;
2640 DECL_INITIAL (meth_var) = null_pointer_node;
2641 TREE_USED (meth_var) = 1;
2642 chainon (env_var, meth_var);
2643 build_result_decl (method);
2645 /* One strange way that the front ends are different is that they
2646 store arguments differently. */
2648 method_args = DECL_ARGUMENTS (method);
2650 method_args = BLOCK_EXPR_DECLS (DECL_FUNCTION_BODY (method));
2651 block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2652 TREE_SIDE_EFFECTS (block) = 1;
2653 /* When compiling from source we don't set the type of the block,
2654 because that will prevent patch_return from ever being run. */
2656 TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));
2658 /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2659 body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2660 build_call_nary (ptr_type_node,
2661 build_address_of (soft_getjnienvnewframe_node),
2663 CAN_COMPLETE_NORMALLY (body) = 1;
2665 /* All the arguments to this method become arguments to the
2666 underlying JNI function. If we had to wrap object arguments in a
2667 special way, we would do that here. */
/* args_size accumulates the callee argument-area size in bytes; it is
   later passed to the lookup helper (used for stdcall-style mangling).  */
2669 for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem))
2671 int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2672 #ifdef PARM_BOUNDARY
2673 arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2676 args_size += (arg_bits / BITS_PER_UNIT);
2678 args = tree_cons (NULL_TREE, tem, args);
2680 args = nreverse (args);
2681 arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2683 /* For a static method the second argument is the class. For a
2684 non-static method the second argument is `this'; that is already
2685 available in the argument list. */
2686 if (METHOD_STATIC (method))
2688 args_size += int_size_in_bytes (TREE_TYPE (klass));
2689 args = tree_cons (NULL_TREE, klass, args);
2690 arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2693 /* The JNIEnv structure is the first argument to the JNI function. */
2694 args_size += int_size_in_bytes (TREE_TYPE (env_var));
2695 args = tree_cons (NULL_TREE, env_var, args);
2696 arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2698 /* We call _Jv_LookupJNIMethod to find the actual underlying
2699 function pointer. _Jv_LookupJNIMethod will throw the appropriate
2700 exception if this function is not found at runtime. */
2701 method_sig = build_java_signature (TREE_TYPE (method));
2703 jniarg1 = build_utf8_ref (DECL_NAME (method));
2704 jniarg2 = build_utf8_ref (unmangle_classname
2705 (IDENTIFIER_POINTER (method_sig),
2706 IDENTIFIER_LENGTH (method_sig)));
2707 jniarg3 = build_int_cst (NULL_TREE, args_size);
2709 tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2711 #ifdef MODIFY_JNI_METHOD_CALL
2712 tem = MODIFY_JNI_METHOD_CALL (tem);
2715 jni_func_type = build_pointer_type (tem);
/* Use the cached `meth' if already resolved, otherwise look it up and
   cache the result (COND_EXPR: cached ? cached : (meth = lookup(...))).  */
2717 jnifunc = build3 (COND_EXPR, ptr_type_node,
2719 build2 (MODIFY_EXPR, ptr_type_node, meth_var,
2720 build_call_nary (ptr_type_node,
2722 (soft_lookupjnimethod_node),
2725 jniarg2, jniarg3)));
2727 /* Now we make the actual JNI call via the resulting function
2729 call = build_call_list (TREE_TYPE (TREE_TYPE (method)),
2730 build1 (NOP_EXPR, jni_func_type, jnifunc),
2733 /* If the JNI call returned a result, capture it here. If we had to
2734 unwrap JNI object results, we would do that here. */
2735 if (res_var != NULL_TREE)
2737 /* If the call returns an object, it may return a JNI weak
2738 reference, in which case we must unwrap it. */
2739 if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2740 call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2741 build_address_of (soft_unwrapjni_node),
2743 call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2747 TREE_SIDE_EFFECTS (call) = 1;
2748 CAN_COMPLETE_NORMALLY (call) = 1;
2750 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2751 TREE_SIDE_EFFECTS (body) = 1;
2753 /* Now free the environment we allocated. */
2754 call = build_call_nary (ptr_type_node,
2755 build_address_of (soft_jnipopsystemframe_node),
2757 TREE_SIDE_EFFECTS (call) = 1;
2758 CAN_COMPLETE_NORMALLY (call) = 1;
2759 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2760 TREE_SIDE_EFFECTS (body) = 1;
2762 /* Finally, do the return. */
2763 res_type = void_type_node;
2764 if (res_var != NULL_TREE)
2767 gcc_assert (DECL_RESULT (method));
2768 /* Make sure we copy the result variable to the actual
2769 result. We use the type of the DECL_RESULT because it
2770 might be different from the return type of the function:
2771 it might be promoted. */
2772 drt = TREE_TYPE (DECL_RESULT (method));
2773 if (drt != TREE_TYPE (res_var))
2774 res_var = build1 (CONVERT_EXPR, drt, res_var);
2775 res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2776 TREE_SIDE_EFFECTS (res_var) = 1;
2779 body = build2 (COMPOUND_EXPR, void_type_node, body,
2780 build1 (RETURN_EXPR, res_type, res_var));
2781 TREE_SIDE_EFFECTS (body) = 1;
2783 /* Prepend class initialization for static methods reachable from
2785 if (METHOD_STATIC (method)
2786 && (! METHOD_PRIVATE (method)
2787 || INNER_CLASS_P (DECL_CONTEXT (method))))
2789 tree init = build_call_expr (soft_initclass_node, 1,
2791 body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2792 TREE_SIDE_EFFECTS (body) = 1;
/* Wrap everything in a BIND_EXPR holding the stub's local variables.  */
2795 bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2801 /* Given lvalue EXP, return a volatile expression that references the
/* Rebuilds EXP as *(volatile T *)&EXP so that subsequent loads/stores
   through the result are volatile-qualified.  */
2805 java_modify_addr_for_volatile (tree exp)
2807 tree exp_type = TREE_TYPE (exp);
2809 = build_qualified_type (exp_type,
2810 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
/* Take the address, reinterpret it as pointer-to-volatile, and
   dereference again.  */
2811 tree addr = build_fold_addr_expr (exp);
2812 v_type = build_pointer_type (v_type);
2813 addr = fold_convert (v_type, addr);
2814 exp = build_fold_indirect_ref (addr);
2819 /* Expand an operation to extract from or store into a field.
2820 IS_STATIC is 1 iff the field is static.
2821 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2822 FIELD_REF_INDEX is an index into the constant pool. */
2825 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
/* Fixed mojibake: "&current_jcf" had been corrupted to "\xa4t_jcf" by an
   HTML-entity (&curren;) round trip; restored on the three lines below.  */
2828 = get_class_constant (current_jcf,
2829 COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
2831 const char *self_name
2832 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2833 tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
2834 tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
2836 tree field_type = get_type_from_signature (field_signature);
/* For a put* the new value is already on the operand stack.  */
2837 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2840 tree original_self_type = self_type;
2844 if (! CLASS_LOADED_P (self_type))
2845 load_class (self_type, 1);
2846 field_decl = lookup_field (&self_type, field_name);
2847 if (field_decl == error_mark_node)
2851 else if (field_decl == NULL_TREE)
/* BC-compiling without verification: fabricate a dummy field decl so
   that code generation can continue; resolution happens at runtime.  */
2853 if (! flag_verify_invocations)
2855 int flags = ACC_PUBLIC;
2857 flags |= ACC_STATIC;
2858 self_type = original_self_type;
2859 field_decl = add_field (original_self_type, field_name,
2861 DECL_ARTIFICIAL (field_decl) = 1;
2862 DECL_IGNORED_P (field_decl) = 1;
2864 /* FIXME: We should be pessimistic about volatility. We
2865 don't know one way or another, but this is safe.
2866 However, doing this has bad effects on code quality. We
2867 need to look at better ways to do this. */
2868 TREE_THIS_VOLATILE (field_decl) = 1;
2873 error ("missing field '%s' in '%s'",
2874 IDENTIFIER_POINTER (field_name), self_name);
2878 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2880 error ("mismatching signature for field '%s' in '%s'",
2881 IDENTIFIER_POINTER (field_name), self_name);
/* Instance fields take their receiver from the operand stack.  */
2884 field_ref = is_static ? NULL_TREE : pop_value (self_type);
2888 push_value (convert (field_type, integer_zero_node));
2889 flush_quick_stack ();
2893 field_ref = build_field_ref (field_ref, self_type, field_name);
/* Static field access may have to trigger class initialization first.  */
2895 && ! flag_indirect_dispatch)
2897 tree context = DECL_CONTEXT (field_ref);
2898 if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2899 field_ref = build_class_init (context, field_ref);
2901 field_ref = build_class_init (self_type, field_ref);
2905 flush_quick_stack ();
2906 if (FIELD_FINAL (field_decl))
2908 if (DECL_CONTEXT (field_decl) != current_class)
2909 error ("assignment to final field %q+D not in field's class",
2911 /* We used to check for assignments to final fields not
2912 occurring in the class initializer or in a constructor
2913 here. However, this constraint doesn't seem to be
2914 enforced by the JVM. */
/* Volatile store: go through a volatile-qualified lvalue and emit a
   memory barrier around the write (JMM-style fencing).  */
2917 if (TREE_THIS_VOLATILE (field_decl))
2918 field_ref = java_modify_addr_for_volatile (field_ref);
2920 modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2921 field_ref, new_value);
2923 if (TREE_THIS_VOLATILE (field_decl))
2925 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2927 java_add_stmt (modify_expr);
/* Load path: read the field into a fresh temporary so the value is
   fixed at this program point.  */
2931 tree temp = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2932 java_add_local_var (temp);
2934 if (TREE_THIS_VOLATILE (field_decl))
2935 field_ref = java_modify_addr_for_volatile (field_ref);
2938 = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2939 java_add_stmt (modify_expr);
2941 if (TREE_THIS_VOLATILE (field_decl))
2943 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2947 TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
/* Restore the verifier's recorded stack/locals type state at LABEL into
   the global type_map, and reset stack_pointer accordingly.  */
2951 load_type_state (tree label)
2954 tree vec = LABEL_TYPE_STATE (vec);
2955 int cur_length = TREE_VEC_LENGTH (vec);
/* The vector holds locals followed by stack slots; the stack depth is
   what remains after the max-locals prefix.  */
2956 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2957 for (i = 0; i < cur_length; i++)
2958 type_map [i] = TREE_VEC_ELT (vec, i);
2961 /* Go over METHOD's bytecode and note instruction starts in
2962 instruction_bits[]. */
2965 note_instructions (JCF *jcf, tree method)
2968 unsigned char* byte_ops;
2969 long length = DECL_CODE_LENGTH (method);
2974 #undef RET /* Defined by config/i386/i386.h */
/* The PRE_* macros below drive a first pass over the bytecode via
   javaop.def: they consume each opcode's immediates (the IMMEDIATE_*
   macros advance PC as a side effect, so evaluation order matters) and
   record branch targets with NOTE_LABEL.  */
2976 #define BCODE byte_ops
2977 #define BYTE_type_node byte_type_node
2978 #define SHORT_type_node short_type_node
2979 #define INT_type_node int_type_node
2980 #define LONG_type_node long_type_node
2981 #define CHAR_type_node char_type_node
2982 #define PTR_type_node ptr_type_node
2983 #define FLOAT_type_node float_type_node
2984 #define DOUBLE_type_node double_type_node
2985 #define VOID_type_node void_type_node
2986 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
2987 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
2988 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
2989 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
2991 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
2993 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
2994 byte_ops = jcf->read_ptr;
/* One flag byte per bytecode PC, plus a sentinel slot at length.  */
2995 instruction_bits = xrealloc (instruction_bits, length + 1);
2996 memset (instruction_bits, 0, length + 1);
2998 /* This pass figures out which PC can be the targets of jumps. */
2999 for (PC = 0; PC < length;)
3001 int oldpc = PC; /* PC at instruction start. */
3002 instruction_bits [PC] |= BCODE_INSTRUCTION_START;
3003 switch (byte_ops[PC++])
3005 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3007 PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3010 #define NOTE_LABEL(PC) note_label(oldpc, PC)
3012 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3013 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3014 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3015 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3016 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3017 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3018 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3019 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3021 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3022 PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3023 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3024 ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3025 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3026 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3027 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3028 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3030 /* two forms of wide instructions */
3031 #define PRE_SPECIAL_WIDE(IGNORE) \
3033 int modified_opcode = IMMEDIATE_u1; \
3034 if (modified_opcode == OPCODE_iinc) \
3036 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3037 (void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
3041 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3045 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3047 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3049 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3050 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3051 PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3052 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
3053 #define PRE_ARRAY_STORE(TYPE) /* nothing */
3054 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3055 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3056 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3057 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3058 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
/* Branch-family opcodes: record the (signed, oldpc-relative) targets.  */
3060 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3061 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3062 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3063 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3064 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3065 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3066 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3068 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3070 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)
/* Switches: skip the 0-3 alignment pad bytes, then walk the table to
   note the default and every case target.  */
3072 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3073 PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3075 #define PRE_LOOKUP_SWITCH \
3076 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3077 NOTE_LABEL (default_offset+oldpc); \
3079 while (--npairs >= 0) { \
3080 jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
3081 jint offset = IMMEDIATE_s4; \
3082 NOTE_LABEL (offset+oldpc); } \
3085 #define PRE_TABLE_SWITCH \
3086 { jint default_offset = IMMEDIATE_s4; \
3087 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3088 NOTE_LABEL (default_offset+oldpc); \
3090 while (low++ <= high) { \
3091 jint offset = IMMEDIATE_s4; \
3092 NOTE_LABEL (offset+oldpc); } \
3095 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3096 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3097 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3098 (void)(IMMEDIATE_u2); \
3099 PC += 2 * IS_INTERFACE /* for invokeinterface */;
3101 #include "javaop.def"
/* Main driver: translate METHOD's bytecode into GCC trees.  Verifies
   the code, then walks it PC by PC, emitting labels, line numbers, and
   per-instruction expansions via process_jvm_instruction.  */
3108 expand_byte_code (JCF *jcf, tree method)
3112 const unsigned char *linenumber_pointer;
3113 int dead_code_index = -1;
3114 unsigned char* byte_ops;
3115 long length = DECL_CODE_LENGTH (method);
3118 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3119 byte_ops = jcf->read_ptr;
3121 /* We make an initial pass of the line number table, to note
3122 which instructions have associated line number entries. */
3123 linenumber_pointer = linenumber_table;
3124 for (i = 0; i < linenumber_count; i++)
3126 int pc = GET_u2 (linenumber_pointer);
/* Each table entry is 4 bytes: u2 start_pc followed by u2 line.  */
3127 linenumber_pointer += 4;
3129 warning (0, "invalid PC in line number table");
3132 if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3133 instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3134 instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
/* Run the bytecode verifier; it also fills in the BCODE_* bits used
   below (targets, verified flags, type maps).  */
3138 if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3141 promote_arguments ();
3142 cache_this_class_ref (method);
3143 cache_cpool_data_ref ();
3145 /* Translate bytecodes. */
3146 linenumber_pointer = linenumber_table;
3147 for (PC = 0; PC < length;)
/* Jump targets (and PC 0) need a label and a fresh type state; the
   quick stack must be flushed before control can merge here.  */
3149 if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3151 tree label = lookup_label (PC);
3152 flush_quick_stack ();
3153 if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3154 java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
3155 if (LABEL_VERIFIED (label) || PC == 0)
3156 load_type_state (label);
3159 if (! (instruction_bits [PC] & BCODE_VERIFIED))
3161 if (dead_code_index == -1)
3163 /* This is the start of a region of unreachable bytecodes.
3164 They still need to be processed in order for EH ranges
3165 to get handled correctly. However, we can simply
3166 replace these bytecodes with nops. */
3167 dead_code_index = PC;
3170 /* Turn this bytecode into a nop. */
3175 if (dead_code_index != -1)
3177 /* We've just reached the end of a region of dead code. */
3179 warning (0, "unreachable bytecode from %d to before %d",
3180 dead_code_index, PC);
3181 dead_code_index = -1;
3185 /* Handle possible line number entry for this PC.
3187 This code handles out-of-order and multiple linenumbers per PC,
3188 but is optimized for the case of line numbers increasing
3189 monotonically with PC. */
3190 if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
/* Rewind to the table start only when the fast-path guess (the next
   sequential entry matches this PC) fails.  */
3192 if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3193 || GET_u2 (linenumber_pointer) != PC)
3194 linenumber_pointer = linenumber_table;
3195 while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3197 int pc = GET_u2 (linenumber_pointer);
3198 linenumber_pointer += 4;
3201 int line = GET_u2 (linenumber_pointer - 2);
3202 #ifdef USE_MAPPED_LOCATION
3203 input_location = linemap_line_start (&line_table, line, 1);
3205 input_location.line = line;
3207 if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
/* Expand exactly one instruction; maybe_push/poplevels track local
   variable scopes keyed on bytecode PC ranges.  */
3212 maybe_pushlevels (PC);
3213 PC = process_jvm_instruction (PC, byte_ops, length);
3214 maybe_poplevels (PC);
3217 uncache_this_class_ref (method);
3219 if (dead_code_index != -1)
3221 /* We've just reached the end of a region of dead code. */
3223 warning (0, "unreachable bytecode from %d to the end of the method",
/* Expand an ldc-style push of constant-pool entry INDEX from JCF onto
   the operand stack.  Handles String and Class constants specially;
   everything else (int/float/long/double) goes through get_constant.  */
3229 java_push_constant_from_pool (JCF *jcf, int index)
3232 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
/* Re-intern the UTF-8 name as a CONSTANT_String and reference it via
   this class's own constant pool.  */
3235 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3236 index = alloc_name_constant (CONSTANT_String, name);
3237 c = build_ref_from_constant_pool (index);
3238 c = convert (promote_type (string_type_node), c);
3240 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3241 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3243 tree record = get_class_constant (jcf, index);
3244 c = build_class_ref (record);
3247 c = get_constant (jcf, index);
3252 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3253 long length ATTRIBUTE_UNUSED)
3255 const char *opname; /* Temporary ??? */
3256 int oldpc = PC; /* PC at instruction start. */
3258 /* If the instruction is at the beginning of an exception handler,
3259 replace the top of the stack with the thrown object reference. */
3260 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3262 /* Note that the verifier will not emit a type map at all for
3263 dead exception handlers. In this case we just ignore the
3265 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3267 tree type = pop_type (promote_type (throwable_type_node));
3268 push_value (build_exception_object_ref (type));
3272 switch (byte_ops[PC++])
3274 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3277 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3280 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3282 int saw_index = 0; \
3283 int index = OPERAND_VALUE; \
3285 (find_local_variable (index, return_address_type_node, oldpc)); \
3288 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3290 /* OPERAND_VALUE may have side-effects on PC */ \
3291 int opvalue = OPERAND_VALUE; \
3292 build_java_jsr (oldpc + opvalue, PC); \
3295 /* Push a constant onto the stack. */
3296 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3297 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3298 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3299 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3301 /* internal macro added for use by the WIDE case */
3302 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3303 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3305 /* Push local variable onto the opcode stack. */
3306 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3308 /* have to do this since OPERAND_VALUE may have side-effects */ \
3309 int opvalue = OPERAND_VALUE; \
3310 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3313 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3314 expand_java_return (OPERAND_TYPE##_type_node)
3316 #define REM_EXPR TRUNC_MOD_EXPR
3317 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3318 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3320 #define FIELD(IS_STATIC, IS_PUT) \
3321 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3323 #define TEST(OPERAND_TYPE, CONDITION) \
3324 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3326 #define COND(OPERAND_TYPE, CONDITION) \
3327 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3329 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3330 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3332 #define BRANCH_GOTO(OPERAND_VALUE) \
3333 expand_java_goto (oldpc + OPERAND_VALUE)
3335 #define BRANCH_CALL(OPERAND_VALUE) \
3336 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3339 #define BRANCH_RETURN(OPERAND_VALUE) \
3341 tree type = OPERAND_TYPE##_type_node; \
3342 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3343 expand_java_ret (value); \
3347 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3348 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3349 fprintf (stderr, "(not implemented)\n")
/* Like the stub above, but for opcodes that carry an operand value.  */
3350 #define NOT_IMPL1(OPERAND_VALUE) \
3351 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3352 fprintf (stderr, "(not implemented)\n")
/* The `ret' branch-return family is routed to the not-implemented stub.  */
3354 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
/* Stack-manipulation opcodes (pop, swap, dup variants) dispatch to the
   STACK_<SUBOP> helpers defined below.  */
3356 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3358 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3360 #define STACK_SWAP(COUNT) java_stack_swap()
3362 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3363 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3364 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3366 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3367 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
/* tableswitch/lookupswitch operands are padded to a 4-byte boundary in
   the bytecode stream, hence the PC rounding in SWITCH above.  Each case
   is registered on a switch expression built from the popped selector.  */
3369 #define LOOKUP_SWITCH \
3370 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3371 tree selector = pop_value (INT_type_node); \
3372 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3373 while (--npairs >= 0) \
3375 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3376 expand_java_add_case (switch_expr, match, oldpc + offset); \
3380 #define TABLE_SWITCH \
3381 { jint default_offset = IMMEDIATE_s4; \
3382 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3383 tree selector = pop_value (INT_type_node); \
3384 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3385 for (; low <= high; low++) \
3387 jint offset = IMMEDIATE_s4; \
3388 expand_java_add_case (switch_expr, low, oldpc + offset); \
3392 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3393 { int opcode = byte_ops[PC-1]; \
3394 int method_ref_index = IMMEDIATE_u2; \
3396 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3398 expand_invoke (opcode, method_ref_index, nargs); \
3401 /* Handle new, checkcast, instanceof */
3402 #define OBJECT(TYPE, OP) \
3403 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
/* Array opcodes dispatch to the ARRAY_<SUBOP> helpers below.  */
3405 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3407 #define ARRAY_LOAD(OPERAND_TYPE) \
3409 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3412 #define ARRAY_STORE(OPERAND_TYPE) \
3414 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3417 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
3418 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
/* anewarray: the element class comes from the constant pool; the element
   count is popped from the operand stack.  */
3419 #define ARRAY_NEW_PTR() \
3420 push_value (build_anewarray (get_class_constant (current_jcf, \
3422 pop_value (int_type_node)));
/* newarray: the primitive element type is encoded in an immediate byte.  */
3423 #define ARRAY_NEW_NUM() \
3425 int atype = IMMEDIATE_u1; \
3426 push_value (build_newarray (atype, pop_value (int_type_node)));\
3428 #define ARRAY_NEW_MULTI() \
3430 tree class = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3431 int ndims = IMMEDIATE_u1; \
3432 expand_java_multianewarray( class, ndims ); \
3435 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3436 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3437 pop_value (OPERAND_TYPE##_type_node)));
/* CONVERT2 converts the popped value to TO_TYPE, then re-expresses the
   result as int via a NOP_EXPR before pushing it.  */
3439 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3441 push_value (build1 (NOP_EXPR, int_type_node, \
3442 (convert (TO_TYPE##_type_node, \
3443 pop_value (FROM_TYPE##_type_node))))); \
3446 #define CONVERT(FROM_TYPE, TO_TYPE) \
3448 push_value (convert (TO_TYPE##_type_node, \
3449 pop_value (FROM_TYPE##_type_node))); \
3452 /* internal macro added for use by the WIDE case
3453 Added TREE_TYPE (decl) assignment, apbianco */
3454 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3457 int index = OPVALUE; \
3458 tree type = OPTYPE; \
3459 value = pop_value (type); \
3460 type = TREE_TYPE (value); \
3461 decl = find_local_variable (index, type, oldpc); \
3462 set_local_type (index, type); \
3463 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
3466 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3468 /* have to do this since OPERAND_VALUE may have side-effects */ \
3469 int opvalue = OPERAND_VALUE; \
3470 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3473 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3474 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
/* monitorenter/monitorexit map onto the soft runtime routines; the
   monitored object reference is popped from the stack.  */
3476 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3477 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
3479 #define MONITOR_OPERATION(call) \
3481 tree o = pop_value (ptr_type_node); \
3483 flush_quick_stack (); \
3484 c = build_java_monitor (call, o); \
3485 TREE_SIDE_EFFECTS (c) = 1; \
3486 java_add_stmt (c); \
3489 #define SPECIAL_IINC(IGNORED) \
3491 unsigned int local_var_index = IMMEDIATE_u1; \
3492 int ival = IMMEDIATE_s1; \
3493 expand_iinc(local_var_index, ival, oldpc); \
3496 #define SPECIAL_WIDE(IGNORED) \
3498 int modified_opcode = IMMEDIATE_u1; \
3499 unsigned int local_var_index = IMMEDIATE_u2; \
3500 switch (modified_opcode) \
3504 int ival = IMMEDIATE_s2; \
3505 expand_iinc (local_var_index, ival, oldpc); \
3508 case OPCODE_iload: \
3509 case OPCODE_lload: \
3510 case OPCODE_fload: \
3511 case OPCODE_dload: \
3512 case OPCODE_aload: \
3514 /* duplicate code from LOAD macro */ \
3515 LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3518 case OPCODE_istore: \
3519 case OPCODE_lstore: \
3520 case OPCODE_fstore: \
3521 case OPCODE_dstore: \
3522 case OPCODE_astore: \
3524 STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3528 error ("unrecogized wide sub-instruction"); \
3532 #define SPECIAL_THROW(IGNORED) \
3533 build_java_athrow (pop_value (throwable_type_node))
/* `breakpoint' and any remaining IMPL cases fall back to the
   not-implemented stubs above.  */
3535 #define SPECIAL_BREAK NOT_IMPL1
3536 #define IMPL NOT_IMPL
/* javaop.def expands the macros above into the per-opcode handlers.  */
3538 #include "javaop.def"
/* Diagnostic for an opcode not covered by javaop.def.  */
3541 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3546 /* Return the opcode at PC in the code section pointed to by
3549 static unsigned char
3550 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3552 unsigned char opcode;
/* Remember the current read position so it can be restored: the peek
   must leave the JCF cursor exactly where the caller had it.  */
3553 long absolute_offset = (long)JCF_TELL (jcf);
3555 JCF_SEEK (jcf, code_offset);
/* Read the raw byte at offset PC within the code section.  */
3556 opcode = jcf->read_ptr [pc];
3557 JCF_SEEK (jcf, absolute_offset);
3561 /* Some bytecode compilers are emitting accurate LocalVariableTable
3562 attributes. Here's an example:
3567 Attribute "LocalVariableTable"
3568 slot #<n>: ... (PC: PC+1 length: L)
3570 This is accurate because the local in slot <n> really exists after
3571 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3573 This procedure recognizes this situation and extends the live range
3574 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3575 length of the store instruction.)
3577 This function is used by `give_name_to_locals' so that a local's
3578 DECL features a DECL_LOCAL_START_PC such that the first related
3579 store operation will use DECL as a destination, not an unrelated
3580 temporary created for the occasion.
3582 This function uses a global (instruction_bits) `note_instructions' should
3583 have allocated and filled properly. */
3586 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3587 int start_pc, int slot)
3589 int first, index, opcode;
3598 /* Find last previous instruction and remember it */
3599 for (pc = start_pc-1; pc; pc--)
3600 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3604 /* Retrieve the instruction, handle `wide'. */
3605 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3606 if (opcode == OPCODE_wide)
3609 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
/* Map each <t>store_<n> shortcut form to its group's base opcode so the
   target slot can later be recovered as (opcode - first).  */
3614 case OPCODE_astore_0:
3615 case OPCODE_astore_1:
3616 case OPCODE_astore_2:
3617 case OPCODE_astore_3:
3618 first = OPCODE_astore_0;
3621 case OPCODE_istore_0:
3622 case OPCODE_istore_1:
3623 case OPCODE_istore_2:
3624 case OPCODE_istore_3:
3625 first = OPCODE_istore_0;
3628 case OPCODE_lstore_0:
3629 case OPCODE_lstore_1:
3630 case OPCODE_lstore_2:
3631 case OPCODE_lstore_3:
3632 first = OPCODE_lstore_0;
3635 case OPCODE_fstore_0:
3636 case OPCODE_fstore_1:
3637 case OPCODE_fstore_2:
3638 case OPCODE_fstore_3:
3639 first = OPCODE_fstore_0;
3642 case OPCODE_dstore_0:
3643 case OPCODE_dstore_1:
3644 case OPCODE_dstore_2:
3645 case OPCODE_dstore_3:
3646 first = OPCODE_dstore_0;
/* For the long-form stores, read the one-byte slot operand; under a
   `wide' prefix the operand is extended with a second byte.  */
3654 index = peek_opcode_at_pc (jcf, code_offset, pc);
3657 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
/* NOTE(review): this combines the operand bytes as (second << 8) + first,
   i.e. little-endian, whereas the JVM `wide' format is big-endian
   ((indexbyte1 << 8) | indexbyte2) -- verify against the spec.  */
3658 index = (other << 8) + index;
3663 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3664 means we have a <t>store. */
3665 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3671 /* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
3672 order, as specified by Java Language Specification.
3674 The problem is that while expand_expr will evaluate its sub-operands in
3675 left-to-right order, for variables it will just return an rtx (i.e.
3676 an lvalue) for the variable (rather than an rvalue). So it is possible
3677 that a later sub-operand will change the register, and when the
3678 actual operation is done, it will use the new value, when it should
3679 have used the original value.
3681 We fix this by using save_expr. This forces the sub-operand to be
3682 copied into a fresh virtual register,
3684 For method invocation, we modify the arguments so that a
3685 left-to-right order evaluation is performed. Saved expressions
3686 will, in CALL_EXPR order, be reused when the call will be expanded.
3688 We also promote outgoing args if needed. */
3691 force_evaluation_order (tree node)
3693 if (flag_syntax_only)
/* Only rewrite calls -- either a bare CALL_EXPR or a CALL_EXPR wrapped
   in a COMPOUND_EXPR with a SAVE_EXPR (the constructor wrapping below).  */
3695 if (TREE_CODE (node) == CALL_EXPR
3696 || (TREE_CODE (node) == COMPOUND_EXPR
3697 && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
3698 && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
3703 /* Account for wrapped around ctors. */
3704 if (TREE_CODE (node) == COMPOUND_EXPR)
3705 call = TREE_OPERAND (node, 0);
3709 nargs = call_expr_nargs (call);
3711 /* This reverses the evaluation order. This is a desired effect. */
3712 for (i = 0, cmp = NULL_TREE; i < nargs; i++)
3714 tree arg = CALL_EXPR_ARG (call, i);
3715 /* Promote types smaller than integer. This is required by
3717 tree type = TREE_TYPE (arg);
3719 if (targetm.calls.promote_prototypes (type)
3720 && INTEGRAL_TYPE_P (type)
3721 && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
3722 TYPE_SIZE (integer_type_node)))
3723 arg = fold_convert (integer_type_node, arg);
/* Wrap the (recursively processed) argument in a SAVE_EXPR and chain it
   onto a COMPOUND_EXPR sequence so each argument is evaluated exactly
   once, before the call itself; the call then reuses the saved value.  */
3725 saved = save_expr (force_evaluation_order (arg));
3726 cmp = (cmp == NULL_TREE ? saved :
3727 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));
3729 CALL_EXPR_ARG (call, i) = saved;
3732 if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
3733 TREE_SIDE_EFFECTS (cmp) = 1;
/* Sequence the argument evaluations before the call node, preserving the
   call's type, its side-effect flag, and its normal-completion status.  */
3737 cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
3738 if (TREE_TYPE (cmp) != void_type_node)
3739 cmp = save_expr (cmp);
3740 CAN_COMPLETE_NORMALLY (cmp) = CAN_COMPLETE_NORMALLY (node);
3741 TREE_SIDE_EFFECTS (cmp) = 1;
3748 /* Build a node to represent empty statements and blocks. */
3751 build_java_empty_stmt (void)
3753 tree t = build_empty_stmt ();
/* An empty statement trivially completes normally.  */
3754 CAN_COMPLETE_NORMALLY (t) = 1;
3758 /* Promote all args of integral type before generating any code. */
3761 promote_arguments (void)
/* Walk the current function's parameters, tracking the local slot
   index I alongside each PARM_DECL.  */
3765 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3766 arg != NULL_TREE; arg = TREE_CHAIN (arg), i++)
3768 tree arg_type = TREE_TYPE (arg);
3769 if (INTEGRAL_TYPE_P (arg_type)
3770 && TYPE_PRECISION (arg_type) < 32)
/* Widen sub-int integral parameters into an int-typed local for slot I,
   initialized from the incoming argument.  */
3772 tree copy = find_local_variable (i, integer_type_node, -1);
3773 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3775 fold_convert (integer_type_node, arg)));
/* NOTE(review): presumably the two-slot (long/double) case advances the
   slot index an extra step here -- confirm against the full source.  */
3777 if (TYPE_IS_WIDE (arg_type))
3782 /* Create a local variable that points to the constant pool. */
3785 cache_cpool_data_ref (void)
3790 tree d = build_constant_data_ref (flag_indirect_classes);
/* Declare an anonymous local holding a pointer to the constant data.  */
3791 tree cpool_ptr = build_decl (VAR_DECL, NULL_TREE,
3792 build_pointer_type (TREE_TYPE (d)));
3793 java_add_local_var (cpool_ptr);
/* The pointer is assigned once below and never changes, so mark it
   invariant and constant for the optimizers.  */
3794 TREE_INVARIANT (cpool_ptr) = 1;
3795 TREE_CONSTANT (cpool_ptr) = 1;
3797 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3798 cpool_ptr, build_address_of (d)));
/* Cache *cpool_ptr as OUTPUT_CLASS's constant-pool reference; the
   dereference is marked non-trapping.  */
3799 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3800 TREE_THIS_NOTRAP (cpool) = 1;
3801 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3805 #include "gt-java-expr.h"