1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA.
22 Java and all Java-based marks are trademarks or registered trademarks
23 of Sun Microsystems, Inc. in the United States and other countries.
24 The Free Software Foundation is independent of Sun Microsystems, Inc. */
26 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
30 #include "coretypes.h"
37 #include "java-tree.h"
39 #include "java-opcodes.h"
41 #include "java-except.h"
46 #include "tree-gimple.h"
/* Forward declarations for the static helpers defined later in this file.  */
49 static void flush_quick_stack (void);
50 static void push_value (tree);
51 static tree pop_value (tree);
52 static void java_stack_swap (void);
53 static void java_stack_dup (int, int);
54 static void build_java_athrow (tree);
55 static void build_java_jsr (int, int);
56 static void build_java_ret (tree);
57 static void expand_java_multianewarray (tree, int);
58 static void expand_java_arraystore (tree);
59 static void expand_java_arrayload (tree);
60 static void expand_java_array_length (void);
61 static tree build_java_monitor (tree, tree);
62 static void expand_java_pushc (int, tree);
63 static void expand_java_return (tree);
64 static void expand_load_internal (int, tree, int);
65 static void expand_java_NEW (tree);
66 static void expand_java_INSTANCEOF (tree);
67 static void expand_java_CHECKCAST (tree);
68 static void expand_iinc (unsigned int, int, int);
69 static void expand_java_binop (tree, enum tree_code);
70 static void note_label (int, int);
71 static void expand_compare (enum tree_code, tree, tree, int);
72 static void expand_test (enum tree_code, tree, int);
73 static void expand_cond (enum tree_code, tree, int);
74 static void expand_java_goto (int);
75 static tree expand_java_switch (tree, int);
76 static void expand_java_add_case (tree, int, int);
77 static tree pop_arguments (tree);
78 static void expand_invoke (int, int, int);
79 static void expand_java_field_op (int, int, int);
80 static void java_push_constant_from_pool (struct JCF *, int);
81 static void java_stack_pop (int);
82 static tree build_java_throw_out_of_bounds_exception (tree);
83 static tree build_java_check_indexed_type (tree, tree);
84 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
85 static void promote_arguments (void);
86 static void cache_cpool_data_ref (void);
/* Per-opcode operand types; entries are filled in by init_expr_processing
   (indices 21-25 and 54-58 are initialized there).  GC-rooted via GTY.  */
88 static GTY(()) tree operand_type[59];
/* Cached identifier nodes.  NOTE(review): their initialization is not
   visible in this extract -- presumably set up elsewhere in the front end.  */
90 static GTY(()) tree methods_ident;
91 static GTY(()) tree ncode_ident;
92 tree dtable_ident = NULL_TREE;
94 /* Set to nonzero value in order to emit class initialization code
95 before static field references. */
96 int always_initialize_class_p = 0;
98 /* We store the stack state in two places:
99 Within a basic block, we use the quick_stack, which is a
100 pushdown list (TREE_LISTs) of expression nodes.
101 This is the top part of the stack; below that we use find_stack_slot.
102 At the end of a basic block, the quick_stack must be flushed
103 to the stack slot array (as handled by find_stack_slot).
104 Using quick_stack generates better code (especially when
105 compiled without optimization), because we do not have to
106 explicitly store and load trees to temporary variables.
108 If a variable is on the quick stack, it means the value of variable
109 when the quick stack was last flushed. Conceptually, flush_quick_stack
110 saves all the quick_stack elements in parallel. However, that is
111 complicated, so it actually saves them (i.e. copies each stack value
112 to is home virtual register) from low indexes. This allows a quick_stack
113 element at index i (counting from the bottom of stack the) to references
114 slot virtuals for register that are >= i, but not those that are deeper.
115 This convention makes most operations easier. For example iadd works
116 even when the stack contains (reg[0], reg[1]): It results in the
117 stack containing (reg[0]+reg[1]), which is OK. However, some stack
118 operations are more complicated. For example dup given a stack
119 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
120 the convention, since stack value 1 would refer to a register with
121 lower index (reg[0]), which flush_quick_stack does not safely handle.
122 So dup cannot just add an extra element to the quick_stack, but iadd can.
/* The quick stack itself: a pushdown list of TREE_LIST nodes holding the
   expressions for the top of the JVM operand stack (see comment above).  */
125 static GTY(()) tree quick_stack;
127 /* A free-list of unused permanent TREE_LIST nodes. */
128 static GTY((deletable)) tree tree_list_free_list;
130 /* The physical memory page size used in this computer. See
131 build_field_ref(). */
132 static GTY(()) tree page_size;
134 /* The stack pointer of the Java virtual machine.
135 This does include the size of the quick_stack. */
/* Line-number table of the method currently being compiled.  */
139 const unsigned char *linenumber_table;
140 int linenumber_count;
142 /* Largest pc so far in this method that has been passed to lookup_label. */
143 int highest_label_pc_this_method = -1;
145 /* Base value for this method to add to pc to get generated label. */
146 int start_label_pc_this_method = 0;
/* One-time setup: record the operand type for each typed load/store
   opcode.  Indices 21-25 and 54-58 correspond to the JVM iload..aload
   and istore..astore opcode families respectively.  */
149 init_expr_processing (void)
151 operand_type[21] = operand_type[54] = int_type_node;
152 operand_type[22] = operand_type[55] = long_type_node;
153 operand_type[23] = operand_type[56] = float_type_node;
154 operand_type[24] = operand_type[57] = double_type_node;
155 operand_type[25] = operand_type[58] = ptr_type_node;
/* Convert EXPR to a boolean truth value.
   NOTE(review): this extract is missing interior lines -- several `case'
   labels (e.g. for the constant, conversion and COND_EXPR arms) and the
   surrounding braces appear to have been lost; the code below is kept
   byte-identical to the dump.  */
159 java_truthvalue_conversion (tree expr)
161 /* It is simpler and generates better code to have only TRUTH_*_EXPR
162 or comparison expressions as truth values at this level.
164 This function should normally be identity for Java. */
166 switch (TREE_CODE (expr))
168 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
169 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
170 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
171 case ORDERED_EXPR: case UNORDERED_EXPR:
172 case TRUTH_ANDIF_EXPR:
173 case TRUTH_ORIF_EXPR:
/* Integer constants: map zero to false, anything else to true.  */
182 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
/* Real constants: same mapping.  */
185 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
187 /* are these legal? XXX JH */
191 /* These don't change whether an object is nonzero or zero. */
192 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
195 /* Distribute the conversion into the arms of a COND_EXPR. */
196 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
197 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
198 java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
201 /* If this is widening the argument, we can ignore it. */
202 if (TYPE_PRECISION (TREE_TYPE (expr))
203 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
204 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
205 /* fall through to default */
/* Default: compare against boolean false.  */
208 return fold_build2 (NE_EXPR, boolean_type_node,
209 expr, boolean_false_node);
213 /* Save any stack slots that happen to be in the quick_stack into their
214 home virtual register slots.
216 The copy order is from low stack index to high, to support the invariant
217 that the expression for a slot may contain decls for stack slots with
218 higher (or the same) index, but not lower. */
221 flush_quick_stack (void)
223 int stack_index = stack_pointer;
224 tree prev, cur, next;
226 /* First reverse the quick_stack, and count the number of slots it has. */
227 for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next)
229 next = TREE_CHAIN (cur);
230 TREE_CHAIN (cur) = prev;
/* Wide (long/double) values occupy two stack slots.  */
232 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (TREE_VALUE (cur)));
/* Now pop each entry, recycle its TREE_LIST cell onto the free list,
   and emit an assignment into the slot's home stack decl.  */
236 while (quick_stack != NULL_TREE)
239 tree node = quick_stack, type;
240 quick_stack = TREE_CHAIN (node);
241 TREE_CHAIN (node) = tree_list_free_list;
242 tree_list_free_list = node;
243 node = TREE_VALUE (node);
244 type = TREE_TYPE (node);
246 decl = find_stack_slot (stack_index, type);
248 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (node), decl, node));
249 stack_index += 1 + TYPE_IS_WIDE (type);
253 /* Push TYPE on the type stack.
254 Return nonzero on success, 0 on overflow. */
257 push_type_0 (tree type)
/* Promote small integral types so every stack slot is at least 32 bits.  */
260 type = promote_type (type);
261 n_words = 1 + TYPE_IS_WIDE (type);
262 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
264 /* Allocate decl for this variable now, so we get a temporary that
265 survives the whole method. */
266 find_stack_slot (stack_pointer, type);
267 stack_type_map[stack_pointer++] = type;
/* Mark the second word of a wide value with the TYPE_SECOND sentinel.  */
269 while (--n_words >= 0)
270 stack_type_map[stack_pointer++] = TYPE_SECOND;
/* Push TYPE on the type stack; aborting on overflow is handled by the
   caller of push_type_0 (the failure path is not visible in this extract).  */
275 push_type (tree type)
277 int r = push_type_0 (type);
/* Push the expression VALUE onto the quick stack (and the type stack).
   Small integral values are widened to int first, matching JVM semantics.  */
282 push_value (tree value)
284 tree type = TREE_TYPE (value);
285 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
287 type = promote_type (type);
288 value = convert (type, value);
/* Reuse a cell from the free list when available, else cons a new one.  */
291 if (tree_list_free_list == NULL_TREE)
292 quick_stack = tree_cons (NULL_TREE, value, quick_stack);
295 tree node = tree_list_free_list;
296 tree_list_free_list = TREE_CHAIN (tree_list_free_list);
297 TREE_VALUE (node) = value;
298 TREE_CHAIN (node) = quick_stack;
301 /* If the value has a side effect, then we need to evaluate it
302 whether or not the result is used. If the value ends up on the
303 quick stack and is then popped, this won't happen -- so we flush
304 the quick stack. It is safest to simply always flush, though,
305 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
306 the latter we may need to strip conversions. */
307 flush_quick_stack ();
310 /* Pop a type from the type stack.
311 TYPE is the expected type. Return the actual type, which must be
313 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
316 pop_type_0 (tree type, char **messagep)
321 if (TREE_CODE (type) == RECORD_TYPE)
322 type = promote_type (type);
323 n_words = 1 + TYPE_IS_WIDE (type);
324 if (stack_pointer < n_words)
326 *messagep = xstrdup ("stack underflow");
/* The upper word(s) of a wide value must carry the TYPE_SECOND marker
   (stored as void_type_node in stack_type_map).  */
329 while (--n_words > 0)
331 if (stack_type_map[--stack_pointer] != void_type_node)
333 *messagep = xstrdup ("Invalid multi-word value on type stack");
337 t = stack_type_map[--stack_pointer];
338 if (type == NULL_TREE || t == type)
/* A TREE_LIST entry represents an unresolved/return-address style type;
   its TREE_PURPOSE holds the underlying type.  */
340 if (TREE_CODE (t) == TREE_LIST)
344 tree tt = TREE_PURPOSE (t);
345 if (! can_widen_reference_to (tt, type))
/* int-sized integral types are interchangeable on the JVM stack.  */
355 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
356 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
358 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
360 /* If the expected type we've been passed is object or ptr
361 (i.e. void*), the caller needs to know the real type. */
362 if (type == ptr_type_node || type == object_ptr_type_node)
365 /* Since the verifier has already run, we know that any
366 types we see will be compatible. In BC mode, this fact
367 may be checked at runtime, but if that is so then we can
368 assume its truth here as well. So, we always succeed
369 here, with the expected type. */
373 if (! flag_verify_invocations && flag_indirect_dispatch
374 && t == object_ptr_type_node)
376 if (type != ptr_type_node)
377 warning (0, "need to insert runtime check for %s",
378 xstrdup (lang_printable_name (type, 0)));
382 /* lang_printable_name uses a static buffer, so we must save the result
383 from calling it the first time. */
386 char *temp = xstrdup (lang_printable_name (type, 0));
387 /* If the stack contains a multi-word type, keep popping the stack until
388 the real type is found. */
389 while (t == void_type_node)
390 t = stack_type_map[--stack_pointer];
391 *messagep = concat ("expected type '", temp,
392 "' but stack contains '", lang_printable_name (t, 0),
399 /* Pop a type from the type stack.
400 TYPE is the expected type. Return the actual type, which must be
401 convertible to TYPE, otherwise call error. */
406 char *message = NULL;
407 type = pop_type_0 (type, &message);
/* pop_type_0 reports failure by filling in MESSAGE; surface it here.  */
410 error ("%s", message);
417 /* Return true if two type assertions are equal. */
420 type_assertion_eq (const void * k1_p, const void * k2_p)
422 type_assertion k1 = *(type_assertion *)k1_p;
423 type_assertion k2 = *(type_assertion *)k2_p;
/* Two assertions match when code and both operands match.
   NOTE(review): the op1 comparison line is missing from this extract.  */
424 return (k1.assertion_code == k2.assertion_code
426 && k1.op2 == k2.op2);
429 /* Hash a type assertion. */
432 type_assertion_hash (const void *p)
434 const type_assertion *k_p = p;
/* Chain the three fields through iterative_hash, seeding each stage
   with the previous stage's result.  */
435 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
436 k_p->assertion_code, 0);
437 hash = iterative_hash (&k_p->op1, sizeof k_p->op1, hash);
438 return iterative_hash (&k_p->op2, sizeof k_p->op2, hash);
441 /* Add an entry to the type assertion table for the given class.
442 CLASS is the class for which this assertion will be evaluated by the
443 runtime during loading/initialization.
444 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
445 OP1 and OP2 are the operands. The tree type of these arguments may be
446 specific to each assertion_code. */
449 add_type_assertion (tree class, int assertion_code, tree op1, tree op2)
451 htab_t assertions_htab;
455 assertions_htab = TYPE_ASSERTIONS (class);
456 if (assertions_htab == NULL)
458 assertions_htab = htab_create_ggc (7, type_assertion_hash,
459 type_assertion_eq, NULL);
/* NOTE(review): the table is read from CLASS above but stored into
   current_class here.  Harmless only if every caller passes
   current_class as CLASS -- confirm, or store into CLASS instead.  */
460 TYPE_ASSERTIONS (current_class) = assertions_htab;
463 as.assertion_code = assertion_code;
467 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
469 /* Don't add the same assertion twice. */
473 *as_pp = ggc_alloc (sizeof (type_assertion));
474 **(type_assertion **)as_pp = as;
478 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
479 Handles array types and interfaces. */
482 can_widen_reference_to (tree source_type, tree target_type)
/* Anything widens to Object; the raw pointer type widens to anything.  */
484 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
487 /* Get rid of pointers */
488 if (TREE_CODE (source_type) == POINTER_TYPE)
489 source_type = TREE_TYPE (source_type);
490 if (TREE_CODE (target_type) == POINTER_TYPE)
491 target_type = TREE_TYPE (target_type);
493 if (source_type == target_type)
496 /* FIXME: This is very pessimistic, in that it checks everything,
497 even if we already know that the types are compatible. If we're
498 to support full Java class loader semantics, we need this.
499 However, we could do something more optimal. */
500 if (! flag_verify_invocations)
/* BC mode: record a runtime assertion instead of deciding statically.  */
502 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
503 source_type, target_type);
506 warning (0, "assert: %s is assign compatible with %s",
507 xstrdup (lang_printable_name (target_type, 0)),
508 xstrdup (lang_printable_name (source_type, 0)));
509 /* Punt everything to runtime. */
513 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
519 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
521 HOST_WIDE_INT source_length, target_length;
522 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
524 /* An array implements Cloneable and Serializable. */
525 tree name = DECL_NAME (TYPE_NAME (target_type));
526 return (name == java_lang_cloneable_identifier_node
527 || name == java_io_serializable_identifier_node);
/* Both are arrays: lengths (when known) must agree, then recurse on
   the element types.  */
529 target_length = java_array_type_length (target_type);
530 if (target_length >= 0)
532 source_length = java_array_type_length (source_type);
533 if (source_length != target_length)
536 source_type = TYPE_ARRAY_ELEMENT (source_type);
537 target_type = TYPE_ARRAY_ELEMENT (target_type);
538 if (source_type == target_type)
540 if (TREE_CODE (source_type) != POINTER_TYPE
541 || TREE_CODE (target_type) != POINTER_TYPE)
543 return can_widen_reference_to (source_type, target_type);
547 int source_depth = class_depth (source_type);
548 int target_depth = class_depth (target_type);
550 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
553 warning (0, "assert: %s is assign compatible with %s",
554 xstrdup (lang_printable_name (target_type, 0)),
555 xstrdup (lang_printable_name (source_type, 0)));
559 /* class_depth can return a negative depth if an error occurred */
560 if (source_depth < 0 || target_depth < 0)
563 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
565 /* target_type is OK if source_type or source_type ancestors
566 implement target_type. We handle multiple sub-interfaces */
567 tree binfo, base_binfo;
570 for (binfo = TYPE_BINFO (source_type), i = 0;
571 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
572 if (can_widen_reference_to
573 (BINFO_TYPE (base_binfo), target_type))
/* Plain class target: walk up SOURCE's superclass chain until the
   depths match, then compare.  */
580 for ( ; source_depth > target_depth; source_depth--)
583 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
585 return source_type == target_type;
/* Pop an expression of (expected) TYPE.  If the top of the quick stack
   holds it, recycle that cell and return the expression; otherwise the
   value lives in its home stack slot, so return that slot's decl.  */
591 pop_value (tree type)
593 type = pop_type (type);
596 tree node = quick_stack;
597 quick_stack = TREE_CHAIN (quick_stack);
598 TREE_CHAIN (node) = tree_list_free_list;
599 tree_list_free_list = node;
600 node = TREE_VALUE (node);
604 return find_stack_slot (stack_pointer, promote_type (type));
608 /* Pop and discard the top COUNT stack slots. */
611 java_stack_pop (int count)
617 gcc_assert (stack_pointer != 0);
619 type = stack_type_map[stack_pointer - 1];
/* A TYPE_SECOND marker means the top slot is the upper half of a wide
   value; the real type sits one slot deeper.  */
620 if (type == TYPE_SECOND)
623 gcc_assert (stack_pointer != 1 && count > 0);
625 type = stack_type_map[stack_pointer - 2];
627 val = pop_value (type);
632 /* Implement the 'swap' operator (to swap two top stack slots). */
635 java_stack_swap (void)
/* Reject swaps involving unknown, second-word or wide (two-slot) types;
   JVM 'swap' is only defined for two single-word values.  */
641 if (stack_pointer < 2
642 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_UNKNOWN
643 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_UNKNOWN
644 || type1 == TYPE_SECOND || type2 == TYPE_SECOND
645 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
646 /* Bad stack swap. */
648 /* Bad stack swap. */
/* Swap via a temporary local so the two MODIFY_EXPRs do not clobber
   each other, then update the type map to match.  */
650 flush_quick_stack ();
651 decl1 = find_stack_slot (stack_pointer - 1, type1);
652 decl2 = find_stack_slot (stack_pointer - 2, type2);
653 temp = build_decl (VAR_DECL, NULL_TREE, type1);
654 java_add_local_var (temp);
655 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
656 java_add_stmt (build2 (MODIFY_EXPR, type2,
657 find_stack_slot (stack_pointer - 1, type2),
659 java_add_stmt (build2 (MODIFY_EXPR, type1,
660 find_stack_slot (stack_pointer - 2, type1),
662 stack_type_map[stack_pointer - 1] = type2;
663 stack_type_map[stack_pointer - 2] = type1;
/* Implement dup/dup_x1/dup_x2 and the dup2 variants: duplicate SIZE
   slots, inserting the copy OFFSET slots down.  Works slot-by-slot from
   the (new) top of stack toward LOW_INDEX, wrapping the source index so
   the bottom of the duplicated region reads from the original top.  */
667 java_stack_dup (int size, int offset)
669 int low_index = stack_pointer - size - offset;
672 error ("stack underflow - dup* operation");
674 flush_quick_stack ();
676 stack_pointer += size;
677 dst_index = stack_pointer;
679 for (dst_index = stack_pointer; --dst_index >= low_index; )
682 int src_index = dst_index - size;
683 if (src_index < low_index)
684 src_index = dst_index + size + offset;
685 type = stack_type_map [src_index];
686 if (type == TYPE_SECOND)
688 /* Dup operation splits 64-bit number. */
689 gcc_assert (src_index > low_index);
/* Copy the marker word, then step down to the wide type's first word.  */
691 stack_type_map[dst_index] = type;
692 src_index--; dst_index--;
693 type = stack_type_map[src_index];
694 gcc_assert (TYPE_IS_WIDE (type));
697 gcc_assert (! TYPE_IS_WIDE (type));
699 if (src_index != dst_index)
701 tree src_decl = find_stack_slot (src_index, type);
702 tree dst_decl = find_stack_slot (dst_index, type);
705 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
706 stack_type_map[dst_index] = type;
711 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
/* stack, since control does not return after a throw.  */
715 build_java_athrow (tree node)
719 call = build_call_nary (void_type_node,
720 build_address_of (throw_node),
722 TREE_SIDE_EFFECTS (call) = 1;
723 java_add_stmt (call);
/* The thrown-to frame never resumes here: empty the whole model stack.  */
724 java_stack_pop (stack_pointer);
727 /* Implementation for jsr/ret */
730 build_java_jsr (int target_pc, int return_pc)
732 tree where = lookup_label (target_pc);
733 tree ret = lookup_label (return_pc);
/* Push the return address (as a label address) for the matching 'ret'.  */
734 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
735 push_value (ret_label);
736 flush_quick_stack ();
737 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
739 /* Do not need to emit the label here. We noted the existence of the
740 label as a jump target in note_instructions; we'll emit the label
741 for real at the beginning of the expand_byte_code loop. */
/* Expand 'ret': a computed goto to the return address in LOCATION.  */
745 build_java_ret (tree location)
747 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
750 /* Implementation of operations on array: new, load, store, length */
/* Map an OPCODE_newarray type code (ATYPE) to the element type,
   or NULL_TREE for an unrecognized code.  Inverse of
   encode_newarray_type below.  */
753 decode_newarray_type (int atype)
757 case 4: return boolean_type_node;
758 case 5: return char_type_node;
759 case 6: return float_type_node;
760 case 7: return double_type_node;
761 case 8: return byte_type_node;
762 case 9: return short_type_node;
763 case 10: return int_type_node;
764 case 11: return long_type_node;
765 default: return NULL_TREE;
769 /* Map primitive type to the code used by OPCODE_newarray. */
/* NOTE(review): the return statements (the numeric codes 4..11, the
   inverse of decode_newarray_type) are missing from this extract; only
   the condition lines survive.  Code kept byte-identical.  */
772 encode_newarray_type (tree type)
774 if (type == boolean_type_node)
776 else if (type == char_type_node)
778 else if (type == float_type_node)
780 else if (type == double_type_node)
782 else if (type == byte_type_node)
784 else if (type == short_type_node)
786 else if (type == int_type_node)
788 else if (type == long_type_node)
794 /* Build a call to _Jv_ThrowBadArrayIndex(), the
795 ArrayIndexOfBoundsException exception handler. */
798 build_java_throw_out_of_bounds_exception (tree index)
800 tree node = build_call_nary (int_type_node,
801 build_address_of (soft_badarrayindex_node),
803 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
807 /* Return the length of an array. Doesn't perform any checking on the nature
808 or value of the array NODE. May be used to implement some bytecodes. */
811 build_java_array_length_access (tree node)
813 tree type = TREE_TYPE (node);
814 tree array_type = TREE_TYPE (type);
815 HOST_WIDE_INT length;
817 if (!is_array_type_p (type))
819 /* With the new verifier, we will see an ordinary pointer type
820 here. In this case, we just use an arbitrary array type. */
821 array_type = build_java_array_type (object_ptr_type_node, -1);
822 type = promote_type (array_type);
/* A statically-known length folds directly to a constant.  */
825 length = java_array_type_length (type);
827 return build_int_cst (NULL_TREE, length);
/* Otherwise read the 'length' field through a (possibly null-checked)
   indirect reference.  */
829 node = build3 (COMPONENT_REF, int_type_node,
830 build_java_indirect_ref (array_type, node,
831 flag_check_references),
832 lookup_field (&array_type, get_identifier ("length")),
834 IS_ARRAY_LENGTH_ACCESS (node) = 1;
838 /* Optionally checks a reference against the NULL pointer. ARG1: the
839 expr, ARG2: we should check the reference. Don't generate extra
840 checks if we're not generating code. */
843 java_check_reference (tree expr, int check)
845 if (!flag_syntax_only && check)
/* save_expr so EXPR is evaluated once, then wrap it in
   (expr == null ? throw NullPointerException : expr).  */
847 expr = save_expr (expr);
848 expr = build3 (COND_EXPR, TREE_TYPE (expr),
849 build2 (EQ_EXPR, boolean_type_node,
850 expr, null_pointer_node),
851 build_call_nary (void_type_node,
852 build_address_of (soft_nullpointer_node),
860 /* Reference an object: just like an INDIRECT_REF, but with checking. */
863 build_java_indirect_ref (tree type, tree expr, int check)
866 t = java_check_reference (expr, check);
867 t = convert (build_pointer_type (type), t);
868 return build1 (INDIRECT_REF, type, t);
871 /* Implement array indexing (either as l-value or r-value).
872 Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
873 Optionally performs bounds checking and/or test to NULL.
874 At this point, ARRAY should have been verified as an array. */
877 build_java_arrayaccess (tree array, tree type, tree index)
879 tree node, throw = NULL_TREE;
882 tree array_type = TREE_TYPE (TREE_TYPE (array));
883 tree size_exp = fold_convert (sizetype, size_in_bytes (type));
885 if (!is_array_type_p (TREE_TYPE (array)))
887 /* With the new verifier, we will see an ordinary pointer type
888 here. In this case, we just use the correct array type. */
889 array_type = build_java_array_type (type, -1);
892 if (flag_bounds_check)
/* The single unsigned compare subsumes both the negative-index and
   the index >= length checks:  */
895 * (unsigned jint) INDEX >= (unsigned jint) LEN
896 * && throw ArrayIndexOutOfBoundsException.
897 * Note this is equivalent to and more efficient than:
898 * INDEX < 0 || INDEX >= LEN && throw ... */
900 tree len = convert (unsigned_int_type_node,
901 build_java_array_length_access (array));
902 test = fold_build2 (GE_EXPR, boolean_type_node,
903 convert (unsigned_int_type_node, index),
905 if (! integer_zerop (test))
907 throw = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
908 build_java_throw_out_of_bounds_exception (index));
909 /* allows expansion within COMPOUND */
910 TREE_SIDE_EFFECTS( throw ) = 1;
914 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
915 to have the bounds check evaluated first. */
916 if (throw != NULL_TREE)
917 index = build2 (COMPOUND_EXPR, int_type_node, throw, index);
919 data_field = lookup_field (&array_type, get_identifier ("data"));
921 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
922 build_java_indirect_ref (array_type, array,
923 flag_check_references),
924 data_field, NULL_TREE);
926 /* Take the address of the data field and convert it to a pointer to
928 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
930 /* Multiply the index by the size of an element to obtain a byte
931 offset. Convert the result to a pointer to the element type. */
932 index = fold_convert (TREE_TYPE (node),
933 build2 (MULT_EXPR, sizetype,
934 fold_convert (sizetype, index),
937 /* Sum the byte offset and the address of the data field. */
938 node = fold_build2 (PLUS_EXPR, TREE_TYPE (node), node, index);
/* Overall result:  */
942 *((&array->data) + index*size_exp)
945 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
948 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
949 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
950 determine that no check is required. */
953 build_java_arraystore_check (tree array, tree object)
955 tree check, element_type, source;
956 tree array_type_p = TREE_TYPE (array);
957 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
959 if (! flag_verify_invocations)
961 /* With the new verifier, we don't track precise types. FIXME:
962 performance regression here. */
963 element_type = TYPE_NAME (object_type_node);
967 gcc_assert (is_array_type_p (array_type_p));
969 /* Get the TYPE_DECL for ARRAY's element type. */
971 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
974 gcc_assert (TREE_CODE (element_type) == TYPE_DECL
975 && TREE_CODE (object_type) == TYPE_DECL);
/* -fno-store-check: elide the runtime check entirely.  */
977 if (!flag_store_check)
978 return build1 (NOP_EXPR, array_type_p, array);
980 /* No check is needed if the element type is final. Also check that
981 element_type matches object_type, since in the bytecode
982 compilation case element_type may be the actual element type of
983 the array rather than its declared type. However, if we're doing
984 indirect dispatch, we can't do the `final' optimization. */
985 if (element_type == object_type
986 && ! flag_indirect_dispatch
987 && CLASS_FINAL (element_type))
988 return build1 (NOP_EXPR, array_type_p, array);
990 /* OBJECT might be wrapped by a SAVE_EXPR. */
991 if (TREE_CODE (object) == SAVE_EXPR)
992 source = TREE_OPERAND (object, 0);
996 /* Avoid the check if OBJECT was just loaded from the same array. */
997 if (TREE_CODE (source) == ARRAY_REF)
/* Strip the access expression down to the source array's decl.  */
1000 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
1001 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
1002 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1003 if (TREE_CODE (source) == SAVE_EXPR)
1004 source = TREE_OPERAND (source, 0);
1007 if (TREE_CODE (target) == SAVE_EXPR)
1008 target = TREE_OPERAND (target, 0);
1010 if (source == target)
1011 return build1 (NOP_EXPR, array_type_p, array);
1014 /* Build an invocation of _Jv_CheckArrayStore */
1015 check = build_call_nary (void_type_node,
1016 build_address_of (soft_checkarraystore_node),
1018 TREE_SIDE_EFFECTS (check) = 1;
1023 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1024 ARRAY_NODE. This function is used to retrieve something less vague than
1025 a pointer type when indexing the first dimension of something like [[<t>.
1026 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
1027 return unchanged. */
1030 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1033 /* We used to check to see if ARRAY_NODE really had array type.
1034 However, with the new verifier, this is not necessary, as we know
1035 that the object will be an array of the appropriate type. */
1037 return indexed_type;
1040 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1041 called with an integer code (the type of array to create), and the length
1042 of the array to create. */
1045 build_newarray (int atype_value, tree length)
1049 tree prim_type = decode_newarray_type (atype_value);
1051 = build_java_array_type (prim_type,
1052 host_integerp (length, 0) == INTEGER_CST
1053 ? tree_low_cst (length, 0) : -1);
1055 /* Pass a reference to the primitive type class and save the runtime
1057 type_arg = build_class_ref (prim_type);
1059 return build_call_nary (promote_type (type),
1060 build_address_of (soft_newarray_node),
1061 2, type_arg, length);
1064 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1065 of the dimension. */
1068 build_anewarray (tree class_type, tree length)
/* Encode a statically-known length into the array type when LENGTH is a
   compile-time integer, else use -1 (unknown).  */
1071 = build_java_array_type (class_type,
1072 host_integerp (length, 0)
1073 ? tree_low_cst (length, 0) : -1);
1075 return build_call_nary (promote_type (type),
1076 build_address_of (soft_anewarray_node),
1079 build_class_ref (class_type),
1083 /* Return a node the evaluates 'new TYPE[LENGTH]'. */
1086 build_new_array (tree type, tree length)
/* Dispatch to the primitive or reference array builder.  */
1088 if (JPRIMITIVE_TYPE_P (type))
1089 return build_newarray (encode_newarray_type (type), length);
1091 return build_anewarray (TREE_TYPE (type), length);
1094 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1095 class pointer, a number of dimensions and the matching number of
1096 dimensions. The argument list is NULL terminated. */
1099 expand_java_multianewarray (tree class_type, int ndim)
1102 tree args = build_tree_list( NULL_TREE, null_pointer_node );
/* Pop the NDIM dimension sizes off the stack; popping builds the list in
   source order since each pop prepends.  */
1104 for( i = 0; i < ndim; i++ )
1105 args = tree_cons (NULL_TREE, pop_value (int_type_node), args);
1107 args = tree_cons (NULL_TREE,
1108 build_class_ref (class_type),
1109 tree_cons (NULL_TREE,
1110 build_int_cst (NULL_TREE, ndim),
1113 push_value (build_call_list (promote_type (class_type),
1114 build_address_of (soft_multianewarray_node),
1118 /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
1119 ARRAY is an array type. May expand some bound checking and NULL
1120 pointer checking. RHS_TYPE_NODE we are going to store. In the case
1121 of the CHAR/BYTE/BOOLEAN SHORT, the type popped of the stack is an
1122 INT. In those cases, we make the conversion.
1124 if ARRAy is a reference type, the assignment is checked at run-time
1125 to make sure that the RHS can be assigned to the array element
1126 type. It is not necessary to generate this code if ARRAY is final. */
1129 expand_java_arraystore (tree rhs_type_node)
/* Sub-int integral values live on the stack as int; pop accordingly.  */
1131 tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
1132 && TYPE_PRECISION (rhs_type_node) <= 32) ?
1133 int_type_node : rhs_type_node);
1134 tree index = pop_value (int_type_node);
1135 tree array_type, array, temp, access;
1137 /* If we're processing an `aaload' we might as well just pick
1139 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1141 array_type = build_java_array_type (object_ptr_type_node, -1);
1142 rhs_type_node = object_ptr_type_node;
1145 array_type = build_java_array_type (rhs_type_node, -1);
1147 array = pop_value (array_type);
1148 array = build1 (NOP_EXPR, promote_type (array_type), array);
1150 rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
1152 flush_quick_stack ();
1154 index = save_expr (index);
1155 array = save_expr (array);
1157 /* We want to perform the bounds check (done by
1158 build_java_arrayaccess) before the type check (done by
1159 build_java_arraystore_check). So, we call build_java_arrayaccess
1160 -- which returns an ARRAY_REF lvalue -- and we then generate code
1161 to stash the address of that lvalue in a temp. Then we call
1162 build_java_arraystore_check, and finally we generate a
1163 MODIFY_EXPR to set the array element. */
1165 access = build_java_arrayaccess (array, rhs_type_node, index);
1166 temp = build_decl (VAR_DECL, NULL_TREE,
1167 build_pointer_type (TREE_TYPE (access)));
1168 java_add_local_var (temp);
1169 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
1171 build_fold_addr_expr (access)));
/* Reference stores need the runtime ArrayStoreException check.  */
1173 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1175 tree check = build_java_arraystore_check (array, rhs_node);
1176 java_add_stmt (check);
/* Finally store through the stashed element address.  */
1179 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
1180 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
1184 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1185 sure that LHS is an array type. May expand some bound checking and NULL
1187 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1188 BOOLEAN/SHORT, we push a promoted type back to the stack.
1192 expand_java_arrayload (tree lhs_type_node)
1195 tree index_node = pop_value (int_type_node);
1199 /* If we're processing an `aaload' we might as well just pick
1201 if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1203 array_type = build_java_array_type (object_ptr_type_node, -1);
1204 lhs_type_node = object_ptr_type_node;
1207 array_type = build_java_array_type (lhs_type_node, -1);
1208 array_node = pop_value (array_type);
1209 array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
/* Both values are reused below, so force single evaluation.  */
1211 index_node = save_expr (index_node);
1212 array_node = save_expr (array_node);
1214 lhs_type_node = build_java_check_indexed_type (array_node,
1216 load_node = build_java_arrayaccess (array_node,
/* Sub-int integral loads are widened to int before being pushed,
   matching the JVM stack model.  */
1219 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1220 load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1221 push_value (load_node);
1224 /* Expands .length. Makes sure that we deal with an array and may expand
1225 a NULL check on the array object. */
1228 expand_java_array_length (void)
1230 tree array = pop_value (ptr_type_node);
1231 tree length = build_java_array_length_access (array);
1233 push_value (length);
1236 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1237 either soft_monitorenter_node or soft_monitorexit_node. */
/* Returns a void call expression; OBJECT is presumably the monitor
   object argument -- the tail of the call is elided in this extract,
   confirm against upstream.  */
1240 build_java_monitor (tree call, tree object)
1242 return build_call_nary (void_type_node,
1243 build_address_of (call),
1247 /* Emit code for one of the PUSHC instructions. */
/* Pushes the constant IVAL, interpreted according to TYPE: null for a
   zero pointer, an integer constant for int/long, or a real constant
   (converted from the integer IVAL) for float/double.  */
1250 expand_java_pushc (int ival, tree type)
1253 if (type == ptr_type_node && ival == 0)
1254 value = null_pointer_node;
1255 else if (type == int_type_node || type == long_type_node)
1256 value = build_int_cst (type, ival);
1257 else if (type == float_type_node || type == double_type_node)
1260 REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1261 value = build_real (type, x);
/* Emit a RETURN_EXPR for the current method.  For a non-void TYPE the
   return value is popped from the stack and assigned to DECL_RESULT
   first.  */
1270 expand_java_return (tree type)
1272 if (type == void_type_node)
1273 java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1276 tree retval = pop_value (type);
1277 tree res = DECL_RESULT (current_function_decl);
1278 retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1280 /* Handle the situation where the native integer type is smaller
1281 than the JVM integer. It can happen for many cross compilers.
1282 The whole if expression just goes away if INT_TYPE_SIZE >= 32
1284 if (INT_TYPE_SIZE < 32
1285 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1286 < GET_MODE_SIZE (TYPE_MODE (type))))
1287 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1289 TREE_SIDE_EFFECTS (retval) = 1;
1290 java_add_stmt (build1 (RETURN_EXPR, TREE_TYPE (retval), retval));
/* Load local variable INDEX of TYPE at bytecode position PC and push a
   defensive copy of it on the quick stack.  */
1295 expand_load_internal (int index, tree type, int pc)
1298 tree var = find_local_variable (index, type, pc);
1300 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1301 on the stack. If there is an assignment to this VAR_DECL between
1302 the stack push and the use, then the wrong code could be
1303 generated. To avoid this we create a new local and copy our
1304 value into it. Then we push this new local on the stack.
1305 Hopefully this all gets optimized out. */
1306 copy = build_decl (VAR_DECL, NULL_TREE, type)
1307 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1308 && TREE_TYPE (copy) != TREE_TYPE (var))
1309 var = convert (type, var);
1310 java_add_local_var (copy);
1311 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
/* Return an ADDR_EXPR taking the address of VALUE, typed as a pointer
   to VALUE's type.  */
1317 build_address_of (tree value)
1319 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
/* Return true if TYPE or any of its superclasses declares a finalizer
   (walks the superclass chain recursively).  */
1323 class_has_finalize_method (tree type)
1325 tree super = CLASSTYPE_SUPER (type);
1327 if (super == NULL_TREE)
1328 return false; /* Every class with a real finalizer inherits */
1329 /* from java.lang.Object. */
1331 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
/* Build an allocation call for a new object of TYPE, choosing the
   finalizer-aware allocator only when the class needs finalization.  */
1335 java_create_object (tree type)
1337 tree alloc_node = (class_has_finalize_method (type)
1339 : alloc_no_finalizer_node);
1341 return build_call_nary (promote_type (type),
1342 build_address_of (alloc_node),
1343 1, build_class_ref (type));
/* Expand the `new' opcode: ensure TYPE is loaded and laid out, then
   push a call to the appropriate allocator.  */
1347 expand_java_NEW (tree type)
1351 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1352 : alloc_no_finalizer_node);
1353 if (! CLASS_LOADED_P (type))
1354 load_class (type, 1);
1355 safe_layout_class (type);
1356 push_value (build_call_nary (promote_type (type),
1357 build_address_of (alloc_node),
1358 1, build_class_ref (type)));
1361 /* This returns an expression which will extract the class of an
     object reference VALUE, by reading VALUE->vtable->class. */
1365 build_get_class (tree value)
1367 tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1368 tree vtable_field = lookup_field (&object_type_node,
1369 get_identifier ("vtable"));
1370 tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1371 build_java_indirect_ref (object_type_node, value,
1372 flag_check_references),
1373 vtable_field, NULL_TREE);
1374 return build3 (COMPONENT_REF, class_ptr_type,
1375 build1 (INDIRECT_REF, dtable_type, tmp),
1376 class_field, NULL_TREE);
1379 /* This builds the tree representation of the `instanceof' operator.
1380 It tries various tricks to optimize this in cases where types are
     known at compile time; otherwise it falls back to the runtime
     helper _Jv_IsInstanceOf (soft_instanceof_node). */
1384 build_instanceof (tree value, tree type)
1387 tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1388 tree valtype = TREE_TYPE (TREE_TYPE (value));
1389 tree valclass = TYPE_NAME (valtype);
1392 /* When compiling from bytecode, we need to ensure that TYPE has
1394 if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1396 load_class (type, 1);
1397 safe_layout_class (type);
1398 if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1399 return error_mark_node;
1401 klass = TYPE_NAME (type);
1403 if (type == object_type_node || inherits_from_p (valtype, type))
1405 /* Anything except `null' is an instance of Object. Likewise,
1406 if the object is known to be an instance of the class, then
1407 we only need to check for `null'. */
1408 expr = build2 (NE_EXPR, itype, value, null_pointer_node);
1410 else if (flag_verify_invocations
1411 && ! TYPE_ARRAY_P (type)
1412 && ! TYPE_ARRAY_P (valtype)
1413 && DECL_P (klass) && DECL_P (valclass)
1414 && ! CLASS_INTERFACE (valclass)
1415 && ! CLASS_INTERFACE (klass)
1416 && ! inherits_from_p (type, valtype)
1417 && (CLASS_FINAL (klass)
1418 || ! inherits_from_p (valtype, type)))
1420 /* The classes are from different branches of the derivation
1421 tree, so we immediately know the answer. */
1422 expr = boolean_false_node;
1424 else if (DECL_P (klass) && CLASS_FINAL (klass))
/* TYPE is final, so a simple exact-class comparison suffices:
   value != null && value->vtable->class == TYPE's class.  */
1426 tree save = save_expr (value);
1427 expr = build3 (COND_EXPR, itype,
1428 build2 (NE_EXPR, boolean_type_node,
1429 save, null_pointer_node),
1430 build2 (EQ_EXPR, itype,
1431 build_get_class (save),
1432 build_class_ref (type)),
1433 boolean_false_node);
/* General case: defer to the runtime helper.  */
1437 expr = build_call_nary (itype,
1438 build_address_of (soft_instanceof_node),
1439 2, value, build_class_ref (type));
1441 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
/* Expand the `instanceof' opcode: pop the object reference, build the
   test against TYPE, and (presumably) push the result.  */
1446 expand_java_INSTANCEOF (tree type)
1448 tree value = pop_value (object_ptr_type_node);
1449 value = build_instanceof (value, type);
/* Expand the `checkcast' opcode via the runtime helper
   soft_checkcast_node, which validates the cast of the popped value
   to TYPE.  */
1454 expand_java_CHECKCAST (tree type)
1456 tree value = pop_value (ptr_type_node);
1457 value = build_call_nary (promote_type (type),
1458 build_address_of (soft_checkcast_node),
1459 2, build_class_ref (type), value);
/* Expand the `iinc' opcode: add the immediate IVAL to int local
   LOCAL_VAR_INDEX at bytecode position PC, in place.  */
1464 expand_iinc (unsigned int local_var_index, int ival, int pc)
1466 tree local_var, res;
1467 tree constant_value;
1469 flush_quick_stack ();
1470 local_var = find_local_variable (local_var_index, int_type_node, pc);
1471 constant_value = build_int_cst (NULL_TREE, ival);
1472 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1473 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
/* Build a call to the soft (library) division/remainder routine for
   OP (TRUNC_DIV_EXPR or TRUNC_MOD_EXPR) on int or long TYPE, with
   operands OP1 and OP2 converted to TYPE.  */
1478 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1481 tree arg1 = convert (type, op1);
1482 tree arg2 = convert (type, op2);
1484 if (type == int_type_node)
1488 case TRUNC_DIV_EXPR:
1489 call = soft_idiv_node;
1491 case TRUNC_MOD_EXPR:
1492 call = soft_irem_node;
1498 else if (type == long_type_node)
1502 case TRUNC_DIV_EXPR:
1503 call = soft_ldiv_node;
1505 case TRUNC_MOD_EXPR:
1506 call = soft_lrem_node;
1514 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
/* Build a binary operation OP of TYPE on ARG1 and ARG2, handling the
   Java-specific cases (unsigned right shift, shift-count masking,
   three-way compares, float remainder, soft div/mod) before falling
   through to a plain fold_build2.  */
1519 build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
/* Unsigned right shift (>>>): shift in the unsigned type, then
   convert back.  */
1526 tree u_type = java_unsigned_type (type);
1527 arg1 = convert (u_type, arg1);
1528 arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1529 return convert (type, arg1);
/* Mask the shift count to the operand's width, as the JVM spec
   requires.  */
1533 mask = build_int_cst (NULL_TREE,
1534 TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
1535 arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
1538 case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
1539 case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
1540 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1542 tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1543 boolean_type_node, arg1, arg2);
1544 tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
1545 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1546 ifexp2, integer_zero_node,
1547 op == COMPARE_L_EXPR
1548 ? integer_minus_one_node
1549 : integer_one_node);
1550 return fold_build3 (COND_EXPR, int_type_node, ifexp1,
1551 op == COMPARE_L_EXPR ? integer_one_node
1552 : integer_minus_one_node,
/* Plain three-way compare: arg1 < arg2 ? -1 : arg1 > arg2 ? 1 : 0.  */
1556 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1558 tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
1559 tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
1560 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1561 ifexp2, integer_one_node,
1563 return fold_build3 (COND_EXPR, int_type_node,
1564 ifexp1, integer_minus_one_node, second_compare);
1566 case TRUNC_DIV_EXPR:
1567 case TRUNC_MOD_EXPR:
/* Floating-point remainder goes through fmod, widening to double
   and narrowing back if needed.  */
1568 if (TREE_CODE (type) == REAL_TYPE
1569 && op == TRUNC_MOD_EXPR)
1572 if (type != double_type_node)
1574 arg1 = convert (double_type_node, arg1);
1575 arg2 = convert (double_type_node, arg2);
1577 call = build_call_nary (double_type_node,
1578 build_address_of (soft_fmod_node),
1580 if (type != double_type_node)
1581 call = convert (type, call);
/* Integer div/mod may use the library routine to get Java
   semantics (e.g. exception on divide by zero).  */
1585 if (TREE_CODE (type) == INTEGER_TYPE
1586 && flag_use_divide_subroutine
1587 && ! flag_syntax_only)
1588 return build_java_soft_divmod (op, type, arg1, arg2);
1593 return fold_build2 (op, type, arg1, arg2);
/* Expand a binary opcode: pop the right then the left operand (note
   stack order) and push the result of build_java_binop.  */
1597 expand_java_binop (tree type, enum tree_code op)
1607 rtype = int_type_node;
1608 rarg = pop_value (rtype);
1611 rarg = pop_value (rtype);
1613 larg = pop_value (ltype);
1614 push_value (build_java_binop (op, type, larg, rarg));
1617 /* Lookup the field named NAME in *TYPEP or its super classes.
1618 If not found, return NULL_TREE.
1619 (If the *TYPEP is not found, or if the field reference is
1620 ambiguous, return error_mark_node.)
1621 If found, return the FIELD_DECL, and set *TYPEP to the
1622 class containing the field. */
1625 lookup_field (tree *typep, tree name)
1627 if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1629 load_class (*typep, 1);
1630 safe_layout_class (*typep);
1631 if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1632 return error_mark_node;
1636 tree field, binfo, base_binfo;
/* First search the fields declared directly in *TYPEP.  */
1640 for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field))
1641 if (DECL_NAME (field) == name)
1644 /* Process implemented interfaces. */
1645 save_field = NULL_TREE;
1646 for (binfo = TYPE_BINFO (*typep), i = 0;
1647 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1649 tree t = BINFO_TYPE (base_binfo);
1650 if ((field = lookup_field (&t, name)))
1652 if (save_field == field)
1654 if (save_field == NULL_TREE)
/* Two different interfaces supply the field: ambiguous.  */
1658 tree i1 = DECL_CONTEXT (save_field);
1659 tree i2 = DECL_CONTEXT (field);
1660 error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1661 IDENTIFIER_POINTER (name),
1662 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1663 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1664 return error_mark_node;
1669 if (save_field != NULL_TREE)
/* Not found here; continue with the superclass.  */
1672 *typep = CLASSTYPE_SUPER (*typep);
1677 /* Look up the field named NAME in object SELF_VALUE,
1678 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1679 SELF_VALUE is NULL_TREE if looking for a static field. */
1682 build_field_ref (tree self_value, tree self_class, tree name)
1684 tree base_class = self_class;
1685 tree field_decl = lookup_field (&base_class, name);
1686 if (field_decl == NULL_TREE)
1688 error ("field %qs not found", IDENTIFIER_POINTER (name));
1689 return error_mark_node;
1691 if (self_value == NULL_TREE)
1693 return build_static_field_ref (field_decl);
1697 tree base_type = promote_type (base_class);
1699 /* CHECK is true if self_value is not the this pointer. */
1700 int check = (! (DECL_P (self_value)
1701 && DECL_NAME (self_value) == this_identifier_node));
1703 /* Determine whether a field offset from NULL will lie within
1704 Page 0: this is necessary on those GNU/Linux/BSD systems that
1705 trap SEGV to generate NullPointerExceptions.
1707 We assume that Page 0 will be mapped with NOPERM, and that
1708 memory may be allocated from any other page, so only field
1709 offsets < pagesize are guaranteed to trap. We also assume
1710 the smallest page size we'll encounter is 4k bytes. */
1711 if (! flag_syntax_only && check && ! flag_check_references
1712 && ! flag_indirect_dispatch)
1714 tree field_offset = byte_position (field_decl);
1716 page_size = size_int (4096);
1717 check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
1720 if (base_type != TREE_TYPE (self_value))
1721 self_value = fold_build1 (NOP_EXPR, base_type, self_value);
/* Indirect dispatch: fetch the field offset from the otable at run
   time instead of hard-coding it.  */
1722 if (! flag_syntax_only && flag_indirect_dispatch)
1725 = build_int_cst (NULL_TREE, get_symbol_table_index
1726 (field_decl, NULL_TREE,
1727 &TYPE_OTABLE_METHODS (output_class)));
1729 = build4 (ARRAY_REF, integer_type_node,
1730 TYPE_OTABLE_DECL (output_class), otable_index,
1731 NULL_TREE, NULL_TREE);
/* A zero otable entry for a foreign field means the field was not
   found at link time; call the nosuchfield helper.  */
1734 if (DECL_CONTEXT (field_decl) != output_class)
1736 = build3 (COND_EXPR, TREE_TYPE (field_offset),
1737 build2 (EQ_EXPR, boolean_type_node,
1738 field_offset, integer_zero_node),
1739 build_call_nary (void_type_node,
1740 build_address_of (soft_nosuchfield_node),
1744 field_offset = fold (convert (sizetype, field_offset));
1745 self_value = java_check_reference (self_value, check);
1747 = fold_build2 (PLUS_EXPR,
1748 build_pointer_type (TREE_TYPE (field_decl)),
1749 self_value, field_offset);
1750 return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
/* Direct dispatch: an ordinary COMPONENT_REF, with an optional
   null-pointer check on SELF_VALUE.  */
1753 self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1755 return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1756 self_value, field_decl, NULL_TREE);
/* Return the LABEL_DECL for bytecode offset PC, creating and
   registering it on first use.  Labels are interned by name, so a
   second lookup at the same PC returns the same decl.  */
1761 lookup_label (int pc)
1765 if (pc > highest_label_pc_this_method)
1766 highest_label_pc_this_method = pc;
1767 ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc);
1768 name = get_identifier (buf);
1769 if (IDENTIFIER_LOCAL_VALUE (name))
1770 return IDENTIFIER_LOCAL_VALUE (name);
1773 /* The type of the address of a label is return_address_type_node. */
1774 tree decl = create_label_decl (name);
1775 LABEL_PC (decl) = pc;
1776 return pushdecl (decl);
1780 /* Generate a unique name for the purpose of loops and switches
1781 labels, and try-catch-finally blocks label or temporary variables. */
1784 generate_name (void)
1786 static int l_number = 0;
1788 ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
1790 return get_identifier (buff);
/* Build a LABEL_DECL named NAME in the current function, marked
   DECL_IGNORED_P so no debug info is emitted for it.  */
1794 create_label_decl (tree name)
1797 decl = build_decl (LABEL_DECL, name,
1798 TREE_TYPE (return_address_type_node));
1799 DECL_CONTEXT (decl) = current_function_decl;
1800 DECL_IGNORED_P (decl) = 1;
1804 /* This maps a bytecode offset (PC) to various flags. */
1805 char *instruction_bits;
/* Record that TARGET_PC is the target of a jump: make sure its label
   exists and mark the offset in instruction_bits.  */
1808 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1810 lookup_label (target_pc);
1811 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1814 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1815 where CONDITION is one of the compare operators. */
1818 expand_compare (enum tree_code condition, tree value1, tree value2,
1821 tree target = lookup_label (target_pc);
1822 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1824 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1825 build1 (GOTO_EXPR, void_type_node, target),
1826 build_java_empty_stmt ()));
1829 /* Emit code for a TEST-type opcode. */
/* Pops one value and compares it against zero (or null for pointer
   TYPE), branching to TARGET_PC on success.  */
1832 expand_test (enum tree_code condition, tree type, int target_pc)
1834 tree value1, value2;
1835 flush_quick_stack ();
1836 value1 = pop_value (type);
1837 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1838 expand_compare (condition, value1, value2, target_pc);
1841 /* Emit code for a COND-type opcode. */
/* Pops two values and compares them, branching to TARGET_PC when
   VALUE1 CONDITION VALUE2 holds.  */
1844 expand_cond (enum tree_code condition, tree type, int target_pc)
1846 tree value1, value2;
1847 flush_quick_stack ();
1848 /* note: pop values in opposite order */
1849 value2 = pop_value (type);
1850 value1 = pop_value (type);
1851 /* Maybe should check value1 and value2 for type compatibility ??? */
1852 expand_compare (condition, value1, value2, target_pc);
/* Emit an unconditional jump to the label for TARGET_PC.  */
1856 expand_java_goto (int target_pc)
1858 tree target_label = lookup_label (target_pc);
1859 flush_quick_stack ();
1860 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
/* Start a SWITCH_EXPR on SELECTOR whose default case jumps to
   DEFAULT_PC; the switch tree is returned (presumably) so callers can
   add cases with expand_java_add_case.  */
1864 expand_java_switch (tree selector, int default_pc)
1866 tree switch_expr, x;
1868 flush_quick_stack ();
1869 switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1870 NULL_TREE, NULL_TREE);
1871 java_add_stmt (switch_expr);
/* Default case: an artificial label followed by a goto to the
   default target.  */
1873 x = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, NULL_TREE,
1874 create_artificial_label ());
1875 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1877 x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1878 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
/* Append to SWITCH_EXPR a case for the constant MATCH that jumps to
   the label for TARGET_PC.  */
1884 expand_java_add_case (tree switch_expr, int match, int target_pc)
1888 value = build_int_cst (TREE_TYPE (switch_expr), match);
1890 x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE,
1891 create_artificial_label ());
1892 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1894 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1895 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
/* Pop call arguments matching ARG_TYPES off the stack, recursing so
   the last argument is popped first, and return them as a TREE_LIST
   in declaration order.  */
1899 pop_arguments (tree arg_types)
1901 if (arg_types == end_params_node)
1903 if (TREE_CODE (arg_types) == TREE_LIST)
1905 tree tail = pop_arguments (TREE_CHAIN (arg_types));
1906 tree type = TREE_VALUE (arg_types);
1907 tree arg = pop_value (type);
1909 /* We simply cast each argument to its proper type. This is
1910 needed since we lose type information coming out of the
1911 verifier. We also have to do this when we pop an integer
1912 type that must be promoted for the function call. */
1913 if (TREE_CODE (type) == POINTER_TYPE)
1914 arg = build1 (NOP_EXPR, type, arg);
1915 else if (targetm.calls.promote_prototypes (type)
1916 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1917 && INTEGRAL_TYPE_P (type))
1918 arg = convert (integer_type_node, arg);
1919 return tree_cons (NULL_TREE, arg, tail);
1924 /* Attach to PTR (a block) the declaration found in ENTRY. */
/* Hash-table traversal callback: splices the init-test flag decl in
   ENTRY into the decl chain of the BIND_EXPR (or BLOCK) PTR and
   prepends a DECL_EXPR for it to the body.  */
1927 attach_init_test_initialization_flags (void **entry, void *ptr)
1929 tree block = (tree)ptr;
1930 struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
1932 if (block != error_mark_node)
1934 if (TREE_CODE (block) == BIND_EXPR)
1936 tree body = BIND_EXPR_BODY (block);
1937 TREE_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1938 BIND_EXPR_VARS (block) = ite->value;
1939 body = build2 (COMPOUND_EXPR, void_type_node,
1940 build1 (DECL_EXPR, void_type_node, ite->value), body);
1941 BIND_EXPR_BODY (block) = body;
/* Otherwise BLOCK is a BLOCK node; same splice via the BLOCK
   accessors.  */
1945 tree body = BLOCK_SUBBLOCKS (block);
1946 TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1947 BLOCK_EXPR_DECLS (block) = ite->value;
1948 body = build2 (COMPOUND_EXPR, void_type_node,
1949 build1 (DECL_EXPR, void_type_node, ite->value), body);
1950 BLOCK_SUBBLOCKS (block) = body;
1957 /* Build an expression to initialize the class CLAS.
1958 if EXPR is non-NULL, returns an expression to first call the initializer
1959 (if it is needed) and then calls EXPR. */
1962 build_class_init (tree clas, tree expr)
1966 /* An optimization: if CLAS is a superclass of the class we're
1967 compiling, we don't need to initialize it. However, if CLAS is
1968 an interface, it won't necessarily be initialized, even if we
1970 if ((! CLASS_INTERFACE (TYPE_NAME (clas))
1971 && inherits_from_p (current_class, clas))
1972 || current_class == clas)
1975 if (always_initialize_class_p)
/* Unconditional call to the runtime initializer.  */
1977 init = build_call_nary (void_type_node,
1978 build_address_of (soft_initclass_node),
1979 1, build_class_ref (clas));
1980 TREE_SIDE_EFFECTS (init) = 1;
1984 tree *init_test_decl;
1986 init_test_decl = java_treetreehash_new
1987 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
1989 if (*init_test_decl == NULL)
1991 /* Build a declaration and mark it as a flag used to track
1992 static class initializations. */
1993 decl = build_decl (VAR_DECL, NULL_TREE,
1995 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
1996 LOCAL_CLASS_INITIALIZATION_FLAG (decl) = 1;
1997 DECL_CONTEXT (decl) = current_function_decl;
1998 DECL_FUNCTION_INIT_TEST_CLASS (decl) = clas;
1999 /* Tell the check-init code to ignore this decl when not
2000 optimizing class initialization. */
2001 if (!STATIC_CLASS_INIT_OPT_P ())
2002 DECL_BIT_INDEX (decl) = -1;
2003 DECL_INITIAL (decl) = boolean_false_node;
2004 /* Don't emit any symbolic debugging info for this decl. */
2005 DECL_IGNORED_P (decl) = 1;
2006 *init_test_decl = decl;
/* Guarded call: only call the initializer when the per-function
   flag is still false, then set the flag so the call happens at
   most once along each path.  */
2009 init = build_call_nary (void_type_node,
2010 build_address_of (soft_initclass_node),
2011 1, build_class_ref (clas));
2012 TREE_SIDE_EFFECTS (init) = 1;
2013 init = build3 (COND_EXPR, void_type_node,
2014 build2 (EQ_EXPR, boolean_type_node,
2015 *init_test_decl, boolean_false_node),
2016 init, integer_zero_node);
2017 TREE_SIDE_EFFECTS (init) = 1;
2018 init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2019 build2 (MODIFY_EXPR, boolean_type_node,
2020 *init_test_decl, boolean_true_node));
2021 TREE_SIDE_EFFECTS (init) = 1;
2024 if (expr != NULL_TREE)
2026 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2027 TREE_SIDE_EFFECTS (expr) = 1;
2035 /* Rewrite expensive calls that require stack unwinding at runtime to
2036 cheaper alternatives. The logic here performs these
2039 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2040 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
/* One entry of the rewrite table consulted by
   maybe_rewrite_invocation.  */
2046 const char *classname;
/* Original method signature to match.  */
2048 const char *signature;
/* Signature of the replacement method.  */
2049 const char *new_signature;
/* Hook that rewrites the argument list for the replacement.  */
2051 tree (*rewrite_arglist) (tree arglist);
2054 /* Add __builtin_return_address(0) to the end of an arglist. */
2058 rewrite_arglist_getcaller (tree arglist)
2061 = build_call_expr (built_in_decls[BUILT_IN_RETURN_ADDRESS],
2062 1, integer_zero_node);
/* The caller must have a real stack frame for the return address to
   be meaningful, so inhibit inlining of the current function.  */
2064 DECL_INLINE (current_function_decl) = 0;
2066 return chainon (arglist,
2067 tree_cons (NULL_TREE, retaddr,
2071 /* Add this.class to the end of an arglist. */
2074 rewrite_arglist_getclass (tree arglist)
2076 return chainon (arglist,
2077 tree_cons (NULL_TREE, build_class_ref (output_class),
/* The rewrite table: class name, method, old signature, new
   signature, access flags for the replacement, and the arglist
   rewriter.  Terminated by an all-NULL sentinel entry.  */
2081 static rewrite_rule rules[] =
2082 {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2083 "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2084 ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2085 {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2086 "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2087 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2088 {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2089 "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2090 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2091 {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2092 "()Ljava/lang/ClassLoader;",
2093 "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2094 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2096 {NULL, NULL, NULL, NULL, 0, NULL}};
2098 /* Scan the rules list for replacements for *METHOD_P and replace the
2099 args accordingly. If the rewrite results in an access to a private
2100 method, update SPECIAL.*/
2103 maybe_rewrite_invocation (tree *method_p, tree *arg_list_p,
2104 tree *method_signature_p, tree *special)
2106 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2108 *special = NULL_TREE;
2110 for (p = rules; p->classname; p++)
2112 if (get_identifier (p->classname) == context)
2114 tree method = DECL_NAME (*method_p);
2115 if (get_identifier (p->method) == method
2116 && get_identifier (p->signature) == *method_signature_p)
/* Find the replacement method; if it does not exist yet and
   we are not verifying, declare it as external.  */
2119 = lookup_java_method (DECL_CONTEXT (*method_p),
2121 get_identifier (p->new_signature));
2122 if (! maybe_method && ! flag_verify_invocations)
2125 = add_method (DECL_CONTEXT (*method_p), p->flags,
2126 method, get_identifier (p->new_signature));
2127 DECL_EXTERNAL (maybe_method) = 1;
2129 *method_p = maybe_method;
2130 gcc_assert (*method_p);
2131 *arg_list_p = p->rewrite_arglist (*arg_list_p);
2132 *method_signature_p = get_identifier (p->new_signature);
/* Signal the caller that a (private) rewrite happened.  */
2133 *special = integer_one_node;
/* Build a reference to the code of METHOD for a direct (statically
   known) call, either as a plain function address, an atable entry
   (indirect dispatch), or a load of METHOD's ncode slot from its
   class's methods array when the class is not compiled here.  */
2144 build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2145 tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2146 tree arg_list ATTRIBUTE_UNUSED, tree special)
2149 if (is_compiled_class (self_type))
2151 /* With indirect dispatch we have to use indirect calls for all
2152 publicly visible methods or gcc will use PLT indirections
2153 to reach them. We also have to use indirect dispatch for all
2154 external methods. */
2155 if (! flag_indirect_dispatch
2156 || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
2158 func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
/* Indirect dispatch: index into the atable.  */
2164 = build_int_cst (NULL_TREE,
2165 (get_symbol_table_index
2167 &TYPE_ATABLE_METHODS (output_class))));
2169 = build4 (ARRAY_REF,
2170 TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2171 TYPE_ATABLE_DECL (output_class), table_index,
2172 NULL_TREE, NULL_TREE);
2174 func = convert (method_ptr_type_node, func);
2178 /* We don't know whether the method has been (statically) compiled.
2179 Compile this code to get a reference to the method's code:
2181 SELF_TYPE->methods[METHOD_INDEX].ncode
2185 int method_index = 0;
2188 /* The method might actually be declared in some superclass, so
2189 we have to use its class context, not the caller's notion of
2190 where the method is. */
2191 self_type = DECL_CONTEXT (method);
2192 ref = build_class_ref (self_type);
2193 ref = build1 (INDIRECT_REF, class_type_node, ref);
2194 if (ncode_ident == NULL_TREE)
2195 ncode_ident = get_identifier ("ncode");
2196 if (methods_ident == NULL_TREE)
2197 methods_ident = get_identifier ("methods");
2198 ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2199 lookup_field (&class_type_node, methods_ident),
/* Find METHOD's position in its class's method list.  */
2201 for (meth = TYPE_METHODS (self_type);
2202 ; meth = TREE_CHAIN (meth))
2206 if (meth == NULL_TREE)
2207 fatal_error ("method '%s' not found in class",
2208 IDENTIFIER_POINTER (DECL_NAME (method)));
2211 method_index *= int_size_in_bytes (method_type_node);
2212 ref = fold_build2 (PLUS_EXPR, method_ptr_type_node,
2213 ref, build_int_cst (NULL_TREE, method_index));
2214 ref = build1 (INDIRECT_REF, method_type_node, ref);
2215 func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2216 ref, lookup_field (&method_type_node, ncode_ident),
/* Build an expression that fetches the vtable (dispatch table) of the
   receiver -- the first entry of ARG_LIST -- for a virtual or
   interface invocation.  */
2223 invoke_build_dtable (int is_invoke_interface, tree arg_list)
2225 tree dtable, objectref;
/* The receiver is evaluated more than once below.  */
2227 TREE_VALUE (arg_list) = save_expr (TREE_VALUE (arg_list));
2229 /* If we're dealing with interfaces and if the objectref
2230 argument is an array then get the dispatch table of the class
2231 Object rather than the one from the objectref. */
2232 objectref = (is_invoke_interface
2233 && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list)))
2234 ? build_class_ref (object_type_node) : TREE_VALUE (arg_list));
2236 if (dtable_ident == NULL_TREE)
2237 dtable_ident = get_identifier ("vtable");
2238 dtable = build_java_indirect_ref (object_type_node, objectref,
2239 flag_check_references);
2240 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2241 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2246 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2247 T. If this decl has not been seen before, it will be added to the
2248 [oa]table_methods. If it has, the existing table slot will be
/* Entries are matched on both the decl T (TREE_VALUE) and the
   SPECIAL marker (TREE_PURPOSE).  */
2252 get_symbol_table_index (tree t, tree special, tree *symbol_table)
2257 if (*symbol_table == NULL_TREE)
2259 *symbol_table = build_tree_list (special, t);
2263 method_list = *symbol_table;
/* Linear scan for an existing (t, special) entry.  */
2267 tree value = TREE_VALUE (method_list);
2268 tree purpose = TREE_PURPOSE (method_list);
2269 if (value == t && purpose == special)
2272 if (TREE_CHAIN (method_list) == NULL_TREE)
2275 method_list = TREE_CHAIN (method_list);
/* Not found: append a new entry at the tail.  */
2278 TREE_CHAIN (method_list) = build_tree_list (special, t);
/* Build the function address for a virtual call to METHOD through
   DTABLE (the receiver's vtable), either via an otable slot
   (indirect dispatch) or via DECL_VINDEX.  */
2283 build_invokevirtual (tree dtable, tree method, tree special)
2286 tree nativecode_ptr_ptr_type_node
2287 = build_pointer_type (nativecode_ptr_type_node);
2291 if (flag_indirect_dispatch)
2293 gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
2296 = build_int_cst (NULL_TREE, get_symbol_table_index
2298 &TYPE_OTABLE_METHODS (output_class)));
2299 method_index = build4 (ARRAY_REF, integer_type_node,
2300 TYPE_OTABLE_DECL (output_class),
2301 otable_index, NULL_TREE, NULL_TREE);
2305 /* We fetch the DECL_VINDEX field directly here, rather than
2306 using get_method_index(). DECL_VINDEX is the true offset
2307 from the vtable base to a method, regardless of any extra
2308 words inserted at the start of the vtable. */
2309 method_index = DECL_VINDEX (method);
2310 method_index = size_binop (MULT_EXPR, method_index,
2311 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
2312 if (TARGET_VTABLE_USES_DESCRIPTORS)
2313 method_index = size_binop (MULT_EXPR, method_index,
2314 size_int (TARGET_VTABLE_USES_DESCRIPTORS));
2317 func = fold_build2 (PLUS_EXPR, nativecode_ptr_ptr_type_node, dtable,
2318 convert (nativecode_ptr_ptr_type_node, method_index));
/* With vtable descriptors the slot itself is the callable entity;
   otherwise load the code pointer out of the slot.  */
2320 if (TARGET_VTABLE_USES_DESCRIPTORS)
2321 func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
2323 func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
/* Cached identifier for the vtable's "class" field.  */
2328 static GTY(()) tree class_ident;
/* Build the function address for an interface call to METHOD through
   DTABLE, either via a pair of itable entries (indirect dispatch) or
   via the runtime lookup helper _Jv_LookupInterfaceMethodIdx.  */
2330 build_invokeinterface (tree dtable, tree method)
2335 /* We expand invokeinterface here. */
2337 if (class_ident == NULL_TREE)
2338 class_ident = get_identifier ("class");
2340 dtable = build_java_indirect_ref (dtable_type, dtable,
2341 flag_check_references);
2342 dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2343 lookup_field (&dtable_type, class_ident), NULL_TREE);
2345 interface = DECL_CONTEXT (method);
2346 gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2347 layout_class_methods (interface);
2349 if (flag_indirect_dispatch)
/* itable entries come in (interface, index) pairs, hence the
   factor of two and the itable_index-1 / itable_index reads.  */
2352 = 2 * (get_symbol_table_index
2353 (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2355 = build4 (ARRAY_REF,
2356 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2357 TYPE_ITABLE_DECL (output_class),
2358 build_int_cst (NULL_TREE, itable_index-1),
2359 NULL_TREE, NULL_TREE);
2361 = build4 (ARRAY_REF,
2362 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2363 TYPE_ITABLE_DECL (output_class),
2364 build_int_cst (NULL_TREE, itable_index),
2365 NULL_TREE, NULL_TREE);
2366 interface = convert (class_ptr_type, interface);
2367 idx = convert (integer_type_node, idx);
/* Direct case: compile-time interface method index plus a runtime
   lookup call.  */
2371 idx = build_int_cst (NULL_TREE,
2372 get_interface_method_index (method, interface));
2373 interface = build_class_ref (interface);
2376 return build_call_nary (ptr_type_node,
2377 build_address_of (soft_lookupinterfacemethod_node),
2378 3, dtable, interface, idx);
2381 /* Expand one of the invoke_* opcodes.
2382 OPCODE is the specific opcode.
2383 METHOD_REF_INDEX is an index into the constant pool.
2384 NARGS is the number of arguments, or -1 if not specified. */
/* Expand one of the invoke_* opcodes (invokestatic, invokespecial,
   invokevirtual, invokeinterface).  OPCODE selects the variant,
   METHOD_REF_INDEX indexes the constant pool, NARGS is unused here.
   Fix in this revision: `&current_jcf' had been mangled to the
   mojibake `¤t_jcf' (mis-decoded HTML entity `&curren;') in three
   constant-pool accessor calls below; the `&' form is restored.
   NOTE(review): fragmentary extraction -- original line numbers are
   fused into the text and some lines are missing from this view.  */
2387 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
2389   tree method_signature
2390     = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
2391   tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool,
2394     = get_class_constant (current_jcf,
2395 			  COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool,
2397   const char *const self_name
2398     = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2399   tree call, func, method, arg_list, method_type;
2400   tree check = NULL_TREE;
2402   tree special = NULL_TREE;
/* Make sure the target class is loaded and laid out before we try to
   look the method up.  */
2404   if (! CLASS_LOADED_P (self_type))
2406       load_class (self_type, 1);
2407       safe_layout_class (self_type);
2408       if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2409 	fatal_error ("failed to find class '%s'", self_name);
2411   layout_class_methods (self_type);
2413   if (ID_INIT_P (method_name))
2414     method = lookup_java_constructor (self_type, method_signature);
2416     method = lookup_java_method (self_type, method_name, method_signature);
2418   /* We've found a method in a class other than the one in which it
2419      was wanted. This can happen if, for instance, we're trying to
2420      compile invokespecial super.equals().
2421      FIXME: This is a kludge. Rather than nullifying the result, we
2422      should change lookup_java_method() so that it doesn't search the
2423      superclass chain when we're BC-compiling. */
2424   if (! flag_verify_invocations
2426       && ! TYPE_ARRAY_P (self_type)
2427       && self_type != DECL_CONTEXT (method))
2430   /* We've found a method in an interface, but this isn't an interface
2432   if (opcode != OPCODE_invokeinterface
2434       && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2437   /* We've found a non-interface method but we are making an
2438      interface call. This can happen if the interface overrides a
2439      method in Object. */
2440   if (! flag_verify_invocations
2441       && opcode == OPCODE_invokeinterface
2443       && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
/* Method not found: error out, or (for BC compilation without
   verification) fabricate a dummy method decl so dispatch can still
   be generated through the symbol tables.  */
2446   if (method == NULL_TREE)
2448       if (flag_verify_invocations || ! flag_indirect_dispatch)
2450 	  error ("class '%s' has no method named '%s' matching signature '%s'",
2452 		 IDENTIFIER_POINTER (method_name),
2453 		 IDENTIFIER_POINTER (method_signature));
2457 	  int flags = ACC_PUBLIC;
2458 	  if (opcode == OPCODE_invokestatic)
2459 	    flags |= ACC_STATIC;
2460 	  if (opcode == OPCODE_invokeinterface)
2462 	      flags |= ACC_INTERFACE | ACC_ABSTRACT;
2463 	      CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2465 	  method = add_method (self_type, flags, method_name,
2467 	  DECL_ARTIFICIAL (method) = 1;
2468 	  METHOD_DUMMY (method) = 1;
2469 	  layout_class_method (self_type, NULL,
2474   /* Invoke static can't invoke static/abstract method */
2475   if (method != NULL_TREE)
2477       if (opcode == OPCODE_invokestatic)
2479 	  if (!METHOD_STATIC (method))
2481 	      error ("invokestatic on non static method");
2484 	  else if (METHOD_ABSTRACT (method))
2486 	      error ("invokestatic on abstract method");
2492 	  if (METHOD_STATIC (method))
2494 	      error ("invoke[non-static] on static method");
2500   if (method == NULL_TREE)
2502       /* If we got here, we emitted an error message above. So we
2503 	 just pop the arguments, push a properly-typed zero, and
2505       method_type = get_type_from_signature (method_signature);
2506       pop_arguments (TYPE_ARG_TYPES (method_type));
2507       if (opcode != OPCODE_invokestatic)
2508 	pop_type (self_type);
2509       method_type = promote_type (TREE_TYPE (method_type));
2510       push_value (convert (method_type, integer_zero_node));
2514   method_type = TREE_TYPE (method);
2515   arg_list = pop_arguments (TYPE_ARG_TYPES (method_type));
2516   flush_quick_stack ();
2518   maybe_rewrite_invocation (&method, &arg_list, &method_signature,
/* Select the dispatch strategy: direct call for static/special (and
   effectively-final virtual) calls, vtable/itable otherwise.  */
2522   if (opcode == OPCODE_invokestatic)
2523     func = build_known_method_ref (method, method_type, self_type,
2524 				   method_signature, arg_list, special);
2525   else if (opcode == OPCODE_invokespecial
2526 	   || (opcode == OPCODE_invokevirtual
2527 	       && (METHOD_PRIVATE (method)
2528 		   || METHOD_FINAL (method)
2529 		   || CLASS_FINAL (TYPE_NAME (self_type)))))
2531       /* If the object for the method call is null, we throw an
2532 	 exception. We don't do this if the object is the current
2533 	 method's `this'. In other cases we just rely on an
2534 	 optimization pass to eliminate redundant checks. FIXME:
2535 	 Unfortunately there doesn't seem to be a way to determine
2536 	 what the current method is right now.
2537 	 We do omit the check if we're calling <init>. */
2538       /* We use a SAVE_EXPR here to make sure we only evaluate
2539 	 the new `self' expression once. */
2540       tree save_arg = save_expr (TREE_VALUE (arg_list));
2541       TREE_VALUE (arg_list) = save_arg;
2542       check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2543       func = build_known_method_ref (method, method_type, self_type,
2544 				     method_signature, arg_list, special);
2548       tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2550       if (opcode == OPCODE_invokevirtual)
2551 	func = build_invokevirtual (dtable, method, special);
2553 	func = build_invokeinterface (dtable, method);
2556   if (TREE_CODE (func) == ADDR_EXPR)
2557     TREE_TYPE (func) = build_pointer_type (method_type);
2559     func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2561   call = build_call_list (TREE_TYPE (method_type), func, arg_list);
2562   TREE_SIDE_EFFECTS (call) = 1;
2563   call = check_for_builtin (method, call);
/* Sequence the null check (if any) before the call itself.  */
2565   if (check != NULL_TREE)
2567       call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2568       TREE_SIDE_EFFECTS (call) = 1;
2571   if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2572     java_add_stmt (call);
2576       flush_quick_stack ();
2580 /* Create a stub which will be put into the vtable but which will call
/* Build the body of a stub for native METHOD: set up a JNI frame,
   look up the underlying JNI function with _Jv_LookupJNIMethod, call
   it, optionally unwrap and return the result, and pop the frame.
   NOTE(review): fragmentary extraction -- original line numbers are
   fused into the text and several lines (e.g. declarations of
   `meth_var', `args_size', `bind') are missing from this view.  */
2584 build_jni_stub (tree method)
2586   tree jnifunc, call, args, body, method_sig, arg_types;
2587   tree jniarg0, jniarg1, jniarg2, jniarg3;
2588   tree jni_func_type, tem;
2589   tree env_var, res_var = NULL_TREE, block;
2590   tree method_args, res_type;
2596   tree klass = DECL_CONTEXT (method);
2597   int from_class = ! CLASS_FROM_SOURCE_P (klass);
2598   klass = build_class_ref (klass);
2600   gcc_assert (METHOD_NATIVE (method) && flag_jni);
2602   DECL_ARTIFICIAL (method) = 1;
2603   DECL_EXTERNAL (method) = 0;
/* Local `env' holds the JNIEnv*; `res' (if non-void) the result.  */
2605   env_var = build_decl (VAR_DECL, get_identifier ("env"), ptr_type_node);
2606   DECL_CONTEXT (env_var) = method;
2608   if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2610       res_var = build_decl (VAR_DECL, get_identifier ("res"),
2611 			    TREE_TYPE (TREE_TYPE (method)));
2612       DECL_CONTEXT (res_var) = method;
2613       TREE_CHAIN (env_var) = res_var;
/* Static-lifetime cache for the looked-up JNI function pointer, so
   the lookup happens only on the first call.  */
2616   meth_var = build_decl (VAR_DECL, get_identifier ("meth"), ptr_type_node);
2617   TREE_STATIC (meth_var) = 1;
2618   TREE_PUBLIC (meth_var) = 0;
2619   DECL_EXTERNAL (meth_var) = 0;
2620   DECL_CONTEXT (meth_var) = method;
2621   DECL_ARTIFICIAL (meth_var) = 1;
2622   DECL_INITIAL (meth_var) = null_pointer_node;
2623   TREE_USED (meth_var) = 1;
2624   chainon (env_var, meth_var);
2625   build_result_decl (method);
2627   /* One strange way that the front ends are different is that they
2628      store arguments differently. */
2630     method_args = DECL_ARGUMENTS (method);
2632     method_args = BLOCK_EXPR_DECLS (DECL_FUNCTION_BODY (method));
2633   block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2634   TREE_SIDE_EFFECTS (block) = 1;
2635   /* When compiling from source we don't set the type of the block,
2636      because that will prevent patch_return from ever being run. */
2638     TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));
2640   /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2641   body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2642 		 build_call_nary (ptr_type_node,
2643 				  build_address_of (soft_getjnienvnewframe_node),
2645   CAN_COMPLETE_NORMALLY (body) = 1;
2647   /* All the arguments to this method become arguments to the
2648      underlying JNI function. If we had to wrap object arguments in a
2649      special way, we would do that here. */
2651   for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem))
2653       int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2654 #ifdef PARM_BOUNDARY
2655       arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2658       args_size += (arg_bits / BITS_PER_UNIT);
2660       args = tree_cons (NULL_TREE, tem, args);
2662   args = nreverse (args);
2663   arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2665   /* For a static method the second argument is the class. For a
2666      non-static method the second argument is `this'; that is already
2667      available in the argument list. */
2668   if (METHOD_STATIC (method))
2670       args_size += int_size_in_bytes (TREE_TYPE (klass));
2671       args = tree_cons (NULL_TREE, klass, args);
2672       arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2675   /* The JNIEnv structure is the first argument to the JNI function. */
2676   args_size += int_size_in_bytes (TREE_TYPE (env_var));
2677   args = tree_cons (NULL_TREE, env_var, args);
2678   arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2680   /* We call _Jv_LookupJNIMethod to find the actual underlying
2681      function pointer. _Jv_LookupJNIMethod will throw the appropriate
2682      exception if this function is not found at runtime. */
2683   method_sig = build_java_signature (TREE_TYPE (method));
2685   jniarg1 = build_utf8_ref (DECL_NAME (method));
2686   jniarg2 = build_utf8_ref (unmangle_classname
2687 			    (IDENTIFIER_POINTER (method_sig),
2688 			     IDENTIFIER_LENGTH (method_sig)));
2689   jniarg3 = build_int_cst (NULL_TREE, args_size);
2691   tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2693 #ifdef MODIFY_JNI_METHOD_CALL
2694   tem = MODIFY_JNI_METHOD_CALL (tem);
2697   jni_func_type = build_pointer_type (tem);
/* meth_var != NULL ? meth_var : (meth_var = _Jv_LookupJNIMethod (...)),
   i.e. lazy, cached lookup.  */
2699   jnifunc = build3 (COND_EXPR, ptr_type_node,
2701 		    build2 (MODIFY_EXPR, ptr_type_node, meth_var,
2702 			    build_call_nary (ptr_type_node,
2704 					     (soft_lookupjnimethod_node),
2707 					     jniarg2, jniarg3)));
2709   /* Now we make the actual JNI call via the resulting function
2711   call = build_call_list (TREE_TYPE (TREE_TYPE (method)),
2712 			  build1 (NOP_EXPR, jni_func_type, jnifunc),
2715   /* If the JNI call returned a result, capture it here. If we had to
2716      unwrap JNI object results, we would do that here. */
2717   if (res_var != NULL_TREE)
2719       /* If the call returns an object, it may return a JNI weak
2720 	 reference, in which case we must unwrap it. */
2721       if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2722 	call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2723 				build_address_of (soft_unwrapjni_node),
2725       call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2729   TREE_SIDE_EFFECTS (call) = 1;
2730   CAN_COMPLETE_NORMALLY (call) = 1;
2732   body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2733   TREE_SIDE_EFFECTS (body) = 1;
2735   /* Now free the environment we allocated. */
2736   call = build_call_nary (ptr_type_node,
2737 			  build_address_of (soft_jnipopsystemframe_node),
2739   TREE_SIDE_EFFECTS (call) = 1;
2740   CAN_COMPLETE_NORMALLY (call) = 1;
2741   body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2742   TREE_SIDE_EFFECTS (body) = 1;
2744   /* Finally, do the return. */
2745   res_type = void_type_node;
2746   if (res_var != NULL_TREE)
2749       gcc_assert (DECL_RESULT (method));
2750       /* Make sure we copy the result variable to the actual
2751 	 result. We use the type of the DECL_RESULT because it
2752 	 might be different from the return type of the function:
2753 	 it might be promoted. */
2754       drt = TREE_TYPE (DECL_RESULT (method));
2755       if (drt != TREE_TYPE (res_var))
2756 	res_var = build1 (CONVERT_EXPR, drt, res_var);
2757       res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2758       TREE_SIDE_EFFECTS (res_var) = 1;
2761   body = build2 (COMPOUND_EXPR, void_type_node, body,
2762 		 build1 (RETURN_EXPR, res_type, res_var));
2763   TREE_SIDE_EFFECTS (body) = 1;
2765   /* Prepend class initialization for static methods reachable from
2767   if (METHOD_STATIC (method)
2768       && (! METHOD_PRIVATE (method)
2769 	  || INNER_CLASS_P (DECL_CONTEXT (method))))
2771       tree init = build_call_expr (soft_initclass_node, 1,
2773       body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2774       TREE_SIDE_EFFECTS (body) = 1;
2777   bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2783 /* Given lvalue EXP, return a volatile expression that references the
/* Given lvalue EXP, return an equivalent lvalue whose accesses are
   volatile: take EXP's address, cast it to pointer-to-volatile, and
   re-dereference.  NOTE(review): fragmentary extraction -- the
   declaration line of `v_type' and the final return are missing from
   this view.  */
2787 java_modify_addr_for_volatile (tree exp)
2789   tree exp_type = TREE_TYPE (exp);
2791     = build_qualified_type (exp_type,
2792 			    TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2793   tree addr = build_fold_addr_expr (exp);
2794   v_type = build_pointer_type (v_type);
2795   addr = fold_convert (v_type, addr);
2796   exp = build_fold_indirect_ref (addr);
2801 /* Expand an operation to extract from or store into a field.
2802 IS_STATIC is 1 iff the field is static.
2803 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2804 FIELD_REF_INDEX is an index into the constant pool. */
/* Expand a get/put field operation (getfield, putfield, getstatic,
   putstatic).  IS_STATIC is 1 iff the field is static; IS_PUTTING is
   1 for a store, 0 for a load; FIELD_REF_INDEX indexes the constant
   pool.  Volatile fields are accessed through a volatile-qualified
   lvalue and bracketed with __sync_synchronize barriers.
   Fix in this revision: `&current_jcf' had been mangled to the
   mojibake `¤t_jcf' (mis-decoded HTML entity `&curren;') in three
   constant-pool accessor calls below; the `&' form is restored.
   NOTE(review): fragmentary extraction -- original line numbers are
   fused into the text and some lines are missing from this view.  */
2807 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
2810     = get_class_constant (current_jcf,
2811 			  COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
2813   const char *self_name
2814     = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2815   tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
2816   tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
2818   tree field_type = get_type_from_signature (field_signature);
2819   tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2822   tree original_self_type = self_type;
2826   if (! CLASS_LOADED_P (self_type))
2827     load_class (self_type, 1);
2828   field_decl = lookup_field (&self_type, field_name);
2829   if (field_decl == error_mark_node)
/* Field not found: when not verifying, fabricate a dummy field decl
   so BC compilation can proceed; otherwise report the error.  */
2833   else if (field_decl == NULL_TREE)
2835       if (! flag_verify_invocations)
2837 	  int flags = ACC_PUBLIC;
2839 	    flags |= ACC_STATIC;
2840 	  self_type = original_self_type;
2841 	  field_decl = add_field (original_self_type, field_name,
2843 	  DECL_ARTIFICIAL (field_decl) = 1;
2844 	  DECL_IGNORED_P (field_decl) = 1;
2846 	  /* FIXME: We should be pessimistic about volatility. We
2847 	     don't know one way or another, but this is safe.
2848 	     However, doing this has bad effects on code quality. We
2849 	     need to look at better ways to do this. */
2850 	  TREE_THIS_VOLATILE (field_decl) = 1;
2855 	  error ("missing field '%s' in '%s'",
2856 		 IDENTIFIER_POINTER (field_name), self_name);
2860   else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2862       error ("mismatching signature for field '%s' in '%s'",
2863 	     IDENTIFIER_POINTER (field_name), self_name);
/* For instance fields, the object reference is on the stack.  */
2866   field_ref = is_static ? NULL_TREE : pop_value (self_type);
2870       push_value (convert (field_type, integer_zero_node));
2871       flush_quick_stack ();
2875   field_ref = build_field_ref (field_ref, self_type, field_name);
2877       && ! flag_indirect_dispatch)
2879       tree context = DECL_CONTEXT (field_ref);
2880       if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2881 	field_ref = build_class_init (context, field_ref);
2883 	field_ref = build_class_init (self_type, field_ref);
/* Store path: check final-field rules, then emit the assignment.  */
2887       flush_quick_stack ();
2888       if (FIELD_FINAL (field_decl))
2890 	  if (DECL_CONTEXT (field_decl) != current_class)
2891 	    error ("assignment to final field %q+D not in field's class",
2893 	  /* We used to check for assignments to final fields not
2894 	     occurring in the class initializer or in a constructor
2895 	     here. However, this constraint doesn't seem to be
2896 	     enforced by the JVM. */
2899       if (TREE_THIS_VOLATILE (field_decl))
2900 	field_ref = java_modify_addr_for_volatile (field_ref);
2902       modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2903 			    field_ref, new_value);
2905       if (TREE_THIS_VOLATILE (field_decl))
2907 	  (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2909       java_add_stmt (modify_expr);
/* Load path: read through a temporary so volatile semantics and
   barriers apply to a single access.  */
2913       tree temp = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2914       java_add_local_var (temp);
2916       if (TREE_THIS_VOLATILE (field_decl))
2917 	field_ref = java_modify_addr_for_volatile (field_ref);
2920 	= build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2921       java_add_stmt (modify_expr);
2923       if (TREE_THIS_VOLATILE (field_decl))
2925 	  (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2929   TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
/* Restore the verifier's recorded type state at LABEL: set the stack
   pointer from the saved vector length (minus the locals count) and
   copy each saved type into type_map[].  */
2933 load_type_state (tree label)
2936   tree vec = LABEL_TYPE_STATE (label);
2937   int cur_length = TREE_VEC_LENGTH (vec);
2938   stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2939   for (i = 0; i < cur_length; i++)
2940     type_map [i] = TREE_VEC_ELT (vec, i);
2943 /* Go over METHOD's bytecode and note instruction starts in
2944 instruction_bits[]. */
/* Scan METHOD's bytecode once, marking instruction starts and branch
   targets in instruction_bits[].  The PRE_* macros below drive the
   javaop.def opcode table: they only consume immediates and call
   NOTE_LABEL for every branch target; no code is generated.
   NOTE(review): fragmentary extraction -- several lines, including
   declarations and the closing of the dispatch loop, are missing
   from this view.  */
2947 note_instructions (JCF *jcf, tree method)
2950   unsigned char* byte_ops;
2951   long length = DECL_CODE_LENGTH (method);
2956 #undef RET /* Defined by config/i386/i386.h */
2958 #define BCODE	byte_ops
2959 #define BYTE_type_node byte_type_node
2960 #define SHORT_type_node short_type_node
2961 #define INT_type_node int_type_node
2962 #define LONG_type_node long_type_node
2963 #define CHAR_type_node char_type_node
2964 #define PTR_type_node ptr_type_node
2965 #define FLOAT_type_node float_type_node
2966 #define DOUBLE_type_node double_type_node
2967 #define VOID_type_node void_type_node
/* Index-immediate readers also record that an index was seen.  */
2968 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
2969 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
2970 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
2971 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
2973 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
2975   JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
2976   byte_ops = jcf->read_ptr;
/* One flag byte per bytecode PC, plus one for the end.  */
2977   instruction_bits = xrealloc (instruction_bits, length + 1);
2978   memset (instruction_bits, 0, length + 1);
2980   /* This pass figures out which PC can be the targets of jumps. */
2981   for (PC = 0; PC < length;)
2983       int oldpc = PC; /* PC at instruction start. */
2984       instruction_bits [PC] |= BCODE_INSTRUCTION_START;
2985       switch (byte_ops[PC++])
2987 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
2989 	  PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
2992 #define NOTE_LABEL(PC) note_label(oldpc, PC)
2994 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
2995 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
2996 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
2997 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
2998 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
2999 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3000 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3001 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3003 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3004   PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3005 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3006   ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3007 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3008 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3009 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3010 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3012 /* two forms of wide instructions */
3013 #define PRE_SPECIAL_WIDE(IGNORE) \
3015     int modified_opcode = IMMEDIATE_u1; \
3016     if (modified_opcode == OPCODE_iinc)	\
3018 	(void) IMMEDIATE_u2;	/* indexbyte1 and indexbyte2 */ \
3019 	(void) IMMEDIATE_s2;	/* constbyte1 and constbyte2 */ \
3023 	(void) IMMEDIATE_u2;	/* indexbyte1 and indexbyte2 */ \
3027 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3029 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3031 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3032 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3033   PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3034 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
3035 #define PRE_ARRAY_STORE(TYPE) /* nothing */
3036 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3037 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3038 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3039 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3040 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
/* Branch-family opcodes: record the (relative) target as a label.  */
3042 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3043 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3044 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3045   saw_index = 0;  INT_temp = (OPERAND_VALUE); \
3046   if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);
3047 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3048   saw_index = 0;  INT_temp = (OPERAND_VALUE); \
3050   if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);
3052 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE)  (void)(OPERAND_VALUE)
/* Switch opcodes: 4-byte alignment padding, then record the default
   and every case target.  */
3054 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3055   PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3057 #define PRE_LOOKUP_SWITCH						\
3058   { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4; \
3059     NOTE_LABEL (default_offset+oldpc);					\
3061       while (--npairs >= 0) {						\
3062 	jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4;			\
3063 	jint offset = IMMEDIATE_s4;					\
3064 	NOTE_LABEL (offset+oldpc); }					\
3067 #define PRE_TABLE_SWITCH				\
3068   { jint default_offset = IMMEDIATE_s4;			\
3069     jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4;	\
3070     NOTE_LABEL (default_offset+oldpc);			\
3072       while (low++ <= high) {				\
3073 	jint offset = IMMEDIATE_s4;			\
3074 	NOTE_LABEL (offset+oldpc); }			\
3077 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3078 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3079 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3080   (void)(IMMEDIATE_u2); \
3081   PC += 2 * IS_INTERFACE /* for invokeinterface */;
3083 #include "javaop.def"
/* Translate METHOD's bytecode to GENERIC: note line numbers, verify,
   then walk the bytecode emitting labels, line info, and statements
   via process_jvm_instruction.  Unverified (dead) regions are turned
   into nops and reported.
   NOTE(review): fragmentary extraction -- some lines (declarations,
   nop-overwrite statement, closing braces) are missing from this
   view.  */
3090 expand_byte_code (JCF *jcf, tree method)
3094   const unsigned char *linenumber_pointer;
3095   int dead_code_index = -1;
3096   unsigned char* byte_ops;
3097   long length = DECL_CODE_LENGTH (method);
3100   JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3101   byte_ops = jcf->read_ptr;
3103   /* We make an initial pass of the line number table, to note
3104      which instructions have associated line number entries. */
3105   linenumber_pointer = linenumber_table;
3106   for (i = 0; i < linenumber_count; i++)
3108       int pc = GET_u2 (linenumber_pointer);
3109       linenumber_pointer += 4;
3111 	warning (0, "invalid PC in line number table");
/* A second entry for the same PC marks it as multi-line.  */
3114 	  if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3115 	    instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3116 	  instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
3120   if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3123   promote_arguments ();
3124   cache_this_class_ref (method);
3125   cache_cpool_data_ref ();
3127   /* Translate bytecodes.  */
3128   linenumber_pointer = linenumber_table;
3129   for (PC = 0; PC < length;)
/* Emit a label and restore verifier type state at branch targets
   and at the method entry.  */
3131       if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3133 	  tree label = lookup_label (PC);
3134           flush_quick_stack ();
3135 	  if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3136 	    java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
3137 	  if (LABEL_VERIFIED (label) || PC == 0)
3138 	    load_type_state (label);
3141       if (! (instruction_bits [PC] & BCODE_VERIFIED))
3143 	  if (dead_code_index == -1)
3145 	      /* This is the start of a region of unreachable bytecodes.
3146                  They still need to be processed in order for EH ranges
3147                  to get handled correctly.  However, we can simply
3148                  replace these bytecodes with nops.  */
3149 	      dead_code_index = PC;
3152           /* Turn this bytecode into a nop.  */
3157 	  if (dead_code_index != -1)
3159 	      /* We've just reached the end of a region of dead code.  */
3161 		warning (0, "unreachable bytecode from %d to before %d",
3162 			 dead_code_index, PC);
3163 	      dead_code_index = -1;
3167       /* Handle possible line number entry for this PC.
3169 	 This code handles out-of-order and multiple linenumbers per PC,
3170 	 but is optimized for the case of line numbers increasing
3171 	 monotonically with PC. */
3172       if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
3174 	  if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3175 	      || GET_u2 (linenumber_pointer) != PC)
3176 	    linenumber_pointer = linenumber_table;
3177 	  while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3179 	      int pc = GET_u2 (linenumber_pointer);
3180 	      linenumber_pointer += 4;
3183 		  int line = GET_u2 (linenumber_pointer - 2);
3184 #ifdef USE_MAPPED_LOCATION
3185 		  input_location = linemap_line_start (&line_table, line, 1);
3187 		  input_location.line = line;
3189 		  if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
3194       maybe_pushlevels (PC);
3195       PC = process_jvm_instruction (PC, byte_ops, length);
3196       maybe_poplevels (PC);
3199   uncache_this_class_ref (method);
3201   if (dead_code_index != -1)
3203       /* We've just reached the end of a region of dead code.  */
3205 	warning (0, "unreachable bytecode from %d to the end of the method",
/* Push constant-pool entry INDEX of JCF onto the quick stack:
   strings become interned String references, classes become Class
   references, everything else goes through get_constant.
   NOTE(review): fragmentary extraction -- declarations and the final
   push are missing from this view.  */
3211 java_push_constant_from_pool (JCF *jcf, int index)
3214   if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3217       name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3218       index = alloc_name_constant (CONSTANT_String, name);
3219       c = build_ref_from_constant_pool (index);
3220       c = convert (promote_type (string_type_node), c);
3222   else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3223 	   || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3225       tree record = get_class_constant (jcf, index);
3226       c = build_class_ref (record);
3229     c = get_constant (jcf, index);
/* Expand the single bytecode instruction at PC in BYTE_OPS and return
   the PC of the next instruction.  The opcode dispatch is generated
   from javaop.def via the operation macros defined below.
   NOTE(review): fragmentary extraction -- original line numbers are
   fused into the text and several lines (including parts of some
   macro bodies and the switch closing) are missing from this view.  */
3234 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3235 			 long length ATTRIBUTE_UNUSED)
3237   const char *opname; /* Temporary ??? */
3238   int oldpc = PC; /* PC at instruction start. */
3240   /* If the instruction is at the beginning of an exception handler,
3241      replace the top of the stack with the thrown object reference. */
3242   if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3244       /* Note that the verifier will not emit a type map at all for
3245 	 dead exception handlers.  In this case we just ignore the
3247       if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3249 	  tree type = pop_type (promote_type (throwable_type_node));
3250 	  push_value (build_exception_object_ref (type));
3254   switch (byte_ops[PC++])
3256 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3259 	OPKIND(OPERAND_TYPE, OPERAND_VALUE);	\
3262 #define RET(OPERAND_TYPE, OPERAND_VALUE) 				\
3264     int saw_index = 0;							\
3265     int index     = OPERAND_VALUE;					\
3267       (find_local_variable (index, return_address_type_node, oldpc));	\
3270 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3272     /* OPERAND_VALUE may have side-effects on PC */	\
3273     int opvalue = OPERAND_VALUE;			\
3274     build_java_jsr (oldpc + opvalue, PC);		\
3277 /* Push a constant onto the stack. */
3278 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3279   { int saw_index = 0;  int ival = (OPERAND_VALUE); \
3280     if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3281     else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3283 /* internal macro added for use by the WIDE case */
3284 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3285   expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3287 /* Push local variable onto the opcode stack. */
3288 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3290     /* have to do this since OPERAND_VALUE may have side-effects */ \
3291     int opvalue = OPERAND_VALUE; \
3292     LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3295 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3296   expand_java_return (OPERAND_TYPE##_type_node)
3298 #define REM_EXPR TRUNC_MOD_EXPR
3299 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3300   expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3302 #define FIELD(IS_STATIC, IS_PUT) \
3303   expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3305 #define TEST(OPERAND_TYPE, CONDITION) \
3306   expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3308 #define COND(OPERAND_TYPE, CONDITION) \
3309   expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3311 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3312   BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3314 #define BRANCH_GOTO(OPERAND_VALUE) \
3315   expand_java_goto (oldpc + OPERAND_VALUE)
3317 #define BRANCH_CALL(OPERAND_VALUE) \
3318   expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3321 #define BRANCH_RETURN(OPERAND_VALUE) \
3323     tree type = OPERAND_TYPE##_type_node; \
3324     tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3325     expand_java_ret (value); \
3329 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3330 	  fprintf (stderr, "%3d: %s ", oldpc, opname); \
3331 	  fprintf (stderr, "(not implemented)\n")
3332 #define NOT_IMPL1(OPERAND_VALUE) \
3333 	  fprintf (stderr, "%3d: %s ", oldpc, opname); \
3334 	  fprintf (stderr, "(not implemented)\n")
3336 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3338 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3340 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3342 #define STACK_SWAP(COUNT) java_stack_swap()
3344 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3345 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3346 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
/* Switch opcodes: skip the 4-byte alignment padding, then build a
   SWITCH_EXPR and add one case per table/pair entry.  */
3348 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3349   PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3351 #define LOOKUP_SWITCH \
3352   { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4; \
3353     tree selector = pop_value (INT_type_node); \
3354     tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3355     while (--npairs >= 0) \
3357 	jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3358 	expand_java_add_case (switch_expr, match, oldpc + offset); \
3362 #define TABLE_SWITCH \
3363   { jint default_offset = IMMEDIATE_s4; \
3364     jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3365     tree selector = pop_value (INT_type_node); \
3366     tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3367     for (; low <= high; low++) \
3369         jint offset = IMMEDIATE_s4; \
3370 	expand_java_add_case (switch_expr, low, oldpc + offset); \
3374 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3375   { int opcode = byte_ops[PC-1]; \
3376     int method_ref_index = IMMEDIATE_u2; \
3378     if (IS_INTERFACE) { nargs = IMMEDIATE_u1;  (void) IMMEDIATE_u1; } \
3380     expand_invoke (opcode, method_ref_index, nargs); \
3383 /* Handle new, checkcast, instanceof */
3384 #define OBJECT(TYPE, OP) \
3385   expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
3387 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3389 #define ARRAY_LOAD(OPERAND_TYPE) 			\
3391     expand_java_arrayload( OPERAND_TYPE##_type_node );	\
3394 #define ARRAY_STORE(OPERAND_TYPE)			\
3396     expand_java_arraystore( OPERAND_TYPE##_type_node );	\
3399 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
3400 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3401 #define ARRAY_NEW_PTR() \
3402     push_value (build_anewarray (get_class_constant (current_jcf, \
3404 				 pop_value (int_type_node)));
3405 #define ARRAY_NEW_NUM() \
3407     int atype = IMMEDIATE_u1; \
3408     push_value (build_newarray (atype, pop_value (int_type_node)));\
3410 #define ARRAY_NEW_MULTI() \
3412     tree class = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3413     int  ndims = IMMEDIATE_u1; \
3414     expand_java_multianewarray( class, ndims ); \
3417 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3418   push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3419 			   pop_value (OPERAND_TYPE##_type_node)));
3421 #define CONVERT2(FROM_TYPE, TO_TYPE)					 \
3423     push_value (build1 (NOP_EXPR, int_type_node,			 \
3424 			(convert (TO_TYPE##_type_node,			 \
3425 				  pop_value (FROM_TYPE##_type_node))))); \
3428 #define CONVERT(FROM_TYPE, TO_TYPE)				\
3430     push_value (convert (TO_TYPE##_type_node,			\
3431 			 pop_value (FROM_TYPE##_type_node)));	\
3434 /* internal macro added for use by the WIDE case 
3435    Added TREE_TYPE (decl) assignment, apbianco  */
3436 #define STORE_INTERNAL(OPTYPE, OPVALUE)				\
3439     int index = OPVALUE;					\
3440     tree type = OPTYPE;						\
3441     value = pop_value (type);					\
3442     type = TREE_TYPE (value);					\
3443     decl = find_local_variable (index, type, oldpc);		\
3444     set_local_type (index, type);				\
3445     java_add_stmt (build2 (MODIFY_EXPR, type, decl, value));	\
3448 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3450     /* have to do this since OPERAND_VALUE may have side-effects */ \
3451     int opvalue = OPERAND_VALUE; \
3452     STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3455 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3456   SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3458 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3459 #define SPECIAL_EXIT(IGNORED)  MONITOR_OPERATION (soft_monitorexit_node)
3461 #define MONITOR_OPERATION(call)			\
3463     tree o = pop_value (ptr_type_node);		\
3465     flush_quick_stack ();			\
3466     c = build_java_monitor (call, o);		\
3467     TREE_SIDE_EFFECTS (c) = 1;			\
3468     java_add_stmt (c);				\
3471 #define SPECIAL_IINC(IGNORED) \
3473 unsigned int local_var_index = IMMEDIATE_u1; \
3474 int ival = IMMEDIATE_s1; \
3475 expand_iinc(local_var_index, ival, oldpc); \
/* Expand a `wide' prefix: read the widened sub-opcode and its 16-bit
   local-variable index, then dispatch.  iinc additionally carries a
   16-bit signed increment; the load and store forms reuse the
   LOAD_INTERNAL / STORE_INTERNAL expansions with the wide index.
   (Fixes the diagnostic typo "unrecogized" -> "unrecognized".)  */
#define SPECIAL_WIDE(IGNORED) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    unsigned int local_var_index = IMMEDIATE_u2; \
    switch (modified_opcode) \
      { \
      case OPCODE_iinc: \
	{ \
	  int ival = IMMEDIATE_s2; \
	  expand_iinc (local_var_index, ival, oldpc); \
	  break; \
	} \
      case OPCODE_iload: \
      case OPCODE_lload: \
      case OPCODE_fload: \
      case OPCODE_dload: \
      case OPCODE_aload: \
	{ \
	  /* duplicate code from LOAD macro */ \
	  LOAD_INTERNAL (operand_type[modified_opcode], local_var_index); \
	  break; \
	} \
      case OPCODE_istore: \
      case OPCODE_lstore: \
      case OPCODE_fstore: \
      case OPCODE_dstore: \
      case OPCODE_astore: \
	{ \
	  STORE_INTERNAL (operand_type[modified_opcode], local_var_index); \
	  break; \
	} \
      default: \
	error ("unrecognized wide sub-instruction"); \
      } \
  }
/* athrow: pop the exception object and raise it.  */
#define SPECIAL_THROW(IGNORED) \
  build_java_athrow (pop_value (throwable_type_node))
/* Sub-operations with no direct expansion here.  */
#define SPECIAL_BREAK NOT_IMPL1
#define IMPL          NOT_IMPL
/* javaop.def supplies one entry per JVM opcode, expanding to the
   macros defined above.  */
#include "javaop.def"
	  /* NOTE(review): presumably the default of the opcode switch —
	     reports bytes javaop.def does not cover; confirm against the
	     elided enclosing code.  */
	  fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
/* Return the opcode at PC in the code section pointed to by
   CODE_OFFSET in JCF, without disturbing the reader's position.  */
static unsigned char
peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
  unsigned char opcode;
  /* Remember where the JCF reader currently points so it can be
     restored after the peek.  */
  long absolute_offset = (long)JCF_TELL (jcf);
  JCF_SEEK (jcf, code_offset);
  opcode = jcf->read_ptr [pc];
  /* Restore the saved reader position.  */
  JCF_SEEK (jcf, absolute_offset);
3543 /* Some bytecode compilers are emitting accurate LocalVariableTable
3544 attributes. Here's an example:
3549 Attribute "LocalVariableTable"
3550 slot #<n>: ... (PC: PC+1 length: L)
3552 This is accurate because the local in slot <n> really exists after
3553 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3555 This procedure recognizes this situation and extends the live range
3556 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3557 length of the store instruction.)
3559 This function is used by `give_name_to_locals' so that a local's
3560 DECL features a DECL_LOCAL_START_PC such that the first related
3561 store operation will use DECL as a destination, not an unrelated
3562 temporary created for the occasion.
   This function uses a global (instruction_bits) that
   `note_instructions' should have allocated and filled properly.  */
maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
		       int start_pc, int slot)
  int first, index, opcode;
  /* Find last previous instruction and remember it */
  for (pc = start_pc-1; pc; pc--)
    if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
  /* Retrieve the instruction, handle `wide'. */
  opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
  if (opcode == OPCODE_wide)
      opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
  /* Map the short-form <t>store_<n> opcodes to their family base so the
     implicit slot can be recovered below as (opcode - first).  */
    case OPCODE_astore_0:
    case OPCODE_astore_1:
    case OPCODE_astore_2:
    case OPCODE_astore_3:
      first = OPCODE_astore_0;
    case OPCODE_istore_0:
    case OPCODE_istore_1:
    case OPCODE_istore_2:
    case OPCODE_istore_3:
      first = OPCODE_istore_0;
    case OPCODE_lstore_0:
    case OPCODE_lstore_1:
    case OPCODE_lstore_2:
    case OPCODE_lstore_3:
      first = OPCODE_lstore_0;
    case OPCODE_fstore_0:
    case OPCODE_fstore_1:
    case OPCODE_fstore_2:
    case OPCODE_fstore_3:
      first = OPCODE_fstore_0;
    case OPCODE_dstore_0:
    case OPCODE_dstore_1:
    case OPCODE_dstore_2:
    case OPCODE_dstore_3:
      first = OPCODE_dstore_0;
  /* Long-form <t>store: the slot index is an explicit operand byte.  */
  index = peek_opcode_at_pc (jcf, code_offset, pc);
      /* NOTE(review): for `wide' the two index bytes are combined as
	 (second << 8) + first here, but JVM operands are big-endian
	 (first byte is the high byte) — verify the byte order.  */
      int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
      index = (other << 8) + index;
  /* Now we decide: first >0 means we have a <t>store_<n>, index >0
     means we have a <t>store. */
  if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3653 /* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
3654 order, as specified by Java Language Specification.
3656 The problem is that while expand_expr will evaluate its sub-operands in
3657 left-to-right order, for variables it will just return an rtx (i.e.
3658 an lvalue) for the variable (rather than an rvalue). So it is possible
3659 that a later sub-operand will change the register, and when the
3660 actual operation is done, it will use the new value, when it should
3661 have used the original value.
3663 We fix this by using save_expr. This forces the sub-operand to be
3664 copied into a fresh virtual register,
3666 For method invocation, we modify the arguments so that a
3667 left-to-right order evaluation is performed. Saved expressions
3668 will, in CALL_EXPR order, be reused when the call will be expanded.
3670 We also promote outgoing args if needed. */
force_evaluation_order (tree node)
  /* Nothing to expand when only checking syntax.  */
  if (flag_syntax_only)
  /* Only calls (possibly wrapped in a COMPOUND_EXPR, see the ctor note
     below) need their argument evaluation order pinned down.  */
  if (TREE_CODE (node) == CALL_EXPR
      || (TREE_CODE (node) == COMPOUND_EXPR
	  && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
	  && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
      /* Account for wrapped around ctors. */
      if (TREE_CODE (node) == COMPOUND_EXPR)
        call = TREE_OPERAND (node, 0);
      nargs = call_expr_nargs (call);
      /* This reverses the evaluation order. This is a desired effect. */
      for (i = 0, cmp = NULL_TREE; i < nargs; i++)
	  tree arg = CALL_EXPR_ARG (call, i);
	  /* Promote types smaller than integer. This is required by
	     the target calling convention.  */
	  tree type = TREE_TYPE (arg);
	  if (targetm.calls.promote_prototypes (type)
	      && INTEGRAL_TYPE_P (type)
	      && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
				      TYPE_SIZE (integer_type_node)))
	    arg = fold_convert (integer_type_node, arg);
	  /* Pin the argument's own sub-expressions first, then force a
	     copy so later arguments cannot clobber this one's value.  */
	  saved = save_expr (force_evaluation_order (arg));
	  cmp = (cmp == NULL_TREE ? saved :
		 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));
	  /* The call re-uses the SAVE_EXPR, not the raw argument.  */
	  CALL_EXPR_ARG (call, i) = saved;
      if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
	TREE_SIDE_EFFECTS (cmp) = 1;
	  /* CMP evaluates every saved argument, in order, before NODE.  */
	  cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
	  if (TREE_TYPE (cmp) != void_type_node)
	    cmp = save_expr (cmp);
	  CAN_COMPLETE_NORMALLY (cmp) = CAN_COMPLETE_NORMALLY (node);
	  TREE_SIDE_EFFECTS (cmp) = 1;
/* Build a node to represent empty statements and blocks. */
build_java_empty_stmt (void)
  tree t = build_empty_stmt ();
  /* An empty statement never interrupts control flow.  */
  CAN_COMPLETE_NORMALLY (t) = 1;
/* Promote all args of integral type before generating any code. */
promote_arguments (void)
  /* Walk the parameter list; I tracks the local-variable slot.  */
  for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
       arg != NULL_TREE; arg = TREE_CHAIN (arg), i++)
      tree arg_type = TREE_TYPE (arg);
      if (INTEGRAL_TYPE_P (arg_type)
	  && TYPE_PRECISION (arg_type) < 32)
	  /* Narrow integral parameters are kept in an int-typed local:
	     copy the incoming value in, widened to int.  */
	  tree copy = find_local_variable (i, integer_type_node, -1);
	  java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
				 fold_convert (integer_type_node, arg)));
      /* NOTE(review): long/double args occupy two slots — presumably I
	 is bumped an extra time here; confirm the elided body.  */
      if (TYPE_IS_WIDE (arg_type))
/* Create a local variable that points to the constant pool. */
cache_cpool_data_ref (void)
      tree d = build_constant_data_ref (flag_indirect_classes);
      /* An anonymous pointer-to-constant-pool local.  */
      tree cpool_ptr = build_decl (VAR_DECL, NULL_TREE,
				   build_pointer_type (TREE_TYPE (d)));
      java_add_local_var (cpool_ptr);
      /* The pointer is assigned once and never changes afterwards.  */
      TREE_INVARIANT (cpool_ptr) = 1;
      TREE_CONSTANT (cpool_ptr) = 1;
      java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
			     cpool_ptr, build_address_of (d)));
      /* Cache the dereference; it is marked as unable to trap.  */
      cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
      TREE_THIS_NOTRAP (cpool) = 1;
      TYPE_CPOOL_DATA_REF (output_class) = cpool;
3787 #include "gt-java-expr.h"