1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA.
22 Java and all Java-based marks are trademarks or registered trademarks
23 of Sun Microsystems, Inc. in the United States and other countries.
24 The Free Software Foundation is independent of Sun Microsystems, Inc. */
26 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
30 #include "coretypes.h"
37 #include "java-tree.h"
39 #include "java-opcodes.h"
41 #include "java-except.h"
46 #include "tree-gimple.h"
49 static void flush_quick_stack (void);
50 static void push_value (tree);
51 static tree pop_value (tree);
52 static void java_stack_swap (void);
53 static void java_stack_dup (int, int);
54 static void build_java_athrow (tree);
55 static void build_java_jsr (int, int);
56 static void build_java_ret (tree);
57 static void expand_java_multianewarray (tree, int);
58 static void expand_java_arraystore (tree);
59 static void expand_java_arrayload (tree);
60 static void expand_java_array_length (void);
61 static tree build_java_monitor (tree, tree);
62 static void expand_java_pushc (int, tree);
63 static void expand_java_return (tree);
64 static void expand_load_internal (int, tree, int);
65 static void expand_java_NEW (tree);
66 static void expand_java_INSTANCEOF (tree);
67 static void expand_java_CHECKCAST (tree);
68 static void expand_iinc (unsigned int, int, int);
69 static void expand_java_binop (tree, enum tree_code);
70 static void note_label (int, int);
71 static void expand_compare (enum tree_code, tree, tree, int);
72 static void expand_test (enum tree_code, tree, int);
73 static void expand_cond (enum tree_code, tree, int);
74 static void expand_java_goto (int);
75 static tree expand_java_switch (tree, int);
76 static void expand_java_add_case (tree, int, int);
77 static tree pop_arguments (tree);
78 static void expand_invoke (int, int, int);
79 static void expand_java_field_op (int, int, int);
80 static void java_push_constant_from_pool (struct JCF *, int);
81 static void java_stack_pop (int);
82 static tree build_java_throw_out_of_bounds_exception (tree);
83 static tree build_java_check_indexed_type (tree, tree);
84 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
85 static void promote_arguments (void);
86 static void cache_cpool_data_ref (void);
/* Per-opcode operand types; init_expr_processing fills in indices
   21-25 and 54-58 -- presumably the JVM iload..aload / istore..astore
   opcode numbers (TODO confirm against java-opcodes.h).  */
88 static GTY(()) tree operand_type[59];
/* Cached identifier trees; their initialization is not visible in this
   chunk.  */
90 static GTY(()) tree methods_ident;
91 static GTY(()) tree ncode_ident;
92 tree dtable_ident = NULL_TREE;
94 /* Set to nonzero value in order to emit class initialization code
95 before static field references. */
96 int always_initialize_class_p = 0;
98 /* We store the stack state in two places:
99 Within a basic block, we use the quick_stack, which is a
100 pushdown list (TREE_LISTs) of expression nodes.
101 This is the top part of the stack; below that we use find_stack_slot.
102 At the end of a basic block, the quick_stack must be flushed
103 to the stack slot array (as handled by find_stack_slot).
104 Using quick_stack generates better code (especially when
105 compiled without optimization), because we do not have to
106 explicitly store and load trees to temporary variables.
108 If a variable is on the quick stack, it means the value of variable
109 when the quick stack was last flushed. Conceptually, flush_quick_stack
110 saves all the quick_stack elements in parallel. However, that is
111 complicated, so it actually saves them (i.e. copies each stack value
112 to its home virtual register) from low indexes. This allows a quick_stack
113 element at index i (counting from the bottom of the stack) to reference
114 virtual register slots for registers that are >= i, but not those that are deeper.
115 This convention makes most operations easier. For example iadd works
116 even when the stack contains (reg[0], reg[1]): It results in the
117 stack containing (reg[0]+reg[1]), which is OK. However, some stack
118 operations are more complicated. For example dup given a stack
119 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
120 the convention, since stack value 1 would refer to a register with
121 lower index (reg[0]), which flush_quick_stack does not safely handle.
122 So dup cannot just add an extra element to the quick_stack, but iadd can.
/* The quick_stack described in the design comment above: a pushdown
   list of TREE_LIST nodes holding the top of the JVM operand stack
   within a basic block.  */
125 static GTY(()) tree quick_stack;
127 /* A free-list of unused permanent TREE_LIST nodes. */
128 static GTY((deletable)) tree tree_list_free_list;
130 /* The physical memory page size used in this computer. See
131 build_field_ref(). */
132 static GTY(()) tree page_size;
134 /* The stack pointer of the Java virtual machine.
135 This does include the size of the quick_stack. */
/* Bytecode line-number table for the method being expanded -- assumed
   to point into the class-file data; TODO confirm against the caller
   that sets it.  */
139 const unsigned char *linenumber_table;
140 int linenumber_count;
142 /* Largest pc so far in this method that has been passed to lookup_label. */
143 int highest_label_pc_this_method = -1;
145 /* Base value for this method to add to pc to get generated label. */
146 int start_label_pc_this_method = 0;
/* One-time initialization: populate the operand_type[] table.  The
   index pairs (21,54), (22,55), ... look like the JVM iload/istore
   family of opcodes -- presumably opcode-indexed; TODO confirm.  */
149 init_expr_processing (void)
151 operand_type[21] = operand_type[54] = int_type_node;
152 operand_type[22] = operand_type[55] = long_type_node;
153 operand_type[23] = operand_type[56] = float_type_node;
154 operand_type[24] = operand_type[57] = double_type_node;
155 operand_type[25] = operand_type[58] = ptr_type_node;
/* Convert EXPR to a tree usable as a boolean truth value.  For most
   tree codes this falls through to an explicit comparison against
   boolean_false_node; comparison and TRUTH_* codes are returned as-is
   (case bodies for those appear to be elided in this extraction).  */
159 java_truthvalue_conversion (tree expr)
161 /* It is simpler and generates better code to have only TRUTH_*_EXPR
162 or comparison expressions as truth values at this level.
164 This function should normally be identity for Java. */
166 switch (TREE_CODE (expr))
168 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
169 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
170 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
171 case ORDERED_EXPR: case UNORDERED_EXPR:
172 case TRUTH_ANDIF_EXPR:
173 case TRUTH_ORIF_EXPR:
/* Integer constant: nonzero is true.  */
182 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
/* Real constant: nonzero is true.  */
185 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
187 /* are these legal? XXX JH */
191 /* These don't change whether an object is nonzero or zero. */
192 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
195 /* Distribute the conversion into the arms of a COND_EXPR. */
196 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
197 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
198 java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
201 /* If this is widening the argument, we can ignore it. */
202 if (TYPE_PRECISION (TREE_TYPE (expr))
203 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
204 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
205 /* fall through to default */
/* Default: compare EXPR against false.  */
208 return fold_build2 (NE_EXPR, boolean_type_node,
209 expr, boolean_false_node);
213 /* Save any stack slots that happen to be in the quick_stack into their
214 home virtual register slots.
216 The copy order is from low stack index to high, to support the invariant
217 that the expression for a slot may contain decls for stack slots with
218 higher (or the same) index, but not lower. */
221 flush_quick_stack (void)
223 int stack_index = stack_pointer;
224 tree prev, cur, next;
226 /* First reverse the quick_stack, and count the number of slots it has. */
227 for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next)
229 next = TREE_CHAIN (cur);
230 TREE_CHAIN (cur) = prev;
/* Wide (two-slot) values occupy two stack indexes.  */
232 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (TREE_VALUE (cur)));
/* Now pop each node, recycle it onto the free list, and emit a store
   of its value into the corresponding stack-slot decl.  */
236 while (quick_stack != NULL_TREE)
239 tree node = quick_stack, type;
240 quick_stack = TREE_CHAIN (node);
241 TREE_CHAIN (node) = tree_list_free_list;
242 tree_list_free_list = node;
243 node = TREE_VALUE (node);
244 type = TREE_TYPE (node);
246 decl = find_stack_slot (stack_index, type);
248 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (node), decl, node));
249 stack_index += 1 + TYPE_IS_WIDE (type);
253 /* Push TYPE on the type stack.
254 Return 1 on success, 0 on overflow (when the pushed words would
   exceed DECL_MAX_STACK for the current method). */
257 push_type_0 (tree type)
/* Promote small integral types so the stack only holds word-sized
   entries.  */
260 type = promote_type (type);
261 n_words = 1 + TYPE_IS_WIDE (type);
262 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
264 /* Allocate decl for this variable now, so we get a temporary that
265 survives the whole method. */
266 find_stack_slot (stack_pointer, type);
267 stack_type_map[stack_pointer++] = type;
/* Mark the second word of a wide value with the TYPE_SECOND filler.  */
269 while (--n_words >= 0)
270 stack_type_map[stack_pointer++] = TYPE_SECOND;
/* Push TYPE on the type stack; unlike push_type_0, overflow here is a
   hard error (handling of R appears elided in this extraction).  */
275 push_type (tree type)
277 int r = push_type_0 (type);
/* Push the expression VALUE onto the quick stack, recycling a
   TREE_LIST node from the free list when one is available.  Always
   flushes afterwards so side effects are not lost.  */
282 push_value (tree value)
284 tree type = TREE_TYPE (value);
/* Promote sub-int integral values, mirroring push_type_0.  */
285 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
287 type = promote_type (type);
288 value = convert (type, value);
291 if (tree_list_free_list == NULL_TREE)
292 quick_stack = tree_cons (NULL_TREE, value, quick_stack);
/* Reuse a node from the free list instead of allocating.  */
295 tree node = tree_list_free_list;
296 tree_list_free_list = TREE_CHAIN (tree_list_free_list);
297 TREE_VALUE (node) = value;
298 TREE_CHAIN (node) = quick_stack;
301 /* If the value has a side effect, then we need to evaluate it
302 whether or not the result is used. If the value ends up on the
303 quick stack and is then popped, this won't happen -- so we flush
304 the quick stack. It is safest to simply always flush, though,
305 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
306 the latter we may need to strip conversions. */
307 flush_quick_stack ();
310 /* Pop a type from the type stack.
311 TYPE is the expected type. Return the actual type, which must be
313 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
316 pop_type_0 (tree type, char **messagep)
/* Reference types are handled as promoted pointers.  */
321 if (TREE_CODE (type) == RECORD_TYPE)
322 type = promote_type (type);
323 n_words = 1 + TYPE_IS_WIDE (type);
324 if (stack_pointer < n_words)
326 *messagep = xstrdup ("stack underflow");
/* A wide value's second word must be the TYPE_SECOND filler
   (void_type_node in this check).  */
329 while (--n_words > 0)
331 if (stack_type_map[--stack_pointer] != void_type_node)
333 *messagep = xstrdup ("Invalid multi-word value on type stack");
337 t = stack_type_map[--stack_pointer];
338 if (type == NULL_TREE || t == type)
/* A TREE_LIST entry encodes an unresolved reference type; check it
   can widen to the expected type.  */
340 if (TREE_CODE (t) == TREE_LIST)
344 tree tt = TREE_PURPOSE (t);
345 if (! can_widen_reference_to (tt, type))
/* Any two sub-int integral types are interchangeable on the stack.  */
355 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
356 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
358 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
360 /* If the expected type we've been passed is object or ptr
361 (i.e. void*), the caller needs to know the real type. */
362 if (type == ptr_type_node || type == object_ptr_type_node)
365 /* Since the verifier has already run, we know that any
366 types we see will be compatible. In BC mode, this fact
367 may be checked at runtime, but if that is so then we can
368 assume its truth here as well. So, we always succeed
369 here, with the expected type. */
373 if (! flag_verify_invocations && flag_indirect_dispatch
374 && t == object_ptr_type_node)
376 if (type != ptr_type_node)
377 warning (0, "need to insert runtime check for %s",
378 xstrdup (lang_printable_name (type, 0)));
382 /* lang_printable_name uses a static buffer, so we must save the result
383 from calling it the first time. */
386 char *temp = xstrdup (lang_printable_name (type, 0));
387 /* If the stack contains a multi-word type, keep popping the stack until
388 the real type is found. */
389 while (t == void_type_node)
390 t = stack_type_map[--stack_pointer];
391 *messagep = concat ("expected type '", temp,
392 "' but stack contains '", lang_printable_name (t, 0),
399 /* Pop a type from the type stack.
400 TYPE is the expected type. Return the actual type, which must be
401 convertible to TYPE, otherwise call error. */
/* Thin wrapper over pop_type_0 that reports any message via error().  */
406 char *message = NULL;
407 type = pop_type_0 (type, &message);
410 error ("%s", message);
417 /* Return true if two type assertions are equal.  Used as the
   equality callback for the per-class assertions hash table. */
420 type_assertion_eq (const void * k1_p, const void * k2_p)
422 const type_assertion k1 = *(const type_assertion *)k1_p;
423 const type_assertion k2 = *(const type_assertion *)k2_p;
/* Equal iff code and both operands match (op1 comparison appears
   elided in this extraction).  */
424 return (k1.assertion_code == k2.assertion_code
426 && k1.op2 == k2.op2);
429 /* Hash a type assertion.  Used as the hash callback for the per-class
   assertions hash table; mixes the code and both operand pointers. */
432 type_assertion_hash (const void *p)
434 const type_assertion *k_p = p;
435 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
436 k_p->assertion_code, 0);
437 hash = iterative_hash (&k_p->op1, sizeof k_p->op1, hash);
438 return iterative_hash (&k_p->op2, sizeof k_p->op2, hash);
441 /* Add an entry to the type assertion table for the given class.
442 CLASS is the class for which this assertion will be evaluated by the
443 runtime during loading/initialization.
444 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
445 OP1 and OP2 are the operands. The tree type of these arguments may be
446 specific to each assertion_code. */
449 add_type_assertion (tree class, int assertion_code, tree op1, tree op2)
451 htab_t assertions_htab;
455 assertions_htab = TYPE_ASSERTIONS (class);
456 if (assertions_htab == NULL)
/* Lazily create the table on first use.  */
458 assertions_htab = htab_create_ggc (7, type_assertion_hash,
459 type_assertion_eq, NULL);
/* NOTE(review): table is fetched from CLASS but stored back on
   current_class -- looks intentional only if CLASS == current_class;
   TODO confirm.  */
460 TYPE_ASSERTIONS (current_class) = assertions_htab;
463 as.assertion_code = assertion_code;
467 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
469 /* Don't add the same assertion twice. */
/* Allocate a GC'd copy of the key for permanent storage in the slot.  */
473 *as_pp = ggc_alloc (sizeof (type_assertion));
474 **(type_assertion **)as_pp = as;
478 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
479 Handles array types and interfaces. */
482 can_widen_reference_to (tree source_type, tree target_type)
/* Everything widens to Object; ptr_type (void*) widens to anything.  */
484 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
487 /* Get rid of pointers */
488 if (TREE_CODE (source_type) == POINTER_TYPE)
489 source_type = TREE_TYPE (source_type);
490 if (TREE_CODE (target_type) == POINTER_TYPE)
491 target_type = TREE_TYPE (target_type);
493 if (source_type == target_type)
496 /* FIXME: This is very pessimistic, in that it checks everything,
497 even if we already know that the types are compatible. If we're
498 to support full Java class loader semantics, we need this.
499 However, we could do something more optimal. */
500 if (! flag_verify_invocations)
502 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
503 source_type, target_type);
506 warning (0, "assert: %s is assign compatible with %s",
507 xstrdup (lang_printable_name (target_type, 0)),
508 xstrdup (lang_printable_name (source_type, 0)));
509 /* Punt everything to runtime. */
/* Dummy (not-yet-loaded) types can't be reasoned about statically.  */
513 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
519 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
521 HOST_WIDE_INT source_length, target_length;
522 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
524 /* An array implements Cloneable and Serializable. */
525 tree name = DECL_NAME (TYPE_NAME (target_type));
526 return (name == java_lang_cloneable_identifier_node
527 || name == java_io_serializable_identifier_node);
529 target_length = java_array_type_length (target_type);
530 if (target_length >= 0)
532 source_length = java_array_type_length (source_type);
533 if (source_length != target_length)
/* Recurse on the element types of the two arrays.  */
536 source_type = TYPE_ARRAY_ELEMENT (source_type);
537 target_type = TYPE_ARRAY_ELEMENT (target_type);
538 if (source_type == target_type)
540 if (TREE_CODE (source_type) != POINTER_TYPE
541 || TREE_CODE (target_type) != POINTER_TYPE)
543 return can_widen_reference_to (source_type, target_type);
/* Non-array case: compare positions in the class hierarchy.  */
547 int source_depth = class_depth (source_type);
548 int target_depth = class_depth (target_type);
550 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
553 warning (0, "assert: %s is assign compatible with %s",
554 xstrdup (lang_printable_name (target_type, 0)),
555 xstrdup (lang_printable_name (source_type, 0)));
559 /* class_depth can return a negative depth if an error occurred */
560 if (source_depth < 0 || target_depth < 0)
563 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
565 /* target_type is OK if source_type or source_type ancestors
566 implement target_type. We handle multiple sub-interfaces */
567 tree binfo, base_binfo;
570 for (binfo = TYPE_BINFO (source_type), i = 0;
571 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
572 if (can_widen_reference_to
573 (BINFO_TYPE (base_binfo), target_type))
/* Walk up the superclass chain until the depths match, then the
   types must be identical.  */
580 for ( ; source_depth > target_depth; source_depth--)
583 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
585 return source_type == target_type;
/* Pop a value of the expected TYPE.  If the quick stack is non-empty
   the value comes from there (recycling the TREE_LIST node); otherwise
   the home stack-slot decl is returned.  */
591 pop_value (tree type)
593 type = pop_type (type);
596 tree node = quick_stack;
597 quick_stack = TREE_CHAIN (quick_stack);
/* Return the popped node to the free list before extracting its value.  */
598 TREE_CHAIN (node) = tree_list_free_list;
599 tree_list_free_list = node;
600 node = TREE_VALUE (node);
604 return find_stack_slot (stack_pointer, promote_type (type));
608 /* Pop and discard the top COUNT stack slots. */
611 java_stack_pop (int count)
617 gcc_assert (stack_pointer != 0);
619 type = stack_type_map[stack_pointer - 1];
/* The top slot may be the second word of a wide value; pop both words
   using the real type one slot down.  */
620 if (type == TYPE_SECOND)
623 gcc_assert (stack_pointer != 1 && count > 0);
625 type = stack_type_map[stack_pointer - 2];
627 val = pop_value (type);
632 /* Implement the 'swap' operator (to swap two top stack slots).
   Only valid for two single-word, known-type slots; flushes the quick
   stack and exchanges the slot decls through a temporary. */
635 java_stack_swap (void)
641 if (stack_pointer < 2
642 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_UNKNOWN
643 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_UNKNOWN
644 || type1 == TYPE_SECOND || type2 == TYPE_SECOND
645 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
646 /* Bad stack swap. */
648 /* Bad stack swap. */
650 flush_quick_stack ();
651 decl1 = find_stack_slot (stack_pointer - 1, type1);
652 decl2 = find_stack_slot (stack_pointer - 2, type2);
/* temp = slot1; slot1 = slot2; slot2 = temp -- using per-type slot
   decls, then update the type map to match.  */
653 temp = build_decl (VAR_DECL, NULL_TREE, type1);
654 java_add_local_var (temp);
655 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
656 java_add_stmt (build2 (MODIFY_EXPR, type2,
657 find_stack_slot (stack_pointer - 1, type2),
659 java_add_stmt (build2 (MODIFY_EXPR, type1,
660 find_stack_slot (stack_pointer - 2, type1),
662 stack_type_map[stack_pointer - 1] = type2;
663 stack_type_map[stack_pointer - 2] = type1;
/* Implement the dup family of bytecodes: duplicate the top SIZE stack
   words, inserting the copy OFFSET words down.  Works by copying slot
   decls from high index to low, respecting wide (two-word) values.  */
667 java_stack_dup (int size, int offset)
669 int low_index = stack_pointer - size - offset;
672 error ("stack underflow - dup* operation");
674 flush_quick_stack ();
676 stack_pointer += size;
677 dst_index = stack_pointer;
679 for (dst_index = stack_pointer; --dst_index >= low_index; )
/* Source slot is SIZE below the destination, except when we wrap
   around below LOW_INDEX (the rotated portion of the dup).  */
682 int src_index = dst_index - size;
683 if (src_index < low_index)
684 src_index = dst_index + size + offset;
685 type = stack_type_map [src_index];
686 if (type == TYPE_SECOND)
688 /* Dup operation splits 64-bit number. */
689 gcc_assert (src_index > low_index);
691 stack_type_map[dst_index] = type;
/* Copy the first (real) word of the wide value as well.  */
692 src_index--; dst_index--;
693 type = stack_type_map[src_index];
694 gcc_assert (TYPE_IS_WIDE (type));
697 gcc_assert (! TYPE_IS_WIDE (type));
699 if (src_index != dst_index)
701 tree src_decl = find_stack_slot (src_index, type);
702 tree dst_decl = find_stack_slot (dst_index, type);
705 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
706 stack_type_map[dst_index] = type;
711 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
   stack, since control does not return after a throw. */
715 build_java_athrow (tree node)
719 call = build_call_nary (void_type_node,
720 build_address_of (throw_node),
722 TREE_SIDE_EFFECTS (call) = 1;
723 java_add_stmt (call);
/* The whole operand stack is dead after an athrow.  */
724 java_stack_pop (stack_pointer);
727 /* Implementation for jsr/ret: push the return address, flush the
   quick stack, and jump to TARGET_PC's label. */
730 build_java_jsr (int target_pc, int return_pc)
732 tree where = lookup_label (target_pc);
733 tree ret = lookup_label (return_pc);
734 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
735 push_value (ret_label);
736 flush_quick_stack ();
737 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
739 /* Do not need to emit the label here. We noted the existence of the
740 label as a jump target in note_instructions; we'll emit the label
741 for real at the beginning of the expand_byte_code loop. */
/* Implementation of the ret bytecode: computed goto to the return
   address stored in LOCATION.  */
745 build_java_ret (tree location)
747 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
750 /* Implementation of operations on array: new, load, store, length */
/* Map an OPCODE_newarray element-type code (ATYPE) to the
   corresponding primitive type node, or NULL_TREE if out of range.
   Codes 4-11 match the JVM newarray atype values.  */
753 decode_newarray_type (int atype)
757 case 4: return boolean_type_node;
758 case 5: return char_type_node;
759 case 6: return float_type_node;
760 case 7: return double_type_node;
761 case 8: return byte_type_node;
762 case 9: return short_type_node;
763 case 10: return int_type_node;
764 case 11: return long_type_node;
765 default: return NULL_TREE;
769 /* Map primitive type to the code used by OPCODE_newarray.  Inverse of
   decode_newarray_type; the returned constants appear elided in this
   extraction. */
772 encode_newarray_type (tree type)
774 if (type == boolean_type_node)
776 else if (type == char_type_node)
778 else if (type == float_type_node)
780 else if (type == double_type_node)
782 else if (type == byte_type_node)
784 else if (type == short_type_node)
786 else if (type == int_type_node)
788 else if (type == long_type_node)
794 /* Build a call to _Jv_ThrowBadArrayIndex(), the
795 ArrayIndexOfBoundsException exception handler.  Returns the call
   tree; INDEX is passed to the runtime helper. */
798 build_java_throw_out_of_bounds_exception (tree index)
800 tree node = build_call_nary (int_type_node,
801 build_address_of (soft_badarrayindex_node),
803 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
807 /* Return the length of an array. Doesn't perform any checking on the nature
808 or value of the array NODE. May be used to implement some bytecodes. */
811 build_java_array_length_access (tree node)
813 tree type = TREE_TYPE (node);
814 tree array_type = TREE_TYPE (type);
815 HOST_WIDE_INT length;
817 if (!is_array_type_p (type))
819 /* With the new verifier, we will see an ordinary pointer type
820 here. In this case, we just use an arbitrary array type. */
821 array_type = build_java_array_type (object_ptr_type_node, -1);
822 type = promote_type (array_type);
/* If the length is known at compile time, fold to a constant.  */
825 length = java_array_type_length (type);
827 return build_int_cst (NULL_TREE, length);
/* Otherwise load the runtime "length" field, with an optional null
   check via build_java_indirect_ref.  */
829 node = build3 (COMPONENT_REF, int_type_node,
830 build_java_indirect_ref (array_type, node,
831 flag_check_references),
832 lookup_field (&array_type, get_identifier ("length")),
834 IS_ARRAY_LENGTH_ACCESS (node) = 1;
838 /* Optionally checks a reference against the NULL pointer. ARG1: the
839 expr, ARG2: we should check the reference. Don't generate extra
840 checks if we're not generating code. */
843 java_check_reference (tree expr, int check)
845 if (!flag_syntax_only && check)
/* expr == NULL ? throw NullPointerException : expr  */
847 expr = save_expr (expr);
848 expr = build3 (COND_EXPR, TREE_TYPE (expr),
849 build2 (EQ_EXPR, boolean_type_node,
850 expr, null_pointer_node),
851 build_call_nary (void_type_node,
852 build_address_of (soft_nullpointer_node),
860 /* Reference an object: just like an INDIRECT_REF, but with checking.
   TYPE is the pointed-to type; CHECK enables the null-pointer test. */
863 build_java_indirect_ref (tree type, tree expr, int check)
866 t = java_check_reference (expr, check);
867 t = convert (build_pointer_type (type), t);
868 return build1 (INDIRECT_REF, type, t);
871 /* Implement array indexing (either as l-value or r-value).
872 Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
873 Optionally performs bounds checking and/or test to NULL.
874 At this point, ARRAY should have been verified as an array. */
877 build_java_arrayaccess (tree array, tree type, tree index)
879 tree node, throw = NULL_TREE;
882 tree array_type = TREE_TYPE (TREE_TYPE (array));
883 tree size_exp = fold_convert (sizetype, size_in_bytes (type));
885 if (!is_array_type_p (TREE_TYPE (array)))
887 /* With the new verifier, we will see an ordinary pointer type
888 here. In this case, we just use the correct array type. */
889 array_type = build_java_array_type (type, -1);
892 if (flag_bounds_check)
895 * (unsigned jint) INDEX >= (unsigned jint) LEN
896 * && throw ArrayIndexOutOfBoundsException.
897 * Note this is equivalent to and more efficient than:
898 * INDEX < 0 || INDEX >= LEN && throw ... */
900 tree len = convert (unsigned_int_type_node,
901 build_java_array_length_access (array));
902 test = fold_build2 (GE_EXPR, boolean_type_node,
903 convert (unsigned_int_type_node, index),
/* The test may fold to constant 0 when both operands are constants
   and in range; skip the throw in that case.  */
905 if (! integer_zerop (test))
907 throw = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
908 build_java_throw_out_of_bounds_exception (index));
909 /* allows expansion within COMPOUND */
910 TREE_SIDE_EFFECTS( throw ) = 1;
914 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
915 to have the bounds check evaluated first. */
916 if (throw != NULL_TREE)
917 index = build2 (COMPOUND_EXPR, int_type_node, throw, index);
919 data_field = lookup_field (&array_type, get_identifier ("data"));
921 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
922 build_java_indirect_ref (array_type, array,
923 flag_check_references),
924 data_field, NULL_TREE);
926 /* Take the address of the data field and convert it to a pointer to
928 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
930 /* Multiply the index by the size of an element to obtain a byte
931 offset. Convert the result to a pointer to the element type. */
932 index = build2 (MULT_EXPR, sizetype,
933 fold_convert (sizetype, index),
936 /* Sum the byte offset and the address of the data field. */
937 node = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (node), node, index);
/* Final result dereferences the computed element address:  */
941 *((&array->data) + index*size_exp)
944 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
947 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
948 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
949 determine that no check is required. */
952 build_java_arraystore_check (tree array, tree object)
954 tree check, element_type, source;
955 tree array_type_p = TREE_TYPE (array);
956 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
958 if (! flag_verify_invocations)
960 /* With the new verifier, we don't track precise types. FIXME:
961 performance regression here. */
962 element_type = TYPE_NAME (object_type_node);
966 gcc_assert (is_array_type_p (array_type_p));
968 /* Get the TYPE_DECL for ARRAY's element type. */
970 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
973 gcc_assert (TREE_CODE (element_type) == TYPE_DECL
974 && TREE_CODE (object_type) == TYPE_DECL);
976 if (!flag_store_check)
977 return build1 (NOP_EXPR, array_type_p, array);
979 /* No check is needed if the element type is final. Also check that
980 element_type matches object_type, since in the bytecode
981 compilation case element_type may be the actual element type of
982 the array rather than its declared type. However, if we're doing
983 indirect dispatch, we can't do the `final' optimization. */
984 if (element_type == object_type
985 && ! flag_indirect_dispatch
986 && CLASS_FINAL (element_type))
987 return build1 (NOP_EXPR, array_type_p, array);
989 /* OBJECT might be wrapped by a SAVE_EXPR. */
990 if (TREE_CODE (object) == SAVE_EXPR)
991 source = TREE_OPERAND (object, 0);
995 /* Avoid the check if OBJECT was just loaded from the same array. */
996 if (TREE_CODE (source) == ARRAY_REF)
/* Peel back the tree structure built by build_java_arrayaccess to
   reach the source array's DECL.  */
999 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
1000 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
1001 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1002 if (TREE_CODE (source) == SAVE_EXPR)
1003 source = TREE_OPERAND (source, 0);
1006 if (TREE_CODE (target) == SAVE_EXPR)
1007 target = TREE_OPERAND (target, 0);
/* Storing back into the array an element was loaded from is always
   type-safe.  */
1009 if (source == target)
1010 return build1 (NOP_EXPR, array_type_p, array);
1013 /* Build an invocation of _Jv_CheckArrayStore */
1014 check = build_call_nary (void_type_node,
1015 build_address_of (soft_checkarraystore_node),
1017 TREE_SIDE_EFFECTS (check) = 1;
1022 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1023 ARRAY_NODE. This function is used to retrieve something less vague than
1024 a pointer type when indexing the first dimension of something like [[<t>.
1025 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
1026 return unchanged. */
1029 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1032 /* We used to check to see if ARRAY_NODE really had array type.
1033 However, with the new verifier, this is not necessary, as we know
1034 that the object will be an array of the appropriate type. */
1036 return indexed_type;
1039 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1040 called with an integer code (the type of array to create), and the length
1041 of the array to create. */
1044 build_newarray (int atype_value, tree length)
1048 tree prim_type = decode_newarray_type (atype_value);
/* Use a constant-length array type when LENGTH is a compile-time
   constant, else an unknown-length (-1) type.  */
1050 = build_java_array_type (prim_type,
1051 host_integerp (length, 0) == INTEGER_CST
1052 ? tree_low_cst (length, 0) : -1);
1054 /* Pass a reference to the primitive type class and save the runtime
1056 type_arg = build_class_ref (prim_type);
1058 return build_call_nary (promote_type (type),
1059 build_address_of (soft_newarray_node),
1060 2, type_arg, length);
1063 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1064 of the dimension. */
1067 build_anewarray (tree class_type, tree length)
/* As in build_newarray, prefer a constant-length array type when the
   length is known at compile time.  */
1070 = build_java_array_type (class_type,
1071 host_integerp (length, 0)
1072 ? tree_low_cst (length, 0) : -1);
1074 return build_call_nary (promote_type (type),
1075 build_address_of (soft_anewarray_node),
1078 build_class_ref (class_type),
1082 /* Return a node that evaluates 'new TYPE[LENGTH]': dispatches to the
   primitive or reference array builder as appropriate. */
1085 build_new_array (tree type, tree length)
1087 if (JPRIMITIVE_TYPE_P (type))
1088 return build_newarray (encode_newarray_type (type), length);
1090 return build_anewarray (TREE_TYPE (type), length);
1093 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1094 class pointer, a number of dimensions and the matching number of
1095 dimensions. The argument list is NULL terminated. */
1098 expand_java_multianewarray (tree class_type, int ndim)
1101 tree args = build_tree_list( NULL_TREE, null_pointer_node );
/* Pop NDIM dimension sizes off the stack; consing onto ARGS reverses
   them back into source order ahead of the NULL terminator.  */
1103 for( i = 0; i < ndim; i++ )
1104 args = tree_cons (NULL_TREE, pop_value (int_type_node), args);
1106 args = tree_cons (NULL_TREE,
1107 build_class_ref (class_type),
1108 tree_cons (NULL_TREE,
1109 build_int_cst (NULL_TREE, ndim),
1112 push_value (build_call_list (promote_type (class_type),
1113 build_address_of (soft_multianewarray_node),
1117 /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
1118 ARRAY is an array type. May expand some bound checking and NULL
1119 pointer checking. RHS_TYPE_NODE we are going to store. In the case
1120 of the CHAR/BYTE/BOOLEAN SHORT, the type popped of the stack is an
1121 INT. In those cases, we make the conversion.
1123 if ARRAy is a reference type, the assignment is checked at run-time
1124 to make sure that the RHS can be assigned to the array element
1125 type. It is not necessary to generate this code if ARRAY is final. */
1128 expand_java_arraystore (tree rhs_type_node)
/* Sub-int values live on the stack as int; pop accordingly.  */
1130 tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
1131 && TYPE_PRECISION (rhs_type_node) <= 32) ?
1132 int_type_node : rhs_type_node);
1133 tree index = pop_value (int_type_node);
1134 tree array_type, array, temp, access;
1136 /* If we're processing an `aaload' we might as well just pick
1138 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1140 array_type = build_java_array_type (object_ptr_type_node, -1);
1141 rhs_type_node = object_ptr_type_node;
1144 array_type = build_java_array_type (rhs_type_node, -1);
1146 array = pop_value (array_type);
1147 array = build1 (NOP_EXPR, promote_type (array_type), array);
1149 rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
1151 flush_quick_stack ();
/* INDEX and ARRAY are evaluated more than once below.  */
1153 index = save_expr (index);
1154 array = save_expr (array);
1156 /* We want to perform the bounds check (done by
1157 build_java_arrayaccess) before the type check (done by
1158 build_java_arraystore_check). So, we call build_java_arrayaccess
1159 -- which returns an ARRAY_REF lvalue -- and we then generate code
1160 to stash the address of that lvalue in a temp. Then we call
1161 build_java_arraystore_check, and finally we generate a
1162 MODIFY_EXPR to set the array element. */
1164 access = build_java_arrayaccess (array, rhs_type_node, index);
1165 temp = build_decl (VAR_DECL, NULL_TREE,
1166 build_pointer_type (TREE_TYPE (access)));
1167 java_add_local_var (temp);
1168 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
1170 build_fold_addr_expr (access)));
/* Reference stores need the runtime ArrayStoreException check.  */
1172 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1174 tree check = build_java_arraystore_check (array, rhs_node);
1175 java_add_stmt (check);
1178 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
1179 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — definitions here are incomplete.  */
/* expand_java_arrayload pushes ARRAY[INDEX]; sub-int integral results are
   widened to int (lines 1218-1219).  expand_java_array_length pushes the
   `.length' of the popped array reference.  */
1183 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1184 sure that LHS is an array type. May expand some bound checking and NULL
1186 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1187 BOOLEAN/SHORT, we push a promoted type back to the stack.
1191 expand_java_arrayload (tree lhs_type_node)
1194 tree index_node = pop_value (int_type_node);
1198 /* If we're processing an `aaload' we might as well just pick
1200 if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1202 array_type = build_java_array_type (object_ptr_type_node, -1);
1203 lhs_type_node = object_ptr_type_node;
1206 array_type = build_java_array_type (lhs_type_node, -1);
1207 array_node = pop_value (array_type);
1208 array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
1210 index_node = save_expr (index_node);
1211 array_node = save_expr (array_node);
1213 lhs_type_node = build_java_check_indexed_type (array_node,
1215 load_node = build_java_arrayaccess (array_node,
1218 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1219 load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1220 push_value (load_node);
1223 /* Expands .length. Makes sure that we deal with an array and may expand
1224 a NULL check on the array object. */
1227 expand_java_array_length (void)
1229 tree array = pop_value (ptr_type_node);
1230 tree length = build_java_array_length_access (array);
1232 push_value (length);
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — definitions here are incomplete.  */
/* build_java_monitor: call expression for _Jv_Monitor{Enter,Exit}.
   expand_java_pushc: materialize a PUSHC constant of the given TYPE
   (null for ptr+0, int_cst for int/long, build_real for float/double).
   expand_java_return: emit RETURN_EXPR, storing into DECL_RESULT first
   for non-void; the INT_TYPE_SIZE < 32 narrowing handles cross compilers
   whose native int is narrower than the JVM's (comment at 1279-1281).  */
1235 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1236 either soft_monitorenter_node or soft_monitorexit_node. */
1239 build_java_monitor (tree call, tree object)
1241 return build_call_nary (void_type_node,
1242 build_address_of (call),
1246 /* Emit code for one of the PUSHC instructions. */
1249 expand_java_pushc (int ival, tree type)
1252 if (type == ptr_type_node && ival == 0)
1253 value = null_pointer_node;
1254 else if (type == int_type_node || type == long_type_node)
1255 value = build_int_cst (type, ival);
1256 else if (type == float_type_node || type == double_type_node)
1259 REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1260 value = build_real (type, x);
1269 expand_java_return (tree type)
1271 if (type == void_type_node)
1272 java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1275 tree retval = pop_value (type);
1276 tree res = DECL_RESULT (current_function_decl);
1277 retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1279 /* Handle the situation where the native integer type is smaller
1280 than the JVM integer. It can happen for many cross compilers.
1281 The whole if expression just goes away if INT_TYPE_SIZE < 32
1283 if (INT_TYPE_SIZE < 32
1284 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1285 < GET_MODE_SIZE (TYPE_MODE (type))))
1286 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1288 TREE_SIDE_EFFECTS (retval) = 1;
1289 java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — definitions here are incomplete.  */
/* expand_load_internal: pushes local slot INDEX via a fresh copy VAR_DECL
   so later stores to the slot cannot clobber the pushed value (rationale
   in the comment at 1299-1304; the push_value call itself is among the
   missing lines).  build_address_of: wraps VALUE in an ADDR_EXPR of
   pointer-to-its-type.  */
1294 expand_load_internal (int index, tree type, int pc)
1297 tree var = find_local_variable (index, type, pc);
1299 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1300 on the stack. If there is an assignment to this VAR_DECL between
1301 the stack push and the use, then the wrong code could be
1302 generated. To avoid this we create a new local and copy our
1303 value into it. Then we push this new local on the stack.
1304 Hopefully this all gets optimized out. */
1305 copy = build_decl (VAR_DECL, NULL_TREE, type);
1306 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1307 && TREE_TYPE (copy) != TREE_TYPE (var))
1308 var = convert (type, var);
1309 java_add_local_var (copy);
1310 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
1316 build_address_of (tree value)
1318 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — definitions here are incomplete.  */
/* class_has_finalize_method: true if TYPE or any superclass declares a
   finalizer (recursion at 1330; java.lang.Object itself returns false).
   java_create_object / expand_java_NEW: allocate via alloc_object_node
   when a finalizer exists, else alloc_no_finalizer_node; expand_java_NEW
   additionally loads/lays out TYPE and pushes the result.  */
1322 class_has_finalize_method (tree type)
1324 tree super = CLASSTYPE_SUPER (type);
1326 if (super == NULL_TREE)
1327 return false; /* Every class with a real finalizer inherits */
1328 /* from java.lang.Object. */
1330 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
1334 java_create_object (tree type)
1336 tree alloc_node = (class_has_finalize_method (type)
1338 : alloc_no_finalizer_node);
1340 return build_call_nary (promote_type (type),
1341 build_address_of (alloc_node),
1342 1, build_class_ref (type));
1346 expand_java_NEW (tree type)
1350 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1351 : alloc_no_finalizer_node);
1352 if (! CLASS_LOADED_P (type))
1353 load_class (type, 1);
1354 safe_layout_class (type);
1355 push_value (build_call_nary (promote_type (type),
1356 build_address_of (alloc_node),
1357 1, build_class_ref (type)));
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — definitions here are incomplete.  */
/* build_get_class: value->vtable->class, via two COMPONENT_REFs.
   build_instanceof: tries three compile-time shortcuts (always-true but
   for null, provably-false disjoint classes under -fverify-invocations,
   exact-class compare for final classes) before falling back to the
   soft_instanceof_node runtime call.  expand_java_INSTANCEOF/CHECKCAST
   are the stack-machine wrappers.  */
1360 /* This returns an expression which will extract the class of an
1364 build_get_class (tree value)
1366 tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1367 tree vtable_field = lookup_field (&object_type_node,
1368 get_identifier ("vtable"));
1369 tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1370 build_java_indirect_ref (object_type_node, value,
1371 flag_check_references),
1372 vtable_field, NULL_TREE);
1373 return build3 (COMPONENT_REF, class_ptr_type,
1374 build1 (INDIRECT_REF, dtable_type, tmp),
1375 class_field, NULL_TREE);
1378 /* This builds the tree representation of the `instanceof' operator.
1379 It tries various tricks to optimize this in cases where types are
1383 build_instanceof (tree value, tree type)
1386 tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1387 tree valtype = TREE_TYPE (TREE_TYPE (value));
1388 tree valclass = TYPE_NAME (valtype);
1391 /* When compiling from bytecode, we need to ensure that TYPE has
1393 if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1395 load_class (type, 1);
1396 safe_layout_class (type);
1397 if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1398 return error_mark_node;
1400 klass = TYPE_NAME (type);
1402 if (type == object_type_node || inherits_from_p (valtype, type))
1404 /* Anything except `null' is an instance of Object. Likewise,
1405 if the object is known to be an instance of the class, then
1406 we only need to check for `null'. */
1407 expr = build2 (NE_EXPR, itype, value, null_pointer_node);
1409 else if (flag_verify_invocations
1410 && ! TYPE_ARRAY_P (type)
1411 && ! TYPE_ARRAY_P (valtype)
1412 && DECL_P (klass) && DECL_P (valclass)
1413 && ! CLASS_INTERFACE (valclass)
1414 && ! CLASS_INTERFACE (klass)
1415 && ! inherits_from_p (type, valtype)
1416 && (CLASS_FINAL (klass)
1417 || ! inherits_from_p (valtype, type)))
1419 /* The classes are from different branches of the derivation
1420 tree, so we immediately know the answer. */
1421 expr = boolean_false_node;
1423 else if (DECL_P (klass) && CLASS_FINAL (klass))
1425 tree save = save_expr (value);
1426 expr = build3 (COND_EXPR, itype,
1427 build2 (NE_EXPR, boolean_type_node,
1428 save, null_pointer_node),
1429 build2 (EQ_EXPR, itype,
1430 build_get_class (save),
1431 build_class_ref (type)),
1432 boolean_false_node);
1436 expr = build_call_nary (itype,
1437 build_address_of (soft_instanceof_node),
1438 2, value, build_class_ref (type));
1440 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
1445 expand_java_INSTANCEOF (tree type)
1447 tree value = pop_value (object_ptr_type_node);
1448 value = build_instanceof (value, type);
1453 expand_java_CHECKCAST (tree type)
1455 tree value = pop_value (ptr_type_node);
1456 value = build_call_nary (promote_type (type),
1457 build_address_of (soft_checkcast_node),
1458 2, build_class_ref (type), value);
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — definitions here are incomplete.  */
/* expand_iinc: local[i] += ival, as a MODIFY_EXPR on the found local.
   build_java_soft_divmod: routes int/long TRUNC_DIV/TRUNC_MOD through the
   soft_{i,l}{div,rem}_node runtime helpers (used when
   flag_use_divide_subroutine is set — see build_java_binop below).  */
1463 expand_iinc (unsigned int local_var_index, int ival, int pc)
1465 tree local_var, res;
1466 tree constant_value;
1468 flush_quick_stack ();
1469 local_var = find_local_variable (local_var_index, int_type_node, pc);
1470 constant_value = build_int_cst (NULL_TREE, ival);
1471 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1472 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
1477 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1480 tree arg1 = convert (type, op1);
1481 tree arg2 = convert (type, op2);
1483 if (type == int_type_node)
1487 case TRUNC_DIV_EXPR:
1488 call = soft_idiv_node;
1490 case TRUNC_MOD_EXPR:
1491 call = soft_irem_node;
1497 else if (type == long_type_node)
1501 case TRUNC_DIV_EXPR:
1502 call = soft_ldiv_node;
1504 case TRUNC_MOD_EXPR:
1505 call = soft_lrem_node;
1513 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — several switch-case heads and
   closing braces of this function are not visible here.  */
/* build_java_binop: lowers the JVM's binary ops to GENERIC.  Visible
   special cases: unsigned right shift via a cast to the unsigned type,
   shift-count masking to precision-1, the three-way COMPARE_{L,G}_EXPR
   (NaN ordering differs: L yields 1, G yields -1 on unordered — per the
   fold_build3 operands at 1544-1557), floating-point TRUNC_MOD through
   soft_fmod_node, and integer div/mod through build_java_soft_divmod
   when flag_use_divide_subroutine.  Default: fold_build2 (op, ...).
   expand_java_binop pops rarg then larg and pushes the result.  */
1518 build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
1525 tree u_type = unsigned_type_for (type);
1526 arg1 = convert (u_type, arg1);
1527 arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1528 return convert (type, arg1);
1532 mask = build_int_cst (NULL_TREE,
1533 TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
1534 arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
1537 case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
1538 case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
1539 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1541 tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1542 boolean_type_node, arg1, arg2);
1543 tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
1544 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1545 ifexp2, integer_zero_node,
1546 op == COMPARE_L_EXPR
1547 ? integer_minus_one_node
1548 : integer_one_node);
1549 return fold_build3 (COND_EXPR, int_type_node, ifexp1,
1550 op == COMPARE_L_EXPR ? integer_one_node
1551 : integer_minus_one_node,
1555 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1557 tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
1558 tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
1559 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1560 ifexp2, integer_one_node,
1562 return fold_build3 (COND_EXPR, int_type_node,
1563 ifexp1, integer_minus_one_node, second_compare);
1565 case TRUNC_DIV_EXPR:
1566 case TRUNC_MOD_EXPR:
1567 if (TREE_CODE (type) == REAL_TYPE
1568 && op == TRUNC_MOD_EXPR)
1571 if (type != double_type_node)
1573 arg1 = convert (double_type_node, arg1);
1574 arg2 = convert (double_type_node, arg2);
1576 call = build_call_nary (double_type_node,
1577 build_address_of (soft_fmod_node),
1579 if (type != double_type_node)
1580 call = convert (type, call);
1584 if (TREE_CODE (type) == INTEGER_TYPE
1585 && flag_use_divide_subroutine
1586 && ! flag_syntax_only)
1587 return build_java_soft_divmod (op, type, arg1, arg2);
1592 return fold_build2 (op, type, arg1, arg2);
1596 expand_java_binop (tree type, enum tree_code op)
1606 rtype = int_type_node;
1607 rarg = pop_value (rtype);
1610 rarg = pop_value (rtype);
1612 larg = pop_value (ltype);
1613 push_value (build_java_binop (op, type, larg, rarg));
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — the walk-the-superclass loop
   structure around 1635-1671 is only partially visible.  */
/* Searches *TYPEP and its superclasses (loop tail at 1671) and its
   implemented interfaces (BINFO iteration at 1645-1646) for NAME;
   reports an ambiguity error when the same NAME resolves to distinct
   fields in two interfaces (1659-1663).  */
1616 /* Lookup the field named NAME in *TYPEP or its super classes.
1617 If not found, return NULL_TREE.
1618 (If the *TYPEP is not found, or if the field reference is
1619 ambiguous, return error_mark_node.)
1620 If found, return the FIELD_DECL, and set *TYPEP to the
1621 class containing the field. */
1624 lookup_field (tree *typep, tree name)
1626 if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1628 load_class (*typep, 1);
1629 safe_layout_class (*typep);
1630 if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1631 return error_mark_node;
1635 tree field, binfo, base_binfo;
1639 for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field))
1640 if (DECL_NAME (field) == name)
1643 /* Process implemented interfaces. */
1644 save_field = NULL_TREE;
1645 for (binfo = TYPE_BINFO (*typep), i = 0;
1646 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1648 tree t = BINFO_TYPE (base_binfo);
1649 if ((field = lookup_field (&t, name)))
1651 if (save_field == field)
1653 if (save_field == NULL_TREE)
1657 tree i1 = DECL_CONTEXT (save_field);
1658 tree i2 = DECL_CONTEXT (field);
1659 error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1660 IDENTIFIER_POINTER (name),
1661 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1662 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1663 return error_mark_node;
1668 if (save_field != NULL_TREE)
1671 *typep = CLASSTYPE_SUPER (*typep);
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — definitions here are incomplete.  */
/* Builds a field reference: static fields delegate to
   build_static_field_ref; instance fields either index the otable
   (-findirect-dispatch path, 1724-1743) or emit a COMPONENT_REF (1754-).
   The null-check elision assumes offsets under one 4k page trap via the
   NOPERM page-0 mapping (comment at 1702-1709).  */
1676 /* Look up the field named NAME in object SELF_VALUE,
1677 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1678 SELF_VALUE is NULL_TREE if looking for a static field. */
1681 build_field_ref (tree self_value, tree self_class, tree name)
1683 tree base_class = self_class;
1684 tree field_decl = lookup_field (&base_class, name);
1685 if (field_decl == NULL_TREE)
1687 error ("field %qs not found", IDENTIFIER_POINTER (name));
1688 return error_mark_node;
1690 if (self_value == NULL_TREE)
1692 return build_static_field_ref (field_decl);
1696 tree base_type = promote_type (base_class);
1698 /* CHECK is true if self_value is not the this pointer. */
1699 int check = (! (DECL_P (self_value)
1700 && DECL_NAME (self_value) == this_identifier_node));
1702 /* Determine whether a field offset from NULL will lie within
1703 Page 0: this is necessary on those GNU/Linux/BSD systems that
1704 trap SEGV to generate NullPointerExceptions.
1706 We assume that Page 0 will be mapped with NOPERM, and that
1707 memory may be allocated from any other page, so only field
1708 offsets < pagesize are guaranteed to trap. We also assume
1709 the smallest page size we'll encounter is 4k bytes. */
1710 if (! flag_syntax_only && check && ! flag_check_references
1711 && ! flag_indirect_dispatch)
1713 tree field_offset = byte_position (field_decl);
1715 page_size = size_int (4096);
1716 check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
1719 if (base_type != TREE_TYPE (self_value))
1720 self_value = fold_build1 (NOP_EXPR, base_type, self_value);
1721 if (! flag_syntax_only && flag_indirect_dispatch)
1724 = build_int_cst (NULL_TREE, get_symbol_table_index
1725 (field_decl, NULL_TREE,
1726 &TYPE_OTABLE_METHODS (output_class)));
1728 = build4 (ARRAY_REF, integer_type_node,
1729 TYPE_OTABLE_DECL (output_class), otable_index,
1730 NULL_TREE, NULL_TREE);
1733 if (DECL_CONTEXT (field_decl) != output_class)
1735 = build3 (COND_EXPR, TREE_TYPE (field_offset),
1736 build2 (EQ_EXPR, boolean_type_node,
1737 field_offset, integer_zero_node),
1738 build_call_nary (void_type_node,
1739 build_address_of (soft_nosuchfield_node),
1743 field_offset = fold (convert (sizetype, field_offset));
1744 self_value = java_check_reference (self_value, check);
1746 = fold_build2 (POINTER_PLUS_EXPR,
1747 TREE_TYPE (self_value),
1748 self_value, field_offset);
1749 address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
1751 return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
1754 self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1756 return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1757 self_value, field_decl, NULL_TREE);
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — definitions here are incomplete.  */
/* lookup_label: memoized LABEL_DECL for bytecode offset PC, keyed by an
   ASM_GENERATE_INTERNAL_LABEL "LJpc=" identifier; also tracks
   highest_label_pc_this_method.  generate_name / create_label_decl are
   its helpers.  note_label marks TARGET_PC as a jump target in
   instruction_bits.  expand_compare emits a COND_EXPR + GOTO_EXPR.  */
1762 lookup_label (int pc)
1766 if (pc > highest_label_pc_this_method)
1767 highest_label_pc_this_method = pc;
1768 ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc);
1769 name = get_identifier (buf);
1770 if (IDENTIFIER_LOCAL_VALUE (name))
1771 return IDENTIFIER_LOCAL_VALUE (name);
1774 /* The type of the address of a label is return_address_type_node. */
1775 tree decl = create_label_decl (name);
1776 LABEL_PC (decl) = pc;
1777 return pushdecl (decl);
1781 /* Generate a unique name for the purpose of loops and switches
1782 labels, and try-catch-finally blocks label or temporary variables. */
1785 generate_name (void)
1787 static int l_number = 0;
1789 ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
1791 return get_identifier (buff);
1795 create_label_decl (tree name)
1798 decl = build_decl (LABEL_DECL, name,
1799 TREE_TYPE (return_address_type_node));
1800 DECL_CONTEXT (decl) = current_function_decl;
1801 DECL_IGNORED_P (decl) = 1;
1805 /* This maps a bytecode offset (PC) to various flags. */
1806 char *instruction_bits;
1809 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1811 lookup_label (target_pc);
1812 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1815 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1816 where CONDITION is one of one the compare operators. */
1819 expand_compare (enum tree_code condition, tree value1, tree value2,
1822 tree target = lookup_label (target_pc);
1823 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1825 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1826 build1 (GOTO_EXPR, void_type_node, target),
1827 build_java_empty_stmt ()));
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — definitions here are incomplete.  */
/* expand_test: one-operand conditional branch against zero/null.
   expand_cond: two-operand branch; note the pops are in reverse order
   (comment at 1849-1850).  expand_java_goto: unconditional GOTO_EXPR.
   expand_java_switch/expand_java_add_case: build a SWITCH_EXPR whose
   body is CASE_LABEL_EXPR + GOTO pairs appended per case.  */
1830 /* Emit code for a TEST-type opcode. */
1833 expand_test (enum tree_code condition, tree type, int target_pc)
1835 tree value1, value2;
1836 flush_quick_stack ();
1837 value1 = pop_value (type);
1838 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1839 expand_compare (condition, value1, value2, target_pc);
1842 /* Emit code for a COND-type opcode. */
1845 expand_cond (enum tree_code condition, tree type, int target_pc)
1847 tree value1, value2;
1848 flush_quick_stack ();
1849 /* note: pop values in opposite order */
1850 value2 = pop_value (type);
1851 value1 = pop_value (type);
1852 /* Maybe should check value1 and value2 for type compatibility ??? */
1853 expand_compare (condition, value1, value2, target_pc);
1857 expand_java_goto (int target_pc)
1859 tree target_label = lookup_label (target_pc);
1860 flush_quick_stack ();
1861 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
1865 expand_java_switch (tree selector, int default_pc)
1867 tree switch_expr, x;
1869 flush_quick_stack ();
1870 switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1871 NULL_TREE, NULL_TREE);
1872 java_add_stmt (switch_expr);
1874 x = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, NULL_TREE,
1875 create_artificial_label ());
1876 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1878 x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1879 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1885 expand_java_add_case (tree switch_expr, int match, int target_pc)
1889 value = build_int_cst (TREE_TYPE (switch_expr), match);
1891 x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE,
1892 create_artificial_label ());
1893 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1895 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1896 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — definitions here are incomplete.  */
/* pop_arguments: recursively pops a call's arguments (last first) into a
   TREE_LIST, casting pointers and promoting narrow integrals per
   targetm.calls.promote_prototypes (rationale at 1910-1913).
   attach_init_test_initialization_flags: htab traversal callback that
   chains each init-test decl into a BIND_EXPR's (or legacy BLOCK's)
   variable list and prepends its DECL_EXPR to the body.  */
1900 pop_arguments (tree arg_types)
1902 if (arg_types == end_params_node)
1904 if (TREE_CODE (arg_types) == TREE_LIST)
1906 tree tail = pop_arguments (TREE_CHAIN (arg_types));
1907 tree type = TREE_VALUE (arg_types);
1908 tree arg = pop_value (type);
1910 /* We simply cast each argument to its proper type. This is
1911 needed since we lose type information coming out of the
1912 verifier. We also have to do this when we pop an integer
1913 type that must be promoted for the function call. */
1914 if (TREE_CODE (type) == POINTER_TYPE)
1915 arg = build1 (NOP_EXPR, type, arg);
1916 else if (targetm.calls.promote_prototypes (type)
1917 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1918 && INTEGRAL_TYPE_P (type))
1919 arg = convert (integer_type_node, arg);
1920 return tree_cons (NULL_TREE, arg, tail);
1925 /* Attach to PTR (a block) the declaration found in ENTRY. */
1928 attach_init_test_initialization_flags (void **entry, void *ptr)
1930 tree block = (tree)ptr;
1931 struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
1933 if (block != error_mark_node)
1935 if (TREE_CODE (block) == BIND_EXPR)
1937 tree body = BIND_EXPR_BODY (block);
1938 TREE_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1939 BIND_EXPR_VARS (block) = ite->value;
1940 body = build2 (COMPOUND_EXPR, void_type_node,
1941 build1 (DECL_EXPR, void_type_node, ite->value), body);
1942 BIND_EXPR_BODY (block) = body;
1946 tree body = BLOCK_SUBBLOCKS (block);
1947 TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1948 BLOCK_EXPR_DECLS (block) = ite->value;
1949 body = build2 (COMPOUND_EXPR, void_type_node,
1950 build1 (DECL_EXPR, void_type_node, ite->value), body);
1951 BLOCK_SUBBLOCKS (block) = body;
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — definitions here are incomplete.  */
/* Emits the lazy class-initialization sequence for CLAS: skipped when
   CLAS is current_class or a non-interface superclass (1971-1973);
   unconditional _Jv_InitClass call when always_initialize_class_p; else
   a per-function boolean flag decl guards the call (flag == false ->
   call, then flag = true; 2014-2021).  The flag decls are cached in
   DECL_FUNCTION_INIT_TEST_TABLE.  */
1958 /* Build an expression to initialize the class CLAS.
1959 if EXPR is non-NULL, returns an expression to first call the initializer
1960 (if it is needed) and then calls EXPR. */
1963 build_class_init (tree clas, tree expr)
1967 /* An optimization: if CLAS is a superclass of the class we're
1968 compiling, we don't need to initialize it. However, if CLAS is
1969 an interface, it won't necessarily be initialized, even if we
1971 if ((! CLASS_INTERFACE (TYPE_NAME (clas))
1972 && inherits_from_p (current_class, clas))
1973 || current_class == clas)
1976 if (always_initialize_class_p)
1978 init = build_call_nary (void_type_node,
1979 build_address_of (soft_initclass_node),
1980 1, build_class_ref (clas));
1981 TREE_SIDE_EFFECTS (init) = 1;
1985 tree *init_test_decl;
1987 init_test_decl = java_treetreehash_new
1988 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
1990 if (*init_test_decl == NULL)
1992 /* Build a declaration and mark it as a flag used to track
1993 static class initializations. */
1994 decl = build_decl (VAR_DECL, NULL_TREE,
1996 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
1997 LOCAL_CLASS_INITIALIZATION_FLAG (decl) = 1;
1998 DECL_CONTEXT (decl) = current_function_decl;
1999 DECL_FUNCTION_INIT_TEST_CLASS (decl) = clas;
2000 /* Tell the check-init code to ignore this decl when not
2001 optimizing class initialization. */
2002 if (!STATIC_CLASS_INIT_OPT_P ())
2003 DECL_BIT_INDEX (decl) = -1;
2004 DECL_INITIAL (decl) = boolean_false_node;
2005 /* Don't emit any symbolic debugging info for this decl. */
2006 DECL_IGNORED_P (decl) = 1;
2007 *init_test_decl = decl;
2010 init = build_call_nary (void_type_node,
2011 build_address_of (soft_initclass_node),
2012 1, build_class_ref (clas));
2013 TREE_SIDE_EFFECTS (init) = 1;
2014 init = build3 (COND_EXPR, void_type_node,
2015 build2 (EQ_EXPR, boolean_type_node,
2016 *init_test_decl, boolean_false_node),
2017 init, integer_zero_node);
2018 TREE_SIDE_EFFECTS (init) = 1;
2019 init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2020 build2 (MODIFY_EXPR, boolean_type_node,
2021 *init_test_decl, boolean_true_node));
2022 TREE_SIDE_EFFECTS (init) = 1;
2025 if (expr != NULL_TREE)
2027 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2028 TREE_SIDE_EFFECTS (expr) = 1;
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — the struct tag/typedef line for
   rewrite_rule and the `static tree` headers are not visible here.  */
/* Table-driven call rewriting: each rule matches (class, method, old
   signature), supplies the new signature/flags, and an arglist rewriter.
   rewrite_arglist_getcaller appends __builtin_return_address(0) and
   disables inlining of the caller (2065) so that address is meaningful;
   rewrite_arglist_getclass appends the compiled class's class object.  */
2036 /* Rewrite expensive calls that require stack unwinding at runtime to
2037 cheaper alternatives. The logic here performs these
2040 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2041 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
2047 const char *classname;
2049 const char *signature;
2050 const char *new_signature;
2052 tree (*rewrite_arglist) (tree arglist);
2055 /* Add __builtin_return_address(0) to the end of an arglist. */
2059 rewrite_arglist_getcaller (tree arglist)
2062 = build_call_expr (built_in_decls[BUILT_IN_RETURN_ADDRESS],
2063 1, integer_zero_node);
2065 DECL_INLINE (current_function_decl) = 0;
2067 return chainon (arglist,
2068 tree_cons (NULL_TREE, retaddr,
2072 /* Add this.class to the end of an arglist. */
2075 rewrite_arglist_getclass (tree arglist)
2077 return chainon (arglist,
2078 tree_cons (NULL_TREE, build_class_ref (output_class),
2082 static rewrite_rule rules[] =
2083 {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2084 "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2085 ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2086 {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2087 "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2088 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2089 {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2090 "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2091 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2092 {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2093 "()Ljava/lang/ClassLoader;",
2094 "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2095 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2097 {NULL, NULL, NULL, NULL, 0, NULL}};
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — definitions here are incomplete.  */
/* special_method_p: true when CANDIDATE_METHOD appears in rules[] (such
   private methods must still be exported from a DSO).
   maybe_rewrite_invocation: applies the matching rule in place — swaps
   in the (possibly newly added external) target method, rewrites the
   arglist and signature, and flags the call via *SPECIAL.  */
2099 /* True if this method is special, i.e. it's a private method that
2100 should be exported from a DSO. */
2103 special_method_p (tree candidate_method)
2105 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2106 tree method = DECL_NAME (candidate_method);
2109 for (p = rules; p->classname; p++)
2111 if (get_identifier (p->classname) == context
2112 && get_identifier (p->method) == method)
2118 /* Scan the rules list for replacements for *METHOD_P and replace the
2119 args accordingly. If the rewrite results in an access to a private
2120 method, update SPECIAL.*/
2123 maybe_rewrite_invocation (tree *method_p, tree *arg_list_p,
2124 tree *method_signature_p, tree *special)
2126 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2128 *special = NULL_TREE;
2130 for (p = rules; p->classname; p++)
2132 if (get_identifier (p->classname) == context)
2134 tree method = DECL_NAME (*method_p);
2135 if (get_identifier (p->method) == method
2136 && get_identifier (p->signature) == *method_signature_p)
2139 = lookup_java_method (DECL_CONTEXT (*method_p),
2141 get_identifier (p->new_signature));
2142 if (! maybe_method && ! flag_verify_invocations)
2145 = add_method (DECL_CONTEXT (*method_p), p->flags,
2146 method, get_identifier (p->new_signature));
2147 DECL_EXTERNAL (maybe_method) = 1;
2149 *method_p = maybe_method;
2150 gcc_assert (*method_p);
2151 *arg_list_p = p->rewrite_arglist (*arg_list_p);
2152 *method_signature_p = get_identifier (p->new_signature);
2153 *special = integer_one_node;
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — definitions here are incomplete.  */
/* Reference to a statically known method: direct ADDR_EXPR when the
   class is compiled and direct calls are allowed; atable indirection
   under -findirect-dispatch (2184-2194); otherwise computes
   SELF_TYPE->methods[METHOD_INDEX].ncode at runtime (comment at
   2198-2201), locating the method by walking TYPE_METHODS and scaling
   by sizeof (method_type_node).  */
2164 build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2165 tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2166 tree arg_list ATTRIBUTE_UNUSED, tree special)
2169 if (is_compiled_class (self_type))
2171 /* With indirect dispatch we have to use indirect calls for all
2172 publicly visible methods or gcc will use PLT indirections
2173 to reach them. We also have to use indirect dispatch for all
2174 external methods. */
2175 if (! flag_indirect_dispatch
2176 || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
2178 func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
2184 = build_int_cst (NULL_TREE,
2185 (get_symbol_table_index
2187 &TYPE_ATABLE_METHODS (output_class))));
2189 = build4 (ARRAY_REF,
2190 TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2191 TYPE_ATABLE_DECL (output_class), table_index,
2192 NULL_TREE, NULL_TREE);
2194 func = convert (method_ptr_type_node, func);
2198 /* We don't know whether the method has been (statically) compiled.
2199 Compile this code to get a reference to the method's code:
2201 SELF_TYPE->methods[METHOD_INDEX].ncode
2205 int method_index = 0;
2208 /* The method might actually be declared in some superclass, so
2209 we have to use its class context, not the caller's notion of
2210 where the method is. */
2211 self_type = DECL_CONTEXT (method);
2212 ref = build_class_ref (self_type);
2213 ref = build1 (INDIRECT_REF, class_type_node, ref);
2214 if (ncode_ident == NULL_TREE)
2215 ncode_ident = get_identifier ("ncode");
2216 if (methods_ident == NULL_TREE)
2217 methods_ident = get_identifier ("methods");
2218 ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2219 lookup_field (&class_type_node, methods_ident),
2221 for (meth = TYPE_METHODS (self_type);
2222 ; meth = TREE_CHAIN (meth))
2226 if (meth == NULL_TREE)
2227 fatal_error ("method '%s' not found in class",
2228 IDENTIFIER_POINTER (DECL_NAME (method)));
2231 method_index *= int_size_in_bytes (method_type_node);
2232 ref = fold_build2 (POINTER_PLUS_EXPR, method_ptr_type_node,
2233 ref, size_int (method_index));
2234 ref = build1 (INDIRECT_REF, method_type_node, ref);
2235 func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2236 ref, lookup_field (&method_type_node, ncode_ident),
/* NOTE(review): numbered-listing fragment; gaps in the left-hand line
   numbers indicate missing source lines — definitions here are incomplete.  */
/* invoke_build_dtable: fetches the vtable of the receiver (first arg);
   for invokeinterface on an array receiver it uses java.lang.Object's
   class instead (comment at 2249-2251).
   get_symbol_table_index: linear search of the (special, decl) pairs in
   *SYMBOL_TABLE, appending a new TREE_LIST node when absent; the index
   returned is computed in lines missing from this capture.  */
2243 invoke_build_dtable (int is_invoke_interface, tree arg_list)
2245 tree dtable, objectref;
2247 TREE_VALUE (arg_list) = save_expr (TREE_VALUE (arg_list));
2249 /* If we're dealing with interfaces and if the objectref
2250 argument is an array then get the dispatch table of the class
2251 Object rather than the one from the objectref. */
2252 objectref = (is_invoke_interface
2253 && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list)))
2254 ? build_class_ref (object_type_node) : TREE_VALUE (arg_list));
2256 if (dtable_ident == NULL_TREE)
2257 dtable_ident = get_identifier ("vtable");
2258 dtable = build_java_indirect_ref (object_type_node, objectref,
2259 flag_check_references);
2260 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2261 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2266 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2267 T. If this decl has not been seen before, it will be added to the
2268 [oa]table_methods. If it has, the existing table slot will be
2272 get_symbol_table_index (tree t, tree special, tree *symbol_table)
2277 if (*symbol_table == NULL_TREE)
2279 *symbol_table = build_tree_list (special, t);
2283 method_list = *symbol_table;
2287 tree value = TREE_VALUE (method_list);
2288 tree purpose = TREE_PURPOSE (method_list);
2289 if (value == t && purpose == special)
2292 if (TREE_CHAIN (method_list) == NULL_TREE)
2295 method_list = TREE_CHAIN (method_list);
2298 TREE_CHAIN (method_list) = build_tree_list (special, t);
/* Build the tree for a virtual dispatch through vtable DTABLE to
   METHOD.  Returns the function-pointer expression to call.
   NOTE(review): interior lines are elided in this listing (the
   declarations of method_index/otable_index and several closing
   braces are missing); code lines are kept byte-identical.  */
2303 build_invokevirtual (tree dtable, tree method, tree special)
2306 tree nativecode_ptr_ptr_type_node
2307 = build_pointer_type (nativecode_ptr_type_node);
/* BC (indirect-dispatch) mode: index the otable instead of reading
   DECL_VINDEX directly; interfaces never take this path.  */
2311 if (flag_indirect_dispatch)
2313 gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
2316 = build_int_cst (NULL_TREE, get_symbol_table_index
2318 &TYPE_OTABLE_METHODS (output_class)));
2319 method_index = build4 (ARRAY_REF, integer_type_node,
2320 TYPE_OTABLE_DECL (output_class),
2321 otable_index, NULL_TREE, NULL_TREE);
2325 /* We fetch the DECL_VINDEX field directly here, rather than
2326 using get_method_index(). DECL_VINDEX is the true offset
2327 from the vtable base to a method, regardless of any extra
2328 words inserted at the start of the vtable. */
2329 method_index = DECL_VINDEX (method);
/* Scale the slot index to a byte offset; descriptor-based vtables
   (e.g. ia64-style) use TARGET_VTABLE_USES_DESCRIPTORS words/slot.  */
2330 method_index = size_binop (MULT_EXPR, method_index,
2331 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
2332 if (TARGET_VTABLE_USES_DESCRIPTORS)
2333 method_index = size_binop (MULT_EXPR, method_index,
2334 size_int (TARGET_VTABLE_USES_DESCRIPTORS));
2337 func = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dtable), dtable,
2338 convert (sizetype, method_index));
/* With descriptors the slot address itself is the "function pointer";
   otherwise load the code pointer out of the vtable slot.  */
2340 if (TARGET_VTABLE_USES_DESCRIPTORS)
2341 func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
2344 func = fold_convert (nativecode_ptr_ptr_type_node, func);
2345 func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
/* Cached identifier for the vtable's "class" field; GTY so the GC
   roots it across collections.  */
2351 static GTY(()) tree class_ident;
/* Build the tree for an interface-method dispatch on the object whose
   vtable is DTABLE.  NOTE(review): interior lines are elided in this
   listing (return type, braces, and the else/if structure around the
   two ARRAY_REFs are missing); code lines are kept byte-identical.  */
2353 build_invokeinterface (tree dtable, tree method)
2358 /* We expand invokeinterface here. */
2360 if (class_ident == NULL_TREE)
2361 class_ident = get_identifier ("class");
/* Fetch the object's Class pointer out of its vtable.  */
2363 dtable = build_java_indirect_ref (dtable_type, dtable,
2364 flag_check_references);
2365 dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2366 lookup_field (&dtable_type, class_ident), NULL_TREE);
2368 interface = DECL_CONTEXT (method);
2369 gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2370 layout_class_methods (interface);
/* BC mode: itable entries come in (interface, index) pairs, hence the
   factor of 2 and the itable_index-1 / itable_index pair below.  */
2372 if (flag_indirect_dispatch)
2375 = 2 * (get_symbol_table_index
2376 (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2378 = build4 (ARRAY_REF,
2379 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2380 TYPE_ITABLE_DECL (output_class),
2381 build_int_cst (NULL_TREE, itable_index-1),
2382 NULL_TREE, NULL_TREE);
2384 = build4 (ARRAY_REF,
2385 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2386 TYPE_ITABLE_DECL (output_class),
2387 build_int_cst (NULL_TREE, itable_index),
2388 NULL_TREE, NULL_TREE);
2389 interface = convert (class_ptr_type, interface);
2390 idx = convert (integer_type_node, idx);
/* Non-BC mode: direct index into the interface's method table.  */
2394 idx = build_int_cst (NULL_TREE,
2395 get_interface_method_index (method, interface));
2396 interface = build_class_ref (interface);
/* Runtime lookup: _Jv_LookupInterfaceMethodIdx-style helper taking
   (class, interface, index).  */
2399 return build_call_nary (ptr_type_node,
2400 build_address_of (soft_lookupinterfacemethod_node),
2401 3, dtable, interface, idx);
2404 /* Expand one of the invoke_* opcodes.
2405 OPCODE is the specific opcode.
2406 METHOD_REF_INDEX is an index into the constant pool.
2407 NARGS is the number of arguments, or -1 if not specified. */
/* NOTE(review): interior lines are elided throughout this listing;
   code lines are kept byte-identical.  '¤t_jcf' below looks like
   mojibake for '&current_jcf' — verify against the real source.  */
2410 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
2412 tree method_signature
2413 = COMPONENT_REF_SIGNATURE(¤t_jcf->cpool, method_ref_index);
2414 tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool,
2417 = get_class_constant (current_jcf,
2418 COMPONENT_REF_CLASS_INDEX(¤t_jcf->cpool,
2420 const char *const self_name
2421 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2422 tree call, func, method, arg_list, method_type;
2423 tree check = NULL_TREE;
2425 tree special = NULL_TREE;
/* Make sure the target class is loaded and laid out before lookup.  */
2427 if (! CLASS_LOADED_P (self_type))
2429 load_class (self_type, 1);
2430 safe_layout_class (self_type);
2431 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2432 fatal_error ("failed to find class '%s'", self_name);
2434 layout_class_methods (self_type);
/* <init> goes through the constructor lookup; everything else by
   name + signature.  */
2436 if (ID_INIT_P (method_name))
2437 method = lookup_java_constructor (self_type, method_signature);
2439 method = lookup_java_method (self_type, method_name, method_signature);
2441 /* We've found a method in a class other than the one in which it
2442 was wanted. This can happen if, for instance, we're trying to
2443 compile invokespecial super.equals().
2444 FIXME: This is a kludge. Rather than nullifying the result, we
2445 should change lookup_java_method() so that it doesn't search the
2446 superclass chain when we're BC-compiling. */
2447 if (! flag_verify_invocations
2449 && ! TYPE_ARRAY_P (self_type)
2450 && self_type != DECL_CONTEXT (method))
2453 /* We've found a method in an interface, but this isn't an interface
2455 if (opcode != OPCODE_invokeinterface
2457 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2460 /* We've found a non-interface method but we are making an
2461 interface call. This can happen if the interface overrides a
2462 method in Object. */
2463 if (! flag_verify_invocations
2464 && opcode == OPCODE_invokeinterface
2466 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
/* Lookup failed.  Under -fverify or direct dispatch this is a hard
   error; otherwise fabricate a dummy METHOD_DUMMY decl so that
   BC-compilation can resolve it at run time.  */
2469 if (method == NULL_TREE)
2471 if (flag_verify_invocations || ! flag_indirect_dispatch)
2473 error ("class '%s' has no method named '%s' matching signature '%s'",
2475 IDENTIFIER_POINTER (method_name),
2476 IDENTIFIER_POINTER (method_signature));
2480 int flags = ACC_PUBLIC;
2481 if (opcode == OPCODE_invokestatic)
2482 flags |= ACC_STATIC;
2483 if (opcode == OPCODE_invokeinterface)
2485 flags |= ACC_INTERFACE | ACC_ABSTRACT;
2486 CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2488 method = add_method (self_type, flags, method_name,
2490 DECL_ARTIFICIAL (method) = 1;
2491 METHOD_DUMMY (method) = 1;
2492 layout_class_method (self_type, NULL,
2497 /* Invoke static can't invoke static/abstract method */
2498 if (method != NULL_TREE)
2500 if (opcode == OPCODE_invokestatic)
2502 if (!METHOD_STATIC (method))
2504 error ("invokestatic on non static method");
2507 else if (METHOD_ABSTRACT (method))
2509 error ("invokestatic on abstract method");
2515 if (METHOD_STATIC (method))
2517 error ("invoke[non-static] on static method");
/* After an error above: pop the would-be arguments and receiver, and
   push a typed zero so the abstract stack stays consistent.  */
2523 if (method == NULL_TREE)
2525 /* If we got here, we emitted an error message above. So we
2526 just pop the arguments, push a properly-typed zero, and
2528 method_type = get_type_from_signature (method_signature);
2529 pop_arguments (TYPE_ARG_TYPES (method_type));
2530 if (opcode != OPCODE_invokestatic)
2531 pop_type (self_type);
2532 method_type = promote_type (TREE_TYPE (method_type));
2533 push_value (convert (method_type, integer_zero_node));
2537 method_type = TREE_TYPE (method);
2538 arg_list = pop_arguments (TYPE_ARG_TYPES (method_type));
2539 flush_quick_stack ();
2541 maybe_rewrite_invocation (&method, &arg_list, &method_signature,
/* Choose the dispatch strategy: static and effectively-final calls
   bind directly; otherwise go through the vtable/itable builders.  */
2545 if (opcode == OPCODE_invokestatic)
2546 func = build_known_method_ref (method, method_type, self_type,
2547 method_signature, arg_list, special);
2548 else if (opcode == OPCODE_invokespecial
2549 || (opcode == OPCODE_invokevirtual
2550 && (METHOD_PRIVATE (method)
2551 || METHOD_FINAL (method)
2552 || CLASS_FINAL (TYPE_NAME (self_type)))))
2554 /* If the object for the method call is null, we throw an
2555 exception. We don't do this if the object is the current
2556 method's `this'. In other cases we just rely on an
2557 optimization pass to eliminate redundant checks. FIXME:
2558 Unfortunately there doesn't seem to be a way to determine
2559 what the current method is right now.
2560 We do omit the check if we're calling <init>. */
2561 /* We use a SAVE_EXPR here to make sure we only evaluate
2562 the new `self' expression once. */
2563 tree save_arg = save_expr (TREE_VALUE (arg_list));
2564 TREE_VALUE (arg_list) = save_arg;
2565 check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2566 func = build_known_method_ref (method, method_type, self_type,
2567 method_signature, arg_list, special);
2571 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2573 if (opcode == OPCODE_invokevirtual)
2574 func = build_invokevirtual (dtable, method, special);
2576 func = build_invokeinterface (dtable, method);
2579 if (TREE_CODE (func) == ADDR_EXPR)
2580 TREE_TYPE (func) = build_pointer_type (method_type);
2582 func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2584 call = build_call_list (TREE_TYPE (method_type), func, arg_list);
2585 TREE_SIDE_EFFECTS (call) = 1;
2586 call = check_for_builtin (method, call);
/* Sequence the null check (if any) before the call itself.  */
2588 if (check != NULL_TREE)
2590 call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2591 TREE_SIDE_EFFECTS (call) = 1;
/* void results are emitted as statements; non-void results presumably
   get pushed on the quick stack (elided here) — verify in full source.  */
2594 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2595 java_add_stmt (call);
2599 flush_quick_stack ();
2603 /* Create a stub which will be put into the vtable but which will call
/* NOTE(review): the rest of this header comment and many interior
   lines are elided in this listing; code lines are kept
   byte-identical.  The stub wraps a native (JNI) METHOD: it acquires
   a JNIEnv frame, looks up the JNI function, calls it, pops the
   frame, and returns the (possibly unwrapped) result.  */
2607 build_jni_stub (tree method)
2609 tree jnifunc, call, args, body, method_sig, arg_types;
2610 tree jniarg0, jniarg1, jniarg2, jniarg3;
2611 tree jni_func_type, tem;
2612 tree env_var, res_var = NULL_TREE, block;
2613 tree method_args, res_type;
2619 tree klass = DECL_CONTEXT (method);
2620 int from_class = ! CLASS_FROM_SOURCE_P (klass);
2621 klass = build_class_ref (klass);
2623 gcc_assert (METHOD_NATIVE (method) && flag_jni);
2625 DECL_ARTIFICIAL (method) = 1;
2626 DECL_EXTERNAL (method) = 0;
/* Local `env': the JNIEnv* for this invocation.  */
2628 env_var = build_decl (VAR_DECL, get_identifier ("env"), ptr_type_node);
2629 DECL_CONTEXT (env_var) = method;
/* Local `res': holds the JNI call's result for non-void methods.  */
2631 if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2633 res_var = build_decl (VAR_DECL, get_identifier ("res"),
2634 TREE_TYPE (TREE_TYPE (method)));
2635 DECL_CONTEXT (res_var) = method;
2636 TREE_CHAIN (env_var) = res_var;
/* Static `meth': caches the looked-up JNI function pointer so the
   lookup happens only on the first call.  */
2639 meth_var = build_decl (VAR_DECL, get_identifier ("meth"), ptr_type_node);
2640 TREE_STATIC (meth_var) = 1;
2641 TREE_PUBLIC (meth_var) = 0;
2642 DECL_EXTERNAL (meth_var) = 0;
2643 DECL_CONTEXT (meth_var) = method;
2644 DECL_ARTIFICIAL (meth_var) = 1;
2645 DECL_INITIAL (meth_var) = null_pointer_node;
2646 TREE_USED (meth_var) = 1;
2647 chainon (env_var, meth_var);
2648 build_result_decl (method);
2650 /* One strange way that the front ends are different is that they
2651 store arguments differently. */
2653 method_args = DECL_ARGUMENTS (method);
2655 method_args = BLOCK_EXPR_DECLS (DECL_FUNCTION_BODY (method));
2656 block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2657 TREE_SIDE_EFFECTS (block) = 1;
2658 /* When compiling from source we don't set the type of the block,
2659 because that will prevent patch_return from ever being run. */
2661 TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));
2663 /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2664 body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2665 build_call_nary (ptr_type_node,
2666 build_address_of (soft_getjnienvnewframe_node),
2668 CAN_COMPLETE_NORMALLY (body) = 1;
2670 /* All the arguments to this method become arguments to the
2671 underlying JNI function. If we had to wrap object arguments in a
2672 special way, we would do that here. */
/* Accumulate each argument and its (PARM_BOUNDARY-rounded) size in
   bytes; args_size is later passed to the runtime lookup helper.  */
2674 for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem))
2676 int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2677 #ifdef PARM_BOUNDARY
2678 arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2681 args_size += (arg_bits / BITS_PER_UNIT);
2683 args = tree_cons (NULL_TREE, tem, args);
2685 args = nreverse (args);
2686 arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2688 /* For a static method the second argument is the class. For a
2689 non-static method the second argument is `this'; that is already
2690 available in the argument list. */
2691 if (METHOD_STATIC (method))
2693 args_size += int_size_in_bytes (TREE_TYPE (klass));
2694 args = tree_cons (NULL_TREE, klass, args);
2695 arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2698 /* The JNIEnv structure is the first argument to the JNI function. */
2699 args_size += int_size_in_bytes (TREE_TYPE (env_var));
2700 args = tree_cons (NULL_TREE, env_var, args);
2701 arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2703 /* We call _Jv_LookupJNIMethod to find the actual underlying
2704 function pointer. _Jv_LookupJNIMethod will throw the appropriate
2705 exception if this function is not found at runtime. */
2706 method_sig = build_java_signature (TREE_TYPE (method));
2708 jniarg1 = build_utf8_ref (DECL_NAME (method));
2709 jniarg2 = build_utf8_ref (unmangle_classname
2710 (IDENTIFIER_POINTER (method_sig),
2711 IDENTIFIER_LENGTH (method_sig)));
2712 jniarg3 = build_int_cst (NULL_TREE, args_size);
2714 tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2716 #ifdef MODIFY_JNI_METHOD_CALL
2717 tem = MODIFY_JNI_METHOD_CALL (tem);
2720 jni_func_type = build_pointer_type (tem);
/* jnifunc = (meth != NULL) ? meth : (meth = _Jv_LookupJNIMethod(...))
   — i.e. lazily cache the lookup in the static `meth' variable.  */
2722 jnifunc = build3 (COND_EXPR, ptr_type_node,
2723 build2 (NE_EXPR, boolean_type_node,
2724 meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
2726 build2 (MODIFY_EXPR, ptr_type_node, meth_var,
2727 build_call_nary (ptr_type_node,
2729 (soft_lookupjnimethod_node),
2732 jniarg2, jniarg3)));
2734 /* Now we make the actual JNI call via the resulting function
2736 call = build_call_list (TREE_TYPE (TREE_TYPE (method)),
2737 build1 (NOP_EXPR, jni_func_type, jnifunc),
2740 /* If the JNI call returned a result, capture it here. If we had to
2741 unwrap JNI object results, we would do that here. */
2742 if (res_var != NULL_TREE)
2744 /* If the call returns an object, it may return a JNI weak
2745 reference, in which case we must unwrap it. */
2746 if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2747 call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2748 build_address_of (soft_unwrapjni_node),
2750 call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2754 TREE_SIDE_EFFECTS (call) = 1;
2755 CAN_COMPLETE_NORMALLY (call) = 1;
2757 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2758 TREE_SIDE_EFFECTS (body) = 1;
2760 /* Now free the environment we allocated. */
2761 call = build_call_nary (ptr_type_node,
2762 build_address_of (soft_jnipopsystemframe_node),
2764 TREE_SIDE_EFFECTS (call) = 1;
2765 CAN_COMPLETE_NORMALLY (call) = 1;
2766 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2767 TREE_SIDE_EFFECTS (body) = 1;
2769 /* Finally, do the return. */
2770 res_type = void_type_node;
2771 if (res_var != NULL_TREE)
2774 gcc_assert (DECL_RESULT (method));
2775 /* Make sure we copy the result variable to the actual
2776 result. We use the type of the DECL_RESULT because it
2777 might be different from the return type of the function:
2778 it might be promoted. */
2779 drt = TREE_TYPE (DECL_RESULT (method));
2780 if (drt != TREE_TYPE (res_var))
2781 res_var = build1 (CONVERT_EXPR, drt, res_var);
2782 res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2783 TREE_SIDE_EFFECTS (res_var) = 1;
2786 body = build2 (COMPOUND_EXPR, void_type_node, body,
2787 build1 (RETURN_EXPR, void_type_node, res_var));
2788 TREE_SIDE_EFFECTS (body) = 1;
2790 /* Prepend class initialization for static methods reachable from
/* ... other classes (comment continues in the elided source).  */
2792 if (METHOD_STATIC (method)
2793 && (! METHOD_PRIVATE (method)
2794 || INNER_CLASS_P (DECL_CONTEXT (method))))
2796 tree init = build_call_expr (soft_initclass_node, 1,
2798 body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2799 TREE_SIDE_EFFECTS (body) = 1;
/* Wrap everything in a BIND_EXPR carrying the locals declared above.  */
2802 bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2808 /* Given lvalue EXP, return a volatile expression that references the
/* ... same storage (comment is cut off by elided lines).  Implemented
   as: take EXP's address, cast it to pointer-to-volatile-T, and
   re-dereference, so every access through the result is volatile.  */
2812 java_modify_addr_for_volatile (tree exp)
2814 tree exp_type = TREE_TYPE (exp);
2816 = build_qualified_type (exp_type,
2817 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2818 tree addr = build_fold_addr_expr (exp);
2819 v_type = build_pointer_type (v_type);
2820 addr = fold_convert (v_type, addr);
2821 exp = build_fold_indirect_ref (addr);
2826 /* Expand an operation to extract from or store into a field.
2827 IS_STATIC is 1 iff the field is static.
2828 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2829 FIELD_REF_INDEX is an index into the constant pool. */
/* NOTE(review): interior lines are elided in this listing; code lines
   are kept byte-identical.  '¤t_jcf' below looks like mojibake
   for '&current_jcf' — verify against the real source.  */
2832 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
2835 = get_class_constant (current_jcf,
2836 COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool,
2838 const char *self_name
2839 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2840 tree field_name = COMPONENT_REF_NAME (¤t_jcf->cpool, field_ref_index);
2841 tree field_signature = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool,
2843 tree field_type = get_type_from_signature (field_signature);
/* For putfield/putstatic the value to store is on the stack.  */
2844 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2847 tree original_self_type = self_type;
2851 if (! CLASS_LOADED_P (self_type))
2852 load_class (self_type, 1);
2853 field_decl = lookup_field (&self_type, field_name);
2854 if (field_decl == error_mark_node)
/* Field not found: without -fverify, fabricate an artificial decl so
   BC-compilation can resolve it at run time; otherwise error out.  */
2858 else if (field_decl == NULL_TREE)
2860 if (! flag_verify_invocations)
2862 int flags = ACC_PUBLIC;
2864 flags |= ACC_STATIC;
2865 self_type = original_self_type;
2866 field_decl = add_field (original_self_type, field_name,
2868 DECL_ARTIFICIAL (field_decl) = 1;
2869 DECL_IGNORED_P (field_decl) = 1;
2871 /* FIXME: We should be pessimistic about volatility. We
2872 don't know one way or another, but this is safe.
2873 However, doing this has bad effects on code quality. We
2874 need to look at better ways to do this. */
2875 TREE_THIS_VOLATILE (field_decl) = 1;
2880 error ("missing field '%s' in '%s'",
2881 IDENTIFIER_POINTER (field_name), self_name);
2885 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2887 error ("mismatching signature for field '%s' in '%s'",
2888 IDENTIFIER_POINTER (field_name), self_name);
/* For instance fields the object reference is on the stack.  */
2891 field_ref = is_static ? NULL_TREE : pop_value (self_type);
2895 push_value (convert (field_type, integer_zero_node));
2896 flush_quick_stack ();
2900 field_ref = build_field_ref (field_ref, self_type, field_name);
/* Static field access may need to trigger class initialization of the
   declaring class (unless done indirectly in BC mode).  */
2902 && ! flag_indirect_dispatch)
2904 tree context = DECL_CONTEXT (field_ref);
2905 if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2906 field_ref = build_class_init (context, field_ref);
2908 field_ref = build_class_init (self_type, field_ref);
/* Store path (putfield/putstatic).  */
2912 flush_quick_stack ();
2913 if (FIELD_FINAL (field_decl))
2915 if (DECL_CONTEXT (field_decl) != current_class)
2916 error ("assignment to final field %q+D not in field's class",
2918 /* We used to check for assignments to final fields not
2919 occurring in the class initializer or in a constructor
2920 here. However, this constraint doesn't seem to be
2921 enforced by the JVM. */
2924 if (TREE_THIS_VOLATILE (field_decl))
2925 field_ref = java_modify_addr_for_volatile (field_ref);
2927 modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2928 field_ref, new_value);
/* Volatile stores get a memory barrier (__sync_synchronize).  */
2930 if (TREE_THIS_VOLATILE (field_decl))
2932 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2934 java_add_stmt (modify_expr);
/* Load path (getfield/getstatic): read into a temp, barrier if the
   field is volatile, then presumably push the temp (elided).  */
2938 tree temp = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2939 java_add_local_var (temp);
2941 if (TREE_THIS_VOLATILE (field_decl))
2942 field_ref = java_modify_addr_for_volatile (field_ref);
2945 = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2946 java_add_stmt (modify_expr);
2948 if (TREE_THIS_VOLATILE (field_decl))
2950 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2954 TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
/* Restore the verifier's recorded type state at LABEL: reset the
   abstract stack pointer and copy the saved per-slot types back into
   type_map[].  (Return type/braces elided in this listing.)  */
2958 load_type_state (tree label)
2961 tree vec = LABEL_TYPE_STATE (label);
2962 int cur_length = TREE_VEC_LENGTH (vec);
/* The vector holds locals followed by stack slots; the stack depth is
   the total length minus the method's max-locals count.  */
2963 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2964 for (i = 0; i < cur_length; i++)
2965 type_map [i] = TREE_VEC_ELT (vec, i);
2968 /* Go over METHOD's bytecode and note instruction starts in
2969 instruction_bits[]. */
/* NOTE(review): interior lines are elided in this listing, including
   several backslash-continued macro bodies; code lines are kept
   byte-identical.  This is a pre-pass over the bytecode: it marks
   instruction start PCs and jump targets (via NOTE_LABEL) before the
   real translation pass in expand_byte_code.  */
2972 note_instructions (JCF *jcf, tree method)
2975 unsigned char* byte_ops;
2976 long length = DECL_CODE_LENGTH (method);
2981 #undef RET /* Defined by config/i386/i386.h */
/* Type-node aliases used by the operand macros from javaop.def.  */
2983 #define BCODE byte_ops
2984 #define BYTE_type_node byte_type_node
2985 #define SHORT_type_node short_type_node
2986 #define INT_type_node int_type_node
2987 #define LONG_type_node long_type_node
2988 #define CHAR_type_node char_type_node
2989 #define PTR_type_node ptr_type_node
2990 #define FLOAT_type_node float_type_node
2991 #define DOUBLE_type_node double_type_node
2992 #define VOID_type_node void_type_node
2993 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
2994 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
2995 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
2996 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
2998 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
3000 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3001 byte_ops = jcf->read_ptr;
/* (Re)allocate one flag byte per bytecode PC, zero-initialized.  */
3002 instruction_bits = xrealloc (instruction_bits, length + 1)
3003 memset (instruction_bits, 0, length + 1);
3005 /* This pass figures out which PC can be the targets of jumps. */
3006 for (PC = 0; PC < length;)
3008 int oldpc = PC; /* PC at instruction start. */
3009 instruction_bits [PC] |= BCODE_INSTRUCTION_START;
3010 switch (byte_ops[PC++])
3012 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3014 PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3017 #define NOTE_LABEL(PC) note_label(oldpc, PC)
3019 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3020 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3021 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3022 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3023 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3024 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3025 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3026 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3028 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3029 PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3030 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3031 ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3032 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3033 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3034 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3035 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3037 /* two forms of wide instructions */
3038 #define PRE_SPECIAL_WIDE(IGNORE) \
3040 int modified_opcode = IMMEDIATE_u1; \
3041 if (modified_opcode == OPCODE_iinc) \
3043 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3044 (void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
3048 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3052 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3054 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3056 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3057 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3058 PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3059 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
3060 #define PRE_ARRAY_STORE(TYPE) /* nothing */
3061 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3062 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3063 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3064 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3065 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
3067 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3068 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3069 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3070 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3071 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3072 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3073 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3075 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3077 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)
3079 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3080 PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3082 #define PRE_LOOKUP_SWITCH \
3083 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3084 NOTE_LABEL (default_offset+oldpc); \
3086 while (--npairs >= 0) { \
3087 jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
3088 jint offset = IMMEDIATE_s4; \
3089 NOTE_LABEL (offset+oldpc); } \
3092 #define PRE_TABLE_SWITCH \
3093 { jint default_offset = IMMEDIATE_s4; \
3094 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3095 NOTE_LABEL (default_offset+oldpc); \
3097 while (low++ <= high) { \
3098 jint offset = IMMEDIATE_s4; \
3099 NOTE_LABEL (offset+oldpc); } \
3102 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3103 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3104 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3105 (void)(IMMEDIATE_u2); \
3106 PC += 2 * IS_INTERFACE /* for invokeinterface */;
3108 #include "javaop.def"
/* Translate METHOD's bytecode into GENERIC trees.  Runs the verifier,
   then walks the bytecode emitting labels, line numbers, and one tree
   statement sequence per instruction (via process_jvm_instruction).
   NOTE(review): interior lines are elided in this listing; code lines
   are kept byte-identical.  */
3115 expand_byte_code (JCF *jcf, tree method)
3119 const unsigned char *linenumber_pointer;
3120 int dead_code_index = -1;
3121 unsigned char* byte_ops;
3122 long length = DECL_CODE_LENGTH (method);
3125 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3126 byte_ops = jcf->read_ptr;
3128 /* We make an initial pass of the line number table, to note
3129 which instructions have associated line number entries. */
3130 linenumber_pointer = linenumber_table;
3131 for (i = 0; i < linenumber_count; i++)
3133 int pc = GET_u2 (linenumber_pointer);
3134 linenumber_pointer += 4;
3136 warning (0, "invalid PC in line number table");
/* A second entry for the same PC marks it as multi-line-numbered.  */
3139 if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3140 instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3141 instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
/* Bytecode verification; on failure the method is presumably not
   expanded further (the early-exit line is elided here).  */
3145 if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3148 promote_arguments ();
3149 cache_this_class_ref (method);
3150 cache_cpool_data_ref ();
3152 /* Translate bytecodes. */
3153 linenumber_pointer = linenumber_table;
3154 for (PC = 0; PC < length;)
/* Emit a LABEL_EXPR at every branch target and reload the verifier's
   type state there (and at PC 0, the method entry).  */
3156 if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3158 tree label = lookup_label (PC);
3159 flush_quick_stack ();
3160 if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3161 java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
3162 if (LABEL_VERIFIED (label) || PC == 0)
3163 load_type_state (label);
3166 if (! (instruction_bits [PC] & BCODE_VERIFIED))
3168 if (dead_code_index == -1)
3170 /* This is the start of a region of unreachable bytecodes.
3171 They still need to be processed in order for EH ranges
3172 to get handled correctly. However, we can simply
3173 replace these bytecodes with nops. */
3174 dead_code_index = PC;
3177 /* Turn this bytecode into a nop. */
3182 if (dead_code_index != -1)
3184 /* We've just reached the end of a region of dead code. */
3186 warning (0, "unreachable bytecode from %d to before %d",
3187 dead_code_index, PC);
3188 dead_code_index = -1;
3192 /* Handle possible line number entry for this PC.
3194 This code handles out-of-order and multiple linenumbers per PC,
3195 but is optimized for the case of line numbers increasing
3196 monotonically with PC. */
3197 if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
3199 if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3200 || GET_u2 (linenumber_pointer) != PC)
3201 linenumber_pointer = linenumber_table;
3202 while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3204 int pc = GET_u2 (linenumber_pointer);
3205 linenumber_pointer += 4;
3208 int line = GET_u2 (linenumber_pointer - 2);
3209 #ifdef USE_MAPPED_LOCATION
3210 input_location = linemap_line_start (&line_table, line, 1);
3212 input_location.line = line;
3214 if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
/* Track Java scope boundaries around each instruction.  */
3219 maybe_pushlevels (PC);
3220 PC = process_jvm_instruction (PC, byte_ops, length);
3221 maybe_poplevels (PC);
3224 uncache_this_class_ref (method);
3226 if (dead_code_index != -1)
3228 /* We've just reached the end of a region of dead code. */
3230 warning (0, "unreachable bytecode from %d to the end of the method",
/* Push constant-pool entry INDEX of JCF onto the abstract stack:
   CONSTANT_String becomes a String reference, CONSTANT_Class /
   CONSTANT_ResolvedClass becomes a java.lang.Class reference, and any
   other tag is fetched via get_constant.  (Return type, braces, and
   the final push are elided in this listing.)  */
3236 java_push_constant_from_pool (JCF *jcf, int index)
3239 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3242 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3243 index = alloc_name_constant (CONSTANT_String, name);
3244 c = build_ref_from_constant_pool (index);
/* Strings are promoted to the reference type callers expect.  */
3245 c = convert (promote_type (string_type_node), c);
3247 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3248 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3250 tree record = get_class_constant (jcf, index);
3251 c = build_class_ref (record);
3254 c = get_constant (jcf, index);
3259 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3260 long length ATTRIBUTE_UNUSED)
3262 const char *opname; /* Temporary ??? */
3263 int oldpc = PC; /* PC at instruction start. */
3265 /* If the instruction is at the beginning of an exception handler,
3266 replace the top of the stack with the thrown object reference. */
3267 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3269 /* Note that the verifier will not emit a type map at all for
3270 dead exception handlers. In this case we just ignore the
3272 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3274 tree type = pop_type (promote_type (throwable_type_node));
3275 push_value (build_exception_object_ref (type));
3279 switch (byte_ops[PC++])
3281 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3284 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3287 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3289 int saw_index = 0; \
3290 int index = OPERAND_VALUE; \
3292 (find_local_variable (index, return_address_type_node, oldpc)); \
3295 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3297 /* OPERAND_VALUE may have side-effects on PC */ \
3298 int opvalue = OPERAND_VALUE; \
3299 build_java_jsr (oldpc + opvalue, PC); \
3302 /* Push a constant onto the stack. */
3303 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3304 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3305 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3306 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3308 /* internal macro added for use by the WIDE case */
3309 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3310 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3312 /* Push local variable onto the opcode stack. */
3313 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3315 /* have to do this since OPERAND_VALUE may have side-effects */ \
3316 int opvalue = OPERAND_VALUE; \
3317 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3320 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3321 expand_java_return (OPERAND_TYPE##_type_node)
3323 #define REM_EXPR TRUNC_MOD_EXPR
3324 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3325 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3327 #define FIELD(IS_STATIC, IS_PUT) \
3328 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3330 #define TEST(OPERAND_TYPE, CONDITION) \
3331 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3333 #define COND(OPERAND_TYPE, CONDITION) \
3334 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3336 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3337 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3339 #define BRANCH_GOTO(OPERAND_VALUE) \
3340 expand_java_goto (oldpc + OPERAND_VALUE)
3342 #define BRANCH_CALL(OPERAND_VALUE) \
3343 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3346 #define BRANCH_RETURN(OPERAND_VALUE) \
3348 tree type = OPERAND_TYPE##_type_node; \
3349 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3350 expand_java_ret (value); \
3354 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3355 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3356 fprintf (stderr, "(not implemented)\n")
3357 #define NOT_IMPL1(OPERAND_VALUE) \
3358 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3359 fprintf (stderr, "(not implemented)\n")
3361 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3363 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3365 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3367 #define STACK_SWAP(COUNT) java_stack_swap()
3369 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3370 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3371 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3373 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3374 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3376 #define LOOKUP_SWITCH \
3377 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3378 tree selector = pop_value (INT_type_node); \
3379 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3380 while (--npairs >= 0) \
3382 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3383 expand_java_add_case (switch_expr, match, oldpc + offset); \
3387 #define TABLE_SWITCH \
3388 { jint default_offset = IMMEDIATE_s4; \
3389 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3390 tree selector = pop_value (INT_type_node); \
3391 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3392 for (; low <= high; low++) \
3394 jint offset = IMMEDIATE_s4; \
3395 expand_java_add_case (switch_expr, low, oldpc + offset); \
3399 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3400 { int opcode = byte_ops[PC-1]; \
3401 int method_ref_index = IMMEDIATE_u2; \
3403 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3405 expand_invoke (opcode, method_ref_index, nargs); \
/* Handle new, checkcast, instanceof: fetch the class named by the
   constant-pool index and expand via expand_java_##OP.  */
#define OBJECT(TYPE, OP) \
  expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
/* Dispatch the array opcodes (load, store, length, new).  */
#define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)

/* <t>aload: index and arrayref are popped inside the helper.  */
#define ARRAY_LOAD(OPERAND_TYPE) \
  { \
    expand_java_arrayload( OPERAND_TYPE##_type_node ); \
  }

/* <t>astore: value, index and arrayref are popped inside the helper.  */
#define ARRAY_STORE(OPERAND_TYPE) \
  { \
    expand_java_arraystore( OPERAND_TYPE##_type_node ); \
  }

#define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
#define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()

/* anewarray: the element class comes from the constant pool; the
   dropped IMMEDIATE_u2 line restored here reads that index.  */
#define ARRAY_NEW_PTR() \
    push_value (build_anewarray (get_class_constant (current_jcf, \
						     IMMEDIATE_u2), \
				 pop_value (int_type_node)));

/* newarray: ATYPE is the primitive element-type code.  */
#define ARRAY_NEW_NUM() \
  { \
    int atype = IMMEDIATE_u1; \
    push_value (build_newarray (atype, pop_value (int_type_node)));\
  }

/* multianewarray: class from the constant pool, then the dimension
   count; the per-dimension lengths are popped by the helper.  */
#define ARRAY_NEW_MULTI() \
  { \
    tree class = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
    int ndims = IMMEDIATE_u1; \
    expand_java_multianewarray( class, ndims ); \
  }
/* Unary negation (<t>neg): pop one value of OPERAND_TYPE and push its
   folded negation.  */
#define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
  push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
			   pop_value (OPERAND_TYPE##_type_node)));
/* Narrowing int conversions (i2b, i2c, i2s): convert, then wrap in a
   NOP_EXPR back to int since the JVM stack only holds int-sized
   values.  Restored the brace lines dropped from both macros.  */
#define CONVERT2(FROM_TYPE, TO_TYPE) \
  { \
    push_value (build1 (NOP_EXPR, int_type_node, \
			(convert (TO_TYPE##_type_node, \
				  pop_value (FROM_TYPE##_type_node))))); \
  }

/* Plain value conversions (i2l, f2d, ...).  */
#define CONVERT(FROM_TYPE, TO_TYPE) \
  { \
    push_value (convert (TO_TYPE##_type_node, \
			 pop_value (FROM_TYPE##_type_node))); \
  }
/* Internal macro, also used by the WIDE case: pop a value, bind the
   local variable slot INDEX to the value's type, and emit the
   assignment.  (Added TREE_TYPE (decl) assignment, apbianco.)
   Restored the dropped `tree decl, value;' declaration and braces.  */
#define STORE_INTERNAL(OPTYPE, OPVALUE) \
  { \
    tree decl, value; \
    int index = OPVALUE; \
    tree type = OPTYPE; \
    value = pop_value (type); \
    /* Refine TYPE from the actual popped value.  */ \
    type = TREE_TYPE (value); \
    decl = find_local_variable (index, type, oldpc); \
    set_local_type (index, type); \
    java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
  }

#define STORE(OPERAND_TYPE, OPERAND_VALUE) \
  { \
    /* have to do this since OPERAND_VALUE may have side-effects */ \
    int opvalue = OPERAND_VALUE; \
    STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
  }
/* Dispatch the miscellaneous `special' opcodes.  */
#define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
  SPECIAL_##INSTRUCTION(OPERAND_TYPE)

/* monitorenter / monitorexit expand to calls on the runtime's
   soft_monitorenter / soft_monitorexit hooks.  */
#define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
#define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
/* Pop the object reference and emit a call to the monitor routine
   CALL (enter or exit).  The quick stack must be flushed first so
   pending values are materialized before the side-effecting call.
   Restored the dropped `tree c;' declaration and braces.  */
#define MONITOR_OPERATION(call) \
  { \
    tree o = pop_value (ptr_type_node); \
    tree c; \
    flush_quick_stack (); \
    c = build_java_monitor (call, o); \
    TREE_SIDE_EFFECTS (c) = 1; \
    java_add_stmt (c); \
  }
/* iinc: add the signed immediate IVAL to local variable slot
   LOCAL_VAR_INDEX.  Restored the dropped enclosing braces.  */
#define SPECIAL_IINC(IGNORED) \
  { \
    unsigned int local_var_index = IMMEDIATE_u1; \
    int ival = IMMEDIATE_s1; \
    expand_iinc(local_var_index, ival, oldpc); \
  }
/* wide: the modified opcode follows the prefix, then a 16-bit local
   variable index (and for iinc a 16-bit increment).  Restored the
   dropped `case OPCODE_iinc:', braces, `break's and `default:' label,
   and fixed the typo in the error message ("unrecogized").  */
#define SPECIAL_WIDE(IGNORED) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    unsigned int local_var_index = IMMEDIATE_u2; \
    switch (modified_opcode) \
      { \
      case OPCODE_iinc: \
	{ \
	  int ival = IMMEDIATE_s2; \
	  expand_iinc (local_var_index, ival, oldpc); \
	  break; \
	} \
      case OPCODE_iload: \
      case OPCODE_lload: \
      case OPCODE_fload: \
      case OPCODE_dload: \
      case OPCODE_aload: \
	{ \
	  /* duplicate code from LOAD macro */ \
	  LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
	  break; \
	} \
      case OPCODE_istore: \
      case OPCODE_lstore: \
      case OPCODE_fstore: \
      case OPCODE_dstore: \
      case OPCODE_astore: \
	{ \
	  STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
	  break; \
	} \
      default: \
	error ("unrecognized wide sub-instruction"); \
      } \
  }
/* athrow: pop the Throwable reference and expand the throw.  */
#define SPECIAL_THROW(IGNORED) \
  build_java_athrow (pop_value (throwable_type_node))

/* breakpoint (and the generic IMPL hook) are not implemented; they
   fall back to the diagnostic stubs above.  */
#define SPECIAL_BREAK NOT_IMPL1
#define IMPL NOT_IMPL
3545 #include "javaop.def"
3548 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3553 /* Return the opcode at PC in the code section pointed to by
3556 static unsigned char
3557 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3559 unsigned char opcode;
3560 long absolute_offset = (long)JCF_TELL (jcf);
3562 JCF_SEEK (jcf, code_offset);
3563 opcode = jcf->read_ptr [pc];
3564 JCF_SEEK (jcf, absolute_offset);
3568 /* Some bytecode compilers are emitting accurate LocalVariableTable
3569 attributes. Here's an example:
3574 Attribute "LocalVariableTable"
3575 slot #<n>: ... (PC: PC+1 length: L)
3577 This is accurate because the local in slot <n> really exists after
3578 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3580 This procedure recognizes this situation and extends the live range
3581 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3582 length of the store instruction.)
3584 This function is used by `give_name_to_locals' so that a local's
3585 DECL features a DECL_LOCAL_START_PC such that the first related
3586 store operation will use DECL as a destination, not an unrelated
3587 temporary created for the occasion.
   This function uses a global (instruction_bits) that
   `note_instructions' should have allocated and filled properly.  */
maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
		       int start_pc, int slot)
  int first, index, opcode;
  /* Find last previous instruction and remember it */
  for (pc = start_pc-1; pc; pc--)
    if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
  /* Retrieve the instruction, handle `wide'. */
  opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
  if (opcode == OPCODE_wide)
      /* The real store opcode follows the `wide' prefix byte.  */
      opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
    /* Map each <t>store_<n> short form onto the first opcode of its
       group, so the implicit slot number can later be recovered as
       (opcode - first).  */
    case OPCODE_astore_0:
    case OPCODE_astore_1:
    case OPCODE_astore_2:
    case OPCODE_astore_3:
      first = OPCODE_astore_0;
    case OPCODE_istore_0:
    case OPCODE_istore_1:
    case OPCODE_istore_2:
    case OPCODE_istore_3:
      first = OPCODE_istore_0;
    case OPCODE_lstore_0:
    case OPCODE_lstore_1:
    case OPCODE_lstore_2:
    case OPCODE_lstore_3:
      first = OPCODE_lstore_0;
    case OPCODE_fstore_0:
    case OPCODE_fstore_1:
    case OPCODE_fstore_2:
    case OPCODE_fstore_3:
      first = OPCODE_fstore_0;
    case OPCODE_dstore_0:
    case OPCODE_dstore_1:
    case OPCODE_dstore_2:
    case OPCODE_dstore_3:
      first = OPCODE_dstore_0;
  /* Read the explicit slot operand of a plain <t>store.  */
  index = peek_opcode_at_pc (jcf, code_offset, pc);
      /* presumably combines the two operand bytes of a wide store —
	 TODO(review): confirm the byte order against the JVM spec.  */
      int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
      index = (other << 8) + index;
  /* Now we decide: first >0 means we have a <t>store_<n>, index >0
     means we have a <t>store. */
  if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3678 /* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
3679 order, as specified by Java Language Specification.
3681 The problem is that while expand_expr will evaluate its sub-operands in
3682 left-to-right order, for variables it will just return an rtx (i.e.
3683 an lvalue) for the variable (rather than an rvalue). So it is possible
3684 that a later sub-operand will change the register, and when the
3685 actual operation is done, it will use the new value, when it should
3686 have used the original value.
   We fix this by using save_expr.  This forces the sub-operand to be
   copied into a fresh virtual register.
3691 For method invocation, we modify the arguments so that a
3692 left-to-right order evaluation is performed. Saved expressions
3693 will, in CALL_EXPR order, be reused when the call will be expanded.
3695 We also promote outgoing args if needed. */
force_evaluation_order (tree node)
  /* Nothing to rewrite when only checking syntax.  */
  if (flag_syntax_only)
  /* Only calls — possibly a call wrapped in a COMPOUND_EXPR with a
     SAVE_EXPR — need their argument evaluation order forced.  */
  if (TREE_CODE (node) == CALL_EXPR
      || (TREE_CODE (node) == COMPOUND_EXPR
	  && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
	  && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
      /* Account for wrapped around ctors.  */
      if (TREE_CODE (node) == COMPOUND_EXPR)
        call = TREE_OPERAND (node, 0);
      nargs = call_expr_nargs (call);
      /* This reverses the evaluation order. This is a desired effect. */
      for (i = 0, cmp = NULL_TREE; i < nargs; i++)
	  tree arg = CALL_EXPR_ARG (call, i);
	  /* Promote types smaller than integer.  This is required by
	     the target's promote_prototypes convention.  */
	  tree type = TREE_TYPE (arg);
	  if (targetm.calls.promote_prototypes (type)
	      && INTEGRAL_TYPE_P (type)
	      && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
				      TYPE_SIZE (integer_type_node)))
	    arg = fold_convert (integer_type_node, arg);
	  /* Recursively order the argument's own sub-expressions, then
	     SAVE_EXPR it so it is evaluated exactly once, here.  */
	  saved = save_expr (force_evaluation_order (arg));
	  /* Chain the saved arguments left-to-right with COMPOUND_EXPRs.  */
	  cmp = (cmp == NULL_TREE ? saved :
		 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));
	  CALL_EXPR_ARG (call, i) = saved;
      if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
	TREE_SIDE_EFFECTS (cmp) = 1;
	  /* Evaluate the argument chain first, then the call itself.  */
	  cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
	  if (TREE_TYPE (cmp) != void_type_node)
	    cmp = save_expr (cmp);
	  CAN_COMPLETE_NORMALLY (cmp) = CAN_COMPLETE_NORMALLY (node);
	  TREE_SIDE_EFFECTS (cmp) = 1;
3755 /* Build a node to represent empty statements and blocks. */
3758 build_java_empty_stmt (void)
3760 tree t = build_empty_stmt ();
3761 CAN_COMPLETE_NORMALLY (t) = 1;
3765 /* Promote all args of integral type before generating any code. */
3768 promote_arguments (void)
3772 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3773 arg != NULL_TREE; arg = TREE_CHAIN (arg), i++)
3775 tree arg_type = TREE_TYPE (arg);
3776 if (INTEGRAL_TYPE_P (arg_type)
3777 && TYPE_PRECISION (arg_type) < 32)
3779 tree copy = find_local_variable (i, integer_type_node, -1);
3780 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3782 fold_convert (integer_type_node, arg)));
3784 if (TYPE_IS_WIDE (arg_type))
3789 /* Create a local variable that points to the constant pool. */
3792 cache_cpool_data_ref (void)
3797 tree d = build_constant_data_ref (flag_indirect_classes);
3798 tree cpool_ptr = build_decl (VAR_DECL, NULL_TREE,
3799 build_pointer_type (TREE_TYPE (d)));
3800 java_add_local_var (cpool_ptr);
3801 TREE_INVARIANT (cpool_ptr) = 1;
3802 TREE_CONSTANT (cpool_ptr) = 1;
3804 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3805 cpool_ptr, build_address_of (d)));
3806 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3807 TREE_THIS_NOTRAP (cpool) = 1;
3808 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3812 #include "gt-java-expr.h"