1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008, 2010 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
25 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "javaop.h"
#include "java-tree.h"
#include "java-opcodes.h"
#include "jcf.h"
#include "java-except.h"
#include "parse.h"
#include "toplev.h"
#include "except.h"
#include "tree-iterator.h"
#include "ggc.h"
49 static void flush_quick_stack (void);
50 static void push_value (tree);
51 static tree pop_value (tree);
52 static void java_stack_swap (void);
53 static void java_stack_dup (int, int);
54 static void build_java_athrow (tree);
55 static void build_java_jsr (int, int);
56 static void build_java_ret (tree);
57 static void expand_java_multianewarray (tree, int);
58 static void expand_java_arraystore (tree);
59 static void expand_java_arrayload (tree);
60 static void expand_java_array_length (void);
61 static tree build_java_monitor (tree, tree);
62 static void expand_java_pushc (int, tree);
63 static void expand_java_return (tree);
64 static void expand_load_internal (int, tree, int);
65 static void expand_java_NEW (tree);
66 static void expand_java_INSTANCEOF (tree);
67 static void expand_java_CHECKCAST (tree);
68 static void expand_iinc (unsigned int, int, int);
69 static void expand_java_binop (tree, enum tree_code);
70 static void note_label (int, int);
71 static void expand_compare (enum tree_code, tree, tree, int);
72 static void expand_test (enum tree_code, tree, int);
73 static void expand_cond (enum tree_code, tree, int);
74 static void expand_java_goto (int);
75 static tree expand_java_switch (tree, int);
76 static void expand_java_add_case (tree, int, int);
77 static VEC(tree,gc) *pop_arguments (tree);
78 static void expand_invoke (int, int, int);
79 static void expand_java_field_op (int, int, int);
80 static void java_push_constant_from_pool (struct JCF *, int);
81 static void java_stack_pop (int);
82 static tree build_java_throw_out_of_bounds_exception (tree);
83 static tree build_java_check_indexed_type (tree, tree);
84 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
85 static void promote_arguments (void);
86 static void cache_cpool_data_ref (void);
88 static GTY(()) tree operand_type[59];
90 static GTY(()) tree methods_ident;
91 static GTY(()) tree ncode_ident;
92 tree dtable_ident = NULL_TREE;
94 /* Set to nonzero value in order to emit class initialization code
95 before static field references. */
96 int always_initialize_class_p = 0;
98 /* We store the stack state in two places:
99 Within a basic block, we use the quick_stack, which is a
100 pushdown list (TREE_LISTs) of expression nodes.
101 This is the top part of the stack; below that we use find_stack_slot.
102 At the end of a basic block, the quick_stack must be flushed
103 to the stack slot array (as handled by find_stack_slot).
104 Using quick_stack generates better code (especially when
105 compiled without optimization), because we do not have to
106 explicitly store and load trees to temporary variables.
108 If a variable is on the quick stack, it means the value of variable
109 when the quick stack was last flushed. Conceptually, flush_quick_stack
110 saves all the quick_stack elements in parallel. However, that is
111 complicated, so it actually saves them (i.e. copies each stack value
112 to is home virtual register) from low indexes. This allows a quick_stack
113 element at index i (counting from the bottom of stack the) to references
114 slot virtuals for register that are >= i, but not those that are deeper.
115 This convention makes most operations easier. For example iadd works
116 even when the stack contains (reg[0], reg[1]): It results in the
117 stack containing (reg[0]+reg[1]), which is OK. However, some stack
118 operations are more complicated. For example dup given a stack
119 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
120 the convention, since stack value 1 would refer to a register with
121 lower index (reg[0]), which flush_quick_stack does not safely handle.
122 So dup cannot just add an extra element to the quick_stack, but iadd can.
125 static GTY(()) tree quick_stack;
127 /* A free-list of unused permanent TREE_LIST nodes. */
128 static GTY((deletable)) tree tree_list_free_list;
130 /* The physical memory page size used in this computer. See
131 build_field_ref(). */
132 static GTY(()) tree page_size;
134 /* The stack pointer of the Java virtual machine.
135 This does include the size of the quick_stack. */
139 const unsigned char *linenumber_table;
140 int linenumber_count;
142 /* Largest pc so far in this method that has been passed to lookup_label. */
143 int highest_label_pc_this_method = -1;
145 /* Base value for this method to add to pc to get generated label. */
146 int start_label_pc_this_method = 0;
149 init_expr_processing (void)
151 operand_type[21] = operand_type[54] = int_type_node;
152 operand_type[22] = operand_type[55] = long_type_node;
153 operand_type[23] = operand_type[56] = float_type_node;
154 operand_type[24] = operand_type[57] = double_type_node;
155 operand_type[25] = operand_type[58] = ptr_type_node;
159 java_truthvalue_conversion (tree expr)
161 /* It is simpler and generates better code to have only TRUTH_*_EXPR
162 or comparison expressions as truth values at this level.
164 This function should normally be identity for Java. */
166 switch (TREE_CODE (expr))
168 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
169 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
170 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
171 case ORDERED_EXPR: case UNORDERED_EXPR:
172 case TRUTH_ANDIF_EXPR:
173 case TRUTH_ORIF_EXPR:
182 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
185 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
187 /* are these legal? XXX JH */
191 /* These don't change whether an object is nonzero or zero. */
192 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
195 /* Distribute the conversion into the arms of a COND_EXPR. */
196 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
197 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
198 java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
201 /* If this is widening the argument, we can ignore it. */
202 if (TYPE_PRECISION (TREE_TYPE (expr))
203 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
204 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
205 /* fall through to default */
208 return fold_build2 (NE_EXPR, boolean_type_node,
209 expr, boolean_false_node);
213 /* Save any stack slots that happen to be in the quick_stack into their
214 home virtual register slots.
216 The copy order is from low stack index to high, to support the invariant
217 that the expression for a slot may contain decls for stack slots with
218 higher (or the same) index, but not lower. */
221 flush_quick_stack (void)
223 int stack_index = stack_pointer;
224 tree prev, cur, next;
226 /* First reverse the quick_stack, and count the number of slots it has. */
227 for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next)
229 next = TREE_CHAIN (cur);
230 TREE_CHAIN (cur) = prev;
232 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (TREE_VALUE (cur)));
236 while (quick_stack != NULL_TREE)
239 tree node = quick_stack, type;
240 quick_stack = TREE_CHAIN (node);
241 TREE_CHAIN (node) = tree_list_free_list;
242 tree_list_free_list = node;
243 node = TREE_VALUE (node);
244 type = TREE_TYPE (node);
246 decl = find_stack_slot (stack_index, type);
248 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (node), decl, node));
249 stack_index += 1 + TYPE_IS_WIDE (type);
253 /* Push TYPE on the type stack.
254 Return true on success, 0 on overflow. */
257 push_type_0 (tree type)
260 type = promote_type (type);
261 n_words = 1 + TYPE_IS_WIDE (type);
262 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
264 /* Allocate decl for this variable now, so we get a temporary that
265 survives the whole method. */
266 find_stack_slot (stack_pointer, type);
267 stack_type_map[stack_pointer++] = type;
269 while (--n_words >= 0)
270 stack_type_map[stack_pointer++] = TYPE_SECOND;
275 push_type (tree type)
277 int r = push_type_0 (type);
282 push_value (tree value)
284 tree type = TREE_TYPE (value);
285 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
287 type = promote_type (type);
288 value = convert (type, value);
291 if (tree_list_free_list == NULL_TREE)
292 quick_stack = tree_cons (NULL_TREE, value, quick_stack);
295 tree node = tree_list_free_list;
296 tree_list_free_list = TREE_CHAIN (tree_list_free_list);
297 TREE_VALUE (node) = value;
298 TREE_CHAIN (node) = quick_stack;
301 /* If the value has a side effect, then we need to evaluate it
302 whether or not the result is used. If the value ends up on the
303 quick stack and is then popped, this won't happen -- so we flush
304 the quick stack. It is safest to simply always flush, though,
305 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
306 the latter we may need to strip conversions. */
307 flush_quick_stack ();
310 /* Pop a type from the type stack.
311 TYPE is the expected type. Return the actual type, which must be
313 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
316 pop_type_0 (tree type, char **messagep)
321 if (TREE_CODE (type) == RECORD_TYPE)
322 type = promote_type (type);
323 n_words = 1 + TYPE_IS_WIDE (type);
324 if (stack_pointer < n_words)
326 *messagep = xstrdup ("stack underflow");
329 while (--n_words > 0)
331 if (stack_type_map[--stack_pointer] != void_type_node)
333 *messagep = xstrdup ("Invalid multi-word value on type stack");
337 t = stack_type_map[--stack_pointer];
338 if (type == NULL_TREE || t == type)
340 if (TREE_CODE (t) == TREE_LIST)
344 tree tt = TREE_PURPOSE (t);
345 if (! can_widen_reference_to (tt, type))
355 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
356 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
358 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
360 /* If the expected type we've been passed is object or ptr
361 (i.e. void*), the caller needs to know the real type. */
362 if (type == ptr_type_node || type == object_ptr_type_node)
365 /* Since the verifier has already run, we know that any
366 types we see will be compatible. In BC mode, this fact
367 may be checked at runtime, but if that is so then we can
368 assume its truth here as well. So, we always succeed
369 here, with the expected type. */
373 if (! flag_verify_invocations && flag_indirect_dispatch
374 && t == object_ptr_type_node)
376 if (type != ptr_type_node)
377 warning (0, "need to insert runtime check for %s",
378 xstrdup (lang_printable_name (type, 0)));
382 /* lang_printable_name uses a static buffer, so we must save the result
383 from calling it the first time. */
386 char *temp = xstrdup (lang_printable_name (type, 0));
387 /* If the stack contains a multi-word type, keep popping the stack until
388 the real type is found. */
389 while (t == void_type_node)
390 t = stack_type_map[--stack_pointer];
391 *messagep = concat ("expected type '", temp,
392 "' but stack contains '", lang_printable_name (t, 0),
399 /* Pop a type from the type stack.
400 TYPE is the expected type. Return the actual type, which must be
401 convertible to TYPE, otherwise call error. */
406 char *message = NULL;
407 type = pop_type_0 (type, &message);
410 error ("%s", message);
417 /* Return true if two type assertions are equal. */
420 type_assertion_eq (const void * k1_p, const void * k2_p)
422 const type_assertion k1 = *(const type_assertion *)k1_p;
423 const type_assertion k2 = *(const type_assertion *)k2_p;
424 return (k1.assertion_code == k2.assertion_code
426 && k1.op2 == k2.op2);
429 /* Hash a type assertion. */
432 type_assertion_hash (const void *p)
434 const type_assertion *k_p = (const type_assertion *) p;
435 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
436 k_p->assertion_code, 0);
438 switch (k_p->assertion_code)
440 case JV_ASSERT_TYPES_COMPATIBLE:
441 hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2),
445 case JV_ASSERT_IS_INSTANTIABLE:
446 hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1),
450 case JV_ASSERT_END_OF_TABLE:
460 /* Add an entry to the type assertion table for the given class.
461 KLASS is the class for which this assertion will be evaluated by the
462 runtime during loading/initialization.
463 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
464 OP1 and OP2 are the operands. The tree type of these arguments may be
465 specific to each assertion_code. */
468 add_type_assertion (tree klass, int assertion_code, tree op1, tree op2)
470 htab_t assertions_htab;
474 assertions_htab = TYPE_ASSERTIONS (klass);
475 if (assertions_htab == NULL)
477 assertions_htab = htab_create_ggc (7, type_assertion_hash,
478 type_assertion_eq, NULL);
479 TYPE_ASSERTIONS (current_class) = assertions_htab;
482 as.assertion_code = assertion_code;
486 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
488 /* Don't add the same assertion twice. */
492 *as_pp = ggc_alloc (sizeof (type_assertion));
493 **(type_assertion **)as_pp = as;
497 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
498 Handles array types and interfaces. */
501 can_widen_reference_to (tree source_type, tree target_type)
503 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
506 /* Get rid of pointers */
507 if (TREE_CODE (source_type) == POINTER_TYPE)
508 source_type = TREE_TYPE (source_type);
509 if (TREE_CODE (target_type) == POINTER_TYPE)
510 target_type = TREE_TYPE (target_type);
512 if (source_type == target_type)
515 /* FIXME: This is very pessimistic, in that it checks everything,
516 even if we already know that the types are compatible. If we're
517 to support full Java class loader semantics, we need this.
518 However, we could do something more optimal. */
519 if (! flag_verify_invocations)
521 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
522 source_type, target_type);
525 warning (0, "assert: %s is assign compatible with %s",
526 xstrdup (lang_printable_name (target_type, 0)),
527 xstrdup (lang_printable_name (source_type, 0)));
528 /* Punt everything to runtime. */
532 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
538 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
540 HOST_WIDE_INT source_length, target_length;
541 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
543 /* An array implements Cloneable and Serializable. */
544 tree name = DECL_NAME (TYPE_NAME (target_type));
545 return (name == java_lang_cloneable_identifier_node
546 || name == java_io_serializable_identifier_node);
548 target_length = java_array_type_length (target_type);
549 if (target_length >= 0)
551 source_length = java_array_type_length (source_type);
552 if (source_length != target_length)
555 source_type = TYPE_ARRAY_ELEMENT (source_type);
556 target_type = TYPE_ARRAY_ELEMENT (target_type);
557 if (source_type == target_type)
559 if (TREE_CODE (source_type) != POINTER_TYPE
560 || TREE_CODE (target_type) != POINTER_TYPE)
562 return can_widen_reference_to (source_type, target_type);
566 int source_depth = class_depth (source_type);
567 int target_depth = class_depth (target_type);
569 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
572 warning (0, "assert: %s is assign compatible with %s",
573 xstrdup (lang_printable_name (target_type, 0)),
574 xstrdup (lang_printable_name (source_type, 0)));
578 /* class_depth can return a negative depth if an error occurred */
579 if (source_depth < 0 || target_depth < 0)
582 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
584 /* target_type is OK if source_type or source_type ancestors
585 implement target_type. We handle multiple sub-interfaces */
586 tree binfo, base_binfo;
589 for (binfo = TYPE_BINFO (source_type), i = 0;
590 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
591 if (can_widen_reference_to
592 (BINFO_TYPE (base_binfo), target_type))
599 for ( ; source_depth > target_depth; source_depth--)
602 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
604 return source_type == target_type;
610 pop_value (tree type)
612 type = pop_type (type);
615 tree node = quick_stack;
616 quick_stack = TREE_CHAIN (quick_stack);
617 TREE_CHAIN (node) = tree_list_free_list;
618 tree_list_free_list = node;
619 node = TREE_VALUE (node);
623 return find_stack_slot (stack_pointer, promote_type (type));
627 /* Pop and discard the top COUNT stack slots. */
630 java_stack_pop (int count)
636 gcc_assert (stack_pointer != 0);
638 type = stack_type_map[stack_pointer - 1];
639 if (type == TYPE_SECOND)
642 gcc_assert (stack_pointer != 1 && count > 0);
644 type = stack_type_map[stack_pointer - 2];
651 /* Implement the 'swap' operator (to swap two top stack slots). */
654 java_stack_swap (void)
660 if (stack_pointer < 2
661 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
662 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
663 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
664 /* Bad stack swap. */
666 /* Bad stack swap. */
668 flush_quick_stack ();
669 decl1 = find_stack_slot (stack_pointer - 1, type1);
670 decl2 = find_stack_slot (stack_pointer - 2, type2);
671 temp = build_decl (input_location, VAR_DECL, NULL_TREE, type1);
672 java_add_local_var (temp);
673 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
674 java_add_stmt (build2 (MODIFY_EXPR, type2,
675 find_stack_slot (stack_pointer - 1, type2),
677 java_add_stmt (build2 (MODIFY_EXPR, type1,
678 find_stack_slot (stack_pointer - 2, type1),
680 stack_type_map[stack_pointer - 1] = type2;
681 stack_type_map[stack_pointer - 2] = type1;
685 java_stack_dup (int size, int offset)
687 int low_index = stack_pointer - size - offset;
690 error ("stack underflow - dup* operation");
692 flush_quick_stack ();
694 stack_pointer += size;
695 dst_index = stack_pointer;
697 for (dst_index = stack_pointer; --dst_index >= low_index; )
700 int src_index = dst_index - size;
701 if (src_index < low_index)
702 src_index = dst_index + size + offset;
703 type = stack_type_map [src_index];
704 if (type == TYPE_SECOND)
706 /* Dup operation splits 64-bit number. */
707 gcc_assert (src_index > low_index);
709 stack_type_map[dst_index] = type;
710 src_index--; dst_index--;
711 type = stack_type_map[src_index];
712 gcc_assert (TYPE_IS_WIDE (type));
715 gcc_assert (! TYPE_IS_WIDE (type));
717 if (src_index != dst_index)
719 tree src_decl = find_stack_slot (src_index, type);
720 tree dst_decl = find_stack_slot (dst_index, type);
723 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
724 stack_type_map[dst_index] = type;
729 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
733 build_java_athrow (tree node)
737 call = build_call_nary (void_type_node,
738 build_address_of (throw_node),
740 TREE_SIDE_EFFECTS (call) = 1;
741 java_add_stmt (call);
742 java_stack_pop (stack_pointer);
745 /* Implementation for jsr/ret */
748 build_java_jsr (int target_pc, int return_pc)
750 tree where = lookup_label (target_pc);
751 tree ret = lookup_label (return_pc);
752 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
753 push_value (ret_label);
754 flush_quick_stack ();
755 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
757 /* Do not need to emit the label here. We noted the existence of the
758 label as a jump target in note_instructions; we'll emit the label
759 for real at the beginning of the expand_byte_code loop. */
763 build_java_ret (tree location)
765 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
768 /* Implementation of operations on array: new, load, store, length */
771 decode_newarray_type (int atype)
775 case 4: return boolean_type_node;
776 case 5: return char_type_node;
777 case 6: return float_type_node;
778 case 7: return double_type_node;
779 case 8: return byte_type_node;
780 case 9: return short_type_node;
781 case 10: return int_type_node;
782 case 11: return long_type_node;
783 default: return NULL_TREE;
787 /* Map primitive type to the code used by OPCODE_newarray. */
790 encode_newarray_type (tree type)
792 if (type == boolean_type_node)
794 else if (type == char_type_node)
796 else if (type == float_type_node)
798 else if (type == double_type_node)
800 else if (type == byte_type_node)
802 else if (type == short_type_node)
804 else if (type == int_type_node)
806 else if (type == long_type_node)
812 /* Build a call to _Jv_ThrowBadArrayIndex(), the
813 ArrayIndexOfBoundsException exception handler. */
816 build_java_throw_out_of_bounds_exception (tree index)
820 /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
821 has void return type. We cannot just set the type of the CALL_EXPR below
822 to int_type_node because we would lose it during gimplification. */
823 gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node))));
824 node = build_call_nary (void_type_node,
825 build_address_of (soft_badarrayindex_node),
827 TREE_SIDE_EFFECTS (node) = 1;
829 node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node);
830 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
835 /* Return the length of an array. Doesn't perform any checking on the nature
836 or value of the array NODE. May be used to implement some bytecodes. */
839 build_java_array_length_access (tree node)
841 tree type = TREE_TYPE (node);
842 tree array_type = TREE_TYPE (type);
843 HOST_WIDE_INT length;
845 if (!is_array_type_p (type))
847 /* With the new verifier, we will see an ordinary pointer type
848 here. In this case, we just use an arbitrary array type. */
849 array_type = build_java_array_type (object_ptr_type_node, -1);
850 type = promote_type (array_type);
853 length = java_array_type_length (type);
855 return build_int_cst (NULL_TREE, length);
857 node = build3 (COMPONENT_REF, int_type_node,
858 build_java_indirect_ref (array_type, node,
859 flag_check_references),
860 lookup_field (&array_type, get_identifier ("length")),
862 IS_ARRAY_LENGTH_ACCESS (node) = 1;
866 /* Optionally checks a reference against the NULL pointer. ARG1: the
867 expr, ARG2: we should check the reference. Don't generate extra
868 checks if we're not generating code. */
871 java_check_reference (tree expr, int check)
873 if (!flag_syntax_only && check)
875 expr = save_expr (expr);
876 expr = build3 (COND_EXPR, TREE_TYPE (expr),
877 build2 (EQ_EXPR, boolean_type_node,
878 expr, null_pointer_node),
879 build_call_nary (void_type_node,
880 build_address_of (soft_nullpointer_node),
888 /* Reference an object: just like an INDIRECT_REF, but with checking. */
891 build_java_indirect_ref (tree type, tree expr, int check)
894 t = java_check_reference (expr, check);
895 t = convert (build_pointer_type (type), t);
896 return build1 (INDIRECT_REF, type, t);
899 /* Implement array indexing (either as l-value or r-value).
900 Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
901 Optionally performs bounds checking and/or test to NULL.
902 At this point, ARRAY should have been verified as an array. */
905 build_java_arrayaccess (tree array, tree type, tree index)
907 tree node, throw_expr = NULL_TREE;
910 tree array_type = TREE_TYPE (TREE_TYPE (array));
911 tree size_exp = fold_convert (sizetype, size_in_bytes (type));
913 if (!is_array_type_p (TREE_TYPE (array)))
915 /* With the new verifier, we will see an ordinary pointer type
916 here. In this case, we just use the correct array type. */
917 array_type = build_java_array_type (type, -1);
920 if (flag_bounds_check)
923 * (unsigned jint) INDEX >= (unsigned jint) LEN
924 * && throw ArrayIndexOutOfBoundsException.
925 * Note this is equivalent to and more efficient than:
926 * INDEX < 0 || INDEX >= LEN && throw ... */
928 tree len = convert (unsigned_int_type_node,
929 build_java_array_length_access (array));
930 test = fold_build2 (GE_EXPR, boolean_type_node,
931 convert (unsigned_int_type_node, index),
933 if (! integer_zerop (test))
936 = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
937 build_java_throw_out_of_bounds_exception (index));
938 /* allows expansion within COMPOUND */
939 TREE_SIDE_EFFECTS( throw_expr ) = 1;
943 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
944 to have the bounds check evaluated first. */
945 if (throw_expr != NULL_TREE)
946 index = build2 (COMPOUND_EXPR, int_type_node, throw_expr, index);
948 data_field = lookup_field (&array_type, get_identifier ("data"));
950 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
951 build_java_indirect_ref (array_type, array,
952 flag_check_references),
953 data_field, NULL_TREE);
955 /* Take the address of the data field and convert it to a pointer to
957 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
959 /* Multiply the index by the size of an element to obtain a byte
960 offset. Convert the result to a pointer to the element type. */
961 index = build2 (MULT_EXPR, sizetype,
962 fold_convert (sizetype, index),
965 /* Sum the byte offset and the address of the data field. */
966 node = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (node), node, index);
970 *((&array->data) + index*size_exp)
973 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
976 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
977 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
978 determine that no check is required. */
981 build_java_arraystore_check (tree array, tree object)
983 tree check, element_type, source;
984 tree array_type_p = TREE_TYPE (array);
985 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
987 if (! flag_verify_invocations)
989 /* With the new verifier, we don't track precise types. FIXME:
990 performance regression here. */
991 element_type = TYPE_NAME (object_type_node);
995 gcc_assert (is_array_type_p (array_type_p));
997 /* Get the TYPE_DECL for ARRAY's element type. */
999 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
1002 gcc_assert (TREE_CODE (element_type) == TYPE_DECL
1003 && TREE_CODE (object_type) == TYPE_DECL);
1005 if (!flag_store_check)
1006 return build1 (NOP_EXPR, array_type_p, array);
1008 /* No check is needed if the element type is final. Also check that
1009 element_type matches object_type, since in the bytecode
1010 compilation case element_type may be the actual element type of
1011 the array rather than its declared type. However, if we're doing
1012 indirect dispatch, we can't do the `final' optimization. */
1013 if (element_type == object_type
1014 && ! flag_indirect_dispatch
1015 && CLASS_FINAL (element_type))
1016 return build1 (NOP_EXPR, array_type_p, array);
1018 /* OBJECT might be wrapped by a SAVE_EXPR. */
1019 if (TREE_CODE (object) == SAVE_EXPR)
1020 source = TREE_OPERAND (object, 0);
1024 /* Avoid the check if OBJECT was just loaded from the same array. */
1025 if (TREE_CODE (source) == ARRAY_REF)
1028 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
1029 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
1030 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1031 if (TREE_CODE (source) == SAVE_EXPR)
1032 source = TREE_OPERAND (source, 0);
1035 if (TREE_CODE (target) == SAVE_EXPR)
1036 target = TREE_OPERAND (target, 0);
1038 if (source == target)
1039 return build1 (NOP_EXPR, array_type_p, array);
1042 /* Build an invocation of _Jv_CheckArrayStore */
1043 check = build_call_nary (void_type_node,
1044 build_address_of (soft_checkarraystore_node),
1046 TREE_SIDE_EFFECTS (check) = 1;
1051 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1052 ARRAY_NODE. This function is used to retrieve something less vague than
1053 a pointer type when indexing the first dimension of something like [[<t>.
1054 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
1055 return unchanged. */
1058 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1061 /* We used to check to see if ARRAY_NODE really had array type.
1062 However, with the new verifier, this is not necessary, as we know
1063 that the object will be an array of the appropriate type. */
1065 return indexed_type;
1068 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1069 called with an integer code (the type of array to create), and the length
1070 of the array to create. */
1073 build_newarray (int atype_value, tree length)
1077 tree prim_type = decode_newarray_type (atype_value);
1079 = build_java_array_type (prim_type,
1080 host_integerp (length, 0) == INTEGER_CST
1081 ? tree_low_cst (length, 0) : -1);
1083 /* Pass a reference to the primitive type class and save the runtime
1085 type_arg = build_class_ref (prim_type);
1087 return build_call_nary (promote_type (type),
1088 build_address_of (soft_newarray_node),
1089 2, type_arg, length);
1092 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1093 of the dimension. */
1096 build_anewarray (tree class_type, tree length)
1099 = build_java_array_type (class_type,
1100 host_integerp (length, 0)
1101 ? tree_low_cst (length, 0) : -1);
1103 return build_call_nary (promote_type (type),
1104 build_address_of (soft_anewarray_node),
1107 build_class_ref (class_type),
1111 /* Return a node the evaluates 'new TYPE[LENGTH]'. */
1114 build_new_array (tree type, tree length)
1116 if (JPRIMITIVE_TYPE_P (type))
1117 return build_newarray (encode_newarray_type (type), length);
1119 return build_anewarray (TREE_TYPE (type), length);
1122 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1123 class pointer, a number of dimensions and the matching number of
1124 dimensions. The argument list is NULL terminated. */
1127 expand_java_multianewarray (tree class_type, int ndim)
1130 VEC(tree,gc) *args = NULL;
1132 VEC_safe_grow (tree, gc, args, 3 + ndim);
1134 VEC_replace (tree, args, 0, build_class_ref (class_type));
1135 VEC_replace (tree, args, 1, build_int_cst (NULL_TREE, ndim));
1137 for(i = ndim - 1; i >= 0; i-- )
1138 VEC_replace (tree, args, (unsigned)(2 + i), pop_value (int_type_node));
1140 VEC_replace (tree, args, 2 + ndim, null_pointer_node);
1142 push_value (build_call_vec (promote_type (class_type),
1143 build_address_of (soft_multianewarray_node),
1147 /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
1148    ARRAY is an array type. May expand some bound checking and NULL
1149    pointer checking. RHS_TYPE_NODE we are going to store. In the case
1150    of the CHAR/BYTE/BOOLEAN SHORT, the type popped of the stack is an
1151    INT. In those cases, we make the conversion.
1153    if ARRAY is a reference type, the assignment is checked at run-time
1154    to make sure that the RHS can be assigned to the array element
1155    type. It is not necessary to generate this code if ARRAY is final. */
1158 expand_java_arraystore (tree rhs_type_node)
/* Narrow integral element types are represented as INT on the JVM stack.  */
1160   tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
1161 && TYPE_PRECISION (rhs_type_node) <= 32) ?
1162 int_type_node : rhs_type_node);
1163   tree index = pop_value (int_type_node);
1164   tree array_type, array, temp, access;
1166   /* If we're processing an `aaload' we might as well just pick
1168   if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1170       array_type = build_java_array_type (object_ptr_type_node, -1);
1171       rhs_type_node = object_ptr_type_node;
1174     array_type = build_java_array_type (rhs_type_node, -1);
1176   array = pop_value (array_type);
1177   array = build1 (NOP_EXPR, promote_type (array_type), array);
1179   rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
1181   flush_quick_stack ();
1183   index = save_expr (index);
1184   array = save_expr (array);
1186   /* We want to perform the bounds check (done by
1187      build_java_arrayaccess) before the type check (done by
1188      build_java_arraystore_check). So, we call build_java_arrayaccess
1189      -- which returns an ARRAY_REF lvalue -- and we then generate code
1190      to stash the address of that lvalue in a temp. Then we call
1191      build_java_arraystore_check, and finally we generate a
1192      MODIFY_EXPR to set the array element. */
1194   access = build_java_arrayaccess (array, rhs_type_node, index);
1195   temp = build_decl (input_location, VAR_DECL, NULL_TREE,
1196 build_pointer_type (TREE_TYPE (access)));
1197   java_add_local_var (temp);
1198   java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
1200 build_fold_addr_expr (access)));
/* Reference stores need a run-time element-type compatibility check.  */
1202   if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1204       tree check = build_java_arraystore_check (array, rhs_node);
1205       java_add_stmt (check);
1208   java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
1209 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
1213 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1214    sure that LHS is an array type. May expand some bound checking and NULL
1216    LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1217    BOOLEAN/SHORT, we push a promoted type back to the stack.
1221 expand_java_arrayload (tree lhs_type_node)
1224   tree index_node = pop_value (int_type_node);
1228   /* If we're processing an `aaload' we might as well just pick
1230   if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1232       array_type = build_java_array_type (object_ptr_type_node, -1);
1233       lhs_type_node = object_ptr_type_node;
1236     array_type = build_java_array_type (lhs_type_node, -1);
1237   array_node = pop_value (array_type);
1238   array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
1240   index_node = save_expr (index_node);
1241   array_node = save_expr (array_node);
1243   lhs_type_node = build_java_check_indexed_type (array_node,
1245   load_node = build_java_arrayaccess (array_node,
/* Narrow integral loads are widened to INT, matching JVM stack semantics.  */
1248   if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1249     load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1250   push_value (load_node);
1253 /* Expands .length. Makes sure that we deal with an array and may expand
1254    a NULL check on the array object. */
1257 expand_java_array_length (void)
1259   tree array = pop_value (ptr_type_node);
1260   tree length = build_java_array_length_access (array);
1262   push_value (length);
1265 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1266    either soft_monitorenter_node or soft_monitorexit_node. */
1269 build_java_monitor (tree call, tree object)
1271   return build_call_nary (void_type_node,
1272 build_address_of (call),
1276 /* Emit code for one of the PUSHC instructions. */
1279 expand_java_pushc (int ival, tree type)
/* Map the (IVAL, TYPE) pair onto a constant tree of the right kind.  */
1282   if (type == ptr_type_node && ival == 0)
1283     value = null_pointer_node;
1284   else if (type == int_type_node || type == long_type_node)
1285     value = build_int_cst (type, ival);
1286   else if (type == float_type_node || type == double_type_node)
1289       REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1290       value = build_real (type, x);
/* Emit a RETURN_EXPR for the current method.  For a non-void TYPE the
   return value is popped from the quick stack and assigned to
   DECL_RESULT first.  */
1299 expand_java_return (tree type)
1301   if (type == void_type_node)
1302     java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1305       tree retval = pop_value (type);
1306       tree res = DECL_RESULT (current_function_decl);
1307       retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1309       /* Handle the situation where the native integer type is smaller
1310 than the JVM integer. It can happen for many cross compilers.
1311 The whole if expression just goes away if INT_TYPE_SIZE < 32
1313       if (INT_TYPE_SIZE < 32
1314 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1315 < GET_MODE_SIZE (TYPE_MODE (type))))
1316 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1318       TREE_SIDE_EFFECTS (retval) = 1;
1319       java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
/* Expand a load of local variable INDEX with declared TYPE at bytecode
   offset PC, pushing a defensive copy of the local onto the stack.  */
1324 expand_load_internal (int index, tree type, int pc)
1327   tree var = find_local_variable (index, type, pc);
1329   /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1330      on the stack. If there is an assignment to this VAR_DECL between
1331      the stack push and the use, then the wrong code could be
1332      generated. To avoid this we create a new local and copy our
1333      value into it. Then we push this new local on the stack.
1334      Hopefully this all gets optimized out. */
1335   copy = build_decl (input_location, VAR_DECL, NULL_TREE, type);
1336   if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1337       && TREE_TYPE (copy) != TREE_TYPE (var))
1338     var = convert (type, var);
1339   java_add_local_var (copy);
1340   java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
/* Return an ADDR_EXPR taking the address of VALUE, typed as a pointer
   to VALUE's type.  */
1346 build_address_of (tree value)
1348   return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
/* Return true if TYPE, or any of its superclasses, declares a finalizer.  */
1352 class_has_finalize_method (tree type)
1354   tree super = CLASSTYPE_SUPER (type);
1356   if (super == NULL_TREE)
1357     return false;	/* Every class with a real finalizer inherits */
1358 /* from java.lang.Object. */
1360     return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
/* Build a call to the runtime allocator for an instance of TYPE,
   choosing the finalizer-aware allocator only when TYPE needs it.  */
1364 java_create_object (tree type)
1366   tree alloc_node = (class_has_finalize_method (type)
1368 : alloc_no_finalizer_node);
1370   return build_call_nary (promote_type (type),
1371 build_address_of (alloc_node),
1372 1, build_class_ref (type));
/* Expand the `new' bytecode: allocate an instance of TYPE and push the
   resulting reference.  Loads and lays out TYPE first if necessary.  */
1376 expand_java_NEW (tree type)
1380   alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1381 : alloc_no_finalizer_node);
1382   if (! CLASS_LOADED_P (type))
1383     load_class (type, 1);
1384   safe_layout_class (type);
1385   push_value (build_call_nary (promote_type (type),
1386 build_address_of (alloc_node),
1387 1, build_class_ref (type)));
1390 /* This returns an expression which will extract the class of an
1394 build_get_class (tree value)
1396   tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1397   tree vtable_field = lookup_field (&object_type_node,
1398 get_identifier ("vtable"));
/* VALUE->vtable, with an optional null check on VALUE.  */
1399   tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1400 build_java_indirect_ref (object_type_node, value,
1401 flag_check_references),
1402 vtable_field, NULL_TREE);
/* ...then vtable->class.  */
1403   return build3 (COMPONENT_REF, class_ptr_type,
1404 build1 (INDIRECT_REF, dtable_type, tmp),
1405 class_field, NULL_TREE);
1408 /* This builds the tree representation of the `instanceof' operator.
1409    It tries various tricks to optimize this in cases where types are
1413 build_instanceof (tree value, tree type)
1416   tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1417   tree valtype = TREE_TYPE (TREE_TYPE (value));
1418   tree valclass = TYPE_NAME (valtype);
1421   /* When compiling from bytecode, we need to ensure that TYPE has
1423   if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1425       load_class (type, 1);
1426       safe_layout_class (type);
1427       if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1428 return error_mark_node;
1430   klass = TYPE_NAME (type);
1432   if (type == object_type_node || inherits_from_p (valtype, type))
1434       /* Anything except `null' is an instance of Object. Likewise,
1435 if the object is known to be an instance of the class, then
1436 we only need to check for `null'. */
1437       expr = build2 (NE_EXPR, itype, value, null_pointer_node);
1439   else if (flag_verify_invocations
1440 && ! TYPE_ARRAY_P (type)
1441 && ! TYPE_ARRAY_P (valtype)
1442 && DECL_P (klass) && DECL_P (valclass)
1443 && ! CLASS_INTERFACE (valclass)
1444 && ! CLASS_INTERFACE (klass)
1445 && ! inherits_from_p (type, valtype)
1446 && (CLASS_FINAL (klass)
1447 || ! inherits_from_p (valtype, type)))
1449       /* The classes are from different branches of the derivation
1450 tree, so we immediately know the answer. */
1451       expr = boolean_false_node;
1453   else if (DECL_P (klass) && CLASS_FINAL (klass))
/* Final target class: an exact vtable-class comparison suffices,
   guarded by a null check on VALUE.  */
1455       tree save = save_expr (value);
1456       expr = build3 (COND_EXPR, itype,
1457 build2 (NE_EXPR, boolean_type_node,
1458 save, null_pointer_node),
1459 build2 (EQ_EXPR, itype,
1460 build_get_class (save),
1461 build_class_ref (type)),
1462 boolean_false_node);
/* General case: defer to the runtime helper.  */
1466     expr = build_call_nary (itype,
1467 build_address_of (soft_instanceof_node),
1468 2, value, build_class_ref (type));
1470   TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
/* Expand the `instanceof' bytecode against TYPE for the value on top
   of the stack.  */
1475 expand_java_INSTANCEOF (tree type)
1477   tree value = pop_value (object_ptr_type_node);
1478   value = build_instanceof (value, type);
/* Expand the `checkcast' bytecode: call the runtime checked-cast
   helper on the value on top of the stack.  */
1483 expand_java_CHECKCAST (tree type)
1485   tree value = pop_value (ptr_type_node);
1486   value = build_call_nary (promote_type (type),
1487 build_address_of (soft_checkcast_node),
1488 2, build_class_ref (type), value);
/* Expand the `iinc' bytecode: add constant IVAL to int local variable
   LOCAL_VAR_INDEX at bytecode offset PC.  */
1493 expand_iinc (unsigned int local_var_index, int ival, int pc)
1495   tree local_var, res;
1496   tree constant_value;
1498   flush_quick_stack ();
1499   local_var = find_local_variable (local_var_index, int_type_node, pc);
1500   constant_value = build_int_cst (NULL_TREE, ival);
1501   res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1502   java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
/* Build a call to the runtime software divide/remainder helper for
   integer OP (TRUNC_DIV_EXPR or TRUNC_MOD_EXPR) on OP1 and OP2 of TYPE
   (int or long).  The helpers also handle division by zero.  */
1507 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1510   tree arg1 = convert (type, op1);
1511   tree arg2 = convert (type, op2);
1513   if (type == int_type_node)
1517 case TRUNC_DIV_EXPR:
1518 call = soft_idiv_node;
1520 case TRUNC_MOD_EXPR:
1521 call = soft_irem_node;
1527   else if (type == long_type_node)
1531 case TRUNC_DIV_EXPR:
1532 call = soft_ldiv_node;
1534 case TRUNC_MOD_EXPR:
1535 call = soft_lrem_node;
1543   call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
/* Build a tree for binary operation OP on ARG1 and ARG2 with result
   TYPE, implementing the Java-specific operators (unsigned shift,
   three-way compares, float remainder) in terms of GENERIC codes.  */
1548 build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
/* Unsigned (>>>) shift: perform the right shift in the corresponding
   unsigned type, then convert back.  */
1555 tree u_type = unsigned_type_for (type);
1556 arg1 = convert (u_type, arg1);
1557 arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1558 return convert (type, arg1);
/* Java masks the shift count to the operand width.  */
1562       mask = build_int_cst (NULL_TREE,
1563 TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
1564       arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
1567     case COMPARE_L_EXPR:	/* arg1 > arg2 ?  1 : arg1 == arg2 ? 0 : -1 */
1568     case COMPARE_G_EXPR:	/* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 :  1 */
1569       arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1571 tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1572 boolean_type_node, arg1, arg2);
1573 tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
1574 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1575 ifexp2, integer_zero_node,
1576 op == COMPARE_L_EXPR
1577 ? integer_minus_one_node
1578 : integer_one_node);
1579 return fold_build3 (COND_EXPR, int_type_node, ifexp1,
1580 op == COMPARE_L_EXPR ? integer_one_node
1581 : integer_minus_one_node,
/* Plain three-way compare: -1 / 0 / 1.  */
1585       arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1587 tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
1588 tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
1589 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1590 ifexp2, integer_one_node,
1592 return fold_build3 (COND_EXPR, int_type_node,
1593 ifexp1, integer_minus_one_node, second_compare);
1595     case TRUNC_DIV_EXPR:
1596     case TRUNC_MOD_EXPR:
/* Floating remainder is implemented via the runtime fmod helper,
   computed in double and narrowed back if needed.  */
1597       if (TREE_CODE (type) == REAL_TYPE
1598 && op == TRUNC_MOD_EXPR)
1601 if (type != double_type_node)
1603 arg1 = convert (double_type_node, arg1);
1604 arg2 = convert (double_type_node, arg2);
1606 call = build_call_nary (double_type_node,
1607 build_address_of (soft_fmod_node),
1609 if (type != double_type_node)
1610 call = convert (type, call);
/* Integer div/rem may use the runtime subroutine (see -fuse-divide-subroutine).  */
1614       if (TREE_CODE (type) == INTEGER_TYPE
1615 && flag_use_divide_subroutine
1616 && ! flag_syntax_only)
1617 return build_java_soft_divmod (op, type, arg1, arg2);
1622   return fold_build2 (op, type, arg1, arg2);
/* Expand a binary bytecode operation OP with result TYPE: pop the two
   operands (right first) and push the combined expression.  */
1626 expand_java_binop (tree type, enum tree_code op)
1636       rtype = int_type_node;
1637       rarg = pop_value (rtype);
1640       rarg = pop_value (rtype);
1642   larg = pop_value (ltype);
1643   push_value (build_java_binop (op, type, larg, rarg));
1646 /* Lookup the field named NAME in *TYPEP or its super classes.
1647    If not found, return NULL_TREE.
1648    (If the *TYPEP is not found, or if the field reference is
1649    ambiguous, return error_mark_node.)
1650    If found, return the FIELD_DECL, and set *TYPEP to the
1651    class containing the field. */
1654 lookup_field (tree *typep, tree name)
1656   if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1658       load_class (*typep, 1);
1659       safe_layout_class (*typep);
1660       if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1661 return error_mark_node;
1665       tree field, binfo, base_binfo;
/* First search the class's own fields.  */
1669       for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field))
1670 if (DECL_NAME (field) == name)
1673       /* Process implemented interfaces. */
1674       save_field = NULL_TREE;
1675       for (binfo = TYPE_BINFO (*typep), i = 0;
1676 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1678 tree t = BINFO_TYPE (base_binfo);
1679 if ((field = lookup_field (&t, name)))
/* The same field reached through two paths is not ambiguous.  */
1681 if (save_field == field)
1683 if (save_field == NULL_TREE)
/* Two distinct interface fields with the same name: ambiguous.  */
1687 tree i1 = DECL_CONTEXT (save_field);
1688 tree i2 = DECL_CONTEXT (field);
1689 error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1690 IDENTIFIER_POINTER (name),
1691 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1692 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1693 return error_mark_node;
1698       if (save_field != NULL_TREE)
/* Not found here: continue in the superclass.  */
1701       *typep = CLASSTYPE_SUPER (*typep);
1706 /* Look up the field named NAME in object SELF_VALUE,
1707    which has class SELF_CLASS (a non-handle RECORD_TYPE).
1708    SELF_VALUE is NULL_TREE if looking for a static field. */
1711 build_field_ref (tree self_value, tree self_class, tree name)
1713   tree base_class = self_class;
1714   tree field_decl = lookup_field (&base_class, name);
1715   if (field_decl == NULL_TREE)
1717       error ("field %qs not found", IDENTIFIER_POINTER (name));
1718       return error_mark_node;
1720   if (self_value == NULL_TREE)
1722       return build_static_field_ref (field_decl);
1726       tree base_type = promote_type (base_class);
1728       /* CHECK is true if self_value is not the this pointer. */
1729       int check = (! (DECL_P (self_value)
1730 && DECL_NAME (self_value) == this_identifier_node));
1732       /* Determine whether a field offset from NULL will lie within
1733 Page 0: this is necessary on those GNU/Linux/BSD systems that
1734 trap SEGV to generate NullPointerExceptions.
1736 We assume that Page 0 will be mapped with NOPERM, and that
1737 memory may be allocated from any other page, so only field
1738 offsets < pagesize are guaranteed to trap. We also assume
1739 the smallest page size we'll encounter is 4k bytes. */
1740       if (! flag_syntax_only && check && ! flag_check_references
1741 && ! flag_indirect_dispatch)
1743 tree field_offset = byte_position (field_decl);
1745 page_size = size_int (4096);
1746 check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
1749       if (base_type != TREE_TYPE (self_value))
1750 self_value = fold_build1 (NOP_EXPR, base_type, self_value);
/* With -findirect-dispatch the field offset is read from the otable
   at run time rather than baked in at compile time.  */
1751       if (! flag_syntax_only && flag_indirect_dispatch)
1754 = build_int_cst (NULL_TREE, get_symbol_table_index
1755 (field_decl, NULL_TREE,
1756 &TYPE_OTABLE_METHODS (output_class)));
1758 = build4 (ARRAY_REF, integer_type_node,
1759 TYPE_OTABLE_DECL (output_class), otable_index,
1760 NULL_TREE, NULL_TREE);
/* A zero otable entry means the field was not found at link time.  */
1763 if (DECL_CONTEXT (field_decl) != output_class)
1765 = build3 (COND_EXPR, TREE_TYPE (field_offset),
1766 build2 (EQ_EXPR, boolean_type_node,
1767 field_offset, integer_zero_node),
1768 build_call_nary (void_type_node,
1769 build_address_of (soft_nosuchfield_node),
1773 field_offset = fold (convert (sizetype, field_offset));
1774 self_value = java_check_reference (self_value, check);
1776 = fold_build2 (POINTER_PLUS_EXPR,
1777 TREE_TYPE (self_value),
1778 self_value, field_offset);
1779 address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
1781 return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
/* Direct dispatch: an ordinary COMPONENT_REF, with optional null check.  */
1784       self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1786       return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1787 self_value, field_decl, NULL_TREE);
/* Return the LABEL_DECL for bytecode offset PC, creating (and
   registering) it on first use.  */
1792 lookup_label (int pc)
1796   if (pc > highest_label_pc_this_method)
1797     highest_label_pc_this_method = pc;
1798   ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc);
1799   name = get_identifier (buf);
1800   if (IDENTIFIER_LOCAL_VALUE (name))
1801     return IDENTIFIER_LOCAL_VALUE (name);
1804       /* The type of the address of a label is return_address_type_node. */
1805       tree decl = create_label_decl (name);
1806       return pushdecl (decl);
1810 /* Generate a unique name for the purpose of loops and switches
1811    labels, and try-catch-finally blocks label or temporary variables. */
1814 generate_name (void)
1816   static int l_number = 0;
1818   ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
1820   return get_identifier (buff);
/* Build a LABEL_DECL named NAME in the current function; it is hidden
   from debug output.  */
1824 create_label_decl (tree name)
1827   decl = build_decl (input_location, LABEL_DECL, name,
1828 TREE_TYPE (return_address_type_node));
1829   DECL_CONTEXT (decl) = current_function_decl;
1830   DECL_IGNORED_P (decl) = 1;
1834 /* This maps a bytecode offset (PC) to various flags. */
1835 char *instruction_bits;
1837 /* This is a vector of type states for the current method.  It is
1838    indexed by PC.  Each element is a tree vector holding the type
1839    state at that PC.  We only note type states at basic block
1841 VEC(tree, gc) *type_states;
/* Record that TARGET_PC is a jump target: create its label and flag it
   in instruction_bits.  */
1844 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1846   lookup_label (target_pc);
1847   instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1850 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1851    where CONDITION is one of one the compare operators. */
1854 expand_compare (enum tree_code condition, tree value1, tree value2,
1857   tree target = lookup_label (target_pc);
1858   tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1860     (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1861 build1 (GOTO_EXPR, void_type_node, target),
1862 build_java_empty_stmt ()));
1865 /* Emit code for a TEST-type opcode. */
1868 expand_test (enum tree_code condition, tree type, int target_pc)
1870   tree value1, value2;
1871   flush_quick_stack ();
1872   value1 = pop_value (type);
/* TEST opcodes compare against zero (or null for references).  */
1873   value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1874   expand_compare (condition, value1, value2, target_pc);
1877 /* Emit code for a COND-type opcode. */
1880 expand_cond (enum tree_code condition, tree type, int target_pc)
1882   tree value1, value2;
1883   flush_quick_stack ();
1884   /* note: pop values in opposite order */
1885   value2 = pop_value (type);
1886   value1 = pop_value (type);
1887   /* Maybe should check value1 and value2 for type compatibility ??? */
1888   expand_compare (condition, value1, value2, target_pc);
/* Emit an unconditional jump to the label for bytecode offset TARGET_PC.  */
1892 expand_java_goto (int target_pc)
1894   tree target_label = lookup_label (target_pc);
1895   flush_quick_stack ();
1896   java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
/* Start a SWITCH_EXPR on SELECTOR whose default case jumps to
   DEFAULT_PC; cases are added later via expand_java_add_case.  */
1900 expand_java_switch (tree selector, int default_pc)
1902   tree switch_expr, x;
1904   flush_quick_stack ();
1905   switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1906 NULL_TREE, NULL_TREE);
1907   java_add_stmt (switch_expr);
/* Default case: a fresh label followed by a goto to DEFAULT_PC.  */
1909   x = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, NULL_TREE,
1910 create_artificial_label (input_location));
1911   append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1913   x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1914   append_to_statement_list (x, &SWITCH_BODY (switch_expr));
/* Append `case MATCH: goto <TARGET_PC>;' to SWITCH_EXPR's body.  */
1920 expand_java_add_case (tree switch_expr, int match, int target_pc)
1924   value = build_int_cst (TREE_TYPE (switch_expr), match);
1926   x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE,
1927 create_artificial_label (input_location));
1928   append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1930   x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1931   append_to_statement_list (x, &SWITCH_BODY (switch_expr));
/* Pop the arguments for a call with signature METHOD_TYPE off the
   quick stack, returning them in call order, each converted to its
   declared (possibly promoted) parameter type.  */
1934 static VEC(tree,gc) *
1935 pop_arguments (tree method_type)
1937   function_args_iterator fnai;
1939   VEC(tree,gc) *args = NULL;
/* First collect the declared parameter types in order.  */
1942   FOREACH_FUNCTION_ARGS (method_type, type, fnai)
1944       /* XXX: leaky abstraction. */
1945       if (type == void_type_node)
1948       VEC_safe_push (tree, gc, args, type);
1951   arity = VEC_length (tree, args);
/* Then pop values (last argument first), replacing each type slot
   with the converted popped value.  */
1955       tree arg = pop_value (VEC_index (tree, args, arity));
1957       /* We simply cast each argument to its proper type.  This is
1958 needed since we lose type information coming out of the
1959 verifier.  We also have to do this when we pop an integer
1960 type that must be promoted for the function call.  */
1961       if (TREE_CODE (type) == POINTER_TYPE)
1962 arg = build1 (NOP_EXPR, type, arg);
1963       else if (targetm.calls.promote_prototypes (type)
1964 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1965 && INTEGRAL_TYPE_P (type))
1966 arg = convert (integer_type_node, arg);
1968       VEC_replace (tree, args, arity, arg);
1974 /* Attach to PTR (a block) the declaration found in ENTRY. */
1977 attach_init_test_initialization_flags (void **entry, void *ptr)
1979 tree block = (tree)ptr;
1980 struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
1982 if (block != error_mark_node)
1984 if (TREE_CODE (block) == BIND_EXPR)
1986 tree body = BIND_EXPR_BODY (block);
1987 TREE_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1988 BIND_EXPR_VARS (block) = ite->value;
1989 body = build2 (COMPOUND_EXPR, void_type_node,
1990 build1 (DECL_EXPR, void_type_node, ite->value), body);
1991 BIND_EXPR_BODY (block) = body;
1995 tree body = BLOCK_SUBBLOCKS (block);
1996 TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1997 BLOCK_EXPR_DECLS (block) = ite->value;
1998 body = build2 (COMPOUND_EXPR, void_type_node,
1999 build1 (DECL_EXPR, void_type_node, ite->value), body);
2000 BLOCK_SUBBLOCKS (block) = body;
2007 /* Build an expression to initialize the class CLAS.
2008    if EXPR is non-NULL, returns an expression to first call the initializer
2009    (if it is needed) and then calls EXPR. */
2012 build_class_init (tree clas, tree expr)
2016   /* An optimization: if CLAS is a superclass of the class we're
2017      compiling, we don't need to initialize it.  However, if CLAS is
2018      an interface, it won't necessarily be initialized, even if we
2020   if ((! CLASS_INTERFACE (TYPE_NAME (clas))
2021        && inherits_from_p (current_class, clas))
2022       || current_class == clas)
2025   if (always_initialize_class_p)
/* Unconditional call to the runtime class initializer.  */
2027       init = build_call_nary (void_type_node,
2028 build_address_of (soft_initclass_node),
2029 1, build_class_ref (clas));
2030       TREE_SIDE_EFFECTS (init) = 1;
2034       tree *init_test_decl;
2036       init_test_decl = java_treetreehash_new
2037 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
2039       if (*init_test_decl == NULL)
2041 /* Build a declaration and mark it as a flag used to track
2042 static class initializations. */
2043 decl = build_decl (input_location, VAR_DECL, NULL_TREE,
2045 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
2046 DECL_CONTEXT (decl) = current_function_decl;
2047 DECL_INITIAL (decl) = boolean_false_node;
2048 /* Don't emit any symbolic debugging info for this decl. */
2049 DECL_IGNORED_P (decl) = 1;
2050 *init_test_decl = decl;
/* Guarded form: call the initializer only if the per-function flag is
   still false, then set the flag.  */
2053       init = build_call_nary (void_type_node,
2054 build_address_of (soft_initclass_node),
2055 1, build_class_ref (clas));
2056       TREE_SIDE_EFFECTS (init) = 1;
2057       init = build3 (COND_EXPR, void_type_node,
2058 build2 (EQ_EXPR, boolean_type_node,
2059 *init_test_decl, boolean_false_node),
2060 init, integer_zero_node);
2061       TREE_SIDE_EFFECTS (init) = 1;
2062       init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2063 build2 (MODIFY_EXPR, boolean_type_node,
2064 *init_test_decl, boolean_true_node));
2065       TREE_SIDE_EFFECTS (init) = 1;
2068   if (expr != NULL_TREE)
2070       expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2071       TREE_SIDE_EFFECTS (expr) = 1;
2079 /* Rewrite expensive calls that require stack unwinding at runtime to
2080    cheaper alternatives.  The logic here performs these
2083    java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2084    java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
/* One rewrite rule: match (classname, method, signature) and redirect
   to (new_classname, new_signature), optionally transforming the
   argument list via REWRITE_ARGLIST.  */
2090   const char *classname;
2092   const char *signature;
2093   const char *new_classname;
2094   const char *new_signature;
2096   void (*rewrite_arglist) (VEC(tree,gc) **);
2099 /* Add __builtin_return_address(0) to the end of an arglist. */
2103 rewrite_arglist_getcaller (VEC(tree,gc) **arglist)
2106     = build_call_expr (built_in_decls[BUILT_IN_RETURN_ADDRESS],
2107 1, integer_zero_node);
/* Inlining would change which frame the return address refers to.  */
2109   DECL_UNINLINABLE (current_function_decl) = 1;
2111   VEC_safe_push (tree, gc, *arglist, retaddr);
2114 /* Add this.class to the end of an arglist. */
2117 rewrite_arglist_getclass (VEC(tree,gc) **arglist)
2119   VEC_safe_push (tree, gc, *arglist, build_class_ref (output_class));
/* Table of call-rewrite rules; terminated by an all-NULL sentinel entry.  */
2122 static rewrite_rule rules[] =
2123   {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2124     "java.lang.Class", "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2125     ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2127    {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2128     "java.lang.Class", "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2129     ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2131    {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2132     "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2133     ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2135    {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2136     "()Ljava/lang/ClassLoader;",
2137     "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2138     ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2140    {"gnu.java.lang.VMCPStringBuilder", "toString", "([CII)Ljava/lang/String;",
2141     "java.lang.String", "([CII)Ljava/lang/String;",
2142     ACC_FINAL|ACC_PRIVATE|ACC_STATIC, NULL},
2144    {NULL, NULL, NULL, NULL, NULL, 0, NULL}};
2146 /* True if this method is special, i.e. it's a private method that
2147    should be exported from a DSO. */
2150 special_method_p (tree candidate_method)
2152   tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2153   tree method = DECL_NAME (candidate_method);
/* A method is special iff it appears in the rewrite rules table.  */
2156   for (p = rules; p->classname; p++)
2158       if (get_identifier (p->classname) == context
2159 && get_identifier (p->method) == method)
2165 /* Scan the rules list for replacements for *METHOD_P and replace the
2166    args accordingly.  If the rewrite results in an access to a private
2167    method, update SPECIAL.*/
2170 maybe_rewrite_invocation (tree *method_p, VEC(tree,gc) **arg_list_p,
2171 tree *method_signature_p, tree *special)
2173   tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2175   *special = NULL_TREE;
2177   for (p = rules; p->classname; p++)
2179       if (get_identifier (p->classname) == context)
2181 tree method = DECL_NAME (*method_p);
2182 if (get_identifier (p->method) == method
2183 && get_identifier (p->signature) == *method_signature_p)
2186 tree destination_class
2187 = lookup_class (get_identifier (p->new_classname));
2188 gcc_assert (destination_class);
2190 = lookup_java_method (destination_class,
2192 get_identifier (p->new_signature));
/* Without -fverify-invocations the replacement method may not have
   been loaded; fabricate an external decl for it.  */
2193 if (! maybe_method && ! flag_verify_invocations)
2196 = add_method (destination_class, p->flags,
2197 method, get_identifier (p->new_signature));
2198 DECL_EXTERNAL (maybe_method) = 1;
2200 *method_p = maybe_method;
2201 gcc_assert (*method_p);
2202 if (p->rewrite_arglist)
2203 p->rewrite_arglist (arg_list_p);
2204 *method_signature_p = get_identifier (p->new_signature);
2205 *special = integer_one_node;
/* Build a reference to the code of METHOD, whose declaring class and
   compilation status determine whether a direct address, an atable
   entry, or a lookup through the class's methods table is emitted.  */
2216 build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2217 tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2218 VEC(tree,gc) *arg_list ATTRIBUTE_UNUSED, tree special)
2221   if (is_compiled_class (self_type))
2223       /* With indirect dispatch we have to use indirect calls for all
2224 publicly visible methods or gcc will use PLT indirections
2225 to reach them.  We also have to use indirect dispatch for all
2226 external methods. */
2227       if (! flag_indirect_dispatch
2228 || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
2230 func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
/* Indirect case: fetch the code pointer from the atable.  */
2236 = build_int_cst (NULL_TREE,
2237 (get_symbol_table_index
2239 &TYPE_ATABLE_METHODS (output_class))));
2241 = build4 (ARRAY_REF,
2242 TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2243 TYPE_ATABLE_DECL (output_class), table_index,
2244 NULL_TREE, NULL_TREE);
2246       func = convert (method_ptr_type_node, func);
2250       /* We don't know whether the method has been (statically) compiled.
2251 Compile this code to get a reference to the method's code:
2253 SELF_TYPE->methods[METHOD_INDEX].ncode
2257       int method_index = 0;
2260       /* The method might actually be declared in some superclass, so
2261 we have to use its class context, not the caller's notion of
2262 where the method is. */
2263       self_type = DECL_CONTEXT (method);
2264       ref = build_class_ref (self_type);
2265       ref = build1 (INDIRECT_REF, class_type_node, ref);
2266       if (ncode_ident == NULL_TREE)
2267 ncode_ident = get_identifier ("ncode");
2268       if (methods_ident == NULL_TREE)
2269 methods_ident = get_identifier ("methods");
2270       ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2271 lookup_field (&class_type_node, methods_ident),
/* Linear search of TYPE_METHODS to find the method's index.  */
2273       for (meth = TYPE_METHODS (self_type);
2274 ; meth = TREE_CHAIN (meth))
2278 if (meth == NULL_TREE)
2279 fatal_error ("method '%s' not found in class",
2280 IDENTIFIER_POINTER (DECL_NAME (method)));
2283       method_index *= int_size_in_bytes (method_type_node);
2284       ref = fold_build2 (POINTER_PLUS_EXPR, method_ptr_type_node,
2285 ref, size_int (method_index));
2286       ref = build1 (INDIRECT_REF, method_type_node, ref);
2287       func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2288 ref, lookup_field (&method_type_node, ncode_ident),
/* Build an expression for the dispatch (vtable) pointer of the
   receiver, which is argument 0 of ARG_LIST.  The receiver is wrapped
   in a SAVE_EXPR so it is evaluated only once.  */
2295 invoke_build_dtable (int is_invoke_interface, VEC(tree,gc) *arg_list)
2297   tree dtable, objectref;
2298   tree saved = save_expr (VEC_index (tree, arg_list, 0));
2300   VEC_replace (tree, arg_list, 0, saved);
2302   /* If we're dealing with interfaces and if the objectref
2303 argument is an array then get the dispatch table of the class
2304 Object rather than the one from the objectref. */
2305   objectref = (is_invoke_interface
2306 && is_array_type_p (TREE_TYPE (saved))
2307 ? build_class_ref (object_type_node) : saved);
2309   if (dtable_ident == NULL_TREE)
2310     dtable_ident = get_identifier ("vtable");
2311   dtable = build_java_indirect_ref (object_type_node, objectref,
2312 flag_check_references);
2313   dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2314 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2319 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2320    T. If this decl has not been seen before, it will be added to the
2321    [oa]table_methods. If it has, the existing table slot will be
2325 get_symbol_table_index (tree t, tree special, tree *symbol_table)
2330   if (*symbol_table == NULL_TREE)
2332       *symbol_table = build_tree_list (special, t);
2336   method_list = *symbol_table;
/* Linear scan for an existing (T, SPECIAL) entry.  */
2340       tree value = TREE_VALUE (method_list);
2341       tree purpose = TREE_PURPOSE (method_list);
2342       if (value == t && purpose == special)
2345       if (TREE_CHAIN (method_list) == NULL_TREE)
2348 method_list = TREE_CHAIN (method_list);
/* Not found: append a new entry at the tail.  */
2351   TREE_CHAIN (method_list) = build_tree_list (special, t);
/* Build the function-pointer expression for an invokevirtual of
   METHOD through the vtable expression DTABLE.  SPECIAL is forwarded
   to get_symbol_table_index for the indirect-dispatch case.  */
2356 build_invokevirtual (tree dtable, tree method, tree special)
2359 tree nativecode_ptr_ptr_type_node
2360 = build_pointer_type (nativecode_ptr_type_node);
/* With -findirect-dispatch the method index is read at run time from
   the otable rather than hard-coded from DECL_VINDEX.  */
2364 if (flag_indirect_dispatch)
2366 gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
2369 = build_int_cst (NULL_TREE, get_symbol_table_index
2371 &TYPE_OTABLE_METHODS (output_class)));
2372 method_index = build4 (ARRAY_REF, integer_type_node,
2373 TYPE_OTABLE_DECL (output_class),
2374 otable_index, NULL_TREE, NULL_TREE);
2378 /* We fetch the DECL_VINDEX field directly here, rather than
2379 using get_method_index(). DECL_VINDEX is the true offset
2380 from the vtable base to a method, regardless of any extra
2381 words inserted at the start of the vtable. */
2382 method_index = DECL_VINDEX (method);
/* Scale the slot index to a byte offset; descriptor-based vtables use
   TARGET_VTABLE_USES_DESCRIPTORS words per slot.  */
2383 method_index = size_binop (MULT_EXPR, method_index,
2384 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
2385 if (TARGET_VTABLE_USES_DESCRIPTORS)
2386 method_index = size_binop (MULT_EXPR, method_index,
2387 size_int (TARGET_VTABLE_USES_DESCRIPTORS));
/* Address of the vtable slot, then load the code pointer from it
   (descriptor targets use the slot address itself).  */
2390 func = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dtable), dtable,
2391 convert (sizetype, method_index));
2393 if (TARGET_VTABLE_USES_DESCRIPTORS)
2394 func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
2397 func = fold_convert (nativecode_ptr_ptr_type_node, func);
2398 func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
/* Cached identifier for the "class" field of a vtable; GC-rooted.  */
2404 static GTY(()) tree class_ident;
/* Build the function-pointer expression for an invokeinterface of
   METHOD through the vtable expression DTABLE.  */
2406 build_invokeinterface (tree dtable, tree method)
2411 /* We expand invokeinterface here. */
2413 if (class_ident == NULL_TREE)
2414 class_ident = get_identifier ("class");
/* Fetch the receiver's class object out of its vtable.  */
2416 dtable = build_java_indirect_ref (dtable_type, dtable,
2417 flag_check_references);
2418 dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2419 lookup_field (&dtable_type, class_ident), NULL_TREE);
2421 interface = DECL_CONTEXT (method);
2422 gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2423 layout_class_methods (interface);
/* Indirect dispatch: itable entries come in pairs, hence the
   doubling; the entry at ITABLE_INDEX-1 names the interface and the
   one at ITABLE_INDEX is the method index.  */
2425 if (flag_indirect_dispatch)
2428 = 2 * (get_symbol_table_index
2429 (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2431 = build4 (ARRAY_REF,
2432 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2433 TYPE_ITABLE_DECL (output_class),
2434 build_int_cst (NULL_TREE, itable_index-1),
2435 NULL_TREE, NULL_TREE);
2437 = build4 (ARRAY_REF,
2438 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2439 TYPE_ITABLE_DECL (output_class),
2440 build_int_cst (NULL_TREE, itable_index),
2441 NULL_TREE, NULL_TREE);
2442 interface = convert (class_ptr_type, interface);
2443 idx = convert (integer_type_node, idx);
/* Direct dispatch: use the compile-time interface method index.  */
2447 idx = build_int_cst (NULL_TREE,
2448 get_interface_method_index (method, interface));
2449 interface = build_class_ref (interface);
/* Resolve at run time via _Jv_LookupInterfaceMethod(Idx).  */
2452 return build_call_nary (ptr_type_node,
2453 build_address_of (soft_lookupinterfacemethod_node),
2454 3, dtable, interface, idx);
2457 /* Expand one of the invoke_* opcodes.
2458 OPCODE is the specific opcode.
2459 METHOD_REF_INDEX is an index into the constant pool.
2460 NARGS is the number of arguments, or -1 if not specified. */
2463 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
/* NOTE(review): "&current_jcf" had been mis-encoded as "¤t_jcf"
   (the substring "&curren" was swallowed as the HTML currency entity);
   the identifier is restored in the three constant-pool accessors
   below.  */
2465 tree method_signature
2466 = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
2467 tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool,
2470 = get_class_constant (current_jcf,
2471 COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool,
2473 const char *const self_name
2474 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2475 tree call, func, method, method_type;
2476 VEC(tree,gc) *arg_list;
2477 tree check = NULL_TREE;
2479 tree special = NULL_TREE;
/* Make sure the target class is loaded and laid out before we try to
   look the method up in it.  */
2481 if (! CLASS_LOADED_P (self_type))
2483 load_class (self_type, 1);
2484 safe_layout_class (self_type);
2485 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2486 fatal_error ("failed to find class '%s'", self_name);
2488 layout_class_methods (self_type);
/* Constructors (<init>) are looked up by signature only.  */
2490 if (ID_INIT_P (method_name))
2491 method = lookup_java_constructor (self_type, method_signature);
2493 method = lookup_java_method (self_type, method_name, method_signature);
2495 /* We've found a method in a class other than the one in which it
2496 was wanted. This can happen if, for instance, we're trying to
2497 compile invokespecial super.equals().
2498 FIXME: This is a kludge. Rather than nullifying the result, we
2499 should change lookup_java_method() so that it doesn't search the
2500 superclass chain when we're BC-compiling. */
2501 if (! flag_verify_invocations
2503 && ! TYPE_ARRAY_P (self_type)
2504 && self_type != DECL_CONTEXT (method))
2507 /* We've found a method in an interface, but this isn't an interface
2509 if (opcode != OPCODE_invokeinterface
2511 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2514 /* We've found a non-interface method but we are making an
2515 interface call. This can happen if the interface overrides a
2516 method in Object. */
2517 if (! flag_verify_invocations
2518 && opcode == OPCODE_invokeinterface
2520 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
/* Lookup failed.  With verification (or direct dispatch) this is a
   hard error; otherwise fabricate a dummy method decl so BC
   compilation can proceed.  */
2523 if (method == NULL_TREE)
2525 if (flag_verify_invocations || ! flag_indirect_dispatch)
2527 error ("class '%s' has no method named '%s' matching signature '%s'",
2529 IDENTIFIER_POINTER (method_name),
2530 IDENTIFIER_POINTER (method_signature));
2534 int flags = ACC_PUBLIC;
2535 if (opcode == OPCODE_invokestatic)
2536 flags |= ACC_STATIC;
2537 if (opcode == OPCODE_invokeinterface)
2539 flags |= ACC_INTERFACE | ACC_ABSTRACT;
2540 CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2542 method = add_method (self_type, flags, method_name,
2544 DECL_ARTIFICIAL (method) = 1;
2545 METHOD_DUMMY (method) = 1;
2546 layout_class_method (self_type, NULL,
2551 /* Invoke static can't invoke static/abstract method */
2552 if (method != NULL_TREE)
2554 if (opcode == OPCODE_invokestatic)
2556 if (!METHOD_STATIC (method))
2558 error ("invokestatic on non static method");
2561 else if (METHOD_ABSTRACT (method))
2563 error ("invokestatic on abstract method");
2569 if (METHOD_STATIC (method))
2571 error ("invoke[non-static] on static method");
/* Error path: pop the would-be arguments and push a properly typed
   zero so the abstract stack stays consistent.  */
2577 if (method == NULL_TREE)
2579 /* If we got here, we emitted an error message above. So we
2580 just pop the arguments, push a properly-typed zero, and
2582 method_type = get_type_from_signature (method_signature);
2583 pop_arguments (method_type);
2584 if (opcode != OPCODE_invokestatic)
2585 pop_type (self_type);
2586 method_type = promote_type (TREE_TYPE (method_type));
2587 push_value (convert (method_type, integer_zero_node));
2591 method_type = TREE_TYPE (method);
2592 arg_list = pop_arguments (method_type);
2593 flush_quick_stack ();
2595 maybe_rewrite_invocation (&method, &arg_list, &method_signature,
/* Select the dispatch strategy: static and effectively-final calls
   bind directly; everything else goes through the vtable/itable.  */
2599 if (opcode == OPCODE_invokestatic)
2600 func = build_known_method_ref (method, method_type, self_type,
2601 method_signature, arg_list, special);
2602 else if (opcode == OPCODE_invokespecial
2603 || (opcode == OPCODE_invokevirtual
2604 && (METHOD_PRIVATE (method)
2605 || METHOD_FINAL (method)
2606 || CLASS_FINAL (TYPE_NAME (self_type)))))
2608 /* If the object for the method call is null, we throw an
2609 exception. We don't do this if the object is the current
2610 method's `this'. In other cases we just rely on an
2611 optimization pass to eliminate redundant checks. FIXME:
2612 Unfortunately there doesn't seem to be a way to determine
2613 what the current method is right now.
2614 We do omit the check if we're calling <init>. */
2615 /* We use a SAVE_EXPR here to make sure we only evaluate
2616 the new `self' expression once. */
2617 tree save_arg = save_expr (VEC_index (tree, arg_list, 0));
2618 VEC_replace (tree, arg_list, 0, save_arg);
2619 check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2620 func = build_known_method_ref (method, method_type, self_type,
2621 method_signature, arg_list, special);
2625 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2627 if (opcode == OPCODE_invokevirtual)
2628 func = build_invokevirtual (dtable, method, special);
2630 func = build_invokeinterface (dtable, method);
2633 if (TREE_CODE (func) == ADDR_EXPR)
2634 TREE_TYPE (func) = build_pointer_type (method_type);
2636 func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2638 call = build_call_vec (TREE_TYPE (method_type), func, arg_list);
2639 TREE_SIDE_EFFECTS (call) = 1;
2640 call = check_for_builtin (method, call);
/* If a null-receiver check was built, sequence it before the call.  */
2642 if (check != NULL_TREE)
2644 call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2645 TREE_SIDE_EFFECTS (call) = 1;
/* void calls are emitted as statements; value-returning calls leave
   their result on the abstract stack.  */
2648 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2649 java_add_stmt (call);
2653 flush_quick_stack ();
2657 /* Create a stub which will be put into the vtable but which will call
/* Build the body of a JNI stub for native METHOD: obtain a JNIEnv,
   look up the target function with _Jv_LookupJNIMethod (cached in a
   static "meth" variable), call it, unwrap a possible weak reference
   result, pop the JNI frame, and return.  */
2661 build_jni_stub (tree method)
2663 tree jnifunc, call, body, method_sig, arg_types;
2664 tree jniarg0, jniarg1, jniarg2, jniarg3;
2665 tree jni_func_type, tem;
2666 tree env_var, res_var = NULL_TREE, block;
2670 VEC(tree,gc) *args = NULL;
2673 tree klass = DECL_CONTEXT (method);
2674 klass = build_class_ref (klass);
2676 gcc_assert (METHOD_NATIVE (method) && flag_jni);
/* The stub body is generated here, so the decl is no longer
   external.  */
2678 DECL_ARTIFICIAL (method) = 1;
2679 DECL_EXTERNAL (method) = 0;
/* Local holding the JNIEnv pointer for this frame.  */
2681 env_var = build_decl (input_location,
2682 VAR_DECL, get_identifier ("env"), ptr_type_node);
2683 DECL_CONTEXT (env_var) = method;
/* For non-void methods, a local to capture the JNI call result.  */
2685 if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2687 res_var = build_decl (input_location, VAR_DECL, get_identifier ("res"),
2688 TREE_TYPE (TREE_TYPE (method)));
2689 DECL_CONTEXT (res_var) = method;
2690 TREE_CHAIN (env_var) = res_var;
2693 method_args = DECL_ARGUMENTS (method);
2694 block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2695 TREE_SIDE_EFFECTS (block) = 1;
2696 TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));
2698 /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2699 body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2700 build_call_nary (ptr_type_node,
2701 build_address_of (soft_getjnienvnewframe_node),
2704 /* The JNIEnv structure is the first argument to the JNI function. */
2705 args_size += int_size_in_bytes (TREE_TYPE (env_var));
2706 VEC_safe_push (tree, gc, args, env_var);
2708 /* For a static method the second argument is the class. For a
2709 non-static method the second argument is `this'; that is already
2710 available in the argument list. */
2711 if (METHOD_STATIC (method))
2713 args_size += int_size_in_bytes (TREE_TYPE (klass));
2714 VEC_safe_push (tree, gc, args, klass);
2717 /* All the arguments to this method become arguments to the
2718 underlying JNI function. If we had to wrap object arguments in a
2719 special way, we would do that here. */
2720 for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem))
2722 int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2723 #ifdef PARM_BOUNDARY
/* Round each argument's size up to the platform parameter
   boundary when one is defined.  */
2724 arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2727 args_size += (arg_bits / BITS_PER_UNIT);
2729 VEC_safe_push (tree, gc, args, tem);
2731 arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2733 /* Argument types for static methods and the JNIEnv structure.
2734 FIXME: Write and use build_function_type_vec to avoid this. */
2735 if (METHOD_STATIC (method))
2736 arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2737 arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2739 /* We call _Jv_LookupJNIMethod to find the actual underlying
2740 function pointer. _Jv_LookupJNIMethod will throw the appropriate
2741 exception if this function is not found at runtime. */
2742 method_sig = build_java_signature (TREE_TYPE (method));
2744 jniarg1 = build_utf8_ref (DECL_NAME (method));
2745 jniarg2 = build_utf8_ref (unmangle_classname
2746 (IDENTIFIER_POINTER (method_sig),
2747 IDENTIFIER_LENGTH (method_sig)));
2748 jniarg3 = build_int_cst (NULL_TREE, args_size);
2750 tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2752 #ifdef MODIFY_JNI_METHOD_CALL
2753 tem = MODIFY_JNI_METHOD_CALL (tem);
2756 jni_func_type = build_pointer_type (tem);
2758 /* Use the actual function type, rather than a generic pointer type,
2759 such that this decl keeps the actual pointer type from being
2760 garbage-collected. If it is, we end up using canonical types
2761 with different uids for equivalent function types, and this in
2762 turn causes utf8 identifiers and output order to vary. */
2763 meth_var = build_decl (input_location,
2764 VAR_DECL, get_identifier ("meth"), jni_func_type);
2765 TREE_STATIC (meth_var) = 1;
2766 TREE_PUBLIC (meth_var) = 0;
2767 DECL_EXTERNAL (meth_var) = 0;
2768 DECL_CONTEXT (meth_var) = method;
2769 DECL_ARTIFICIAL (meth_var) = 1;
2770 DECL_INITIAL (meth_var) = null_pointer_node;
2771 TREE_USED (meth_var) = 1;
2772 chainon (env_var, meth_var);
2773 build_result_decl (method);
/* meth != NULL ? meth : (meth = _Jv_LookupJNIMethod (...)) — the
   lookup result is cached in the static so it runs at most once.  */
2775 jnifunc = build3 (COND_EXPR, jni_func_type,
2776 build2 (NE_EXPR, boolean_type_node,
2777 meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
2779 build2 (MODIFY_EXPR, jni_func_type, meth_var,
2781 (NOP_EXPR, jni_func_type,
2782 build_call_nary (ptr_type_node,
2784 (soft_lookupjnimethod_node),
2787 jniarg2, jniarg3))));
2789 /* Now we make the actual JNI call via the resulting function
2791 call = build_call_vec (TREE_TYPE (TREE_TYPE (method)), jnifunc, args);
2793 /* If the JNI call returned a result, capture it here. If we had to
2794 unwrap JNI object results, we would do that here. */
2795 if (res_var != NULL_TREE)
2797 /* If the call returns an object, it may return a JNI weak
2798 reference, in which case we must unwrap it. */
2799 if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2800 call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2801 build_address_of (soft_unwrapjni_node),
2803 call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2807 TREE_SIDE_EFFECTS (call) = 1;
2809 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2810 TREE_SIDE_EFFECTS (body) = 1;
2812 /* Now free the environment we allocated. */
2813 call = build_call_nary (ptr_type_node,
2814 build_address_of (soft_jnipopsystemframe_node),
2816 TREE_SIDE_EFFECTS (call) = 1;
2817 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2818 TREE_SIDE_EFFECTS (body) = 1;
2820 /* Finally, do the return. */
2821 if (res_var != NULL_TREE)
2824 gcc_assert (DECL_RESULT (method));
2825 /* Make sure we copy the result variable to the actual
2826 result. We use the type of the DECL_RESULT because it
2827 might be different from the return type of the function:
2828 it might be promoted. */
2829 drt = TREE_TYPE (DECL_RESULT (method));
2830 if (drt != TREE_TYPE (res_var))
2831 res_var = build1 (CONVERT_EXPR, drt, res_var);
2832 res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2833 TREE_SIDE_EFFECTS (res_var) = 1;
2836 body = build2 (COMPOUND_EXPR, void_type_node, body,
2837 build1 (RETURN_EXPR, void_type_node, res_var));
2838 TREE_SIDE_EFFECTS (body) = 1;
2840 /* Prepend class initialization for static methods reachable from
2842 if (METHOD_STATIC (method)
2843 && (! METHOD_PRIVATE (method)
2844 || INNER_CLASS_P (DECL_CONTEXT (method))))
2846 tree init = build_call_expr (soft_initclass_node, 1,
2848 body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2849 TREE_SIDE_EFFECTS (body) = 1;
/* Wrap the whole thing in a BIND_EXPR holding the stub's locals.  */
2852 bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2858 /* Given lvalue EXP, return a volatile expression that references the
/* Rebuild EXP as *(volatile T *)&EXP so every access through the
   result is a volatile access.  */
2862 java_modify_addr_for_volatile (tree exp)
2864 tree exp_type = TREE_TYPE (exp);
/* Volatile-qualified variant of EXP's type.  */
2866 = build_qualified_type (exp_type,
2867 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2868 tree addr = build_fold_addr_expr (exp);
/* Take the address, view it as pointer-to-volatile, dereference.  */
2869 v_type = build_pointer_type (v_type);
2870 addr = fold_convert (v_type, addr);
2871 exp = build_fold_indirect_ref (addr);
2876 /* Expand an operation to extract from or store into a field.
2877 IS_STATIC is 1 iff the field is static.
2878 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2879 FIELD_REF_INDEX is an index into the constant pool. */
2882 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
/* NOTE(review): "&current_jcf" had been mis-encoded as "¤t_jcf"
   (the substring "&curren" was swallowed as the HTML currency entity);
   the identifier is restored in the three constant-pool accessors
   below.  */
2885 = get_class_constant (current_jcf,
2886 COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
2888 const char *self_name
2889 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2890 tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
2891 tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
2893 tree field_type = get_type_from_signature (field_signature);
/* For a put, the new value is popped first (it is on top of the
   abstract stack).  */
2894 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2897 tree original_self_type = self_type;
2901 if (! CLASS_LOADED_P (self_type))
2902 load_class (self_type, 1);
2903 field_decl = lookup_field (&self_type, field_name);
2904 if (field_decl == error_mark_node)
/* Field not found: without -fverify-invocations fabricate an
   artificial decl so BC compilation can continue; otherwise error.  */
2908 else if (field_decl == NULL_TREE)
2910 if (! flag_verify_invocations)
2912 int flags = ACC_PUBLIC;
2914 flags |= ACC_STATIC;
2915 self_type = original_self_type;
2916 field_decl = add_field (original_self_type, field_name,
2918 DECL_ARTIFICIAL (field_decl) = 1;
2919 DECL_IGNORED_P (field_decl) = 1;
2921 /* FIXME: We should be pessimistic about volatility. We
2922 don't know one way or another, but this is safe.
2923 However, doing this has bad effects on code quality. We
2924 need to look at better ways to do this. */
2925 TREE_THIS_VOLATILE (field_decl) = 1;
2930 error ("missing field '%s' in '%s'",
2931 IDENTIFIER_POINTER (field_name), self_name);
2935 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2937 error ("mismatching signature for field '%s' in '%s'",
2938 IDENTIFIER_POINTER (field_name), self_name);
/* Instance fields need the object reference popped off the stack.  */
2941 field_ref = is_static ? NULL_TREE : pop_value (self_type);
2945 push_value (convert (field_type, integer_zero_node));
2946 flush_quick_stack ();
2950 field_ref = build_field_ref (field_ref, self_type, field_name);
2952 && ! flag_indirect_dispatch)
/* Static field access may require initializing the declaring class
   (or the interface that actually declares the field) first.  */
2954 tree context = DECL_CONTEXT (field_ref);
2955 if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2956 field_ref = build_class_init (context, field_ref);
2958 field_ref = build_class_init (self_type, field_ref);
2962 flush_quick_stack ();
2963 if (FIELD_FINAL (field_decl))
2965 if (DECL_CONTEXT (field_decl) != current_class)
2966 error ("assignment to final field %q+D not in field's class",
2968 /* We used to check for assignments to final fields not
2969 occurring in the class initializer or in a constructor
2970 here. However, this constraint doesn't seem to be
2971 enforced by the JVM. */
/* Volatile store: route the access through a volatile-qualified
   lvalue and follow it with a memory barrier.  */
2974 if (TREE_THIS_VOLATILE (field_decl))
2975 field_ref = java_modify_addr_for_volatile (field_ref);
2977 modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2978 field_ref, new_value);
2980 if (TREE_THIS_VOLATILE (field_decl))
2982 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2984 java_add_stmt (modify_expr);
/* Get path: copy the field into a fresh temporary, with the same
   volatile treatment as above.  */
2988 tree temp = build_decl (input_location,
2989 VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2990 java_add_local_var (temp);
2992 if (TREE_THIS_VOLATILE (field_decl))
2993 field_ref = java_modify_addr_for_volatile (field_ref);
2996 = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2997 java_add_stmt (modify_expr);
2999 if (TREE_THIS_VOLATILE (field_decl))
3001 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
3005 TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
/* Restore the verifier-recorded type state (locals + stack) for
   bytecode position PC into type_map, and recompute stack_pointer.  */
3009 load_type_state (int pc)
3012 tree vec = VEC_index (tree, type_states, pc);
3013 int cur_length = TREE_VEC_LENGTH (vec);
/* The vector holds locals first, then stack slots; the difference is
   the operand stack depth.  */
3014 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
3015 for (i = 0; i < cur_length; i++)
3016 type_map [i] = TREE_VEC_ELT (vec, i);
3019 /* Go over METHOD's bytecode and note instruction starts in
3020 instruction_bits[]. */
/* Pre-pass over the bytecode: mark every instruction start and every
   branch target so later passes know where labels are needed.  The
   PRE_* macros below give javaop.def's dispatch table "skip the
   operands and record labels" semantics for this pass only.  */
3023 note_instructions (JCF *jcf, tree method)
3026 unsigned char* byte_ops;
3027 long length = DECL_CODE_LENGTH (method);
3032 #undef RET /* Defined by config/i386/i386.h */
3034 #define BCODE byte_ops
3035 #define BYTE_type_node byte_type_node
3036 #define SHORT_type_node short_type_node
3037 #define INT_type_node int_type_node
3038 #define LONG_type_node long_type_node
3039 #define CHAR_type_node char_type_node
3040 #define PTR_type_node ptr_type_node
3041 #define FLOAT_type_node float_type_node
3042 #define DOUBLE_type_node double_type_node
3043 #define VOID_type_node void_type_node
3044 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3045 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3046 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3047 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3049 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
3051 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3052 byte_ops = jcf->read_ptr;
/* (Re)allocate the per-PC bit array and the per-PC type-state
   vector; one extra slot covers the position just past the end.  */
3053 instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
3054 memset (instruction_bits, 0, length + 1);
3055 type_states = VEC_alloc (tree, gc, length + 1);
3056 VEC_safe_grow_cleared (tree, gc, type_states, length + 1);
3058 /* This pass figures out which PC can be the targets of jumps. */
3059 for (PC = 0; PC < length;)
3061 int oldpc = PC; /* PC at instruction start. */
3062 instruction_bits [PC] |= BCODE_INSTRUCTION_START;
3063 switch (byte_ops[PC++])
3065 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3067 PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3070 #define NOTE_LABEL(PC) note_label(oldpc, PC)
3072 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3073 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3074 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3075 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3076 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3077 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3078 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3079 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3081 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3082 PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3083 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3084 ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3085 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3086 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3087 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3088 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3090 /* two forms of wide instructions */
3091 #define PRE_SPECIAL_WIDE(IGNORE) \
3093 int modified_opcode = IMMEDIATE_u1; \
3094 if (modified_opcode == OPCODE_iinc) \
3096 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3097 (void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
3101 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3105 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3107 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3109 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3110 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3111 PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3112 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
3113 #define PRE_ARRAY_STORE(TYPE) /* nothing */
3114 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3115 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3116 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3117 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3118 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
/* Branch-like opcodes record their target via NOTE_LABEL.  */
3120 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3121 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3122 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3123 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3124 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3125 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3126 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3128 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3130 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)
/* Switch opcodes: align PC to a 4-byte boundary, then scan the
   offset table, noting each case target.  */
3132 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3133 PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3135 #define PRE_LOOKUP_SWITCH \
3136 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3137 NOTE_LABEL (default_offset+oldpc); \
3139 while (--npairs >= 0) { \
3140 jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
3141 jint offset = IMMEDIATE_s4; \
3142 NOTE_LABEL (offset+oldpc); } \
3145 #define PRE_TABLE_SWITCH \
3146 { jint default_offset = IMMEDIATE_s4; \
3147 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3148 NOTE_LABEL (default_offset+oldpc); \
3150 while (low++ <= high) { \
3151 jint offset = IMMEDIATE_s4; \
3152 NOTE_LABEL (offset+oldpc); } \
3155 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3156 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3157 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3158 (void)(IMMEDIATE_u2); \
3159 PC += 2 * IS_INTERFACE /* for invokeinterface */;
3161 #include "javaop.def"
/* Main bytecode-to-GENERIC translation loop for METHOD: note line
   numbers, verify, then expand each instruction in order, emitting
   labels at branch targets and nops over unverified (dead) code.  */
3168 expand_byte_code (JCF *jcf, tree method)
3172 const unsigned char *linenumber_pointer;
3173 int dead_code_index = -1;
3174 unsigned char* byte_ops;
3175 long length = DECL_CODE_LENGTH (method);
3176 location_t max_location = input_location;
3179 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3180 byte_ops = jcf->read_ptr;
3182 /* We make an initial pass of the line number table, to note
3183 which instructions have associated line number entries. */
3184 linenumber_pointer = linenumber_table;
3185 for (i = 0; i < linenumber_count; i++)
3187 int pc = GET_u2 (linenumber_pointer);
/* Each table entry is 4 bytes: u2 start_pc, u2 line_number.  */
3188 linenumber_pointer += 4;
3190 warning (0, "invalid PC in line number table");
3193 if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3194 instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3195 instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
/* Bail out if the bytecode fails verification.  */
3199 if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3202 promote_arguments ();
3203 cache_this_class_ref (method);
3204 cache_cpool_data_ref ();
3206 /* Translate bytecodes. */
3207 linenumber_pointer = linenumber_table;
3208 for (PC = 0; PC < length;)
3210 if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3212 tree label = lookup_label (PC);
3213 flush_quick_stack ();
3214 if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3215 java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
/* Reload the verifier's type map at verified join points.  */
3216 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3217 load_type_state (PC);
3220 if (! (instruction_bits [PC] & BCODE_VERIFIED))
3222 if (dead_code_index == -1)
3224 /* This is the start of a region of unreachable bytecodes.
3225 They still need to be processed in order for EH ranges
3226 to get handled correctly. However, we can simply
3227 replace these bytecodes with nops. */
3228 dead_code_index = PC;
3231 /* Turn this bytecode into a nop. */
3236 if (dead_code_index != -1)
3238 /* We've just reached the end of a region of dead code. */
3240 warning (0, "unreachable bytecode from %d to before %d",
3241 dead_code_index, PC);
3242 dead_code_index = -1;
3246 /* Handle possible line number entry for this PC.
3248 This code handles out-of-order and multiple linenumbers per PC,
3249 but is optimized for the case of line numbers increasing
3250 monotonically with PC. */
3251 if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
3253 if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3254 || GET_u2 (linenumber_pointer) != PC)
3255 linenumber_pointer = linenumber_table;
3256 while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3258 int pc = GET_u2 (linenumber_pointer);
3259 linenumber_pointer += 4;
3262 int line = GET_u2 (linenumber_pointer - 2);
3263 input_location = linemap_line_start (line_table, line, 1);
/* Track the highest line seen for DECL_FUNCTION_LAST_LINE.  */
3264 if (input_location > max_location)
3265 max_location = input_location;
3266 if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
3271 maybe_pushlevels (PC);
3272 PC = process_jvm_instruction (PC, byte_ops, length);
3273 maybe_poplevels (PC);
3276 uncache_this_class_ref (method);
3278 if (dead_code_index != -1)
3280 /* We've just reached the end of a region of dead code. */
3282 warning (0, "unreachable bytecode from %d to the end of the method",
3286 DECL_FUNCTION_LAST_LINE (method) = max_location;
/* Push the constant-pool entry INDEX of JCF onto the abstract stack:
   strings become String references, classes become class references,
   and everything else goes through get_constant.  */
3290 java_push_constant_from_pool (JCF *jcf, int index)
3293 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
/* Re-intern the string's name constant and build a reference to
   the (promoted) java.lang.String object.  */
3296 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3297 index = alloc_name_constant (CONSTANT_String, name);
3298 c = build_ref_from_constant_pool (index);
3299 c = convert (promote_type (string_type_node), c);
3301 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3302 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3304 tree record = get_class_constant (jcf, index);
3305 c = build_class_ref (record);
/* Numeric and other simple constants.  */
3308 c = get_constant (jcf, index);
3313 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3314 long length ATTRIBUTE_UNUSED)
3316 const char *opname; /* Temporary ??? */
3317 int oldpc = PC; /* PC at instruction start. */
3319 /* If the instruction is at the beginning of an exception handler,
3320 replace the top of the stack with the thrown object reference. */
3321 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3323 /* Note that the verifier will not emit a type map at all for
3324 dead exception handlers. In this case we just ignore the
3326 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3328 tree type = pop_type (promote_type (throwable_type_node));
3329 push_value (build_exception_object_ref (type));
3333 switch (byte_ops[PC++])
3335 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3338 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3341 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3343 int saw_index = 0; \
3344 int index = OPERAND_VALUE; \
3345 (void) saw_index; /* Avoid set but not used warning. */ \
3347 (find_local_variable (index, return_address_type_node, oldpc)); \
3350 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3352 /* OPERAND_VALUE may have side-effects on PC */ \
3353 int opvalue = OPERAND_VALUE; \
3354 build_java_jsr (oldpc + opvalue, PC); \
3357 /* Push a constant onto the stack. */
3358 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3359 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3360 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3361 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3363 /* internal macro added for use by the WIDE case */
3364 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3365 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3367 /* Push local variable onto the opcode stack. */
3368 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3370 /* have to do this since OPERAND_VALUE may have side-effects */ \
3371 int opvalue = OPERAND_VALUE; \
3372 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3375 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3376 expand_java_return (OPERAND_TYPE##_type_node)
/* Java's 'rem' opcodes are truncating modulo.  */
3378 #define REM_EXPR TRUNC_MOD_EXPR
3379 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3380 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3382 #define FIELD(IS_STATIC, IS_PUT) \
3383 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
/* TEST and COND both branch to oldpc + the signed 16-bit offset that
   follows the opcode.  Presumably TEST compares one popped value against
   zero and COND compares two popped values -- confirm in expand_test /
   expand_cond.  */
3385 #define TEST(OPERAND_TYPE, CONDITION) \
3386 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3388 #define COND(OPERAND_TYPE, CONDITION) \
3389 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
/* Branch offsets are relative to the opcode's own address (oldpc).  */
3391 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3392 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3394 #define BRANCH_GOTO(OPERAND_VALUE) \
3395 expand_java_goto (oldpc + OPERAND_VALUE)
3397 #define BRANCH_CALL(OPERAND_VALUE) \
3398 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3401 #define BRANCH_RETURN(OPERAND_VALUE) \
3403 tree type = OPERAND_TYPE##_type_node; \
3404 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3405 expand_java_ret (value); \
3409 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3410 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3411 fprintf (stderr, "(not implemented)\n")
3412 #define NOT_IMPL1(OPERAND_VALUE) \
3413 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3414 fprintf (stderr, "(not implemented)\n")
/* NOTE(review): BRANCH_RETURN is defined twice in this excerpt; the two
   definitions presumably sit in different preprocessor branches whose
   #if/#else lines were elided -- confirm against the full file before
   changing either one.  */
3416 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
/* Stack-manipulation opcodes (pop/pop2, swap, dup*) dispatch through
   STACK_<SUBOP>; COUNT presumably counts stack words -- see
   java_stack_pop / java_stack_dup.  */
3418 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3420 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3422 #define STACK_SWAP(COUNT) java_stack_swap()
3424 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3425 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3426 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
/* Per the JVM specification, the operands of tableswitch/lookupswitch
   begin at the next 4-byte boundary after the opcode, so round PC up
   before reading them.  */
3428 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3429 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
/* lookupswitch: a default offset, then npairs explicit (match, offset)
   pairs, each added as a case label on the switch expression.  */
3431 #define LOOKUP_SWITCH \
3432 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3433 tree selector = pop_value (INT_type_node); \
3434 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3435 while (--npairs >= 0) \
3437 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3438 expand_java_add_case (switch_expr, match, oldpc + offset); \
/* tableswitch: a default offset, a low..high match range, then one
   branch offset per consecutive match value.  */
3442 #define TABLE_SWITCH \
3443 { jint default_offset = IMMEDIATE_s4; \
3444 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3445 tree selector = pop_value (INT_type_node); \
3446 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3447 for (; low <= high; low++) \
3449 jint offset = IMMEDIATE_s4; \
3450 expand_java_add_case (switch_expr, low, oldpc + offset); \
/* invoke*: the opcode byte itself was already consumed, so re-read it
   from byte_ops[PC-1]; a u2 methodref index follows.  For interface
   calls a count byte is read (into nargs, declared in elided lines) and
   the following pad byte is read and discarded, matching the
   invokeinterface encoding.  */
3454 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3455 { int opcode = byte_ops[PC-1]; \
3456 int method_ref_index = IMMEDIATE_u2; \
3458 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3460 expand_invoke (opcode, method_ref_index, nargs); \
3463 /* Handle new, checkcast, instanceof */
3464 #define OBJECT(TYPE, OP) \
3465 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
/* Array opcodes dispatch through ARRAY_<SUBOP>.  */
3467 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3469 #define ARRAY_LOAD(OPERAND_TYPE) \
3471 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3474 #define ARRAY_STORE(OPERAND_TYPE) \
3476 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3479 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
/* anewarray (PTR), newarray (NUM: primitive atype byte), and
   multianewarray (MULTI: class constant + dimension count).  */
3480 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3481 #define ARRAY_NEW_PTR() \
3482 push_value (build_anewarray (get_class_constant (current_jcf, \
3484 pop_value (int_type_node)));
3485 #define ARRAY_NEW_NUM() \
3487 int atype = IMMEDIATE_u1; \
3488 push_value (build_newarray (atype, pop_value (int_type_node)));\
3490 #define ARRAY_NEW_MULTI() \
3492 tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3493 int ndims = IMMEDIATE_u1; \
3494 expand_java_multianewarray( klass, ndims ); \
3497 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3498 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3499 pop_value (OPERAND_TYPE##_type_node)));
/* CONVERT2 wraps the converted value in a NOP_EXPR of int type --
   presumably for the i2b/i2c/i2s narrow-then-repush conversions, since
   the pushed node is typed int regardless of TO_TYPE; confirm against
   javaop.def.  Plain CONVERT pushes the target type directly.  */
3501 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3503 push_value (build1 (NOP_EXPR, int_type_node, \
3504 (convert (TO_TYPE##_type_node, \
3505 pop_value (FROM_TYPE##_type_node))))); \
3508 #define CONVERT(FROM_TYPE, TO_TYPE) \
3510 push_value (convert (TO_TYPE##_type_node, \
3511 pop_value (FROM_TYPE##_type_node))); \
3514 /* internal macro added for use by the WIDE case
3515 Added TREE_TYPE (decl) assignment, apbianco */
/* Pop a value of (nominal) type OPTYPE and store it into local slot
   OPVALUE.  The slot's recorded type is refreshed to the actual type of
   the popped value before the MODIFY_EXPR is emitted.  NOTE(review):
   value and decl are declared in lines elided from this excerpt.  */
3516 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3519 int index = OPVALUE; \
3520 tree type = OPTYPE; \
3521 value = pop_value (type); \
3522 type = TREE_TYPE (value); \
3523 decl = find_local_variable (index, type, oldpc); \
3524 set_local_type (index, type); \
3525 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
3528 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3530 /* have to do this since OPERAND_VALUE may have side-effects */ \
3531 int opvalue = OPERAND_VALUE; \
3532 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
/* monitorenter/monitorexit/iinc/wide/athrow dispatch via
   SPECIAL_<INSTRUCTION>.  */
3535 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3536 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3538 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3539 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
/* Pop the object reference, flush the quick stack, and emit a call to
   the runtime monitor routine (soft_monitorenter/exit), marked as having
   side effects so it is not optimized away.  */
3541 #define MONITOR_OPERATION(call) \
3543 tree o = pop_value (ptr_type_node); \
3545 flush_quick_stack (); \
3546 c = build_java_monitor (call, o); \
3547 TREE_SIDE_EFFECTS (c) = 1; \
3548 java_add_stmt (c); \
3551 #define SPECIAL_IINC(IGNORED) \
3553 unsigned int local_var_index = IMMEDIATE_u1; \
3554 int ival = IMMEDIATE_s1; \
3555 expand_iinc(local_var_index, ival, oldpc); \
3558 #define SPECIAL_WIDE(IGNORED) \
3560 int modified_opcode = IMMEDIATE_u1; \
3561 unsigned int local_var_index = IMMEDIATE_u2; \
3562 switch (modified_opcode) \
3566 int ival = IMMEDIATE_s2; \
3567 expand_iinc (local_var_index, ival, oldpc); \
3570 case OPCODE_iload: \
3571 case OPCODE_lload: \
3572 case OPCODE_fload: \
3573 case OPCODE_dload: \
3574 case OPCODE_aload: \
3576 /* duplicate code from LOAD macro */ \
3577 LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3580 case OPCODE_istore: \
3581 case OPCODE_lstore: \
3582 case OPCODE_fstore: \
3583 case OPCODE_dstore: \
3584 case OPCODE_astore: \
3586 STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3590 error ("unrecogized wide sub-instruction"); \
3594 #define SPECIAL_THROW(IGNORED) \
3595 build_java_athrow (pop_value (throwable_type_node))
3597 #define SPECIAL_BREAK NOT_IMPL1
3598 #define IMPL NOT_IMPL
3600 #include "javaop.def"
3603 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3608 /* Return the opcode at PC in the code section pointed to by
   CODE_OFFSET in the class file being read through JCF.  The reader's
   current position is saved first and restored afterwards, so this is a
   side-effect-free peek.  */
3611 static unsigned char
3612 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3614 unsigned char opcode;
3615 long absolute_offset = (long)JCF_TELL (jcf); /* remember current position */
3617 JCF_SEEK (jcf, code_offset);
3618 opcode = jcf->read_ptr [pc];
3619 JCF_SEEK (jcf, absolute_offset); /* restore saved position */
3623 /* Some bytecode compilers are emitting accurate LocalVariableTable
3624 attributes. Here's an example:
3629 Attribute "LocalVariableTable"
3630 slot #<n>: ... (PC: PC+1 length: L)
3632 This is accurate because the local in slot <n> really exists after
3633 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3635 This procedure recognizes this situation and extends the live range
3636 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3637 length of the store instruction.)
3639 This function is used by `give_name_to_locals' so that a local's
3640 DECL features a DECL_LOCAL_START_PC such that the first related
3641 store operation will use DECL as a destination, not an unrelated
3642 temporary created for the occasion.
3644 This function uses a global (instruction_bits) `note_instructions' should
3645 have allocated and filled properly. */
3648 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3649 int start_pc, int slot)
/* NOTE(review): pc and the wide flag used below are declared in lines
   elided from this excerpt.  */
3651 int first, index, opcode;
3660 /* Find last previous instruction and remember it */
3661 for (pc = start_pc-1; pc; pc--)
3662 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3666 /* Retrieve the instruction, handle `wide'. */
3667 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3668 if (opcode == OPCODE_wide)
3671 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
/* Map each <t>store_<n> group to its first member, so that
   (opcode - first) below recovers the slot number encoded in the
   opcode itself.  */
3676 case OPCODE_astore_0:
3677 case OPCODE_astore_1:
3678 case OPCODE_astore_2:
3679 case OPCODE_astore_3:
3680 first = OPCODE_astore_0;
3683 case OPCODE_istore_0:
3684 case OPCODE_istore_1:
3685 case OPCODE_istore_2:
3686 case OPCODE_istore_3:
3687 first = OPCODE_istore_0;
3690 case OPCODE_lstore_0:
3691 case OPCODE_lstore_1:
3692 case OPCODE_lstore_2:
3693 case OPCODE_lstore_3:
3694 first = OPCODE_lstore_0;
3697 case OPCODE_fstore_0:
3698 case OPCODE_fstore_1:
3699 case OPCODE_fstore_2:
3700 case OPCODE_fstore_3:
3701 first = OPCODE_fstore_0;
3704 case OPCODE_dstore_0:
3705 case OPCODE_dstore_1:
3706 case OPCODE_dstore_2:
3707 case OPCODE_dstore_3:
3708 first = OPCODE_dstore_0;
/* Explicit-index <t>store: read the operand byte(s) that follow the
   opcode.  NOTE(review): for the wide form the two operand bytes are
   combined as (second << 8) + first, but JVM wide operands are
   big-endian (the first byte is the high byte) -- verify this byte
   order against the full file and the JVM specification.  */
3716 index = peek_opcode_at_pc (jcf, code_offset, pc);
3719 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3720 index = (other << 8) + index;
3725 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3726 means we have a <t>store. */
3727 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3733 /* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
3734 order, as specified by Java Language Specification.
3736 The problem is that while expand_expr will evaluate its sub-operands in
3737 left-to-right order, for variables it will just return an rtx (i.e.
3738 an lvalue) for the variable (rather than an rvalue). So it is possible
3739 that a later sub-operand will change the register, and when the
3740 actual operation is done, it will use the new value, when it should
3741 have used the original value.
3743 We fix this by using save_expr. This forces the sub-operand to be
3744 copied into a fresh virtual register,
3746 For method invocation, we modify the arguments so that a
3747 left-to-right order evaluation is performed. Saved expressions
3748 will, in CALL_EXPR order, be reused when the call will be expanded.
3750 We also promote outgoing args if needed. */
3753 force_evaluation_order (tree node)
3755 if (flag_syntax_only)
/* Only calls -- possibly wrapped in a COMPOUND_EXPR whose second
   operand is a SAVE_EXPR (a constructor wrapper, see below) -- need
   their arguments pinned.  */
3757 if (TREE_CODE (node) == CALL_EXPR
3758 || (TREE_CODE (node) == COMPOUND_EXPR
3759 && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
3760 && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
3765 /* Account for wrapped around ctors. */
3766 if (TREE_CODE (node) == COMPOUND_EXPR)
3767 call = TREE_OPERAND (node, 0);
3771 nargs = call_expr_nargs (call);
3773 /* This reverses the evaluation order. This is a desired effect. */
3774 for (i = 0, cmp = NULL_TREE; i < nargs; i++)
3776 tree arg = CALL_EXPR_ARG (call, i);
3777 /* Promote integral argument types narrower than int when the
   target's promote_prototypes hook requests it.  */
3779 tree type = TREE_TYPE (arg);
3781 if (targetm.calls.promote_prototypes (type)
3782 && INTEGRAL_TYPE_P (type)
3783 && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
3784 TYPE_SIZE (integer_type_node)))
3785 arg = fold_convert (integer_type_node, arg);
/* Recursively pin each argument, chain the SAVE_EXPRs left-to-right
   into cmp, and make the CALL_EXPR reuse the saved values.  */
3787 saved = save_expr (force_evaluation_order (arg));
3788 cmp = (cmp == NULL_TREE ? saved :
3789 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));
3791 CALL_EXPR_ARG (call, i) = saved;
3794 if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
3795 TREE_SIDE_EFFECTS (cmp) = 1;
/* Evaluate the argument chain first, then the call itself.  */
3799 cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
3800 if (TREE_TYPE (cmp) != void_type_node)
3801 cmp = save_expr (cmp);
3802 TREE_SIDE_EFFECTS (cmp) = 1;
3809 /* Build a node to represent empty statements and blocks.  Simply wraps
   build_empty_stmt at the current input location.  */
3812 build_java_empty_stmt (void)
3814 tree t = build_empty_stmt (input_location);
3818 /* Promote all args of integral type before generating any code.  Each
   parameter narrower than 32 bits is copied, via an explicit widening
   assignment, into the int-typed local slot that find_local_variable
   returns for its index.  NOTE(review): the TYPE_IS_WIDE branch at the
   end presumably advances the slot index an extra step for long/double
   parameters -- its body is elided from this excerpt.  */
3821 promote_arguments (void)
3825 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3826 arg != NULL_TREE; arg = TREE_CHAIN (arg), i++)
3828 tree arg_type = TREE_TYPE (arg);
3829 if (INTEGRAL_TYPE_P (arg_type)
3830 && TYPE_PRECISION (arg_type) < 32)
3832 tree copy = find_local_variable (i, integer_type_node, -1);
3833 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3835 fold_convert (integer_type_node, arg)));
3837 if (TYPE_IS_WIDE (arg_type))
3842 /* Create a local variable that points to the constant pool.  A
   TREE_CONSTANT pointer local (cpool_ptr) is initialized with the
   address of the constant data, and an INDIRECT_REF through it is
   recorded as TYPE_CPOOL_DATA_REF (output_class) -- presumably so later
   constant-pool references go through this cached pointer.  The ref is
   marked TREE_THIS_NOTRAP since dereferencing it cannot fault.  */
3845 cache_cpool_data_ref (void)
3850 tree d = build_constant_data_ref (flag_indirect_classes);
3851 tree cpool_ptr = build_decl (input_location, VAR_DECL, NULL_TREE,
3852 build_pointer_type (TREE_TYPE (d)));
3853 java_add_local_var (cpool_ptr);
3854 TREE_CONSTANT (cpool_ptr) = 1;
3856 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3857 cpool_ptr, build_address_of (d)));
3858 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3859 TREE_THIS_NOTRAP (cpool) = 1;
3860 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3864 #include "gt-java-expr.h"