X-Git-Url: http://git.sourceforge.jp/view?p=pf3gnuchains%2Fgcc-fork.git;a=blobdiff_plain;f=gcc%2Fjava%2Fexpr.c;h=b113df1de9e82d4932661e1c088ab79257f045cf;hp=a6bc5759a63243f4abce824f18a15fd96518410c;hb=e0859e4a536bd443d95503803cbfb725165f71d5;hpb=7881bc34f365874a9f2008fcd63852ac05c9d048 diff --git a/gcc/java/expr.c b/gcc/java/expr.c index a6bc5759a63..b113df1de9e 100644 --- a/gcc/java/expr.c +++ b/gcc/java/expr.c @@ -1,12 +1,12 @@ /* Process expressions for the GNU compiler for the Java(TM) language. - Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004 - Free Software Foundation, Inc. + Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, + 2005, 2006, 2007, 2008 Free Software Foundation, Inc. This file is part of GCC. GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by -the Free Software Foundation; either version 2, or (at your option) +the Free Software Foundation; either version 3, or (at your option) any later version. GCC is distributed in the hope that it will be useful, @@ -15,9 +15,8 @@ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License -along with GCC; see the file COPYING. If not, write to -the Free Software Foundation, 59 Temple Place - Suite 330, -Boston, MA 02111-1307, USA. +along with GCC; see the file COPYING3. If not see +. Java and all Java-based marks are trademarks or registered trademarks of Sun Microsystems, Inc. in the United States and other countries. @@ -74,10 +73,6 @@ static void expand_cond (enum tree_code, tree, int); static void expand_java_goto (int); static tree expand_java_switch (tree, int); static void expand_java_add_case (tree, int, int); -#if 0 -static void expand_java_call (int, int); -static void expand_java_ret (tree); -#endif static tree pop_arguments (tree); static void expand_invoke (int, int, int); static void expand_java_field_op (int, int, int); @@ -86,6 +81,8 @@ static void java_stack_pop (int); static tree build_java_throw_out_of_bounds_exception (tree); static tree build_java_check_indexed_type (tree, tree); static unsigned char peek_opcode_at_pc (struct JCF *, int, int); +static void promote_arguments (void); +static void cache_cpool_data_ref (void); static GTY(()) tree operand_type[59]; @@ -95,8 +92,7 @@ tree dtable_ident = NULL_TREE; /* Set to nonzero value in order to emit class initialization code before static field references. */ -/* FIXME: Make this work with gimplify. */ -int always_initialize_class_p = 1; +int always_initialize_class_p = 0; /* We store the stack state in two places: Within a basic block, we use the quick_stack, which is a @@ -130,6 +126,10 @@ static GTY(()) tree quick_stack; /* A free-list of unused permanent TREE_LIST nodes. */ static GTY((deletable)) tree tree_list_free_list; +/* The physical memory page size used in this computer. See + build_field_ref(). */ +static GTY(()) tree page_size; + /* The stack pointer of the Java virtual machine. This does include the size of the quick_stack. */ @@ -138,6 +138,12 @@ int stack_pointer; const unsigned char *linenumber_table; int linenumber_count; +/* Largest pc so far in this method that has been passed to lookup_label. */ +int highest_label_pc_this_method = -1; + +/* Base value for this method to add to pc to get generated label. 
*/ +int start_label_pc_this_method = 0; + void init_expr_processing (void) { @@ -186,10 +192,9 @@ java_truthvalue_conversion (tree expr) case COND_EXPR: /* Distribute the conversion into the arms of a COND_EXPR. */ - return fold - (build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0), - java_truthvalue_conversion (TREE_OPERAND (expr, 1)), - java_truthvalue_conversion (TREE_OPERAND (expr, 2)))); + return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0), + java_truthvalue_conversion (TREE_OPERAND (expr, 1)), + java_truthvalue_conversion (TREE_OPERAND (expr, 2))); case NOP_EXPR: /* If this is widening the argument, we can ignore it. */ @@ -199,8 +204,8 @@ java_truthvalue_conversion (tree expr) /* fall through to default */ default: - return fold (build2 (NE_EXPR, boolean_type_node, - expr, boolean_false_node)); + return fold_build2 (NE_EXPR, boolean_type_node, + expr, boolean_false_node); } } @@ -255,6 +260,9 @@ push_type_0 (tree type) n_words = 1 + TYPE_IS_WIDE (type); if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl)) return 0; + /* Allocate decl for this variable now, so we get a temporary that + survives the whole method. */ + find_stack_slot (stack_pointer, type); stack_type_map[stack_pointer++] = type; n_words--; while (--n_words >= 0) @@ -265,8 +273,8 @@ push_type_0 (tree type) void push_type (tree type) { - if (! push_type_0 (type)) - abort (); + int r = push_type_0 (type); + gcc_assert (r); } static void @@ -289,6 +297,13 @@ push_value (tree value) TREE_CHAIN (node) = quick_stack; quick_stack = node; } + /* If the value has a side effect, then we need to evaluate it + whether or not the result is used. If the value ends up on the + quick stack and is then popped, this won't happen -- so we flush + the quick stack. It is safest to simply always flush, though, + since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for + the latter we may need to strip conversions. */ + flush_quick_stack (); } /* Pop a type from the type stack. @@ -321,28 +336,57 @@ pop_type_0 (tree type, char **messagep) t = stack_type_map[--stack_pointer]; if (type == NULL_TREE || t == type) return t; + if (TREE_CODE (t) == TREE_LIST) + { + do + { + tree tt = TREE_PURPOSE (t); + if (! can_widen_reference_to (tt, type)) + { + t = tt; + goto fail; + } + t = TREE_CHAIN (t); + } + while (t); + return t; + } if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t) && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32) - return t; + return t; if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE) { + /* If the expected type we've been passed is object or ptr + (i.e. void*), the caller needs to know the real type. */ if (type == ptr_type_node || type == object_ptr_type_node) - return t; - else if (t == ptr_type_node) /* Special case for null reference. */ - return type; - else if (can_widen_reference_to (t, type)) - return t; - /* This is a kludge, but matches what Sun's verifier does. - It can be tricked, but is safe as long as type errors - (i.e. interface method calls) are caught at run-time. */ - else if (CLASS_INTERFACE (TYPE_NAME (TREE_TYPE (type)))) - return object_ptr_type_node; + return t; + + /* Since the verifier has already run, we know that any + types we see will be compatible. In BC mode, this fact + may be checked at runtime, but if that is so then we can + assume its truth here as well. So, we always succeed + here, with the expected type. */ + return type; + } + + if (! 
flag_verify_invocations && flag_indirect_dispatch + && t == object_ptr_type_node) + { + if (type != ptr_type_node) + warning (0, "need to insert runtime check for %s", + xstrdup (lang_printable_name (type, 0))); + return type; } /* lang_printable_name uses a static buffer, so we must save the result from calling it the first time. */ + fail: { char *temp = xstrdup (lang_printable_name (type, 0)); + /* If the stack contains a multi-word type, keep popping the stack until + the real type is found. */ + while (t == void_type_node) + t = stack_type_map[--stack_pointer]; *messagep = concat ("expected type '", temp, "' but stack contains '", lang_printable_name (t, 0), "'", NULL); @@ -368,7 +412,88 @@ pop_type (tree type) return type; } -/* Return 1f if SOURCE_TYPE can be safely widened to TARGET_TYPE. + +/* Return true if two type assertions are equal. */ + +static int +type_assertion_eq (const void * k1_p, const void * k2_p) +{ + const type_assertion k1 = *(const type_assertion *)k1_p; + const type_assertion k2 = *(const type_assertion *)k2_p; + return (k1.assertion_code == k2.assertion_code + && k1.op1 == k2.op1 + && k1.op2 == k2.op2); +} + +/* Hash a type assertion. */ + +static hashval_t +type_assertion_hash (const void *p) +{ + const type_assertion *k_p = p; + hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof + k_p->assertion_code, 0); + + switch (k_p->assertion_code) + { + case JV_ASSERT_TYPES_COMPATIBLE: + hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2), + hash); + /* Fall through. */ + + case JV_ASSERT_IS_INSTANTIABLE: + hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1), + hash); + /* Fall through. */ + + case JV_ASSERT_END_OF_TABLE: + break; + + default: + gcc_unreachable (); + } + + return hash; +} + +/* Add an entry to the type assertion table for the given class. + CLASS is the class for which this assertion will be evaluated by the + runtime during loading/initialization. + ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h. + OP1 and OP2 are the operands. The tree type of these arguments may be + specific to each assertion_code. */ + +void +add_type_assertion (tree class, int assertion_code, tree op1, tree op2) +{ + htab_t assertions_htab; + type_assertion as; + void **as_pp; + + assertions_htab = TYPE_ASSERTIONS (class); + if (assertions_htab == NULL) + { + assertions_htab = htab_create_ggc (7, type_assertion_hash, + type_assertion_eq, NULL); + TYPE_ASSERTIONS (current_class) = assertions_htab; + } + + as.assertion_code = assertion_code; + as.op1 = op1; + as.op2 = op2; + + as_pp = htab_find_slot (assertions_htab, &as, INSERT); + + /* Don't add the same assertion twice. */ + if (*as_pp) + return; + + *as_pp = ggc_alloc (sizeof (type_assertion)); + **(type_assertion **)as_pp = as; +} + + +/* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE. Handles array types and interfaces. */ int @@ -385,6 +510,28 @@ can_widen_reference_to (tree source_type, tree target_type) if (source_type == target_type) return 1; + + /* FIXME: This is very pessimistic, in that it checks everything, + even if we already know that the types are compatible. If we're + to support full Java class loader semantics, we need this. + However, we could do something more optimal. */ + if (! 
flag_verify_invocations) + { + add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE, + source_type, target_type); + + if (!quiet_flag) + warning (0, "assert: %s is assign compatible with %s", + xstrdup (lang_printable_name (target_type, 0)), + xstrdup (lang_printable_name (source_type, 0))); + /* Punt everything to runtime. */ + return 1; + } + + if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type)) + { + return 1; + } else { if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type)) @@ -418,7 +565,16 @@ can_widen_reference_to (tree source_type, tree target_type) int source_depth = class_depth (source_type); int target_depth = class_depth (target_type); - /* class_depth can return a negative depth if an error occurred */ + if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type)) + { + if (! quiet_flag) + warning (0, "assert: %s is assign compatible with %s", + xstrdup (lang_printable_name (target_type, 0)), + xstrdup (lang_printable_name (source_type, 0))); + return 1; + } + + /* class_depth can return a negative depth if an error occurred */ if (source_depth < 0 || target_depth < 0) return 0; @@ -426,15 +582,16 @@ can_widen_reference_to (tree source_type, tree target_type) { /* target_type is OK if source_type or source_type ancestors implement target_type. We handle multiple sub-interfaces */ + tree binfo, base_binfo; + int i; - tree basetype_vec = BINFO_BASE_BINFOS (TYPE_BINFO (source_type)); - int n = TREE_VEC_LENGTH (basetype_vec), i; - for (i=0 ; i < n; i++) - if (can_widen_reference_to - (TREE_TYPE (TREE_VEC_ELT (basetype_vec, i)), - target_type)) + for (binfo = TYPE_BINFO (source_type), i = 0; + BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) + if (can_widen_reference_to + (BINFO_TYPE (base_binfo), target_type)) return 1; - if (n == 0) + + if (!i) return 0; } @@ -475,15 +632,13 @@ java_stack_pop (int count) { tree type, val; - if (stack_pointer == 0) - abort (); + gcc_assert (stack_pointer != 0); type = stack_type_map[stack_pointer - 1]; if (type == TYPE_SECOND) { count--; - if (stack_pointer == 1 || count <= 0) - abort (); + gcc_assert (stack_pointer != 1 && count > 0); type = stack_type_map[stack_pointer - 2]; } @@ -502,12 +657,12 @@ java_stack_swap (void) tree decl1, decl2; if (stack_pointer < 2 - || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_UNKNOWN - || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_UNKNOWN - || type1 == TYPE_SECOND || type2 == TYPE_SECOND + || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND + || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2)) /* Bad stack swap. */ abort (); + /* Bad stack swap. */ flush_quick_stack (); decl1 = find_stack_slot (stack_pointer - 1, type1); @@ -547,18 +702,16 @@ java_stack_dup (int size, int offset) type = stack_type_map [src_index]; if (type == TYPE_SECOND) { - if (src_index <= low_index) - /* Dup operation splits 64-bit number. */ - abort (); + /* Dup operation splits 64-bit number. */ + gcc_assert (src_index > low_index); stack_type_map[dst_index] = type; src_index--; dst_index--; type = stack_type_map[src_index]; - if (! TYPE_IS_WIDE (type)) - abort (); + gcc_assert (TYPE_IS_WIDE (type)); } - else if (TYPE_IS_WIDE (type)) - abort (); + else + gcc_assert (! 
TYPE_IS_WIDE (type)); if (src_index != dst_index) { @@ -580,11 +733,9 @@ build_java_athrow (tree node) { tree call; - call = build3 (CALL_EXPR, - void_type_node, - build_address_of (throw_node), - build_tree_list (NULL_TREE, node), - NULL_TREE); + call = build_call_nary (void_type_node, + build_address_of (throw_node), + 1, node); TREE_SIDE_EFFECTS (call) = 1; java_add_stmt (call); java_stack_pop (stack_pointer); @@ -597,12 +748,12 @@ build_java_jsr (int target_pc, int return_pc) { tree where = lookup_label (target_pc); tree ret = lookup_label (return_pc); - tree ret_label = fold (build1 (ADDR_EXPR, return_address_type_node, ret)); + tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret); push_value (ret_label); flush_quick_stack (); java_add_stmt (build1 (GOTO_EXPR, void_type_node, where)); - /* Do not need to emit the label here. We noted the existance of the + /* Do not need to emit the label here. We noted the existence of the label as a jump target in note_instructions; we'll emit the label for real at the beginning of the expand_byte_code loop. */ } @@ -654,7 +805,7 @@ encode_newarray_type (tree type) else if (type == long_type_node) return 11; else - abort (); + gcc_unreachable (); } /* Build a call to _Jv_ThrowBadArrayIndex(), the @@ -663,9 +814,9 @@ encode_newarray_type (tree type) static tree build_java_throw_out_of_bounds_exception (tree index) { - tree node = build3 (CALL_EXPR, int_type_node, - build_address_of (soft_badarrayindex_node), - build_tree_list (NULL_TREE, index), NULL_TREE); + tree node = build_call_nary (int_type_node, + build_address_of (soft_badarrayindex_node), + 1, index); TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */ return (node); } @@ -680,21 +831,17 @@ build_java_array_length_access (tree node) tree array_type = TREE_TYPE (type); HOST_WIDE_INT length; - /* JVM spec: If the arrayref is null, the arraylength instruction - throws a NullPointerException. The only way we could get a node - of type ptr_type_node at this point is `aconst_null; arraylength' - or something equivalent. */ - if (type == ptr_type_node) - return build3 (CALL_EXPR, int_type_node, - build_address_of (soft_nullpointer_node), - NULL_TREE, NULL_TREE); - if (!is_array_type_p (type)) - abort (); + { + /* With the new verifier, we will see an ordinary pointer type + here. In this case, we just use an arbitrary array type. */ + array_type = build_java_array_type (object_ptr_type_node, -1); + type = promote_type (array_type); + } length = java_array_type_length (type); if (length >= 0) - return build_int_2 (length, 0); + return build_int_cst (NULL_TREE, length); node = build3 (COMPONENT_REF, int_type_node, build_java_indirect_ref (array_type, node, @@ -718,9 +865,9 @@ java_check_reference (tree expr, int check) expr = build3 (COND_EXPR, TREE_TYPE (expr), build2 (EQ_EXPR, boolean_type_node, expr, null_pointer_node), - build3 (CALL_EXPR, void_type_node, - build_address_of (soft_nullpointer_node), - NULL_TREE, NULL_TREE), + build_call_nary (void_type_node, + build_address_of (soft_nullpointer_node), + 0), expr); } @@ -750,6 +897,14 @@ build_java_arrayaccess (tree array, tree type, tree index) tree data_field; tree ref; tree array_type = TREE_TYPE (TREE_TYPE (array)); + tree size_exp = fold_convert (sizetype, size_in_bytes (type)); + + if (!is_array_type_p (TREE_TYPE (array))) + { + /* With the new verifier, we will see an ordinary pointer type + here. In this case, we just use the correct array type. 
*/ + array_type = build_java_array_type (type, -1); + } if (flag_bounds_check) { @@ -759,11 +914,11 @@ build_java_arrayaccess (tree array, tree type, tree index) * Note this is equivalent to and more efficient than: * INDEX < 0 || INDEX >= LEN && throw ... */ tree test; - tree len = build_java_array_length_access (array); - TREE_TYPE (len) = unsigned_int_type_node; - test = fold (build2 (GE_EXPR, boolean_type_node, - convert (unsigned_int_type_node, index), - len)); + tree len = convert (unsigned_int_type_node, + build_java_array_length_access (array)); + test = fold_build2 (GE_EXPR, boolean_type_node, + convert (unsigned_int_type_node, index), + len); if (! integer_zerop (test)) { throw = build2 (TRUTH_ANDIF_EXPR, int_type_node, test, @@ -777,16 +932,33 @@ build_java_arrayaccess (tree array, tree type, tree index) to have the bounds check evaluated first. */ if (throw != NULL_TREE) index = build2 (COMPOUND_EXPR, int_type_node, throw, index); - + data_field = lookup_field (&array_type, get_identifier ("data")); ref = build3 (COMPONENT_REF, TREE_TYPE (data_field), build_java_indirect_ref (array_type, array, flag_check_references), data_field, NULL_TREE); - - node = build4 (ARRAY_REF, type, ref, index, NULL_TREE, NULL_TREE); - return node; + + /* Take the address of the data field and convert it to a pointer to + the element type. */ + node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref)); + + /* Multiply the index by the size of an element to obtain a byte + offset. Convert the result to a pointer to the element type. */ + index = build2 (MULT_EXPR, sizetype, + fold_convert (sizetype, index), + size_exp); + + /* Sum the byte offset and the address of the data field. */ + node = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (node), node, index); + + /* Finally, return + + *((&array->data) + index*size_exp) + + */ + return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node); } /* Generate code to throw an ArrayStoreException if OBJECT is not assignable @@ -800,26 +972,35 @@ build_java_arraystore_check (tree array, tree object) tree array_type_p = TREE_TYPE (array); tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object))); - if (! is_array_type_p (array_type_p)) - abort (); + if (! flag_verify_invocations) + { + /* With the new verifier, we don't track precise types. FIXME: + performance regression here. */ + element_type = TYPE_NAME (object_type_node); + } + else + { + gcc_assert (is_array_type_p (array_type_p)); - /* Get the TYPE_DECL for ARRAY's element type. */ - element_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p)))); + /* Get the TYPE_DECL for ARRAY's element type. */ + element_type + = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p)))); + } - if (TREE_CODE (element_type) != TYPE_DECL - || TREE_CODE (object_type) != TYPE_DECL) - abort (); + gcc_assert (TREE_CODE (element_type) == TYPE_DECL + && TREE_CODE (object_type) == TYPE_DECL); if (!flag_store_check) return build1 (NOP_EXPR, array_type_p, array); - /* No check is needed if the element type is final or is itself an array. - Also check that element_type matches object_type, since in the bytecode - compilation case element_type may be the actual element type of the array - rather than its declared type. */ + /* No check is needed if the element type is final. Also check that + element_type matches object_type, since in the bytecode + compilation case element_type may be the actual element type of + the array rather than its declared type. 
However, if we're doing + indirect dispatch, we can't do the `final' optimization. */ if (element_type == object_type - && (TYPE_ARRAY_P (TREE_TYPE (element_type)) - || CLASS_FINAL (element_type))) + && ! flag_indirect_dispatch + && CLASS_FINAL (element_type)) return build1 (NOP_EXPR, array_type_p, array); /* OBJECT might be wrapped by a SAVE_EXPR. */ @@ -847,11 +1028,9 @@ build_java_arraystore_check (tree array, tree object) } /* Build an invocation of _Jv_CheckArrayStore */ - check = build3 (CALL_EXPR, void_type_node, - build_address_of (soft_checkarraystore_node), - tree_cons (NULL_TREE, array, - build_tree_list (NULL_TREE, object)), - NULL_TREE); + check = build_call_nary (void_type_node, + build_address_of (soft_checkarraystore_node), + 2, array, object); TREE_SIDE_EFFECTS (check) = 1; return check; @@ -861,30 +1040,17 @@ build_java_arraystore_check (tree array, tree object) ARRAY_NODE. This function is used to retrieve something less vague than a pointer type when indexing the first dimension of something like [[. May return a corrected type, if necessary, otherwise INDEXED_TYPE is - return unchanged. - As a side effect, it also makes sure that ARRAY_NODE is an array. */ + return unchanged. */ static tree -build_java_check_indexed_type (tree array_node, tree indexed_type) +build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED, + tree indexed_type) { - tree elt_type; - - if (!is_array_type_p (TREE_TYPE (array_node))) - abort (); - - elt_type = (TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (array_node)))); - - if (indexed_type == ptr_type_node ) - return promote_type (elt_type); - - /* BYTE/BOOLEAN store and load are used for both type */ - if (indexed_type == byte_type_node && elt_type == boolean_type_node ) - return boolean_type_node; + /* We used to check to see if ARRAY_NODE really had array type. + However, with the new verifier, this is not necessary, as we know + that the object will be an array of the appropriate type. */ - if (indexed_type != elt_type ) - abort (); - else - return indexed_type; + return indexed_type; } /* newarray triggers a call to _Jv_NewPrimArray. This function should be @@ -902,20 +1068,13 @@ build_newarray (int atype_value, tree length) host_integerp (length, 0) == INTEGER_CST ? tree_low_cst (length, 0) : -1); - /* If compiling to native, pass a reference to the primitive type class - and save the runtime some work. However, the bytecode generator - expects to find the type_code int here. */ - if (flag_emit_class_files) - type_arg = build_int_2 (atype_value, 0); - else - type_arg = build_class_ref (prim_type); + /* Pass a reference to the primitive type class and save the runtime + some work. */ + type_arg = build_class_ref (prim_type); - return build3 (CALL_EXPR, promote_type (type), - build_address_of (soft_newarray_node), - tree_cons (NULL_TREE, - type_arg, - build_tree_list (NULL_TREE, length)), - NULL_TREE); + return build_call_nary (promote_type (type), + build_address_of (soft_newarray_node), + 2, type_arg, length); } /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size @@ -929,13 +1088,12 @@ build_anewarray (tree class_type, tree length) host_integerp (length, 0) ? 
tree_low_cst (length, 0) : -1); - return build3 (CALL_EXPR, promote_type (type), - build_address_of (soft_anewarray_node), - tree_cons (NULL_TREE, length, - tree_cons (NULL_TREE, build_class_ref (class_type), - build_tree_list (NULL_TREE, - null_pointer_node))), - NULL_TREE); + return build_call_nary (promote_type (type), + build_address_of (soft_anewarray_node), + 3, + length, + build_class_ref (class_type), + null_pointer_node); } /* Return a node the evaluates 'new TYPE[LENGTH]'. */ @@ -962,13 +1120,15 @@ expand_java_multianewarray (tree class_type, int ndim) for( i = 0; i < ndim; i++ ) args = tree_cons (NULL_TREE, pop_value (int_type_node), args); - push_value (build3 (CALL_EXPR, - promote_type (class_type), - build_address_of (soft_multianewarray_node), - tree_cons (NULL_TREE, build_class_ref (class_type), - tree_cons (NULL_TREE, - build_int_2 (ndim, 0), args)), - NULL_TREE)); + args = tree_cons (NULL_TREE, + build_class_ref (class_type), + tree_cons (NULL_TREE, + build_int_cst (NULL_TREE, ndim), + args)); + + push_value (build_call_list (promote_type (class_type), + build_address_of (soft_multianewarray_node), + args)); } /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that @@ -988,7 +1148,20 @@ expand_java_arraystore (tree rhs_type_node) && TYPE_PRECISION (rhs_type_node) <= 32) ? int_type_node : rhs_type_node); tree index = pop_value (int_type_node); - tree array = pop_value (ptr_type_node); + tree array_type, array, temp, access; + + /* If we're processing an `aaload' we might as well just pick + `Object'. */ + if (TREE_CODE (rhs_type_node) == POINTER_TYPE) + { + array_type = build_java_array_type (object_ptr_type_node, -1); + rhs_type_node = object_ptr_type_node; + } + else + array_type = build_java_array_type (rhs_type_node, -1); + + array = pop_value (array_type); + array = build1 (NOP_EXPR, promote_type (array_type), array); rhs_type_node = build_java_check_indexed_type (array, rhs_type_node); @@ -997,14 +1170,31 @@ expand_java_arraystore (tree rhs_type_node) index = save_expr (index); array = save_expr (array); + /* We want to perform the bounds check (done by + build_java_arrayaccess) before the type check (done by + build_java_arraystore_check). So, we call build_java_arrayaccess + -- which returns an ARRAY_REF lvalue -- and we then generate code + to stash the address of that lvalue in a temp. Then we call + build_java_arraystore_check, and finally we generate a + MODIFY_EXPR to set the array element. */ + + access = build_java_arrayaccess (array, rhs_type_node, index); + temp = build_decl (VAR_DECL, NULL_TREE, + build_pointer_type (TREE_TYPE (access))); + java_add_local_var (temp); + java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp), + temp, + build_fold_addr_expr (access))); + if (TREE_CODE (rhs_type_node) == POINTER_TYPE) { tree check = build_java_arraystore_check (array, rhs_node); java_add_stmt (check); } - array = build_java_arrayaccess (array, rhs_type_node, index); - java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (array), array, rhs_node)); + java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access), + build1 (INDIRECT_REF, TREE_TYPE (access), temp), + rhs_node)); } /* Expand the evaluation of ARRAY[INDEX]. 
build_java_check_indexed_type makes @@ -1015,31 +1205,35 @@ expand_java_arraystore (tree rhs_type_node) */ static void -expand_java_arrayload (tree lhs_type_node ) +expand_java_arrayload (tree lhs_type_node) { tree load_node; tree index_node = pop_value (int_type_node); - tree array_node = pop_value (ptr_type_node); + tree array_type; + tree array_node; - index_node = save_expr (index_node); - array_node = save_expr (array_node); - - if (TREE_TYPE (array_node) == ptr_type_node) - /* The only way we could get a node of type ptr_type_node at this - point is `aconst_null; arraylength' or something equivalent, so - unconditionally throw NullPointerException. */ - load_node = build3 (CALL_EXPR, lhs_type_node, - build_address_of (soft_nullpointer_node), - NULL_TREE, NULL_TREE); - else + /* If we're processing an `aaload' we might as well just pick + `Object'. */ + if (TREE_CODE (lhs_type_node) == POINTER_TYPE) { - lhs_type_node = build_java_check_indexed_type (array_node, lhs_type_node); - load_node = build_java_arrayaccess (array_node, - lhs_type_node, - index_node); + array_type = build_java_array_type (object_ptr_type_node, -1); + lhs_type_node = object_ptr_type_node; } + else + array_type = build_java_array_type (lhs_type_node, -1); + array_node = pop_value (array_type); + array_node = build1 (NOP_EXPR, promote_type (array_type), array_node); + + index_node = save_expr (index_node); + array_node = save_expr (array_node); + + lhs_type_node = build_java_check_indexed_type (array_node, + lhs_type_node); + load_node = build_java_arrayaccess (array_node, + lhs_type_node, + index_node); if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32) - load_node = fold (build1 (NOP_EXPR, int_type_node, load_node)); + load_node = fold_build1 (NOP_EXPR, int_type_node, load_node); push_value (load_node); } @@ -1061,11 +1255,9 @@ expand_java_array_length (void) static tree build_java_monitor (tree call, tree object) { - return build3 (CALL_EXPR, - void_type_node, - build_address_of (call), - build_tree_list (NULL_TREE, object), - NULL_TREE); + return build_call_nary (void_type_node, + build_address_of (call), + 1, object); } /* Emit code for one of the PUSHC instructions. */ @@ -1077,10 +1269,7 @@ expand_java_pushc (int ival, tree type) if (type == ptr_type_node && ival == 0) value = null_pointer_node; else if (type == int_type_node || type == long_type_node) - { - value = build_int_2 (ival, ival < 0 ? -1 : 0); - TREE_TYPE (value) = type; - } + value = build_int_cst (type, ival); else if (type == float_type_node || type == double_type_node) { REAL_VALUE_TYPE x; @@ -1088,7 +1277,7 @@ expand_java_pushc (int ival, tree type) value = build_real (type, x); } else - abort (); + gcc_unreachable (); push_value (value); } @@ -1114,7 +1303,7 @@ expand_java_return (tree type) retval = build1(NOP_EXPR, TREE_TYPE(res), retval); TREE_SIDE_EFFECTS (retval) = 1; - java_add_stmt (build1 (RETURN_EXPR, TREE_TYPE (retval), retval)); + java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval)); } } @@ -1131,6 +1320,9 @@ expand_load_internal (int index, tree type, int pc) value into it. Then we push this new local on the stack. Hopefully this all gets optimized out. 
*/ copy = build_decl (VAR_DECL, NULL_TREE, type); + if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)) + && TREE_TYPE (copy) != TREE_TYPE (var)) + var = convert (type, var); java_add_local_var (copy); java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var)); @@ -1155,6 +1347,18 @@ class_has_finalize_method (tree type) return HAS_FINALIZER_P (type) || class_has_finalize_method (super); } +tree +java_create_object (tree type) +{ + tree alloc_node = (class_has_finalize_method (type) + ? alloc_object_node + : alloc_no_finalizer_node); + + return build_call_nary (promote_type (type), + build_address_of (alloc_node), + 1, build_class_ref (type)); +} + static void expand_java_NEW (tree type) { @@ -1165,10 +1369,9 @@ expand_java_NEW (tree type) if (! CLASS_LOADED_P (type)) load_class (type, 1); safe_layout_class (type); - push_value (build3 (CALL_EXPR, promote_type (type), - build_address_of (alloc_node), - build_tree_list (NULL_TREE, build_class_ref (type)), - NULL_TREE)); + push_value (build_call_nary (promote_type (type), + build_address_of (alloc_node), + 1, build_class_ref (type))); } /* This returns an expression which will extract the class of an @@ -1220,7 +1423,8 @@ build_instanceof (tree value, tree type) we only need to check for `null'. */ expr = build2 (NE_EXPR, itype, value, null_pointer_node); } - else if (! TYPE_ARRAY_P (type) + else if (flag_verify_invocations + && ! TYPE_ARRAY_P (type) && ! TYPE_ARRAY_P (valtype) && DECL_P (klass) && DECL_P (valclass) && ! CLASS_INTERFACE (valclass) @@ -1246,12 +1450,9 @@ build_instanceof (tree value, tree type) } else { - expr = build3 (CALL_EXPR, itype, - build_address_of (soft_instanceof_node), - tree_cons (NULL_TREE, value, - build_tree_list (NULL_TREE, - build_class_ref (type))), - NULL_TREE); + expr = build_call_nary (itype, + build_address_of (soft_instanceof_node), + 2, value, build_class_ref (type)); } TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value); return expr; @@ -1269,11 +1470,9 @@ static void expand_java_CHECKCAST (tree type) { tree value = pop_value (ptr_type_node); - value = build3 (CALL_EXPR, promote_type (type), - build_address_of (soft_checkcast_node), - tree_cons (NULL_TREE, build_class_ref (type), - build_tree_list (NULL_TREE, value)), - NULL_TREE); + value = build_call_nary (promote_type (type), + build_address_of (soft_checkcast_node), + 2, build_class_ref (type), value); push_value (value); } @@ -1285,13 +1484,12 @@ expand_iinc (unsigned int local_var_index, int ival, int pc) flush_quick_stack (); local_var = find_local_variable (local_var_index, int_type_node, pc); - constant_value = build_int_2 (ival, ival < 0 ? -1 : 0); - res = fold (build2 (PLUS_EXPR, int_type_node, local_var, constant_value)); + constant_value = build_int_cst (NULL_TREE, ival); + res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value); java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res)); - update_aliases (local_var, local_var_index); } - + tree build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2) { @@ -1328,15 +1526,8 @@ build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2) } } - if (! 
call) - abort (); - - call = build3 (CALL_EXPR, type, - build_address_of (call), - tree_cons (NULL_TREE, arg1, - build_tree_list (NULL_TREE, arg2)), - NULL_TREE); - + gcc_assert (call); + call = build_call_nary (type, build_address_of (call), 2, arg1, arg2); return call; } @@ -1348,44 +1539,45 @@ build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2) { case URSHIFT_EXPR: { - tree u_type = java_unsigned_type (type); + tree u_type = unsigned_type_for (type); arg1 = convert (u_type, arg1); arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2); return convert (type, arg1); } case LSHIFT_EXPR: case RSHIFT_EXPR: - mask = build_int_2 (TYPE_PRECISION (TREE_TYPE (arg1)) - 1, 0); - arg2 = fold (build2 (BIT_AND_EXPR, int_type_node, arg2, mask)); + mask = build_int_cst (NULL_TREE, + TYPE_PRECISION (TREE_TYPE (arg1)) - 1); + arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask); break; case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */ case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */ arg1 = save_expr (arg1); arg2 = save_expr (arg2); { - tree ifexp1 = fold (build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR, - boolean_type_node, arg1, arg2)); - tree ifexp2 = fold (build2 (EQ_EXPR, boolean_type_node, arg1, arg2)); - tree second_compare = fold (build3 (COND_EXPR, int_type_node, - ifexp2, integer_zero_node, - op == COMPARE_L_EXPR - ? integer_minus_one_node - : integer_one_node)); - return fold (build3 (COND_EXPR, int_type_node, ifexp1, - op == COMPARE_L_EXPR ? integer_one_node - : integer_minus_one_node, - second_compare)); + tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR, + boolean_type_node, arg1, arg2); + tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2); + tree second_compare = fold_build3 (COND_EXPR, int_type_node, + ifexp2, integer_zero_node, + op == COMPARE_L_EXPR + ? integer_minus_one_node + : integer_one_node); + return fold_build3 (COND_EXPR, int_type_node, ifexp1, + op == COMPARE_L_EXPR ? 
integer_one_node + : integer_minus_one_node, + second_compare); } case COMPARE_EXPR: arg1 = save_expr (arg1); arg2 = save_expr (arg2); { - tree ifexp1 = fold (build2 (LT_EXPR, boolean_type_node, arg1, arg2)); - tree ifexp2 = fold (build2 (GT_EXPR, boolean_type_node, arg1, arg2)); - tree second_compare = fold (build3 (COND_EXPR, int_type_node, - ifexp2, integer_one_node, - integer_zero_node)); - return fold (build3 (COND_EXPR, int_type_node, - ifexp1, integer_minus_one_node, second_compare)); + tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2); + tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2); + tree second_compare = fold_build3 (COND_EXPR, int_type_node, + ifexp2, integer_one_node, + integer_zero_node); + return fold_build3 (COND_EXPR, int_type_node, + ifexp1, integer_minus_one_node, second_compare); } case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR: @@ -1398,11 +1590,9 @@ build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2) arg1 = convert (double_type_node, arg1); arg2 = convert (double_type_node, arg2); } - call = build3 (CALL_EXPR, double_type_node, - build_address_of (soft_fmod_node), - tree_cons (NULL_TREE, arg1, - build_tree_list (NULL_TREE, arg2)), - NULL_TREE); + call = build_call_nary (double_type_node, + build_address_of (soft_fmod_node), + 2, arg1, arg2); if (type != double_type_node) call = convert (type, call); return call; @@ -1416,7 +1606,7 @@ build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2) break; default: ; } - return fold (build2 (op, type, arg1, arg2)); + return fold_build2 (op, type, arg1, arg2); } static void @@ -1459,21 +1649,20 @@ lookup_field (tree *typep, tree name) } do { - tree field, basetype_vec; + tree field, binfo, base_binfo; tree save_field; - int n, i; + int i; for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field)) if (DECL_NAME (field) == name) return field; /* Process implemented interfaces. */ - basetype_vec = BINFO_BASE_BINFOS (TYPE_BINFO (*typep)); - n = TREE_VEC_LENGTH (basetype_vec); save_field = NULL_TREE; - for (i = 0; i < n; i++) + for (binfo = TYPE_BINFO (*typep), i = 0; + BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) { - tree t = BINFO_TYPE (TREE_VEC_ELT (basetype_vec, i)); + tree t = BINFO_TYPE (base_binfo); if ((field = lookup_field (&t, name))) { if (save_field == field) @@ -1484,7 +1673,7 @@ lookup_field (tree *typep, tree name) { tree i1 = DECL_CONTEXT (save_field); tree i2 = DECL_CONTEXT (field); - error ("reference `%s' is ambiguous: appears in interface `%s' and interface `%s'", + error ("reference %qs is ambiguous: appears in interface %qs and interface %qs", IDENTIFIER_POINTER (name), IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))), IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2)))); @@ -1512,7 +1701,7 @@ build_field_ref (tree self_value, tree self_class, tree name) tree field_decl = lookup_field (&base_class, name); if (field_decl == NULL_TREE) { - error ("field `%s' not found", IDENTIFIER_POINTER (name)); + error ("field %qs not found", IDENTIFIER_POINTER (name)); return error_mark_node; } if (self_value == NULL_TREE) @@ -1521,42 +1710,68 @@ build_field_ref (tree self_value, tree self_class, tree name) } else { - int check = (flag_check_references - && ! (DECL_P (self_value) - && DECL_NAME (self_value) == this_identifier_node)); - tree base_type = promote_type (base_class); + + /* CHECK is true if self_value is not the this pointer. */ + int check = (! 
(DECL_P (self_value) + && DECL_NAME (self_value) == this_identifier_node)); + + /* Determine whether a field offset from NULL will lie within + Page 0: this is necessary on those GNU/Linux/BSD systems that + trap SEGV to generate NullPointerExceptions. + + We assume that Page 0 will be mapped with NOPERM, and that + memory may be allocated from any other page, so only field + offsets < pagesize are guaranteed to trap. We also assume + the smallest page size we'll encounter is 4k bytes. */ + if (! flag_syntax_only && check && ! flag_check_references + && ! flag_indirect_dispatch) + { + tree field_offset = byte_position (field_decl); + if (! page_size) + page_size = size_int (4096); + check = ! INT_CST_LT_UNSIGNED (field_offset, page_size); + } + if (base_type != TREE_TYPE (self_value)) - self_value = fold (build1 (NOP_EXPR, base_type, self_value)); - if (flag_indirect_dispatch - && output_class != self_class) - /* FIXME: output_class != self_class is not exactly the right - test. What we really want to know is whether self_class is - in the same translation unit as output_class. If it is, - we can make a direct reference. */ + self_value = fold_build1 (NOP_EXPR, base_type, self_value); + if (! flag_syntax_only && flag_indirect_dispatch) { tree otable_index - = build_int_2 (get_symbol_table_index - (field_decl, &TYPE_OTABLE_METHODS (output_class)), - 0); + = build_int_cst (NULL_TREE, get_symbol_table_index + (field_decl, NULL_TREE, + &TYPE_OTABLE_METHODS (output_class))); tree field_offset = build4 (ARRAY_REF, integer_type_node, TYPE_OTABLE_DECL (output_class), otable_index, NULL_TREE, NULL_TREE); tree address; + if (DECL_CONTEXT (field_decl) != output_class) + field_offset + = build3 (COND_EXPR, TREE_TYPE (field_offset), + build2 (EQ_EXPR, boolean_type_node, + field_offset, integer_zero_node), + build_call_nary (void_type_node, + build_address_of (soft_nosuchfield_node), + 1, otable_index), + field_offset); + field_offset = fold (convert (sizetype, field_offset)); + self_value = java_check_reference (self_value, check); address - = fold (build2 (PLUS_EXPR, - build_pointer_type (TREE_TYPE (field_decl)), - self_value, field_offset)); - return fold (build1 (INDIRECT_REF, TREE_TYPE (field_decl), address)); + = fold_build2 (POINTER_PLUS_EXPR, + TREE_TYPE (self_value), + self_value, field_offset); + address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)), + address); + return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address); } self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)), self_value, check); - return fold (build3 (COMPONENT_REF, TREE_TYPE (field_decl), - self_value, field_decl, NULL_TREE)); + return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl), + self_value, field_decl, NULL_TREE); } } @@ -1565,7 +1780,9 @@ lookup_label (int pc) { tree name; char buf[32]; - ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", pc); + if (pc > highest_label_pc_this_method) + highest_label_pc_this_method = pc; + ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc); name = get_identifier (buf); if (IDENTIFIER_LOCAL_VALUE (name)) return IDENTIFIER_LOCAL_VALUE (name); @@ -1573,7 +1790,6 @@ lookup_label (int pc) { /* The type of the address of a label is return_address_type_node. */ tree decl = create_label_decl (name); - LABEL_PC (decl) = pc; return pushdecl (decl); } } @@ -1602,9 +1818,15 @@ create_label_decl (tree name) return decl; } -/* This maps a bytecode offset (PC) to various flags. */ +/* This maps a bytecode offset (PC) to various flags. 
*/ char *instruction_bits; +/* This is a vector of type states for the current method. It is + indexed by PC. Each element is a tree vector holding the type + state at that PC. We only note type states at basic block + boundaries. */ +VEC(tree, gc) *type_states; + static void note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc) { @@ -1620,7 +1842,7 @@ expand_compare (enum tree_code condition, tree value1, tree value2, int target_pc) { tree target = lookup_label (target_pc); - tree cond = fold (build2 (condition, boolean_type_node, value1, value2)); + tree cond = fold_build2 (condition, boolean_type_node, value1, value2); java_add_stmt (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond), build1 (GOTO_EXPR, void_type_node, target), @@ -1686,8 +1908,7 @@ expand_java_add_case (tree switch_expr, int match, int target_pc) { tree value, x; - value = build_int_2 (match, match < 0 ? -1 : 0); - TREE_TYPE (value) = TREE_TYPE (switch_expr); + value = build_int_cst (TREE_TYPE (switch_expr), match); x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE, create_artificial_label ()); @@ -1697,29 +1918,6 @@ expand_java_add_case (tree switch_expr, int match, int target_pc) append_to_statement_list (x, &SWITCH_BODY (switch_expr)); } -#if 0 -static void -expand_java_call (int target_pc, int return_address) -{ - tree target_label = lookup_label (target_pc); - tree value = build_int_2 (return_address, return_address < 0 ? -1 : 0); - push_value (value); - flush_quick_stack (); - expand_goto (target_label); -} - -static void -expand_java_ret (tree return_address ATTRIBUTE_UNUSED) -{ - warning ("ret instruction not implemented"); -#if 0 - tree target_label = lookup_label (target_pc); - flush_quick_stack (); - expand_goto (target_label); -#endif -} -#endif - static tree pop_arguments (tree arg_types) { @@ -1730,13 +1928,53 @@ pop_arguments (tree arg_types) tree tail = pop_arguments (TREE_CHAIN (arg_types)); tree type = TREE_VALUE (arg_types); tree arg = pop_value (type); - if (targetm.calls.promote_prototypes (type) - && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node) - && INTEGRAL_TYPE_P (type)) + + /* We simply cast each argument to its proper type. This is + needed since we lose type information coming out of the + verifier. We also have to do this when we pop an integer + type that must be promoted for the function call. */ + if (TREE_CODE (type) == POINTER_TYPE) + arg = build1 (NOP_EXPR, type, arg); + else if (targetm.calls.promote_prototypes (type) + && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node) + && INTEGRAL_TYPE_P (type)) arg = convert (integer_type_node, arg); return tree_cons (NULL_TREE, arg, tail); } - abort (); + gcc_unreachable (); +} + +/* Attach to PTR (a block) the declaration found in ENTRY. 
*/ + +int +attach_init_test_initialization_flags (void **entry, void *ptr) +{ + tree block = (tree)ptr; + struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry; + + if (block != error_mark_node) + { + if (TREE_CODE (block) == BIND_EXPR) + { + tree body = BIND_EXPR_BODY (block); + TREE_CHAIN (ite->value) = BIND_EXPR_VARS (block); + BIND_EXPR_VARS (block) = ite->value; + body = build2 (COMPOUND_EXPR, void_type_node, + build1 (DECL_EXPR, void_type_node, ite->value), body); + BIND_EXPR_BODY (block) = body; + } + else + { + tree body = BLOCK_SUBBLOCKS (block); + TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block); + BLOCK_EXPR_DECLS (block) = ite->value; + body = build2 (COMPOUND_EXPR, void_type_node, + build1 (DECL_EXPR, void_type_node, ite->value), body); + BLOCK_SUBBLOCKS (block) = body; + } + + } + return true; } /* Build an expression to initialize the class CLAS. @@ -1759,15 +1997,15 @@ build_class_init (tree clas, tree expr) if (always_initialize_class_p) { - init = build3 (CALL_EXPR, void_type_node, - build_address_of (soft_initclass_node), - build_tree_list (NULL_TREE, build_class_ref (clas)), - NULL_TREE); + init = build_call_nary (void_type_node, + build_address_of (soft_initclass_node), + 1, build_class_ref (clas)); TREE_SIDE_EFFECTS (init) = 1; } else { tree *init_test_decl; + tree decl; init_test_decl = java_treetreehash_new (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas); @@ -1775,25 +2013,19 @@ build_class_init (tree clas, tree expr) { /* Build a declaration and mark it as a flag used to track static class initializations. */ - *init_test_decl = build_decl (VAR_DECL, NULL_TREE, - boolean_type_node); - MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (*init_test_decl); - LOCAL_CLASS_INITIALIZATION_FLAG (*init_test_decl) = 1; - DECL_CONTEXT (*init_test_decl) = current_function_decl; - DECL_FUNCTION_INIT_TEST_CLASS (*init_test_decl) = clas; - /* Tell the check-init code to ignore this decl when not - optimizing class initialization. */ - if (!STATIC_CLASS_INIT_OPT_P ()) - DECL_BIT_INDEX(*init_test_decl) = -1; - DECL_INITIAL (*init_test_decl) = integer_zero_node; + decl = build_decl (VAR_DECL, NULL_TREE, + boolean_type_node); + MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl); + DECL_CONTEXT (decl) = current_function_decl; + DECL_INITIAL (decl) = boolean_false_node; /* Don't emit any symbolic debugging info for this decl. */ - DECL_IGNORED_P (*init_test_decl) = 1; + DECL_IGNORED_P (decl) = 1; + *init_test_decl = decl; } - init = build3 (CALL_EXPR, void_type_node, - build_address_of (soft_initclass_node), - build_tree_list (NULL_TREE, build_class_ref (clas)), - NULL_TREE); + init = build_call_nary (void_type_node, + build_address_of (soft_initclass_node), + 1, build_class_ref (clas)); TREE_SIDE_EFFECTS (init) = 1; init = build3 (COND_EXPR, void_type_node, build2 (EQ_EXPR, boolean_type_node, @@ -1815,29 +2047,165 @@ build_class_init (tree clas, tree expr) return init; } + + +/* Rewrite expensive calls that require stack unwinding at runtime to + cheaper alternatives. The logic here performs these + transformations: + + java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$) + java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$) + +*/ + +typedef struct +{ + const char *classname; + const char *method; + const char *signature; + const char *new_signature; + int flags; + tree (*rewrite_arglist) (tree arglist); +} rewrite_rule; + +/* Add __builtin_return_address(0) to the end of an arglist. 
*/ + + +static tree +rewrite_arglist_getcaller (tree arglist) +{ + tree retaddr + = build_call_expr (built_in_decls[BUILT_IN_RETURN_ADDRESS], + 1, integer_zero_node); + + DECL_INLINE (current_function_decl) = 0; + + return chainon (arglist, + tree_cons (NULL_TREE, retaddr, + NULL_TREE)); +} + +/* Add this.class to the end of an arglist. */ + +static tree +rewrite_arglist_getclass (tree arglist) +{ + return chainon (arglist, + tree_cons (NULL_TREE, build_class_ref (output_class), + NULL_TREE)); +} + +static rewrite_rule rules[] = + {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;", + "(Ljava/lang/Class;)Ljava/lang/ClassLoader;", + ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass}, + {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;", + "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;", + ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass}, + {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;", + "(Lgnu/gcj/RawData;)Ljava/lang/Class;", + ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller}, + {"gnu.classpath.VMStackWalker", "getCallingClassLoader", + "()Ljava/lang/ClassLoader;", + "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;", + ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller}, + + {NULL, NULL, NULL, NULL, 0, NULL}}; + +/* True if this method is special, i.e. it's a private method that + should be exported from a DSO. */ + +bool +special_method_p (tree candidate_method) +{ + tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method))); + tree method = DECL_NAME (candidate_method); + rewrite_rule *p; + + for (p = rules; p->classname; p++) + { + if (get_identifier (p->classname) == context + && get_identifier (p->method) == method) + return true; + } + return false; +} + +/* Scan the rules list for replacements for *METHOD_P and replace the + args accordingly. If the rewrite results in an access to a private + method, update SPECIAL.*/ + +void +maybe_rewrite_invocation (tree *method_p, tree *arg_list_p, + tree *method_signature_p, tree *special) +{ + tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p))); + rewrite_rule *p; + *special = NULL_TREE; + + for (p = rules; p->classname; p++) + { + if (get_identifier (p->classname) == context) + { + tree method = DECL_NAME (*method_p); + if (get_identifier (p->method) == method + && get_identifier (p->signature) == *method_signature_p) + { + tree maybe_method + = lookup_java_method (DECL_CONTEXT (*method_p), + method, + get_identifier (p->new_signature)); + if (! maybe_method && ! flag_verify_invocations) + { + maybe_method + = add_method (DECL_CONTEXT (*method_p), p->flags, + method, get_identifier (p->new_signature)); + DECL_EXTERNAL (maybe_method) = 1; + } + *method_p = maybe_method; + gcc_assert (*method_p); + *arg_list_p = p->rewrite_arglist (*arg_list_p); + *method_signature_p = get_identifier (p->new_signature); + *special = integer_one_node; + + break; + } + } + } +} + + + tree build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED, tree self_type, tree method_signature ATTRIBUTE_UNUSED, - tree arg_list ATTRIBUTE_UNUSED) + tree arg_list ATTRIBUTE_UNUSED, tree special) { tree func; if (is_compiled_class (self_type)) { - if (!flag_indirect_dispatch - || (!TREE_PUBLIC (method) && DECL_CONTEXT (method))) + /* With indirect dispatch we have to use indirect calls for all + publicly visible methods or gcc will use PLT indirections + to reach them. We also have to use indirect dispatch for all + external methods. */ + if (! 
flag_indirect_dispatch + || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method))) { - make_decl_rtl (method, NULL); func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)), method); } else { tree table_index - = build_int_2 (get_symbol_table_index - (method, &TYPE_ATABLE_METHODS (output_class)), 0); - func = build4 (ARRAY_REF, method_ptr_type_node, - TYPE_ATABLE_DECL (output_class), table_index, - NULL_TREE, NULL_TREE); + = build_int_cst (NULL_TREE, + (get_symbol_table_index + (method, special, + &TYPE_ATABLE_METHODS (output_class)))); + func + = build4 (ARRAY_REF, + TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))), + TYPE_ATABLE_DECL (output_class), table_index, + NULL_TREE, NULL_TREE); } func = convert (method_ptr_type_node, func); } @@ -1877,8 +2245,8 @@ build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED, method_index++; } method_index *= int_size_in_bytes (method_type_node); - ref = fold (build2 (PLUS_EXPR, method_ptr_type_node, - ref, build_int_2 (method_index, 0))); + ref = fold_build2 (POINTER_PLUS_EXPR, method_ptr_type_node, + ref, size_int (method_index)); ref = build1 (INDIRECT_REF, method_type_node, ref); func = build3 (COMPONENT_REF, nativecode_ptr_type_node, ref, lookup_field (&method_type_node, ncode_ident), @@ -1898,9 +2266,9 @@ invoke_build_dtable (int is_invoke_interface, tree arg_list) argument is an array then get the dispatch table of the class Object rather than the one from the objectref. */ objectref = (is_invoke_interface - && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list))) ? - object_type_node : TREE_VALUE (arg_list)); - + && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list))) + ? build_class_ref (object_type_node) : TREE_VALUE (arg_list)); + if (dtable_ident == NULL_TREE) dtable_ident = get_identifier ("vtable"); dtable = build_java_indirect_ref (object_type_node, objectref, @@ -1913,18 +2281,18 @@ invoke_build_dtable (int is_invoke_interface, tree arg_list) /* Determine the index in SYMBOL_TABLE for a reference to the decl T. If this decl has not been seen before, it will be added to the - otable_methods. If it has, the existing table slot will be - reused. */ + [oa]table_methods. If it has, the existing table slot will be + reused. */ int -get_symbol_table_index (tree t, tree *symbol_table) +get_symbol_table_index (tree t, tree special, tree *symbol_table) { int i = 1; tree method_list; if (*symbol_table == NULL_TREE) { - *symbol_table = build_tree_list (t, t); + *symbol_table = build_tree_list (special, t); return 1; } @@ -1933,8 +2301,9 @@ get_symbol_table_index (tree t, tree *symbol_table) while (1) { tree value = TREE_VALUE (method_list); - if (value == t) - return i; + tree purpose = TREE_PURPOSE (method_list); + if (value == t && purpose == special) + return i; i++; if (TREE_CHAIN (method_list) == NULL_TREE) break; @@ -1942,12 +2311,12 @@ get_symbol_table_index (tree t, tree *symbol_table) method_list = TREE_CHAIN (method_list); } - TREE_CHAIN (method_list) = build_tree_list (t, t); + TREE_CHAIN (method_list) = build_tree_list (special, t); return i; } tree -build_invokevirtual (tree dtable, tree method) +build_invokevirtual (tree dtable, tree method, tree special) { tree func; tree nativecode_ptr_ptr_type_node @@ -1957,9 +2326,12 @@ build_invokevirtual (tree dtable, tree method) if (flag_indirect_dispatch) { + gcc_assert (! 
CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))); + otable_index - = build_int_2 (get_symbol_table_index - (method, &TYPE_OTABLE_METHODS (output_class)), 0); + = build_int_cst (NULL_TREE, get_symbol_table_index + (method, special, + &TYPE_OTABLE_METHODS (output_class))); method_index = build4 (ARRAY_REF, integer_type_node, TYPE_OTABLE_DECL (output_class), otable_index, NULL_TREE, NULL_TREE); @@ -1978,13 +2350,16 @@ build_invokevirtual (tree dtable, tree method) size_int (TARGET_VTABLE_USES_DESCRIPTORS)); } - func = fold (build2 (PLUS_EXPR, nativecode_ptr_ptr_type_node, dtable, - convert (nativecode_ptr_ptr_type_node, method_index))); + func = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dtable), dtable, + convert (sizetype, method_index)); if (TARGET_VTABLE_USES_DESCRIPTORS) func = build1 (NOP_EXPR, nativecode_ptr_type_node, func); else - func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func); + { + func = fold_convert (nativecode_ptr_ptr_type_node, func); + func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func); + } return func; } @@ -1993,14 +2368,10 @@ static GTY(()) tree class_ident; tree build_invokeinterface (tree dtable, tree method) { - tree lookup_arg; tree interface; tree idx; - tree otable_index; - /* We expand invokeinterface here. _Jv_LookupInterfaceMethod() will - ensure that the selected method exists, is public and not - abstract nor static. */ + /* We expand invokeinterface here. */ if (class_ident == NULL_TREE) class_ident = get_identifier ("class"); @@ -2011,33 +2382,43 @@ build_invokeinterface (tree dtable, tree method) lookup_field (&dtable_type, class_ident), NULL_TREE); interface = DECL_CONTEXT (method); - if (! CLASS_INTERFACE (TYPE_NAME (interface))) - abort (); + gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface))); layout_class_methods (interface); if (flag_indirect_dispatch) { - otable_index - = build_int_2 (get_symbol_table_index - (method, &TYPE_OTABLE_METHODS (output_class)), 0); - idx = build4 (ARRAY_REF, integer_type_node, - TYPE_OTABLE_DECL (output_class), otable_index, - NULL_TREE, NULL_TREE); + int itable_index + = 2 * (get_symbol_table_index + (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class))); + interface + = build4 (ARRAY_REF, + TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))), + TYPE_ITABLE_DECL (output_class), + build_int_cst (NULL_TREE, itable_index-1), + NULL_TREE, NULL_TREE); + idx + = build4 (ARRAY_REF, + TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))), + TYPE_ITABLE_DECL (output_class), + build_int_cst (NULL_TREE, itable_index), + NULL_TREE, NULL_TREE); + interface = convert (class_ptr_type, interface); + idx = convert (integer_type_node, idx); } else - idx = build_int_2 (get_interface_method_index (method, interface), 0); - - lookup_arg = tree_cons (NULL_TREE, dtable, - tree_cons (NULL_TREE, build_class_ref (interface), - build_tree_list (NULL_TREE, idx))); + { + idx = build_int_cst (NULL_TREE, + get_interface_method_index (method, interface)); + interface = build_class_ref (interface); + } - return build3 (CALL_EXPR, ptr_type_node, - build_address_of (soft_lookupinterfacemethod_node), - lookup_arg, NULL_TREE); + return build_call_nary (ptr_type_node, + build_address_of (soft_lookupinterfacemethod_node), + 3, dtable, interface, idx); } /* Expand one of the invoke_* opcodes. - OCPODE is the specific opcode. + OPCODE is the specific opcode. METHOD_REF_INDEX is an index into the constant pool. NARGS is the number of arguments, or -1 if not specified. 
*/ @@ -2046,7 +2427,8 @@ expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED) { tree method_signature = COMPONENT_REF_SIGNATURE(¤t_jcf->cpool, method_ref_index); - tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool, method_ref_index); + tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool, + method_ref_index); tree self_type = get_class_constant (current_jcf, COMPONENT_REF_CLASS_INDEX(¤t_jcf->cpool, @@ -2056,6 +2438,8 @@ expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED) tree call, func, method, arg_list, method_type; tree check = NULL_TREE; + tree special = NULL_TREE; + if (! CLASS_LOADED_P (self_type)) { load_class (self_type, 1); @@ -2069,38 +2453,94 @@ expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED) method = lookup_java_constructor (self_type, method_signature); else method = lookup_java_method (self_type, method_name, method_signature); + + /* We've found a method in a class other than the one in which it + was wanted. This can happen if, for instance, we're trying to + compile invokespecial super.equals(). + FIXME: This is a kludge. Rather than nullifying the result, we + should change lookup_java_method() so that it doesn't search the + superclass chain when we're BC-compiling. */ + if (! flag_verify_invocations + && method + && ! TYPE_ARRAY_P (self_type) + && self_type != DECL_CONTEXT (method)) + method = NULL_TREE; + + /* We've found a method in an interface, but this isn't an interface + call. */ + if (opcode != OPCODE_invokeinterface + && method + && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))) + method = NULL_TREE; + + /* We've found a non-interface method but we are making an + interface call. This can happen if the interface overrides a + method in Object. */ + if (! flag_verify_invocations + && opcode == OPCODE_invokeinterface + && method + && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))) + method = NULL_TREE; + if (method == NULL_TREE) { - error ("class '%s' has no method named '%s' matching signature '%s'", - self_name, - IDENTIFIER_POINTER (method_name), - IDENTIFIER_POINTER (method_signature)); - } - /* Invoke static can't invoke static/abstract method */ - else if (opcode == OPCODE_invokestatic) - { - if (!METHOD_STATIC (method)) + if (flag_verify_invocations || ! 
flag_indirect_dispatch) { - error ("invokestatic on non static method"); - method = NULL_TREE; + error ("class '%s' has no method named '%s' matching signature '%s'", + self_name, + IDENTIFIER_POINTER (method_name), + IDENTIFIER_POINTER (method_signature)); } - else if (METHOD_ABSTRACT (method)) + else { - error ("invokestatic on abstract method"); - method = NULL_TREE; + int flags = ACC_PUBLIC; + if (opcode == OPCODE_invokestatic) + flags |= ACC_STATIC; + if (opcode == OPCODE_invokeinterface) + { + flags |= ACC_INTERFACE | ACC_ABSTRACT; + CLASS_INTERFACE (TYPE_NAME (self_type)) = 1; + } + method = add_method (self_type, flags, method_name, + method_signature); + DECL_ARTIFICIAL (method) = 1; + METHOD_DUMMY (method) = 1; + layout_class_method (self_type, NULL, + method, NULL); } } - else + + /* Invoke static can't invoke static/abstract method */ + if (method != NULL_TREE) { - if (METHOD_STATIC (method)) + if (opcode == OPCODE_invokestatic) { - error ("invoke[non-static] on static method"); - method = NULL_TREE; + if (!METHOD_STATIC (method)) + { + error ("invokestatic on non static method"); + method = NULL_TREE; + } + else if (METHOD_ABSTRACT (method)) + { + error ("invokestatic on abstract method"); + method = NULL_TREE; + } + } + else + { + if (METHOD_STATIC (method)) + { + error ("invoke[non-static] on static method"); + method = NULL_TREE; + } } } if (method == NULL_TREE) { + /* If we got here, we emitted an error message above. So we + just pop the arguments, push a properly-typed zero, and + continue. */ method_type = get_type_from_signature (method_signature); pop_arguments (TYPE_ARG_TYPES (method_type)); if (opcode != OPCODE_invokestatic) @@ -2114,10 +2554,13 @@ expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED) arg_list = pop_arguments (TYPE_ARG_TYPES (method_type)); flush_quick_stack (); + maybe_rewrite_invocation (&method, &arg_list, &method_signature, + &special); + func = NULL_TREE; if (opcode == OPCODE_invokestatic) func = build_known_method_ref (method, method_type, self_type, - method_signature, arg_list); + method_signature, arg_list, special); else if (opcode == OPCODE_invokespecial || (opcode == OPCODE_invokevirtual && (METHOD_PRIVATE (method) @@ -2137,14 +2580,14 @@ expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED) TREE_VALUE (arg_list) = save_arg; check = java_check_reference (save_arg, ! 
DECL_INIT_P (method)); func = build_known_method_ref (method, method_type, self_type, - method_signature, arg_list); + method_signature, arg_list, special); } else { tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface, arg_list); if (opcode == OPCODE_invokevirtual) - func = build_invokevirtual (dtable, method); + func = build_invokevirtual (dtable, method, special); else func = build_invokeinterface (dtable, method); } @@ -2154,8 +2597,7 @@ expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED) else func = build1 (NOP_EXPR, build_pointer_type (method_type), func); - call = build3 (CALL_EXPR, TREE_TYPE (method_type), - func, arg_list, NULL_TREE); + call = build_call_list (TREE_TYPE (method_type), func, arg_list); TREE_SIDE_EFFECTS (call) = 1; call = check_for_builtin (method, call); @@ -2180,7 +2622,8 @@ expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED) tree build_jni_stub (tree method) { - tree jnifunc, call, args, body, lookup_arg, method_sig, arg_types; + tree jnifunc, call, args, body, method_sig, arg_types; + tree jniarg0, jniarg1, jniarg2, jniarg3; tree jni_func_type, tem; tree env_var, res_var = NULL_TREE, block; tree method_args, res_type; @@ -2190,11 +2633,9 @@ build_jni_stub (tree method) int args_size = 0; tree klass = DECL_CONTEXT (method); - int from_class = ! CLASS_FROM_SOURCE_P (klass); klass = build_class_ref (klass); - if (! METHOD_NATIVE (method) || ! flag_jni) - abort (); + gcc_assert (METHOD_NATIVE (method) && flag_jni); DECL_ARTIFICIAL (method) = 1; DECL_EXTERNAL (method) = 0; @@ -2210,38 +2651,16 @@ build_jni_stub (tree method) TREE_CHAIN (env_var) = res_var; } - meth_var = build_decl (VAR_DECL, get_identifier ("meth"), ptr_type_node); - TREE_STATIC (meth_var) = 1; - TREE_PUBLIC (meth_var) = 0; - DECL_EXTERNAL (meth_var) = 0; - DECL_CONTEXT (meth_var) = method; - DECL_ARTIFICIAL (meth_var) = 1; - DECL_INITIAL (meth_var) = null_pointer_node; - TREE_USED (meth_var) = 1; - chainon (env_var, meth_var); - build_result_decl (method); - - /* One strange way that the front ends are different is that they - store arguments differently. */ - if (from_class) - method_args = DECL_ARGUMENTS (method); - else - method_args = BLOCK_EXPR_DECLS (DECL_FUNCTION_BODY (method)); - block = build_block (env_var, NULL_TREE, NULL_TREE, - method_args, NULL_TREE); + method_args = DECL_ARGUMENTS (method); + block = build_block (env_var, NULL_TREE, method_args, NULL_TREE); TREE_SIDE_EFFECTS (block) = 1; - /* When compiling from source we don't set the type of the block, - because that will prevent patch_return from ever being run. */ - if (from_class) - TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method)); + TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method)); /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */ body = build2 (MODIFY_EXPR, ptr_type_node, env_var, - build3 (CALL_EXPR, ptr_type_node, - build_address_of (soft_getjnienvnewframe_node), - build_tree_list (NULL_TREE, klass), - NULL_TREE)); - CAN_COMPLETE_NORMALLY (body) = 1; + build_call_nary (ptr_type_node, + build_address_of (soft_getjnienvnewframe_node), + 1, klass)); /* All the arguments to this method become arguments to the underlying JNI function. 
If we had to wrap object arguments in a @@ -2249,7 +2668,7 @@ build_jni_stub (tree method) args = NULL_TREE; for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem)) { - int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (tem))); + int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem))); #ifdef PARM_BOUNDARY arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY) * PARM_BOUNDARY); @@ -2279,18 +2698,14 @@ build_jni_stub (tree method) /* We call _Jv_LookupJNIMethod to find the actual underlying function pointer. _Jv_LookupJNIMethod will throw the appropriate exception if this function is not found at runtime. */ - tem = build_tree_list (NULL_TREE, build_int_2 (args_size, 0)); method_sig = build_java_signature (TREE_TYPE (method)); - lookup_arg = tree_cons (NULL_TREE, - build_utf8_ref (unmangle_classname - (IDENTIFIER_POINTER (method_sig), - IDENTIFIER_LENGTH (method_sig))), - tem); - tem = DECL_NAME (method); - lookup_arg - = tree_cons (NULL_TREE, klass, - tree_cons (NULL_TREE, build_utf8_ref (tem), lookup_arg)); - + jniarg0 = klass; + jniarg1 = build_utf8_ref (DECL_NAME (method)); + jniarg2 = build_utf8_ref (unmangle_classname + (IDENTIFIER_POINTER (method_sig), + IDENTIFIER_LENGTH (method_sig))); + jniarg3 = build_int_cst (NULL_TREE, args_size); + tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types); #ifdef MODIFY_JNI_METHOD_CALL @@ -2299,39 +2714,65 @@ build_jni_stub (tree method) jni_func_type = build_pointer_type (tem); - jnifunc = build3 (COND_EXPR, ptr_type_node, - meth_var, meth_var, - build2 (MODIFY_EXPR, ptr_type_node, meth_var, - build3 (CALL_EXPR, ptr_type_node, - build_address_of - (soft_lookupjnimethod_node), - lookup_arg, NULL_TREE))); + /* Use the actual function type, rather than a generic pointer type, + such that this decl keeps the actual pointer type from being + garbage-collected. If it is, we end up using canonical types + with different uids for equivalent function types, and this in + turn causes utf8 identifiers and output order to vary. */ + meth_var = build_decl (VAR_DECL, get_identifier ("meth"), jni_func_type); + TREE_STATIC (meth_var) = 1; + TREE_PUBLIC (meth_var) = 0; + DECL_EXTERNAL (meth_var) = 0; + DECL_CONTEXT (meth_var) = method; + DECL_ARTIFICIAL (meth_var) = 1; + DECL_INITIAL (meth_var) = null_pointer_node; + TREE_USED (meth_var) = 1; + chainon (env_var, meth_var); + build_result_decl (method); + + jnifunc = build3 (COND_EXPR, jni_func_type, + build2 (NE_EXPR, boolean_type_node, + meth_var, build_int_cst (TREE_TYPE (meth_var), 0)), + meth_var, + build2 (MODIFY_EXPR, jni_func_type, meth_var, + build1 + (NOP_EXPR, jni_func_type, + build_call_nary (ptr_type_node, + build_address_of + (soft_lookupjnimethod_node), + 4, + jniarg0, jniarg1, + jniarg2, jniarg3)))); /* Now we make the actual JNI call via the resulting function pointer. */ - call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (method)), - build1 (NOP_EXPR, jni_func_type, jnifunc), - args, NULL_TREE); + call = build_call_list (TREE_TYPE (TREE_TYPE (method)), + jnifunc, args); /* If the JNI call returned a result, capture it here. If we had to unwrap JNI object results, we would do that here. */ if (res_var != NULL_TREE) - call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)), - res_var, call); + { + /* If the call returns an object, it may return a JNI weak + reference, in which case we must unwrap it. */ + if (! 
JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method)))) + call = build_call_nary (TREE_TYPE (TREE_TYPE (method)), + build_address_of (soft_unwrapjni_node), + 1, call); + call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)), + res_var, call); + } TREE_SIDE_EFFECTS (call) = 1; - CAN_COMPLETE_NORMALLY (call) = 1; body = build2 (COMPOUND_EXPR, void_type_node, body, call); TREE_SIDE_EFFECTS (body) = 1; /* Now free the environment we allocated. */ - call = build3 (CALL_EXPR, ptr_type_node, - build_address_of (soft_jnipopsystemframe_node), - build_tree_list (NULL_TREE, env_var), - NULL_TREE); + call = build_call_nary (ptr_type_node, + build_address_of (soft_jnipopsystemframe_node), + 1, env_var); TREE_SIDE_EFFECTS (call) = 1; - CAN_COMPLETE_NORMALLY (call) = 1; body = build2 (COMPOUND_EXPR, void_type_node, body, call); TREE_SIDE_EFFECTS (body) = 1; @@ -2340,8 +2781,7 @@ build_jni_stub (tree method) if (res_var != NULL_TREE) { tree drt; - if (! DECL_RESULT (method)) - abort (); + gcc_assert (DECL_RESULT (method)); /* Make sure we copy the result variable to the actual result. We use the type of the DECL_RESULT because it might be different from the return type of the function: @@ -2354,14 +2794,45 @@ build_jni_stub (tree method) } body = build2 (COMPOUND_EXPR, void_type_node, body, - build1 (RETURN_EXPR, res_type, res_var)); + build1 (RETURN_EXPR, void_type_node, res_var)); TREE_SIDE_EFFECTS (body) = 1; + /* Prepend class initialization for static methods reachable from + other classes. */ + if (METHOD_STATIC (method) + && (! METHOD_PRIVATE (method) + || INNER_CLASS_P (DECL_CONTEXT (method)))) + { + tree init = build_call_expr (soft_initclass_node, 1, + klass); + body = build2 (COMPOUND_EXPR, void_type_node, init, body); + TREE_SIDE_EFFECTS (body) = 1; + } + bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block), body, block); return bind; } + +/* Given lvalue EXP, return a volatile expression that references the + same object. */ + +tree +java_modify_addr_for_volatile (tree exp) +{ + tree exp_type = TREE_TYPE (exp); + tree v_type + = build_qualified_type (exp_type, + TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE); + tree addr = build_fold_addr_expr (exp); + v_type = build_pointer_type (v_type); + addr = fold_convert (v_type, addr); + exp = build_fold_indirect_ref (addr); + return exp; +} + + /* Expand an operation to extract from or store into a field. IS_STATIC is 1 iff the field is static. IS_PUTTING is 1 for putting into a field; 0 for getting from the field. @@ -2383,7 +2854,9 @@ expand_java_field_op (int is_static, int is_putting, int field_ref_index) tree new_value = is_putting ? pop_value (field_type) : NULL_TREE; tree field_ref; int is_error = 0; + tree original_self_type = self_type; tree field_decl; + tree modify_expr; if (! CLASS_LOADED_P (self_type)) load_class (self_type, 1); @@ -2394,9 +2867,30 @@ expand_java_field_op (int is_static, int is_putting, int field_ref_index) } else if (field_decl == NULL_TREE) { - error ("missing field '%s' in '%s'", - IDENTIFIER_POINTER (field_name), self_name); - is_error = 1; + if (! flag_verify_invocations) + { + int flags = ACC_PUBLIC; + if (is_static) + flags |= ACC_STATIC; + self_type = original_self_type; + field_decl = add_field (original_self_type, field_name, + field_type, flags); + DECL_ARTIFICIAL (field_decl) = 1; + DECL_IGNORED_P (field_decl) = 1; +#if 0 + /* FIXME: We should be pessimistic about volatility. We + don't know one way or another, but this is safe. + However, doing this has bad effects on code quality. 
We + need to look at better ways to do this. */ + TREE_THIS_VOLATILE (field_decl) = 1; +#endif + } + else + { + error ("missing field '%s' in '%s'", + IDENTIFIER_POINTER (field_name), self_name); + is_error = 1; + } } else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature) { @@ -2414,44 +2908,67 @@ expand_java_field_op (int is_static, int is_putting, int field_ref_index) } field_ref = build_field_ref (field_ref, self_type, field_name); - if (is_static) - field_ref = build_class_init (self_type, field_ref); + if (is_static + && ! flag_indirect_dispatch) + { + tree context = DECL_CONTEXT (field_ref); + if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context))) + field_ref = build_class_init (context, field_ref); + else + field_ref = build_class_init (self_type, field_ref); + } if (is_putting) { flush_quick_stack (); if (FIELD_FINAL (field_decl)) { if (DECL_CONTEXT (field_decl) != current_class) - error ("%Jassignment to final field '%D' not in field's class", - field_decl, field_decl); - else if (FIELD_STATIC (field_decl)) - { - if (!DECL_CLINIT_P (current_function_decl)) - warning ("%Jassignment to final static field `%D' not in " - "class initializer", - field_decl, field_decl); - } - else - { - tree cfndecl_name = DECL_NAME (current_function_decl); - if (! DECL_CONSTRUCTOR_P (current_function_decl) - && !ID_FINIT_P (cfndecl_name)) - warning ("%Jassignment to final field '%D' not in constructor", - field_decl, field_decl); - } - } - java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (field_ref), - field_ref, new_value)); + error ("assignment to final field %q+D not in field's class", + field_decl); + /* We used to check for assignments to final fields not + occurring in the class initializer or in a constructor + here. However, this constraint doesn't seem to be + enforced by the JVM. */ + } + + if (TREE_THIS_VOLATILE (field_decl)) + field_ref = java_modify_addr_for_volatile (field_ref); + + modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), + field_ref, new_value); + + if (TREE_THIS_VOLATILE (field_decl)) + java_add_stmt + (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0)); + + java_add_stmt (modify_expr); } else - push_value (field_ref); + { + tree temp = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (field_ref)); + java_add_local_var (temp); + + if (TREE_THIS_VOLATILE (field_decl)) + field_ref = java_modify_addr_for_volatile (field_ref); + + modify_expr + = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref); + java_add_stmt (modify_expr); + + if (TREE_THIS_VOLATILE (field_decl)) + java_add_stmt + (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0)); + + push_value (temp); + } + TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl); } -void -load_type_state (tree label) +static void +load_type_state (int pc) { int i; - tree vec = LABEL_TYPE_STATE (label); + tree vec = VEC_index (tree, type_states, pc); int cur_length = TREE_VEC_LENGTH (vec); stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl); for (i = 0; i < cur_length; i++) @@ -2494,6 +3011,8 @@ note_instructions (JCF *jcf, tree method) byte_ops = jcf->read_ptr; instruction_bits = xrealloc (instruction_bits, length + 1); memset (instruction_bits, 0, length + 1); + type_states = VEC_alloc (tree, gc, length + 1); + VEC_safe_grow_cleared (tree, gc, type_states, length + 1); /* This pass figures out which PC can be the targets of jumps. 
*/ for (PC = 0; PC < length;) @@ -2613,6 +3132,7 @@ expand_byte_code (JCF *jcf, tree method) int dead_code_index = -1; unsigned char* byte_ops; long length = DECL_CODE_LENGTH (method); + location_t max_location = input_location; stack_pointer = 0; JCF_SEEK (jcf, DECL_CODE_OFFSET (method)); @@ -2626,7 +3146,7 @@ expand_byte_code (JCF *jcf, tree method) int pc = GET_u2 (linenumber_pointer); linenumber_pointer += 4; if (pc >= length) - warning ("invalid PC in line number table"); + warning (0, "invalid PC in line number table"); else { if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0) @@ -2635,9 +3155,13 @@ expand_byte_code (JCF *jcf, tree method) } } - if (! verify_jvm_instructions (jcf, byte_ops, length)) + if (! verify_jvm_instructions_new (jcf, byte_ops, length)) return; + promote_arguments (); + cache_this_class_ref (method); + cache_cpool_data_ref (); + /* Translate bytecodes. */ linenumber_pointer = linenumber_table; for (PC = 0; PC < length;) @@ -2648,8 +3172,8 @@ expand_byte_code (JCF *jcf, tree method) flush_quick_stack (); if ((instruction_bits [PC] & BCODE_TARGET) != 0) java_add_stmt (build1 (LABEL_EXPR, void_type_node, label)); - if (LABEL_VERIFIED (label) || PC == 0) - load_type_state (label); + if ((instruction_bits[PC] & BCODE_VERIFIED) != 0) + load_type_state (PC); } if (! (instruction_bits [PC] & BCODE_VERIFIED)) @@ -2672,7 +3196,7 @@ expand_byte_code (JCF *jcf, tree method) { /* We've just reached the end of a region of dead code. */ if (extra_warnings) - warning ("unreachable bytecode from %d to before %d", + warning (0, "unreachable bytecode from %d to before %d", dead_code_index, PC); dead_code_index = -1; } @@ -2694,7 +3218,10 @@ expand_byte_code (JCF *jcf, tree method) linenumber_pointer += 4; if (pc == PC) { - input_location.line = GET_u2 (linenumber_pointer - 2); + int line = GET_u2 (linenumber_pointer - 2); + input_location = linemap_line_start (line_table, line, 1); + if (input_location > max_location) + max_location = input_location; if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS)) break; } @@ -2704,14 +3231,18 @@ expand_byte_code (JCF *jcf, tree method) PC = process_jvm_instruction (PC, byte_ops, length); maybe_poplevels (PC); } /* for */ - + + uncache_this_class_ref (method); + if (dead_code_index != -1) { /* We've just reached the end of a region of dead code. */ if (extra_warnings) - warning ("unreachable bytecode from %d to the end of the method", + warning (0, "unreachable bytecode from %d to the end of the method", dead_code_index); } + + DECL_FUNCTION_LAST_LINE (method) = max_location; } static void @@ -2726,6 +3257,12 @@ java_push_constant_from_pool (JCF *jcf, int index) c = build_ref_from_constant_pool (index); c = convert (promote_type (string_type_node), c); } + else if (JPOOL_TAG (jcf, index) == CONSTANT_Class + || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass) + { + tree record = get_class_constant (jcf, index); + c = build_class_ref (record); + } else c = get_constant (jcf, index); push_value (c); @@ -2738,12 +3275,18 @@ process_jvm_instruction (int PC, const unsigned char* byte_ops, const char *opname; /* Temporary ??? */ int oldpc = PC; /* PC at instruction start. */ - /* If the instruction is at the beginning of a exception handler, - replace the top of the stack with the thrown object reference */ + /* If the instruction is at the beginning of an exception handler, + replace the top of the stack with the thrown object reference. 
*/ if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET) { - tree type = pop_type (ptr_type_node); - push_value (build_exception_object_ref (type)); + /* Note that the verifier will not emit a type map at all for + dead exception handlers. In this case we just ignore the + situation. */ + if ((instruction_bits[PC] & BCODE_VERIFIED) != 0) + { + tree type = pop_type (promote_type (throwable_type_node)); + push_value (build_exception_object_ref (type)); + } } switch (byte_ops[PC++]) @@ -2758,7 +3301,8 @@ process_jvm_instruction (int PC, const unsigned char* byte_ops, { \ int saw_index = 0; \ int index = OPERAND_VALUE; \ - build_java_ret (find_local_variable (index, ptr_type_node, oldpc)); \ + build_java_ret \ + (find_local_variable (index, return_address_type_node, oldpc)); \ } #define JSR(OPERAND_TYPE, OPERAND_VALUE) \ @@ -2909,8 +3453,8 @@ process_jvm_instruction (int PC, const unsigned char* byte_ops, } #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \ - push_value (fold (build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \ - pop_value (OPERAND_TYPE##_type_node)))); + push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \ + pop_value (OPERAND_TYPE##_type_node))); #define CONVERT2(FROM_TYPE, TO_TYPE) \ { \ @@ -2937,7 +3481,6 @@ process_jvm_instruction (int PC, const unsigned char* byte_ops, decl = find_local_variable (index, type, oldpc); \ set_local_type (index, type); \ java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \ - update_aliases (decl, index); \ } #define STORE(OPERAND_TYPE, OPERAND_VALUE) \ @@ -3053,7 +3596,7 @@ peek_opcode_at_pc (JCF *jcf, int code_offset, int pc) This function is used by `give_name_to_locals' so that a local's DECL features a DECL_LOCAL_START_PC such that the first related - store operation will use DECL as a destination, not a unrelated + store operation will use DECL as a destination, not an unrelated temporary created for the occasion. This function uses a global (instruction_bits) `note_instructions' should @@ -3161,7 +3704,8 @@ maybe_adjust_start_pc (struct JCF *jcf, int code_offset, For method invocation, we modify the arguments so that a left-to-right order evaluation is performed. Saved expressions will, in CALL_EXPR order, be reused when the call will be expanded. -*/ + + We also promote outgoing args if needed. */ tree force_evaluation_order (tree node) @@ -3169,35 +3713,40 @@ force_evaluation_order (tree node) if (flag_syntax_only) return node; if (TREE_CODE (node) == CALL_EXPR - || TREE_CODE (node) == NEW_CLASS_EXPR || (TREE_CODE (node) == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR)) { - tree arg, cmp; + tree call, cmp; + int i, nargs; - if (!TREE_OPERAND (node, 1)) - return node; - - arg = node; - - /* Position arg properly, account for wrapped around ctors. */ + /* Account for wrapped around ctors. */ if (TREE_CODE (node) == COMPOUND_EXPR) - arg = TREE_OPERAND (node, 0); - - arg = TREE_OPERAND (arg, 1); - - /* Not having a list of argument here is an error. */ - if (TREE_CODE (arg) != TREE_LIST) - abort (); + call = TREE_OPERAND (node, 0); + else + call = node; + + nargs = call_expr_nargs (call); /* This reverses the evaluation order. This is a desired effect. */ - for (cmp = NULL_TREE; arg; arg = TREE_CHAIN (arg)) + for (i = 0, cmp = NULL_TREE; i < nargs; i++) { - tree saved = save_expr (force_evaluation_order (TREE_VALUE (arg))); + tree arg = CALL_EXPR_ARG (call, i); + /* Promote types smaller than integer. This is required by + some ABIs. 
*/ + tree type = TREE_TYPE (arg); + tree saved; + if (targetm.calls.promote_prototypes (type) + && INTEGRAL_TYPE_P (type) + && INT_CST_LT_UNSIGNED (TYPE_SIZE (type), + TYPE_SIZE (integer_type_node))) + arg = fold_convert (integer_type_node, arg); + + saved = save_expr (force_evaluation_order (arg)); cmp = (cmp == NULL_TREE ? saved : build2 (COMPOUND_EXPR, void_type_node, cmp, saved)); - TREE_VALUE (arg) = saved; + + CALL_EXPR_ARG (call, i) = saved; } if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR) @@ -3208,7 +3757,6 @@ force_evaluation_order (tree node) cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node); if (TREE_TYPE (cmp) != void_type_node) cmp = save_expr (cmp); - CAN_COMPLETE_NORMALLY (cmp) = CAN_COMPLETE_NORMALLY (node); TREE_SIDE_EFFECTS (cmp) = 1; node = cmp; } @@ -3216,46 +3764,60 @@ force_evaluation_order (tree node) return node; } -/* EXPR_WITH_FILE_LOCATION are used to keep track of the exact - location where an expression or an identifier were encountered. It - is necessary for languages where the frontend parser will handle - recursively more than one file (Java is one of them). */ +/* Build a node to represent empty statements and blocks. */ tree -build_expr_wfl (tree node, const char *file, int line, int col) +build_java_empty_stmt (void) { - static const char *last_file = 0; - static tree last_filenode = NULL_TREE; - tree wfl = make_node (EXPR_WITH_FILE_LOCATION); + tree t = build_empty_stmt (); + return t; +} - EXPR_WFL_NODE (wfl) = node; - EXPR_WFL_SET_LINECOL (wfl, line, col); - if (file != last_file) - { - last_file = file; - last_filenode = file ? get_identifier (file) : NULL_TREE; - } +/* Promote all args of integral type before generating any code. */ - EXPR_WFL_FILENAME_NODE (wfl) = last_filenode; - if (node) +static void +promote_arguments (void) +{ + int i; + tree arg; + for (arg = DECL_ARGUMENTS (current_function_decl), i = 0; + arg != NULL_TREE; arg = TREE_CHAIN (arg), i++) { - if (IS_NON_TYPE_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (node)))) - TREE_SIDE_EFFECTS (wfl) = TREE_SIDE_EFFECTS (node); - TREE_TYPE (wfl) = TREE_TYPE (node); + tree arg_type = TREE_TYPE (arg); + if (INTEGRAL_TYPE_P (arg_type) + && TYPE_PRECISION (arg_type) < 32) + { + tree copy = find_local_variable (i, integer_type_node, -1); + java_add_stmt (build2 (MODIFY_EXPR, integer_type_node, + copy, + fold_convert (integer_type_node, arg))); + } + if (TYPE_IS_WIDE (arg_type)) + i++; } - - return wfl; } +/* Create a local variable that points to the constant pool. */ -/* Build a node to represent empty statements and blocks. */ - -tree -build_java_empty_stmt (void) +static void +cache_cpool_data_ref (void) { - tree t = build_empty_stmt (); - CAN_COMPLETE_NORMALLY (t) = 1; - return t; + if (optimize) + { + tree cpool; + tree d = build_constant_data_ref (flag_indirect_classes); + tree cpool_ptr = build_decl (VAR_DECL, NULL_TREE, + build_pointer_type (TREE_TYPE (d))); + java_add_local_var (cpool_ptr); + TREE_INVARIANT (cpool_ptr) = 1; + TREE_CONSTANT (cpool_ptr) = 1; + + java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr), + cpool_ptr, build_address_of (d))); + cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr); + TREE_THIS_NOTRAP (cpool) = 1; + TYPE_CPOOL_DATA_REF (output_class) = cpool; + } } #include "gt-java-expr.h"
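As a rough illustration of the control flow that the reworked build_jni_stub now emits (lazily resolve the target function once, cache it in the static `meth' variable, call through the pointer, then pop the JNI frame), here is a minimal, hand-written C sketch. All names below (get_env_new_frame, lookup_jni_method, pop_system_frame, Foo_bar_stub) are hypothetical stand-ins rather than the libgcj entry points reached through the soft_*_node decls, the argument-size computation is only approximate, and weak-reference unwrapping and static-method class initialization are omitted.

#include <stdio.h>

typedef int (*bar_fn) (void *, void *, int);

/* Toy stand-ins for the runtime hooks reached through
   soft_getjnienvnewframe_node, soft_lookupjnimethod_node and
   soft_jnipopsystemframe_node; the real libgcj names and
   prototypes differ.  */
static void *get_env_new_frame (void *klass) { (void) klass; return (void *) 1; }
static void pop_system_frame (void *env) { (void) env; }

/* Pretend this is the user's JNI implementation of Foo.bar(int).  */
static int real_native_impl (void *env, void *klass, int x)
{ (void) env; (void) klass; return x * 2; }

static bar_fn lookup_jni_method (void *klass, const char *name,
                                 const char *sig, int args_size)
{ (void) klass; (void) name; (void) sig; (void) args_size;
  return real_native_impl; }

/* The cached pointer: the static `meth' VAR_DECL of the patch, now
   declared with the concrete function-pointer type.  */
static bar_fn meth;

/* Shape of the wrapper for a hypothetical `native int Foo.bar(int)'.  */
static int Foo_bar_stub (void *klass, int arg)
{
  void *env = get_env_new_frame (klass);          /* enter the JNI frame */
  int res;

  if (meth == 0)                                  /* COND_EXPR: resolve once, */
    meth = lookup_jni_method (klass, "bar", "(I)I",          /* then cache */
                              (int) (sizeof (void *) + sizeof (int)));

  res = meth (env, klass, arg);                   /* the actual JNI call */

  pop_system_frame (env);                         /* release the frame */
  return res;
}

int main (void)
{
  printf ("%d\n", Foo_bar_stub ((void *) 0, 21)); /* prints 42 */
  return 0;
}

Giving `meth' the concrete function-pointer type, as the patch does, matches the comment in the hunk above: keeping the function type referenced prevents equivalent function types from being rebuilt with different uids, which would otherwise perturb utf8 identifiers and output order.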