X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Fjava%2Fexpr.c;h=b08d33e9ada57be8be2e30ca60b5a6afac04c952;hb=85386eb7ba6e9a2b1647aaae75235cc96f5dafaf;hp=11748238fd751c37e29ee790d32d4d464413109b;hpb=a12c1e677b5114369c1d5fe69b741a536a48da1e;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/java/expr.c b/gcc/java/expr.c index 11748238fd7..b08d33e9ada 100644 --- a/gcc/java/expr.c +++ b/gcc/java/expr.c @@ -1,5 +1,5 @@ /* Process expressions for the GNU compiler for the Java(TM) language. - Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003 + Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc. This file is part of GCC. @@ -16,8 +16,8 @@ GNU General Public License for more details. You should have received a copy of the GNU General Public License along with GCC; see the file COPYING. If not, write to -the Free Software Foundation, 59 Temple Place - Suite 330, -Boston, MA 02111-1307, USA. +the Free Software Foundation, 51 Franklin Street, Fifth Floor, +Boston, MA 02110-1301, USA. Java and all Java-based marks are trademarks or registered trademarks of Sun Microsystems, Inc. in the United States and other countries. @@ -43,6 +43,8 @@ The Free Software Foundation is independent of Sun Microsystems, Inc. */ #include "toplev.h" #include "except.h" #include "ggc.h" +#include "tree-gimple.h" +#include "target.h" static void flush_quick_stack (void); static void push_value (tree); @@ -70,6 +72,8 @@ static void expand_compare (enum tree_code, tree, tree, int); static void expand_test (enum tree_code, tree, int); static void expand_cond (enum tree_code, tree, int); static void expand_java_goto (int); +static tree expand_java_switch (tree, int); +static void expand_java_add_case (tree, int, int); #if 0 static void expand_java_call (int, int); static void expand_java_ret (tree); @@ -81,10 +85,8 @@ static void java_push_constant_from_pool (struct JCF *, int); static void java_stack_pop (int); static tree build_java_throw_out_of_bounds_exception (tree); static tree build_java_check_indexed_type (tree, tree); -static tree case_identity (tree, tree); static unsigned char peek_opcode_at_pc (struct JCF *, int, int); -static int emit_init_test_initialization (void **entry, void * ptr); -static int get_offset_table_index (tree); +static void promote_arguments (void); static GTY(()) tree operand_type[59]; @@ -94,7 +96,7 @@ tree dtable_ident = NULL_TREE; /* Set to nonzero value in order to emit class initialization code before static field references. */ -int always_initialize_class_p; +int always_initialize_class_p = 0; /* We store the stack state in two places: Within a basic block, we use the quick_stack, which is a @@ -108,7 +110,7 @@ int always_initialize_class_p; If a variable is on the quick stack, it means the value of variable when the quick stack was last flushed. Conceptually, flush_quick_stack - saves all the the quick_stack elements in parellel. However, that is + saves all the quick_stack elements in parallel. However, that is complicated, so it actually saves them (i.e. copies each stack value to is home virtual register) from low indexes. This allows a quick_stack element at index i (counting from the bottom of stack the) to references @@ -126,7 +128,11 @@ int always_initialize_class_p; static GTY(()) tree quick_stack; /* A free-list of unused permanent TREE_LIST nodes. 
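A pattern worth noting for the remaining hunks: this patch systematically replaces direct RTL expansion (expand_assignment, expand_expr_stmt, emit_move_insn) with GENERIC trees appended to the current statement list, and replaces the variadic build ()/fold (build (...)) calls with the fixed-arity build2/build3/fold_build2/fold_build3 family. A minimal sketch of the new idiom, using only calls that already appear in the hunks above; the wrapper name is invented for illustration, and the sketch assumes the GCC-internal tree.h/java-tree.h declarations rather than being standalone code:

    /* Emit "dst = src" as a statement in the current method body, the
       way flush_quick_stack now does it.  */
    static void
    emit_copy (tree dst, tree src)
    {
      /* build2 takes the tree code, the result type and exactly two
         operands; java_add_stmt appends the tree to the method's
         pending statement list instead of expanding RTL on the spot.  */
      java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src));
    }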
*/ -static GTY((deletable (""))) tree tree_list_free_list; +static GTY((deletable)) tree tree_list_free_list; + +/* The physical memory page size used in this computer. See + build_field_ref(). */ +static GTY(()) tree page_size; /* The stack pointer of the Java virtual machine. This does include the size of the quick_stack. */ @@ -136,6 +142,12 @@ int stack_pointer; const unsigned char *linenumber_table; int linenumber_count; +/* Largest pc so far in this method that has been passed to lookup_label. */ +int highest_label_pc_this_method = -1; + +/* Base value for this method to add to pc to get generated label. */ +int start_label_pc_this_method = 0; + void init_expr_processing (void) { @@ -156,12 +168,16 @@ java_truthvalue_conversion (tree expr) switch (TREE_CODE (expr)) { - case EQ_EXPR: - case NE_EXPR: case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR: + case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR: + case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR: + case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR: + case ORDERED_EXPR: case UNORDERED_EXPR: case TRUTH_ANDIF_EXPR: case TRUTH_ORIF_EXPR: case TRUTH_AND_EXPR: case TRUTH_OR_EXPR: + case TRUTH_XOR_EXPR: + case TRUTH_NOT_EXPR: case ERROR_MARK: return expr; @@ -175,15 +191,14 @@ java_truthvalue_conversion (tree expr) case NEGATE_EXPR: case ABS_EXPR: case FLOAT_EXPR: - case FFS_EXPR: /* These don't change whether an object is nonzero or zero. */ return java_truthvalue_conversion (TREE_OPERAND (expr, 0)); case COND_EXPR: /* Distribute the conversion into the arms of a COND_EXPR. */ - return fold (build (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0), - java_truthvalue_conversion (TREE_OPERAND (expr, 1)), - java_truthvalue_conversion (TREE_OPERAND (expr, 2)))); + return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0), + java_truthvalue_conversion (TREE_OPERAND (expr, 1)), + java_truthvalue_conversion (TREE_OPERAND (expr, 2))); case NOP_EXPR: /* If this is widening the argument, we can ignore it. */ @@ -193,7 +208,8 @@ java_truthvalue_conversion (tree expr) /* fall through to default */ default: - return fold (build (NE_EXPR, boolean_type_node, expr, boolean_false_node)); + return fold_build2 (NE_EXPR, boolean_type_node, + expr, boolean_false_node); } } @@ -208,7 +224,7 @@ static void flush_quick_stack (void) { int stack_index = stack_pointer; - register tree prev, cur, next; + tree prev, cur, next; /* First reverse the quick_stack, and count the number of slots it has. */ for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next) @@ -232,7 +248,7 @@ flush_quick_stack (void) decl = find_stack_slot (stack_index, type); if (decl != node) - expand_assignment (decl, node, 0, 0); + java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (node), decl, node)); stack_index += 1 + TYPE_IS_WIDE (type); } } @@ -248,6 +264,9 @@ push_type_0 (tree type) n_words = 1 + TYPE_IS_WIDE (type); if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl)) return 0; + /* Allocate decl for this variable now, so we get a temporary that + survives the whole method. */ + find_stack_slot (stack_pointer, type); stack_type_map[stack_pointer++] = type; n_words--; while (--n_words >= 0) @@ -258,8 +277,8 @@ push_type_0 (tree type) void push_type (tree type) { - if (! 
push_type_0 (type)) - abort (); + int r = push_type_0 (type); + gcc_assert (r); } static void @@ -314,28 +333,57 @@ pop_type_0 (tree type, char **messagep) t = stack_type_map[--stack_pointer]; if (type == NULL_TREE || t == type) return t; + if (TREE_CODE (t) == TREE_LIST) + { + do + { + tree tt = TREE_PURPOSE (t); + if (! can_widen_reference_to (tt, type)) + { + t = tt; + goto fail; + } + t = TREE_CHAIN (t); + } + while (t); + return t; + } if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t) && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32) - return t; + return t; if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE) { + /* If the expected type we've been passed is object or ptr + (i.e. void*), the caller needs to know the real type. */ if (type == ptr_type_node || type == object_ptr_type_node) - return t; - else if (t == ptr_type_node) /* Special case for null reference. */ - return type; - else if (can_widen_reference_to (t, type)) - return t; - /* This is a kludge, but matches what Sun's verifier does. - It can be tricked, but is safe as long as type errors - (i.e. interface method calls) are caught at run-time. */ - else if (CLASS_INTERFACE (TYPE_NAME (TREE_TYPE (type)))) - return object_ptr_type_node; + return t; + + /* Since the verifier has already run, we know that any + types we see will be compatible. In BC mode, this fact + may be checked at runtime, but if that is so then we can + assume its truth here as well. So, we always succeed + here, with the expected type. */ + return type; + } + + if (! flag_verify_invocations && flag_indirect_dispatch + && t == object_ptr_type_node) + { + if (type != ptr_type_node) + warning (0, "need to insert runtime check for %s", + xstrdup (lang_printable_name (type, 0))); + return type; } /* lang_printable_name uses a static buffer, so we must save the result from calling it the first time. */ + fail: { char *temp = xstrdup (lang_printable_name (type, 0)); + /* If the stack contains a multi-word type, keep popping the stack until + the real type is found. */ + while (t == void_type_node) + t = stack_type_map[--stack_pointer]; *messagep = concat ("expected type '", temp, "' but stack contains '", lang_printable_name (t, 0), "'", NULL); @@ -361,7 +409,69 @@ pop_type (tree type) return type; } -/* Return 1f if SOURCE_TYPE can be safely widened to TARGET_TYPE. + +/* Return true if two type assertions are equal. */ + +static int +type_assertion_eq (const void * k1_p, const void * k2_p) +{ + type_assertion k1 = *(type_assertion *)k1_p; + type_assertion k2 = *(type_assertion *)k2_p; + return (k1.assertion_code == k2.assertion_code + && k1.op1 == k2.op1 + && k1.op2 == k2.op2); +} + +/* Hash a type assertion. */ + +static hashval_t +type_assertion_hash (const void *p) +{ + const type_assertion *k_p = p; + hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof + k_p->assertion_code, 0); + hash = iterative_hash (&k_p->op1, sizeof k_p->op1, hash); + return iterative_hash (&k_p->op2, sizeof k_p->op2, hash); +} + +/* Add an entry to the type assertion table for the given class. + CLASS is the class for which this assertion will be evaluated by the + runtime during loading/initialization. + ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h. + OP1 and OP2 are the operands. The tree type of these arguments may be + specific to each assertion_code. 
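For context on how this table gets used: the typical caller (can_widen_reference_to, later in this patch) records a deferred subtype check instead of proving it at compile time, roughly:

    /* Defer the check "source_type is assignable to target_type" to
       class load time (the -fno-verify-invocations / BC-ABI path).  */
    add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
                        source_type, target_type);

This mirrors the call added in can_widen_reference_to below; it uses no API beyond what the patch itself introduces.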
*/ + +void +add_type_assertion (tree class, int assertion_code, tree op1, tree op2) +{ + htab_t assertions_htab; + type_assertion as; + void **as_pp; + + assertions_htab = TYPE_ASSERTIONS (class); + if (assertions_htab == NULL) + { + assertions_htab = htab_create_ggc (7, type_assertion_hash, + type_assertion_eq, NULL); + TYPE_ASSERTIONS (current_class) = assertions_htab; + } + + as.assertion_code = assertion_code; + as.op1 = op1; + as.op2 = op2; + + as_pp = htab_find_slot (assertions_htab, &as, INSERT); + + /* Don't add the same assertion twice. */ + if (*as_pp) + return; + + *as_pp = ggc_alloc (sizeof (type_assertion)); + **(type_assertion **)as_pp = as; +} + + +/* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE. Handles array types and interfaces. */ int @@ -378,6 +488,28 @@ can_widen_reference_to (tree source_type, tree target_type) if (source_type == target_type) return 1; + + /* FIXME: This is very pessimistic, in that it checks everything, + even if we already know that the types are compatible. If we're + to support full Java class loader semantics, we need this. + However, we could do something more optimal. */ + if (! flag_verify_invocations) + { + add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE, + source_type, target_type); + + if (!quiet_flag) + warning (0, "assert: %s is assign compatible with %s", + xstrdup (lang_printable_name (target_type, 0)), + xstrdup (lang_printable_name (source_type, 0))); + /* Punt everything to runtime. */ + return 1; + } + + if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type)) + { + return 1; + } else { if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type)) @@ -411,7 +543,16 @@ can_widen_reference_to (tree source_type, tree target_type) int source_depth = class_depth (source_type); int target_depth = class_depth (target_type); - /* class_depth can return a negative depth if an error occurred */ + if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type)) + { + if (! quiet_flag) + warning (0, "assert: %s is assign compatible with %s", + xstrdup (lang_printable_name (target_type, 0)), + xstrdup (lang_printable_name (source_type, 0))); + return 1; + } + + /* class_depth can return a negative depth if an error occurred */ if (source_depth < 0 || target_depth < 0) return 0; @@ -419,21 +560,23 @@ can_widen_reference_to (tree source_type, tree target_type) { /* target_type is OK if source_type or source_type ancestors implement target_type. We handle multiple sub-interfaces */ + tree binfo, base_binfo; + int i; - tree basetype_vec = TYPE_BINFO_BASETYPES (source_type); - int n = TREE_VEC_LENGTH (basetype_vec), i; - for (i=0 ; i < n; i++) - if (can_widen_reference_to - (TREE_TYPE (TREE_VEC_ELT (basetype_vec, i)), - target_type)) + for (binfo = TYPE_BINFO (source_type), i = 0; + BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) + if (can_widen_reference_to + (BINFO_TYPE (base_binfo), target_type)) return 1; - if (n == 0) + + if (!i) return 0; } for ( ; source_depth > target_depth; source_depth--) { - source_type = TYPE_BINFO_BASETYPE (source_type, 0); + source_type + = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0)); } return source_type == target_type; } @@ -458,7 +601,7 @@ pop_value (tree type) } -/* Pop and discrad the top COUNT stack slots. */ +/* Pop and discard the top COUNT stack slots. 
*/ static void java_stack_pop (int count) @@ -467,15 +610,13 @@ java_stack_pop (int count) { tree type, val; - if (stack_pointer == 0) - abort (); + gcc_assert (stack_pointer != 0); type = stack_type_map[stack_pointer - 1]; if (type == TYPE_SECOND) { count--; - if (stack_pointer == 1 || count <= 0) - abort (); + gcc_assert (stack_pointer != 1 && count > 0); type = stack_type_map[stack_pointer - 2]; } @@ -490,7 +631,7 @@ static void java_stack_swap (void) { tree type1, type2; - rtx temp; + tree temp; tree decl1, decl2; if (stack_pointer < 2 @@ -500,13 +641,20 @@ java_stack_swap (void) || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2)) /* Bad stack swap. */ abort (); + /* Bad stack swap. */ flush_quick_stack (); decl1 = find_stack_slot (stack_pointer - 1, type1); decl2 = find_stack_slot (stack_pointer - 2, type2); - temp = copy_to_reg (DECL_RTL (decl1)); - emit_move_insn (DECL_RTL (decl1), DECL_RTL (decl2)); - emit_move_insn (DECL_RTL (decl2), temp); + temp = build_decl (VAR_DECL, NULL_TREE, type1); + java_add_local_var (temp); + java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1)); + java_add_stmt (build2 (MODIFY_EXPR, type2, + find_stack_slot (stack_pointer - 1, type2), + decl2)); + java_add_stmt (build2 (MODIFY_EXPR, type1, + find_stack_slot (stack_pointer - 2, type1), + temp)); stack_type_map[stack_pointer - 1] = type2; stack_type_map[stack_pointer - 2] = type1; } @@ -533,24 +681,24 @@ java_stack_dup (int size, int offset) type = stack_type_map [src_index]; if (type == TYPE_SECOND) { - if (src_index <= low_index) - /* Dup operation splits 64-bit number. */ - abort (); + /* Dup operation splits 64-bit number. */ + gcc_assert (src_index > low_index); stack_type_map[dst_index] = type; src_index--; dst_index--; type = stack_type_map[src_index]; - if (! TYPE_IS_WIDE (type)) - abort (); + gcc_assert (TYPE_IS_WIDE (type)); } - else if (TYPE_IS_WIDE (type)) - abort (); + else + gcc_assert (! TYPE_IS_WIDE (type)); if (src_index != dst_index) { tree src_decl = find_stack_slot (src_index, type); tree dst_decl = find_stack_slot (dst_index, type); - emit_move_insn (DECL_RTL (dst_decl), DECL_RTL (src_decl)); + + java_add_stmt + (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl)); stack_type_map[dst_index] = type; } } @@ -564,13 +712,13 @@ build_java_athrow (tree node) { tree call; - call = build (CALL_EXPR, - void_type_node, - build_address_of (throw_node), - build_tree_list (NULL_TREE, node), - NULL_TREE); + call = build3 (CALL_EXPR, + void_type_node, + build_address_of (throw_node), + build_tree_list (NULL_TREE, node), + NULL_TREE); TREE_SIDE_EFFECTS (call) = 1; - expand_expr_stmt (call); + java_add_stmt (call); java_stack_pop (stack_pointer); } @@ -581,19 +729,20 @@ build_java_jsr (int target_pc, int return_pc) { tree where = lookup_label (target_pc); tree ret = lookup_label (return_pc); - tree ret_label = fold (build1 (ADDR_EXPR, return_address_type_node, ret)); + tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret); push_value (ret_label); flush_quick_stack (); - emit_jump (label_rtx (where)); - expand_label (ret); - if (instruction_bits [return_pc] & BCODE_VERIFIED) - load_type_state (ret); + java_add_stmt (build1 (GOTO_EXPR, void_type_node, where)); + + /* Do not need to emit the label here. We noted the existence of the + label as a jump target in note_instructions; we'll emit the label + for real at the beginning of the expand_byte_code loop. 
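Since build_java_jsr and build_java_ret (next hunk) now model the jsr/ret opcodes purely with labels, pushed label addresses and gotos, the control flow they produce is essentially the labels-as-values pattern. A rough standalone GNU C analogue, with the label and function names invented (uses the GCC address-of-label extension):

    static void
    jsr_ret_analogue (void)
    {
      void *ret_addr;

      ret_addr = &&after_jsr;   /* jsr: push the return address ...       */
      goto subroutine;          /*      ... and branch to the target pc.  */
    after_jsr:
      return;

    subroutine:
      goto *ret_addr;           /* ret: computed jump back through it.    */
    }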
*/ } static void build_java_ret (tree location) { - expand_computed_goto (location); + java_add_stmt (build1 (GOTO_EXPR, void_type_node, location)); } /* Implementation of operations on array: new, load, store, length */ @@ -637,7 +786,7 @@ encode_newarray_type (tree type) else if (type == long_type_node) return 11; else - abort (); + gcc_unreachable (); } /* Build a call to _Jv_ThrowBadArrayIndex(), the @@ -646,9 +795,9 @@ encode_newarray_type (tree type) static tree build_java_throw_out_of_bounds_exception (tree index) { - tree node = build (CALL_EXPR, int_type_node, - build_address_of (soft_badarrayindex_node), - build_tree_list (NULL_TREE, index), NULL_TREE); + tree node = build3 (CALL_EXPR, int_type_node, + build_address_of (soft_badarrayindex_node), + build_tree_list (NULL_TREE, index), NULL_TREE); TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */ return (node); } @@ -663,26 +812,23 @@ build_java_array_length_access (tree node) tree array_type = TREE_TYPE (type); HOST_WIDE_INT length; - /* JVM spec: If the arrayref is null, the arraylength instruction - throws a NullPointerException. The only way we could get a node - of type ptr_type_node at this point is `aconst_null; arraylength' - or something equivalent. */ - if (type == ptr_type_node) - return build (CALL_EXPR, int_type_node, - build_address_of (soft_nullpointer_node), - NULL_TREE, NULL_TREE); - if (!is_array_type_p (type)) - abort (); + { + /* With the new verifier, we will see an ordinary pointer type + here. In this case, we just use an arbitrary array type. */ + array_type = build_java_array_type (object_ptr_type_node, -1); + type = promote_type (array_type); + } length = java_array_type_length (type); if (length >= 0) - return build_int_2 (length, 0); + return build_int_cst (NULL_TREE, length); - node = build (COMPONENT_REF, int_type_node, - build_java_indirect_ref (array_type, node, - flag_check_references), - lookup_field (&array_type, get_identifier ("length"))); + node = build3 (COMPONENT_REF, int_type_node, + build_java_indirect_ref (array_type, node, + flag_check_references), + lookup_field (&array_type, get_identifier ("length")), + NULL_TREE); IS_ARRAY_LENGTH_ACCESS (node) = 1; return node; } @@ -696,15 +842,14 @@ java_check_reference (tree expr, int check) { if (!flag_syntax_only && check) { - tree cond; expr = save_expr (expr); - cond = build (COND_EXPR, void_type_node, - build (EQ_EXPR, boolean_type_node, expr, null_pointer_node), - build (CALL_EXPR, void_type_node, - build_address_of (soft_nullpointer_node), - NULL_TREE, NULL_TREE), - empty_stmt_node); - expr = build (COMPOUND_EXPR, TREE_TYPE (expr), cond, expr); + expr = build3 (COND_EXPR, TREE_TYPE (expr), + build2 (EQ_EXPR, boolean_type_node, + expr, null_pointer_node), + build3 (CALL_EXPR, void_type_node, + build_address_of (soft_nullpointer_node), + NULL_TREE, NULL_TREE), + expr); } return expr; @@ -715,7 +860,10 @@ java_check_reference (tree expr, int check) tree build_java_indirect_ref (tree type, tree expr, int check) { - return build1 (INDIRECT_REF, type, java_check_reference (expr, check)); + tree t; + t = java_check_reference (expr, check); + t = convert (build_pointer_type (type), t); + return build1 (INDIRECT_REF, type, t); } /* Implement array indexing (either as l-value or r-value). 
@@ -731,6 +879,13 @@ build_java_arrayaccess (tree array, tree type, tree index) tree ref; tree array_type = TREE_TYPE (TREE_TYPE (array)); + if (!is_array_type_p (TREE_TYPE (array))) + { + /* With the new verifier, we will see an ordinary pointer type + here. In this case, we just use the correct array type. */ + array_type = build_java_array_type (type, -1); + } + if (flag_bounds_check) { /* Generate: @@ -739,15 +894,15 @@ build_java_arrayaccess (tree array, tree type, tree index) * Note this is equivalent to and more efficient than: * INDEX < 0 || INDEX >= LEN && throw ... */ tree test; - tree len = build_java_array_length_access (array); - TREE_TYPE (len) = unsigned_int_type_node; - test = fold (build (GE_EXPR, boolean_type_node, - convert (unsigned_int_type_node, index), - len)); + tree len = convert (unsigned_int_type_node, + build_java_array_length_access (array)); + test = fold_build2 (GE_EXPR, boolean_type_node, + convert (unsigned_int_type_node, index), + len); if (! integer_zerop (test)) { - throw = build (TRUTH_ANDIF_EXPR, int_type_node, test, - build_java_throw_out_of_bounds_exception (index)); + throw = build2 (TRUTH_ANDIF_EXPR, int_type_node, test, + build_java_throw_out_of_bounds_exception (index)); /* allows expansion within COMPOUND */ TREE_SIDE_EFFECTS( throw ) = 1; } @@ -756,16 +911,16 @@ build_java_arrayaccess (tree array, tree type, tree index) /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order to have the bounds check evaluated first. */ if (throw != NULL_TREE) - index = build (COMPOUND_EXPR, int_type_node, throw, index); + index = build2 (COMPOUND_EXPR, int_type_node, throw, index); data_field = lookup_field (&array_type, get_identifier ("data")); - ref = build (COMPONENT_REF, TREE_TYPE (data_field), - build_java_indirect_ref (array_type, array, - flag_check_references), - data_field); + ref = build3 (COMPONENT_REF, TREE_TYPE (data_field), + build_java_indirect_ref (array_type, array, + flag_check_references), + data_field, NULL_TREE); - node = build (ARRAY_REF, type, ref, index); + node = build4 (ARRAY_REF, type, ref, index, NULL_TREE, NULL_TREE); return node; } @@ -780,26 +935,35 @@ build_java_arraystore_check (tree array, tree object) tree array_type_p = TREE_TYPE (array); tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object))); - if (! is_array_type_p (array_type_p)) - abort (); + if (! flag_verify_invocations) + { + /* With the new verifier, we don't track precise types. FIXME: + performance regression here. */ + element_type = TYPE_NAME (object_type_node); + } + else + { + gcc_assert (is_array_type_p (array_type_p)); - /* Get the TYPE_DECL for ARRAY's element type. */ - element_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p)))); + /* Get the TYPE_DECL for ARRAY's element type. */ + element_type + = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p)))); + } - if (TREE_CODE (element_type) != TYPE_DECL - || TREE_CODE (object_type) != TYPE_DECL) - abort (); + gcc_assert (TREE_CODE (element_type) == TYPE_DECL + && TREE_CODE (object_type) == TYPE_DECL); if (!flag_store_check) return build1 (NOP_EXPR, array_type_p, array); - /* No check is needed if the element type is final or is itself an array. - Also check that element_type matches object_type, since in the bytecode - compilation case element_type may be the actual element type of the array - rather than its declared type. */ + /* No check is needed if the element type is final. 
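The bounds check built above in build_java_arrayaccess relies on the single-compare trick its comment claims: converting the index to unsigned makes one comparison cover both "index < 0" and "index >= length", because a negative index wraps to a very large unsigned value. A standalone C illustration of just that equivalence (function names invented):

    #include <stdio.h>

    /* The condition the generated GE_EXPR tests: nonzero when INDEX is
       outside an array of LEN elements.  */
    static int
    out_of_bounds (int index, int len)
    {
      return (unsigned int) index >= (unsigned int) len;
    }

    int
    main (void)
    {
      printf ("%d %d %d\n",
              out_of_bounds (-1, 4),   /* 1: negative wraps to a huge value  */
              out_of_bounds (3, 4),    /* 0: in range                        */
              out_of_bounds (4, 4));   /* 1: one past the end                */
      return 0;
    }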
Also check that + element_type matches object_type, since in the bytecode + compilation case element_type may be the actual element type of + the array rather than its declared type. However, if we're doing + indirect dispatch, we can't do the `final' optimization. */ if (element_type == object_type - && (TYPE_ARRAY_P (TREE_TYPE (element_type)) - || CLASS_FINAL (element_type))) + && ! flag_indirect_dispatch + && CLASS_FINAL (element_type)) return build1 (NOP_EXPR, array_type_p, array); /* OBJECT might be wrapped by a SAVE_EXPR. */ @@ -827,11 +991,11 @@ build_java_arraystore_check (tree array, tree object) } /* Build an invocation of _Jv_CheckArrayStore */ - check = build (CALL_EXPR, void_type_node, - build_address_of (soft_checkarraystore_node), - tree_cons (NULL_TREE, array, - build_tree_list (NULL_TREE, object)), - NULL_TREE); + check = build3 (CALL_EXPR, void_type_node, + build_address_of (soft_checkarraystore_node), + tree_cons (NULL_TREE, array, + build_tree_list (NULL_TREE, object)), + NULL_TREE); TREE_SIDE_EFFECTS (check) = 1; return check; @@ -841,30 +1005,17 @@ build_java_arraystore_check (tree array, tree object) ARRAY_NODE. This function is used to retrieve something less vague than a pointer type when indexing the first dimension of something like [[. May return a corrected type, if necessary, otherwise INDEXED_TYPE is - return unchanged. - As a side effect, it also makes sure that ARRAY_NODE is an array. */ + return unchanged. */ static tree -build_java_check_indexed_type (tree array_node, tree indexed_type) +build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED, + tree indexed_type) { - tree elt_type; - - if (!is_array_type_p (TREE_TYPE (array_node))) - abort (); - - elt_type = (TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (array_node)))); - - if (indexed_type == ptr_type_node ) - return promote_type (elt_type); + /* We used to check to see if ARRAY_NODE really had array type. + However, with the new verifier, this is not necessary, as we know + that the object will be an array of the appropriate type. */ - /* BYTE/BOOLEAN store and load are used for both type */ - if (indexed_type == byte_type_node && elt_type == boolean_type_node ) - return boolean_type_node; - - if (indexed_type != elt_type ) - abort (); - else - return indexed_type; + return indexed_type; } /* newarray triggers a call to _Jv_NewPrimArray. This function should be @@ -886,16 +1037,16 @@ build_newarray (int atype_value, tree length) and save the runtime some work. However, the bytecode generator expects to find the type_code int here. */ if (flag_emit_class_files) - type_arg = build_int_2 (atype_value, 0); + type_arg = build_int_cst (NULL_TREE, atype_value); else type_arg = build_class_ref (prim_type); - return build (CALL_EXPR, promote_type (type), - build_address_of (soft_newarray_node), - tree_cons (NULL_TREE, - type_arg, - build_tree_list (NULL_TREE, length)), - NULL_TREE); + return build3 (CALL_EXPR, promote_type (type), + build_address_of (soft_newarray_node), + tree_cons (NULL_TREE, + type_arg, + build_tree_list (NULL_TREE, length)), + NULL_TREE); } /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size @@ -909,13 +1060,13 @@ build_anewarray (tree class_type, tree length) host_integerp (length, 0) ? 
tree_low_cst (length, 0) : -1); - return build (CALL_EXPR, promote_type (type), - build_address_of (soft_anewarray_node), - tree_cons (NULL_TREE, length, - tree_cons (NULL_TREE, build_class_ref (class_type), - build_tree_list (NULL_TREE, - null_pointer_node))), - NULL_TREE); + return build3 (CALL_EXPR, promote_type (type), + build_address_of (soft_anewarray_node), + tree_cons (NULL_TREE, length, + tree_cons (NULL_TREE, build_class_ref (class_type), + build_tree_list (NULL_TREE, + null_pointer_node))), + NULL_TREE); } /* Return a node the evaluates 'new TYPE[LENGTH]'. */ @@ -942,13 +1093,14 @@ expand_java_multianewarray (tree class_type, int ndim) for( i = 0; i < ndim; i++ ) args = tree_cons (NULL_TREE, pop_value (int_type_node), args); - push_value (build (CALL_EXPR, - promote_type (class_type), - build_address_of (soft_multianewarray_node), - tree_cons (NULL_TREE, build_class_ref (class_type), - tree_cons (NULL_TREE, - build_int_2 (ndim, 0), args )), - NULL_TREE)); + push_value (build3 (CALL_EXPR, + promote_type (class_type), + build_address_of (soft_multianewarray_node), + tree_cons (NULL_TREE, build_class_ref (class_type), + tree_cons (NULL_TREE, + build_int_cst (NULL_TREE, ndim), + args)), + NULL_TREE)); } /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that @@ -968,7 +1120,20 @@ expand_java_arraystore (tree rhs_type_node) && TYPE_PRECISION (rhs_type_node) <= 32) ? int_type_node : rhs_type_node); tree index = pop_value (int_type_node); - tree array = pop_value (ptr_type_node); + tree array_type, array; + + /* If we're processing an `aaload' we might as well just pick + `Object'. */ + if (TREE_CODE (rhs_type_node) == POINTER_TYPE) + { + array_type = build_java_array_type (object_ptr_type_node, -1); + rhs_type_node = object_ptr_type_node; + } + else + array_type = build_java_array_type (rhs_type_node, -1); + + array = pop_value (array_type); + array = build1 (NOP_EXPR, promote_type (array_type), array); rhs_type_node = build_java_check_indexed_type (array, rhs_type_node); @@ -980,13 +1145,11 @@ expand_java_arraystore (tree rhs_type_node) if (TREE_CODE (rhs_type_node) == POINTER_TYPE) { tree check = build_java_arraystore_check (array, rhs_node); - expand_expr_stmt (check); + java_add_stmt (check); } - expand_assignment (build_java_arrayaccess (array, - rhs_type_node, - index), - rhs_node, 0, 0); + array = build_java_arrayaccess (array, rhs_type_node, index); + java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (array), array, rhs_node)); } /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes @@ -997,31 +1160,35 @@ expand_java_arraystore (tree rhs_type_node) */ static void -expand_java_arrayload (tree lhs_type_node ) +expand_java_arrayload (tree lhs_type_node) { tree load_node; tree index_node = pop_value (int_type_node); - tree array_node = pop_value (ptr_type_node); + tree array_type; + tree array_node; - index_node = save_expr (index_node); - array_node = save_expr (array_node); - - if (TREE_TYPE (array_node) == ptr_type_node) - /* The only way we could get a node of type ptr_type_node at this - point is `aconst_null; arraylength' or something equivalent, so - unconditionally throw NullPointerException. */ - load_node = build (CALL_EXPR, lhs_type_node, - build_address_of (soft_nullpointer_node), - NULL_TREE, NULL_TREE); - else + /* If we're processing an `aaload' we might as well just pick + `Object'. 
*/ + if (TREE_CODE (lhs_type_node) == POINTER_TYPE) { - lhs_type_node = build_java_check_indexed_type (array_node, lhs_type_node); - load_node = build_java_arrayaccess (array_node, - lhs_type_node, - index_node); + array_type = build_java_array_type (object_ptr_type_node, -1); + lhs_type_node = object_ptr_type_node; } + else + array_type = build_java_array_type (lhs_type_node, -1); + array_node = pop_value (array_type); + array_node = build1 (NOP_EXPR, promote_type (array_type), array_node); + + index_node = save_expr (index_node); + array_node = save_expr (array_node); + + lhs_type_node = build_java_check_indexed_type (array_node, + lhs_type_node); + load_node = build_java_arrayaccess (array_node, + lhs_type_node, + index_node); if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32) - load_node = fold (build1 (NOP_EXPR, int_type_node, load_node)); + load_node = fold_build1 (NOP_EXPR, int_type_node, load_node); push_value (load_node); } @@ -1043,11 +1210,11 @@ expand_java_array_length (void) static tree build_java_monitor (tree call, tree object) { - return (build (CALL_EXPR, + return build3 (CALL_EXPR, void_type_node, build_address_of (call), build_tree_list (NULL_TREE, object), - NULL_TREE)); + NULL_TREE); } /* Emit code for one of the PUSHC instructions. */ @@ -1059,10 +1226,7 @@ expand_java_pushc (int ival, tree type) if (type == ptr_type_node && ival == 0) value = null_pointer_node; else if (type == int_type_node || type == long_type_node) - { - value = build_int_2 (ival, ival < 0 ? -1 : 0); - TREE_TYPE (value) = type; - } + value = build_int_cst (type, ival); else if (type == float_type_node || type == double_type_node) { REAL_VALUE_TYPE x; @@ -1070,7 +1234,7 @@ expand_java_pushc (int ival, tree type) value = build_real (type, x); } else - abort (); + gcc_unreachable (); push_value (value); } @@ -1079,12 +1243,12 @@ static void expand_java_return (tree type) { if (type == void_type_node) - expand_null_return (); + java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL)); else { tree retval = pop_value (type); tree res = DECL_RESULT (current_function_decl); - retval = build (MODIFY_EXPR, TREE_TYPE (res), res, retval); + retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval); /* Handle the situation where the native integer type is smaller than the JVM integer. It can happen for many cross compilers. @@ -1096,7 +1260,7 @@ expand_java_return (tree type) retval = build1(NOP_EXPR, TREE_TYPE(res), retval); TREE_SIDE_EFFECTS (retval) = 1; - expand_return (retval); + java_add_stmt (build1 (RETURN_EXPR, TREE_TYPE (retval), retval)); } } @@ -1113,13 +1277,12 @@ expand_load_internal (int index, tree type, int pc) value into it. Then we push this new local on the stack. Hopefully this all gets optimized out. 
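The copy made in expand_load_internal matters because a JVM load pushes the current value of the local, not an alias to the slot: a later store to the same local must not change what is already on the operand stack. A tiny standalone C analogue of the semantics being preserved (names invented):

    static int
    load_is_a_copy (void)
    {
      int local = 1;
      int stacked = local;   /* iload: the stack receives a copy of the value  */
      local = 2;             /* a later istore to the same slot ...            */
      return stacked;        /* ... leaves the stacked copy at 1               */
    }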
*/ copy = build_decl (VAR_DECL, NULL_TREE, type); - DECL_CONTEXT (copy) = current_function_decl; - layout_decl (copy, 0); - DECL_REGISTER (copy) = 1; - expand_decl (copy); - MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (copy); - DECL_INITIAL (copy) = var; - expand_decl_init (copy); + if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)) + && TREE_TYPE (copy) != TREE_TYPE (var)) + var = convert (type, var); + java_add_local_var (copy); + java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var)); + push_value (copy); } @@ -1129,7 +1292,8 @@ build_address_of (tree value) return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value); } -bool class_has_finalize_method (tree type) +bool +class_has_finalize_method (tree type) { tree super = CLASSTYPE_SUPER (type); @@ -1140,6 +1304,19 @@ bool class_has_finalize_method (tree type) return HAS_FINALIZER_P (type) || class_has_finalize_method (super); } +tree +java_create_object (tree type) +{ + tree alloc_node = (class_has_finalize_method (type) + ? alloc_object_node + : alloc_no_finalizer_node); + + return build3 (CALL_EXPR, promote_type (type), + build_address_of (alloc_node), + build_tree_list (NULL_TREE, build_class_ref (type)), + NULL_TREE); +} + static void expand_java_NEW (tree type) { @@ -1150,12 +1327,10 @@ expand_java_NEW (tree type) if (! CLASS_LOADED_P (type)) load_class (type, 1); safe_layout_class (type); - push_value (build (CALL_EXPR, promote_type (type), - build_address_of (alloc_node), - tree_cons (NULL_TREE, build_class_ref (type), - build_tree_list (NULL_TREE, - size_in_bytes (type))), - NULL_TREE)); + push_value (build3 (CALL_EXPR, promote_type (type), + build_address_of (alloc_node), + build_tree_list (NULL_TREE, build_class_ref (type)), + NULL_TREE)); } /* This returns an expression which will extract the class of an @@ -1167,13 +1342,13 @@ build_get_class (tree value) tree class_field = lookup_field (&dtable_type, get_identifier ("class")); tree vtable_field = lookup_field (&object_type_node, get_identifier ("vtable")); - return build (COMPONENT_REF, class_ptr_type, - build1 (INDIRECT_REF, dtable_type, - build (COMPONENT_REF, dtable_ptr_type, - build_java_indirect_ref (object_type_node, value, - flag_check_references), - vtable_field)), - class_field); + tree tmp = build3 (COMPONENT_REF, dtable_ptr_type, + build_java_indirect_ref (object_type_node, value, + flag_check_references), + vtable_field, NULL_TREE); + return build3 (COMPONENT_REF, class_ptr_type, + build1 (INDIRECT_REF, dtable_type, tmp), + class_field, NULL_TREE); } /* This builds the tree representation of the `instanceof' operator. @@ -1205,9 +1380,10 @@ build_instanceof (tree value, tree type) /* Anything except `null' is an instance of Object. Likewise, if the object is known to be an instance of the class, then we only need to check for `null'. */ - expr = build (NE_EXPR, itype, value, null_pointer_node); + expr = build2 (NE_EXPR, itype, value, null_pointer_node); } - else if (! TYPE_ARRAY_P (type) + else if (flag_verify_invocations + && ! TYPE_ARRAY_P (type) && ! TYPE_ARRAY_P (valtype) && DECL_P (klass) && DECL_P (valclass) && ! 
CLASS_INTERFACE (valclass) @@ -1223,21 +1399,22 @@ build_instanceof (tree value, tree type) else if (DECL_P (klass) && CLASS_FINAL (klass)) { tree save = save_expr (value); - expr = build (COND_EXPR, itype, - save, - build (EQ_EXPR, itype, - build_get_class (save), - build_class_ref (type)), - boolean_false_node); + expr = build3 (COND_EXPR, itype, + build2 (NE_EXPR, boolean_type_node, + save, null_pointer_node), + build2 (EQ_EXPR, itype, + build_get_class (save), + build_class_ref (type)), + boolean_false_node); } else { - expr = build (CALL_EXPR, itype, - build_address_of (soft_instanceof_node), - tree_cons (NULL_TREE, value, - build_tree_list (NULL_TREE, - build_class_ref (type))), - NULL_TREE); + expr = build3 (CALL_EXPR, itype, + build_address_of (soft_instanceof_node), + tree_cons (NULL_TREE, value, + build_tree_list (NULL_TREE, + build_class_ref (type))), + NULL_TREE); } TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value); return expr; @@ -1255,28 +1432,28 @@ static void expand_java_CHECKCAST (tree type) { tree value = pop_value (ptr_type_node); - value = build (CALL_EXPR, promote_type (type), - build_address_of (soft_checkcast_node), - tree_cons (NULL_TREE, build_class_ref (type), - build_tree_list (NULL_TREE, value)), - NULL_TREE); + value = build3 (CALL_EXPR, promote_type (type), + build_address_of (soft_checkcast_node), + tree_cons (NULL_TREE, build_class_ref (type), + build_tree_list (NULL_TREE, value)), + NULL_TREE); push_value (value); } static void expand_iinc (unsigned int local_var_index, int ival, int pc) { - tree local_var, res; - tree constant_value; + tree local_var, res; + tree constant_value; - flush_quick_stack (); - local_var = find_local_variable (local_var_index, int_type_node, pc); - constant_value = build_int_2 (ival, ival < 0 ? -1 : 0); - res = fold (build (PLUS_EXPR, int_type_node, local_var, constant_value)); - expand_assignment (local_var, res, 0, 0); + flush_quick_stack (); + local_var = find_local_variable (local_var_index, int_type_node, pc); + constant_value = build_int_cst (NULL_TREE, ival); + res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value); + java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res)); } - + tree build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2) { @@ -1313,14 +1490,12 @@ build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2) } } - if (! call) - abort (); - - call = build (CALL_EXPR, type, - build_address_of (call), - tree_cons (NULL_TREE, arg1, - build_tree_list (NULL_TREE, arg2)), - NULL_TREE); + gcc_assert (call); + call = build3 (CALL_EXPR, type, + build_address_of (call), + tree_cons (NULL_TREE, arg1, + build_tree_list (NULL_TREE, arg2)), + NULL_TREE); return call; } @@ -1340,37 +1515,38 @@ build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2) } case LSHIFT_EXPR: case RSHIFT_EXPR: - mask = build_int_2 (TYPE_PRECISION (TREE_TYPE (arg1)) - 1, 0); - arg2 = fold (build (BIT_AND_EXPR, int_type_node, arg2, mask)); + mask = build_int_cst (NULL_TREE, + TYPE_PRECISION (TREE_TYPE (arg1)) - 1); + arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask); break; case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */ case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */ arg1 = save_expr (arg1); arg2 = save_expr (arg2); { - tree ifexp1 = fold ( build (op == COMPARE_L_EXPR ? 
GT_EXPR : LT_EXPR, - boolean_type_node, arg1, arg2)); - tree ifexp2 = fold ( build (EQ_EXPR, boolean_type_node, arg1, arg2)); - tree second_compare = fold (build (COND_EXPR, int_type_node, + tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR, + boolean_type_node, arg1, arg2); + tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2); + tree second_compare = fold_build3 (COND_EXPR, int_type_node, ifexp2, integer_zero_node, op == COMPARE_L_EXPR ? integer_minus_one_node - : integer_one_node)); - return fold (build (COND_EXPR, int_type_node, ifexp1, + : integer_one_node); + return fold_build3 (COND_EXPR, int_type_node, ifexp1, op == COMPARE_L_EXPR ? integer_one_node : integer_minus_one_node, - second_compare)); + second_compare); } case COMPARE_EXPR: arg1 = save_expr (arg1); arg2 = save_expr (arg2); { - tree ifexp1 = fold ( build (LT_EXPR, boolean_type_node, arg1, arg2)); - tree ifexp2 = fold ( build (GT_EXPR, boolean_type_node, arg1, arg2)); - tree second_compare = fold ( build (COND_EXPR, int_type_node, - ifexp2, integer_one_node, - integer_zero_node)); - return fold (build (COND_EXPR, int_type_node, - ifexp1, integer_minus_one_node, second_compare)); + tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2); + tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2); + tree second_compare = fold_build3 (COND_EXPR, int_type_node, + ifexp2, integer_one_node, + integer_zero_node); + return fold_build3 (COND_EXPR, int_type_node, + ifexp1, integer_minus_one_node, second_compare); } case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR: @@ -1383,11 +1559,11 @@ build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2) arg1 = convert (double_type_node, arg1); arg2 = convert (double_type_node, arg2); } - call = build (CALL_EXPR, double_type_node, - build_address_of (soft_fmod_node), - tree_cons (NULL_TREE, arg1, - build_tree_list (NULL_TREE, arg2)), - NULL_TREE); + call = build3 (CALL_EXPR, double_type_node, + build_address_of (soft_fmod_node), + tree_cons (NULL_TREE, arg1, + build_tree_list (NULL_TREE, arg2)), + NULL_TREE); if (type != double_type_node) call = convert (type, call); return call; @@ -1401,7 +1577,7 @@ build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2) break; default: ; } - return fold (build (op, type, arg1, arg2)); + return fold_build2 (op, type, arg1, arg2); } static void @@ -1444,21 +1620,20 @@ lookup_field (tree *typep, tree name) } do { - tree field, basetype_vec; + tree field, binfo, base_binfo; tree save_field; - int n, i; + int i; for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field)) if (DECL_NAME (field) == name) return field; /* Process implemented interfaces. 
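The COMPARE_L_EXPR/COMPARE_G_EXPR expansion a couple of hunks above implements the JVM's fcmpl/fcmpg (and dcmpl/dcmpg) pair; the two variants differ only in which way an unordered (NaN) operand collapses. A standalone C sketch of the "g" variant matching the nested COND_EXPRs built there (function name invented):

    #include <math.h>
    #include <stdio.h>

    /* fcmpg: a NaN operand yields +1; fcmpl would yield -1 instead.  */
    static int
    fcmpg (float a, float b)
    {
      if (a < b)
        return -1;
      return a == b ? 0 : 1;    /* NaN falls through to +1.  */
    }

    int
    main (void)
    {
      printf ("%d %d\n", fcmpg (1.0f, 2.0f), fcmpg (NAN, 0.0f));  /* -1 1 */
      return 0;
    }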
*/ - basetype_vec = TYPE_BINFO_BASETYPES (*typep); - n = TREE_VEC_LENGTH (basetype_vec); save_field = NULL_TREE; - for (i = 0; i < n; i++) + for (binfo = TYPE_BINFO (*typep), i = 0; + BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) { - tree t = BINFO_TYPE (TREE_VEC_ELT (basetype_vec, i)); + tree t = BINFO_TYPE (base_binfo); if ((field = lookup_field (&t, name))) { if (save_field == field) @@ -1469,7 +1644,7 @@ lookup_field (tree *typep, tree name) { tree i1 = DECL_CONTEXT (save_field); tree i2 = DECL_CONTEXT (field); - error ("reference `%s' is ambiguous: appears in interface `%s' and interface `%s'", + error ("reference %qs is ambiguous: appears in interface %qs and interface %qs", IDENTIFIER_POINTER (name), IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))), IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2)))); @@ -1497,7 +1672,7 @@ build_field_ref (tree self_value, tree self_class, tree name) tree field_decl = lookup_field (&base_class, name); if (field_decl == NULL_TREE) { - error ("field `%s' not found", IDENTIFIER_POINTER (name)); + error ("field %qs not found", IDENTIFIER_POINTER (name)); return error_mark_node; } if (self_value == NULL_TREE) @@ -1506,17 +1681,67 @@ build_field_ref (tree self_value, tree self_class, tree name) } else { - int check = (flag_check_references - && ! (DECL_P (self_value) - && DECL_NAME (self_value) == this_identifier_node)); - tree base_type = promote_type (base_class); + + /* CHECK is true if self_value is not the this pointer. */ + int check = (! (DECL_P (self_value) + && DECL_NAME (self_value) == this_identifier_node)); + + /* Determine whether a field offset from NULL will lie within + Page 0: this is necessary on those GNU/Linux/BSD systems that + trap SEGV to generate NullPointerExceptions. + + We assume that Page 0 will be mapped with NOPERM, and that + memory may be allocated from any other page, so only field + offsets < pagesize are guaranteed to trap. We also assume + the smallest page size we'll encounter is 4k bytes. */ + if (! flag_syntax_only && check && ! flag_check_references + && ! flag_indirect_dispatch) + { + tree field_offset = byte_position (field_decl); + if (! page_size) + page_size = size_int (4096); + check = ! INT_CST_LT_UNSIGNED (field_offset, page_size); + } + if (base_type != TREE_TYPE (self_value)) - self_value = fold (build1 (NOP_EXPR, base_type, self_value)); + self_value = fold_build1 (NOP_EXPR, base_type, self_value); + if (! 
flag_syntax_only && flag_indirect_dispatch) + { + tree otable_index + = build_int_cst (NULL_TREE, get_symbol_table_index + (field_decl, NULL_TREE, + &TYPE_OTABLE_METHODS (output_class))); + tree field_offset + = build4 (ARRAY_REF, integer_type_node, + TYPE_OTABLE_DECL (output_class), otable_index, + NULL_TREE, NULL_TREE); + tree address; + + if (DECL_CONTEXT (field_decl) != output_class) + field_offset + = build3 (COND_EXPR, TREE_TYPE (field_offset), + build2 (EQ_EXPR, boolean_type_node, + field_offset, integer_zero_node), + build3 (CALL_EXPR, void_type_node, + build_address_of (soft_nosuchfield_node), + build_tree_list (NULL_TREE, otable_index), + NULL_TREE), + field_offset); + + field_offset = fold (convert (sizetype, field_offset)); + self_value = java_check_reference (self_value, check); + address + = fold_build2 (PLUS_EXPR, + build_pointer_type (TREE_TYPE (field_decl)), + self_value, field_offset); + return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address); + } + self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)), self_value, check); - return fold (build (COMPONENT_REF, TREE_TYPE (field_decl), - self_value, field_decl)); + return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl), + self_value, field_decl, NULL_TREE); } } @@ -1525,7 +1750,9 @@ lookup_label (int pc) { tree name; char buf[32]; - ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", pc); + if (pc > highest_label_pc_this_method) + highest_label_pc_this_method = pc; + ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc); name = get_identifier (buf); if (IDENTIFIER_LOCAL_VALUE (name)) return IDENTIFIER_LOCAL_VALUE (name); @@ -1534,7 +1761,6 @@ lookup_label (int pc) /* The type of the address of a label is return_address_type_node. */ tree decl = create_label_decl (name); LABEL_PC (decl) = pc; - label_rtx (decl); return pushdecl (decl); } } @@ -1581,10 +1807,11 @@ expand_compare (enum tree_code condition, tree value1, tree value2, int target_pc) { tree target = lookup_label (target_pc); - tree cond = fold (build (condition, boolean_type_node, value1, value2)); - expand_start_cond (java_truthvalue_conversion (cond), 0); - expand_goto (target); - expand_end_cond (); + tree cond = fold_build2 (condition, boolean_type_node, value1, value2); + java_add_stmt + (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond), + build1 (GOTO_EXPR, void_type_node, target), + build_java_empty_stmt ())); } /* Emit code for a TEST-type opcode. */ @@ -1618,32 +1845,43 @@ expand_java_goto (int target_pc) { tree target_label = lookup_label (target_pc); flush_quick_stack (); - expand_goto (target_label); + java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label)); } -#if 0 -static void -expand_java_call (int target_pc, int return_address) - int target_pc, return_address; +static tree +expand_java_switch (tree selector, int default_pc) { - tree target_label = lookup_label (target_pc); - tree value = build_int_2 (return_address, return_address < 0 ? 
-1 : 0); - push_value (value); + tree switch_expr, x; + flush_quick_stack (); - expand_goto (target_label); + switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector, + NULL_TREE, NULL_TREE); + java_add_stmt (switch_expr); + + x = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, NULL_TREE, + create_artificial_label ()); + append_to_statement_list (x, &SWITCH_BODY (switch_expr)); + + x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc)); + append_to_statement_list (x, &SWITCH_BODY (switch_expr)); + + return switch_expr; } static void -expand_java_ret (tree return_address ATTRIBUTE_UNUSED) +expand_java_add_case (tree switch_expr, int match, int target_pc) { - warning ("ret instruction not implemented"); -#if 0 - tree target_label = lookup_label (target_pc); - flush_quick_stack (); - expand_goto (target_label); -#endif + tree value, x; + + value = build_int_cst (TREE_TYPE (switch_expr), match); + + x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE, + create_artificial_label ()); + append_to_statement_list (x, &SWITCH_BODY (switch_expr)); + + x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc)); + append_to_statement_list (x, &SWITCH_BODY (switch_expr)); } -#endif static tree pop_arguments (tree arg_types) @@ -1655,13 +1893,53 @@ pop_arguments (tree arg_types) tree tail = pop_arguments (TREE_CHAIN (arg_types)); tree type = TREE_VALUE (arg_types); tree arg = pop_value (type); - if (PROMOTE_PROTOTYPES - && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node) - && INTEGRAL_TYPE_P (type)) + + /* We simply cast each argument to its proper type. This is + needed since we lose type information coming out of the + verifier. We also have to do this when we pop an integer + type that must be promoted for the function call. */ + if (TREE_CODE (type) == POINTER_TYPE) + arg = build1 (NOP_EXPR, type, arg); + else if (targetm.calls.promote_prototypes (type) + && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node) + && INTEGRAL_TYPE_P (type)) arg = convert (integer_type_node, arg); return tree_cons (NULL_TREE, arg, tail); } - abort (); + gcc_unreachable (); +} + +/* Attach to PTR (a block) the declaration found in ENTRY. */ + +int +attach_init_test_initialization_flags (void **entry, void *ptr) +{ + tree block = (tree)ptr; + struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry; + + if (block != error_mark_node) + { + if (TREE_CODE (block) == BIND_EXPR) + { + tree body = BIND_EXPR_BODY (block); + TREE_CHAIN (ite->value) = BIND_EXPR_VARS (block); + BIND_EXPR_VARS (block) = ite->value; + body = build2 (COMPOUND_EXPR, void_type_node, + build1 (DECL_EXPR, void_type_node, ite->value), body); + BIND_EXPR_BODY (block) = body; + } + else + { + tree body = BLOCK_SUBBLOCKS (block); + TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block); + BLOCK_EXPR_DECLS (block) = ite->value; + body = build2 (COMPOUND_EXPR, void_type_node, + build1 (DECL_EXPR, void_type_node, ite->value), body); + BLOCK_SUBBLOCKS (block) = body; + } + + } + return true; } /* Build an expression to initialize the class CLAS. 
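What build_class_init (next hunk) emits is essentially a call to the class initializer guarded by a per-method flag, so later references to the same class within that method skip the call. Expressed as standalone C with invented names and a simplified runtime prototype, the generated tree corresponds roughly to:

    extern void _Jv_InitClass (void *klass);   /* simplified prototype         */
    extern void *class_Foo;                    /* stands in for the class ref  */

    static void
    use_Foo_statics (void)
    {
      _Bool Foo_init_test = 0;        /* DECL_INITIAL is boolean_false_node   */

      if (Foo_init_test == 0)
        _Jv_InitClass (class_Foo);
      Foo_init_test = 1;              /* set unconditionally afterwards       */

      /* ... the static field reference or NEW being guarded goes here ...  */
    }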
@@ -1684,15 +1962,16 @@ build_class_init (tree clas, tree expr) if (always_initialize_class_p) { - init = build (CALL_EXPR, void_type_node, - build_address_of (soft_initclass_node), - build_tree_list (NULL_TREE, build_class_ref (clas)), - NULL_TREE); + init = build3 (CALL_EXPR, void_type_node, + build_address_of (soft_initclass_node), + build_tree_list (NULL_TREE, build_class_ref (clas)), + NULL_TREE); TREE_SIDE_EFFECTS (init) = 1; } else { tree *init_test_decl; + tree decl; init_test_decl = java_treetreehash_new (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas); @@ -1700,53 +1979,163 @@ build_class_init (tree clas, tree expr) { /* Build a declaration and mark it as a flag used to track static class initializations. */ - *init_test_decl = build_decl (VAR_DECL, NULL_TREE, - boolean_type_node); - MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (*init_test_decl); - LOCAL_CLASS_INITIALIZATION_FLAG (*init_test_decl) = 1; - DECL_CONTEXT (*init_test_decl) = current_function_decl; - DECL_FUNCTION_INIT_TEST_CLASS (*init_test_decl) = clas; + decl = build_decl (VAR_DECL, NULL_TREE, + boolean_type_node); + MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl); + LOCAL_CLASS_INITIALIZATION_FLAG (decl) = 1; + DECL_CONTEXT (decl) = current_function_decl; + DECL_FUNCTION_INIT_TEST_CLASS (decl) = clas; /* Tell the check-init code to ignore this decl when not optimizing class initialization. */ if (!STATIC_CLASS_INIT_OPT_P ()) - DECL_BIT_INDEX(*init_test_decl) = -1; + DECL_BIT_INDEX (decl) = -1; + DECL_INITIAL (decl) = boolean_false_node; + /* Don't emit any symbolic debugging info for this decl. */ + DECL_IGNORED_P (decl) = 1; + *init_test_decl = decl; } - init = build (CALL_EXPR, void_type_node, - build_address_of (soft_initclass_node), - build_tree_list (NULL_TREE, build_class_ref (clas)), - NULL_TREE); + init = build3 (CALL_EXPR, void_type_node, + build_address_of (soft_initclass_node), + build_tree_list (NULL_TREE, build_class_ref (clas)), + NULL_TREE); TREE_SIDE_EFFECTS (init) = 1; - init = build (COND_EXPR, void_type_node, - build (EQ_EXPR, boolean_type_node, - *init_test_decl, boolean_false_node), - init, integer_zero_node); + init = build3 (COND_EXPR, void_type_node, + build2 (EQ_EXPR, boolean_type_node, + *init_test_decl, boolean_false_node), + init, integer_zero_node); TREE_SIDE_EFFECTS (init) = 1; - init = build (COMPOUND_EXPR, TREE_TYPE (expr), init, - build (MODIFY_EXPR, boolean_type_node, - *init_test_decl, boolean_true_node)); + init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, + build2 (MODIFY_EXPR, boolean_type_node, + *init_test_decl, boolean_true_node)); TREE_SIDE_EFFECTS (init) = 1; } if (expr != NULL_TREE) { - expr = build (COMPOUND_EXPR, TREE_TYPE (expr), init, expr); + expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr); TREE_SIDE_EFFECTS (expr) = 1; return expr; } return init; } + + +/* Rewrite expensive calls that require stack unwinding at runtime to + cheaper alternatives. The logic here performs these + transformations: + + java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$) + java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$) + +*/ + +typedef struct +{ + const char *classname; + const char *method; + const char *signature; + const char *new_signature; + int flags; + tree (*rewrite_arglist) (tree arglist); +} rewrite_rule; + +/* Add this.class to the end of an arglist. 
*/ + +static tree +rewrite_arglist_getclass (tree arglist) +{ + return chainon (arglist, + tree_cons (NULL_TREE, build_class_ref (output_class), NULL_TREE)); +} + +static rewrite_rule rules[] = + {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;", + "(Ljava/lang/Class;)Ljava/lang/ClassLoader;", + ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass}, + {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;", + "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;", + ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass}, + {NULL, NULL, NULL, NULL, 0, NULL}}; + +/* Scan the rules list for replacements for *METHOD_P and replace the + args accordingly. If the rewrite results in an access to a private + method, update SPECIAL.*/ + +void +maybe_rewrite_invocation (tree *method_p, tree *arg_list_p, + tree *method_signature_p, tree *special) +{ + tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p))); + rewrite_rule *p; + *special = NULL_TREE; + + for (p = rules; p->classname; p++) + { + if (get_identifier (p->classname) == context) + { + tree method = DECL_NAME (*method_p); + if (get_identifier (p->method) == method + && get_identifier (p->signature) == *method_signature_p) + { + tree maybe_method + = lookup_java_method (DECL_CONTEXT (*method_p), + method, + get_identifier (p->new_signature)); + if (! maybe_method && ! flag_verify_invocations) + { + maybe_method + = add_method (DECL_CONTEXT (*method_p), p->flags, + method, get_identifier (p->new_signature)); + DECL_EXTERNAL (maybe_method) = 1; + } + *method_p = maybe_method; + gcc_assert (*method_p); + *arg_list_p = p->rewrite_arglist (*arg_list_p); + *method_signature_p = get_identifier (p->new_signature); + *special = integer_one_node; + + break; + } + } + } +} + + + tree build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED, tree self_type, tree method_signature ATTRIBUTE_UNUSED, - tree arg_list ATTRIBUTE_UNUSED) + tree arg_list ATTRIBUTE_UNUSED, tree special) { tree func; if (is_compiled_class (self_type)) { - make_decl_rtl (method, NULL); - func = build1 (ADDR_EXPR, method_ptr_type_node, method); + /* With indirect dispatch we have to use indirect calls for all + publicly visible methods or gcc will use PLT indirections + to reach them. We also have to use indirect dispatch for all + external methods. */ + if (! flag_indirect_dispatch + || (! DECL_EXTERNAL (method) && ! 
TREE_PUBLIC (method))) + { + func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)), + method); + } + else + { + tree table_index + = build_int_cst (NULL_TREE, + (get_symbol_table_index + (method, special, + &TYPE_ATABLE_METHODS (output_class)))); + func + = build4 (ARRAY_REF, + TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))), + TYPE_ATABLE_DECL (output_class), table_index, + NULL_TREE, NULL_TREE); + } + func = convert (method_ptr_type_node, func); } else { @@ -1770,8 +2159,9 @@ build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED, ncode_ident = get_identifier ("ncode"); if (methods_ident == NULL_TREE) methods_ident = get_identifier ("methods"); - ref = build (COMPONENT_REF, method_ptr_type_node, ref, - lookup_field (&class_type_node, methods_ident)); + ref = build3 (COMPONENT_REF, method_ptr_type_node, ref, + lookup_field (&class_type_node, methods_ident), + NULL_TREE); for (meth = TYPE_METHODS (self_type); ; meth = TREE_CHAIN (meth)) { @@ -1783,12 +2173,12 @@ build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED, method_index++; } method_index *= int_size_in_bytes (method_type_node); - ref = fold (build (PLUS_EXPR, method_ptr_type_node, - ref, build_int_2 (method_index, 0))); + ref = fold_build2 (PLUS_EXPR, method_ptr_type_node, + ref, build_int_cst (NULL_TREE, method_index)); ref = build1 (INDIRECT_REF, method_type_node, ref); - func = build (COMPONENT_REF, nativecode_ptr_type_node, - ref, - lookup_field (&method_type_node, ncode_ident)); + func = build3 (COMPONENT_REF, nativecode_ptr_type_node, + ref, lookup_field (&method_type_node, ncode_ident), + NULL_TREE); } return func; } @@ -1804,41 +2194,44 @@ invoke_build_dtable (int is_invoke_interface, tree arg_list) argument is an array then get the dispatch table of the class Object rather than the one from the objectref. */ objectref = (is_invoke_interface - && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list))) ? - object_type_node : TREE_VALUE (arg_list)); - + && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list))) + ? build_class_ref (object_type_node) : TREE_VALUE (arg_list)); + if (dtable_ident == NULL_TREE) dtable_ident = get_identifier ("vtable"); dtable = build_java_indirect_ref (object_type_node, objectref, flag_check_references); - dtable = build (COMPONENT_REF, dtable_ptr_type, dtable, - lookup_field (&object_type_node, dtable_ident)); + dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable, + lookup_field (&object_type_node, dtable_ident), NULL_TREE); return dtable; } -/* Determine the index in the virtual offset table (otable) for a call to - METHOD. If this method has not been seen before, it will be added to the - otable_methods. If it has, the existing otable slot will be reused. */ +/* Determine the index in SYMBOL_TABLE for a reference to the decl + T. If this decl has not been seen before, it will be added to the + [oa]table_methods. If it has, the existing table slot will be + reused. 
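The index returned by get_symbol_table_index (below) ends up as a constant subscript into the class's otable/atable/itable arrays, which is what build_known_method_ref and build_field_ref use under -findirect-dispatch: the compiled code never contains the callee's address, only a slot that the runtime linker fills in when the class is linked. A hedged standalone sketch of the call-site shape, with all names invented:

    typedef int (*int_fn) (int);
    extern int_fn Foo_atable[];   /* stands in for TYPE_ATABLE_DECL (output_class) */

    static int
    call_through_atable (int arg)
    {
      /* The compiler emits only the index; the runtime linker stores
         the resolved code address into the slot at link time.  */
      return Foo_atable[3] (arg);
    }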
*/ int -get_offset_table_index (tree method) +get_symbol_table_index (tree t, tree special, tree *symbol_table) { int i = 1; tree method_list; - - if (otable_methods == NULL_TREE) + + if (*symbol_table == NULL_TREE) { - otable_methods = build_tree_list (method, method); + *symbol_table = build_tree_list (special, t); return 1; } - method_list = otable_methods; + method_list = *symbol_table; while (1) { - if (TREE_VALUE (method_list) == method) - return i; + tree value = TREE_VALUE (method_list); + tree purpose = TREE_PURPOSE (method_list); + if (value == t && purpose == special) + return i; i++; if (TREE_CHAIN (method_list) == NULL_TREE) break; @@ -1846,12 +2239,12 @@ get_offset_table_index (tree method) method_list = TREE_CHAIN (method_list); } - TREE_CHAIN (method_list) = build_tree_list (method, method); + TREE_CHAIN (method_list) = build_tree_list (special, t); return i; } tree -build_invokevirtual (tree dtable, tree method) +build_invokevirtual (tree dtable, tree method, tree special) { tree func; tree nativecode_ptr_ptr_type_node @@ -1861,31 +2254,32 @@ build_invokevirtual (tree dtable, tree method) if (flag_indirect_dispatch) { - otable_index = build_int_2 (get_offset_table_index (method), 0); - method_index = build (ARRAY_REF, integer_type_node, otable_decl, - otable_index); + gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))); + + otable_index + = build_int_cst (NULL_TREE, get_symbol_table_index + (method, special, + &TYPE_OTABLE_METHODS (output_class))); + method_index = build4 (ARRAY_REF, integer_type_node, + TYPE_OTABLE_DECL (output_class), + otable_index, NULL_TREE, NULL_TREE); } else { - method_index = convert (sizetype, DECL_VINDEX (method)); - - if (TARGET_VTABLE_USES_DESCRIPTORS) - /* Add one to skip bogus descriptor for class and GC descriptor. */ - method_index = size_binop (PLUS_EXPR, method_index, size_int (1)); - else - /* Add 1 to skip "class" field of dtable, and 1 to skip GC descriptor. */ - method_index = size_binop (PLUS_EXPR, method_index, size_int (2)); - + /* We fetch the DECL_VINDEX field directly here, rather than + using get_method_index(). DECL_VINDEX is the true offset + from the vtable base to a method, regrdless of any extra + words inserted at the start of the vtable. */ + method_index = DECL_VINDEX (method); method_index = size_binop (MULT_EXPR, method_index, TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node)); - if (TARGET_VTABLE_USES_DESCRIPTORS) method_index = size_binop (MULT_EXPR, method_index, size_int (TARGET_VTABLE_USES_DESCRIPTORS)); } - func = fold (build (PLUS_EXPR, nativecode_ptr_ptr_type_node, dtable, - convert (nativecode_ptr_ptr_type_node, method_index))); + func = fold_build2 (PLUS_EXPR, nativecode_ptr_ptr_type_node, dtable, + convert (nativecode_ptr_ptr_type_node, method_index)); if (TARGET_VTABLE_USES_DESCRIPTORS) func = build1 (NOP_EXPR, nativecode_ptr_type_node, func); @@ -1902,73 +2296,80 @@ build_invokeinterface (tree dtable, tree method) tree lookup_arg; tree interface; tree idx; - tree meth; - tree otable_index; - int i; - /* We expand invokeinterface here. _Jv_LookupInterfaceMethod() will - ensure that the selected method exists, is public and not - abstract nor static. */ + /* We expand invokeinterface here. 
*/ if (class_ident == NULL_TREE) class_ident = get_identifier ("class"); dtable = build_java_indirect_ref (dtable_type, dtable, flag_check_references); - dtable = build (COMPONENT_REF, class_ptr_type, dtable, - lookup_field (&dtable_type, class_ident)); + dtable = build3 (COMPONENT_REF, class_ptr_type, dtable, + lookup_field (&dtable_type, class_ident), NULL_TREE); interface = DECL_CONTEXT (method); - if (! CLASS_INTERFACE (TYPE_NAME (interface))) - abort (); + gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface))); layout_class_methods (interface); if (flag_indirect_dispatch) { - otable_index = build_int_2 (get_offset_table_index (method), 0); - idx = build (ARRAY_REF, integer_type_node, otable_decl, otable_index); + int itable_index + = 2 * (get_symbol_table_index + (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class))); + interface + = build4 (ARRAY_REF, + TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))), + TYPE_ITABLE_DECL (output_class), + build_int_cst (NULL_TREE, itable_index-1), + NULL_TREE, NULL_TREE); + idx + = build4 (ARRAY_REF, + TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))), + TYPE_ITABLE_DECL (output_class), + build_int_cst (NULL_TREE, itable_index), + NULL_TREE, NULL_TREE); + interface = convert (class_ptr_type, interface); + idx = convert (integer_type_node, idx); } else { - i = 1; - for (meth = TYPE_METHODS (interface); ; meth = TREE_CHAIN (meth), i++) - { - if (meth == method) - { - idx = build_int_2 (i, 0); - break; - } - if (meth == NULL_TREE) - abort (); - } + idx = build_int_cst (NULL_TREE, + get_interface_method_index (method, interface)); + interface = build_class_ref (interface); } - - lookup_arg = tree_cons (NULL_TREE, dtable, - tree_cons (NULL_TREE, build_class_ref (interface), - build_tree_list (NULL_TREE, idx))); - return build (CALL_EXPR, ptr_type_node, - build_address_of (soft_lookupinterfacemethod_node), - lookup_arg, NULL_TREE); + lookup_arg = tree_cons (NULL_TREE, dtable, + tree_cons (NULL_TREE, interface, + build_tree_list (NULL_TREE, idx))); + + return build3 (CALL_EXPR, ptr_type_node, + build_address_of (soft_lookupinterfacemethod_node), + lookup_arg, NULL_TREE); } /* Expand one of the invoke_* opcodes. - OCPODE is the specific opcode. + OPCODE is the specific opcode. METHOD_REF_INDEX is an index into the constant pool. NARGS is the number of arguments, or -1 if not specified. */ static void expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED) { - tree method_signature = COMPONENT_REF_SIGNATURE(¤t_jcf->cpool, method_ref_index); - tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool, method_ref_index); - tree self_type = get_class_constant - (current_jcf, COMPONENT_REF_CLASS_INDEX(¤t_jcf->cpool, method_ref_index)); + tree method_signature + = COMPONENT_REF_SIGNATURE(¤t_jcf->cpool, method_ref_index); + tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool, + method_ref_index); + tree self_type + = get_class_constant (current_jcf, + COMPONENT_REF_CLASS_INDEX(¤t_jcf->cpool, + method_ref_index)); const char *const self_name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type))); tree call, func, method, arg_list, method_type; tree check = NULL_TREE; + tree special = NULL_TREE; + if (! 
CLASS_LOADED_P (self_type)) { load_class (self_type, 1); @@ -1982,38 +2383,94 @@ expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED) method = lookup_java_constructor (self_type, method_signature); else method = lookup_java_method (self_type, method_name, method_signature); + + /* We've found a method in a class other than the one in which it + was wanted. This can happen if, for instance, we're trying to + compile invokespecial super.equals(). + FIXME: This is a kludge. Rather than nullifying the result, we + should change lookup_java_method() so that it doesn't search the + superclass chain when we're BC-compiling. */ + if (! flag_verify_invocations + && method + && ! TYPE_ARRAY_P (self_type) + && self_type != DECL_CONTEXT (method)) + method = NULL_TREE; + + /* We've found a method in an interface, but this isn't an interface + call. */ + if (opcode != OPCODE_invokeinterface + && method + && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))) + method = NULL_TREE; + + /* We've found a non-interface method but we are making an + interface call. This can happen if the interface overrides a + method in Object. */ + if (! flag_verify_invocations + && opcode == OPCODE_invokeinterface + && method + && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))) + method = NULL_TREE; + if (method == NULL_TREE) { - error ("class '%s' has no method named '%s' matching signature '%s'", - self_name, - IDENTIFIER_POINTER (method_name), - IDENTIFIER_POINTER (method_signature)); - } - /* Invoke static can't invoke static/abstract method */ - else if (opcode == OPCODE_invokestatic) - { - if (!METHOD_STATIC (method)) + if (flag_verify_invocations || ! flag_indirect_dispatch) { - error ("invokestatic on non static method"); - method = NULL_TREE; + error ("class '%s' has no method named '%s' matching signature '%s'", + self_name, + IDENTIFIER_POINTER (method_name), + IDENTIFIER_POINTER (method_signature)); } - else if (METHOD_ABSTRACT (method)) + else { - error ("invokestatic on abstract method"); - method = NULL_TREE; + int flags = ACC_PUBLIC; + if (opcode == OPCODE_invokestatic) + flags |= ACC_STATIC; + if (opcode == OPCODE_invokeinterface) + { + flags |= ACC_INTERFACE | ACC_ABSTRACT; + CLASS_INTERFACE (TYPE_NAME (self_type)) = 1; + } + method = add_method (self_type, flags, method_name, + method_signature); + DECL_ARTIFICIAL (method) = 1; + METHOD_DUMMY (method) = 1; + layout_class_method (self_type, NULL, + method, NULL); } } - else + + /* Invoke static can't invoke static/abstract method */ + if (method != NULL_TREE) { - if (METHOD_STATIC (method)) + if (opcode == OPCODE_invokestatic) { - error ("invoke[non-static] on static method"); - method = NULL_TREE; + if (!METHOD_STATIC (method)) + { + error ("invokestatic on non static method"); + method = NULL_TREE; + } + else if (METHOD_ABSTRACT (method)) + { + error ("invokestatic on abstract method"); + method = NULL_TREE; + } + } + else + { + if (METHOD_STATIC (method)) + { + error ("invoke[non-static] on static method"); + method = NULL_TREE; + } } } if (method == NULL_TREE) { + /* If we got here, we emitted an error message above. So we + just pop the arguments, push a properly-typed zero, and + continue. 
*/ method_type = get_type_from_signature (method_signature); pop_arguments (TYPE_ARG_TYPES (method_type)); if (opcode != OPCODE_invokestatic) @@ -2027,10 +2484,13 @@ expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED) arg_list = pop_arguments (TYPE_ARG_TYPES (method_type)); flush_quick_stack (); + maybe_rewrite_invocation (&method, &arg_list, &method_signature, + &special); + func = NULL_TREE; if (opcode == OPCODE_invokestatic) func = build_known_method_ref (method, method_type, self_type, - method_signature, arg_list); + method_signature, arg_list, special); else if (opcode == OPCODE_invokespecial || (opcode == OPCODE_invokevirtual && (METHOD_PRIVATE (method) @@ -2050,31 +2510,36 @@ expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED) TREE_VALUE (arg_list) = save_arg; check = java_check_reference (save_arg, ! DECL_INIT_P (method)); func = build_known_method_ref (method, method_type, self_type, - method_signature, arg_list); + method_signature, arg_list, special); } else { tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface, arg_list); if (opcode == OPCODE_invokevirtual) - func = build_invokevirtual (dtable, method); + func = build_invokevirtual (dtable, method, special); else func = build_invokeinterface (dtable, method); } - func = build1 (NOP_EXPR, build_pointer_type (method_type), func); + + if (TREE_CODE (func) == ADDR_EXPR) + TREE_TYPE (func) = build_pointer_type (method_type); + else + func = build1 (NOP_EXPR, build_pointer_type (method_type), func); - call = build (CALL_EXPR, TREE_TYPE (method_type), func, arg_list, NULL_TREE); + call = build3 (CALL_EXPR, TREE_TYPE (method_type), + func, arg_list, NULL_TREE); TREE_SIDE_EFFECTS (call) = 1; call = check_for_builtin (method, call); if (check != NULL_TREE) { - call = build (COMPOUND_EXPR, TREE_TYPE (call), check, call); + call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call); TREE_SIDE_EFFECTS (call) = 1; } if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE) - expand_expr_stmt (call); + java_add_stmt (call); else { push_value (call); @@ -2093,6 +2558,7 @@ build_jni_stub (tree method) tree env_var, res_var = NULL_TREE, block; tree method_args, res_type; tree meth_var; + tree bind; int args_size = 0; @@ -2100,8 +2566,7 @@ build_jni_stub (tree method) int from_class = ! CLASS_FROM_SOURCE_P (klass); klass = build_class_ref (klass); - if (! METHOD_NATIVE (method) || ! flag_jni) - abort (); + gcc_assert (METHOD_NATIVE (method) && flag_jni); DECL_ARTIFICIAL (method) = 1; DECL_EXTERNAL (method) = 0; @@ -2126,9 +2591,7 @@ build_jni_stub (tree method) DECL_INITIAL (meth_var) = null_pointer_node; TREE_USED (meth_var) = 1; chainon (env_var, meth_var); - layout_decl (meth_var, 0); - make_decl_rtl (meth_var, NULL); - rest_of_decl_compilation (meth_var, NULL, 0, 0); + build_result_decl (method); /* One strange way that the front ends are different is that they store arguments differently. */ @@ -2136,8 +2599,7 @@ build_jni_stub (tree method) method_args = DECL_ARGUMENTS (method); else method_args = BLOCK_EXPR_DECLS (DECL_FUNCTION_BODY (method)); - block = build_block (env_var, NULL_TREE, NULL_TREE, - method_args, NULL_TREE); + block = build_block (env_var, NULL_TREE, method_args, NULL_TREE); TREE_SIDE_EFFECTS (block) = 1; /* When compiling from source we don't set the type of the block, because that will prevent patch_return from ever being run. 
*/ @@ -2145,11 +2607,11 @@ build_jni_stub (tree method) TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method)); /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */ - body = build (MODIFY_EXPR, ptr_type_node, env_var, - build (CALL_EXPR, ptr_type_node, - build_address_of (soft_getjnienvnewframe_node), - build_tree_list (NULL_TREE, klass), - NULL_TREE)); + body = build2 (MODIFY_EXPR, ptr_type_node, env_var, + build3 (CALL_EXPR, ptr_type_node, + build_address_of (soft_getjnienvnewframe_node), + build_tree_list (NULL_TREE, klass), + NULL_TREE)); CAN_COMPLETE_NORMALLY (body) = 1; /* All the arguments to this method become arguments to the @@ -2158,7 +2620,7 @@ build_jni_stub (tree method) args = NULL_TREE; for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem)) { - int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (tem))); + int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem))); #ifdef PARM_BOUNDARY arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY) * PARM_BOUNDARY); @@ -2188,7 +2650,7 @@ build_jni_stub (tree method) /* We call _Jv_LookupJNIMethod to find the actual underlying function pointer. _Jv_LookupJNIMethod will throw the appropriate exception if this function is not found at runtime. */ - tem = build_tree_list (NULL_TREE, build_int_2 (args_size, 0)); + tem = build_tree_list (NULL_TREE, build_int_cst (NULL_TREE, args_size)); method_sig = build_java_signature (TREE_TYPE (method)); lookup_arg = tree_cons (NULL_TREE, build_utf8_ref (unmangle_classname @@ -2208,77 +2670,96 @@ build_jni_stub (tree method) jni_func_type = build_pointer_type (tem); - jnifunc = build (COND_EXPR, ptr_type_node, - meth_var, meth_var, - build (MODIFY_EXPR, ptr_type_node, - meth_var, - build (CALL_EXPR, ptr_type_node, - build_address_of (soft_lookupjnimethod_node), - lookup_arg, NULL_TREE))); + jnifunc = build3 (COND_EXPR, ptr_type_node, + meth_var, meth_var, + build2 (MODIFY_EXPR, ptr_type_node, meth_var, + build3 (CALL_EXPR, ptr_type_node, + build_address_of + (soft_lookupjnimethod_node), + lookup_arg, NULL_TREE))); /* Now we make the actual JNI call via the resulting function pointer. */ - call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (method)), - build1 (NOP_EXPR, jni_func_type, jnifunc), - args, NULL_TREE); + call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (method)), + build1 (NOP_EXPR, jni_func_type, jnifunc), + args, NULL_TREE); /* If the JNI call returned a result, capture it here. If we had to unwrap JNI object results, we would do that here. */ if (res_var != NULL_TREE) - call = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)), - res_var, call); + { + /* If the call returns an object, it may return a JNI weak + reference, in which case we must unwrap it. */ + if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method)))) + call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (method)), + build_address_of (soft_unwrapjni_node), + build_tree_list (NULL_TREE, call), + NULL_TREE); + call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)), + res_var, call); + } TREE_SIDE_EFFECTS (call) = 1; CAN_COMPLETE_NORMALLY (call) = 1; - body = build (COMPOUND_EXPR, void_type_node, body, call); + body = build2 (COMPOUND_EXPR, void_type_node, body, call); TREE_SIDE_EFFECTS (body) = 1; /* Now free the environment we allocated. 
*/ - call = build (CALL_EXPR, ptr_type_node, - build_address_of (soft_jnipopsystemframe_node), - build_tree_list (NULL_TREE, env_var), - NULL_TREE); + call = build3 (CALL_EXPR, ptr_type_node, + build_address_of (soft_jnipopsystemframe_node), + build_tree_list (NULL_TREE, env_var), + NULL_TREE); TREE_SIDE_EFFECTS (call) = 1; CAN_COMPLETE_NORMALLY (call) = 1; - body = build (COMPOUND_EXPR, void_type_node, body, call); + body = build2 (COMPOUND_EXPR, void_type_node, body, call); TREE_SIDE_EFFECTS (body) = 1; - /* Finally, do the return. When compiling from source we rely on - patch_return to patch the return value -- because DECL_RESULT is - not set at the time this function is called. */ - if (from_class) - { - res_type = void_type_node; - if (res_var != NULL_TREE) - { - tree drt; - if (! DECL_RESULT (method)) - abort (); - /* Make sure we copy the result variable to the actual - result. We use the type of the DECL_RESULT because it - might be different from the return type of the function: - it might be promoted. */ - drt = TREE_TYPE (DECL_RESULT (method)); - if (drt != TREE_TYPE (res_var)) - res_var = build1 (CONVERT_EXPR, drt, res_var); - res_var = build (MODIFY_EXPR, drt, DECL_RESULT (method), res_var); - TREE_SIDE_EFFECTS (res_var) = 1; - } - } - else + /* Finally, do the return. */ + res_type = void_type_node; + if (res_var != NULL_TREE) { - /* This is necessary to get patch_return to run. */ - res_type = NULL_TREE; - } - body = build (COMPOUND_EXPR, void_type_node, body, - build1 (RETURN_EXPR, res_type, res_var)); + tree drt; + gcc_assert (DECL_RESULT (method)); + /* Make sure we copy the result variable to the actual + result. We use the type of the DECL_RESULT because it + might be different from the return type of the function: + it might be promoted. */ + drt = TREE_TYPE (DECL_RESULT (method)); + if (drt != TREE_TYPE (res_var)) + res_var = build1 (CONVERT_EXPR, drt, res_var); + res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var); + TREE_SIDE_EFFECTS (res_var) = 1; + } + + body = build2 (COMPOUND_EXPR, void_type_node, body, + build1 (RETURN_EXPR, res_type, res_var)); TREE_SIDE_EFFECTS (body) = 1; + + bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block), + body, block); + return bind; +} - BLOCK_EXPR_BODY (block) = body; - return block; + +/* Given lvalue EXP, return a volatile expression that references the + same object. */ + +tree +java_modify_addr_for_volatile (tree exp) +{ + tree exp_type = TREE_TYPE (exp); + tree v_type + = build_qualified_type (exp_type, + TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE); + tree addr = build_fold_addr_expr (exp); + v_type = build_pointer_type (v_type); + addr = fold_convert (v_type, addr); + exp = build_fold_indirect_ref (addr); + return exp; } + /* Expand an operation to extract from or store into a field. IS_STATIC is 1 iff the field is static. IS_PUTTING is 1 for putting into a field; 0 for getting from the field. 
@@ -2287,11 +2768,12 @@ build_jni_stub (tree method) static void expand_java_field_op (int is_static, int is_putting, int field_ref_index) { - tree self_type = - get_class_constant (current_jcf, - COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool, - field_ref_index)); - const char *self_name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type))); + tree self_type + = get_class_constant (current_jcf, + COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool, + field_ref_index)); + const char *self_name + = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type))); tree field_name = COMPONENT_REF_NAME (¤t_jcf->cpool, field_ref_index); tree field_signature = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool, field_ref_index); @@ -2299,16 +2781,43 @@ expand_java_field_op (int is_static, int is_putting, int field_ref_index) tree new_value = is_putting ? pop_value (field_type) : NULL_TREE; tree field_ref; int is_error = 0; - tree field_decl = lookup_field (&self_type, field_name); + tree original_self_type = self_type; + tree field_decl; + tree modify_expr; + + if (! CLASS_LOADED_P (self_type)) + load_class (self_type, 1); + field_decl = lookup_field (&self_type, field_name); if (field_decl == error_mark_node) { is_error = 1; } else if (field_decl == NULL_TREE) { - error ("missing field '%s' in '%s'", - IDENTIFIER_POINTER (field_name), self_name); - is_error = 1; + if (! flag_verify_invocations) + { + int flags = ACC_PUBLIC; + if (is_static) + flags |= ACC_STATIC; + self_type = original_self_type; + field_decl = add_field (original_self_type, field_name, + field_type, flags); + DECL_ARTIFICIAL (field_decl) = 1; + DECL_IGNORED_P (field_decl) = 1; +#if 0 + /* FIXME: We should be pessimistic about volatility. We + don't know one way or another, but this is safe. + However, doing this has bad effects on code quality. We + need to look at better ways to do this. */ + TREE_THIS_VOLATILE (field_decl) = 1; +#endif + } + else + { + error ("missing field '%s' in '%s'", + IDENTIFIER_POINTER (field_name), self_name); + is_error = 1; + } } else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature) { @@ -2326,7 +2835,8 @@ expand_java_field_op (int is_static, int is_putting, int field_ref_index) } field_ref = build_field_ref (field_ref, self_type, field_name); - if (is_static) + if (is_static + && ! flag_indirect_dispatch) field_ref = build_class_init (self_type, field_ref); if (is_putting) { @@ -2334,26 +2844,51 @@ expand_java_field_op (int is_static, int is_putting, int field_ref_index) if (FIELD_FINAL (field_decl)) { if (DECL_CONTEXT (field_decl) != current_class) - error_with_decl (field_decl, - "assignment to final field `%s' not in field's class"); - else if (FIELD_STATIC (field_decl)) - { - if (!DECL_CLINIT_P (current_function_decl)) - warning_with_decl (field_decl, - "assignment to final static field `%s' not in class initializer"); - } - else - { - tree cfndecl_name = DECL_NAME (current_function_decl); - if (! DECL_CONSTRUCTOR_P (current_function_decl) - && !ID_FINIT_P (cfndecl_name)) - warning_with_decl (field_decl, "assignment to final field `%s' not in constructor"); - } - } - expand_assignment (field_ref, new_value, 0, 0); + error ("assignment to final field %q+D not in field's class", + field_decl); + /* We used to check for assignments to final fields not + occurring in the class initializer or in a constructor + here. However, this constraint doesn't seem to be + enforced by the JVM. 
*/ + } + + if (TREE_THIS_VOLATILE (field_decl)) + field_ref = java_modify_addr_for_volatile (field_ref); + + modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), + field_ref, new_value); + + if (TREE_THIS_VOLATILE (field_decl)) + java_add_stmt + (build3 + (CALL_EXPR, void_type_node, + build_address_of (built_in_decls[BUILT_IN_SYNCHRONIZE]), + NULL_TREE, NULL_TREE)); + + java_add_stmt (modify_expr); } else - push_value (field_ref); + { + tree temp = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (field_ref)); + java_add_local_var (temp); + + if (TREE_THIS_VOLATILE (field_decl)) + field_ref = java_modify_addr_for_volatile (field_ref); + + modify_expr + = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref); + java_add_stmt (modify_expr); + + if (TREE_THIS_VOLATILE (field_decl)) + java_add_stmt + (build3 + (CALL_EXPR, void_type_node, + build_address_of (built_in_decls[BUILT_IN_SYNCHRONIZE]), + NULL_TREE, NULL_TREE)); + + push_value (temp); + } + TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl); } void @@ -2367,231 +2902,6 @@ load_type_state (tree label) type_map [i] = TREE_VEC_ELT (vec, i); } -/* Do the expansion of a Java switch. With Gcc, switches are front-end - dependent things, but they rely on gcc routines. This function is - placed here because it uses things defined locally in parse.y. */ - -static tree -case_identity (tree t __attribute__ ((__unused__)), tree v) -{ - return v; -} - -/* Return the name of the vtable for an array of a given primitive - type. */ -static tree -get_primitive_array_vtable (tree elt) -{ - tree r; - if (elt == boolean_type_node) - r = boolean_array_vtable; - else if (elt == byte_type_node) - r = byte_array_vtable; - else if (elt == char_type_node) - r = char_array_vtable; - else if (elt == short_type_node) - r = short_array_vtable; - else if (elt == int_type_node) - r = int_array_vtable; - else if (elt == long_type_node) - r = long_array_vtable; - else if (elt == float_type_node) - r = float_array_vtable; - else if (elt == double_type_node) - r = double_array_vtable; - else - abort (); - return build_address_of (r); -} - -struct rtx_def * -java_expand_expr (tree exp, rtx target, enum machine_mode tmode, - int modifier /* Actually an enum expand_modifier. */) -{ - tree current; - - switch (TREE_CODE (exp)) - { - case NEW_ARRAY_INIT: - { - rtx tmp; - tree array_type = TREE_TYPE (TREE_TYPE (exp)); - tree element_type = TYPE_ARRAY_ELEMENT (array_type); - tree data_fld = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (array_type))); - HOST_WIDE_INT ilength = java_array_type_length (array_type); - tree length = build_int_2 (ilength, 0); - tree init = TREE_OPERAND (exp, 0); - tree array_decl; - - /* See if we can generate the array statically. */ - if (TREE_CONSTANT (init) && TREE_STATIC (exp) - && JPRIMITIVE_TYPE_P (element_type)) - { - tree temp, value, init_decl; - struct rtx_def *r; - START_RECORD_CONSTRUCTOR (temp, object_type_node); - PUSH_FIELD_VALUE (temp, "vtable", - get_primitive_array_vtable (element_type)); - if (! 
flag_hash_synchronization) - PUSH_FIELD_VALUE (temp, "sync_info", null_pointer_node); - FINISH_RECORD_CONSTRUCTOR (temp); - START_RECORD_CONSTRUCTOR (value, array_type); - PUSH_SUPER_VALUE (value, temp); - PUSH_FIELD_VALUE (value, "length", length); - PUSH_FIELD_VALUE (value, "data", init); - FINISH_RECORD_CONSTRUCTOR (value); - - init_decl = build_decl (VAR_DECL, generate_name (), array_type); - pushdecl_top_level (init_decl); - TREE_STATIC (init_decl) = 1; - DECL_INITIAL (init_decl) = value; - DECL_IGNORED_P (init_decl) = 1; - TREE_READONLY (init_decl) = 1; - /* Hash synchronization requires at least 64-bit alignment. */ - if (flag_hash_synchronization && POINTER_SIZE < 64) - DECL_ALIGN (init_decl) = 64; - rest_of_decl_compilation (init_decl, NULL, 1, 0); - TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (init_decl)) = 1; - init = build1 (ADDR_EXPR, TREE_TYPE (exp), init_decl); - r = expand_expr (init, target, tmode, modifier); - return r; - } - - array_decl = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp)); - expand_decl (array_decl); - tmp = expand_assignment (array_decl, - build_new_array (element_type, length), - 1, 0); - if (TREE_CONSTANT (init) - && ilength >= 10 && JPRIMITIVE_TYPE_P (element_type)) - { - tree init_decl; - init_decl = build_decl (VAR_DECL, generate_name (), - TREE_TYPE (init)); - pushdecl_top_level (init_decl); - TREE_STATIC (init_decl) = 1; - DECL_INITIAL (init_decl) = init; - DECL_IGNORED_P (init_decl) = 1; - TREE_READONLY (init_decl) = 1; - rest_of_decl_compilation (init_decl, NULL, 1, 0); - TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (init_decl)) = 1; - init = init_decl; - } - expand_assignment (build (COMPONENT_REF, TREE_TYPE (data_fld), - build_java_indirect_ref (array_type, - array_decl, flag_check_references), - data_fld), init, 0, 0); - return tmp; - } - case BLOCK: - if (BLOCK_EXPR_BODY (exp)) - { - tree local; - rtx last; - tree body = BLOCK_EXPR_BODY (exp); - /* Set to 1 or more when we found a static class - initialization flag. */ - int found_class_initialization_flag = 0; - - pushlevel (2); /* 2 and above */ - expand_start_bindings (0); - local = BLOCK_EXPR_DECLS (exp); - while (local) - { - tree next = TREE_CHAIN (local); - found_class_initialization_flag += - LOCAL_CLASS_INITIALIZATION_FLAG_P (local); - layout_decl (local, 0); - expand_decl (pushdecl (local)); - local = next; - } - - /* Emit initialization code for test flags if we saw one. */ - if (! always_initialize_class_p - && current_function_decl - && found_class_initialization_flag) - htab_traverse - (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), - emit_init_test_initialization, NULL); - - /* Avoid deep recursion for long block. 
*/ - while (TREE_CODE (body) == COMPOUND_EXPR) - { - expand_expr (TREE_OPERAND (body, 0), const0_rtx, VOIDmode, 0); - emit_queue (); - body = TREE_OPERAND (body, 1); - } - last = expand_expr (body, NULL_RTX, VOIDmode, 0); - emit_queue (); - expand_end_bindings (getdecls (), 1, 0); - poplevel (1, 1, 0); - return last; - } - return const0_rtx; - - case CASE_EXPR: - { - tree duplicate; - if (pushcase (TREE_OPERAND (exp, 0), case_identity, - build_decl (LABEL_DECL, NULL_TREE, NULL_TREE), - &duplicate) == 2) - { - EXPR_WFL_LINECOL (wfl_operator) = EXPR_WFL_LINECOL (exp); - parse_error_context - (wfl_operator, "Duplicate case label: `%s'", - print_int_node (TREE_OPERAND (exp, 0))); - } - return const0_rtx; - } - - case DEFAULT_EXPR: - pushcase (NULL_TREE, 0, - build_decl (LABEL_DECL, NULL_TREE, NULL_TREE), NULL); - return const0_rtx; - - case SWITCH_EXPR: - expand_start_case (0, TREE_OPERAND (exp, 0), int_type_node, "switch"); - expand_expr_stmt (TREE_OPERAND (exp, 1)); - expand_end_case (TREE_OPERAND (exp, 0)); - return const0_rtx; - - case TRY_EXPR: - /* We expand a try[-catch] block */ - - /* Expand the try block */ - expand_eh_region_start (); - expand_expr_stmt (TREE_OPERAND (exp, 0)); - expand_start_all_catch (); - - /* Expand all catch clauses (EH handlers) */ - for (current = TREE_OPERAND (exp, 1); current; - current = TREE_CHAIN (current)) - { - tree catch = TREE_OPERAND (current, 0); - tree decl = BLOCK_EXPR_DECLS (catch); - tree type = (decl ? TREE_TYPE (TREE_TYPE (decl)) : NULL_TREE); - - expand_start_catch (type); - expand_expr_stmt (TREE_OPERAND (current, 0)); - expand_end_catch (); - } - expand_end_all_catch (); - return const0_rtx; - - case JAVA_EXC_OBJ_EXPR: - return expand_expr (build_exception_object_ref (TREE_TYPE (exp)), - target, tmode, modifier); - - case LABEL_EXPR: - /* Used only by expanded inline functions. */ - expand_label (TREE_OPERAND (exp, 0)); - return const0_rtx; - - default: - internal_error ("can't expand %s", tree_code_name [TREE_CODE (exp)]); - } -} - /* Go over METHOD's bytecode and note instruction starts in instruction_bits[]. */ @@ -2760,7 +3070,7 @@ expand_byte_code (JCF *jcf, tree method) int pc = GET_u2 (linenumber_pointer); linenumber_pointer += 4; if (pc >= length) - warning ("invalid PC in line number table"); + warning (0, "invalid PC in line number table"); else { if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0) @@ -2769,10 +3079,12 @@ expand_byte_code (JCF *jcf, tree method) } } - if (! verify_jvm_instructions (jcf, byte_ops, length)) + if (! verify_jvm_instructions_new (jcf, byte_ops, length)) return; - /* Translate bytecodes to rtl instructions. */ + promote_arguments (); + + /* Translate bytecodes. */ linenumber_pointer = linenumber_table; for (PC = 0; PC < length;) { @@ -2781,7 +3093,7 @@ expand_byte_code (JCF *jcf, tree method) tree label = lookup_label (PC); flush_quick_stack (); if ((instruction_bits [PC] & BCODE_TARGET) != 0) - expand_label (label); + java_add_stmt (build1 (LABEL_EXPR, void_type_node, label)); if (LABEL_VERIFIED (label) || PC == 0) load_type_state (label); } @@ -2805,8 +3117,9 @@ expand_byte_code (JCF *jcf, tree method) if (dead_code_index != -1) { /* We've just reached the end of a region of dead code. 
*/ - warning ("unreachable bytecode from %d to before %d", - dead_code_index, PC); + if (extra_warnings) + warning (0, "unreachable bytecode from %d to before %d", + dead_code_index, PC); dead_code_index = -1; } } @@ -2827,8 +3140,12 @@ expand_byte_code (JCF *jcf, tree method) linenumber_pointer += 4; if (pc == PC) { - lineno = GET_u2 (linenumber_pointer - 2); - emit_line_note (input_filename, lineno); + int line = GET_u2 (linenumber_pointer - 2); +#ifdef USE_MAPPED_LOCATION + input_location = linemap_line_start (&line_table, line, 1); +#else + input_location.line = line; +#endif if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS)) break; } @@ -2842,8 +3159,9 @@ expand_byte_code (JCF *jcf, tree method) if (dead_code_index != -1) { /* We've just reached the end of a region of dead code. */ - warning ("unreachable bytecode from %d to the end of the method", - dead_code_index); + if (extra_warnings) + warning (0, "unreachable bytecode from %d to the end of the method", + dead_code_index); } } @@ -2857,7 +3175,13 @@ java_push_constant_from_pool (JCF *jcf, int index) name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index)); index = alloc_name_constant (CONSTANT_String, name); c = build_ref_from_constant_pool (index); - TREE_TYPE (c) = promote_type (string_type_node); + c = convert (promote_type (string_type_node), c); + } + else if (JPOOL_TAG (jcf, index) == CONSTANT_Class + || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass) + { + tree record = get_class_constant (jcf, index); + c = build_class_ref (record); } else c = get_constant (jcf, index); @@ -2871,12 +3195,18 @@ process_jvm_instruction (int PC, const unsigned char* byte_ops, const char *opname; /* Temporary ??? */ int oldpc = PC; /* PC at instruction start. */ - /* If the instruction is at the beginning of a exception handler, - replace the top of the stack with the thrown object reference */ + /* If the instruction is at the beginning of an exception handler, + replace the top of the stack with the thrown object reference. */ if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET) { - tree type = pop_type (ptr_type_node); - push_value (build (JAVA_EXC_OBJ_EXPR, type)); + /* Note that the verifier will not emit a type map at all for + dead exception handlers. In this case we just ignore the + situation. */ + if ((instruction_bits[PC] & BCODE_VERIFIED) != 0) + { + tree type = pop_type (promote_type (throwable_type_node)); + push_value (build_exception_object_ref (type)); + } } switch (byte_ops[PC++]) @@ -2891,7 +3221,8 @@ process_jvm_instruction (int PC, const unsigned char* byte_ops, { \ int saw_index = 0; \ int index = OPERAND_VALUE; \ - build_java_ret (find_local_variable (index, ptr_type_node, oldpc)); \ + build_java_ret \ + (find_local_variable (index, return_address_type_node, oldpc)); \ } #define JSR(OPERAND_TYPE, OPERAND_VALUE) \ @@ -2978,46 +3309,24 @@ process_jvm_instruction (int PC, const unsigned char* byte_ops, #define LOOKUP_SWITCH \ { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \ tree selector = pop_value (INT_type_node); \ - tree duplicate, label; \ - tree type = TREE_TYPE (selector); \ - flush_quick_stack (); \ - expand_start_case (0, selector, type, "switch statement");\ + tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \ while (--npairs >= 0) \ { \ jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \ - tree value = build_int_2 (match, match < 0 ? 
-1 : 0); \ - TREE_TYPE (value) = type; \ - label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); \ - pushcase (value, convert, label, &duplicate); \ - expand_java_goto (oldpc + offset); \ + expand_java_add_case (switch_expr, match, oldpc + offset); \ } \ - label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); \ - pushcase (NULL_TREE, 0, label, &duplicate); \ - expand_java_goto (oldpc + default_offset); \ - expand_end_case (selector); \ } #define TABLE_SWITCH \ { jint default_offset = IMMEDIATE_s4; \ jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \ tree selector = pop_value (INT_type_node); \ - tree duplicate, label; \ - tree type = TREE_TYPE (selector); \ - flush_quick_stack (); \ - expand_start_case (0, selector, type, "switch statement");\ + tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \ for (; low <= high; low++) \ { \ jint offset = IMMEDIATE_s4; \ - tree value = build_int_2 (low, low < 0 ? -1 : 0); \ - TREE_TYPE (value) = type; \ - label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); \ - pushcase (value, convert, label, &duplicate); \ - expand_java_goto (oldpc + offset); \ + expand_java_add_case (switch_expr, low, oldpc + offset); \ } \ - label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); \ - pushcase (NULL_TREE, 0, label, &duplicate); \ - expand_java_goto (oldpc + default_offset); \ - expand_end_case (selector); \ } #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \ @@ -3064,8 +3373,8 @@ process_jvm_instruction (int PC, const unsigned char* byte_ops, } #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \ - push_value (fold (build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \ - pop_value (OPERAND_TYPE##_type_node)))); + push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \ + pop_value (OPERAND_TYPE##_type_node))); #define CONVERT2(FROM_TYPE, TO_TYPE) \ { \ @@ -3082,16 +3391,16 @@ process_jvm_instruction (int PC, const unsigned char* byte_ops, /* internal macro added for use by the WIDE case Added TREE_TYPE (decl) assignment, apbianco */ -#define STORE_INTERNAL(OPTYPE, OPVALUE) \ - { \ - tree decl, value; \ - int var = OPVALUE; \ - tree type = OPTYPE; \ - value = pop_value (type); \ - type = TREE_TYPE (value); \ - decl = find_local_variable (var, type, oldpc); \ - set_local_type (var, type ); \ - expand_assignment (decl, value, 0, 0); \ +#define STORE_INTERNAL(OPTYPE, OPVALUE) \ + { \ + tree decl, value; \ + int index = OPVALUE; \ + tree type = OPTYPE; \ + value = pop_value (type); \ + type = TREE_TYPE (value); \ + decl = find_local_variable (index, type, oldpc); \ + set_local_type (index, type); \ + java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \ } #define STORE(OPERAND_TYPE, OPERAND_VALUE) \ @@ -3114,7 +3423,7 @@ process_jvm_instruction (int PC, const unsigned char* byte_ops, flush_quick_stack (); \ c = build_java_monitor (call, o); \ TREE_SIDE_EFFECTS (c) = 1; \ - expand_expr_stmt (c); \ + java_add_stmt (c); \ } #define SPECIAL_IINC(IGNORED) \ @@ -3207,7 +3516,7 @@ peek_opcode_at_pc (JCF *jcf, int code_offset, int pc) This function is used by `give_name_to_locals' so that a local's DECL features a DECL_LOCAL_START_PC such that the first related - store operation will use DECL as a destination, not a unrelated + store operation will use DECL as a destination, not an unrelated temporary created for the occasion. 
This function uses a global (instruction_bits) `note_instructions' should @@ -3315,29 +3624,22 @@ maybe_adjust_start_pc (struct JCF *jcf, int code_offset, For method invocation, we modify the arguments so that a left-to-right order evaluation is performed. Saved expressions will, in CALL_EXPR order, be reused when the call will be expanded. -*/ + + We also promote outgoing args if needed. */ tree force_evaluation_order (tree node) { if (flag_syntax_only) return node; - if (TREE_CODE_CLASS (TREE_CODE (node)) == '2') - { - if (TREE_SIDE_EFFECTS (TREE_OPERAND (node, 1))) - TREE_OPERAND (node, 0) = save_expr (TREE_OPERAND (node, 0)); - } - else if (TREE_CODE (node) == CALL_EXPR - || TREE_CODE (node) == NEW_CLASS_EXPR - || (TREE_CODE (node) == COMPOUND_EXPR - && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR - && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR)) + if (TREE_CODE (node) == CALL_EXPR + || TREE_CODE (node) == NEW_CLASS_EXPR + || (TREE_CODE (node) == COMPOUND_EXPR + && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR + && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR)) { tree arg, cmp; - if (!TREE_OPERAND (node, 1)) - return node; - arg = node; /* Position arg properly, account for wrapped around ctors. */ @@ -3346,16 +3648,29 @@ force_evaluation_order (tree node) arg = TREE_OPERAND (arg, 1); - /* Not having a list of argument here is an error. */ - if (TREE_CODE (arg) != TREE_LIST) - abort (); + /* An empty argument list is ok, just ignore it. */ + if (!arg) + return node; + + /* Not having a list of arguments here is an error. */ + gcc_assert (TREE_CODE (arg) == TREE_LIST); /* This reverses the evaluation order. This is a desired effect. */ for (cmp = NULL_TREE; arg; arg = TREE_CHAIN (arg)) { - tree saved = save_expr (force_evaluation_order (TREE_VALUE (arg))); + /* Promote types smaller than integer. This is required by + some ABIs. */ + tree type = TREE_TYPE (TREE_VALUE (arg)); + tree saved; + if (targetm.calls.promote_prototypes (type) + && INTEGRAL_TYPE_P (type) + && INT_CST_LT_UNSIGNED (TYPE_SIZE (type), + TYPE_SIZE (integer_type_node))) + TREE_VALUE (arg) = fold_convert (integer_type_node, TREE_VALUE (arg)); + + saved = save_expr (force_evaluation_order (TREE_VALUE (arg))); cmp = (cmp == NULL_TREE ? saved : - build (COMPOUND_EXPR, void_type_node, cmp, saved)); + build2 (COMPOUND_EXPR, void_type_node, cmp, saved)); TREE_VALUE (arg) = saved; } @@ -3364,7 +3679,9 @@ force_evaluation_order (tree node) if (cmp) { - cmp = save_expr (build (COMPOUND_EXPR, TREE_TYPE (node), cmp, node)); + cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node); + if (TREE_TYPE (cmp) != void_type_node) + cmp = save_expr (cmp); CAN_COMPLETE_NORMALLY (cmp) = CAN_COMPLETE_NORMALLY (node); TREE_SIDE_EFFECTS (cmp) = 1; node = cmp; @@ -3373,37 +3690,119 @@ force_evaluation_order (tree node) return node; } -/* Called for every element in DECL_FUNCTION_INIT_TEST_TABLE of a - method in order to emit initialization code for each test flag. */ +/* EXPR_WITH_FILE_LOCATION are used to keep track of the exact + location where an expression or an identifier were encountered. It + is necessary for languages where the frontend parser will handle + recursively more than one file (Java is one of them). 
*/ -static int -emit_init_test_initialization (void **entry, void *x ATTRIBUTE_UNUSED) +tree +build_expr_wfl (tree node, +#ifdef USE_MAPPED_LOCATION + source_location location +#else + const char *file, int line, int col +#endif +) { - struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry; - tree klass = build_class_ref (ite->key); - tree rhs; - - /* If the DECL_INITIAL of the test flag is set to true, it - means that the class is already initialized the time it - is in use. */ - if (DECL_INITIAL (ite->value) == boolean_true_node) - rhs = boolean_true_node; - /* Otherwise, we initialize the class init check variable by looking - at the `state' field of the class to see if it is already - initialized. This makes things a bit faster if the class is - already initialized, which should be the common case. */ - else - rhs = build (GE_EXPR, boolean_type_node, - build (COMPONENT_REF, byte_type_node, - build1 (INDIRECT_REF, class_type_node, klass), - lookup_field (&class_type_node, - get_identifier ("state"))), - build_int_2 (JV_STATE_DONE, 0)); - - expand_expr_stmt (build (MODIFY_EXPR, boolean_type_node, - ite->value, rhs)); - return true; + tree wfl; + +#ifdef USE_MAPPED_LOCATION + wfl = make_node (EXPR_WITH_FILE_LOCATION); + SET_EXPR_LOCATION (wfl, location); +#else + static const char *last_file = 0; + static tree last_filenode = NULL_TREE; + + wfl = make_node (EXPR_WITH_FILE_LOCATION); + + EXPR_WFL_SET_LINECOL (wfl, line, col); + if (file != last_file) + { + last_file = file; + last_filenode = file ? get_identifier (file) : NULL_TREE; + } + EXPR_WFL_FILENAME_NODE (wfl) = last_filenode; +#endif + EXPR_WFL_NODE (wfl) = node; + if (node) + { + if (!TYPE_P (node)) + TREE_SIDE_EFFECTS (wfl) = TREE_SIDE_EFFECTS (node); + TREE_TYPE (wfl) = TREE_TYPE (node); + } + + return wfl; } -#include "gt-java-expr.h" +#ifdef USE_MAPPED_LOCATION +tree +expr_add_location (tree node, source_location location, bool statement) +{ + tree wfl; +#if 0 + /* FIXME. This optimization causes failures in code that expects an + EXPR_WITH_FILE_LOCATION. E.g. in resolve_qualified_expression_name. */ + if (node && ! (statement && flag_emit_class_files)) + { + source_location node_loc = EXPR_LOCATION (node); + if (node_loc == location || location == UNKNOWN_LOCATION) + return node; + if (node_loc == UNKNOWN_LOCATION + && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (node)))) + { + SET_EXPR_LOCATION (node, location); + return node; + } + } +#endif + wfl = make_node (EXPR_WITH_FILE_LOCATION); + SET_EXPR_LOCATION (wfl, location); + EXPR_WFL_NODE (wfl) = node; + if (statement && debug_info_level != DINFO_LEVEL_NONE) + EXPR_WFL_EMIT_LINE_NOTE (wfl) = 1; + if (node) + { + if (!TYPE_P (node)) + TREE_SIDE_EFFECTS (wfl) = TREE_SIDE_EFFECTS (node); + TREE_TYPE (wfl) = TREE_TYPE (node); + } + + return wfl; +} +#endif + +/* Build a node to represent empty statements and blocks. */ + +tree +build_java_empty_stmt (void) +{ + tree t = build_empty_stmt (); + CAN_COMPLETE_NORMALLY (t) = 1; + return t; +} + +/* Promote all args of integral type before generating any code. 
*/
+
+static void
+promote_arguments (void)
+{
+  int i;
+  tree arg;
+  for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
+       arg != NULL_TREE; arg = TREE_CHAIN (arg), i++)
+    {
+      tree arg_type = TREE_TYPE (arg);
+      if (INTEGRAL_TYPE_P (arg_type)
+          && TYPE_PRECISION (arg_type) < 32)
+        {
+          tree copy = find_local_variable (i, integer_type_node, -1);
+          java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
+                                 copy,
+                                 fold_convert (integer_type_node, arg)));
+        }
+      if (TYPE_IS_WIDE (arg_type))
+        i++;
+    }
+}
+#include "gt-java-expr.h"
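
Illustrative sketch, not part of the patch: the promote_arguments code above copies every incoming integral argument narrower than 32 bits into an int-typed local slot before any bytecode is translated, and a wide (two-slot) argument such as long advances the slot index by two. In plain C, the effect for a hypothetical bytecode method with descriptor (SJB)V is roughly the following; the function name, slot names, and values are made up for the example.

#include <stdio.h>

/* Hypothetical method taking (short, long, byte); local slots are
   0 = s, 1-2 = l (wide, two slots), 3 = b.  */
static void
example_method (short s, long long l, signed char b)
{
  /* Equivalent of the MODIFY_EXPRs emitted by promote_arguments:
     each sub-int integral argument is widened into an int local, so
     later code operates on full-width values.  */
  int slot0 = (int) s;
  int slot3 = (int) b;   /* index skipped slots 1-2 for the wide arg */

  printf ("%d %lld %d\n", slot0, l, slot3);
}

int
main (void)
{
  example_method ((short) -5, 1LL << 40, (signed char) 7);
  return 0;
}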