/* Process expressions for the GNU compiler for the Java(TM) language.
- Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
+ Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
Free Software Foundation, Inc.
This file is part of GCC.
/* A free-list of unused permanent TREE_LIST nodes. */
static GTY((deletable)) tree tree_list_free_list;
+/* The physical memory page size used in this computer. See
+ build_field_ref(). */
+static GTY(()) tree page_size;
+
/* The stack pointer of the Java virtual machine.
This does include the size of the quick_stack. */
case COND_EXPR:
/* Distribute the conversion into the arms of a COND_EXPR. */
- return fold
- (build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
- java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
- java_truthvalue_conversion (TREE_OPERAND (expr, 2))));
+ return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
+ java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
+ java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
case NOP_EXPR:
/* If this is widening the argument, we can ignore it. */
/* fall through to default */
default:
- return fold (build2 (NE_EXPR, boolean_type_node,
- expr, boolean_false_node));
+ return fold_build2 (NE_EXPR, boolean_type_node,
+ expr, boolean_false_node);
}
}
void
push_type (tree type)
{
- if (! push_type_0 (type))
- abort ();
+ int r = push_type_0 (type);
+ gcc_assert (r);
}
static void
return t;
if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
{
- if (flag_new_verifier)
- {
- /* Since the verifier has already run, we know that any
- types we see will be compatible. In BC mode, this fact
- may be checked at runtime, but if that is so then we can
- assume its truth here as well. So, we always succeed
- here, with the expected type. */
- return type;
- }
- else
- {
- if (type == ptr_type_node || type == object_ptr_type_node)
- return t;
- else if (t == ptr_type_node) /* Special case for null reference. */
- return type;
- /* This is a kludge, but matches what Sun's verifier does.
- It can be tricked, but is safe as long as type errors
- (i.e. interface method calls) are caught at run-time. */
- else if (CLASS_INTERFACE (TYPE_NAME (TREE_TYPE (type))))
- return object_ptr_type_node;
- else if (can_widen_reference_to (t, type))
- return t;
- }
+ /* If the expected type we've been passed is object or ptr
+ (i.e. void*), the caller needs to know the real type. */
+ if (type == ptr_type_node || type == object_ptr_type_node)
+ return t;
+
+ /* Since the verifier has already run, we know that any
+ types we see will be compatible. In BC mode, this fact
+ may be checked at runtime, but if that is so then we can
+ assume its truth here as well. So, we always succeed
+ here, with the expected type. */
+ return type;
}
if (! flag_verify_invocations && flag_indirect_dispatch
{
tree type, val;
- if (stack_pointer == 0)
- abort ();
+ gcc_assert (stack_pointer != 0);
type = stack_type_map[stack_pointer - 1];
if (type == TYPE_SECOND)
{
count--;
- if (stack_pointer == 1 || count <= 0)
- abort ();
+ gcc_assert (stack_pointer != 1 && count > 0);
type = stack_type_map[stack_pointer - 2];
}
|| TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
/* Bad stack swap. */
abort ();
+ /* Bad stack swap. */
flush_quick_stack ();
decl1 = find_stack_slot (stack_pointer - 1, type1);
type = stack_type_map [src_index];
if (type == TYPE_SECOND)
{
- if (src_index <= low_index)
- /* Dup operation splits 64-bit number. */
- abort ();
+ /* Dup operation splits 64-bit number. */
+ gcc_assert (src_index > low_index);
stack_type_map[dst_index] = type;
src_index--; dst_index--;
type = stack_type_map[src_index];
- if (! TYPE_IS_WIDE (type))
- abort ();
+ gcc_assert (TYPE_IS_WIDE (type));
}
- else if (TYPE_IS_WIDE (type))
- abort ();
+ else
+ gcc_assert (! TYPE_IS_WIDE (type));
if (src_index != dst_index)
{
{
tree where = lookup_label (target_pc);
tree ret = lookup_label (return_pc);
- tree ret_label = fold (build1 (ADDR_EXPR, return_address_type_node, ret));
+ tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
push_value (ret_label);
flush_quick_stack ();
java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
else if (type == long_type_node)
return 11;
else
- abort ();
+ gcc_unreachable ();
}
/* Build a call to _Jv_ThrowBadArrayIndex(), the
tree test;
tree len = convert (unsigned_int_type_node,
build_java_array_length_access (array));
- test = fold (build2 (GE_EXPR, boolean_type_node,
- convert (unsigned_int_type_node, index),
- len));
+ test = fold_build2 (GE_EXPR, boolean_type_node,
+ convert (unsigned_int_type_node, index),
+ len);
if (! integer_zerop (test))
{
throw = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
}
else
{
- if (! is_array_type_p (array_type_p))
- abort ();
+ gcc_assert (is_array_type_p (array_type_p));
/* Get the TYPE_DECL for ARRAY's element type. */
element_type
= TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
}
- if (TREE_CODE (element_type) != TYPE_DECL
- || TREE_CODE (object_type) != TYPE_DECL)
- abort ();
+ gcc_assert (TREE_CODE (element_type) == TYPE_DECL
+ && TREE_CODE (object_type) == TYPE_DECL);
if (!flag_store_check)
return build1 (NOP_EXPR, array_type_p, array);
return unchanged. */
static tree
-build_java_check_indexed_type (tree array_node, tree indexed_type)
+build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
+ tree indexed_type)
{
- tree elt_type;
-
/* We used to check to see if ARRAY_NODE really had array type.
However, with the new verifier, this is not necessary, as we know
that the object will be an array of the appropriate type. */
- if (flag_new_verifier)
- return indexed_type;
-
- if (!is_array_type_p (TREE_TYPE (array_node)))
- abort ();
-
- elt_type = (TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (array_node))));
-
- if (indexed_type == ptr_type_node)
- return promote_type (elt_type);
-
- /* BYTE/BOOLEAN store and load are used for both type */
- if (indexed_type == byte_type_node && elt_type == boolean_type_node)
- return boolean_type_node;
-
- if (indexed_type != elt_type )
- abort ();
- else
- return indexed_type;
+ return indexed_type;
}
/* newarray triggers a call to _Jv_NewPrimArray. This function should be
tree index = pop_value (int_type_node);
tree array_type, array;
- if (flag_new_verifier)
+ /* If we're processing an `aaload' we might as well just pick
+ `Object'. */
+ if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
{
- /* If we're processing an `aaload' we might as well just pick
- `Object'. */
- if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
- {
- array_type = build_java_array_type (object_ptr_type_node, -1);
- rhs_type_node = object_ptr_type_node;
- }
- else
- array_type = build_java_array_type (rhs_type_node, -1);
+ array_type = build_java_array_type (object_ptr_type_node, -1);
+ rhs_type_node = object_ptr_type_node;
}
else
- array_type = ptr_type_node;
+ array_type = build_java_array_type (rhs_type_node, -1);
+
array = pop_value (array_type);
- if (flag_new_verifier)
- array = build1 (NOP_EXPR, promote_type (array_type), array);
+ array = build1 (NOP_EXPR, promote_type (array_type), array);
rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
tree array_type;
tree array_node;
- if (flag_new_verifier)
+ /* If we're processing an `aaload' we might as well just pick
+ `Object'. */
+ if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
{
- /* If we're processing an `aaload' we might as well just pick
- `Object'. */
- if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
- {
- array_type = build_java_array_type (object_ptr_type_node, -1);
- lhs_type_node = object_ptr_type_node;
- }
- else
- array_type = build_java_array_type (lhs_type_node, -1);
+ array_type = build_java_array_type (object_ptr_type_node, -1);
+ lhs_type_node = object_ptr_type_node;
}
else
- array_type = ptr_type_node;
+ array_type = build_java_array_type (lhs_type_node, -1);
array_node = pop_value (array_type);
- if (flag_new_verifier)
- array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
+ array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
index_node = save_expr (index_node);
array_node = save_expr (array_node);
lhs_type_node,
index_node);
if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
- load_node = fold (build1 (NOP_EXPR, int_type_node, load_node));
+ load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
push_value (load_node);
}
value = build_real (type, x);
}
else
- abort ();
+ gcc_unreachable ();
push_value (value);
}
? alloc_object_node
: alloc_no_finalizer_node);
- return build (CALL_EXPR, promote_type (type),
- build_address_of (alloc_node),
- build_tree_list (NULL_TREE, build_class_ref (type)),
- NULL_TREE);
+ return build3 (CALL_EXPR, promote_type (type),
+ build_address_of (alloc_node),
+ build_tree_list (NULL_TREE, build_class_ref (type)),
+ NULL_TREE);
}
static void
flush_quick_stack ();
local_var = find_local_variable (local_var_index, int_type_node, pc);
constant_value = build_int_cst (NULL_TREE, ival);
- res = fold (build2 (PLUS_EXPR, int_type_node, local_var, constant_value));
+ res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
- update_aliases (local_var, local_var_index, pc);
}
}
}
- if (! call)
- abort ();
-
+ gcc_assert (call);
call = build3 (CALL_EXPR, type,
build_address_of (call),
tree_cons (NULL_TREE, arg1,
case RSHIFT_EXPR:
mask = build_int_cst (NULL_TREE,
TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
- arg2 = fold (build2 (BIT_AND_EXPR, int_type_node, arg2, mask));
+ arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
break;
case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
arg1 = save_expr (arg1); arg2 = save_expr (arg2);
{
- tree ifexp1 = fold (build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
- boolean_type_node, arg1, arg2));
- tree ifexp2 = fold (build2 (EQ_EXPR, boolean_type_node, arg1, arg2));
- tree second_compare = fold (build3 (COND_EXPR, int_type_node,
- ifexp2, integer_zero_node,
- op == COMPARE_L_EXPR
- ? integer_minus_one_node
- : integer_one_node));
- return fold (build3 (COND_EXPR, int_type_node, ifexp1,
- op == COMPARE_L_EXPR ? integer_one_node
- : integer_minus_one_node,
- second_compare));
+ tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
+ boolean_type_node, arg1, arg2);
+ tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
+ tree second_compare = fold_build3 (COND_EXPR, int_type_node,
+ ifexp2, integer_zero_node,
+ op == COMPARE_L_EXPR
+ ? integer_minus_one_node
+ : integer_one_node);
+ return fold_build3 (COND_EXPR, int_type_node, ifexp1,
+ op == COMPARE_L_EXPR ? integer_one_node
+ : integer_minus_one_node,
+ second_compare);
}
case COMPARE_EXPR:
arg1 = save_expr (arg1); arg2 = save_expr (arg2);
{
- tree ifexp1 = fold (build2 (LT_EXPR, boolean_type_node, arg1, arg2));
- tree ifexp2 = fold (build2 (GT_EXPR, boolean_type_node, arg1, arg2));
- tree second_compare = fold (build3 (COND_EXPR, int_type_node,
- ifexp2, integer_one_node,
- integer_zero_node));
- return fold (build3 (COND_EXPR, int_type_node,
- ifexp1, integer_minus_one_node, second_compare));
+ tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
+ tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
+ tree second_compare = fold_build3 (COND_EXPR, int_type_node,
+ ifexp2, integer_one_node,
+ integer_zero_node);
+ return fold_build3 (COND_EXPR, int_type_node,
+ ifexp1, integer_minus_one_node, second_compare);
}
case TRUNC_DIV_EXPR:
case TRUNC_MOD_EXPR:
break;
default: ;
}
- return fold (build2 (op, type, arg1, arg2));
+ return fold_build2 (op, type, arg1, arg2);
}
static void
}
else
{
- int check = (flag_check_references
- && ! (DECL_P (self_value)
- && DECL_NAME (self_value) == this_identifier_node));
-
tree base_type = promote_type (base_class);
+
+ /* CHECK is true if self_value is not the this pointer. */
+ int check = (! (DECL_P (self_value)
+ && DECL_NAME (self_value) == this_identifier_node));
+
+ /* Determine whether a field offset from NULL will lie within
+ Page 0: this is necessary on those GNU/Linux/BSD systems that
+ trap SEGV to generate NullPointerExceptions.
+
+ We assume that Page 0 will be mapped with NOPERM, and that
+ memory may be allocated from any other page, so only field
+ offsets < pagesize are guaranteed to trap. We also assume
+ the smallest page size we'll encounter is 4k bytes. */
+ if (! flag_syntax_only && check && ! flag_check_references
+ && ! flag_indirect_dispatch)
+ {
+ tree field_offset = byte_position (field_decl);
+ if (! page_size)
+ page_size = size_int (4096);
+ check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
+ }
+
if (base_type != TREE_TYPE (self_value))
- self_value = fold (build1 (NOP_EXPR, base_type, self_value));
- if (! flag_syntax_only
- && (flag_indirect_dispatch
- /* DECL_FIELD_OFFSET == 0 if we have no reference for
- the field, perhaps because we couldn't find the class
- in which the field is defined.
- FIXME: We should investigate this. */
- || DECL_FIELD_OFFSET (field_decl) == 0))
+ self_value = fold_build1 (NOP_EXPR, base_type, self_value);
+ if (! flag_syntax_only && flag_indirect_dispatch)
{
tree otable_index
= build_int_cst (NULL_TREE, get_symbol_table_index
- (field_decl, &TYPE_OTABLE_METHODS (output_class)));
+ (field_decl, NULL_TREE,
+ &TYPE_OTABLE_METHODS (output_class)));
tree field_offset
= build4 (ARRAY_REF, integer_type_node,
TYPE_OTABLE_DECL (output_class), otable_index,
NULL_TREE, NULL_TREE);
tree address;
+ if (DECL_CONTEXT (field_decl) != output_class)
+ field_offset
+ = build3 (COND_EXPR, TREE_TYPE (field_offset),
+ build2 (EQ_EXPR, boolean_type_node,
+ field_offset, integer_zero_node),
+ build3 (CALL_EXPR, void_type_node,
+ build_address_of (soft_nosuchfield_node),
+ build_tree_list (NULL_TREE, otable_index),
+ NULL_TREE),
+ field_offset);
+
field_offset = fold (convert (sizetype, field_offset));
+ self_value = java_check_reference (self_value, check);
address
- = fold (build2 (PLUS_EXPR,
- build_pointer_type (TREE_TYPE (field_decl)),
- self_value, field_offset));
- return fold (build1 (INDIRECT_REF, TREE_TYPE (field_decl), address));
+ = fold_build2 (PLUS_EXPR,
+ build_pointer_type (TREE_TYPE (field_decl)),
+ self_value, field_offset);
+ return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
}
self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
self_value, check);
- return fold (build3 (COMPONENT_REF, TREE_TYPE (field_decl),
- self_value, field_decl, NULL_TREE));
+ return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
+ self_value, field_decl, NULL_TREE);
}
}
int target_pc)
{
tree target = lookup_label (target_pc);
- tree cond = fold (build2 (condition, boolean_type_node, value1, value2));
+ tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
java_add_stmt
(build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
build1 (GOTO_EXPR, void_type_node, target),
tree type = TREE_VALUE (arg_types);
tree arg = pop_value (type);
- /* With the new verifier we simply cast each argument to its
- proper type. This is needed since we lose type information
- coming out of the verifier. We also have to do this with the
- old verifier when we pop an integer type that must be
- promoted for the function call. */
- if (flag_new_verifier && TREE_CODE (type) == POINTER_TYPE)
+ /* We simply cast each argument to its proper type. This is
+ needed since we lose type information coming out of the
+ verifier. We also have to do this when we pop an integer
+ type that must be promoted for the function call. */
+ if (TREE_CODE (type) == POINTER_TYPE)
arg = build1 (NOP_EXPR, type, arg);
else if (targetm.calls.promote_prototypes (type)
&& TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
arg = convert (integer_type_node, arg);
return tree_cons (NULL_TREE, arg, tail);
}
- abort ();
+ gcc_unreachable ();
}
/* Attach to PTR (a block) the declaration found in ENTRY. */
return init;
}
+\f
+
+/* Rewrite expensive calls that require stack unwinding at runtime to
+ cheaper alternatives. The logic here performs these
+ transformations:
+
+ java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
+ java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
+
+*/
+
+/* Describes one call-rewrite rule: a target method selected by its
+   fully-qualified class name, method name, and signature; the
+   signature of the replacement; the access flags used if the
+   replacement method must be fabricated; and a hook that rewrites
+   the call's argument list to match the new signature.  */
+typedef struct
+{
+ const char *classname;
+ const char *method;
+ const char *signature;
+ const char *new_signature;
+ int flags;
+ tree (*rewrite_arglist) (tree arglist);
+} rewrite_rule;
+
+/* Append a reference to the class currently being compiled
+   (i.e. `this.class', via build_class_ref (output_class)) to the end
+   of ARGLIST and return the extended list.  Used by the rewrite
+   rules to supply the extra trailing Class argument.  */
+
+static tree
+rewrite_arglist_getclass (tree arglist)
+{
+ return chainon (arglist,
+ tree_cons (NULL_TREE, build_class_ref (output_class), NULL_TREE));
+}
+
+/* The table of rewrite rules consulted by maybe_rewrite_invocation.
+   Each entry maps a (class, method, old signature) triple to a new
+   signature taking an extra java.lang.Class argument, supplied by
+   rewrite_arglist_getclass.  The table ends with an all-NULL
+   sentinel entry.  */
+static rewrite_rule rules[] =
+ {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
+ "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
+ ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
+ {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
+ "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
+ ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
+ {NULL, NULL, NULL, NULL, 0, NULL}};
+
+/* Scan the rules list for replacements for *METHOD_P and replace the
+ args accordingly.  On a match, *METHOD_P, *ARG_LIST_P, and
+ *METHOD_SIGNATURE_P are all updated to describe the replacement
+ call.  If the rewrite results in an access to a private
+ method, update SPECIAL; otherwise *SPECIAL is set to NULL_TREE.  */
+
+void
+maybe_rewrite_invocation (tree *method_p, tree *arg_list_p,
+ tree *method_signature_p, tree *special)
+{
+ tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
+ rewrite_rule *p;
+ *special = NULL_TREE;
+
+ for (p = rules; p->classname; p++)
+ {
+ if (get_identifier (p->classname) == context)
+ {
+ tree method = DECL_NAME (*method_p);
+ if (get_identifier (p->method) == method
+ && get_identifier (p->signature) == *method_signature_p)
+ {
+ /* Look for an existing declaration of the replacement
+ method in the same class.  */
+ tree maybe_method
+ = lookup_java_method (DECL_CONTEXT (*method_p),
+ method,
+ get_identifier (p->new_signature));
+ /* When not verifying invocations (BC compilation) the
+ replacement may not have been seen yet: fabricate an
+ external declaration for it using the rule's flags.  */
+ if (! maybe_method && ! flag_verify_invocations)
+ {
+ maybe_method
+ = add_method (DECL_CONTEXT (*method_p), p->flags,
+ method, get_identifier (p->new_signature));
+ DECL_EXTERNAL (maybe_method) = 1;
+ }
+ *method_p = maybe_method;
+ /* A matched rule must always yield a method; failing
+ here indicates the replacement could not be found or
+ created.  */
+ gcc_assert (*method_p);
+ *arg_list_p = p->rewrite_arglist (*arg_list_p);
+ *method_signature_p = get_identifier (p->new_signature);
+ /* Mark the invocation as rewritten so callers treat the
+ (possibly private) replacement specially.  */
+ *special = integer_one_node;
+
+ break;
+ }
+ }
+ }
+}
+
+\f
+
tree
build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
tree self_type, tree method_signature ATTRIBUTE_UNUSED,
- tree arg_list ATTRIBUTE_UNUSED)
+ tree arg_list ATTRIBUTE_UNUSED, tree special)
{
tree func;
if (is_compiled_class (self_type))
else
{
tree table_index
- = build_int_cst (NULL_TREE, get_symbol_table_index
- (method, &TYPE_ATABLE_METHODS (output_class)));
+ = build_int_cst (NULL_TREE,
+ (get_symbol_table_index
+ (method, special,
+ &TYPE_ATABLE_METHODS (output_class))));
func
= build4 (ARRAY_REF,
TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
method_index++;
}
method_index *= int_size_in_bytes (method_type_node);
- ref = fold (build2 (PLUS_EXPR, method_ptr_type_node,
- ref, build_int_cst (NULL_TREE, method_index)));
+ ref = fold_build2 (PLUS_EXPR, method_ptr_type_node,
+ ref, build_int_cst (NULL_TREE, method_index));
ref = build1 (INDIRECT_REF, method_type_node, ref);
func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
ref, lookup_field (&method_type_node, ncode_ident),
reused. */
int
-get_symbol_table_index (tree t, tree *symbol_table)
+get_symbol_table_index (tree t, tree special, tree *symbol_table)
{
int i = 1;
tree method_list;
if (*symbol_table == NULL_TREE)
{
- *symbol_table = build_tree_list (t, t);
+ *symbol_table = build_tree_list (special, t);
return 1;
}
while (1)
{
tree value = TREE_VALUE (method_list);
- if (value == t)
+ tree purpose = TREE_PURPOSE (method_list);
+ if (value == t && purpose == special)
return i;
i++;
if (TREE_CHAIN (method_list) == NULL_TREE)
method_list = TREE_CHAIN (method_list);
}
- TREE_CHAIN (method_list) = build_tree_list (t, t);
+ TREE_CHAIN (method_list) = build_tree_list (special, t);
return i;
}
tree
-build_invokevirtual (tree dtable, tree method)
+build_invokevirtual (tree dtable, tree method, tree special)
{
tree func;
tree nativecode_ptr_ptr_type_node
if (flag_indirect_dispatch)
{
- if (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
- abort ();
+ gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
otable_index
= build_int_cst (NULL_TREE, get_symbol_table_index
- (method, &TYPE_OTABLE_METHODS (output_class)));
+ (method, special,
+ &TYPE_OTABLE_METHODS (output_class)));
method_index = build4 (ARRAY_REF, integer_type_node,
TYPE_OTABLE_DECL (output_class),
otable_index, NULL_TREE, NULL_TREE);
size_int (TARGET_VTABLE_USES_DESCRIPTORS));
}
- func = fold (build2 (PLUS_EXPR, nativecode_ptr_ptr_type_node, dtable,
- convert (nativecode_ptr_ptr_type_node, method_index)));
+ func = fold_build2 (PLUS_EXPR, nativecode_ptr_ptr_type_node, dtable,
+ convert (nativecode_ptr_ptr_type_node, method_index));
if (TARGET_VTABLE_USES_DESCRIPTORS)
func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
lookup_field (&dtable_type, class_ident), NULL_TREE);
interface = DECL_CONTEXT (method);
- if (! CLASS_INTERFACE (TYPE_NAME (interface)))
- abort ();
+ gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
layout_class_methods (interface);
if (flag_indirect_dispatch)
{
int itable_index
= 2 * (get_symbol_table_index
- (method, &TYPE_ITABLE_METHODS (output_class)));
+ (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
interface
= build4 (ARRAY_REF,
TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
tree call, func, method, arg_list, method_type;
tree check = NULL_TREE;
+ tree special = NULL_TREE;
+
if (! CLASS_LOADED_P (self_type))
{
load_class (self_type, 1);
else
method = lookup_java_method (self_type, method_name, method_signature);
+ /* We've found a method in a class other than the one in which it
+ was wanted. This can happen if, for instance, we're trying to
+ compile invokespecial super.equals().
+ FIXME: This is a kludge. Rather than nullifying the result, we
+ should change lookup_java_method() so that it doesn't search the
+ superclass chain when we're BC-compiling. */
+ if (! flag_verify_invocations
+ && method
+ && ! TYPE_ARRAY_P (self_type)
+ && self_type != DECL_CONTEXT (method))
+ method = NULL_TREE;
+
/* We've found a method in an interface, but this isn't an interface
call. */
if (opcode != OPCODE_invokeinterface
arg_list = pop_arguments (TYPE_ARG_TYPES (method_type));
flush_quick_stack ();
+ maybe_rewrite_invocation (&method, &arg_list, &method_signature,
+ &special);
+
func = NULL_TREE;
if (opcode == OPCODE_invokestatic)
func = build_known_method_ref (method, method_type, self_type,
- method_signature, arg_list);
+ method_signature, arg_list, special);
else if (opcode == OPCODE_invokespecial
|| (opcode == OPCODE_invokevirtual
&& (METHOD_PRIVATE (method)
TREE_VALUE (arg_list) = save_arg;
check = java_check_reference (save_arg, ! DECL_INIT_P (method));
func = build_known_method_ref (method, method_type, self_type,
- method_signature, arg_list);
+ method_signature, arg_list, special);
}
else
{
tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
arg_list);
if (opcode == OPCODE_invokevirtual)
- func = build_invokevirtual (dtable, method);
+ func = build_invokevirtual (dtable, method, special);
else
func = build_invokeinterface (dtable, method);
}
int from_class = ! CLASS_FROM_SOURCE_P (klass);
klass = build_class_ref (klass);
- if (! METHOD_NATIVE (method) || ! flag_jni)
- abort ();
+ gcc_assert (METHOD_NATIVE (method) && flag_jni);
DECL_ARTIFICIAL (method) = 1;
DECL_EXTERNAL (method) = 0;
/* If the JNI call returned a result, capture it here. If we had to
unwrap JNI object results, we would do that here. */
if (res_var != NULL_TREE)
- call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
- res_var, call);
+ {
+ /* If the call returns an object, it may return a JNI weak
+ reference, in which case we must unwrap it. */
+ if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
+ call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (method)),
+ build_address_of (soft_unwrapjni_node),
+ build_tree_list (NULL_TREE, call),
+ NULL_TREE);
+ call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
+ res_var, call);
+ }
TREE_SIDE_EFFECTS (call) = 1;
CAN_COMPLETE_NORMALLY (call) = 1;
if (res_var != NULL_TREE)
{
tree drt;
- if (! DECL_RESULT (method))
- abort ();
+ gcc_assert (DECL_RESULT (method));
/* Make sure we copy the result variable to the actual
result. We use the type of the DECL_RESULT because it
might be different from the return type of the function:
return bind;
}
+
+/* Given lvalue EXP, return a volatile expression that references the
+ same object.  The volatile view is built by taking EXP's address,
+ converting it to a pointer to the volatile-qualified variant of
+ EXP's type, and dereferencing that pointer.  */
+
+tree
+java_modify_addr_for_volatile (tree exp)
+{
+ tree exp_type = TREE_TYPE (exp);
+ /* The same type as EXP's, with TYPE_QUAL_VOLATILE added to any
+ qualifiers it already carries.  */
+ tree v_type
+ = build_qualified_type (exp_type,
+ TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
+ tree addr = build_fold_addr_expr (exp);
+ v_type = build_pointer_type (v_type);
+ addr = fold_convert (v_type, addr);
+ /* Re-dereference through the volatile pointer type.  */
+ exp = build_fold_indirect_ref (addr);
+ return exp;
+}
+
+
/* Expand an operation to extract from or store into a field.
IS_STATIC is 1 iff the field is static.
IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
int is_error = 0;
tree original_self_type = self_type;
tree field_decl;
+ tree modify_expr;
if (! CLASS_LOADED_P (self_type))
load_class (self_type, 1);
field_type, flags);
DECL_ARTIFICIAL (field_decl) = 1;
DECL_IGNORED_P (field_decl) = 1;
+#if 0
+ /* FIXME: We should be pessimistic about volatility. We
+ don't know one way or another, but this is safe.
+ However, doing this has bad effects on code quality. We
+ need to look at better ways to do this. */
+ TREE_THIS_VOLATILE (field_decl) = 1;
+#endif
}
else
{
if (DECL_CONTEXT (field_decl) != current_class)
error ("assignment to final field %q+D not in field's class",
field_decl);
- else if (FIELD_STATIC (field_decl))
- {
- if (!DECL_CLINIT_P (current_function_decl))
- warning (0, "assignment to final static field %q+D not in "
- "class initializer",
- field_decl);
- }
- else
- {
- tree cfndecl_name = DECL_NAME (current_function_decl);
- if (! DECL_CONSTRUCTOR_P (current_function_decl)
- && !ID_FINIT_P (cfndecl_name))
- warning (0, "assignment to final field %q+D not in constructor",
- field_decl);
- }
- }
- java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
- field_ref, new_value));
+ /* We used to check for assignments to final fields not
+ occurring in the class initializer or in a constructor
+ here. However, this constraint doesn't seem to be
+ enforced by the JVM. */
+ }
+
+ if (TREE_THIS_VOLATILE (field_decl))
+ field_ref = java_modify_addr_for_volatile (field_ref);
+
+ modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
+ field_ref, new_value);
+
+ if (TREE_THIS_VOLATILE (field_decl))
+ java_add_stmt
+ (build3
+ (CALL_EXPR, void_type_node,
+ build_address_of (built_in_decls[BUILT_IN_SYNCHRONIZE]),
+ NULL_TREE, NULL_TREE));
+
+ java_add_stmt (modify_expr);
}
else
- push_value (field_ref);
+ {
+ tree temp = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
+ java_add_local_var (temp);
+
+ if (TREE_THIS_VOLATILE (field_decl))
+ field_ref = java_modify_addr_for_volatile (field_ref);
+
+ modify_expr
+ = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
+ java_add_stmt (modify_expr);
+
+ if (TREE_THIS_VOLATILE (field_decl))
+ java_add_stmt
+ (build3
+ (CALL_EXPR, void_type_node,
+ build_address_of (built_in_decls[BUILT_IN_SYNCHRONIZE]),
+ NULL_TREE, NULL_TREE));
+
+ push_value (temp);
+ }
+ TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
}
void
}
}
- if (flag_new_verifier)
- {
- if (! verify_jvm_instructions_new (jcf, byte_ops, length))
- return;
- }
- else
- {
- if (! verify_jvm_instructions (jcf, byte_ops, length))
- return;
- }
+ if (! verify_jvm_instructions_new (jcf, byte_ops, length))
+ return;
promote_arguments ();
c = build_ref_from_constant_pool (index);
c = convert (promote_type (string_type_node), c);
}
+ else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
+ || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
+ {
+ tree record = get_class_constant (jcf, index);
+ c = build_class_ref (record);
+ }
else
c = get_constant (jcf, index);
push_value (c);
const char *opname; /* Temporary ??? */
int oldpc = PC; /* PC at instruction start. */
- /* If the instruction is at the beginning of a exception handler,
- replace the top of the stack with the thrown object reference */
+ /* If the instruction is at the beginning of an exception handler,
+ replace the top of the stack with the thrown object reference. */
if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
{
- /* Note that the new verifier will not emit a type map at all
- for dead exception handlers. In this case we just ignore
- the situation. */
- if (! flag_new_verifier || (instruction_bits[PC] & BCODE_VERIFIED) != 0)
+ /* Note that the verifier will not emit a type map at all for
+ dead exception handlers. In this case we just ignore the
+ situation. */
+ if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
{
tree type = pop_type (promote_type (throwable_type_node));
push_value (build_exception_object_ref (type));
}
#define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
- push_value (fold (build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
- pop_value (OPERAND_TYPE##_type_node))));
+ push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
+ pop_value (OPERAND_TYPE##_type_node)));
#define CONVERT2(FROM_TYPE, TO_TYPE) \
{ \
decl = find_local_variable (index, type, oldpc); \
set_local_type (index, type); \
java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
- update_aliases (decl, index, PC); \
}
#define STORE(OPERAND_TYPE, OPERAND_VALUE) \
This function is used by `give_name_to_locals' so that a local's
DECL features a DECL_LOCAL_START_PC such that the first related
- store operation will use DECL as a destination, not a unrelated
+ store operation will use DECL as a destination, not an unrelated
temporary created for the occasion.
This function uses a global (instruction_bits) `note_instructions' should
return node;
/* Not having a list of arguments here is an error. */
- if (TREE_CODE (arg) != TREE_LIST)
- abort ();
+ gcc_assert (TREE_CODE (arg) == TREE_LIST);
/* This reverses the evaluation order. This is a desired effect. */
for (cmp = NULL_TREE; arg; arg = TREE_CHAIN (arg))