/* Write out a Java(TM) class file.
- Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
+ Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003
+ Free Software Foundation, Inc.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
+GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
-GNU CC is distributed in the hope that it will be useful,
+GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
+along with GCC; see the file COPYING. If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
#include "config.h"
#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
#include "jcf.h"
#include "tree.h"
+#include "real.h"
#include "java-tree.h"
#include "obstack.h"
-#undef AND
#include "rtl.h"
#include "flags.h"
#include "java-opcodes.h"
#include "buffer.h"
#include "toplev.h"
#include "ggc.h"
-
-#ifndef DIR_SEPARATOR
-#define DIR_SEPARATOR '/'
-#endif
+#include "tm_p.h"
extern struct obstack temporary_obstack;
/* Base directory in which `.class' files should be written.
NULL means to put the file into the same directory as the
corresponding .java file. */
-char *jcf_write_base_directory = NULL;
+const char *jcf_write_base_directory = NULL;
/* Make sure bytecode.data is big enough for at least N more bytes. */
/* Add a 1-byte instruction/operand I to bytecode.data,
assuming space has already been RESERVE'd. */
-#define OP1(I) (state->last_bc = *state->bytecode.ptr++ = (I), CHECK_OP(state))
+#define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
/* Like OP1, but I is a 2-byte big endian integer. */
to the beginning of the block.
If (pc < 0), the jcf_block is not an actual block (i.e. it has no
- assocated code yet), but it is an undefined label.
+ associated code yet), but it is an undefined label.
*/
struct jcf_block
{
/* For blocks that that are defined, the next block (in pc order).
For blocks that are not-yet-defined the end label of a LABELED_BLOCK_EXPR
- or a cleanup expression (from a WITH_CLEANUP_EXPR),
+ or a cleanup expression (from a TRY_FINALLY_EXPR),
this is the next (outer) such end label, in a stack headed by
labeled_blocks in jcf_partial. */
struct jcf_block *next;
If the label has been defined:
Until perform_relocations is finished, this is the maximum possible
- value of the bytecode offset at the begnning of this block.
+ value of the bytecode offset at the beginning of this block.
After perform_relocations, it is the actual offset (pc). */
int pc;
/* Information about the current switch statement. */
struct jcf_switch_state *sw_state;
- enum java_opcode last_bc; /* The last emitted bytecode */
+ /* The count of jsr instructions that have been emitted.  */
+ long num_jsrs;
};
-static void generate_bytecode_insns PARAMS ((tree, int, struct jcf_partial *));
-static struct chunk * alloc_chunk PARAMS ((struct chunk *, unsigned char *,
- int, struct obstack *));
-static unsigned char * append_chunk PARAMS ((unsigned char *, int,
- struct jcf_partial *));
-static void append_chunk_copy PARAMS ((unsigned char *, int,
- struct jcf_partial *));
-static struct jcf_block * gen_jcf_label PARAMS ((struct jcf_partial *));
-static void finish_jcf_block PARAMS ((struct jcf_partial *));
-static void define_jcf_label PARAMS ((struct jcf_block *,
- struct jcf_partial *));
-static struct jcf_block * get_jcf_label_here PARAMS ((struct jcf_partial *));
-static void put_linenumber PARAMS ((int, struct jcf_partial *));
-static void localvar_alloc PARAMS ((tree, struct jcf_partial *));
-static void localvar_free PARAMS ((tree, struct jcf_partial *));
-static int get_access_flags PARAMS ((tree));
-static void write_chunks PARAMS ((FILE *, struct chunk *));
-static int adjust_typed_op PARAMS ((tree, int));
-static void generate_bytecode_conditional PARAMS ((tree, struct jcf_block *,
- struct jcf_block *, int,
- struct jcf_partial *));
-static void generate_bytecode_return PARAMS ((tree, struct jcf_partial *));
-static void perform_relocations PARAMS ((struct jcf_partial *));
-static void init_jcf_state PARAMS ((struct jcf_partial *, struct obstack *));
-static void init_jcf_method PARAMS ((struct jcf_partial *, tree));
-static void release_jcf_state PARAMS ((struct jcf_partial *));
-static struct chunk * generate_classfile PARAMS ((tree, struct jcf_partial *));
-static struct jcf_handler *alloc_handler PARAMS ((struct jcf_block *,
- struct jcf_block *,
- struct jcf_partial *));
-static void emit_iinc PARAMS ((tree, HOST_WIDE_INT, struct jcf_partial *));
-static void emit_reloc PARAMS ((HOST_WIDE_INT, int, struct jcf_block *,
- struct jcf_partial *));
-static void push_constant1 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
-static void push_constant2 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
-static void push_int_const PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
-static int find_constant_wide PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
- struct jcf_partial *));
-static void push_long_const PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
- struct jcf_partial *));
-static int find_constant_index PARAMS ((tree, struct jcf_partial *));
-static void push_long_const PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
- struct jcf_partial *));
-static void field_op PARAMS ((tree, int, struct jcf_partial *));
-static void maybe_wide PARAMS ((int, int, struct jcf_partial *));
-static void emit_dup PARAMS ((int, int, struct jcf_partial *));
-static void emit_pop PARAMS ((int, struct jcf_partial *));
-static void emit_load_or_store PARAMS ((tree, int, struct jcf_partial *));
-static void emit_load PARAMS ((tree, struct jcf_partial *));
-static void emit_store PARAMS ((tree, struct jcf_partial *));
-static void emit_unop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
-static void emit_binop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
-static void emit_reloc PARAMS ((HOST_WIDE_INT, int, struct jcf_block *,
- struct jcf_partial *));
-static void emit_switch_reloc PARAMS ((struct jcf_block *,
- struct jcf_partial *));
-static void emit_case_reloc PARAMS ((struct jcf_relocation *,
- struct jcf_partial *));
-static void emit_if PARAMS ((struct jcf_block *, int, int,
- struct jcf_partial *));
-static void emit_goto PARAMS ((struct jcf_block *, struct jcf_partial *));
-static void emit_jsr PARAMS ((struct jcf_block *, struct jcf_partial *));
-static void call_cleanups PARAMS ((struct jcf_block *, struct jcf_partial *));
-static char *make_class_file_name PARAMS ((tree));
-static unsigned char *append_synthetic_attribute PARAMS ((struct jcf_partial *));
-static void append_innerclasses_attribute PARAMS ((struct jcf_partial *, tree));
-static void append_innerclasses_attribute_entry PARAMS ((struct jcf_partial *, tree, tree));
-static void append_gcj_attribute PARAMS ((struct jcf_partial *, tree));
+static void generate_bytecode_insns (tree, int, struct jcf_partial *);
+static struct chunk * alloc_chunk (struct chunk *, unsigned char *,
+ int, struct obstack *);
+static unsigned char * append_chunk (unsigned char *, int,
+ struct jcf_partial *);
+static void append_chunk_copy (unsigned char *, int, struct jcf_partial *);
+static struct jcf_block * gen_jcf_label (struct jcf_partial *);
+static void finish_jcf_block (struct jcf_partial *);
+static void define_jcf_label (struct jcf_block *, struct jcf_partial *);
+static struct jcf_block * get_jcf_label_here (struct jcf_partial *);
+static void put_linenumber (int, struct jcf_partial *);
+static void localvar_alloc (tree, struct jcf_partial *);
+static void maybe_free_localvar (tree, struct jcf_partial *, int);
+static int get_access_flags (tree);
+static void write_chunks (FILE *, struct chunk *);
+static int adjust_typed_op (tree, int);
+static void generate_bytecode_conditional (tree, struct jcf_block *,
+ struct jcf_block *, int,
+ struct jcf_partial *);
+static void generate_bytecode_return (tree, struct jcf_partial *);
+static void perform_relocations (struct jcf_partial *);
+static void init_jcf_state (struct jcf_partial *, struct obstack *);
+static void init_jcf_method (struct jcf_partial *, tree);
+static void release_jcf_state (struct jcf_partial *);
+static struct chunk * generate_classfile (tree, struct jcf_partial *);
+static struct jcf_handler *alloc_handler (struct jcf_block *,
+ struct jcf_block *,
+ struct jcf_partial *);
+static void emit_iinc (tree, HOST_WIDE_INT, struct jcf_partial *);
+static void emit_reloc (HOST_WIDE_INT, int, struct jcf_block *,
+ struct jcf_partial *);
+static void push_constant1 (HOST_WIDE_INT, struct jcf_partial *);
+static void push_constant2 (HOST_WIDE_INT, struct jcf_partial *);
+static void push_int_const (HOST_WIDE_INT, struct jcf_partial *);
+static int find_constant_wide (HOST_WIDE_INT, HOST_WIDE_INT,
+ struct jcf_partial *);
+static void push_long_const (HOST_WIDE_INT, HOST_WIDE_INT,
+ struct jcf_partial *);
+static int find_constant_index (tree, struct jcf_partial *);
+static void push_long_const (HOST_WIDE_INT, HOST_WIDE_INT,
+ struct jcf_partial *);
+static void field_op (tree, int, struct jcf_partial *);
+static void maybe_wide (int, int, struct jcf_partial *);
+static void emit_dup (int, int, struct jcf_partial *);
+static void emit_pop (int, struct jcf_partial *);
+static void emit_load_or_store (tree, int, struct jcf_partial *);
+static void emit_load (tree, struct jcf_partial *);
+static void emit_store (tree, struct jcf_partial *);
+static void emit_unop (enum java_opcode, tree, struct jcf_partial *);
+static void emit_binop (enum java_opcode, tree, struct jcf_partial *);
+static void emit_reloc (HOST_WIDE_INT, int, struct jcf_block *,
+ struct jcf_partial *);
+static void emit_switch_reloc (struct jcf_block *, struct jcf_partial *);
+static void emit_case_reloc (struct jcf_relocation *, struct jcf_partial *);
+static void emit_if (struct jcf_block *, int, int, struct jcf_partial *);
+static void emit_goto (struct jcf_block *, struct jcf_partial *);
+static void emit_jsr (struct jcf_block *, struct jcf_partial *);
+static void call_cleanups (struct jcf_block *, struct jcf_partial *);
+static char *make_class_file_name (tree);
+static unsigned char *append_synthetic_attribute (struct jcf_partial *);
+static void append_deprecated_attribute (struct jcf_partial *);
+static void append_innerclasses_attribute (struct jcf_partial *, tree);
+static void append_innerclasses_attribute_entry (struct jcf_partial *, tree, tree);
+static void append_gcj_attribute (struct jcf_partial *, tree);
/* Utility macros for appending (big-endian) data to a buffer.
We assume a local variable 'ptr' points into where we want to
write next, and we assume enough space has been allocated. */
#ifdef ENABLE_JC1_CHECKING
-static int CHECK_PUT PARAMS ((void *, struct jcf_partial *, int));
+static int CHECK_PUT (void *, struct jcf_partial *, int);
static int
-CHECK_PUT (ptr, state, i)
- void *ptr;
- struct jcf_partial *state;
- int i;
+CHECK_PUT (void *ptr, struct jcf_partial *state, int i)
{
if ((unsigned char *) ptr < state->chunk->data
|| (unsigned char *) ptr + i > state->chunk->data + state->chunk->size)
However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
static struct chunk *
-alloc_chunk (last, data, size, work)
- struct chunk *last;
- unsigned char *data;
- int size;
- struct obstack *work;
+alloc_chunk (struct chunk *last, unsigned char *data,
+ int size, struct obstack *work)
{
- struct chunk *chunk = (struct chunk *)
- obstack_alloc (work, sizeof(struct chunk));
+ struct chunk *chunk = obstack_alloc (work, sizeof(struct chunk));
if (data == NULL && size > 0)
data = obstack_alloc (work, size);
}
#ifdef ENABLE_JC1_CHECKING
-static int CHECK_OP PARAMS ((struct jcf_partial *));
+static int CHECK_OP (struct jcf_partial *);
static int
-CHECK_OP (state)
- struct jcf_partial *state;
+CHECK_OP (struct jcf_partial *state)
{
if (state->bytecode.ptr > state->bytecode.limit)
abort ();
#endif
static unsigned char *
-append_chunk (data, size, state)
- unsigned char *data;
- int size;
- struct jcf_partial *state;
+append_chunk (unsigned char *data, int size, struct jcf_partial *state)
{
state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
if (state->first == NULL)
}
static void
-append_chunk_copy (data, size, state)
- unsigned char *data;
- int size;
- struct jcf_partial *state;
+append_chunk_copy (unsigned char *data, int size, struct jcf_partial *state)
{
unsigned char *ptr = append_chunk (NULL, size, state);
memcpy (ptr, data, size);
}
\f
static struct jcf_block *
-gen_jcf_label (state)
- struct jcf_partial *state;
+gen_jcf_label (struct jcf_partial *state)
{
- struct jcf_block *block = (struct jcf_block *)
- obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
+ struct jcf_block *block
+ = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
block->next = NULL;
block->linenumber = -1;
block->pc = UNDEFINED_PC;
}
static void
-finish_jcf_block (state)
- struct jcf_partial *state;
+finish_jcf_block (struct jcf_partial *state)
{
struct jcf_block *block = state->last_block;
struct jcf_relocation *reloc;
}
static void
-define_jcf_label (label, state)
- struct jcf_block *label;
- struct jcf_partial *state;
+define_jcf_label (struct jcf_block *label, struct jcf_partial *state)
{
if (state->last_block != NULL)
finish_jcf_block (state);
}
static struct jcf_block *
-get_jcf_label_here (state)
- struct jcf_partial *state;
+get_jcf_label_here (struct jcf_partial *state)
{
if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
return state->last_block;
/* Note a line number entry for the current PC and given LINE. */
static void
-put_linenumber (line, state)
- int line;
- struct jcf_partial *state;
+put_linenumber (int line, struct jcf_partial *state)
{
struct jcf_block *label = get_jcf_label_here (state);
if (label->linenumber > 0)
in the range (START_LABEL, END_LABEL). */
static struct jcf_handler *
-alloc_handler (start_label, end_label, state)
- struct jcf_block *start_label;
- struct jcf_block *end_label;
- struct jcf_partial *state;
+alloc_handler (struct jcf_block *start_label, struct jcf_block *end_label,
+ struct jcf_partial *state)
{
- struct jcf_handler *handler = (struct jcf_handler *)
- obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
+ struct jcf_handler *handler
+ = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
handler->start_label = start_label;
handler->end_label = end_label;
handler->handler_label = get_jcf_label_here (state);
/* The index of jvm local variable allocated for this DECL.
This is assigned when generating .class files;
contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
- (We don't allocate DECL_LANG_SPECIFIC for locals from Java sourc code.) */
+ (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
#define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
((struct localvar_info**) state->localvars.ptr - localvar_buffer)
static void
-localvar_alloc (decl, state)
- tree decl;
- struct jcf_partial *state;
+localvar_alloc (tree decl, struct jcf_partial *state)
{
struct jcf_block *start_label = get_jcf_label_here (state);
int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
ptr = (struct localvar_info**) state->localvars.data + index;
state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
}
- info = (struct localvar_info *)
- obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
+ info = obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
ptr[0] = info;
if (wide)
ptr[1] = (struct localvar_info *)(~0);
}
static void
-localvar_free (decl, state)
- tree decl;
- struct jcf_partial *state;
+maybe_free_localvar (tree decl, struct jcf_partial *state, int really)
{
struct jcf_block *end_label = get_jcf_label_here (state);
int index = DECL_LOCAL_INDEX (decl);
if (info->decl != decl)
abort ();
+ if (! really)
+ return;
ptr[0] = NULL;
if (wide)
{
a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
static int
-get_access_flags (decl)
- tree decl;
+get_access_flags (tree decl)
{
int flags = 0;
int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
flags |= ACC_INTERFACE;
if (CLASS_STATIC (decl))
flags |= ACC_STATIC;
+ if (CLASS_PRIVATE (decl))
+ flags |= ACC_PRIVATE;
+ if (CLASS_PROTECTED (decl))
+ flags |= ACC_PROTECTED;
if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
|| LOCAL_CLASS_P (TREE_TYPE (decl)))
flags |= ACC_PRIVATE;
+ if (CLASS_STRICTFP (decl))
+ flags |= ACC_STRICT;
}
else
abort ();
flags |= ACC_SYNCHRONIZED;
if (METHOD_ABSTRACT (decl))
flags |= ACC_ABSTRACT;
+ if (METHOD_STRICTFP (decl))
+ flags |= ACC_STRICT;
}
if (isfield)
{
/* Write the list of segments starting at CHUNKS to STREAM. */
static void
-write_chunks (stream, chunks)
- FILE* stream;
- struct chunk *chunks;
+write_chunks (FILE* stream, struct chunk *chunks)
{
for (; chunks != NULL; chunks = chunks->next)
fwrite (chunks->data, chunks->size, 1, stream);
(Caller is responsible for doing NOTE_PUSH.) */
static void
-push_constant1 (index, state)
- HOST_WIDE_INT index;
- struct jcf_partial *state;
+push_constant1 (HOST_WIDE_INT index, struct jcf_partial *state)
{
RESERVE (3);
if (index < 256)
(Caller is responsible for doing NOTE_PUSH.) */
static void
-push_constant2 (index, state)
- HOST_WIDE_INT index;
- struct jcf_partial *state;
+push_constant2 (HOST_WIDE_INT index, struct jcf_partial *state)
{
RESERVE (3);
OP1 (OPCODE_ldc2_w);
Caller is responsible for doing NOTE_PUSH. */
static void
-push_int_const (i, state)
- HOST_WIDE_INT i;
- struct jcf_partial *state;
+push_int_const (HOST_WIDE_INT i, struct jcf_partial *state)
{
RESERVE(3);
if (i >= -1 && i <= 5)
}
static int
-find_constant_wide (lo, hi, state)
- HOST_WIDE_INT lo, hi;
- struct jcf_partial *state;
+find_constant_wide (HOST_WIDE_INT lo, HOST_WIDE_INT hi,
+ struct jcf_partial *state)
{
HOST_WIDE_INT w1, w2;
lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
Return the index in the constant pool. */
static int
-find_constant_index (value, state)
- tree value;
- struct jcf_partial *state;
+find_constant_index (tree value, struct jcf_partial *state)
{
if (TREE_CODE (value) == INTEGER_CST)
{
else if (TREE_CODE (value) == REAL_CST)
{
long words[2];
+
+ real_to_target (words, &TREE_REAL_CST (value),
+ TYPE_MODE (TREE_TYPE (value)));
+ words[0] &= 0xffffffff;
+ words[1] &= 0xffffffff;
+
if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
- {
- words[0] = etarsingle (TREE_REAL_CST (value)) & 0xFFFFFFFF;
- return find_constant1 (&state->cpool, CONSTANT_Float,
- (jword)words[0]);
- }
+ return find_constant1 (&state->cpool, CONSTANT_Float, (jword)words[0]);
else
- {
- etardouble (TREE_REAL_CST (value), words);
- return find_constant2 (&state->cpool, CONSTANT_Double,
- (jword)(words[1-FLOAT_WORDS_BIG_ENDIAN] &
- 0xFFFFFFFF),
- (jword)(words[FLOAT_WORDS_BIG_ENDIAN] &
- 0xFFFFFFFF));
- }
+ return find_constant2 (&state->cpool, CONSTANT_Double,
+ (jword)words[1-FLOAT_WORDS_BIG_ENDIAN],
+ (jword)words[FLOAT_WORDS_BIG_ENDIAN]);
}
else if (TREE_CODE (value) == STRING_CST)
return find_string_constant (&state->cpool, value);
Caller is responsible for doing NOTE_PUSH. */
static void
-push_long_const (lo, hi, state)
- HOST_WIDE_INT lo, hi;
- struct jcf_partial *state;
+push_long_const (HOST_WIDE_INT lo, HOST_WIDE_INT hi, struct jcf_partial *state)
{
- if (hi == 0 && lo >= 0 && lo <= 1)
+ HOST_WIDE_INT highpart, dummy;
+ jint lowpart = WORD_TO_INT (lo);
+
+ rshift_double (lo, hi, 32, 64, &highpart, &dummy, 1);
+
+ if (highpart == 0 && (lowpart == 0 || lowpart == 1))
{
RESERVE(1);
- OP1(OPCODE_lconst_0 + lo);
+ OP1(OPCODE_lconst_0 + lowpart);
}
- else if ((hi == 0 && (jword)(lo & 0xFFFFFFFF) < 32768)
- || (hi == -1 && (lo & 0xFFFFFFFF) >= (jword)-32768))
+ else if ((highpart == 0 && lowpart > 0 && lowpart < 32768)
+ || (highpart == -1 && lowpart < 0 && lowpart >= -32768))
{
- push_int_const (lo, state);
+ push_int_const (lowpart, state);
RESERVE (1);
OP1 (OPCODE_i2l);
}
}
static void
-field_op (field, opcode, state)
- tree field;
- int opcode;
- struct jcf_partial *state;
+field_op (tree field, int opcode, struct jcf_partial *state)
{
int index = find_fieldref_index (&state->cpool, field);
RESERVE (3);
opcodes typically depend on the operand type. */
static int
-adjust_typed_op (type, max)
- tree type;
- int max;
+adjust_typed_op (tree type, int max)
{
switch (TREE_CODE (type))
{
}
static void
-maybe_wide (opcode, index, state)
- int opcode, index;
- struct jcf_partial *state;
+maybe_wide (int opcode, int index, struct jcf_partial *state)
{
if (index >= 256)
{
(The new words get inserted at stack[SP-size-offset].) */
static void
-emit_dup (size, offset, state)
- int size, offset;
- struct jcf_partial *state;
+emit_dup (int size, int offset, struct jcf_partial *state)
{
int kind;
if (size == 0)
}
static void
-emit_pop (size, state)
- int size;
- struct jcf_partial *state;
+emit_pop (int size, struct jcf_partial *state)
{
RESERVE (1);
OP1 (OPCODE_pop - 1 + size);
}
static void
-emit_iinc (var, value, state)
- tree var;
- HOST_WIDE_INT value;
- struct jcf_partial *state;
+emit_iinc (tree var, HOST_WIDE_INT value, struct jcf_partial *state)
{
int slot = DECL_LOCAL_INDEX (var);
}
static void
-emit_load_or_store (var, opcode, state)
- tree var; /* Variable to load from or store into. */
- int opcode; /* Either OPCODE_iload or OPCODE_istore. */
- struct jcf_partial *state;
+emit_load_or_store (tree var, /* Variable to load from or store into. */
+ int opcode, /* Either OPCODE_iload or OPCODE_istore. */
+ struct jcf_partial *state)
{
tree type = TREE_TYPE (var);
int kind = adjust_typed_op (type, 4);
}
static void
-emit_load (var, state)
- tree var;
- struct jcf_partial *state;
+emit_load (tree var, struct jcf_partial *state)
{
emit_load_or_store (var, OPCODE_iload, state);
NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
}
static void
-emit_store (var, state)
- tree var;
- struct jcf_partial *state;
+emit_store (tree var, struct jcf_partial *state)
{
emit_load_or_store (var, OPCODE_istore, state);
NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
}
static void
-emit_unop (opcode, type, state)
- enum java_opcode opcode;
- tree type ATTRIBUTE_UNUSED;
- struct jcf_partial *state;
+emit_unop (enum java_opcode opcode, tree type ATTRIBUTE_UNUSED,
+ struct jcf_partial *state)
{
RESERVE(1);
OP1 (opcode);
}
static void
-emit_binop (opcode, type, state)
- enum java_opcode opcode;
- tree type;
- struct jcf_partial *state;
+emit_binop (enum java_opcode opcode, tree type, struct jcf_partial *state)
{
int size = TYPE_IS_WIDE (type) ? 2 : 1;
RESERVE(1);
}
static void
-emit_reloc (value, kind, target, state)
- HOST_WIDE_INT value;
- int kind;
- struct jcf_block *target;
- struct jcf_partial *state;
+emit_reloc (HOST_WIDE_INT value, int kind,
+ struct jcf_block *target, struct jcf_partial *state)
{
- struct jcf_relocation *reloc = (struct jcf_relocation *)
- obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
+ struct jcf_relocation *reloc
+ = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
struct jcf_block *block = state->last_block;
reloc->next = block->u.relocations;
block->u.relocations = reloc;
}
static void
-emit_switch_reloc (label, state)
- struct jcf_block *label;
- struct jcf_partial *state;
+emit_switch_reloc (struct jcf_block *label, struct jcf_partial *state)
{
emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
}
but re-uses an existing case reloc. */
static void
-emit_case_reloc (reloc, state)
- struct jcf_relocation *reloc;
- struct jcf_partial *state;
+emit_case_reloc (struct jcf_relocation *reloc, struct jcf_partial *state)
{
struct jcf_block *block = state->last_block;
reloc->next = block->u.relocations;
The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
static void
-emit_if (target, opcode, inv_opcode, state)
- struct jcf_block *target;
- int opcode, inv_opcode;
- struct jcf_partial *state;
+emit_if (struct jcf_block *target, int opcode, int inv_opcode,
+ struct jcf_partial *state)
{
RESERVE(3);
OP1 (opcode);
}
static void
-emit_goto (target, state)
- struct jcf_block *target;
- struct jcf_partial *state;
+emit_goto (struct jcf_block *target, struct jcf_partial *state)
{
RESERVE(3);
OP1 (OPCODE_goto);
}
static void
-emit_jsr (target, state)
- struct jcf_block *target;
- struct jcf_partial *state;
+emit_jsr (struct jcf_block *target, struct jcf_partial *state)
{
RESERVE(3);
OP1 (OPCODE_jsr);
/* Value is 1 byte from reloc back to start of instruction. */
emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
+ state->num_jsrs++;
}
/* Generate code to evaluate EXP. If the result is true,
branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
- TRUE_BRANCH_FIRST is a code geneation hint that the
+ TRUE_BRANCH_FIRST is a code generation hint that the
TRUE_LABEL may follow right after this. (The idea is that we
may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
static void
-generate_bytecode_conditional (exp, true_label, false_label,
- true_branch_first, state)
- tree exp;
- struct jcf_block *true_label;
- struct jcf_block *false_label;
- int true_branch_first;
- struct jcf_partial *state;
+generate_bytecode_conditional (tree exp,
+ struct jcf_block *true_label,
+ struct jcf_block *false_label,
+ int true_branch_first,
+ struct jcf_partial *state)
{
tree exp0, exp1, type;
int save_SP = state->code_SP;
OP1 (OPCODE_lcmp);
goto compare_1;
}
- /* FALLTHOUGH */
+ /* FALLTHROUGH */
default:
if (integer_zerop (exp1))
{
abort ();
}
-/* Call pending cleanups i.e. those for surrounding CLEANUP_POINT_EXPRs
+/* Call pending cleanups i.e. those for surrounding TRY_FINALLY_EXPRs.
but only as far out as LIMIT (since we are about to jump to the
emit label that is LIMIT). */
static void
-call_cleanups (limit, state)
- struct jcf_block *limit;
- struct jcf_partial *state;
+call_cleanups (struct jcf_block *limit, struct jcf_partial *state)
{
struct jcf_block *block = state->labeled_blocks;
for (; block != limit; block = block->next)
}
static void
-generate_bytecode_return (exp, state)
- tree exp;
- struct jcf_partial *state;
+generate_bytecode_return (tree exp, struct jcf_partial *state)
{
tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
int returns_void = TREE_CODE (return_type) == VOID_TYPE;
if (returns_void)
{
op = OPCODE_return;
- call_cleanups (NULL_PTR, state);
+ call_cleanups (NULL, state);
}
else
{
localvar_alloc (state->return_value_decl, state);
}
emit_store (state->return_value_decl, state);
- call_cleanups (NULL_PTR, state);
+ call_cleanups (NULL, state);
emit_load (state->return_value_decl, state);
- /* If we call localvar_free (state->return_value_decl, state),
+ /* If we call maybe_free_localvar (state->return_value_decl, state, 1),
then we risk the save decl erroneously re-used in the
finalizer. Instead, we keep the state->return_value_decl
allocated through the rest of the method. This is not
TARGET is one of STACK_TARGET or IGNORE_TARGET. */
static void
-generate_bytecode_insns (exp, target, state)
- tree exp;
- int target;
- struct jcf_partial *state;
+generate_bytecode_insns (tree exp, int target, struct jcf_partial *state)
{
- tree type;
+ tree type, arg;
enum java_opcode jopcode;
int op;
HOST_WIDE_INT value;
{
tree local;
tree body = BLOCK_EXPR_BODY (exp);
+ long jsrs = state->num_jsrs;
for (local = BLOCK_EXPR_DECLS (exp); local; )
{
tree next = TREE_CHAIN (local);
body = TREE_OPERAND (body, 1);
}
generate_bytecode_insns (body, target, state);
+
for (local = BLOCK_EXPR_DECLS (exp); local; )
{
tree next = TREE_CHAIN (local);
- localvar_free (local, state);
+ maybe_free_localvar (local, state, state->num_jsrs <= jsrs);
local = next;
}
}
break;
- case COMPOUND_EXPR:
+ case COMPOUND_EXPR:
generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
- generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
+ /* Normally the first operand to a COMPOUND_EXPR must complete
+ normally. However, in the special case of a do-while
+ statement this is not necessarily the case. */
+ if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0)))
+ generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
break;
case EXPR_WITH_FILE_LOCATION:
{
const char *saved_input_filename = input_filename;
tree body = EXPR_WFL_NODE (exp);
- int saved_lineno = lineno;
+ int saved_lineno = input_line;
if (body == empty_stmt_node)
break;
input_filename = EXPR_WFL_FILENAME (exp);
- lineno = EXPR_WFL_LINENO (exp);
- if (EXPR_WFL_EMIT_LINE_NOTE (exp) && lineno > 0
+ input_line = EXPR_WFL_LINENO (exp);
+ if (EXPR_WFL_EMIT_LINE_NOTE (exp) && input_line > 0
&& debug_info_level > DINFO_LEVEL_NONE)
- put_linenumber (lineno, state);
+ put_linenumber (input_line, state);
generate_bytecode_insns (body, target, state);
input_filename = saved_input_filename;
- lineno = saved_lineno;
+ input_line = saved_lineno;
}
break;
case INTEGER_CST:
{
int prec = TYPE_PRECISION (type) >> 5;
RESERVE(1);
- if (real_zerop (exp))
+ if (real_zerop (exp) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp)))
OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
else if (real_onep (exp))
OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
- /* FIXME Should also use fconst_2 for 2.0f.
- Also, should use iconst_2/ldc followed by i2f/i2d
+ else if (prec == 1 && real_twop (exp))
+ OP1 (OPCODE_fconst_2);
+ /* ??? We could also use iconst_3/ldc followed by i2f/i2d
for other float/double when the value is a small integer. */
else
{
case CASE_EXPR:
{
struct jcf_switch_state *sw_state = state->sw_state;
- struct jcf_relocation *reloc = (struct jcf_relocation *)
- obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
+ struct jcf_relocation *reloc
+ = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
reloc->kind = 0;
reloc->label = get_jcf_label_here (state);
1. the switch_expression (the value used to select the correct case);
2. the switch_body;
3. the switch_instruction (the tableswitch/loopupswitch instruction.).
- After code generation, we will re-order then in the order 1, 3, 2.
- This is to avoid an extra GOTOs. */
+ After code generation, we will re-order them in the order 1, 3, 2.
+ This is to avoid any extra GOTOs. */
struct jcf_switch_state sw_state;
struct jcf_block *expression_last; /* Last block of the switch_expression. */
struct jcf_block *body_last; /* Last block of the switch_body. */
sw_state.default_label = NULL;
generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
expression_last = state->last_block;
- body_block = get_jcf_label_here (state); /* Force a new block here. */
+ /* Force a new block here. */
+ body_block = gen_jcf_label (state);
+ define_jcf_label (body_block, state);
generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
body_last = state->last_block;
else
{
push_int_const (sw_state.cases->offset, state);
+ NOTE_PUSH (1);
emit_if (sw_state.cases->label,
OPCODE_if_icmpeq, OPCODE_if_icmpne, state);
}
else
{
HOST_WIDE_INT i;
+ unsigned HOST_WIDE_INT delta;
/* Copy the chain of relocs into a sorted array. */
- struct jcf_relocation **relocs = (struct jcf_relocation **)
- xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
+ struct jcf_relocation **relocs
+ = xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
/* The relocs arrays is a buffer with a gap.
The assumption is that cases will normally come in "runs". */
int gap_start = 0;
gap_start--;
}
relocs[gap_start++] = reloc;
- /* Note we don't check for duplicates. FIXME! */
+ /* Note we don't check for duplicates. This is
+ handled by the parser. */
}
- if (2 * sw_state.num_cases
- >= sw_state.max_case - sw_state.min_case)
+ /* We could have DELTA < 0 if sw_state.min_case is
+ something like Integer.MIN_VALUE. That is why delta is
+ unsigned. */
+ delta = sw_state.max_case - sw_state.min_case;
+ if (2 * (unsigned) sw_state.num_cases >= delta)
{ /* Use tableswitch. */
int index = 0;
RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
{
struct jcf_block *head_label = get_jcf_label_here (state);
generate_bytecode_insns (body, IGNORE_TARGET, state);
- emit_goto (head_label, state);
+ if (CAN_COMPLETE_NORMALLY (body))
+ emit_goto (head_label, state);
}
}
break;
case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
increment:
+ arg = TREE_OPERAND (exp, 1);
exp = TREE_OPERAND (exp, 0);
type = TREE_TYPE (exp);
size = TYPE_IS_WIDE (type) ? 2 : 1;
/* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
/* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
/* Stack, otherwise: ..., [result, ] oldvalue. */
- if (size == 1)
- push_int_const (value, state);
- else
- push_long_const (value, (HOST_WIDE_INT)(value >= 0 ? 0 : -1), state);
- NOTE_PUSH (size);
- emit_binop (OPCODE_iadd + adjust_typed_op (type, 3), type, state);
+ generate_bytecode_insns (arg, STACK_TARGET, state);
+ emit_binop ((value >= 0 ? OPCODE_iadd : OPCODE_isub)
+ + adjust_typed_op (type, 3),
+ type, state);
if (target != IGNORE_TARGET && ! post_op)
emit_dup (size, offset, state);
/* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
}
else
offset = 0;
+
+ /* If the rhs is a binary expression and the left operand is
+ `==' to the lhs then we have an OP= expression. In this
+ case we must do some special processing. */
+ if (TREE_CODE_CLASS (TREE_CODE (rhs)) == '2'
+ && lhs == TREE_OPERAND (rhs, 0))
+ {
+ if (TREE_CODE (lhs) == COMPONENT_REF)
+ {
+ tree field = TREE_OPERAND (lhs, 1);
+ if (! FIELD_STATIC (field))
+ {
+ /* Duplicate the object reference so we can get
+ the field. */
+ emit_dup (TYPE_IS_WIDE (field) ? 2 : 1, 0, state);
+ NOTE_POP (1);
+ }
+ field_op (field, (FIELD_STATIC (field)
+ ? OPCODE_getstatic
+ : OPCODE_getfield),
+ state);
+
+ NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
+ }
+ else if (TREE_CODE (lhs) == VAR_DECL
+ || TREE_CODE (lhs) == PARM_DECL)
+ {
+ if (FIELD_STATIC (lhs))
+ {
+ field_op (lhs, OPCODE_getstatic, state);
+ NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
+ }
+ else
+ emit_load (lhs, state);
+ }
+ else if (TREE_CODE (lhs) == ARRAY_REF)
+ {
+ /* Duplicate the array and index, which are on the
+ stack, so that we can load the old value. */
+ emit_dup (2, 0, state);
+ NOTE_POP (2);
+ jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (lhs), 7);
+ RESERVE (1);
+ OP1 (jopcode);
+ NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
+ }
+ else
+ abort ();
+
+ /* This function correctly handles the case where the LHS
+ of a binary expression is NULL_TREE. */
+ rhs = build (TREE_CODE (rhs), TREE_TYPE (rhs),
+ NULL_TREE, TREE_OPERAND (rhs, 1));
+ }
+
generate_bytecode_insns (rhs, STACK_TARGET, state);
if (target != IGNORE_TARGET)
emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
exp = lhs;
}
- /* FALLTHOUGH */
+ /* FALLTHROUGH */
finish_assignment:
if (TREE_CODE (exp) == COMPONENT_REF)
tree arg0 = TREE_OPERAND (exp, 0);
tree arg1 = TREE_OPERAND (exp, 1);
jopcode += adjust_typed_op (type, 3);
- if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
+ if (arg0 != NULL_TREE && operand_equal_p (arg0, arg1, 0))
{
/* fold may (e.g) convert 2*x to x+x. */
- generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
+ generate_bytecode_insns (arg0, target, state);
emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
}
else
{
- generate_bytecode_insns (arg0, target, state);
+ /* ARG0 will be NULL_TREE if we're handling an `OP='
+ expression. In this case the stack already holds the
+ LHS. See the MODIFY_EXPR case. */
+ if (arg0 != NULL_TREE)
+ generate_bytecode_insns (arg0, target, state);
if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
arg1 = convert (int_type_node, arg1);
generate_bytecode_insns (arg1, target, state);
tree src = TREE_OPERAND (exp, 0);
tree src_type = TREE_TYPE (src);
tree dst_type = TREE_TYPE (exp);
- /* Detect the situation of compiling an empty synchronized
- block. A nop should be emitted in order to produce
- verifiable bytecode. */
- if (exp == empty_stmt_node
- && state->last_bc == OPCODE_monitorenter
- && state->labeled_blocks
- && state->labeled_blocks->pc == PENDING_CLEANUP_PC)
- OP1 (OPCODE_nop);
- else
- generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
+ generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
if (target == IGNORE_TARGET || src_type == dst_type)
break;
if (TREE_CODE (dst_type) == POINTER_TYPE)
}
break;
- case CLEANUP_POINT_EXPR:
- {
- struct jcf_block *save_labeled_blocks = state->labeled_blocks;
- int can_complete = CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0));
- generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
- if (target != IGNORE_TARGET)
- abort ();
- while (state->labeled_blocks != save_labeled_blocks)
- {
- struct jcf_block *finished_label = NULL;
- tree return_link;
- tree exception_type = build_pointer_type (throwable_type_node);
- tree exception_decl = build_decl (VAR_DECL, NULL_TREE,
- exception_type);
- struct jcf_block *end_label = get_jcf_label_here (state);
- struct jcf_block *label = state->labeled_blocks;
- struct jcf_handler *handler;
- tree cleanup = label->u.labeled_block;
- state->labeled_blocks = label->next;
- state->num_finalizers--;
- if (can_complete)
- {
- finished_label = gen_jcf_label (state);
- emit_jsr (label, state);
- emit_goto (finished_label, state);
- if (! CAN_COMPLETE_NORMALLY (cleanup))
- can_complete = 0;
- }
- handler = alloc_handler (label->v.start_label, end_label, state);
- handler->type = NULL_TREE;
- localvar_alloc (exception_decl, state);
- NOTE_PUSH (1);
- emit_store (exception_decl, state);
- emit_jsr (label, state);
- emit_load (exception_decl, state);
- RESERVE (1);
- OP1 (OPCODE_athrow);
- NOTE_POP (1);
-
- /* The finally block. */
- return_link = build_decl (VAR_DECL, NULL_TREE,
- return_address_type_node);
- define_jcf_label (label, state);
- NOTE_PUSH (1);
- localvar_alloc (return_link, state);
- emit_store (return_link, state);
- generate_bytecode_insns (cleanup, IGNORE_TARGET, state);
- maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
- localvar_free (return_link, state);
- localvar_free (exception_decl, state);
- if (finished_label != NULL)
- define_jcf_label (finished_label, state);
- }
- }
- break;
-
- case WITH_CLEANUP_EXPR:
- {
- struct jcf_block *label;
- generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
- label = gen_jcf_label (state);
- label->pc = PENDING_CLEANUP_PC;
- label->next = state->labeled_blocks;
- state->labeled_blocks = label;
- state->num_finalizers++;
- label->u.labeled_block = TREE_OPERAND (exp, 2);
- label->v.start_label = get_jcf_label_here (state);
- if (target != IGNORE_TARGET)
- abort ();
- }
- break;
-
case TRY_EXPR:
{
tree try_clause = TREE_OPERAND (exp, 0);
{
tree catch_clause = TREE_OPERAND (clause, 0);
tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
- struct jcf_handler *handler = alloc_handler (start_label, end_label, state);
+ struct jcf_handler *handler = alloc_handler (start_label,
+ end_label, state);
if (exception_decl == NULL_TREE)
handler->type = NULL_TREE;
else
define_jcf_label (finished_label, state);
}
break;
+
case TRY_FINALLY_EXPR:
{
- struct jcf_block *finished_label,
- *finally_label, *start_label, *end_label;
+ struct jcf_block *finished_label = NULL;
+ struct jcf_block *finally_label, *start_label, *end_label;
struct jcf_handler *handler;
tree try_block = TREE_OPERAND (exp, 0);
tree finally = TREE_OPERAND (exp, 1);
finally_label = gen_jcf_label (state);
start_label = get_jcf_label_here (state);
- finally_label->pc = PENDING_CLEANUP_PC;
- finally_label->next = state->labeled_blocks;
- state->labeled_blocks = finally_label;
- state->num_finalizers++;
+ /* If the `finally' clause can complete normally, we emit it
+ as a subroutine and let the other clauses call it via
+ `jsr'. If it can't complete normally, then we simply emit
+ `goto's directly to it. */
+ if (CAN_COMPLETE_NORMALLY (finally))
+ {
+ finally_label->pc = PENDING_CLEANUP_PC;
+ finally_label->next = state->labeled_blocks;
+ state->labeled_blocks = finally_label;
+ state->num_finalizers++;
+ }
generate_bytecode_insns (try_block, target, state);
- if (state->labeled_blocks != finally_label)
- abort();
- state->labeled_blocks = finally_label->next;
+
+ if (CAN_COMPLETE_NORMALLY (finally))
+ {
+ if (state->labeled_blocks != finally_label)
+ abort();
+ state->labeled_blocks = finally_label->next;
+ }
end_label = get_jcf_label_here (state);
if (end_label == start_label)
break;
}
- return_link = build_decl (VAR_DECL, NULL_TREE,
- return_address_type_node);
- finished_label = gen_jcf_label (state);
-
+ if (CAN_COMPLETE_NORMALLY (finally))
+ {
+ return_link = build_decl (VAR_DECL, NULL_TREE,
+ return_address_type_node);
+ finished_label = gen_jcf_label (state);
+ }
if (CAN_COMPLETE_NORMALLY (try_block))
{
- emit_jsr (finally_label, state);
- emit_goto (finished_label, state);
+ if (CAN_COMPLETE_NORMALLY (finally))
+ {
+ emit_jsr (finally_label, state);
+ emit_goto (finished_label, state);
+ }
+ else
+ emit_goto (finally_label, state);
}
- /* Handle exceptions. */
+ /* Handle exceptions. */
exception_type = build_pointer_type (throwable_type_node);
- exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
- localvar_alloc (return_link, state);
+ if (CAN_COMPLETE_NORMALLY (finally))
+ {
+ /* We're going to generate a subroutine, so we'll need to
+ save and restore the exception around the `jsr'. */
+ exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
+ localvar_alloc (return_link, state);
+ }
handler = alloc_handler (start_label, end_label, state);
handler->type = NULL_TREE;
- localvar_alloc (exception_decl, state);
- NOTE_PUSH (1);
- emit_store (exception_decl, state);
- emit_jsr (finally_label, state);
- emit_load (exception_decl, state);
- RESERVE (1);
- OP1 (OPCODE_athrow);
- NOTE_POP (1);
-
- /* The finally block. First save return PC into return_link. */
+ if (CAN_COMPLETE_NORMALLY (finally))
+ {
+ localvar_alloc (exception_decl, state);
+ NOTE_PUSH (1);
+ emit_store (exception_decl, state);
+ emit_jsr (finally_label, state);
+ emit_load (exception_decl, state);
+ RESERVE (1);
+ OP1 (OPCODE_athrow);
+ NOTE_POP (1);
+ }
+ else
+ {
+ /* We're not generating a subroutine. In this case we can
+ simply have the exception handler pop the exception and
+ then fall through to the `finally' block. */
+ NOTE_PUSH (1);
+ emit_pop (1, state);
+ NOTE_POP (1);
+ }
+
+ /* The finally block. If we're generating a subroutine, first
+ save return PC into return_link. Otherwise, just generate
+ the code for the `finally' block. */
define_jcf_label (finally_label, state);
- NOTE_PUSH (1);
- emit_store (return_link, state);
+ if (CAN_COMPLETE_NORMALLY (finally))
+ {
+ NOTE_PUSH (1);
+ emit_store (return_link, state);
+ }
generate_bytecode_insns (finally, IGNORE_TARGET, state);
- maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
- localvar_free (exception_decl, state);
- localvar_free (return_link, state);
- define_jcf_label (finished_label, state);
+ if (CAN_COMPLETE_NORMALLY (finally))
+ {
+ maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
+ maybe_free_localvar (exception_decl, state, 1);
+ maybe_free_localvar (return_link, state, 1);
+ define_jcf_label (finished_label, state);
+ }
}
break;
case THROW_EXPR:
}
}
break;
- case EXC_PTR_EXPR:
+ case JAVA_EXC_OBJ_EXPR:
NOTE_PUSH (1); /* Pushed by exception system. */
break;
+ case MIN_EXPR:
+ case MAX_EXPR:
+ {
+ /* This copes with cases where fold() has created MIN or MAX
+ from a conditional expression. */
+ enum tree_code code = TREE_CODE (exp) == MIN_EXPR ? LT_EXPR : GT_EXPR;
+ tree op0 = TREE_OPERAND (exp, 0);
+ tree op1 = TREE_OPERAND (exp, 1);
+ tree x;
+ if (TREE_SIDE_EFFECTS (op0) || TREE_SIDE_EFFECTS (op1))
+ abort ();
+ x = build (COND_EXPR, TREE_TYPE (exp),
+ build (code, boolean_type_node, op0, op1),
+ op0, op1);
+ generate_bytecode_insns (x, target, state);
+ break;
+ }
case NEW_CLASS_EXPR:
{
tree class = TREE_TYPE (TREE_TYPE (exp));
}
static void
-perform_relocations (state)
- struct jcf_partial *state;
+perform_relocations (struct jcf_partial *state)
{
struct jcf_block *block;
struct jcf_relocation *reloc;
shrink += 3;
}
+ /* Optimize GOTO L; ... L: GOTO X by changing the first goto to
+ jump directly to X. We're careful here to avoid an infinite
+ loop if the `goto's themselves form one. We do this
+ optimization because we can generate a goto-to-goto for some
+ try/finally blocks. */
+ while (reloc != NULL
+ && reloc->kind == OPCODE_goto_w
+ && reloc->label != block
+ && reloc->label->v.chunk->data != NULL
+ && reloc->label->v.chunk->data[0] == OPCODE_goto)
+ {
+ /* Find the reloc for the first instruction of the
+ destination block. */
+ struct jcf_relocation *first_reloc;
+ for (first_reloc = reloc->label->u.relocations;
+ first_reloc;
+ first_reloc = first_reloc->next)
+ {
+ if (first_reloc->offset == 1
+ && first_reloc->kind == OPCODE_goto_w)
+ {
+ reloc->label = first_reloc->label;
+ break;
+ }
+ }
+
+ /* If we didn't do anything, exit the loop. */
+ if (first_reloc == NULL)
+ break;
+ }
+
for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
{
if (reloc->kind == SWITCH_ALIGN_RELOC)
unsigned char *old_ptr = old_buffer + old_size;
if (new_size != old_size)
{
- chunk->data = (unsigned char *)
- obstack_alloc (state->chunk_obstack, new_size);
+ chunk->data = obstack_alloc (state->chunk_obstack, new_size);
chunk->size = new_size;
}
new_ptr = chunk->data + new_size;
/* new_ptr and old_ptr point into the old and new buffers,
respectively. (If no relocations cause the buffer to
grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
- The bytes at higher adress have been copied and relocations
+ The bytes at higher address have been copied and relocations
handled; those at lower addresses remain to process. */
/* Lower old index of piece to be copied with no relocation.
}
static void
-init_jcf_state (state, work)
- struct jcf_partial *state;
- struct obstack *work;
+init_jcf_state (struct jcf_partial *state, struct obstack *work)
{
state->chunk_obstack = work;
state->first = state->chunk = NULL;
}
static void
-init_jcf_method (state, method)
- struct jcf_partial *state;
- tree method;
+init_jcf_method (struct jcf_partial *state, tree method)
{
state->current_method = method;
state->blocks = state->last_block = NULL;
}
static void
-release_jcf_state (state)
- struct jcf_partial *state;
+release_jcf_state (struct jcf_partial *state)
{
CPOOL_FINISH (&state->cpool);
obstack_free (state->chunk_obstack, state->first);
in the .class file representation. The list can be written to a
.class file using write_chunks. Allocate chunks from obstack WORK. */
+static GTY(()) tree SourceFile_node;
static struct chunk *
-generate_classfile (clas, state)
- tree clas;
- struct jcf_partial *state;
+generate_classfile (tree clas, struct jcf_partial *state)
{
struct chunk *cpool_chunk;
const char *source_file, *s;
int fields_count = 0;
char *methods_count_ptr;
int methods_count = 0;
- static tree SourceFile_node = NULL_TREE;
tree part;
int total_supers
= clas == object_type_node ? 0
append_chunk (NULL, 0, state);
cpool_chunk = state->chunk;
- /* Next allocate the chunk containing acces_flags through fields_counr. */
+ /* Next allocate the chunk containing access_flags through fields_count. */
if (clas == object_type_node)
i = 10;
else
build_java_signature (TREE_TYPE (part)));
PUT2(i);
have_value = DECL_INITIAL (part) != NULL_TREE
- && FIELD_STATIC (part) && CONSTANT_VALUE_P (DECL_INITIAL (part));
+ && FIELD_STATIC (part) && CONSTANT_VALUE_P (DECL_INITIAL (part))
+ && FIELD_FINAL (part)
+ && (JPRIMITIVE_TYPE_P (TREE_TYPE (part))
+ || TREE_TYPE (part) == string_ptr_type_node);
if (have_value)
attr_count++;
- if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part))
+ if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
+ || FIELD_SYNTHETIC (part))
attr_count++;
+ if (FIELD_DEPRECATED (part))
+ attr_count++;
PUT2 (attr_count); /* attributes_count */
if (have_value)
{
tree init = DECL_INITIAL (part);
static tree ConstantValue_node = NULL_TREE;
+ if (TREE_TYPE (part) != TREE_TYPE (init))
+ fatal_error ("field initializer type mismatch");
ptr = append_chunk (NULL, 8, state);
if (ConstantValue_node == NULL_TREE)
ConstantValue_node = get_identifier ("ConstantValue");
PUT4 (2); /* attribute_length */
i = find_constant_index (init, state); PUT2 (i);
}
- /* Emit the "Synthetic" attribute for val$<x> and this$<n> fields. */
- if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part))
+ /* Emit the "Synthetic" attribute for val$<x> and this$<n>
+ fields and other fields which need it. */
+ if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
+ || FIELD_SYNTHETIC (part))
ptr = append_synthetic_attribute (state);
+ if (FIELD_DEPRECATED (part))
+ append_deprecated_attribute (state);
fields_count++;
}
ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
tree type = TREE_TYPE (part);
tree save_function = current_function_decl;
int synthetic_p = 0;
+
+ /* Invisible Miranda methods shouldn't end up in the .class
+ file. */
+ if (METHOD_INVISIBLE (part))
+ continue;
+
current_function_decl = part;
ptr = append_chunk (NULL, 8, state);
i = get_access_flags (part); PUT2 (i);
i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
/* Make room for the Synthetic attribute (of zero length.) */
- if (DECL_FINIT_P (part)
+ if (DECL_FINIT_P (part)
+ || DECL_INSTINIT_P (part)
|| OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
|| TYPE_DOT_CLASS (clas) == part)
{
i++;
synthetic_p = 1;
}
+ /* Make room for Deprecated attribute. */
+ if (METHOD_DEPRECATED (part))
+ i++;
PUT2 (i); /* attributes_count */
get_jcf_label_here (state); /* Force a first block. */
for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
localvar_alloc (t, state);
+ state->num_jsrs = 0;
generate_bytecode_insns (body, IGNORE_TARGET, state);
if (CAN_COMPLETE_NORMALLY (body))
{
OP1 (OPCODE_return);
}
for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
- localvar_free (t, state);
+ maybe_free_localvar (t, state, 1);
if (state->return_value_decl != NULL_TREE)
- localvar_free (state->return_value_decl, state);
+ maybe_free_localvar (state->return_value_decl, state, 1);
finish_jcf_block (state);
perform_relocations (state);
PUT2 (i);
}
}
+
+ if (METHOD_DEPRECATED (part))
+ append_deprecated_attribute (state);
+
methods_count++;
current_function_decl = save_function;
}
i++;
if (clas == object_type_node)
i++;
+ if (CLASS_DEPRECATED (TYPE_NAME (clas)))
+ i++;
+
PUT2 (i); /* attributes_count */
/* generate the SourceFile attribute. */
if (SourceFile_node == NULL_TREE)
{
SourceFile_node = get_identifier ("SourceFile");
- ggc_add_tree_root (&SourceFile_node, 1);
}
i = find_utf8_constant (&state->cpool, SourceFile_node);
PUT2 (i);
append_gcj_attribute (state, clas);
append_innerclasses_attribute (state, clas);
+ if (CLASS_DEPRECATED (TYPE_NAME (clas)))
+ append_deprecated_attribute (state);
/* New finally generate the contents of the constant pool chunk. */
i = count_constant_pool_bytes (&state->cpool);
return state->first;
}
+static GTY(()) tree Synthetic_node;
static unsigned char *
-append_synthetic_attribute (state)
- struct jcf_partial *state;
+append_synthetic_attribute (struct jcf_partial *state)
{
- static tree Synthetic_node = NULL_TREE;
unsigned char *ptr = append_chunk (NULL, 6, state);
int i;
if (Synthetic_node == NULL_TREE)
{
Synthetic_node = get_identifier ("Synthetic");
- ggc_add_tree_root (&Synthetic_node, 1);
}
i = find_utf8_constant (&state->cpool, Synthetic_node);
PUT2 (i); /* Attribute string index */
}
static void
-append_gcj_attribute (state, class)
- struct jcf_partial *state;
- tree class;
+append_deprecated_attribute (struct jcf_partial *state)
+{
+ unsigned char *ptr = append_chunk (NULL, 6, state);
+ int i;
+
+ i = find_utf8_constant (&state->cpool, get_identifier ("Deprecated"));
+ PUT2 (i); /* Attribute string index */
+ PUT4 (0); /* Attribute length */
+}
+
+static void
+append_gcj_attribute (struct jcf_partial *state, tree class)
{
unsigned char *ptr;
int i;
PUT4 (0); /* Attribute length */
}
+static tree InnerClasses_node;
static void
-append_innerclasses_attribute (state, class)
- struct jcf_partial *state;
- tree class;
+append_innerclasses_attribute (struct jcf_partial *state, tree class)
{
- static tree InnerClasses_node = NULL_TREE;
tree orig_decl = TYPE_NAME (class);
tree current, decl;
int length = 0, i;
if (InnerClasses_node == NULL_TREE)
{
InnerClasses_node = get_identifier ("InnerClasses");
- ggc_add_tree_root (&InnerClasses_node, 1);
}
i = find_utf8_constant (&state->cpool, InnerClasses_node);
PUT2 (i);
}
static void
-append_innerclasses_attribute_entry (state, decl, name)
- struct jcf_partial *state;
- tree decl, name;
+append_innerclasses_attribute_entry (struct jcf_partial *state,
+ tree decl, tree name)
{
int icii, icaf;
int ocii = 0, ini = 0;
}
static char *
-make_class_file_name (clas)
- tree clas;
+make_class_file_name (tree clas)
{
const char *dname, *cname, *slash;
char *r;
struct stat sb;
+ char sep;
cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
"", '.', DIR_SEPARATOR,
char *t;
dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
slash = strrchr (dname, DIR_SEPARATOR);
+#ifdef DIR_SEPARATOR_2
if (! slash)
- {
- dname = ".";
- slash = dname + 1;
- }
+ slash = strrchr (dname, DIR_SEPARATOR_2);
+#endif
+ if (! slash)
+ {
+ dname = ".";
+ slash = dname + 1;
+ sep = DIR_SEPARATOR;
+ }
+ else
+ sep = *slash;
+
t = strrchr (cname, DIR_SEPARATOR);
if (t)
cname = t + 1;
}
else
{
+ char *s;
+
dname = jcf_write_base_directory;
+
+ s = strrchr (dname, DIR_SEPARATOR);
+#ifdef DIR_SEPARATOR_2
+ if (! s)
+ s = strrchr (dname, DIR_SEPARATOR_2);
+#endif
+ if (s)
+ sep = *s;
+ else
+ sep = DIR_SEPARATOR;
+
slash = dname + strlen (dname);
}
r = xmalloc (slash - dname + strlen (cname) + 2);
strncpy (r, dname, slash - dname);
- r[slash - dname] = DIR_SEPARATOR;
+ r[slash - dname] = sep;
strcpy (&r[slash - dname + 1], cname);
/* We try to make new directories when we need them. We only do
dname = r + (slash - dname) + 1;
while (1)
{
- char *s = strchr (dname, DIR_SEPARATOR);
+ char *s = strchr (dname, sep);
if (s == NULL)
break;
*s = '\0';
if (stat (r, &sb) == -1
/* Try to make it. */
&& mkdir (r, 0755) == -1)
- fatal_io_error ("can't create directory %s", r);
+ fatal_error ("can't create directory %s: %m", r);
- *s = DIR_SEPARATOR;
+ *s = sep;
/* Skip consecutive separators. */
- for (dname = s + 1; *dname && *dname == DIR_SEPARATOR; ++dname)
+ for (dname = s + 1; *dname && *dname == sep; ++dname)
;
}
return r;
}
-/* Write out the contens of a class (RECORD_TYPE) CLAS, as a .class file.
+/* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
The output .class file name is make_class_file_name(CLAS). */
void
-write_classfile (clas)
- tree clas;
+write_classfile (tree clas)
{
struct obstack *work = &temporary_obstack;
struct jcf_partial state[1];
if (class_file_name != NULL)
{
- FILE *stream = fopen (class_file_name, "wb");
+ FILE *stream;
+ char *temporary_file_name;
+
+ /* The .class file is initially written to a ".tmp" file so that
+ if multiple instances of the compiler are running at once
+ they do not see partially formed class files. */
+ temporary_file_name = concat (class_file_name, ".tmp", NULL);
+ stream = fopen (temporary_file_name, "wb");
if (stream == NULL)
- fatal_io_error ("can't to open %s", class_file_name);
+ fatal_error ("can't open %s for writing: %m", temporary_file_name);
jcf_dependency_add_target (class_file_name);
init_jcf_state (state, work);
chunks = generate_classfile (clas, state);
write_chunks (stream, chunks);
if (fclose (stream))
- fatal_io_error ("can't close %s", class_file_name);
+ fatal_error ("error closing %s: %m", temporary_file_name);
+
+ /* If a file named by the string pointed to by `new' exists
+ prior to the call to the `rename' function, the behaviour
+ is implementation-defined. ISO 9899-1990 7.9.4.2.
+
+ For example, on Win32 with MSVCRT, it is an error. */
+
+ unlink (class_file_name);
+
+ if (rename (temporary_file_name, class_file_name) == -1)
+ {
+ remove (temporary_file_name);
+ fatal_error ("can't create %s: %m", class_file_name);
+ }
+ free (temporary_file_name);
free (class_file_name);
}
release_jcf_state (state);
string concatenation
synchronized statement
*/
+
+#include "gt-java-jcf-write.h"