1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
27 #include "coretypes.h"
32 #include "java-tree.h"
36 #include "java-opcodes.h"
37 #include "parse.h" /* for BLOCK_EXPR_BODY */
/* Path separator used when composing `.class' output file names.
   NOTE(review): the embedded original numbering is non-contiguous
   throughout this listing -- lines appear to have been dropped.  */
44 #define DIR_SEPARATOR '/'
/* Scratch obstack defined elsewhere; presumably used for short-lived
   allocations -- confirm against the defining translation unit.  */
47 extern struct obstack temporary_obstack;
49 /* Base directory in which `.class' files should be written.
50 NULL means to put the file into the same directory as the
51 corresponding .java file. */
52 char *jcf_write_base_directory = NULL;
54 /* Make sure bytecode.data is big enough for at least N more bytes. */
/* NOTE(review): the `#define RESERVE(N)' head for the following macro
   body is missing from this listing.  */
57 do { CHECK_OP(state); \
58 if (state->bytecode.ptr + (N) > state->bytecode.limit) \
59 buffer_grow (&state->bytecode, N); } while (0)
61 /* Add a 1-byte instruction/operand I to bytecode.data,
62 assuming space has already been RESERVE'd. */
64 #define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
66 /* Like OP1, but I is a 2-byte big endian integer. */
/* NOTE(review): the `#define OP2(I)' head is missing; the body emits
   the high byte first, i.e. big-endian order as JVM class files require.  */
69 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
71 /* Like OP1, but I is a 4-byte big endian integer. */
/* NOTE(review): the `#define OP4(I)' head is missing from this listing.  */
74 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
75 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
77 /* Macro to call each time we push I words on the JVM stack. */
79 #define NOTE_PUSH(I) \
80 do { state->code_SP += (I); \
81 if (state->code_SP > state->code_SP_max) \
82 state->code_SP_max = state->code_SP; } while (0)
84 /* Macro to call each time we pop I words from the JVM stack. */
/* NOTE(review): the `#define NOTE_POP(I)' head is missing; the body
   aborts on stack underflow.  */
87 do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
89 /* A chunk or segment of a .class file. */
/* NOTE(review): the `struct chunk' declaration and its field
   declarations (next, data, size) are missing from this listing;
   only the field comments remain.  */
93 /* The next segment of this .class file. */
96 /* The actual data in this segment to be written to the .class file. */
99 /* The size of the segment to be written to the .class file. */
/* Sentinel `pc' values for jcf_blocks that are labels rather than
   defined code blocks (see the jcf_block comments below).  */
103 #define PENDING_CLEANUP_PC (-3)
104 #define PENDING_EXIT_PC (-2)
105 #define UNDEFINED_PC (-1)
107 /* Each "block" represents a label plus the bytecode instructions following.
108 There may be branches out of the block, but no incoming jumps, except
109 to the beginning of the block.
111 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
112 associated code yet), but it is an undefined label.
/* NOTE(review): the `struct jcf_block' declaration line and several
   field declarations (pc, linenumber, the u/v unions) are missing from
   this listing; only their comments and a few fields remain.  */
117 /* For blocks that are defined, the next block (in pc order).
118 For blocks that are not-yet-defined the end label of a LABELED_BLOCK_EXPR
119 or a cleanup expression (from a TRY_FINALLY_EXPR),
120 this is the next (outer) such end label, in a stack headed by
121 labeled_blocks in jcf_partial. */
122 struct jcf_block *next;
124 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
125 pc is PENDING_EXIT_PC.
126 In the not-yet-defined end label for pending cleanup subroutine,
127 pc is PENDING_CLEANUP_PC.
128 For other not-yet-defined labels, pc is UNDEFINED_PC.
130 If the label has been defined:
131 Until perform_relocations is finished, this is the maximum possible
132 value of the bytecode offset at the beginning of this block.
133 After perform_relocations, it is the actual offset (pc). */
138 /* After finish_jcf_block is called, the actual instructions
139 contained in this block. Before that NULL, and the instructions
140 are in state->bytecode. */
144 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
145 covered by the cleanup. */
146 struct jcf_block *start_label;
150 /* Set of relocations (in reverse offset order) for this block. */
151 struct jcf_relocation *relocations;
153 /* If this block is that of the not-yet-defined end label of
154 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
155 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
160 /* A "relocation" type for the 0-3 bytes of padding at the start
161 of a tableswitch or a lookupswitch. */
162 #define SWITCH_ALIGN_RELOC 4
164 /* A relocation type for the labels in a tableswitch or a lookupswitch;
165 these are relative to the start of the instruction, but (due to
166 the 0-3 bytes of padding), we don't know the offset before relocation. */
167 #define BLOCK_START_RELOC 1
169 struct jcf_relocation
/* NOTE(review): the opening brace of the struct appears to be missing
   from this listing.  */
171 /* Next relocation for the current jcf_block. */
172 struct jcf_relocation *next;
174 /* The (byte) offset within the current block that needs to be relocated. */
175 HOST_WIDE_INT offset;
177 /* 0 if offset is a 4-byte relative offset.
178 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
179 for proper alignment in tableswitch/lookupswitch instructions.
180 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
181 to the start of the containing block.
182 -1 if offset is a 2-byte relative offset.
183 < -1 if offset is the address of an instruction with a 2-byte offset
184 that does not have a corresponding 4-byte offset version, in which
185 case the absolute value of kind is the inverted opcode.
186 > 4 if offset is the address of an instruction (such as jsr) with a
187 2-byte offset that does have a corresponding 4-byte offset version,
188 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
/* NOTE(review): the `int kind;' field declaration described above is
   missing from this listing.  */
191 /* The label the relocation wants to actually transfer to. */
192 struct jcf_block *label;
/* Convenience constants used when emitting relocations.  */
195 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
196 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
198 /* State for single catch clause. */
/* NOTE(review): the `struct jcf_handler' declaration line is missing
   from this listing; the fields below belong to it.  */
202 struct jcf_handler *next;
/* The bytecode range (start_label, end_label) protected by this
   handler, and the label of the handler code itself.  */
204 struct jcf_block *start_label;
205 struct jcf_block *end_label;
206 struct jcf_block *handler_label;
208 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
212 /* State for the current switch statement. */
214 struct jcf_switch_state
216 struct jcf_switch_state *prev;
217 struct jcf_block *default_label;
/* Chain of case relocations, plus the observed case-value range.  */
219 struct jcf_relocation *cases;
221 HOST_WIDE_INT min_case, max_case;
224 /* This structure is used to contain the various pieces that will
225 become a .class file. */
/* NOTE(review): the `struct jcf_partial' declaration line and several
   fields (first, chunk, cpool, current_method, code_length, code_SP,
   code_SP_max, num_handlers, num_finalizers) are missing from this
   listing; only some comments and fields remain.  */
231 struct obstack *chunk_obstack;
234 /* List of basic blocks for the current method. */
235 struct jcf_block *blocks;
236 struct jcf_block *last_block;
/* Chain of local-variable debug records for the current method.  */
238 struct localvar_info *first_lvar;
239 struct localvar_info *last_lvar;
244 int linenumber_count;
246 /* Until perform_relocations, this is an upper bound on the number
247 of bytes (so far) in the instructions for the current method. */
250 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
251 struct jcf_block *labeled_blocks;
253 /* The current stack size (stack pointer) in the current method. */
256 /* The largest extent of stack size (stack pointer) in the current method. */
259 /* Contains a mapping from local var slot number to localvar_info. */
260 struct buffer localvars;
262 /* The buffer allocated for bytecode for the current jcf_block. */
263 struct buffer bytecode;
265 /* Chain of exception handlers for the current method. */
266 struct jcf_handler *handlers;
268 /* Last element in handlers chain. */
269 struct jcf_handler *last_handler;
271 /* Number of exception handlers for the current method. */
274 /* Number of finalizers we are currently nested within. */
277 /* If non-NULL, use this for the return value. */
278 tree return_value_decl;
280 /* Information about the current switch statement. */
281 struct jcf_switch_state *sw_state;
/* Forward declarations for the static helpers in this file.
   Fix: `push_long_const' and `emit_reloc' were each declared twice;
   the redundant duplicate declarations have been removed.  */
284 static void generate_bytecode_insns (tree, int, struct jcf_partial *);
285 static struct chunk * alloc_chunk (struct chunk *, unsigned char *,
286 int, struct obstack *);
287 static unsigned char * append_chunk (unsigned char *, int,
288 struct jcf_partial *);
289 static void append_chunk_copy (unsigned char *, int, struct jcf_partial *);
290 static struct jcf_block * gen_jcf_label (struct jcf_partial *);
291 static void finish_jcf_block (struct jcf_partial *);
292 static void define_jcf_label (struct jcf_block *, struct jcf_partial *);
293 static struct jcf_block * get_jcf_label_here (struct jcf_partial *);
294 static void put_linenumber (int, struct jcf_partial *);
295 static void localvar_alloc (tree, struct jcf_partial *);
296 static void localvar_free (tree, struct jcf_partial *);
297 static int get_access_flags (tree);
298 static void write_chunks (FILE *, struct chunk *);
299 static int adjust_typed_op (tree, int);
300 static void generate_bytecode_conditional (tree, struct jcf_block *,
301 struct jcf_block *, int,
302 struct jcf_partial *);
303 static void generate_bytecode_return (tree, struct jcf_partial *);
304 static void perform_relocations (struct jcf_partial *);
305 static void init_jcf_state (struct jcf_partial *, struct obstack *);
306 static void init_jcf_method (struct jcf_partial *, tree);
307 static void release_jcf_state (struct jcf_partial *);
308 static struct chunk * generate_classfile (tree, struct jcf_partial *);
309 static struct jcf_handler *alloc_handler (struct jcf_block *,
311 struct jcf_partial *);
312 static void emit_iinc (tree, HOST_WIDE_INT, struct jcf_partial *);
313 static void emit_reloc (HOST_WIDE_INT, int, struct jcf_block *,
314 struct jcf_partial *);
315 static void push_constant1 (HOST_WIDE_INT, struct jcf_partial *);
316 static void push_constant2 (HOST_WIDE_INT, struct jcf_partial *);
317 static void push_int_const (HOST_WIDE_INT, struct jcf_partial *);
318 static int find_constant_wide (HOST_WIDE_INT, HOST_WIDE_INT,
319 struct jcf_partial *);
320 static void push_long_const (HOST_WIDE_INT, HOST_WIDE_INT,
321 struct jcf_partial *);
322 static int find_constant_index (tree, struct jcf_partial *);
325 static void field_op (tree, int, struct jcf_partial *);
326 static void maybe_wide (int, int, struct jcf_partial *);
327 static void emit_dup (int, int, struct jcf_partial *);
328 static void emit_pop (int, struct jcf_partial *);
329 static void emit_load_or_store (tree, int, struct jcf_partial *);
330 static void emit_load (tree, struct jcf_partial *);
331 static void emit_store (tree, struct jcf_partial *);
332 static void emit_unop (enum java_opcode, tree, struct jcf_partial *);
333 static void emit_binop (enum java_opcode, tree, struct jcf_partial *);
336 static void emit_switch_reloc (struct jcf_block *, struct jcf_partial *);
337 static void emit_case_reloc (struct jcf_relocation *, struct jcf_partial *);
338 static void emit_if (struct jcf_block *, int, int, struct jcf_partial *);
339 static void emit_goto (struct jcf_block *, struct jcf_partial *);
340 static void emit_jsr (struct jcf_block *, struct jcf_partial *);
341 static void call_cleanups (struct jcf_block *, struct jcf_partial *);
342 static char *make_class_file_name (tree);
343 static unsigned char *append_synthetic_attribute (struct jcf_partial *);
344 static void append_innerclasses_attribute (struct jcf_partial *, tree);
345 static void append_innerclasses_attribute_entry (struct jcf_partial *, tree, tree);
346 static void append_gcj_attribute (struct jcf_partial *, tree);
348 /* Utility macros for appending (big-endian) data to a buffer.
349 We assume a local variable 'ptr' points into where we want to
350 write next, and we assume enough space has been allocated. */
352 #ifdef ENABLE_JC1_CHECKING
353 static int CHECK_PUT (void *, struct jcf_partial *, int);
/* Bounds-check a pending write of I bytes at PTR against the current
   chunk.  NOTE(review): the function's return type line, opening brace,
   failure branch, and closing brace, plus the `#else'/`#endif' lines,
   are missing from this listing.  */
356 CHECK_PUT (void *ptr, struct jcf_partial *state, int i)
358 if ((unsigned char *) ptr < state->chunk->data
359 || (unsigned char *) ptr + i > state->chunk->data + state->chunk->size)
/* When checking is disabled, CHECK_PUT is a no-op.  */
365 #define CHECK_PUT(PTR, STATE, I) ((void)0)
/* Checked big-endian writers; each advances `ptr'.  */
368 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
369 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
370 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
371 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
373 /* There are some cases below where CHECK_PUT is guaranteed to fail.
374 Use the following macros in those specific cases. */
375 #define UNSAFE_PUT1(X) (*ptr++ = (X))
376 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
377 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
378 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
381 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
382 Set the data and size fields to DATA and SIZE, respectively.
383 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
385 static struct chunk *
386 alloc_chunk (struct chunk *last, unsigned char *data,
387 int size, struct obstack *work)
389 struct chunk *chunk = (struct chunk *)
390 obstack_alloc (work, sizeof(struct chunk));
392 if (data == NULL && size > 0)
393 data = obstack_alloc (work, size);
/* NOTE(review): the remainder of alloc_chunk (field assignments,
   linking after LAST, and the return statement) is missing from this
   listing.  */
403 #ifdef ENABLE_JC1_CHECKING
404 static int CHECK_OP (struct jcf_partial *);
/* Sanity-check that the bytecode buffer pointer has not run past its
   limit.  NOTE(review): the function's return type line, braces, and
   failure/return statements, plus the `#else'/`#endif' lines, are
   missing from this listing.  */
407 CHECK_OP (struct jcf_partial *state)
409 if (state->bytecode.ptr > state->bytecode.limit)
/* When checking is disabled, CHECK_OP is a no-op.  */
415 #define CHECK_OP(STATE) ((void) 0)
/* Append a new chunk of SIZE bytes (using DATA, or freshly allocated
   if DATA is NULL) to the state's chunk list; return its data pointer.  */
418 static unsigned char *
419 append_chunk (unsigned char *data, int size, struct jcf_partial *state)
421 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
422 if (state->first == NULL)
423 state->first = state->chunk;
424 return state->chunk->data;
/* Append a fresh chunk and copy SIZE bytes from DATA into it.
   NOTE(review): the return type line and braces are missing from this
   listing.  */
428 append_chunk_copy (unsigned char *data, int size, struct jcf_partial *state)
430 unsigned char *ptr = append_chunk (NULL, size, state);
431 memcpy (ptr, data, size);
/* Allocate a new, as-yet-undefined label (pc == UNDEFINED_PC).  */
434 static struct jcf_block *
435 gen_jcf_label (struct jcf_partial *state)
437 struct jcf_block *block = (struct jcf_block *)
438 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
440 block->linenumber = -1;
441 block->pc = UNDEFINED_PC;
/* Close out the current block: move its bytecode from state->bytecode
   into a chunk, and bump state->code_length by a worst-case estimate of
   the block's final size (relocations may still grow instructions).
   NOTE(review): the return type line and braces are missing from this
   listing.  */
446 finish_jcf_block (struct jcf_partial *state)
448 struct jcf_block *block = state->last_block;
449 struct jcf_relocation *reloc;
450 int code_length = BUFFER_LENGTH (&state->bytecode);
451 int pc = state->code_length;
452 append_chunk_copy (state->bytecode.data, code_length, state);
453 BUFFER_RESET (&state->bytecode);
454 block->v.chunk = state->chunk;
456 /* Calculate code_length to the maximum value it can have. */
457 pc += block->v.chunk->size;
458 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
460 int kind = reloc->kind;
461 if (kind == SWITCH_ALIGN_RELOC)
463 else if (kind > BLOCK_START_RELOC)
464 pc += 2; /* 2-byte offset may grow to 4-byte offset */
466 pc += 5; /* May need to add a goto_w. */
468 state->code_length = pc;
/* Define LABEL at the current position: finish the previous block and
   append LABEL to the block list.  */
472 define_jcf_label (struct jcf_block *label, struct jcf_partial *state)
474 if (state->last_block != NULL)
475 finish_jcf_block (state);
476 label->pc = state->code_length;
477 if (state->blocks == NULL)
478 state->blocks = label;
480 state->last_block->next = label;
481 state->last_block = label;
483 label->u.relocations = NULL;
/* Return a label for the current position, reusing the last block if no
   bytecode has been emitted since it was defined.  */
486 static struct jcf_block *
487 get_jcf_label_here (struct jcf_partial *state)
489 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
490 return state->last_block;
493 struct jcf_block *label = gen_jcf_label (state);
494 define_jcf_label (label, state);
499 /* Note a line number entry for the current PC and given LINE. */
/* NOTE(review): the return type line and braces are missing from this
   listing.  If the current label already has a line number, a fresh
   label is created so each label carries at most one.  */
502 put_linenumber (int line, struct jcf_partial *state)
504 struct jcf_block *label = get_jcf_label_here (state);
505 if (label->linenumber > 0)
507 label = gen_jcf_label (state);
508 define_jcf_label (label, state);
510 label->linenumber = line;
511 state->linenumber_count++;
514 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
515 in the range (START_LABEL, END_LABEL). */
517 static struct jcf_handler *
518 alloc_handler (struct jcf_block *start_label, struct jcf_block *end_label,
519 struct jcf_partial *state)
521 struct jcf_handler *handler = (struct jcf_handler *)
522 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
523 handler->start_label = start_label;
524 handler->end_label = end_label;
/* The handler's code starts at the current position.  */
525 handler->handler_label = get_jcf_label_here (state);
526 if (state->handlers == NULL)
527 state->handlers = handler;
529 state->last_handler->next = handler;
530 state->last_handler = handler;
531 handler->next = NULL;
532 state->num_handlers++;
537 /* The index of jvm local variable allocated for this DECL.
538 This is assigned when generating .class files;
539 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
540 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
542 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
/* NOTE(review): the `struct localvar_info' declaration line and some
   fields (e.g. decl) are missing from this listing.  */
546 struct localvar_info *next;
/* Live range of the variable, for debug info.  */
549 struct jcf_block *start_label;
550 struct jcf_block *end_label;
/* View state->localvars as an array of localvar_info pointers indexed
   by JVM local slot number.  */
553 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
554 #define localvar_max \
555 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
/* Allocate a JVM local slot (two adjacent slots for wide types) for
   DECL, recording its start label and optional debug info.
   NOTE(review): the return type line, braces, the `index' declaration,
   and parts of the slot-search loop are missing from this listing.  */
558 localvar_alloc (tree decl, struct jcf_partial *state)
560 struct jcf_block *start_label = get_jcf_label_here (state);
561 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
563 register struct localvar_info *info;
564 register struct localvar_info **ptr = localvar_buffer;
565 register struct localvar_info **limit
566 = (struct localvar_info**) state->localvars.ptr;
/* Search for a free slot (a free adjacent pair when wide).  */
567 for (index = 0; ptr < limit; index++, ptr++)
570 && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
575 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
576 ptr = (struct localvar_info**) state->localvars.data + index;
577 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
579 info = (struct localvar_info *)
580 obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
/* Mark the second slot of a wide variable as occupied with ~0.  */
583 ptr[1] = (struct localvar_info *)(~0);
584 DECL_LOCAL_INDEX (decl) = index;
586 info->start_label = start_label;
588 if (debug_info_level > DINFO_LEVEL_TERSE
589 && DECL_NAME (decl) != NULL_TREE)
591 /* Generate debugging info. */
593 if (state->last_lvar != NULL)
594 state->last_lvar->next = info;
596 state->first_lvar = info;
597 state->last_lvar = info;
/* Release the JVM local slot(s) held by DECL, recording the end label
   of its live range.  NOTE(review): the return type line, braces, and
   the consistency-check abort branches are missing from this listing.  */
603 localvar_free (tree decl, struct jcf_partial *state)
605 struct jcf_block *end_label = get_jcf_label_here (state);
606 int index = DECL_LOCAL_INDEX (decl);
607 register struct localvar_info **ptr = &localvar_buffer [index];
608 register struct localvar_info *info = *ptr;
609 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
611 info->end_label = end_label;
613 if (info->decl != decl)
618 if (ptr[1] != (struct localvar_info *)(~0))
/* Code-generation targets: leave the value on the stack, or discard it.  */
625 #define STACK_TARGET 1
626 #define IGNORE_TARGET 2
628 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
629 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
/* NOTE(review): the return type line, braces, the `flags' declaration,
   and several `flags |= ...' lines are missing from this listing.  */
632 get_access_flags (tree decl)
635 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
636 if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
638 if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */
640 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
642 if (TREE_PROTECTED (decl))
643 flags |= ACC_PROTECTED;
644 if (TREE_PRIVATE (decl))
645 flags |= ACC_PRIVATE;
647 else if (TREE_CODE (decl) == TYPE_DECL)
649 if (CLASS_SUPER (decl))
651 if (CLASS_ABSTRACT (decl))
652 flags |= ACC_ABSTRACT;
653 if (CLASS_INTERFACE (decl))
654 flags |= ACC_INTERFACE;
655 if (CLASS_STATIC (decl))
657 if (CLASS_PRIVATE (decl))
658 flags |= ACC_PRIVATE;
659 if (CLASS_PROTECTED (decl))
660 flags |= ACC_PROTECTED;
/* Anonymous and local classes are not visible outside their scope, so
   encode them as private.  */
661 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
662 || LOCAL_CLASS_P (TREE_TYPE (decl)))
663 flags |= ACC_PRIVATE;
664 if (CLASS_STRICTFP (decl))
670 if (TREE_CODE (decl) == FUNCTION_DECL)
672 if (METHOD_NATIVE (decl))
674 if (METHOD_STATIC (decl))
676 if (METHOD_SYNCHRONIZED (decl))
677 flags |= ACC_SYNCHRONIZED;
678 if (METHOD_ABSTRACT (decl))
679 flags |= ACC_ABSTRACT;
680 if (METHOD_STRICTFP (decl))
685 if (FIELD_STATIC (decl))
687 if (FIELD_VOLATILE (decl))
688 flags |= ACC_VOLATILE;
689 if (FIELD_TRANSIENT (decl))
690 flags |= ACC_TRANSIENT;
695 /* Write the list of segments starting at CHUNKS to STREAM. */
/* NOTE(review): the return type line and braces are missing from this
   listing.  Note fwrite's return value is not checked here.  */
698 write_chunks (FILE* stream, struct chunk *chunks)
700 for (; chunks != NULL; chunks = chunks->next)
701 fwrite (chunks->data, chunks->size, 1, stream);
704 /* Push a 1-word constant in the constant pool at the given INDEX.
705 (Caller is responsible for doing NOTE_PUSH.) */
/* NOTE(review): the entire body of push_constant1 (ldc vs. ldc_w
   selection, presumably) is missing from this listing.  */
708 push_constant1 (HOST_WIDE_INT index, struct jcf_partial *state)
723 /* Push a 2-word constant in the constant pool at the given INDEX.
724 (Caller is responsible for doing NOTE_PUSH.) */
/* NOTE(review): the entire body of push_constant2 is missing from this
   listing.  */
727 push_constant2 (HOST_WIDE_INT index, struct jcf_partial *state)
734 /* Push 32-bit integer constant on VM stack.
735 Caller is responsible for doing NOTE_PUSH. */
/* Chooses the shortest encoding: iconst_<n>, bipush, sipush, or a
   constant-pool load.  NOTE(review): the return type line, braces, and
   the bipush/sipush emission lines are missing from this listing.  */
738 push_int_const (HOST_WIDE_INT i, struct jcf_partial *state)
741 if (i >= -1 && i <= 5)
742 OP1(OPCODE_iconst_0 + i);
743 else if (i >= -128 && i < 128)
748 else if (i >= -32768 && i < 32768)
755 i = find_constant1 (&state->cpool, CONSTANT_Integer,
756 (jword)(i & 0xFFFFFFFF));
757 push_constant1 (i, state);
/* Find or allocate a CONSTANT_Long pool entry for the 64-bit value
   (HI,LO).  NOTE(review): the return type line and braces are missing
   from this listing.  */
762 find_constant_wide (HOST_WIDE_INT lo, HOST_WIDE_INT hi,
763 struct jcf_partial *state)
765 HOST_WIDE_INT w1, w2;
/* Shift right by 32 to extract the high word.  */
766 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
767 return find_constant2 (&state->cpool, CONSTANT_Long,
768 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
771 /* Find or allocate a constant pool entry for the given VALUE.
772 Return the index in the constant pool. */
/* NOTE(review): the return type line, braces, the `words' declaration
   in the REAL_CST arm, and the final (unsupported-type) arm are missing
   from this listing.  */
775 find_constant_index (tree value, struct jcf_partial *state)
777 if (TREE_CODE (value) == INTEGER_CST)
779 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
780 return find_constant1 (&state->cpool, CONSTANT_Integer,
781 (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
783 return find_constant_wide (TREE_INT_CST_LOW (value),
784 TREE_INT_CST_HIGH (value), state);
786 else if (TREE_CODE (value) == REAL_CST)
790 real_to_target (words, &TREE_REAL_CST (value),
791 TYPE_MODE (TREE_TYPE (value)));
792 words[0] &= 0xffffffff;
793 words[1] &= 0xffffffff;
795 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
796 return find_constant1 (&state->cpool, CONSTANT_Float, (jword)words[0]);
798 return find_constant2 (&state->cpool, CONSTANT_Double,
799 (jword)words[1-FLOAT_WORDS_BIG_ENDIAN],
800 (jword)words[FLOAT_WORDS_BIG_ENDIAN]);
802 else if (TREE_CODE (value) == STRING_CST)
803 return find_string_constant (&state->cpool, value);
809 /* Push 64-bit long constant on VM stack.
810 Caller is responsible for doing NOTE_PUSH. */
/* Chooses lconst_<n>, an int push followed by (presumably) i2l, or a
   2-word constant-pool load.  NOTE(review): the return type line,
   braces, and the i2l emission lines are missing from this listing.  */
813 push_long_const (HOST_WIDE_INT lo, HOST_WIDE_INT hi, struct jcf_partial *state)
815 HOST_WIDE_INT highpart, dummy;
816 jint lowpart = WORD_TO_INT (lo);
818 rshift_double (lo, hi, 32, 64, &highpart, &dummy, 1);
820 if (highpart == 0 && (lowpart == 0 || lowpart == 1))
823 OP1(OPCODE_lconst_0 + lowpart);
825 else if ((highpart == 0 && lowpart > 0 && lowpart < 32768)
826 || (highpart == -1 && lowpart < 0 && lowpart >= -32768))
828 push_int_const (lowpart, state);
833 push_constant2 (find_constant_wide (lo, hi, state), state);
/* Emit a field access instruction (OPCODE) referencing FIELD via the
   constant pool.  NOTE(review): the return type line, braces, and the
   opcode/index emission lines are missing from this listing.  */
837 field_op (tree field, int opcode, struct jcf_partial *state)
839 int index = find_fieldref_index (&state->cpool, field);
845 /* Returns an integer in the range 0 (for 'int') through 4 (for object
846 reference) to 7 (for 'short') which matches the pattern of how JVM
847 opcodes typically depend on the operand type. */
/* NOTE(review): the return type line, braces, several case labels, and
   the default/return lines are missing from this listing.  */
850 adjust_typed_op (tree type, int max)
852 switch (TREE_CODE (type))
855 case RECORD_TYPE: return 4;
857 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
859 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
861 switch (TYPE_PRECISION (type))
863 case 8: return max < 5 ? 0 : 5;
864 case 16: return max < 7 ? 0 : 7;
870 switch (TYPE_PRECISION (type))
/* Emit OPCODE with local-slot INDEX, presumably using the `wide'
   prefix when INDEX does not fit in one byte.  NOTE(review): the
   entire body of maybe_wide is missing from this listing.  */
883 maybe_wide (int opcode, int index, struct jcf_partial *state)
900 /* Compile code to duplicate with offset, where
901 SIZE is the size of the stack item to duplicate (1 or 2), and
902 OFFSET is where to insert the result (must be 0, 1, or 2).
903 (The new words get inserted at stack[SP-size-offset].) */
/* NOTE(review): the return type line, braces, the `kind' declaration,
   the offset==0 branch head, and the emission/NOTE_PUSH lines are
   missing from this listing.  */
906 emit_dup (int size, int offset, struct jcf_partial *state)
913 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
914 else if (offset == 1)
915 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
916 else if (offset == 2)
917 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
/* Emit pop (size 1) or pop2 (size 2).  NOTE(review): the return type
   line and braces are missing from this listing.  */
925 emit_pop (int size, struct jcf_partial *state)
928 OP1 (OPCODE_pop - 1 + size);
/* Add VALUE to local variable VAR in place.  NOTE(review): the return
   type line, braces, and the iinc/wide-iinc emission lines are missing
   from this listing; the condition below selects the wide form.  */
932 emit_iinc (tree var, HOST_WIDE_INT value, struct jcf_partial *state)
934 int slot = DECL_LOCAL_INDEX (var);
936 if (value < -128 || value > 127 || slot >= 256)
/* Emit a load or store for local variable VAR, selecting the
   type-adjusted and, for slots 0-3, the short-form opcode.
   NOTE(review): the return type line, braces, and the slot<4 test line
   are missing from this listing.  */
954 emit_load_or_store (tree var, /* Variable to load from or store into. */
955 int opcode, /* Either OPCODE_iload or OPCODE_istore. */
956 struct jcf_partial *state)
958 tree type = TREE_TYPE (var);
959 int kind = adjust_typed_op (type, 4);
960 int index = DECL_LOCAL_INDEX (var);
964 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
967 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
/* Load VAR onto the JVM stack, noting 1 or 2 words pushed.  */
971 emit_load (tree var, struct jcf_partial *state)
973 emit_load_or_store (var, OPCODE_iload, state);
974 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
/* Store the top of the JVM stack into VAR, noting 1 or 2 words popped.  */
978 emit_store (tree var, struct jcf_partial *state)
980 emit_load_or_store (var, OPCODE_istore, state);
981 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
/* Emit a unary opcode; TYPE is unused.  NOTE(review): the body is
   missing from this listing.  */
985 emit_unop (enum java_opcode opcode, tree type ATTRIBUTE_UNUSED,
986 struct jcf_partial *state)
/* Emit a binary opcode, popping one operand's worth of words (1 wide
   for 2-word types).  NOTE(review): the emission/NOTE_POP lines are
   missing from this listing.  */
993 emit_binop (enum java_opcode opcode, tree type, struct jcf_partial *state)
995 int size = TYPE_IS_WIDE (type) ? 2 : 1;
/* Record a relocation of the given KIND at the current bytecode offset
   in the current block, targeting label TARGET; VALUE is written as a
   placeholder.  NOTE(review): the return type line, braces, the
   `reloc->kind = kind;' assignment, and the placeholder OP2/OP4
   emission lines are missing from this listing.  */
1002 emit_reloc (HOST_WIDE_INT value, int kind,
1003 struct jcf_block *target, struct jcf_partial *state)
1005 struct jcf_relocation *reloc = (struct jcf_relocation *)
1006 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1007 struct jcf_block *block = state->last_block;
1008 reloc->next = block->u.relocations;
1009 block->u.relocations = reloc;
1010 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1011 reloc->label = target;
1013 if (kind == 0 || kind == BLOCK_START_RELOC)
1015 else if (kind != SWITCH_ALIGN_RELOC)
/* Record a block-start-relative relocation to LABEL (used for
   tableswitch/lookupswitch entries).  */
1020 emit_switch_reloc (struct jcf_block *label, struct jcf_partial *state)
1022 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1025 /* Similar to emit_switch_reloc,
1026 but re-uses an existing case reloc. */
1029 emit_case_reloc (struct jcf_relocation *reloc, struct jcf_partial *state)
1031 struct jcf_block *block = state->last_block;
1032 reloc->next = block->u.relocations;
1033 block->u.relocations = reloc;
1034 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1035 reloc->kind = BLOCK_START_RELOC;
1039 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1040 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
/* NOTE(review): the return type line, braces, and the RESERVE/OP1/OP2
   emission lines are missing from emit_if, emit_goto and emit_jsr
   below; the negative/alternate kinds passed to emit_reloc encode how
   perform_relocations may widen the branch.  */
1043 emit_if (struct jcf_block *target, int opcode, int inv_opcode,
1044 struct jcf_partial *state)
1048 /* value is 1 byte from reloc back to start of instruction. */
1049 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
/* Emit an unconditional goto to TARGET (may become goto_w).  */
1053 emit_goto (struct jcf_block *target, struct jcf_partial *state)
1057 /* Value is 1 byte from reloc back to start of instruction. */
1058 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
/* Emit a jsr to TARGET (may become jsr_w).  */
1062 emit_jsr (struct jcf_block *target, struct jcf_partial *state)
1066 /* Value is 1 byte from reloc back to start of instruction. */
1067 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1070 /* Generate code to evaluate EXP. If the result is true,
1071 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1072 TRUE_BRANCH_FIRST is a code generation hint that the
1073 TRUE_LABEL may follow right after this. (The idea is that we
1074 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
/* NOTE(review): this function is heavily truncated in this listing --
   the return type line, braces, many case labels, break statements,
   and abort() consistency checks are missing.  Only comments have been
   added; the code is untouched.  */
1077 generate_bytecode_conditional (tree exp,
1078 struct jcf_block *true_label,
1079 struct jcf_block *false_label,
1080 int true_branch_first,
1081 struct jcf_partial *state)
1083 tree exp0, exp1, type;
1084 int save_SP = state->code_SP;
1085 enum java_opcode op, negop;
1086 switch (TREE_CODE (exp))
/* Constant condition: jump straight to the appropriate label.  */
1089 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
/* Conditional expression (?:) used as a condition: evaluate each arm
   as its own condition, checking the stack depth stays consistent.  */
1093 struct jcf_block *then_label = gen_jcf_label (state);
1094 struct jcf_block *else_label = gen_jcf_label (state);
1095 int save_SP_before, save_SP_after;
1096 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1097 then_label, else_label, 1, state);
1098 define_jcf_label (then_label, state);
1099 save_SP_before = state->code_SP;
1100 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1101 true_label, false_label, 1, state);
1102 save_SP_after = state->code_SP;
1103 state->code_SP = save_SP_before;
1104 define_jcf_label (else_label, state);
1105 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1106 true_label, false_label,
1107 true_branch_first, state);
1108 if (state->code_SP != save_SP_after)
/* Logical NOT: swap the labels.  */
1112 case TRUTH_NOT_EXPR:
1113 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label,
1114 true_label, ! true_branch_first, state);
/* Short-circuit AND: second operand only if the first was true.  */
1116 case TRUTH_ANDIF_EXPR:
1118 struct jcf_block *next_label = gen_jcf_label (state);
1119 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1120 next_label, false_label, 1, state);
1121 define_jcf_label (next_label, state);
1122 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1123 true_label, false_label, 1, state);
/* Short-circuit OR: second operand only if the first was false.  */
1126 case TRUTH_ORIF_EXPR:
1128 struct jcf_block *next_label = gen_jcf_label (state);
1129 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1130 true_label, next_label, 1, state);
1131 define_jcf_label (next_label, state);
1132 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1133 true_label, false_label, 1, state);
1137 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1138 set it to the corresponding 1-operand if<COND> instructions. */
1142 /* The opcodes with their inverses are allocated in pairs.
1143 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1144 negop = (op & 1) ? op + 1 : op - 1;
1146 if (true_branch_first)
1148 emit_if (false_label, negop, op, state);
1149 emit_goto (true_label, state);
1153 emit_if (true_label, op, negop, state);
1154 emit_goto (false_label, state);
/* Comparison operators: pick the integer-compare opcode, refined by
   operand type below.  */
1158 op = OPCODE_if_icmpeq;
1161 op = OPCODE_if_icmpne;
1164 op = OPCODE_if_icmpgt;
1167 op = OPCODE_if_icmplt;
1170 op = OPCODE_if_icmpge;
1173 op = OPCODE_if_icmple;
1176 exp0 = TREE_OPERAND (exp, 0);
1177 exp1 = TREE_OPERAND (exp, 1);
1178 type = TREE_TYPE (exp0);
1179 switch (TREE_CODE (type))
/* Reference comparisons: use acmp, or ifnull/ifnonnull when one
   operand is the null constant.  */
1182 case POINTER_TYPE: case RECORD_TYPE:
1183 switch (TREE_CODE (exp))
1185 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1186 case NE_EXPR: op = OPCODE_if_acmpne; break;
1189 if (integer_zerop (exp1) || integer_zerop (exp0))
1191 generate_bytecode_insns (integer_zerop (exp0) ? exp1 : exp0,
1192 STACK_TARGET, state);
1193 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1194 negop = (op & 1) ? op - 1 : op + 1;
1198 generate_bytecode_insns (exp0, STACK_TARGET, state);
1199 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* Floating-point comparison: evaluate both operands; the fcmpl/fcmpg
   choice (NaN handling) depends on the comparison direction.  */
1203 generate_bytecode_insns (exp0, STACK_TARGET, state);
1204 generate_bytecode_insns (exp1, STACK_TARGET, state);
1205 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1209 if (TYPE_PRECISION (type) > 32)
/* Integer comparison: long operands need lcmp before the branch.  */
1220 if (TYPE_PRECISION (type) > 32)
1222 generate_bytecode_insns (exp0, STACK_TARGET, state);
1223 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* Comparison against zero can use the 1-operand if<COND> forms.  */
1231 if (integer_zerop (exp1))
1233 generate_bytecode_insns (exp0, STACK_TARGET, state);
1237 if (integer_zerop (exp0))
/* Zero on the left: the comparison direction must be mirrored.  */
1241 case OPCODE_if_icmplt:
1242 case OPCODE_if_icmpge:
1245 case OPCODE_if_icmpgt:
1246 case OPCODE_if_icmple:
1252 generate_bytecode_insns (exp1, STACK_TARGET, state);
1256 generate_bytecode_insns (exp0, STACK_TARGET, state);
1257 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* Default: evaluate EXP as a value and branch on zero/non-zero.  */
1263 generate_bytecode_insns (exp, STACK_TARGET, state);
1265 if (true_branch_first)
1267 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1268 emit_goto (true_label, state);
1272 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1273 emit_goto (false_label, state);
/* Conditions must be stack-neutral overall.  */
1277 if (save_SP != state->code_SP)
1281 /* Call pending cleanups, i.e. those for surrounding TRY_FINALLY_EXPRs,
1282 but only as far out as LIMIT (since we are about to jump to the
1283 label that is LIMIT). */
/* Emit a `jsr' to every pending cleanup (i.e. `finally' subroutine)
   registered on STATE's labeled_blocks chain, walking outward until
   LIMIT is reached.  A LIMIT of NULL walks the entire chain (used on
   return paths -- see generate_bytecode_return).
   NOTE(review): this extract elides some interior lines (braces). */
1286 call_cleanups (struct jcf_block *limit, struct jcf_partial *state)
1288 struct jcf_block *block = state->labeled_blocks;
1289 for (; block != limit; block = block->next)
/* PENDING_CLEANUP_PC marks a block that is a `finally' clause emitted
   as a jsr subroutine (see the TRY_FINALLY_EXPR case below). */
1291 if (block->pc == PENDING_CLEANUP_PC)
1292 emit_jsr (block, state);
/* Emit bytecode to return EXP from the current method.  Runs pending
   `finally' cleanups before the actual *return opcode; when finalizers
   exist, the return value is first saved in a dedicated local so the
   cleanups cannot clobber it.
   NOTE(review): several interior lines (case labels, braces, the final
   return-opcode emission) are elided in this extract. */
1297 generate_bytecode_return (tree exp, struct jcf_partial *state)
1299 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
1300 int returns_void = TREE_CODE (return_type) == VOID_TYPE;
/* Dispatch on the form of the returned expression (non-NULL path;
   the surrounding control flow is elided here). */
1305 switch (TREE_CODE (exp))
/* Presumably the COMPOUND_EXPR case: emit the side effects, then
   recurse/fall through on the final value -- case label elided. */
1308 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1310 exp = TREE_OPERAND (exp, 1);
/* COND_EXPR-shaped return: emit each arm as its own return so no join
   point is needed after the conditional. */
1314 struct jcf_block *then_label = gen_jcf_label (state);
1315 struct jcf_block *else_label = gen_jcf_label (state);
1316 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1317 then_label, else_label, 1, state);
1318 define_jcf_label (then_label, state);
1319 generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1320 define_jcf_label (else_label, state);
1321 generate_bytecode_return (TREE_OPERAND (exp, 2), state);
/* Default: evaluate EXP onto the stack (or discard it entirely for a
   void-returning method). */
1325 generate_bytecode_insns (exp,
1326 returns_void ? IGNORE_TARGET
1327 : STACK_TARGET, state);
/* Void return: run every pending cleanup before returning. */
1333 call_cleanups (NULL, state);
/* Select the correctly-typed return opcode (ireturn/lreturn/freturn/
   dreturn/areturn) from the declared return type. */
1337 op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
/* If any `finally' subroutines are pending, stash the return value in
   a local across the cleanup calls so the jsr bodies cannot smash it. */
1338 if (state->num_finalizers > 0)
1340 if (state->return_value_decl == NULL_TREE)
1342 state->return_value_decl
1343 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1344 localvar_alloc (state->return_value_decl, state);
1346 emit_store (state->return_value_decl, state);
1347 call_cleanups (NULL, state);
1348 emit_load (state->return_value_decl, state);
1349 /* If we call localvar_free (state->return_value_decl, state),
1350 then we risk the save decl erroneously re-used in the
1351 finalizer. Instead, we keep the state->return_value_decl
1352 allocated through the rest of the method. This is not
1353 the greatest solution, but it is at least simple and safe. */
1360 /* Generate bytecode for sub-expression EXP of METHOD.
1361 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
/* NOTE(review): this extract elides many interior lines of this large
   switch (most `case' labels, braces, and some statements); the added
   comments below are hedged accordingly and code is left untouched. */
1364 generate_bytecode_insns (tree exp, int target, struct jcf_partial *state)
1367 enum java_opcode jopcode;
1369 HOST_WIDE_INT value;
/* A missing expression with no requested result needs no code. */
1374 if (exp == NULL && target == IGNORE_TARGET)
1377 type = TREE_TYPE (exp);
/* Dispatch on the tree code of EXP; one case per Java construct. */
1379 switch (TREE_CODE (exp))
/* BLOCK: allocate locals, emit the body, then free the locals. */
1382 if (BLOCK_EXPR_BODY (exp))
1385 tree body = BLOCK_EXPR_BODY (exp);
1386 for (local = BLOCK_EXPR_DECLS (exp); local; )
1388 tree next = TREE_CHAIN (local);
1389 localvar_alloc (local, state);
1392 /* Avoid deep recursion for long blocks. */
1393 while (TREE_CODE (body) == COMPOUND_EXPR)
1395 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1396 body = TREE_OPERAND (body, 1);
1398 generate_bytecode_insns (body, target, state);
1399 for (local = BLOCK_EXPR_DECLS (exp); local; )
1401 tree next = TREE_CHAIN (local);
1402 localvar_free (local, state);
/* COMPOUND_EXPR: first operand for effect, second for value. */
1408 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1409 /* Normally the first operand to a COMPOUND_EXPR must complete
1410 normally. However, in the special case of a do-while
1411 statement this is not necessarily the case. */
1412 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0)))
1413 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1415 case EXPR_WITH_FILE_LOCATION:
/* Temporarily switch the global file/line so line-number table
   entries attribute the emitted code to the right source line. */
1417 const char *saved_input_filename = input_filename;
1418 tree body = EXPR_WFL_NODE (exp);
1419 int saved_lineno = lineno;
1420 if (body == empty_stmt_node)
1422 input_filename = EXPR_WFL_FILENAME (exp);
1423 lineno = EXPR_WFL_LINENO (exp);
1424 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && lineno > 0
1425 && debug_info_level > DINFO_LEVEL_NONE)
1426 put_linenumber (lineno, state);
1427 generate_bytecode_insns (body, target, state);
1428 input_filename = saved_input_filename;
1429 lineno = saved_lineno;
/* Integer/pointer constant: pick the cheapest push form. */
1433 if (target == IGNORE_TARGET) ; /* do nothing */
1434 else if (TREE_CODE (type) == POINTER_TYPE)
1436 if (! integer_zerop (exp))
1439 OP1 (OPCODE_aconst_null);
1442 else if (TYPE_PRECISION (type) <= 32)
1444 push_int_const (TREE_INT_CST_LOW (exp), state);
1449 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
/* Real constant: use fconst_*/dconst_* shortcuts for 0.0 and 1.0
   (but not for -0.0), otherwise fall through to the constant pool. */
1456 int prec = TYPE_PRECISION (type) >> 5;
1458 if (real_zerop (exp) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp)))
1459 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1460 else if (real_onep (exp))
1461 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1462 /* FIXME Should also use fconst_2 for 2.0f.
1463 Also, should use iconst_2/ldc followed by i2f/i2d
1464 for other float/double when the value is a small integer. */
/* General constant: ldc for a 1-byte index, ldc_w/ldc2_w otherwise
   (surrounding branch elided in this extract). */
1467 offset = find_constant_index (exp, state);
1469 push_constant1 (offset, state);
1471 push_constant2 (offset, state);
/* String literal: load it from the constant pool. */
1477 push_constant1 (find_string_constant (&state->cpool, exp), state);
/* VAR_DECL/PARM_DECL: static fields use getstatic, locals fall
   through to emit_load. */
1481 if (TREE_STATIC (exp))
1483 field_op (exp, OPCODE_getstatic, state);
1484 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1487 /* ... fall through ... */
1489 emit_load (exp, state);
1491 case NON_LVALUE_EXPR:
1493 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
/* ARRAY_REF load: push array ref and index, then the typed *aload. */
1496 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1497 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1498 if (target != IGNORE_TARGET)
1500 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1503 if (! TYPE_IS_WIDE (type))
/* COMPONENT_REF: field read; `array.length' gets arraylength. */
1509 tree obj = TREE_OPERAND (exp, 0);
1510 tree field = TREE_OPERAND (exp, 1);
1511 int is_static = FIELD_STATIC (field);
1512 generate_bytecode_insns (obj,
1513 is_static ? IGNORE_TARGET : target, state);
1514 if (target != IGNORE_TARGET)
1516 if (DECL_NAME (field) == length_identifier_node && !is_static
1517 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1520 OP1 (OPCODE_arraylength);
1524 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1528 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1533 case TRUTH_ANDIF_EXPR:
1534 case TRUTH_ORIF_EXPR:
/* Boolean expression used for its value: branch to push 1 or 0. */
1542 struct jcf_block *then_label = gen_jcf_label (state);
1543 struct jcf_block *else_label = gen_jcf_label (state);
1544 struct jcf_block *end_label = gen_jcf_label (state);
1545 generate_bytecode_conditional (exp,
1546 then_label, else_label, 1, state);
1547 define_jcf_label (then_label, state);
1548 push_int_const (1, state);
1549 emit_goto (end_label, state);
1550 define_jcf_label (else_label, state);
1551 push_int_const (0, state);
1552 define_jcf_label (end_label, state);
/* COND_EXPR (?: or if/else): branch, emit each arm, join. */
1558 struct jcf_block *then_label = gen_jcf_label (state);
1559 struct jcf_block *else_label = gen_jcf_label (state);
1560 struct jcf_block *end_label = gen_jcf_label (state);
1561 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1562 then_label, else_label, 1, state);
1563 define_jcf_label (then_label, state);
1564 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1565 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1566 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1567 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1568 emit_goto (end_label, state);
1569 define_jcf_label (else_label, state);
1570 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1571 define_jcf_label (end_label, state);
1572 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1573 if (TREE_TYPE (exp) != void_type_node)
1574 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
/* CASE_EXPR: record this case label/value on the enclosing switch
   state; the actual dispatch is emitted by the SWITCH_EXPR case. */
1579 struct jcf_switch_state *sw_state = state->sw_state;
1580 struct jcf_relocation *reloc = (struct jcf_relocation *)
1581 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1582 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1584 reloc->label = get_jcf_label_here (state);
1585 reloc->offset = case_value;
1586 reloc->next = sw_state->cases;
1587 sw_state->cases = reloc;
1588 if (sw_state->num_cases == 0)
1590 sw_state->min_case = case_value;
1591 sw_state->max_case = case_value;
/* Maintain min/max so SWITCH_EXPR can choose tableswitch density. */
1595 if (case_value < sw_state->min_case)
1596 sw_state->min_case = case_value;
1597 if (case_value > sw_state->max_case)
1598 sw_state->max_case = case_value;
1600 sw_state->num_cases++;
/* DEFAULT_EXPR: remember where `default:' lives. */
1604 state->sw_state->default_label = get_jcf_label_here (state);
1609 /* The SWITCH_EXPR has three parts, generated in the following order:
1610 1. the switch_expression (the value used to select the correct case);
1612 3. the switch_instruction (the tableswitch/lookupswitch instruction).
1613 After code generation, we will re-order them in the order 1, 3, 2.
1614 This is to avoid any extra GOTOs. */
1615 struct jcf_switch_state sw_state;
1616 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1617 struct jcf_block *body_last; /* Last block of the switch_body. */
1618 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1619 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1620 struct jcf_block *body_block;
/* Push a fresh switch state for the duration of this SWITCH_EXPR. */
1622 sw_state.prev = state->sw_state;
1623 state->sw_state = &sw_state;
1624 sw_state.cases = NULL;
1625 sw_state.num_cases = 0;
1626 sw_state.default_label = NULL;
1627 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1628 expression_last = state->last_block;
1629 /* Force a new block here. */
1630 body_block = gen_jcf_label (state);
1631 define_jcf_label (body_block, state);
1632 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1633 body_last = state->last_block;
1635 switch_instruction = gen_jcf_label (state);
1636 define_jcf_label (switch_instruction, state);
1637 if (sw_state.default_label == NULL)
1638 sw_state.default_label = gen_jcf_label (state);
/* Degenerate switches: 0 cases just pops the selector; 1 case is a
   single compare-and-branch. */
1640 if (sw_state.num_cases <= 1)
1642 if (sw_state.num_cases == 0)
1644 emit_pop (1, state);
1649 push_int_const (sw_state.cases->offset, state);
1651 emit_if (sw_state.cases->label,
1652 OPCODE_if_icmpeq, OPCODE_if_icmpne, state);
1654 emit_goto (sw_state.default_label, state);
1659 unsigned HOST_WIDE_INT delta;
1660 /* Copy the chain of relocs into a sorted array. */
1661 struct jcf_relocation **relocs = (struct jcf_relocation **)
1662 xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1663 /* The relocs arrays is a buffer with a gap.
1664 The assumption is that cases will normally come in "runs". */
1666 int gap_end = sw_state.num_cases;
1667 struct jcf_relocation *reloc;
/* Gap-buffer insertion sort of the cases by case value. */
1668 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1670 HOST_WIDE_INT case_value = reloc->offset;
1671 while (gap_end < sw_state.num_cases)
1673 struct jcf_relocation *end = relocs[gap_end];
1674 if (case_value <= end->offset)
1676 relocs[gap_start++] = end;
1679 while (gap_start > 0)
1681 struct jcf_relocation *before = relocs[gap_start-1];
1682 if (case_value >= before->offset)
1684 relocs[--gap_end] = before;
1687 relocs[gap_start++] = reloc;
1688 /* Note we don't check for duplicates. This is
1689 handled by the parser. */
1692 /* We could have DELTA < 0 if sw_state.min_case is
1693 something like Integer.MIN_VALUE. That is why delta is
1695 delta = sw_state.max_case - sw_state.min_case;
/* Density heuristic: tableswitch when at least half the range is
   populated, lookupswitch otherwise. */
1696 if (2 * (unsigned) sw_state.num_cases >= delta)
1697 { /* Use tableswitch. */
1699 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1700 OP1 (OPCODE_tableswitch);
1701 emit_reloc (RELOCATION_VALUE_0,
1702 SWITCH_ALIGN_RELOC, NULL, state);
1703 emit_switch_reloc (sw_state.default_label, state);
1704 OP4 (sw_state.min_case);
1705 OP4 (sw_state.max_case);
/* One jump-table slot per value in [min_case, max_case]; holes go
   to the default label. */
1706 for (i = sw_state.min_case; ; )
1708 reloc = relocs[index];
1709 if (i == reloc->offset)
1711 emit_case_reloc (reloc, state);
1712 if (i == sw_state.max_case)
1717 emit_switch_reloc (sw_state.default_label, state);
1722 { /* Use lookupswitch. */
1723 RESERVE(9 + 8 * sw_state.num_cases);
1724 OP1 (OPCODE_lookupswitch);
1725 emit_reloc (RELOCATION_VALUE_0,
1726 SWITCH_ALIGN_RELOC, NULL, state);
1727 emit_switch_reloc (sw_state.default_label, state);
1728 OP4 (sw_state.num_cases);
1729 for (i = 0; i < sw_state.num_cases; i++)
1731 struct jcf_relocation *reloc = relocs[i];
1732 OP4 (reloc->offset);
1733 emit_case_reloc (reloc, state);
1739 instruction_last = state->last_block;
1740 if (sw_state.default_label->pc < 0)
1741 define_jcf_label (sw_state.default_label, state);
1742 else /* Force a new block. */
1743 sw_state.default_label = get_jcf_label_here (state);
1744 /* Now re-arrange the blocks so the switch_instruction
1745 comes before the switch_body. */
1746 switch_length = state->code_length - switch_instruction->pc;
1747 switch_instruction->pc = body_block->pc;
1748 instruction_last->next = body_block;
1749 instruction_last->v.chunk->next = body_block->v.chunk;
1750 expression_last->next = switch_instruction;
1751 expression_last->v.chunk->next = switch_instruction->v.chunk;
1752 body_last->next = sw_state.default_label;
1753 body_last->v.chunk->next = NULL;
1754 state->chunk = body_last->v.chunk;
/* The moved body blocks now start switch_length bytes later. */
1755 for (; body_block != sw_state.default_label; body_block = body_block->next)
1756 body_block->pc += switch_length;
1758 state->sw_state = sw_state.prev;
/* RETURN_EXPR: unwrap the MODIFY_EXPR setting the result, then
   delegate to generate_bytecode_return. */
1763 exp = TREE_OPERAND (exp, 0);
1764 if (exp == NULL_TREE)
1765 exp = empty_stmt_node;
1766 else if (TREE_CODE (exp) != MODIFY_EXPR)
1769 exp = TREE_OPERAND (exp, 1);
1770 generate_bytecode_return (exp, state);
1772 case LABELED_BLOCK_EXPR:
/* Push an exit label (PENDING_EXIT_PC) so EXIT_BLOCK_EXPR inside the
   body can find and branch to it. */
1774 struct jcf_block *end_label = gen_jcf_label (state);
1775 end_label->next = state->labeled_blocks;
1776 state->labeled_blocks = end_label;
1777 end_label->pc = PENDING_EXIT_PC;
1778 end_label->u.labeled_block = exp;
1779 if (LABELED_BLOCK_BODY (exp))
1780 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1781 if (state->labeled_blocks != end_label)
1783 state->labeled_blocks = end_label->next;
1784 define_jcf_label (end_label, state);
/* LOOP_EXPR (presumably -- case label elided in this extract). */
1789 tree body = TREE_OPERAND (exp, 0);
1791 if (TREE_CODE (body) == COMPOUND_EXPR
1792 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1794 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1795 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1796 struct jcf_block *head_label;
1797 struct jcf_block *body_label;
1798 struct jcf_block *end_label = gen_jcf_label (state);
1799 struct jcf_block *exit_label = state->labeled_blocks;
1800 head_label = gen_jcf_label (state);
1801 emit_goto (head_label, state);
1802 body_label = get_jcf_label_here (state);
1803 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1804 define_jcf_label (head_label, state);
1805 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1806 end_label, body_label, 1, state);
1807 define_jcf_label (end_label, state);
/* General loop: body followed by a back-edge goto. */
1812 struct jcf_block *head_label = get_jcf_label_here (state);
1813 generate_bytecode_insns (body, IGNORE_TARGET, state);
1814 if (CAN_COMPLETE_NORMALLY (body))
1815 emit_goto (head_label, state);
/* EXIT_EXPR: conditional branch out of the innermost labeled block. */
1821 struct jcf_block *label = state->labeled_blocks;
1822 struct jcf_block *end_label = gen_jcf_label (state);
1823 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1824 label, end_label, 0, state);
1825 define_jcf_label (end_label, state);
1828 case EXIT_BLOCK_EXPR:
/* break/continue: find the target block, run any intervening
   `finally' cleanups, then jump. */
1830 struct jcf_block *label = state->labeled_blocks;
1831 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1832 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1833 label = label->next;
1834 call_cleanups (label, state);
1835 emit_goto (label, state);
1839 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1840 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1841 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1842 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
/* increment: shared ++/-- code (label itself elided in extract). */
1845 arg = TREE_OPERAND (exp, 1);
1846 exp = TREE_OPERAND (exp, 0);
1847 type = TREE_TYPE (exp);
1848 size = TYPE_IS_WIDE (type) ? 2 : 1;
/* Fast path: 32-bit int local variables can use the iinc opcode. */
1849 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1850 && ! TREE_STATIC (exp)
1851 && TREE_CODE (type) == INTEGER_TYPE
1852 && TYPE_PRECISION (type) == 32)
1854 if (target != IGNORE_TARGET && post_op)
1855 emit_load (exp, state);
1856 emit_iinc (exp, value, state);
1857 if (target != IGNORE_TARGET && ! post_op)
1858 emit_load (exp, state);
/* Slow path: load old value, add/sub, store back. */
1861 if (TREE_CODE (exp) == COMPONENT_REF)
1863 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1864 emit_dup (1, 0, state);
1865 /* Stack: ..., objectref, objectref. */
1866 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1868 /* Stack: ..., objectref, oldvalue. */
1871 else if (TREE_CODE (exp) == ARRAY_REF)
1873 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1874 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1875 emit_dup (2, 0, state);
1876 /* Stack: ..., array, index, array, index. */
1877 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1881 /* Stack: ..., array, index, oldvalue. */
1884 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1886 generate_bytecode_insns (exp, STACK_TARGET, state);
1887 /* Stack: ..., oldvalue. */
1893 if (target != IGNORE_TARGET && post_op)
1894 emit_dup (size, offset, state);
1895 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1896 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1897 /* Stack, otherwise: ..., [result, ] oldvalue. */
1898 generate_bytecode_insns (arg, STACK_TARGET, state);
1899 emit_binop ((value >= 0 ? OPCODE_iadd : OPCODE_isub)
1900 + adjust_typed_op (type, 3),
1902 if (target != IGNORE_TARGET && ! post_op)
1903 emit_dup (size, offset, state);
1904 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1905 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1906 /* Stack, otherwise: ..., [result, ] newvalue. */
1907 goto finish_assignment;
/* MODIFY_EXPR (assignment). */
1911 tree lhs = TREE_OPERAND (exp, 0);
1912 tree rhs = TREE_OPERAND (exp, 1);
1915 /* See if we can use the iinc instruction. */
1916 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1917 && ! TREE_STATIC (lhs)
1918 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1919 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1920 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1922 tree arg0 = TREE_OPERAND (rhs, 0);
1923 tree arg1 = TREE_OPERAND (rhs, 1);
/* iinc's immediate is a signed 16-bit value (wide form). */
1924 HOST_WIDE_INT min_value = -32768;
1925 HOST_WIDE_INT max_value = 32767;
1926 if (TREE_CODE (rhs) == MINUS_EXPR)
1931 else if (arg1 == lhs)
1934 arg1 = TREE_OPERAND (rhs, 0);
1936 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
1938 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
1939 value = TREE_INT_CST_LOW (arg1);
1940 if ((hi_value == 0 && value <= max_value)
1941 || (hi_value == -1 && value >= min_value))
1943 if (TREE_CODE (rhs) == MINUS_EXPR)
1945 emit_iinc (lhs, value, state);
1946 if (target != IGNORE_TARGET)
1947 emit_load (lhs, state);
/* General assignment: first push the "address" parts of the lhs. */
1953 if (TREE_CODE (lhs) == COMPONENT_REF)
1955 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
1956 STACK_TARGET, state);
1959 else if (TREE_CODE (lhs) == ARRAY_REF)
1961 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
1962 STACK_TARGET, state);
1963 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
1964 STACK_TARGET, state);
1970 /* If the rhs is a binary expression and the left operand is
1971 `==' to the lhs then we have an OP= expression. In this
1972 case we must do some special processing. */
1973 if (TREE_CODE_CLASS (TREE_CODE (rhs)) == '2'
1974 && lhs == TREE_OPERAND (rhs, 0))
1976 if (TREE_CODE (lhs) == COMPONENT_REF)
1978 tree field = TREE_OPERAND (lhs, 1);
1979 if (! FIELD_STATIC (field))
1981 /* Duplicate the object reference so we can get
1983 emit_dup (TYPE_IS_WIDE (field) ? 2 : 1, 0, state);
1986 field_op (field, (FIELD_STATIC (field)
1991 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1993 else if (TREE_CODE (lhs) == VAR_DECL
1994 || TREE_CODE (lhs) == PARM_DECL)
1996 if (FIELD_STATIC (lhs))
1998 field_op (lhs, OPCODE_getstatic, state);
1999 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2002 emit_load (lhs, state);
2004 else if (TREE_CODE (lhs) == ARRAY_REF)
2006 /* Duplicate the array and index, which are on the
2007 stack, so that we can load the old value. */
2008 emit_dup (2, 0, state);
2010 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (lhs), 7);
2013 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2018 /* This function correctly handles the case where the LHS
2019 of a binary expression is NULL_TREE. */
2020 rhs = build (TREE_CODE (rhs), TREE_TYPE (rhs),
2021 NULL_TREE, TREE_OPERAND (rhs, 1));
2024 generate_bytecode_insns (rhs, STACK_TARGET, state);
2025 if (target != IGNORE_TARGET)
2026 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
/* finish_assignment (label elided): emit the actual store. */
2032 if (TREE_CODE (exp) == COMPONENT_REF)
2034 tree field = TREE_OPERAND (exp, 1);
2035 if (! FIELD_STATIC (field))
2038 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2041 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2043 else if (TREE_CODE (exp) == VAR_DECL
2044 || TREE_CODE (exp) == PARM_DECL)
2046 if (FIELD_STATIC (exp))
2048 field_op (exp, OPCODE_putstatic, state);
2049 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2052 emit_store (exp, state);
2054 else if (TREE_CODE (exp) == ARRAY_REF)
2056 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2059 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
/* Arithmetic/logical binary operators: pick the int-family opcode,
   then adjust for the operand type at `binop'. */
2065 jopcode = OPCODE_iadd;
2068 jopcode = OPCODE_isub;
2071 jopcode = OPCODE_imul;
2073 case TRUNC_DIV_EXPR:
2075 jopcode = OPCODE_idiv;
2077 case TRUNC_MOD_EXPR:
2078 jopcode = OPCODE_irem;
2080 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2081 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
2082 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2083 case TRUTH_AND_EXPR:
2084 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2086 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2087 case TRUTH_XOR_EXPR:
2088 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
/* binop (label elided): emit operands and the typed opcode. */
2091 tree arg0 = TREE_OPERAND (exp, 0);
2092 tree arg1 = TREE_OPERAND (exp, 1);
2093 jopcode += adjust_typed_op (type, 3);
2094 if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
2096 /* fold may (e.g) convert 2*x to x+x. */
2097 generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
2098 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2102 /* ARG0 will be NULL_TREE if we're handling an `OP='
2103 expression. In this case the stack already holds the
2104 LHS. See the MODIFY_EXPR case. */
2105 if (arg0 != NULL_TREE)
2106 generate_bytecode_insns (arg0, target, state);
/* JVM long shifts take an int shift count. */
2107 if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2108 arg1 = convert (int_type_node, arg1);
2109 generate_bytecode_insns (arg1, target, state);
2111 /* For most binary operations, both operands and the result have the
2112 same type. Shift operations are different. Using arg1's type
2113 gets us the correct SP adjustment in all cases. */
2114 if (target == STACK_TARGET)
2115 emit_binop (jopcode, TREE_TYPE (arg1), state);
2118 case TRUTH_NOT_EXPR:
/* Logical/bitwise NOT: xor with 1 (truth) or -1 (bitwise). */
2120 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2121 if (target == STACK_TARGET)
2123 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2124 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2128 NOTE_PUSH (1 + is_long);
2129 OP1 (OPCODE_ixor + is_long);
2130 NOTE_POP (1 + is_long);
/* Unary negation: typed *neg opcode. */
2134 jopcode = OPCODE_ineg;
2135 jopcode += adjust_typed_op (type, 3);
2136 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2137 if (target == STACK_TARGET)
2138 emit_unop (jopcode, type, state);
2140 case INSTANCEOF_EXPR:
2142 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2143 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2145 OP1 (OPCODE_instanceof);
2150 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2155 case FIX_TRUNC_EXPR:
/* Conversions: checkcast for reference casts, the i2l/f2d/... family
   for numeric conversions. */
2157 tree src = TREE_OPERAND (exp, 0);
2158 tree src_type = TREE_TYPE (src);
2159 tree dst_type = TREE_TYPE (exp);
2160 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2161 if (target == IGNORE_TARGET || src_type == dst_type)
2163 if (TREE_CODE (dst_type) == POINTER_TYPE)
2165 if (TREE_CODE (exp) == CONVERT_EXPR)
2167 int index = find_class_constant (&state->cpool,
2168 TREE_TYPE (dst_type));
2170 OP1 (OPCODE_checkcast);
2174 else /* Convert numeric types. */
2176 int wide_src = TYPE_PRECISION (src_type) > 32;
2177 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2178 NOTE_POP (1 + wide_src);
2180 if (TREE_CODE (dst_type) == REAL_TYPE)
2182 if (TREE_CODE (src_type) == REAL_TYPE)
2183 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2184 else if (TYPE_PRECISION (src_type) == 64)
2185 OP1 (OPCODE_l2f + wide_dst);
2187 OP1 (OPCODE_i2f + wide_dst);
2189 else /* Convert to integral type. */
2191 if (TREE_CODE (src_type) == REAL_TYPE)
2192 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
/* Narrowing below int needs an extra i2b/i2c/i2s. */
2197 if (TYPE_PRECISION (dst_type) < 32)
2200 /* Already converted to int, if needed. */
2201 if (TYPE_PRECISION (dst_type) <= 8)
2203 else if (TREE_UNSIGNED (dst_type))
2209 NOTE_PUSH (1 + wide_dst);
/* TRY_EXPR (try/catch, no finally -- case label elided). */
2216 tree try_clause = TREE_OPERAND (exp, 0);
2217 struct jcf_block *start_label = get_jcf_label_here (state);
2218 struct jcf_block *end_label; /* End of try clause. */
2219 struct jcf_block *finished_label = gen_jcf_label (state);
2220 tree clause = TREE_OPERAND (exp, 1);
2221 if (target != IGNORE_TARGET)
2223 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2224 end_label = get_jcf_label_here (state);
2225 if (end_label == start_label)
2227 if (CAN_COMPLETE_NORMALLY (try_clause))
2228 emit_goto (finished_label, state);
/* One exception-table handler entry per catch clause. */
2229 while (clause != NULL_TREE)
2231 tree catch_clause = TREE_OPERAND (clause, 0);
2232 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2233 struct jcf_handler *handler = alloc_handler (start_label,
2235 if (exception_decl == NULL_TREE)
2236 handler->type = NULL_TREE;
2238 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2239 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2240 clause = TREE_CHAIN (clause);
2241 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2242 emit_goto (finished_label, state);
2244 define_jcf_label (finished_label, state);
2248 case TRY_FINALLY_EXPR:
2250 struct jcf_block *finished_label = NULL;
2251 struct jcf_block *finally_label, *start_label, *end_label;
2252 struct jcf_handler *handler;
2253 tree try_block = TREE_OPERAND (exp, 0);
2254 tree finally = TREE_OPERAND (exp, 1);
2255 tree return_link = NULL_TREE, exception_decl = NULL_TREE;
2257 tree exception_type;
2259 finally_label = gen_jcf_label (state);
2260 start_label = get_jcf_label_here (state);
2261 /* If the `finally' clause can complete normally, we emit it
2262 as a subroutine and let the other clauses call it via
2263 `jsr'. If it can't complete normally, then we simply emit
2264 `goto's directly to it. */
2265 if (CAN_COMPLETE_NORMALLY (finally))
2267 finally_label->pc = PENDING_CLEANUP_PC;
2268 finally_label->next = state->labeled_blocks;
2269 state->labeled_blocks = finally_label;
2270 state->num_finalizers++;
2273 generate_bytecode_insns (try_block, target, state);
2275 if (CAN_COMPLETE_NORMALLY (finally))
2277 if (state->labeled_blocks != finally_label)
2279 state->labeled_blocks = finally_label->next;
2281 end_label = get_jcf_label_here (state);
/* Empty try body: just emit the finally code inline. */
2283 if (end_label == start_label)
2285 state->num_finalizers--;
2286 define_jcf_label (finally_label, state);
2287 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2291 if (CAN_COMPLETE_NORMALLY (finally))
/* return_link holds the jsr return address for the subroutine. */
2293 return_link = build_decl (VAR_DECL, NULL_TREE,
2294 return_address_type_node);
2295 finished_label = gen_jcf_label (state);
2298 if (CAN_COMPLETE_NORMALLY (try_block))
2300 if (CAN_COMPLETE_NORMALLY (finally))
2302 emit_jsr (finally_label, state);
2303 emit_goto (finished_label, state);
2306 emit_goto (finally_label, state);
2309 /* Handle exceptions. */
2311 exception_type = build_pointer_type (throwable_type_node);
2312 if (CAN_COMPLETE_NORMALLY (finally))
2314 /* We're going to generate a subroutine, so we'll need to
2315 save and restore the exception around the `jsr'. */
2316 exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2317 localvar_alloc (return_link, state);
/* Catch-all handler (type NULL_TREE): run finally, then rethrow. */
2319 handler = alloc_handler (start_label, end_label, state);
2320 handler->type = NULL_TREE;
2321 if (CAN_COMPLETE_NORMALLY (finally))
2323 localvar_alloc (exception_decl, state);
2325 emit_store (exception_decl, state);
2326 emit_jsr (finally_label, state);
2327 emit_load (exception_decl, state);
2329 OP1 (OPCODE_athrow);
2334 /* We're not generating a subroutine. In this case we can
2335 simply have the exception handler pop the exception and
2336 then fall through to the `finally' block. */
2338 emit_pop (1, state);
2342 /* The finally block. If we're generating a subroutine, first
2343 save return PC into return_link. Otherwise, just generate
2344 the code for the `finally' block. */
2345 define_jcf_label (finally_label, state);
2346 if (CAN_COMPLETE_NORMALLY (finally))
2349 emit_store (return_link, state);
2352 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2353 if (CAN_COMPLETE_NORMALLY (finally))
2355 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2356 localvar_free (exception_decl, state);
2357 localvar_free (return_link, state);
2358 define_jcf_label (finished_label, state);
/* THROW (presumably -- case label elided): push exception, athrow. */
2363 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2365 OP1 (OPCODE_athrow);
2367 case NEW_ARRAY_INIT:
/* Array initializer: create the array, then dup/index/store each
   element value. */
2369 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2370 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2371 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2372 HOST_WIDE_INT length = java_array_type_length (array_type);
2373 if (target == IGNORE_TARGET)
2375 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2376 generate_bytecode_insns (TREE_VALUE (values), target, state);
2379 push_int_const (length, state);
2382 if (JPRIMITIVE_TYPE_P (element_type))
2384 int atype = encode_newarray_type (element_type);
2385 OP1 (OPCODE_newarray);
2390 int index = find_class_constant (&state->cpool,
2391 TREE_TYPE (element_type));
2392 OP1 (OPCODE_anewarray);
2396 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2397 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2399 int save_SP = state->code_SP;
2400 emit_dup (1, 0, state);
2401 push_int_const (offset, state);
2403 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2406 state->code_SP = save_SP;
2410 case JAVA_EXC_OBJ_EXPR:
2411 NOTE_PUSH (1); /* Pushed by exception system. */
2413 case NEW_CLASS_EXPR:
/* new C(...): emit `new' (+ dup when the result is wanted), then fall
   through to the constructor invocation below. */
2415 tree class = TREE_TYPE (TREE_TYPE (exp));
2416 int need_result = target != IGNORE_TARGET;
2417 int index = find_class_constant (&state->cpool, class);
2423 NOTE_PUSH (1 + need_result);
2425 /* ... fall through ... */
/* CALL_EXPR: soft_* builtins map to dedicated opcodes; everything
   else becomes an invoke* instruction. */
2428 tree f = TREE_OPERAND (exp, 0);
2429 tree x = TREE_OPERAND (exp, 1);
2430 int save_SP = state->code_SP;
2432 if (TREE_CODE (f) == ADDR_EXPR)
2433 f = TREE_OPERAND (f, 0);
2434 if (f == soft_newarray_node)
2436 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2437 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2438 STACK_TARGET, state);
2440 OP1 (OPCODE_newarray);
2444 else if (f == soft_multianewarray_node)
2448 int index = find_class_constant (&state->cpool,
2449 TREE_TYPE (TREE_TYPE (exp)));
2450 x = TREE_CHAIN (x); /* Skip class argument. */
2451 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2452 for (idim = ndims; --idim >= 0; )
2455 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2458 OP1 (OPCODE_multianewarray);
2463 else if (f == soft_anewarray_node)
2465 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2466 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2467 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2469 OP1 (OPCODE_anewarray);
2473 else if (f == soft_monitorenter_node
2474 || f == soft_monitorexit_node
2477 if (f == soft_monitorenter_node)
2478 op = OPCODE_monitorenter;
2479 else if (f == soft_monitorexit_node)
2480 op = OPCODE_monitorexit;
2483 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
/* Ordinary call: push arguments, count them via SP delta. */
2489 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2491 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2493 nargs = state->code_SP - save_SP;
2494 state->code_SP = save_SP;
2495 if (f == soft_fmod_node)
2502 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2503 NOTE_POP (1); /* Pop implicit this. */
2504 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2506 tree context = DECL_CONTEXT (f);
2507 int index, interface = 0;
/* Choose invokestatic/invokespecial/invokeinterface/invokevirtual. */
2509 if (METHOD_STATIC (f))
2510 OP1 (OPCODE_invokestatic);
2511 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2512 || METHOD_PRIVATE (f))
2513 OP1 (OPCODE_invokespecial);
2516 if (CLASS_INTERFACE (TYPE_NAME (context)))
2518 tree arg1 = TREE_VALUE (TREE_OPERAND (exp, 1));
2519 context = TREE_TYPE (TREE_TYPE (arg1));
2520 if (CLASS_INTERFACE (TYPE_NAME (context)))
2524 OP1 (OPCODE_invokeinterface);
2526 OP1 (OPCODE_invokevirtual);
2528 index = find_methodref_with_class_index (&state->cpool, f, context);
/* Account for the callee's return value on the simulated stack;
   discard it when no result was requested. */
2538 f = TREE_TYPE (TREE_TYPE (f));
2539 if (TREE_CODE (f) != VOID_TYPE)
2541 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2542 if (target == IGNORE_TARGET)
2543 emit_pop (size, state);
/* Fallback (notimpl) for tree codes without a bytecode expansion. */
2553 error("internal error in generate_bytecode_insn - tree code not implemented: %s",
2554 tree_code_name [(int) TREE_CODE (exp)]);
/* Assign final bytecode addresses to every basic block of the current
   method and apply all pending branch relocations.
   NOTE(review): this is an excerpt with lines elided between the
   embedded original line numbers; the notes below describe only what
   is visible here.
   Pass 1 walks the block list computing each block's final pc:
   a goto that targets the immediately-following block is deleted,
   goto-to-goto chains are re-targeted, and branches whose 16-bit
   displacement would overflow are expanded (goto/jsr -> wide form,
   conditional -> inverted test plus goto_w).
   Pass 2 re-copies each block's bytes, back to front, patching the
   2- or 4-byte branch operands in place.  */
2559 perform_relocations (struct jcf_partial *state)
2561 struct jcf_block *block;
2562 struct jcf_relocation *reloc;
2566 /* Before we start, the pc field of each block is an upper bound on
2567 the block's start pc (it may be less, if previous blocks need less
2568 than their maximum).
2570 The minimum size of each block is in the block's chunk->size. */
2572 /* First, figure out the actual locations of each block. */
2575 for (block = state->blocks; block != NULL; block = block->next)
2577 int block_size = block->v.chunk->size;
2581 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2582 Assumes relocations are in reverse order. */
2583 reloc = block->u.relocations;
2584 while (reloc != NULL
2585 && reloc->kind == OPCODE_goto_w
2586 && reloc->label->pc == block->next->pc
2587 && reloc->offset + 2 == block_size
2589 reloc = reloc->next;
2590 block->u.relocations = reloc;
/* Dropping the redundant goto releases its 3 bytes (opcode + 16-bit
   operand).  */
2591 block->v.chunk->size -= 3;
2596 /* Optimize GOTO L; ... L: GOTO X by changing the first goto to
2597 jump directly to X. We're careful here to avoid an infinite
2598 loop if the `goto's themselves form one. We do this
2599 optimization because we can generate a goto-to-goto for some
2600 try/finally blocks. */
2601 while (reloc != NULL
2602 && reloc->kind == OPCODE_goto_w
2603 && reloc->label != block
2604 && reloc->label->v.chunk->data != NULL
2605 && reloc->label->v.chunk->data[0] == OPCODE_goto)
2607 /* Find the reloc for the first instruction of the
2608 destination block. */
2609 struct jcf_relocation *first_reloc;
2610 for (first_reloc = reloc->label->u.relocations;
2612 first_reloc = first_reloc->next)
2614 if (first_reloc->offset == 1
2615 && first_reloc->kind == OPCODE_goto_w)
2617 reloc->label = first_reloc->label;
2622 /* If we didn't do anything, exit the loop. */
2623 if (first_reloc == NULL)
2627 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2629 if (reloc->kind == SWITCH_ALIGN_RELOC)
2631 /* We assume this is the first relocation in this block,
2632 so we know its final pc. */
2633 int where = pc + reloc->offset;
/* tableswitch/lookupswitch operands must start on a 4-byte boundary;
   PAD is the number of filler bytes needed.  */
2634 int pad = ((where + 3) & ~3) - where;
2637 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2639 int delta = reloc->label->pc - (pc + reloc->offset - 1);
/* Widening costs 2 extra bytes for goto/jsr (kind > 0), 5 for a
   conditional branch (inverted test + goto_w).  */
2640 int expand = reloc->kind > 0 ? 2 : 5;
/* Displacement fits in a signed 16-bit operand: no expansion
   needed (elided lines presumably handle this case).  */
2644 if (delta >= -32768 && delta <= 32767)
2650 block_size += expand;
/* Pass 2: copy each block's bytes to their final location, applying
   relocations (stored in reverse order) from back to front.  */
2656 for (block = state->blocks; block != NULL; block = block->next)
2658 struct chunk *chunk = block->v.chunk;
2659 int old_size = chunk->size;
2660 int next_pc = block->next == NULL ? pc : block->next->pc;
2661 int new_size = next_pc - block->pc;
2662 unsigned char *new_ptr;
2663 unsigned char *old_buffer = chunk->data;
2664 unsigned char *old_ptr = old_buffer + old_size;
2665 if (new_size != old_size)
/* The block grew or shrank: allocate a fresh buffer of the final
   size on the chunk obstack.  */
2667 chunk->data = (unsigned char *)
2668 obstack_alloc (state->chunk_obstack, new_size);
2669 chunk->size = new_size;
2671 new_ptr = chunk->data + new_size;
2673 /* We do the relocations from back to front, because
2674 the relocations are in reverse order. */
2675 for (reloc = block->u.relocations; ; reloc = reloc->next)
2677 /* new_ptr and old_ptr point into the old and new buffers,
2678 respectively. (If no relocations cause the buffer to
2679 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2680 The bytes at higher address have been copied and relocations
2681 handled; those at lower addresses remain to process. */
2683 /* Lower old index of piece to be copied with no relocation.
2684 I.e. high index of the first piece that does need relocation. */
2685 int start = reloc == NULL ? 0
2686 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2687 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2689 : reloc->offset + 2;
2692 int n = (old_ptr - old_buffer) - start;
2696 memcpy (new_ptr, old_ptr, n);
2697 if (old_ptr == old_buffer)
2700 new_offset = new_ptr - chunk->data;
/* Step back over the operand: 2 bytes for a 16-bit branch
   (kind == -1), otherwise 4 bytes.  */
2701 new_offset -= (reloc->kind == -1 ? 2 : 4);
2702 if (reloc->kind == 0)
2705 value = GET_u4 (old_ptr);
2707 else if (reloc->kind == BLOCK_START_RELOC)
2713 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2715 int where = block->pc + reloc->offset;
2716 int pad = ((where + 3) & ~3) - where;
2724 value = GET_u2 (old_ptr);
/* Re-bias the stored operand so it is relative to the opcode's
   final pc rather than its provisional one.  */
2726 value += reloc->label->pc - (block->pc + new_offset);
/* Emit the (2- or 4-byte) big-endian operand, low byte first since
   we are writing backwards.  */
2727 *--new_ptr = (unsigned char) value; value >>= 8;
2728 *--new_ptr = (unsigned char) value; value >>= 8;
2729 if (reloc->kind != -1)
2731 *--new_ptr = (unsigned char) value; value >>= 8;
2732 *--new_ptr = (unsigned char) value;
2734 if (reloc->kind > BLOCK_START_RELOC)
2736 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2738 *--new_ptr = reloc->kind;
2740 else if (reloc->kind < -1)
2742 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2744 *--new_ptr = OPCODE_goto_w;
/* A negative kind encodes the inverted conditional opcode.  */
2747 *--new_ptr = - reloc->kind;
2750 if (new_ptr != chunk->data)
/* Record the total method code length for the Code attribute.  */
2753 state->code_length = pc;
/* One-time initialization of the class-file writer STATE.  Chunks are
   allocated from obstack WORK; the constant pool and the localvars and
   bytecode growable buffers start out empty.
   NOTE(review): excerpt — trailing lines of this function are elided.  */
2757 init_jcf_state (struct jcf_partial *state, struct obstack *work)
2759 state->chunk_obstack = work;
2760 state->first = state->chunk = NULL;
2761 CPOOL_INIT (&state->cpool);
2762 BUFFER_INIT (&state->localvars);
2763 BUFFER_INIT (&state->bytecode);
/* Reset the per-method fields of STATE before generating bytecode for
   METHOD: no blocks, no line-number entries, no local variables, no
   labeled blocks, no exception handlers, and empty bytecode/localvar
   buffers.  Called once per method from generate_classfile.
   NOTE(review): excerpt — some lines of this function are elided.  */
2767 init_jcf_method (struct jcf_partial *state, tree method)
2769 state->current_method = method;
2770 state->blocks = state->last_block = NULL;
2771 state->linenumber_count = 0;
2772 state->first_lvar = state->last_lvar = NULL;
2773 state->lvar_count = 0;
2774 state->labeled_blocks = NULL;
2775 state->code_length = 0;
2776 BUFFER_RESET (&state->bytecode);
2777 BUFFER_RESET (&state->localvars);
2779 state->code_SP_max = 0;
2780 state->handlers = NULL;
2781 state->last_handler = NULL;
2782 state->num_handlers = 0;
2783 state->num_finalizers = 0;
2784 state->return_value_decl = NULL_TREE;
/* Release all memory owned by STATE: finish the constant pool, then
   free every chunk allocated on the chunk obstack (freeing state->first
   releases it and everything allocated after it).  */
2788 release_jcf_state (struct jcf_partial *state)
2790 CPOOL_FINISH (&state->cpool);
2791 obstack_free (state->chunk_obstack, state->first);
2794 /* Generate and return a list of chunks containing the class CLAS
2795 in the .class file representation. The list can be written to a
2796 .class file using write_chunks. Allocate chunks from obstack WORK. */
/* Cached identifier for the "SourceFile" attribute name (GC-rooted).  */
2798 static GTY(()) tree SourceFile_node;
/* Build the complete .class file image for class CLAS as a list of
   chunks in STATE, returning the head of the chunk list (suitable for
   write_chunks).  Emits, in class-file order: magic/version, constant
   pool (patched last), access flags, this/super/interfaces, fields
   with ConstantValue/Synthetic attributes, methods with Code (incl.
   exception table, LineNumberTable, LocalVariableTable), Exceptions,
   then the class-level SourceFile/gcj/InnerClasses attributes.
   NOTE(review): excerpt — many lines are elided between the embedded
   original line numbers; comments describe only what is visible.  */
2799 static struct chunk *
2800 generate_classfile (tree clas, struct jcf_partial *state)
2802 struct chunk *cpool_chunk;
2803 const char *source_file, *s;
2806 char *fields_count_ptr;
2807 int fields_count = 0;
2808 char *methods_count_ptr;
2809 int methods_count = 0;
/* java.lang.Object has no supertypes at all.  */
2812 = clas == object_type_node ? 0
2813 : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));
2815 ptr = append_chunk (NULL, 8, state);
2816 PUT4 (0xCafeBabe); /* Magic number */
2817 PUT2 (3); /* Minor version */
2818 PUT2 (45); /* Major version */
/* Reserve a zero-length placeholder chunk for the constant pool; its
   contents are filled in at the very end, once all constants exist.  */
2820 append_chunk (NULL, 0, state);
2821 cpool_chunk = state->chunk;
2823 /* Next allocate the chunk containing acces_flags through fields_count. */
2824 if (clas == object_type_node)
2827 i = 8 + 2 * total_supers;
2828 ptr = append_chunk (NULL, i, state);
2829 i = get_access_flags (TYPE_NAME (clas));
2830 if (! (i & ACC_INTERFACE))
2832 PUT2 (i); /* acces_flags */
2833 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2834 if (clas == object_type_node)
2836 PUT2(0); /* super_class */
2837 PUT2(0); /* interfaces_count */
/* Normal case: first basetype is the superclass, the rest are the
   implemented interfaces.  */
2841 tree basetypes = TYPE_BINFO_BASETYPES (clas);
2842 tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
2843 int j = find_class_constant (&state->cpool, base);
2844 PUT2 (j); /* super_class */
2845 PUT2 (total_supers - 1); /* interfaces_count */
2846 for (i = 1; i < total_supers; i++)
2848 base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
2849 j = find_class_constant (&state->cpool, base);
/* Remember where fields_count goes; it is back-patched after the
   field loop below.  */
2853 fields_count_ptr = ptr;
2855 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
2857 int have_value, attr_count = 0;
/* Skip compiler-generated fields with no user-visible name.  */
2858 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
2860 ptr = append_chunk (NULL, 8, state);
2861 i = get_access_flags (part); PUT2 (i);
2862 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
2863 i = find_utf8_constant (&state->cpool,
2864 build_java_signature (TREE_TYPE (part)));
/* A ConstantValue attribute is emitted only for static final fields
   of primitive or String type with a compile-time-constant
   initializer.  */
2866 have_value = DECL_INITIAL (part) != NULL_TREE
2867 && FIELD_STATIC (part) && CONSTANT_VALUE_P (DECL_INITIAL (part))
2868 && FIELD_FINAL (part)
2869 && (JPRIMITIVE_TYPE_P (TREE_TYPE (part))
2870 || TREE_TYPE (part) == string_ptr_type_node);
2874 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part) || FIELD_SYNTHETIC (part))
2877 PUT2 (attr_count); /* attributes_count */
2880 tree init = DECL_INITIAL (part);
2881 static tree ConstantValue_node = NULL_TREE;
2882 if (TREE_TYPE (part) != TREE_TYPE (init))
2883 fatal_error ("field initializer type mismatch");
2884 ptr = append_chunk (NULL, 8, state);
2885 if (ConstantValue_node == NULL_TREE)
2886 ConstantValue_node = get_identifier ("ConstantValue");
2887 i = find_utf8_constant (&state->cpool, ConstantValue_node);
2888 PUT2 (i); /* attribute_name_index */
2889 PUT4 (2); /* attribute_length */
2890 i = find_constant_index (init, state); PUT2 (i);
2892 /* Emit the "Synthetic" attribute for val$<x> and this$<n>
2893 fields and other fields which need it. */
2894 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
2895 || FIELD_SYNTHETIC (part))
2896 ptr = append_synthetic_attribute (state);
/* Back-patch the field count now that all fields are written.  */
2899 ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
2901 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
2904 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
2906 struct jcf_block *block;
2907 tree function_body = DECL_FUNCTION_BODY (part);
2908 tree body = function_body == NULL_TREE ? NULL_TREE
2909 : BLOCK_EXPR_BODY (function_body);
/* Constructors are written out under the JVM name "<init>".  */
2910 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
2912 tree type = TREE_TYPE (part);
2913 tree save_function = current_function_decl;
2914 int synthetic_p = 0;
2915 current_function_decl = part;
2916 ptr = append_chunk (NULL, 8, state);
2917 i = get_access_flags (part); PUT2 (i);
2918 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
2919 i = find_utf8_constant (&state->cpool, build_java_signature (type));
/* Method attribute count: Code (if there is a body) plus
   Exceptions (if there is a throws clause); Synthetic may be
   added below.  */
2921 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
2923 /* Make room for the Synthetic attribute (of zero length.) */
2924 if (DECL_FINIT_P (part)
2925 || DECL_INSTINIT_P (part)
2926 || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
2927 || TYPE_DOT_CLASS (clas) == part)
2933 PUT2 (i); /* attributes_count */
2936 ptr = append_synthetic_attribute (state);
2938 if (body != NULL_TREE)
/* Emit the Code attribute: generate bytecode, relocate it, then
   fill in the header and sub-attributes.  */
2940 int code_attributes_count = 0;
2941 static tree Code_node = NULL_TREE;
2944 struct jcf_handler *handler;
2945 if (Code_node == NULL_TREE)
2946 Code_node = get_identifier ("Code");
2947 ptr = append_chunk (NULL, 14, state);
2948 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
2950 init_jcf_method (state, part);
2951 get_jcf_label_here (state); /* Force a first block. */
2952 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2953 localvar_alloc (t, state);
2954 generate_bytecode_insns (body, IGNORE_TARGET, state);
/* If control can fall off the end of the body, a void method needs
   an explicit return instruction.  */
2955 if (CAN_COMPLETE_NORMALLY (body))
2957 if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
2960 OP1 (OPCODE_return);
2962 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2963 localvar_free (t, state);
2964 if (state->return_value_decl != NULL_TREE)
2965 localvar_free (state->return_value_decl, state);
2966 finish_jcf_block (state);
2967 perform_relocations (state);
/* Compute the Code attribute_length: fixed header + code bytes +
   exception_table_length field + 8 bytes per handler entry.  */
2970 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
2971 if (state->linenumber_count > 0)
2973 code_attributes_count++;
2974 i += 8 + 4 * state->linenumber_count;
2976 if (state->lvar_count > 0)
2978 code_attributes_count++;
2979 i += 8 + 10 * state->lvar_count;
2981 UNSAFE_PUT4 (i); /* attribute_length */
2982 UNSAFE_PUT2 (state->code_SP_max); /* max_stack */
2983 UNSAFE_PUT2 (localvar_max); /* max_locals */
2984 UNSAFE_PUT4 (state->code_length);
2986 /* Emit the exception table. */
2987 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
2988 PUT2 (state->num_handlers); /* exception_table_length */
2989 handler = state->handlers;
2990 for (; handler != NULL; handler = handler->next)
2993 PUT2 (handler->start_label->pc);
2994 PUT2 (handler->end_label->pc);
2995 PUT2 (handler->handler_label->pc);
/* A NULL handler type is a catch-all (finally); its catch_type
   index is presumably 0 — elided lines.  */
2996 if (handler->type == NULL_TREE)
2999 type_index = find_class_constant (&state->cpool,
3004 ptr = append_chunk (NULL, 2, state);
3005 PUT2 (code_attributes_count);
3007 /* Write the LineNumberTable attribute. */
3008 if (state->linenumber_count > 0)
3010 static tree LineNumberTable_node = NULL_TREE;
3011 ptr = append_chunk (NULL,
3012 8 + 4 * state->linenumber_count, state);
3013 if (LineNumberTable_node == NULL_TREE)
3014 LineNumberTable_node = get_identifier ("LineNumberTable");
3015 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3016 PUT2 (i); /* attribute_name_index */
3017 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
3018 i = state->linenumber_count; PUT2 (i);
3019 for (block = state->blocks; block != NULL; block = block->next)
3021 int line = block->linenumber;
3030 /* Write the LocalVariableTable attribute. */
3031 if (state->lvar_count > 0)
3033 static tree LocalVariableTable_node = NULL_TREE;
3034 struct localvar_info *lvar = state->first_lvar;
3035 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3036 if (LocalVariableTable_node == NULL_TREE)
3037 LocalVariableTable_node = get_identifier("LocalVariableTable");
3038 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3039 PUT2 (i); /* attribute_name_index */
3040 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
3041 i = state->lvar_count; PUT2 (i);
3042 for ( ; lvar != NULL; lvar = lvar->next)
3044 tree name = DECL_NAME (lvar->decl);
3045 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
3046 i = lvar->start_label->pc; PUT2 (i);
/* Entry length is end pc minus start pc.  */
3047 i = lvar->end_label->pc - i; PUT2 (i);
3048 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3049 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
3050 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
/* Emit the Exceptions attribute for the throws clause, if any.  */
3054 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3056 tree t = DECL_FUNCTION_THROWS (part);
3057 int throws_count = list_length (t);
3058 static tree Exceptions_node = NULL_TREE;
3059 if (Exceptions_node == NULL_TREE)
3060 Exceptions_node = get_identifier ("Exceptions");
3061 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3062 i = find_utf8_constant (&state->cpool, Exceptions_node);
3063 PUT2 (i); /* attribute_name_index */
3064 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
3065 i = throws_count; PUT2 (i);
3066 for (; t != NULL_TREE; t = TREE_CHAIN (t))
3068 i = find_class_constant (&state->cpool, TREE_VALUE (t));
3073 current_function_decl = save_function;
/* Back-patch the method count.  */
3075 ptr = methods_count_ptr; UNSAFE_PUT2 (methods_count);
/* Strip any directory components from the source file name; only the
   basename goes into the SourceFile attribute.  */
3077 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3078 for (s = source_file; ; s++)
3083 if (ch == '/' || ch == '\\')
3086 ptr = append_chunk (NULL, 10, state);
3088 i = 1; /* Source file always exists as an attribute */
3089 if (INNER_CLASS_TYPE_P (clas) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas)))
3091 if (clas == object_type_node)
3093 PUT2 (i); /* attributes_count */
3095 /* generate the SourceFile attribute. */
3096 if (SourceFile_node == NULL_TREE)
3098 SourceFile_node = get_identifier ("SourceFile");
3101 i = find_utf8_constant (&state->cpool, SourceFile_node);
3102 PUT2 (i); /* attribute_name_index */
3104 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3106 append_gcj_attribute (state, clas);
3107 append_innerclasses_attribute (state, clas);
3109 /* New finally generate the contents of the constant pool chunk. */
3110 i = count_constant_pool_bytes (&state->cpool);
3111 ptr = obstack_alloc (state->chunk_obstack, i);
3112 cpool_chunk->data = ptr;
3113 cpool_chunk->size = i;
3114 write_constant_pool (&state->cpool, ptr, i);
3115 return state->first;
/* Cached identifier for the "Synthetic" attribute name (GC-rooted).  */
3118 static GTY(()) tree Synthetic_node;
/* Append a zero-length "Synthetic" attribute chunk (2-byte name index
   + 4-byte length of 0) and return a pointer past it.
   NOTE(review): excerpt — the return statement is elided.  */
3119 static unsigned char *
3120 append_synthetic_attribute (struct jcf_partial *state)
3122 unsigned char *ptr = append_chunk (NULL, 6, state);
3125 if (Synthetic_node == NULL_TREE)
3127 Synthetic_node = get_identifier ("Synthetic");
3129 i = find_utf8_constant (&state->cpool, Synthetic_node);
3130 PUT2 (i); /* Attribute string index */
3131 PUT4 (0); /* Attribute length */
/* Append the zero-length "gnu.gcj.gcj-compiled" marker attribute,
   which identifies the class file as produced by gcj.  Skipped for
   java.lang.Object (the visible guard tests class != object_type_node;
   the elided lines presumably return early otherwise — TODO confirm).  */
3137 append_gcj_attribute (struct jcf_partial *state, tree class)
3142 if (class != object_type_node)
3145 ptr = append_chunk (NULL, 6, state); /* 2+4 */
3146 i = find_utf8_constant (&state->cpool,
3147 get_identifier ("gnu.gcj.gcj-compiled"));
3148 PUT2 (i); /* Attribute string index */
3149 PUT4 (0); /* Attribute length */
/* Cached identifier for the "InnerClasses" attribute name.  */
3152 static tree InnerClasses_node;
/* Append the "InnerClasses" attribute for CLASS: one entry per inner
   class visible from CLASS (itself, its enclosing classes, and the
   classes it declares).  Attribute length and entry count are written
   as zeros first, then back-patched once the entries are counted.
   NOTE(review): excerpt — lines are elided between the embedded
   original line numbers.  */
3154 append_innerclasses_attribute (struct jcf_partial *state, tree class)
3156 tree orig_decl = TYPE_NAME (class);
3159 unsigned char *ptr, *length_marker, *number_marker;
/* Nothing to do unless CLASS is itself inner or declares inner
   classes.  */
3161 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3164 ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3166 if (InnerClasses_node == NULL_TREE)
3168 InnerClasses_node = get_identifier ("InnerClasses");
3170 i = find_utf8_constant (&state->cpool, InnerClasses_node);
3172 length_marker = ptr; PUT4 (0); /* length, to be later patched */
3173 number_marker = ptr; PUT2 (0); /* number of classes, tblp */
3175 /* Generate the entries: all inner classes visible from the one we
3176 process: itself, up and down. */
3177 while (class && INNER_CLASS_TYPE_P (class))
3181 decl = TYPE_NAME (class);
/* Scan backwards from the end of the mangled name to find the last
   '$', which separates the simple inner-class name.  */
3182 n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3183 IDENTIFIER_LENGTH (DECL_NAME (decl));
3185 while (n[-1] != '$')
3187 append_innerclasses_attribute_entry (state, decl, get_identifier (n));
/* Walk up to the enclosing class.  */
3190 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
/* And down: one entry per inner class declared in DECL.  */
3194 for (current = DECL_INNER_CLASS_LIST (decl);
3195 current; current = TREE_CHAIN (current))
3197 append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3198 TREE_VALUE (current));
/* Back-patch: each entry is 8 bytes, plus the 2-byte count field.  */
3202 ptr = length_marker; PUT4 (8*length+2);
3203 ptr = number_marker; PUT2 (length);
/* Append one 8-byte InnerClasses entry for the inner class DECL with
   simple name NAME: inner_class_info_index, outer_class_info_index,
   inner_name_index, inner_class_access_flags.  For anonymous classes
   the outer index and name index are left 0, per the JVM spec.  */
3207 append_innerclasses_attribute_entry (struct jcf_partial *state,
3208 tree decl, tree name)
3211 int ocii = 0, ini = 0;
3212 unsigned char *ptr = append_chunk (NULL, 8, state);
3214 icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3216 /* Sun's implementation seems to generate ocii to 0 for inner
3217 classes (which aren't considered members of the class they're
3218 in.) The specs are saying that if the class is anonymous,
3219 inner_name_index must be zero. */
3220 if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl)))
3222 ocii = find_class_constant (&state->cpool,
3223 TREE_TYPE (DECL_CONTEXT (decl)));
3224 ini = find_utf8_constant (&state->cpool, name);
3226 icaf = get_access_flags (decl);
3228 PUT2 (icii); PUT2 (ocii); PUT2 (ini); PUT2 (icaf);
/* Build (in freshly xmalloc'd storage owned by the caller) the path of
   the .class file for CLAS: either next to the .java source file, or
   under jcf_write_base_directory with '.'-separated package components
   turned into directory separators.  Intermediate directories under
   the base directory are created as needed.
   NOTE(review): excerpt — lines are elided between the embedded
   original line numbers; in particular the return statement and parts
   of the directory-creation loop are not visible.  */
3232 make_class_file_name (tree clas)
3234 const char *dname, *cname, *slash;
/* Mangle the class name, mapping '.' to the directory separator.  */
3238 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3239 "", '.', DIR_SEPARATOR,
3241 if (jcf_write_base_directory == NULL)
3243 /* Make sure we put the class file into the .java file's
3244 directory, and not into some subdirectory thereof. */
3246 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3247 slash = strrchr (dname, DIR_SEPARATOR);
3253 t = strrchr (cname, DIR_SEPARATOR);
/* Otherwise build the path under the `-d' base directory.  */
3259 dname = jcf_write_base_directory;
3260 slash = dname + strlen (dname);
/* directory part + separator + class part + NUL.  */
3263 r = xmalloc (slash - dname + strlen (cname) + 2);
3264 strncpy (r, dname, slash - dname);
3265 r[slash - dname] = DIR_SEPARATOR;
3266 strcpy (&r[slash - dname + 1], cname);
3268 /* We try to make new directories when we need them. We only do
3269 this for directories which "might not" exist. For instance, we
3270 assume the `-d' directory exists, but we don't assume that any
3271 subdirectory below it exists. It might be worthwhile to keep
3272 track of which directories we've created to avoid gratuitous
3274 dname = r + (slash - dname) + 1;
3277 char *s = strchr (dname, DIR_SEPARATOR);
/* If the directory doesn't exist (stat fails) and can't be created,
   that's a fatal I/O error.  */
3281 if (stat (r, &sb) == -1
3282 /* Try to make it. */
3283 && mkdir (r, 0755) == -1)
3284 fatal_io_error ("can't create directory %s", r);
3287 /* Skip consecutive separators. */
3288 for (dname = s + 1; *dname && *dname == DIR_SEPARATOR; ++dname)
3295 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3296 The output .class file name is make_class_file_name(CLAS). */
/* Generate and write out the .class file for CLAS.  The file is first
   written under a ".tmp" suffix and then renamed into place, so other
   processes never observe a partially written class file.
   NOTE(review): excerpt — some lines are elided between the embedded
   original line numbers.  */
3299 write_classfile (tree clas)
3301 struct obstack *work = &temporary_obstack;
3302 struct jcf_partial state[1];
3303 char *class_file_name = make_class_file_name (clas);
3304 struct chunk *chunks;
3306 if (class_file_name != NULL)
3309 char *temporary_file_name;
3311 /* The .class file is initially written to a ".tmp" file so that
3312 if multiple instances of the compiler are running at once
3313 they do not see partially formed class files. */
3314 temporary_file_name = concat (class_file_name, ".tmp", NULL);
3315 stream = fopen (temporary_file_name, "wb");
3317 fatal_io_error ("can't open %s for writing", temporary_file_name);
3319 jcf_dependency_add_target (class_file_name);
3320 init_jcf_state (state, work);
3321 chunks = generate_classfile (clas, state);
3322 write_chunks (stream, chunks);
/* fclose flushes buffered output; a failure here means the class
   file is incomplete, so treat it as fatal.  */
3323 if (fclose (stream))
3324 fatal_io_error ("error closing %s", temporary_file_name);
3326 /* If a file named by the string pointed to by `new' exists
3327 prior to the call to the `rename' function, the bahaviour
3328 is implementation-defined. ISO 9899-1990 7.9.4.2.
3330 For example, on Win32 with MSVCRT, it is an error. */
3332 unlink (class_file_name);
3334 if (rename (temporary_file_name, class_file_name) == -1)
/* Rename failed: clean up the temporary so it doesn't linger.  */
3336 remove (temporary_file_name);
3337 fatal_io_error ("can't create %s", class_file_name);
3339 free (temporary_file_name);
3340 free (class_file_name);
3342 release_jcf_state (state);
3346 string concatenation
3347 synchronized statement
3350 #include "gt-java-jcf-write.h"