1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15 You should have received a copy of the GNU General Public License
16 along with GNU CC; see the file COPYING. If not, write to
17 the Free Software Foundation, 59 Temple Place - Suite 330,
18 Boston, MA 02111-1307, USA.
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
28 #include "java-tree.h"
33 #include "java-opcodes.h"
34 #include "parse.h" /* for BLOCK_EXPR_BODY */
39 #define DIR_SEPARATOR '/'
42 extern struct obstack temporary_obstack;
44 /* Base directory in which `.class' files should be written.
45 NULL means to put the file into the same directory as the
46 corresponding .java file. */
47 char *jcf_write_base_directory = NULL;
49 /* Make sure bytecode.data is big enough for at least N more bytes. */
52 do { CHECK_OP(state); \
53 if (state->bytecode.ptr + (N) > state->bytecode.limit) \
54 buffer_grow (&state->bytecode, N); } while (0)
56 /* Add a 1-byte instruction/operand I to bytecode.data,
57 assuming space has already been RESERVE'd. */
59 #define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
61 /* Like OP1, but I is a 2-byte big endian integer. */
64 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
66 /* Like OP1, but I is a 4-byte big endian integer. */
69 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
70 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
72 /* Macro to call each time we push I words on the JVM stack. */
74 #define NOTE_PUSH(I) \
75 do { state->code_SP += (I); \
76 if (state->code_SP > state->code_SP_max) \
77 state->code_SP_max = state->code_SP; } while (0)
79 /* Macro to call each time we pop I words from the JVM stack. */
82 do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
84 /* A chunk or segment of a .class file. */
88 /* The next segment of this .class file. */
91 /* The actual data in this segment to be written to the .class file. */
94 /* The size of the segment to be written to the .class file. */
98 #define PENDING_CLEANUP_PC (-3)
99 #define PENDING_EXIT_PC (-2)
100 #define UNDEFINED_PC (-1)
102 /* Each "block" represents a label plus the bytecode instructions following.
103 There may be branches out of the block, but no incoming jumps, except
104 to the beginning of the block.
106 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
107 associated code yet), but it is an undefined label.
112 /* For blocks that are defined, the next block (in pc order).
113 For blocks that are the not-yet-defined end label of a LABELED_BLOCK_EXPR
114 or a cleanup expression (from a WITH_CLEANUP_EXPR),
115 this is the next (outer) such end label, in a stack headed by
116 labeled_blocks in jcf_partial. */
117 struct jcf_block *next;
119 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
120 pc is PENDING_EXIT_PC.
121 In the not-yet-defined end label for pending cleanup subroutine,
122 pc is PENDING_CLEANUP_PC.
123 For other not-yet-defined labels, pc is UNDEFINED_PC.
125 If the label has been defined:
126 Until perform_relocations is finished, this is the maximum possible
127 value of the bytecode offset at the beginning of this block.
128 After perform_relocations, it is the actual offset (pc). */
133 /* After finish_jcf_block is called, the actual instructions contained in this block.
134 Before then it is NULL, and the instructions are in state->bytecode. */
138 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
139 covered by the cleanup. */
140 struct jcf_block *start_label;
144 /* Set of relocations (in reverse offset order) for this block. */
145 struct jcf_relocation *relocations;
147 /* If this block is that of the not-yet-defined end label of
148 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
149 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
154 /* A "relocation" type for the 0-3 bytes of padding at the start
155 of a tableswitch or a lookupswitch. */
156 #define SWITCH_ALIGN_RELOC 4
158 /* A relocation type for the labels in a tableswitch or a lookupswitch;
159 these are relative to the start of the instruction, but (due to
160 the 0-3 bytes of padding), we don't know the offset before relocation. */
161 #define BLOCK_START_RELOC 1
163 struct jcf_relocation
165 /* Next relocation for the current jcf_block. */
166 struct jcf_relocation *next;
168 /* The (byte) offset within the current block that needs to be relocated. */
169 HOST_WIDE_INT offset;
171 /* 0 if offset is a 4-byte relative offset.
172 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
173 for proper alignment in tableswitch/lookupswitch instructions.
174 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
175 to the start of the containing block.
176 -1 if offset is a 2-byte relative offset.
177 < -1 if offset is the address of an instruction with a 2-byte offset
178 that does not have a corresponding 4-byte offset version, in which
179 case the absolute value of kind is the inverted opcode.
180 > 4 if offset is the address of an instruction (such as jsr) with a
181 2-byte offset that does have a corresponding 4-byte offset version,
182 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
185 /* The label the relocation wants to actually transfer to. */
186 struct jcf_block *label;
189 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
190 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
192 /* State for single catch clause. */
196 struct jcf_handler *next;
198 struct jcf_block *start_label;
199 struct jcf_block *end_label;
200 struct jcf_block *handler_label;
202 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
206 /* State for the current switch statement. */
208 struct jcf_switch_state
210 struct jcf_switch_state *prev;
211 struct jcf_block *default_label;
213 struct jcf_relocation *cases;
215 HOST_WIDE_INT min_case, max_case;
218 /* This structure is used to contain the various pieces that will
219 become a .class file. */
225 struct obstack *chunk_obstack;
228 /* List of basic blocks for the current method. */
229 struct jcf_block *blocks;
230 struct jcf_block *last_block;
232 struct localvar_info *first_lvar;
233 struct localvar_info *last_lvar;
238 int linenumber_count;
240 /* Until perform_relocations, this is a upper bound on the number
241 of bytes (so far) in the instructions for the current method. */
244 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
245 struct jcf_block *labeled_blocks;
247 /* The current stack size (stack pointer) in the current method. */
250 /* The largest extent of stack size (stack pointer) in the current method. */
253 /* Contains a mapping from local var slot number to localvar_info. */
254 struct buffer localvars;
256 /* The buffer allocated for bytecode for the current jcf_block. */
257 struct buffer bytecode;
259 /* Chain of exception handlers for the current method. */
260 struct jcf_handler *handlers;
262 /* Last element in handlers chain. */
263 struct jcf_handler *last_handler;
265 /* Number of exception handlers for the current method. */
268 /* Number of finalizers we are currently nested within. */
271 /* If non-NULL, use this for the return value. */
272 tree return_value_decl;
274 /* Information about the current switch statement. */
275 struct jcf_switch_state *sw_state;
278 static void generate_bytecode_insns PARAMS ((tree, int, struct jcf_partial *));
279 static struct chunk * alloc_chunk PARAMS ((struct chunk *, unsigned char *,
280 int, struct obstack *));
281 static unsigned char * append_chunk PARAMS ((unsigned char *, int,
282 struct jcf_partial *));
283 static void append_chunk_copy PARAMS ((unsigned char *, int,
284 struct jcf_partial *));
285 static struct jcf_block * gen_jcf_label PARAMS ((struct jcf_partial *));
286 static void finish_jcf_block PARAMS ((struct jcf_partial *));
287 static void define_jcf_label PARAMS ((struct jcf_block *,
288 struct jcf_partial *));
289 static struct jcf_block * get_jcf_label_here PARAMS ((struct jcf_partial *));
290 static void put_linenumber PARAMS ((int, struct jcf_partial *));
291 static void localvar_alloc PARAMS ((tree, struct jcf_partial *));
292 static void localvar_free PARAMS ((tree, struct jcf_partial *));
293 static int get_access_flags PARAMS ((tree));
294 static void write_chunks PARAMS ((FILE *, struct chunk *));
295 static int adjust_typed_op PARAMS ((tree, int));
296 static void generate_bytecode_conditional PARAMS ((tree, struct jcf_block *,
297 struct jcf_block *, int,
298 struct jcf_partial *));
299 static void generate_bytecode_return PARAMS ((tree, struct jcf_partial *));
300 static void perform_relocations PARAMS ((struct jcf_partial *));
301 static void init_jcf_state PARAMS ((struct jcf_partial *, struct obstack *));
302 static void init_jcf_method PARAMS ((struct jcf_partial *, tree));
303 static void release_jcf_state PARAMS ((struct jcf_partial *));
304 static struct chunk * generate_classfile PARAMS ((tree, struct jcf_partial *));
305 static struct jcf_handler *alloc_handler PARAMS ((struct jcf_block *,
307 struct jcf_partial *));
308 static void emit_iinc PARAMS ((tree, HOST_WIDE_INT, struct jcf_partial *));
309 static void emit_reloc PARAMS ((HOST_WIDE_INT, int, struct jcf_block *,
310 struct jcf_partial *));
311 static void push_constant1 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
312 static void push_constant2 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
313 static void push_int_const PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
314 static int find_constant_wide PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
315 struct jcf_partial *));
316 static void push_long_const PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
317 struct jcf_partial *));
318 static int find_constant_index PARAMS ((tree, struct jcf_partial *));
/* NOTE(review): the following declaration exactly duplicates the
   push_long_const declaration two lines above; harmless in C, but one
   copy can be deleted.  */
319 static void push_long_const PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
320 struct jcf_partial *));
321 static void field_op PARAMS ((tree, int, struct jcf_partial *));
322 static void maybe_wide PARAMS ((int, int, struct jcf_partial *));
323 static void emit_dup PARAMS ((int, int, struct jcf_partial *));
324 static void emit_pop PARAMS ((int, struct jcf_partial *));
325 static void emit_load_or_store PARAMS ((tree, int, struct jcf_partial *));
326 static void emit_load PARAMS ((tree, struct jcf_partial *));
327 static void emit_store PARAMS ((tree, struct jcf_partial *));
328 static void emit_unop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
329 static void emit_binop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
/* NOTE(review): duplicate declaration — emit_reloc is already declared
   earlier in this prototype list; one copy can be deleted.  */
330 static void emit_reloc PARAMS ((HOST_WIDE_INT, int, struct jcf_block *,
331 struct jcf_partial *));
332 static void emit_switch_reloc PARAMS ((struct jcf_block *,
333 struct jcf_partial *));
334 static void emit_case_reloc PARAMS ((struct jcf_relocation *,
335 struct jcf_partial *));
336 static void emit_if PARAMS ((struct jcf_block *, int, int,
337 struct jcf_partial *));
338 static void emit_goto PARAMS ((struct jcf_block *, struct jcf_partial *));
339 static void emit_jsr PARAMS ((struct jcf_block *, struct jcf_partial *));
340 static void call_cleanups PARAMS ((struct jcf_block *, struct jcf_partial *));
341 static char *make_class_file_name PARAMS ((tree));
342 static unsigned char *append_synthetic_attribute PARAMS ((struct jcf_partial *));
343 static void append_innerclasses_attribute PARAMS ((struct jcf_partial *, tree));
344 static void append_innerclasses_attribute_entry PARAMS ((struct jcf_partial *, tree, tree));
346 /* Utility macros for appending (big-endian) data to a buffer.
347 We assume a local variable 'ptr' points into where we want to
348 write next, and we assume enough space has been allocated. */
350 #ifdef ENABLE_CHECKING
352 CHECK_PUT(ptr, state, i)
354 struct jcf_partial *state;
357 if (ptr < state->chunk->data
358 || (char*)ptr + i > state->chunk->data + state->chunk->size)
359 fatal ("internal error - CHECK_PUT failed");
363 #define CHECK_PUT(PTR, STATE, I) ((void)0)
366 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
367 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
368 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
369 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
371 /* There are some cases below where CHECK_PUT is guaranteed to fail.
372 Use the following macros in those specific cases. */
373 #define UNSAFE_PUT1(X) (*ptr++ = (X))
374 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
375 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
376 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
379 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
380 Set the data and size fields to DATA and SIZE, respectively.
381 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
383 static struct chunk *
384 alloc_chunk (last, data, size, work)
388 struct obstack *work;
390 struct chunk *chunk = (struct chunk *)
391 obstack_alloc (work, sizeof(struct chunk));
393 if (data == NULL && size > 0)
394 data = obstack_alloc (work, size);
404 #ifdef ENABLE_CHECKING
406 CHECK_OP(struct jcf_partial *state)
408 if (state->bytecode.ptr > state->bytecode.limit)
410 fatal("internal error - CHECK_OP failed");
415 #define CHECK_OP(STATE) ((void)0)
418 static unsigned char *
419 append_chunk (data, size, state)
422 struct jcf_partial *state;
424 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
425 if (state->first == NULL)
426 state->first = state->chunk;
427 return state->chunk->data;
431 append_chunk_copy (data, size, state)
434 struct jcf_partial *state;
436 unsigned char *ptr = append_chunk (NULL, size, state);
437 memcpy (ptr, data, size);
440 static struct jcf_block *
441 gen_jcf_label (state)
442 struct jcf_partial *state;
444 struct jcf_block *block = (struct jcf_block *)
445 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
447 block->linenumber = -1;
448 block->pc = UNDEFINED_PC;
453 finish_jcf_block (state)
454 struct jcf_partial *state;
456 struct jcf_block *block = state->last_block;
457 struct jcf_relocation *reloc;
458 int code_length = BUFFER_LENGTH (&state->bytecode);
459 int pc = state->code_length;
460 append_chunk_copy (state->bytecode.data, code_length, state);
461 BUFFER_RESET (&state->bytecode);
462 block->v.chunk = state->chunk;
464 /* Calculate code_length to the maximum value it can have. */
465 pc += block->v.chunk->size;
466 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
468 int kind = reloc->kind;
469 if (kind == SWITCH_ALIGN_RELOC)
471 else if (kind > BLOCK_START_RELOC)
472 pc += 2; /* 2-byte offset may grow to 4-byte offset */
474 pc += 5; /* May need to add a goto_w. */
476 state->code_length = pc;
480 define_jcf_label (label, state)
481 struct jcf_block *label;
482 struct jcf_partial *state;
484 if (state->last_block != NULL)
485 finish_jcf_block (state);
486 label->pc = state->code_length;
487 if (state->blocks == NULL)
488 state->blocks = label;
490 state->last_block->next = label;
491 state->last_block = label;
493 label->u.relocations = NULL;
496 static struct jcf_block *
497 get_jcf_label_here (state)
498 struct jcf_partial *state;
500 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
501 return state->last_block;
504 struct jcf_block *label = gen_jcf_label (state);
505 define_jcf_label (label, state);
510 /* Note a line number entry for the current PC and given LINE. */
513 put_linenumber (line, state)
515 struct jcf_partial *state;
517 struct jcf_block *label = get_jcf_label_here (state);
518 if (label->linenumber > 0)
520 label = gen_jcf_label (state);
521 define_jcf_label (label, state);
523 label->linenumber = line;
524 state->linenumber_count++;
527 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
528 in the range (START_LABEL, END_LABEL). */
530 static struct jcf_handler *
531 alloc_handler (start_label, end_label, state)
532 struct jcf_block *start_label;
533 struct jcf_block *end_label;
534 struct jcf_partial *state;
536 struct jcf_handler *handler = (struct jcf_handler *)
537 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
538 handler->start_label = start_label;
539 handler->end_label = end_label;
540 handler->handler_label = get_jcf_label_here (state);
541 if (state->handlers == NULL)
542 state->handlers = handler;
544 state->last_handler->next = handler;
545 state->last_handler = handler;
546 handler->next = NULL;
547 state->num_handlers++;
552 /* The index of jvm local variable allocated for this DECL.
553 This is assigned when generating .class files;
554 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
555 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
557 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
561 struct localvar_info *next;
564 struct jcf_block *start_label;
565 struct jcf_block *end_label;
568 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
569 #define localvar_max \
570 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
573 localvar_alloc (decl, state)
575 struct jcf_partial *state;
577 struct jcf_block *start_label = get_jcf_label_here (state);
578 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
580 register struct localvar_info *info;
581 register struct localvar_info **ptr = localvar_buffer;
582 register struct localvar_info **limit
583 = (struct localvar_info**) state->localvars.ptr;
584 for (index = 0; ptr < limit; index++, ptr++)
587 && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
592 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
593 ptr = (struct localvar_info**) state->localvars.data + index;
594 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
596 info = (struct localvar_info *)
597 obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
600 ptr[1] = (struct localvar_info *)(~0);
601 DECL_LOCAL_INDEX (decl) = index;
603 info->start_label = start_label;
605 if (debug_info_level > DINFO_LEVEL_TERSE
606 && DECL_NAME (decl) != NULL_TREE)
608 /* Generate debugging info. */
610 if (state->last_lvar != NULL)
611 state->last_lvar->next = info;
613 state->first_lvar = info;
614 state->last_lvar = info;
620 localvar_free (decl, state)
622 struct jcf_partial *state;
624 struct jcf_block *end_label = get_jcf_label_here (state);
625 int index = DECL_LOCAL_INDEX (decl);
626 register struct localvar_info **ptr = &localvar_buffer [index];
627 register struct localvar_info *info = *ptr;
628 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
630 info->end_label = end_label;
632 if (info->decl != decl)
637 if (ptr[1] != (struct localvar_info *)(~0))
644 #define STACK_TARGET 1
645 #define IGNORE_TARGET 2
647 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
648 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
651 get_access_flags (decl)
655 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
656 if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
658 if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */
660 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
662 if (TREE_PROTECTED (decl))
663 flags |= ACC_PROTECTED;
664 if (TREE_PRIVATE (decl))
665 flags |= ACC_PRIVATE;
667 else if (TREE_CODE (decl) == TYPE_DECL)
669 if (CLASS_SUPER (decl))
671 if (CLASS_ABSTRACT (decl))
672 flags |= ACC_ABSTRACT;
673 if (CLASS_INTERFACE (decl))
674 flags |= ACC_INTERFACE;
675 if (CLASS_STATIC (decl))
677 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
678 || LOCAL_CLASS_P (TREE_TYPE (decl)))
679 flags |= ACC_PRIVATE;
682 fatal ("internal error - bad argument to get_access_flags");
683 if (TREE_CODE (decl) == FUNCTION_DECL)
685 if (METHOD_NATIVE (decl))
687 if (METHOD_STATIC (decl))
689 if (METHOD_SYNCHRONIZED (decl))
690 flags |= ACC_SYNCHRONIZED;
691 if (METHOD_ABSTRACT (decl))
692 flags |= ACC_ABSTRACT;
696 if (FIELD_STATIC (decl))
698 if (FIELD_VOLATILE (decl))
699 flags |= ACC_VOLATILE;
700 if (FIELD_TRANSIENT (decl))
701 flags |= ACC_TRANSIENT;
706 /* Write the list of segments starting at CHUNKS to STREAM. */
709 write_chunks (stream, chunks)
711 struct chunk *chunks;
/* Walk the singly-linked chunk list in order, emitting each chunk's raw
   bytes.  NOTE(review): the return value of fwrite is not checked, so a
   short write (disk full, closed pipe) would go undetected here.  */
713 for (; chunks != NULL; chunks = chunks->next)
714 fwrite (chunks->data, chunks->size, 1, stream);
717 /* Push a 1-word constant in the constant pool at the given INDEX.
718 (Caller is responsible for doing NOTE_PUSH.) */
721 push_constant1 (index, state)
723 struct jcf_partial *state;
738 /* Push a 2-word constant in the constant pool at the given INDEX.
739 (Caller is responsible for doing NOTE_PUSH.) */
742 push_constant2 (index, state)
744 struct jcf_partial *state;
751 /* Push 32-bit integer constant on VM stack.
752 Caller is responsible for doing NOTE_PUSH. */
755 push_int_const (i, state)
757 struct jcf_partial *state;
760 if (i >= -1 && i <= 5)
761 OP1(OPCODE_iconst_0 + i);
762 else if (i >= -128 && i < 128)
767 else if (i >= -32768 && i < 32768)
774 i = find_constant1 (&state->cpool, CONSTANT_Integer,
775 (jword)(i & 0xFFFFFFFF));
776 push_constant1 (i, state);
/* Find (or create) a CONSTANT_Long constant-pool entry for the 64-bit
   value whose low and high words are LO and HI; returns its pool index.  */
781 find_constant_wide (lo, hi, state)
782 HOST_WIDE_INT lo, hi;
783 struct jcf_partial *state;
/* The lshift_double by -32 presumably shifts the value right so that w1
   receives the high 32 bits (w2 is unused here) — TODO confirm against
   lshift_double's contract.  */
785 HOST_WIDE_INT w1, w2;
786 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
787 return find_constant2 (&state->cpool, CONSTANT_Long,
788 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
791 /* Find or allocate a constant pool entry for the given VALUE.
792 Return the index in the constant pool. */
795 find_constant_index (value, state)
797 struct jcf_partial *state;
799 if (TREE_CODE (value) == INTEGER_CST)
801 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
802 return find_constant1 (&state->cpool, CONSTANT_Integer,
803 (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
805 return find_constant_wide (TREE_INT_CST_LOW (value),
806 TREE_INT_CST_HIGH (value), state);
808 else if (TREE_CODE (value) == REAL_CST)
811 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
813 words[0] = etarsingle (TREE_REAL_CST (value)) & 0xFFFFFFFF;
814 return find_constant1 (&state->cpool, CONSTANT_Float,
819 etardouble (TREE_REAL_CST (value), words);
820 return find_constant2 (&state->cpool, CONSTANT_Double,
821 (jword)(words[1-FLOAT_WORDS_BIG_ENDIAN] &
823 (jword)(words[FLOAT_WORDS_BIG_ENDIAN] &
827 else if (TREE_CODE (value) == STRING_CST)
829 return find_string_constant (&state->cpool, value);
832 fatal ("find_constant_index - bad type");
835 /* Push 64-bit long constant on VM stack.
836 Caller is responsible for doing NOTE_PUSH. */
839 push_long_const (lo, hi, state)
840 HOST_WIDE_INT lo, hi;
841 struct jcf_partial *state;
/* Three strategies, cheapest encoding first:
   - 0 or 1: single lconst_0/lconst_1 opcode;
   - values fitting in a signed 16-bit int: push as an int constant
     (presumably widened to long by code not visible here — confirm);
   - anything else: reference a CONSTANT_Long pool entry via
     push_constant2.  */
843 if (hi == 0 && lo >= 0 && lo <= 1)
846 OP1(OPCODE_lconst_0 + lo);
848 else if ((hi == 0 && lo < 32768) || (hi == -1 && lo >= -32768))
850 push_int_const (lo, state);
855 push_constant2 (find_constant_wide (lo, hi, state), state);
859 field_op (field, opcode, state)
862 struct jcf_partial *state;
864 int index = find_fieldref_index (&state->cpool, field);
870 /* Returns an integer in the range 0 (for 'int') through 4 (for object
871 reference) to 7 (for 'short') which matches the pattern of how JVM
872 opcodes typically depend on the operand type. */
875 adjust_typed_op (type, max)
879 switch (TREE_CODE (type))
882 case RECORD_TYPE: return 4;
884 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
886 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
888 switch (TYPE_PRECISION (type))
890 case 8: return max < 5 ? 0 : 5;
891 case 16: return max < 7 ? 0 : 7;
897 switch (TYPE_PRECISION (type))
910 maybe_wide (opcode, index, state)
912 struct jcf_partial *state;
929 /* Compile code to duplicate with offset, where
930 SIZE is the size of the stack item to duplicate (1 or 2), and
931 OFFSET is where to insert the result (must be 0, 1, or 2).
932 (The new words get inserted at stack[SP-size-offset].) */
935 emit_dup (size, offset, state)
937 struct jcf_partial *state;
/* Select among the six JVM dup variants: dup/dup2 for offset 0,
   dup_x1/dup2_x1 for offset 1, dup_x2/dup2_x2 for offset 2.  */
944 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
945 else if (offset == 1)
946 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
947 else if (offset == 2)
948 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
956 emit_pop (size, state)
958 struct jcf_partial *state;
961 OP1 (OPCODE_pop - 1 + size);
965 emit_iinc (var, value, state)
968 struct jcf_partial *state;
970 int slot = DECL_LOCAL_INDEX (var);
972 if (value < -128 || value > 127 || slot >= 256)
990 emit_load_or_store (var, opcode, state)
991 tree var; /* Variable to load from or store into. */
992 int opcode; /* Either OPCODE_iload or OPCODE_istore. */
993 struct jcf_partial *state;
995 tree type = TREE_TYPE (var);
996 int kind = adjust_typed_op (type, 4);
997 int index = DECL_LOCAL_INDEX (var);
1001 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
1004 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
/* Load local variable VAR onto the JVM stack.  */
1008 emit_load (var, state)
1010 struct jcf_partial *state;
/* Account for the pushed words: 2 for wide (long/double) types, else 1.  */
1012 emit_load_or_store (var, OPCODE_iload, state);
1013 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
/* Store the top of the JVM stack into local variable VAR.  */
1017 emit_store (var, state)
1019 struct jcf_partial *state;
/* Account for the popped words: 2 for wide (long/double) types, else 1.  */
1021 emit_load_or_store (var, OPCODE_istore, state);
1022 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1026 emit_unop (opcode, type, state)
1027 enum java_opcode opcode;
1028 tree type ATTRIBUTE_UNUSED;
1029 struct jcf_partial *state;
1036 emit_binop (opcode, type, state)
1037 enum java_opcode opcode;
1039 struct jcf_partial *state;
1041 int size = TYPE_IS_WIDE (type) ? 2 : 1;
1048 emit_reloc (value, kind, target, state)
1049 HOST_WIDE_INT value;
1051 struct jcf_block *target;
1052 struct jcf_partial *state;
1054 struct jcf_relocation *reloc = (struct jcf_relocation *)
1055 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1056 struct jcf_block *block = state->last_block;
1057 reloc->next = block->u.relocations;
1058 block->u.relocations = reloc;
1059 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1060 reloc->label = target;
1062 if (kind == 0 || kind == BLOCK_START_RELOC)
1064 else if (kind != SWITCH_ALIGN_RELOC)
1069 emit_switch_reloc (label, state)
1070 struct jcf_block *label;
1071 struct jcf_partial *state;
1073 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1076 /* Similar to emit_switch_reloc,
1077 but re-uses an existing case reloc. */
1080 emit_case_reloc (reloc, state)
1081 struct jcf_relocation *reloc;
1082 struct jcf_partial *state;
1084 struct jcf_block *block = state->last_block;
1085 reloc->next = block->u.relocations;
1086 block->u.relocations = reloc;
1087 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1088 reloc->kind = BLOCK_START_RELOC;
1092 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1093 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1096 emit_if (target, opcode, inv_opcode, state)
1097 struct jcf_block *target;
1098 int opcode, inv_opcode;
1099 struct jcf_partial *state;
/* The relocation kind is the negated inverted opcode (kind < -1 in the
   jcf_relocation kind encoding above), allowing perform_relocations to
   rewrite the branch if the final 2-byte offset would overflow.  */
1103 /* value is 1 byte from reloc back to start of instruction. */
1104 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
/* Emit an unconditional branch to TARGET.  */
1108 emit_goto (target, state)
1109 struct jcf_block *target;
1110 struct jcf_partial *state;
/* The relocation kind is OPCODE_goto_w (kind > 4 in the encoding above),
   so the initially 2-byte goto can be widened to a 4-byte goto_w during
   perform_relocations if the offset requires it.  */
1114 /* Value is 1 byte from reloc back to start of instruction. */
1115 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
/* Emit a subroutine call (jsr) to TARGET.  */
1119 emit_jsr (target, state)
1120 struct jcf_block *target;
1121 struct jcf_partial *state;
/* The relocation kind is OPCODE_jsr_w (kind > 4 in the encoding above),
   so the 2-byte jsr can be widened to jsr_w during perform_relocations
   if the offset requires it.  */
1125 /* Value is 1 byte from reloc back to start of instruction. */
1126 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1129 /* Generate code to evaluate EXP. If the result is true,
1130 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1131 TRUE_BRANCH_FIRST is a code generation hint that the
1132 TRUE_LABEL may follow right after this. (The idea is that we
1133 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1136 generate_bytecode_conditional (exp, true_label, false_label,
1137 true_branch_first, state)
1139 struct jcf_block *true_label;
1140 struct jcf_block *false_label;
1141 int true_branch_first;
1142 struct jcf_partial *state;
1144 tree exp0, exp1, type;
1145 int save_SP = state->code_SP;
1146 enum java_opcode op, negop;
1147 switch (TREE_CODE (exp))
/* Constant condition: an unconditional branch suffices. */
1150 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
1154 struct jcf_block *then_label = gen_jcf_label (state);
1155 struct jcf_block *else_label = gen_jcf_label (state);
1156 int save_SP_before, save_SP_after;
/* COND_EXPR: both arms are compiled as conditionals and must leave the
   stack at the same depth.  */
1157 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1158 then_label, else_label, 1, state);
1159 define_jcf_label (then_label, state);
1160 save_SP_before = state->code_SP;
1161 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1162 true_label, false_label, 1, state);
1163 save_SP_after = state->code_SP;
1164 state->code_SP = save_SP_before;
1165 define_jcf_label (else_label, state);
1166 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1167 true_label, false_label,
1168 true_branch_first, state);
1169 if (state->code_SP != save_SP_after)
1170 fatal ("internal error non-matching SP");
/* Logical NOT just swaps the two branch targets. */
1173 case TRUTH_NOT_EXPR:
1174 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label, true_label,
1175 ! true_branch_first, state);
/* Short-circuit AND: evaluate the second operand only when the first
   is true.  */
1177 case TRUTH_ANDIF_EXPR:
1179 struct jcf_block *next_label = gen_jcf_label (state);
1180 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1181 next_label, false_label, 1, state);
1182 define_jcf_label (next_label, state);
1183 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1184 true_label, false_label, 1, state);
/* Short-circuit OR: evaluate the second operand only when the first
   is false.  */
1187 case TRUTH_ORIF_EXPR:
1189 struct jcf_block *next_label = gen_jcf_label (state);
1190 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1191 true_label, next_label, 1, state);
1192 define_jcf_label (next_label, state);
1193 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1194 true_label, false_label, 1, state);
1198 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1199 set it to the corresponding 1-operand if<COND> instructions. */
1203 /* The opcodes with their inverses are allocated in pairs.
1204 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1205 negop = (op & 1) ? op + 1 : op - 1;
1207 if (true_branch_first)
1209 emit_if (false_label, negop, op, state);
1210 emit_goto (true_label, state);
1214 emit_if (true_label, op, negop, state);
1215 emit_goto (false_label, state);
1219 op = OPCODE_if_icmpeq;
1222 op = OPCODE_if_icmpne;
1225 op = OPCODE_if_icmpgt;
1228 op = OPCODE_if_icmplt;
1231 op = OPCODE_if_icmpge;
1234 op = OPCODE_if_icmple;
1237 exp0 = TREE_OPERAND (exp, 0);
1238 exp1 = TREE_OPERAND (exp, 1);
1239 type = TREE_TYPE (exp0);
1240 switch (TREE_CODE (type))
/* Object references: compare with if_acmpeq/if_acmpne, or with
   ifnull/ifnonnull when one operand is the null constant.  */
1243 case POINTER_TYPE: case RECORD_TYPE:
1244 switch (TREE_CODE (exp))
1246 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1247 case NE_EXPR: op = OPCODE_if_acmpne; break;
1250 if (integer_zerop (exp1) || integer_zerop (exp0))
/* FIX: compile the operand that is NOT the null constant.  The original
   read "integer_zerop (exp1) ? exp0 : exp0" (both arms exp0), which
   pushed the null constant itself when exp0 was the null operand,
   making the subsequent ifnull/ifnonnull test meaningless.  */
1252 generate_bytecode_insns (integer_zerop (exp1) ? exp0 : exp1,
1253 STACK_TARGET, state);
1254 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1255 negop = (op & 1) ? op - 1 : op + 1;
1259 generate_bytecode_insns (exp0, STACK_TARGET, state);
1260 generate_bytecode_insns (exp1, STACK_TARGET, state);
1264 generate_bytecode_insns (exp0, STACK_TARGET, state);
1265 generate_bytecode_insns (exp1, STACK_TARGET, state);
1266 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1270 if (TYPE_PRECISION (type) > 32)
1281 if (TYPE_PRECISION (type) > 32)
1283 generate_bytecode_insns (exp0, STACK_TARGET, state);
1284 generate_bytecode_insns (exp1, STACK_TARGET, state);
1292 if (integer_zerop (exp1))
1294 generate_bytecode_insns (exp0, STACK_TARGET, state);
1298 if (integer_zerop (exp0))
1302 case OPCODE_if_icmplt:
1303 case OPCODE_if_icmpge:
1306 case OPCODE_if_icmpgt:
1307 case OPCODE_if_icmple:
1313 generate_bytecode_insns (exp1, STACK_TARGET, state);
1317 generate_bytecode_insns (exp0, STACK_TARGET, state);
1318 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* Default: evaluate EXP to an int on the stack and branch on
   zero/non-zero with ifeq/ifne.  */
1324 generate_bytecode_insns (exp, STACK_TARGET, state);
1326 if (true_branch_first)
1328 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1329 emit_goto (true_label, state);
1333 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1334 emit_goto (false_label, state);
/* A conditional must be stack-neutral overall. */
1338 if (save_SP != state->code_SP)
1339 fatal ("internal error - SP mismatch");
1342 /* Call pending cleanups i.e. those for surrounding CLEANUP_POINT_EXPRs
1343 but only as far out as LIMIT (since we are about to jump to the
1344 emit label that is LIMIT). */
/* K&R-style definition.  LIMIT is a label on the state->labeled_blocks
   chain (or NULL_PTR to run every pending cleanup); STATE is the
   per-method code-generation state. */
1347 call_cleanups (limit, state)
1348 struct jcf_block *limit;
1349 struct jcf_partial *state;
/* Walk the innermost-first chain of active labeled blocks, stopping at
   LIMIT (exclusive). */
1351 struct jcf_block *block = state->labeled_blocks;
1352 for (; block != limit; block = block->next)
/* A block whose pc is still PENDING_CLEANUP_PC carries a cleanup body
   that has not been emitted yet (the WITH_CLEANUP_EXPR case of
   generate_bytecode_insns marks blocks this way); call it as a jsr
   subroutine so the cleanup runs before control leaves its scope. */
1354 if (block->pc == PENDING_CLEANUP_PC)
1355 emit_jsr (block, state);
/* Emit bytecode to return the value of expression EXP from the current
   method.  Unwraps compound/conditional forms, runs pending cleanups,
   and selects the return opcode from the method's declared return type.
   NOTE(review): this listing is elided — several case labels, braces and
   declarations (e.g. the `tree exp;' parameter line) are not visible. */
1360 generate_bytecode_return (exp, state)
1362 struct jcf_partial *state;
/* The method's return type: TREE_TYPE of the method decl's function type. */
1364 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
1365 int returns_void = TREE_CODE (return_type) == VOID_TYPE;
1370 switch (TREE_CODE (exp))
/* (case label elided) Presumably COMPOUND_EXPR: evaluate operand 0 for
   side effects only, then return operand 1. */
1373 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1375 exp = TREE_OPERAND (exp, 1);
/* (case label elided) Presumably COND_EXPR: branch on the condition and
   emit a complete return sequence in each arm, rather than first
   materializing the selected value on the stack. */
1379 struct jcf_block *then_label = gen_jcf_label (state);
1380 struct jcf_block *else_label = gen_jcf_label (state);
1381 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1382 then_label, else_label, 1, state);
1383 define_jcf_label (then_label, state);
1384 generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1385 define_jcf_label (else_label, state);
1386 generate_bytecode_return (TREE_OPERAND (exp, 2), state);
/* Default: evaluate EXP, leaving the value on the stack unless the
   method returns void. */
1390 generate_bytecode_insns (exp,
1391 returns_void ? IGNORE_TARGET
1392 : STACK_TARGET, state);
/* Run every pending cleanup (LIMIT == NULL_PTR) before leaving. */
1398 call_cleanups (NULL_PTR, state);
/* adjust_typed_op picks i/l/f/d/a-return from RETURN_TYPE. */
1402 op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
/* With active finalizers, spill the return value into a dedicated local
   so the cleanup subroutines cannot clobber it on the operand stack,
   run the cleanups, then reload the value just before returning. */
1403 if (state->num_finalizers > 0)
1405 if (state->return_value_decl == NULL_TREE)
1407 state->return_value_decl
1408 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1409 localvar_alloc (state->return_value_decl, state);
1411 emit_store (state->return_value_decl, state);
1412 call_cleanups (NULL_PTR, state);
1413 emit_load (state->return_value_decl, state);
1414 /* If we call localvar_free (state->return_value_decl, state),
1415 then we risk the save decl erroneously re-used in the
1416 finalizer. Instead, we keep the state->return_value_decl
1417 allocated through the rest of the method. This is not
1418 the greatest solution, but it is at least simple and safe. */
1425 /* Generate bytecode for sub-expression EXP of METHOD.
1426 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
/* This is the main tree-to-bytecode dispatcher: one giant switch over
   TREE_CODE (EXP).  With STACK_TARGET the expression's value is left on
   the JVM operand stack (tracked via NOTE_PUSH/NOTE_POP); with
   IGNORE_TARGET only side effects are emitted.
   NOTE(review): this listing is heavily elided — many case labels,
   braces and statements are missing; comments below that name a tree
   code for an elided case label are inferences to be confirmed. */
1429 generate_bytecode_insns (exp, target, state)
1432 struct jcf_partial *state;
1435 enum java_opcode jopcode;
1437 HOST_WIDE_INT value;
/* Nothing to do for a missing expression whose value is ignored. */
1442 if (exp == NULL && target == IGNORE_TARGET)
1445 type = TREE_TYPE (exp);
1447 switch (TREE_CODE (exp))
/* (elided case) Block with declarations: allocate JVM local slots for
   each decl, emit the body, then free the slots again. */
1450 if (BLOCK_EXPR_BODY (exp))
1453 tree body = BLOCK_EXPR_BODY (exp);
1454 for (local = BLOCK_EXPR_DECLS (exp); local; )
1456 tree next = TREE_CHAIN (local);
1457 localvar_alloc (local, state);
1460 /* Avoid deep recursion for long blocks. */
1461 while (TREE_CODE (body) == COMPOUND_EXPR)
1463 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1464 body = TREE_OPERAND (body, 1);
1466 generate_bytecode_insns (body, target, state);
1467 for (local = BLOCK_EXPR_DECLS (exp); local; )
1469 tree next = TREE_CHAIN (local);
1470 localvar_free (local, state);
/* (elided case) Sequencing: first operand for effect, second for TARGET. */
1476 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1477 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1479 case EXPR_WITH_FILE_LOCATION:
/* Temporarily switch the global source position, optionally emit a
   LineNumberTable entry, generate the wrapped body, then restore. */
1481 char *saved_input_filename = input_filename;
1482 tree body = EXPR_WFL_NODE (exp);
1483 int saved_lineno = lineno;
1484 if (body == empty_stmt_node)
1486 input_filename = EXPR_WFL_FILENAME (exp);
1487 lineno = EXPR_WFL_LINENO (exp);
1488 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && lineno > 0
1489 && debug_info_level > DINFO_LEVEL_NONE)
1490 put_linenumber (lineno, state);
1491 generate_bytecode_insns (body, target, state);
1492 input_filename = saved_input_filename;
1493 lineno = saved_lineno;
/* (elided case) Integer constant: null/aconst_null for pointer types,
   an int push for precision <= 32, else a long push. */
1497 if (target == IGNORE_TARGET) ; /* do nothing */
1498 else if (TREE_CODE (type) == POINTER_TYPE)
1500 if (! integer_zerop (exp))
1503 OP1 (OPCODE_aconst_null);
1506 else if (TYPE_PRECISION (type) <= 32)
1508 push_int_const (TREE_INT_CST_LOW (exp), state);
1513 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
/* (elided case) Real constant: prec is 1 for float, 2 for double
   (precision >> 5); use the fconst/dconst shortcuts for 0.0 and 1.0,
   otherwise load from the constant pool. */
1520 int prec = TYPE_PRECISION (type) >> 5;
1522 if (real_zerop (exp))
1523 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1524 else if (real_onep (exp))
1525 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1526 /* FIXME Should also use fconst_2 for 2.0f.
1527 Also, should use iconst_2/ldc followed by i2f/i2d
1528 for other float/double when the value is a small integer. */
1531 offset = find_constant_index (exp, state);
1533 push_constant1 (offset, state);
1535 push_constant2 (offset, state);
/* (elided case) String constant: ldc of a CONSTANT_String entry. */
1541 push_constant1 (find_string_constant (&state->cpool, exp), state);
/* (elided case) Variable reference: getstatic for static fields,
   otherwise fall through to a local-variable load. */
1545 if (TREE_STATIC (exp))
1547 field_op (exp, OPCODE_getstatic, state);
1548 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1551 /* ... fall through ... */
1553 emit_load (exp, state);
1555 case NON_LVALUE_EXPR:
1557 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
/* (elided case) Array element read: push array ref and index, then the
   type-adjusted *aload opcode; net stack effect is -1 (or -1 with a
   wide value replacing two slots). */
1560 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1561 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1562 if (target != IGNORE_TARGET)
1564 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1567 if (! TYPE_IS_WIDE (type))
/* (elided case) Field read (COMPONENT_REF): `array.length' becomes the
   arraylength instruction; other fields use getstatic/getfield. */
1573 tree obj = TREE_OPERAND (exp, 0);
1574 tree field = TREE_OPERAND (exp, 1);
1575 int is_static = FIELD_STATIC (field);
1576 generate_bytecode_insns (obj,
1577 is_static ? IGNORE_TARGET : target, state);
1578 if (target != IGNORE_TARGET)
1580 if (DECL_NAME (field) == length_identifier_node && !is_static
1581 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1584 OP1 (OPCODE_arraylength);
1588 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1592 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1597 case TRUTH_ANDIF_EXPR:
1598 case TRUTH_ORIF_EXPR:
/* Materialize a boolean value: branch through
   generate_bytecode_conditional, pushing 1 on the true path and 0 on
   the false path. */
1606 struct jcf_block *then_label = gen_jcf_label (state);
1607 struct jcf_block *else_label = gen_jcf_label (state);
1608 struct jcf_block *end_label = gen_jcf_label (state);
1609 generate_bytecode_conditional (exp,
1610 then_label, else_label, 1, state);
1611 define_jcf_label (then_label, state);
1612 push_int_const (1, state);
1613 emit_goto (end_label, state);
1614 define_jcf_label (else_label, state);
1615 push_int_const (0, state);
1616 define_jcf_label (end_label, state);
/* (elided case) COND_EXPR: evaluate the chosen arm under TARGET. */
1622 struct jcf_block *then_label = gen_jcf_label (state);
1623 struct jcf_block *else_label = gen_jcf_label (state);
1624 struct jcf_block *end_label = gen_jcf_label (state);
1625 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1626 then_label, else_label, 1, state);
1627 define_jcf_label (then_label, state);
1628 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1629 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1630 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1631 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1632 emit_goto (end_label, state);
1633 define_jcf_label (else_label, state);
1634 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1635 define_jcf_label (end_label, state);
1636 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1637 if (TREE_TYPE (exp) != void_type_node)
1638 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
/* (elided case) CASE_EXPR inside a switch: record a relocation for this
   case value on the enclosing sw_state, maintaining min/max/count. */
1643 struct jcf_switch_state *sw_state = state->sw_state;
1644 struct jcf_relocation *reloc = (struct jcf_relocation *)
1645 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1646 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1648 reloc->label = get_jcf_label_here (state);
1649 reloc->offset = case_value;
1650 reloc->next = sw_state->cases;
1651 sw_state->cases = reloc;
1652 if (sw_state->num_cases == 0)
1654 sw_state->min_case = case_value;
1655 sw_state->max_case = case_value;
1659 if (case_value < sw_state->min_case)
1660 sw_state->min_case = case_value;
1661 if (case_value > sw_state->max_case)
1662 sw_state->max_case = case_value;
1664 sw_state->num_cases++;
/* (elided case) DEFAULT_EXPR: remember where the default label falls. */
1668 state->sw_state->default_label = get_jcf_label_here (state);
1673 /* The SWITCH_EXPR has three parts, generated in the following order:
1674 1. the switch_expression (the value used to select the correct case);
1676 3. the switch_instruction (the tableswitch/lookupswitch instruction.).
1677 After code generation, we will re-order them in the order 1, 3, 2.
1678 This is to avoid an extra GOTO. */
1679 struct jcf_switch_state sw_state;
1680 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1681 struct jcf_block *body_last; /* Last block of the switch_body. */
1682 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1683 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1684 struct jcf_block *body_block;
/* Push a fresh switch context; the CASE_EXPR/DEFAULT_EXPR cases above
   populate it while the body is generated. */
1686 sw_state.prev = state->sw_state;
1687 state->sw_state = &sw_state;
1688 sw_state.cases = NULL;
1689 sw_state.num_cases = 0;
1690 sw_state.default_label = NULL;
1691 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1692 expression_last = state->last_block;
1693 body_block = get_jcf_label_here (state); /* Force a new block here. */
1694 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1695 body_last = state->last_block;
1697 switch_instruction = gen_jcf_label (state);
1698 define_jcf_label (switch_instruction, state);
1699 if (sw_state.default_label == NULL)
1700 sw_state.default_label = gen_jcf_label (state);
/* Degenerate switches: 0 cases just pop the selector; 1 case compares
   against the single value with ifeq. */
1702 if (sw_state.num_cases <= 1)
1704 if (sw_state.num_cases == 0)
1706 emit_pop (1, state);
1711 push_int_const (sw_state.cases->offset, state);
1712 emit_if (sw_state.cases->label,
1713 OPCODE_ifeq, OPCODE_ifne, state);
1715 emit_goto (sw_state.default_label, state);
1720 /* Copy the chain of relocs into a sorted array. */
1721 struct jcf_relocation **relocs = (struct jcf_relocation **)
1722 xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1723 /* The relocs array is a buffer with a gap.
1724 The assumption is that cases will normally come in "runs". */
1726 int gap_end = sw_state.num_cases;
1727 struct jcf_relocation *reloc;
/* Gap-buffer insertion sort of the case relocations by case value. */
1728 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1730 HOST_WIDE_INT case_value = reloc->offset;
1731 while (gap_end < sw_state.num_cases)
1733 struct jcf_relocation *end = relocs[gap_end];
1734 if (case_value <= end->offset)
1736 relocs[gap_start++] = end;
1739 while (gap_start > 0)
1741 struct jcf_relocation *before = relocs[gap_start-1];
1742 if (case_value >= before->offset)
1744 relocs[--gap_end] = before;
1747 relocs[gap_start++] = reloc;
1748 /* Note we don't check for duplicates. FIXME! */
/* Density heuristic: tableswitch when the case range is at most twice
   the number of cases, else lookupswitch. */
1751 if (2 * sw_state.num_cases
1752 >= sw_state.max_case - sw_state.min_case)
1753 { /* Use tableswitch. */
1755 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1756 OP1 (OPCODE_tableswitch);
1757 emit_reloc (RELOCATION_VALUE_0,
1758 SWITCH_ALIGN_RELOC, NULL, state);
1759 emit_switch_reloc (sw_state.default_label, state);
1760 OP4 (sw_state.min_case);
1761 OP4 (sw_state.max_case);
/* Walk min_case..max_case, emitting the matching case target or the
   default target for holes in the range. */
1762 for (i = sw_state.min_case; ; )
1764 reloc = relocs[index];
1765 if (i == reloc->offset)
1767 emit_case_reloc (reloc, state);
1768 if (i == sw_state.max_case)
1773 emit_switch_reloc (sw_state.default_label, state);
1778 { /* Use lookupswitch. */
1779 RESERVE(9 + 8 * sw_state.num_cases);
1780 OP1 (OPCODE_lookupswitch);
1781 emit_reloc (RELOCATION_VALUE_0,
1782 SWITCH_ALIGN_RELOC, NULL, state);
1783 emit_switch_reloc (sw_state.default_label, state);
1784 OP4 (sw_state.num_cases);
1785 for (i = 0; i < sw_state.num_cases; i++)
1787 struct jcf_relocation *reloc = relocs[i];
1788 OP4 (reloc->offset);
1789 emit_case_reloc (reloc, state);
1795 instruction_last = state->last_block;
1796 if (sw_state.default_label->pc < 0)
1797 define_jcf_label (sw_state.default_label, state);
1798 else /* Force a new block. */
1799 sw_state.default_label = get_jcf_label_here (state);
1800 /* Now re-arrange the blocks so the switch_instruction
1801 comes before the switch_body. */
/* Splice the block/chunk lists: expression -> instruction -> body, and
   shift the body blocks' pcs by the length of the switch instruction. */
1802 switch_length = state->code_length - switch_instruction->pc;
1803 switch_instruction->pc = body_block->pc;
1804 instruction_last->next = body_block;
1805 instruction_last->v.chunk->next = body_block->v.chunk;
1806 expression_last->next = switch_instruction;
1807 expression_last->v.chunk->next = switch_instruction->v.chunk;
1808 body_last->next = sw_state.default_label;
1809 body_last->v.chunk->next = NULL;
1810 state->chunk = body_last->v.chunk;
1811 for (; body_block != sw_state.default_label; body_block = body_block->next)
1812 body_block->pc += switch_length;
/* Pop the switch context. */
1814 state->sw_state = sw_state.prev;
/* (elided case) RETURN_EXPR: unwrap an optional MODIFY_EXPR assigning
   the return value, then delegate to generate_bytecode_return. */
1819 exp = TREE_OPERAND (exp, 0);
1820 if (exp == NULL_TREE)
1821 exp = empty_stmt_node;
1822 else if (TREE_CODE (exp) != MODIFY_EXPR)
1825 exp = TREE_OPERAND (exp, 1);
1826 generate_bytecode_return (exp, state);
1828 case LABELED_BLOCK_EXPR:
/* Push an exit label (pc = PENDING_EXIT_PC) for break-style exits, run
   the body, then pop and define the label here. */
1830 struct jcf_block *end_label = gen_jcf_label (state);
1831 end_label->next = state->labeled_blocks;
1832 state->labeled_blocks = end_label;
1833 end_label->pc = PENDING_EXIT_PC;
1834 end_label->u.labeled_block = exp;
1835 if (LABELED_BLOCK_BODY (exp))
1836 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1837 if (state->labeled_blocks != end_label)
1839 state->labeled_blocks = end_label->next;
1840 define_jcf_label (end_label, state);
/* (elided case) LOOP_EXPR. */
1845 tree body = TREE_OPERAND (exp, 0);
1847 if (TREE_CODE (body) == COMPOUND_EXPR
1848 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1850 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1851 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1852 struct jcf_block *head_label;
1853 struct jcf_block *body_label;
1854 struct jcf_block *end_label = gen_jcf_label (state);
1855 struct jcf_block *exit_label = state->labeled_blocks;
1856 head_label = gen_jcf_label (state);
1857 emit_goto (head_label, state);
1858 body_label = get_jcf_label_here (state);
1859 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1860 define_jcf_label (head_label, state);
1861 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1862 end_label, body_label, 1, state);
1863 define_jcf_label (end_label, state);
/* Plain infinite loop: body then goto back to its head. */
1868 struct jcf_block *head_label = get_jcf_label_here (state);
1869 generate_bytecode_insns (body, IGNORE_TARGET, state);
1870 emit_goto (head_label, state);
/* (elided case) EXIT_EXPR: conditional branch out of the innermost
   labeled block (note TRUE_BRANCH_FIRST == 0 here). */
1876 struct jcf_block *label = state->labeled_blocks;
1877 struct jcf_block *end_label = gen_jcf_label (state);
1878 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1879 label, end_label, 0, state);
1880 define_jcf_label (end_label, state);
1883 case EXIT_BLOCK_EXPR:
/* Break/continue to a named labeled block: find its label on the chain,
   run intervening cleanups, then goto. */
1885 struct jcf_block *label = state->labeled_blocks;
1886 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1887 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1888 label = label->next;
1889 call_cleanups (label, state);
1890 emit_goto (label, state);
/* Increment/decrement family: VALUE is the delta, POST_OP selects
   whether the old (post) or new (pre) value is the result. */
1894 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1895 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1896 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1897 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1900 exp = TREE_OPERAND (exp, 0);
1901 type = TREE_TYPE (exp);
1902 size = TYPE_IS_WIDE (type) ? 2 : 1;
/* Fast path: a non-static 32-bit int local can use the iinc opcode. */
1903 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1904 && ! TREE_STATIC (exp)
1905 && TREE_CODE (type) == INTEGER_TYPE
1906 && TYPE_PRECISION (type) == 32)
1908 if (target != IGNORE_TARGET && post_op)
1909 emit_load (exp, state);
1910 emit_iinc (exp, value, state);
1911 if (target != IGNORE_TARGET && ! post_op)
1912 emit_load (exp, state);
/* General path: load the old value (duplicating the address parts for
   the later store), add the delta, and fall into the store logic. */
1915 if (TREE_CODE (exp) == COMPONENT_REF)
1917 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1918 emit_dup (1, 0, state);
1919 /* Stack: ..., objectref, objectref. */
1920 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1922 /* Stack: ..., objectref, oldvalue. */
1925 else if (TREE_CODE (exp) == ARRAY_REF)
1927 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1928 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1929 emit_dup (2, 0, state);
1930 /* Stack: ..., array, index, array, index. */
1931 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1935 /* Stack: ..., array, index, oldvalue. */
1938 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1940 generate_bytecode_insns (exp, STACK_TARGET, state);
1941 /* Stack: ..., oldvalue. */
1947 if (target != IGNORE_TARGET && post_op)
1948 emit_dup (size, offset, state);
1949 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1950 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1951 /* Stack, otherwise: ..., [result, ] oldvalue. */
1953 push_int_const (value, state);
1955 push_long_const (value, (HOST_WIDE_INT)(value >= 0 ? 0 : -1), state);
1957 emit_binop (OPCODE_iadd + adjust_typed_op (type, 3), type, state);
1958 if (target != IGNORE_TARGET && ! post_op)
1959 emit_dup (size, offset, state);
1960 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1961 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1962 /* Stack, otherwise: ..., [result, ] newvalue. */
1963 goto finish_assignment;
/* (elided case) MODIFY_EXPR: assignment. */
1967 tree lhs = TREE_OPERAND (exp, 0);
1968 tree rhs = TREE_OPERAND (exp, 1);
1971 /* See if we can use the iinc instruction. */
1972 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1973 && ! TREE_STATIC (lhs)
1974 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1975 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1976 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1978 tree arg0 = TREE_OPERAND (rhs, 0);
1979 tree arg1 = TREE_OPERAND (rhs, 1);
/* iinc takes a signed 16-bit immediate. */
1980 HOST_WIDE_INT min_value = -32768;
1981 HOST_WIDE_INT max_value = 32767;
1982 if (TREE_CODE (rhs) == MINUS_EXPR)
1987 else if (arg1 == lhs)
1990 arg1 = TREE_OPERAND (rhs, 0);
1992 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
1994 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
1995 value = TREE_INT_CST_LOW (arg1);
1996 if ((hi_value == 0 && value <= max_value)
1997 || (hi_value == -1 && value >= min_value))
1999 if (TREE_CODE (rhs) == MINUS_EXPR)
2001 emit_iinc (lhs, value, state);
/* Otherwise push the address parts (objectref, or array+index), then
   the value, duplicating it if the assignment's value is needed. */
2007 if (TREE_CODE (lhs) == COMPONENT_REF)
2009 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
2010 STACK_TARGET, state);
2013 else if (TREE_CODE (lhs) == ARRAY_REF)
2015 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
2016 STACK_TARGET, state);
2017 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
2018 STACK_TARGET, state);
2023 generate_bytecode_insns (rhs, STACK_TARGET, state);
2024 if (target != IGNORE_TARGET)
2025 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
/* Shared store tail (finish_assignment): emit the right store form for
   the kind of lvalue in EXP. */
2031 if (TREE_CODE (exp) == COMPONENT_REF)
2033 tree field = TREE_OPERAND (exp, 1);
2034 if (! FIELD_STATIC (field))
2037 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2040 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2042 else if (TREE_CODE (exp) == VAR_DECL
2043 || TREE_CODE (exp) == PARM_DECL)
2045 if (FIELD_STATIC (exp))
2047 field_op (exp, OPCODE_putstatic, state);
2048 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2051 emit_store (exp, state);
2053 else if (TREE_CODE (exp) == ARRAY_REF)
2055 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2058 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2061 fatal ("internal error (bad lhs to MODIFY_EXPR)");
/* Arithmetic/logical binops: pick the int-typed base opcode, then
   adjust for the operand type at the shared `binop' label. */
2064 jopcode = OPCODE_iadd;
2067 jopcode = OPCODE_isub;
2070 jopcode = OPCODE_imul;
2072 case TRUNC_DIV_EXPR:
2074 jopcode = OPCODE_idiv;
2076 case TRUNC_MOD_EXPR:
2077 jopcode = OPCODE_irem;
2079 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2080 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
2081 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2082 case TRUTH_AND_EXPR:
2083 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2085 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2086 case TRUTH_XOR_EXPR:
2087 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2090 tree arg0 = TREE_OPERAND (exp, 0);
2091 tree arg1 = TREE_OPERAND (exp, 1);
2092 jopcode += adjust_typed_op (type, 3);
2093 if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
2095 /* fold may (e.g) convert 2*x to x+x. */
2096 generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
2097 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2101 generate_bytecode_insns (arg0, target, state);
/* JVM long shifts take an int shift count, not a long. */
2102 if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2103 arg1 = convert (int_type_node, arg1);
2104 generate_bytecode_insns (arg1, target, state);
2106 /* For most binary operations, both operands and the result have the
2107 same type. Shift operations are different. Using arg1's type
2108 gets us the correct SP adjustment in all cases. */
2109 if (target == STACK_TARGET)
2110 emit_binop (jopcode, TREE_TYPE (arg1), state);
2113 case TRUTH_NOT_EXPR:
/* Logical/bitwise NOT via xor: x ^ 1 for truth-not, x ^ -1 for
   bit-not; lxor is used for wide operands. */
2115 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2116 if (target == STACK_TARGET)
2118 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2119 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2123 NOTE_PUSH (1 + is_long);
2124 OP1 (OPCODE_ixor + is_long);
2125 NOTE_POP (1 + is_long);
/* (elided case) NEGATE_EXPR: type-adjusted *neg opcode. */
2129 jopcode = OPCODE_ineg;
2130 jopcode += adjust_typed_op (type, 3);
2131 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2132 if (target == STACK_TARGET)
2133 emit_unop (jopcode, type, state);
2135 case INSTANCEOF_EXPR:
2137 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2138 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2140 OP1 (OPCODE_instanceof);
/* (elided case) Expression evaluated purely for its stack value. */
2145 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2150 case FIX_TRUNC_EXPR:
/* Conversions: checkcast for reference casts, i2l/f2i/... families for
   numeric conversions, plus i2b/i2c/i2s narrowing for sub-int types. */
2152 tree src = TREE_OPERAND (exp, 0);
2153 tree src_type = TREE_TYPE (src);
2154 tree dst_type = TREE_TYPE (exp);
2155 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2156 if (target == IGNORE_TARGET || src_type == dst_type)
2158 if (TREE_CODE (dst_type) == POINTER_TYPE)
2160 if (TREE_CODE (exp) == CONVERT_EXPR)
2162 int index = find_class_constant (&state->cpool,
2163 TREE_TYPE (dst_type));
2165 OP1 (OPCODE_checkcast);
2169 else /* Convert numeric types. */
2171 int wide_src = TYPE_PRECISION (src_type) > 32;
2172 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2173 NOTE_POP (1 + wide_src);
2175 if (TREE_CODE (dst_type) == REAL_TYPE)
2177 if (TREE_CODE (src_type) == REAL_TYPE)
2178 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2179 else if (TYPE_PRECISION (src_type) == 64)
2180 OP1 (OPCODE_l2f + wide_dst);
2182 OP1 (OPCODE_i2f + wide_dst);
2184 else /* Convert to integral type. */
2186 if (TREE_CODE (src_type) == REAL_TYPE)
2187 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2192 if (TYPE_PRECISION (dst_type) < 32)
2195 /* Already converted to int, if needed. */
2196 if (TYPE_PRECISION (dst_type) <= 8)
2198 else if (TREE_UNSIGNED (dst_type))
2204 NOTE_PUSH (1 + wide_dst);
2209 case CLEANUP_POINT_EXPR:
/* Emit the protected expression, then pop and emit every cleanup block
   registered since entry (each via jsr, with a catch-all handler that
   re-throws after running the cleanup). */
2211 struct jcf_block *save_labeled_blocks = state->labeled_blocks;
2212 int can_complete = CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0));
2213 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
2214 if (target != IGNORE_TARGET)
2216 while (state->labeled_blocks != save_labeled_blocks)
2218 struct jcf_block *finished_label = NULL;
2220 tree exception_type = build_pointer_type (throwable_type_node);
2221 tree exception_decl = build_decl (VAR_DECL, NULL_TREE,
2223 struct jcf_block *end_label = get_jcf_label_here (state);
2224 struct jcf_block *label = state->labeled_blocks;
2225 struct jcf_handler *handler;
2226 tree cleanup = label->u.labeled_block;
2227 state->labeled_blocks = label->next;
2228 state->num_finalizers--;
2231 finished_label = gen_jcf_label (state);
2232 emit_jsr (label, state);
2233 emit_goto (finished_label, state);
2234 if (! CAN_COMPLETE_NORMALLY (cleanup))
/* Catch-all handler: save the in-flight exception, run the cleanup
   subroutine, then re-throw. */
2237 handler = alloc_handler (label->v.start_label, end_label, state);
2238 handler->type = NULL_TREE;
2239 localvar_alloc (exception_decl, state);
2241 emit_store (exception_decl, state);
2242 emit_jsr (label, state);
2243 emit_load (exception_decl, state);
2245 OP1 (OPCODE_athrow);
2248 /* The finally block. */
2249 return_link = build_decl (VAR_DECL, NULL_TREE,
2250 return_address_type_node);
2251 define_jcf_label (label, state);
2253 localvar_alloc (return_link, state);
2254 emit_store (return_link, state);
2255 generate_bytecode_insns (cleanup, IGNORE_TARGET, state);
2256 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2257 localvar_free (return_link, state);
2258 localvar_free (exception_decl, state);
2259 if (finished_label != NULL)
2260 define_jcf_label (finished_label, state);
2265 case WITH_CLEANUP_EXPR:
/* Register a pending cleanup: the label is marked PENDING_CLEANUP_PC
   and emitted later by the enclosing CLEANUP_POINT_EXPR (or jumped via
   call_cleanups). */
2267 struct jcf_block *label;
2268 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
2269 label = gen_jcf_label (state);
2270 label->pc = PENDING_CLEANUP_PC;
2271 label->next = state->labeled_blocks;
2272 state->labeled_blocks = label;
2273 state->num_finalizers++;
2274 label->u.labeled_block = TREE_OPERAND (exp, 2);
2275 label->v.start_label = get_jcf_label_here (state);
2276 if (target != IGNORE_TARGET)
/* (elided case) TRY_EXPR: try clause plus a chain of catch clauses,
   each registered as an exception-table handler over the try range. */
2283 tree try_clause = TREE_OPERAND (exp, 0);
2284 struct jcf_block *start_label = get_jcf_label_here (state);
2285 struct jcf_block *end_label; /* End of try clause. */
2286 struct jcf_block *finished_label = gen_jcf_label (state);
2287 tree clause = TREE_OPERAND (exp, 1);
2288 if (target != IGNORE_TARGET)
2290 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2291 end_label = get_jcf_label_here (state);
2292 if (CAN_COMPLETE_NORMALLY (try_clause))
2293 emit_goto (finished_label, state);
2294 while (clause != NULL_TREE)
2296 tree catch_clause = TREE_OPERAND (clause, 0);
2297 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2298 struct jcf_handler *handler = alloc_handler (start_label, end_label, state);
2299 if (exception_decl == NULL_TREE)
2300 handler->type = NULL_TREE;
2302 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2303 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2304 clause = TREE_CHAIN (clause);
2305 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2306 emit_goto (finished_label, state);
2308 define_jcf_label (finished_label, state);
2311 case TRY_FINALLY_EXPR:
/* try/finally: the finally body becomes a jsr subroutine, invoked on
   the normal path, on every early exit (via the pending-cleanup label),
   and from a catch-all handler that re-throws. */
2313 struct jcf_block *finished_label, *finally_label, *start_label;
2314 struct jcf_handler *handler;
2315 int worthwhile_finally = 1;
2316 tree try_block = TREE_OPERAND (exp, 0);
2317 tree finally = TREE_OPERAND (exp, 1);
2318 tree return_link, exception_decl;
2320 finally_label = start_label = NULL;
2321 return_link = exception_decl = NULL_TREE;
2322 finished_label = gen_jcf_label (state);
2324 /* If the finally clause happens to be empty, set a flag so we
2325 remember to just skip it. */
2326 if (BLOCK_EXPR_BODY (finally) == empty_stmt_node)
2327 worthwhile_finally = 0;
2329 if (worthwhile_finally)
2331 tree exception_type;
2332 return_link = build_decl (VAR_DECL, NULL_TREE,
2333 return_address_type_node);
2334 exception_type = build_pointer_type (throwable_type_node);
2335 exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2337 finally_label = gen_jcf_label (state);
2338 start_label = get_jcf_label_here (state);
2339 finally_label->pc = PENDING_CLEANUP_PC;
2340 finally_label->next = state->labeled_blocks;
2341 state->labeled_blocks = finally_label;
2342 state->num_finalizers++;
2345 generate_bytecode_insns (try_block, target, state);
2347 if (worthwhile_finally)
2349 if (state->labeled_blocks != finally_label)
2351 state->labeled_blocks = finally_label->next;
2352 emit_jsr (finally_label, state);
2355 if (CAN_COMPLETE_NORMALLY (try_block)
2356 && BLOCK_EXPR_BODY (try_block) != empty_stmt_node)
2357 emit_goto (finished_label, state);
2359 /* Handle exceptions. */
2361 if (!worthwhile_finally)
2364 localvar_alloc (return_link, state);
2365 handler = alloc_handler (start_label, NULL_PTR, state);
2366 handler->end_label = handler->handler_label;
2367 handler->type = NULL_TREE;
2368 localvar_alloc (exception_decl, state);
2370 emit_store (exception_decl, state);
2371 emit_jsr (finally_label, state);
2372 emit_load (exception_decl, state);
2374 OP1 (OPCODE_athrow);
2376 localvar_free (exception_decl, state);
2378 /* The finally block. First save return PC into return_link. */
2379 define_jcf_label (finally_label, state);
2381 emit_store (return_link, state);
2383 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2384 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2385 localvar_free (return_link, state);
2386 define_jcf_label (finished_label, state);
/* (elided case) Presumably THROW_EXPR: evaluate and athrow. */
2390 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2392 OP1 (OPCODE_athrow);
2394 case NEW_ARRAY_INIT:
/* Array initializer: allocate with newarray/anewarray, then store each
   constructor element via dup + index + value + *astore. */
2396 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2397 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2398 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2399 HOST_WIDE_INT length = java_array_type_length (array_type);
2400 if (target == IGNORE_TARGET)
2402 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2403 generate_bytecode_insns (TREE_VALUE (values), target, state);
2406 push_int_const (length, state);
2409 if (JPRIMITIVE_TYPE_P (element_type))
2411 int atype = encode_newarray_type (element_type);
2412 OP1 (OPCODE_newarray);
2417 int index = find_class_constant (&state->cpool,
2418 TREE_TYPE (element_type));
2419 OP1 (OPCODE_anewarray);
2423 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2424 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2426 int save_SP = state->code_SP;
2427 emit_dup (1, 0, state);
2428 push_int_const (offset, state);
2430 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2433 state->code_SP = save_SP;
2437 case NEW_CLASS_EXPR:
/* new C(...): emit new + dup (when the result is needed), then fall
   through to the call machinery to invoke the constructor. */
2439 tree class = TREE_TYPE (TREE_TYPE (exp));
2440 int need_result = target != IGNORE_TARGET;
2441 int index = find_class_constant (&state->cpool, class);
2447 NOTE_PUSH (1 + need_result);
2449 /* ... fall through ... */
/* (elided case) CALL_EXPR: F is the callee, X its argument list.
   Several "soft" builtin callees are open-coded as single opcodes. */
2452 tree f = TREE_OPERAND (exp, 0);
2453 tree x = TREE_OPERAND (exp, 1);
2454 int save_SP = state->code_SP;
2456 if (TREE_CODE (f) == ADDR_EXPR)
2457 f = TREE_OPERAND (f, 0);
2458 if (f == soft_newarray_node)
2460 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2461 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2462 STACK_TARGET, state);
2464 OP1 (OPCODE_newarray);
2468 else if (f == soft_multianewarray_node)
2472 int index = find_class_constant (&state->cpool,
2473 TREE_TYPE (TREE_TYPE (exp)));
2474 x = TREE_CHAIN (x); /* Skip class argument. */
2475 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2476 for (idim = ndims; --idim >= 0; )
2479 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2482 OP1 (OPCODE_multianewarray);
2487 else if (f == soft_anewarray_node)
2489 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2490 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2491 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2493 OP1 (OPCODE_anewarray);
2497 else if (f == soft_monitorenter_node
2498 || f == soft_monitorexit_node
2499 || f == throw_node[0]
2500 || f == throw_node[1])
2502 if (f == soft_monitorenter_node)
2503 op = OPCODE_monitorenter;
2504 else if (f == soft_monitorexit_node)
2505 op = OPCODE_monitorexit;
2508 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2514 else if (exp == soft_exceptioninfo_call_node)
2516 NOTE_PUSH (1); /* Pushed by exception system. */
/* Ordinary call: push all arguments, then compute NARGS from the SP
   delta and reset SP (the invoke's own effect is accounted later). */
2519 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2521 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2523 nargs = state->code_SP - save_SP;
2524 state->code_SP = save_SP;
2525 if (f == soft_fmod_node)
2532 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2533 NOTE_POP (1); /* Pop implicit this. */
/* Select the invoke flavor: static, special (constructor/super/private),
   interface, or virtual. */
2534 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2536 int index = find_methodref_index (&state->cpool, f);
2539 if (METHOD_STATIC (f))
2540 OP1 (OPCODE_invokestatic);
2541 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2542 || METHOD_PRIVATE (f))
2543 OP1 (OPCODE_invokespecial);
2544 else if (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (f))))
2546 OP1 (OPCODE_invokeinterface);
2550 OP1 (OPCODE_invokevirtual);
2552 f = TREE_TYPE (TREE_TYPE (f));
2553 if (TREE_CODE (f) != VOID_TYPE)
2555 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2556 if (target == IGNORE_TARGET)
2557 emit_pop (size, state);
/* Fallback (notimpl): no bytecode strategy for this tree code. */
2572 error("internal error in generate_bytecode_insn - tree code not implemented: %s",
2573 tree_code_name [(int) TREE_CODE (exp)]);
/* Finalize the bytecode layout for the current method in STATE.
   On entry each block's pc is an upper bound on its final start pc
   and each block's chunk holds minimum-size (short-form) branches.
   This pass: (1) deletes a trailing redundant `goto' to the next block,
   (2) widens branches whose 16-bit offset would overflow into goto_w /
   if<cond>+goto_w sequences, (3) inserts tableswitch/lookupswitch
   alignment padding, and (4) patches all branch offsets, copying each
   chunk back-to-front into a (possibly larger) buffer.
   Sets state->code_length to the final code size.  */
2578 perform_relocations (state)
2579 struct jcf_partial *state;
2581 struct jcf_block *block;
2582 struct jcf_relocation *reloc;
2586 /* Before we start, the pc field of each block is an upper bound on
2587 the block's start pc (it may be less, if previous blocks need less
2588 than their maximum).
2590 The minimum size of each block is in the block's chunk->size. */
2592 /* First, figure out the actual locations of each block. */
2595 for (block = state->blocks; block != NULL; block = block->next)
2597 int block_size = block->v.chunk->size;
2601 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2602 Assumes relocations are in reverse order. */
2603 reloc = block->u.relocations;
2604 while (reloc != NULL
2605 && reloc->kind == OPCODE_goto_w
2606 && reloc->label->pc == block->next->pc
2607 && reloc->offset + 2 == block_size)
2609 reloc = reloc->next;
2610 block->u.relocations = reloc;
/* Drop the 3-byte short-form goto (1 opcode + 2 offset bytes).  */
2611 block->v.chunk->size -= 3;
2616 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2618 if (reloc->kind == SWITCH_ALIGN_RELOC)
2620 /* We assume this is the first relocation in this block,
2621 so we know its final pc. */
2622 int where = pc + reloc->offset;
/* Pad to the next 4-byte boundary, as the switch opcodes require.  */
2623 int pad = ((where + 3) & ~3) - where;
2626 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2628 int delta = reloc->label->pc - (pc + reloc->offset - 1);
/* Widening costs 2 extra bytes for goto/jsr -> goto_w/jsr_w,
   5 for an if<cond> that must become if<ncond> + goto_w.  */
2629 int expand = reloc->kind > 0 ? 2 : 5;
2633 if (delta >= -32768 && delta <= 32767)
2639 block_size += expand;
/* Second pass: copy each chunk into its final buffer, patching
   branch offsets as we go.  */
2645 for (block = state->blocks; block != NULL; block = block->next)
2647 struct chunk *chunk = block->v.chunk;
2648 int old_size = chunk->size;
2649 int next_pc = block->next == NULL ? pc : block->next->pc;
2650 int new_size = next_pc - block->pc;
2651 unsigned char *new_ptr;
2652 unsigned char *old_buffer = chunk->data;
2653 unsigned char *old_ptr = old_buffer + old_size;
2654 if (new_size != old_size)
2656 chunk->data = (unsigned char *)
2657 obstack_alloc (state->chunk_obstack, new_size);
2658 chunk->size = new_size;
2660 new_ptr = chunk->data + new_size;
2662 /* We do the relocations from back to front, because
2663 the relocations are in reverse order. */
2664 for (reloc = block->u.relocations; ; reloc = reloc->next)
2666 /* new_ptr and old_ptr point into the old and new buffers,
2667 respectively. (If no relocations cause the buffer to
2668 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2669 The bytes at higher address have been copied and relocations
2670 handled; those at lower addresses remain to process. */
2672 /* Lower old index of piece to be copied with no relocation.
2673 I.e. high index of the first piece that does need relocation. */
2674 int start = reloc == NULL ? 0
2675 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2676 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2678 : reloc->offset + 2;
2681 int n = (old_ptr - old_buffer) - start;
2685 memcpy (new_ptr, old_ptr, n);
2686 if (old_ptr == old_buffer)
/* Offset within the new buffer of the operand being patched:
   a 2-byte operand for kind -1, otherwise a 4-byte one.  */
2689 new_offset = new_ptr - chunk->data;
2690 new_offset -= (reloc->kind == -1 ? 2 : 4);
2691 if (reloc->kind == 0)
2694 value = GET_u4 (old_ptr);
2696 else if (reloc->kind == BLOCK_START_RELOC)
2702 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2704 int where = block->pc + reloc->offset;
2705 int pad = ((where + 3) & ~3) - where;
2713 value = GET_u2 (old_ptr);
/* Rebase the operand to be relative to its final location.  */
2715 value += reloc->label->pc - (block->pc + new_offset);
/* Store the operand big-endian, writing backwards.  */
2716 *--new_ptr = (unsigned char) value; value >>= 8;
2717 *--new_ptr = (unsigned char) value; value >>= 8;
2718 if (reloc->kind != -1)
2720 *--new_ptr = (unsigned char) value; value >>= 8;
2721 *--new_ptr = (unsigned char) value;
2723 if (reloc->kind > BLOCK_START_RELOC)
2725 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2727 *--new_ptr = reloc->kind;
2729 else if (reloc->kind < -1)
2731 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2733 *--new_ptr = OPCODE_goto_w;
/* kind < -1 encodes the negated conditional opcode.  */
2736 *--new_ptr = - reloc->kind;
/* Sanity check: the backwards copy must land exactly at the start.  */
2739 if (new_ptr != chunk->data)
2740 fatal ("internal error - perform_relocations");
2742 state->code_length = pc;
/* Initialize STATE for generating one class file.  All chunks are
   allocated on the obstack WORK; the constant pool and the localvars
   and bytecode buffers start out empty.  */
2746 init_jcf_state (state, work)
2747 struct jcf_partial *state;
2748 struct obstack *work;
2750 state->chunk_obstack = work;
2751 state->first = state->chunk = NULL;
2752 CPOOL_INIT (&state->cpool);
2753 BUFFER_INIT (&state->localvars);
2754 BUFFER_INIT (&state->bytecode);
/* Reset the per-method fields of STATE before emitting code for METHOD:
   clear the block list, line-number and local-variable bookkeeping,
   exception-handler chain, and reuse the bytecode/localvars buffers.  */
2758 init_jcf_method (state, method)
2759 struct jcf_partial *state;
2762 state->current_method = method;
2763 state->blocks = state->last_block = NULL;
2764 state->linenumber_count = 0;
2765 state->first_lvar = state->last_lvar = NULL;
2766 state->lvar_count = 0;
2767 state->labeled_blocks = NULL;
2768 state->code_length = 0;
/* Buffers are reset (not freed) so they can be reused across methods.  */
2769 BUFFER_RESET (&state->bytecode);
2770 BUFFER_RESET (&state->localvars);
2772 state->code_SP_max = 0;
2773 state->handlers = NULL;
2774 state->last_handler = NULL;
2775 state->num_handlers = 0;
2776 state->num_finalizers = 0;
2777 state->return_value_decl = NULL_TREE;
/* Release all resources held by STATE: finish the constant pool and
   free every chunk allocated on the obstack since state->first.  */
2781 release_jcf_state (state)
2782 struct jcf_partial *state;
2784 CPOOL_FINISH (&state->cpool);
2785 obstack_free (state->chunk_obstack, state->first);
2788 /* Generate and return a list of chunks containing the class CLAS
2789 in the .class file representation. The list can be written to a
2790 .class file using write_chunks. Allocate chunks from obstack WORK. */
2792 static struct chunk *
2793 generate_classfile (clas, state)
2795 struct jcf_partial *state;
2797 struct chunk *cpool_chunk;
2801 char *fields_count_ptr;
2802 int fields_count = 0;
2803 char *methods_count_ptr;
2804 int methods_count = 0;
2805 static tree SourceFile_node = NULL_TREE;
/* java.lang.Object has no superclass or interfaces.  */
2808 = clas == object_type_node ? 0
2809 : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));
/* Emit the fixed 8-byte class file header.  */
2811 ptr = append_chunk (NULL, 8, state);
2812 PUT4 (0xCafeBabe); /* Magic number */
2813 PUT2 (3); /* Minor version */
2814 PUT2 (45); /* Major version */
/* Reserve an empty chunk for the constant pool; its contents are
   only known at the end, once all constants have been interned.  */
2816 append_chunk (NULL, 0, state);
2817 cpool_chunk = state->chunk;
2819 /* Next allocate the chunk containing access_flags through fields_count. */
2820 if (clas == object_type_node)
2823 i = 8 + 2 * total_supers;
2824 ptr = append_chunk (NULL, i, state);
2825 i = get_access_flags (TYPE_NAME (clas));
2826 if (! (i & ACC_INTERFACE))
2828 PUT2 (i); /* access_flags */
2829 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2830 if (clas == object_type_node)
2832 PUT2(0); /* super_class */
2833 PUT2(0); /* interfaces_count */
2837 tree basetypes = TYPE_BINFO_BASETYPES (clas);
2838 tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
2839 int j = find_class_constant (&state->cpool, base);
2840 PUT2 (j); /* super_class */
2841 PUT2 (total_supers - 1); /* interfaces_count */
2842 for (i = 1; i < total_supers; i++)
2844 base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
2845 j = find_class_constant (&state->cpool, base);
/* Remember where fields_count goes; it is back-patched below once
   the fields have been counted.  */
2849 fields_count_ptr = ptr;
/* Emit the field_info entries.  */
2851 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
2853 int have_value, attr_count = 0;
/* Skip compiler-generated fields with no Java-level name.  */
2854 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
2856 ptr = append_chunk (NULL, 8, state);
2857 i = get_access_flags (part); PUT2 (i);
2858 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
2859 i = find_utf8_constant (&state->cpool,
2860 build_java_signature (TREE_TYPE (part)));
/* Only static fields of non-reference type get a ConstantValue.  */
2862 have_value = DECL_INITIAL (part) != NULL_TREE && FIELD_STATIC (part)
2863 && TREE_CODE (TREE_TYPE (part)) != POINTER_TYPE;
2867 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part))
2870 PUT2 (attr_count); /* attributes_count */
2873 tree init = DECL_INITIAL (part);
2874 static tree ConstantValue_node = NULL_TREE;
2875 ptr = append_chunk (NULL, 8, state);
2876 if (ConstantValue_node == NULL_TREE)
2877 ConstantValue_node = get_identifier ("ConstantValue");
2878 i = find_utf8_constant (&state->cpool, ConstantValue_node);
2879 PUT2 (i); /* attribute_name_index */
2880 PUT4 (2); /* attribute_length */
2881 i = find_constant_index (init, state); PUT2 (i);
2883 /* Emit the "Synthetic" attribute for val$<x> and this$<n> fields. */
2884 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part))
2885 ptr = append_synthetic_attribute (state);
/* Back-patch the field count recorded while iterating above.  */
2888 ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
2890 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
/* Emit the method_info entries, generating bytecode for each body.  */
2893 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
2895 struct jcf_block *block;
2896 tree function_body = DECL_FUNCTION_BODY (part);
2897 tree body = function_body == NULL_TREE ? NULL_TREE
2898 : BLOCK_EXPR_BODY (function_body);
/* Constructors are emitted under the JVM name "<init>".  */
2899 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
2901 tree type = TREE_TYPE (part);
2902 tree save_function = current_function_decl;
2903 int synthetic_p = 0;
2904 current_function_decl = part;
2905 ptr = append_chunk (NULL, 8, state);
2906 i = get_access_flags (part); PUT2 (i);
2907 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
2908 i = find_utf8_constant (&state->cpool, build_java_signature (type));
2910 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
2912 /* Make room for the Synthetic attribute (of zero length.) */
2913 if (DECL_FINIT_P (part)
2914 || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
2915 || TYPE_DOT_CLASS (clas) == part)
2921 PUT2 (i); /* attributes_count */
2924 ptr = append_synthetic_attribute (state);
2926 if (body != NULL_TREE)
2928 int code_attributes_count = 0;
2929 static tree Code_node = NULL_TREE;
2932 struct jcf_handler *handler;
2933 if (Code_node == NULL_TREE)
2934 Code_node = get_identifier ("Code");
2935 ptr = append_chunk (NULL, 14, state);
2936 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
2938 init_jcf_method (state, part);
2939 get_jcf_label_here (state); /* Force a first block. */
2940 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2941 localvar_alloc (t, state);
2942 generate_bytecode_insns (body, IGNORE_TARGET, state);
/* If control can fall off the end of the method, emit an
   explicit return (only legal for void methods).  */
2943 if (CAN_COMPLETE_NORMALLY (body))
2945 if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
2948 OP1 (OPCODE_return);
2950 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2951 localvar_free (t, state);
2952 if (state->return_value_decl != NULL_TREE)
2953 localvar_free (state->return_value_decl, state);
2954 finish_jcf_block (state);
2955 perform_relocations (state);
/* Compute the Code attribute_length: fixed fields plus the code
   itself plus the exception table.  */
2958 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
2959 if (state->linenumber_count > 0)
2961 code_attributes_count++;
2962 i += 8 + 4 * state->linenumber_count;
2964 if (state->lvar_count > 0)
2966 code_attributes_count++;
2967 i += 8 + 10 * state->lvar_count;
2969 UNSAFE_PUT4 (i); /* attribute_length */
2970 UNSAFE_PUT2 (state->code_SP_max); /* max_stack */
2971 UNSAFE_PUT2 (localvar_max); /* max_locals */
2972 UNSAFE_PUT4 (state->code_length);
2974 /* Emit the exception table. */
2975 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
2976 PUT2 (state->num_handlers); /* exception_table_length */
2977 handler = state->handlers;
2978 for (; handler != NULL; handler = handler->next)
2981 PUT2 (handler->start_label->pc);
2982 PUT2 (handler->end_label->pc);
2983 PUT2 (handler->handler_label->pc);
/* A NULL type means a catch-all (finally) handler.  */
2984 if (handler->type == NULL_TREE)
2987 type_index = find_class_constant (&state->cpool,
2992 ptr = append_chunk (NULL, 2, state);
2993 PUT2 (code_attributes_count);
2995 /* Write the LineNumberTable attribute. */
2996 if (state->linenumber_count > 0)
2998 static tree LineNumberTable_node = NULL_TREE;
2999 ptr = append_chunk (NULL,
3000 8 + 4 * state->linenumber_count, state);
3001 if (LineNumberTable_node == NULL_TREE)
3002 LineNumberTable_node = get_identifier ("LineNumberTable");
3003 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3004 PUT2 (i); /* attribute_name_index */
3005 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
3006 i = state->linenumber_count; PUT2 (i);
3007 for (block = state->blocks; block != NULL; block = block->next)
3009 int line = block->linenumber;
3018 /* Write the LocalVariableTable attribute. */
3019 if (state->lvar_count > 0)
3021 static tree LocalVariableTable_node = NULL_TREE;
3022 struct localvar_info *lvar = state->first_lvar;
3023 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3024 if (LocalVariableTable_node == NULL_TREE)
3025 LocalVariableTable_node = get_identifier("LocalVariableTable");
3026 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3027 PUT2 (i); /* attribute_name_index */
3028 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
3029 i = state->lvar_count; PUT2 (i);
3030 for ( ; lvar != NULL; lvar = lvar->next)
3032 tree name = DECL_NAME (lvar->decl);
3033 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
3034 i = lvar->start_label->pc; PUT2 (i);
/* The length field is the pc range, not an end pc.  */
3035 i = lvar->end_label->pc - i; PUT2 (i);
3036 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3037 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
3038 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
/* Write the Exceptions attribute for a `throws' clause.  */
3042 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3044 tree t = DECL_FUNCTION_THROWS (part);
3045 int throws_count = list_length (t);
3046 static tree Exceptions_node = NULL_TREE;
3047 if (Exceptions_node == NULL_TREE)
3048 Exceptions_node = get_identifier ("Exceptions");
3049 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3050 i = find_utf8_constant (&state->cpool, Exceptions_node);
3051 PUT2 (i); /* attribute_name_index */
3052 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
3053 i = throws_count; PUT2 (i);
3054 for (; t != NULL_TREE; t = TREE_CHAIN (t))
3056 i = find_class_constant (&state->cpool, TREE_VALUE (t));
3061 current_function_decl = save_function;
3063 ptr = methods_count_ptr; UNSAFE_PUT2 (methods_count);
/* Strip any leading directory components from the source file name
   before recording it in the SourceFile attribute.  */
3065 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3066 for (ptr = source_file; ; ptr++)
3071 if (ch == '/' || ch == '\\')
3072 source_file = ptr+1;
3074 ptr = append_chunk (NULL, 10, state);
/* One class attribute (SourceFile), plus InnerClasses if needed.  */
3076 i = ((INNER_CLASS_TYPE_P (clas)
3077 || DECL_INNER_CLASS_LIST (TYPE_NAME (clas))) ? 2 : 1);
3078 PUT2 (i); /* attributes_count */
3080 /* generate the SourceFile attribute. */
3081 if (SourceFile_node == NULL_TREE)
3082 SourceFile_node = get_identifier ("SourceFile");
3083 i = find_utf8_constant (&state->cpool, SourceFile_node);
3084 PUT2 (i); /* attribute_name_index */
3086 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3088 append_innerclasses_attribute (state, clas);
3090 /* Now, finally, generate the contents of the constant pool chunk. */
3091 i = count_constant_pool_bytes (&state->cpool);
3092 ptr = obstack_alloc (state->chunk_obstack, i);
3093 cpool_chunk->data = ptr;
3094 cpool_chunk->size = i;
3095 write_constant_pool (&state->cpool, ptr, i);
3096 return state->first;
/* Append a zero-length "Synthetic" attribute (name index + length 0)
   to the class file being built in STATE.  Returns the chunk write
   pointer for the caller to continue with.  */
3099 static unsigned char *
3100 append_synthetic_attribute (state)
3101 struct jcf_partial *state;
3103 static tree Synthetic_node = NULL_TREE;
/* 6 bytes: 2 for the name index, 4 for the (zero) length.  */
3104 unsigned char *ptr = append_chunk (NULL, 6, state);
3107 if (Synthetic_node == NULL_TREE)
3108 Synthetic_node = get_identifier ("Synthetic");
3109 i = find_utf8_constant (&state->cpool, Synthetic_node);
3110 PUT2 (i); /* Attribute string index */
3111 PUT4 (0); /* Attribute length */
/* Append an "InnerClasses" attribute for CLASS to the class file being
   built in STATE.  Does nothing unless CLASS is itself an inner class
   or declares inner classes.  The attribute length and entry count are
   written as zero first and back-patched once all entries are out.  */
3117 append_innerclasses_attribute (state, class)
3118 struct jcf_partial *state;
3121 static tree InnerClasses_node = NULL_TREE;
3122 tree orig_decl = TYPE_NAME (class);
3125 unsigned char *ptr, *length_marker, *number_marker;
/* Nothing to do for a class with no inner-class relationships.  */
3127 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3130 ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3132 if (InnerClasses_node == NULL_TREE)
3133 InnerClasses_node = get_identifier ("InnerClasses");
3134 i = find_utf8_constant (&state->cpool, InnerClasses_node);
3136 length_marker = ptr; PUT4 (0); /* length, to be later patched */
3137 number_marker = ptr; PUT2 (0); /* number of classes, to be later patched */
3139 /* Generate the entries: all inner classes visible from the one we
3140 process: itself, up and down. */
3141 while (class && INNER_CLASS_TYPE_P (class))
3145 decl = TYPE_NAME (class);
/* Find the simple (unqualified) inner name: scan back from the end
   of the mangled identifier to the last `$'.  */
3146 n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3147 IDENTIFIER_LENGTH (DECL_NAME (decl));
3149 while (n[-1] != '$')
3151 append_innerclasses_attribute_entry (state, decl, get_identifier (n));
/* Walk outward to the enclosing class.  */
3154 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
3158 for (current = DECL_INNER_CLASS_LIST (decl);
3159 current; current = TREE_CHAIN (current))
3161 append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3162 TREE_VALUE (current));
/* Back-patch: each entry is 8 bytes, plus the 2-byte count.  */
3166 ptr = length_marker; PUT4 (8*length+2);
3167 ptr = number_marker; PUT2 (length);
/* Append one 8-byte InnerClasses entry for DECL (inner name NAME):
   inner class index, outer class index, inner name index, and the
   inner class access flags.  */
3171 append_innerclasses_attribute_entry (state, decl, name)
3172 struct jcf_partial *state;
3175 static tree anonymous_name = NULL_TREE;
3176 int icii, ocii, ini, icaf;
3177 unsigned char *ptr = append_chunk (NULL, 8, state);
3179 if (!anonymous_name)
3180 anonymous_name = get_identifier ("");
3182 icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3183 ocii = find_class_constant (&state->cpool, TREE_TYPE (DECL_CONTEXT (decl)));
3185 /* The specs are saying that if the class is anonymous,
3186 inner_name_index must be zero. But the implementation makes it
3187 point to an empty string. */
3188 ini = find_utf8_constant (&state->cpool,
3189 (ANONYMOUS_CLASS_P (TREE_TYPE (decl)) ?
3190 anonymous_name : name));
3191 icaf = get_access_flags (decl);
3193 PUT2 (icii); PUT2 (ocii); PUT2 (ini); PUT2 (icaf);
/* Build and return the output `.class' file name for CLAS, creating any
   missing subdirectories below the base directory as a side effect.
   If jcf_write_base_directory is NULL the file goes next to the source
   `.java' file; otherwise under the `-d' base directory.  The returned
   string is xmalloc'd — the caller is responsible for freeing it.  */
3197 make_class_file_name (clas)
3200 const char *dname, *slash;
/* Map the dotted class name to a relative path, e.g. a.b.C -> a/b/C.  */
3204 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3205 "", '.', DIR_SEPARATOR,
3207 if (jcf_write_base_directory == NULL)
3209 /* Make sure we put the class file into the .java file's
3210 directory, and not into some subdirectory thereof. */
3212 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3213 slash = strrchr (dname, DIR_SEPARATOR);
3219 t = strrchr (cname, DIR_SEPARATOR);
3225 dname = jcf_write_base_directory;
3226 slash = dname + strlen (dname);
/* Join DNAME and CNAME: directory + separator + class path + NUL.  */
3229 r = xmalloc (slash - dname + strlen (cname) + 2);
3230 strncpy (r, dname, slash - dname);
3231 r[slash - dname] = DIR_SEPARATOR;
3232 strcpy (&r[slash - dname + 1], cname);
3234 /* We try to make new directories when we need them. We only do
3235 this for directories which "might not" exist. For instance, we
3236 assume the `-d' directory exists, but we don't assume that any
3237 subdirectory below it exists. It might be worthwhile to keep
3238 track of which directories we've created to avoid gratuitous
3240 dname = r + (slash - dname) + 1;
/* Walk R separator by separator, stat'ing each prefix and creating
   it with mkdir when it does not yet exist.  */
3243 cname = strchr (dname, DIR_SEPARATOR);
3247 if (stat (r, &sb) == -1)
3249 /* Try to make it. */
3250 if (mkdir (r, 0755) == -1)
3252 fatal ("failed to create directory `%s'", r);
/* Restore the separator that was temporarily overwritten.  */
3257 *cname = DIR_SEPARATOR;
3258 /* Skip consecutive separators. */
3259 for (dname = cname + 1; *dname && *dname == DIR_SEPARATOR; ++dname)
3266 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3267    The output .class file name is make_class_file_name(CLAS). */
3270 write_classfile (clas)
3273 struct obstack *work = &temporary_obstack;
3274 struct jcf_partial state[1];
3275 char *class_file_name = make_class_file_name (clas);
3276 struct chunk *chunks;
3278 if (class_file_name != NULL)
/* Open in binary mode — class files are binary data.  */
3280 FILE* stream = fopen (class_file_name, "wb");
3282 fatal ("failed to open `%s' for writing", class_file_name);
/* Record the output for dependency (-M style) tracking.  */
3283 jcf_dependency_add_target (class_file_name);
3284 init_jcf_state (state, work);
3285 chunks = generate_classfile (clas, state);
3286 write_chunks (stream, chunks);
/* fclose flushes; a nonzero return means the write failed.  */
3287 if (fclose (stream))
3288 fatal ("failed to close after writing `%s'", class_file_name);
3289 free (class_file_name);
3291 release_jcf_state (state);
3295 string concatenation
3296 synchronized statement