1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
27 #include "coretypes.h"
32 #include "java-tree.h"
36 #include "java-opcodes.h"
37 #include "parse.h" /* for BLOCK_EXPR_BODY */
43 extern struct obstack temporary_obstack;
45 /* Base directory in which `.class' files should be written.
46 NULL means to put the file into the same directory as the
47 corresponding .java file. */
48 const char *jcf_write_base_directory = NULL;
50 /* Make sure bytecode.data is big enough for at least N more bytes. */
53 do { CHECK_OP(state); \
54 if (state->bytecode.ptr + (N) > state->bytecode.limit) \
55 buffer_grow (&state->bytecode, N); } while (0)
57 /* Add a 1-byte instruction/operand I to bytecode.data,
58 assuming space has already been RESERVE'd. */
60 #define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
62 /* Like OP1, but I is a 2-byte big endian integer. */
65 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
67 /* Like OP1, but I is a 4-byte big endian integer. */
70 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
71 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
73 /* Macro to call each time we push I words on the JVM stack. */
75 #define NOTE_PUSH(I) \
76 do { state->code_SP += (I); \
77 if (state->code_SP > state->code_SP_max) \
78 state->code_SP_max = state->code_SP; } while (0)
80 /* Macro to call each time we pop I words from the JVM stack. */
83 do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
85 /* A chunk or segment of a .class file. */
89 /* The next segment of this .class file. */
92 /* The actual data in this segment to be written to the .class file. */
95 /* The size of the segment to be written to the .class file. */
99 #define PENDING_CLEANUP_PC (-3)
100 #define PENDING_EXIT_PC (-2)
101 #define UNDEFINED_PC (-1)
103 /* Each "block" represents a label plus the bytecode instructions following.
104 There may be branches out of the block, but no incoming jumps, except
105 to the beginning of the block.
107 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
108 associated code yet), but it is an undefined label.
113 /* For blocks that are defined, the next block (in pc order).
114 For blocks that are not-yet-defined the end label of a LABELED_BLOCK_EXPR
115 or a cleanup expression (from a TRY_FINALLY_EXPR),
116 this is the next (outer) such end label, in a stack headed by
117 labeled_blocks in jcf_partial. */
118 struct jcf_block *next;
120 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
121 pc is PENDING_EXIT_PC.
122 In the not-yet-defined end label for pending cleanup subroutine,
123 pc is PENDING_CLEANUP_PC.
124 For other not-yet-defined labels, pc is UNDEFINED_PC.
126 If the label has been defined:
127 Until perform_relocations is finished, this is the maximum possible
128 value of the bytecode offset at the beginning of this block.
129 After perform_relocations, it is the actual offset (pc). */
134 /* After finish_jcf_block is called, the actual instructions
135 contained in this block. Before that NULL, and the instructions
136 are in state->bytecode. */
140 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
141 covered by the cleanup. */
142 struct jcf_block *start_label;
146 /* Set of relocations (in reverse offset order) for this block. */
147 struct jcf_relocation *relocations;
149 /* If this block is that of the not-yet-defined end label of
150 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
151 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
156 /* A "relocation" type for the 0-3 bytes of padding at the start
157 of a tableswitch or a lookupswitch. */
158 #define SWITCH_ALIGN_RELOC 4
160 /* A relocation type for the labels in a tableswitch or a lookupswitch;
161 these are relative to the start of the instruction, but (due to
162 the 0-3 bytes of padding), we don't know the offset before relocation. */
163 #define BLOCK_START_RELOC 1
165 struct jcf_relocation
167 /* Next relocation for the current jcf_block. */
168 struct jcf_relocation *next;
170 /* The (byte) offset within the current block that needs to be relocated. */
171 HOST_WIDE_INT offset;
173 /* 0 if offset is a 4-byte relative offset.
174 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
175 for proper alignment in tableswitch/lookupswitch instructions.
176 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
177 to the start of the containing block.
178 -1 if offset is a 2-byte relative offset.
179 < -1 if offset is the address of an instruction with a 2-byte offset
180 that does not have a corresponding 4-byte offset version, in which
181 case the absolute value of kind is the inverted opcode.
182 > 4 if offset is the address of an instruction (such as jsr) with a
183 2-byte offset that does have a corresponding 4-byte offset version,
184 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
187 /* The label the relocation wants to actually transfer to. */
188 struct jcf_block *label;
191 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
192 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
194 /* State for single catch clause. */
198 struct jcf_handler *next;
200 struct jcf_block *start_label;
201 struct jcf_block *end_label;
202 struct jcf_block *handler_label;
204 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
208 /* State for the current switch statement. */
210 struct jcf_switch_state
212 struct jcf_switch_state *prev;
213 struct jcf_block *default_label;
215 struct jcf_relocation *cases;
217 HOST_WIDE_INT min_case, max_case;
220 /* This structure is used to contain the various pieces that will
221 become a .class file. */
227 struct obstack *chunk_obstack;
230 /* List of basic blocks for the current method. */
231 struct jcf_block *blocks;
232 struct jcf_block *last_block;
234 struct localvar_info *first_lvar;
235 struct localvar_info *last_lvar;
240 int linenumber_count;
242 /* Until perform_relocations, this is a upper bound on the number
243 of bytes (so far) in the instructions for the current method. */
246 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
247 struct jcf_block *labeled_blocks;
249 /* The current stack size (stack pointer) in the current method. */
252 /* The largest extent of stack size (stack pointer) in the current method. */
255 /* Contains a mapping from local var slot number to localvar_info. */
256 struct buffer localvars;
258 /* The buffer allocated for bytecode for the current jcf_block. */
259 struct buffer bytecode;
261 /* Chain of exception handlers for the current method. */
262 struct jcf_handler *handlers;
264 /* Last element in handlers chain. */
265 struct jcf_handler *last_handler;
267 /* Number of exception handlers for the current method. */
270 /* Number of finalizers we are currently nested within. */
273 /* If non-NULL, use this for the return value. */
274 tree return_value_decl;
276 /* Information about the current switch statement. */
277 struct jcf_switch_state *sw_state;
279 /* The count of jsr instructions that have been emitted. */
283 static void generate_bytecode_insns (tree, int, struct jcf_partial *);
284 static struct chunk * alloc_chunk (struct chunk *, unsigned char *,
285 int, struct obstack *);
286 static unsigned char * append_chunk (unsigned char *, int,
287 struct jcf_partial *);
288 static void append_chunk_copy (unsigned char *, int, struct jcf_partial *);
289 static struct jcf_block * gen_jcf_label (struct jcf_partial *);
290 static void finish_jcf_block (struct jcf_partial *);
291 static void define_jcf_label (struct jcf_block *, struct jcf_partial *);
292 static struct jcf_block * get_jcf_label_here (struct jcf_partial *);
293 static void put_linenumber (int, struct jcf_partial *);
294 static void localvar_alloc (tree, struct jcf_partial *);
295 static void maybe_free_localvar (tree, struct jcf_partial *, int);
296 static int get_access_flags (tree);
297 static void write_chunks (FILE *, struct chunk *);
298 static int adjust_typed_op (tree, int);
299 static void generate_bytecode_conditional (tree, struct jcf_block *,
300 struct jcf_block *, int,
301 struct jcf_partial *);
302 static void generate_bytecode_return (tree, struct jcf_partial *);
303 static void perform_relocations (struct jcf_partial *);
304 static void init_jcf_state (struct jcf_partial *, struct obstack *);
305 static void init_jcf_method (struct jcf_partial *, tree);
306 static void release_jcf_state (struct jcf_partial *);
307 static int get_classfile_modifiers (tree class);
308 static struct chunk * generate_classfile (tree, struct jcf_partial *);
309 static struct jcf_handler *alloc_handler (struct jcf_block *,
311 struct jcf_partial *);
312 static void emit_iinc (tree, HOST_WIDE_INT, struct jcf_partial *);
313 static void emit_reloc (HOST_WIDE_INT, int, struct jcf_block *,
314 struct jcf_partial *);
315 static void push_constant1 (HOST_WIDE_INT, struct jcf_partial *);
316 static void push_constant2 (HOST_WIDE_INT, struct jcf_partial *);
317 static void push_int_const (HOST_WIDE_INT, struct jcf_partial *);
318 static int find_constant_wide (HOST_WIDE_INT, HOST_WIDE_INT,
319 struct jcf_partial *);
320 static void push_long_const (HOST_WIDE_INT, HOST_WIDE_INT,
321 struct jcf_partial *);
322 static int find_constant_index (tree, struct jcf_partial *);
323 static void push_long_const (HOST_WIDE_INT, HOST_WIDE_INT,
324 struct jcf_partial *);
325 static void field_op (tree, int, struct jcf_partial *);
326 static void maybe_wide (int, int, struct jcf_partial *);
327 static void emit_dup (int, int, struct jcf_partial *);
328 static void emit_pop (int, struct jcf_partial *);
329 static void emit_load_or_store (tree, int, struct jcf_partial *);
330 static void emit_load (tree, struct jcf_partial *);
331 static void emit_store (tree, struct jcf_partial *);
332 static void emit_unop (enum java_opcode, tree, struct jcf_partial *);
333 static void emit_binop (enum java_opcode, tree, struct jcf_partial *);
334 static void emit_reloc (HOST_WIDE_INT, int, struct jcf_block *,
335 struct jcf_partial *);
336 static void emit_switch_reloc (struct jcf_block *, struct jcf_partial *);
337 static void emit_case_reloc (struct jcf_relocation *, struct jcf_partial *);
338 static void emit_if (struct jcf_block *, int, int, struct jcf_partial *);
339 static void emit_goto (struct jcf_block *, struct jcf_partial *);
340 static void emit_jsr (struct jcf_block *, struct jcf_partial *);
341 static void call_cleanups (struct jcf_block *, struct jcf_partial *);
342 static char *make_class_file_name (tree);
343 static unsigned char *append_synthetic_attribute (struct jcf_partial *);
344 static void append_deprecated_attribute (struct jcf_partial *);
345 static void append_innerclasses_attribute (struct jcf_partial *, tree);
346 static void append_innerclasses_attribute_entry (struct jcf_partial *, tree, tree);
347 static void append_gcj_attribute (struct jcf_partial *, tree);
349 /* Utility macros for appending (big-endian) data to a buffer.
350 We assume a local variable 'ptr' points into where we want to
351 write next, and we assume enough space has been allocated. */
#ifdef ENABLE_JC1_CHECKING
static int CHECK_PUT (void *, struct jcf_partial *, int);

/* Abort if writing I bytes at PTR would run outside the data of the
   current chunk.  Returns 0 so the macros below can use it inside
   comma expressions.  */

static int
CHECK_PUT (void *ptr, struct jcf_partial *state, int i)
{
  if ((unsigned char *) ptr < state->chunk->data
      || (unsigned char *) ptr + i > state->chunk->data + state->chunk->size)
    abort ();
  return 0;
}
#else
#define CHECK_PUT(PTR, STATE, I) ((void)0)
#endif
369 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
370 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
371 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
372 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
374 /* There are some cases below where CHECK_PUT is guaranteed to fail.
375 Use the following macros in those specific cases. */
376 #define UNSAFE_PUT1(X) (*ptr++ = (X))
377 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
378 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
379 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
382 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
383 Set the data and size fields to DATA and SIZE, respectively.
384 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
386 static struct chunk *
387 alloc_chunk (struct chunk *last, unsigned char *data,
388 int size, struct obstack *work)
390 struct chunk *chunk = obstack_alloc (work, sizeof(struct chunk));
392 if (data == NULL && size > 0)
393 data = obstack_alloc (work, size);
#ifdef ENABLE_JC1_CHECKING
static int CHECK_OP (struct jcf_partial *);

/* Abort if the bytecode output buffer has been overrun.  Returns 0 so
   the OP1/OP2/OP4 macros can use it inside comma expressions.  */

static int
CHECK_OP (struct jcf_partial *state)
{
  if (state->bytecode.ptr > state->bytecode.limit)
    abort ();
  return 0;
}
#else
#define CHECK_OP(STATE) ((void) 0)
#endif
418 static unsigned char *
419 append_chunk (unsigned char *data, int size, struct jcf_partial *state)
421 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
422 if (state->first == NULL)
423 state->first = state->chunk;
424 return state->chunk->data;
/* Append a chunk containing a copy of the SIZE bytes at DATA.  */

static void
append_chunk_copy (unsigned char *data, int size, struct jcf_partial *state)
{
  unsigned char *ptr = append_chunk (NULL, size, state);
  memcpy (ptr, data, size);
}
434 static struct jcf_block *
435 gen_jcf_label (struct jcf_partial *state)
437 struct jcf_block *block
438 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
440 block->linenumber = -1;
441 block->pc = UNDEFINED_PC;
446 finish_jcf_block (struct jcf_partial *state)
448 struct jcf_block *block = state->last_block;
449 struct jcf_relocation *reloc;
450 int code_length = BUFFER_LENGTH (&state->bytecode);
451 int pc = state->code_length;
452 append_chunk_copy (state->bytecode.data, code_length, state);
453 BUFFER_RESET (&state->bytecode);
454 block->v.chunk = state->chunk;
456 /* Calculate code_length to the maximum value it can have. */
457 pc += block->v.chunk->size;
458 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
460 int kind = reloc->kind;
461 if (kind == SWITCH_ALIGN_RELOC)
463 else if (kind > BLOCK_START_RELOC)
464 pc += 2; /* 2-byte offset may grow to 4-byte offset */
466 pc += 5; /* May need to add a goto_w. */
468 state->code_length = pc;
472 define_jcf_label (struct jcf_block *label, struct jcf_partial *state)
474 if (state->last_block != NULL)
475 finish_jcf_block (state);
476 label->pc = state->code_length;
477 if (state->blocks == NULL)
478 state->blocks = label;
480 state->last_block->next = label;
481 state->last_block = label;
483 label->u.relocations = NULL;
486 static struct jcf_block *
487 get_jcf_label_here (struct jcf_partial *state)
489 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
490 return state->last_block;
493 struct jcf_block *label = gen_jcf_label (state);
494 define_jcf_label (label, state);
499 /* Note a line number entry for the current PC and given LINE. */
502 put_linenumber (int line, struct jcf_partial *state)
504 struct jcf_block *label = get_jcf_label_here (state);
505 if (label->linenumber > 0)
507 label = gen_jcf_label (state);
508 define_jcf_label (label, state);
510 label->linenumber = line;
511 state->linenumber_count++;
514 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
515 in the range (START_LABEL, END_LABEL). */
517 static struct jcf_handler *
518 alloc_handler (struct jcf_block *start_label, struct jcf_block *end_label,
519 struct jcf_partial *state)
521 struct jcf_handler *handler
522 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
523 handler->start_label = start_label;
524 handler->end_label = end_label;
525 handler->handler_label = get_jcf_label_here (state);
526 if (state->handlers == NULL)
527 state->handlers = handler;
529 state->last_handler->next = handler;
530 state->last_handler = handler;
531 handler->next = NULL;
532 state->num_handlers++;
537 /* The index of jvm local variable allocated for this DECL.
538 This is assigned when generating .class files;
539 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
540 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
542 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
546 struct localvar_info *next;
549 struct jcf_block *start_label;
550 struct jcf_block *end_label;
553 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
554 #define localvar_max \
555 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
558 localvar_alloc (tree decl, struct jcf_partial *state)
560 struct jcf_block *start_label = get_jcf_label_here (state);
561 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
563 struct localvar_info *info;
564 struct localvar_info **ptr = localvar_buffer;
565 struct localvar_info **limit
566 = (struct localvar_info**) state->localvars.ptr;
567 for (index = 0; ptr < limit; index++, ptr++)
570 && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
575 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
576 ptr = (struct localvar_info**) state->localvars.data + index;
577 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
579 info = obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
582 ptr[1] = (struct localvar_info *)(~0);
583 DECL_LOCAL_INDEX (decl) = index;
585 info->start_label = start_label;
587 if (debug_info_level > DINFO_LEVEL_TERSE
588 && DECL_NAME (decl) != NULL_TREE)
590 /* Generate debugging info. */
592 if (state->last_lvar != NULL)
593 state->last_lvar->next = info;
595 state->first_lvar = info;
596 state->last_lvar = info;
602 maybe_free_localvar (tree decl, struct jcf_partial *state, int really)
604 struct jcf_block *end_label = get_jcf_label_here (state);
605 int index = DECL_LOCAL_INDEX (decl);
606 struct localvar_info **ptr = &localvar_buffer [index];
607 struct localvar_info *info = *ptr;
608 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
610 info->end_label = end_label;
612 if (info->decl != decl)
619 if (ptr[1] != (struct localvar_info *)(~0))
626 #define STACK_TARGET 1
627 #define IGNORE_TARGET 2
629 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
630 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
633 get_access_flags (tree decl)
636 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
638 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
640 if (TREE_PROTECTED (decl))
641 flags |= ACC_PROTECTED;
642 if (TREE_PRIVATE (decl))
643 flags |= ACC_PRIVATE;
645 else if (TREE_CODE (decl) == TYPE_DECL)
647 if (CLASS_PUBLIC (decl))
649 if (CLASS_FINAL (decl))
651 if (CLASS_SUPER (decl))
653 if (CLASS_ABSTRACT (decl))
654 flags |= ACC_ABSTRACT;
655 if (CLASS_INTERFACE (decl))
656 flags |= ACC_INTERFACE;
657 if (CLASS_STATIC (decl))
659 if (CLASS_PRIVATE (decl))
660 flags |= ACC_PRIVATE;
661 if (CLASS_PROTECTED (decl))
662 flags |= ACC_PROTECTED;
663 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
664 || LOCAL_CLASS_P (TREE_TYPE (decl)))
665 flags |= ACC_PRIVATE;
666 if (CLASS_STRICTFP (decl))
672 if (TREE_CODE (decl) == FUNCTION_DECL)
674 if (METHOD_PUBLIC (decl))
676 if (METHOD_FINAL (decl))
678 if (METHOD_NATIVE (decl))
680 if (METHOD_STATIC (decl))
682 if (METHOD_SYNCHRONIZED (decl))
683 flags |= ACC_SYNCHRONIZED;
684 if (METHOD_ABSTRACT (decl))
685 flags |= ACC_ABSTRACT;
686 if (METHOD_STRICTFP (decl))
691 if (FIELD_PUBLIC (decl))
693 if (FIELD_FINAL (decl))
695 if (FIELD_STATIC (decl))
697 if (FIELD_VOLATILE (decl))
698 flags |= ACC_VOLATILE;
699 if (FIELD_TRANSIENT (decl))
700 flags |= ACC_TRANSIENT;
705 /* Write the list of segments starting at CHUNKS to STREAM. */
708 write_chunks (FILE* stream, struct chunk *chunks)
710 for (; chunks != NULL; chunks = chunks->next)
711 fwrite (chunks->data, chunks->size, 1, stream);
714 /* Push a 1-word constant in the constant pool at the given INDEX.
715 (Caller is responsible for doing NOTE_PUSH.) */
718 push_constant1 (HOST_WIDE_INT index, struct jcf_partial *state)
733 /* Push a 2-word constant in the constant pool at the given INDEX.
734 (Caller is responsible for doing NOTE_PUSH.) */
737 push_constant2 (HOST_WIDE_INT index, struct jcf_partial *state)
744 /* Push 32-bit integer constant on VM stack.
745 Caller is responsible for doing NOTE_PUSH. */
748 push_int_const (HOST_WIDE_INT i, struct jcf_partial *state)
751 if (i >= -1 && i <= 5)
752 OP1(OPCODE_iconst_0 + i);
753 else if (i >= -128 && i < 128)
758 else if (i >= -32768 && i < 32768)
765 i = find_constant1 (&state->cpool, CONSTANT_Integer,
766 (jword)(i & 0xFFFFFFFF));
767 push_constant1 (i, state);
772 find_constant_wide (HOST_WIDE_INT lo, HOST_WIDE_INT hi,
773 struct jcf_partial *state)
775 HOST_WIDE_INT w1, w2;
776 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
777 return find_constant2 (&state->cpool, CONSTANT_Long,
778 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
781 /* Find or allocate a constant pool entry for the given VALUE.
782 Return the index in the constant pool. */
785 find_constant_index (tree value, struct jcf_partial *state)
787 if (TREE_CODE (value) == INTEGER_CST)
789 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
790 return find_constant1 (&state->cpool, CONSTANT_Integer,
791 (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
793 return find_constant_wide (TREE_INT_CST_LOW (value),
794 TREE_INT_CST_HIGH (value), state);
796 else if (TREE_CODE (value) == REAL_CST)
800 real_to_target (words, &TREE_REAL_CST (value),
801 TYPE_MODE (TREE_TYPE (value)));
802 words[0] &= 0xffffffff;
803 words[1] &= 0xffffffff;
805 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
806 return find_constant1 (&state->cpool, CONSTANT_Float, (jword)words[0]);
808 return find_constant2 (&state->cpool, CONSTANT_Double,
809 (jword)words[1-FLOAT_WORDS_BIG_ENDIAN],
810 (jword)words[FLOAT_WORDS_BIG_ENDIAN]);
812 else if (TREE_CODE (value) == STRING_CST)
813 return find_string_constant (&state->cpool, value);
819 /* Push 64-bit long constant on VM stack.
820 Caller is responsible for doing NOTE_PUSH. */
823 push_long_const (HOST_WIDE_INT lo, HOST_WIDE_INT hi, struct jcf_partial *state)
825 HOST_WIDE_INT highpart, dummy;
826 jint lowpart = WORD_TO_INT (lo);
828 rshift_double (lo, hi, 32, 64, &highpart, &dummy, 1);
830 if (highpart == 0 && (lowpart == 0 || lowpart == 1))
833 OP1(OPCODE_lconst_0 + lowpart);
835 else if ((highpart == 0 && lowpart > 0 && lowpart < 32768)
836 || (highpart == -1 && lowpart < 0 && lowpart >= -32768))
838 push_int_const (lowpart, state);
843 push_constant2 (find_constant_wide (lo, hi, state), state);
847 field_op (tree field, int opcode, struct jcf_partial *state)
849 int index = find_fieldref_index (&state->cpool, field);
855 /* Returns an integer in the range 0 (for 'int') through 4 (for object
856 reference) to 7 (for 'short') which matches the pattern of how JVM
857 opcodes typically depend on the operand type. */
860 adjust_typed_op (tree type, int max)
862 switch (TREE_CODE (type))
865 case RECORD_TYPE: return 4;
867 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
869 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
871 switch (TYPE_PRECISION (type))
873 case 8: return max < 5 ? 0 : 5;
874 case 16: return max < 7 ? 0 : 7;
880 switch (TYPE_PRECISION (type))
893 maybe_wide (int opcode, int index, struct jcf_partial *state)
910 /* Compile code to duplicate with offset, where
911 SIZE is the size of the stack item to duplicate (1 or 2), abd
912 OFFSET is where to insert the result (must be 0, 1, or 2).
913 (The new words get inserted at stack[SP-size-offset].) */
916 emit_dup (int size, int offset, struct jcf_partial *state)
923 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
924 else if (offset == 1)
925 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
926 else if (offset == 2)
927 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
935 emit_pop (int size, struct jcf_partial *state)
938 OP1 (OPCODE_pop - 1 + size);
942 emit_iinc (tree var, HOST_WIDE_INT value, struct jcf_partial *state)
944 int slot = DECL_LOCAL_INDEX (var);
946 if (value < -128 || value > 127 || slot >= 256)
964 emit_load_or_store (tree var, /* Variable to load from or store into. */
965 int opcode, /* Either OPCODE_iload or OPCODE_istore. */
966 struct jcf_partial *state)
968 tree type = TREE_TYPE (var);
969 int kind = adjust_typed_op (type, 4);
970 int index = DECL_LOCAL_INDEX (var);
974 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
977 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
981 emit_load (tree var, struct jcf_partial *state)
983 emit_load_or_store (var, OPCODE_iload, state);
984 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
988 emit_store (tree var, struct jcf_partial *state)
990 emit_load_or_store (var, OPCODE_istore, state);
991 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
995 emit_unop (enum java_opcode opcode, tree type ATTRIBUTE_UNUSED,
996 struct jcf_partial *state)
1003 emit_binop (enum java_opcode opcode, tree type, struct jcf_partial *state)
1005 int size = TYPE_IS_WIDE (type) ? 2 : 1;
1012 emit_reloc (HOST_WIDE_INT value, int kind,
1013 struct jcf_block *target, struct jcf_partial *state)
1015 struct jcf_relocation *reloc
1016 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1017 struct jcf_block *block = state->last_block;
1018 reloc->next = block->u.relocations;
1019 block->u.relocations = reloc;
1020 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1021 reloc->label = target;
1023 if (kind == 0 || kind == BLOCK_START_RELOC)
1025 else if (kind != SWITCH_ALIGN_RELOC)
1030 emit_switch_reloc (struct jcf_block *label, struct jcf_partial *state)
1032 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1035 /* Similar to emit_switch_reloc,
1036 but re-uses an existing case reloc. */
1039 emit_case_reloc (struct jcf_relocation *reloc, struct jcf_partial *state)
1041 struct jcf_block *block = state->last_block;
1042 reloc->next = block->u.relocations;
1043 block->u.relocations = reloc;
1044 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1045 reloc->kind = BLOCK_START_RELOC;
1049 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1050 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1053 emit_if (struct jcf_block *target, int opcode, int inv_opcode,
1054 struct jcf_partial *state)
1058 /* value is 1 byte from reloc back to start of instruction. */
1059 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
1063 emit_goto (struct jcf_block *target, struct jcf_partial *state)
1067 /* Value is 1 byte from reloc back to start of instruction. */
1068 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
1072 emit_jsr (struct jcf_block *target, struct jcf_partial *state)
1076 /* Value is 1 byte from reloc back to start of instruction. */
1077 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1081 /* Generate code to evaluate EXP. If the result is true,
1082 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1083 TRUE_BRANCH_FIRST is a code generation hint that the
1084 TRUE_LABEL may follow right after this. (The idea is that we
1085 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
/* NOTE(review): this span appears to have had lines elided by whatever
   produced this listing -- braces, case labels and some statements are
   missing, and the original file's line numbers are baked into each
   line.  Only comments are added here; the code text is untouched.
   Compare against the upstream gcc/java/jcf-write.c before editing.  */
1088 generate_bytecode_conditional (tree exp,
1089 struct jcf_block *true_label,
1090 struct jcf_block *false_label,
1091 int true_branch_first,
1092 struct jcf_partial *state)
1094 tree exp0, exp1, type;
1095 int save_SP = state->code_SP;
1096 enum java_opcode op, negop;
/* Dispatch on the kind of conditional expression being compiled.  */
1099 switch (TREE_CODE (exp))
/* Constant condition: branch unconditionally to the matching label.  */
1102 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
/* Conditional expression (?:): compile condition, then each arm as a
   conditional on the shared true/false labels.  */
1106 struct jcf_block *then_label = gen_jcf_label (state);
1107 struct jcf_block *else_label = gen_jcf_label (state);
1108 int save_SP_before, save_SP_after;
1109 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1110 then_label, else_label, 1, state);
1111 define_jcf_label (then_label, state);
1112 save_SP_before = state->code_SP;
1113 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1114 true_label, false_label, 1, state);
1115 save_SP_after = state->code_SP;
1116 state->code_SP = save_SP_before;
1117 define_jcf_label (else_label, state);
1118 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1119 true_label, false_label,
1120 true_branch_first, state);
/* Both arms must leave the stack at the same depth.  */
1121 if (state->code_SP != save_SP_after)
/* Logical negation: swap the labels and the branch-order hint.  */
1125 case TRUTH_NOT_EXPR:
1126 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label,
1127 true_label, ! true_branch_first, state);
/* Short-circuit AND: fall through to second operand only if the
   first is true.  */
1129 case TRUTH_ANDIF_EXPR:
1131 struct jcf_block *next_label = gen_jcf_label (state);
1132 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1133 next_label, false_label, 1, state);
1134 define_jcf_label (next_label, state);
1135 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1136 true_label, false_label, 1, state);
/* Short-circuit OR: symmetric to the AND case above.  */
1139 case TRUTH_ORIF_EXPR:
1141 struct jcf_block *next_label = gen_jcf_label (state);
1142 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1143 true_label, next_label, 1, state);
1144 define_jcf_label (next_label, state);
1145 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1146 true_label, false_label, 1, state);
1150 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1151 set it to the corresponding 1-operand if<COND> instructions. */
1155 /* The opcodes with their inverses are allocated in pairs.
1156 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1157 negop = (op & 1) ? op + 1 : op - 1;
1159 if (true_branch_first)
1161 emit_if (false_label, negop, op, state);
1162 emit_goto (true_label, state);
1166 emit_if (true_label, op, negop, state);
1167 emit_goto (false_label, state);
/* Map tree comparison codes to if_icmp<COND> opcodes.  */
1174 op = OPCODE_if_icmpeq;
1180 op = OPCODE_if_icmpne;
1186 op = OPCODE_if_icmpgt;
1192 op = OPCODE_if_icmplt;
1198 op = OPCODE_if_icmpge;
1204 op = OPCODE_if_icmple;
1210 /* UNLT_EXPR(a, b) means 'a < b || unordered(a, b)'. This is
1211 the same as the Java source expression '!(a >= b)', so handle
1213 struct jcf_block *tmp = true_label;
1214 true_label = false_label;
1216 true_branch_first = !true_branch_first;
/* Comparison proper: inspect the operand type to choose between
   reference, integer, long and floating-point compare sequences.  */
1219 exp0 = TREE_OPERAND (exp, 0);
1220 exp1 = TREE_OPERAND (exp, 1);
1221 type = TREE_TYPE (exp0);
1222 switch (TREE_CODE (type))
1225 case POINTER_TYPE: case RECORD_TYPE:
/* Only equality comparisons are valid for references.  */
1226 switch (TREE_CODE (exp))
1228 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1229 case NE_EXPR: op = OPCODE_if_acmpne; break;
/* Comparing against null uses the 1-operand ifnull/ifnonnull forms.  */
1232 if (integer_zerop (exp1) || integer_zerop (exp0))
1234 generate_bytecode_insns (integer_zerop (exp0) ? exp1 : exp0,
1235 STACK_TARGET, state);
1236 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1237 negop = (op & 1) ? op - 1 : op + 1;
1241 generate_bytecode_insns (exp0, STACK_TARGET, state);
1242 generate_bytecode_insns (exp1, STACK_TARGET, state);
1246 generate_bytecode_insns (exp0, STACK_TARGET, state);
1247 generate_bytecode_insns (exp1, STACK_TARGET, state);
1248 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1252 if (TYPE_PRECISION (type) > 32)
1263 if (TYPE_PRECISION (type) > 32)
1265 generate_bytecode_insns (exp0, STACK_TARGET, state);
1266 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* Integer comparison against zero can use the 1-operand if<COND> forms.  */
1274 if (integer_zerop (exp1))
1276 generate_bytecode_insns (exp0, STACK_TARGET, state);
1280 if (integer_zerop (exp0))
1284 case OPCODE_if_icmplt:
1285 case OPCODE_if_icmpge:
1288 case OPCODE_if_icmpgt:
1289 case OPCODE_if_icmple:
1295 generate_bytecode_insns (exp1, STACK_TARGET, state);
1299 generate_bytecode_insns (exp0, STACK_TARGET, state);
1300 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* Default: evaluate EXP as a value and branch on zero/non-zero.  */
1306 generate_bytecode_insns (exp, STACK_TARGET, state);
1308 if (true_branch_first)
1310 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1311 emit_goto (true_label, state);
1315 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1316 emit_goto (false_label, state);
/* Sanity check: a conditional must leave the stack depth unchanged.  */
1320 if (save_SP != state->code_SP)
1324 /* Call the pending cleanups, i.e. the `finally' subroutines of
1325    enclosing TRY_FINALLY_EXPRs, but only as far out as LIMIT (since
1326    we are about to jump to the label that is LIMIT).  */
1329 call_cleanups (struct jcf_block *limit, struct jcf_partial *state)
1331 struct jcf_block *block = state->labeled_blocks;
1332 for (; block != limit; block = block->next)
     /* PENDING_CLEANUP_PC marks a label whose `finally' body is emitted
        as a jsr subroutine (see the TRY_FINALLY_EXPR handling); call it
        so the cleanup runs before control leaves this region.  */
1334 if (block->pc == PENDING_CLEANUP_PC)
1335 emit_jsr (block, state);
/* Emit bytecode to return the value of EXP from the current method.
   Whether the method returns void is derived from the method's declared
   return type (not from EXP).  If any `finally' cleanups are pending,
   the computed value is spilled to a local variable so the cleanups can
   run before the actual *return instruction is emitted.  */
1340 generate_bytecode_return (tree exp, struct jcf_partial *state)
1342 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
1343 int returns_void = TREE_CODE (return_type) == VOID_TYPE;
1348 switch (TREE_CODE (exp))
     /* (Case labels elided in this view.)  This arm evaluates operand 0
        purely for its side effects, then returns operand 1 — presumably
        the COMPOUND_EXPR case; TODO confirm against the full source.  */
1351 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1353 exp = TREE_OPERAND (exp, 1);
     /* Conditional return: branch on operand 0, then emit a recursive
        generate_bytecode_return in each arm so each path returns
        directly without joining first.  */
1357 struct jcf_block *then_label = gen_jcf_label (state);
1358 struct jcf_block *else_label = gen_jcf_label (state);
1359 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1360 then_label, else_label, 1, state);
1361 define_jcf_label (then_label, state);
1362 generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1363 define_jcf_label (else_label, state);
1364 generate_bytecode_return (TREE_OPERAND (exp, 2), state);
     /* Default: evaluate EXP, pushing its value only if the method
        actually returns one.  */
1368 generate_bytecode_insns (exp,
1369 returns_void ? IGNORE_TARGET
1370 : STACK_TARGET, state);
     /* Void return path: run all pending cleanups (NULL limit = all of
        them) before returning.  */
1376 call_cleanups (NULL, state);
     /* Non-void: pick ireturn/lreturn/freturn/dreturn/areturn from the
        return type via adjust_typed_op.  */
1380 op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
1381 if (state->num_finalizers > 0)
     /* Cleanups are pending: lazily allocate a per-method local to hold
        the return value across the cleanup calls.  */
1383 if (state->return_value_decl == NULL_TREE)
1385 state->return_value_decl
1386 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp))
1387 localvar_alloc (state->return_value_decl, state);
     /* Save the value, run the cleanups, then reload it for the
        *return instruction.  */
1389 emit_store (state->return_value_decl, state);
1390 call_cleanups (NULL, state);
1391 emit_load (state->return_value_decl, state);
1392 /* If we call maybe_free_localvar (state->return_value_decl, state, 1),
1393    then we risk the saved decl being erroneously re-used in the
1394    finalizer.  Instead, we keep the state->return_value_decl
1395    allocated through the rest of the method.  This is not
1396    the greatest solution, but it is at least simple and safe.  */
1403 /* Generate bytecode for sub-expression EXP of METHOD.
1404 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1407 generate_bytecode_insns (tree exp, int target, struct jcf_partial *state)
1410 enum java_opcode jopcode;
1412 HOST_WIDE_INT value;
1417 if (exp == NULL && target == IGNORE_TARGET)
1420 type = TREE_TYPE (exp);
1422 switch (TREE_CODE (exp))
1425 if (BLOCK_EXPR_BODY (exp))
1428 tree body = BLOCK_EXPR_BODY (exp);
1429 long jsrs = state->num_jsrs;
1430 for (local = BLOCK_EXPR_DECLS (exp); local; )
1432 tree next = TREE_CHAIN (local);
1433 localvar_alloc (local, state);
1436 /* Avoid deep recursion for long blocks. */
1437 while (TREE_CODE (body) == COMPOUND_EXPR)
1439 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1440 body = TREE_OPERAND (body, 1);
1442 generate_bytecode_insns (body, target, state);
1444 for (local = BLOCK_EXPR_DECLS (exp); local; )
1446 tree next = TREE_CHAIN (local);
1447 maybe_free_localvar (local, state, state->num_jsrs <= jsrs);
1453 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1454 /* Normally the first operand to a COMPOUND_EXPR must complete
1455 normally. However, in the special case of a do-while
1456 statement this is not necessarily the case. */
1457 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0)))
1458 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1460 case EXPR_WITH_FILE_LOCATION:
1462 location_t saved_location = input_location;
1463 tree body = EXPR_WFL_NODE (exp);
1464 if (IS_EMPTY_STMT (body))
1466 input_filename = EXPR_WFL_FILENAME (exp);
1467 input_line = EXPR_WFL_LINENO (exp);
1468 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && input_line > 0
1469 && debug_info_level > DINFO_LEVEL_NONE)
1470 put_linenumber (input_line, state);
1471 generate_bytecode_insns (body, target, state);
1472 input_location = saved_location;
1476 if (target == IGNORE_TARGET) ; /* do nothing */
1477 else if (TREE_CODE (type) == POINTER_TYPE)
1479 if (! integer_zerop (exp))
1482 OP1 (OPCODE_aconst_null);
1485 else if (TYPE_PRECISION (type) <= 32)
1487 push_int_const (TREE_INT_CST_LOW (exp), state);
1492 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1499 int prec = TYPE_PRECISION (type) >> 5;
1501 if (real_zerop (exp) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp)))
1502 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1503 else if (real_onep (exp))
1504 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1505 else if (prec == 1 && real_twop (exp))
1506 OP1 (OPCODE_fconst_2);
1507 /* ??? We could also use iconst_3/ldc followed by i2f/i2d
1508 for other float/double when the value is a small integer. */
1511 offset = find_constant_index (exp, state);
1513 push_constant1 (offset, state);
1515 push_constant2 (offset, state);
1521 push_constant1 (find_string_constant (&state->cpool, exp), state);
1525 if (TREE_STATIC (exp))
1527 field_op (exp, OPCODE_getstatic, state);
1528 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1531 /* ... fall through ... */
1533 emit_load (exp, state);
1535 case NON_LVALUE_EXPR:
1537 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1540 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1541 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1542 if (target != IGNORE_TARGET)
1544 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1547 if (! TYPE_IS_WIDE (type))
1553 tree obj = TREE_OPERAND (exp, 0);
1554 tree field = TREE_OPERAND (exp, 1);
1555 int is_static = FIELD_STATIC (field);
1556 generate_bytecode_insns (obj,
1557 is_static ? IGNORE_TARGET : target, state);
1558 if (target != IGNORE_TARGET)
1560 if (DECL_NAME (field) == length_identifier_node && !is_static
1561 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1564 OP1 (OPCODE_arraylength);
1568 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1572 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1577 case TRUTH_ANDIF_EXPR:
1578 case TRUTH_ORIF_EXPR:
1592 struct jcf_block *then_label = gen_jcf_label (state);
1593 struct jcf_block *else_label = gen_jcf_label (state);
1594 struct jcf_block *end_label = gen_jcf_label (state);
1595 generate_bytecode_conditional (exp,
1596 then_label, else_label, 1, state);
1597 define_jcf_label (then_label, state);
1598 push_int_const (1, state);
1599 emit_goto (end_label, state);
1600 define_jcf_label (else_label, state);
1601 push_int_const (0, state);
1602 define_jcf_label (end_label, state);
1608 struct jcf_block *then_label = gen_jcf_label (state);
1609 struct jcf_block *else_label = gen_jcf_label (state);
1610 struct jcf_block *end_label = gen_jcf_label (state);
1611 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1612 then_label, else_label, 1, state);
1613 define_jcf_label (then_label, state);
1614 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1615 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1616 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1617 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1618 emit_goto (end_label, state);
1619 define_jcf_label (else_label, state);
1620 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1621 define_jcf_label (end_label, state);
1622 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1623 if (TREE_TYPE (exp) != void_type_node)
1624 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1629 struct jcf_switch_state *sw_state = state->sw_state;
1630 struct jcf_relocation *reloc
1631 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1632 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1634 reloc->label = get_jcf_label_here (state);
1635 reloc->offset = case_value;
1636 reloc->next = sw_state->cases;
1637 sw_state->cases = reloc;
1638 if (sw_state->num_cases == 0)
1640 sw_state->min_case = case_value;
1641 sw_state->max_case = case_value;
1645 if (case_value < sw_state->min_case)
1646 sw_state->min_case = case_value;
1647 if (case_value > sw_state->max_case)
1648 sw_state->max_case = case_value;
1650 sw_state->num_cases++;
1654 state->sw_state->default_label = get_jcf_label_here (state);
1659 /* The SWITCH_EXPR has three parts, generated in the following order:
1660 1. the switch_expression (the value used to select the correct case);
1662 3. the switch_instruction (the tableswitch/lookupswitch instruction).
1663 After code generation, we will re-order them in the order 1, 3, 2.
1664 This is to avoid any extra GOTOs. */
1665 struct jcf_switch_state sw_state;
1666 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1667 struct jcf_block *body_last; /* Last block of the switch_body. */
1668 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1669 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1670 struct jcf_block *body_block;
1672 sw_state.prev = state->sw_state;
1673 state->sw_state = &sw_state;
1674 sw_state.cases = NULL;
1675 sw_state.num_cases = 0;
1676 sw_state.default_label = NULL;
1677 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1678 expression_last = state->last_block;
1679 /* Force a new block here. */
1680 body_block = gen_jcf_label (state);
1681 define_jcf_label (body_block, state);
1682 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1683 body_last = state->last_block;
1685 switch_instruction = gen_jcf_label (state);
1686 define_jcf_label (switch_instruction, state);
1687 if (sw_state.default_label == NULL)
1688 sw_state.default_label = gen_jcf_label (state);
1690 if (sw_state.num_cases <= 1)
1692 if (sw_state.num_cases == 0)
1694 emit_pop (1, state);
1699 push_int_const (sw_state.cases->offset, state);
1701 emit_if (sw_state.cases->label,
1702 OPCODE_if_icmpeq, OPCODE_if_icmpne, state);
1704 emit_goto (sw_state.default_label, state);
1709 unsigned HOST_WIDE_INT delta;
1710 /* Copy the chain of relocs into a sorted array. */
1711 struct jcf_relocation **relocs
1712 = xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1713 /* The relocs arrays is a buffer with a gap.
1714 The assumption is that cases will normally come in "runs". */
1716 int gap_end = sw_state.num_cases;
1717 struct jcf_relocation *reloc;
1718 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1720 HOST_WIDE_INT case_value = reloc->offset;
1721 while (gap_end < sw_state.num_cases)
1723 struct jcf_relocation *end = relocs[gap_end];
1724 if (case_value <= end->offset)
1726 relocs[gap_start++] = end;
1729 while (gap_start > 0)
1731 struct jcf_relocation *before = relocs[gap_start-1];
1732 if (case_value >= before->offset)
1734 relocs[--gap_end] = before;
1737 relocs[gap_start++] = reloc;
1738 /* Note we don't check for duplicates. This is
1739 handled by the parser. */
1742 /* We could have DELTA < 0 if sw_state.min_case is
1743 something like Integer.MIN_VALUE. That is why delta is
1745 delta = sw_state.max_case - sw_state.min_case;
1746 if (2 * (unsigned) sw_state.num_cases >= delta)
1747 { /* Use tableswitch. */
1749 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1750 OP1 (OPCODE_tableswitch);
1751 emit_reloc (RELOCATION_VALUE_0,
1752 SWITCH_ALIGN_RELOC, NULL, state);
1753 emit_switch_reloc (sw_state.default_label, state);
1754 OP4 (sw_state.min_case);
1755 OP4 (sw_state.max_case);
1756 for (i = sw_state.min_case; ; )
1758 reloc = relocs[index];
1759 if (i == reloc->offset)
1761 emit_case_reloc (reloc, state);
1762 if (i == sw_state.max_case)
1767 emit_switch_reloc (sw_state.default_label, state);
1772 { /* Use lookupswitch. */
1773 RESERVE(9 + 8 * sw_state.num_cases);
1774 OP1 (OPCODE_lookupswitch);
1775 emit_reloc (RELOCATION_VALUE_0,
1776 SWITCH_ALIGN_RELOC, NULL, state);
1777 emit_switch_reloc (sw_state.default_label, state);
1778 OP4 (sw_state.num_cases);
1779 for (i = 0; i < sw_state.num_cases; i++)
1781 struct jcf_relocation *reloc = relocs[i];
1782 OP4 (reloc->offset);
1783 emit_case_reloc (reloc, state);
1789 instruction_last = state->last_block;
1790 if (sw_state.default_label->pc < 0)
1791 define_jcf_label (sw_state.default_label, state);
1792 else /* Force a new block. */
1793 sw_state.default_label = get_jcf_label_here (state);
1794 /* Now re-arrange the blocks so the switch_instruction
1795 comes before the switch_body. */
1796 switch_length = state->code_length - switch_instruction->pc;
1797 switch_instruction->pc = body_block->pc;
1798 instruction_last->next = body_block;
1799 instruction_last->v.chunk->next = body_block->v.chunk;
1800 expression_last->next = switch_instruction;
1801 expression_last->v.chunk->next = switch_instruction->v.chunk;
1802 body_last->next = sw_state.default_label;
1803 body_last->v.chunk->next = NULL;
1804 state->chunk = body_last->v.chunk;
1805 for (; body_block != sw_state.default_label; body_block = body_block->next)
1806 body_block->pc += switch_length;
1808 state->sw_state = sw_state.prev;
1813 exp = TREE_OPERAND (exp, 0);
1814 if (exp == NULL_TREE)
1815 exp = build_java_empty_stmt ();
1816 else if (TREE_CODE (exp) != MODIFY_EXPR)
1819 exp = TREE_OPERAND (exp, 1);
1820 generate_bytecode_return (exp, state);
1822 case LABELED_BLOCK_EXPR:
1824 struct jcf_block *end_label = gen_jcf_label (state);
1825 end_label->next = state->labeled_blocks;
1826 state->labeled_blocks = end_label;
1827 end_label->pc = PENDING_EXIT_PC;
1828 end_label->u.labeled_block = exp;
1829 if (LABELED_BLOCK_BODY (exp))
1830 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1831 if (state->labeled_blocks != end_label)
1833 state->labeled_blocks = end_label->next;
1834 define_jcf_label (end_label, state);
1839 tree body = TREE_OPERAND (exp, 0);
1841 if (TREE_CODE (body) == COMPOUND_EXPR
1842 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1844 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1845 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1846 struct jcf_block *head_label;
1847 struct jcf_block *body_label;
1848 struct jcf_block *end_label = gen_jcf_label (state);
1849 struct jcf_block *exit_label = state->labeled_blocks;
1850 head_label = gen_jcf_label (state);
1851 emit_goto (head_label, state);
1852 body_label = get_jcf_label_here (state);
1853 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1854 define_jcf_label (head_label, state);
1855 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1856 end_label, body_label, 1, state);
1857 define_jcf_label (end_label, state);
1862 struct jcf_block *head_label = get_jcf_label_here (state);
1863 generate_bytecode_insns (body, IGNORE_TARGET, state);
1864 if (CAN_COMPLETE_NORMALLY (body))
1865 emit_goto (head_label, state);
1871 struct jcf_block *label = state->labeled_blocks;
1872 struct jcf_block *end_label = gen_jcf_label (state);
1873 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1874 label, end_label, 0, state);
1875 define_jcf_label (end_label, state);
1878 case EXIT_BLOCK_EXPR:
1880 struct jcf_block *label = state->labeled_blocks;
1881 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1882 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1883 label = label->next;
1884 call_cleanups (label, state);
1885 emit_goto (label, state);
1889 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1890 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1891 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1892 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1895 arg = TREE_OPERAND (exp, 1);
1896 exp = TREE_OPERAND (exp, 0);
1897 type = TREE_TYPE (exp);
1898 size = TYPE_IS_WIDE (type) ? 2 : 1;
1899 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1900 && ! TREE_STATIC (exp)
1901 && TREE_CODE (type) == INTEGER_TYPE
1902 && TYPE_PRECISION (type) == 32)
1904 if (target != IGNORE_TARGET && post_op)
1905 emit_load (exp, state);
1906 emit_iinc (exp, value, state);
1907 if (target != IGNORE_TARGET && ! post_op)
1908 emit_load (exp, state);
1911 if (TREE_CODE (exp) == COMPONENT_REF)
1913 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1914 emit_dup (1, 0, state);
1915 /* Stack: ..., objectref, objectref. */
1916 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1918 /* Stack: ..., objectref, oldvalue. */
1921 else if (TREE_CODE (exp) == ARRAY_REF)
1923 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1924 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1925 emit_dup (2, 0, state);
1926 /* Stack: ..., array, index, array, index. */
1927 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1931 /* Stack: ..., array, index, oldvalue. */
1934 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1936 generate_bytecode_insns (exp, STACK_TARGET, state);
1937 /* Stack: ..., oldvalue. */
1943 if (target != IGNORE_TARGET && post_op)
1944 emit_dup (size, offset, state);
1945 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1946 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1947 /* Stack, otherwise: ..., [result, ] oldvalue. */
1948 generate_bytecode_insns (arg, STACK_TARGET, state);
1949 emit_binop ((value >= 0 ? OPCODE_iadd : OPCODE_isub)
1950 + adjust_typed_op (type, 3),
1952 if (target != IGNORE_TARGET && ! post_op)
1953 emit_dup (size, offset, state);
1954 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1955 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1956 /* Stack, otherwise: ..., [result, ] newvalue. */
1957 goto finish_assignment;
1961 tree lhs = TREE_OPERAND (exp, 0);
1962 tree rhs = TREE_OPERAND (exp, 1);
1965 /* See if we can use the iinc instruction. */
1966 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1967 && ! TREE_STATIC (lhs)
1968 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1969 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1970 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1972 tree arg0 = TREE_OPERAND (rhs, 0);
1973 tree arg1 = TREE_OPERAND (rhs, 1);
1974 HOST_WIDE_INT min_value = -32768;
1975 HOST_WIDE_INT max_value = 32767;
1976 if (TREE_CODE (rhs) == MINUS_EXPR)
1981 else if (arg1 == lhs)
1984 arg1 = TREE_OPERAND (rhs, 0);
1986 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
1988 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
1989 value = TREE_INT_CST_LOW (arg1);
1990 if ((hi_value == 0 && value <= max_value)
1991 || (hi_value == -1 && value >= min_value))
1993 if (TREE_CODE (rhs) == MINUS_EXPR)
1995 emit_iinc (lhs, value, state);
1996 if (target != IGNORE_TARGET)
1997 emit_load (lhs, state);
2003 if (TREE_CODE (lhs) == COMPONENT_REF)
2005 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
2006 STACK_TARGET, state);
2009 else if (TREE_CODE (lhs) == ARRAY_REF)
2011 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
2012 STACK_TARGET, state);
2013 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
2014 STACK_TARGET, state);
2020 /* If the rhs is a binary expression and the left operand is
2021 `==' to the lhs then we have an OP= expression. In this
2022 case we must do some special processing. */
2023 if (TREE_CODE_CLASS (TREE_CODE (rhs)) == '2'
2024 && lhs == TREE_OPERAND (rhs, 0))
2026 if (TREE_CODE (lhs) == COMPONENT_REF)
2028 tree field = TREE_OPERAND (lhs, 1);
2029 if (! FIELD_STATIC (field))
2031 /* Duplicate the object reference so we can get
2033 emit_dup (TYPE_IS_WIDE (field) ? 2 : 1, 0, state);
2036 field_op (field, (FIELD_STATIC (field)
2041 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2043 else if (TREE_CODE (lhs) == VAR_DECL
2044 || TREE_CODE (lhs) == PARM_DECL)
2046 if (FIELD_STATIC (lhs))
2048 field_op (lhs, OPCODE_getstatic, state);
2049 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2052 emit_load (lhs, state);
2054 else if (TREE_CODE (lhs) == ARRAY_REF)
2056 /* Duplicate the array and index, which are on the
2057 stack, so that we can load the old value. */
2058 emit_dup (2, 0, state);
2060 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (lhs), 7);
2063 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2068 /* This function correctly handles the case where the LHS
2069 of a binary expression is NULL_TREE. */
2070 rhs = build2 (TREE_CODE (rhs), TREE_TYPE (rhs),
2071 NULL_TREE, TREE_OPERAND (rhs, 1));
2074 generate_bytecode_insns (rhs, STACK_TARGET, state);
2075 if (target != IGNORE_TARGET)
2076 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
2082 if (TREE_CODE (exp) == COMPONENT_REF)
2084 tree field = TREE_OPERAND (exp, 1);
2085 if (! FIELD_STATIC (field))
2088 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2091 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2093 else if (TREE_CODE (exp) == VAR_DECL
2094 || TREE_CODE (exp) == PARM_DECL)
2096 if (FIELD_STATIC (exp))
2098 field_op (exp, OPCODE_putstatic, state);
2099 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2102 emit_store (exp, state);
2104 else if (TREE_CODE (exp) == ARRAY_REF)
2106 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2109 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2115 jopcode = OPCODE_iadd;
2118 jopcode = OPCODE_isub;
2121 jopcode = OPCODE_imul;
2123 case TRUNC_DIV_EXPR:
2125 jopcode = OPCODE_idiv;
2127 case TRUNC_MOD_EXPR:
2128 jopcode = OPCODE_irem;
2130 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2131 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
2132 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2133 case TRUTH_AND_EXPR:
2134 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2136 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2137 case TRUTH_XOR_EXPR:
2138 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2141 tree arg0 = TREE_OPERAND (exp, 0);
2142 tree arg1 = TREE_OPERAND (exp, 1);
2143 jopcode += adjust_typed_op (type, 3);
2144 if (arg0 != NULL_TREE && operand_equal_p (arg0, arg1, 0))
2146 /* fold may (e.g) convert 2*x to x+x. */
2147 generate_bytecode_insns (arg0, target, state);
2148 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2152 /* ARG0 will be NULL_TREE if we're handling an `OP='
2153 expression. In this case the stack already holds the
2154 LHS. See the MODIFY_EXPR case. */
2155 if (arg0 != NULL_TREE)
2156 generate_bytecode_insns (arg0, target, state);
2157 if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2158 arg1 = convert (int_type_node, arg1);
2159 generate_bytecode_insns (arg1, target, state);
2161 /* For most binary operations, both operands and the result have the
2162 same type. Shift operations are different. Using arg1's type
2163 gets us the correct SP adjustment in all cases. */
2164 if (target == STACK_TARGET)
2165 emit_binop (jopcode, TREE_TYPE (arg1), state);
2168 case TRUTH_NOT_EXPR:
2170 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2171 if (target == STACK_TARGET)
2173 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2174 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2178 NOTE_PUSH (1 + is_long);
2179 OP1 (OPCODE_ixor + is_long);
2180 NOTE_POP (1 + is_long);
2184 jopcode = OPCODE_ineg;
2185 jopcode += adjust_typed_op (type, 3);
2186 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2187 if (target == STACK_TARGET)
2188 emit_unop (jopcode, type, state);
2190 case INSTANCEOF_EXPR:
2192 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2193 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2195 OP1 (OPCODE_instanceof);
2200 /* The first time through, the argument of the SAVE_EXPR will be
2201 something complex. Evaluate it, and replace the argument with
2202 a VAR_DECL that holds the result. */
2203 arg = TREE_OPERAND (exp, 0);
2204 if (TREE_CODE (arg) != VAR_DECL || DECL_NAME (arg))
2206 tree type = TREE_TYPE (exp);
2207 tree decl = build_decl (VAR_DECL, NULL_TREE, type);
2208 generate_bytecode_insns (arg, STACK_TARGET, state);
2209 localvar_alloc (decl, state);
2210 TREE_OPERAND (exp, 0) = decl;
2211 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1, 0, state);
2212 emit_store (decl, state);
2216 tree type = TREE_TYPE (exp);
2217 emit_load (arg, state);
2218 NOTE_PUSH (TYPE_IS_WIDE (type) ? 2 : 1);
2224 case FIX_TRUNC_EXPR:
2226 tree src = TREE_OPERAND (exp, 0);
2227 tree src_type = TREE_TYPE (src);
2228 tree dst_type = TREE_TYPE (exp);
2229 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2230 if (target == IGNORE_TARGET || src_type == dst_type)
2232 if (TREE_CODE (dst_type) == POINTER_TYPE)
2234 if (TREE_CODE (exp) == CONVERT_EXPR)
2236 int index = find_class_constant (&state->cpool,
2237 TREE_TYPE (dst_type));
2239 OP1 (OPCODE_checkcast);
2243 else /* Convert numeric types. */
2245 int wide_src = TYPE_PRECISION (src_type) > 32;
2246 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2247 NOTE_POP (1 + wide_src);
2249 if (TREE_CODE (dst_type) == REAL_TYPE)
2251 if (TREE_CODE (src_type) == REAL_TYPE)
2252 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2253 else if (TYPE_PRECISION (src_type) == 64)
2254 OP1 (OPCODE_l2f + wide_dst);
2256 OP1 (OPCODE_i2f + wide_dst);
2258 else /* Convert to integral type. */
2260 if (TREE_CODE (src_type) == REAL_TYPE)
2261 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2266 if (TYPE_PRECISION (dst_type) < 32)
2269 /* Already converted to int, if needed. */
2270 if (TYPE_PRECISION (dst_type) <= 8)
2272 else if (TYPE_UNSIGNED (dst_type))
2278 NOTE_PUSH (1 + wide_dst);
2285 tree try_clause = TREE_OPERAND (exp, 0);
2286 struct jcf_block *start_label = get_jcf_label_here (state);
2287 struct jcf_block *end_label; /* End of try clause. */
2288 struct jcf_block *finished_label = gen_jcf_label (state);
2289 tree clause = TREE_OPERAND (exp, 1);
2290 if (target != IGNORE_TARGET)
2292 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2293 end_label = get_jcf_label_here (state);
2294 if (end_label == start_label)
2296 if (CAN_COMPLETE_NORMALLY (try_clause))
2297 emit_goto (finished_label, state);
2298 while (clause != NULL_TREE)
2300 tree catch_clause = TREE_OPERAND (clause, 0);
2301 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2302 struct jcf_handler *handler = alloc_handler (start_label,
2304 if (exception_decl == NULL_TREE)
2305 handler->type = NULL_TREE;
2307 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2308 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2309 clause = TREE_CHAIN (clause);
2310 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2311 emit_goto (finished_label, state);
2313 define_jcf_label (finished_label, state);
2317 case TRY_FINALLY_EXPR:
2319 struct jcf_block *finished_label = NULL;
2320 struct jcf_block *finally_label, *start_label, *end_label;
2321 struct jcf_handler *handler;
2322 tree try_block = TREE_OPERAND (exp, 0);
2323 tree finally = TREE_OPERAND (exp, 1);
2324 tree return_link = NULL_TREE, exception_decl = NULL_TREE;
2326 tree exception_type;
2328 finally_label = gen_jcf_label (state);
2329 start_label = get_jcf_label_here (state);
2330 /* If the `finally' clause can complete normally, we emit it
2331 as a subroutine and let the other clauses call it via
2332 `jsr'. If it can't complete normally, then we simply emit
2333 `goto's directly to it. */
2334 if (CAN_COMPLETE_NORMALLY (finally))
2336 finally_label->pc = PENDING_CLEANUP_PC;
2337 finally_label->next = state->labeled_blocks;
2338 state->labeled_blocks = finally_label;
2339 state->num_finalizers++;
2342 generate_bytecode_insns (try_block, target, state);
2344 if (CAN_COMPLETE_NORMALLY (finally))
2346 if (state->labeled_blocks != finally_label)
2348 state->labeled_blocks = finally_label->next;
2350 end_label = get_jcf_label_here (state);
2352 if (end_label == start_label)
2354 state->num_finalizers--;
2355 define_jcf_label (finally_label, state);
2356 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2360 if (CAN_COMPLETE_NORMALLY (finally))
2362 return_link = build_decl (VAR_DECL, NULL_TREE,
2363 return_address_type_node);
2364 finished_label = gen_jcf_label (state);
2367 if (CAN_COMPLETE_NORMALLY (try_block))
2369 if (CAN_COMPLETE_NORMALLY (finally))
2371 emit_jsr (finally_label, state);
2372 emit_goto (finished_label, state);
2375 emit_goto (finally_label, state);
2378 /* Handle exceptions. */
2380 exception_type = build_pointer_type (throwable_type_node);
2381 if (CAN_COMPLETE_NORMALLY (finally))
2383 /* We're going to generate a subroutine, so we'll need to
2384 save and restore the exception around the `jsr'. */
2385 exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2386 localvar_alloc (return_link, state);
2388 handler = alloc_handler (start_label, end_label, state);
2389 handler->type = NULL_TREE;
2390 if (CAN_COMPLETE_NORMALLY (finally))
2392 localvar_alloc (exception_decl, state);
2394 emit_store (exception_decl, state);
2395 emit_jsr (finally_label, state);
2396 emit_load (exception_decl, state);
2398 OP1 (OPCODE_athrow);
2403 /* We're not generating a subroutine. In this case we can
2404 simply have the exception handler pop the exception and
2405 then fall through to the `finally' block. */
2407 emit_pop (1, state);
2411 /* The finally block. If we're generating a subroutine, first
2412 save return PC into return_link. Otherwise, just generate
2413 the code for the `finally' block. */
2414 define_jcf_label (finally_label, state);
2415 if (CAN_COMPLETE_NORMALLY (finally))
2418 emit_store (return_link, state);
2421 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2422 if (CAN_COMPLETE_NORMALLY (finally))
2424 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2425 maybe_free_localvar (exception_decl, state, 1);
2426 maybe_free_localvar (return_link, state, 1);
2427 define_jcf_label (finished_label, state);
2432 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2434 OP1 (OPCODE_athrow);
2436 case NEW_ARRAY_INIT:
2438 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2439 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2440 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2441 HOST_WIDE_INT length = java_array_type_length (array_type);
2442 if (target == IGNORE_TARGET)
2444 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2445 generate_bytecode_insns (TREE_VALUE (values), target, state);
2448 push_int_const (length, state);
2451 if (JPRIMITIVE_TYPE_P (element_type))
2453 int atype = encode_newarray_type (element_type);
2454 OP1 (OPCODE_newarray);
2459 int index = find_class_constant (&state->cpool,
2460 TREE_TYPE (element_type));
2461 OP1 (OPCODE_anewarray);
2465 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2466 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2468 int save_SP = state->code_SP;
2469 emit_dup (1, 0, state);
2470 push_int_const (offset, state);
2472 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2475 state->code_SP = save_SP;
2479 case JAVA_EXC_OBJ_EXPR:
2480 NOTE_PUSH (1); /* Pushed by exception system. */
2485 /* This copes with cases where fold() has created MIN or MAX
2486 from a conditional expression. */
2487 enum tree_code code = TREE_CODE (exp) == MIN_EXPR ? LT_EXPR : GT_EXPR;
2488 tree op0 = TREE_OPERAND (exp, 0);
2489 tree op1 = TREE_OPERAND (exp, 1);
2491 if (TREE_SIDE_EFFECTS (op0) || TREE_SIDE_EFFECTS (op1))
2493 x = build3 (COND_EXPR, TREE_TYPE (exp),
2494 build2 (code, boolean_type_node, op0, op1),
2496 generate_bytecode_insns (x, target, state);
2499 case NEW_CLASS_EXPR:
2501 tree class = TREE_TYPE (TREE_TYPE (exp));
2502 int need_result = target != IGNORE_TARGET;
2503 int index = find_class_constant (&state->cpool, class);
2509 NOTE_PUSH (1 + need_result);
2511 /* ... fall though ... */
2514 tree f = TREE_OPERAND (exp, 0);
2515 tree x = TREE_OPERAND (exp, 1);
2516 int save_SP = state->code_SP;
2518 if (TREE_CODE (f) == ADDR_EXPR)
2519 f = TREE_OPERAND (f, 0);
2520 if (f == soft_newarray_node)
2522 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2523 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2524 STACK_TARGET, state);
2526 OP1 (OPCODE_newarray);
2530 else if (f == soft_multianewarray_node)
2534 int index = find_class_constant (&state->cpool,
2535 TREE_TYPE (TREE_TYPE (exp)));
2536 x = TREE_CHAIN (x); /* Skip class argument. */
2537 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2538 for (idim = ndims; --idim >= 0; )
2541 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2544 OP1 (OPCODE_multianewarray);
2549 else if (f == soft_anewarray_node)
2551 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2552 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2553 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2555 OP1 (OPCODE_anewarray);
2559 else if (f == soft_monitorenter_node
2560 || f == soft_monitorexit_node
2563 if (f == soft_monitorenter_node)
2564 op = OPCODE_monitorenter;
2565 else if (f == soft_monitorexit_node)
2566 op = OPCODE_monitorexit;
2569 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2575 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2577 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2579 nargs = state->code_SP - save_SP;
2580 state->code_SP = save_SP;
2581 if (f == soft_fmod_node)
2588 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2589 NOTE_POP (1); /* Pop implicit this. */
2590 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2592 tree context = DECL_CONTEXT (f);
2593 int index, interface = 0;
2595 if (METHOD_STATIC (f))
2596 OP1 (OPCODE_invokestatic);
2597 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2598 || METHOD_PRIVATE (f))
2599 OP1 (OPCODE_invokespecial);
2602 if (CLASS_INTERFACE (TYPE_NAME (context)))
2604 tree arg1 = TREE_VALUE (TREE_OPERAND (exp, 1));
2605 context = TREE_TYPE (TREE_TYPE (arg1));
2606 if (CLASS_INTERFACE (TYPE_NAME (context)))
2610 OP1 (OPCODE_invokeinterface);
2612 OP1 (OPCODE_invokevirtual);
2614 index = find_methodref_with_class_index (&state->cpool, f, context);
2624 f = TREE_TYPE (TREE_TYPE (f));
2625 if (TREE_CODE (f) != VOID_TYPE)
2627 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2628 if (target == IGNORE_TARGET)
2629 emit_pop (size, state);
2639 error("internal error in generate_bytecode_insn - tree code not implemented: %s",
2640 tree_code_name [(int) TREE_CODE (exp)]);
/* Resolve branch targets and rewrite the generated bytecode.
   Pass 1 computes the final start pc of every basic block, applying two
   peephole optimizations (drop a goto that merely falls through to the
   next block; retarget a goto whose destination block begins with
   another goto) and widening any branch whose displacement does not fit
   in a signed 16-bit offset.  Pass 2 copies each block's chunk
   back-to-front, patching branch offsets, switch-alignment padding,
   and performing goto->goto_w / ifCOND->ifNCOND+goto_w conversion.
   NOTE(review): this view of the function is elided -- several interior
   lines (braces, statements) are not visible here, so the control
   structure shown is incomplete; confirm against the full file.  */
2645 perform_relocations (struct jcf_partial *state)
2647 struct jcf_block *block;
2648 struct jcf_relocation *reloc;
2652 /* Before we start, the pc field of each block is an upper bound on
2653 the block's start pc (it may be less, if previous blocks need less
2654 than their maximum).
2656 The minimum size of each block is in the block's chunk->size. */
2658 /* First, figure out the actual locations of each block. */
2661 for (block = state->blocks; block != NULL; block = block->next)
2663 int block_size = block->v.chunk->size;
2667 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2668 Assumes relocations are in reverse order. */
2669 reloc = block->u.relocations;
2670 while (reloc != NULL
2671 && reloc->kind == OPCODE_goto_w
2672 && reloc->label->pc == block->next->pc
2673 && reloc->offset + 2 == block_size)
/* Drop the relocation and shrink the block by the 3 bytes of the
   redundant goto instruction.  */
2675 reloc = reloc->next;
2676 block->u.relocations = reloc;
2677 block->v.chunk->size -= 3;
2682 /* Optimize GOTO L; ... L: GOTO X by changing the first goto to
2683 jump directly to X. We're careful here to avoid an infinite
2684 loop if the `goto's themselves form one. We do this
2685 optimization because we can generate a goto-to-goto for some
2686 try/finally blocks. */
2687 while (reloc != NULL
2688 && reloc->kind == OPCODE_goto_w
2689 && reloc->label != block
2690 && reloc->label->v.chunk->data != NULL
2691 && reloc->label->v.chunk->data[0] == OPCODE_goto)
2693 /* Find the reloc for the first instruction of the
2694 destination block. */
2695 struct jcf_relocation *first_reloc;
2696 for (first_reloc = reloc->label->u.relocations;
2698 first_reloc = first_reloc->next)
2700 if (first_reloc->offset == 1
2701 && first_reloc->kind == OPCODE_goto_w)
2703 reloc->label = first_reloc->label;
2708 /* If we didn't do anything, exit the loop. */
2709 if (first_reloc == NULL)
2713 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2715 if (reloc->kind == SWITCH_ALIGN_RELOC)
2717 /* We assume this is the first relocation in this block,
2718 so we know its final pc. */
2719 int where = pc + reloc->offset;
/* tableswitch/lookupswitch operands must be 4-byte aligned; PAD is
   the number of padding bytes required at WHERE.  */
2720 int pad = ((where + 3) & ~3) - where;
2723 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2725 int delta = reloc->label->pc - (pc + reloc->offset - 1);
/* Widening a plain goto/jsr adds 2 bytes (wide form); widening a
   conditional branch adds 5 (inverted ifNCOND plus a goto_w).  */
2726 int expand = reloc->kind > 0 ? 2 : 5;
2730 if (delta >= -32768 && delta <= 32767)
2736 block_size += expand;
/* Second pass: move each chunk into its final place, applying the
   relocations as we copy.  */
2742 for (block = state->blocks; block != NULL; block = block->next)
2744 struct chunk *chunk = block->v.chunk;
2745 int old_size = chunk->size;
2746 int next_pc = block->next == NULL ? pc : block->next->pc;
2747 int new_size = next_pc - block->pc;
2748 unsigned char *new_ptr;
2749 unsigned char *old_buffer = chunk->data;
2750 unsigned char *old_ptr = old_buffer + old_size;
2751 if (new_size != old_size)
2753 chunk->data = obstack_alloc (state->chunk_obstack, new_size);
2754 chunk->size = new_size;
2756 new_ptr = chunk->data + new_size;
2758 /* We do the relocations from back to front, because
2759 the relocations are in reverse order. */
2760 for (reloc = block->u.relocations; ; reloc = reloc->next)
2762 /* new_ptr and old_ptr point into the old and new buffers,
2763 respectively. (If no relocations cause the buffer to
2764 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2765 The bytes at higher address have been copied and relocations
2766 handled; those at lower addresses remain to process. */
2768 /* Lower old index of piece to be copied with no relocation.
2769 I.e. high index of the first piece that does need relocation. */
2770 int start = reloc == NULL ? 0
2771 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2772 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2774 : reloc->offset + 2;
2777 int n = (old_ptr - old_buffer) - start;
2781 memcpy (new_ptr, old_ptr, n);
2782 if (old_ptr == old_buffer)
2785 new_offset = new_ptr - chunk->data;
2786 new_offset -= (reloc->kind == -1 ? 2 : 4);
2787 if (reloc->kind == 0)
2790 value = GET_u4 (old_ptr);
2792 else if (reloc->kind == BLOCK_START_RELOC)
2798 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2800 int where = block->pc + reloc->offset;
2801 int pad = ((where + 3) & ~3) - where;
2809 value = GET_u2 (old_ptr);
/* Re-bias the branch displacement against the instruction's new pc.  */
2811 value += reloc->label->pc - (block->pc + new_offset);
/* Emit VALUE big-endian, writing backwards (low byte first).  */
2812 *--new_ptr = (unsigned char) value; value >>= 8;
2813 *--new_ptr = (unsigned char) value; value >>= 8;
2814 if (reloc->kind != -1)
2816 *--new_ptr = (unsigned char) value; value >>= 8;
2817 *--new_ptr = (unsigned char) value;
2819 if (reloc->kind > BLOCK_START_RELOC)
2821 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2823 *--new_ptr = reloc->kind;
2825 else if (reloc->kind < -1)
2827 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2829 *--new_ptr = OPCODE_goto_w;
2832 *--new_ptr = - reloc->kind;
2835 if (new_ptr != chunk->data)
2838 state->code_length = pc;
/* Initialize STATE for writing class files: remember the obstack WORK
   used for chunk allocation, clear the chunk list, and initialize the
   constant pool and the localvars/bytecode buffers.  */
2842 init_jcf_state (struct jcf_partial *state, struct obstack *work)
2844 state->chunk_obstack = work;
2845 state->first = state->chunk = NULL;
2846 CPOOL_INIT (&state->cpool);
2847 BUFFER_INIT (&state->localvars);
2848 BUFFER_INIT (&state->bytecode);
/* Reset STATE's per-method fields before generating code for METHOD:
   clear the block, label, local-variable and exception-handler lists
   and counters, and rewind the bytecode and localvars buffers so the
   same STATE can be reused for each method of a class.  */
2852 init_jcf_method (struct jcf_partial *state, tree method)
2854 state->current_method = method;
2855 state->blocks = state->last_block = NULL;
2856 state->linenumber_count = 0;
2857 state->first_lvar = state->last_lvar = NULL;
2858 state->lvar_count = 0;
2859 state->labeled_blocks = NULL;
2860 state->code_length = 0;
2861 BUFFER_RESET (&state->bytecode);
2862 BUFFER_RESET (&state->localvars);
2864 state->code_SP_max = 0;
2865 state->handlers = NULL;
2866 state->last_handler = NULL;
2867 state->num_handlers = 0;
2868 state->num_finalizers = 0;
2869 state->return_value_decl = NULL_TREE;
/* Release all storage held by STATE: finish the constant pool and free
   every chunk by releasing the chunk obstack back to the first
   allocation.  */
2873 release_jcf_state (struct jcf_partial *state)
2875 CPOOL_FINISH (&state->cpool);
2876 obstack_free (state->chunk_obstack, state->first);
2879 /* Get the access flags (modifiers) of a class (TYPE_DECL) to be used in the
2880 access_flags field of the class file header. */
/* Compute the access_flags word for CLASS (a TYPE_DECL) as it should
   appear in the class file header: force ACC_SUPER for non-interfaces,
   promote protected member classes to public at the top level, and mask
   off any flag not valid in a top-level access_flags field.  */
2883 get_classfile_modifiers (tree class)
2885 /* These are the flags which are valid class file modifiers.
2887 int valid_toplevel_class_flags = (ACC_PUBLIC | ACC_FINAL | ACC_SUPER |
2888 ACC_INTERFACE | ACC_ABSTRACT);
2889 int flags = get_access_flags (class);
2891 /* ACC_SUPER should always be set, except for interfaces. */
2892 if (! (flags & ACC_INTERFACE))
2895 /* A protected member class becomes public at the top level. */
2896 if (flags & ACC_PROTECTED)
2897 flags |= ACC_PUBLIC;
2899 /* Filter out flags that are not valid for a class or interface in the
2900 top-level access_flags field. */
2901 flags &= valid_toplevel_class_flags;
2906 /* Get the access flags (modifiers) for a method to be used in the class
/* Compute the access_flags word for method DECL as written in the
   class file.  Private constructors of inner classes are promoted to
   package-private (ACC_PRIVATE cleared) so the enclosing class can
   invoke them.  */
2910 get_method_access_flags (tree decl)
2912 int flags = get_access_flags (decl);
2914 /* Promote "private" inner-class constructors to package-private. */
2915 if (DECL_CONSTRUCTOR_P (decl)
2916 && INNER_CLASS_DECL_P (TYPE_NAME (DECL_CONTEXT (decl))))
2917 flags &= ~(ACC_PRIVATE);
2922 /* Generate and return a list of chunks containing the class CLAS
2923 in the .class file representation. The list can be written to a
2924 .class file using write_chunks. Allocate chunks from obstack WORK. */
2926 static GTY(()) tree SourceFile_node;
/* Build the complete .class file image for CLAS as a chain of chunks
   allocated from STATE's obstack, and return the head of that chain.
   Layout follows the JVM class file format: magic/version, constant
   pool (filled in last, once all constants are known), access flags,
   this/super/interfaces, fields, methods (with Code, LineNumberTable,
   LocalVariableTable and Exceptions attributes), then class-level
   attributes (SourceFile, gcj marker, InnerClasses, Deprecated).
   NOTE(review): this view of the function is elided -- many interior
   lines are missing, so the structure shown here is incomplete.  */
2927 static struct chunk *
2928 generate_classfile (tree clas, struct jcf_partial *state)
2930 struct chunk *cpool_chunk;
2931 const char *source_file, *s;
2934 char *fields_count_ptr;
2935 int fields_count = 0;
2936 char *methods_count_ptr;
2937 int methods_count = 0;
2940 = clas == object_type_node ? 0 : BINFO_N_BASE_BINFOS (TYPE_BINFO (clas));
2942 ptr = append_chunk (NULL, 8, state);
2943 PUT4 (0xCafeBabe); /* Magic number */
2944 PUT2 (3); /* Minor version */
2945 PUT2 (45); /* Major version */
/* Reserve an empty chunk for the constant pool; its contents are
   generated at the very end, once every constant has been interned.  */
2947 append_chunk (NULL, 0, state);
2948 cpool_chunk = state->chunk;
2950 /* Next allocate the chunk containing access_flags through fields_count. */
2951 if (clas == object_type_node)
2954 i = 8 + 2 * total_supers;
2955 ptr = append_chunk (NULL, i, state);
2956 i = get_classfile_modifiers (TYPE_NAME (clas));
2957 PUT2 (i); /* access_flags */
2958 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2959 if (clas == object_type_node)
2961 PUT2(0); /* super_class */
2962 PUT2(0); /* interfaces_count */
2966 tree binfo = TYPE_BINFO (clas);
2967 tree base_binfo = BINFO_BASE_BINFO (binfo, 0);
2968 int j = find_class_constant (&state->cpool, BINFO_TYPE (base_binfo));
2970 PUT2 (j); /* super_class */
2971 PUT2 (total_supers - 1); /* interfaces_count */
2972 for (i = 1; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2974 j = find_class_constant (&state->cpool, BINFO_TYPE (base_binfo));
/* Remember where fields_count goes; it is back-patched after the
   field loop below has counted the emitted fields.  */
2978 fields_count_ptr = ptr;
2980 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
2982 int have_value, attr_count = 0;
/* Skip compiler-generated fields with no source-level name.  */
2983 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
2985 ptr = append_chunk (NULL, 8, state);
2986 i = get_access_flags (part); PUT2 (i);
2987 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
2988 i = find_utf8_constant (&state->cpool,
2989 build_java_signature (TREE_TYPE (part)));
/* A ConstantValue attribute is emitted only for static final fields
   of primitive or String type with a compile-time constant value.  */
2991 have_value = DECL_INITIAL (part) != NULL_TREE
2992 && FIELD_STATIC (part) && CONSTANT_VALUE_P (DECL_INITIAL (part))
2993 && FIELD_FINAL (part)
2994 && (JPRIMITIVE_TYPE_P (TREE_TYPE (part))
2995 || TREE_TYPE (part) == string_ptr_type_node);
2999 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
3000 || FIELD_SYNTHETIC (part))
3002 if (FIELD_DEPRECATED (part))
3005 PUT2 (attr_count); /* attributes_count */
3008 tree init = DECL_INITIAL (part);
3009 static tree ConstantValue_node = NULL_TREE;
3010 if (TREE_TYPE (part) != TREE_TYPE (init))
3011 fatal_error ("field initializer type mismatch");
3012 ptr = append_chunk (NULL, 8, state);
3013 if (ConstantValue_node == NULL_TREE)
3014 ConstantValue_node = get_identifier ("ConstantValue");
3015 i = find_utf8_constant (&state->cpool, ConstantValue_node);
3016 PUT2 (i); /* attribute_name_index */
3017 PUT4 (2); /* attribute_length */
3018 i = find_constant_index (init, state); PUT2 (i);
3020 /* Emit the "Synthetic" attribute for val$<x> and this$<n>
3021 fields and other fields which need it. */
3022 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
3023 || FIELD_SYNTHETIC (part))
3024 ptr = append_synthetic_attribute (state);
3025 if (FIELD_DEPRECATED (part))
3026 append_deprecated_attribute (state);
/* Back-patch the fields_count written earlier.  */
3029 ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
3031 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
3034 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
3036 struct jcf_block *block;
3037 tree function_body = DECL_FUNCTION_BODY (part);
3038 tree body = function_body == NULL_TREE ? NULL_TREE
3039 : BLOCK_EXPR_BODY (function_body);
3040 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
3042 tree type = TREE_TYPE (part);
3043 tree save_function = current_function_decl;
3044 int synthetic_p = 0;
3046 /* Invisible Miranda methods shouldn't end up in the .class
3048 if (METHOD_INVISIBLE (part))
3051 current_function_decl = part;
3052 ptr = append_chunk (NULL, 8, state);
3053 i = get_method_access_flags (part); PUT2 (i);
3054 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3055 i = find_utf8_constant (&state->cpool, build_java_signature (type));
/* attributes_count: one for Code if there is a body, one for
   Exceptions if there is a throws list.  */
3057 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
3059 /* Make room for the Synthetic attribute (of zero length.) */
3060 if (DECL_FINIT_P (part)
3061 || DECL_INSTINIT_P (part)
3062 || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
3063 || TYPE_DOT_CLASS (clas) == part)
3068 /* Make room for Deprecated attribute. */
3069 if (METHOD_DEPRECATED (part))
3072 PUT2 (i); /* attributes_count */
3075 ptr = append_synthetic_attribute (state);
3077 if (body != NULL_TREE)
3079 int code_attributes_count = 0;
3080 static tree Code_node = NULL_TREE;
3083 struct jcf_handler *handler;
3084 if (Code_node == NULL_TREE)
3085 Code_node = get_identifier ("Code");
3086 ptr = append_chunk (NULL, 14, state);
3087 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
/* Generate the method's bytecode into STATE, then resolve branches.  */
3089 init_jcf_method (state, part);
3090 get_jcf_label_here (state); /* Force a first block. */
3091 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3092 localvar_alloc (t, state);
3093 state->num_jsrs = 0;
3094 generate_bytecode_insns (body, IGNORE_TARGET, state);
3095 if (CAN_COMPLETE_NORMALLY (body))
3097 if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
3100 OP1 (OPCODE_return);
3102 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3103 maybe_free_localvar (t, state, 1);
3104 if (state->return_value_decl != NULL_TREE)
3105 maybe_free_localvar (state->return_value_decl, state, 1);
3106 finish_jcf_block (state);
3107 perform_relocations (state);
/* Compute the Code attribute_length: fixed header + code + exception
   table, plus any LineNumberTable / LocalVariableTable attributes.  */
3110 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
3111 if (state->linenumber_count > 0)
3113 code_attributes_count++;
3114 i += 8 + 4 * state->linenumber_count;
3116 if (state->lvar_count > 0)
3118 code_attributes_count++;
3119 i += 8 + 10 * state->lvar_count;
3121 UNSAFE_PUT4 (i); /* attribute_length */
3122 UNSAFE_PUT2 (state->code_SP_max); /* max_stack */
3123 UNSAFE_PUT2 (localvar_max); /* max_locals */
3124 UNSAFE_PUT4 (state->code_length);
3126 /* Emit the exception table. */
3127 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
3128 PUT2 (state->num_handlers); /* exception_table_length */
3129 handler = state->handlers;
3130 for (; handler != NULL; handler = handler->next)
3133 PUT2 (handler->start_label->pc);
3134 PUT2 (handler->end_label->pc);
3135 PUT2 (handler->handler_label->pc);
/* A NULL type means a catch-all (finally) handler: catch_type 0.  */
3136 if (handler->type == NULL_TREE)
3139 type_index = find_class_constant (&state->cpool,
3144 ptr = append_chunk (NULL, 2, state);
3145 PUT2 (code_attributes_count);
3147 /* Write the LineNumberTable attribute. */
3148 if (state->linenumber_count > 0)
3150 static tree LineNumberTable_node = NULL_TREE;
3151 ptr = append_chunk (NULL,
3152 8 + 4 * state->linenumber_count, state);
3153 if (LineNumberTable_node == NULL_TREE)
3154 LineNumberTable_node = get_identifier ("LineNumberTable");
3155 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3156 PUT2 (i); /* attribute_name_index */
3157 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
3158 i = state->linenumber_count; PUT2 (i);
3159 for (block = state->blocks; block != NULL; block = block->next)
3161 int line = block->linenumber;
3170 /* Write the LocalVariableTable attribute. */
3171 if (state->lvar_count > 0)
3173 static tree LocalVariableTable_node = NULL_TREE;
3174 struct localvar_info *lvar = state->first_lvar;
3175 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3176 if (LocalVariableTable_node == NULL_TREE)
3177 LocalVariableTable_node = get_identifier("LocalVariableTable");
3178 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3179 PUT2 (i); /* attribute_name_index */
3180 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
3181 i = state->lvar_count; PUT2 (i);
3182 for ( ; lvar != NULL; lvar = lvar->next)
3184 tree name = DECL_NAME (lvar->decl);
3185 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
3186 i = lvar->start_label->pc; PUT2 (i);
/* length = end pc minus start pc of the variable's live range.  */
3187 i = lvar->end_label->pc - i; PUT2 (i);
3188 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3189 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
3190 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
/* Write the Exceptions attribute for a declared throws list.  */
3194 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3196 tree t = DECL_FUNCTION_THROWS (part);
3197 int throws_count = list_length (t);
3198 static tree Exceptions_node = NULL_TREE;
3199 if (Exceptions_node == NULL_TREE)
3200 Exceptions_node = get_identifier ("Exceptions");
3201 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3202 i = find_utf8_constant (&state->cpool, Exceptions_node);
3203 PUT2 (i); /* attribute_name_index */
3204 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
3205 i = throws_count; PUT2 (i);
3206 for (; t != NULL_TREE; t = TREE_CHAIN (t))
3208 i = find_class_constant (&state->cpool, TREE_VALUE (t));
3213 if (METHOD_DEPRECATED (part))
3214 append_deprecated_attribute (state);
3217 current_function_decl = save_function;
/* Back-patch the methods_count written earlier.  */
3219 ptr = methods_count_ptr; UNSAFE_PUT2 (methods_count);
/* Strip any directory components from the source file name.  */
3221 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3222 for (s = source_file; ; s++)
3227 if (ch == '/' || ch == '\\')
3230 ptr = append_chunk (NULL, 10, state);
3232 i = 1; /* Source file always exists as an attribute */
3233 if (INNER_CLASS_TYPE_P (clas) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas)))
3235 if (clas == object_type_node)
3237 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3240 PUT2 (i); /* attributes_count */
3242 /* generate the SourceFile attribute. */
3243 if (SourceFile_node == NULL_TREE)
3245 SourceFile_node = get_identifier ("SourceFile");
3248 i = find_utf8_constant (&state->cpool, SourceFile_node);
3249 PUT2 (i); /* attribute_name_index */
3251 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3253 append_gcj_attribute (state, clas);
3254 append_innerclasses_attribute (state, clas);
3255 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3256 append_deprecated_attribute (state);
3258 /* Now finally generate the contents of the constant pool chunk. */
3259 i = count_constant_pool_bytes (&state->cpool);
3260 ptr = obstack_alloc (state->chunk_obstack, i);
3261 cpool_chunk->data = ptr;
3262 cpool_chunk->size = i;
3263 write_constant_pool (&state->cpool, ptr, i);
3264 return state->first;
3267 static GTY(()) tree Synthetic_node;
/* Append a zero-length "Synthetic" attribute (name index + length 0)
   to STATE's chunk list and return a pointer past what was written.
   The identifier node is cached across calls in Synthetic_node.  */
3268 static unsigned char *
3269 append_synthetic_attribute (struct jcf_partial *state)
3271 unsigned char *ptr = append_chunk (NULL, 6, state);
3274 if (Synthetic_node == NULL_TREE)
3276 Synthetic_node = get_identifier ("Synthetic");
3278 i = find_utf8_constant (&state->cpool, Synthetic_node);
3279 PUT2 (i); /* Attribute string index */
3280 PUT4 (0); /* Attribute length */
/* Append a zero-length "Deprecated" attribute (name index + length 0)
   to STATE's chunk list.  */
3286 append_deprecated_attribute (struct jcf_partial *state)
3288 unsigned char *ptr = append_chunk (NULL, 6, state);
3291 i = find_utf8_constant (&state->cpool, get_identifier ("Deprecated"));
3292 PUT2 (i); /* Attribute string index */
3293 PUT4 (0); /* Attribute length */
/* Append the zero-length "gnu.gcj.gcj-compiled" marker attribute for
   CLASS, which tags the class file as produced by gcj.  Nothing is
   emitted for java.lang.Object itself (the CLASS != object_type_node
   guard below).  */
3297 append_gcj_attribute (struct jcf_partial *state, tree class)
3302 if (class != object_type_node)
3305 ptr = append_chunk (NULL, 6, state); /* 2+4 */
3306 i = find_utf8_constant (&state->cpool,
3307 get_identifier ("gnu.gcj.gcj-compiled"));
3308 PUT2 (i); /* Attribute string index */
3309 PUT4 (0); /* Attribute length */
3312 static tree InnerClasses_node;
/* Append an "InnerClasses" attribute describing every inner class
   visible from CLASS: CLASS itself and each of its enclosing classes
   (walking outward while INNER_CLASS_TYPE_P), plus the classes listed
   in DECL_INNER_CLASS_LIST.  The attribute length and entry count are
   written as zero placeholders and back-patched at the end.  Does
   nothing if CLASS is neither an inner class nor declares any.  */
3314 append_innerclasses_attribute (struct jcf_partial *state, tree class)
3316 tree orig_decl = TYPE_NAME (class);
3319 unsigned char *ptr, *length_marker, *number_marker;
3321 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3324 ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3326 if (InnerClasses_node == NULL_TREE)
3328 InnerClasses_node = get_identifier ("InnerClasses");
3330 i = find_utf8_constant (&state->cpool, InnerClasses_node);
3332 length_marker = ptr; PUT4 (0); /* length, to be later patched */
3333 number_marker = ptr; PUT2 (0); /* number of classes, tblp */
3335 /* Generate the entries: all inner classes visible from the one we
3336 process: itself, up and down. */
3337 while (class && INNER_CLASS_TYPE_P (class))
3341 decl = TYPE_NAME (class);
/* Extract the simple (inner) name: scan back from the end of the
   mangled name to the character after the last `$'.  */
3342 n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3343 IDENTIFIER_LENGTH (DECL_NAME (decl));
3345 while (n[-1] != '$')
3347 append_innerclasses_attribute_entry (state, decl, get_identifier (n));
3350 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
3354 for (current = DECL_INNER_CLASS_LIST (decl);
3355 current; current = TREE_CHAIN (current))
3357 append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3358 TREE_VALUE (current));
/* Back-patch attribute_length (8 bytes per entry + 2 for the count)
   and number_of_classes.  */
3362 ptr = length_marker; PUT4 (8*length+2);
3363 ptr = number_marker; PUT2 (length);
/* Write one 8-byte InnerClasses entry for DECL with inner name NAME:
   inner_class_info_index, outer_class_info_index, inner_name_index and
   inner_class_access_flags.  For anonymous classes the outer class and
   inner name indexes stay zero, matching Sun's implementation and the
   JVM spec's requirement for anonymous classes.  */
3367 append_innerclasses_attribute_entry (struct jcf_partial *state,
3368 tree decl, tree name)
3371 int ocii = 0, ini = 0;
3372 unsigned char *ptr = append_chunk (NULL, 8, state);
3374 icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3376 /* Sun's implementation seems to generate ocii to 0 for inner
3377 classes (which aren't considered members of the class they're
3378 in.) The specs are saying that if the class is anonymous,
3379 inner_name_index must be zero. */
3380 if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl)))
3382 ocii = find_class_constant (&state->cpool,
3383 TREE_TYPE (DECL_CONTEXT (decl)));
3384 ini = find_utf8_constant (&state->cpool, name);
3386 icaf = get_access_flags (decl);
3388 PUT2 (icii); PUT2 (ocii); PUT2 (ini); PUT2 (icaf);
/* Build and return (malloc'd) the output .class file name for CLAS.
   The class name is converted to a path by substituting the directory
   separator for `.'.  If jcf_write_base_directory is NULL the file is
   placed next to the .java source (any package subpath stripped);
   otherwise it is placed under the base directory, creating missing
   subdirectories along the way.
   NOTE(review): this view of the function is elided; several interior
   lines are missing here.  */
3392 make_class_file_name (tree clas)
3394 const char *dname, *cname, *slash;
3399 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3400 "", '.', DIR_SEPARATOR,
3402 if (jcf_write_base_directory == NULL)
3404 /* Make sure we put the class file into the .java file's
3405 directory, and not into some subdirectory thereof. */
3407 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3408 slash = strrchr (dname, DIR_SEPARATOR);
3409 #ifdef DIR_SEPARATOR_2
3411 slash = strrchr (dname, DIR_SEPARATOR_2);
3417 sep = DIR_SEPARATOR;
3422 t = strrchr (cname, DIR_SEPARATOR);
3430 dname = jcf_write_base_directory;
3432 s = strrchr (dname, DIR_SEPARATOR);
3433 #ifdef DIR_SEPARATOR_2
3435 s = strrchr (dname, DIR_SEPARATOR_2);
3440 sep = DIR_SEPARATOR;
3442 slash = dname + strlen (dname);
/* Assemble "<dir><sep><class path>" into a freshly malloc'd buffer.  */
3445 r = xmalloc (slash - dname + strlen (cname) + 2);
3446 strncpy (r, dname, slash - dname);
3447 r[slash - dname] = sep;
3448 strcpy (&r[slash - dname + 1], cname);
3450 /* We try to make new directories when we need them. We only do
3451 this for directories which "might not" exist. For instance, we
3452 assume the `-d' directory exists, but we don't assume that any
3453 subdirectory below it exists. It might be worthwhile to keep
3454 track of which directories we've created to avoid gratuitous
3456 dname = r + (slash - dname) + 1;
3459 char *s = strchr (dname, sep);
3463 /* Try to make directory if it doesn't already exist. */
3464 if (stat (r, &sb) == -1
3465 && mkdir (r, 0755) == -1
3466 /* The directory might have been made by another process. */
3468 fatal_error ("can't create directory %s: %m", r);
3471 /* Skip consecutive separators. */
3472 for (dname = s + 1; *dname && *dname == sep; ++dname)
3479 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3480 The output .class file name is make_class_file_name(CLAS). */
/* Write out class CLAS (a RECORD_TYPE) as a .class file named by
   make_class_file_name.  The file is written to a ".tmp" name first
   and then renamed into place, so concurrent compiler instances never
   observe a partially written class file.  */
3483 write_classfile (tree clas)
3485 struct obstack *work = &temporary_obstack;
3486 struct jcf_partial state[1];
3487 char *class_file_name = make_class_file_name (clas);
3488 struct chunk *chunks;
3490 if (class_file_name != NULL)
3493 char *temporary_file_name;
3495 /* The .class file is initially written to a ".tmp" file so that
3496 if multiple instances of the compiler are running at once
3497 they do not see partially formed class files. */
3498 temporary_file_name = concat (class_file_name, ".tmp", NULL);
3499 stream = fopen (temporary_file_name, "wb");
3501 fatal_error ("can't open %s for writing: %m", temporary_file_name);
3503 jcf_dependency_add_target (class_file_name);
3504 init_jcf_state (state, work);
3505 chunks = generate_classfile (clas, state);
3506 write_chunks (stream, chunks);
/* fclose also flushes; a failure here means the data may be lost.  */
3507 if (fclose (stream))
3508 fatal_error ("error closing %s: %m", temporary_file_name);
3510 /* If a file named by the string pointed to by `new' exists
3511 prior to the call to the `rename' function, the behavior
3512 is implementation-defined. ISO 9899-1990 7.9.4.2.
3514 For example, on Win32 with MSVCRT, it is an error. */
3516 unlink (class_file_name);
3518 if (rename (temporary_file_name, class_file_name) == -1)
3520 remove (temporary_file_name);
3521 fatal_error ("can't create %s: %m", class_file_name);
3523 free (temporary_file_name);
3524 free (class_file_name);
3526 release_jcf_state (state);
3530 string concatenation
3531 synchronized statement
3534 #include "gt-java-jcf-write.h"