1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
27 #include "coretypes.h"
32 #include "java-tree.h"
36 #include "java-opcodes.h"
37 #include "parse.h" /* for BLOCK_EXPR_BODY */
43 extern struct obstack temporary_obstack;
45 /* Base directory in which `.class' files should be written.
46 NULL means to put the file into the same directory as the
47 corresponding .java file. */
48 const char *jcf_write_base_directory = NULL;
/* Make sure bytecode.data is big enough for at least N more bytes.
   Assumes a local `state` (struct jcf_partial *) is in scope.  */

#define RESERVE(N) \
  do { CHECK_OP(state); \
    if (state->bytecode.ptr + (N) > state->bytecode.limit) \
      buffer_grow (&state->bytecode, N); } while (0)

/* Add a 1-byte instruction/operand I to bytecode.data,
   assuming space has already been RESERVE'd. */

#define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))

/* Like OP1, but I is a 2-byte big endian integer. */

#define OP2(I) \
  do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)

/* Like OP1, but I is a 4-byte big endian integer. */

#define OP4(I) \
  do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
       OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)

/* Macro to call each time we push I words on the JVM stack.
   Tracks the high-water mark needed for the Code attribute's max_stack.  */

#define NOTE_PUSH(I) \
  do { state->code_SP += (I); \
    if (state->code_SP > state->code_SP_max) \
      state->code_SP_max = state->code_SP; } while (0)

/* Macro to call each time we pop I words from the JVM stack.
   A negative stack depth indicates a code-generation bug.  */

#define NOTE_POP(I) \
  do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
/* A chunk or segment of a .class file. */

struct chunk
{
  /* The next segment of this .class file. */
  struct chunk *next;

  /* The actual data in this segment to be written to the .class file. */
  unsigned char *data;

  /* The size of the segment to be written to the .class file. */
  int size;
};
99 #define PENDING_CLEANUP_PC (-3)
100 #define PENDING_EXIT_PC (-2)
101 #define UNDEFINED_PC (-1)
103 /* Each "block" represents a label plus the bytecode instructions following.
104 There may be branches out of the block, but no incoming jumps, except
105 to the beginning of the block.
107 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
108 associated code yet), but it is an undefined label.
113 /* For blocks that that are defined, the next block (in pc order).
114 For blocks that are not-yet-defined the end label of a LABELED_BLOCK_EXPR
115 or a cleanup expression (from a TRY_FINALLY_EXPR),
116 this is the next (outer) such end label, in a stack headed by
117 labeled_blocks in jcf_partial. */
118 struct jcf_block *next;
120 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
121 pc is PENDING_EXIT_PC.
122 In the not-yet-defined end label for pending cleanup subroutine,
123 pc is PENDING_CLEANUP_PC.
124 For other not-yet-defined labels, pc is UNDEFINED_PC.
126 If the label has been defined:
127 Until perform_relocations is finished, this is the maximum possible
128 value of the bytecode offset at the beginning of this block.
129 After perform_relocations, it is the actual offset (pc). */
134 /* After finish_jcf_block is called, the actual instructions
135 contained in this block. Before that NULL, and the instructions
136 are in state->bytecode. */
140 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
141 covered by the cleanup. */
142 struct jcf_block *start_label;
146 /* Set of relocations (in reverse offset order) for this block. */
147 struct jcf_relocation *relocations;
149 /* If this block is that of the not-yet-defined end label of
150 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
151 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
156 /* A "relocation" type for the 0-3 bytes of padding at the start
157 of a tableswitch or a lookupswitch. */
158 #define SWITCH_ALIGN_RELOC 4
160 /* A relocation type for the labels in a tableswitch or a lookupswitch;
161 these are relative to the start of the instruction, but (due to
162 th 0-3 bytes of padding), we don't know the offset before relocation. */
163 #define BLOCK_START_RELOC 1
165 struct jcf_relocation
167 /* Next relocation for the current jcf_block. */
168 struct jcf_relocation *next;
170 /* The (byte) offset within the current block that needs to be relocated. */
171 HOST_WIDE_INT offset;
173 /* 0 if offset is a 4-byte relative offset.
174 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
175 for proper alignment in tableswitch/lookupswitch instructions.
176 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
177 to the start of the containing block.
178 -1 if offset is a 2-byte relative offset.
179 < -1 if offset is the address of an instruction with a 2-byte offset
180 that does not have a corresponding 4-byte offset version, in which
181 case the absolute value of kind is the inverted opcode.
182 > 4 if offset is the address of an instruction (such as jsr) with a
183 2-byte offset that does have a corresponding 4-byte offset version,
184 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
187 /* The label the relocation wants to actually transfer to. */
188 struct jcf_block *label;
191 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
192 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
194 /* State for single catch clause. */
198 struct jcf_handler *next;
200 struct jcf_block *start_label;
201 struct jcf_block *end_label;
202 struct jcf_block *handler_label;
204 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
208 /* State for the current switch statement. */
210 struct jcf_switch_state
212 struct jcf_switch_state *prev;
213 struct jcf_block *default_label;
215 struct jcf_relocation *cases;
217 HOST_WIDE_INT min_case, max_case;
220 /* This structure is used to contain the various pieces that will
221 become a .class file. */
227 struct obstack *chunk_obstack;
230 /* List of basic blocks for the current method. */
231 struct jcf_block *blocks;
232 struct jcf_block *last_block;
234 struct localvar_info *first_lvar;
235 struct localvar_info *last_lvar;
240 int linenumber_count;
242 /* Until perform_relocations, this is a upper bound on the number
243 of bytes (so far) in the instructions for the current method. */
246 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
247 struct jcf_block *labeled_blocks;
249 /* The current stack size (stack pointer) in the current method. */
252 /* The largest extent of stack size (stack pointer) in the current method. */
255 /* Contains a mapping from local var slot number to localvar_info. */
256 struct buffer localvars;
258 /* The buffer allocated for bytecode for the current jcf_block. */
259 struct buffer bytecode;
261 /* Chain of exception handlers for the current method. */
262 struct jcf_handler *handlers;
264 /* Last element in handlers chain. */
265 struct jcf_handler *last_handler;
267 /* Number of exception handlers for the current method. */
270 /* Number of finalizers we are currently nested within. */
273 /* If non-NULL, use this for the return value. */
274 tree return_value_decl;
276 /* Information about the current switch statement. */
277 struct jcf_switch_state *sw_state;
279 /* The count of jsr instructions that have been emitted. */
283 static void generate_bytecode_insns (tree, int, struct jcf_partial *);
284 static struct chunk * alloc_chunk (struct chunk *, unsigned char *,
285 int, struct obstack *);
286 static unsigned char * append_chunk (unsigned char *, int,
287 struct jcf_partial *);
288 static void append_chunk_copy (unsigned char *, int, struct jcf_partial *);
289 static struct jcf_block * gen_jcf_label (struct jcf_partial *);
290 static void finish_jcf_block (struct jcf_partial *);
291 static void define_jcf_label (struct jcf_block *, struct jcf_partial *);
292 static struct jcf_block * get_jcf_label_here (struct jcf_partial *);
293 static void put_linenumber (int, struct jcf_partial *);
294 static void localvar_alloc (tree, struct jcf_partial *);
295 static void maybe_free_localvar (tree, struct jcf_partial *, int);
296 static int get_access_flags (tree);
297 static void write_chunks (FILE *, struct chunk *);
298 static int adjust_typed_op (tree, int);
299 static void generate_bytecode_conditional (tree, struct jcf_block *,
300 struct jcf_block *, int,
301 struct jcf_partial *);
302 static void generate_bytecode_return (tree, struct jcf_partial *);
303 static void perform_relocations (struct jcf_partial *);
304 static void init_jcf_state (struct jcf_partial *, struct obstack *);
305 static void init_jcf_method (struct jcf_partial *, tree);
306 static void release_jcf_state (struct jcf_partial *);
307 static int get_classfile_modifiers (tree class);
308 static struct chunk * generate_classfile (tree, struct jcf_partial *);
309 static struct jcf_handler *alloc_handler (struct jcf_block *,
311 struct jcf_partial *);
312 static void emit_iinc (tree, HOST_WIDE_INT, struct jcf_partial *);
313 static void emit_reloc (HOST_WIDE_INT, int, struct jcf_block *,
314 struct jcf_partial *);
315 static void push_constant1 (HOST_WIDE_INT, struct jcf_partial *);
316 static void push_constant2 (HOST_WIDE_INT, struct jcf_partial *);
317 static void push_int_const (HOST_WIDE_INT, struct jcf_partial *);
318 static int find_constant_wide (HOST_WIDE_INT, HOST_WIDE_INT,
319 struct jcf_partial *);
320 static void push_long_const (HOST_WIDE_INT, HOST_WIDE_INT,
321 struct jcf_partial *);
322 static int find_constant_index (tree, struct jcf_partial *);
323 static void push_long_const (HOST_WIDE_INT, HOST_WIDE_INT,
324 struct jcf_partial *);
325 static void field_op (tree, int, struct jcf_partial *);
326 static void maybe_wide (int, int, struct jcf_partial *);
327 static void emit_dup (int, int, struct jcf_partial *);
328 static void emit_pop (int, struct jcf_partial *);
329 static void emit_load_or_store (tree, int, struct jcf_partial *);
330 static void emit_load (tree, struct jcf_partial *);
331 static void emit_store (tree, struct jcf_partial *);
332 static void emit_unop (enum java_opcode, tree, struct jcf_partial *);
333 static void emit_binop (enum java_opcode, tree, struct jcf_partial *);
334 static void emit_reloc (HOST_WIDE_INT, int, struct jcf_block *,
335 struct jcf_partial *);
336 static void emit_switch_reloc (struct jcf_block *, struct jcf_partial *);
337 static void emit_case_reloc (struct jcf_relocation *, struct jcf_partial *);
338 static void emit_if (struct jcf_block *, int, int, struct jcf_partial *);
339 static void emit_goto (struct jcf_block *, struct jcf_partial *);
340 static void emit_jsr (struct jcf_block *, struct jcf_partial *);
341 static void call_cleanups (struct jcf_block *, struct jcf_partial *);
342 static char *make_class_file_name (tree);
343 static unsigned char *append_synthetic_attribute (struct jcf_partial *);
344 static void append_deprecated_attribute (struct jcf_partial *);
345 static void append_innerclasses_attribute (struct jcf_partial *, tree);
346 static void append_innerclasses_attribute_entry (struct jcf_partial *, tree, tree);
347 static void append_gcj_attribute (struct jcf_partial *, tree);
/* Utility macros for appending (big-endian) data to a buffer.
   We assume a local variable 'ptr' points into where we want to
   write next, and we assume enough space has been allocated. */

#ifdef ENABLE_JC1_CHECKING
static int CHECK_PUT (void *, struct jcf_partial *, int);

/* Abort if writing I bytes at PTR would run outside the current chunk.  */
static int
CHECK_PUT (void *ptr, struct jcf_partial *state, int i)
{
  if ((unsigned char *) ptr < state->chunk->data
      || (unsigned char *) ptr + i > state->chunk->data + state->chunk->size)
    abort ();

  return 0;
}
#else
#define CHECK_PUT(PTR, STATE, I) ((void)0)
#endif

#define PUT1(X)  (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
#define PUT2(X)  (PUT1((X) >> 8), PUT1((X) & 0xFF))
#define PUT4(X)  (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
#define PUTN(P, N)  (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))

/* There are some cases below where CHECK_PUT is guaranteed to fail.
   Use the following macros in those specific cases. */
#define UNSAFE_PUT1(X)  (*ptr++ = (X))
#define UNSAFE_PUT2(X)  (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
#define UNSAFE_PUT4(X)  (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
#define UNSAFE_PUTN(P, N)  (memcpy(ptr, P, N), ptr += (N))
382 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
383 Set the data and size fields to DATA and SIZE, respectively.
384 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
386 static struct chunk *
387 alloc_chunk (struct chunk *last, unsigned char *data,
388 int size, struct obstack *work)
390 struct chunk *chunk = obstack_alloc (work, sizeof(struct chunk));
392 if (data == NULL && size > 0)
393 data = obstack_alloc (work, size);
#ifdef ENABLE_JC1_CHECKING
static int CHECK_OP (struct jcf_partial *);

/* Abort if the bytecode buffer has been overrun (i.e. a missing RESERVE).  */
static int
CHECK_OP (struct jcf_partial *state)
{
  if (state->bytecode.ptr > state->bytecode.limit)
    abort ();

  return 0;
}
#else
#define CHECK_OP(STATE) ((void) 0)
#endif
418 static unsigned char *
419 append_chunk (unsigned char *data, int size, struct jcf_partial *state)
421 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
422 if (state->first == NULL)
423 state->first = state->chunk;
424 return state->chunk->data;
/* Append a chunk containing a private copy of the SIZE bytes at DATA.  */
static void
append_chunk_copy (unsigned char *data, int size, struct jcf_partial *state)
{
  unsigned char *ptr = append_chunk (NULL, size, state);
  memcpy (ptr, data, size);
}
434 static struct jcf_block *
435 gen_jcf_label (struct jcf_partial *state)
437 struct jcf_block *block
438 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
440 block->linenumber = -1;
441 block->pc = UNDEFINED_PC;
446 finish_jcf_block (struct jcf_partial *state)
448 struct jcf_block *block = state->last_block;
449 struct jcf_relocation *reloc;
450 int code_length = BUFFER_LENGTH (&state->bytecode);
451 int pc = state->code_length;
452 append_chunk_copy (state->bytecode.data, code_length, state);
453 BUFFER_RESET (&state->bytecode);
454 block->v.chunk = state->chunk;
456 /* Calculate code_length to the maximum value it can have. */
457 pc += block->v.chunk->size;
458 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
460 int kind = reloc->kind;
461 if (kind == SWITCH_ALIGN_RELOC)
463 else if (kind > BLOCK_START_RELOC)
464 pc += 2; /* 2-byte offset may grow to 4-byte offset */
466 pc += 5; /* May need to add a goto_w. */
468 state->code_length = pc;
472 define_jcf_label (struct jcf_block *label, struct jcf_partial *state)
474 if (state->last_block != NULL)
475 finish_jcf_block (state);
476 label->pc = state->code_length;
477 if (state->blocks == NULL)
478 state->blocks = label;
480 state->last_block->next = label;
481 state->last_block = label;
483 label->u.relocations = NULL;
486 static struct jcf_block *
487 get_jcf_label_here (struct jcf_partial *state)
489 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
490 return state->last_block;
493 struct jcf_block *label = gen_jcf_label (state);
494 define_jcf_label (label, state);
499 /* Note a line number entry for the current PC and given LINE. */
502 put_linenumber (int line, struct jcf_partial *state)
504 struct jcf_block *label = get_jcf_label_here (state);
505 if (label->linenumber > 0)
507 label = gen_jcf_label (state);
508 define_jcf_label (label, state);
510 label->linenumber = line;
511 state->linenumber_count++;
514 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
515 in the range (START_LABEL, END_LABEL). */
517 static struct jcf_handler *
518 alloc_handler (struct jcf_block *start_label, struct jcf_block *end_label,
519 struct jcf_partial *state)
521 struct jcf_handler *handler
522 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
523 handler->start_label = start_label;
524 handler->end_label = end_label;
525 handler->handler_label = get_jcf_label_here (state);
526 if (state->handlers == NULL)
527 state->handlers = handler;
529 state->last_handler->next = handler;
530 state->last_handler = handler;
531 handler->next = NULL;
532 state->num_handlers++;
537 /* The index of jvm local variable allocated for this DECL.
538 This is assigned when generating .class files;
539 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
540 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
542 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
546 struct localvar_info *next;
549 struct jcf_block *start_label;
550 struct jcf_block *end_label;
553 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
554 #define localvar_max \
555 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
558 localvar_alloc (tree decl, struct jcf_partial *state)
560 struct jcf_block *start_label = get_jcf_label_here (state);
561 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
563 struct localvar_info *info;
564 struct localvar_info **ptr = localvar_buffer;
565 struct localvar_info **limit
566 = (struct localvar_info**) state->localvars.ptr;
567 for (index = 0; ptr < limit; index++, ptr++)
570 && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
575 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
576 ptr = (struct localvar_info**) state->localvars.data + index;
577 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
579 info = obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
582 ptr[1] = (struct localvar_info *)(~0);
583 DECL_LOCAL_INDEX (decl) = index;
585 info->start_label = start_label;
587 if (debug_info_level > DINFO_LEVEL_TERSE
588 && DECL_NAME (decl) != NULL_TREE)
590 /* Generate debugging info. */
592 if (state->last_lvar != NULL)
593 state->last_lvar->next = info;
595 state->first_lvar = info;
596 state->last_lvar = info;
602 maybe_free_localvar (tree decl, struct jcf_partial *state, int really)
604 struct jcf_block *end_label = get_jcf_label_here (state);
605 int index = DECL_LOCAL_INDEX (decl);
606 struct localvar_info **ptr = &localvar_buffer [index];
607 struct localvar_info *info = *ptr;
608 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
610 info->end_label = end_label;
612 if (info->decl != decl)
619 if (ptr[1] != (struct localvar_info *)(~0))
626 #define STACK_TARGET 1
627 #define IGNORE_TARGET 2
629 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
630 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
633 get_access_flags (tree decl)
636 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
638 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
640 if (TREE_PROTECTED (decl))
641 flags |= ACC_PROTECTED;
642 if (TREE_PRIVATE (decl))
643 flags |= ACC_PRIVATE;
645 else if (TREE_CODE (decl) == TYPE_DECL)
647 if (CLASS_PUBLIC (decl))
649 if (CLASS_FINAL (decl))
651 if (CLASS_SUPER (decl))
653 if (CLASS_ABSTRACT (decl))
654 flags |= ACC_ABSTRACT;
655 if (CLASS_INTERFACE (decl))
656 flags |= ACC_INTERFACE;
657 if (CLASS_STATIC (decl))
659 if (CLASS_PRIVATE (decl))
660 flags |= ACC_PRIVATE;
661 if (CLASS_PROTECTED (decl))
662 flags |= ACC_PROTECTED;
663 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
664 || LOCAL_CLASS_P (TREE_TYPE (decl)))
665 flags |= ACC_PRIVATE;
666 if (CLASS_STRICTFP (decl))
672 if (TREE_CODE (decl) == FUNCTION_DECL)
674 if (METHOD_PUBLIC (decl))
676 if (METHOD_FINAL (decl))
678 if (METHOD_NATIVE (decl))
680 if (METHOD_STATIC (decl))
682 if (METHOD_SYNCHRONIZED (decl))
683 flags |= ACC_SYNCHRONIZED;
684 if (METHOD_ABSTRACT (decl))
685 flags |= ACC_ABSTRACT;
686 if (METHOD_STRICTFP (decl))
691 if (FIELD_PUBLIC (decl))
693 if (FIELD_FINAL (decl))
695 if (FIELD_STATIC (decl))
697 if (FIELD_VOLATILE (decl))
698 flags |= ACC_VOLATILE;
699 if (FIELD_TRANSIENT (decl))
700 flags |= ACC_TRANSIENT;
705 /* Write the list of segments starting at CHUNKS to STREAM. */
708 write_chunks (FILE* stream, struct chunk *chunks)
710 for (; chunks != NULL; chunks = chunks->next)
711 fwrite (chunks->data, chunks->size, 1, stream);
714 /* Push a 1-word constant in the constant pool at the given INDEX.
715 (Caller is responsible for doing NOTE_PUSH.) */
718 push_constant1 (HOST_WIDE_INT index, struct jcf_partial *state)
733 /* Push a 2-word constant in the constant pool at the given INDEX.
734 (Caller is responsible for doing NOTE_PUSH.) */
737 push_constant2 (HOST_WIDE_INT index, struct jcf_partial *state)
744 /* Push 32-bit integer constant on VM stack.
745 Caller is responsible for doing NOTE_PUSH. */
748 push_int_const (HOST_WIDE_INT i, struct jcf_partial *state)
751 if (i >= -1 && i <= 5)
752 OP1(OPCODE_iconst_0 + i);
753 else if (i >= -128 && i < 128)
758 else if (i >= -32768 && i < 32768)
765 i = find_constant1 (&state->cpool, CONSTANT_Integer,
766 (jword)(i & 0xFFFFFFFF));
767 push_constant1 (i, state);
772 find_constant_wide (HOST_WIDE_INT lo, HOST_WIDE_INT hi,
773 struct jcf_partial *state)
775 unsigned HOST_WIDE_INT w1;
777 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
778 return find_constant2 (&state->cpool, CONSTANT_Long,
779 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
782 /* Find or allocate a constant pool entry for the given VALUE.
783 Return the index in the constant pool. */
786 find_constant_index (tree value, struct jcf_partial *state)
788 if (TREE_CODE (value) == INTEGER_CST)
790 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
791 return find_constant1 (&state->cpool, CONSTANT_Integer,
792 (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
794 return find_constant_wide (TREE_INT_CST_LOW (value),
795 TREE_INT_CST_HIGH (value), state);
797 else if (TREE_CODE (value) == REAL_CST)
801 /* IEEE NaN can have many values, but the Java VM spec defines a
803 if (flag_emit_class_files
804 && REAL_VALUE_ISNAN (TREE_REAL_CST (value)))
806 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
807 return find_constant1 (&state->cpool, CONSTANT_Float,
810 return find_constant2 (&state->cpool, CONSTANT_Double,
811 0x7ff80000, 0x00000000);
814 real_to_target (words, &TREE_REAL_CST (value),
815 TYPE_MODE (TREE_TYPE (value)));
816 words[0] &= 0xffffffff;
817 words[1] &= 0xffffffff;
819 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
820 return find_constant1 (&state->cpool, CONSTANT_Float, (jword)words[0]);
822 return find_constant2 (&state->cpool, CONSTANT_Double,
823 (jword)words[1-FLOAT_WORDS_BIG_ENDIAN],
824 (jword)words[FLOAT_WORDS_BIG_ENDIAN]);
826 else if (TREE_CODE (value) == STRING_CST)
827 return find_string_constant (&state->cpool, value);
833 /* Push 64-bit long constant on VM stack.
834 Caller is responsible for doing NOTE_PUSH. */
837 push_long_const (HOST_WIDE_INT lo, HOST_WIDE_INT hi, struct jcf_partial *state)
839 unsigned HOST_WIDE_INT highpart;
841 jint lowpart = WORD_TO_INT (lo);
843 rshift_double (lo, hi, 32, 64, &highpart, &dummy, 1);
845 if (highpart == 0 && (lowpart == 0 || lowpart == 1))
848 OP1(OPCODE_lconst_0 + lowpart);
850 else if ((highpart == 0 && lowpart > 0 && lowpart < 32768)
851 || (highpart == (unsigned HOST_WIDE_INT)-1
852 && lowpart < 0 && lowpart >= -32768))
854 push_int_const (lowpart, state);
859 push_constant2 (find_constant_wide (lo, hi, state), state);
863 field_op (tree field, int opcode, struct jcf_partial *state)
865 int index = find_fieldref_index (&state->cpool, field);
871 /* Returns an integer in the range 0 (for 'int') through 4 (for object
872 reference) to 7 (for 'short') which matches the pattern of how JVM
873 opcodes typically depend on the operand type. */
876 adjust_typed_op (tree type, int max)
878 switch (TREE_CODE (type))
881 case RECORD_TYPE: return 4;
883 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
885 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
887 switch (TYPE_PRECISION (type))
889 case 8: return max < 5 ? 0 : 5;
890 case 16: return max < 7 ? 0 : 7;
896 switch (TYPE_PRECISION (type))
909 maybe_wide (int opcode, int index, struct jcf_partial *state)
926 /* Compile code to duplicate with offset, where
927 SIZE is the size of the stack item to duplicate (1 or 2), abd
928 OFFSET is where to insert the result (must be 0, 1, or 2).
929 (The new words get inserted at stack[SP-size-offset].) */
932 emit_dup (int size, int offset, struct jcf_partial *state)
939 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
940 else if (offset == 1)
941 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
942 else if (offset == 2)
943 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
951 emit_pop (int size, struct jcf_partial *state)
954 OP1 (OPCODE_pop - 1 + size);
958 emit_iinc (tree var, HOST_WIDE_INT value, struct jcf_partial *state)
960 int slot = DECL_LOCAL_INDEX (var);
962 if (value < -128 || value > 127 || slot >= 256)
980 emit_load_or_store (tree var, /* Variable to load from or store into. */
981 int opcode, /* Either OPCODE_iload or OPCODE_istore. */
982 struct jcf_partial *state)
984 tree type = TREE_TYPE (var);
985 int kind = adjust_typed_op (type, 4);
986 int index = DECL_LOCAL_INDEX (var);
990 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
993 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
997 emit_load (tree var, struct jcf_partial *state)
999 emit_load_or_store (var, OPCODE_iload, state);
1000 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1004 emit_store (tree var, struct jcf_partial *state)
1006 emit_load_or_store (var, OPCODE_istore, state);
1007 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1011 emit_unop (enum java_opcode opcode, tree type ATTRIBUTE_UNUSED,
1012 struct jcf_partial *state)
1019 emit_binop (enum java_opcode opcode, tree type, struct jcf_partial *state)
1021 int size = TYPE_IS_WIDE (type) ? 2 : 1;
1028 emit_reloc (HOST_WIDE_INT value, int kind,
1029 struct jcf_block *target, struct jcf_partial *state)
1031 struct jcf_relocation *reloc
1032 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1033 struct jcf_block *block = state->last_block;
1034 reloc->next = block->u.relocations;
1035 block->u.relocations = reloc;
1036 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1037 reloc->label = target;
1039 if (kind == 0 || kind == BLOCK_START_RELOC)
1041 else if (kind != SWITCH_ALIGN_RELOC)
1046 emit_switch_reloc (struct jcf_block *label, struct jcf_partial *state)
1048 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1051 /* Similar to emit_switch_reloc,
1052 but re-uses an existing case reloc. */
1055 emit_case_reloc (struct jcf_relocation *reloc, struct jcf_partial *state)
1057 struct jcf_block *block = state->last_block;
1058 reloc->next = block->u.relocations;
1059 block->u.relocations = reloc;
1060 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1061 reloc->kind = BLOCK_START_RELOC;
1065 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1066 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1069 emit_if (struct jcf_block *target, int opcode, int inv_opcode,
1070 struct jcf_partial *state)
1074 /* value is 1 byte from reloc back to start of instruction. */
1075 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
1079 emit_goto (struct jcf_block *target, struct jcf_partial *state)
1083 /* Value is 1 byte from reloc back to start of instruction. */
1084 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
1088 emit_jsr (struct jcf_block *target, struct jcf_partial *state)
1092 /* Value is 1 byte from reloc back to start of instruction. */
1093 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1097 /* Generate code to evaluate EXP. If the result is true,
1098 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1099 TRUE_BRANCH_FIRST is a code generation hint that the
1100 TRUE_LABEL may follow right after this. (The idea is that we
1101 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
/* NOTE(review): this listing has many original lines elided (the embedded
   line numbers jump), so the body below is not compilable as-is; restore
   the missing lines (case labels, braces, emitted compare opcodes) from
   the complete jcf-write.c before building.  */
1104 generate_bytecode_conditional (tree exp,
1105 struct jcf_block *true_label,
1106 struct jcf_block *false_label,
1107 int true_branch_first,
1108 struct jcf_partial *state)
1110 tree exp0, exp1, type;
1111 int save_SP = state->code_SP;
1112 enum java_opcode op, negop;
/* Dispatch on the form of the condition expression.  */
1115 switch (TREE_CODE (exp))
/* Constant condition: an unconditional goto suffices.  */
1118 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
/* COND_EXPR: evaluate the guard, then each arm conditionally; the
   stack depth after both arms must agree.  */
1122 struct jcf_block *then_label = gen_jcf_label (state);
1123 struct jcf_block *else_label = gen_jcf_label (state);
1124 int save_SP_before, save_SP_after;
1125 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1126 then_label, else_label, 1, state);
1127 define_jcf_label (then_label, state);
1128 save_SP_before = state->code_SP;
1129 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1130 true_label, false_label, 1, state);
1131 save_SP_after = state->code_SP;
1132 state->code_SP = save_SP_before;
1133 define_jcf_label (else_label, state);
1134 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1135 true_label, false_label,
1136 true_branch_first, state);
1137 if (state->code_SP != save_SP_after)
/* Logical negation: swap the target labels.  */
1141 case TRUTH_NOT_EXPR:
1142 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label,
1143 true_label, ! true_branch_first, state);
/* Short-circuit AND: first operand false jumps straight out.  */
1145 case TRUTH_ANDIF_EXPR:
1147 struct jcf_block *next_label = gen_jcf_label (state);
1148 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1149 next_label, false_label, 1, state);
1150 define_jcf_label (next_label, state);
1151 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1152 true_label, false_label, 1, state);
/* Short-circuit OR: first operand true jumps straight out.  */
1155 case TRUTH_ORIF_EXPR:
1157 struct jcf_block *next_label = gen_jcf_label (state);
1158 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1159 true_label, next_label, 1, state);
1160 define_jcf_label (next_label, state);
1161 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1162 true_label, false_label, 1, state);
1166 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1167 set it to the corresponding 1-operand if<COND> instructions. */
1171 /* The opcodes with their inverses are allocated in pairs.
1172 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1173 negop = (op & 1) ? op + 1 : op - 1;
/* Emit the branch in whichever order lets the fall-through edge be
   the label the caller expects to come next.  */
1175 if (true_branch_first)
1177 emit_if (false_label, negop, op, state);
1178 emit_goto (true_label, state);
1182 emit_if (true_label, op, negop, state);
1183 emit_goto (false_label, state);
/* Comparison operators: select the if_icmp<COND> opcode.  */
1190 op = OPCODE_if_icmpeq;
1196 op = OPCODE_if_icmpne;
1202 op = OPCODE_if_icmpgt;
1208 op = OPCODE_if_icmplt;
1214 op = OPCODE_if_icmpge;
1220 op = OPCODE_if_icmple;
1226 /* UNLT_EXPR(a, b) means 'a < b || unordered(a, b)'. This is
1227 the same as the Java source expression '!(a >= b)', so handle
1229 struct jcf_block *tmp = true_label;
1230 true_label = false_label;
1232 true_branch_first = !true_branch_first;
1235 exp0 = TREE_OPERAND (exp, 0);
1236 exp1 = TREE_OPERAND (exp, 1);
1237 type = TREE_TYPE (exp0);
/* Choose comparison strategy from the operand type.  */
1238 switch (TREE_CODE (type))
1241 case POINTER_TYPE: case RECORD_TYPE:
1242 switch (TREE_CODE (exp))
1244 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1245 case NE_EXPR: op = OPCODE_if_acmpne; break;
/* Comparison against null: use ifnull/ifnonnull on the non-null side.  */
1248 if (integer_zerop (exp1) || integer_zerop (exp0))
1250 generate_bytecode_insns (integer_zerop (exp0) ? exp1 : exp0,
1251 STACK_TARGET, state);
1252 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1253 negop = (op & 1) ? op - 1 : op + 1;
1257 generate_bytecode_insns (exp0, STACK_TARGET, state);
1258 generate_bytecode_insns (exp1, STACK_TARGET, state);
1262 generate_bytecode_insns (exp0, STACK_TARGET, state);
1263 generate_bytecode_insns (exp1, STACK_TARGET, state);
1264 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1268 if (TYPE_PRECISION (type) > 32)
1279 if (TYPE_PRECISION (type) > 32)
1281 generate_bytecode_insns (exp0, STACK_TARGET, state);
1282 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* Integer comparison with zero can use the 1-operand if<COND> forms.  */
1290 if (integer_zerop (exp1))
1292 generate_bytecode_insns (exp0, STACK_TARGET, state);
1296 if (integer_zerop (exp0))
1300 case OPCODE_if_icmplt:
1301 case OPCODE_if_icmpge:
1304 case OPCODE_if_icmpgt:
1305 case OPCODE_if_icmple:
1311 generate_bytecode_insns (exp1, STACK_TARGET, state);
1315 generate_bytecode_insns (exp0, STACK_TARGET, state);
1316 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* Default: evaluate EXP as a value and branch on it being non-zero.  */
1322 generate_bytecode_insns (exp, STACK_TARGET, state);
1324 if (true_branch_first)
1326 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1327 emit_goto (true_label, state);
1331 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1332 emit_goto (false_label, state);
/* Conditionals must be stack-neutral; anything else is a codegen bug.  */
1336 if (save_SP != state->code_SP)
1340 /* Call pending cleanups i.e. those for surrounding TRY_FINALLY_EXPRs.
1341 but only as far out as LIMIT (since we are about to jump to the
1342 emit label that is LIMIT). */
1345 call_cleanups (struct jcf_block *limit, struct jcf_partial *state)
1347 struct jcf_block *block = state->labeled_blocks;
1348 for (; block != limit; block = block->next)
1350 if (block->pc == PENDING_CLEANUP_PC)
1351 emit_jsr (block, state);
1356 generate_bytecode_return (tree exp, struct jcf_partial *state)
1358 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
1359 int returns_void = TREE_CODE (return_type) == VOID_TYPE;
1364 switch (TREE_CODE (exp))
1367 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1369 exp = TREE_OPERAND (exp, 1);
1373 struct jcf_block *then_label = gen_jcf_label (state);
1374 struct jcf_block *else_label = gen_jcf_label (state);
1375 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1376 then_label, else_label, 1, state);
1377 define_jcf_label (then_label, state);
1378 generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1379 define_jcf_label (else_label, state);
1380 generate_bytecode_return (TREE_OPERAND (exp, 2), state);
1384 generate_bytecode_insns (exp,
1385 returns_void ? IGNORE_TARGET
1386 : STACK_TARGET, state);
1392 call_cleanups (NULL, state);
1396 op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
1397 if (state->num_finalizers > 0)
1399 if (state->return_value_decl == NULL_TREE)
1401 state->return_value_decl
1402 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1403 localvar_alloc (state->return_value_decl, state);
1405 emit_store (state->return_value_decl, state);
1406 call_cleanups (NULL, state);
1407 emit_load (state->return_value_decl, state);
1408 /* If we call maybe_free_localvar (state->return_value_decl, state, 1),
1409 then we risk the save decl erroneously re-used in the
1410 finalizer. Instead, we keep the state->return_value_decl
1411 allocated through the rest of the method. This is not
1412 the greatest solution, but it is at least simple and safe. */
1419 /* Generate bytecode for sub-expression EXP of METHOD.
1420 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1423 generate_bytecode_insns (tree exp, int target, struct jcf_partial *state)
1426 enum java_opcode jopcode;
1428 HOST_WIDE_INT value;
1433 if (exp == NULL && target == IGNORE_TARGET)
1436 type = TREE_TYPE (exp);
1438 switch (TREE_CODE (exp))
1441 if (BLOCK_EXPR_BODY (exp))
1444 tree body = BLOCK_EXPR_BODY (exp);
1445 long jsrs = state->num_jsrs;
1446 for (local = BLOCK_EXPR_DECLS (exp); local; )
1448 tree next = TREE_CHAIN (local);
1449 localvar_alloc (local, state);
1452 /* Avoid deep recursion for long blocks. */
1453 while (TREE_CODE (body) == COMPOUND_EXPR)
1455 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1456 body = TREE_OPERAND (body, 1);
1458 generate_bytecode_insns (body, target, state);
1460 for (local = BLOCK_EXPR_DECLS (exp); local; )
1462 tree next = TREE_CHAIN (local);
1463 maybe_free_localvar (local, state, state->num_jsrs <= jsrs);
1469 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1470 /* Normally the first operand to a COMPOUND_EXPR must complete
1471 normally. However, in the special case of a do-while
1472 statement this is not necessarily the case. */
1473 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0)))
1474 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1476 case EXPR_WITH_FILE_LOCATION:
1478 location_t saved_location = input_location;
1479 tree body = EXPR_WFL_NODE (exp);
1480 if (IS_EMPTY_STMT (body))
1482 #ifdef USE_MAPPED_LOCATION
1483 input_location = EXPR_LOCATION (exp);
1485 input_filename = EXPR_WFL_FILENAME (exp);
1486 input_line = EXPR_WFL_LINENO (exp);
1488 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && input_line > 0
1489 && debug_info_level > DINFO_LEVEL_NONE)
1490 put_linenumber (input_line, state);
1491 generate_bytecode_insns (body, target, state);
1492 input_location = saved_location;
1496 if (target == IGNORE_TARGET) ; /* do nothing */
1497 else if (TREE_CODE (type) == POINTER_TYPE)
1499 if (! integer_zerop (exp))
1502 OP1 (OPCODE_aconst_null);
1505 else if (TYPE_PRECISION (type) <= 32)
1507 push_int_const (TREE_INT_CST_LOW (exp), state);
1512 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1519 int prec = TYPE_PRECISION (type) >> 5;
1521 if (real_zerop (exp) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp)))
1522 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1523 else if (real_onep (exp))
1524 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1525 else if (prec == 1 && real_twop (exp))
1526 OP1 (OPCODE_fconst_2);
1527 /* ??? We could also use iconst_3/ldc followed by i2f/i2d
1528 for other float/double when the value is a small integer. */
1531 offset = find_constant_index (exp, state);
1533 push_constant1 (offset, state);
1535 push_constant2 (offset, state);
1541 push_constant1 (find_string_constant (&state->cpool, exp), state);
1545 if (TREE_STATIC (exp))
1547 field_op (exp, OPCODE_getstatic, state);
1548 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1551 /* ... fall through ... */
1553 emit_load (exp, state);
1555 case NON_LVALUE_EXPR:
1557 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1560 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1561 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1562 if (target != IGNORE_TARGET)
1564 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1567 if (! TYPE_IS_WIDE (type))
1573 tree obj = TREE_OPERAND (exp, 0);
1574 tree field = TREE_OPERAND (exp, 1);
1575 int is_static = FIELD_STATIC (field);
1576 generate_bytecode_insns (obj,
1577 is_static ? IGNORE_TARGET : target, state);
1578 if (target != IGNORE_TARGET)
1580 if (DECL_NAME (field) == length_identifier_node && !is_static
1581 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1584 OP1 (OPCODE_arraylength);
1588 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1592 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1597 case TRUTH_ANDIF_EXPR:
1598 case TRUTH_ORIF_EXPR:
1612 struct jcf_block *then_label = gen_jcf_label (state);
1613 struct jcf_block *else_label = gen_jcf_label (state);
1614 struct jcf_block *end_label = gen_jcf_label (state);
1615 generate_bytecode_conditional (exp,
1616 then_label, else_label, 1, state);
1617 define_jcf_label (then_label, state);
1618 push_int_const (1, state);
1619 emit_goto (end_label, state);
1620 define_jcf_label (else_label, state);
1621 push_int_const (0, state);
1622 define_jcf_label (end_label, state);
1628 struct jcf_block *then_label = gen_jcf_label (state);
1629 struct jcf_block *else_label = gen_jcf_label (state);
1630 struct jcf_block *end_label = gen_jcf_label (state);
1631 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1632 then_label, else_label, 1, state);
1633 define_jcf_label (then_label, state);
1634 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1635 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1636 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1637 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1638 emit_goto (end_label, state);
1639 define_jcf_label (else_label, state);
1640 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1641 define_jcf_label (end_label, state);
1642 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1643 if (TREE_TYPE (exp) != void_type_node)
1644 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1649 struct jcf_switch_state *sw_state = state->sw_state;
1650 struct jcf_relocation *reloc
1651 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1652 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1654 reloc->label = get_jcf_label_here (state);
1655 reloc->offset = case_value;
1656 reloc->next = sw_state->cases;
1657 sw_state->cases = reloc;
1658 if (sw_state->num_cases == 0)
1660 sw_state->min_case = case_value;
1661 sw_state->max_case = case_value;
1665 if (case_value < sw_state->min_case)
1666 sw_state->min_case = case_value;
1667 if (case_value > sw_state->max_case)
1668 sw_state->max_case = case_value;
1670 sw_state->num_cases++;
1674 state->sw_state->default_label = get_jcf_label_here (state);
1679 /* The SWITCH_EXPR has three parts, generated in the following order:
1680 1. the switch_expression (the value used to select the correct case);
1682 3. the switch_instruction (the tableswitch/loopupswitch instruction.).
1683 After code generation, we will re-order them in the order 1, 3, 2.
1684 This is to avoid any extra GOTOs. */
1685 struct jcf_switch_state sw_state;
1686 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1687 struct jcf_block *body_last; /* Last block of the switch_body. */
1688 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1689 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1690 struct jcf_block *body_block;
1692 sw_state.prev = state->sw_state;
1693 state->sw_state = &sw_state;
1694 sw_state.cases = NULL;
1695 sw_state.num_cases = 0;
1696 sw_state.default_label = NULL;
1697 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1698 expression_last = state->last_block;
1699 /* Force a new block here. */
1700 body_block = gen_jcf_label (state);
1701 define_jcf_label (body_block, state);
1702 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1703 body_last = state->last_block;
1705 switch_instruction = gen_jcf_label (state);
1706 define_jcf_label (switch_instruction, state);
1707 if (sw_state.default_label == NULL)
1708 sw_state.default_label = gen_jcf_label (state);
1710 if (sw_state.num_cases <= 1)
1712 if (sw_state.num_cases == 0)
1714 emit_pop (1, state);
1719 push_int_const (sw_state.cases->offset, state);
1721 emit_if (sw_state.cases->label,
1722 OPCODE_if_icmpeq, OPCODE_if_icmpne, state);
1724 emit_goto (sw_state.default_label, state);
1729 unsigned HOST_WIDE_INT delta;
1730 /* Copy the chain of relocs into a sorted array. */
1731 struct jcf_relocation **relocs
1732 = xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1733 /* The relocs arrays is a buffer with a gap.
1734 The assumption is that cases will normally come in "runs". */
1736 int gap_end = sw_state.num_cases;
1737 struct jcf_relocation *reloc;
1738 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1740 HOST_WIDE_INT case_value = reloc->offset;
1741 while (gap_end < sw_state.num_cases)
1743 struct jcf_relocation *end = relocs[gap_end];
1744 if (case_value <= end->offset)
1746 relocs[gap_start++] = end;
1749 while (gap_start > 0)
1751 struct jcf_relocation *before = relocs[gap_start-1];
1752 if (case_value >= before->offset)
1754 relocs[--gap_end] = before;
1757 relocs[gap_start++] = reloc;
1758 /* Note we don't check for duplicates. This is
1759 handled by the parser. */
1762 /* We could have DELTA < 0 if sw_state.min_case is
1763 something like Integer.MIN_VALUE. That is why delta is
1765 delta = sw_state.max_case - sw_state.min_case;
1766 if (2 * (unsigned) sw_state.num_cases >= delta)
1767 { /* Use tableswitch. */
1769 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1770 OP1 (OPCODE_tableswitch);
1771 emit_reloc (RELOCATION_VALUE_0,
1772 SWITCH_ALIGN_RELOC, NULL, state);
1773 emit_switch_reloc (sw_state.default_label, state);
1774 OP4 (sw_state.min_case);
1775 OP4 (sw_state.max_case);
1776 for (i = sw_state.min_case; ; )
1778 reloc = relocs[index];
1779 if (i == reloc->offset)
1781 emit_case_reloc (reloc, state);
1782 if (i == sw_state.max_case)
1787 emit_switch_reloc (sw_state.default_label, state);
1792 { /* Use lookupswitch. */
1793 RESERVE(9 + 8 * sw_state.num_cases);
1794 OP1 (OPCODE_lookupswitch);
1795 emit_reloc (RELOCATION_VALUE_0,
1796 SWITCH_ALIGN_RELOC, NULL, state);
1797 emit_switch_reloc (sw_state.default_label, state);
1798 OP4 (sw_state.num_cases);
1799 for (i = 0; i < sw_state.num_cases; i++)
1801 struct jcf_relocation *reloc = relocs[i];
1802 OP4 (reloc->offset);
1803 emit_case_reloc (reloc, state);
1809 instruction_last = state->last_block;
1810 if (sw_state.default_label->pc < 0)
1811 define_jcf_label (sw_state.default_label, state);
1812 else /* Force a new block. */
1813 sw_state.default_label = get_jcf_label_here (state);
1814 /* Now re-arrange the blocks so the switch_instruction
1815 comes before the switch_body. */
1816 switch_length = state->code_length - switch_instruction->pc;
1817 switch_instruction->pc = body_block->pc;
1818 instruction_last->next = body_block;
1819 instruction_last->v.chunk->next = body_block->v.chunk;
1820 expression_last->next = switch_instruction;
1821 expression_last->v.chunk->next = switch_instruction->v.chunk;
1822 body_last->next = sw_state.default_label;
1823 body_last->v.chunk->next = NULL;
1824 state->chunk = body_last->v.chunk;
1825 for (; body_block != sw_state.default_label; body_block = body_block->next)
1826 body_block->pc += switch_length;
1828 state->sw_state = sw_state.prev;
1833 exp = TREE_OPERAND (exp, 0);
1834 if (exp == NULL_TREE)
1835 exp = build_java_empty_stmt ();
1836 else if (TREE_CODE (exp) != MODIFY_EXPR)
1839 exp = TREE_OPERAND (exp, 1);
1840 generate_bytecode_return (exp, state);
1842 case LABELED_BLOCK_EXPR:
1844 struct jcf_block *end_label = gen_jcf_label (state);
1845 end_label->next = state->labeled_blocks;
1846 state->labeled_blocks = end_label;
1847 end_label->pc = PENDING_EXIT_PC;
1848 end_label->u.labeled_block = exp;
1849 if (LABELED_BLOCK_BODY (exp))
1850 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1851 if (state->labeled_blocks != end_label)
1853 state->labeled_blocks = end_label->next;
1854 define_jcf_label (end_label, state);
1859 tree body = TREE_OPERAND (exp, 0);
1861 if (TREE_CODE (body) == COMPOUND_EXPR
1862 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1864 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1865 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1866 struct jcf_block *head_label;
1867 struct jcf_block *body_label;
1868 struct jcf_block *end_label = gen_jcf_label (state);
1869 struct jcf_block *exit_label = state->labeled_blocks;
1870 head_label = gen_jcf_label (state);
1871 emit_goto (head_label, state);
1872 body_label = get_jcf_label_here (state);
1873 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1874 define_jcf_label (head_label, state);
1875 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1876 end_label, body_label, 1, state);
1877 define_jcf_label (end_label, state);
1882 struct jcf_block *head_label = get_jcf_label_here (state);
1883 generate_bytecode_insns (body, IGNORE_TARGET, state);
1884 if (CAN_COMPLETE_NORMALLY (body))
1885 emit_goto (head_label, state);
1891 struct jcf_block *label = state->labeled_blocks;
1892 struct jcf_block *end_label = gen_jcf_label (state);
1893 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1894 label, end_label, 0, state);
1895 define_jcf_label (end_label, state);
1898 case EXIT_BLOCK_EXPR:
1900 struct jcf_block *label = state->labeled_blocks;
1901 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1902 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1903 label = label->next;
1904 call_cleanups (label, state);
1905 emit_goto (label, state);
1909 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1910 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1911 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1912 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1915 arg = TREE_OPERAND (exp, 1);
1916 exp = TREE_OPERAND (exp, 0);
1917 type = TREE_TYPE (exp);
1918 size = TYPE_IS_WIDE (type) ? 2 : 1;
1919 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1920 && ! TREE_STATIC (exp)
1921 && TREE_CODE (type) == INTEGER_TYPE
1922 && TYPE_PRECISION (type) == 32)
1924 if (target != IGNORE_TARGET && post_op)
1925 emit_load (exp, state);
1926 emit_iinc (exp, value, state);
1927 if (target != IGNORE_TARGET && ! post_op)
1928 emit_load (exp, state);
1931 if (TREE_CODE (exp) == COMPONENT_REF)
1933 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1934 emit_dup (1, 0, state);
1935 /* Stack: ..., objectref, objectref. */
1936 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1938 /* Stack: ..., objectref, oldvalue. */
1941 else if (TREE_CODE (exp) == ARRAY_REF)
1943 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1944 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1945 emit_dup (2, 0, state);
1946 /* Stack: ..., array, index, array, index. */
1947 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1951 /* Stack: ..., array, index, oldvalue. */
1954 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1956 generate_bytecode_insns (exp, STACK_TARGET, state);
1957 /* Stack: ..., oldvalue. */
1963 if (target != IGNORE_TARGET && post_op)
1964 emit_dup (size, offset, state);
1965 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1966 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1967 /* Stack, otherwise: ..., [result, ] oldvalue. */
1968 generate_bytecode_insns (arg, STACK_TARGET, state);
1969 emit_binop ((value >= 0 ? OPCODE_iadd : OPCODE_isub)
1970 + adjust_typed_op (type, 3),
1972 if (target != IGNORE_TARGET && ! post_op)
1973 emit_dup (size, offset, state);
1974 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1975 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1976 /* Stack, otherwise: ..., [result, ] newvalue. */
1977 goto finish_assignment;
1981 tree lhs = TREE_OPERAND (exp, 0);
1982 tree rhs = TREE_OPERAND (exp, 1);
1985 /* See if we can use the iinc instruction. */
1986 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1987 && ! TREE_STATIC (lhs)
1988 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1989 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1990 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1992 tree arg0 = TREE_OPERAND (rhs, 0);
1993 tree arg1 = TREE_OPERAND (rhs, 1);
1994 HOST_WIDE_INT min_value = -32768;
1995 HOST_WIDE_INT max_value = 32767;
1996 if (TREE_CODE (rhs) == MINUS_EXPR)
2001 else if (arg1 == lhs)
2004 arg1 = TREE_OPERAND (rhs, 0);
2006 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
2008 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
2009 value = TREE_INT_CST_LOW (arg1);
2010 if ((hi_value == 0 && value <= max_value)
2011 || (hi_value == -1 && value >= min_value))
2013 if (TREE_CODE (rhs) == MINUS_EXPR)
2015 emit_iinc (lhs, value, state);
2016 if (target != IGNORE_TARGET)
2017 emit_load (lhs, state);
2023 if (TREE_CODE (lhs) == COMPONENT_REF)
2025 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
2026 STACK_TARGET, state);
2029 else if (TREE_CODE (lhs) == ARRAY_REF)
2031 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
2032 STACK_TARGET, state);
2033 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
2034 STACK_TARGET, state);
2040 /* If the rhs is a binary expression and the left operand is
2041 `==' to the lhs then we have an OP= expression. In this
2042 case we must do some special processing. */
2043 if (BINARY_CLASS_P (rhs) && lhs == TREE_OPERAND (rhs, 0))
2045 if (TREE_CODE (lhs) == COMPONENT_REF)
2047 tree field = TREE_OPERAND (lhs, 1);
2048 if (! FIELD_STATIC (field))
2050 /* Duplicate the object reference so we can get
2052 emit_dup (TYPE_IS_WIDE (field) ? 2 : 1, 0, state);
2055 field_op (field, (FIELD_STATIC (field)
2060 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2062 else if (TREE_CODE (lhs) == VAR_DECL
2063 || TREE_CODE (lhs) == PARM_DECL)
2065 if (FIELD_STATIC (lhs))
2067 field_op (lhs, OPCODE_getstatic, state);
2068 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2071 emit_load (lhs, state);
2073 else if (TREE_CODE (lhs) == ARRAY_REF)
2075 /* Duplicate the array and index, which are on the
2076 stack, so that we can load the old value. */
2077 emit_dup (2, 0, state);
2079 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (lhs), 7);
2082 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2087 /* This function correctly handles the case where the LHS
2088 of a binary expression is NULL_TREE. */
2089 rhs = build2 (TREE_CODE (rhs), TREE_TYPE (rhs),
2090 NULL_TREE, TREE_OPERAND (rhs, 1));
2093 generate_bytecode_insns (rhs, STACK_TARGET, state);
2094 if (target != IGNORE_TARGET)
2095 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
2101 if (TREE_CODE (exp) == COMPONENT_REF)
2103 tree field = TREE_OPERAND (exp, 1);
2104 if (! FIELD_STATIC (field))
2107 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2110 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2112 else if (TREE_CODE (exp) == VAR_DECL
2113 || TREE_CODE (exp) == PARM_DECL)
2115 if (FIELD_STATIC (exp))
2117 field_op (exp, OPCODE_putstatic, state);
2118 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2121 emit_store (exp, state);
2123 else if (TREE_CODE (exp) == ARRAY_REF)
2125 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2128 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2134 jopcode = OPCODE_iadd;
2137 jopcode = OPCODE_isub;
2140 jopcode = OPCODE_imul;
2142 case TRUNC_DIV_EXPR:
2144 jopcode = OPCODE_idiv;
2146 case TRUNC_MOD_EXPR:
2147 jopcode = OPCODE_irem;
2149 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2150 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
2151 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2152 case TRUTH_AND_EXPR:
2153 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2155 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2156 case TRUTH_XOR_EXPR:
2157 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2160 tree arg0 = TREE_OPERAND (exp, 0);
2161 tree arg1 = TREE_OPERAND (exp, 1);
2162 jopcode += adjust_typed_op (type, 3);
2163 if (arg0 != NULL_TREE && operand_equal_p (arg0, arg1, 0))
2165 /* fold may (e.g) convert 2*x to x+x. */
2166 generate_bytecode_insns (arg0, target, state);
2167 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2171 /* ARG0 will be NULL_TREE if we're handling an `OP='
2172 expression. In this case the stack already holds the
2173 LHS. See the MODIFY_EXPR case. */
2174 if (arg0 != NULL_TREE)
2175 generate_bytecode_insns (arg0, target, state);
2176 if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2177 arg1 = convert (int_type_node, arg1);
2178 generate_bytecode_insns (arg1, target, state);
2180 /* For most binary operations, both operands and the result have the
2181 same type. Shift operations are different. Using arg1's type
2182 gets us the correct SP adjustment in all cases. */
2183 if (target == STACK_TARGET)
2184 emit_binop (jopcode, TREE_TYPE (arg1), state);
2187 case TRUTH_NOT_EXPR:
2189 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2190 if (target == STACK_TARGET)
2192 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2193 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2197 NOTE_PUSH (1 + is_long);
2198 OP1 (OPCODE_ixor + is_long);
2199 NOTE_POP (1 + is_long);
2203 jopcode = OPCODE_ineg;
2204 jopcode += adjust_typed_op (type, 3);
2205 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2206 if (target == STACK_TARGET)
2207 emit_unop (jopcode, type, state);
2209 case INSTANCEOF_EXPR:
2211 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2212 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2214 OP1 (OPCODE_instanceof);
2219 /* The first time through, the argument of the SAVE_EXPR will be
2220 something complex. Evaluate it, and replace the argument with
2221 a VAR_DECL that holds the result. */
2222 arg = TREE_OPERAND (exp, 0);
2223 if (TREE_CODE (arg) != VAR_DECL || DECL_NAME (arg))
2225 tree type = TREE_TYPE (exp);
2226 tree decl = build_decl (VAR_DECL, NULL_TREE, type);
2227 generate_bytecode_insns (arg, STACK_TARGET, state);
2228 localvar_alloc (decl, state);
2229 TREE_OPERAND (exp, 0) = decl;
2230 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1, 0, state);
2231 emit_store (decl, state);
2235 tree type = TREE_TYPE (exp);
2236 emit_load (arg, state);
2237 NOTE_PUSH (TYPE_IS_WIDE (type) ? 2 : 1);
2243 case FIX_TRUNC_EXPR:
2245 tree src = TREE_OPERAND (exp, 0);
2246 tree src_type = TREE_TYPE (src);
2247 tree dst_type = TREE_TYPE (exp);
2248 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2249 if (target == IGNORE_TARGET || src_type == dst_type)
2251 if (TREE_CODE (dst_type) == POINTER_TYPE)
2253 if (TREE_CODE (exp) == CONVERT_EXPR)
2255 int index = find_class_constant (&state->cpool,
2256 TREE_TYPE (dst_type));
2258 OP1 (OPCODE_checkcast);
2262 else /* Convert numeric types. */
2264 int wide_src = TYPE_PRECISION (src_type) > 32;
2265 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2266 NOTE_POP (1 + wide_src);
2268 if (TREE_CODE (dst_type) == REAL_TYPE)
2270 if (TREE_CODE (src_type) == REAL_TYPE)
2271 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2272 else if (TYPE_PRECISION (src_type) == 64)
2273 OP1 (OPCODE_l2f + wide_dst);
2275 OP1 (OPCODE_i2f + wide_dst);
2277 else /* Convert to integral type. */
2279 if (TREE_CODE (src_type) == REAL_TYPE)
2280 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2285 if (TYPE_PRECISION (dst_type) < 32)
2288 /* Already converted to int, if needed. */
2289 if (TYPE_PRECISION (dst_type) <= 8)
2291 else if (TYPE_UNSIGNED (dst_type))
2297 NOTE_PUSH (1 + wide_dst);
2304 tree try_clause = TREE_OPERAND (exp, 0);
2305 struct jcf_block *start_label = get_jcf_label_here (state);
2306 struct jcf_block *end_label; /* End of try clause. */
2307 struct jcf_block *finished_label = gen_jcf_label (state);
2308 tree clause = TREE_OPERAND (exp, 1);
2309 if (target != IGNORE_TARGET)
2311 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2312 end_label = get_jcf_label_here (state);
2313 if (end_label == start_label)
2315 if (CAN_COMPLETE_NORMALLY (try_clause))
2316 emit_goto (finished_label, state);
2317 while (clause != NULL_TREE)
2319 tree catch_clause = TREE_OPERAND (clause, 0);
2320 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2321 struct jcf_handler *handler = alloc_handler (start_label,
2323 if (exception_decl == NULL_TREE)
2324 handler->type = NULL_TREE;
2326 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2327 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2328 clause = TREE_CHAIN (clause);
2329 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2330 emit_goto (finished_label, state);
2332 define_jcf_label (finished_label, state);
2336 case TRY_FINALLY_EXPR:
2338 struct jcf_block *finished_label = NULL;
2339 struct jcf_block *finally_label, *start_label, *end_label;
2340 struct jcf_handler *handler;
2341 tree try_block = TREE_OPERAND (exp, 0);
2342 tree finally = TREE_OPERAND (exp, 1);
2343 tree return_link = NULL_TREE, exception_decl = NULL_TREE;
2345 tree exception_type;
2347 finally_label = gen_jcf_label (state);
2348 start_label = get_jcf_label_here (state);
2349 /* If the `finally' clause can complete normally, we emit it
2350 as a subroutine and let the other clauses call it via
2351 `jsr'. If it can't complete normally, then we simply emit
2352 `goto's directly to it. */
2353 if (CAN_COMPLETE_NORMALLY (finally))
2355 finally_label->pc = PENDING_CLEANUP_PC;
2356 finally_label->next = state->labeled_blocks;
2357 state->labeled_blocks = finally_label;
2358 state->num_finalizers++;
2361 generate_bytecode_insns (try_block, target, state);
2363 if (CAN_COMPLETE_NORMALLY (finally))
2365 if (state->labeled_blocks != finally_label)
2367 state->labeled_blocks = finally_label->next;
2369 end_label = get_jcf_label_here (state);
2371 if (end_label == start_label)
2373 state->num_finalizers--;
2374 define_jcf_label (finally_label, state);
2375 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2379 if (CAN_COMPLETE_NORMALLY (finally))
2381 return_link = build_decl (VAR_DECL, NULL_TREE,
2382 return_address_type_node);
2383 finished_label = gen_jcf_label (state);
2386 if (CAN_COMPLETE_NORMALLY (try_block))
2388 if (CAN_COMPLETE_NORMALLY (finally))
2390 emit_jsr (finally_label, state);
2391 emit_goto (finished_label, state);
2394 emit_goto (finally_label, state);
2397 /* Handle exceptions. */
2399 exception_type = build_pointer_type (throwable_type_node);
2400 if (CAN_COMPLETE_NORMALLY (finally))
2402 /* We're going to generate a subroutine, so we'll need to
2403 save and restore the exception around the `jsr'. */
2404 exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2405 localvar_alloc (return_link, state);
2407 handler = alloc_handler (start_label, end_label, state);
2408 handler->type = NULL_TREE;
2409 if (CAN_COMPLETE_NORMALLY (finally))
2411 localvar_alloc (exception_decl, state);
2413 emit_store (exception_decl, state);
2414 emit_jsr (finally_label, state);
2415 emit_load (exception_decl, state);
2417 OP1 (OPCODE_athrow);
2422 /* We're not generating a subroutine. In this case we can
2423 simply have the exception handler pop the exception and
2424 then fall through to the `finally' block. */
2426 emit_pop (1, state);
2430 /* The finally block. If we're generating a subroutine, first
2431 save return PC into return_link. Otherwise, just generate
2432 the code for the `finally' block. */
2433 define_jcf_label (finally_label, state);
2434 if (CAN_COMPLETE_NORMALLY (finally))
2437 emit_store (return_link, state);
2440 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2441 if (CAN_COMPLETE_NORMALLY (finally))
2443 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2444 maybe_free_localvar (exception_decl, state, 1);
2445 maybe_free_localvar (return_link, state, 1);
2446 define_jcf_label (finished_label, state);
2451 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2453 OP1 (OPCODE_athrow);
2455 case NEW_ARRAY_INIT:
2457 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2458 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2459 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2460 HOST_WIDE_INT length = java_array_type_length (array_type);
2461 if (target == IGNORE_TARGET)
2463 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2464 generate_bytecode_insns (TREE_VALUE (values), target, state);
2467 push_int_const (length, state);
2470 if (JPRIMITIVE_TYPE_P (element_type))
2472 int atype = encode_newarray_type (element_type);
2473 OP1 (OPCODE_newarray);
2478 int index = find_class_constant (&state->cpool,
2479 TREE_TYPE (element_type));
2480 OP1 (OPCODE_anewarray);
2484 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2485 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2487 int save_SP = state->code_SP;
2488 emit_dup (1, 0, state);
2489 push_int_const (offset, state);
2491 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2494 state->code_SP = save_SP;
2498 case JAVA_EXC_OBJ_EXPR:
2499 NOTE_PUSH (1); /* Pushed by exception system. */
2504 /* This copes with cases where fold() has created MIN or MAX
2505 from a conditional expression. */
2506 enum tree_code code = TREE_CODE (exp) == MIN_EXPR ? LT_EXPR : GT_EXPR;
2507 tree op0 = TREE_OPERAND (exp, 0);
2508 tree op1 = TREE_OPERAND (exp, 1);
2510 if (TREE_SIDE_EFFECTS (op0) || TREE_SIDE_EFFECTS (op1))
2512 x = build3 (COND_EXPR, TREE_TYPE (exp),
2513 build2 (code, boolean_type_node, op0, op1),
2515 generate_bytecode_insns (x, target, state);
2518 case NEW_CLASS_EXPR:
2520 tree class = TREE_TYPE (TREE_TYPE (exp));
2521 int need_result = target != IGNORE_TARGET;
2522 int index = find_class_constant (&state->cpool, class);
2528 NOTE_PUSH (1 + need_result);
2530 /* ... fall though ... */
2533 tree f = TREE_OPERAND (exp, 0);
2534 tree x = TREE_OPERAND (exp, 1);
2535 int save_SP = state->code_SP;
2537 if (TREE_CODE (f) == ADDR_EXPR)
2538 f = TREE_OPERAND (f, 0);
2539 if (f == soft_newarray_node)
2541 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2542 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2543 STACK_TARGET, state);
2545 OP1 (OPCODE_newarray);
2549 else if (f == soft_multianewarray_node)
2553 int index = find_class_constant (&state->cpool,
2554 TREE_TYPE (TREE_TYPE (exp)));
2555 x = TREE_CHAIN (x); /* Skip class argument. */
2556 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2557 for (idim = ndims; --idim >= 0; )
2560 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2563 OP1 (OPCODE_multianewarray);
2568 else if (f == soft_anewarray_node)
2570 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2571 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2572 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2574 OP1 (OPCODE_anewarray);
2578 else if (f == soft_monitorenter_node
2579 || f == soft_monitorexit_node
2582 if (f == soft_monitorenter_node)
2583 op = OPCODE_monitorenter;
2584 else if (f == soft_monitorexit_node)
2585 op = OPCODE_monitorexit;
2588 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2594 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2596 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2598 nargs = state->code_SP - save_SP;
2599 state->code_SP = save_SP;
2600 if (f == soft_fmod_node)
2607 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2608 NOTE_POP (1); /* Pop implicit this. */
2609 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2611 tree context = DECL_CONTEXT (f);
2612 int index, interface = 0;
2614 if (METHOD_STATIC (f))
2615 OP1 (OPCODE_invokestatic);
2616 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2617 || METHOD_PRIVATE (f))
2618 OP1 (OPCODE_invokespecial);
2621 if (CLASS_INTERFACE (TYPE_NAME (context)))
2623 tree arg1 = TREE_VALUE (TREE_OPERAND (exp, 1));
2624 context = TREE_TYPE (TREE_TYPE (arg1));
2625 if (CLASS_INTERFACE (TYPE_NAME (context)))
2629 OP1 (OPCODE_invokeinterface);
2631 OP1 (OPCODE_invokevirtual);
2633 index = find_methodref_with_class_index (&state->cpool, f, context);
2643 f = TREE_TYPE (TREE_TYPE (f));
2644 if (TREE_CODE (f) != VOID_TYPE)
2646 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2647 if (target == IGNORE_TARGET)
2648 emit_pop (size, state);
2658 error("internal error in generate_bytecode_insn - tree code not implemented: %s",
2659 tree_code_name [(int) TREE_CODE (exp)]);
/* Resolve all pending branch/switch relocations for the current method
   and rewrite each block's bytecode chunk into its final form.
   Works in two passes:
     1. Walk the block list computing each block's final pc, applying two
        peephole optimizations (redundant-goto removal, goto-to-goto
        threading) and deciding which branches must expand to wide form.
     2. Re-emit each chunk back-to-front, patching branch offsets.
   NOTE(review): this listing is elided — several brace/statement lines
   are not shown; comments below describe only the visible logic.  */
2664 perform_relocations (struct jcf_partial *state)
2666 struct jcf_block *block;
2667 struct jcf_relocation *reloc;
2671 /* Before we start, the pc field of each block is an upper bound on
2672 the block's start pc (it may be less, if previous blocks need less
2673 than their maximum).
2675 The minimum size of each block is in the block's chunk->size. */
2677 /* First, figure out the actual locations of each block. */
2680 for (block = state->blocks; block != NULL; block = block->next)
2682 int block_size = block->v.chunk->size;
2686 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2687 Assumes relocations are in reverse order. */
2688 reloc = block->u.relocations;
2689 while (reloc != NULL
2690 && reloc->kind == OPCODE_goto_w
2691 && reloc->label->pc == block->next->pc
2692 && reloc->offset + 2 == block_size)
/* The goto targets the fall-through block: drop its relocation and
   shrink the chunk by the 3 bytes of the goto instruction.  */
2694 reloc = reloc->next;
2695 block->u.relocations = reloc;
2696 block->v.chunk->size -= 3;
2701 /* Optimize GOTO L; ... L: GOTO X by changing the first goto to
2702 jump directly to X. We're careful here to avoid an infinite
2703 loop if the `goto's themselves form one. We do this
2704 optimization because we can generate a goto-to-goto for some
2705 try/finally blocks. */
2706 while (reloc != NULL
2707 && reloc->kind == OPCODE_goto_w
2708 && reloc->label != block
2709 && reloc->label->v.chunk->data != NULL
2710 && reloc->label->v.chunk->data[0] == OPCODE_goto)
2712 /* Find the reloc for the first instruction of the
2713 destination block. */
2714 struct jcf_relocation *first_reloc;
2715 for (first_reloc = reloc->label->u.relocations;
2717 first_reloc = first_reloc->next)
2719 if (first_reloc->offset == 1
2720 && first_reloc->kind == OPCODE_goto_w)
/* Retarget our goto at the destination goto's label (threading).  */
2722 reloc->label = first_reloc->label;
2727 /* If we didn't do anything, exit the loop. */
2728 if (first_reloc == NULL)
/* Decide how much each remaining relocation grows this block.  */
2732 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2734 if (reloc->kind == SWITCH_ALIGN_RELOC)
2736 /* We assume this is the first relocation in this block,
2737 so we know its final pc. */
2738 int where = pc + reloc->offset;
2739 int pad = ((where + 3) & ~3) - where;
2742 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2744 int delta = reloc->label->pc - (pc + reloc->offset - 1);
/* A plain branch (kind > 0) widens by 2 bytes to its _w form; an
   inverted conditional (kind < -1) needs 5 extra bytes for the
   ifNCOND/goto_w pair — see the rewriting pass below.  */
2745 int expand = reloc->kind > 0 ? 2 : 5;
2749 if (delta >= -32768 && delta <= 32767)
2755 block_size += expand;
/* Second pass: allocate final-size chunks and copy/relocate the
   bytecode, walking each chunk from its end toward its start.  */
2761 for (block = state->blocks; block != NULL; block = block->next)
2763 struct chunk *chunk = block->v.chunk;
2764 int old_size = chunk->size;
2765 int next_pc = block->next == NULL ? pc : block->next->pc;
2766 int new_size = next_pc - block->pc;
2767 unsigned char *new_ptr;
2768 unsigned char *old_buffer = chunk->data;
2769 unsigned char *old_ptr = old_buffer + old_size;
2770 if (new_size != old_size)
2772 chunk->data = obstack_alloc (state->chunk_obstack, new_size);
2773 chunk->size = new_size;
2775 new_ptr = chunk->data + new_size;
2777 /* We do the relocations from back to front, because
2778 the relocations are in reverse order. */
2779 for (reloc = block->u.relocations; ; reloc = reloc->next)
2781 /* new_ptr and old_ptr point into the old and new buffers,
2782 respectively. (If no relocations cause the buffer to
2783 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2784 The bytes at higher address have been copied and relocations
2785 handled; those at lower addresses remain to process. */
2787 /* Lower old index of piece to be copied with no relocation.
2788 I.e. high index of the first piece that does need relocation. */
2789 int start = reloc == NULL ? 0
2790 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2791 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2793 : reloc->offset + 2;
2796 int n = (old_ptr - old_buffer) - start;
2800 memcpy (new_ptr, old_ptr, n);
2801 if (old_ptr == old_buffer)
/* new_offset is where the relocated operand will sit, relative to
   the start of the chunk (2 bytes back for a short form, 4 for wide).  */
2804 new_offset = new_ptr - chunk->data;
2805 new_offset -= (reloc->kind == -1 ? 2 : 4);
2806 if (reloc->kind == 0)
2809 value = GET_u4 (old_ptr);
2811 else if (reloc->kind == BLOCK_START_RELOC)
2817 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2819 int where = block->pc + reloc->offset;
2820 int pad = ((where + 3) & ~3) - where;
2828 value = GET_u2 (old_ptr);
/* Rebias the stored offset so it is relative to its final location,
   then write it out big-endian, low byte first (we move backward).  */
2830 value += reloc->label->pc - (block->pc + new_offset);
2831 *--new_ptr = (unsigned char) value; value >>= 8;
2832 *--new_ptr = (unsigned char) value; value >>= 8;
2833 if (reloc->kind != -1)
2835 *--new_ptr = (unsigned char) value; value >>= 8;
2836 *--new_ptr = (unsigned char) value;
2838 if (reloc->kind > BLOCK_START_RELOC)
2840 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2842 *--new_ptr = reloc->kind;
2844 else if (reloc->kind < -1)
2846 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2848 *--new_ptr = OPCODE_goto_w;
2851 *--new_ptr = - reloc->kind;
/* Sanity: by now the whole chunk must have been rewritten.  */
2854 if (new_ptr != chunk->data)
2857 state->code_length = pc;
/* Initialize the per-class-file parts of STATE: record WORK as the
   obstack from which output chunks are allocated, clear the chunk
   list, and set up the constant pool and the growable localvars and
   bytecode buffers.  Per-method fields are reset separately by
   init_jcf_method.  */
2861 init_jcf_state (struct jcf_partial *state, struct obstack *work)
2863 state->chunk_obstack = work;
2864 state->first = state->chunk = NULL;
2865 CPOOL_INIT (&state->cpool);
2866 BUFFER_INIT (&state->localvars);
2867 BUFFER_INIT (&state->bytecode);
/* Reset the per-method fields of STATE before generating bytecode for
   METHOD: clear the block list, line-number and local-variable
   bookkeeping, labeled-block and exception-handler lists, and rewind
   the bytecode/localvars buffers for reuse.  */
2871 init_jcf_method (struct jcf_partial *state, tree method)
2873 state->current_method = method;
2874 state->blocks = state->last_block = NULL;
2875 state->linenumber_count = 0;
2876 state->first_lvar = state->last_lvar = NULL;
2877 state->lvar_count = 0;
2878 state->labeled_blocks = NULL;
2879 state->code_length = 0;
2880 BUFFER_RESET (&state->bytecode);
2881 BUFFER_RESET (&state->localvars);
2883 state->code_SP_max = 0;
2884 state->handlers = NULL;
2885 state->last_handler = NULL;
2886 state->num_handlers = 0;
2887 state->num_finalizers = 0;
2888 state->return_value_decl = NULL_TREE;
/* Release the resources held by STATE: finish the constant pool and
   free every chunk on the chunk obstack (freeing state->first releases
   it and everything allocated after it).  */
2892 release_jcf_state (struct jcf_partial *state)
2894 CPOOL_FINISH (&state->cpool);
2895 obstack_free (state->chunk_obstack, state->first);
2898 /* Get the access flags (modifiers) of a class (TYPE_DECL) to be used in the
2899 access_flags field of the class file header.  Returns the flag word
   with ACC_SUPER forced on for non-interfaces, protected promoted to
   public, and everything outside the valid top-level set masked off. */
2902 get_classfile_modifiers (tree class)
2904 /* These are the flags which are valid class file modifiers.
2906 int valid_toplevel_class_flags = (ACC_PUBLIC | ACC_FINAL | ACC_SUPER |
2907 ACC_INTERFACE | ACC_ABSTRACT);
2908 int flags = get_access_flags (class);
2910 /* ACC_SUPER should always be set, except for interfaces. */
2911 if (! (flags & ACC_INTERFACE))
2914 /* A protected member class becomes public at the top level. */
2915 if (flags & ACC_PROTECTED)
2916 flags |= ACC_PUBLIC;
2918 /* Filter out flags that are not valid for a class or interface in the
2919 top-level access_flags field. */
2920 flags &= valid_toplevel_class_flags;
2925 /* Get the access flags (modifiers) for a method to be used in the class
   file's method_info access_flags field.  Like get_access_flags, but
   private inner-class constructors are widened to package-private so
   the enclosing class's synthetic accessors can reach them.  */
2929 get_method_access_flags (tree decl)
2931 int flags = get_access_flags (decl);
2933 /* Promote "private" inner-class constructors to package-private. */
2934 if (DECL_CONSTRUCTOR_P (decl)
2935 && INNER_CLASS_DECL_P (TYPE_NAME (DECL_CONTEXT (decl))))
2936 flags &= ~(ACC_PRIVATE);
2941 /* Generate and return a list of chunks containing the class CLAS
2942 in the .class file representation. The list can be written to a
2943 .class file using write_chunks. Allocate chunks from obstack WORK. */
/* Layout emitted, in class-file order: magic/version header, constant
   pool (filled in last, once all constants are known), access flags,
   this/super/interfaces, fields, methods (with Code / Exceptions
   attributes), then class-level attributes (SourceFile, gcj marker,
   InnerClasses, Deprecated).  */
2945 static GTY(()) tree SourceFile_node;
2946 static struct chunk *
2947 generate_classfile (tree clas, struct jcf_partial *state)
2949 struct chunk *cpool_chunk;
2950 const char *source_file, *s;
2953 unsigned char *fields_count_ptr;
2954 int fields_count = 0;
2955 unsigned char *methods_count_ptr;
2956 int methods_count = 0;
2959 = clas == object_type_node ? 0 : BINFO_N_BASE_BINFOS (TYPE_BINFO (clas));
2961 ptr = append_chunk (NULL, 8, state);
2962 PUT4 (0xCafeBabe); /* Magic number */
2963 PUT2 (3); /* Minor version */
2964 PUT2 (45); /* Major version */
/* Reserve a zero-length chunk for the constant pool; its data and size
   are backpatched at the end of this function.  */
2966 append_chunk (NULL, 0, state);
2967 cpool_chunk = state->chunk;
2969 /* Next allocate the chunk containing access_flags through fields_count. */
2970 if (clas == object_type_node)
2973 i = 8 + 2 * total_supers;
2974 ptr = append_chunk (NULL, i, state);
2975 i = get_classfile_modifiers (TYPE_NAME (clas));
2976 PUT2 (i); /* access_flags */
2977 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2978 if (clas == object_type_node)
2980 PUT2(0); /* super_class */
2981 PUT2(0); /* interfaces_count */
/* Non-Object classes: base binfo 0 is the superclass, the rest are
   implemented interfaces.  */
2985 tree binfo = TYPE_BINFO (clas);
2986 tree base_binfo = BINFO_BASE_BINFO (binfo, 0);
2987 int j = find_class_constant (&state->cpool, BINFO_TYPE (base_binfo));
2989 PUT2 (j); /* super_class */
2990 PUT2 (total_supers - 1); /* interfaces_count */
2991 for (i = 1; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2993 j = find_class_constant (&state->cpool, BINFO_TYPE (base_binfo));
2997 fields_count_ptr = ptr;
/* Emit one field_info per field; unnamed/artificial fields are
   presumably skipped by the elided branch body below.  */
2999 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
3001 int have_value, attr_count = 0;
3002 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
3004 ptr = append_chunk (NULL, 8, state);
3005 i = get_access_flags (part); PUT2 (i);
3006 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
3007 i = find_utf8_constant (&state->cpool,
3008 build_java_signature (TREE_TYPE (part)));
/* A ConstantValue attribute is emitted only for static final fields
   with a compile-time-constant primitive or String initializer.  */
3010 have_value = DECL_INITIAL (part) != NULL_TREE
3011 && FIELD_STATIC (part) && CONSTANT_VALUE_P (DECL_INITIAL (part))
3012 && FIELD_FINAL (part)
3013 && (JPRIMITIVE_TYPE_P (TREE_TYPE (part))
3014 || TREE_TYPE (part) == string_ptr_type_node);
3018 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
3019 || FIELD_SYNTHETIC (part))
3021 if (FIELD_DEPRECATED (part))
3024 PUT2 (attr_count); /* attributes_count */
3027 tree init = DECL_INITIAL (part);
3028 static tree ConstantValue_node = NULL_TREE;
3029 if (TREE_TYPE (part) != TREE_TYPE (init))
3030 fatal_error ("field initializer type mismatch");
3031 ptr = append_chunk (NULL, 8, state);
3032 if (ConstantValue_node == NULL_TREE)
3033 ConstantValue_node = get_identifier ("ConstantValue");
3034 i = find_utf8_constant (&state->cpool, ConstantValue_node);
3035 PUT2 (i); /* attribute_name_index */
3036 PUT4 (2); /* attribute_length */
3037 i = find_constant_index (init, state); PUT2 (i);
3039 /* Emit the "Synthetic" attribute for val$<x> and this$<n>
3040 fields and other fields which need it. */
3041 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
3042 || FIELD_SYNTHETIC (part))
3043 ptr = append_synthetic_attribute (state);
3044 if (FIELD_DEPRECATED (part))
3045 append_deprecated_attribute (state);
/* Backpatch the field count now that we know it.  */
3048 ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
3050 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
/* Emit one method_info per method.  */
3053 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
3055 struct jcf_block *block;
3056 tree function_body = DECL_FUNCTION_BODY (part);
3057 tree body = function_body == NULL_TREE ? NULL_TREE
3058 : BLOCK_EXPR_BODY (function_body);
3059 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
3061 tree type = TREE_TYPE (part);
3062 tree save_function = current_function_decl;
3063 int synthetic_p = 0;
3065 /* Invisible Miranda methods shouldn't end up in the .class
3067 if (METHOD_INVISIBLE (part))
3070 current_function_decl = part;
3071 ptr = append_chunk (NULL, 8, state);
3072 i = get_method_access_flags (part); PUT2 (i);
3073 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3074 i = find_utf8_constant (&state->cpool, build_java_signature (type));
/* attributes_count = (Code if a body exists) + (Exceptions if throws).  */
3076 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
3078 /* Make room for the Synthetic attribute (of zero length.) */
3079 if (DECL_FINIT_P (part)
3080 || DECL_INSTINIT_P (part)
3081 || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
3082 || TYPE_DOT_CLASS (clas) == part)
3087 /* Make room for Deprecated attribute. */
3088 if (METHOD_DEPRECATED (part))
3091 PUT2 (i); /* attributes_count */
3094 ptr = append_synthetic_attribute (state);
3096 if (body != NULL_TREE)
3098 int code_attributes_count = 0;
3099 static tree Code_node = NULL_TREE;
3101 unsigned char *attr_len_ptr;
3102 struct jcf_handler *handler;
3103 if (Code_node == NULL_TREE)
3104 Code_node = get_identifier ("Code");
3105 ptr = append_chunk (NULL, 14, state);
3106 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
/* Generate the method's bytecode, then resolve branches.  */
3108 init_jcf_method (state, part);
3109 get_jcf_label_here (state); /* Force a first block. */
3110 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3111 localvar_alloc (t, state);
3112 state->num_jsrs = 0;
3113 generate_bytecode_insns (body, IGNORE_TARGET, state);
3114 if (CAN_COMPLETE_NORMALLY (body))
3116 if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
3119 OP1 (OPCODE_return);
3121 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3122 maybe_free_localvar (t, state, 1);
3123 if (state->return_value_decl != NULL_TREE)
3124 maybe_free_localvar (state->return_value_decl, state, 1);
3125 finish_jcf_block (state);
3126 perform_relocations (state);
/* Compute the Code attribute_length: fixed header + code +
   exception table + optional LineNumberTable/LocalVariableTable.  */
3129 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
3130 if (state->linenumber_count > 0)
3132 code_attributes_count++;
3133 i += 8 + 4 * state->linenumber_count;
3135 if (state->lvar_count > 0)
3137 code_attributes_count++;
3138 i += 8 + 10 * state->lvar_count;
3140 UNSAFE_PUT4 (i); /* attribute_length */
3141 UNSAFE_PUT2 (state->code_SP_max); /* max_stack */
3142 UNSAFE_PUT2 (localvar_max); /* max_locals */
3143 UNSAFE_PUT4 (state->code_length);
3145 /* Emit the exception table. */
3146 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
3147 PUT2 (state->num_handlers); /* exception_table_length */
3148 handler = state->handlers;
3149 for (; handler != NULL; handler = handler->next)
3152 PUT2 (handler->start_label->pc);
3153 PUT2 (handler->end_label->pc);
3154 PUT2 (handler->handler_label->pc);
3155 if (handler->type == NULL_TREE)
3158 type_index = find_class_constant (&state->cpool,
3163 ptr = append_chunk (NULL, 2, state);
3164 PUT2 (code_attributes_count);
3166 /* Write the LineNumberTable attribute. */
3167 if (state->linenumber_count > 0)
3169 static tree LineNumberTable_node = NULL_TREE;
3170 ptr = append_chunk (NULL,
3171 8 + 4 * state->linenumber_count, state);
3172 if (LineNumberTable_node == NULL_TREE)
3173 LineNumberTable_node = get_identifier ("LineNumberTable");
3174 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3175 PUT2 (i); /* attribute_name_index */
3176 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
3177 i = state->linenumber_count; PUT2 (i);
3178 for (block = state->blocks; block != NULL; block = block->next)
3180 int line = block->linenumber;
3189 /* Write the LocalVariableTable attribute. */
3190 if (state->lvar_count > 0)
3192 static tree LocalVariableTable_node = NULL_TREE;
3193 struct localvar_info *lvar = state->first_lvar;
3194 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3195 if (LocalVariableTable_node == NULL_TREE)
3196 LocalVariableTable_node = get_identifier("LocalVariableTable");
3197 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3198 PUT2 (i); /* attribute_name_index */
3199 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
3200 i = state->lvar_count; PUT2 (i);
3201 for ( ; lvar != NULL; lvar = lvar->next)
3203 tree name = DECL_NAME (lvar->decl);
3204 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
3205 i = lvar->start_label->pc; PUT2 (i);
3206 i = lvar->end_label->pc - i; PUT2 (i);
3207 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3208 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
3209 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
/* Write the Exceptions attribute for a `throws' clause.  */
3213 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3215 tree t = DECL_FUNCTION_THROWS (part);
3216 int throws_count = list_length (t);
3217 static tree Exceptions_node = NULL_TREE;
3218 if (Exceptions_node == NULL_TREE)
3219 Exceptions_node = get_identifier ("Exceptions");
3220 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3221 i = find_utf8_constant (&state->cpool, Exceptions_node);
3222 PUT2 (i); /* attribute_name_index */
3223 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
3224 i = throws_count; PUT2 (i);
3225 for (; t != NULL_TREE; t = TREE_CHAIN (t))
3227 i = find_class_constant (&state->cpool, TREE_VALUE (t));
3232 if (METHOD_DEPRECATED (part))
3233 append_deprecated_attribute (state);
3236 current_function_decl = save_function;
3238 ptr = methods_count_ptr; UNSAFE_PUT2 (methods_count);
/* Strip any directory prefix from the source file name for the
   SourceFile attribute ('/' and '\\' both treated as separators).  */
3240 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3241 for (s = source_file; ; s++)
3246 if (ch == '/' || ch == '\\')
3249 ptr = append_chunk (NULL, 10, state);
3251 i = 1; /* Source file always exists as an attribute */
3252 if (INNER_CLASS_TYPE_P (clas) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas)))
3254 if (clas == object_type_node)
3256 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3259 PUT2 (i); /* attributes_count */
3261 /* Generate the SourceFile attribute. */
3262 if (SourceFile_node == NULL_TREE)
3264 SourceFile_node = get_identifier ("SourceFile");
3267 i = find_utf8_constant (&state->cpool, SourceFile_node);
3268 PUT2 (i); /* attribute_name_index */
3270 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3272 append_gcj_attribute (state, clas);
3273 append_innerclasses_attribute (state, clas);
3274 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3275 append_deprecated_attribute (state);
3277 /* Now, finally, generate the contents of the constant pool chunk. */
3278 i = count_constant_pool_bytes (&state->cpool);
3279 ptr = obstack_alloc (state->chunk_obstack, i);
3280 cpool_chunk->data = ptr;
3281 cpool_chunk->size = i;
3282 write_constant_pool (&state->cpool, ptr, i);
3283 return state->first;
/* Append a zero-length "Synthetic" attribute (2-byte name index +
   4-byte length of 0) to STATE's chunk list.  The identifier node is
   cached in Synthetic_node across calls (GTY so the GC sees it).  */
3286 static GTY(()) tree Synthetic_node;
3287 static unsigned char *
3288 append_synthetic_attribute (struct jcf_partial *state)
3290 unsigned char *ptr = append_chunk (NULL, 6, state);
3293 if (Synthetic_node == NULL_TREE)
3295 Synthetic_node = get_identifier ("Synthetic");
3297 i = find_utf8_constant (&state->cpool, Synthetic_node);
3298 PUT2 (i); /* Attribute string index */
3299 PUT4 (0); /* Attribute length */
/* Append a zero-length "Deprecated" attribute (name index + length 0)
   to STATE's chunk list.  Used for deprecated classes, fields and
   methods alike.  */
3305 append_deprecated_attribute (struct jcf_partial *state)
3307 unsigned char *ptr = append_chunk (NULL, 6, state);
3310 i = find_utf8_constant (&state->cpool, get_identifier ("Deprecated"));
3311 PUT2 (i); /* Attribute string index */
3312 PUT4 (0); /* Attribute length */
/* Append the zero-length "gnu.gcj.gcj-compiled" marker attribute for
   CLASS.  java.lang.Object gets special handling in the elided branch
   before the emission below — presumably it is excluded; the guard
   tests class != object_type_node.  */
3316 append_gcj_attribute (struct jcf_partial *state, tree class)
3321 if (class != object_type_node)
3324 ptr = append_chunk (NULL, 6, state); /* 2+4 */
3325 i = find_utf8_constant (&state->cpool,
3326 get_identifier ("gnu.gcj.gcj-compiled"));
3327 PUT2 (i); /* Attribute string index */
3328 PUT4 (0); /* Attribute length */
/* Append an "InnerClasses" attribute for CLASS, listing every inner
   class visible from it (itself, its enclosing classes, and the ones
   it declares).  Emits nothing unless CLASS is an inner class or
   declares some.  The attribute length and entry count are written as
   zeros first and backpatched once the entries are counted.  */
3331 static tree InnerClasses_node;
3333 append_innerclasses_attribute (struct jcf_partial *state, tree class)
3335 tree orig_decl = TYPE_NAME (class);
3338 unsigned char *ptr, *length_marker, *number_marker;
3340 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3343 ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3345 if (InnerClasses_node == NULL_TREE)
3347 InnerClasses_node = get_identifier ("InnerClasses");
3349 i = find_utf8_constant (&state->cpool, InnerClasses_node);
3351 length_marker = ptr; PUT4 (0); /* length, to be later patched */
3352 number_marker = ptr; PUT2 (0); /* number of classes, tblp */
3354 /* Generate the entries: all inner classes visible from the one we
3355 process: itself, up and down. */
3356 while (class && INNER_CLASS_TYPE_P (class))
/* Derive the simple inner name: scan backward from the end of the
   mangled name to the last '$'.  */
3360 decl = TYPE_NAME (class);
3361 n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3362 IDENTIFIER_LENGTH (DECL_NAME (decl));
3364 while (n[-1] != '$')
3366 append_innerclasses_attribute_entry (state, decl, get_identifier (n));
3369 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
/* Then the inner classes this class itself declares.  */
3373 for (current = DECL_INNER_CLASS_LIST (decl);
3374 current; current = TREE_CHAIN (current))
3376 append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3377 TREE_VALUE (current));
/* Backpatch attribute_length (8 bytes per entry + 2 for the count)
   and number_of_classes.  */
3381 ptr = length_marker; PUT4 (8*length+2);
3382 ptr = number_marker; PUT2 (length);
/* Append one 8-byte InnerClasses table entry for DECL (named NAME):
   inner_class_info_index, outer_class_info_index, inner_name_index,
   inner_class_access_flags.  For anonymous classes the outer-class and
   inner-name indices stay 0, per the class file specification.  */
3386 append_innerclasses_attribute_entry (struct jcf_partial *state,
3387 tree decl, tree name)
3390 int ocii = 0, ini = 0;
3391 unsigned char *ptr = append_chunk (NULL, 8, state);
3393 icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3395 /* Sun's implementation seems to generate ocii to 0 for inner
3396 classes (which aren't considered members of the class they're
3397 in.) The specs are saying that if the class is anonymous,
3398 inner_name_index must be zero. */
3399 if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl)))
3401 ocii = find_class_constant (&state->cpool,
3402 TREE_TYPE (DECL_CONTEXT (decl)));
3403 ini = find_utf8_constant (&state->cpool, name);
3405 icaf = get_access_flags (decl);
3407 PUT2 (icii); PUT2 (ocii); PUT2 (ini); PUT2 (icaf);
/* Build and return the output ".class" file path for CLAS, as a
   freshly xmalloc'd string the caller must free.  The class name is
   converted to a relative path ('.' -> DIR_SEPARATOR).  With no -d
   option (jcf_write_base_directory == NULL) the file goes next to the
   source file; otherwise it goes under the base directory, and any
   missing intermediate directories are created.  */
3411 make_class_file_name (tree clas)
3413 const char *dname, *cname, *slash;
3418 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3419 "", '.', DIR_SEPARATOR,
3421 if (jcf_write_base_directory == NULL)
3423 /* Make sure we put the class file into the .java file's
3424 directory, and not into some subdirectory thereof. */
3426 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3427 slash = strrchr (dname, DIR_SEPARATOR);
3428 #ifdef DIR_SEPARATOR_2
3430 slash = strrchr (dname, DIR_SEPARATOR_2);
3436 sep = DIR_SEPARATOR;
3441 t = strrchr (cname, DIR_SEPARATOR);
3449 dname = jcf_write_base_directory;
3451 s = strrchr (dname, DIR_SEPARATOR);
3452 #ifdef DIR_SEPARATOR_2
3454 s = strrchr (dname, DIR_SEPARATOR_2);
3459 sep = DIR_SEPARATOR;
/* No separator in the base directory: treat the whole string as the
   directory part.  */
3461 slash = dname + strlen (dname);
/* Join directory + separator + class-derived path into R.  The
   explicit strcpy after strncpy guarantees NUL termination.  */
3464 r = xmalloc (slash - dname + strlen (cname) + 2);
3465 strncpy (r, dname, slash - dname);
3466 r[slash - dname] = sep;
3467 strcpy (&r[slash - dname + 1], cname);
3469 /* We try to make new directories when we need them. We only do
3470 this for directories which "might not" exist. For instance, we
3471 assume the `-d' directory exists, but we don't assume that any
3472 subdirectory below it exists. It might be worthwhile to keep
3473 track of which directories we've created to avoid gratuitous
3475 dname = r + (slash - dname) + 1;
3478 char *s = strchr (dname, sep);
3482 /* Try to make directory if it doesn't already exist. */
3483 if (stat (r, &sb) == -1
3484 && mkdir (r, 0755) == -1
3485 /* The directory might have been made by another process. */
3487 fatal_error ("can't create directory %s: %m", r);
3490 /* Skip consecutive separators. */
3491 for (dname = s + 1; *dname && *dname == sep; ++dname)
3498 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3499 The output .class file name is make_class_file_name(CLAS).
   The data is first written to <name>.class.tmp and then renamed into
   place, so concurrent compiler instances never observe a partially
   written class file.  Any I/O failure is fatal.  */
3502 write_classfile (tree clas)
3504 struct obstack *work = &temporary_obstack;
3505 struct jcf_partial state[1];
3506 char *class_file_name = make_class_file_name (clas);
3507 struct chunk *chunks;
3509 if (class_file_name != NULL)
3512 char *temporary_file_name;
3514 /* The .class file is initially written to a ".tmp" file so that
3515 if multiple instances of the compiler are running at once
3516 they do not see partially formed class files. */
3517 temporary_file_name = concat (class_file_name, ".tmp", NULL);
3518 stream = fopen (temporary_file_name, "wb");
3520 fatal_error ("can't open %s for writing: %m", temporary_file_name);
3522 jcf_dependency_add_target (class_file_name);
3523 init_jcf_state (state, work);
3524 chunks = generate_classfile (clas, state);
3525 write_chunks (stream, chunks);
/* fclose flushes buffered data; a nonzero return means the write
   itself may have failed, so check it.  */
3526 if (fclose (stream))
3527 fatal_error ("error closing %s: %m", temporary_file_name);
3529 /* If a file named by the string pointed to by `new' exists
3530 prior to the call to the `rename' function, the behavior
3531 is implementation-defined. ISO 9899-1990 7.9.4.2.
3533 For example, on Win32 with MSVCRT, it is an error. */
3535 unlink (class_file_name);
3537 if (rename (temporary_file_name, class_file_name) == -1)
3539 remove (temporary_file_name);
3540 fatal_error ("can't create %s: %m", class_file_name);
3542 free (temporary_file_name);
3543 free (class_file_name);
3545 release_jcf_state (state);
3549 string concatenation
3550 synchronized statement
3553 #include "gt-java-jcf-write.h"