1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
27 #include "coretypes.h"
32 #include "java-tree.h"
36 #include "java-opcodes.h"
37 #include "parse.h" /* for BLOCK_EXPR_BODY */
43 extern struct obstack temporary_obstack;
45 /* Base directory in which `.class' files should be written.
46 NULL means to put the file into the same directory as the
47 corresponding .java file. */
/* Set from the -foutput-class-dir option (presumably; the option
   handling is not visible in this excerpt -- TODO confirm). */
48 const char *jcf_write_base_directory = NULL;
50 /* Make sure bytecode.data is big enough for at least N more bytes. */
53 do { CHECK_OP(state); \
54 if (state->bytecode.ptr + (N) > state->bytecode.limit) \
55 buffer_grow (&state->bytecode, N); } while (0)
57 /* Add a 1-byte instruction/operand I to bytecode.data,
58 assuming space has already been RESERVE'd. */
60 #define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
62 /* Like OP1, but I is a 2-byte big endian integer. */
65 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
67 /* Like OP1, but I is a 4-byte big endian integer. */
70 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
71 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
73 /* Macro to call each time we push I words on the JVM stack. */
75 #define NOTE_PUSH(I) \
76 do { state->code_SP += (I); \
77 if (state->code_SP > state->code_SP_max) \
78 state->code_SP_max = state->code_SP; } while (0)
80 /* Macro to call each time we pop I words from the JVM stack. */
83 do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
85 /* A chunk or segment of a .class file. */
89 /* The next segment of this .class file. */
92 /* The actual data in this segment to be written to the .class file. */
95 /* The size of the segment to be written to the .class file. */
/* Special pseudo-pc values used in jcf_block.pc to mark labels that
   have not yet been assigned a real bytecode offset (see the comments
   on struct jcf_block below). */
99 #define PENDING_CLEANUP_PC (-3)
100 #define PENDING_EXIT_PC (-2)
101 #define UNDEFINED_PC (-1)
103 /* Each "block" represents a label plus the bytecode instructions following.
104 There may be branches out of the block, but no incoming jumps, except
105 to the beginning of the block.
107 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
108 associated code yet), but it is an undefined label.
113 /* For blocks that are defined, the next block (in pc order).
114 For blocks that are not-yet-defined the end label of a LABELED_BLOCK_EXPR
115 or a cleanup expression (from a TRY_FINALLY_EXPR),
116 this is the next (outer) such end label, in a stack headed by
117 labeled_blocks in jcf_partial. */
118 struct jcf_block *next;
120 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
121 pc is PENDING_EXIT_PC.
122 In the not-yet-defined end label for pending cleanup subroutine,
123 pc is PENDING_CLEANUP_PC.
124 For other not-yet-defined labels, pc is UNDEFINED_PC.
126 If the label has been defined:
127 Until perform_relocations is finished, this is the maximum possible
128 value of the bytecode offset at the beginning of this block.
129 After perform_relocations, it is the actual offset (pc). */
134 /* After finish_jcf_block is called, the actual instructions
135 contained in this block. Before that NULL, and the instructions
136 are in state->bytecode. */
140 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
141 covered by the cleanup. */
142 struct jcf_block *start_label;
146 /* Set of relocations (in reverse offset order) for this block. */
147 struct jcf_relocation *relocations;
149 /* If this block is that of the not-yet-defined end label of
150 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
151 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
156 /* A "relocation" type for the 0-3 bytes of padding at the start
157 of a tableswitch or a lookupswitch. */
158 #define SWITCH_ALIGN_RELOC 4
160 /* A relocation type for the labels in a tableswitch or a lookupswitch;
161 these are relative to the start of the instruction, but (due to
162 the 0-3 bytes of padding), we don't know the offset before relocation. */
163 #define BLOCK_START_RELOC 1
165 struct jcf_relocation
167 /* Next relocation for the current jcf_block. */
168 struct jcf_relocation *next;
170 /* The (byte) offset within the current block that needs to be relocated. */
171 HOST_WIDE_INT offset;
173 /* 0 if offset is a 4-byte relative offset.
174 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
175 for proper alignment in tableswitch/lookupswitch instructions.
176 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
177 to the start of the containing block.
178 -1 if offset is a 2-byte relative offset.
179 < -1 if offset is the address of an instruction with a 2-byte offset
180 that does not have a corresponding 4-byte offset version, in which
181 case the absolute value of kind is the inverted opcode.
182 > 4 if offset is the address of an instruction (such as jsr) with a
183 2-byte offset that does have a corresponding 4-byte offset version,
184 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
/* (The `int kind;' field this comment describes is not visible in this
   excerpt -- NOTE(review): it is referenced as reloc->kind elsewhere.) */
187 /* The label the relocation wants to actually transfer to. */
188 struct jcf_block *label;
191 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
192 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
194 /* State for a single catch clause. */
/* Chain link: next handler for the current method. */
198 struct jcf_handler *next;
/* The (start, end) label range protected by this handler, and the
   label of the handler code itself. */
200 struct jcf_block *start_label;
201 struct jcf_block *end_label;
202 struct jcf_block *handler_label;
204 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
208 /* State for the current switch statement. */
210 struct jcf_switch_state
/* Enclosing switch statement, if any (switches can nest). */
212 struct jcf_switch_state *prev;
213 struct jcf_block *default_label;
/* Chain of case relocations, later resolved into the
   tableswitch/lookupswitch jump table. */
215 struct jcf_relocation *cases;
/* Smallest and largest case values seen so far. */
217 HOST_WIDE_INT min_case, max_case;
220 /* This structure is used to contain the various pieces that will
221 become a .class file. */
/* Obstack from which chunks, blocks, handlers, and relocations for
   this class are allocated. */
227 struct obstack *chunk_obstack;
230 /* List of basic blocks for the current method. */
231 struct jcf_block *blocks;
232 struct jcf_block *last_block;
/* Head and tail of the debug local-variable info chain. */
234 struct localvar_info *first_lvar;
235 struct localvar_info *last_lvar;
/* Number of line-number entries emitted for the current method. */
240 int linenumber_count;
242 /* Until perform_relocations, this is an upper bound on the number
243 of bytes (so far) in the instructions for the current method. */
246 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
247 struct jcf_block *labeled_blocks;
249 /* The current stack size (stack pointer) in the current method. */
252 /* The largest extent of stack size (stack pointer) in the current method. */
255 /* Contains a mapping from local var slot number to localvar_info. */
256 struct buffer localvars;
258 /* The buffer allocated for bytecode for the current jcf_block. */
259 struct buffer bytecode;
261 /* Chain of exception handlers for the current method. */
262 struct jcf_handler *handlers;
264 /* Last element in handlers chain. */
265 struct jcf_handler *last_handler;
267 /* Number of exception handlers for the current method. */
270 /* Number of finalizers we are currently nested within. */
273 /* If non-NULL, use this for the return value. */
274 tree return_value_decl;
276 /* Information about the current switch statement. */
277 struct jcf_switch_state *sw_state;
279 /* The count of jsr instructions that have been emitted. */
/* Forward declarations for the static helpers in this file.
   NOTE(review): the original list declared push_long_const and
   emit_reloc twice each; the redundant duplicates are removed here.
   The dropped middle parameter of alloc_handler (its end_label) is
   restored to match the definition below.  */
283 static void generate_bytecode_insns (tree, int, struct jcf_partial *);
284 static struct chunk * alloc_chunk (struct chunk *, unsigned char *,
285 int, struct obstack *);
286 static unsigned char * append_chunk (unsigned char *, int,
287 struct jcf_partial *);
288 static void append_chunk_copy (unsigned char *, int, struct jcf_partial *);
289 static struct jcf_block * gen_jcf_label (struct jcf_partial *);
290 static void finish_jcf_block (struct jcf_partial *);
291 static void define_jcf_label (struct jcf_block *, struct jcf_partial *);
292 static struct jcf_block * get_jcf_label_here (struct jcf_partial *);
293 static void put_linenumber (int, struct jcf_partial *);
294 static void localvar_alloc (tree, struct jcf_partial *);
295 static void maybe_free_localvar (tree, struct jcf_partial *, int);
296 static int get_access_flags (tree);
297 static void write_chunks (FILE *, struct chunk *);
298 static int adjust_typed_op (tree, int);
299 static void generate_bytecode_conditional (tree, struct jcf_block *,
300 struct jcf_block *, int,
301 struct jcf_partial *);
302 static void generate_bytecode_return (tree, struct jcf_partial *);
303 static void perform_relocations (struct jcf_partial *);
304 static void init_jcf_state (struct jcf_partial *, struct obstack *);
305 static void init_jcf_method (struct jcf_partial *, tree);
306 static void release_jcf_state (struct jcf_partial *);
307 static struct chunk * generate_classfile (tree, struct jcf_partial *);
308 static struct jcf_handler *alloc_handler (struct jcf_block *,
struct jcf_block *,
310 struct jcf_partial *);
311 static void emit_iinc (tree, HOST_WIDE_INT, struct jcf_partial *);
312 static void emit_reloc (HOST_WIDE_INT, int, struct jcf_block *,
313 struct jcf_partial *);
314 static void push_constant1 (HOST_WIDE_INT, struct jcf_partial *);
315 static void push_constant2 (HOST_WIDE_INT, struct jcf_partial *);
316 static void push_int_const (HOST_WIDE_INT, struct jcf_partial *);
317 static int find_constant_wide (HOST_WIDE_INT, HOST_WIDE_INT,
318 struct jcf_partial *);
319 static void push_long_const (HOST_WIDE_INT, HOST_WIDE_INT,
320 struct jcf_partial *);
321 static int find_constant_index (tree, struct jcf_partial *);
324 static void field_op (tree, int, struct jcf_partial *);
325 static void maybe_wide (int, int, struct jcf_partial *);
326 static void emit_dup (int, int, struct jcf_partial *);
327 static void emit_pop (int, struct jcf_partial *);
328 static void emit_load_or_store (tree, int, struct jcf_partial *);
329 static void emit_load (tree, struct jcf_partial *);
330 static void emit_store (tree, struct jcf_partial *);
331 static void emit_unop (enum java_opcode, tree, struct jcf_partial *);
332 static void emit_binop (enum java_opcode, tree, struct jcf_partial *);
335 static void emit_switch_reloc (struct jcf_block *, struct jcf_partial *);
336 static void emit_case_reloc (struct jcf_relocation *, struct jcf_partial *);
337 static void emit_if (struct jcf_block *, int, int, struct jcf_partial *);
338 static void emit_goto (struct jcf_block *, struct jcf_partial *);
339 static void emit_jsr (struct jcf_block *, struct jcf_partial *);
340 static void call_cleanups (struct jcf_block *, struct jcf_partial *);
341 static char *make_class_file_name (tree);
342 static unsigned char *append_synthetic_attribute (struct jcf_partial *);
343 static void append_deprecated_attribute (struct jcf_partial *);
344 static void append_innerclasses_attribute (struct jcf_partial *, tree);
345 static void append_innerclasses_attribute_entry (struct jcf_partial *, tree, tree);
346 static void append_gcj_attribute (struct jcf_partial *, tree);
348 /* Utility macros for appending (big-endian) data to a buffer.
349 We assume a local variable 'ptr' points into where we want to
350 write next, and we assume enough space has been allocated. */
352 #ifdef ENABLE_JC1_CHECKING
353 static int CHECK_PUT (void *, struct jcf_partial *, int);
356 CHECK_PUT (void *ptr, struct jcf_partial *state, int i)
358 if ((unsigned char *) ptr < state->chunk->data
359 || (unsigned char *) ptr + i > state->chunk->data + state->chunk->size)
365 #define CHECK_PUT(PTR, STATE, I) ((void)0)
368 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
369 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
370 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
371 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
373 /* There are some cases below where CHECK_PUT is guaranteed to fail.
374 Use the following macros in those specific cases. */
375 #define UNSAFE_PUT1(X) (*ptr++ = (X))
376 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
377 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
378 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
381 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
382 Set the data and size fields to DATA and SIZE, respectively.
383 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
/* Allocate a new chunk on obstack WORK, linked after LAST; give it a
   fresh SIZE-byte buffer when DATA is NULL and SIZE > 0.
   NOTE(review): the list-linking and return statements of this
   function are not visible in this excerpt.  */
385 static struct chunk *
386 alloc_chunk (struct chunk *last, unsigned char *data,
387 int size, struct obstack *work)
389 struct chunk *chunk = obstack_alloc (work, sizeof(struct chunk));
391 if (data == NULL && size > 0)
392 data = obstack_alloc (work, size);
402 #ifdef ENABLE_JC1_CHECKING
403 static int CHECK_OP (struct jcf_partial *);
406 CHECK_OP (struct jcf_partial *state)
408 if (state->bytecode.ptr > state->bytecode.limit)
414 #define CHECK_OP(STATE) ((void) 0)
/* Append a new chunk (of SIZE bytes, using DATA if non-NULL) to the
   class file being assembled in STATE, and return its data buffer so
   the caller can fill it in.  */
417 static unsigned char *
418 append_chunk (unsigned char *data, int size, struct jcf_partial *state)
420 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
421 if (state->first == NULL)
422 state->first = state->chunk;
423 return state->chunk->data;
/* Like append_chunk, but copy the SIZE bytes at DATA into the newly
   allocated chunk buffer.  */
427 append_chunk_copy (unsigned char *data, int size, struct jcf_partial *state)
429 unsigned char *ptr = append_chunk (NULL, size, state);
430 memcpy (ptr, data, size);
/* Allocate a new, as-yet-undefined label (pc == UNDEFINED_PC).
   The caller later places it with define_jcf_label.  */
433 static struct jcf_block *
434 gen_jcf_label (struct jcf_partial *state)
436 struct jcf_block *block
437 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block))
439 block->linenumber = -1;
440 block->pc = UNDEFINED_PC;
/* Close out the current block: move the accumulated bytecode from
   state->bytecode into a chunk, and bump state->code_length by the
   worst-case size of the block (each relocation may force its branch
   to grow to the wide form).  */
445 finish_jcf_block (struct jcf_partial *state)
447 struct jcf_block *block = state->last_block;
448 struct jcf_relocation *reloc;
449 int code_length = BUFFER_LENGTH (&state->bytecode);
450 int pc = state->code_length;
451 append_chunk_copy (state->bytecode.data, code_length, state);
452 BUFFER_RESET (&state->bytecode);
453 block->v.chunk = state->chunk;
455 /* Calculate code_length to the maximum value it can have. */
456 pc += block->v.chunk->size;
457 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
459 int kind = reloc->kind;
460 if (kind == SWITCH_ALIGN_RELOC)
462 else if (kind > BLOCK_START_RELOC)
463 pc += 2; /* 2-byte offset may grow to 4-byte offset */
465 pc += 5; /* May need to add a goto_w. */
467 state->code_length = pc;
/* Define LABEL at the current position: finish the previous block (if
   any), assign LABEL its (maximum) pc, and append it to the block
   chain in STATE.  */
471 define_jcf_label (struct jcf_block *label, struct jcf_partial *state)
473 if (state->last_block != NULL)
474 finish_jcf_block (state);
475 label->pc = state->code_length;
476 if (state->blocks == NULL)
477 state->blocks = label;
479 state->last_block->next = label;
480 state->last_block = label;
482 label->u.relocations = NULL;
/* Return a label defined at the current position.  Reuse the current
   block if no bytecode has been emitted into it yet; otherwise start
   a fresh one.  */
485 static struct jcf_block *
486 get_jcf_label_here (struct jcf_partial *state)
488 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
489 return state->last_block;
492 struct jcf_block *label = gen_jcf_label (state);
493 define_jcf_label (label, state);
498 /* Note a line number entry for the current PC and given LINE. */
501 put_linenumber (int line, struct jcf_partial *state)
503 struct jcf_block *label = get_jcf_label_here (state);
/* If this block already carries a line number, start a new block so
   each block records at most one LineNumberTable entry.  */
504 if (label->linenumber > 0)
506 label = gen_jcf_label (state);
507 define_jcf_label (label, state);
509 label->linenumber = line;
510 state->linenumber_count++;
513 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
514 in the range (START_LABEL, END_LABEL). */
516 static struct jcf_handler *
517 alloc_handler (struct jcf_block *start_label, struct jcf_block *end_label,
518 struct jcf_partial *state)
520 struct jcf_handler *handler
521 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
522 handler->start_label = start_label;
523 handler->end_label = end_label;
/* The handler code begins at the current position.  */
524 handler->handler_label = get_jcf_label_here (state);
/* Append to the method's handler chain (order matters in the
   exception table).  */
525 if (state->handlers == NULL)
526 state->handlers = handler;
528 state->last_handler->next = handler;
529 state->last_handler = handler;
530 handler->next = NULL;
531 state->num_handlers++;
536 /* The index of jvm local variable allocated for this DECL.
537 This is assigned when generating .class files;
538 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
539 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
541 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
545 struct localvar_info *next;
548 struct jcf_block *start_label;
549 struct jcf_block *end_label;
552 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
553 #define localvar_max \
554 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
/* Assign DECL a JVM local-variable slot (two consecutive slots for
   wide types, i.e. long/double), reusing a free slot if possible and
   growing state->localvars otherwise.  Also records debug info when
   enabled.  NOTE(review): several interior lines of this function are
   not visible in this excerpt.  */
557 localvar_alloc (tree decl, struct jcf_partial *state)
559 struct jcf_block *start_label = get_jcf_label_here (state);
560 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
562 register struct localvar_info *info;
563 register struct localvar_info **ptr = localvar_buffer;
564 register struct localvar_info **limit
565 = (struct localvar_info**) state->localvars.ptr;
/* Scan for a free slot (a free pair of slots when wide).  */
566 for (index = 0; ptr < limit; index++, ptr++)
569 && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
574 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
575 ptr = (struct localvar_info**) state->localvars.data + index;
576 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
578 info = obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
/* Mark the second slot of a wide variable as occupied.  */
581 ptr[1] = (struct localvar_info *)(~0);
582 DECL_LOCAL_INDEX (decl) = index;
584 info->start_label = start_label;
586 if (debug_info_level > DINFO_LEVEL_TERSE
587 && DECL_NAME (decl) != NULL_TREE)
589 /* Generate debugging info. */
591 if (state->last_lvar != NULL)
592 state->last_lvar->next = info;
594 state->first_lvar = info;
595 state->last_lvar = info;
/* Note that DECL's local slot range ends here, and (when REALLY is
   set -- presumably; the use of REALLY is not visible in this
   excerpt, TODO confirm) release its slot(s) for reuse.  */
601 maybe_free_localvar (tree decl, struct jcf_partial *state, int really)
603 struct jcf_block *end_label = get_jcf_label_here (state);
604 int index = DECL_LOCAL_INDEX (decl);
605 register struct localvar_info **ptr = &localvar_buffer [index];
606 register struct localvar_info *info = *ptr;
607 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
609 info->end_label = end_label;
/* Sanity checks: slot must still belong to DECL, and a wide variable
   must still own its second slot.  */
611 if (info->decl != decl)
618 if (ptr[1] != (struct localvar_info *)(~0))
/* Target contexts passed to generate_bytecode_insns: leave the value
   on the JVM stack, or discard it (NOTE(review): inferred from the
   STACK_TARGET/IGNORE_TARGET uses below -- confirm).  */
625 #define STACK_TARGET 1
626 #define IGNORE_TARGET 2
628 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
629 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
632 get_access_flags (tree decl)
635 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
636 if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
638 if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */
640 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
642 if (TREE_PROTECTED (decl))
643 flags |= ACC_PROTECTED;
644 if (TREE_PRIVATE (decl))
645 flags |= ACC_PRIVATE;
647 else if (TREE_CODE (decl) == TYPE_DECL)
649 if (CLASS_SUPER (decl))
651 if (CLASS_ABSTRACT (decl))
652 flags |= ACC_ABSTRACT;
653 if (CLASS_INTERFACE (decl))
654 flags |= ACC_INTERFACE;
655 if (CLASS_STATIC (decl))
657 if (CLASS_PRIVATE (decl))
658 flags |= ACC_PRIVATE;
659 if (CLASS_PROTECTED (decl))
660 flags |= ACC_PROTECTED;
/* Anonymous and local classes are not visible outside their
   compilation unit, so mark them private.  */
661 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
662 || LOCAL_CLASS_P (TREE_TYPE (decl)))
663 flags |= ACC_PRIVATE;
664 if (CLASS_STRICTFP (decl))
670 if (TREE_CODE (decl) == FUNCTION_DECL)
672 if (METHOD_NATIVE (decl))
674 if (METHOD_STATIC (decl))
676 if (METHOD_SYNCHRONIZED (decl))
677 flags |= ACC_SYNCHRONIZED;
678 if (METHOD_ABSTRACT (decl))
679 flags |= ACC_ABSTRACT;
680 if (METHOD_STRICTFP (decl))
685 if (FIELD_STATIC (decl))
687 if (FIELD_VOLATILE (decl))
688 flags |= ACC_VOLATILE;
689 if (FIELD_TRANSIENT (decl))
690 flags |= ACC_TRANSIENT;
695 /* Write the list of segments starting at CHUNKS to STREAM. */
/* NOTE(review): fwrite return values are not checked here; I/O errors
   are presumably detected elsewhere (e.g. via ferror) -- confirm.  */
698 write_chunks (FILE* stream, struct chunk *chunks)
700 for (; chunks != NULL; chunks = chunks->next)
701 fwrite (chunks->data, chunks->size, 1, stream);
704 /* Push a 1-word constant in the constant pool at the given INDEX.
705 (Caller is responsible for doing NOTE_PUSH.) */
/* NOTE(review): the function body (ldc vs. ldc_w selection,
   presumably) is not visible in this excerpt.  */
708 push_constant1 (HOST_WIDE_INT index, struct jcf_partial *state)
723 /* Push a 2-word constant in the constant pool at the given INDEX.
724 (Caller is responsible for doing NOTE_PUSH.) */
/* NOTE(review): the function body is not visible in this excerpt.  */
727 push_constant2 (HOST_WIDE_INT index, struct jcf_partial *state)
734 /* Push 32-bit integer constant on VM stack.
735 Caller is responsible for doing NOTE_PUSH. */
738 push_int_const (HOST_WIDE_INT i, struct jcf_partial *state)
/* Pick the shortest encoding: iconst_<n>, bipush, sipush, or a
   constant-pool load.  */
741 if (i >= -1 && i <= 5)
742 OP1(OPCODE_iconst_0 + i);
743 else if (i >= -128 && i < 128)
748 else if (i >= -32768 && i < 32768)
755 i = find_constant1 (&state->cpool, CONSTANT_Integer,
756 (jword)(i & 0xFFFFFFFF));
757 push_constant1 (i, state);
/* Return the constant-pool index of a CONSTANT_Long entry for the
   64-bit value whose low/high halves are LO/HI, allocating one if
   needed.  The shift extracts the high 32 bits portably.  */
762 find_constant_wide (HOST_WIDE_INT lo, HOST_WIDE_INT hi,
763 struct jcf_partial *state)
765 HOST_WIDE_INT w1, w2;
766 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
767 return find_constant2 (&state->cpool, CONSTANT_Long,
768 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
771 /* Find or allocate a constant pool entry for the given VALUE.
772 Return the index in the constant pool. */
775 find_constant_index (tree value, struct jcf_partial *state)
777 if (TREE_CODE (value) == INTEGER_CST)
779 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
780 return find_constant1 (&state->cpool, CONSTANT_Integer,
781 (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
783 return find_constant_wide (TREE_INT_CST_LOW (value),
784 TREE_INT_CST_HIGH (value), state);
786 else if (TREE_CODE (value) == REAL_CST)
/* Convert the REAL_CST to the target's IEEE words, then intern it
   as a Float or Double entry depending on precision.  */
790 real_to_target (words, &TREE_REAL_CST (value),
791 TYPE_MODE (TREE_TYPE (value)));
792 words[0] &= 0xffffffff;
793 words[1] &= 0xffffffff;
795 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
796 return find_constant1 (&state->cpool, CONSTANT_Float, (jword)words[0]);
798 return find_constant2 (&state->cpool, CONSTANT_Double,
799 (jword)words[1-FLOAT_WORDS_BIG_ENDIAN],
800 (jword)words[FLOAT_WORDS_BIG_ENDIAN]);
802 else if (TREE_CODE (value) == STRING_CST)
803 return find_string_constant (&state->cpool, value);
809 /* Push 64-bit long constant on VM stack.
810 Caller is responsible for doing NOTE_PUSH. */
813 push_long_const (HOST_WIDE_INT lo, HOST_WIDE_INT hi, struct jcf_partial *state)
815 HOST_WIDE_INT highpart, dummy;
816 jint lowpart = WORD_TO_INT (lo);
818 rshift_double (lo, hi, 32, 64, &highpart, &dummy, 1);
/* Shortest encodings first: lconst_0/lconst_1 ...  */
820 if (highpart == 0 && (lowpart == 0 || lowpart == 1))
823 OP1(OPCODE_lconst_0 + lowpart);
/* ... then a small int constant widened with i2l ...  */
825 else if ((highpart == 0 && lowpart > 0 && lowpart < 32768)
826 || (highpart == -1 && lowpart < 0 && lowpart >= -32768))
828 push_int_const (lowpart, state);
/* ... otherwise a CONSTANT_Long pool entry via ldc2_w.  */
833 push_constant2 (find_constant_wide (lo, hi, state), state);
/* Emit OPCODE (a get/putfield or get/putstatic instruction,
   presumably -- the emission lines are not visible here) with the
   constant-pool Fieldref index for FIELD.  */
837 field_op (tree field, int opcode, struct jcf_partial *state)
839 int index = find_fieldref_index (&state->cpool, field);
845 /* Returns an integer in the range 0 (for 'int') through 4 (for object
846 reference) to 7 (for 'short') which matches the pattern of how JVM
847 opcodes typically depend on the operand type. */
850 adjust_typed_op (tree type, int max)
852 switch (TREE_CODE (type))
855 case RECORD_TYPE: return 4;
/* MAX limits the adjustment for opcode families that only have the
   int/long/float/double/ref variants.  */
857 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
859 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
861 switch (TYPE_PRECISION (type))
863 case 8: return max < 5 ? 0 : 5;
864 case 16: return max < 7 ? 0 : 7;
870 switch (TYPE_PRECISION (type))
/* Emit OPCODE with local-slot operand INDEX, prefixing with the
   `wide' opcode when INDEX does not fit in one byte (presumably --
   the body is not visible in this excerpt; TODO confirm).  */
883 maybe_wide (int opcode, int index, struct jcf_partial *state)
900 /* Compile code to duplicate with offset, where
901 SIZE is the size of the stack item to duplicate (1 or 2), and
902 OFFSET is where to insert the result (must be 0, 1, or 2).
903 (The new words get inserted at stack[SP-size-offset].) */
906 emit_dup (int size, int offset, struct jcf_partial *state)
/* Select among the six dup variants by (size, offset).  */
913 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
914 else if (offset == 1)
915 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
916 else if (offset == 2)
917 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
/* Pop SIZE words (1 or 2) off the JVM stack: pop for 1, pop2 for 2.  */
925 emit_pop (int size, struct jcf_partial *state)
928 OP1 (OPCODE_pop - 1 + size);
/* Emit an iinc of VALUE for the local slot of VAR, using the `wide'
   form when VALUE or the slot number does not fit the short form.  */
932 emit_iinc (tree var, HOST_WIDE_INT value, struct jcf_partial *state)
934 int slot = DECL_LOCAL_INDEX (var);
936 if (value < -128 || value > 127 || slot >= 256)
/* Emit a load from, or store into, local variable VAR, choosing the
   short [ilfda]{load,store}_<n> form for slots 0-3 and the general
   (possibly wide) form otherwise.  */
954 emit_load_or_store (tree var, /* Variable to load from or store into. */
955 int opcode, /* Either OPCODE_iload or OPCODE_istore. */
956 struct jcf_partial *state)
958 tree type = TREE_TYPE (var);
959 int kind = adjust_typed_op (type, 4);
960 int index = DECL_LOCAL_INDEX (var);
964 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
967 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
/* Load VAR onto the stack and account for the pushed word(s).  */
971 emit_load (tree var, struct jcf_partial *state)
973 emit_load_or_store (var, OPCODE_iload, state);
974 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
/* Store the top of stack into VAR and account for the popped word(s).  */
978 emit_store (tree var, struct jcf_partial *state)
980 emit_load_or_store (var, OPCODE_istore, state);
981 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
/* Emit the single-byte unary OPCODE (operand stays on the stack, so
   no stack-depth bookkeeping is needed).  */
985 emit_unop (enum java_opcode opcode, tree type ATTRIBUTE_UNUSED,
986 struct jcf_partial *state)
/* Emit binary OPCODE; a binary op pops one operand's worth of words
   net (1 for narrow TYPE, 2 for wide).  */
993 emit_binop (enum java_opcode opcode, tree type, struct jcf_partial *state)
995 int size = TYPE_IS_WIDE (type) ? 2 : 1;
/* Record a relocation of kind KIND (see struct jcf_relocation) at the
   current bytecode offset, targeting label TARGET; VALUE distinguishes
   the sub-forms.  Relocations are prepended, giving the reverse-offset
   order that struct jcf_block documents.  */
1002 emit_reloc (HOST_WIDE_INT value, int kind,
1003 struct jcf_block *target, struct jcf_partial *state)
1005 struct jcf_relocation *reloc
1006 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1007 struct jcf_block *block = state->last_block;
1008 reloc->next = block->u.relocations;
1009 block->u.relocations = reloc;
1010 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1011 reloc->label = target;
1013 if (kind == 0 || kind == BLOCK_START_RELOC)
1015 else if (kind != SWITCH_ALIGN_RELOC)
/* Emit a block-start-relative 4-byte relocation for LABEL, used for
   the entries of a tableswitch/lookupswitch.  */
1020 emit_switch_reloc (struct jcf_block *label, struct jcf_partial *state)
1022 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1025 /* Similar to emit_switch_reloc,
1026 but re-uses an existing case reloc. */
1029 emit_case_reloc (struct jcf_relocation *reloc, struct jcf_partial *state)
1031 struct jcf_block *block = state->last_block;
/* Re-link RELOC onto the current block and repoint it at the current
   offset (its label was set when the case was first seen).  */
1032 reloc->next = block->u.relocations;
1033 block->u.relocations = reloc;
1034 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1035 reloc->kind = BLOCK_START_RELOC;
1039 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1040 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
/* The negative kind (- inv_opcode) tells perform_relocations how to
   invert the branch if it must grow to the wide form.  */
1043 emit_if (struct jcf_block *target, int opcode, int inv_opcode,
1044 struct jcf_partial *state)
1048 /* value is 1 byte from reloc back to start of instruction. */
1049 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
/* Emit an unconditional goto to TARGET; the OPCODE_goto_w kind lets
   the relocation pass widen it to goto_w if the offset requires.  */
1053 emit_goto (struct jcf_block *target, struct jcf_partial *state)
1057 /* Value is 1 byte from reloc back to start of instruction. */
1058 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
/* Emit a jsr (subroutine call) to TARGET; may be widened to jsr_w by
   the relocation pass.  */
1062 emit_jsr (struct jcf_block *target, struct jcf_partial *state)
1066 /* Value is 1 byte from reloc back to start of instruction. */
1067 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1071 /* Generate code to evaluate EXP. If the result is true,
1072 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1073 TRUE_BRANCH_FIRST is a code generation hint that the
1074 TRUE_LABEL may follow right after this. (The idea is that we
1075 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
/* NOTE(review): many interior lines of this function (case labels,
   braces, abort() checks) are not visible in this excerpt.  */
1078 generate_bytecode_conditional (tree exp,
1079 struct jcf_block *true_label,
1080 struct jcf_block *false_label,
1081 int true_branch_first,
1082 struct jcf_partial *state)
1084 tree exp0, exp1, type;
1085 int save_SP = state->code_SP;
1086 enum java_opcode op, negop;
1087 switch (TREE_CODE (exp))
/* Constant condition: branch unconditionally one way.  */
1090 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
/* Conditional expression used as a condition: evaluate the guard,
   then each arm as its own condition.  */
1094 struct jcf_block *then_label = gen_jcf_label (state);
1095 struct jcf_block *else_label = gen_jcf_label (state);
1096 int save_SP_before, save_SP_after;
1097 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1098 then_label, else_label, 1, state);
1099 define_jcf_label (then_label, state);
1100 save_SP_before = state->code_SP;
1101 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1102 true_label, false_label, 1, state);
1103 save_SP_after = state->code_SP;
1104 state->code_SP = save_SP_before;
1105 define_jcf_label (else_label, state);
1106 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1107 true_label, false_label,
1108 true_branch_first, state);
/* Both arms must leave the same stack depth.  */
1109 if (state->code_SP != save_SP_after)
1113 case TRUTH_NOT_EXPR:
/* NOT: just swap the two target labels.  */
1114 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label,
1115 true_label, ! true_branch_first, state);
1117 case TRUTH_ANDIF_EXPR:
/* Short-circuit AND: fall through to the second operand only when
   the first is true.  */
1119 struct jcf_block *next_label = gen_jcf_label (state);
1120 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1121 next_label, false_label, 1, state);
1122 define_jcf_label (next_label, state);
1123 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1124 true_label, false_label, 1, state);
1127 case TRUTH_ORIF_EXPR:
/* Short-circuit OR: symmetric to the AND case.  */
1129 struct jcf_block *next_label = gen_jcf_label (state);
1130 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1131 true_label, next_label, 1, state);
1132 define_jcf_label (next_label, state);
1133 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1134 true_label, false_label, 1, state);
1138 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1139 set it to the corresponding 1-operand if<COND> instructions. */
1143 /* The opcodes with their inverses are allocated in pairs.
1144 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1145 negop = (op & 1) ? op + 1 : op - 1;
1147 if (true_branch_first)
1149 emit_if (false_label, negop, op, state);
1150 emit_goto (true_label, state);
1154 emit_if (true_label, op, negop, state);
1155 emit_goto (false_label, state);
/* Comparison operators: pick the if_icmp<COND> opcode ...  */
1159 op = OPCODE_if_icmpeq;
1162 op = OPCODE_if_icmpne;
1165 op = OPCODE_if_icmpgt;
1168 op = OPCODE_if_icmplt;
1171 op = OPCODE_if_icmpge;
1174 op = OPCODE_if_icmple;
/* ... then specialize on the operand type.  */
1177 exp0 = TREE_OPERAND (exp, 0);
1178 exp1 = TREE_OPERAND (exp, 1);
1179 type = TREE_TYPE (exp0);
1180 switch (TREE_CODE (type))
1183 case POINTER_TYPE: case RECORD_TYPE:
/* Reference comparison: only == and != are legal.  */
1184 switch (TREE_CODE (exp))
1186 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1187 case NE_EXPR: op = OPCODE_if_acmpne; break;
/* Comparing against null: use ifnull/ifnonnull on one operand.  */
1190 if (integer_zerop (exp1) || integer_zerop (exp0))
1192 generate_bytecode_insns (integer_zerop (exp0) ? exp1 : exp0,
1193 STACK_TARGET, state);
1194 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1195 negop = (op & 1) ? op - 1 : op + 1;
1199 generate_bytecode_insns (exp0, STACK_TARGET, state);
1200 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* Floating-point comparison: push both operands, then fcmpl/fcmpg
   (choice depends on NaN direction) followed by an if<COND>.  */
1204 generate_bytecode_insns (exp0, STACK_TARGET, state);
1205 generate_bytecode_insns (exp1, STACK_TARGET, state);
1206 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1210 if (TYPE_PRECISION (type) > 32)
/* Integral comparison: long needs lcmp + if<COND>.  */
1221 if (TYPE_PRECISION (type) > 32)
1223 generate_bytecode_insns (exp0, STACK_TARGET, state);
1224 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* int compared to zero can use the 1-operand if<COND> forms.  */
1232 if (integer_zerop (exp1))
1234 generate_bytecode_insns (exp0, STACK_TARGET, state);
1238 if (integer_zerop (exp0))
/* 0 <cmp> x: flip the comparison direction.  */
1242 case OPCODE_if_icmplt:
1243 case OPCODE_if_icmpge:
1246 case OPCODE_if_icmpgt:
1247 case OPCODE_if_icmple:
1253 generate_bytecode_insns (exp1, STACK_TARGET, state);
1257 generate_bytecode_insns (exp0, STACK_TARGET, state);
1258 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* Default: evaluate EXP as a value and test it against zero.  */
1264 generate_bytecode_insns (exp, STACK_TARGET, state);
1266 if (true_branch_first)
1268 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1269 emit_goto (true_label, state);
1273 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1274 emit_goto (false_label, state);
/* A condition must be stack-neutral overall.  */
1278 if (save_SP != state->code_SP)
1282 /* Call pending cleanups, i.e. those for surrounding TRY_FINALLY_EXPRs,
1283 but only as far out as LIMIT (since we are about to jump to the
1284 exit label that is LIMIT). */
/* Walk the chain of pending labeled blocks from the innermost scope
   outward, stopping before LIMIT, and emit a `jsr' to every block that
   is a pending cleanup (a finally clause registered as a subroutine).
   NOTE(review): this listing is elided — the function's return type and
   braces fall on lines not visible here. */
1287 call_cleanups (struct jcf_block *limit, struct jcf_partial *state)
1289 struct jcf_block *block = state->labeled_blocks;
/* PENDING_CLEANUP_PC marks a block whose finally-subroutine must be
   invoked before control transfers out of it. */
1290 for (; block != limit; block = block->next)
1292 if (block->pc == PENDING_CLEANUP_PC)
1293 emit_jsr (block, state);
/* Emit bytecode to return EXP from the current method.  Strips wrapper
   expressions (COMPOUND_EXPR prefixes, COND_EXPR splits into two
   recursive returns), runs pending cleanups, and emits the correctly
   typed *return opcode.  NOTE(review): elided listing — switch case
   labels and braces are on lines not visible here. */
1298 generate_bytecode_return (tree exp, struct jcf_partial *state)
1300 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
1301 int returns_void = TREE_CODE (return_type) == VOID_TYPE;
1306 switch (TREE_CODE (exp))
/* COMPOUND_EXPR: evaluate the first operand for effect, then return
   the second. */
1309 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1311 exp = TREE_OPERAND (exp, 1);
/* COND_EXPR: branch on the condition and emit a separate return for
   each arm, so no value needs to be merged on the stack. */
1315 struct jcf_block *then_label = gen_jcf_label (state);
1316 struct jcf_block *else_label = gen_jcf_label (state);
1317 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1318 then_label, else_label, 1, state);
1319 define_jcf_label (then_label, state);
1320 generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1321 define_jcf_label (else_label, state);
1322 generate_bytecode_return (TREE_OPERAND (exp, 2), state);
/* Default: materialize the return value (if any) on the stack. */
1326 generate_bytecode_insns (exp,
1327 returns_void ? IGNORE_TARGET
1328 : STACK_TARGET, state);
/* Void return: run all pending finally-cleanups, then fall through to
   the return opcode. */
1334 call_cleanups (NULL, state);
/* adjust_typed_op picks ireturn/lreturn/freturn/dreturn/areturn. */
1338 op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
/* With pending finalizers the return value must be parked in a local
   across the cleanup `jsr's, since they may clobber the stack. */
1339 if (state->num_finalizers > 0)
1341 if (state->return_value_decl == NULL_TREE)
1343 state->return_value_decl
1344 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1345 localvar_alloc (state->return_value_decl, state);
1347 emit_store (state->return_value_decl, state);
1348 call_cleanups (NULL, state);
1349 emit_load (state->return_value_decl, state);
1350 /* If we call maybe_free_localvar (state->return_value_decl, state, 1),
1351 then we risk the saved decl being erroneously re-used in the
1352 finalizer. Instead, we keep the state->return_value_decl
1353 allocated through the rest of the method. This is not
1354 the greatest solution, but it is at least simple and safe. */
1361 /* Generate bytecode for sub-expression EXP of METHOD.
1362 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
/* Main code-generation dispatch: emit JVM bytecode for the tree EXP.
   TARGET is STACK_TARGET (leave the value on the operand stack) or
   IGNORE_TARGET (evaluate for side effects only).  The function is one
   large switch over TREE_CODE (exp); stack depth is tracked manually
   via NOTE_PUSH/NOTE_POP.  NOTE(review): this listing is elided — many
   case labels, braces, and statements fall on lines not shown here, so
   the visible fragments below are not contiguous. */
1365 generate_bytecode_insns (tree exp, int target, struct jcf_partial *state)
1368 enum java_opcode jopcode;
1370 HOST_WIDE_INT value;
1375 if (exp == NULL && target == IGNORE_TARGET)
1378 type = TREE_TYPE (exp);
1380 switch (TREE_CODE (exp))
/* BLOCK: allocate JVM local-variable slots for the block's decls,
   generate the body, then free the slots.  JSRs entered inside the
   block (num_jsrs) inhibit slot reuse. */
1383 if (BLOCK_EXPR_BODY (exp))
1386 tree body = BLOCK_EXPR_BODY (exp);
1387 long jsrs = state->num_jsrs;
1388 for (local = BLOCK_EXPR_DECLS (exp); local; )
1390 tree next = TREE_CHAIN (local);
1391 localvar_alloc (local, state);
1394 /* Avoid deep recursion for long blocks. */
1395 while (TREE_CODE (body) == COMPOUND_EXPR)
1397 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1398 body = TREE_OPERAND (body, 1);
1400 generate_bytecode_insns (body, target, state);
1402 for (local = BLOCK_EXPR_DECLS (exp); local; )
1404 tree next = TREE_CHAIN (local);
1405 maybe_free_localvar (local, state, state->num_jsrs <= jsrs);
/* COMPOUND_EXPR: first operand for effect, second for value. */
1411 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1412 /* Normally the first operand to a COMPOUND_EXPR must complete
1413 normally. However, in the special case of a do-while
1414 statement this is not necessarily the case. */
1415 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0)))
1416 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1418 case EXPR_WITH_FILE_LOCATION:
/* Swap in the wrapped expression's source position, optionally emit a
   LineNumberTable entry, then restore the previous position. */
1420 const char *saved_input_filename = input_filename;
1421 tree body = EXPR_WFL_NODE (exp);
1422 int saved_lineno = input_line;
1423 if (body == empty_stmt_node)
1425 input_filename = EXPR_WFL_FILENAME (exp);
1426 input_line = EXPR_WFL_LINENO (exp);
1427 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && input_line > 0
1428 && debug_info_level > DINFO_LEVEL_NONE)
1429 put_linenumber (input_line, state);
1430 generate_bytecode_insns (body, target, state);
1431 input_filename = saved_input_filename;
1432 input_line = saved_lineno;
/* INTEGER_CST: choose aconst_null for null pointers, a 32-bit int push,
   or a long push depending on the constant's type. */
1436 if (target == IGNORE_TARGET) ; /* do nothing */
1437 else if (TREE_CODE (type) == POINTER_TYPE)
1439 if (! integer_zerop (exp))
1442 OP1 (OPCODE_aconst_null);
1445 else if (TYPE_PRECISION (type) <= 32)
1447 push_int_const (TREE_INT_CST_LOW (exp), state);
1452 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
/* REAL_CST: use the short-form fconst_*/dconst_* opcodes for 0.0 and
   1.0 (but not -0.0), otherwise load from the constant pool. */
1459 int prec = TYPE_PRECISION (type) >> 5;
1461 if (real_zerop (exp) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp)))
1462 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1463 else if (real_onep (exp))
1464 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1465 /* FIXME Should also use fconst_2 for 2.0f.
1466 Also, should use iconst_2/ldc followed by i2f/i2d
1467 for other float/double when the value is a small integer. */
1470 offset = find_constant_index (exp, state);
1472 push_constant1 (offset, state);
1474 push_constant2 (offset, state);
/* STRING_CST: ldc of a CONSTANT_String pool entry. */
1480 push_constant1 (find_string_constant (&state->cpool, exp), state);
/* VAR_DECL: static fields use getstatic; locals fall through to
   emit_load. */
1484 if (TREE_STATIC (exp))
1486 field_op (exp, OPCODE_getstatic, state);
1487 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1490 /* ... fall through ... */
1492 emit_load (exp, state);
1494 case NON_LVALUE_EXPR:
1496 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
/* ARRAY_REF load: push array ref and index, then the typed *aload. */
1499 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1500 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1501 if (target != IGNORE_TARGET)
1503 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1506 if (! TYPE_IS_WIDE (type))
/* COMPONENT_REF: field read; `array.length' is special-cased to the
   arraylength opcode. */
1512 tree obj = TREE_OPERAND (exp, 0);
1513 tree field = TREE_OPERAND (exp, 1);
1514 int is_static = FIELD_STATIC (field);
1515 generate_bytecode_insns (obj,
1516 is_static ? IGNORE_TARGET : target, state);
1517 if (target != IGNORE_TARGET)
1519 if (DECL_NAME (field) == length_identifier_node && !is_static
1520 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1523 OP1 (OPCODE_arraylength);
1527 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1531 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
/* Boolean-valued comparisons/short-circuit operators used as values:
   materialize 1 or 0 via a conditional branch. */
1536 case TRUTH_ANDIF_EXPR:
1537 case TRUTH_ORIF_EXPR:
1545 struct jcf_block *then_label = gen_jcf_label (state);
1546 struct jcf_block *else_label = gen_jcf_label (state);
1547 struct jcf_block *end_label = gen_jcf_label (state);
1548 generate_bytecode_conditional (exp,
1549 then_label, else_label, 1, state);
1550 define_jcf_label (then_label, state);
1551 push_int_const (1, state);
1552 emit_goto (end_label, state);
1553 define_jcf_label (else_label, state);
1554 push_int_const (0, state);
1555 define_jcf_label (end_label, state);
/* COND_EXPR: standard diamond; both arms feed the same TARGET. */
1561 struct jcf_block *then_label = gen_jcf_label (state);
1562 struct jcf_block *else_label = gen_jcf_label (state);
1563 struct jcf_block *end_label = gen_jcf_label (state);
1564 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1565 then_label, else_label, 1, state);
1566 define_jcf_label (then_label, state);
1567 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1568 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1569 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1570 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1571 emit_goto (end_label, state);
1572 define_jcf_label (else_label, state);
1573 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1574 define_jcf_label (end_label, state);
1575 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1576 if (TREE_TYPE (exp) != void_type_node)
1577 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
/* CASE_EXPR: record this case label and value in the innermost
   switch's relocation list and update its min/max bounds. */
1582 struct jcf_switch_state *sw_state = state->sw_state;
1583 struct jcf_relocation *reloc
1584 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1585 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1587 reloc->label = get_jcf_label_here (state);
1588 reloc->offset = case_value;
1589 reloc->next = sw_state->cases;
1590 sw_state->cases = reloc;
1591 if (sw_state->num_cases == 0)
1593 sw_state->min_case = case_value;
1594 sw_state->max_case = case_value;
1598 if (case_value < sw_state->min_case)
1599 sw_state->min_case = case_value;
1600 if (case_value > sw_state->max_case)
1601 sw_state->max_case = case_value;
1603 sw_state->num_cases++;
/* DEFAULT_EXPR: remember where the default label lives. */
1607 state->sw_state->default_label = get_jcf_label_here (state);
1612 /* The SWITCH_EXPR has three parts, generated in the following order:
1613 1. the switch_expression (the value used to select the correct case);
1615 3. the switch_instruction (the tableswitch/lookupswitch instruction.).
1616 After code generation, we will re-order them in the order 1, 3, 2.
1617 This is to avoid any extra GOTOs. */
1618 struct jcf_switch_state sw_state;
1619 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1620 struct jcf_block *body_last; /* Last block of the switch_body. */
1621 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1622 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1623 struct jcf_block *body_block;
/* Push a fresh switch-state; case/default labels recorded while
   generating the body chain onto it. */
1625 sw_state.prev = state->sw_state;
1626 state->sw_state = &sw_state;
1627 sw_state.cases = NULL;
1628 sw_state.num_cases = 0;
1629 sw_state.default_label = NULL;
1630 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1631 expression_last = state->last_block;
1632 /* Force a new block here. */
1633 body_block = gen_jcf_label (state);
1634 define_jcf_label (body_block, state);
1635 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1636 body_last = state->last_block;
1638 switch_instruction = gen_jcf_label (state);
1639 define_jcf_label (switch_instruction, state);
1640 if (sw_state.default_label == NULL)
1641 sw_state.default_label = gen_jcf_label (state);
/* Degenerate switches: 0 cases -> pop the selector; 1 case -> a single
   if_icmpeq against the lone case value. */
1643 if (sw_state.num_cases <= 1)
1645 if (sw_state.num_cases == 0)
1647 emit_pop (1, state);
1652 push_int_const (sw_state.cases->offset, state);
1654 emit_if (sw_state.cases->label,
1655 OPCODE_if_icmpeq, OPCODE_if_icmpne, state);
1657 emit_goto (sw_state.default_label, state);
1662 unsigned HOST_WIDE_INT delta;
1663 /* Copy the chain of relocs into a sorted array. */
1664 struct jcf_relocation **relocs
1665 = xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1666 /* The relocs array is a buffer with a gap.
1667 The assumption is that cases will normally come in "runs". */
1669 int gap_end = sw_state.num_cases;
1670 struct jcf_relocation *reloc;
/* Gap-buffer insertion sort of the case relocations by case value. */
1671 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1673 HOST_WIDE_INT case_value = reloc->offset;
1674 while (gap_end < sw_state.num_cases)
1676 struct jcf_relocation *end = relocs[gap_end];
1677 if (case_value <= end->offset)
1679 relocs[gap_start++] = end;
1682 while (gap_start > 0)
1684 struct jcf_relocation *before = relocs[gap_start-1];
1685 if (case_value >= before->offset)
1687 relocs[--gap_end] = before;
1690 relocs[gap_start++] = reloc;
1691 /* Note we don't check for duplicates. This is
1692 handled by the parser. */
1695 /* We could have DELTA < 0 if sw_state.min_case is
1696 something like Integer.MIN_VALUE. That is why delta is
1698 delta = sw_state.max_case - sw_state.min_case;
/* Density heuristic: tableswitch when the case range is at most twice
   the number of cases, else lookupswitch. */
1699 if (2 * (unsigned) sw_state.num_cases >= delta)
1700 { /* Use tableswitch. */
1702 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1703 OP1 (OPCODE_tableswitch);
/* SWITCH_ALIGN_RELOC later inserts the 0-3 padding bytes the JVM
   requires before the default operand. */
1704 emit_reloc (RELOCATION_VALUE_0,
1705 SWITCH_ALIGN_RELOC, NULL, state);
1706 emit_switch_reloc (sw_state.default_label, state);
1707 OP4 (sw_state.min_case);
1708 OP4 (sw_state.max_case);
/* One jump-table slot per value in [min_case, max_case]; holes go to
   the default label. */
1709 for (i = sw_state.min_case; ; )
1711 reloc = relocs[index];
1712 if (i == reloc->offset)
1714 emit_case_reloc (reloc, state);
1715 if (i == sw_state.max_case)
1720 emit_switch_reloc (sw_state.default_label, state);
1725 { /* Use lookupswitch. */
1726 RESERVE(9 + 8 * sw_state.num_cases);
1727 OP1 (OPCODE_lookupswitch);
1728 emit_reloc (RELOCATION_VALUE_0,
1729 SWITCH_ALIGN_RELOC, NULL, state);
1730 emit_switch_reloc (sw_state.default_label, state);
1731 OP4 (sw_state.num_cases);
1732 for (i = 0; i < sw_state.num_cases; i++)
1734 struct jcf_relocation *reloc = relocs[i];
1735 OP4 (reloc->offset);
1736 emit_case_reloc (reloc, state);
1742 instruction_last = state->last_block;
1743 if (sw_state.default_label->pc < 0)
1744 define_jcf_label (sw_state.default_label, state);
1745 else /* Force a new block. */
1746 sw_state.default_label = get_jcf_label_here (state);
1747 /* Now re-arrange the blocks so the switch_instruction
1748 comes before the switch_body. */
1749 switch_length = state->code_length - switch_instruction->pc;
1750 switch_instruction->pc = body_block->pc;
1751 instruction_last->next = body_block;
1752 instruction_last->v.chunk->next = body_block->v.chunk;
1753 expression_last->next = switch_instruction;
1754 expression_last->v.chunk->next = switch_instruction->v.chunk;
1755 body_last->next = sw_state.default_label;
1756 body_last->v.chunk->next = NULL;
1757 state->chunk = body_last->v.chunk;
/* The body blocks moved after the switch instruction; shift their pc
   upper-bounds by the length of the switch instruction. */
1758 for (; body_block != sw_state.default_label; body_block = body_block->next)
1759 body_block->pc += switch_length;
1761 state->sw_state = sw_state.prev;
/* RETURN_EXPR: unwrap an optional MODIFY_EXPR assigning the result. */
1766 exp = TREE_OPERAND (exp, 0);
1767 if (exp == NULL_TREE)
1768 exp = empty_stmt_node;
1769 else if (TREE_CODE (exp) != MODIFY_EXPR)
1772 exp = TREE_OPERAND (exp, 1);
1773 generate_bytecode_return (exp, state);
1775 case LABELED_BLOCK_EXPR:
/* Chain an exit label (PENDING_EXIT_PC) so EXIT_BLOCK_EXPR can find
   this block by its tree node. */
1777 struct jcf_block *end_label = gen_jcf_label (state);
1778 end_label->next = state->labeled_blocks;
1779 state->labeled_blocks = end_label;
1780 end_label->pc = PENDING_EXIT_PC;
1781 end_label->u.labeled_block = exp;
1782 if (LABELED_BLOCK_BODY (exp))
1783 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1784 if (state->labeled_blocks != end_label)
1786 state->labeled_blocks = end_label->next;
1787 define_jcf_label (end_label, state);
/* LOOP_EXPR: recognize the while-loop shape and rotate the test to the
   bottom so each iteration takes a single conditional branch. */
1792 tree body = TREE_OPERAND (exp, 0);
1794 if (TREE_CODE (body) == COMPOUND_EXPR
1795 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1797 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1798 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1799 struct jcf_block *head_label;
1800 struct jcf_block *body_label;
1801 struct jcf_block *end_label = gen_jcf_label (state);
1802 struct jcf_block *exit_label = state->labeled_blocks;
1803 head_label = gen_jcf_label (state);
1804 emit_goto (head_label, state);
1805 body_label = get_jcf_label_here (state);
1806 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1807 define_jcf_label (head_label, state);
1808 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1809 end_label, body_label, 1, state);
1810 define_jcf_label (end_label, state);
/* General loop: body followed by an unconditional back-branch. */
1815 struct jcf_block *head_label = get_jcf_label_here (state);
1816 generate_bytecode_insns (body, IGNORE_TARGET, state);
1817 if (CAN_COMPLETE_NORMALLY (body))
1818 emit_goto (head_label, state);
/* EXIT_EXPR: branch to the innermost labeled block when the condition
   holds (note the inverted true_branch_first flag of 0). */
1824 struct jcf_block *label = state->labeled_blocks;
1825 struct jcf_block *end_label = gen_jcf_label (state);
1826 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1827 label, end_label, 0, state);
1828 define_jcf_label (end_label, state);
1831 case EXIT_BLOCK_EXPR:
/* Find the labeled block being exited, run any intervening finally
   cleanups, then jump to its exit label. */
1833 struct jcf_block *label = state->labeled_blocks;
1834 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1835 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1836 label = label->next;
1837 call_cleanups (label, state);
1838 emit_goto (label, state);
1842 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1843 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1844 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1845 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
/* increment: fast path is the iinc instruction for non-static 32-bit
   int locals; otherwise read, add, and fall into finish_assignment. */
1848 arg = TREE_OPERAND (exp, 1);
1849 exp = TREE_OPERAND (exp, 0);
1850 type = TREE_TYPE (exp);
1851 size = TYPE_IS_WIDE (type) ? 2 : 1;
1852 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1853 && ! TREE_STATIC (exp)
1854 && TREE_CODE (type) == INTEGER_TYPE
1855 && TYPE_PRECISION (type) == 32)
1857 if (target != IGNORE_TARGET && post_op)
1858 emit_load (exp, state);
1859 emit_iinc (exp, value, state);
1860 if (target != IGNORE_TARGET && ! post_op)
1861 emit_load (exp, state);
1864 if (TREE_CODE (exp) == COMPONENT_REF)
1866 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1867 emit_dup (1, 0, state);
1868 /* Stack: ..., objectref, objectref. */
1869 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1871 /* Stack: ..., objectref, oldvalue. */
1874 else if (TREE_CODE (exp) == ARRAY_REF)
1876 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1877 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1878 emit_dup (2, 0, state);
1879 /* Stack: ..., array, index, array, index. */
1880 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1884 /* Stack: ..., array, index, oldvalue. */
1887 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1889 generate_bytecode_insns (exp, STACK_TARGET, state);
1890 /* Stack: ..., oldvalue. */
1896 if (target != IGNORE_TARGET && post_op)
1897 emit_dup (size, offset, state);
1898 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1899 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1900 /* Stack, otherwise: ..., [result, ] oldvalue. */
1901 generate_bytecode_insns (arg, STACK_TARGET, state);
1902 emit_binop ((value >= 0 ? OPCODE_iadd : OPCODE_isub)
1903 + adjust_typed_op (type, 3),
1905 if (target != IGNORE_TARGET && ! post_op)
1906 emit_dup (size, offset, state);
1907 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1908 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1909 /* Stack, otherwise: ..., [result, ] newvalue. */
1910 goto finish_assignment;
/* MODIFY_EXPR: assignment; may collapse to iinc, otherwise compute the
   rhs and store through finish_assignment. */
1914 tree lhs = TREE_OPERAND (exp, 0);
1915 tree rhs = TREE_OPERAND (exp, 1);
1918 /* See if we can use the iinc instruction. */
1919 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1920 && ! TREE_STATIC (lhs)
1921 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1922 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1923 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1925 tree arg0 = TREE_OPERAND (rhs, 0);
1926 tree arg1 = TREE_OPERAND (rhs, 1);
/* iinc takes a signed 16-bit immediate. */
1927 HOST_WIDE_INT min_value = -32768;
1928 HOST_WIDE_INT max_value = 32767;
1929 if (TREE_CODE (rhs) == MINUS_EXPR)
1934 else if (arg1 == lhs)
1937 arg1 = TREE_OPERAND (rhs, 0);
1939 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
1941 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
1942 value = TREE_INT_CST_LOW (arg1);
1943 if ((hi_value == 0 && value <= max_value)
1944 || (hi_value == -1 && value >= min_value))
1946 if (TREE_CODE (rhs) == MINUS_EXPR)
1948 emit_iinc (lhs, value, state);
1949 if (target != IGNORE_TARGET)
1950 emit_load (lhs, state);
/* Push the address parts of the lhs (object ref, or array+index). */
1956 if (TREE_CODE (lhs) == COMPONENT_REF)
1958 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
1959 STACK_TARGET, state);
1962 else if (TREE_CODE (lhs) == ARRAY_REF)
1964 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
1965 STACK_TARGET, state);
1966 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
1967 STACK_TARGET, state);
1973 /* If the rhs is a binary expression and the left operand is
1974 `==' to the lhs then we have an OP= expression. In this
1975 case we must do some special processing. */
1976 if (TREE_CODE_CLASS (TREE_CODE (rhs)) == '2'
1977 && lhs == TREE_OPERAND (rhs, 0))
1979 if (TREE_CODE (lhs) == COMPONENT_REF)
1981 tree field = TREE_OPERAND (lhs, 1);
1982 if (! FIELD_STATIC (field))
1984 /* Duplicate the object reference so we can get
1986 emit_dup (TYPE_IS_WIDE (field) ? 2 : 1, 0, state);
1989 field_op (field, (FIELD_STATIC (field)
1994 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1996 else if (TREE_CODE (lhs) == VAR_DECL
1997 || TREE_CODE (lhs) == PARM_DECL)
1999 if (FIELD_STATIC (lhs))
2001 field_op (lhs, OPCODE_getstatic, state);
2002 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2005 emit_load (lhs, state);
2007 else if (TREE_CODE (lhs) == ARRAY_REF)
2009 /* Duplicate the array and index, which are on the
2010 stack, so that we can load the old value. */
2011 emit_dup (2, 0, state);
2013 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (lhs), 7);
2016 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2021 /* This function correctly handles the case where the LHS
2022 of a binary expression is NULL_TREE. */
/* Rebuild the rhs with a NULL_TREE lhs: the binop case below then
   knows the left operand is already on the stack. */
2023 rhs = build (TREE_CODE (rhs), TREE_TYPE (rhs),
2024 NULL_TREE, TREE_OPERAND (rhs, 1));
2027 generate_bytecode_insns (rhs, STACK_TARGET, state);
2028 if (target != IGNORE_TARGET)
2029 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
/* finish_assignment: store the stack top back into the lvalue. */
2035 if (TREE_CODE (exp) == COMPONENT_REF)
2037 tree field = TREE_OPERAND (exp, 1);
2038 if (! FIELD_STATIC (field))
2041 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2044 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2046 else if (TREE_CODE (exp) == VAR_DECL
2047 || TREE_CODE (exp) == PARM_DECL)
2049 if (FIELD_STATIC (exp))
2051 field_op (exp, OPCODE_putstatic, state);
2052 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2055 emit_store (exp, state);
2057 else if (TREE_CODE (exp) == ARRAY_REF)
2059 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2062 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
/* Arithmetic/bitwise binops: select the int-typed base opcode, then
   adjust_typed_op shifts it to the long/float/double variant. */
2068 jopcode = OPCODE_iadd;
2071 jopcode = OPCODE_isub;
2074 jopcode = OPCODE_imul;
2076 case TRUNC_DIV_EXPR:
2078 jopcode = OPCODE_idiv;
2080 case TRUNC_MOD_EXPR:
2081 jopcode = OPCODE_irem;
2083 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2084 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
2085 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2086 case TRUTH_AND_EXPR:
2087 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2089 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2090 case TRUTH_XOR_EXPR:
2091 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2094 tree arg0 = TREE_OPERAND (exp, 0);
2095 tree arg1 = TREE_OPERAND (exp, 1);
2096 jopcode += adjust_typed_op (type, 3);
2097 if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
2099 /* fold may (e.g) convert 2*x to x+x. */
2100 generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
2101 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2105 /* ARG0 will be NULL_TREE if we're handling an `OP='
2106 expression. In this case the stack already holds the
2107 LHS. See the MODIFY_EXPR case. */
2108 if (arg0 != NULL_TREE)
2109 generate_bytecode_insns (arg0, target, state);
/* JVM long shifts take an int shift count on the stack. */
2110 if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2111 arg1 = convert (int_type_node, arg1);
2112 generate_bytecode_insns (arg1, target, state);
2114 /* For most binary operations, both operands and the result have the
2115 same type. Shift operations are different. Using arg1's type
2116 gets us the correct SP adjustment in all cases. */
2117 if (target == STACK_TARGET)
2118 emit_binop (jopcode, TREE_TYPE (arg1), state);
2121 case TRUTH_NOT_EXPR:
/* NOT: implemented as xor with -1 (bitwise) or 1 (boolean). */
2123 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2124 if (target == STACK_TARGET)
2126 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2127 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2131 NOTE_PUSH (1 + is_long);
2132 OP1 (OPCODE_ixor + is_long);
2133 NOTE_POP (1 + is_long);
/* NEGATE: typed *neg opcode. */
2137 jopcode = OPCODE_ineg;
2138 jopcode += adjust_typed_op (type, 3);
2139 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2140 if (target == STACK_TARGET)
2141 emit_unop (jopcode, type, state);
2143 case INSTANCEOF_EXPR:
2145 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2146 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2148 OP1 (OPCODE_instanceof);
2153 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
/* Conversions: checkcast for reference casts, otherwise the matrix of
   i2l/f2i/d2f/... numeric-conversion opcodes. */
2158 case FIX_TRUNC_EXPR:
2160 tree src = TREE_OPERAND (exp, 0);
2161 tree src_type = TREE_TYPE (src);
2162 tree dst_type = TREE_TYPE (exp);
2163 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2164 if (target == IGNORE_TARGET || src_type == dst_type)
2166 if (TREE_CODE (dst_type) == POINTER_TYPE)
2168 if (TREE_CODE (exp) == CONVERT_EXPR)
2170 int index = find_class_constant (&state->cpool,
2171 TREE_TYPE (dst_type));
2173 OP1 (OPCODE_checkcast);
2177 else /* Convert numeric types. */
2179 int wide_src = TYPE_PRECISION (src_type) > 32;
2180 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2181 NOTE_POP (1 + wide_src);
2183 if (TREE_CODE (dst_type) == REAL_TYPE)
2185 if (TREE_CODE (src_type) == REAL_TYPE)
2186 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2187 else if (TYPE_PRECISION (src_type) == 64)
2188 OP1 (OPCODE_l2f + wide_dst);
2190 OP1 (OPCODE_i2f + wide_dst);
2192 else /* Convert to integral type. */
2194 if (TREE_CODE (src_type) == REAL_TYPE)
2195 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2200 if (TYPE_PRECISION (dst_type) < 32)
2203 /* Already converted to int, if needed. */
2204 if (TYPE_PRECISION (dst_type) <= 8)
2206 else if (TREE_UNSIGNED (dst_type))
2212 NOTE_PUSH (1 + wide_dst);
/* TRY_EXPR: emit the try body, then each catch clause with an
   exception-table entry covering [start_label, end_label). */
2219 tree try_clause = TREE_OPERAND (exp, 0);
2220 struct jcf_block *start_label = get_jcf_label_here (state);
2221 struct jcf_block *end_label; /* End of try clause. */
2222 struct jcf_block *finished_label = gen_jcf_label (state);
2223 tree clause = TREE_OPERAND (exp, 1);
2224 if (target != IGNORE_TARGET)
2226 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2227 end_label = get_jcf_label_here (state);
2228 if (end_label == start_label)
2230 if (CAN_COMPLETE_NORMALLY (try_clause))
2231 emit_goto (finished_label, state);
2232 while (clause != NULL_TREE)
2234 tree catch_clause = TREE_OPERAND (clause, 0);
2235 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2236 struct jcf_handler *handler = alloc_handler (start_label,
/* NULL handler type means catch-all (any Throwable). */
2238 if (exception_decl == NULL_TREE)
2239 handler->type = NULL_TREE;
2241 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2242 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2243 clause = TREE_CHAIN (clause);
2244 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2245 emit_goto (finished_label, state);
2247 define_jcf_label (finished_label, state);
2251 case TRY_FINALLY_EXPR:
2253 struct jcf_block *finished_label = NULL;
2254 struct jcf_block *finally_label, *start_label, *end_label;
2255 struct jcf_handler *handler;
2256 tree try_block = TREE_OPERAND (exp, 0);
2257 tree finally = TREE_OPERAND (exp, 1);
2258 tree return_link = NULL_TREE, exception_decl = NULL_TREE;
2260 tree exception_type;
2262 finally_label = gen_jcf_label (state);
2263 start_label = get_jcf_label_here (state);
2264 /* If the `finally' clause can complete normally, we emit it
2265 as a subroutine and let the other clauses call it via
2266 `jsr'. If it can't complete normally, then we simply emit
2267 `goto's directly to it. */
2268 if (CAN_COMPLETE_NORMALLY (finally))
2270 finally_label->pc = PENDING_CLEANUP_PC;
2271 finally_label->next = state->labeled_blocks;
2272 state->labeled_blocks = finally_label;
2273 state->num_finalizers++;
2276 generate_bytecode_insns (try_block, target, state);
2278 if (CAN_COMPLETE_NORMALLY (finally))
2280 if (state->labeled_blocks != finally_label)
2282 state->labeled_blocks = finally_label->next;
2284 end_label = get_jcf_label_here (state);
/* Empty try body: no handler needed, just emit the finally code. */
2286 if (end_label == start_label)
2288 state->num_finalizers--;
2289 define_jcf_label (finally_label, state);
2290 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2294 if (CAN_COMPLETE_NORMALLY (finally))
/* return_link holds the `jsr' return address for the ret opcode. */
2296 return_link = build_decl (VAR_DECL, NULL_TREE,
2297 return_address_type_node);
2298 finished_label = gen_jcf_label (state);
2301 if (CAN_COMPLETE_NORMALLY (try_block))
2303 if (CAN_COMPLETE_NORMALLY (finally))
2305 emit_jsr (finally_label, state);
2306 emit_goto (finished_label, state);
2309 emit_goto (finally_label, state);
2312 /* Handle exceptions. */
2314 exception_type = build_pointer_type (throwable_type_node);
2315 if (CAN_COMPLETE_NORMALLY (finally))
2317 /* We're going to generate a subroutine, so we'll need to
2318 save and restore the exception around the `jsr'. */
2319 exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2320 localvar_alloc (return_link, state);
2322 handler = alloc_handler (start_label, end_label, state);
2323 handler->type = NULL_TREE;
2324 if (CAN_COMPLETE_NORMALLY (finally))
2326 localvar_alloc (exception_decl, state);
2328 emit_store (exception_decl, state);
2329 emit_jsr (finally_label, state);
2330 emit_load (exception_decl, state);
2332 OP1 (OPCODE_athrow);
2337 /* We're not generating a subroutine. In this case we can
2338 simply have the exception handler pop the exception and
2339 then fall through to the `finally' block. */
2341 emit_pop (1, state);
2345 /* The finally block. If we're generating a subroutine, first
2346 save return PC into return_link. Otherwise, just generate
2347 the code for the `finally' block. */
2348 define_jcf_label (finally_label, state);
2349 if (CAN_COMPLETE_NORMALLY (finally))
2352 emit_store (return_link, state);
2355 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2356 if (CAN_COMPLETE_NORMALLY (finally))
2358 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2359 maybe_free_localvar (exception_decl, state, 1);
2360 maybe_free_localvar (return_link, state, 1);
2361 define_jcf_label (finished_label, state);
/* THROW: evaluate the exception object and athrow it. */
2366 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2368 OP1 (OPCODE_athrow);
2370 case NEW_ARRAY_INIT:
/* Array initializer: newarray/anewarray of the right length, then a
   dup/push-index/store sequence per element. */
2372 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2373 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2374 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2375 HOST_WIDE_INT length = java_array_type_length (array_type);
2376 if (target == IGNORE_TARGET)
2378 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2379 generate_bytecode_insns (TREE_VALUE (values), target, state);
2382 push_int_const (length, state);
2385 if (JPRIMITIVE_TYPE_P (element_type))
2387 int atype = encode_newarray_type (element_type);
2388 OP1 (OPCODE_newarray);
2393 int index = find_class_constant (&state->cpool,
2394 TREE_TYPE (element_type));
2395 OP1 (OPCODE_anewarray);
2399 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2400 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2402 int save_SP = state->code_SP;
2403 emit_dup (1, 0, state);
2404 push_int_const (offset, state);
2406 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2409 state->code_SP = save_SP;
2413 case JAVA_EXC_OBJ_EXPR:
2414 NOTE_PUSH (1); /* Pushed by exception system. */
2416 case NEW_CLASS_EXPR:
/* new: allocate, dup the reference for the <init> call, then fall into
   the CALL_EXPR machinery for the constructor invocation. */
2418 tree class = TREE_TYPE (TREE_TYPE (exp));
2419 int need_result = target != IGNORE_TARGET;
2420 int index = find_class_constant (&state->cpool, class);
2426 NOTE_PUSH (1 + need_result);
2428 /* ... fall through ... */
/* CALL_EXPR: the soft_* builtins map directly to dedicated opcodes;
   everything else becomes an invoke* instruction. */
2431 tree f = TREE_OPERAND (exp, 0);
2432 tree x = TREE_OPERAND (exp, 1);
2433 int save_SP = state->code_SP;
2435 if (TREE_CODE (f) == ADDR_EXPR)
2436 f = TREE_OPERAND (f, 0);
2437 if (f == soft_newarray_node)
2439 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2440 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2441 STACK_TARGET, state);
2443 OP1 (OPCODE_newarray);
2447 else if (f == soft_multianewarray_node)
2451 int index = find_class_constant (&state->cpool,
2452 TREE_TYPE (TREE_TYPE (exp)));
2453 x = TREE_CHAIN (x); /* Skip class argument. */
2454 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2455 for (idim = ndims; --idim >= 0; )
2458 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2461 OP1 (OPCODE_multianewarray);
2466 else if (f == soft_anewarray_node)
2468 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2469 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2470 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2472 OP1 (OPCODE_anewarray);
2476 else if (f == soft_monitorenter_node
2477 || f == soft_monitorexit_node
2480 if (f == soft_monitorenter_node)
2481 op = OPCODE_monitorenter;
2482 else if (f == soft_monitorexit_node)
2483 op = OPCODE_monitorexit;
2486 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
/* Generic call: push all arguments, compute nargs from the SP delta,
   then reset SP before accounting for the invoke itself. */
2492 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2494 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2496 nargs = state->code_SP - save_SP;
2497 state->code_SP = save_SP;
2498 if (f == soft_fmod_node)
2505 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2506 NOTE_POP (1); /* Pop implicit this. */
2507 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2509 tree context = DECL_CONTEXT (f);
2510 int index, interface = 0;
/* Dispatch kind: static -> invokestatic; constructors, super calls and
   private methods -> invokespecial; interface methods ->
   invokeinterface; all others -> invokevirtual. */
2512 if (METHOD_STATIC (f))
2513 OP1 (OPCODE_invokestatic);
2514 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2515 || METHOD_PRIVATE (f))
2516 OP1 (OPCODE_invokespecial);
2519 if (CLASS_INTERFACE (TYPE_NAME (context)))
2521 tree arg1 = TREE_VALUE (TREE_OPERAND (exp, 1));
2522 context = TREE_TYPE (TREE_TYPE (arg1));
2523 if (CLASS_INTERFACE (TYPE_NAME (context)))
2527 OP1 (OPCODE_invokeinterface);
2529 OP1 (OPCODE_invokevirtual);
2531 index = find_methodref_with_class_index (&state->cpool, f, context);
/* Discard an unwanted return value. */
2541 f = TREE_TYPE (TREE_TYPE (f));
2542 if (TREE_CODE (f) != VOID_TYPE)
2544 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2545 if (target == IGNORE_TARGET)
2546 emit_pop (size, state);
/* default: unhandled tree code is an internal error. */
2556 error("internal error in generate_bytecode_insn - tree code not implemented: %s",
2557 tree_code_name [(int) TREE_CODE (exp)]);
2562 perform_relocations (struct jcf_partial *state)
2564 struct jcf_block *block;
2565 struct jcf_relocation *reloc;
2569 /* Before we start, the pc field of each block is an upper bound on
2570 the block's start pc (it may be less, if previous blocks need less
2571 than their maximum).
2573 The minimum size of each block is in the block's chunk->size. */
2575 /* First, figure out the actual locations of each block. */
2578 for (block = state->blocks; block != NULL; block = block->next)
2580 int block_size = block->v.chunk->size;
2584 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2585 Assumes relocations are in reverse order. */
2586 reloc = block->u.relocations;
2587 while (reloc != NULL
2588 && reloc->kind == OPCODE_goto_w
2589 && reloc->label->pc == block->next->pc
2590 && reloc->offset + 2 == block_size)
2592 reloc = reloc->next;
2593 block->u.relocations = reloc;
2594 block->v.chunk->size -= 3;
2599 /* Optimize GOTO L; ... L: GOTO X by changing the first goto to
2600 jump directly to X. We're careful here to avoid an infinite
2601 loop if the `goto's themselves form one. We do this
2602 optimization because we can generate a goto-to-goto for some
2603 try/finally blocks. */
2604 while (reloc != NULL
2605 && reloc->kind == OPCODE_goto_w
2606 && reloc->label != block
2607 && reloc->label->v.chunk->data != NULL
2608 && reloc->label->v.chunk->data[0] == OPCODE_goto)
2610 /* Find the reloc for the first instruction of the
2611 destination block. */
2612 struct jcf_relocation *first_reloc;
2613 for (first_reloc = reloc->label->u.relocations;
2615 first_reloc = first_reloc->next)
2617 if (first_reloc->offset == 1
2618 && first_reloc->kind == OPCODE_goto_w)
2620 reloc->label = first_reloc->label;
2625 /* If we didn't do anything, exit the loop. */
2626 if (first_reloc == NULL)
2630 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2632 if (reloc->kind == SWITCH_ALIGN_RELOC)
2634 /* We assume this is the first relocation in this block,
2635 so we know its final pc. */
2636 int where = pc + reloc->offset;
2637 int pad = ((where + 3) & ~3) - where;
2640 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2642 int delta = reloc->label->pc - (pc + reloc->offset - 1);
2643 int expand = reloc->kind > 0 ? 2 : 5;
2647 if (delta >= -32768 && delta <= 32767)
2653 block_size += expand;
2659 for (block = state->blocks; block != NULL; block = block->next)
2661 struct chunk *chunk = block->v.chunk;
2662 int old_size = chunk->size;
2663 int next_pc = block->next == NULL ? pc : block->next->pc;
2664 int new_size = next_pc - block->pc;
2665 unsigned char *new_ptr;
2666 unsigned char *old_buffer = chunk->data;
2667 unsigned char *old_ptr = old_buffer + old_size;
2668 if (new_size != old_size)
2670 chunk->data = obstack_alloc (state->chunk_obstack, new_size);
2671 chunk->size = new_size;
2673 new_ptr = chunk->data + new_size;
2675 /* We do the relocations from back to front, because
2676 the relocations are in reverse order. */
2677 for (reloc = block->u.relocations; ; reloc = reloc->next)
2679 /* new_ptr and old_ptr point into the old and new buffers,
2680 respectively. (If no relocations cause the buffer to
2681 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2682 The bytes at higher address have been copied and relocations
2683 handled; those at lower addresses remain to process. */
2685 /* Lower old index of piece to be copied with no relocation.
2686 I.e. high index of the first piece that does need relocation. */
2687 int start = reloc == NULL ? 0
2688 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2689 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2691 : reloc->offset + 2;
2694 int n = (old_ptr - old_buffer) - start;
2698 memcpy (new_ptr, old_ptr, n);
2699 if (old_ptr == old_buffer)
2702 new_offset = new_ptr - chunk->data;
2703 new_offset -= (reloc->kind == -1 ? 2 : 4);
2704 if (reloc->kind == 0)
2707 value = GET_u4 (old_ptr);
2709 else if (reloc->kind == BLOCK_START_RELOC)
2715 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2717 int where = block->pc + reloc->offset;
2718 int pad = ((where + 3) & ~3) - where;
2726 value = GET_u2 (old_ptr);
2728 value += reloc->label->pc - (block->pc + new_offset);
2729 *--new_ptr = (unsigned char) value; value >>= 8;
2730 *--new_ptr = (unsigned char) value; value >>= 8;
2731 if (reloc->kind != -1)
2733 *--new_ptr = (unsigned char) value; value >>= 8;
2734 *--new_ptr = (unsigned char) value;
2736 if (reloc->kind > BLOCK_START_RELOC)
2738 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2740 *--new_ptr = reloc->kind;
2742 else if (reloc->kind < -1)
2744 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2746 *--new_ptr = OPCODE_goto_w;
2749 *--new_ptr = - reloc->kind;
2752 if (new_ptr != chunk->data)
2755 state->code_length = pc;
2759 init_jcf_state (struct jcf_partial *state, struct obstack *work)
2761 state->chunk_obstack = work;
2762 state->first = state->chunk = NULL;
2763 CPOOL_INIT (&state->cpool);
2764 BUFFER_INIT (&state->localvars);
2765 BUFFER_INIT (&state->bytecode);
2769 init_jcf_method (struct jcf_partial *state, tree method)
2771 state->current_method = method;
2772 state->blocks = state->last_block = NULL;
2773 state->linenumber_count = 0;
2774 state->first_lvar = state->last_lvar = NULL;
2775 state->lvar_count = 0;
2776 state->labeled_blocks = NULL;
2777 state->code_length = 0;
2778 BUFFER_RESET (&state->bytecode);
2779 BUFFER_RESET (&state->localvars);
2781 state->code_SP_max = 0;
2782 state->handlers = NULL;
2783 state->last_handler = NULL;
2784 state->num_handlers = 0;
2785 state->num_finalizers = 0;
2786 state->return_value_decl = NULL_TREE;
2790 release_jcf_state (struct jcf_partial *state)
2792 CPOOL_FINISH (&state->cpool);
2793 obstack_free (state->chunk_obstack, state->first);
2796 /* Generate and return a list of chunks containing the class CLAS
2797 in the .class file representation. The list can be written to a
2798 .class file using write_chunks. Allocate chunks from obstack WORK. */
2800 static GTY(()) tree SourceFile_node;
2801 static struct chunk *
2802 generate_classfile (tree clas, struct jcf_partial *state)
2804 struct chunk *cpool_chunk;
2805 const char *source_file, *s;
2808 char *fields_count_ptr;
2809 int fields_count = 0;
2810 char *methods_count_ptr;
2811 int methods_count = 0;
2814 = clas == object_type_node ? 0
2815 : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));
2817 ptr = append_chunk (NULL, 8, state);
2818 PUT4 (0xCafeBabe); /* Magic number */
2819 PUT2 (3); /* Minor version */
2820 PUT2 (45); /* Major version */
2822 append_chunk (NULL, 0, state);
2823 cpool_chunk = state->chunk;
2825 /* Next allocate the chunk containing acces_flags through fields_count. */
2826 if (clas == object_type_node)
2829 i = 8 + 2 * total_supers;
2830 ptr = append_chunk (NULL, i, state);
2831 i = get_access_flags (TYPE_NAME (clas));
2832 if (! (i & ACC_INTERFACE))
2834 PUT2 (i); /* acces_flags */
2835 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2836 if (clas == object_type_node)
2838 PUT2(0); /* super_class */
2839 PUT2(0); /* interfaces_count */
2843 tree basetypes = TYPE_BINFO_BASETYPES (clas);
2844 tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
2845 int j = find_class_constant (&state->cpool, base);
2846 PUT2 (j); /* super_class */
2847 PUT2 (total_supers - 1); /* interfaces_count */
2848 for (i = 1; i < total_supers; i++)
2850 base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
2851 j = find_class_constant (&state->cpool, base);
2855 fields_count_ptr = ptr;
2857 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
2859 int have_value, attr_count = 0;
2860 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
2862 ptr = append_chunk (NULL, 8, state);
2863 i = get_access_flags (part); PUT2 (i);
2864 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
2865 i = find_utf8_constant (&state->cpool,
2866 build_java_signature (TREE_TYPE (part)));
2868 have_value = DECL_INITIAL (part) != NULL_TREE
2869 && FIELD_STATIC (part) && CONSTANT_VALUE_P (DECL_INITIAL (part))
2870 && FIELD_FINAL (part)
2871 && (JPRIMITIVE_TYPE_P (TREE_TYPE (part))
2872 || TREE_TYPE (part) == string_ptr_type_node);
2876 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
2877 || FIELD_SYNTHETIC (part))
2879 if (FIELD_DEPRECATED (part))
2882 PUT2 (attr_count); /* attributes_count */
2885 tree init = DECL_INITIAL (part);
2886 static tree ConstantValue_node = NULL_TREE;
2887 if (TREE_TYPE (part) != TREE_TYPE (init))
2888 fatal_error ("field initializer type mismatch");
2889 ptr = append_chunk (NULL, 8, state);
2890 if (ConstantValue_node == NULL_TREE)
2891 ConstantValue_node = get_identifier ("ConstantValue");
2892 i = find_utf8_constant (&state->cpool, ConstantValue_node);
2893 PUT2 (i); /* attribute_name_index */
2894 PUT4 (2); /* attribute_length */
2895 i = find_constant_index (init, state); PUT2 (i);
2897 /* Emit the "Synthetic" attribute for val$<x> and this$<n>
2898 fields and other fields which need it. */
2899 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
2900 || FIELD_SYNTHETIC (part))
2901 ptr = append_synthetic_attribute (state);
2902 if (FIELD_DEPRECATED (part))
2903 append_deprecated_attribute (state);
2906 ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
2908 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
2911 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
2913 struct jcf_block *block;
2914 tree function_body = DECL_FUNCTION_BODY (part);
2915 tree body = function_body == NULL_TREE ? NULL_TREE
2916 : BLOCK_EXPR_BODY (function_body);
2917 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
2919 tree type = TREE_TYPE (part);
2920 tree save_function = current_function_decl;
2921 int synthetic_p = 0;
2923 /* Invisible Miranda methods shouldn't end up in the .class
2925 if (METHOD_INVISIBLE (part))
2928 current_function_decl = part;
2929 ptr = append_chunk (NULL, 8, state);
2930 i = get_access_flags (part); PUT2 (i);
2931 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
2932 i = find_utf8_constant (&state->cpool, build_java_signature (type));
2934 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
2936 /* Make room for the Synthetic attribute (of zero length.) */
2937 if (DECL_FINIT_P (part)
2938 || DECL_INSTINIT_P (part)
2939 || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
2940 || TYPE_DOT_CLASS (clas) == part)
2945 /* Make room for Deprecated attribute. */
2946 if (METHOD_DEPRECATED (part))
2949 PUT2 (i); /* attributes_count */
2952 ptr = append_synthetic_attribute (state);
2954 if (body != NULL_TREE)
2956 int code_attributes_count = 0;
2957 static tree Code_node = NULL_TREE;
2960 struct jcf_handler *handler;
2961 if (Code_node == NULL_TREE)
2962 Code_node = get_identifier ("Code");
2963 ptr = append_chunk (NULL, 14, state);
2964 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
2966 init_jcf_method (state, part);
2967 get_jcf_label_here (state); /* Force a first block. */
2968 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2969 localvar_alloc (t, state);
2970 state->num_jsrs = 0;
2971 generate_bytecode_insns (body, IGNORE_TARGET, state);
2972 if (CAN_COMPLETE_NORMALLY (body))
2974 if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
2977 OP1 (OPCODE_return);
2979 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2980 maybe_free_localvar (t, state, 1);
2981 if (state->return_value_decl != NULL_TREE)
2982 maybe_free_localvar (state->return_value_decl, state, 1);
2983 finish_jcf_block (state);
2984 perform_relocations (state);
2987 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
2988 if (state->linenumber_count > 0)
2990 code_attributes_count++;
2991 i += 8 + 4 * state->linenumber_count;
2993 if (state->lvar_count > 0)
2995 code_attributes_count++;
2996 i += 8 + 10 * state->lvar_count;
2998 UNSAFE_PUT4 (i); /* attribute_length */
2999 UNSAFE_PUT2 (state->code_SP_max); /* max_stack */
3000 UNSAFE_PUT2 (localvar_max); /* max_locals */
3001 UNSAFE_PUT4 (state->code_length);
3003 /* Emit the exception table. */
3004 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
3005 PUT2 (state->num_handlers); /* exception_table_length */
3006 handler = state->handlers;
3007 for (; handler != NULL; handler = handler->next)
3010 PUT2 (handler->start_label->pc);
3011 PUT2 (handler->end_label->pc);
3012 PUT2 (handler->handler_label->pc);
3013 if (handler->type == NULL_TREE)
3016 type_index = find_class_constant (&state->cpool,
3021 ptr = append_chunk (NULL, 2, state);
3022 PUT2 (code_attributes_count);
3024 /* Write the LineNumberTable attribute. */
3025 if (state->linenumber_count > 0)
3027 static tree LineNumberTable_node = NULL_TREE;
3028 ptr = append_chunk (NULL,
3029 8 + 4 * state->linenumber_count, state);
3030 if (LineNumberTable_node == NULL_TREE)
3031 LineNumberTable_node = get_identifier ("LineNumberTable");
3032 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3033 PUT2 (i); /* attribute_name_index */
3034 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
3035 i = state->linenumber_count; PUT2 (i);
3036 for (block = state->blocks; block != NULL; block = block->next)
3038 int line = block->linenumber;
3047 /* Write the LocalVariableTable attribute. */
3048 if (state->lvar_count > 0)
3050 static tree LocalVariableTable_node = NULL_TREE;
3051 struct localvar_info *lvar = state->first_lvar;
3052 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3053 if (LocalVariableTable_node == NULL_TREE)
3054 LocalVariableTable_node = get_identifier("LocalVariableTable");
3055 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3056 PUT2 (i); /* attribute_name_index */
3057 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
3058 i = state->lvar_count; PUT2 (i);
3059 for ( ; lvar != NULL; lvar = lvar->next)
3061 tree name = DECL_NAME (lvar->decl);
3062 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
3063 i = lvar->start_label->pc; PUT2 (i);
3064 i = lvar->end_label->pc - i; PUT2 (i);
3065 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3066 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
3067 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
3071 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3073 tree t = DECL_FUNCTION_THROWS (part);
3074 int throws_count = list_length (t);
3075 static tree Exceptions_node = NULL_TREE;
3076 if (Exceptions_node == NULL_TREE)
3077 Exceptions_node = get_identifier ("Exceptions");
3078 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3079 i = find_utf8_constant (&state->cpool, Exceptions_node);
3080 PUT2 (i); /* attribute_name_index */
3081 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
3082 i = throws_count; PUT2 (i);
3083 for (; t != NULL_TREE; t = TREE_CHAIN (t))
3085 i = find_class_constant (&state->cpool, TREE_VALUE (t));
3090 if (METHOD_DEPRECATED (part))
3091 append_deprecated_attribute (state);
3094 current_function_decl = save_function;
3096 ptr = methods_count_ptr; UNSAFE_PUT2 (methods_count);
3098 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3099 for (s = source_file; ; s++)
3104 if (ch == '/' || ch == '\\')
3107 ptr = append_chunk (NULL, 10, state);
3109 i = 1; /* Source file always exists as an attribute */
3110 if (INNER_CLASS_TYPE_P (clas) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas)))
3112 if (clas == object_type_node)
3114 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3117 PUT2 (i); /* attributes_count */
3119 /* generate the SourceFile attribute. */
3120 if (SourceFile_node == NULL_TREE)
3122 SourceFile_node = get_identifier ("SourceFile");
3125 i = find_utf8_constant (&state->cpool, SourceFile_node);
3126 PUT2 (i); /* attribute_name_index */
3128 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3130 append_gcj_attribute (state, clas);
3131 append_innerclasses_attribute (state, clas);
3132 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3133 append_deprecated_attribute (state);
3135 /* New finally generate the contents of the constant pool chunk. */
3136 i = count_constant_pool_bytes (&state->cpool);
3137 ptr = obstack_alloc (state->chunk_obstack, i);
3138 cpool_chunk->data = ptr;
3139 cpool_chunk->size = i;
3140 write_constant_pool (&state->cpool, ptr, i);
3141 return state->first;
3144 static GTY(()) tree Synthetic_node;
3145 static unsigned char *
3146 append_synthetic_attribute (struct jcf_partial *state)
3148 unsigned char *ptr = append_chunk (NULL, 6, state);
3151 if (Synthetic_node == NULL_TREE)
3153 Synthetic_node = get_identifier ("Synthetic");
3155 i = find_utf8_constant (&state->cpool, Synthetic_node);
3156 PUT2 (i); /* Attribute string index */
3157 PUT4 (0); /* Attribute length */
3163 append_deprecated_attribute (struct jcf_partial *state)
3165 unsigned char *ptr = append_chunk (NULL, 6, state);
3168 i = find_utf8_constant (&state->cpool, get_identifier ("Deprecated"));
3169 PUT2 (i); /* Attribute string index */
3170 PUT4 (0); /* Attribute length */
3174 append_gcj_attribute (struct jcf_partial *state, tree class)
3179 if (class != object_type_node)
3182 ptr = append_chunk (NULL, 6, state); /* 2+4 */
3183 i = find_utf8_constant (&state->cpool,
3184 get_identifier ("gnu.gcj.gcj-compiled"));
3185 PUT2 (i); /* Attribute string index */
3186 PUT4 (0); /* Attribute length */
3189 static tree InnerClasses_node;
3191 append_innerclasses_attribute (struct jcf_partial *state, tree class)
3193 tree orig_decl = TYPE_NAME (class);
3196 unsigned char *ptr, *length_marker, *number_marker;
3198 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3201 ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3203 if (InnerClasses_node == NULL_TREE)
3205 InnerClasses_node = get_identifier ("InnerClasses");
3207 i = find_utf8_constant (&state->cpool, InnerClasses_node);
3209 length_marker = ptr; PUT4 (0); /* length, to be later patched */
3210 number_marker = ptr; PUT2 (0); /* number of classes, tblp */
3212 /* Generate the entries: all inner classes visible from the one we
3213 process: itself, up and down. */
3214 while (class && INNER_CLASS_TYPE_P (class))
3218 decl = TYPE_NAME (class);
3219 n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3220 IDENTIFIER_LENGTH (DECL_NAME (decl));
3222 while (n[-1] != '$')
3224 append_innerclasses_attribute_entry (state, decl, get_identifier (n));
3227 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
3231 for (current = DECL_INNER_CLASS_LIST (decl);
3232 current; current = TREE_CHAIN (current))
3234 append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3235 TREE_VALUE (current));
3239 ptr = length_marker; PUT4 (8*length+2);
3240 ptr = number_marker; PUT2 (length);
3244 append_innerclasses_attribute_entry (struct jcf_partial *state,
3245 tree decl, tree name)
3248 int ocii = 0, ini = 0;
3249 unsigned char *ptr = append_chunk (NULL, 8, state);
3251 icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3253 /* Sun's implementation seems to generate ocii to 0 for inner
3254 classes (which aren't considered members of the class they're
3255 in.) The specs are saying that if the class is anonymous,
3256 inner_name_index must be zero. */
3257 if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl)))
3259 ocii = find_class_constant (&state->cpool,
3260 TREE_TYPE (DECL_CONTEXT (decl)));
3261 ini = find_utf8_constant (&state->cpool, name);
3263 icaf = get_access_flags (decl);
3265 PUT2 (icii); PUT2 (ocii); PUT2 (ini); PUT2 (icaf);
3269 make_class_file_name (tree clas)
3271 const char *dname, *cname, *slash;
3276 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3277 "", '.', DIR_SEPARATOR,
3279 if (jcf_write_base_directory == NULL)
3281 /* Make sure we put the class file into the .java file's
3282 directory, and not into some subdirectory thereof. */
3284 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3285 slash = strrchr (dname, DIR_SEPARATOR);
3286 #ifdef DIR_SEPARATOR_2
3288 slash = strrchr (dname, DIR_SEPARATOR_2);
3294 sep = DIR_SEPARATOR;
3299 t = strrchr (cname, DIR_SEPARATOR);
3307 dname = jcf_write_base_directory;
3309 s = strrchr (dname, DIR_SEPARATOR);
3310 #ifdef DIR_SEPARATOR_2
3312 s = strrchr (dname, DIR_SEPARATOR_2);
3317 sep = DIR_SEPARATOR;
3319 slash = dname + strlen (dname);
3322 r = xmalloc (slash - dname + strlen (cname) + 2);
3323 strncpy (r, dname, slash - dname);
3324 r[slash - dname] = sep;
3325 strcpy (&r[slash - dname + 1], cname);
3327 /* We try to make new directories when we need them. We only do
3328 this for directories which "might not" exist. For instance, we
3329 assume the `-d' directory exists, but we don't assume that any
3330 subdirectory below it exists. It might be worthwhile to keep
3331 track of which directories we've created to avoid gratuitous
3333 dname = r + (slash - dname) + 1;
3336 char *s = strchr (dname, sep);
3340 if (stat (r, &sb) == -1
3341 /* Try to make it. */
3342 && mkdir (r, 0755) == -1)
3343 fatal_error ("can't create directory %s: %m", r);
3346 /* Skip consecutive separators. */
3347 for (dname = s + 1; *dname && *dname == sep; ++dname)
3354 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3355 The output .class file name is make_class_file_name(CLAS). */
3358 write_classfile (tree clas)
3360 struct obstack *work = &temporary_obstack;
3361 struct jcf_partial state[1];
3362 char *class_file_name = make_class_file_name (clas);
3363 struct chunk *chunks;
3365 if (class_file_name != NULL)
3368 char *temporary_file_name;
3370 /* The .class file is initially written to a ".tmp" file so that
3371 if multiple instances of the compiler are running at once
3372 they do not see partially formed class files. */
3373 temporary_file_name = concat (class_file_name, ".tmp", NULL);
3374 stream = fopen (temporary_file_name, "wb");
3376 fatal_error ("can't open %s for writing: %m", temporary_file_name);
3378 jcf_dependency_add_target (class_file_name);
3379 init_jcf_state (state, work);
3380 chunks = generate_classfile (clas, state);
3381 write_chunks (stream, chunks);
3382 if (fclose (stream))
3383 fatal_error ("error closing %s: %m", temporary_file_name);
3385 /* If a file named by the string pointed to by `new' exists
3386 prior to the call to the `rename' function, the bahaviour
3387 is implementation-defined. ISO 9899-1990 7.9.4.2.
3389 For example, on Win32 with MSVCRT, it is an error. */
3391 unlink (class_file_name);
3393 if (rename (temporary_file_name, class_file_name) == -1)
3395 remove (temporary_file_name);
3396 fatal_error ("can't create %s: %m", class_file_name);
3398 free (temporary_file_name);
3399 free (class_file_name);
3401 release_jcf_state (state);
3405 string concatenation
3406 synchronized statement
3409 #include "gt-java-jcf-write.h"