1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
27 #include "coretypes.h"
32 #include "java-tree.h"
36 #include "java-opcodes.h"
37 #include "parse.h" /* for BLOCK_EXPR_BODY */
43 extern struct obstack temporary_obstack;
45 /* Base directory in which `.class' files should be written.
46 NULL means to put the file into the same directory as the
47 corresponding .java file. */
48 const char *jcf_write_base_directory = NULL;
50 /* Make sure bytecode.data is big enough for at least N more bytes. */
53 do { CHECK_OP(state); \
54 if (state->bytecode.ptr + (N) > state->bytecode.limit) \
55 buffer_grow (&state->bytecode, N); } while (0)
57 /* Add a 1-byte instruction/operand I to bytecode.data,
58 assuming space has already been RESERVE'd. */
60 #define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
62 /* Like OP1, but I is a 2-byte big endian integer. */
65 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
67 /* Like OP1, but I is a 4-byte big endian integer. */
70 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
71 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
73 /* Macro to call each time we push I words on the JVM stack. */
75 #define NOTE_PUSH(I) \
76 do { state->code_SP += (I); \
77 if (state->code_SP > state->code_SP_max) \
78 state->code_SP_max = state->code_SP; } while (0)
80 /* Macro to call each time we pop I words from the JVM stack. */
83 do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
85 /* A chunk or segment of a .class file. */
89 /* The next segment of this .class file. */
92 /* The actual data in this segment to be written to the .class file. */
95 /* The size of the segment to be written to the .class file. */
99 #define PENDING_CLEANUP_PC (-3)
100 #define PENDING_EXIT_PC (-2)
101 #define UNDEFINED_PC (-1)
103 /* Each "block" represents a label plus the bytecode instructions following.
104 There may be branches out of the block, but no incoming jumps, except
105 to the beginning of the block.
107 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
108 associated code yet), but it is an undefined label.
113 /* For blocks that are defined, the next block (in pc order).
114 For blocks that are not-yet-defined the end label of a LABELED_BLOCK_EXPR
115 or a cleanup expression (from a TRY_FINALLY_EXPR),
116 this is the next (outer) such end label, in a stack headed by
117 labeled_blocks in jcf_partial. */
118 struct jcf_block *next;
120 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
121 pc is PENDING_EXIT_PC.
122 In the not-yet-defined end label for pending cleanup subroutine,
123 pc is PENDING_CLEANUP_PC.
124 For other not-yet-defined labels, pc is UNDEFINED_PC.
126 If the label has been defined:
127 Until perform_relocations is finished, this is the maximum possible
128 value of the bytecode offset at the beginning of this block.
129 After perform_relocations, it is the actual offset (pc). */
134 /* After finish_jcf_block is called, the actual instructions
135 contained in this block. Before that NULL, and the instructions
136 are in state->bytecode. */
140 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
141 covered by the cleanup. */
142 struct jcf_block *start_label;
146 /* Set of relocations (in reverse offset order) for this block. */
147 struct jcf_relocation *relocations;
149 /* If this block is that of the not-yet-defined end label of
150 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
151 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
156 /* A "relocation" type for the 0-3 bytes of padding at the start
157 of a tableswitch or a lookupswitch. */
158 #define SWITCH_ALIGN_RELOC 4
160 /* A relocation type for the labels in a tableswitch or a lookupswitch;
161 these are relative to the start of the instruction, but (due to
162 the 0-3 bytes of padding), we don't know the offset before relocation. */
163 #define BLOCK_START_RELOC 1
165 struct jcf_relocation
167 /* Next relocation for the current jcf_block. */
168 struct jcf_relocation *next;
170 /* The (byte) offset within the current block that needs to be relocated. */
171 HOST_WIDE_INT offset;
173 /* 0 if offset is a 4-byte relative offset.
174 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
175 for proper alignment in tableswitch/lookupswitch instructions.
176 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
177 to the start of the containing block.
178 -1 if offset is a 2-byte relative offset.
179 < -1 if offset is the address of an instruction with a 2-byte offset
180 that does not have a corresponding 4-byte offset version, in which
181 case the absolute value of kind is the inverted opcode.
182 > 4 if offset is the address of an instruction (such as jsr) with a
183 2-byte offset that does have a corresponding 4-byte offset version,
184 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
187 /* The label the relocation wants to actually transfer to. */
188 struct jcf_block *label;
191 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
192 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
194 /* State for single catch clause. */
198 struct jcf_handler *next;
200 struct jcf_block *start_label;
201 struct jcf_block *end_label;
202 struct jcf_block *handler_label;
204 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
208 /* State for the current switch statement. */
210 struct jcf_switch_state
212 struct jcf_switch_state *prev;
213 struct jcf_block *default_label;
215 struct jcf_relocation *cases;
217 HOST_WIDE_INT min_case, max_case;
220 /* This structure is used to contain the various pieces that will
221 become a .class file. */
227 struct obstack *chunk_obstack;
230 /* List of basic blocks for the current method. */
231 struct jcf_block *blocks;
232 struct jcf_block *last_block;
234 struct localvar_info *first_lvar;
235 struct localvar_info *last_lvar;
240 int linenumber_count;
242 /* Until perform_relocations, this is an upper bound on the number
243 of bytes (so far) in the instructions for the current method. */
246 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
247 struct jcf_block *labeled_blocks;
249 /* The current stack size (stack pointer) in the current method. */
252 /* The largest extent of stack size (stack pointer) in the current method. */
255 /* Contains a mapping from local var slot number to localvar_info. */
256 struct buffer localvars;
258 /* The buffer allocated for bytecode for the current jcf_block. */
259 struct buffer bytecode;
261 /* Chain of exception handlers for the current method. */
262 struct jcf_handler *handlers;
264 /* Last element in handlers chain. */
265 struct jcf_handler *last_handler;
267 /* Number of exception handlers for the current method. */
270 /* Number of finalizers we are currently nested within. */
273 /* If non-NULL, use this for the return value. */
274 tree return_value_decl;
276 /* Information about the current switch statement. */
277 struct jcf_switch_state *sw_state;
279 /* The count of jsr instructions that have been emitted. */
283 static void generate_bytecode_insns (tree, int, struct jcf_partial *);
284 static struct chunk * alloc_chunk (struct chunk *, unsigned char *,
285 int, struct obstack *);
286 static unsigned char * append_chunk (unsigned char *, int,
287 struct jcf_partial *);
288 static void append_chunk_copy (unsigned char *, int, struct jcf_partial *);
289 static struct jcf_block * gen_jcf_label (struct jcf_partial *);
290 static void finish_jcf_block (struct jcf_partial *);
291 static void define_jcf_label (struct jcf_block *, struct jcf_partial *);
292 static struct jcf_block * get_jcf_label_here (struct jcf_partial *);
293 static void put_linenumber (int, struct jcf_partial *);
294 static void localvar_alloc (tree, struct jcf_partial *);
295 static void maybe_free_localvar (tree, struct jcf_partial *, int);
296 static int get_access_flags (tree);
297 static void write_chunks (FILE *, struct chunk *);
298 static int adjust_typed_op (tree, int);
299 static void generate_bytecode_conditional (tree, struct jcf_block *,
300 struct jcf_block *, int,
301 struct jcf_partial *);
302 static void generate_bytecode_return (tree, struct jcf_partial *);
303 static void perform_relocations (struct jcf_partial *);
304 static void init_jcf_state (struct jcf_partial *, struct obstack *);
305 static void init_jcf_method (struct jcf_partial *, tree);
306 static void release_jcf_state (struct jcf_partial *);
307 static struct chunk * generate_classfile (tree, struct jcf_partial *);
308 static struct jcf_handler *alloc_handler (struct jcf_block *,
310 struct jcf_partial *);
311 static void emit_iinc (tree, HOST_WIDE_INT, struct jcf_partial *);
312 static void emit_reloc (HOST_WIDE_INT, int, struct jcf_block *,
313 struct jcf_partial *);
314 static void push_constant1 (HOST_WIDE_INT, struct jcf_partial *);
315 static void push_constant2 (HOST_WIDE_INT, struct jcf_partial *);
316 static void push_int_const (HOST_WIDE_INT, struct jcf_partial *);
317 static int find_constant_wide (HOST_WIDE_INT, HOST_WIDE_INT,
318 struct jcf_partial *);
319 static void push_long_const (HOST_WIDE_INT, HOST_WIDE_INT,
320 struct jcf_partial *);
321 static int find_constant_index (tree, struct jcf_partial *);
324 static void field_op (tree, int, struct jcf_partial *);
325 static void maybe_wide (int, int, struct jcf_partial *);
326 static void emit_dup (int, int, struct jcf_partial *);
327 static void emit_pop (int, struct jcf_partial *);
328 static void emit_load_or_store (tree, int, struct jcf_partial *);
329 static void emit_load (tree, struct jcf_partial *);
330 static void emit_store (tree, struct jcf_partial *);
331 static void emit_unop (enum java_opcode, tree, struct jcf_partial *);
332 static void emit_binop (enum java_opcode, tree, struct jcf_partial *);
335 static void emit_switch_reloc (struct jcf_block *, struct jcf_partial *);
336 static void emit_case_reloc (struct jcf_relocation *, struct jcf_partial *);
337 static void emit_if (struct jcf_block *, int, int, struct jcf_partial *);
338 static void emit_goto (struct jcf_block *, struct jcf_partial *);
339 static void emit_jsr (struct jcf_block *, struct jcf_partial *);
340 static void call_cleanups (struct jcf_block *, struct jcf_partial *);
341 static char *make_class_file_name (tree);
342 static unsigned char *append_synthetic_attribute (struct jcf_partial *);
343 static void append_deprecated_attribute (struct jcf_partial *);
344 static void append_innerclasses_attribute (struct jcf_partial *, tree);
345 static void append_innerclasses_attribute_entry (struct jcf_partial *, tree, tree);
346 static void append_gcj_attribute (struct jcf_partial *, tree);
348 /* Utility macros for appending (big-endian) data to a buffer.
349 We assume a local variable 'ptr' points into where we want to
350 write next, and we assume enough space has been allocated. */
352 #ifdef ENABLE_JC1_CHECKING
353 static int CHECK_PUT (void *, struct jcf_partial *, int);
356 CHECK_PUT (void *ptr, struct jcf_partial *state, int i)
358 if ((unsigned char *) ptr < state->chunk->data
359 || (unsigned char *) ptr + i > state->chunk->data + state->chunk->size)
365 #define CHECK_PUT(PTR, STATE, I) ((void)0)
368 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
369 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
370 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
371 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
373 /* There are some cases below where CHECK_PUT is guaranteed to fail.
374 Use the following macros in those specific cases. */
375 #define UNSAFE_PUT1(X) (*ptr++ = (X))
376 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
377 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
378 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
381 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
382 Set the data and size fields to DATA and SIZE, respectively.
383 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
385 static struct chunk *
386 alloc_chunk (struct chunk *last, unsigned char *data,
387 int size, struct obstack *work)
389 struct chunk *chunk = obstack_alloc (work, sizeof(struct chunk));
391 if (data == NULL && size > 0)
392 data = obstack_alloc (work, size);
402 #ifdef ENABLE_JC1_CHECKING
403 static int CHECK_OP (struct jcf_partial *);
406 CHECK_OP (struct jcf_partial *state)
408 if (state->bytecode.ptr > state->bytecode.limit)
414 #define CHECK_OP(STATE) ((void) 0)
417 static unsigned char *
418 append_chunk (unsigned char *data, int size, struct jcf_partial *state)
420 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
421 if (state->first == NULL)
422 state->first = state->chunk;
423 return state->chunk->data;
427 append_chunk_copy (unsigned char *data, int size, struct jcf_partial *state)
429 unsigned char *ptr = append_chunk (NULL, size, state);
430 memcpy (ptr, data, size);
433 static struct jcf_block *
434 gen_jcf_label (struct jcf_partial *state)
436 struct jcf_block *block
437 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
439 block->linenumber = -1;
440 block->pc = UNDEFINED_PC;
445 finish_jcf_block (struct jcf_partial *state)
447 struct jcf_block *block = state->last_block;
448 struct jcf_relocation *reloc;
449 int code_length = BUFFER_LENGTH (&state->bytecode);
450 int pc = state->code_length;
451 append_chunk_copy (state->bytecode.data, code_length, state);
452 BUFFER_RESET (&state->bytecode);
453 block->v.chunk = state->chunk;
455 /* Calculate code_length to the maximum value it can have. */
456 pc += block->v.chunk->size;
457 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
459 int kind = reloc->kind;
460 if (kind == SWITCH_ALIGN_RELOC)
462 else if (kind > BLOCK_START_RELOC)
463 pc += 2; /* 2-byte offset may grow to 4-byte offset */
465 pc += 5; /* May need to add a goto_w. */
467 state->code_length = pc;
/* Define LABEL as starting at the current code position: close out the
   current block (if any) and append LABEL to the method's chain of
   blocks (state->blocks / state->last_block).  */
471 define_jcf_label (struct jcf_block *label, struct jcf_partial *state)
473 if (state->last_block != NULL)
474 finish_jcf_block (state);
/* Until perform_relocations, code_length is an upper bound on the pc
   (see the comment on struct jcf_block's pc field above).  */
475 label->pc = state->code_length;
476 if (state->blocks == NULL)
477 state->blocks = label;
/* NOTE(review): the 'else' line pairing with the test on 476 is not
   visible in this extract; 479 appears to be its else-branch.  */
479 state->last_block->next = label;
480 state->last_block = label;
482 label->u.relocations = NULL;
/* Return a label for the current code location.  If the last block has
   had no bytecode emitted since it started, reuse it; otherwise create
   and define a fresh label here.  */
485 static struct jcf_block *
486 get_jcf_label_here (struct jcf_partial *state)
488 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
489 return state->last_block;
492 struct jcf_block *label = gen_jcf_label (state);
493 define_jcf_label (label, state);
498 /* Note a line number entry for the current PC and given LINE. */
501 put_linenumber (int line, struct jcf_partial *state)
503 struct jcf_block *label = get_jcf_label_here (state);
/* Each block records at most one line number (block->linenumber); if the
   current label already carries one, start a new block for LINE.  */
504 if (label->linenumber > 0)
506 label = gen_jcf_label (state);
507 define_jcf_label (label, state);
509 label->linenumber = line;
/* Track the count so the LineNumberTable attribute can be sized later.  */
510 state->linenumber_count++;
513 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
514 in the range (START_LABEL, END_LABEL). */
516 static struct jcf_handler *
517 alloc_handler (struct jcf_block *start_label, struct jcf_block *end_label,
518 struct jcf_partial *state)
520 struct jcf_handler *handler
521 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
522 handler->start_label = start_label;
523 handler->end_label = end_label;
524 handler->handler_label = get_jcf_label_here (state);
525 if (state->handlers == NULL)
526 state->handlers = handler;
528 state->last_handler->next = handler;
529 state->last_handler = handler;
530 handler->next = NULL;
531 state->num_handlers++;
536 /* The index of jvm local variable allocated for this DECL.
537 This is assigned when generating .class files;
538 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
539 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
541 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
545 struct localvar_info *next;
548 struct jcf_block *start_label;
549 struct jcf_block *end_label;
552 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
553 #define localvar_max \
554 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
557 localvar_alloc (tree decl, struct jcf_partial *state)
559 struct jcf_block *start_label = get_jcf_label_here (state);
560 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
562 struct localvar_info *info;
563 struct localvar_info **ptr = localvar_buffer;
564 struct localvar_info **limit
565 = (struct localvar_info**) state->localvars.ptr;
566 for (index = 0; ptr < limit; index++, ptr++)
569 && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
574 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
575 ptr = (struct localvar_info**) state->localvars.data + index;
576 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
578 info = obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
581 ptr[1] = (struct localvar_info *)(~0);
582 DECL_LOCAL_INDEX (decl) = index;
584 info->start_label = start_label;
586 if (debug_info_level > DINFO_LEVEL_TERSE
587 && DECL_NAME (decl) != NULL_TREE)
589 /* Generate debugging info. */
591 if (state->last_lvar != NULL)
592 state->last_lvar->next = info;
594 state->first_lvar = info;
595 state->last_lvar = info;
601 maybe_free_localvar (tree decl, struct jcf_partial *state, int really)
603 struct jcf_block *end_label = get_jcf_label_here (state);
604 int index = DECL_LOCAL_INDEX (decl);
605 struct localvar_info **ptr = &localvar_buffer [index];
606 struct localvar_info *info = *ptr;
607 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
609 info->end_label = end_label;
611 if (info->decl != decl)
618 if (ptr[1] != (struct localvar_info *)(~0))
625 #define STACK_TARGET 1
626 #define IGNORE_TARGET 2
628 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
629 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
632 get_access_flags (tree decl)
635 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
637 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
639 if (TREE_PROTECTED (decl))
640 flags |= ACC_PROTECTED;
641 if (TREE_PRIVATE (decl))
642 flags |= ACC_PRIVATE;
644 else if (TREE_CODE (decl) == TYPE_DECL)
646 if (CLASS_PUBLIC (decl))
648 if (CLASS_FINAL (decl))
650 if (CLASS_SUPER (decl))
652 if (CLASS_ABSTRACT (decl))
653 flags |= ACC_ABSTRACT;
654 if (CLASS_INTERFACE (decl))
655 flags |= ACC_INTERFACE;
656 if (CLASS_STATIC (decl))
658 if (CLASS_PRIVATE (decl))
659 flags |= ACC_PRIVATE;
660 if (CLASS_PROTECTED (decl))
661 flags |= ACC_PROTECTED;
662 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
663 || LOCAL_CLASS_P (TREE_TYPE (decl)))
664 flags |= ACC_PRIVATE;
665 if (CLASS_STRICTFP (decl))
671 if (TREE_CODE (decl) == FUNCTION_DECL)
673 if (METHOD_PUBLIC (decl))
675 if (METHOD_FINAL (decl))
677 if (METHOD_NATIVE (decl))
679 if (METHOD_STATIC (decl))
681 if (METHOD_SYNCHRONIZED (decl))
682 flags |= ACC_SYNCHRONIZED;
683 if (METHOD_ABSTRACT (decl))
684 flags |= ACC_ABSTRACT;
685 if (METHOD_STRICTFP (decl))
690 if (FIELD_PUBLIC (decl))
692 if (FIELD_FINAL (decl))
694 if (FIELD_STATIC (decl))
696 if (FIELD_VOLATILE (decl))
697 flags |= ACC_VOLATILE;
698 if (FIELD_TRANSIENT (decl))
699 flags |= ACC_TRANSIENT;
704 /* Write the list of segments starting at CHUNKS to STREAM. */
707 write_chunks (FILE* stream, struct chunk *chunks)
709 for (; chunks != NULL; chunks = chunks->next)
/* NOTE(review): fwrite's return value is ignored here, so a short or
   failed write of a segment goes undetected.  */
710 fwrite (chunks->data, chunks->size, 1, stream);
713 /* Push a 1-word constant in the constant pool at the given INDEX.
714 (Caller is responsible for doing NOTE_PUSH.) */
717 push_constant1 (HOST_WIDE_INT index, struct jcf_partial *state)
732 /* Push a 2-word constant in the constant pool at the given INDEX.
733 (Caller is responsible for doing NOTE_PUSH.) */
736 push_constant2 (HOST_WIDE_INT index, struct jcf_partial *state)
743 /* Push 32-bit integer constant on VM stack.
744 Caller is responsible for doing NOTE_PUSH. */
747 push_int_const (HOST_WIDE_INT i, struct jcf_partial *state)
/* -1 .. 5 have dedicated single-byte opcodes iconst_m1 .. iconst_5.  */
750 if (i >= -1 && i <= 5)
751 OP1(OPCODE_iconst_0 + i);
/* Signed byte range: presumably emitted as bipush — the emission lines
   are elided in this extract; TODO confirm.  */
752 else if (i >= -128 && i < 128)
/* Signed 16-bit range: presumably sipush — emission lines also elided.  */
757 else if (i >= -32768 && i < 32768)
/* Otherwise fall back to a CONSTANT_Integer pool entry pushed via ldc.  */
764 i = find_constant1 (&state->cpool, CONSTANT_Integer,
765 (jword)(i & 0xFFFFFFFF));
766 push_constant1 (i, state);
/* Find (or allocate) a CONSTANT_Long constant-pool entry for the 64-bit
   value whose low/high halves are LO and HI.  Return its pool index.  */
771 find_constant_wide (HOST_WIDE_INT lo, HOST_WIDE_INT hi,
772 struct jcf_partial *state)
774 HOST_WIDE_INT w1, w2;
/* Arithmetic shift right by 32 (negative count to lshift_double), so w1
   receives the high 32 bits of the 64-bit value.  */
775 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
/* The pool entry stores the high word first, then the low word.  */
776 return find_constant2 (&state->cpool, CONSTANT_Long,
777 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
780 /* Find or allocate a constant pool entry for the given VALUE.
781 Return the index in the constant pool. */
784 find_constant_index (tree value, struct jcf_partial *state)
786 if (TREE_CODE (value) == INTEGER_CST)
788 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
789 return find_constant1 (&state->cpool, CONSTANT_Integer,
790 (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
792 return find_constant_wide (TREE_INT_CST_LOW (value),
793 TREE_INT_CST_HIGH (value), state);
795 else if (TREE_CODE (value) == REAL_CST)
799 real_to_target (words, &TREE_REAL_CST (value),
800 TYPE_MODE (TREE_TYPE (value)));
801 words[0] &= 0xffffffff;
802 words[1] &= 0xffffffff;
804 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
805 return find_constant1 (&state->cpool, CONSTANT_Float, (jword)words[0]);
807 return find_constant2 (&state->cpool, CONSTANT_Double,
808 (jword)words[1-FLOAT_WORDS_BIG_ENDIAN],
809 (jword)words[FLOAT_WORDS_BIG_ENDIAN]);
811 else if (TREE_CODE (value) == STRING_CST)
812 return find_string_constant (&state->cpool, value);
818 /* Push 64-bit long constant on VM stack.
819 Caller is responsible for doing NOTE_PUSH. */
822 push_long_const (HOST_WIDE_INT lo, HOST_WIDE_INT hi, struct jcf_partial *state)
824 HOST_WIDE_INT highpart, dummy;
825 jint lowpart = WORD_TO_INT (lo);
827 rshift_double (lo, hi, 32, 64, &highpart, &dummy, 1);
829 if (highpart == 0 && (lowpart == 0 || lowpart == 1))
832 OP1(OPCODE_lconst_0 + lowpart);
834 else if ((highpart == 0 && lowpart > 0 && lowpart < 32768)
835 || (highpart == -1 && lowpart < 0 && lowpart >= -32768))
837 push_int_const (lowpart, state);
842 push_constant2 (find_constant_wide (lo, hi, state), state);
846 field_op (tree field, int opcode, struct jcf_partial *state)
848 int index = find_fieldref_index (&state->cpool, field);
854 /* Returns an integer in the range 0 (for 'int') through 4 (for object
855 reference) to 7 (for 'short') which matches the pattern of how JVM
856 opcodes typically depend on the operand type. */
859 adjust_typed_op (tree type, int max)
861 switch (TREE_CODE (type))
864 case RECORD_TYPE: return 4;
866 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
868 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
870 switch (TYPE_PRECISION (type))
872 case 8: return max < 5 ? 0 : 5;
873 case 16: return max < 7 ? 0 : 7;
879 switch (TYPE_PRECISION (type))
892 maybe_wide (int opcode, int index, struct jcf_partial *state)
909 /* Compile code to duplicate with offset, where
910 SIZE is the size of the stack item to duplicate (1 or 2), and
911 OFFSET is where to insert the result (must be 0, 1, or 2).
912 (The new words get inserted at stack[SP-size-offset].) */
915 emit_dup (int size, int offset, struct jcf_partial *state)
922 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
923 else if (offset == 1)
924 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
925 else if (offset == 2)
926 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
/* Emit an instruction discarding SIZE stack words: OPCODE_pop for
   SIZE == 1, the following opcode (pop2) for SIZE == 2.  The RESERVE
   and NOTE_POP bookkeeping lines are elided in this extract.  */
934 emit_pop (int size, struct jcf_partial *state)
937 OP1 (OPCODE_pop - 1 + size);
941 emit_iinc (tree var, HOST_WIDE_INT value, struct jcf_partial *state)
943 int slot = DECL_LOCAL_INDEX (var);
945 if (value < -128 || value > 127 || slot >= 256)
963 emit_load_or_store (tree var, /* Variable to load from or store into. */
964 int opcode, /* Either OPCODE_iload or OPCODE_istore. */
965 struct jcf_partial *state)
967 tree type = TREE_TYPE (var);
968 int kind = adjust_typed_op (type, 4);
969 int index = DECL_LOCAL_INDEX (var);
973 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
976 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
/* Emit code to load local variable VAR onto the JVM stack, then record
   the push: wide types (long/double) occupy two stack words.  */
980 emit_load (tree var, struct jcf_partial *state)
982 emit_load_or_store (var, OPCODE_iload, state);
983 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
/* Emit code to store the top of the JVM stack into local variable VAR,
   then record the pop: wide types (long/double) occupy two words.  */
987 emit_store (tree var, struct jcf_partial *state)
989 emit_load_or_store (var, OPCODE_istore, state);
990 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
994 emit_unop (enum java_opcode opcode, tree type ATTRIBUTE_UNUSED,
995 struct jcf_partial *state)
1002 emit_binop (enum java_opcode opcode, tree type, struct jcf_partial *state)
1004 int size = TYPE_IS_WIDE (type) ? 2 : 1;
1011 emit_reloc (HOST_WIDE_INT value, int kind,
1012 struct jcf_block *target, struct jcf_partial *state)
1014 struct jcf_relocation *reloc
1015 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1016 struct jcf_block *block = state->last_block;
1017 reloc->next = block->u.relocations;
1018 block->u.relocations = reloc;
1019 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1020 reloc->label = target;
1022 if (kind == 0 || kind == BLOCK_START_RELOC)
1024 else if (kind != SWITCH_ALIGN_RELOC)
/* Emit a 4-byte switch-table entry referring to LABEL, to be relocated
   relative to the start of the containing block (BLOCK_START_RELOC) —
   the final offset is unknown until the 0-3 padding bytes are fixed.  */
1029 emit_switch_reloc (struct jcf_block *label, struct jcf_partial *state)
1031 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1034 /* Similar to emit_switch_reloc,
1035 but re-uses an existing case reloc. */
1038 emit_case_reloc (struct jcf_relocation *reloc, struct jcf_partial *state)
1040 struct jcf_block *block = state->last_block;
/* Push RELOC onto the current block's relocation list (kept in reverse
   offset order, per the comment on struct jcf_block's relocations).  */
1041 reloc->next = block->u.relocations;
1042 block->u.relocations = reloc;
1043 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1044 reloc->kind = BLOCK_START_RELOC;
1048 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1049 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1052 emit_if (struct jcf_block *target, int opcode, int inv_opcode,
1053 struct jcf_partial *state)
1057 /* value is 1 byte from reloc back to start of instruction. */
1058 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
/* Emit an unconditional branch to TARGET.  The relocation kind is
   OPCODE_goto_w: a 2-byte branch that perform_relocations may widen to
   the 4-byte goto_w form (see the kind field of struct jcf_relocation).  */
1062 emit_goto (struct jcf_block *target, struct jcf_partial *state)
1066 /* Value is 1 byte from reloc back to start of instruction. */
1067 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
/* Emit a jsr (subroutine call) to TARGET, used for cleanup subroutines.
   Relocation kind OPCODE_jsr_w allows widening to the 4-byte jsr_w form
   during perform_relocations.  */
1071 emit_jsr (struct jcf_block *target, struct jcf_partial *state)
1075 /* Value is 1 byte from reloc back to start of instruction. */
1076 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1080 /* Generate code to evaluate EXP. If the result is true,
1081 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1082 TRUE_BRANCH_FIRST is a code generation hint that the
1083 TRUE_LABEL may follow right after this. (The idea is that we
1084 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1087 generate_bytecode_conditional (tree exp,
1088 struct jcf_block *true_label,
1089 struct jcf_block *false_label,
1090 int true_branch_first,
1091 struct jcf_partial *state)
1093 tree exp0, exp1, type;
1094 int save_SP = state->code_SP;
1095 enum java_opcode op, negop;
1096 switch (TREE_CODE (exp))
1099 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
1103 struct jcf_block *then_label = gen_jcf_label (state);
1104 struct jcf_block *else_label = gen_jcf_label (state);
1105 int save_SP_before, save_SP_after;
1106 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1107 then_label, else_label, 1, state);
1108 define_jcf_label (then_label, state);
1109 save_SP_before = state->code_SP;
1110 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1111 true_label, false_label, 1, state);
1112 save_SP_after = state->code_SP;
1113 state->code_SP = save_SP_before;
1114 define_jcf_label (else_label, state);
1115 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1116 true_label, false_label,
1117 true_branch_first, state);
1118 if (state->code_SP != save_SP_after)
1122 case TRUTH_NOT_EXPR:
1123 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label,
1124 true_label, ! true_branch_first, state);
1126 case TRUTH_ANDIF_EXPR:
1128 struct jcf_block *next_label = gen_jcf_label (state);
1129 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1130 next_label, false_label, 1, state);
1131 define_jcf_label (next_label, state);
1132 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1133 true_label, false_label, 1, state);
1136 case TRUTH_ORIF_EXPR:
1138 struct jcf_block *next_label = gen_jcf_label (state);
1139 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1140 true_label, next_label, 1, state);
1141 define_jcf_label (next_label, state);
1142 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1143 true_label, false_label, 1, state);
1147 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1148 set it to the corresponding 1-operand if<COND> instructions. */
1152 /* The opcodes with their inverses are allocated in pairs.
1153 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1154 negop = (op & 1) ? op + 1 : op - 1;
1156 if (true_branch_first)
1158 emit_if (false_label, negop, op, state);
1159 emit_goto (true_label, state);
1163 emit_if (true_label, op, negop, state);
1164 emit_goto (false_label, state);
1168 op = OPCODE_if_icmpeq;
1171 op = OPCODE_if_icmpne;
1174 op = OPCODE_if_icmpgt;
1177 op = OPCODE_if_icmplt;
1180 op = OPCODE_if_icmpge;
1183 op = OPCODE_if_icmple;
1186 exp0 = TREE_OPERAND (exp, 0);
1187 exp1 = TREE_OPERAND (exp, 1);
1188 type = TREE_TYPE (exp0);
1189 switch (TREE_CODE (type))
1192 case POINTER_TYPE: case RECORD_TYPE:
1193 switch (TREE_CODE (exp))
1195 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1196 case NE_EXPR: op = OPCODE_if_acmpne; break;
1199 if (integer_zerop (exp1) || integer_zerop (exp0))
1201 generate_bytecode_insns (integer_zerop (exp0) ? exp1 : exp0,
1202 STACK_TARGET, state);
1203 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1204 negop = (op & 1) ? op - 1 : op + 1;
1208 generate_bytecode_insns (exp0, STACK_TARGET, state);
1209 generate_bytecode_insns (exp1, STACK_TARGET, state);
1213 generate_bytecode_insns (exp0, STACK_TARGET, state);
1214 generate_bytecode_insns (exp1, STACK_TARGET, state);
1215 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1219 if (TYPE_PRECISION (type) > 32)
1230 if (TYPE_PRECISION (type) > 32)
1232 generate_bytecode_insns (exp0, STACK_TARGET, state);
1233 generate_bytecode_insns (exp1, STACK_TARGET, state);
1241 if (integer_zerop (exp1))
1243 generate_bytecode_insns (exp0, STACK_TARGET, state);
1247 if (integer_zerop (exp0))
1251 case OPCODE_if_icmplt:
1252 case OPCODE_if_icmpge:
1255 case OPCODE_if_icmpgt:
1256 case OPCODE_if_icmple:
1262 generate_bytecode_insns (exp1, STACK_TARGET, state);
1266 generate_bytecode_insns (exp0, STACK_TARGET, state);
1267 generate_bytecode_insns (exp1, STACK_TARGET, state);
1273 generate_bytecode_insns (exp, STACK_TARGET, state);
1275 if (true_branch_first)
1277 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1278 emit_goto (true_label, state);
1282 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1283 emit_goto (false_label, state);
1287 if (save_SP != state->code_SP)
1291 /* Call pending cleanups i.e. those for surrounding TRY_FINALLY_EXPRs,
1292 but only as far out as LIMIT (since we are about to jump to the
1293 label that is LIMIT). */
1296 call_cleanups (struct jcf_block *limit, struct jcf_partial *state)
1298 struct jcf_block *block = state->labeled_blocks;
     /* Walk the chain of active labeled blocks from the innermost
        outwards, stopping at (and not including) LIMIT.  */
1299 for (; block != limit; block = block->next)
     /* A block whose pc is PENDING_CLEANUP_PC is a pending `finally'
        clause set up as a jsr subroutine (see the TRY_FINALLY_EXPR
        handling, which assigns PENDING_CLEANUP_PC to finally_label);
        invoke it via `jsr'.  */
1301 if (block->pc == PENDING_CLEANUP_PC)
1302 emit_jsr (block, state);
/* Generate bytecode to return the value of EXP from the current method.
   Runs any pending cleanups (surrounding `finally' clauses) before the
   actual [ilfda]return instruction is emitted.  */
1307 generate_bytecode_return (tree exp, struct jcf_partial *state)
1309 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
1310 int returns_void = TREE_CODE (return_type) == VOID_TYPE;
1315 switch (TREE_CODE (exp))
     /* COMPOUND_EXPR: emit the first operand for side effects only,
        then return the value of the second operand.  */
1318 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1320 exp = TREE_OPERAND (exp, 1);
     /* COND_EXPR: branch on the condition, then emit a complete
        return sequence in each arm.  */
1324 struct jcf_block *then_label = gen_jcf_label (state);
1325 struct jcf_block *else_label = gen_jcf_label (state);
1326 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1327 then_label, else_label, 1, state);
1328 define_jcf_label (then_label, state);
1329 generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1330 define_jcf_label (else_label, state);
1331 generate_bytecode_return (TREE_OPERAND (exp, 2), state);
     /* Default: push the return value onto the stack (or evaluate it
        for side effects only when the method returns void).  */
1335 generate_bytecode_insns (exp,
1336 returns_void ? IGNORE_TARGET
1337 : STACK_TARGET, state);
     /* Void return: just run the pending cleanups before returning.  */
1343 call_cleanups (NULL, state);
     /* Select [ilfda]return according to the method's return type.  */
1347 op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
1348 if (state->num_finalizers > 0)
     /* Pending `finally' subroutines exist: save the return value in a
        temporary local, run the cleanups, then reload the value just
        before the return instruction.  The temporary is created lazily
        and shared by all returns in this method.  */
1350 if (state->return_value_decl == NULL_TREE)
1352 state->return_value_decl
1353 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1354 localvar_alloc (state->return_value_decl, state);
1356 emit_store (state->return_value_decl, state);
1357 call_cleanups (NULL, state);
1358 emit_load (state->return_value_decl, state);
1359 /* If we call maybe_free_localvar (state->return_value_decl, state, 1),
1360 then we risk the save decl erroneously re-used in the
1361 finalizer. Instead, we keep the state->return_value_decl
1362 allocated through the rest of the method. This is not
1363 the greatest solution, but it is at least simple and safe. */
1370 /* Generate bytecode for sub-expression EXP of METHOD.
1371 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1374 generate_bytecode_insns (tree exp, int target, struct jcf_partial *state)
1377 enum java_opcode jopcode;
1379 HOST_WIDE_INT value;
1384 if (exp == NULL && target == IGNORE_TARGET)
1387 type = TREE_TYPE (exp);
1389 switch (TREE_CODE (exp))
1392 if (BLOCK_EXPR_BODY (exp))
1395 tree body = BLOCK_EXPR_BODY (exp);
1396 long jsrs = state->num_jsrs;
1397 for (local = BLOCK_EXPR_DECLS (exp); local; )
1399 tree next = TREE_CHAIN (local);
1400 localvar_alloc (local, state);
1403 /* Avoid deep recursion for long blocks. */
1404 while (TREE_CODE (body) == COMPOUND_EXPR)
1406 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1407 body = TREE_OPERAND (body, 1);
1409 generate_bytecode_insns (body, target, state);
1411 for (local = BLOCK_EXPR_DECLS (exp); local; )
1413 tree next = TREE_CHAIN (local);
1414 maybe_free_localvar (local, state, state->num_jsrs <= jsrs);
1420 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1421 /* Normally the first operand to a COMPOUND_EXPR must complete
1422 normally. However, in the special case of a do-while
1423 statement this is not necessarily the case. */
1424 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0)))
1425 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1427 case EXPR_WITH_FILE_LOCATION:
1429 location_t saved_location = input_location;
1430 tree body = EXPR_WFL_NODE (exp);
1431 if (body == empty_stmt_node)
1433 input_filename = EXPR_WFL_FILENAME (exp);
1434 input_line = EXPR_WFL_LINENO (exp);
1435 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && input_line > 0
1436 && debug_info_level > DINFO_LEVEL_NONE)
1437 put_linenumber (input_line, state);
1438 generate_bytecode_insns (body, target, state);
1439 input_location = saved_location;
1443 if (target == IGNORE_TARGET) ; /* do nothing */
1444 else if (TREE_CODE (type) == POINTER_TYPE)
1446 if (! integer_zerop (exp))
1449 OP1 (OPCODE_aconst_null);
1452 else if (TYPE_PRECISION (type) <= 32)
1454 push_int_const (TREE_INT_CST_LOW (exp), state);
1459 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1466 int prec = TYPE_PRECISION (type) >> 5;
1468 if (real_zerop (exp) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp)))
1469 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1470 else if (real_onep (exp))
1471 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1472 else if (prec == 1 && real_twop (exp))
1473 OP1 (OPCODE_fconst_2);
1474 /* ??? We could also use iconst_3/ldc followed by i2f/i2d
1475 for other float/double when the value is a small integer. */
1478 offset = find_constant_index (exp, state);
1480 push_constant1 (offset, state);
1482 push_constant2 (offset, state);
1488 push_constant1 (find_string_constant (&state->cpool, exp), state);
1492 if (TREE_STATIC (exp))
1494 field_op (exp, OPCODE_getstatic, state);
1495 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1498 /* ... fall through ... */
1500 emit_load (exp, state);
1502 case NON_LVALUE_EXPR:
1504 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1507 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1508 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1509 if (target != IGNORE_TARGET)
1511 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1514 if (! TYPE_IS_WIDE (type))
1520 tree obj = TREE_OPERAND (exp, 0);
1521 tree field = TREE_OPERAND (exp, 1);
1522 int is_static = FIELD_STATIC (field);
1523 generate_bytecode_insns (obj,
1524 is_static ? IGNORE_TARGET : target, state);
1525 if (target != IGNORE_TARGET)
1527 if (DECL_NAME (field) == length_identifier_node && !is_static
1528 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1531 OP1 (OPCODE_arraylength);
1535 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1539 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1544 case TRUTH_ANDIF_EXPR:
1545 case TRUTH_ORIF_EXPR:
1553 struct jcf_block *then_label = gen_jcf_label (state);
1554 struct jcf_block *else_label = gen_jcf_label (state);
1555 struct jcf_block *end_label = gen_jcf_label (state);
1556 generate_bytecode_conditional (exp,
1557 then_label, else_label, 1, state);
1558 define_jcf_label (then_label, state);
1559 push_int_const (1, state);
1560 emit_goto (end_label, state);
1561 define_jcf_label (else_label, state);
1562 push_int_const (0, state);
1563 define_jcf_label (end_label, state);
1569 struct jcf_block *then_label = gen_jcf_label (state);
1570 struct jcf_block *else_label = gen_jcf_label (state);
1571 struct jcf_block *end_label = gen_jcf_label (state);
1572 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1573 then_label, else_label, 1, state);
1574 define_jcf_label (then_label, state);
1575 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1576 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1577 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1578 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1579 emit_goto (end_label, state);
1580 define_jcf_label (else_label, state);
1581 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1582 define_jcf_label (end_label, state);
1583 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1584 if (TREE_TYPE (exp) != void_type_node)
1585 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1590 struct jcf_switch_state *sw_state = state->sw_state;
1591 struct jcf_relocation *reloc
1592 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1593 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1595 reloc->label = get_jcf_label_here (state);
1596 reloc->offset = case_value;
1597 reloc->next = sw_state->cases;
1598 sw_state->cases = reloc;
1599 if (sw_state->num_cases == 0)
1601 sw_state->min_case = case_value;
1602 sw_state->max_case = case_value;
1606 if (case_value < sw_state->min_case)
1607 sw_state->min_case = case_value;
1608 if (case_value > sw_state->max_case)
1609 sw_state->max_case = case_value;
1611 sw_state->num_cases++;
1615 state->sw_state->default_label = get_jcf_label_here (state);
1620 /* The SWITCH_EXPR has three parts, generated in the following order:
1621 1. the switch_expression (the value used to select the correct case);
1623 3. the switch_instruction (the tableswitch/lookupswitch instruction).
1624 After code generation, we will re-order them in the order 1, 3, 2.
1625 This is to avoid any extra GOTOs. */
1626 struct jcf_switch_state sw_state;
1627 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1628 struct jcf_block *body_last; /* Last block of the switch_body. */
1629 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1630 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1631 struct jcf_block *body_block;
1633 sw_state.prev = state->sw_state;
1634 state->sw_state = &sw_state;
1635 sw_state.cases = NULL;
1636 sw_state.num_cases = 0;
1637 sw_state.default_label = NULL;
1638 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1639 expression_last = state->last_block;
1640 /* Force a new block here. */
1641 body_block = gen_jcf_label (state);
1642 define_jcf_label (body_block, state);
1643 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1644 body_last = state->last_block;
1646 switch_instruction = gen_jcf_label (state);
1647 define_jcf_label (switch_instruction, state);
1648 if (sw_state.default_label == NULL)
1649 sw_state.default_label = gen_jcf_label (state);
1651 if (sw_state.num_cases <= 1)
1653 if (sw_state.num_cases == 0)
1655 emit_pop (1, state);
1660 push_int_const (sw_state.cases->offset, state);
1662 emit_if (sw_state.cases->label,
1663 OPCODE_if_icmpeq, OPCODE_if_icmpne, state);
1665 emit_goto (sw_state.default_label, state);
1670 unsigned HOST_WIDE_INT delta;
1671 /* Copy the chain of relocs into a sorted array. */
1672 struct jcf_relocation **relocs
1673 = xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1674 /* The relocs array is a buffer with a gap.
1675 The assumption is that cases will normally come in "runs". */
1677 int gap_end = sw_state.num_cases;
1678 struct jcf_relocation *reloc;
1679 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1681 HOST_WIDE_INT case_value = reloc->offset;
1682 while (gap_end < sw_state.num_cases)
1684 struct jcf_relocation *end = relocs[gap_end];
1685 if (case_value <= end->offset)
1687 relocs[gap_start++] = end;
1690 while (gap_start > 0)
1692 struct jcf_relocation *before = relocs[gap_start-1];
1693 if (case_value >= before->offset)
1695 relocs[--gap_end] = before;
1698 relocs[gap_start++] = reloc;
1699 /* Note we don't check for duplicates. This is
1700 handled by the parser. */
1703 /* We could have DELTA < 0 if sw_state.min_case is
1704 something like Integer.MIN_VALUE. That is why delta is
1706 delta = sw_state.max_case - sw_state.min_case;
1707 if (2 * (unsigned) sw_state.num_cases >= delta)
1708 { /* Use tableswitch. */
1710 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1711 OP1 (OPCODE_tableswitch);
1712 emit_reloc (RELOCATION_VALUE_0,
1713 SWITCH_ALIGN_RELOC, NULL, state);
1714 emit_switch_reloc (sw_state.default_label, state);
1715 OP4 (sw_state.min_case);
1716 OP4 (sw_state.max_case);
1717 for (i = sw_state.min_case; ; )
1719 reloc = relocs[index];
1720 if (i == reloc->offset)
1722 emit_case_reloc (reloc, state);
1723 if (i == sw_state.max_case)
1728 emit_switch_reloc (sw_state.default_label, state);
1733 { /* Use lookupswitch. */
1734 RESERVE(9 + 8 * sw_state.num_cases);
1735 OP1 (OPCODE_lookupswitch);
1736 emit_reloc (RELOCATION_VALUE_0,
1737 SWITCH_ALIGN_RELOC, NULL, state);
1738 emit_switch_reloc (sw_state.default_label, state);
1739 OP4 (sw_state.num_cases);
1740 for (i = 0; i < sw_state.num_cases; i++)
1742 struct jcf_relocation *reloc = relocs[i];
1743 OP4 (reloc->offset);
1744 emit_case_reloc (reloc, state);
1750 instruction_last = state->last_block;
1751 if (sw_state.default_label->pc < 0)
1752 define_jcf_label (sw_state.default_label, state);
1753 else /* Force a new block. */
1754 sw_state.default_label = get_jcf_label_here (state);
1755 /* Now re-arrange the blocks so the switch_instruction
1756 comes before the switch_body. */
1757 switch_length = state->code_length - switch_instruction->pc;
1758 switch_instruction->pc = body_block->pc;
1759 instruction_last->next = body_block;
1760 instruction_last->v.chunk->next = body_block->v.chunk;
1761 expression_last->next = switch_instruction;
1762 expression_last->v.chunk->next = switch_instruction->v.chunk;
1763 body_last->next = sw_state.default_label;
1764 body_last->v.chunk->next = NULL;
1765 state->chunk = body_last->v.chunk;
1766 for (; body_block != sw_state.default_label; body_block = body_block->next)
1767 body_block->pc += switch_length;
1769 state->sw_state = sw_state.prev;
1774 exp = TREE_OPERAND (exp, 0);
1775 if (exp == NULL_TREE)
1776 exp = empty_stmt_node;
1777 else if (TREE_CODE (exp) != MODIFY_EXPR)
1780 exp = TREE_OPERAND (exp, 1);
1781 generate_bytecode_return (exp, state);
1783 case LABELED_BLOCK_EXPR:
1785 struct jcf_block *end_label = gen_jcf_label (state);
1786 end_label->next = state->labeled_blocks;
1787 state->labeled_blocks = end_label;
1788 end_label->pc = PENDING_EXIT_PC;
1789 end_label->u.labeled_block = exp;
1790 if (LABELED_BLOCK_BODY (exp))
1791 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1792 if (state->labeled_blocks != end_label)
1794 state->labeled_blocks = end_label->next;
1795 define_jcf_label (end_label, state);
1800 tree body = TREE_OPERAND (exp, 0);
1802 if (TREE_CODE (body) == COMPOUND_EXPR
1803 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1805 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1806 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1807 struct jcf_block *head_label;
1808 struct jcf_block *body_label;
1809 struct jcf_block *end_label = gen_jcf_label (state);
1810 struct jcf_block *exit_label = state->labeled_blocks;
1811 head_label = gen_jcf_label (state);
1812 emit_goto (head_label, state);
1813 body_label = get_jcf_label_here (state);
1814 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1815 define_jcf_label (head_label, state);
1816 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1817 end_label, body_label, 1, state);
1818 define_jcf_label (end_label, state);
1823 struct jcf_block *head_label = get_jcf_label_here (state);
1824 generate_bytecode_insns (body, IGNORE_TARGET, state);
1825 if (CAN_COMPLETE_NORMALLY (body))
1826 emit_goto (head_label, state);
1832 struct jcf_block *label = state->labeled_blocks;
1833 struct jcf_block *end_label = gen_jcf_label (state);
1834 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1835 label, end_label, 0, state);
1836 define_jcf_label (end_label, state);
1839 case EXIT_BLOCK_EXPR:
1841 struct jcf_block *label = state->labeled_blocks;
1842 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1843 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1844 label = label->next;
1845 call_cleanups (label, state);
1846 emit_goto (label, state);
1850 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1851 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1852 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1853 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1856 arg = TREE_OPERAND (exp, 1);
1857 exp = TREE_OPERAND (exp, 0);
1858 type = TREE_TYPE (exp);
1859 size = TYPE_IS_WIDE (type) ? 2 : 1;
1860 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1861 && ! TREE_STATIC (exp)
1862 && TREE_CODE (type) == INTEGER_TYPE
1863 && TYPE_PRECISION (type) == 32)
1865 if (target != IGNORE_TARGET && post_op)
1866 emit_load (exp, state);
1867 emit_iinc (exp, value, state);
1868 if (target != IGNORE_TARGET && ! post_op)
1869 emit_load (exp, state);
1872 if (TREE_CODE (exp) == COMPONENT_REF)
1874 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1875 emit_dup (1, 0, state);
1876 /* Stack: ..., objectref, objectref. */
1877 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1879 /* Stack: ..., objectref, oldvalue. */
1882 else if (TREE_CODE (exp) == ARRAY_REF)
1884 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1885 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1886 emit_dup (2, 0, state);
1887 /* Stack: ..., array, index, array, index. */
1888 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1892 /* Stack: ..., array, index, oldvalue. */
1895 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1897 generate_bytecode_insns (exp, STACK_TARGET, state);
1898 /* Stack: ..., oldvalue. */
1904 if (target != IGNORE_TARGET && post_op)
1905 emit_dup (size, offset, state);
1906 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1907 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1908 /* Stack, otherwise: ..., [result, ] oldvalue. */
1909 generate_bytecode_insns (arg, STACK_TARGET, state);
1910 emit_binop ((value >= 0 ? OPCODE_iadd : OPCODE_isub)
1911 + adjust_typed_op (type, 3),
1913 if (target != IGNORE_TARGET && ! post_op)
1914 emit_dup (size, offset, state);
1915 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1916 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1917 /* Stack, otherwise: ..., [result, ] newvalue. */
1918 goto finish_assignment;
1922 tree lhs = TREE_OPERAND (exp, 0);
1923 tree rhs = TREE_OPERAND (exp, 1);
1926 /* See if we can use the iinc instruction. */
1927 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1928 && ! TREE_STATIC (lhs)
1929 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1930 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1931 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1933 tree arg0 = TREE_OPERAND (rhs, 0);
1934 tree arg1 = TREE_OPERAND (rhs, 1);
1935 HOST_WIDE_INT min_value = -32768;
1936 HOST_WIDE_INT max_value = 32767;
1937 if (TREE_CODE (rhs) == MINUS_EXPR)
1942 else if (arg1 == lhs)
1945 arg1 = TREE_OPERAND (rhs, 0);
1947 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
1949 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
1950 value = TREE_INT_CST_LOW (arg1);
1951 if ((hi_value == 0 && value <= max_value)
1952 || (hi_value == -1 && value >= min_value))
1954 if (TREE_CODE (rhs) == MINUS_EXPR)
1956 emit_iinc (lhs, value, state);
1957 if (target != IGNORE_TARGET)
1958 emit_load (lhs, state);
1964 if (TREE_CODE (lhs) == COMPONENT_REF)
1966 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
1967 STACK_TARGET, state);
1970 else if (TREE_CODE (lhs) == ARRAY_REF)
1972 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
1973 STACK_TARGET, state);
1974 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
1975 STACK_TARGET, state);
1981 /* If the rhs is a binary expression and the left operand is
1982 `==' to the lhs then we have an OP= expression. In this
1983 case we must do some special processing. */
1984 if (TREE_CODE_CLASS (TREE_CODE (rhs)) == '2'
1985 && lhs == TREE_OPERAND (rhs, 0))
1987 if (TREE_CODE (lhs) == COMPONENT_REF)
1989 tree field = TREE_OPERAND (lhs, 1);
1990 if (! FIELD_STATIC (field))
1992 /* Duplicate the object reference so we can get
1994 emit_dup (TYPE_IS_WIDE (field) ? 2 : 1, 0, state);
1997 field_op (field, (FIELD_STATIC (field)
2002 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2004 else if (TREE_CODE (lhs) == VAR_DECL
2005 || TREE_CODE (lhs) == PARM_DECL)
2007 if (FIELD_STATIC (lhs))
2009 field_op (lhs, OPCODE_getstatic, state);
2010 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2013 emit_load (lhs, state);
2015 else if (TREE_CODE (lhs) == ARRAY_REF)
2017 /* Duplicate the array and index, which are on the
2018 stack, so that we can load the old value. */
2019 emit_dup (2, 0, state);
2021 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (lhs), 7);
2024 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2029 /* This function correctly handles the case where the LHS
2030 of a binary expression is NULL_TREE. */
2031 rhs = build (TREE_CODE (rhs), TREE_TYPE (rhs),
2032 NULL_TREE, TREE_OPERAND (rhs, 1));
2035 generate_bytecode_insns (rhs, STACK_TARGET, state);
2036 if (target != IGNORE_TARGET)
2037 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
2043 if (TREE_CODE (exp) == COMPONENT_REF)
2045 tree field = TREE_OPERAND (exp, 1);
2046 if (! FIELD_STATIC (field))
2049 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2052 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2054 else if (TREE_CODE (exp) == VAR_DECL
2055 || TREE_CODE (exp) == PARM_DECL)
2057 if (FIELD_STATIC (exp))
2059 field_op (exp, OPCODE_putstatic, state);
2060 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2063 emit_store (exp, state);
2065 else if (TREE_CODE (exp) == ARRAY_REF)
2067 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2070 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2076 jopcode = OPCODE_iadd;
2079 jopcode = OPCODE_isub;
2082 jopcode = OPCODE_imul;
2084 case TRUNC_DIV_EXPR:
2086 jopcode = OPCODE_idiv;
2088 case TRUNC_MOD_EXPR:
2089 jopcode = OPCODE_irem;
2091 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2092 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
2093 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2094 case TRUTH_AND_EXPR:
2095 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2097 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2098 case TRUTH_XOR_EXPR:
2099 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2102 tree arg0 = TREE_OPERAND (exp, 0);
2103 tree arg1 = TREE_OPERAND (exp, 1);
2104 jopcode += adjust_typed_op (type, 3);
2105 if (arg0 != NULL_TREE && operand_equal_p (arg0, arg1, 0))
2107 /* fold may (e.g) convert 2*x to x+x. */
2108 generate_bytecode_insns (arg0, target, state);
2109 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2113 /* ARG0 will be NULL_TREE if we're handling an `OP='
2114 expression. In this case the stack already holds the
2115 LHS. See the MODIFY_EXPR case. */
2116 if (arg0 != NULL_TREE)
2117 generate_bytecode_insns (arg0, target, state);
2118 if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2119 arg1 = convert (int_type_node, arg1);
2120 generate_bytecode_insns (arg1, target, state);
2122 /* For most binary operations, both operands and the result have the
2123 same type. Shift operations are different. Using arg1's type
2124 gets us the correct SP adjustment in all cases. */
2125 if (target == STACK_TARGET)
2126 emit_binop (jopcode, TREE_TYPE (arg1), state);
2129 case TRUTH_NOT_EXPR:
2131 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2132 if (target == STACK_TARGET)
2134 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2135 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2139 NOTE_PUSH (1 + is_long);
2140 OP1 (OPCODE_ixor + is_long);
2141 NOTE_POP (1 + is_long);
2145 jopcode = OPCODE_ineg;
2146 jopcode += adjust_typed_op (type, 3);
2147 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2148 if (target == STACK_TARGET)
2149 emit_unop (jopcode, type, state);
2151 case INSTANCEOF_EXPR:
2153 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2154 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2156 OP1 (OPCODE_instanceof);
2161 /* Because the state associated with a SAVE_EXPR tree node must
2162 be a RTL expression, we use it to store the DECL_LOCAL_INDEX
2163 of a temporary variable in a CONST_INT. */
2164 if (! SAVE_EXPR_RTL (exp))
2166 tree type = TREE_TYPE (exp);
2167 tree decl = build_decl (VAR_DECL, NULL_TREE, type);
2168 generate_bytecode_insns (TREE_OPERAND (exp, 0),
2169 STACK_TARGET, state);
2170 localvar_alloc (decl, state);
2171 SAVE_EXPR_RTL (exp) = GEN_INT (DECL_LOCAL_INDEX (decl));
2172 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1, 0, state);
2173 emit_store (decl, state);
2177 /* The following code avoids creating a temporary DECL just
2178 to pass to emit_load. This code could be factored with
2179 the similar implementation in emit_load_or_store. */
2180 tree type = TREE_TYPE (exp);
2181 int kind = adjust_typed_op (type, 4);
2182 int index = (int) INTVAL (SAVE_EXPR_RTL (exp));
2185 RESERVE (1); /* [ilfda]load_[0123] */
2186 OP1 (OPCODE_iload + 5 + 4*kind + index);
2188 else /* [ilfda]load */
2189 maybe_wide (OPCODE_iload + kind, index, state);
2190 NOTE_PUSH (TYPE_IS_WIDE (type) ? 2 : 1);
2196 case FIX_TRUNC_EXPR:
2198 tree src = TREE_OPERAND (exp, 0);
2199 tree src_type = TREE_TYPE (src);
2200 tree dst_type = TREE_TYPE (exp);
2201 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2202 if (target == IGNORE_TARGET || src_type == dst_type)
2204 if (TREE_CODE (dst_type) == POINTER_TYPE)
2206 if (TREE_CODE (exp) == CONVERT_EXPR)
2208 int index = find_class_constant (&state->cpool,
2209 TREE_TYPE (dst_type));
2211 OP1 (OPCODE_checkcast);
2215 else /* Convert numeric types. */
2217 int wide_src = TYPE_PRECISION (src_type) > 32;
2218 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2219 NOTE_POP (1 + wide_src);
2221 if (TREE_CODE (dst_type) == REAL_TYPE)
2223 if (TREE_CODE (src_type) == REAL_TYPE)
2224 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2225 else if (TYPE_PRECISION (src_type) == 64)
2226 OP1 (OPCODE_l2f + wide_dst);
2228 OP1 (OPCODE_i2f + wide_dst);
2230 else /* Convert to integral type. */
2232 if (TREE_CODE (src_type) == REAL_TYPE)
2233 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2238 if (TYPE_PRECISION (dst_type) < 32)
2241 /* Already converted to int, if needed. */
2242 if (TYPE_PRECISION (dst_type) <= 8)
2244 else if (TYPE_UNSIGNED (dst_type))
2250 NOTE_PUSH (1 + wide_dst);
2257 tree try_clause = TREE_OPERAND (exp, 0);
2258 struct jcf_block *start_label = get_jcf_label_here (state);
2259 struct jcf_block *end_label; /* End of try clause. */
2260 struct jcf_block *finished_label = gen_jcf_label (state);
2261 tree clause = TREE_OPERAND (exp, 1);
2262 if (target != IGNORE_TARGET)
2264 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2265 end_label = get_jcf_label_here (state);
2266 if (end_label == start_label)
2268 if (CAN_COMPLETE_NORMALLY (try_clause))
2269 emit_goto (finished_label, state);
2270 while (clause != NULL_TREE)
2272 tree catch_clause = TREE_OPERAND (clause, 0);
2273 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2274 struct jcf_handler *handler = alloc_handler (start_label,
2276 if (exception_decl == NULL_TREE)
2277 handler->type = NULL_TREE;
2279 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2280 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2281 clause = TREE_CHAIN (clause);
2282 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2283 emit_goto (finished_label, state);
2285 define_jcf_label (finished_label, state);
2289 case TRY_FINALLY_EXPR:
2291 struct jcf_block *finished_label = NULL;
2292 struct jcf_block *finally_label, *start_label, *end_label;
2293 struct jcf_handler *handler;
2294 tree try_block = TREE_OPERAND (exp, 0);
2295 tree finally = TREE_OPERAND (exp, 1);
2296 tree return_link = NULL_TREE, exception_decl = NULL_TREE;
2298 tree exception_type;
2300 finally_label = gen_jcf_label (state);
2301 start_label = get_jcf_label_here (state);
2302 /* If the `finally' clause can complete normally, we emit it
2303 as a subroutine and let the other clauses call it via
2304 `jsr'. If it can't complete normally, then we simply emit
2305 `goto's directly to it. */
2306 if (CAN_COMPLETE_NORMALLY (finally))
2308 finally_label->pc = PENDING_CLEANUP_PC;
2309 finally_label->next = state->labeled_blocks;
2310 state->labeled_blocks = finally_label;
2311 state->num_finalizers++;
2314 generate_bytecode_insns (try_block, target, state);
2316 if (CAN_COMPLETE_NORMALLY (finally))
2318 if (state->labeled_blocks != finally_label)
2320 state->labeled_blocks = finally_label->next;
2322 end_label = get_jcf_label_here (state);
2324 if (end_label == start_label)
2326 state->num_finalizers--;
2327 define_jcf_label (finally_label, state);
2328 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2332 if (CAN_COMPLETE_NORMALLY (finally))
2334 return_link = build_decl (VAR_DECL, NULL_TREE,
2335 return_address_type_node);
2336 finished_label = gen_jcf_label (state);
2339 if (CAN_COMPLETE_NORMALLY (try_block))
2341 if (CAN_COMPLETE_NORMALLY (finally))
2343 emit_jsr (finally_label, state);
2344 emit_goto (finished_label, state);
2347 emit_goto (finally_label, state);
2350 /* Handle exceptions. */
2352 exception_type = build_pointer_type (throwable_type_node);
2353 if (CAN_COMPLETE_NORMALLY (finally))
2355 /* We're going to generate a subroutine, so we'll need to
2356 save and restore the exception around the `jsr'. */
2357 exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2358 localvar_alloc (return_link, state);
2360 handler = alloc_handler (start_label, end_label, state);
2361 handler->type = NULL_TREE;
2362 if (CAN_COMPLETE_NORMALLY (finally))
2364 localvar_alloc (exception_decl, state);
2366 emit_store (exception_decl, state);
2367 emit_jsr (finally_label, state);
2368 emit_load (exception_decl, state);
2370 OP1 (OPCODE_athrow);
2375 /* We're not generating a subroutine. In this case we can
2376 simply have the exception handler pop the exception and
2377 then fall through to the `finally' block. */
2379 emit_pop (1, state);
2383 /* The finally block. If we're generating a subroutine, first
2384 save return PC into return_link. Otherwise, just generate
2385 the code for the `finally' block. */
2386 define_jcf_label (finally_label, state);
2387 if (CAN_COMPLETE_NORMALLY (finally))
2390 emit_store (return_link, state);
2393 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2394 if (CAN_COMPLETE_NORMALLY (finally))
2396 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2397 maybe_free_localvar (exception_decl, state, 1);
2398 maybe_free_localvar (return_link, state, 1);
2399 define_jcf_label (finished_label, state);
2404 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2406 OP1 (OPCODE_athrow);
2408 case NEW_ARRAY_INIT:
2410 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2411 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2412 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2413 HOST_WIDE_INT length = java_array_type_length (array_type);
2414 if (target == IGNORE_TARGET)
2416 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2417 generate_bytecode_insns (TREE_VALUE (values), target, state);
2420 push_int_const (length, state);
2423 if (JPRIMITIVE_TYPE_P (element_type))
2425 int atype = encode_newarray_type (element_type);
2426 OP1 (OPCODE_newarray);
2431 int index = find_class_constant (&state->cpool,
2432 TREE_TYPE (element_type));
2433 OP1 (OPCODE_anewarray);
2437 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2438 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2440 int save_SP = state->code_SP;
2441 emit_dup (1, 0, state);
2442 push_int_const (offset, state);
2444 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2447 state->code_SP = save_SP;
2451 case JAVA_EXC_OBJ_EXPR:
2452 NOTE_PUSH (1); /* Pushed by exception system. */
2457 /* This copes with cases where fold() has created MIN or MAX
2458 from a conditional expression. */
2459 enum tree_code code = TREE_CODE (exp) == MIN_EXPR ? LT_EXPR : GT_EXPR;
2460 tree op0 = TREE_OPERAND (exp, 0);
2461 tree op1 = TREE_OPERAND (exp, 1);
2463 if (TREE_SIDE_EFFECTS (op0) || TREE_SIDE_EFFECTS (op1))
2465 x = build (COND_EXPR, TREE_TYPE (exp),
2466 build (code, boolean_type_node, op0, op1),
2468 generate_bytecode_insns (x, target, state);
2471 case NEW_CLASS_EXPR:
2473 tree class = TREE_TYPE (TREE_TYPE (exp));
2474 int need_result = target != IGNORE_TARGET;
2475 int index = find_class_constant (&state->cpool, class);
2481 NOTE_PUSH (1 + need_result);
2483 /* ... fall through ... */
2486 tree f = TREE_OPERAND (exp, 0);
2487 tree x = TREE_OPERAND (exp, 1);
2488 int save_SP = state->code_SP;
2490 if (TREE_CODE (f) == ADDR_EXPR)
2491 f = TREE_OPERAND (f, 0);
2492 if (f == soft_newarray_node)
2494 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2495 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2496 STACK_TARGET, state);
2498 OP1 (OPCODE_newarray);
2502 else if (f == soft_multianewarray_node)
2506 int index = find_class_constant (&state->cpool,
2507 TREE_TYPE (TREE_TYPE (exp)));
2508 x = TREE_CHAIN (x); /* Skip class argument. */
2509 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2510 for (idim = ndims; --idim >= 0; )
2513 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2516 OP1 (OPCODE_multianewarray);
2521 else if (f == soft_anewarray_node)
2523 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2524 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2525 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2527 OP1 (OPCODE_anewarray);
2531 else if (f == soft_monitorenter_node
2532 || f == soft_monitorexit_node
2535 if (f == soft_monitorenter_node)
2536 op = OPCODE_monitorenter;
2537 else if (f == soft_monitorexit_node)
2538 op = OPCODE_monitorexit;
2541 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2547 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2549 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2551 nargs = state->code_SP - save_SP;
2552 state->code_SP = save_SP;
2553 if (f == soft_fmod_node)
2560 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2561 NOTE_POP (1); /* Pop implicit this. */
2562 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2564 tree context = DECL_CONTEXT (f);
2565 int index, interface = 0;
2567 if (METHOD_STATIC (f))
2568 OP1 (OPCODE_invokestatic);
2569 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2570 || METHOD_PRIVATE (f))
2571 OP1 (OPCODE_invokespecial);
2574 if (CLASS_INTERFACE (TYPE_NAME (context)))
2576 tree arg1 = TREE_VALUE (TREE_OPERAND (exp, 1));
2577 context = TREE_TYPE (TREE_TYPE (arg1));
2578 if (CLASS_INTERFACE (TYPE_NAME (context)))
2582 OP1 (OPCODE_invokeinterface);
2584 OP1 (OPCODE_invokevirtual);
2586 index = find_methodref_with_class_index (&state->cpool, f, context);
2596 f = TREE_TYPE (TREE_TYPE (f));
2597 if (TREE_CODE (f) != VOID_TYPE)
2599 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2600 if (target == IGNORE_TARGET)
2601 emit_pop (size, state);
2611 error("internal error in generate_bytecode_insn - tree code not implemented: %s",
2612 tree_code_name [(int) TREE_CODE (exp)]);
2617 perform_relocations (struct jcf_partial *state)
2619 struct jcf_block *block;
2620 struct jcf_relocation *reloc;
2624 /* Before we start, the pc field of each block is an upper bound on
2625 the block's start pc (it may be less, if previous blocks need less
2626 than their maximum).
2628 The minimum size of each block is in the block's chunk->size. */
2630 /* First, figure out the actual locations of each block. */
2633 for (block = state->blocks; block != NULL; block = block->next)
2635 int block_size = block->v.chunk->size;
2639 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2640 Assumes relocations are in reverse order. */
2641 reloc = block->u.relocations;
2642 while (reloc != NULL
2643 && reloc->kind == OPCODE_goto_w
2644 && reloc->label->pc == block->next->pc
2645 && reloc->offset + 2 == block_size)
2647 reloc = reloc->next;
2648 block->u.relocations = reloc;
2649 block->v.chunk->size -= 3;
2654 /* Optimize GOTO L; ... L: GOTO X by changing the first goto to
2655 jump directly to X. We're careful here to avoid an infinite
2656 loop if the `goto's themselves form one. We do this
2657 optimization because we can generate a goto-to-goto for some
2658 try/finally blocks. */
2659 while (reloc != NULL
2660 && reloc->kind == OPCODE_goto_w
2661 && reloc->label != block
2662 && reloc->label->v.chunk->data != NULL
2663 && reloc->label->v.chunk->data[0] == OPCODE_goto)
2665 /* Find the reloc for the first instruction of the
2666 destination block. */
2667 struct jcf_relocation *first_reloc;
2668 for (first_reloc = reloc->label->u.relocations;
2670 first_reloc = first_reloc->next)
2672 if (first_reloc->offset == 1
2673 && first_reloc->kind == OPCODE_goto_w)
2675 reloc->label = first_reloc->label;
2680 /* If we didn't do anything, exit the loop. */
2681 if (first_reloc == NULL)
2685 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2687 if (reloc->kind == SWITCH_ALIGN_RELOC)
2689 /* We assume this is the first relocation in this block,
2690 so we know its final pc. */
2691 int where = pc + reloc->offset;
2692 int pad = ((where + 3) & ~3) - where;
2695 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2697 int delta = reloc->label->pc - (pc + reloc->offset - 1);
2698 int expand = reloc->kind > 0 ? 2 : 5;
2702 if (delta >= -32768 && delta <= 32767)
2708 block_size += expand;
2714 for (block = state->blocks; block != NULL; block = block->next)
2716 struct chunk *chunk = block->v.chunk;
2717 int old_size = chunk->size;
2718 int next_pc = block->next == NULL ? pc : block->next->pc;
2719 int new_size = next_pc - block->pc;
2720 unsigned char *new_ptr;
2721 unsigned char *old_buffer = chunk->data;
2722 unsigned char *old_ptr = old_buffer + old_size;
2723 if (new_size != old_size)
2725 chunk->data = obstack_alloc (state->chunk_obstack, new_size);
2726 chunk->size = new_size;
2728 new_ptr = chunk->data + new_size;
2730 /* We do the relocations from back to front, because
2731 the relocations are in reverse order. */
2732 for (reloc = block->u.relocations; ; reloc = reloc->next)
2734 /* new_ptr and old_ptr point into the old and new buffers,
2735 respectively. (If no relocations cause the buffer to
2736 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2737 The bytes at higher address have been copied and relocations
2738 handled; those at lower addresses remain to process. */
2740 /* Lower old index of piece to be copied with no relocation.
2741 I.e. high index of the first piece that does need relocation. */
2742 int start = reloc == NULL ? 0
2743 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2744 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2746 : reloc->offset + 2;
2749 int n = (old_ptr - old_buffer) - start;
2753 memcpy (new_ptr, old_ptr, n);
2754 if (old_ptr == old_buffer)
2757 new_offset = new_ptr - chunk->data;
2758 new_offset -= (reloc->kind == -1 ? 2 : 4);
2759 if (reloc->kind == 0)
2762 value = GET_u4 (old_ptr);
2764 else if (reloc->kind == BLOCK_START_RELOC)
2770 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2772 int where = block->pc + reloc->offset;
2773 int pad = ((where + 3) & ~3) - where;
2781 value = GET_u2 (old_ptr);
2783 value += reloc->label->pc - (block->pc + new_offset);
2784 *--new_ptr = (unsigned char) value; value >>= 8;
2785 *--new_ptr = (unsigned char) value; value >>= 8;
2786 if (reloc->kind != -1)
2788 *--new_ptr = (unsigned char) value; value >>= 8;
2789 *--new_ptr = (unsigned char) value;
2791 if (reloc->kind > BLOCK_START_RELOC)
2793 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2795 *--new_ptr = reloc->kind;
2797 else if (reloc->kind < -1)
2799 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2801 *--new_ptr = OPCODE_goto_w;
2804 *--new_ptr = - reloc->kind;
2807 if (new_ptr != chunk->data)
2810 state->code_length = pc;
2814 init_jcf_state (struct jcf_partial *state, struct obstack *work)
2816 state->chunk_obstack = work;
2817 state->first = state->chunk = NULL;
2818 CPOOL_INIT (&state->cpool);
2819 BUFFER_INIT (&state->localvars);
2820 BUFFER_INIT (&state->bytecode);
2824 init_jcf_method (struct jcf_partial *state, tree method)
2826 state->current_method = method;
2827 state->blocks = state->last_block = NULL;
2828 state->linenumber_count = 0;
2829 state->first_lvar = state->last_lvar = NULL;
2830 state->lvar_count = 0;
2831 state->labeled_blocks = NULL;
2832 state->code_length = 0;
2833 BUFFER_RESET (&state->bytecode);
2834 BUFFER_RESET (&state->localvars);
2836 state->code_SP_max = 0;
2837 state->handlers = NULL;
2838 state->last_handler = NULL;
2839 state->num_handlers = 0;
2840 state->num_finalizers = 0;
2841 state->return_value_decl = NULL_TREE;
2845 release_jcf_state (struct jcf_partial *state)
2847 CPOOL_FINISH (&state->cpool);
2848 obstack_free (state->chunk_obstack, state->first);
2851 /* Generate and return a list of chunks containing the class CLAS
2852 in the .class file representation. The list can be written to a
2853 .class file using write_chunks. Chunks are allocated from STATE's chunk_obstack. */
2855 static GTY(()) tree SourceFile_node;
2856 static struct chunk *
2857 generate_classfile (tree clas, struct jcf_partial *state)
2859 struct chunk *cpool_chunk;
2860 const char *source_file, *s;
2863 char *fields_count_ptr;
2864 int fields_count = 0;
2865 char *methods_count_ptr;
2866 int methods_count = 0;
2869 = clas == object_type_node ? 0
2870 : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));
2872 ptr = append_chunk (NULL, 8, state);
2873 PUT4 (0xCafeBabe); /* Magic number */
2874 PUT2 (3); /* Minor version */
2875 PUT2 (45); /* Major version */
2877 append_chunk (NULL, 0, state);
2878 cpool_chunk = state->chunk;
2880 /* Next allocate the chunk containing access_flags through fields_count. */
2881 if (clas == object_type_node)
2884 i = 8 + 2 * total_supers;
2885 ptr = append_chunk (NULL, i, state);
2886 i = get_access_flags (TYPE_NAME (clas));
2887 if (! (i & ACC_INTERFACE))
2889 PUT2 (i); /* access_flags */
2890 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2891 if (clas == object_type_node)
2893 PUT2(0); /* super_class */
2894 PUT2(0); /* interfaces_count */
2898 tree basetypes = TYPE_BINFO_BASETYPES (clas);
2899 tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
2900 int j = find_class_constant (&state->cpool, base);
2901 PUT2 (j); /* super_class */
2902 PUT2 (total_supers - 1); /* interfaces_count */
2903 for (i = 1; i < total_supers; i++)
2905 base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
2906 j = find_class_constant (&state->cpool, base);
2910 fields_count_ptr = ptr;
2912 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
2914 int have_value, attr_count = 0;
2915 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
2917 ptr = append_chunk (NULL, 8, state);
2918 i = get_access_flags (part); PUT2 (i);
2919 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
2920 i = find_utf8_constant (&state->cpool,
2921 build_java_signature (TREE_TYPE (part)));
2923 have_value = DECL_INITIAL (part) != NULL_TREE
2924 && FIELD_STATIC (part) && CONSTANT_VALUE_P (DECL_INITIAL (part))
2925 && FIELD_FINAL (part)
2926 && (JPRIMITIVE_TYPE_P (TREE_TYPE (part))
2927 || TREE_TYPE (part) == string_ptr_type_node);
2931 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
2932 || FIELD_SYNTHETIC (part))
2934 if (FIELD_DEPRECATED (part))
2937 PUT2 (attr_count); /* attributes_count */
2940 tree init = DECL_INITIAL (part);
2941 static tree ConstantValue_node = NULL_TREE;
2942 if (TREE_TYPE (part) != TREE_TYPE (init))
2943 fatal_error ("field initializer type mismatch");
2944 ptr = append_chunk (NULL, 8, state);
2945 if (ConstantValue_node == NULL_TREE)
2946 ConstantValue_node = get_identifier ("ConstantValue");
2947 i = find_utf8_constant (&state->cpool, ConstantValue_node);
2948 PUT2 (i); /* attribute_name_index */
2949 PUT4 (2); /* attribute_length */
2950 i = find_constant_index (init, state); PUT2 (i);
2952 /* Emit the "Synthetic" attribute for val$<x> and this$<n>
2953 fields and other fields which need it. */
2954 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
2955 || FIELD_SYNTHETIC (part))
2956 ptr = append_synthetic_attribute (state);
2957 if (FIELD_DEPRECATED (part))
2958 append_deprecated_attribute (state);
2961 ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
2963 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
2966 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
2968 struct jcf_block *block;
2969 tree function_body = DECL_FUNCTION_BODY (part);
2970 tree body = function_body == NULL_TREE ? NULL_TREE
2971 : BLOCK_EXPR_BODY (function_body);
2972 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
2974 tree type = TREE_TYPE (part);
2975 tree save_function = current_function_decl;
2976 int synthetic_p = 0;
2978 /* Invisible Miranda methods shouldn't end up in the .class
2980 if (METHOD_INVISIBLE (part))
2983 current_function_decl = part;
2984 ptr = append_chunk (NULL, 8, state);
2985 i = get_access_flags (part); PUT2 (i);
2986 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
2987 i = find_utf8_constant (&state->cpool, build_java_signature (type));
2989 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
2991 /* Make room for the Synthetic attribute (of zero length.) */
2992 if (DECL_FINIT_P (part)
2993 || DECL_INSTINIT_P (part)
2994 || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
2995 || TYPE_DOT_CLASS (clas) == part)
3000 /* Make room for Deprecated attribute. */
3001 if (METHOD_DEPRECATED (part))
3004 PUT2 (i); /* attributes_count */
3007 ptr = append_synthetic_attribute (state);
3009 if (body != NULL_TREE)
3011 int code_attributes_count = 0;
3012 static tree Code_node = NULL_TREE;
3015 struct jcf_handler *handler;
3016 if (Code_node == NULL_TREE)
3017 Code_node = get_identifier ("Code");
3018 ptr = append_chunk (NULL, 14, state);
3019 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
3021 init_jcf_method (state, part);
3022 get_jcf_label_here (state); /* Force a first block. */
3023 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3024 localvar_alloc (t, state);
3025 state->num_jsrs = 0;
3026 generate_bytecode_insns (body, IGNORE_TARGET, state);
3027 if (CAN_COMPLETE_NORMALLY (body))
3029 if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
3032 OP1 (OPCODE_return);
3034 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3035 maybe_free_localvar (t, state, 1);
3036 if (state->return_value_decl != NULL_TREE)
3037 maybe_free_localvar (state->return_value_decl, state, 1);
3038 finish_jcf_block (state);
3039 perform_relocations (state);
3042 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
3043 if (state->linenumber_count > 0)
3045 code_attributes_count++;
3046 i += 8 + 4 * state->linenumber_count;
3048 if (state->lvar_count > 0)
3050 code_attributes_count++;
3051 i += 8 + 10 * state->lvar_count;
3053 UNSAFE_PUT4 (i); /* attribute_length */
3054 UNSAFE_PUT2 (state->code_SP_max); /* max_stack */
3055 UNSAFE_PUT2 (localvar_max); /* max_locals */
3056 UNSAFE_PUT4 (state->code_length);
3058 /* Emit the exception table. */
3059 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
3060 PUT2 (state->num_handlers); /* exception_table_length */
3061 handler = state->handlers;
3062 for (; handler != NULL; handler = handler->next)
3065 PUT2 (handler->start_label->pc);
3066 PUT2 (handler->end_label->pc);
3067 PUT2 (handler->handler_label->pc);
3068 if (handler->type == NULL_TREE)
3071 type_index = find_class_constant (&state->cpool,
3076 ptr = append_chunk (NULL, 2, state);
3077 PUT2 (code_attributes_count);
3079 /* Write the LineNumberTable attribute. */
3080 if (state->linenumber_count > 0)
3082 static tree LineNumberTable_node = NULL_TREE;
3083 ptr = append_chunk (NULL,
3084 8 + 4 * state->linenumber_count, state);
3085 if (LineNumberTable_node == NULL_TREE)
3086 LineNumberTable_node = get_identifier ("LineNumberTable");
3087 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3088 PUT2 (i); /* attribute_name_index */
3089 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
3090 i = state->linenumber_count; PUT2 (i);
3091 for (block = state->blocks; block != NULL; block = block->next)
3093 int line = block->linenumber;
3102 /* Write the LocalVariableTable attribute. */
3103 if (state->lvar_count > 0)
3105 static tree LocalVariableTable_node = NULL_TREE;
3106 struct localvar_info *lvar = state->first_lvar;
3107 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3108 if (LocalVariableTable_node == NULL_TREE)
3109 LocalVariableTable_node = get_identifier("LocalVariableTable");
3110 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3111 PUT2 (i); /* attribute_name_index */
3112 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
3113 i = state->lvar_count; PUT2 (i);
3114 for ( ; lvar != NULL; lvar = lvar->next)
3116 tree name = DECL_NAME (lvar->decl);
3117 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
3118 i = lvar->start_label->pc; PUT2 (i);
3119 i = lvar->end_label->pc - i; PUT2 (i);
3120 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3121 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
3122 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
3126 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3128 tree t = DECL_FUNCTION_THROWS (part);
3129 int throws_count = list_length (t);
3130 static tree Exceptions_node = NULL_TREE;
3131 if (Exceptions_node == NULL_TREE)
3132 Exceptions_node = get_identifier ("Exceptions");
3133 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3134 i = find_utf8_constant (&state->cpool, Exceptions_node);
3135 PUT2 (i); /* attribute_name_index */
3136 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
3137 i = throws_count; PUT2 (i);
3138 for (; t != NULL_TREE; t = TREE_CHAIN (t))
3140 i = find_class_constant (&state->cpool, TREE_VALUE (t));
3145 if (METHOD_DEPRECATED (part))
3146 append_deprecated_attribute (state);
3149 current_function_decl = save_function;
3151 ptr = methods_count_ptr; UNSAFE_PUT2 (methods_count);
3153 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3154 for (s = source_file; ; s++)
3159 if (ch == '/' || ch == '\\')
3162 ptr = append_chunk (NULL, 10, state);
3164 i = 1; /* Source file always exists as an attribute */
3165 if (INNER_CLASS_TYPE_P (clas) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas)))
3167 if (clas == object_type_node)
3169 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3172 PUT2 (i); /* attributes_count */
3174 /* generate the SourceFile attribute. */
3175 if (SourceFile_node == NULL_TREE)
3177 SourceFile_node = get_identifier ("SourceFile");
3180 i = find_utf8_constant (&state->cpool, SourceFile_node);
3181 PUT2 (i); /* attribute_name_index */
3183 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3185 append_gcj_attribute (state, clas);
3186 append_innerclasses_attribute (state, clas);
3187 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3188 append_deprecated_attribute (state);
3190 /* New finally generate the contents of the constant pool chunk. */
3191 i = count_constant_pool_bytes (&state->cpool);
3192 ptr = obstack_alloc (state->chunk_obstack, i);
3193 cpool_chunk->data = ptr;
3194 cpool_chunk->size = i;
3195 write_constant_pool (&state->cpool, ptr, i);
3196 return state->first;
3199 static GTY(()) tree Synthetic_node;
3200 static unsigned char *
3201 append_synthetic_attribute (struct jcf_partial *state)
3203 unsigned char *ptr = append_chunk (NULL, 6, state);
3206 if (Synthetic_node == NULL_TREE)
3208 Synthetic_node = get_identifier ("Synthetic");
3210 i = find_utf8_constant (&state->cpool, Synthetic_node);
3211 PUT2 (i); /* Attribute string index */
3212 PUT4 (0); /* Attribute length */
3218 append_deprecated_attribute (struct jcf_partial *state)
3220 unsigned char *ptr = append_chunk (NULL, 6, state);
3223 i = find_utf8_constant (&state->cpool, get_identifier ("Deprecated"));
3224 PUT2 (i); /* Attribute string index */
3225 PUT4 (0); /* Attribute length */
3229 append_gcj_attribute (struct jcf_partial *state, tree class)
3234 if (class != object_type_node)
3237 ptr = append_chunk (NULL, 6, state); /* 2+4 */
3238 i = find_utf8_constant (&state->cpool,
3239 get_identifier ("gnu.gcj.gcj-compiled"));
3240 PUT2 (i); /* Attribute string index */
3241 PUT4 (0); /* Attribute length */
3244 static tree InnerClasses_node;
3246 append_innerclasses_attribute (struct jcf_partial *state, tree class)
3248 tree orig_decl = TYPE_NAME (class);
3251 unsigned char *ptr, *length_marker, *number_marker;
3253 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3256 ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3258 if (InnerClasses_node == NULL_TREE)
3260 InnerClasses_node = get_identifier ("InnerClasses");
3262 i = find_utf8_constant (&state->cpool, InnerClasses_node);
3264 length_marker = ptr; PUT4 (0); /* length, to be later patched */
3265 number_marker = ptr; PUT2 (0); /* number of classes, tblp */
3267 /* Generate the entries: all inner classes visible from the one we
3268 process: itself, up and down. */
3269 while (class && INNER_CLASS_TYPE_P (class))
3273 decl = TYPE_NAME (class);
3274 n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3275 IDENTIFIER_LENGTH (DECL_NAME (decl));
3277 while (n[-1] != '$')
3279 append_innerclasses_attribute_entry (state, decl, get_identifier (n));
3282 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
3286 for (current = DECL_INNER_CLASS_LIST (decl);
3287 current; current = TREE_CHAIN (current))
3289 append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3290 TREE_VALUE (current));
3294 ptr = length_marker; PUT4 (8*length+2);
3295 ptr = number_marker; PUT2 (length);
3299 append_innerclasses_attribute_entry (struct jcf_partial *state,
3300 tree decl, tree name)
3303 int ocii = 0, ini = 0;
3304 unsigned char *ptr = append_chunk (NULL, 8, state);
3306 icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3308 /* Sun's implementation seems to generate ocii to 0 for inner
3309 classes (which aren't considered members of the class they're
3310 in.) The specs are saying that if the class is anonymous,
3311 inner_name_index must be zero. */
3312 if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl)))
3314 ocii = find_class_constant (&state->cpool,
3315 TREE_TYPE (DECL_CONTEXT (decl)));
3316 ini = find_utf8_constant (&state->cpool, name);
3318 icaf = get_access_flags (decl);
3320 PUT2 (icii); PUT2 (ocii); PUT2 (ini); PUT2 (icaf);
3324 make_class_file_name (tree clas)
3326 const char *dname, *cname, *slash;
3331 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3332 "", '.', DIR_SEPARATOR,
3334 if (jcf_write_base_directory == NULL)
3336 /* Make sure we put the class file into the .java file's
3337 directory, and not into some subdirectory thereof. */
3339 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3340 slash = strrchr (dname, DIR_SEPARATOR);
3341 #ifdef DIR_SEPARATOR_2
3343 slash = strrchr (dname, DIR_SEPARATOR_2);
3349 sep = DIR_SEPARATOR;
3354 t = strrchr (cname, DIR_SEPARATOR);
3362 dname = jcf_write_base_directory;
3364 s = strrchr (dname, DIR_SEPARATOR);
3365 #ifdef DIR_SEPARATOR_2
3367 s = strrchr (dname, DIR_SEPARATOR_2);
3372 sep = DIR_SEPARATOR;
3374 slash = dname + strlen (dname);
3377 r = xmalloc (slash - dname + strlen (cname) + 2);
3378 strncpy (r, dname, slash - dname);
3379 r[slash - dname] = sep;
3380 strcpy (&r[slash - dname + 1], cname);
3382 /* We try to make new directories when we need them. We only do
3383 this for directories which "might not" exist. For instance, we
3384 assume the `-d' directory exists, but we don't assume that any
3385 subdirectory below it exists. It might be worthwhile to keep
3386 track of which directories we've created to avoid gratuitous
3388 dname = r + (slash - dname) + 1;
3391 char *s = strchr (dname, sep);
3395 /* Try to make directory if it doesn't already exist. */
3396 if (stat (r, &sb) == -1
3397 && mkdir (r, 0755) == -1
3398 /* The directory might have been made by another process. */
3400 fatal_error ("can't create directory %s: %m", r);
3403 /* Skip consecutive separators. */
3404 for (dname = s + 1; *dname && *dname == sep; ++dname)
3411 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3412 The output .class file name is make_class_file_name(CLAS). */
3415 write_classfile (tree clas)
3417 struct obstack *work = &temporary_obstack;
3418 struct jcf_partial state[1];
3419 char *class_file_name = make_class_file_name (clas);
3420 struct chunk *chunks;
3422 if (class_file_name != NULL)
3425 char *temporary_file_name;
3427 /* The .class file is initially written to a ".tmp" file so that
3428 if multiple instances of the compiler are running at once
3429 they do not see partially formed class files. */
3430 temporary_file_name = concat (class_file_name, ".tmp", NULL);
3431 stream = fopen (temporary_file_name, "wb");
3433 fatal_error ("can't open %s for writing: %m", temporary_file_name);
3435 jcf_dependency_add_target (class_file_name);
3436 init_jcf_state (state, work);
3437 chunks = generate_classfile (clas, state);
3438 write_chunks (stream, chunks);
3439 if (fclose (stream))
3440 fatal_error ("error closing %s: %m", temporary_file_name);
3442 /* If a file named by the string pointed to by `new' exists
3443 prior to the call to the `rename' function, the behavior
3444 is implementation-defined. ISO 9899-1990 7.9.4.2.
3446 For example, on Win32 with MSVCRT, it is an error. */
3448 unlink (class_file_name);
3450 if (rename (temporary_file_name, class_file_name) == -1)
3452 remove (temporary_file_name);
3453 fatal_error ("can't create %s: %m", class_file_name);
3455 free (temporary_file_name);
3456 free (class_file_name);
3458 release_jcf_state (state);
3462 string concatenation
3463 synchronized statement
3466 #include "gt-java-jcf-write.h"