1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
27 #include "coretypes.h"
32 #include "java-tree.h"
36 #include "java-opcodes.h"
37 #include "parse.h" /* for BLOCK_EXPR_BODY */
43 extern struct obstack temporary_obstack;
45 /* Base directory in which `.class' files should be written.
46 NULL means to put the file into the same directory as the
47 corresponding .java file. */
48 const char *jcf_write_base_directory = NULL;
50 /* Make sure bytecode.data is big enough for at least N more bytes. */
53 do { CHECK_OP(state); \
54 if (state->bytecode.ptr + (N) > state->bytecode.limit) \
55 buffer_grow (&state->bytecode, N); } while (0)
57 /* Add a 1-byte instruction/operand I to bytecode.data,
58 assuming space has already been RESERVE'd. */
60 #define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
62 /* Like OP1, but I is a 2-byte big endian integer. */
65 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
67 /* Like OP1, but I is a 4-byte big endian integer. */
70 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
71 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
73 /* Macro to call each time we push I words on the JVM stack. */
75 #define NOTE_PUSH(I) \
76 do { state->code_SP += (I); \
77 if (state->code_SP > state->code_SP_max) \
78 state->code_SP_max = state->code_SP; } while (0)
80 /* Macro to call each time we pop I words from the JVM stack. */
83 do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
85 /* A chunk or segment of a .class file. */
89 /* The next segment of this .class file. */
92 /* The actual data in this segment to be written to the .class file. */
95 /* The size of the segment to be written to the .class file. */
99 #define PENDING_CLEANUP_PC (-3)
100 #define PENDING_EXIT_PC (-2)
101 #define UNDEFINED_PC (-1)
103 /* Each "block" represents a label plus the bytecode instructions following.
104 There may be branches out of the block, but no incoming jumps, except
105 to the beginning of the block.
107 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
108 associated code yet), but it is an undefined label.
113 /* For blocks that are defined, the next block (in pc order).
114 For blocks that are not-yet-defined the end label of a LABELED_BLOCK_EXPR
115 or a cleanup expression (from a TRY_FINALLY_EXPR),
116 this is the next (outer) such end label, in a stack headed by
117 labeled_blocks in jcf_partial. */
118 struct jcf_block *next;
120 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
121 pc is PENDING_EXIT_PC.
122 In the not-yet-defined end label for pending cleanup subroutine,
123 pc is PENDING_CLEANUP_PC.
124 For other not-yet-defined labels, pc is UNDEFINED_PC.
126 If the label has been defined:
127 Until perform_relocations is finished, this is the maximum possible
128 value of the bytecode offset at the beginning of this block.
129 After perform_relocations, it is the actual offset (pc). */
134 /* After finish_jcf_block is called, the actual instructions
135 contained in this block. Before that NULL, and the instructions
136 are in state->bytecode. */
140 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
141 covered by the cleanup. */
142 struct jcf_block *start_label;
146 /* Set of relocations (in reverse offset order) for this block. */
147 struct jcf_relocation *relocations;
149 /* If this block is that of the not-yet-defined end label of
150 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
151 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
156 /* A "relocation" type for the 0-3 bytes of padding at the start
157 of a tableswitch or a lookupswitch. */
158 #define SWITCH_ALIGN_RELOC 4
160 /* A relocation type for the labels in a tableswitch or a lookupswitch;
161 these are relative to the start of the instruction, but (due to
162 the 0-3 bytes of padding), we don't know the offset before relocation. */
163 #define BLOCK_START_RELOC 1
165 struct jcf_relocation
167 /* Next relocation for the current jcf_block. */
168 struct jcf_relocation *next;
170 /* The (byte) offset within the current block that needs to be relocated. */
171 HOST_WIDE_INT offset;
173 /* 0 if offset is a 4-byte relative offset.
174 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
175 for proper alignment in tableswitch/lookupswitch instructions.
176 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
177 to the start of the containing block.
178 -1 if offset is a 2-byte relative offset.
179 < -1 if offset is the address of an instruction with a 2-byte offset
180 that does not have a corresponding 4-byte offset version, in which
181 case the absolute value of kind is the inverted opcode.
182 > 4 if offset is the address of an instruction (such as jsr) with a
183 2-byte offset that does have a corresponding 4-byte offset version,
184 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
187 /* The label the relocation wants to actually transfer to. */
188 struct jcf_block *label;
191 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
192 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
194 /* State for single catch clause. */
198 struct jcf_handler *next;
200 struct jcf_block *start_label;
201 struct jcf_block *end_label;
202 struct jcf_block *handler_label;
204 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
208 /* State for the current switch statement. */
210 struct jcf_switch_state
212 struct jcf_switch_state *prev;
213 struct jcf_block *default_label;
215 struct jcf_relocation *cases;
217 HOST_WIDE_INT min_case, max_case;
220 /* This structure is used to contain the various pieces that will
221 become a .class file. */
227 struct obstack *chunk_obstack;
230 /* List of basic blocks for the current method. */
231 struct jcf_block *blocks;
232 struct jcf_block *last_block;
234 struct localvar_info *first_lvar;
235 struct localvar_info *last_lvar;
240 int linenumber_count;
242 /* Until perform_relocations, this is an upper bound on the number
243 of bytes (so far) in the instructions for the current method. */
246 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
247 struct jcf_block *labeled_blocks;
249 /* The current stack size (stack pointer) in the current method. */
252 /* The largest extent of stack size (stack pointer) in the current method. */
255 /* Contains a mapping from local var slot number to localvar_info. */
256 struct buffer localvars;
258 /* The buffer allocated for bytecode for the current jcf_block. */
259 struct buffer bytecode;
261 /* Chain of exception handlers for the current method. */
262 struct jcf_handler *handlers;
264 /* Last element in handlers chain. */
265 struct jcf_handler *last_handler;
267 /* Number of exception handlers for the current method. */
270 /* Number of finalizers we are currently nested within. */
273 /* If non-NULL, use this for the return value. */
274 tree return_value_decl;
276 /* Information about the current switch statement. */
277 struct jcf_switch_state *sw_state;
279 /* The count of jsr instructions that have been emitted. */
283 static void generate_bytecode_insns (tree, int, struct jcf_partial *);
284 static struct chunk * alloc_chunk (struct chunk *, unsigned char *,
285 int, struct obstack *);
286 static unsigned char * append_chunk (unsigned char *, int,
287 struct jcf_partial *);
288 static void append_chunk_copy (unsigned char *, int, struct jcf_partial *);
289 static struct jcf_block * gen_jcf_label (struct jcf_partial *);
290 static void finish_jcf_block (struct jcf_partial *);
291 static void define_jcf_label (struct jcf_block *, struct jcf_partial *);
292 static struct jcf_block * get_jcf_label_here (struct jcf_partial *);
293 static void put_linenumber (int, struct jcf_partial *);
294 static void localvar_alloc (tree, struct jcf_partial *);
295 static void maybe_free_localvar (tree, struct jcf_partial *, int);
296 static int get_access_flags (tree);
297 static void write_chunks (FILE *, struct chunk *);
298 static int adjust_typed_op (tree, int);
299 static void generate_bytecode_conditional (tree, struct jcf_block *,
300 struct jcf_block *, int,
301 struct jcf_partial *);
302 static void generate_bytecode_return (tree, struct jcf_partial *);
303 static void perform_relocations (struct jcf_partial *);
304 static void init_jcf_state (struct jcf_partial *, struct obstack *);
305 static void init_jcf_method (struct jcf_partial *, tree);
306 static void release_jcf_state (struct jcf_partial *);
307 static struct chunk * generate_classfile (tree, struct jcf_partial *);
308 static struct jcf_handler *alloc_handler (struct jcf_block *,
310 struct jcf_partial *);
311 static void emit_iinc (tree, HOST_WIDE_INT, struct jcf_partial *);
312 static void emit_reloc (HOST_WIDE_INT, int, struct jcf_block *,
313 struct jcf_partial *);
314 static void push_constant1 (HOST_WIDE_INT, struct jcf_partial *);
315 static void push_constant2 (HOST_WIDE_INT, struct jcf_partial *);
316 static void push_int_const (HOST_WIDE_INT, struct jcf_partial *);
317 static int find_constant_wide (HOST_WIDE_INT, HOST_WIDE_INT,
318 struct jcf_partial *);
319 static void push_long_const (HOST_WIDE_INT, HOST_WIDE_INT,
320 struct jcf_partial *);
321 static int find_constant_index (tree, struct jcf_partial *);
322 static void push_long_const (HOST_WIDE_INT, HOST_WIDE_INT,
323 struct jcf_partial *);
324 static void field_op (tree, int, struct jcf_partial *);
325 static void maybe_wide (int, int, struct jcf_partial *);
326 static void emit_dup (int, int, struct jcf_partial *);
327 static void emit_pop (int, struct jcf_partial *);
328 static void emit_load_or_store (tree, int, struct jcf_partial *);
329 static void emit_load (tree, struct jcf_partial *);
330 static void emit_store (tree, struct jcf_partial *);
331 static void emit_unop (enum java_opcode, tree, struct jcf_partial *);
332 static void emit_binop (enum java_opcode, tree, struct jcf_partial *);
333 static void emit_reloc (HOST_WIDE_INT, int, struct jcf_block *,
334 struct jcf_partial *);
335 static void emit_switch_reloc (struct jcf_block *, struct jcf_partial *);
336 static void emit_case_reloc (struct jcf_relocation *, struct jcf_partial *);
337 static void emit_if (struct jcf_block *, int, int, struct jcf_partial *);
338 static void emit_goto (struct jcf_block *, struct jcf_partial *);
339 static void emit_jsr (struct jcf_block *, struct jcf_partial *);
340 static void call_cleanups (struct jcf_block *, struct jcf_partial *);
341 static char *make_class_file_name (tree);
342 static unsigned char *append_synthetic_attribute (struct jcf_partial *);
343 static void append_deprecated_attribute (struct jcf_partial *);
344 static void append_innerclasses_attribute (struct jcf_partial *, tree);
345 static void append_innerclasses_attribute_entry (struct jcf_partial *, tree, tree);
346 static void append_gcj_attribute (struct jcf_partial *, tree);
348 /* Utility macros for appending (big-endian) data to a buffer.
349 We assume a local variable 'ptr' points into where we want to
350 write next, and we assume enough space has been allocated. */
352 #ifdef ENABLE_JC1_CHECKING
353 static int CHECK_PUT (void *, struct jcf_partial *, int);
356 CHECK_PUT (void *ptr, struct jcf_partial *state, int i)
358 if ((unsigned char *) ptr < state->chunk->data
359 || (unsigned char *) ptr + i > state->chunk->data + state->chunk->size)
365 #define CHECK_PUT(PTR, STATE, I) ((void)0)
368 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
369 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
370 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
371 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
373 /* There are some cases below where CHECK_PUT is guaranteed to fail.
374 Use the following macros in those specific cases. */
375 #define UNSAFE_PUT1(X) (*ptr++ = (X))
376 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
377 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
378 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
381 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
382 Set the data and size fields to DATA and SIZE, respectively.
383 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
385 static struct chunk *
386 alloc_chunk (struct chunk *last, unsigned char *data,
387 int size, struct obstack *work)
/* Allocate the chunk descriptor itself on the obstack WORK. */
389 struct chunk *chunk = obstack_alloc (work, sizeof(struct chunk));
/* Per the comment above: if no buffer was supplied but a size was,
   allocate the data buffer on the same obstack. */
391 if (data == NULL && size > 0)
392 data = obstack_alloc (work, size);
402 #ifdef ENABLE_JC1_CHECKING
403 static int CHECK_OP (struct jcf_partial *);
406 CHECK_OP (struct jcf_partial *state)
408 if (state->bytecode.ptr > state->bytecode.limit)
414 #define CHECK_OP(STATE) ((void) 0)
/* Append a new chunk of SIZE bytes to STATE's chunk list (DATA may be
   NULL, in which case alloc_chunk allocates a fresh buffer).  Returns a
   pointer to the new chunk's data buffer. */
417 static unsigned char *
418 append_chunk (unsigned char *data, int size, struct jcf_partial *state)
420 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
/* Remember the head of the chunk list the first time through. */
421 if (state->first == NULL)
422 state->first = state->chunk;
423 return state->chunk->data;
/* Append a new SIZE-byte chunk and copy DATA into it (append_chunk with
   a NULL data argument allocates a fresh buffer to copy into). */
427 append_chunk_copy (unsigned char *data, int size, struct jcf_partial *state)
429 unsigned char *ptr = append_chunk (NULL, size, state);
430 memcpy (ptr, data, size);
/* Allocate a new, as-yet-undefined label (jcf_block) on the chunk
   obstack.  pc is UNDEFINED_PC until define_jcf_label is called. */
433 static struct jcf_block *
434 gen_jcf_label (struct jcf_partial *state)
436 struct jcf_block *block
437 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
/* -1 means no line-number entry has been attached to this label yet. */
439 block->linenumber = -1;
440 block->pc = UNDEFINED_PC;
/* Close out the current block: move the bytecode accumulated in
   state->bytecode into a chunk owned by the block, and bump
   state->code_length to a worst-case upper bound that accounts for
   every relocation possibly growing during perform_relocations. */
445 finish_jcf_block (struct jcf_partial *state)
447 struct jcf_block *block = state->last_block;
448 struct jcf_relocation *reloc;
449 int code_length = BUFFER_LENGTH (&state->bytecode);
450 int pc = state->code_length;
451 append_chunk_copy (state->bytecode.data, code_length, state);
452 BUFFER_RESET (&state->bytecode);
453 block->v.chunk = state->chunk;
455 /* Calculate code_length to the maximum value it can have. */
456 pc += block->v.chunk->size;
457 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
459 int kind = reloc->kind;
/* NOTE(review): the SWITCH_ALIGN_RELOC arm's body is elided in this
   listing; presumably it adds up to 3 padding bytes -- confirm. */
460 if (kind == SWITCH_ALIGN_RELOC)
462 else if (kind > BLOCK_START_RELOC)
463 pc += 2; /* 2-byte offset may grow to 4-byte offset */
465 pc += 5; /* May need to add a goto_w. */
467 state->code_length = pc;
/* Define LABEL at the current position: finish any open block, assign
   the label's (upper-bound) pc, and append it to the block list. */
471 define_jcf_label (struct jcf_block *label, struct jcf_partial *state)
473 if (state->last_block != NULL)
474 finish_jcf_block (state);
475 label->pc = state->code_length;
476 if (state->blocks == NULL)
477 state->blocks = label;
479 state->last_block->next = label;
480 state->last_block = label;
482 label->u.relocations = NULL;
/* Return a label defined at the current bytecode position, re-using the
   last block when no bytecode has been emitted since it was defined. */
485 static struct jcf_block *
486 get_jcf_label_here (struct jcf_partial *state)
488 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
489 return state->last_block;
492 struct jcf_block *label = gen_jcf_label (state);
493 define_jcf_label (label, state);
498 /* Note a line number entry for the current PC and given LINE. */
501 put_linenumber (int line, struct jcf_partial *state)
503 struct jcf_block *label = get_jcf_label_here (state);
/* Each label carries at most one line number; if the current label
   already has one, start a fresh label at the same pc. */
504 if (label->linenumber > 0)
506 label = gen_jcf_label (state);
507 define_jcf_label (label, state);
509 label->linenumber = line;
510 state->linenumber_count++;
513 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
514 in the range (START_LABEL, END_LABEL). */
516 static struct jcf_handler *
517 alloc_handler (struct jcf_block *start_label, struct jcf_block *end_label,
518 struct jcf_partial *state)
520 struct jcf_handler *handler
521 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
522 handler->start_label = start_label;
523 handler->end_label = end_label;
/* The handler's code starts at the current bytecode position. */
524 handler->handler_label = get_jcf_label_here (state);
/* Append to the method's handler chain (first/last pointers). */
525 if (state->handlers == NULL)
526 state->handlers = handler;
528 state->last_handler->next = handler;
529 state->last_handler = handler;
530 handler->next = NULL;
531 state->num_handlers++;
536 /* The index of jvm local variable allocated for this DECL.
537 This is assigned when generating .class files;
538 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
539 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
541 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
545 struct localvar_info *next;
548 struct jcf_block *start_label;
549 struct jcf_block *end_label;
552 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
553 #define localvar_max \
554 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
/* Allocate a JVM local-variable slot for DECL, scanning the localvars
   buffer for a free slot (two consecutive free slots for wide types,
   i.e. long/double), and record debug info when enabled. */
557 localvar_alloc (tree decl, struct jcf_partial *state)
559 struct jcf_block *start_label = get_jcf_label_here (state);
560 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
562 register struct localvar_info *info;
563 register struct localvar_info **ptr = localvar_buffer;
564 register struct localvar_info **limit
565 = (struct localvar_info**) state->localvars.ptr;
/* Search existing slots for a free one (pair of slots if wide). */
566 for (index = 0; ptr < limit; index++, ptr++)
569 && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
/* No free slot found: grow the buffer and take a new slot at the end. */
574 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
575 ptr = (struct localvar_info**) state->localvars.data + index;
576 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
578 info = obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
/* For wide types, mark the second slot occupied with an ~0 sentinel. */
581 ptr[1] = (struct localvar_info *)(~0);
582 DECL_LOCAL_INDEX (decl) = index;
584 info->start_label = start_label;
586 if (debug_info_level > DINFO_LEVEL_TERSE
587 && DECL_NAME (decl) != NULL_TREE)
589 /* Generate debugging info: link INFO into the first/last lvar chain. */
591 if (state->last_lvar != NULL)
592 state->last_lvar->next = info;
594 state->first_lvar = info;
595 state->last_lvar = info;
/* Close the live range of DECL's local slot at the current position.
   REALLY presumably controls whether the slot is actually released for
   re-use -- the code using it is elided in this listing; confirm. */
601 maybe_free_localvar (tree decl, struct jcf_partial *state, int really)
603 struct jcf_block *end_label = get_jcf_label_here (state);
604 int index = DECL_LOCAL_INDEX (decl);
605 register struct localvar_info **ptr = &localvar_buffer [index];
606 register struct localvar_info *info = *ptr;
607 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
609 info->end_label = end_label;
/* Sanity check: the slot must still belong to DECL. */
611 if (info->decl != decl)
/* For wide types, the second slot must still hold the ~0 sentinel. */
618 if (ptr[1] != (struct localvar_info *)(~0))
625 #define STACK_TARGET 1
626 #define IGNORE_TARGET 2
628 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
629 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
632 get_access_flags (tree decl)
635 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
636 if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
638 if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */
/* Protected/private only make sense for members, not top-level classes. */
640 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
642 if (TREE_PROTECTED (decl))
643 flags |= ACC_PROTECTED;
644 if (TREE_PRIVATE (decl))
645 flags |= ACC_PRIVATE;
/* Class (TYPE_DECL) flags. */
647 else if (TREE_CODE (decl) == TYPE_DECL)
649 if (CLASS_SUPER (decl))
651 if (CLASS_ABSTRACT (decl))
652 flags |= ACC_ABSTRACT;
653 if (CLASS_INTERFACE (decl))
654 flags |= ACC_INTERFACE;
655 if (CLASS_STATIC (decl))
657 if (CLASS_PRIVATE (decl))
658 flags |= ACC_PRIVATE;
659 if (CLASS_PROTECTED (decl))
660 flags |= ACC_PROTECTED;
/* Anonymous and local classes are treated as private. */
661 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
662 || LOCAL_CLASS_P (TREE_TYPE (decl)))
663 flags |= ACC_PRIVATE;
664 if (CLASS_STRICTFP (decl))
/* Method (FUNCTION_DECL) flags. */
670 if (TREE_CODE (decl) == FUNCTION_DECL)
672 if (METHOD_NATIVE (decl))
674 if (METHOD_STATIC (decl))
676 if (METHOD_SYNCHRONIZED (decl))
677 flags |= ACC_SYNCHRONIZED;
678 if (METHOD_ABSTRACT (decl))
679 flags |= ACC_ABSTRACT;
680 if (METHOD_STRICTFP (decl))
/* Field flags. */
685 if (FIELD_STATIC (decl))
687 if (FIELD_VOLATILE (decl))
688 flags |= ACC_VOLATILE;
689 if (FIELD_TRANSIENT (decl))
690 flags |= ACC_TRANSIENT;
695 /* Write the list of segments starting at CHUNKS to STREAM. */
698 write_chunks (FILE* stream, struct chunk *chunks)
/* NOTE(review): fwrite's return value is not checked here. */
700 for (; chunks != NULL; chunks = chunks->next)
701 fwrite (chunks->data, chunks->size, 1, stream);
704 /* Push a 1-word constant in the constant pool at the given INDEX.
705 (Caller is responsible for doing NOTE_PUSH.) */
708 push_constant1 (HOST_WIDE_INT index, struct jcf_partial *state)
723 /* Push a 2-word constant in the constant pool at the given INDEX.
724 (Caller is responsible for doing NOTE_PUSH.) */
727 push_constant2 (HOST_WIDE_INT index, struct jcf_partial *state)
734 /* Push 32-bit integer constant on VM stack.
735 Caller is responsible for doing NOTE_PUSH. */
738 push_int_const (HOST_WIDE_INT i, struct jcf_partial *state)
/* -1..5 have dedicated iconst_<n> opcodes (iconst_m1 .. iconst_5). */
741 if (i >= -1 && i <= 5)
742 OP1(OPCODE_iconst_0 + i);
/* Fits in a signed byte: bipush (body elided in this listing). */
743 else if (i >= -128 && i < 128)
/* Fits in a signed short: sipush (body elided in this listing). */
748 else if (i >= -32768 && i < 32768)
/* Otherwise load from a CONSTANT_Integer pool entry via ldc. */
755 i = find_constant1 (&state->cpool, CONSTANT_Integer,
756 (jword)(i & 0xFFFFFFFF));
757 push_constant1 (i, state);
/* Find or allocate a CONSTANT_Long pool entry for the 64-bit value
   whose low and high host words are LO and HI; return its index. */
762 find_constant_wide (HOST_WIDE_INT lo, HOST_WIDE_INT hi,
763 struct jcf_partial *state)
765 HOST_WIDE_INT w1, w2;
/* Shift right by 32 (lshift by -32) to extract the high 32 bits. */
766 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
767 return find_constant2 (&state->cpool, CONSTANT_Long,
768 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
771 /* Find or allocate a constant pool entry for the given VALUE.
772 Return the index in the constant pool. */
775 find_constant_index (tree value, struct jcf_partial *state)
777 if (TREE_CODE (value) == INTEGER_CST)
/* Narrow integers become CONSTANT_Integer; wider ones CONSTANT_Long. */
779 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
780 return find_constant1 (&state->cpool, CONSTANT_Integer,
781 (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
783 return find_constant_wide (TREE_INT_CST_LOW (value),
784 TREE_INT_CST_HIGH (value), state);
786 else if (TREE_CODE (value) == REAL_CST)
/* Convert the host real to target-format words (2 x 32 bits). */
790 real_to_target (words, &TREE_REAL_CST (value),
791 TYPE_MODE (TREE_TYPE (value)));
792 words[0] &= 0xffffffff;
793 words[1] &= 0xffffffff;
795 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
796 return find_constant1 (&state->cpool, CONSTANT_Float, (jword)words[0]);
/* Doubles: word order in the pool depends on target endianness. */
798 return find_constant2 (&state->cpool, CONSTANT_Double,
799 (jword)words[1-FLOAT_WORDS_BIG_ENDIAN],
800 (jword)words[FLOAT_WORDS_BIG_ENDIAN]);
802 else if (TREE_CODE (value) == STRING_CST)
803 return find_string_constant (&state->cpool, value);
809 /* Push 64-bit long constant on VM stack.
810 Caller is responsible for doing NOTE_PUSH. */
813 push_long_const (HOST_WIDE_INT lo, HOST_WIDE_INT hi, struct jcf_partial *state)
815 HOST_WIDE_INT highpart, dummy;
816 jint lowpart = WORD_TO_INT (lo);
/* Extract the high 32 bits of the value into HIGHPART. */
818 rshift_double (lo, hi, 32, 64, &highpart, &dummy, 1);
/* 0 and 1 have dedicated lconst_<n> opcodes. */
820 if (highpart == 0 && (lowpart == 0 || lowpart == 1))
823 OP1(OPCODE_lconst_0 + lowpart);
/* Small values: push as int, then (presumably) widen -- the
   instruction following push_int_const is elided here; confirm. */
825 else if ((highpart == 0 && lowpart > 0 && lowpart < 32768)
826 || (highpart == -1 && lowpart < 0 && lowpart >= -32768))
828 push_int_const (lowpart, state);
/* Otherwise use a CONSTANT_Long pool entry (ldc2_w). */
833 push_constant2 (find_constant_wide (lo, hi, state), state);
837 field_op (tree field, int opcode, struct jcf_partial *state)
839 int index = find_fieldref_index (&state->cpool, field);
845 /* Returns an integer in the range 0 (for 'int') through 4 (for object
846 reference) to 7 (for 'short') which matches the pattern of how JVM
847 opcodes typically depend on the operand type. */
850 adjust_typed_op (tree type, int max)
852 switch (TREE_CODE (type))
855 case RECORD_TYPE: return 4;
857 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
859 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
861 switch (TYPE_PRECISION (type))
863 case 8: return max < 5 ? 0 : 5;
864 case 16: return max < 7 ? 0 : 7;
870 switch (TYPE_PRECISION (type))
883 maybe_wide (int opcode, int index, struct jcf_partial *state)
900 /* Compile code to duplicate with offset, where
901 SIZE is the size of the stack item to duplicate (1 or 2), abd
902 OFFSET is where to insert the result (must be 0, 1, or 2).
903 (The new words get inserted at stack[SP-size-offset].) */
906 emit_dup (int size, int offset, struct jcf_partial *state)
/* Select among the six dup variants by SIZE and OFFSET. */
913 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
914 else if (offset == 1)
915 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
916 else if (offset == 2)
917 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
/* Emit pop (SIZE == 1) or pop2 (SIZE == 2). */
925 emit_pop (int size, struct jcf_partial *state)
928 OP1 (OPCODE_pop - 1 + size);
932 emit_iinc (tree var, HOST_WIDE_INT value, struct jcf_partial *state)
934 int slot = DECL_LOCAL_INDEX (var);
936 if (value < -128 || value > 127 || slot >= 256)
954 emit_load_or_store (tree var, /* Variable to load from or store into. */
955 int opcode, /* Either OPCODE_iload or OPCODE_istore. */
956 struct jcf_partial *state)
958 tree type = TREE_TYPE (var);
/* KIND selects the type family: 0=int, 1=long, 2=float, 3=double, 4=ref. */
959 int kind = adjust_typed_op (type, 4);
960 int index = DECL_LOCAL_INDEX (var);
/* Short single-byte form for small slot indices. */
964 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
/* General form; maybe_wide adds a 'wide' prefix for indices >= 256. */
967 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
/* Load VAR onto the stack and account for the pushed word(s). */
971 emit_load (tree var, struct jcf_partial *state)
973 emit_load_or_store (var, OPCODE_iload, state);
974 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
/* Store the top of stack into VAR and account for the popped word(s). */
978 emit_store (tree var, struct jcf_partial *state)
980 emit_load_or_store (var, OPCODE_istore, state);
981 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
985 emit_unop (enum java_opcode opcode, tree type ATTRIBUTE_UNUSED,
986 struct jcf_partial *state)
993 emit_binop (enum java_opcode opcode, tree type, struct jcf_partial *state)
995 int size = TYPE_IS_WIDE (type) ? 2 : 1;
/* Record a relocation of the given KIND at the current bytecode offset
   in the current block, targeting the (possibly undefined) label TARGET.
   VALUE's use is elided in this listing -- presumably stored in the
   reloc or pre-written into the bytecode; confirm. */
1002 emit_reloc (HOST_WIDE_INT value, int kind,
1003 struct jcf_block *target, struct jcf_partial *state)
1005 struct jcf_relocation *reloc
1006 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1007 struct jcf_block *block = state->last_block;
/* Prepend to the block's relocation list (kept in reverse offset order). */
1008 reloc->next = block->u.relocations;
1009 block->u.relocations = reloc;
1010 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1011 reloc->label = target;
/* Reserve space in the bytecode according to the relocation kind. */
1013 if (kind == 0 || kind == BLOCK_START_RELOC)
1015 else if (kind != SWITCH_ALIGN_RELOC)
1020 emit_switch_reloc (struct jcf_block *label, struct jcf_partial *state)
1022 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1025 /* Similar to emit_switch_reloc,
1026 but re-uses an existing case reloc. */
1029 emit_case_reloc (struct jcf_relocation *reloc, struct jcf_partial *state)
1031 struct jcf_block *block = state->last_block;
/* Re-link the existing reloc onto the current block's list and retarget
   its offset/kind to the current bytecode position. */
1032 reloc->next = block->u.relocations;
1033 block->u.relocations = reloc;
1034 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1035 reloc->kind = BLOCK_START_RELOC;
1039 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1040 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1043 emit_if (struct jcf_block *target, int opcode, int inv_opcode,
1044 struct jcf_partial *state)
/* kind < -1 encodes the inverted opcode for perform_relocations. */
1048 /* value is 1 byte from reloc back to start of instruction. */
1049 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
/* Emit a goto to TARGET; may be widened to goto_w during relocation. */
1053 emit_goto (struct jcf_block *target, struct jcf_partial *state)
1057 /* Value is 1 byte from reloc back to start of instruction. */
1058 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
/* Emit a jsr to TARGET; may be widened to jsr_w during relocation. */
1062 emit_jsr (struct jcf_block *target, struct jcf_partial *state)
1066 /* Value is 1 byte from reloc back to start of instruction. */
1067 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1071 /* Generate code to evaluate EXP. If the result is true,
1072 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1073 TRUE_BRANCH_FIRST is a code generation hint that the
1074 TRUE_LABEL may follow right after this. (The idea is that we
1075 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1078 generate_bytecode_conditional (tree exp,
1079 struct jcf_block *true_label,
1080 struct jcf_block *false_label,
1081 int true_branch_first,
1082 struct jcf_partial *state)
1084 tree exp0, exp1, type;
1085 int save_SP = state->code_SP;
1086 enum java_opcode op, negop;
1087 switch (TREE_CODE (exp))
/* Constant condition: jump unconditionally to the matching label. */
1090 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
/* Conditional expression used as a condition: evaluate each arm as a
   condition itself, checking that both arms leave the same stack depth. */
1094 struct jcf_block *then_label = gen_jcf_label (state);
1095 struct jcf_block *else_label = gen_jcf_label (state);
1096 int save_SP_before, save_SP_after;
1097 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1098 then_label, else_label, 1, state);
1099 define_jcf_label (then_label, state);
1100 save_SP_before = state->code_SP;
1101 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1102 true_label, false_label, 1, state);
1103 save_SP_after = state->code_SP;
1104 state->code_SP = save_SP_before;
1105 define_jcf_label (else_label, state);
1106 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1107 true_label, false_label,
1108 true_branch_first, state);
1109 if (state->code_SP != save_SP_after)
/* Logical NOT: swap the two labels. */
1113 case TRUTH_NOT_EXPR:
1114 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label,
1115 true_label, ! true_branch_first, state);
/* Short-circuit AND: falls through to operand 1 only if operand 0 holds. */
1117 case TRUTH_ANDIF_EXPR:
1119 struct jcf_block *next_label = gen_jcf_label (state);
1120 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1121 next_label, false_label, 1, state);
1122 define_jcf_label (next_label, state);
1123 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1124 true_label, false_label, 1, state);
/* Short-circuit OR: falls through to operand 1 only if operand 0 fails. */
1127 case TRUTH_ORIF_EXPR:
1129 struct jcf_block *next_label = gen_jcf_label (state);
1130 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1131 true_label, next_label, 1, state);
1132 define_jcf_label (next_label, state);
1133 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1134 true_label, false_label, 1, state);
1138 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1139 set it to the corresponding 1-operand if<COND> instructions. */
1143 /* The opcodes with their inverses are allocated in pairs.
1144 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1145 negop = (op & 1) ? op + 1 : op - 1;
/* Emit the branch pair so the expected-next label falls through. */
1147 if (true_branch_first)
1149 emit_if (false_label, negop, op, state);
1150 emit_goto (true_label, state);
1154 emit_if (true_label, op, negop, state);
1155 emit_goto (false_label, state);
/* Comparison operators: pick the if_icmp<COND> opcode (case labels for
   EQ/NE/GT/LT/GE/LE are elided in this listing). */
1159 op = OPCODE_if_icmpeq;
1162 op = OPCODE_if_icmpne;
1165 op = OPCODE_if_icmpgt;
1168 op = OPCODE_if_icmplt;
1171 op = OPCODE_if_icmpge;
1174 op = OPCODE_if_icmple;
1177 exp0 = TREE_OPERAND (exp, 0);
1178 exp1 = TREE_OPERAND (exp, 1);
1179 type = TREE_TYPE (exp0);
1180 switch (TREE_CODE (type))
/* Reference comparison: acmpeq/acmpne, or ifnull/ifnonnull vs. null. */
1183 case POINTER_TYPE: case RECORD_TYPE:
1184 switch (TREE_CODE (exp))
1186 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1187 case NE_EXPR: op = OPCODE_if_acmpne; break;
1190 if (integer_zerop (exp1) || integer_zerop (exp0))
1192 generate_bytecode_insns (integer_zerop (exp0) ? exp1 : exp0,
1193 STACK_TARGET, state);
1194 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1195 negop = (op & 1) ? op - 1 : op + 1;
1199 generate_bytecode_insns (exp0, STACK_TARGET, state);
1200 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* Floating comparison: evaluate operands, then (elided) fcmpl/fcmpg
   style compare chooses NaN behavior by operator direction. */
1204 generate_bytecode_insns (exp0, STACK_TARGET, state);
1205 generate_bytecode_insns (exp1, STACK_TARGET, state);
1206 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1210 if (TYPE_PRECISION (type) > 32)
/* Integral comparison: longs need lcmp then a 1-operand if<COND>. */
1221 if (TYPE_PRECISION (type) > 32)
1223 generate_bytecode_insns (exp0, STACK_TARGET, state);
1224 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* Comparing against zero: can use the 1-operand if<COND> forms. */
1232 if (integer_zerop (exp1))
1234 generate_bytecode_insns (exp0, STACK_TARGET, state);
1238 if (integer_zerop (exp0))
1242 case OPCODE_if_icmplt:
1243 case OPCODE_if_icmpge:
1246 case OPCODE_if_icmpgt:
1247 case OPCODE_if_icmple:
1253 generate_bytecode_insns (exp1, STACK_TARGET, state);
1257 generate_bytecode_insns (exp0, STACK_TARGET, state);
1258 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* Default: evaluate EXP as a value and branch on (non)zero. */
1264 generate_bytecode_insns (exp, STACK_TARGET, state);
1266 if (true_branch_first)
1268 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1269 emit_goto (true_label, state);
1273 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1274 emit_goto (false_label, state);
/* A conditional must leave the stack depth unchanged. */
1278 if (save_SP != state->code_SP)
1282 /* Call pending cleanups i.e. those for surrounding TRY_FINALLY_EXPRs.
1283 but only as far out as LIMIT (since we are about to jump to the
1284 emit label that is LIMIT). */
1287 call_cleanups (struct jcf_block *limit, struct jcf_partial *state)
1289 struct jcf_block *block = state->labeled_blocks;
1290 for (; block != limit; block = block->next)
1292 if (block->pc == PENDING_CLEANUP_PC)
1293 emit_jsr (block, state);
/* Emit bytecode to return EXP from the method currently being
   generated into STATE.  COMPOUND_EXPRs are peeled (side-effect part
   emitted, then the value part returned); COND_EXPRs return from each
   arm separately so each arm can fall straight into its own return.
   NOTE(review): this extract is missing interior lines (braces, case
   labels, and the final opcode emission) — the comments below annotate
   only what is visible; confirm against the complete source.  */
1298 generate_bytecode_return (tree exp, struct jcf_partial *state)
1300 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));  /* declared return type of the method */
1301 int returns_void = TREE_CODE (return_type) == VOID_TYPE;
1306 switch (TREE_CODE (exp))
     /* COMPOUND_EXPR: emit operand 0 for side effects only, then
	return the value of operand 1.  */
1309 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1311 exp = TREE_OPERAND (exp, 1);
     /* COND_EXPR: branch on the condition, then emit a full return
	sequence in each arm (recursing on this function).  */
1315 struct jcf_block *then_label = gen_jcf_label (state);
1316 struct jcf_block *else_label = gen_jcf_label (state);
1317 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1318 then_label, else_label, 1, state);
1319 define_jcf_label (then_label, state);
1320 generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1321 define_jcf_label (else_label, state);
1322 generate_bytecode_return (TREE_OPERAND (exp, 2), state);
     /* Default: evaluate EXP, leaving its value on the stack unless
	the method returns void.  */
1326 generate_bytecode_insns (exp,
1327 returns_void ? IGNORE_TARGET
1328 : STACK_TARGET, state);
     /* Run pending `finally' cleanups for all enclosing
	TRY_FINALLY_EXPRs before leaving the method.  */
1334 call_cleanups (NULL, state);
     /* Non-void return: pick the typed return opcode
	(ireturn/lreturn/freturn/dreturn/areturn).  */
1338 op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
1339 if (state->num_finalizers > 0)
     /* With pending finalizers, the return value must survive the
	cleanup subroutines: spill it to a dedicated local, run the
	cleanups, then reload it.  Allocate the local lazily, once per
	method.  */
1341 if (state->return_value_decl == NULL_TREE)
1343 state->return_value_decl
1344 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1345 localvar_alloc (state->return_value_decl, state);
1347 emit_store (state->return_value_decl, state);
1348 call_cleanups (NULL, state);
1349 emit_load (state->return_value_decl, state);
1350 /* If we call maybe_free_localvar (state->return_value_decl, state, 1),
1351 then we risk the save decl erroneously re-used in the
1352 finalizer. Instead, we keep the state->return_value_decl
1353 allocated through the rest of the method. This is not
1354 the greatest solution, but it is at least simple and safe. */
1361 /* Generate bytecode for sub-expression EXP of METHOD.
1362 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1365 generate_bytecode_insns (tree exp, int target, struct jcf_partial *state)
1368 enum java_opcode jopcode;
1370 HOST_WIDE_INT value;
1375 if (exp == NULL && target == IGNORE_TARGET)
1378 type = TREE_TYPE (exp);
1380 switch (TREE_CODE (exp))
1383 if (BLOCK_EXPR_BODY (exp))
1386 tree body = BLOCK_EXPR_BODY (exp);
1387 long jsrs = state->num_jsrs;
1388 for (local = BLOCK_EXPR_DECLS (exp); local; )
1390 tree next = TREE_CHAIN (local);
1391 localvar_alloc (local, state);
1394 /* Avoid deep recursion for long blocks. */
1395 while (TREE_CODE (body) == COMPOUND_EXPR)
1397 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1398 body = TREE_OPERAND (body, 1);
1400 generate_bytecode_insns (body, target, state);
1402 for (local = BLOCK_EXPR_DECLS (exp); local; )
1404 tree next = TREE_CHAIN (local);
1405 maybe_free_localvar (local, state, state->num_jsrs <= jsrs);
1411 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1412 /* Normally the first operand to a COMPOUND_EXPR must complete
1413 normally. However, in the special case of a do-while
1414 statement this is not necessarily the case. */
1415 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0)))
1416 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1418 case EXPR_WITH_FILE_LOCATION:
1420 const char *saved_input_filename = input_filename;
1421 tree body = EXPR_WFL_NODE (exp);
1422 int saved_lineno = input_line;
1423 if (body == empty_stmt_node)
1425 input_filename = EXPR_WFL_FILENAME (exp);
1426 input_line = EXPR_WFL_LINENO (exp);
1427 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && input_line > 0
1428 && debug_info_level > DINFO_LEVEL_NONE)
1429 put_linenumber (input_line, state);
1430 generate_bytecode_insns (body, target, state);
1431 input_filename = saved_input_filename;
1432 input_line = saved_lineno;
1436 if (target == IGNORE_TARGET) ; /* do nothing */
1437 else if (TREE_CODE (type) == POINTER_TYPE)
1439 if (! integer_zerop (exp))
1442 OP1 (OPCODE_aconst_null);
1445 else if (TYPE_PRECISION (type) <= 32)
1447 push_int_const (TREE_INT_CST_LOW (exp), state);
1452 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1459 int prec = TYPE_PRECISION (type) >> 5;
1461 if (real_zerop (exp) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp)))
1462 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1463 else if (real_onep (exp))
1464 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1465 else if (prec == 1 && real_twop (exp))
1466 OP1 (OPCODE_fconst_2);
1467 /* ??? We could also use iconst_3/ldc followed by i2f/i2d
1468 for other float/double when the value is a small integer. */
1471 offset = find_constant_index (exp, state);
1473 push_constant1 (offset, state);
1475 push_constant2 (offset, state);
1481 push_constant1 (find_string_constant (&state->cpool, exp), state);
1485 if (TREE_STATIC (exp))
1487 field_op (exp, OPCODE_getstatic, state);
1488 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1491 /* ... fall through ... */
1493 emit_load (exp, state);
1495 case NON_LVALUE_EXPR:
1497 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1500 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1501 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1502 if (target != IGNORE_TARGET)
1504 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1507 if (! TYPE_IS_WIDE (type))
1513 tree obj = TREE_OPERAND (exp, 0);
1514 tree field = TREE_OPERAND (exp, 1);
1515 int is_static = FIELD_STATIC (field);
1516 generate_bytecode_insns (obj,
1517 is_static ? IGNORE_TARGET : target, state);
1518 if (target != IGNORE_TARGET)
1520 if (DECL_NAME (field) == length_identifier_node && !is_static
1521 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1524 OP1 (OPCODE_arraylength);
1528 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1532 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1537 case TRUTH_ANDIF_EXPR:
1538 case TRUTH_ORIF_EXPR:
1546 struct jcf_block *then_label = gen_jcf_label (state);
1547 struct jcf_block *else_label = gen_jcf_label (state);
1548 struct jcf_block *end_label = gen_jcf_label (state);
1549 generate_bytecode_conditional (exp,
1550 then_label, else_label, 1, state);
1551 define_jcf_label (then_label, state);
1552 push_int_const (1, state);
1553 emit_goto (end_label, state);
1554 define_jcf_label (else_label, state);
1555 push_int_const (0, state);
1556 define_jcf_label (end_label, state);
1562 struct jcf_block *then_label = gen_jcf_label (state);
1563 struct jcf_block *else_label = gen_jcf_label (state);
1564 struct jcf_block *end_label = gen_jcf_label (state);
1565 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1566 then_label, else_label, 1, state);
1567 define_jcf_label (then_label, state);
1568 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1569 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1570 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1571 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1572 emit_goto (end_label, state);
1573 define_jcf_label (else_label, state);
1574 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1575 define_jcf_label (end_label, state);
1576 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1577 if (TREE_TYPE (exp) != void_type_node)
1578 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1583 struct jcf_switch_state *sw_state = state->sw_state;
1584 struct jcf_relocation *reloc
1585 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1586 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1588 reloc->label = get_jcf_label_here (state);
1589 reloc->offset = case_value;
1590 reloc->next = sw_state->cases;
1591 sw_state->cases = reloc;
1592 if (sw_state->num_cases == 0)
1594 sw_state->min_case = case_value;
1595 sw_state->max_case = case_value;
1599 if (case_value < sw_state->min_case)
1600 sw_state->min_case = case_value;
1601 if (case_value > sw_state->max_case)
1602 sw_state->max_case = case_value;
1604 sw_state->num_cases++;
1608 state->sw_state->default_label = get_jcf_label_here (state);
1613 /* The SWITCH_EXPR has three parts, generated in the following order:
1614 1. the switch_expression (the value used to select the correct case);
1616 3. the switch_instruction (the tableswitch/loopupswitch instruction.).
1617 After code generation, we will re-order them in the order 1, 3, 2.
1618 This is to avoid any extra GOTOs. */
1619 struct jcf_switch_state sw_state;
1620 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1621 struct jcf_block *body_last; /* Last block of the switch_body. */
1622 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1623 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1624 struct jcf_block *body_block;
1626 sw_state.prev = state->sw_state;
1627 state->sw_state = &sw_state;
1628 sw_state.cases = NULL;
1629 sw_state.num_cases = 0;
1630 sw_state.default_label = NULL;
1631 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1632 expression_last = state->last_block;
1633 /* Force a new block here. */
1634 body_block = gen_jcf_label (state);
1635 define_jcf_label (body_block, state);
1636 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1637 body_last = state->last_block;
1639 switch_instruction = gen_jcf_label (state);
1640 define_jcf_label (switch_instruction, state);
1641 if (sw_state.default_label == NULL)
1642 sw_state.default_label = gen_jcf_label (state);
1644 if (sw_state.num_cases <= 1)
1646 if (sw_state.num_cases == 0)
1648 emit_pop (1, state);
1653 push_int_const (sw_state.cases->offset, state);
1655 emit_if (sw_state.cases->label,
1656 OPCODE_if_icmpeq, OPCODE_if_icmpne, state);
1658 emit_goto (sw_state.default_label, state);
1663 unsigned HOST_WIDE_INT delta;
1664 /* Copy the chain of relocs into a sorted array. */
1665 struct jcf_relocation **relocs
1666 = xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1667 /* The relocs arrays is a buffer with a gap.
1668 The assumption is that cases will normally come in "runs". */
1670 int gap_end = sw_state.num_cases;
1671 struct jcf_relocation *reloc;
1672 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1674 HOST_WIDE_INT case_value = reloc->offset;
1675 while (gap_end < sw_state.num_cases)
1677 struct jcf_relocation *end = relocs[gap_end];
1678 if (case_value <= end->offset)
1680 relocs[gap_start++] = end;
1683 while (gap_start > 0)
1685 struct jcf_relocation *before = relocs[gap_start-1];
1686 if (case_value >= before->offset)
1688 relocs[--gap_end] = before;
1691 relocs[gap_start++] = reloc;
1692 /* Note we don't check for duplicates. This is
1693 handled by the parser. */
1696 /* We could have DELTA < 0 if sw_state.min_case is
1697 something like Integer.MIN_VALUE. That is why delta is
1699 delta = sw_state.max_case - sw_state.min_case;
1700 if (2 * (unsigned) sw_state.num_cases >= delta)
1701 { /* Use tableswitch. */
1703 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1704 OP1 (OPCODE_tableswitch);
1705 emit_reloc (RELOCATION_VALUE_0,
1706 SWITCH_ALIGN_RELOC, NULL, state);
1707 emit_switch_reloc (sw_state.default_label, state);
1708 OP4 (sw_state.min_case);
1709 OP4 (sw_state.max_case);
1710 for (i = sw_state.min_case; ; )
1712 reloc = relocs[index];
1713 if (i == reloc->offset)
1715 emit_case_reloc (reloc, state);
1716 if (i == sw_state.max_case)
1721 emit_switch_reloc (sw_state.default_label, state);
1726 { /* Use lookupswitch. */
1727 RESERVE(9 + 8 * sw_state.num_cases);
1728 OP1 (OPCODE_lookupswitch);
1729 emit_reloc (RELOCATION_VALUE_0,
1730 SWITCH_ALIGN_RELOC, NULL, state);
1731 emit_switch_reloc (sw_state.default_label, state);
1732 OP4 (sw_state.num_cases);
1733 for (i = 0; i < sw_state.num_cases; i++)
1735 struct jcf_relocation *reloc = relocs[i];
1736 OP4 (reloc->offset);
1737 emit_case_reloc (reloc, state);
1743 instruction_last = state->last_block;
1744 if (sw_state.default_label->pc < 0)
1745 define_jcf_label (sw_state.default_label, state);
1746 else /* Force a new block. */
1747 sw_state.default_label = get_jcf_label_here (state);
1748 /* Now re-arrange the blocks so the switch_instruction
1749 comes before the switch_body. */
1750 switch_length = state->code_length - switch_instruction->pc;
1751 switch_instruction->pc = body_block->pc;
1752 instruction_last->next = body_block;
1753 instruction_last->v.chunk->next = body_block->v.chunk;
1754 expression_last->next = switch_instruction;
1755 expression_last->v.chunk->next = switch_instruction->v.chunk;
1756 body_last->next = sw_state.default_label;
1757 body_last->v.chunk->next = NULL;
1758 state->chunk = body_last->v.chunk;
1759 for (; body_block != sw_state.default_label; body_block = body_block->next)
1760 body_block->pc += switch_length;
1762 state->sw_state = sw_state.prev;
1767 exp = TREE_OPERAND (exp, 0);
1768 if (exp == NULL_TREE)
1769 exp = empty_stmt_node;
1770 else if (TREE_CODE (exp) != MODIFY_EXPR)
1773 exp = TREE_OPERAND (exp, 1);
1774 generate_bytecode_return (exp, state);
1776 case LABELED_BLOCK_EXPR:
1778 struct jcf_block *end_label = gen_jcf_label (state);
1779 end_label->next = state->labeled_blocks;
1780 state->labeled_blocks = end_label;
1781 end_label->pc = PENDING_EXIT_PC;
1782 end_label->u.labeled_block = exp;
1783 if (LABELED_BLOCK_BODY (exp))
1784 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1785 if (state->labeled_blocks != end_label)
1787 state->labeled_blocks = end_label->next;
1788 define_jcf_label (end_label, state);
1793 tree body = TREE_OPERAND (exp, 0);
1795 if (TREE_CODE (body) == COMPOUND_EXPR
1796 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1798 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1799 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1800 struct jcf_block *head_label;
1801 struct jcf_block *body_label;
1802 struct jcf_block *end_label = gen_jcf_label (state);
1803 struct jcf_block *exit_label = state->labeled_blocks;
1804 head_label = gen_jcf_label (state);
1805 emit_goto (head_label, state);
1806 body_label = get_jcf_label_here (state);
1807 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1808 define_jcf_label (head_label, state);
1809 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1810 end_label, body_label, 1, state);
1811 define_jcf_label (end_label, state);
1816 struct jcf_block *head_label = get_jcf_label_here (state);
1817 generate_bytecode_insns (body, IGNORE_TARGET, state);
1818 if (CAN_COMPLETE_NORMALLY (body))
1819 emit_goto (head_label, state);
1825 struct jcf_block *label = state->labeled_blocks;
1826 struct jcf_block *end_label = gen_jcf_label (state);
1827 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1828 label, end_label, 0, state);
1829 define_jcf_label (end_label, state);
1832 case EXIT_BLOCK_EXPR:
1834 struct jcf_block *label = state->labeled_blocks;
1835 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1836 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1837 label = label->next;
1838 call_cleanups (label, state);
1839 emit_goto (label, state);
1843 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1844 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1845 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1846 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1849 arg = TREE_OPERAND (exp, 1);
1850 exp = TREE_OPERAND (exp, 0);
1851 type = TREE_TYPE (exp);
1852 size = TYPE_IS_WIDE (type) ? 2 : 1;
1853 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1854 && ! TREE_STATIC (exp)
1855 && TREE_CODE (type) == INTEGER_TYPE
1856 && TYPE_PRECISION (type) == 32)
1858 if (target != IGNORE_TARGET && post_op)
1859 emit_load (exp, state);
1860 emit_iinc (exp, value, state);
1861 if (target != IGNORE_TARGET && ! post_op)
1862 emit_load (exp, state);
1865 if (TREE_CODE (exp) == COMPONENT_REF)
1867 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1868 emit_dup (1, 0, state);
1869 /* Stack: ..., objectref, objectref. */
1870 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1872 /* Stack: ..., objectref, oldvalue. */
1875 else if (TREE_CODE (exp) == ARRAY_REF)
1877 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1878 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1879 emit_dup (2, 0, state);
1880 /* Stack: ..., array, index, array, index. */
1881 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1885 /* Stack: ..., array, index, oldvalue. */
1888 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1890 generate_bytecode_insns (exp, STACK_TARGET, state);
1891 /* Stack: ..., oldvalue. */
1897 if (target != IGNORE_TARGET && post_op)
1898 emit_dup (size, offset, state);
1899 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1900 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1901 /* Stack, otherwise: ..., [result, ] oldvalue. */
1902 generate_bytecode_insns (arg, STACK_TARGET, state);
1903 emit_binop ((value >= 0 ? OPCODE_iadd : OPCODE_isub)
1904 + adjust_typed_op (type, 3),
1906 if (target != IGNORE_TARGET && ! post_op)
1907 emit_dup (size, offset, state);
1908 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1909 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1910 /* Stack, otherwise: ..., [result, ] newvalue. */
1911 goto finish_assignment;
1915 tree lhs = TREE_OPERAND (exp, 0);
1916 tree rhs = TREE_OPERAND (exp, 1);
1919 /* See if we can use the iinc instruction. */
1920 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1921 && ! TREE_STATIC (lhs)
1922 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1923 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1924 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1926 tree arg0 = TREE_OPERAND (rhs, 0);
1927 tree arg1 = TREE_OPERAND (rhs, 1);
1928 HOST_WIDE_INT min_value = -32768;
1929 HOST_WIDE_INT max_value = 32767;
1930 if (TREE_CODE (rhs) == MINUS_EXPR)
1935 else if (arg1 == lhs)
1938 arg1 = TREE_OPERAND (rhs, 0);
1940 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
1942 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
1943 value = TREE_INT_CST_LOW (arg1);
1944 if ((hi_value == 0 && value <= max_value)
1945 || (hi_value == -1 && value >= min_value))
1947 if (TREE_CODE (rhs) == MINUS_EXPR)
1949 emit_iinc (lhs, value, state);
1950 if (target != IGNORE_TARGET)
1951 emit_load (lhs, state);
1957 if (TREE_CODE (lhs) == COMPONENT_REF)
1959 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
1960 STACK_TARGET, state);
1963 else if (TREE_CODE (lhs) == ARRAY_REF)
1965 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
1966 STACK_TARGET, state);
1967 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
1968 STACK_TARGET, state);
1974 /* If the rhs is a binary expression and the left operand is
1975 `==' to the lhs then we have an OP= expression. In this
1976 case we must do some special processing. */
1977 if (TREE_CODE_CLASS (TREE_CODE (rhs)) == '2'
1978 && lhs == TREE_OPERAND (rhs, 0))
1980 if (TREE_CODE (lhs) == COMPONENT_REF)
1982 tree field = TREE_OPERAND (lhs, 1);
1983 if (! FIELD_STATIC (field))
1985 /* Duplicate the object reference so we can get
1987 emit_dup (TYPE_IS_WIDE (field) ? 2 : 1, 0, state);
1990 field_op (field, (FIELD_STATIC (field)
1995 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1997 else if (TREE_CODE (lhs) == VAR_DECL
1998 || TREE_CODE (lhs) == PARM_DECL)
2000 if (FIELD_STATIC (lhs))
2002 field_op (lhs, OPCODE_getstatic, state);
2003 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2006 emit_load (lhs, state);
2008 else if (TREE_CODE (lhs) == ARRAY_REF)
2010 /* Duplicate the array and index, which are on the
2011 stack, so that we can load the old value. */
2012 emit_dup (2, 0, state);
2014 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (lhs), 7);
2017 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2022 /* This function correctly handles the case where the LHS
2023 of a binary expression is NULL_TREE. */
2024 rhs = build (TREE_CODE (rhs), TREE_TYPE (rhs),
2025 NULL_TREE, TREE_OPERAND (rhs, 1));
2028 generate_bytecode_insns (rhs, STACK_TARGET, state);
2029 if (target != IGNORE_TARGET)
2030 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
2036 if (TREE_CODE (exp) == COMPONENT_REF)
2038 tree field = TREE_OPERAND (exp, 1);
2039 if (! FIELD_STATIC (field))
2042 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2045 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2047 else if (TREE_CODE (exp) == VAR_DECL
2048 || TREE_CODE (exp) == PARM_DECL)
2050 if (FIELD_STATIC (exp))
2052 field_op (exp, OPCODE_putstatic, state);
2053 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2056 emit_store (exp, state);
2058 else if (TREE_CODE (exp) == ARRAY_REF)
2060 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2063 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2069 jopcode = OPCODE_iadd;
2072 jopcode = OPCODE_isub;
2075 jopcode = OPCODE_imul;
2077 case TRUNC_DIV_EXPR:
2079 jopcode = OPCODE_idiv;
2081 case TRUNC_MOD_EXPR:
2082 jopcode = OPCODE_irem;
2084 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2085 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
2086 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2087 case TRUTH_AND_EXPR:
2088 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2090 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2091 case TRUTH_XOR_EXPR:
2092 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2095 tree arg0 = TREE_OPERAND (exp, 0);
2096 tree arg1 = TREE_OPERAND (exp, 1);
2097 jopcode += adjust_typed_op (type, 3);
2098 if (arg0 != NULL_TREE && operand_equal_p (arg0, arg1, 0))
2100 /* fold may (e.g) convert 2*x to x+x. */
2101 generate_bytecode_insns (arg0, target, state);
2102 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2106 /* ARG0 will be NULL_TREE if we're handling an `OP='
2107 expression. In this case the stack already holds the
2108 LHS. See the MODIFY_EXPR case. */
2109 if (arg0 != NULL_TREE)
2110 generate_bytecode_insns (arg0, target, state);
2111 if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2112 arg1 = convert (int_type_node, arg1);
2113 generate_bytecode_insns (arg1, target, state);
2115 /* For most binary operations, both operands and the result have the
2116 same type. Shift operations are different. Using arg1's type
2117 gets us the correct SP adjustment in all cases. */
2118 if (target == STACK_TARGET)
2119 emit_binop (jopcode, TREE_TYPE (arg1), state);
2122 case TRUTH_NOT_EXPR:
2124 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2125 if (target == STACK_TARGET)
2127 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2128 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2132 NOTE_PUSH (1 + is_long);
2133 OP1 (OPCODE_ixor + is_long);
2134 NOTE_POP (1 + is_long);
2138 jopcode = OPCODE_ineg;
2139 jopcode += adjust_typed_op (type, 3);
2140 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2141 if (target == STACK_TARGET)
2142 emit_unop (jopcode, type, state);
2144 case INSTANCEOF_EXPR:
2146 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2147 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2149 OP1 (OPCODE_instanceof);
2154 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2159 case FIX_TRUNC_EXPR:
2161 tree src = TREE_OPERAND (exp, 0);
2162 tree src_type = TREE_TYPE (src);
2163 tree dst_type = TREE_TYPE (exp);
2164 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2165 if (target == IGNORE_TARGET || src_type == dst_type)
2167 if (TREE_CODE (dst_type) == POINTER_TYPE)
2169 if (TREE_CODE (exp) == CONVERT_EXPR)
2171 int index = find_class_constant (&state->cpool,
2172 TREE_TYPE (dst_type));
2174 OP1 (OPCODE_checkcast);
2178 else /* Convert numeric types. */
2180 int wide_src = TYPE_PRECISION (src_type) > 32;
2181 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2182 NOTE_POP (1 + wide_src);
2184 if (TREE_CODE (dst_type) == REAL_TYPE)
2186 if (TREE_CODE (src_type) == REAL_TYPE)
2187 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2188 else if (TYPE_PRECISION (src_type) == 64)
2189 OP1 (OPCODE_l2f + wide_dst);
2191 OP1 (OPCODE_i2f + wide_dst);
2193 else /* Convert to integral type. */
2195 if (TREE_CODE (src_type) == REAL_TYPE)
2196 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2201 if (TYPE_PRECISION (dst_type) < 32)
2204 /* Already converted to int, if needed. */
2205 if (TYPE_PRECISION (dst_type) <= 8)
2207 else if (TREE_UNSIGNED (dst_type))
2213 NOTE_PUSH (1 + wide_dst);
2220 tree try_clause = TREE_OPERAND (exp, 0);
2221 struct jcf_block *start_label = get_jcf_label_here (state);
2222 struct jcf_block *end_label; /* End of try clause. */
2223 struct jcf_block *finished_label = gen_jcf_label (state);
2224 tree clause = TREE_OPERAND (exp, 1);
2225 if (target != IGNORE_TARGET)
2227 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2228 end_label = get_jcf_label_here (state);
2229 if (end_label == start_label)
2231 if (CAN_COMPLETE_NORMALLY (try_clause))
2232 emit_goto (finished_label, state);
2233 while (clause != NULL_TREE)
2235 tree catch_clause = TREE_OPERAND (clause, 0);
2236 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2237 struct jcf_handler *handler = alloc_handler (start_label,
2239 if (exception_decl == NULL_TREE)
2240 handler->type = NULL_TREE;
2242 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2243 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2244 clause = TREE_CHAIN (clause);
2245 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2246 emit_goto (finished_label, state);
2248 define_jcf_label (finished_label, state);
2252 case TRY_FINALLY_EXPR:
2254 struct jcf_block *finished_label = NULL;
2255 struct jcf_block *finally_label, *start_label, *end_label;
2256 struct jcf_handler *handler;
2257 tree try_block = TREE_OPERAND (exp, 0);
2258 tree finally = TREE_OPERAND (exp, 1);
2259 tree return_link = NULL_TREE, exception_decl = NULL_TREE;
2261 tree exception_type;
2263 finally_label = gen_jcf_label (state);
2264 start_label = get_jcf_label_here (state);
2265 /* If the `finally' clause can complete normally, we emit it
2266 as a subroutine and let the other clauses call it via
2267 `jsr'. If it can't complete normally, then we simply emit
2268 `goto's directly to it. */
2269 if (CAN_COMPLETE_NORMALLY (finally))
2271 finally_label->pc = PENDING_CLEANUP_PC;
2272 finally_label->next = state->labeled_blocks;
2273 state->labeled_blocks = finally_label;
2274 state->num_finalizers++;
2277 generate_bytecode_insns (try_block, target, state);
2279 if (CAN_COMPLETE_NORMALLY (finally))
2281 if (state->labeled_blocks != finally_label)
2283 state->labeled_blocks = finally_label->next;
2285 end_label = get_jcf_label_here (state);
2287 if (end_label == start_label)
2289 state->num_finalizers--;
2290 define_jcf_label (finally_label, state);
2291 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2295 if (CAN_COMPLETE_NORMALLY (finally))
2297 return_link = build_decl (VAR_DECL, NULL_TREE,
2298 return_address_type_node);
2299 finished_label = gen_jcf_label (state);
2302 if (CAN_COMPLETE_NORMALLY (try_block))
2304 if (CAN_COMPLETE_NORMALLY (finally))
2306 emit_jsr (finally_label, state);
2307 emit_goto (finished_label, state);
2310 emit_goto (finally_label, state);
2313 /* Handle exceptions. */
2315 exception_type = build_pointer_type (throwable_type_node);
2316 if (CAN_COMPLETE_NORMALLY (finally))
2318 /* We're going to generate a subroutine, so we'll need to
2319 save and restore the exception around the `jsr'. */
2320 exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2321 localvar_alloc (return_link, state);
2323 handler = alloc_handler (start_label, end_label, state);
2324 handler->type = NULL_TREE;
2325 if (CAN_COMPLETE_NORMALLY (finally))
2327 localvar_alloc (exception_decl, state);
2329 emit_store (exception_decl, state);
2330 emit_jsr (finally_label, state);
2331 emit_load (exception_decl, state);
2333 OP1 (OPCODE_athrow);
2338 /* We're not generating a subroutine. In this case we can
2339 simply have the exception handler pop the exception and
2340 then fall through to the `finally' block. */
2342 emit_pop (1, state);
2346 /* The finally block. If we're generating a subroutine, first
2347 save return PC into return_link. Otherwise, just generate
2348 the code for the `finally' block. */
2349 define_jcf_label (finally_label, state);
2350 if (CAN_COMPLETE_NORMALLY (finally))
2353 emit_store (return_link, state);
2356 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2357 if (CAN_COMPLETE_NORMALLY (finally))
2359 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2360 maybe_free_localvar (exception_decl, state, 1);
2361 maybe_free_localvar (return_link, state, 1);
2362 define_jcf_label (finished_label, state);
2367 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2369 OP1 (OPCODE_athrow);
2371 case NEW_ARRAY_INIT:
2373 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2374 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2375 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2376 HOST_WIDE_INT length = java_array_type_length (array_type);
2377 if (target == IGNORE_TARGET)
2379 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2380 generate_bytecode_insns (TREE_VALUE (values), target, state);
2383 push_int_const (length, state);
2386 if (JPRIMITIVE_TYPE_P (element_type))
2388 int atype = encode_newarray_type (element_type);
2389 OP1 (OPCODE_newarray);
2394 int index = find_class_constant (&state->cpool,
2395 TREE_TYPE (element_type));
2396 OP1 (OPCODE_anewarray);
2400 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2401 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2403 int save_SP = state->code_SP;
2404 emit_dup (1, 0, state);
2405 push_int_const (offset, state);
2407 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2410 state->code_SP = save_SP;
2414 case JAVA_EXC_OBJ_EXPR:
2415 NOTE_PUSH (1); /* Pushed by exception system. */
2420 /* This copes with cases where fold() has created MIN or MAX
2421 from a conditional expression. */
2422 enum tree_code code = TREE_CODE (exp) == MIN_EXPR ? LT_EXPR : GT_EXPR;
2423 tree op0 = TREE_OPERAND (exp, 0);
2424 tree op1 = TREE_OPERAND (exp, 1);
2426 if (TREE_SIDE_EFFECTS (op0) || TREE_SIDE_EFFECTS (op1))
2428 x = build (COND_EXPR, TREE_TYPE (exp),
2429 build (code, boolean_type_node, op0, op1),
2431 generate_bytecode_insns (x, target, state);
2434 case NEW_CLASS_EXPR:
2436 tree class = TREE_TYPE (TREE_TYPE (exp));
2437 int need_result = target != IGNORE_TARGET;
2438 int index = find_class_constant (&state->cpool, class);
2444 NOTE_PUSH (1 + need_result);
2446 /* ... fall though ... */
2449 tree f = TREE_OPERAND (exp, 0);
2450 tree x = TREE_OPERAND (exp, 1);
2451 int save_SP = state->code_SP;
2453 if (TREE_CODE (f) == ADDR_EXPR)
2454 f = TREE_OPERAND (f, 0);
2455 if (f == soft_newarray_node)
2457 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2458 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2459 STACK_TARGET, state);
2461 OP1 (OPCODE_newarray);
2465 else if (f == soft_multianewarray_node)
2469 int index = find_class_constant (&state->cpool,
2470 TREE_TYPE (TREE_TYPE (exp)));
2471 x = TREE_CHAIN (x); /* Skip class argument. */
2472 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2473 for (idim = ndims; --idim >= 0; )
2476 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2479 OP1 (OPCODE_multianewarray);
2484 else if (f == soft_anewarray_node)
2486 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2487 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2488 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2490 OP1 (OPCODE_anewarray);
2494 else if (f == soft_monitorenter_node
2495 || f == soft_monitorexit_node
2498 if (f == soft_monitorenter_node)
2499 op = OPCODE_monitorenter;
2500 else if (f == soft_monitorexit_node)
2501 op = OPCODE_monitorexit;
2504 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2510 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2512 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2514 nargs = state->code_SP - save_SP;
2515 state->code_SP = save_SP;
2516 if (f == soft_fmod_node)
2523 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2524 NOTE_POP (1); /* Pop implicit this. */
2525 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2527 tree context = DECL_CONTEXT (f);
2528 int index, interface = 0;
2530 if (METHOD_STATIC (f))
2531 OP1 (OPCODE_invokestatic);
2532 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2533 || METHOD_PRIVATE (f))
2534 OP1 (OPCODE_invokespecial);
2537 if (CLASS_INTERFACE (TYPE_NAME (context)))
2539 tree arg1 = TREE_VALUE (TREE_OPERAND (exp, 1));
2540 context = TREE_TYPE (TREE_TYPE (arg1));
2541 if (CLASS_INTERFACE (TYPE_NAME (context)))
2545 OP1 (OPCODE_invokeinterface);
2547 OP1 (OPCODE_invokevirtual);
2549 index = find_methodref_with_class_index (&state->cpool, f, context);
2559 f = TREE_TYPE (TREE_TYPE (f));
2560 if (TREE_CODE (f) != VOID_TYPE)
2562 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2563 if (target == IGNORE_TARGET)
2564 emit_pop (size, state);
2574 error("internal error in generate_bytecode_insn - tree code not implemented: %s",
2575 tree_code_name [(int) TREE_CODE (exp)]);
/* Assign final pcs to every block of the current method and rewrite
   each block's bytes with all branch targets resolved.  Pass 1 walks
   STATE->blocks, shrinking redundant gotos, short-circuiting
   goto-to-goto chains, and deciding which 16-bit branches must be
   expanded to wide (32-bit) forms because their target is out of
   range.  Pass 2 copies each block into a correctly-sized buffer,
   patching relocations from back to front (relocation lists are
   stored in reverse order).  On exit STATE->code_length holds the
   total code size.  NOTE(review): some interior lines are elided in
   this extract; braces and a few statements are not visible.  */
2580 perform_relocations (struct jcf_partial *state)
2582 struct jcf_block *block;
2583 struct jcf_relocation *reloc;
2587 /* Before we start, the pc field of each block is an upper bound on
2588 the block's start pc (it may be less, if previous blocks need less
2589 than their maximum).
2591 The minimum size of each block is in the block's chunk->size. */
2593 /* First, figure out the actual locations of each block. */
2596 for (block = state->blocks; block != NULL; block = block->next)
2598 int block_size = block->v.chunk->size;
2602 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2603 Assumes relocations are in reverse order. */
2604 reloc = block->u.relocations;
2605 while (reloc != NULL
2606 && reloc->kind == OPCODE_goto_w
2607 && reloc->label->pc == block->next->pc
2608 && reloc->offset + 2 == block_size)
2610 reloc = reloc->next;
2611 block->u.relocations = reloc;
/* A dropped goto is a 3-byte instruction (opcode + 16-bit offset).  */
2612 block->v.chunk->size -= 3;
2617 /* Optimize GOTO L; ... L: GOTO X by changing the first goto to
2618 jump directly to X. We're careful here to avoid an infinite
2619 loop if the `goto's themselves form one. We do this
2620 optimization because we can generate a goto-to-goto for some
2621 try/finally blocks. */
2622 while (reloc != NULL
2623 && reloc->kind == OPCODE_goto_w
2624 && reloc->label != block
2625 && reloc->label->v.chunk->data != NULL
2626 && reloc->label->v.chunk->data[0] == OPCODE_goto)
2628 /* Find the reloc for the first instruction of the
2629 destination block. */
2630 struct jcf_relocation *first_reloc;
2631 for (first_reloc = reloc->label->u.relocations;
2633 first_reloc = first_reloc->next)
2635 if (first_reloc->offset == 1
2636 && first_reloc->kind == OPCODE_goto_w)
2638 reloc->label = first_reloc->label;
2643 /* If we didn't do anything, exit the loop. */
2644 if (first_reloc == NULL)
2648 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2650 if (reloc->kind == SWITCH_ALIGN_RELOC)
2652 /* We assume this is the first relocation in this block,
2653 so we know its final pc. */
2654 int where = pc + reloc->offset;
/* tableswitch/lookupswitch operands must be 4-byte aligned;
   PAD is the number of padding bytes needed after the opcode.  */
2655 int pad = ((where + 3) & ~3) - where;
2658 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2660 int delta = reloc->label->pc - (pc + reloc->offset - 1);
/* Expanding goto/jsr to the _w form costs 2 extra bytes; an
   ifCOND becomes ifNCOND + goto_w, costing 5 extra bytes.  */
2661 int expand = reloc->kind > 0 ? 2 : 5;
2665 if (delta >= -32768 && delta <= 32767)
2671 block_size += expand;
/* Pass 2: copy each block into a buffer of its final size,
   applying the relocations recorded above.  */
2677 for (block = state->blocks; block != NULL; block = block->next)
2679 struct chunk *chunk = block->v.chunk;
2680 int old_size = chunk->size;
2681 int next_pc = block->next == NULL ? pc : block->next->pc;
2682 int new_size = next_pc - block->pc;
2683 unsigned char *new_ptr;
2684 unsigned char *old_buffer = chunk->data;
2685 unsigned char *old_ptr = old_buffer + old_size;
2686 if (new_size != old_size)
2688 chunk->data = obstack_alloc (state->chunk_obstack, new_size);
2689 chunk->size = new_size;
2691 new_ptr = chunk->data + new_size;
2693 /* We do the relocations from back to front, because
2694 the relocations are in reverse order. */
2695 for (reloc = block->u.relocations; ; reloc = reloc->next)
2697 /* new_ptr and old_ptr point into the old and new buffers,
2698 respectively. (If no relocations cause the buffer to
2699 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2700 The bytes at higher address have been copied and relocations
2701 handled; those at lower addresses remain to process. */
2703 /* Lower old index of piece to be copied with no relocation.
2704 I.e. high index of the first piece that does need relocation. */
2705 int start = reloc == NULL ? 0
2706 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2707 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2709 : reloc->offset + 2;
2712 int n = (old_ptr - old_buffer) - start;
2716 memcpy (new_ptr, old_ptr, n);
2717 if (old_ptr == old_buffer)
2720 new_offset = new_ptr - chunk->data;
2721 new_offset -= (reloc->kind == -1 ? 2 : 4);
2722 if (reloc->kind == 0)
2725 value = GET_u4 (old_ptr);
2727 else if (reloc->kind == BLOCK_START_RELOC)
2733 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2735 int where = block->pc + reloc->offset;
2736 int pad = ((where + 3) & ~3) - where;
2744 value = GET_u2 (old_ptr);
/* Re-bias the stored offset so it is relative to the final pc.  */
2746 value += reloc->label->pc - (block->pc + new_offset);
/* Emit VALUE big-endian, writing backwards (2 or 4 bytes).  */
2747 *--new_ptr = (unsigned char) value; value >>= 8;
2748 *--new_ptr = (unsigned char) value; value >>= 8;
2749 if (reloc->kind != -1)
2751 *--new_ptr = (unsigned char) value; value >>= 8;
2752 *--new_ptr = (unsigned char) value;
2754 if (reloc->kind > BLOCK_START_RELOC)
2756 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2758 *--new_ptr = reloc->kind;
2760 else if (reloc->kind < -1)
2762 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2764 *--new_ptr = OPCODE_goto_w;
/* The negated kind encodes the inverted conditional opcode.  */
2767 *--new_ptr = - reloc->kind;
2770 if (new_ptr != chunk->data)
2773 state->code_length = pc;
/* Initialize STATE for writing one class file.  All chunks are
   allocated from the obstack WORK; the constant pool and the
   per-method local-variable and bytecode buffers start empty.  */
2777 init_jcf_state (struct jcf_partial *state, struct obstack *work)
2779 state->chunk_obstack = work;
2780 state->first = state->chunk = NULL;
2781 CPOOL_INIT (&state->cpool);
2782 BUFFER_INIT (&state->localvars);
2783 BUFFER_INIT (&state->bytecode);
/* Reset the per-method fields of STATE before generating code for
   METHOD: block list, line-number and local-variable bookkeeping,
   label stack, exception-handler list, and the code buffers.  */
2787 init_jcf_method (struct jcf_partial *state, tree method)
2789 state->current_method = method;
2790 state->blocks = state->last_block = NULL;
2791 state->linenumber_count = 0;
2792 state->first_lvar = state->last_lvar = NULL;
2793 state->lvar_count = 0;
2794 state->labeled_blocks = NULL;
2795 state->code_length = 0;
2796 BUFFER_RESET (&state->bytecode);
2797 BUFFER_RESET (&state->localvars);
2799 state->code_SP_max = 0;
2800 state->handlers = NULL;
2801 state->last_handler = NULL;
2802 state->num_handlers = 0;
2803 state->num_finalizers = 0;
2804 state->return_value_decl = NULL_TREE;
/* Release all storage owned by STATE: the constant pool, and every
   chunk allocated from the obstack since STATE->first.  */
2808 release_jcf_state (struct jcf_partial *state)
2810 CPOOL_FINISH (&state->cpool);
2811 obstack_free (state->chunk_obstack, state->first);
2814 /* Generate and return a list of chunks containing the class CLAS
2815 in the .class file representation. The list can be written to a
2816 .class file using write_chunks. Allocate chunks from obstack WORK. */
/* Cached identifier for the "SourceFile" attribute name.  */
2818 static GTY(()) tree SourceFile_node;
/* Generate the complete .class file image for CLAS as a chain of
   chunks (returned as STATE->first).  Emits, in order: the magic
   number and version, a placeholder chunk for the constant pool
   (filled in last, since emitting fields/methods adds constants),
   the class header (access flags, this/super class, interfaces),
   all field_info and method_info entries with their attributes
   (ConstantValue, Synthetic, Deprecated, Code with LineNumberTable /
   LocalVariableTable, Exceptions), and finally the class attributes
   (SourceFile, gcj marker, InnerClasses, Deprecated).
   NOTE(review): some interior lines are elided in this extract.  */
2819 static struct chunk *
2820 generate_classfile (tree clas, struct jcf_partial *state)
2822 struct chunk *cpool_chunk;
2823 const char *source_file, *s;
2826 char *fields_count_ptr;
2827 int fields_count = 0;
2828 char *methods_count_ptr;
2829 int methods_count = 0;
2832 = clas == object_type_node ? 0
2833 : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));
2835 ptr = append_chunk (NULL, 8, state);
2836 PUT4 (0xCafeBabe); /* Magic number */
2837 PUT2 (3); /* Minor version */
2838 PUT2 (45); /* Major version */
2840 append_chunk (NULL, 0, state);
2841 cpool_chunk = state->chunk;
2843 /* Next allocate the chunk containing access_flags through fields_count. */
2844 if (clas == object_type_node)
2847 i = 8 + 2 * total_supers;
2848 ptr = append_chunk (NULL, i, state);
2849 i = get_access_flags (TYPE_NAME (clas));
2850 if (! (i & ACC_INTERFACE))
2852 PUT2 (i); /* access_flags */
2853 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2854 if (clas == object_type_node)
2856 PUT2(0); /* super_class */
2857 PUT2(0); /* interfaces_count */
2861 tree basetypes = TYPE_BINFO_BASETYPES (clas);
/* Basetype 0 is the superclass; the rest are implemented interfaces.  */
2862 tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
2863 int j = find_class_constant (&state->cpool, base);
2864 PUT2 (j); /* super_class */
2865 PUT2 (total_supers - 1); /* interfaces_count */
2866 for (i = 1; i < total_supers; i++)
2868 base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
2869 j = find_class_constant (&state->cpool, base);
/* Remember where fields_count goes; it is back-patched below.  */
2873 fields_count_ptr = ptr;
/* Emit a field_info entry for each named, non-artificial field.  */
2875 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
2877 int have_value, attr_count = 0;
2878 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
2880 ptr = append_chunk (NULL, 8, state);
2881 i = get_access_flags (part); PUT2 (i);
2882 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
2883 i = find_utf8_constant (&state->cpool,
2884 build_java_signature (TREE_TYPE (part)));
/* A ConstantValue attribute is only valid for static final fields
   of primitive or String type with a constant initializer.  */
2886 have_value = DECL_INITIAL (part) != NULL_TREE
2887 && FIELD_STATIC (part) && CONSTANT_VALUE_P (DECL_INITIAL (part))
2888 && FIELD_FINAL (part)
2889 && (JPRIMITIVE_TYPE_P (TREE_TYPE (part))
2890 || TREE_TYPE (part) == string_ptr_type_node);
2894 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
2895 || FIELD_SYNTHETIC (part))
2897 if (FIELD_DEPRECATED (part))
2900 PUT2 (attr_count); /* attributes_count */
2903 tree init = DECL_INITIAL (part);
2904 static tree ConstantValue_node = NULL_TREE;
2905 if (TREE_TYPE (part) != TREE_TYPE (init))
2906 fatal_error ("field initializer type mismatch");
2907 ptr = append_chunk (NULL, 8, state);
2908 if (ConstantValue_node == NULL_TREE)
2909 ConstantValue_node = get_identifier ("ConstantValue");
2910 i = find_utf8_constant (&state->cpool, ConstantValue_node);
2911 PUT2 (i); /* attribute_name_index */
2912 PUT4 (2); /* attribute_length */
2913 i = find_constant_index (init, state); PUT2 (i);
2915 /* Emit the "Synthetic" attribute for val$<x> and this$<n>
2916 fields and other fields which need it. */
2917 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
2918 || FIELD_SYNTHETIC (part))
2919 ptr = append_synthetic_attribute (state);
2920 if (FIELD_DEPRECATED (part))
2921 append_deprecated_attribute (state);
/* Back-patch the field count now that it is known.  */
2924 ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
2926 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
/* Emit a method_info entry for each method of the class.  */
2929 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
2931 struct jcf_block *block;
2932 tree function_body = DECL_FUNCTION_BODY (part);
2933 tree body = function_body == NULL_TREE ? NULL_TREE
2934 : BLOCK_EXPR_BODY (function_body);
2935 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
2937 tree type = TREE_TYPE (part);
2938 tree save_function = current_function_decl;
2939 int synthetic_p = 0;
2941 /* Invisible Miranda methods shouldn't end up in the .class
2943 if (METHOD_INVISIBLE (part))
2946 current_function_decl = part;
2947 ptr = append_chunk (NULL, 8, state);
2948 i = get_access_flags (part); PUT2 (i);
2949 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
2950 i = find_utf8_constant (&state->cpool, build_java_signature (type));
2952 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
2954 /* Make room for the Synthetic attribute (of zero length.) */
2955 if (DECL_FINIT_P (part)
2956 || DECL_INSTINIT_P (part)
2957 || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
2958 || TYPE_DOT_CLASS (clas) == part)
2963 /* Make room for Deprecated attribute. */
2964 if (METHOD_DEPRECATED (part))
2967 PUT2 (i); /* attributes_count */
2970 ptr = append_synthetic_attribute (state);
2972 if (body != NULL_TREE)
2974 int code_attributes_count = 0;
2975 static tree Code_node = NULL_TREE;
2978 struct jcf_handler *handler;
2979 if (Code_node == NULL_TREE)
2980 Code_node = get_identifier ("Code");
2981 ptr = append_chunk (NULL, 14, state);
2982 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
2984 init_jcf_method (state, part);
2985 get_jcf_label_here (state); /* Force a first block. */
2986 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2987 localvar_alloc (t, state);
2988 state->num_jsrs = 0;
2989 generate_bytecode_insns (body, IGNORE_TARGET, state);
2990 if (CAN_COMPLETE_NORMALLY (body))
2992 if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
2995 OP1 (OPCODE_return);
2997 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2998 maybe_free_localvar (t, state, 1);
2999 if (state->return_value_decl != NULL_TREE)
3000 maybe_free_localvar (state->return_value_decl, state, 1);
3001 finish_jcf_block (state);
3002 perform_relocations (state);
/* Compute the Code attribute_length: fixed header + code +
   exception table + nested attributes.  */
3005 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
3006 if (state->linenumber_count > 0)
3008 code_attributes_count++;
3009 i += 8 + 4 * state->linenumber_count;
3011 if (state->lvar_count > 0)
3013 code_attributes_count++;
3014 i += 8 + 10 * state->lvar_count;
3016 UNSAFE_PUT4 (i); /* attribute_length */
3017 UNSAFE_PUT2 (state->code_SP_max); /* max_stack */
3018 UNSAFE_PUT2 (localvar_max); /* max_locals */
3019 UNSAFE_PUT4 (state->code_length);
3021 /* Emit the exception table. */
3022 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
3023 PUT2 (state->num_handlers); /* exception_table_length */
3024 handler = state->handlers;
3025 for (; handler != NULL; handler = handler->next)
3028 PUT2 (handler->start_label->pc);
3029 PUT2 (handler->end_label->pc);
3030 PUT2 (handler->handler_label->pc);
/* A NULL type means a catch-all (finally) handler: index 0.  */
3031 if (handler->type == NULL_TREE)
3034 type_index = find_class_constant (&state->cpool,
3039 ptr = append_chunk (NULL, 2, state);
3040 PUT2 (code_attributes_count);
3042 /* Write the LineNumberTable attribute. */
3043 if (state->linenumber_count > 0)
3045 static tree LineNumberTable_node = NULL_TREE;
3046 ptr = append_chunk (NULL,
3047 8 + 4 * state->linenumber_count, state);
3048 if (LineNumberTable_node == NULL_TREE)
3049 LineNumberTable_node = get_identifier ("LineNumberTable");
3050 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3051 PUT2 (i); /* attribute_name_index */
3052 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
3053 i = state->linenumber_count; PUT2 (i);
3054 for (block = state->blocks; block != NULL; block = block->next)
3056 int line = block->linenumber;
3065 /* Write the LocalVariableTable attribute. */
3066 if (state->lvar_count > 0)
3068 static tree LocalVariableTable_node = NULL_TREE;
3069 struct localvar_info *lvar = state->first_lvar;
3070 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3071 if (LocalVariableTable_node == NULL_TREE)
3072 LocalVariableTable_node = get_identifier("LocalVariableTable");
3073 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3074 PUT2 (i); /* attribute_name_index */
3075 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
3076 i = state->lvar_count; PUT2 (i);
3077 for ( ; lvar != NULL; lvar = lvar->next)
3079 tree name = DECL_NAME (lvar->decl);
3080 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
3081 i = lvar->start_label->pc; PUT2 (i);
/* LocalVariableTable stores (start_pc, length) pairs.  */
3082 i = lvar->end_label->pc - i; PUT2 (i);
3083 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3084 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
3085 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
/* Write the Exceptions attribute listing declared throws.  */
3089 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3091 tree t = DECL_FUNCTION_THROWS (part);
3092 int throws_count = list_length (t);
3093 static tree Exceptions_node = NULL_TREE;
3094 if (Exceptions_node == NULL_TREE)
3095 Exceptions_node = get_identifier ("Exceptions");
3096 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3097 i = find_utf8_constant (&state->cpool, Exceptions_node);
3098 PUT2 (i); /* attribute_name_index */
3099 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
3100 i = throws_count; PUT2 (i);
3101 for (; t != NULL_TREE; t = TREE_CHAIN (t))
3103 i = find_class_constant (&state->cpool, TREE_VALUE (t));
3108 if (METHOD_DEPRECATED (part))
3109 append_deprecated_attribute (state);
3112 current_function_decl = save_function;
/* Back-patch the method count now that it is known.  */
3114 ptr = methods_count_ptr; UNSAFE_PUT2 (methods_count);
/* Strip any directory prefix from the source file name.  */
3116 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3117 for (s = source_file; ; s++)
3122 if (ch == '/' || ch == '\\')
3125 ptr = append_chunk (NULL, 10, state);
3127 i = 1; /* Source file always exists as an attribute */
3128 if (INNER_CLASS_TYPE_P (clas) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas)))
3130 if (clas == object_type_node)
3132 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3135 PUT2 (i); /* attributes_count */
3137 /* generate the SourceFile attribute. */
3138 if (SourceFile_node == NULL_TREE)
3140 SourceFile_node = get_identifier ("SourceFile");
3143 i = find_utf8_constant (&state->cpool, SourceFile_node);
3144 PUT2 (i); /* attribute_name_index */
3146 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3148 append_gcj_attribute (state, clas);
3149 append_innerclasses_attribute (state, clas);
3150 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3151 append_deprecated_attribute (state);
3153 /* Now, finally, generate the contents of the constant pool chunk. */
3154 i = count_constant_pool_bytes (&state->cpool);
3155 ptr = obstack_alloc (state->chunk_obstack, i);
3156 cpool_chunk->data = ptr;
3157 cpool_chunk->size = i;
3158 write_constant_pool (&state->cpool, ptr, i);
3159 return state->first;
/* Cached identifier for the "Synthetic" attribute name.  */
3162 static GTY(()) tree Synthetic_node;
/* Append a zero-length "Synthetic" attribute to STATE's chunk list
   and return a pointer past the bytes written.  */
3163 static unsigned char *
3164 append_synthetic_attribute (struct jcf_partial *state)
3166 unsigned char *ptr = append_chunk (NULL, 6, state);
3169 if (Synthetic_node == NULL_TREE)
3171 Synthetic_node = get_identifier ("Synthetic");
3173 i = find_utf8_constant (&state->cpool, Synthetic_node);
3174 PUT2 (i); /* Attribute string index */
3175 PUT4 (0); /* Attribute length */
/* Append a zero-length "Deprecated" attribute to STATE's chunk list.  */
3181 append_deprecated_attribute (struct jcf_partial *state)
3183 unsigned char *ptr = append_chunk (NULL, 6, state);
3186 i = find_utf8_constant (&state->cpool, get_identifier ("Deprecated"));
3187 PUT2 (i); /* Attribute string index */
3188 PUT4 (0); /* Attribute length */
/* Append the zero-length "gnu.gcj.gcj-compiled" marker attribute for
   CLASS to STATE.  NOTE(review): the body of the object_type_node
   test is elided in this extract — presumably an early return, so the
   marker is emitted only for java.lang.Object; confirm against the
   full source.  */
3192 append_gcj_attribute (struct jcf_partial *state, tree class)
3197 if (class != object_type_node)
3200 ptr = append_chunk (NULL, 6, state); /* 2+4 */
3201 i = find_utf8_constant (&state->cpool,
3202 get_identifier ("gnu.gcj.gcj-compiled"));
3203 PUT2 (i); /* Attribute string index */
3204 PUT4 (0); /* Attribute length */
/* Cached identifier for the "InnerClasses" attribute name.  */
3207 static tree InnerClasses_node;
/* Append an "InnerClasses" attribute for CLASS to STATE, listing
   every inner class visible from CLASS: the class itself, its
   enclosing classes (walking up via DECL_CONTEXT), and the classes
   it declares.  Does nothing when CLASS is neither an inner class
   nor declares any.  The attribute length and entry count are
   back-patched once the number of entries is known.  */
3209 append_innerclasses_attribute (struct jcf_partial *state, tree class)
3211 tree orig_decl = TYPE_NAME (class);
3214 unsigned char *ptr, *length_marker, *number_marker;
3216 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3219 ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3221 if (InnerClasses_node == NULL_TREE)
3223 InnerClasses_node = get_identifier ("InnerClasses");
3225 i = find_utf8_constant (&state->cpool, InnerClasses_node);
3227 length_marker = ptr; PUT4 (0); /* length, to be later patched */
3228 number_marker = ptr; PUT2 (0); /* number of classes, tblp */
3230 /* Generate the entries: all inner classes visible from the one we
3231 process: itself, up and down. */
3232 while (class && INNER_CLASS_TYPE_P (class))
3236 decl = TYPE_NAME (class);
3237 n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3238 IDENTIFIER_LENGTH (DECL_NAME (decl));
/* Scan backwards to the last `$' to isolate the simple inner name.  */
3240 while (n[-1] != '$')
3242 append_innerclasses_attribute_entry (state, decl, get_identifier (n));
3245 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
3249 for (current = DECL_INNER_CLASS_LIST (decl);
3250 current; current = TREE_CHAIN (current))
3252 append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3253 TREE_VALUE (current));
/* Each inner-class entry is 8 bytes; +2 for the count field.  */
3257 ptr = length_marker; PUT4 (8*length+2);
3258 ptr = number_marker; PUT2 (length);
/* Emit one 8-byte inner-class entry for DECL (simple name NAME) into
   STATE: inner_class_info_index, outer_class_info_index,
   inner_name_index, inner_class_access_flags.  */
3262 append_innerclasses_attribute_entry (struct jcf_partial *state,
3263 tree decl, tree name)
3266 int ocii = 0, ini = 0;
3267 unsigned char *ptr = append_chunk (NULL, 8, state);
3269 icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3271 /* Sun's implementation seems to generate ocii to 0 for inner
3272 classes (which aren't considered members of the class they're
3273 in.) The specs are saying that if the class is anonymous,
3274 inner_name_index must be zero. */
3275 if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl)))
3277 ocii = find_class_constant (&state->cpool,
3278 TREE_TYPE (DECL_CONTEXT (decl)));
3279 ini = find_utf8_constant (&state->cpool, name);
3281 icaf = get_access_flags (decl);
3283 PUT2 (icii); PUT2 (ocii); PUT2 (ini); PUT2 (icaf);
/* Return a freshly-malloc'd pathname for the .class file of CLAS.
   The class name has `.' replaced by DIR_SEPARATOR; the directory
   part comes from either the .java source file's directory or
   jcf_write_base_directory (the -d option).  Any missing
   subdirectories under the base are created with mkdir.  The caller
   owns (and frees) the returned string.  */
3287 make_class_file_name (tree clas)
3289 const char *dname, *cname, *slash;
3294 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3295 "", '.', DIR_SEPARATOR,
3297 if (jcf_write_base_directory == NULL)
3299 /* Make sure we put the class file into the .java file's
3300 directory, and not into some subdirectory thereof. */
3302 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3303 slash = strrchr (dname, DIR_SEPARATOR);
3304 #ifdef DIR_SEPARATOR_2
3306 slash = strrchr (dname, DIR_SEPARATOR_2);
3312 sep = DIR_SEPARATOR;
/* Keep only the last path component of the class name.  */
3317 t = strrchr (cname, DIR_SEPARATOR);
3325 dname = jcf_write_base_directory;
3327 s = strrchr (dname, DIR_SEPARATOR);
3328 #ifdef DIR_SEPARATOR_2
3330 s = strrchr (dname, DIR_SEPARATOR_2);
3335 sep = DIR_SEPARATOR;
3337 slash = dname + strlen (dname);
/* Assemble DIR + SEP + CNAME into a new buffer.  */
3340 r = xmalloc (slash - dname + strlen (cname) + 2);
3341 strncpy (r, dname, slash - dname);
3342 r[slash - dname] = sep;
3343 strcpy (&r[slash - dname + 1], cname);
3345 /* We try to make new directories when we need them. We only do
3346 this for directories which "might not" exist. For instance, we
3347 assume the `-d' directory exists, but we don't assume that any
3348 subdirectory below it exists. It might be worthwhile to keep
3349 track of which directories we've created to avoid gratuitous
3351 dname = r + (slash - dname) + 1;
3354 char *s = strchr (dname, sep);
3358 if (stat (r, &sb) == -1
3359 /* Try to make it. */
3360 && mkdir (r, 0755) == -1)
3361 fatal_error ("can't create directory %s: %m", r);
3364 /* Skip consecutive separators. */
3365 for (dname = s + 1; *dname && *dname == sep; ++dname)
3372 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3373 The output .class file name is make_class_file_name(CLAS). */
/* Write out the class CLAS as a .class file named by
   make_class_file_name.  The file is first written to a ".tmp"
   sibling and then renamed into place, so concurrent compiler
   instances never observe a partially-written class file.  Any I/O
   failure is fatal.  */
3376 write_classfile (tree clas)
3378 struct obstack *work = &temporary_obstack;
3379 struct jcf_partial state[1];
3380 char *class_file_name = make_class_file_name (clas);
3381 struct chunk *chunks;
3383 if (class_file_name != NULL)
3386 char *temporary_file_name;
3388 /* The .class file is initially written to a ".tmp" file so that
3389 if multiple instances of the compiler are running at once
3390 they do not see partially formed class files. */
3391 temporary_file_name = concat (class_file_name, ".tmp", NULL);
3392 stream = fopen (temporary_file_name, "wb");
3394 fatal_error ("can't open %s for writing: %m", temporary_file_name);
3396 jcf_dependency_add_target (class_file_name);
3397 init_jcf_state (state, work);
3398 chunks = generate_classfile (clas, state);
3399 write_chunks (stream, chunks);
/* fclose flushes buffered output; a failure here means the class
   file on disk would be incomplete.  */
3400 if (fclose (stream))
3401 fatal_error ("error closing %s: %m", temporary_file_name);
3403 /* If a file named by the string pointed to by `new' exists
3404 prior to the call to the `rename' function, the behaviour
3405 is implementation-defined. ISO 9899-1990 7.9.4.2.
3407 For example, on Win32 with MSVCRT, it is an error. */
3409 unlink (class_file_name);
3411 if (rename (temporary_file_name, class_file_name) == -1)
3413 remove (temporary_file_name);
3414 fatal_error ("can't create %s: %m", class_file_name);
3416 free (temporary_file_name);
3417 free (class_file_name);
3419 release_jcf_state (state);
3423 string concatenation
3424 synchronized statement
3427 #include "gt-java-jcf-write.h"