1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15 You should have received a copy of the GNU General Public License
16 along with GNU CC; see the file COPYING. If not, write to
17 the Free Software Foundation, 59 Temple Place - Suite 330,
18 Boston, MA 02111-1307, USA.
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
29 #include "java-tree.h"
34 #include "java-opcodes.h"
35 #include "parse.h" /* for BLOCK_EXPR_BODY */
41 #define DIR_SEPARATOR '/'
44 extern struct obstack temporary_obstack;
46 /* Base directory in which `.class' files should be written.
47 NULL means to put the file into the same directory as the
48 corresponding .java file. */
49 char *jcf_write_base_directory = NULL;
/* Make sure bytecode.data is big enough for at least N more bytes.
   Grows the method's bytecode buffer on demand; CHECK_OP verifies the
   buffer pointer is still in bounds when checking is enabled. */

#define RESERVE(N) \
  do { CHECK_OP (state); \
    if (state->bytecode.ptr + (N) > state->bytecode.limit) \
      buffer_grow (&state->bytecode, N); } while (0)
/* Add a 1-byte instruction/operand I to bytecode.data,
   assuming space has already been RESERVE'd. */

#define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP (state))

/* Like OP1, but I is a 2-byte big endian integer. */

#define OP2(I) \
  do { int _i = (I); OP1 (_i >> 8);  OP1 (_i); CHECK_OP (state); } while (0)

/* Like OP1, but I is a 4-byte big endian integer. */

#define OP4(I) \
  do { int _i = (I);  OP1 (_i >> 24);  OP1 (_i >> 16); \
       OP1 (_i >> 8); OP1 (_i); CHECK_OP (state); } while (0)
/* Macro to call each time we push I words on the JVM stack.
   Tracks the running stack depth and its high-water mark (needed for
   the Code attribute's max_stack field). */

#define NOTE_PUSH(I) \
  do { state->code_SP += (I); \
    if (state->code_SP > state->code_SP_max) \
      state->code_SP_max = state->code_SP; } while (0)

/* Macro to call each time we pop I words from the JVM stack.
   A negative stack depth indicates a code-generation bug, so abort. */

#define NOTE_POP(I) \
  do { state->code_SP -= (I); if (state->code_SP < 0) abort (); } while (0)
/* A chunk or segment of a .class file.  Chunks are kept on a singly
   linked list and written out in order by write_chunks. */

struct chunk
{
  /* The next segment of this .class file. */
  struct chunk *next;

  /* The actual data in this segment to be written to the .class file. */
  unsigned char *data;

  /* The size of the segment to be written to the .class file. */
  int size;
};
100 #define PENDING_CLEANUP_PC (-3)
101 #define PENDING_EXIT_PC (-2)
102 #define UNDEFINED_PC (-1)
104 /* Each "block" represents a label plus the bytecode instructions following.
105 There may be branches out of the block, but no incoming jumps, except
106 to the beginning of the block.
108 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
109 associated code yet), but it is an undefined label.
114 /* For blocks that that are defined, the next block (in pc order).
115 For blocks that are not-yet-defined the end label of a LABELED_BLOCK_EXPR
116 or a cleanup expression (from a TRY_FINALLY_EXPR),
117 this is the next (outer) such end label, in a stack headed by
118 labeled_blocks in jcf_partial. */
119 struct jcf_block *next;
121 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
122 pc is PENDING_EXIT_PC.
123 In the not-yet-defined end label for pending cleanup subroutine,
124 pc is PENDING_CLEANUP_PC.
125 For other not-yet-defined labels, pc is UNDEFINED_PC.
127 If the label has been defined:
128 Until perform_relocations is finished, this is the maximum possible
129 value of the bytecode offset at the begnning of this block.
130 After perform_relocations, it is the actual offset (pc). */
135 /* After finish_jcf_block is called, the actual instructions
136 contained in this block. Before that NULL, and the instructions
137 are in state->bytecode. */
141 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
142 covered by the cleanup. */
143 struct jcf_block *start_label;
147 /* Set of relocations (in reverse offset order) for this block. */
148 struct jcf_relocation *relocations;
150 /* If this block is that of the not-yet-defined end label of
151 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
152 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
157 /* A "relocation" type for the 0-3 bytes of padding at the start
158 of a tableswitch or a lookupswitch. */
159 #define SWITCH_ALIGN_RELOC 4
161 /* A relocation type for the labels in a tableswitch or a lookupswitch;
162 these are relative to the start of the instruction, but (due to
the 0-3 bytes of padding), we don't know the offset before relocation. */
164 #define BLOCK_START_RELOC 1
166 struct jcf_relocation
168 /* Next relocation for the current jcf_block. */
169 struct jcf_relocation *next;
171 /* The (byte) offset within the current block that needs to be relocated. */
172 HOST_WIDE_INT offset;
174 /* 0 if offset is a 4-byte relative offset.
175 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
176 for proper alignment in tableswitch/lookupswitch instructions.
177 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
178 to the start of the containing block.
179 -1 if offset is a 2-byte relative offset.
180 < -1 if offset is the address of an instruction with a 2-byte offset
181 that does not have a corresponding 4-byte offset version, in which
182 case the absolute value of kind is the inverted opcode.
183 > 4 if offset is the address of an instruction (such as jsr) with a
184 2-byte offset that does have a corresponding 4-byte offset version,
185 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
188 /* The label the relocation wants to actually transfer to. */
189 struct jcf_block *label;
192 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
193 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
195 /* State for single catch clause. */
199 struct jcf_handler *next;
201 struct jcf_block *start_label;
202 struct jcf_block *end_label;
203 struct jcf_block *handler_label;
205 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
209 /* State for the current switch statement. */
211 struct jcf_switch_state
213 struct jcf_switch_state *prev;
214 struct jcf_block *default_label;
216 struct jcf_relocation *cases;
218 HOST_WIDE_INT min_case, max_case;
221 /* This structure is used to contain the various pieces that will
222 become a .class file. */
228 struct obstack *chunk_obstack;
231 /* List of basic blocks for the current method. */
232 struct jcf_block *blocks;
233 struct jcf_block *last_block;
235 struct localvar_info *first_lvar;
236 struct localvar_info *last_lvar;
241 int linenumber_count;
243 /* Until perform_relocations, this is a upper bound on the number
244 of bytes (so far) in the instructions for the current method. */
247 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
248 struct jcf_block *labeled_blocks;
250 /* The current stack size (stack pointer) in the current method. */
253 /* The largest extent of stack size (stack pointer) in the current method. */
256 /* Contains a mapping from local var slot number to localvar_info. */
257 struct buffer localvars;
259 /* The buffer allocated for bytecode for the current jcf_block. */
260 struct buffer bytecode;
262 /* Chain of exception handlers for the current method. */
263 struct jcf_handler *handlers;
265 /* Last element in handlers chain. */
266 struct jcf_handler *last_handler;
268 /* Number of exception handlers for the current method. */
271 /* Number of finalizers we are currently nested within. */
274 /* If non-NULL, use this for the return value. */
275 tree return_value_decl;
277 /* Information about the current switch statement. */
278 struct jcf_switch_state *sw_state;
281 static void generate_bytecode_insns PARAMS ((tree, int, struct jcf_partial *));
282 static struct chunk * alloc_chunk PARAMS ((struct chunk *, unsigned char *,
283 int, struct obstack *));
284 static unsigned char * append_chunk PARAMS ((unsigned char *, int,
285 struct jcf_partial *));
286 static void append_chunk_copy PARAMS ((unsigned char *, int,
287 struct jcf_partial *));
288 static struct jcf_block * gen_jcf_label PARAMS ((struct jcf_partial *));
289 static void finish_jcf_block PARAMS ((struct jcf_partial *));
290 static void define_jcf_label PARAMS ((struct jcf_block *,
291 struct jcf_partial *));
292 static struct jcf_block * get_jcf_label_here PARAMS ((struct jcf_partial *));
293 static void put_linenumber PARAMS ((int, struct jcf_partial *));
294 static void localvar_alloc PARAMS ((tree, struct jcf_partial *));
295 static void localvar_free PARAMS ((tree, struct jcf_partial *));
296 static int get_access_flags PARAMS ((tree));
297 static void write_chunks PARAMS ((FILE *, struct chunk *));
298 static int adjust_typed_op PARAMS ((tree, int));
299 static void generate_bytecode_conditional PARAMS ((tree, struct jcf_block *,
300 struct jcf_block *, int,
301 struct jcf_partial *));
302 static void generate_bytecode_return PARAMS ((tree, struct jcf_partial *));
303 static void perform_relocations PARAMS ((struct jcf_partial *));
304 static void init_jcf_state PARAMS ((struct jcf_partial *, struct obstack *));
305 static void init_jcf_method PARAMS ((struct jcf_partial *, tree));
306 static void release_jcf_state PARAMS ((struct jcf_partial *));
307 static struct chunk * generate_classfile PARAMS ((tree, struct jcf_partial *));
308 static struct jcf_handler *alloc_handler PARAMS ((struct jcf_block *,
310 struct jcf_partial *));
311 static void emit_iinc PARAMS ((tree, HOST_WIDE_INT, struct jcf_partial *));
312 static void emit_reloc PARAMS ((HOST_WIDE_INT, int, struct jcf_block *,
313 struct jcf_partial *));
314 static void push_constant1 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
315 static void push_constant2 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
316 static void push_int_const PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
317 static int find_constant_wide PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
318 struct jcf_partial *));
319 static void push_long_const PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
320 struct jcf_partial *));
321 static int find_constant_index PARAMS ((tree, struct jcf_partial *));
322 static void push_long_const PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
323 struct jcf_partial *));
324 static void field_op PARAMS ((tree, int, struct jcf_partial *));
325 static void maybe_wide PARAMS ((int, int, struct jcf_partial *));
326 static void emit_dup PARAMS ((int, int, struct jcf_partial *));
327 static void emit_pop PARAMS ((int, struct jcf_partial *));
328 static void emit_load_or_store PARAMS ((tree, int, struct jcf_partial *));
329 static void emit_load PARAMS ((tree, struct jcf_partial *));
330 static void emit_store PARAMS ((tree, struct jcf_partial *));
331 static void emit_unop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
332 static void emit_binop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
333 static void emit_reloc PARAMS ((HOST_WIDE_INT, int, struct jcf_block *,
334 struct jcf_partial *));
335 static void emit_switch_reloc PARAMS ((struct jcf_block *,
336 struct jcf_partial *));
337 static void emit_case_reloc PARAMS ((struct jcf_relocation *,
338 struct jcf_partial *));
339 static void emit_if PARAMS ((struct jcf_block *, int, int,
340 struct jcf_partial *));
341 static void emit_goto PARAMS ((struct jcf_block *, struct jcf_partial *));
342 static void emit_jsr PARAMS ((struct jcf_block *, struct jcf_partial *));
343 static void call_cleanups PARAMS ((struct jcf_block *, struct jcf_partial *));
344 static char *make_class_file_name PARAMS ((tree));
345 static unsigned char *append_synthetic_attribute PARAMS ((struct jcf_partial *));
346 static void append_innerclasses_attribute PARAMS ((struct jcf_partial *, tree));
347 static void append_innerclasses_attribute_entry PARAMS ((struct jcf_partial *, tree, tree));
348 static void append_gcj_attribute PARAMS ((struct jcf_partial *, tree));
350 /* Utility macros for appending (big-endian) data to a buffer.
351 We assume a local variable 'ptr' points into where we want to
352 write next, and we assume enough space has been allocated. */
#ifdef ENABLE_JC1_CHECKING
static int CHECK_PUT PARAMS ((void *, struct jcf_partial *, int));

/* Verify that writing I bytes at PTR stays inside the current chunk;
   abort on overflow.  Compiled away unless checking is enabled. */
static int
CHECK_PUT (ptr, state, i)
     void *ptr;
     struct jcf_partial *state;
     int i;
{
  if ((unsigned char *) ptr < state->chunk->data
      || (unsigned char *) ptr + i > state->chunk->data + state->chunk->size)
    abort ();

  return 0;
}
#else
#define CHECK_PUT(PTR, STATE, I) ((void) 0)
#endif
373 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
374 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
375 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
376 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
378 /* There are some cases below where CHECK_PUT is guaranteed to fail.
379 Use the following macros in those specific cases. */
380 #define UNSAFE_PUT1(X) (*ptr++ = (X))
381 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
382 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
383 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
386 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
387 Set the data and size fields to DATA and SIZE, respectively.
388 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
390 static struct chunk *
391 alloc_chunk (last, data, size, work)
395 struct obstack *work;
397 struct chunk *chunk = (struct chunk *)
398 obstack_alloc (work, sizeof(struct chunk));
400 if (data == NULL && size > 0)
401 data = obstack_alloc (work, size);
#ifdef ENABLE_JC1_CHECKING
static int CHECK_OP PARAMS ((struct jcf_partial *));

/* Verify that the bytecode pointer has not run past the buffer limit;
   abort if it has.  Compiled away unless checking is enabled. */
static int
CHECK_OP (state)
     struct jcf_partial *state;
{
  if (state->bytecode.ptr > state->bytecode.limit)
    abort ();

  return 0;
}
#else
#define CHECK_OP(STATE) ((void) 0)
#endif
427 static unsigned char *
428 append_chunk (data, size, state)
431 struct jcf_partial *state;
433 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
434 if (state->first == NULL)
435 state->first = state->chunk;
436 return state->chunk->data;
/* Append a new chunk and fill it with a copy of the SIZE bytes at DATA. */

static void
append_chunk_copy (data, size, state)
     unsigned char *data;
     int size;
     struct jcf_partial *state;
{
  unsigned char *ptr = append_chunk (NULL, size, state);
  memcpy (ptr, data, size);
}
449 static struct jcf_block *
450 gen_jcf_label (state)
451 struct jcf_partial *state;
453 struct jcf_block *block = (struct jcf_block *)
454 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
456 block->linenumber = -1;
457 block->pc = UNDEFINED_PC;
462 finish_jcf_block (state)
463 struct jcf_partial *state;
465 struct jcf_block *block = state->last_block;
466 struct jcf_relocation *reloc;
467 int code_length = BUFFER_LENGTH (&state->bytecode);
468 int pc = state->code_length;
469 append_chunk_copy (state->bytecode.data, code_length, state);
470 BUFFER_RESET (&state->bytecode);
471 block->v.chunk = state->chunk;
473 /* Calculate code_length to the maximum value it can have. */
474 pc += block->v.chunk->size;
475 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
477 int kind = reloc->kind;
478 if (kind == SWITCH_ALIGN_RELOC)
480 else if (kind > BLOCK_START_RELOC)
481 pc += 2; /* 2-byte offset may grow to 4-byte offset */
483 pc += 5; /* May need to add a goto_w. */
485 state->code_length = pc;
489 define_jcf_label (label, state)
490 struct jcf_block *label;
491 struct jcf_partial *state;
493 if (state->last_block != NULL)
494 finish_jcf_block (state);
495 label->pc = state->code_length;
496 if (state->blocks == NULL)
497 state->blocks = label;
499 state->last_block->next = label;
500 state->last_block = label;
502 label->u.relocations = NULL;
505 static struct jcf_block *
506 get_jcf_label_here (state)
507 struct jcf_partial *state;
509 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
510 return state->last_block;
513 struct jcf_block *label = gen_jcf_label (state);
514 define_jcf_label (label, state);
519 /* Note a line number entry for the current PC and given LINE. */
522 put_linenumber (line, state)
524 struct jcf_partial *state;
526 struct jcf_block *label = get_jcf_label_here (state);
527 if (label->linenumber > 0)
529 label = gen_jcf_label (state);
530 define_jcf_label (label, state);
532 label->linenumber = line;
533 state->linenumber_count++;
536 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
537 in the range (START_LABEL, END_LABEL). */
539 static struct jcf_handler *
540 alloc_handler (start_label, end_label, state)
541 struct jcf_block *start_label;
542 struct jcf_block *end_label;
543 struct jcf_partial *state;
545 struct jcf_handler *handler = (struct jcf_handler *)
546 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
547 handler->start_label = start_label;
548 handler->end_label = end_label;
549 handler->handler_label = get_jcf_label_here (state);
550 if (state->handlers == NULL)
551 state->handlers = handler;
553 state->last_handler->next = handler;
554 state->last_handler = handler;
555 handler->next = NULL;
556 state->num_handlers++;
561 /* The index of jvm local variable allocated for this DECL.
562 This is assigned when generating .class files;
563 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
564 (We don't allocate DECL_LANG_SPECIFIC for locals from Java sourc code.) */
566 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
570 struct localvar_info *next;
573 struct jcf_block *start_label;
574 struct jcf_block *end_label;
577 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
578 #define localvar_max \
579 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
582 localvar_alloc (decl, state)
584 struct jcf_partial *state;
586 struct jcf_block *start_label = get_jcf_label_here (state);
587 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
589 register struct localvar_info *info;
590 register struct localvar_info **ptr = localvar_buffer;
591 register struct localvar_info **limit
592 = (struct localvar_info**) state->localvars.ptr;
593 for (index = 0; ptr < limit; index++, ptr++)
596 && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
601 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
602 ptr = (struct localvar_info**) state->localvars.data + index;
603 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
605 info = (struct localvar_info *)
606 obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
609 ptr[1] = (struct localvar_info *)(~0);
610 DECL_LOCAL_INDEX (decl) = index;
612 info->start_label = start_label;
614 if (debug_info_level > DINFO_LEVEL_TERSE
615 && DECL_NAME (decl) != NULL_TREE)
617 /* Generate debugging info. */
619 if (state->last_lvar != NULL)
620 state->last_lvar->next = info;
622 state->first_lvar = info;
623 state->last_lvar = info;
629 localvar_free (decl, state)
631 struct jcf_partial *state;
633 struct jcf_block *end_label = get_jcf_label_here (state);
634 int index = DECL_LOCAL_INDEX (decl);
635 register struct localvar_info **ptr = &localvar_buffer [index];
636 register struct localvar_info *info = *ptr;
637 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
639 info->end_label = end_label;
641 if (info->decl != decl)
646 if (ptr[1] != (struct localvar_info *)(~0))
653 #define STACK_TARGET 1
654 #define IGNORE_TARGET 2
656 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
657 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
660 get_access_flags (decl)
664 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
665 if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
667 if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */
669 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
671 if (TREE_PROTECTED (decl))
672 flags |= ACC_PROTECTED;
673 if (TREE_PRIVATE (decl))
674 flags |= ACC_PRIVATE;
676 else if (TREE_CODE (decl) == TYPE_DECL)
678 if (CLASS_SUPER (decl))
680 if (CLASS_ABSTRACT (decl))
681 flags |= ACC_ABSTRACT;
682 if (CLASS_INTERFACE (decl))
683 flags |= ACC_INTERFACE;
684 if (CLASS_STATIC (decl))
686 if (CLASS_PRIVATE (decl))
687 flags |= ACC_PRIVATE;
688 if (CLASS_PROTECTED (decl))
689 flags |= ACC_PROTECTED;
690 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
691 || LOCAL_CLASS_P (TREE_TYPE (decl)))
692 flags |= ACC_PRIVATE;
693 if (CLASS_STRICTFP (decl))
699 if (TREE_CODE (decl) == FUNCTION_DECL)
701 if (METHOD_NATIVE (decl))
703 if (METHOD_STATIC (decl))
705 if (METHOD_SYNCHRONIZED (decl))
706 flags |= ACC_SYNCHRONIZED;
707 if (METHOD_ABSTRACT (decl))
708 flags |= ACC_ABSTRACT;
709 if (METHOD_STRICTFP (decl))
714 if (FIELD_STATIC (decl))
716 if (FIELD_VOLATILE (decl))
717 flags |= ACC_VOLATILE;
718 if (FIELD_TRANSIENT (decl))
719 flags |= ACC_TRANSIENT;
724 /* Write the list of segments starting at CHUNKS to STREAM. */
727 write_chunks (stream, chunks)
729 struct chunk *chunks;
731 for (; chunks != NULL; chunks = chunks->next)
732 fwrite (chunks->data, chunks->size, 1, stream);
735 /* Push a 1-word constant in the constant pool at the given INDEX.
736 (Caller is responsible for doing NOTE_PUSH.) */
739 push_constant1 (index, state)
741 struct jcf_partial *state;
756 /* Push a 2-word constant in the constant pool at the given INDEX.
757 (Caller is responsible for doing NOTE_PUSH.) */
760 push_constant2 (index, state)
762 struct jcf_partial *state;
769 /* Push 32-bit integer constant on VM stack.
770 Caller is responsible for doing NOTE_PUSH. */
773 push_int_const (i, state)
775 struct jcf_partial *state;
778 if (i >= -1 && i <= 5)
779 OP1(OPCODE_iconst_0 + i);
780 else if (i >= -128 && i < 128)
785 else if (i >= -32768 && i < 32768)
792 i = find_constant1 (&state->cpool, CONSTANT_Integer,
793 (jword)(i & 0xFFFFFFFF));
794 push_constant1 (i, state);
799 find_constant_wide (lo, hi, state)
800 HOST_WIDE_INT lo, hi;
801 struct jcf_partial *state;
803 HOST_WIDE_INT w1, w2;
804 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
805 return find_constant2 (&state->cpool, CONSTANT_Long,
806 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
809 /* Find or allocate a constant pool entry for the given VALUE.
810 Return the index in the constant pool. */
813 find_constant_index (value, state)
815 struct jcf_partial *state;
817 if (TREE_CODE (value) == INTEGER_CST)
819 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
820 return find_constant1 (&state->cpool, CONSTANT_Integer,
821 (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
823 return find_constant_wide (TREE_INT_CST_LOW (value),
824 TREE_INT_CST_HIGH (value), state);
826 else if (TREE_CODE (value) == REAL_CST)
829 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
831 words[0] = etarsingle (TREE_REAL_CST (value)) & 0xFFFFFFFF;
832 return find_constant1 (&state->cpool, CONSTANT_Float,
837 etardouble (TREE_REAL_CST (value), words);
838 return find_constant2 (&state->cpool, CONSTANT_Double,
839 (jword)(words[1-FLOAT_WORDS_BIG_ENDIAN] &
841 (jword)(words[FLOAT_WORDS_BIG_ENDIAN] &
845 else if (TREE_CODE (value) == STRING_CST)
846 return find_string_constant (&state->cpool, value);
852 /* Push 64-bit long constant on VM stack.
853 Caller is responsible for doing NOTE_PUSH. */
856 push_long_const (lo, hi, state)
857 HOST_WIDE_INT lo, hi;
858 struct jcf_partial *state;
860 HOST_WIDE_INT highpart, dummy;
861 jint lowpart = WORD_TO_INT (lo);
863 rshift_double (lo, hi, 32, 64, &highpart, &dummy, 1);
865 if (highpart == 0 && (lowpart == 0 || lowpart == 1))
868 OP1(OPCODE_lconst_0 + lowpart);
870 else if ((highpart == 0 && lowpart > 0 && lowpart < 32768)
871 || (highpart == -1 && lowpart < 0 && lowpart >= -32768))
873 push_int_const (lowpart, state);
878 push_constant2 (find_constant_wide (lo, hi, state), state);
882 field_op (field, opcode, state)
885 struct jcf_partial *state;
887 int index = find_fieldref_index (&state->cpool, field);
893 /* Returns an integer in the range 0 (for 'int') through 4 (for object
894 reference) to 7 (for 'short') which matches the pattern of how JVM
895 opcodes typically depend on the operand type. */
898 adjust_typed_op (type, max)
902 switch (TREE_CODE (type))
905 case RECORD_TYPE: return 4;
907 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
909 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
911 switch (TYPE_PRECISION (type))
913 case 8: return max < 5 ? 0 : 5;
914 case 16: return max < 7 ? 0 : 7;
920 switch (TYPE_PRECISION (type))
933 maybe_wide (opcode, index, state)
935 struct jcf_partial *state;
952 /* Compile code to duplicate with offset, where
953 SIZE is the size of the stack item to duplicate (1 or 2), abd
954 OFFSET is where to insert the result (must be 0, 1, or 2).
955 (The new words get inserted at stack[SP-size-offset].) */
958 emit_dup (size, offset, state)
960 struct jcf_partial *state;
967 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
968 else if (offset == 1)
969 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
970 else if (offset == 2)
971 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
979 emit_pop (size, state)
981 struct jcf_partial *state;
984 OP1 (OPCODE_pop - 1 + size);
988 emit_iinc (var, value, state)
991 struct jcf_partial *state;
993 int slot = DECL_LOCAL_INDEX (var);
995 if (value < -128 || value > 127 || slot >= 256)
1013 emit_load_or_store (var, opcode, state)
1014 tree var; /* Variable to load from or store into. */
1015 int opcode; /* Either OPCODE_iload or OPCODE_istore. */
1016 struct jcf_partial *state;
1018 tree type = TREE_TYPE (var);
1019 int kind = adjust_typed_op (type, 4);
1020 int index = DECL_LOCAL_INDEX (var);
1024 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
1027 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
1031 emit_load (var, state)
1033 struct jcf_partial *state;
1035 emit_load_or_store (var, OPCODE_iload, state);
1036 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1040 emit_store (var, state)
1042 struct jcf_partial *state;
1044 emit_load_or_store (var, OPCODE_istore, state);
1045 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1049 emit_unop (opcode, type, state)
1050 enum java_opcode opcode;
1051 tree type ATTRIBUTE_UNUSED;
1052 struct jcf_partial *state;
1059 emit_binop (opcode, type, state)
1060 enum java_opcode opcode;
1062 struct jcf_partial *state;
1064 int size = TYPE_IS_WIDE (type) ? 2 : 1;
1071 emit_reloc (value, kind, target, state)
1072 HOST_WIDE_INT value;
1074 struct jcf_block *target;
1075 struct jcf_partial *state;
1077 struct jcf_relocation *reloc = (struct jcf_relocation *)
1078 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1079 struct jcf_block *block = state->last_block;
1080 reloc->next = block->u.relocations;
1081 block->u.relocations = reloc;
1082 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1083 reloc->label = target;
1085 if (kind == 0 || kind == BLOCK_START_RELOC)
1087 else if (kind != SWITCH_ALIGN_RELOC)
1092 emit_switch_reloc (label, state)
1093 struct jcf_block *label;
1094 struct jcf_partial *state;
1096 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1099 /* Similar to emit_switch_reloc,
1100 but re-uses an existing case reloc. */
1103 emit_case_reloc (reloc, state)
1104 struct jcf_relocation *reloc;
1105 struct jcf_partial *state;
1107 struct jcf_block *block = state->last_block;
1108 reloc->next = block->u.relocations;
1109 block->u.relocations = reloc;
1110 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1111 reloc->kind = BLOCK_START_RELOC;
1115 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1116 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1119 emit_if (target, opcode, inv_opcode, state)
1120 struct jcf_block *target;
1121 int opcode, inv_opcode;
1122 struct jcf_partial *state;
1126 /* value is 1 byte from reloc back to start of instruction. */
1127 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
1131 emit_goto (target, state)
1132 struct jcf_block *target;
1133 struct jcf_partial *state;
1137 /* Value is 1 byte from reloc back to start of instruction. */
1138 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
1142 emit_jsr (target, state)
1143 struct jcf_block *target;
1144 struct jcf_partial *state;
1148 /* Value is 1 byte from reloc back to start of instruction. */
1149 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1152 /* Generate code to evaluate EXP. If the result is true,
1153 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1154 TRUE_BRANCH_FIRST is a code geneation hint that the
1155 TRUE_LABEL may follow right after this. (The idea is that we
1156 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1159 generate_bytecode_conditional (exp, true_label, false_label,
1160 true_branch_first, state)
1162 struct jcf_block *true_label;
1163 struct jcf_block *false_label;
1164 int true_branch_first;
1165 struct jcf_partial *state;
1167 tree exp0, exp1, type;
1168 int save_SP = state->code_SP;
1169 enum java_opcode op, negop;
1170 switch (TREE_CODE (exp))
1173 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
1177 struct jcf_block *then_label = gen_jcf_label (state);
1178 struct jcf_block *else_label = gen_jcf_label (state);
1179 int save_SP_before, save_SP_after;
1180 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1181 then_label, else_label, 1, state);
1182 define_jcf_label (then_label, state);
1183 save_SP_before = state->code_SP;
1184 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1185 true_label, false_label, 1, state);
1186 save_SP_after = state->code_SP;
1187 state->code_SP = save_SP_before;
1188 define_jcf_label (else_label, state);
1189 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1190 true_label, false_label,
1191 true_branch_first, state);
1192 if (state->code_SP != save_SP_after)
1196 case TRUTH_NOT_EXPR:
1197 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label,
1198 true_label, ! true_branch_first, state);
1200 case TRUTH_ANDIF_EXPR:
1202 struct jcf_block *next_label = gen_jcf_label (state);
1203 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1204 next_label, false_label, 1, state);
1205 define_jcf_label (next_label, state);
1206 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1207 true_label, false_label, 1, state);
1210 case TRUTH_ORIF_EXPR:
1212 struct jcf_block *next_label = gen_jcf_label (state);
1213 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1214 true_label, next_label, 1, state);
1215 define_jcf_label (next_label, state);
1216 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1217 true_label, false_label, 1, state);
1221 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1222 set it to the corresponding 1-operand if<COND> instructions. */
1226 /* The opcodes with their inverses are allocated in pairs.
1227 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1228 negop = (op & 1) ? op + 1 : op - 1;
1230 if (true_branch_first)
1232 emit_if (false_label, negop, op, state);
1233 emit_goto (true_label, state);
1237 emit_if (true_label, op, negop, state);
1238 emit_goto (false_label, state);
1242 op = OPCODE_if_icmpeq;
1245 op = OPCODE_if_icmpne;
1248 op = OPCODE_if_icmpgt;
1251 op = OPCODE_if_icmplt;
1254 op = OPCODE_if_icmpge;
1257 op = OPCODE_if_icmple;
1260 exp0 = TREE_OPERAND (exp, 0);
1261 exp1 = TREE_OPERAND (exp, 1);
1262 type = TREE_TYPE (exp0);
1263 switch (TREE_CODE (type))
1266 case POINTER_TYPE: case RECORD_TYPE:
1267 switch (TREE_CODE (exp))
1269 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1270 case NE_EXPR: op = OPCODE_if_acmpne; break;
1273 if (integer_zerop (exp1) || integer_zerop (exp0))
1275 generate_bytecode_insns (integer_zerop (exp0) ? exp1 : exp0,
1276 STACK_TARGET, state);
1277 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1278 negop = (op & 1) ? op - 1 : op + 1;
1282 generate_bytecode_insns (exp0, STACK_TARGET, state);
1283 generate_bytecode_insns (exp1, STACK_TARGET, state);
1287 generate_bytecode_insns (exp0, STACK_TARGET, state);
1288 generate_bytecode_insns (exp1, STACK_TARGET, state);
1289 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1293 if (TYPE_PRECISION (type) > 32)
1304 if (TYPE_PRECISION (type) > 32)
1306 generate_bytecode_insns (exp0, STACK_TARGET, state);
1307 generate_bytecode_insns (exp1, STACK_TARGET, state);
1315 if (integer_zerop (exp1))
1317 generate_bytecode_insns (exp0, STACK_TARGET, state);
1321 if (integer_zerop (exp0))
1325 case OPCODE_if_icmplt:
1326 case OPCODE_if_icmpge:
1329 case OPCODE_if_icmpgt:
1330 case OPCODE_if_icmple:
1336 generate_bytecode_insns (exp1, STACK_TARGET, state);
1340 generate_bytecode_insns (exp0, STACK_TARGET, state);
1341 generate_bytecode_insns (exp1, STACK_TARGET, state);
1347 generate_bytecode_insns (exp, STACK_TARGET, state);
1349 if (true_branch_first)
1351 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1352 emit_goto (true_label, state);
1356 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1357 emit_goto (false_label, state);
1361 if (save_SP != state->code_SP)
1365 /* Call pending cleanups i.e. those for surrounding TRY_FINALLY_EXPRs.
1366 but only as far out as LIMIT (since we are about to jump to the
1367 emit label that is LIMIT). */
/* NOTE(review): this listing is lossy -- the embedded original line
   numbers jump (1373, 1376, 1379+ are missing), so the function's
   braces and possibly other statements are elided here.  Comments
   below describe only the visible lines.  */
1370 call_cleanups (limit, state)
1371 struct jcf_block *limit;
1372 struct jcf_partial *state;
/* Walk the chain of labeled blocks from the innermost outward,
   stopping when LIMIT is reached (LIMIT itself is not processed).  */
1374 struct jcf_block *block = state->labeled_blocks;
1375 for (; block != limit; block = block->next)
/* A block whose pc is PENDING_CLEANUP_PC is a pending `finally'
   cleanup subroutine; call it with `jsr' so the cleanup runs before
   we transfer control out of its scope.  */
1377 if (block->pc == PENDING_CLEANUP_PC)
1378 emit_jsr (block, state);
/* Emit bytecode to return the value of EXP (or a void return when the
   current method's return type is void) from the method being compiled
   in STATE.  NOTE(review): this listing is lossy -- the embedded line
   numbers jump (case labels, braces and several statements are elided),
   so comments below cover only the visible lines.  */
1383 generate_bytecode_return (exp, state)
1385 struct jcf_partial *state;
/* The method's declared return type determines both whether we return
   a value and which `return' opcode family to use.  */
1387 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
1388 int returns_void = TREE_CODE (return_type) == VOID_TYPE;
/* Dispatch on the form of EXP.  The case labels themselves are elided
   from this listing; presumably COMPOUND_EXPR and COND_EXPR cases --
   TODO confirm against the full source.  */
1393 switch (TREE_CODE (exp))
/* (compound expression) Evaluate the first operand for its side
   effects only, then return the second operand.  */
1396 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1398 exp = TREE_OPERAND (exp, 1);
/* (conditional expression) Branch on the condition and emit a return
   for each arm recursively; no join label is needed since both arms
   return.  */
1402 struct jcf_block *then_label = gen_jcf_label (state);
1403 struct jcf_block *else_label = gen_jcf_label (state);
1404 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1405 then_label, else_label, 1, state);
1406 define_jcf_label (then_label, state);
1407 generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1408 define_jcf_label (else_label, state);
1409 generate_bytecode_return (TREE_OPERAND (exp, 2), state);
/* Default: evaluate EXP, pushing its value on the stack unless the
   method returns void.  */
1413 generate_bytecode_insns (exp,
1414 returns_void ? IGNORE_TARGET
1415 : STACK_TARGET, state);
/* Void return: run all pending cleanups (NULL limit = all of them)
   before the `return' instruction.  */
1421 call_cleanups (NULL, state);
/* Value return: pick ireturn/lreturn/freturn/dreturn/areturn based on
   the return type.  */
1425 op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
/* If there are pending finalizers, the cleanups may clobber the
   operand stack, so stash the return value in a local across the
   cleanup calls and reload it afterwards.  */
1426 if (state->num_finalizers > 0)
1428 if (state->return_value_decl == NULL_TREE)
1430 state->return_value_decl
1431 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1432 localvar_alloc (state->return_value_decl, state);
1434 emit_store (state->return_value_decl, state);
1435 call_cleanups (NULL, state);
1436 emit_load (state->return_value_decl, state);
1437 /* If we call localvar_free (state->return_value_decl, state),
1438 then we risk the save decl erroneously re-used in the
1439 finalizer.  Instead, we keep the state->return_value_decl
1440 allocated through the rest of the method.  This is not
1441 the greatest solution, but it is at least simple and safe. */
1448 /* Generate bytecode for sub-expression EXP of METHOD.
1449 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1452 generate_bytecode_insns (exp, target, state)
1455 struct jcf_partial *state;
1458 enum java_opcode jopcode;
1460 HOST_WIDE_INT value;
1465 if (exp == NULL && target == IGNORE_TARGET)
1468 type = TREE_TYPE (exp);
1470 switch (TREE_CODE (exp))
1473 if (BLOCK_EXPR_BODY (exp))
1476 tree body = BLOCK_EXPR_BODY (exp);
1477 for (local = BLOCK_EXPR_DECLS (exp); local; )
1479 tree next = TREE_CHAIN (local);
1480 localvar_alloc (local, state);
1483 /* Avoid deep recursion for long blocks. */
1484 while (TREE_CODE (body) == COMPOUND_EXPR)
1486 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1487 body = TREE_OPERAND (body, 1);
1489 generate_bytecode_insns (body, target, state);
1490 for (local = BLOCK_EXPR_DECLS (exp); local; )
1492 tree next = TREE_CHAIN (local);
1493 localvar_free (local, state);
1499 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1500 /* Normally the first operand to a COMPOUND_EXPR must complete
1501 normally. However, in the special case of a do-while
1502 statement this is not necessarily the case. */
1503 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0)))
1504 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1506 case EXPR_WITH_FILE_LOCATION:
1508 const char *saved_input_filename = input_filename;
1509 tree body = EXPR_WFL_NODE (exp);
1510 int saved_lineno = lineno;
1511 if (body == empty_stmt_node)
1513 input_filename = EXPR_WFL_FILENAME (exp);
1514 lineno = EXPR_WFL_LINENO (exp);
1515 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && lineno > 0
1516 && debug_info_level > DINFO_LEVEL_NONE)
1517 put_linenumber (lineno, state);
1518 generate_bytecode_insns (body, target, state);
1519 input_filename = saved_input_filename;
1520 lineno = saved_lineno;
1524 if (target == IGNORE_TARGET) ; /* do nothing */
1525 else if (TREE_CODE (type) == POINTER_TYPE)
1527 if (! integer_zerop (exp))
1530 OP1 (OPCODE_aconst_null);
1533 else if (TYPE_PRECISION (type) <= 32)
1535 push_int_const (TREE_INT_CST_LOW (exp), state);
1540 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1547 int prec = TYPE_PRECISION (type) >> 5;
1549 if (real_zerop (exp) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp)))
1550 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1551 else if (real_onep (exp))
1552 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1553 /* FIXME Should also use fconst_2 for 2.0f.
1554 Also, should use iconst_2/ldc followed by i2f/i2d
1555 for other float/double when the value is a small integer. */
1558 offset = find_constant_index (exp, state);
1560 push_constant1 (offset, state);
1562 push_constant2 (offset, state);
1568 push_constant1 (find_string_constant (&state->cpool, exp), state);
1572 if (TREE_STATIC (exp))
1574 field_op (exp, OPCODE_getstatic, state);
1575 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1578 /* ... fall through ... */
1580 emit_load (exp, state);
1582 case NON_LVALUE_EXPR:
1584 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1587 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1588 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1589 if (target != IGNORE_TARGET)
1591 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1594 if (! TYPE_IS_WIDE (type))
1600 tree obj = TREE_OPERAND (exp, 0);
1601 tree field = TREE_OPERAND (exp, 1);
1602 int is_static = FIELD_STATIC (field);
1603 generate_bytecode_insns (obj,
1604 is_static ? IGNORE_TARGET : target, state);
1605 if (target != IGNORE_TARGET)
1607 if (DECL_NAME (field) == length_identifier_node && !is_static
1608 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1611 OP1 (OPCODE_arraylength);
1615 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1619 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1624 case TRUTH_ANDIF_EXPR:
1625 case TRUTH_ORIF_EXPR:
1633 struct jcf_block *then_label = gen_jcf_label (state);
1634 struct jcf_block *else_label = gen_jcf_label (state);
1635 struct jcf_block *end_label = gen_jcf_label (state);
1636 generate_bytecode_conditional (exp,
1637 then_label, else_label, 1, state);
1638 define_jcf_label (then_label, state);
1639 push_int_const (1, state);
1640 emit_goto (end_label, state);
1641 define_jcf_label (else_label, state);
1642 push_int_const (0, state);
1643 define_jcf_label (end_label, state);
1649 struct jcf_block *then_label = gen_jcf_label (state);
1650 struct jcf_block *else_label = gen_jcf_label (state);
1651 struct jcf_block *end_label = gen_jcf_label (state);
1652 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1653 then_label, else_label, 1, state);
1654 define_jcf_label (then_label, state);
1655 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1656 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1657 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1658 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1659 emit_goto (end_label, state);
1660 define_jcf_label (else_label, state);
1661 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1662 define_jcf_label (end_label, state);
1663 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1664 if (TREE_TYPE (exp) != void_type_node)
1665 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1670 struct jcf_switch_state *sw_state = state->sw_state;
1671 struct jcf_relocation *reloc = (struct jcf_relocation *)
1672 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1673 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1675 reloc->label = get_jcf_label_here (state);
1676 reloc->offset = case_value;
1677 reloc->next = sw_state->cases;
1678 sw_state->cases = reloc;
1679 if (sw_state->num_cases == 0)
1681 sw_state->min_case = case_value;
1682 sw_state->max_case = case_value;
1686 if (case_value < sw_state->min_case)
1687 sw_state->min_case = case_value;
1688 if (case_value > sw_state->max_case)
1689 sw_state->max_case = case_value;
1691 sw_state->num_cases++;
1695 state->sw_state->default_label = get_jcf_label_here (state);
1700 /* The SWITCH_EXPR has three parts, generated in the following order:
1701 1. the switch_expression (the value used to select the correct case);
1703 3. the switch_instruction (the tableswitch/loopupswitch instruction.).
1704 After code generation, we will re-order them in the order 1, 3, 2.
1705 This is to avoid any extra GOTOs. */
1706 struct jcf_switch_state sw_state;
1707 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1708 struct jcf_block *body_last; /* Last block of the switch_body. */
1709 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1710 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1711 struct jcf_block *body_block;
1713 sw_state.prev = state->sw_state;
1714 state->sw_state = &sw_state;
1715 sw_state.cases = NULL;
1716 sw_state.num_cases = 0;
1717 sw_state.default_label = NULL;
1718 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1719 expression_last = state->last_block;
1720 /* Force a new block here. */
1721 body_block = gen_jcf_label (state);
1722 define_jcf_label (body_block, state);
1723 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1724 body_last = state->last_block;
1726 switch_instruction = gen_jcf_label (state);
1727 define_jcf_label (switch_instruction, state);
1728 if (sw_state.default_label == NULL)
1729 sw_state.default_label = gen_jcf_label (state);
1731 if (sw_state.num_cases <= 1)
1733 if (sw_state.num_cases == 0)
1735 emit_pop (1, state);
1740 push_int_const (sw_state.cases->offset, state);
1742 emit_if (sw_state.cases->label,
1743 OPCODE_if_icmpeq, OPCODE_if_icmpne, state);
1745 emit_goto (sw_state.default_label, state);
1750 /* Copy the chain of relocs into a sorted array. */
1751 struct jcf_relocation **relocs = (struct jcf_relocation **)
1752 xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1753 /* The relocs arrays is a buffer with a gap.
1754 The assumption is that cases will normally come in "runs". */
1756 int gap_end = sw_state.num_cases;
1757 struct jcf_relocation *reloc;
1758 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1760 HOST_WIDE_INT case_value = reloc->offset;
1761 while (gap_end < sw_state.num_cases)
1763 struct jcf_relocation *end = relocs[gap_end];
1764 if (case_value <= end->offset)
1766 relocs[gap_start++] = end;
1769 while (gap_start > 0)
1771 struct jcf_relocation *before = relocs[gap_start-1];
1772 if (case_value >= before->offset)
1774 relocs[--gap_end] = before;
1777 relocs[gap_start++] = reloc;
1778 /* Note we don't check for duplicates. This is
1779 handled by the parser. */
1782 if (2 * sw_state.num_cases
1783 >= sw_state.max_case - sw_state.min_case)
1784 { /* Use tableswitch. */
1786 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1787 OP1 (OPCODE_tableswitch);
1788 emit_reloc (RELOCATION_VALUE_0,
1789 SWITCH_ALIGN_RELOC, NULL, state);
1790 emit_switch_reloc (sw_state.default_label, state);
1791 OP4 (sw_state.min_case);
1792 OP4 (sw_state.max_case);
1793 for (i = sw_state.min_case; ; )
1795 reloc = relocs[index];
1796 if (i == reloc->offset)
1798 emit_case_reloc (reloc, state);
1799 if (i == sw_state.max_case)
1804 emit_switch_reloc (sw_state.default_label, state);
1809 { /* Use lookupswitch. */
1810 RESERVE(9 + 8 * sw_state.num_cases);
1811 OP1 (OPCODE_lookupswitch);
1812 emit_reloc (RELOCATION_VALUE_0,
1813 SWITCH_ALIGN_RELOC, NULL, state);
1814 emit_switch_reloc (sw_state.default_label, state);
1815 OP4 (sw_state.num_cases);
1816 for (i = 0; i < sw_state.num_cases; i++)
1818 struct jcf_relocation *reloc = relocs[i];
1819 OP4 (reloc->offset);
1820 emit_case_reloc (reloc, state);
1826 instruction_last = state->last_block;
1827 if (sw_state.default_label->pc < 0)
1828 define_jcf_label (sw_state.default_label, state);
1829 else /* Force a new block. */
1830 sw_state.default_label = get_jcf_label_here (state);
1831 /* Now re-arrange the blocks so the switch_instruction
1832 comes before the switch_body. */
1833 switch_length = state->code_length - switch_instruction->pc;
1834 switch_instruction->pc = body_block->pc;
1835 instruction_last->next = body_block;
1836 instruction_last->v.chunk->next = body_block->v.chunk;
1837 expression_last->next = switch_instruction;
1838 expression_last->v.chunk->next = switch_instruction->v.chunk;
1839 body_last->next = sw_state.default_label;
1840 body_last->v.chunk->next = NULL;
1841 state->chunk = body_last->v.chunk;
1842 for (; body_block != sw_state.default_label; body_block = body_block->next)
1843 body_block->pc += switch_length;
1845 state->sw_state = sw_state.prev;
1850 exp = TREE_OPERAND (exp, 0);
1851 if (exp == NULL_TREE)
1852 exp = empty_stmt_node;
1853 else if (TREE_CODE (exp) != MODIFY_EXPR)
1856 exp = TREE_OPERAND (exp, 1);
1857 generate_bytecode_return (exp, state);
1859 case LABELED_BLOCK_EXPR:
1861 struct jcf_block *end_label = gen_jcf_label (state);
1862 end_label->next = state->labeled_blocks;
1863 state->labeled_blocks = end_label;
1864 end_label->pc = PENDING_EXIT_PC;
1865 end_label->u.labeled_block = exp;
1866 if (LABELED_BLOCK_BODY (exp))
1867 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1868 if (state->labeled_blocks != end_label)
1870 state->labeled_blocks = end_label->next;
1871 define_jcf_label (end_label, state);
1876 tree body = TREE_OPERAND (exp, 0);
1878 if (TREE_CODE (body) == COMPOUND_EXPR
1879 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1881 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1882 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1883 struct jcf_block *head_label;
1884 struct jcf_block *body_label;
1885 struct jcf_block *end_label = gen_jcf_label (state);
1886 struct jcf_block *exit_label = state->labeled_blocks;
1887 head_label = gen_jcf_label (state);
1888 emit_goto (head_label, state);
1889 body_label = get_jcf_label_here (state);
1890 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1891 define_jcf_label (head_label, state);
1892 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1893 end_label, body_label, 1, state);
1894 define_jcf_label (end_label, state);
1899 struct jcf_block *head_label = get_jcf_label_here (state);
1900 generate_bytecode_insns (body, IGNORE_TARGET, state);
1901 if (CAN_COMPLETE_NORMALLY (body))
1902 emit_goto (head_label, state);
1908 struct jcf_block *label = state->labeled_blocks;
1909 struct jcf_block *end_label = gen_jcf_label (state);
1910 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1911 label, end_label, 0, state);
1912 define_jcf_label (end_label, state);
1915 case EXIT_BLOCK_EXPR:
1917 struct jcf_block *label = state->labeled_blocks;
1918 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1919 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1920 label = label->next;
1921 call_cleanups (label, state);
1922 emit_goto (label, state);
1926 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1927 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1928 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1929 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1932 arg = TREE_OPERAND (exp, 1);
1933 exp = TREE_OPERAND (exp, 0);
1934 type = TREE_TYPE (exp);
1935 size = TYPE_IS_WIDE (type) ? 2 : 1;
1936 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1937 && ! TREE_STATIC (exp)
1938 && TREE_CODE (type) == INTEGER_TYPE
1939 && TYPE_PRECISION (type) == 32)
1941 if (target != IGNORE_TARGET && post_op)
1942 emit_load (exp, state);
1943 emit_iinc (exp, value, state);
1944 if (target != IGNORE_TARGET && ! post_op)
1945 emit_load (exp, state);
1948 if (TREE_CODE (exp) == COMPONENT_REF)
1950 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1951 emit_dup (1, 0, state);
1952 /* Stack: ..., objectref, objectref. */
1953 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1955 /* Stack: ..., objectref, oldvalue. */
1958 else if (TREE_CODE (exp) == ARRAY_REF)
1960 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1961 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1962 emit_dup (2, 0, state);
1963 /* Stack: ..., array, index, array, index. */
1964 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1968 /* Stack: ..., array, index, oldvalue. */
1971 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1973 generate_bytecode_insns (exp, STACK_TARGET, state);
1974 /* Stack: ..., oldvalue. */
1980 if (target != IGNORE_TARGET && post_op)
1981 emit_dup (size, offset, state);
1982 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1983 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1984 /* Stack, otherwise: ..., [result, ] oldvalue. */
1985 generate_bytecode_insns (arg, STACK_TARGET, state);
1986 emit_binop ((value >= 0 ? OPCODE_iadd : OPCODE_isub)
1987 + adjust_typed_op (type, 3),
1989 if (target != IGNORE_TARGET && ! post_op)
1990 emit_dup (size, offset, state);
1991 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1992 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1993 /* Stack, otherwise: ..., [result, ] newvalue. */
1994 goto finish_assignment;
1998 tree lhs = TREE_OPERAND (exp, 0);
1999 tree rhs = TREE_OPERAND (exp, 1);
2002 /* See if we can use the iinc instruction. */
2003 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
2004 && ! TREE_STATIC (lhs)
2005 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
2006 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
2007 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
2009 tree arg0 = TREE_OPERAND (rhs, 0);
2010 tree arg1 = TREE_OPERAND (rhs, 1);
2011 HOST_WIDE_INT min_value = -32768;
2012 HOST_WIDE_INT max_value = 32767;
2013 if (TREE_CODE (rhs) == MINUS_EXPR)
2018 else if (arg1 == lhs)
2021 arg1 = TREE_OPERAND (rhs, 0);
2023 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
2025 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
2026 value = TREE_INT_CST_LOW (arg1);
2027 if ((hi_value == 0 && value <= max_value)
2028 || (hi_value == -1 && value >= min_value))
2030 if (TREE_CODE (rhs) == MINUS_EXPR)
2032 emit_iinc (lhs, value, state);
2033 if (target != IGNORE_TARGET)
2034 emit_load (lhs, state);
2040 if (TREE_CODE (lhs) == COMPONENT_REF)
2042 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
2043 STACK_TARGET, state);
2046 else if (TREE_CODE (lhs) == ARRAY_REF)
2048 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
2049 STACK_TARGET, state);
2050 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
2051 STACK_TARGET, state);
2057 /* If the rhs is a binary expression and the left operand is
2058 `==' to the lhs then we have an OP= expression. In this
2059 case we must do some special processing. */
2060 if (TREE_CODE_CLASS (TREE_CODE (rhs)) == '2'
2061 && lhs == TREE_OPERAND (rhs, 0))
2063 if (TREE_CODE (lhs) == COMPONENT_REF)
2065 tree field = TREE_OPERAND (lhs, 1);
2066 if (! FIELD_STATIC (field))
2068 /* Duplicate the object reference so we can get
2070 emit_dup (TYPE_IS_WIDE (field) ? 2 : 1, 0, state);
2073 field_op (field, (FIELD_STATIC (field)
2078 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2080 else if (TREE_CODE (lhs) == VAR_DECL
2081 || TREE_CODE (lhs) == PARM_DECL)
2083 if (FIELD_STATIC (lhs))
2085 field_op (lhs, OPCODE_getstatic, state);
2086 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2089 emit_load (lhs, state);
2091 else if (TREE_CODE (lhs) == ARRAY_REF)
2093 /* Duplicate the array and index, which are on the
2094 stack, so that we can load the old value. */
2095 emit_dup (2, 0, state);
2097 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (lhs), 7);
2100 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2105 /* This function correctly handles the case where the LHS
2106 of a binary expression is NULL_TREE. */
2107 rhs = build (TREE_CODE (rhs), TREE_TYPE (rhs),
2108 NULL_TREE, TREE_OPERAND (rhs, 1));
2111 generate_bytecode_insns (rhs, STACK_TARGET, state);
2112 if (target != IGNORE_TARGET)
2113 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
2119 if (TREE_CODE (exp) == COMPONENT_REF)
2121 tree field = TREE_OPERAND (exp, 1);
2122 if (! FIELD_STATIC (field))
2125 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2128 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2130 else if (TREE_CODE (exp) == VAR_DECL
2131 || TREE_CODE (exp) == PARM_DECL)
2133 if (FIELD_STATIC (exp))
2135 field_op (exp, OPCODE_putstatic, state);
2136 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2139 emit_store (exp, state);
2141 else if (TREE_CODE (exp) == ARRAY_REF)
2143 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2146 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2152 jopcode = OPCODE_iadd;
2155 jopcode = OPCODE_isub;
2158 jopcode = OPCODE_imul;
2160 case TRUNC_DIV_EXPR:
2162 jopcode = OPCODE_idiv;
2164 case TRUNC_MOD_EXPR:
2165 jopcode = OPCODE_irem;
2167 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2168 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
2169 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2170 case TRUTH_AND_EXPR:
2171 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2173 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2174 case TRUTH_XOR_EXPR:
2175 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2178 tree arg0 = TREE_OPERAND (exp, 0);
2179 tree arg1 = TREE_OPERAND (exp, 1);
2180 jopcode += adjust_typed_op (type, 3);
2181 if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
2183 /* fold may (e.g) convert 2*x to x+x. */
2184 generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
2185 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2189 /* ARG0 will be NULL_TREE if we're handling an `OP='
2190 expression. In this case the stack already holds the
2191 LHS. See the MODIFY_EXPR case. */
2192 if (arg0 != NULL_TREE)
2193 generate_bytecode_insns (arg0, target, state);
2194 if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2195 arg1 = convert (int_type_node, arg1);
2196 generate_bytecode_insns (arg1, target, state);
2198 /* For most binary operations, both operands and the result have the
2199 same type. Shift operations are different. Using arg1's type
2200 gets us the correct SP adjustment in all cases. */
2201 if (target == STACK_TARGET)
2202 emit_binop (jopcode, TREE_TYPE (arg1), state);
2205 case TRUTH_NOT_EXPR:
2207 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2208 if (target == STACK_TARGET)
2210 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2211 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2215 NOTE_PUSH (1 + is_long);
2216 OP1 (OPCODE_ixor + is_long);
2217 NOTE_POP (1 + is_long);
2221 jopcode = OPCODE_ineg;
2222 jopcode += adjust_typed_op (type, 3);
2223 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2224 if (target == STACK_TARGET)
2225 emit_unop (jopcode, type, state);
2227 case INSTANCEOF_EXPR:
2229 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2230 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2232 OP1 (OPCODE_instanceof);
2237 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2242 case FIX_TRUNC_EXPR:
2244 tree src = TREE_OPERAND (exp, 0);
2245 tree src_type = TREE_TYPE (src);
2246 tree dst_type = TREE_TYPE (exp);
2247 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2248 if (target == IGNORE_TARGET || src_type == dst_type)
2250 if (TREE_CODE (dst_type) == POINTER_TYPE)
2252 if (TREE_CODE (exp) == CONVERT_EXPR)
2254 int index = find_class_constant (&state->cpool,
2255 TREE_TYPE (dst_type));
2257 OP1 (OPCODE_checkcast);
2261 else /* Convert numeric types. */
2263 int wide_src = TYPE_PRECISION (src_type) > 32;
2264 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2265 NOTE_POP (1 + wide_src);
2267 if (TREE_CODE (dst_type) == REAL_TYPE)
2269 if (TREE_CODE (src_type) == REAL_TYPE)
2270 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2271 else if (TYPE_PRECISION (src_type) == 64)
2272 OP1 (OPCODE_l2f + wide_dst);
2274 OP1 (OPCODE_i2f + wide_dst);
2276 else /* Convert to integral type. */
2278 if (TREE_CODE (src_type) == REAL_TYPE)
2279 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2284 if (TYPE_PRECISION (dst_type) < 32)
2287 /* Already converted to int, if needed. */
2288 if (TYPE_PRECISION (dst_type) <= 8)
2290 else if (TREE_UNSIGNED (dst_type))
2296 NOTE_PUSH (1 + wide_dst);
2303 tree try_clause = TREE_OPERAND (exp, 0);
2304 struct jcf_block *start_label = get_jcf_label_here (state);
2305 struct jcf_block *end_label; /* End of try clause. */
2306 struct jcf_block *finished_label = gen_jcf_label (state);
2307 tree clause = TREE_OPERAND (exp, 1);
2308 if (target != IGNORE_TARGET)
2310 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2311 end_label = get_jcf_label_here (state);
2312 if (end_label == start_label)
2314 if (CAN_COMPLETE_NORMALLY (try_clause))
2315 emit_goto (finished_label, state);
2316 while (clause != NULL_TREE)
2318 tree catch_clause = TREE_OPERAND (clause, 0);
2319 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2320 struct jcf_handler *handler = alloc_handler (start_label,
2322 if (exception_decl == NULL_TREE)
2323 handler->type = NULL_TREE;
2325 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2326 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2327 clause = TREE_CHAIN (clause);
2328 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2329 emit_goto (finished_label, state);
2331 define_jcf_label (finished_label, state);
2335 case TRY_FINALLY_EXPR:
2337 struct jcf_block *finished_label = NULL;
2338 struct jcf_block *finally_label, *start_label, *end_label;
2339 struct jcf_handler *handler;
2340 tree try_block = TREE_OPERAND (exp, 0);
2341 tree finally = TREE_OPERAND (exp, 1);
2342 tree return_link = NULL_TREE, exception_decl = NULL_TREE;
2344 tree exception_type;
2346 finally_label = gen_jcf_label (state);
2347 start_label = get_jcf_label_here (state);
2348 /* If the `finally' clause can complete normally, we emit it
2349 as a subroutine and let the other clauses call it via
2350 `jsr'. If it can't complete normally, then we simply emit
2351 `goto's directly to it. */
2352 if (CAN_COMPLETE_NORMALLY (finally))
2354 finally_label->pc = PENDING_CLEANUP_PC;
2355 finally_label->next = state->labeled_blocks;
2356 state->labeled_blocks = finally_label;
2357 state->num_finalizers++;
2360 generate_bytecode_insns (try_block, target, state);
2362 if (CAN_COMPLETE_NORMALLY (finally))
2364 if (state->labeled_blocks != finally_label)
2366 state->labeled_blocks = finally_label->next;
2368 end_label = get_jcf_label_here (state);
2370 if (end_label == start_label)
2372 state->num_finalizers--;
2373 define_jcf_label (finally_label, state);
2374 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2378 if (CAN_COMPLETE_NORMALLY (finally))
2380 return_link = build_decl (VAR_DECL, NULL_TREE,
2381 return_address_type_node);
2382 finished_label = gen_jcf_label (state);
2385 if (CAN_COMPLETE_NORMALLY (try_block))
2387 if (CAN_COMPLETE_NORMALLY (finally))
2389 emit_jsr (finally_label, state);
2390 emit_goto (finished_label, state);
2393 emit_goto (finally_label, state);
2396 /* Handle exceptions. */
2398 exception_type = build_pointer_type (throwable_type_node);
2399 if (CAN_COMPLETE_NORMALLY (finally))
2401 /* We're going to generate a subroutine, so we'll need to
2402 save and restore the exception around the `jsr'. */
2403 exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2404 localvar_alloc (return_link, state);
2406 handler = alloc_handler (start_label, end_label, state);
2407 handler->type = NULL_TREE;
2408 if (CAN_COMPLETE_NORMALLY (finally))
2410 localvar_alloc (exception_decl, state);
2412 emit_store (exception_decl, state);
2413 emit_jsr (finally_label, state);
2414 emit_load (exception_decl, state);
2416 OP1 (OPCODE_athrow);
2421 /* We're not generating a subroutine. In this case we can
2422 simply have the exception handler pop the exception and
2423 then fall through to the `finally' block. */
2425 emit_pop (1, state);
2429 /* The finally block. If we're generating a subroutine, first
2430 save return PC into return_link. Otherwise, just generate
2431 the code for the `finally' block. */
2432 define_jcf_label (finally_label, state);
2433 if (CAN_COMPLETE_NORMALLY (finally))
2436 emit_store (return_link, state);
2439 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2440 if (CAN_COMPLETE_NORMALLY (finally))
2442 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2443 localvar_free (exception_decl, state);
2444 localvar_free (return_link, state);
2445 define_jcf_label (finished_label, state);
2450 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2452 OP1 (OPCODE_athrow);
2454 case NEW_ARRAY_INIT:
2456 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2457 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2458 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2459 HOST_WIDE_INT length = java_array_type_length (array_type);
2460 if (target == IGNORE_TARGET)
2462 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2463 generate_bytecode_insns (TREE_VALUE (values), target, state);
2466 push_int_const (length, state);
2469 if (JPRIMITIVE_TYPE_P (element_type))
2471 int atype = encode_newarray_type (element_type);
2472 OP1 (OPCODE_newarray);
2477 int index = find_class_constant (&state->cpool,
2478 TREE_TYPE (element_type));
2479 OP1 (OPCODE_anewarray);
2483 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2484 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2486 int save_SP = state->code_SP;
2487 emit_dup (1, 0, state);
2488 push_int_const (offset, state);
2490 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2493 state->code_SP = save_SP;
2497 case JAVA_EXC_OBJ_EXPR:
2498 NOTE_PUSH (1); /* Pushed by exception system. */
2500 case NEW_CLASS_EXPR:
2502 tree class = TREE_TYPE (TREE_TYPE (exp));
2503 int need_result = target != IGNORE_TARGET;
2504 int index = find_class_constant (&state->cpool, class);
2510 NOTE_PUSH (1 + need_result);
2512 /* ... fall though ... */
2515 tree f = TREE_OPERAND (exp, 0);
2516 tree x = TREE_OPERAND (exp, 1);
2517 int save_SP = state->code_SP;
2519 if (TREE_CODE (f) == ADDR_EXPR)
2520 f = TREE_OPERAND (f, 0);
2521 if (f == soft_newarray_node)
2523 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2524 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2525 STACK_TARGET, state);
2527 OP1 (OPCODE_newarray);
2531 else if (f == soft_multianewarray_node)
2535 int index = find_class_constant (&state->cpool,
2536 TREE_TYPE (TREE_TYPE (exp)));
2537 x = TREE_CHAIN (x); /* Skip class argument. */
2538 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2539 for (idim = ndims; --idim >= 0; )
2542 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2545 OP1 (OPCODE_multianewarray);
2550 else if (f == soft_anewarray_node)
2552 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2553 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2554 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2556 OP1 (OPCODE_anewarray);
2560 else if (f == soft_monitorenter_node
2561 || f == soft_monitorexit_node
2564 if (f == soft_monitorenter_node)
2565 op = OPCODE_monitorenter;
2566 else if (f == soft_monitorexit_node)
2567 op = OPCODE_monitorexit;
2570 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2576 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2578 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2580 nargs = state->code_SP - save_SP;
2581 state->code_SP = save_SP;
2582 if (f == soft_fmod_node)
2589 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2590 NOTE_POP (1); /* Pop implicit this. */
2591 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2593 tree context = DECL_CONTEXT (f);
2594 int index, interface = 0;
2596 if (METHOD_STATIC (f))
2597 OP1 (OPCODE_invokestatic);
2598 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2599 || METHOD_PRIVATE (f))
2600 OP1 (OPCODE_invokespecial);
2603 if (CLASS_INTERFACE (TYPE_NAME (context)))
2605 tree arg1 = TREE_VALUE (TREE_OPERAND (exp, 1));
2606 context = TREE_TYPE (TREE_TYPE (arg1));
2607 if (CLASS_INTERFACE (TYPE_NAME (context)))
2611 OP1 (OPCODE_invokeinterface);
2613 OP1 (OPCODE_invokevirtual);
2615 index = find_methodref_with_class_index (&state->cpool, f, context);
2625 f = TREE_TYPE (TREE_TYPE (f));
2626 if (TREE_CODE (f) != VOID_TYPE)
2628 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2629 if (target == IGNORE_TARGET)
2630 emit_pop (size, state);
2640 error("internal error in generate_bytecode_insn - tree code not implemented: %s",
2641 tree_code_name [(int) TREE_CODE (exp)]);
/* Assign final pc values to every basic block of the current method
   and rewrite the bytecode chunks so all branch operands are resolved.
   Pass 1 computes block start pcs, removing redundant gotos, threading
   goto-to-goto chains, and widening branches whose target does not fit
   in a signed 16-bit offset.  Pass 2 copies each chunk back-to-front
   (relocations are stored in reverse order), patching operands.
   NOTE(review): this chunk of the file is elided; some interior lines
   are not visible here.  */
2646 perform_relocations (state)
2647 struct jcf_partial *state;
2649 struct jcf_block *block;
2650 struct jcf_relocation *reloc;
2654 /* Before we start, the pc field of each block is an upper bound on
2655 the block's start pc (it may be less, if previous blocks need less
2656 than their maximum).
2658 The minimum size of each block is in the block's chunk->size. */
2660 /* First, figure out the actual locations of each block. */
2663 for (block = state->blocks; block != NULL; block = block->next)
2665 int block_size = block->v.chunk->size;
2669 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2670 Assumes relocations are in reverse order. */
2671 reloc = block->u.relocations;
2672 while (reloc != NULL
2673 && reloc->kind == OPCODE_goto_w
2674 && reloc->label->pc == block->next->pc
2675 && reloc->offset + 2 == block_size)
2677 reloc = reloc->next;
2678 block->u.relocations = reloc;
/* A narrow goto is 3 bytes (opcode plus a 2-byte offset).  */
2679 block->v.chunk->size -= 3;
2684 /* Optimize GOTO L; ... L: GOTO X by changing the first goto to
2685 jump directly to X. We're careful here to avoid an infinite
2686 loop if the `goto's themselves form one. We do this
2687 optimization because we can generate a goto-to-goto for some
2688 try/finally blocks. */
2689 while (reloc != NULL
2690 && reloc->kind == OPCODE_goto_w
2691 && reloc->label != block
2692 && reloc->label->v.chunk->data != NULL
2693 && reloc->label->v.chunk->data[0] == OPCODE_goto)
2695 /* Find the reloc for the first instruction of the
2696 destination block. */
2697 struct jcf_relocation *first_reloc;
2698 for (first_reloc = reloc->label->u.relocations;
2700 first_reloc = first_reloc->next)
2702 if (first_reloc->offset == 1
2703 && first_reloc->kind == OPCODE_goto_w)
/* Thread the jump: retarget this goto at the destination
   goto's own target.  */
2705 reloc->label = first_reloc->label;
2710 /* If we didn't do anything, exit the loop. */
2711 if (first_reloc == NULL)
2715 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2717 if (reloc->kind == SWITCH_ALIGN_RELOC)
2719 /* We assume this is the first relocation in this block,
2720 so we know its final pc. */
2721 int where = pc + reloc->offset;
/* tableswitch/lookupswitch operands must be 4-byte aligned.  */
2722 int pad = ((where + 3) & ~3) - where;
2725 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2727 int delta = reloc->label->pc - (pc + reloc->offset - 1);
/* Widening a goto/jsr adds 2 bytes; an ifCOND needs an extra
   goto_w as well, adding 5.  */
2728 int expand = reloc->kind > 0 ? 2 : 5;
/* A delta that fits in a signed 16-bit offset needs no widening.  */
2732 if (delta >= -32768 && delta <= 32767)
2738 block_size += expand;
/* Pass 2: copy/rewrite each chunk now that final pcs are known.  */
2744 for (block = state->blocks; block != NULL; block = block->next)
2746 struct chunk *chunk = block->v.chunk;
2747 int old_size = chunk->size;
2748 int next_pc = block->next == NULL ? pc : block->next->pc;
2749 int new_size = next_pc - block->pc;
2750 unsigned char *new_ptr;
2751 unsigned char *old_buffer = chunk->data;
2752 unsigned char *old_ptr = old_buffer + old_size;
2753 if (new_size != old_size)
/* Block grew (or shrank); allocate a fresh buffer of the final size.  */
2755 chunk->data = (unsigned char *)
2756 obstack_alloc (state->chunk_obstack, new_size);
2757 chunk->size = new_size;
2759 new_ptr = chunk->data + new_size;
2761 /* We do the relocations from back to front, because
2762 the relocations are in reverse order. */
2763 for (reloc = block->u.relocations; ; reloc = reloc->next)
2765 /* new_ptr and old_ptr point into the old and new buffers,
2766 respectively. (If no relocations cause the buffer to
2767 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2768 The bytes at higher address have been copied and relocations
2769 handled; those at lower addresses remain to process. */
2771 /* Lower old index of piece to be copied with no relocation.
2772 I.e. high index of the first piece that does need relocation. */
2773 int start = reloc == NULL ? 0
2774 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2775 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2777 : reloc->offset + 2;
2780 int n = (old_ptr - old_buffer) - start;
2784 memcpy (new_ptr, old_ptr, n);
2785 if (old_ptr == old_buffer)
2788 new_offset = new_ptr - chunk->data;
/* kind == -1 is a 2-byte operand; others use 4 bytes here.  */
2789 new_offset -= (reloc->kind == -1 ? 2 : 4);
2790 if (reloc->kind == 0)
2793 value = GET_u4 (old_ptr);
2795 else if (reloc->kind == BLOCK_START_RELOC)
2801 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2803 int where = block->pc + reloc->offset;
2804 int pad = ((where + 3) & ~3) - where;
2812 value = GET_u2 (old_ptr);
/* Convert the operand from a label reference to a pc-relative
   branch offset.  */
2814 value += reloc->label->pc - (block->pc + new_offset);
/* Emit the (big-endian) operand, low byte first since we move
   backwards through the buffer.  */
2815 *--new_ptr = (unsigned char) value; value >>= 8;
2816 *--new_ptr = (unsigned char) value; value >>= 8;
2817 if (reloc->kind != -1)
2819 *--new_ptr = (unsigned char) value; value >>= 8;
2820 *--new_ptr = (unsigned char) value;
2822 if (reloc->kind > BLOCK_START_RELOC)
2824 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2826 *--new_ptr = reloc->kind;
2828 else if (reloc->kind < -1)
2830 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2832 *--new_ptr = OPCODE_goto_w;
/* The negated kind encodes the inverse conditional opcode.  */
2835 *--new_ptr = - reloc->kind;
2838 if (new_ptr != chunk->data)
/* Record the total size of the finished method code.  */
2841 state->code_length = pc;
/* Initialize STATE for writing class files, allocating all chunks
   from the obstack WORK.  Called once per output file, before any
   per-method state is set up by init_jcf_method.  */
2845 init_jcf_state (state, work)
2846 struct jcf_partial *state;
2847 struct obstack *work;
2849 state->chunk_obstack = work;
2850 state->first = state->chunk = NULL;
/* Start with an empty constant pool and empty code buffers.  */
2851 CPOOL_INIT (&state->cpool);
2852 BUFFER_INIT (&state->localvars);
2853 BUFFER_INIT (&state->bytecode);
/* Reset the per-method fields of STATE before generating bytecode
   for METHOD: block list, line-number and local-variable tables,
   stack-depth tracking, and the exception-handler list.  */
2857 init_jcf_method (state, method)
2858 struct jcf_partial *state;
2861 state->current_method = method;
2862 state->blocks = state->last_block = NULL;
2863 state->linenumber_count = 0;
2864 state->first_lvar = state->last_lvar = NULL;
2865 state->lvar_count = 0;
2866 state->labeled_blocks = NULL;
2867 state->code_length = 0;
/* Reuse the buffers allocated for the previous method.  */
2868 BUFFER_RESET (&state->bytecode);
2869 BUFFER_RESET (&state->localvars);
2871 state->code_SP_max = 0;
2872 state->handlers = NULL;
2873 state->last_handler = NULL;
2874 state->num_handlers = 0;
2875 state->num_finalizers = 0;
2876 state->return_value_decl = NULL_TREE;
/* Release all memory held by STATE: the constant pool and every
   chunk allocated on the chunk obstack.  */
2880 release_jcf_state (state)
2881 struct jcf_partial *state;
2883 CPOOL_FINISH (&state->cpool);
/* Freeing the first allocation releases everything allocated on the
   obstack after it as well.  */
2884 obstack_free (state->chunk_obstack, state->first);
2887 /* Generate and return a list of chunks containing the class CLAS
2888 in the .class file representation. The list can be written to a
2889 .class file using write_chunks. Allocate chunks from obstack WORK. */
2891 static GTY(()) tree SourceFile_node;
/* Build the complete .class file image for CLAS as a chain of chunks
   (returned via state->first).  Emits, in class-file order: magic and
   version numbers, a placeholder constant-pool chunk (filled in last,
   once all constants are known), access flags, this/super/interfaces,
   the field table, the method table (with Code, LineNumberTable,
   LocalVariableTable and Exceptions attributes), and the class-level
   attributes (SourceFile, gcj marker, InnerClasses).
   NOTE(review): this chunk of the file is elided; some interior lines
   are not visible here.  */
2892 static struct chunk *
2893 generate_classfile (clas, state)
2895 struct jcf_partial *state;
2897 struct chunk *cpool_chunk;
2898 const char *source_file, *s;
2901 char *fields_count_ptr;
2902 int fields_count = 0;
2903 char *methods_count_ptr;
2904 int methods_count = 0;
/* java.lang.Object has no supertypes at all.  */
2907 = clas == object_type_node ? 0
2908 : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));
2910 ptr = append_chunk (NULL, 8, state);
2911 PUT4 (0xCafeBabe); /* Magic number */
2912 PUT2 (3); /* Minor version */
2913 PUT2 (45); /* Major version */
/* Reserve an empty chunk for the constant pool; its contents are
   generated at the very end, after all constants are interned.  */
2915 append_chunk (NULL, 0, state);
2916 cpool_chunk = state->chunk;
2918 /* Next allocate the chunk containing access_flags through fields_count. */
2919 if (clas == object_type_node)
2922 i = 8 + 2 * total_supers;
2923 ptr = append_chunk (NULL, i, state);
2924 i = get_access_flags (TYPE_NAME (clas));
2925 if (! (i & ACC_INTERFACE))
2927 PUT2 (i); /* access_flags */
2928 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2929 if (clas == object_type_node)
2931 PUT2(0); /* super_class */
2932 PUT2(0); /* interfaces_count */
2936 tree basetypes = TYPE_BINFO_BASETYPES (clas);
2937 tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
2938 int j = find_class_constant (&state->cpool, base);
2939 PUT2 (j); /* super_class */
2940 PUT2 (total_supers - 1); /* interfaces_count */
2941 for (i = 1; i < total_supers; i++)
2943 base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
2944 j = find_class_constant (&state->cpool, base);
/* Remember where fields_count goes; back-patched after the loop.  */
2948 fields_count_ptr = ptr;
2950 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
2952 int have_value, attr_count = 0;
/* Skip compiler-generated fields with no source-level name.  */
2953 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
2955 ptr = append_chunk (NULL, 8, state);
2956 i = get_access_flags (part); PUT2 (i);
2957 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
2958 i = find_utf8_constant (&state->cpool,
2959 build_java_signature (TREE_TYPE (part)));
/* A ConstantValue attribute is only valid for static final fields
   of primitive or String type with a compile-time constant value.  */
2961 have_value = DECL_INITIAL (part) != NULL_TREE
2962 && FIELD_STATIC (part) && CONSTANT_VALUE_P (DECL_INITIAL (part))
2963 && FIELD_FINAL (part)
2964 && (JPRIMITIVE_TYPE_P (TREE_TYPE (part))
2965 || TREE_TYPE (part) == string_ptr_type_node);
2969 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part) || FIELD_SYNTHETIC (part))
2972 PUT2 (attr_count); /* attributes_count */
2975 tree init = DECL_INITIAL (part);
2976 static tree ConstantValue_node = NULL_TREE;
2977 if (TREE_TYPE (part) != TREE_TYPE (init))
2978 fatal_error ("field initializer type mismatch");
2979 ptr = append_chunk (NULL, 8, state);
2980 if (ConstantValue_node == NULL_TREE)
2981 ConstantValue_node = get_identifier ("ConstantValue");
2982 i = find_utf8_constant (&state->cpool, ConstantValue_node);
2983 PUT2 (i); /* attribute_name_index */
2984 PUT4 (2); /* attribute_length */
2985 i = find_constant_index (init, state); PUT2 (i);
2987 /* Emit the "Synthetic" attribute for val$<x> and this$<n>
2988 fields and other fields which need it. */
2989 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
2990 || FIELD_SYNTHETIC (part))
2991 ptr = append_synthetic_attribute (state);
/* Back-patch the field count now that all fields are emitted.  */
2994 ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
2996 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
2999 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
3001 struct jcf_block *block;
3002 tree function_body = DECL_FUNCTION_BODY (part);
3003 tree body = function_body == NULL_TREE ? NULL_TREE
3004 : BLOCK_EXPR_BODY (function_body);
/* Constructors are written under the special name <init>.  */
3005 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
3007 tree type = TREE_TYPE (part);
3008 tree save_function = current_function_decl;
3009 int synthetic_p = 0;
3010 current_function_decl = part;
3011 ptr = append_chunk (NULL, 8, state);
3012 i = get_access_flags (part); PUT2 (i);
3013 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3014 i = find_utf8_constant (&state->cpool, build_java_signature (type));
/* Count the attributes: Code (if there is a body) and Exceptions
   (if there is a throws clause).  */
3016 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
3018 /* Make room for the Synthetic attribute (of zero length.) */
3019 if (DECL_FINIT_P (part)
3020 || DECL_INSTINIT_P (part)
3021 || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
3022 || TYPE_DOT_CLASS (clas) == part)
3028 PUT2 (i); /* attributes_count */
3031 ptr = append_synthetic_attribute (state);
3033 if (body != NULL_TREE)
3035 int code_attributes_count = 0;
3036 static tree Code_node = NULL_TREE;
3039 struct jcf_handler *handler;
3040 if (Code_node == NULL_TREE)
3041 Code_node = get_identifier ("Code");
3042 ptr = append_chunk (NULL, 14, state);
3043 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
3045 init_jcf_method (state, part);
3046 get_jcf_label_here (state); /* Force a first block. */
3047 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3048 localvar_alloc (t, state);
/* Generate the actual bytecode for the method body.  */
3049 generate_bytecode_insns (body, IGNORE_TARGET, state);
3050 if (CAN_COMPLETE_NORMALLY (body))
3052 if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
3055 OP1 (OPCODE_return);
3057 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3058 localvar_free (t, state);
3059 if (state->return_value_decl != NULL_TREE)
3060 localvar_free (state->return_value_decl, state);
3061 finish_jcf_block (state);
/* Resolve branch targets and assign final pcs before any pc-based
   tables (exceptions, line numbers, local vars) are emitted.  */
3062 perform_relocations (state);
3065 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
3066 if (state->linenumber_count > 0)
3068 code_attributes_count++;
3069 i += 8 + 4 * state->linenumber_count;
3071 if (state->lvar_count > 0)
3073 code_attributes_count++;
3074 i += 8 + 10 * state->lvar_count;
3076 UNSAFE_PUT4 (i); /* attribute_length */
3077 UNSAFE_PUT2 (state->code_SP_max); /* max_stack */
3078 UNSAFE_PUT2 (localvar_max); /* max_locals */
3079 UNSAFE_PUT4 (state->code_length);
3081 /* Emit the exception table. */
3082 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
3083 PUT2 (state->num_handlers); /* exception_table_length */
3084 handler = state->handlers;
3085 for (; handler != NULL; handler = handler->next)
3088 PUT2 (handler->start_label->pc);
3089 PUT2 (handler->end_label->pc);
3090 PUT2 (handler->handler_label->pc);
/* A NULL type means a catch-all entry (finally clause).  */
3091 if (handler->type == NULL_TREE)
3094 type_index = find_class_constant (&state->cpool,
3099 ptr = append_chunk (NULL, 2, state);
3100 PUT2 (code_attributes_count);
3102 /* Write the LineNumberTable attribute. */
3103 if (state->linenumber_count > 0)
3105 static tree LineNumberTable_node = NULL_TREE;
3106 ptr = append_chunk (NULL,
3107 8 + 4 * state->linenumber_count, state);
3108 if (LineNumberTable_node == NULL_TREE)
3109 LineNumberTable_node = get_identifier ("LineNumberTable");
3110 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3111 PUT2 (i); /* attribute_name_index */
3112 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
3113 i = state->linenumber_count; PUT2 (i);
3114 for (block = state->blocks; block != NULL; block = block->next)
3116 int line = block->linenumber;
3125 /* Write the LocalVariableTable attribute. */
3126 if (state->lvar_count > 0)
3128 static tree LocalVariableTable_node = NULL_TREE;
3129 struct localvar_info *lvar = state->first_lvar;
3130 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3131 if (LocalVariableTable_node == NULL_TREE)
3132 LocalVariableTable_node = get_identifier("LocalVariableTable");
3133 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3134 PUT2 (i); /* attribute_name_index */
3135 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
3136 i = state->lvar_count; PUT2 (i);
3137 for ( ; lvar != NULL; lvar = lvar->next)
3139 tree name = DECL_NAME (lvar->decl);
3140 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
/* Each entry is: start_pc, length, name, descriptor, slot index.  */
3141 i = lvar->start_label->pc; PUT2 (i);
3142 i = lvar->end_label->pc - i; PUT2 (i);
3143 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3144 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
3145 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
/* Write the Exceptions attribute for a `throws' clause.  */
3149 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3151 tree t = DECL_FUNCTION_THROWS (part);
3152 int throws_count = list_length (t);
3153 static tree Exceptions_node = NULL_TREE;
3154 if (Exceptions_node == NULL_TREE)
3155 Exceptions_node = get_identifier ("Exceptions");
3156 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3157 i = find_utf8_constant (&state->cpool, Exceptions_node);
3158 PUT2 (i); /* attribute_name_index */
3159 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
3160 i = throws_count; PUT2 (i);
3161 for (; t != NULL_TREE; t = TREE_CHAIN (t))
3163 i = find_class_constant (&state->cpool, TREE_VALUE (t));
3168 current_function_decl = save_function;
/* Back-patch the method count now that all methods are emitted.  */
3170 ptr = methods_count_ptr; UNSAFE_PUT2 (methods_count);
/* Strip any directory components from the source file name.  */
3172 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3173 for (s = source_file; ; s++)
3178 if (ch == '/' || ch == '\\')
3181 ptr = append_chunk (NULL, 10, state);
3183 i = 1; /* Source file always exists as an attribute */
3184 if (INNER_CLASS_TYPE_P (clas) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas)))
3186 if (clas == object_type_node)
3188 PUT2 (i); /* attributes_count */
3190 /* generate the SourceFile attribute. */
3191 if (SourceFile_node == NULL_TREE)
3193 SourceFile_node = get_identifier ("SourceFile");
3196 i = find_utf8_constant (&state->cpool, SourceFile_node);
3197 PUT2 (i); /* attribute_name_index */
3199 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3201 append_gcj_attribute (state, clas);
3202 append_innerclasses_attribute (state, clas);
3204 /* Now, finally, generate the contents of the constant pool chunk. */
3205 i = count_constant_pool_bytes (&state->cpool);
3206 ptr = obstack_alloc (state->chunk_obstack, i);
3207 cpool_chunk->data = ptr;
3208 cpool_chunk->size = i;
3209 write_constant_pool (&state->cpool, ptr, i);
3210 return state->first;
3213 static GTY(()) tree Synthetic_node;
/* Emit a zero-length "Synthetic" attribute into a new 6-byte chunk
   (2-byte name index + 4-byte length) and return the output pointer.  */
3214 static unsigned char *
3215 append_synthetic_attribute (state)
3216 struct jcf_partial *state;
3218 unsigned char *ptr = append_chunk (NULL, 6, state);
/* The identifier node is created once and cached across calls.  */
3221 if (Synthetic_node == NULL_TREE)
3223 Synthetic_node = get_identifier ("Synthetic");
3225 i = find_utf8_constant (&state->cpool, Synthetic_node);
3226 PUT2 (i); /* Attribute string index */
3227 PUT4 (0); /* Attribute length */
/* Append a zero-length "gnu.gcj.gcj-compiled" marker attribute for
   CLASS to STATE's chunk list, so the gcj runtime can recognize
   classes compiled by gcj.  */
3233 append_gcj_attribute (state, class)
3234 struct jcf_partial *state;
/* java.lang.Object itself is not marked.  */
3240 if (class != object_type_node)
3243 ptr = append_chunk (NULL, 6, state); /* 2+4 */
3244 i = find_utf8_constant (&state->cpool,
3245 get_identifier ("gnu.gcj.gcj-compiled"));
3246 PUT2 (i); /* Attribute string index */
3247 PUT4 (0); /* Attribute length */
3250 static tree InnerClasses_node;
/* Append an "InnerClasses" attribute for CLASS, listing every inner
   class visible from it: the class itself, its enclosing classes
   (walking up), and the classes declared directly inside it.  The
   attribute length and entry count fields are written as zero first
   and back-patched once all entries have been emitted.  */
3252 append_innerclasses_attribute (state, class)
3253 struct jcf_partial *state;
3256 tree orig_decl = TYPE_NAME (class);
3259 unsigned char *ptr, *length_marker, *number_marker;
/* Nothing to do unless CLASS is an inner class or declares some.  */
3261 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3264 ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3266 if (InnerClasses_node == NULL_TREE)
3268 InnerClasses_node = get_identifier ("InnerClasses");
3270 i = find_utf8_constant (&state->cpool, InnerClasses_node);
3272 length_marker = ptr; PUT4 (0); /* length, to be later patched */
3273 number_marker = ptr; PUT2 (0); /* number of classes, to be later patched */
3275 /* Generate the entries: all inner classes visible from the one we
3276 process: itself, up and down. */
3277 while (class && INNER_CLASS_TYPE_P (class))
3281 decl = TYPE_NAME (class);
/* Point N past the end of the mangled name, then back up to just
   after the last `$' to get the simple inner-class name.  */
3282 n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3283 IDENTIFIER_LENGTH (DECL_NAME (decl));
3285 while (n[-1] != '$')
3287 append_innerclasses_attribute_entry (state, decl, get_identifier (n));
/* Walk up to the enclosing class.  */
3290 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
/* Now the classes declared directly inside this one.  */
3294 for (current = DECL_INNER_CLASS_LIST (decl);
3295 current; current = TREE_CHAIN (current))
3297 append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3298 TREE_VALUE (current));
/* Back-patch the length (8 bytes per entry + 2 for the count) and
   the entry count.  */
3302 ptr = length_marker; PUT4 (8*length+2);
3303 ptr = number_marker; PUT2 (length);
/* Emit one 8-byte InnerClasses table entry for the inner class DECL
   with simple name NAME: inner_class_info_index, outer_class_info_index,
   inner_name_index, inner_class_access_flags.  */
3307 append_innerclasses_attribute_entry (state, decl, name)
3308 struct jcf_partial *state;
3312 int ocii = 0, ini = 0;
3313 unsigned char *ptr = append_chunk (NULL, 8, state);
3315 icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3317 /* Sun's implementation seems to generate ocii to 0 for inner
3318 classes (which aren't considered members of the class they're
3319 in.) The specs are saying that if the class is anonymous,
3320 inner_name_index must be zero. */
3321 if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl)))
3323 ocii = find_class_constant (&state->cpool,
3324 TREE_TYPE (DECL_CONTEXT (decl)));
3325 ini = find_utf8_constant (&state->cpool, name);
3327 icaf = get_access_flags (decl);
3329 PUT2 (icii); PUT2 (ocii); PUT2 (ini); PUT2 (icaf);
/* Compute the output file name for the `.class' file of CLAS,
   creating any missing subdirectories below the base directory on
   the way.  Returns a freshly xmalloc'd string; the caller frees it.
   NOTE(review): this chunk of the file is elided; some interior lines
   are not visible here.  */
3333 make_class_file_name (clas)
3336 const char *dname, *cname, *slash;
/* Translate the class name, mapping `.' to the directory separator.  */
3340 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3341 "", '.', DIR_SEPARATOR,
3343 if (jcf_write_base_directory == NULL)
3345 /* Make sure we put the class file into the .java file's
3346 directory, and not into some subdirectory thereof. */
3348 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3349 slash = strrchr (dname, DIR_SEPARATOR);
3355 t = strrchr (cname, DIR_SEPARATOR);
/* A `-d' base directory was given; build the path below it.  */
3361 dname = jcf_write_base_directory;
3362 slash = dname + strlen (dname);
/* Join directory + separator + class path into one buffer.  */
3365 r = xmalloc (slash - dname + strlen (cname) + 2);
3366 strncpy (r, dname, slash - dname);
3367 r[slash - dname] = DIR_SEPARATOR;
3368 strcpy (&r[slash - dname + 1], cname);
3370 /* We try to make new directories when we need them. We only do
3371 this for directories which "might not" exist. For instance, we
3372 assume the `-d' directory exists, but we don't assume that any
3373 subdirectory below it exists. It might be worthwhile to keep
3374 track of which directories we've created to avoid gratuitous
3376 dname = r + (slash - dname) + 1;
3379 char *s = strchr (dname, DIR_SEPARATOR);
3383 if (stat (r, &sb) == -1
3384 /* Try to make it. */
3385 && mkdir (r, 0755) == -1)
3386 fatal_io_error ("can't create directory %s", r);
3389 /* Skip consecutive separators. */
3390 for (dname = s + 1; *dname && *dname == DIR_SEPARATOR; ++dname)
3397 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3398 The output .class file name is make_class_file_name(CLAS). */
/* Write out the class CLAS as a .class file named by
   make_class_file_name.  The file is written to a ".tmp" name first
   and atomically renamed into place, so concurrent compiler
   instances never see a partially written class file.  */
3401 write_classfile (clas)
3404 struct obstack *work = &temporary_obstack;
3405 struct jcf_partial state[1];
3406 char *class_file_name = make_class_file_name (clas);
3407 struct chunk *chunks;
3409 if (class_file_name != NULL)
3412 char *temporary_file_name;
3414 /* The .class file is initially written to a ".tmp" file so that
3415 if multiple instances of the compiler are running at once
3416 they do not see partially formed class files. */
3417 temporary_file_name = concat (class_file_name, ".tmp", NULL);
3418 stream = fopen (temporary_file_name, "wb");
3420 fatal_io_error ("can't open %s for writing", temporary_file_name);
3422 jcf_dependency_add_target (class_file_name);
3423 init_jcf_state (state, work);
/* Build the whole class image in memory, then write it out.  */
3424 chunks = generate_classfile (clas, state);
3425 write_chunks (stream, chunks);
3426 if (fclose (stream))
3427 fatal_io_error ("error closing %s", temporary_file_name);
/* Atomically move the finished file into place; on failure remove
   the temporary so no stale .tmp is left behind.  */
3428 if (rename (temporary_file_name, class_file_name) == -1)
3430 remove (temporary_file_name);
3431 fatal_io_error ("can't create %s", class_file_name);
3433 free (temporary_file_name);
3434 free (class_file_name);
3436 release_jcf_state (state);
3440 string concatenation
3441 synchronized statement
3444 #include "gt-java-jcf-write.h"