1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15 You should have received a copy of the GNU General Public License
16 along with GNU CC; see the file COPYING. If not, write to
17 the Free Software Foundation, 59 Temple Place - Suite 330,
18 Boston, MA 02111-1307, USA.
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
28 #include "java-tree.h"
33 #include "java-opcodes.h"
34 #include "parse.h" /* for BLOCK_EXPR_BODY */
40 #define DIR_SEPARATOR '/'
/* NOTE(review): this view of the file has lines elided; the #define headers
   for RESERVE, OP2, OP4 and NOTE_POP are among the missing lines.  The
   macros operate on a local `state' (a struct jcf_partial *) in scope at
   each use site.  */
43 extern struct obstack temporary_obstack;
45 /* Base directory in which `.class' files should be written.
46 NULL means to put the file into the same directory as the
47 corresponding .java file. */
48 char *jcf_write_base_directory = NULL;
50 /* Make sure bytecode.data is big enough for at least N more bytes. */
53 do { CHECK_OP(state); \
54 if (state->bytecode.ptr + (N) > state->bytecode.limit) \
55 buffer_grow (&state->bytecode, N); } while (0)
57 /* Add a 1-byte instruction/operand I to bytecode.data,
58 assuming space has already been RESERVE'd. */
60 #define OP1(I) (state->last_bc = *state->bytecode.ptr++ = (I), CHECK_OP(state))
62 /* Like OP1, but I is a 2-byte big endian integer. */
65 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
67 /* Like OP1, but I is a 4-byte big endian integer. */
70 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
71 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
73 /* Macro to call each time we push I words on the JVM stack. */
75 #define NOTE_PUSH(I) \
76 do { state->code_SP += (I); \
77 if (state->code_SP > state->code_SP_max) \
78 state->code_SP_max = state->code_SP; } while (0)
80 /* Macro to call each time we pop I words from the JVM stack. */
83 do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
/* NOTE(review): struct bodies below are partially elided in this view
   (opening/closing braces and some members are missing lines); comments
   kept in sync with the visible members only.  */
85 /* A chunk or segment of a .class file. */
89 /* The next segment of this .class file. */
92 /* The actual data in this segment to be written to the .class file. */
95 /* The size of the segment to be written to the .class file. */
/* Sentinel pc values for jcf_block labels that are not yet defined.  */
99 #define PENDING_CLEANUP_PC (-3)
100 #define PENDING_EXIT_PC (-2)
101 #define UNDEFINED_PC (-1)
103 /* Each "block" represents a label plus the bytecode instructions following.
104 There may be branches out of the block, but no incoming jumps, except
105 to the beginning of the block.
107 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
108 associated code yet), but it is an undefined label.
113 /* For blocks that are defined, the next block (in pc order).
114 For blocks that are not-yet-defined the end label of a LABELED_BLOCK_EXPR
115 or a cleanup expression (from a WITH_CLEANUP_EXPR),
116 this is the next (outer) such end label, in a stack headed by
117 labeled_blocks in jcf_partial. */
118 struct jcf_block *next;
120 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
121 pc is PENDING_EXIT_PC.
122 In the not-yet-defined end label for pending cleanup subroutine,
123 pc is PENDING_CLEANUP_PC.
124 For other not-yet-defined labels, pc is UNDEFINED_PC.
126 If the label has been defined:
127 Until perform_relocations is finished, this is the maximum possible
128 value of the bytecode offset at the beginning of this block.
129 After perform_relocations, it is the actual offset (pc). */
134 /* After finish_jcf_block is called, the actual instructions
135 contained in this block. Before that NULL, and the instructions
136 are in state->bytecode. */
140 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
141 covered by the cleanup. */
142 struct jcf_block *start_label;
146 /* Set of relocations (in reverse offset order) for this block. */
147 struct jcf_relocation *relocations;
149 /* If this block is that of the not-yet-defined end label of
150 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
151 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
156 /* A "relocation" type for the 0-3 bytes of padding at the start
157 of a tableswitch or a lookupswitch. */
158 #define SWITCH_ALIGN_RELOC 4
160 /* A relocation type for the labels in a tableswitch or a lookupswitch;
161 these are relative to the start of the instruction, but (due to
162 the 0-3 bytes of padding), we don't know the offset before relocation. */
163 #define BLOCK_START_RELOC 1
/* One pending branch-target fixup within a jcf_block; resolved later by
   perform_relocations (defined elsewhere in this file).  */
165 struct jcf_relocation
167 /* Next relocation for the current jcf_block. */
168 struct jcf_relocation *next;
170 /* The (byte) offset within the current block that needs to be relocated. */
171 HOST_WIDE_INT offset;
173 /* 0 if offset is a 4-byte relative offset.
174 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
175 for proper alignment in tableswitch/lookupswitch instructions.
176 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
177 to the start of the containing block.
178 -1 if offset is a 2-byte relative offset.
179 < -1 if offset is the address of an instruction with a 2-byte offset
180 that does not have a corresponding 4-byte offset version, in which
181 case the absolute value of kind is the inverted opcode.
182 > 4 if offset is the address of an instruction (such as jsr) with a
183 2-byte offset that does have a corresponding 4-byte offset version,
184 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
187 /* The label the relocation wants to actually transfer to. */
188 struct jcf_block *label;
191 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
192 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
194 /* State for single catch clause. */
198 struct jcf_handler *next;
/* [start_label, end_label) is the protected region; handler_label is
   where control transfers when the exception is caught.  */
200 struct jcf_block *start_label;
201 struct jcf_block *end_label;
202 struct jcf_block *handler_label;
204 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
208 /* State for the current switch statement. */
210 struct jcf_switch_state
212 struct jcf_switch_state *prev;
213 struct jcf_block *default_label;
215 struct jcf_relocation *cases;
217 HOST_WIDE_INT min_case, max_case;
220 /* This structure is used to contain the various pieces that will
221 become a .class file. */
/* NOTE(review): several members of this struct (chunk list head, cpool,
   code_SP, code_SP_max, num_handlers, num_finalizers, current_method)
   are on lines elided from this view.  */
227 struct obstack *chunk_obstack;
230 /* List of basic blocks for the current method. */
231 struct jcf_block *blocks;
232 struct jcf_block *last_block;
/* Head and tail of the debug-info local-variable chain built by
   localvar_alloc.  */
234 struct localvar_info *first_lvar;
235 struct localvar_info *last_lvar;
240 int linenumber_count;
242 /* Until perform_relocations, this is an upper bound on the number
243 of bytes (so far) in the instructions for the current method. */
246 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
247 struct jcf_block *labeled_blocks;
249 /* The current stack size (stack pointer) in the current method. */
252 /* The largest extent of stack size (stack pointer) in the current method. */
255 /* Contains a mapping from local var slot number to localvar_info. */
256 struct buffer localvars;
258 /* The buffer allocated for bytecode for the current jcf_block. */
259 struct buffer bytecode;
261 /* Chain of exception handlers for the current method. */
262 struct jcf_handler *handlers;
264 /* Last element in handlers chain. */
265 struct jcf_handler *last_handler;
267 /* Number of exception handlers for the current method. */
270 /* Number of finalizers we are currently nested within. */
273 /* If non-NULL, use this for the return value. */
274 tree return_value_decl;
276 /* Information about the current switch statement. */
277 struct jcf_switch_state *sw_state;
279 enum java_opcode last_bc; /* The last emitted bytecode */
282 static void generate_bytecode_insns PARAMS ((tree, int, struct jcf_partial *));
283 static struct chunk * alloc_chunk PARAMS ((struct chunk *, unsigned char *,
284 int, struct obstack *));
285 static unsigned char * append_chunk PARAMS ((unsigned char *, int,
286 struct jcf_partial *));
287 static void append_chunk_copy PARAMS ((unsigned char *, int,
288 struct jcf_partial *));
289 static struct jcf_block * gen_jcf_label PARAMS ((struct jcf_partial *));
290 static void finish_jcf_block PARAMS ((struct jcf_partial *));
291 static void define_jcf_label PARAMS ((struct jcf_block *,
292 struct jcf_partial *));
293 static struct jcf_block * get_jcf_label_here PARAMS ((struct jcf_partial *));
294 static void put_linenumber PARAMS ((int, struct jcf_partial *));
295 static void localvar_alloc PARAMS ((tree, struct jcf_partial *));
296 static void localvar_free PARAMS ((tree, struct jcf_partial *));
297 static int get_access_flags PARAMS ((tree));
298 static void write_chunks PARAMS ((FILE *, struct chunk *));
299 static int adjust_typed_op PARAMS ((tree, int));
300 static void generate_bytecode_conditional PARAMS ((tree, struct jcf_block *,
301 struct jcf_block *, int,
302 struct jcf_partial *));
303 static void generate_bytecode_return PARAMS ((tree, struct jcf_partial *));
304 static void perform_relocations PARAMS ((struct jcf_partial *));
305 static void init_jcf_state PARAMS ((struct jcf_partial *, struct obstack *));
306 static void init_jcf_method PARAMS ((struct jcf_partial *, tree));
307 static void release_jcf_state PARAMS ((struct jcf_partial *));
308 static struct chunk * generate_classfile PARAMS ((tree, struct jcf_partial *));
309 static struct jcf_handler *alloc_handler PARAMS ((struct jcf_block *,
311 struct jcf_partial *));
312 static void emit_iinc PARAMS ((tree, HOST_WIDE_INT, struct jcf_partial *));
313 static void emit_reloc PARAMS ((HOST_WIDE_INT, int, struct jcf_block *,
314 struct jcf_partial *));
315 static void push_constant1 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
316 static void push_constant2 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
317 static void push_int_const PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
318 static int find_constant_wide PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
319 struct jcf_partial *));
320 static void push_long_const PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
321 struct jcf_partial *));
322 static int find_constant_index PARAMS ((tree, struct jcf_partial *));
323 static void push_long_const PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
324 struct jcf_partial *));
325 static void field_op PARAMS ((tree, int, struct jcf_partial *));
326 static void maybe_wide PARAMS ((int, int, struct jcf_partial *));
327 static void emit_dup PARAMS ((int, int, struct jcf_partial *));
328 static void emit_pop PARAMS ((int, struct jcf_partial *));
329 static void emit_load_or_store PARAMS ((tree, int, struct jcf_partial *));
330 static void emit_load PARAMS ((tree, struct jcf_partial *));
331 static void emit_store PARAMS ((tree, struct jcf_partial *));
332 static void emit_unop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
333 static void emit_binop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
334 static void emit_reloc PARAMS ((HOST_WIDE_INT, int, struct jcf_block *,
335 struct jcf_partial *));
336 static void emit_switch_reloc PARAMS ((struct jcf_block *,
337 struct jcf_partial *));
338 static void emit_case_reloc PARAMS ((struct jcf_relocation *,
339 struct jcf_partial *));
340 static void emit_if PARAMS ((struct jcf_block *, int, int,
341 struct jcf_partial *));
342 static void emit_goto PARAMS ((struct jcf_block *, struct jcf_partial *));
343 static void emit_jsr PARAMS ((struct jcf_block *, struct jcf_partial *));
344 static void call_cleanups PARAMS ((struct jcf_block *, struct jcf_partial *));
345 static char *make_class_file_name PARAMS ((tree));
346 static unsigned char *append_synthetic_attribute PARAMS ((struct jcf_partial *));
347 static void append_innerclasses_attribute PARAMS ((struct jcf_partial *, tree));
348 static void append_innerclasses_attribute_entry PARAMS ((struct jcf_partial *, tree, tree));
349 static void append_gcj_attribute PARAMS ((struct jcf_partial *, tree));
351 /* Utility macros for appending (big-endian) data to a buffer.
352 We assume a local variable 'ptr' points into where we want to
353 write next, and we assume enough space has been allocated. */
/* When ENABLE_JC1_CHECKING is defined, CHECK_PUT verifies that a write
   through `ptr' stays inside the current chunk; otherwise it is a no-op.  */
355 #ifdef ENABLE_JC1_CHECKING
356 static int CHECK_PUT PARAMS ((void *, struct jcf_partial *, int));
359 CHECK_PUT (ptr, state, i)
361 struct jcf_partial *state;
364 if ((unsigned char *) ptr < state->chunk->data
365 || (unsigned char *) ptr + i > state->chunk->data + state->chunk->size)
371 #define CHECK_PUT(PTR, STATE, I) ((void)0)
374 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
375 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
376 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
377 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
379 /* There are some cases below where CHECK_PUT is guaranteed to fail.
380 Use the following macros in those specific cases. */
381 #define UNSAFE_PUT1(X) (*ptr++ = (X))
382 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
383 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
384 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
387 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
388 Set the data and size fields to DATA and SIZE, respectively.
389 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
/* NOTE(review): parameter declarations for LAST/DATA/SIZE, the linking of
   the new chunk after LAST, and the return statement are on elided lines.  */
391 static struct chunk *
392 alloc_chunk (last, data, size, work)
396 struct obstack *work;
398 struct chunk *chunk = (struct chunk *)
399 obstack_alloc (work, sizeof(struct chunk));
401 if (data == NULL && size > 0)
402 data = obstack_alloc (work, size);
/* Debug-build sanity check: verify the bytecode write pointer has not run
   past the buffer limit.  A no-op macro when checking is disabled.  */
412 #ifdef ENABLE_JC1_CHECKING
413 static int CHECK_OP PARAMS ((struct jcf_partial *));
417 struct jcf_partial *state;
419 if (state->bytecode.ptr > state->bytecode.limit)
425 #define CHECK_OP(STATE) ((void) 0)
/* Append a new chunk of SIZE bytes (copying from DATA unless it is NULL)
   to the class file being built, and return a pointer to its storage.
   Also records the chunk as the first one if none exists yet.  */
428 static unsigned char *
429 append_chunk (data, size, state)
432 struct jcf_partial *state;
434 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
435 if (state->first == NULL)
436 state->first = state->chunk;
437 return state->chunk->data;
/* Like append_chunk, but always allocates fresh storage and copies
   SIZE bytes from DATA into it.  */
441 append_chunk_copy (data, size, state)
444 struct jcf_partial *state;
446 unsigned char *ptr = append_chunk (NULL, size, state);
447 memcpy (ptr, data, size);
/* Allocate (on the chunk obstack) a new, as-yet-undefined label:
   linenumber unset (-1) and pc UNDEFINED_PC.  The return statement is
   on an elided line.  */
450 static struct jcf_block *
451 gen_jcf_label (state)
452 struct jcf_partial *state;
454 struct jcf_block *block = (struct jcf_block *)
455 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
457 block->linenumber = -1;
458 block->pc = UNDEFINED_PC;
/* Close out the current block: move the accumulated bytecode from
   state->bytecode into a chunk attached to the block, then raise
   state->code_length to a conservative upper bound that allows for
   every relocation growing to its widest encoding.  */
463 finish_jcf_block (state)
464 struct jcf_partial *state;
466 struct jcf_block *block = state->last_block;
467 struct jcf_relocation *reloc;
468 int code_length = BUFFER_LENGTH (&state->bytecode);
469 int pc = state->code_length;
470 append_chunk_copy (state->bytecode.data, code_length, state);
471 BUFFER_RESET (&state->bytecode);
472 block->v.chunk = state->chunk;
474 /* Calculate code_length to the maximum value it can have. */
475 pc += block->v.chunk->size;
476 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
478 int kind = reloc->kind;
479 if (kind == SWITCH_ALIGN_RELOC)
481 else if (kind > BLOCK_START_RELOC)
482 pc += 2; /* 2-byte offset may grow to 4-byte offset */
484 pc += 5; /* May need to add a goto_w. */
486 state->code_length = pc;
/* Define LABEL at the current position: finish the previous block (if any),
   give LABEL the current (upper-bound) pc, and append it to the block list.  */
490 define_jcf_label (label, state)
491 struct jcf_block *label;
492 struct jcf_partial *state;
494 if (state->last_block != NULL)
495 finish_jcf_block (state);
496 label->pc = state->code_length;
497 if (state->blocks == NULL)
498 state->blocks = label;
500 state->last_block->next = label;
501 state->last_block = label;
503 label->u.relocations = NULL;
/* Return a label defined at the current position.  Re-uses the last block
   if no bytecode has been emitted since it began; otherwise creates and
   defines a fresh label.  */
506 static struct jcf_block *
507 get_jcf_label_here (state)
508 struct jcf_partial *state;
510 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
511 return state->last_block;
514 struct jcf_block *label = gen_jcf_label (state);
515 define_jcf_label (label, state);
520 /* Note a line number entry for the current PC and given LINE. */
/* If the label here already carries a line number, a new label is created
   so each label carries at most one LineNumberTable entry.  */
523 put_linenumber (line, state)
525 struct jcf_partial *state;
527 struct jcf_block *label = get_jcf_label_here (state);
528 if (label->linenumber > 0)
530 label = gen_jcf_label (state);
531 define_jcf_label (label, state);
533 label->linenumber = line;
534 state->linenumber_count++;
537 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
538 in the range (START_LABEL, END_LABEL). */
/* The handler's own label is the current position; the new handler is
   appended to the state's handler chain and counted.  */
540 static struct jcf_handler *
541 alloc_handler (start_label, end_label, state)
542 struct jcf_block *start_label;
543 struct jcf_block *end_label;
544 struct jcf_partial *state;
546 struct jcf_handler *handler = (struct jcf_handler *)
547 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
548 handler->start_label = start_label;
549 handler->end_label = end_label;
550 handler->handler_label = get_jcf_label_here (state);
551 if (state->handlers == NULL)
552 state->handlers = handler;
554 state->last_handler->next = handler;
555 state->last_handler = handler;
556 handler->next = NULL;
557 state->num_handlers++;
562 /* The index of jvm local variable allocated for this DECL.
563 This is assigned when generating .class files;
564 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
565 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
567 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
/* Per-local-variable record: chained for debug info, with the labels
   delimiting the variable's live range.  */
571 struct localvar_info *next;
574 struct jcf_block *start_label;
575 struct jcf_block *end_label;
/* View state->localvars as an array of localvar_info pointers indexed
   by JVM local slot number; localvar_max is the current slot count.  */
578 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
579 #define localvar_max \
580 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
/* Assign a JVM local slot to DECL.  Scans the slot table for a free slot
   (a free adjacent pair when the type is wide, i.e. long/double), growing
   the table if needed; records the slot in DECL_LOCAL_INDEX and, at
   higher debug levels, chains a localvar_info for debug output.
   NOTE(review): several lines (the `index' declaration, the slot-found
   branch, ptr[0] assignment) are elided from this view.  */
583 localvar_alloc (decl, state)
585 struct jcf_partial *state;
587 struct jcf_block *start_label = get_jcf_label_here (state);
588 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
590 register struct localvar_info *info;
591 register struct localvar_info **ptr = localvar_buffer;
592 register struct localvar_info **limit
593 = (struct localvar_info**) state->localvars.ptr;
594 for (index = 0; ptr < limit; index++, ptr++)
597 && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
602 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
603 ptr = (struct localvar_info**) state->localvars.data + index;
604 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
606 info = (struct localvar_info *)
607 obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
/* ~0 marks the second slot of a wide (2-slot) variable as occupied.  */
610 ptr[1] = (struct localvar_info *)(~0);
611 DECL_LOCAL_INDEX (decl) = index;
613 info->start_label = start_label;
615 if (debug_info_level > DINFO_LEVEL_TERSE
616 && DECL_NAME (decl) != NULL_TREE)
618 /* Generate debugging info. */
620 if (state->last_lvar != NULL)
621 state->last_lvar->next = info;
623 state->first_lvar = info;
624 state->last_lvar = info;
/* Release the JVM local slot(s) held by DECL, closing its live range at
   the current label.  The consistency checks' failure actions (abort
   calls) and the slot-clearing stores are on elided lines.  */
630 localvar_free (decl, state)
632 struct jcf_partial *state;
634 struct jcf_block *end_label = get_jcf_label_here (state);
635 int index = DECL_LOCAL_INDEX (decl);
636 register struct localvar_info **ptr = &localvar_buffer [index];
637 register struct localvar_info *info = *ptr;
638 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
640 info->end_label = end_label;
642 if (info->decl != decl)
647 if (ptr[1] != (struct localvar_info *)(~0))
/* Code-generation targets: leave the value on the JVM stack, or discard it.  */
654 #define STACK_TARGET 1
655 #define IGNORE_TARGET 2
657 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
658 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
/* NOTE(review): the flags accumulator declaration, several flag-setting
   lines (ACC_PUBLIC/ACC_FINAL/ACC_SUPER/ACC_STATIC/ACC_NATIVE etc.) and
   the return are on elided lines.  */
661 get_access_flags (decl)
665 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
666 if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
668 if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */
670 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
672 if (TREE_PROTECTED (decl))
673 flags |= ACC_PROTECTED;
674 if (TREE_PRIVATE (decl))
675 flags |= ACC_PRIVATE;
677 else if (TREE_CODE (decl) == TYPE_DECL)
679 if (CLASS_SUPER (decl))
681 if (CLASS_ABSTRACT (decl))
682 flags |= ACC_ABSTRACT;
683 if (CLASS_INTERFACE (decl))
684 flags |= ACC_INTERFACE;
685 if (CLASS_STATIC (decl))
687 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
688 || LOCAL_CLASS_P (TREE_TYPE (decl)))
689 flags |= ACC_PRIVATE;
694 if (TREE_CODE (decl) == FUNCTION_DECL)
696 if (METHOD_NATIVE (decl))
698 if (METHOD_STATIC (decl))
700 if (METHOD_SYNCHRONIZED (decl))
701 flags |= ACC_SYNCHRONIZED;
702 if (METHOD_ABSTRACT (decl))
703 flags |= ACC_ABSTRACT;
707 if (FIELD_STATIC (decl))
709 if (FIELD_VOLATILE (decl))
710 flags |= ACC_VOLATILE;
711 if (FIELD_TRANSIENT (decl))
712 flags |= ACC_TRANSIENT;
717 /* Write the list of segments starting at CHUNKS to STREAM. */
/* NOTE(review): fwrite return values are not checked here (original code).  */
720 write_chunks (stream, chunks)
722 struct chunk *chunks;
724 for (; chunks != NULL; chunks = chunks->next)
725 fwrite (chunks->data, chunks->size, 1, stream);
728 /* Push a 1-word constant in the constant pool at the given INDEX.
729 (Caller is responsible for doing NOTE_PUSH.) */
/* NOTE(review): both function bodies (the ldc/ldc_w and ldc2_w emission)
   are entirely on elided lines in this view.  */
732 push_constant1 (index, state)
734 struct jcf_partial *state;
749 /* Push a 2-word constant in the constant pool at the given INDEX.
750 (Caller is responsible for doing NOTE_PUSH.) */
753 push_constant2 (index, state)
755 struct jcf_partial *state;
762 /* Push 32-bit integer constant on VM stack.
763 Caller is responsible for doing NOTE_PUSH. */
/* Chooses the shortest encoding: iconst_<n> for -1..5, then (on elided
   lines) bipush / sipush for byte/short ranges, else a constant-pool load.  */
766 push_int_const (i, state)
768 struct jcf_partial *state;
771 if (i >= -1 && i <= 5)
772 OP1(OPCODE_iconst_0 + i);
773 else if (i >= -128 && i < 128)
778 else if (i >= -32768 && i < 32768)
785 i = find_constant1 (&state->cpool, CONSTANT_Integer,
786 (jword)(i & 0xFFFFFFFF));
787 push_constant1 (i, state);
/* Find or allocate a CONSTANT_Long pool entry for the 64-bit value whose
   low/high halves are LO/HI, returning its constant-pool index.  The
   lshift_double call extracts the high 32 bits into w1.  */
792 find_constant_wide (lo, hi, state)
793 HOST_WIDE_INT lo, hi;
794 struct jcf_partial *state;
796 HOST_WIDE_INT w1, w2;
797 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
798 return find_constant2 (&state->cpool, CONSTANT_Long,
799 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
802 /* Find or allocate a constant pool entry for the given VALUE.
803 Return the index in the constant pool. */
/* Handles INTEGER_CST (Integer or Long entry by precision), REAL_CST
   (Float or Double via etarsingle/etardouble target-float conversion),
   and STRING_CST.  The words[] declaration, some mask operands and the
   fallback abort are on elided lines.  */
806 find_constant_index (value, state)
808 struct jcf_partial *state;
810 if (TREE_CODE (value) == INTEGER_CST)
812 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
813 return find_constant1 (&state->cpool, CONSTANT_Integer,
814 (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
816 return find_constant_wide (TREE_INT_CST_LOW (value),
817 TREE_INT_CST_HIGH (value), state);
819 else if (TREE_CODE (value) == REAL_CST)
822 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
824 words[0] = etarsingle (TREE_REAL_CST (value)) & 0xFFFFFFFF;
825 return find_constant1 (&state->cpool, CONSTANT_Float,
830 etardouble (TREE_REAL_CST (value), words);
831 return find_constant2 (&state->cpool, CONSTANT_Double,
832 (jword)(words[1-FLOAT_WORDS_BIG_ENDIAN] &
834 (jword)(words[FLOAT_WORDS_BIG_ENDIAN] &
838 else if (TREE_CODE (value) == STRING_CST)
839 return find_string_constant (&state->cpool, value);
845 /* Push 64-bit long constant on VM stack.
846 Caller is responsible for doing NOTE_PUSH. */
/* Uses lconst_0/lconst_1 for 0 and 1, an int push followed by i2l (the
   i2l emission is on an elided line) for values fitting a signed short,
   otherwise an ldc2_w from the constant pool.  */
849 push_long_const (lo, hi, state)
850 HOST_WIDE_INT lo, hi;
851 struct jcf_partial *state;
853 if (hi == 0 && lo >= 0 && lo <= 1)
856 OP1(OPCODE_lconst_0 + lo);
858 else if ((hi == 0 && (jword)(lo & 0xFFFFFFFF) < 32768)
859 || (hi == -1 && (lo & 0xFFFFFFFF) >= (jword)-32768))
861 push_int_const (lo, state);
866 push_constant2 (find_constant_wide (lo, hi, state), state);
/* Emit a field access instruction (get/put, static/instance per OPCODE)
   referencing FIELD through a Fieldref constant-pool entry.  The opcode
   and index emission is on elided lines.  */
870 field_op (field, opcode, state)
873 struct jcf_partial *state;
875 int index = find_fieldref_index (&state->cpool, field);
881 /* Returns an integer in the range 0 (for 'int') through 4 (for object
882 reference) to 7 (for 'short') which matches the pattern of how JVM
883 opcodes typically depend on the operand type. */
/* NOTE(review): the case labels for pointer/real/integer types, the
   'char'/boolean cases and the default abort are on elided lines;
   only the returned adjustments are visible here.  */
886 adjust_typed_op (type, max)
890 switch (TREE_CODE (type))
893 case RECORD_TYPE: return 4;
895 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
897 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
899 switch (TYPE_PRECISION (type))
901 case 8: return max < 5 ? 0 : 5;
902 case 16: return max < 7 ? 0 : 7;
908 switch (TYPE_PRECISION (type))
/* Emit OPCODE with local-slot operand INDEX, prefixing the `wide'
   modifier when INDEX does not fit in one byte.  The entire body is on
   elided lines in this view.  */
921 maybe_wide (opcode, index, state)
923 struct jcf_partial *state;
940 /* Compile code to duplicate with offset, where
941 SIZE is the size of the stack item to duplicate (1 or 2), and
942 OFFSET is where to insert the result (must be 0, 1, or 2).
943 (The new words get inserted at stack[SP-size-offset].) */
/* Selects among dup/dup2/dup_x1/dup2_x1/dup_x2/dup2_x2; the offset==0
   branch header, invalid-offset handling and the OP1/NOTE_PUSH emission
   are on elided lines.  */
946 emit_dup (size, offset, state)
948 struct jcf_partial *state;
955 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
956 else if (offset == 1)
957 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
958 else if (offset == 2)
959 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
/* Emit pop (SIZE==1) or pop2 (SIZE==2) to discard SIZE stack words.  */
967 emit_pop (size, state)
969 struct jcf_partial *state;
972 OP1 (OPCODE_pop - 1 + size);
/* Emit an iinc of VALUE for local variable VAR; uses the wide form (on
   elided lines) when the increment or slot number exceeds the narrow
   instruction's 1-byte operand ranges.  */
976 emit_iinc (var, value, state)
979 struct jcf_partial *state;
981 int slot = DECL_LOCAL_INDEX (var);
983 if (value < -128 || value > 127 || slot >= 256)
/* Emit a load or store for local VAR: the short <op>_<0-3> form when the
   slot index is below 4, else the general (possibly wide) form.  */
1001 emit_load_or_store (var, opcode, state)
1002 tree var; /* Variable to load from or store into. */
1003 int opcode; /* Either OPCODE_iload or OPCODE_istore. */
1004 struct jcf_partial *state;
1006 tree type = TREE_TYPE (var);
1007 int kind = adjust_typed_op (type, 4);
1008 int index = DECL_LOCAL_INDEX (var);
1012 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
1015 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
/* Load local VAR onto the stack, noting 1 or 2 words pushed per width.  */
1019 emit_load (var, state)
1021 struct jcf_partial *state;
1023 emit_load_or_store (var, OPCODE_iload, state);
1024 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
/* Store the stack top into local VAR, noting 1 or 2 words popped.  */
1028 emit_store (var, state)
1030 struct jcf_partial *state;
1032 emit_load_or_store (var, OPCODE_istore, state);
1033 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
/* Emit a unary opcode; its operand/result stay on the stack so no
   NOTE_PUSH/NOTE_POP visible here (body lines elided).  */
1037 emit_unop (opcode, type, state)
1038 enum java_opcode opcode;
1039 tree type ATTRIBUTE_UNUSED;
1040 struct jcf_partial *state;
/* Emit a binary opcode; pops one operand's worth of words (1 or 2 by
   TYPE width) net, per the elided NOTE_POP.  */
1047 emit_binop (opcode, type, state)
1048 enum java_opcode opcode;
1050 struct jcf_partial *state;
1052 int size = TYPE_IS_WIDE (type) ? 2 : 1;
/* Record a relocation of the given KIND at the current bytecode offset,
   targeting block TARGET, and reserve placeholder bytes for it: 4 bytes
   for kind 0 / BLOCK_START_RELOC, 2 bytes for branch kinds (the OP2/OP4
   emission and reloc->kind/value stores are on elided lines).  */
1059 emit_reloc (value, kind, target, state)
1060 HOST_WIDE_INT value;
1062 struct jcf_block *target;
1063 struct jcf_partial *state;
1065 struct jcf_relocation *reloc = (struct jcf_relocation *)
1066 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1067 struct jcf_block *block = state->last_block;
1068 reloc->next = block->u.relocations;
1069 block->u.relocations = reloc;
1070 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1071 reloc->label = target;
1073 if (kind == 0 || kind == BLOCK_START_RELOC)
1075 else if (kind != SWITCH_ALIGN_RELOC)
/* Emit a block-start-relative relocation for a switch target LABEL.  */
1080 emit_switch_reloc (label, state)
1081 struct jcf_block *label;
1082 struct jcf_partial *state;
1084 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1087 /* Similar to emit_switch_reloc,
1088 but re-uses an existing case reloc. */
1091 emit_case_reloc (reloc, state)
1092 struct jcf_relocation *reloc;
1093 struct jcf_partial *state;
1095 struct jcf_block *block = state->last_block;
1096 reloc->next = block->u.relocations;
1097 block->u.relocations = reloc;
1098 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1099 reloc->kind = BLOCK_START_RELOC;
1103 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1104 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
/* The negated inv_opcode kind tells perform_relocations there is no wide
   form: if the offset overflows 2 bytes, the branch is inverted and a
   goto_w inserted.  The OP1(opcode) emission is on an elided line.  */
1107 emit_if (target, opcode, inv_opcode, state)
1108 struct jcf_block *target;
1109 int opcode, inv_opcode;
1110 struct jcf_partial *state;
1114 /* value is 1 byte from reloc back to start of instruction. */
1115 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
/* Emit a goto to TARGET; reloc kind OPCODE_goto_w allows widening.  */
1119 emit_goto (target, state)
1120 struct jcf_block *target;
1121 struct jcf_partial *state;
1125 /* Value is 1 byte from reloc back to start of instruction. */
1126 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
/* Emit a jsr to TARGET; reloc kind OPCODE_jsr_w allows widening.  */
1130 emit_jsr (target, state)
1131 struct jcf_block *target;
1132 struct jcf_partial *state;
1136 /* Value is 1 byte from reloc back to start of instruction. */
1137 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1140 /* Generate code to evaluate EXP. If the result is true,
1141 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1142 TRUE_BRANCH_FIRST is a code generation hint that the
1143 TRUE_LABEL may follow right after this. (The idea is that we
1144 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
/* NOTE(review): many lines of this large function (case labels, braces,
   several comparison branches, abort calls) are elided from this view;
   the code below is kept byte-identical.  */
1147 generate_bytecode_conditional (exp, true_label, false_label,
1148 true_branch_first, state)
1150 struct jcf_block *true_label;
1151 struct jcf_block *false_label;
1152 int true_branch_first;
1153 struct jcf_partial *state;
1155 tree exp0, exp1, type;
1156 int save_SP = state->code_SP;
1157 enum java_opcode op, negop;
1158 switch (TREE_CODE (exp))
/* Constant condition: branch unconditionally to the matching label.  */
1161 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
/* COND_EXPR used as a condition: evaluate each arm conditionally and
   verify both arms leave the stack at the same depth.  */
1165 struct jcf_block *then_label = gen_jcf_label (state);
1166 struct jcf_block *else_label = gen_jcf_label (state);
1167 int save_SP_before, save_SP_after;
1168 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1169 then_label, else_label, 1, state);
1170 define_jcf_label (then_label, state);
1171 save_SP_before = state->code_SP;
1172 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1173 true_label, false_label, 1, state);
1174 save_SP_after = state->code_SP;
1175 state->code_SP = save_SP_before;
1176 define_jcf_label (else_label, state);
1177 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1178 true_label, false_label,
1179 true_branch_first, state);
1180 if (state->code_SP != save_SP_after)
1184 case TRUTH_NOT_EXPR:
1185 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label,
1186 true_label, ! true_branch_first, state);
1188 case TRUTH_ANDIF_EXPR:
1190 struct jcf_block *next_label = gen_jcf_label (state);
1191 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1192 next_label, false_label, 1, state);
1193 define_jcf_label (next_label, state);
1194 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1195 true_label, false_label, 1, state);
1198 case TRUTH_ORIF_EXPR:
1200 struct jcf_block *next_label = gen_jcf_label (state);
1201 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1202 true_label, next_label, 1, state);
1203 define_jcf_label (next_label, state);
1204 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1205 true_label, false_label, 1, state);
1209 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1210 set it to the corresponding 1-operand if<COND> instructions. */
1214 /* The opcodes with their inverses are allocated in pairs.
1215 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1216 negop = (op & 1) ? op + 1 : op - 1;
1218 if (true_branch_first)
1220 emit_if (false_label, negop, op, state);
1221 emit_goto (true_label, state);
1225 emit_if (true_label, op, negop, state);
1226 emit_goto (false_label, state);
/* Comparison cases: pick the if_icmp<cond> opcode for each tree code.  */
1230 op = OPCODE_if_icmpeq;
1233 op = OPCODE_if_icmpne;
1236 op = OPCODE_if_icmpgt;
1239 op = OPCODE_if_icmplt;
1242 op = OPCODE_if_icmpge;
1245 op = OPCODE_if_icmple;
1248 exp0 = TREE_OPERAND (exp, 0);
1249 exp1 = TREE_OPERAND (exp, 1);
1250 type = TREE_TYPE (exp0);
1251 switch (TREE_CODE (type))
1254 case POINTER_TYPE: case RECORD_TYPE:
1255 switch (TREE_CODE (exp))
1257 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1258 case NE_EXPR: op = OPCODE_if_acmpne; break;
/* Comparison against null: use ifnull/ifnonnull on the non-null side.  */
1261 if (integer_zerop (exp1) || integer_zerop (exp0))
1263 generate_bytecode_insns (integer_zerop (exp0) ? exp1 : exp0,
1264 STACK_TARGET, state);
1265 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1266 negop = (op & 1) ? op - 1 : op + 1;
1270 generate_bytecode_insns (exp0, STACK_TARGET, state);
1271 generate_bytecode_insns (exp1, STACK_TARGET, state);
1275 generate_bytecode_insns (exp0, STACK_TARGET, state);
1276 generate_bytecode_insns (exp1, STACK_TARGET, state);
1277 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1281 if (TYPE_PRECISION (type) > 32)
1292 if (TYPE_PRECISION (type) > 32)
1294 generate_bytecode_insns (exp0, STACK_TARGET, state);
1295 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* Integer comparison against zero can use the 1-operand if<cond>.  */
1303 if (integer_zerop (exp1))
1305 generate_bytecode_insns (exp0, STACK_TARGET, state);
1309 if (integer_zerop (exp0))
1313 case OPCODE_if_icmplt:
1314 case OPCODE_if_icmpge:
1317 case OPCODE_if_icmpgt:
1318 case OPCODE_if_icmple:
1324 generate_bytecode_insns (exp1, STACK_TARGET, state);
1328 generate_bytecode_insns (exp0, STACK_TARGET, state);
1329 generate_bytecode_insns (exp1, STACK_TARGET, state);
/* Default: evaluate EXP as a value and branch on ifne/ifeq.  */
1335 generate_bytecode_insns (exp, STACK_TARGET, state);
1337 if (true_branch_first)
1339 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1340 emit_goto (true_label, state);
1344 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1345 emit_goto (false_label, state);
1349 if (save_SP != state->code_SP)
1353 /* Call pending cleanups i.e. those for surrounding CLEANUP_POINT_EXPRs
1354 but only as far out as LIMIT (since we are about to jump to the
1355 label that is LIMIT). */
1358 call_cleanups (limit, state)
1359 struct jcf_block *limit;
1360 struct jcf_partial *state;
1362 struct jcf_block *block = state->labeled_blocks;
1363 for (; block != limit; block = block->next)
1365 if (block->pc == PENDING_CLEANUP_PC)
1366 emit_jsr (block, state);
1371 generate_bytecode_return (exp, state)
1373 struct jcf_partial *state;
1375 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
1376 int returns_void = TREE_CODE (return_type) == VOID_TYPE;
1381 switch (TREE_CODE (exp))
1384 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1386 exp = TREE_OPERAND (exp, 1);
1390 struct jcf_block *then_label = gen_jcf_label (state);
1391 struct jcf_block *else_label = gen_jcf_label (state);
1392 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1393 then_label, else_label, 1, state);
1394 define_jcf_label (then_label, state);
1395 generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1396 define_jcf_label (else_label, state);
1397 generate_bytecode_return (TREE_OPERAND (exp, 2), state);
1401 generate_bytecode_insns (exp,
1402 returns_void ? IGNORE_TARGET
1403 : STACK_TARGET, state);
1409 call_cleanups (NULL_PTR, state);
1413 op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
1414 if (state->num_finalizers > 0)
1416 if (state->return_value_decl == NULL_TREE)
1418 state->return_value_decl
1419 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1420 localvar_alloc (state->return_value_decl, state);
1422 emit_store (state->return_value_decl, state);
1423 call_cleanups (NULL_PTR, state);
1424 emit_load (state->return_value_decl, state);
1425 /* If we call localvar_free (state->return_value_decl, state),
1426 then we risk the save decl erroneously re-used in the
1427 finalizer. Instead, we keep the state->return_value_decl
1428 allocated through the rest of the method. This is not
1429 the greatest solution, but it is at least simple and safe. */
1436 /* Generate bytecode for sub-expression EXP of METHOD.
1437 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1440 generate_bytecode_insns (exp, target, state)
1443 struct jcf_partial *state;
1446 enum java_opcode jopcode;
1448 HOST_WIDE_INT value;
1453 if (exp == NULL && target == IGNORE_TARGET)
1456 type = TREE_TYPE (exp);
1458 switch (TREE_CODE (exp))
1461 if (BLOCK_EXPR_BODY (exp))
1464 tree body = BLOCK_EXPR_BODY (exp);
1465 for (local = BLOCK_EXPR_DECLS (exp); local; )
1467 tree next = TREE_CHAIN (local);
1468 localvar_alloc (local, state);
1471 /* Avoid deep recursion for long blocks. */
1472 while (TREE_CODE (body) == COMPOUND_EXPR)
1474 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1475 body = TREE_OPERAND (body, 1);
1477 generate_bytecode_insns (body, target, state);
1478 for (local = BLOCK_EXPR_DECLS (exp); local; )
1480 tree next = TREE_CHAIN (local);
1481 localvar_free (local, state);
1487 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1488 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1490 case EXPR_WITH_FILE_LOCATION:
1492 const char *saved_input_filename = input_filename;
1493 tree body = EXPR_WFL_NODE (exp);
1494 int saved_lineno = lineno;
1495 if (body == empty_stmt_node)
1497 input_filename = EXPR_WFL_FILENAME (exp);
1498 lineno = EXPR_WFL_LINENO (exp);
1499 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && lineno > 0
1500 && debug_info_level > DINFO_LEVEL_NONE)
1501 put_linenumber (lineno, state);
1502 generate_bytecode_insns (body, target, state);
1503 input_filename = saved_input_filename;
1504 lineno = saved_lineno;
1508 if (target == IGNORE_TARGET) ; /* do nothing */
1509 else if (TREE_CODE (type) == POINTER_TYPE)
1511 if (! integer_zerop (exp))
1514 OP1 (OPCODE_aconst_null);
1517 else if (TYPE_PRECISION (type) <= 32)
1519 push_int_const (TREE_INT_CST_LOW (exp), state);
1524 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1531 int prec = TYPE_PRECISION (type) >> 5;
1533 if (real_zerop (exp))
1534 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1535 else if (real_onep (exp))
1536 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1537 /* FIXME Should also use fconst_2 for 2.0f.
1538 Also, should use iconst_2/ldc followed by i2f/i2d
1539 for other float/double when the value is a small integer. */
1542 offset = find_constant_index (exp, state);
1544 push_constant1 (offset, state);
1546 push_constant2 (offset, state);
1552 push_constant1 (find_string_constant (&state->cpool, exp), state);
1556 if (TREE_STATIC (exp))
1558 field_op (exp, OPCODE_getstatic, state);
1559 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1562 /* ... fall through ... */
1564 emit_load (exp, state);
1566 case NON_LVALUE_EXPR:
1568 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1571 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1572 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1573 if (target != IGNORE_TARGET)
1575 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1578 if (! TYPE_IS_WIDE (type))
1584 tree obj = TREE_OPERAND (exp, 0);
1585 tree field = TREE_OPERAND (exp, 1);
1586 int is_static = FIELD_STATIC (field);
1587 generate_bytecode_insns (obj,
1588 is_static ? IGNORE_TARGET : target, state);
1589 if (target != IGNORE_TARGET)
1591 if (DECL_NAME (field) == length_identifier_node && !is_static
1592 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1595 OP1 (OPCODE_arraylength);
1599 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1603 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1608 case TRUTH_ANDIF_EXPR:
1609 case TRUTH_ORIF_EXPR:
1617 struct jcf_block *then_label = gen_jcf_label (state);
1618 struct jcf_block *else_label = gen_jcf_label (state);
1619 struct jcf_block *end_label = gen_jcf_label (state);
1620 generate_bytecode_conditional (exp,
1621 then_label, else_label, 1, state);
1622 define_jcf_label (then_label, state);
1623 push_int_const (1, state);
1624 emit_goto (end_label, state);
1625 define_jcf_label (else_label, state);
1626 push_int_const (0, state);
1627 define_jcf_label (end_label, state);
1633 struct jcf_block *then_label = gen_jcf_label (state);
1634 struct jcf_block *else_label = gen_jcf_label (state);
1635 struct jcf_block *end_label = gen_jcf_label (state);
1636 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1637 then_label, else_label, 1, state);
1638 define_jcf_label (then_label, state);
1639 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1640 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1641 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1642 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1643 emit_goto (end_label, state);
1644 define_jcf_label (else_label, state);
1645 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1646 define_jcf_label (end_label, state);
1647 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1648 if (TREE_TYPE (exp) != void_type_node)
1649 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1654 struct jcf_switch_state *sw_state = state->sw_state;
1655 struct jcf_relocation *reloc = (struct jcf_relocation *)
1656 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1657 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1659 reloc->label = get_jcf_label_here (state);
1660 reloc->offset = case_value;
1661 reloc->next = sw_state->cases;
1662 sw_state->cases = reloc;
1663 if (sw_state->num_cases == 0)
1665 sw_state->min_case = case_value;
1666 sw_state->max_case = case_value;
1670 if (case_value < sw_state->min_case)
1671 sw_state->min_case = case_value;
1672 if (case_value > sw_state->max_case)
1673 sw_state->max_case = case_value;
1675 sw_state->num_cases++;
1679 state->sw_state->default_label = get_jcf_label_here (state);
1684 /* The SWITCH_EXPR has three parts, generated in the following order:
1685 1. the switch_expression (the value used to select the correct case);
1687 3. the switch_instruction (the tableswitch/loopupswitch instruction.).
1688 After code generation, we will re-order then in the order 1, 3, 2.
1689 This is to avoid an extra GOTOs. */
1690 struct jcf_switch_state sw_state;
1691 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1692 struct jcf_block *body_last; /* Last block of the switch_body. */
1693 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1694 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1695 struct jcf_block *body_block;
1697 sw_state.prev = state->sw_state;
1698 state->sw_state = &sw_state;
1699 sw_state.cases = NULL;
1700 sw_state.num_cases = 0;
1701 sw_state.default_label = NULL;
1702 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1703 expression_last = state->last_block;
1704 body_block = get_jcf_label_here (state); /* Force a new block here. */
1705 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1706 body_last = state->last_block;
1708 switch_instruction = gen_jcf_label (state);
1709 define_jcf_label (switch_instruction, state);
1710 if (sw_state.default_label == NULL)
1711 sw_state.default_label = gen_jcf_label (state);
1713 if (sw_state.num_cases <= 1)
1715 if (sw_state.num_cases == 0)
1717 emit_pop (1, state);
1722 push_int_const (sw_state.cases->offset, state);
1723 emit_if (sw_state.cases->label,
1724 OPCODE_ifeq, OPCODE_ifne, state);
1726 emit_goto (sw_state.default_label, state);
1731 /* Copy the chain of relocs into a sorted array. */
1732 struct jcf_relocation **relocs = (struct jcf_relocation **)
1733 xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1734 /* The relocs arrays is a buffer with a gap.
1735 The assumption is that cases will normally come in "runs". */
1737 int gap_end = sw_state.num_cases;
1738 struct jcf_relocation *reloc;
1739 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1741 HOST_WIDE_INT case_value = reloc->offset;
1742 while (gap_end < sw_state.num_cases)
1744 struct jcf_relocation *end = relocs[gap_end];
1745 if (case_value <= end->offset)
1747 relocs[gap_start++] = end;
1750 while (gap_start > 0)
1752 struct jcf_relocation *before = relocs[gap_start-1];
1753 if (case_value >= before->offset)
1755 relocs[--gap_end] = before;
1758 relocs[gap_start++] = reloc;
1759 /* Note we don't check for duplicates. FIXME! */
1762 if (2 * sw_state.num_cases
1763 >= sw_state.max_case - sw_state.min_case)
1764 { /* Use tableswitch. */
1766 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1767 OP1 (OPCODE_tableswitch);
1768 emit_reloc (RELOCATION_VALUE_0,
1769 SWITCH_ALIGN_RELOC, NULL, state);
1770 emit_switch_reloc (sw_state.default_label, state);
1771 OP4 (sw_state.min_case);
1772 OP4 (sw_state.max_case);
1773 for (i = sw_state.min_case; ; )
1775 reloc = relocs[index];
1776 if (i == reloc->offset)
1778 emit_case_reloc (reloc, state);
1779 if (i == sw_state.max_case)
1784 emit_switch_reloc (sw_state.default_label, state);
1789 { /* Use lookupswitch. */
1790 RESERVE(9 + 8 * sw_state.num_cases);
1791 OP1 (OPCODE_lookupswitch);
1792 emit_reloc (RELOCATION_VALUE_0,
1793 SWITCH_ALIGN_RELOC, NULL, state);
1794 emit_switch_reloc (sw_state.default_label, state);
1795 OP4 (sw_state.num_cases);
1796 for (i = 0; i < sw_state.num_cases; i++)
1798 struct jcf_relocation *reloc = relocs[i];
1799 OP4 (reloc->offset);
1800 emit_case_reloc (reloc, state);
1806 instruction_last = state->last_block;
1807 if (sw_state.default_label->pc < 0)
1808 define_jcf_label (sw_state.default_label, state);
1809 else /* Force a new block. */
1810 sw_state.default_label = get_jcf_label_here (state);
1811 /* Now re-arrange the blocks so the switch_instruction
1812 comes before the switch_body. */
1813 switch_length = state->code_length - switch_instruction->pc;
1814 switch_instruction->pc = body_block->pc;
1815 instruction_last->next = body_block;
1816 instruction_last->v.chunk->next = body_block->v.chunk;
1817 expression_last->next = switch_instruction;
1818 expression_last->v.chunk->next = switch_instruction->v.chunk;
1819 body_last->next = sw_state.default_label;
1820 body_last->v.chunk->next = NULL;
1821 state->chunk = body_last->v.chunk;
1822 for (; body_block != sw_state.default_label; body_block = body_block->next)
1823 body_block->pc += switch_length;
1825 state->sw_state = sw_state.prev;
1830 exp = TREE_OPERAND (exp, 0);
1831 if (exp == NULL_TREE)
1832 exp = empty_stmt_node;
1833 else if (TREE_CODE (exp) != MODIFY_EXPR)
1836 exp = TREE_OPERAND (exp, 1);
1837 generate_bytecode_return (exp, state);
1839 case LABELED_BLOCK_EXPR:
1841 struct jcf_block *end_label = gen_jcf_label (state);
1842 end_label->next = state->labeled_blocks;
1843 state->labeled_blocks = end_label;
1844 end_label->pc = PENDING_EXIT_PC;
1845 end_label->u.labeled_block = exp;
1846 if (LABELED_BLOCK_BODY (exp))
1847 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1848 if (state->labeled_blocks != end_label)
1850 state->labeled_blocks = end_label->next;
1851 define_jcf_label (end_label, state);
1856 tree body = TREE_OPERAND (exp, 0);
1858 if (TREE_CODE (body) == COMPOUND_EXPR
1859 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1861 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1862 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1863 struct jcf_block *head_label;
1864 struct jcf_block *body_label;
1865 struct jcf_block *end_label = gen_jcf_label (state);
1866 struct jcf_block *exit_label = state->labeled_blocks;
1867 head_label = gen_jcf_label (state);
1868 emit_goto (head_label, state);
1869 body_label = get_jcf_label_here (state);
1870 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1871 define_jcf_label (head_label, state);
1872 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1873 end_label, body_label, 1, state);
1874 define_jcf_label (end_label, state);
1879 struct jcf_block *head_label = get_jcf_label_here (state);
1880 generate_bytecode_insns (body, IGNORE_TARGET, state);
1881 emit_goto (head_label, state);
1887 struct jcf_block *label = state->labeled_blocks;
1888 struct jcf_block *end_label = gen_jcf_label (state);
1889 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1890 label, end_label, 0, state);
1891 define_jcf_label (end_label, state);
1894 case EXIT_BLOCK_EXPR:
1896 struct jcf_block *label = state->labeled_blocks;
1897 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1898 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1899 label = label->next;
1900 call_cleanups (label, state);
1901 emit_goto (label, state);
1905 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1906 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1907 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1908 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1911 exp = TREE_OPERAND (exp, 0);
1912 type = TREE_TYPE (exp);
1913 size = TYPE_IS_WIDE (type) ? 2 : 1;
1914 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1915 && ! TREE_STATIC (exp)
1916 && TREE_CODE (type) == INTEGER_TYPE
1917 && TYPE_PRECISION (type) == 32)
1919 if (target != IGNORE_TARGET && post_op)
1920 emit_load (exp, state);
1921 emit_iinc (exp, value, state);
1922 if (target != IGNORE_TARGET && ! post_op)
1923 emit_load (exp, state);
1926 if (TREE_CODE (exp) == COMPONENT_REF)
1928 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1929 emit_dup (1, 0, state);
1930 /* Stack: ..., objectref, objectref. */
1931 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1933 /* Stack: ..., objectref, oldvalue. */
1936 else if (TREE_CODE (exp) == ARRAY_REF)
1938 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1939 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1940 emit_dup (2, 0, state);
1941 /* Stack: ..., array, index, array, index. */
1942 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1946 /* Stack: ..., array, index, oldvalue. */
1949 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1951 generate_bytecode_insns (exp, STACK_TARGET, state);
1952 /* Stack: ..., oldvalue. */
1958 if (target != IGNORE_TARGET && post_op)
1959 emit_dup (size, offset, state);
1960 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1961 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1962 /* Stack, otherwise: ..., [result, ] oldvalue. */
1964 push_int_const (value, state);
1966 push_long_const (value, (HOST_WIDE_INT)(value >= 0 ? 0 : -1), state);
1968 emit_binop (OPCODE_iadd + adjust_typed_op (type, 3), type, state);
1969 if (target != IGNORE_TARGET && ! post_op)
1970 emit_dup (size, offset, state);
1971 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1972 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1973 /* Stack, otherwise: ..., [result, ] newvalue. */
1974 goto finish_assignment;
1978 tree lhs = TREE_OPERAND (exp, 0);
1979 tree rhs = TREE_OPERAND (exp, 1);
1982 /* See if we can use the iinc instruction. */
1983 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1984 && ! TREE_STATIC (lhs)
1985 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1986 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1987 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1989 tree arg0 = TREE_OPERAND (rhs, 0);
1990 tree arg1 = TREE_OPERAND (rhs, 1);
1991 HOST_WIDE_INT min_value = -32768;
1992 HOST_WIDE_INT max_value = 32767;
1993 if (TREE_CODE (rhs) == MINUS_EXPR)
1998 else if (arg1 == lhs)
2001 arg1 = TREE_OPERAND (rhs, 0);
2003 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
2005 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
2006 value = TREE_INT_CST_LOW (arg1);
2007 if ((hi_value == 0 && value <= max_value)
2008 || (hi_value == -1 && value >= min_value))
2010 if (TREE_CODE (rhs) == MINUS_EXPR)
2012 emit_iinc (lhs, value, state);
2013 if (target != IGNORE_TARGET)
2014 emit_load (lhs, state);
2020 if (TREE_CODE (lhs) == COMPONENT_REF)
2022 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
2023 STACK_TARGET, state);
2026 else if (TREE_CODE (lhs) == ARRAY_REF)
2028 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
2029 STACK_TARGET, state);
2030 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
2031 STACK_TARGET, state);
2036 generate_bytecode_insns (rhs, STACK_TARGET, state);
2037 if (target != IGNORE_TARGET)
2038 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
2044 if (TREE_CODE (exp) == COMPONENT_REF)
2046 tree field = TREE_OPERAND (exp, 1);
2047 if (! FIELD_STATIC (field))
2050 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2053 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2055 else if (TREE_CODE (exp) == VAR_DECL
2056 || TREE_CODE (exp) == PARM_DECL)
2058 if (FIELD_STATIC (exp))
2060 field_op (exp, OPCODE_putstatic, state);
2061 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2064 emit_store (exp, state);
2066 else if (TREE_CODE (exp) == ARRAY_REF)
2068 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2071 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2077 jopcode = OPCODE_iadd;
2080 jopcode = OPCODE_isub;
2083 jopcode = OPCODE_imul;
2085 case TRUNC_DIV_EXPR:
2087 jopcode = OPCODE_idiv;
2089 case TRUNC_MOD_EXPR:
2090 jopcode = OPCODE_irem;
2092 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2093 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
2094 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2095 case TRUTH_AND_EXPR:
2096 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2098 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2099 case TRUTH_XOR_EXPR:
2100 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2103 tree arg0 = TREE_OPERAND (exp, 0);
2104 tree arg1 = TREE_OPERAND (exp, 1);
2105 jopcode += adjust_typed_op (type, 3);
2106 if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
2108 /* fold may (e.g) convert 2*x to x+x. */
2109 generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
2110 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2114 generate_bytecode_insns (arg0, target, state);
2115 if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2116 arg1 = convert (int_type_node, arg1);
2117 generate_bytecode_insns (arg1, target, state);
2119 /* For most binary operations, both operands and the result have the
2120 same type. Shift operations are different. Using arg1's type
2121 gets us the correct SP adjustment in all cases. */
2122 if (target == STACK_TARGET)
2123 emit_binop (jopcode, TREE_TYPE (arg1), state);
2126 case TRUTH_NOT_EXPR:
2128 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2129 if (target == STACK_TARGET)
2131 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2132 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2136 NOTE_PUSH (1 + is_long);
2137 OP1 (OPCODE_ixor + is_long);
2138 NOTE_POP (1 + is_long);
2142 jopcode = OPCODE_ineg;
2143 jopcode += adjust_typed_op (type, 3);
2144 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2145 if (target == STACK_TARGET)
2146 emit_unop (jopcode, type, state);
2148 case INSTANCEOF_EXPR:
2150 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2151 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2153 OP1 (OPCODE_instanceof);
2158 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2163 case FIX_TRUNC_EXPR:
2165 tree src = TREE_OPERAND (exp, 0);
2166 tree src_type = TREE_TYPE (src);
2167 tree dst_type = TREE_TYPE (exp);
2168 /* Detect the situation of compiling an empty synchronized
2169 block. A nop should be emitted in order to produce
2170 verifiable bytecode. */
2171 if (exp == empty_stmt_node
2172 && state->last_bc == OPCODE_monitorenter
2173 && state->labeled_blocks
2174 && state->labeled_blocks->pc == PENDING_CLEANUP_PC)
2177 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2178 if (target == IGNORE_TARGET || src_type == dst_type)
2180 if (TREE_CODE (dst_type) == POINTER_TYPE)
2182 if (TREE_CODE (exp) == CONVERT_EXPR)
2184 int index = find_class_constant (&state->cpool,
2185 TREE_TYPE (dst_type));
2187 OP1 (OPCODE_checkcast);
2191 else /* Convert numeric types. */
2193 int wide_src = TYPE_PRECISION (src_type) > 32;
2194 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2195 NOTE_POP (1 + wide_src);
2197 if (TREE_CODE (dst_type) == REAL_TYPE)
2199 if (TREE_CODE (src_type) == REAL_TYPE)
2200 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2201 else if (TYPE_PRECISION (src_type) == 64)
2202 OP1 (OPCODE_l2f + wide_dst);
2204 OP1 (OPCODE_i2f + wide_dst);
2206 else /* Convert to integral type. */
2208 if (TREE_CODE (src_type) == REAL_TYPE)
2209 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2214 if (TYPE_PRECISION (dst_type) < 32)
2217 /* Already converted to int, if needed. */
2218 if (TYPE_PRECISION (dst_type) <= 8)
2220 else if (TREE_UNSIGNED (dst_type))
2226 NOTE_PUSH (1 + wide_dst);
2231 case CLEANUP_POINT_EXPR:
2233 struct jcf_block *save_labeled_blocks = state->labeled_blocks;
2234 int can_complete = CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0));
2235 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
2236 if (target != IGNORE_TARGET)
2238 while (state->labeled_blocks != save_labeled_blocks)
2240 struct jcf_block *finished_label = NULL;
2242 tree exception_type = build_pointer_type (throwable_type_node);
2243 tree exception_decl = build_decl (VAR_DECL, NULL_TREE,
2245 struct jcf_block *end_label = get_jcf_label_here (state);
2246 struct jcf_block *label = state->labeled_blocks;
2247 struct jcf_handler *handler;
2248 tree cleanup = label->u.labeled_block;
2249 state->labeled_blocks = label->next;
2250 state->num_finalizers--;
2253 finished_label = gen_jcf_label (state);
2254 emit_jsr (label, state);
2255 emit_goto (finished_label, state);
2256 if (! CAN_COMPLETE_NORMALLY (cleanup))
2259 handler = alloc_handler (label->v.start_label, end_label, state);
2260 handler->type = NULL_TREE;
2261 localvar_alloc (exception_decl, state);
2263 emit_store (exception_decl, state);
2264 emit_jsr (label, state);
2265 emit_load (exception_decl, state);
2267 OP1 (OPCODE_athrow);
2270 /* The finally block. */
2271 return_link = build_decl (VAR_DECL, NULL_TREE,
2272 return_address_type_node);
2273 define_jcf_label (label, state);
2275 localvar_alloc (return_link, state);
2276 emit_store (return_link, state);
2277 generate_bytecode_insns (cleanup, IGNORE_TARGET, state);
2278 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2279 localvar_free (return_link, state);
2280 localvar_free (exception_decl, state);
2281 if (finished_label != NULL)
2282 define_jcf_label (finished_label, state);
2287 case WITH_CLEANUP_EXPR:
2289 struct jcf_block *label;
2290 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
2291 label = gen_jcf_label (state);
2292 label->pc = PENDING_CLEANUP_PC;
2293 label->next = state->labeled_blocks;
2294 state->labeled_blocks = label;
2295 state->num_finalizers++;
2296 label->u.labeled_block = TREE_OPERAND (exp, 2);
2297 label->v.start_label = get_jcf_label_here (state);
2298 if (target != IGNORE_TARGET)
2305 tree try_clause = TREE_OPERAND (exp, 0);
2306 struct jcf_block *start_label = get_jcf_label_here (state);
2307 struct jcf_block *end_label; /* End of try clause. */
2308 struct jcf_block *finished_label = gen_jcf_label (state);
2309 tree clause = TREE_OPERAND (exp, 1);
2310 if (target != IGNORE_TARGET)
2312 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2313 end_label = get_jcf_label_here (state);
2314 if (end_label == start_label)
2316 if (CAN_COMPLETE_NORMALLY (try_clause))
2317 emit_goto (finished_label, state);
2318 while (clause != NULL_TREE)
2320 tree catch_clause = TREE_OPERAND (clause, 0);
2321 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2322 struct jcf_handler *handler = alloc_handler (start_label, end_label, state);
2323 if (exception_decl == NULL_TREE)
2324 handler->type = NULL_TREE;
2326 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2327 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2328 clause = TREE_CHAIN (clause);
2329 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2330 emit_goto (finished_label, state);
2332 define_jcf_label (finished_label, state);
2335 case TRY_FINALLY_EXPR:
2337 struct jcf_block *finished_label,
2338 *finally_label, *start_label, *end_label;
2339 struct jcf_handler *handler;
2340 tree try_block = TREE_OPERAND (exp, 0);
2341 tree finally = TREE_OPERAND (exp, 1);
2342 tree return_link = NULL_TREE, exception_decl = NULL_TREE;
2344 tree exception_type;
2346 finally_label = gen_jcf_label (state);
2347 start_label = get_jcf_label_here (state);
2348 finally_label->pc = PENDING_CLEANUP_PC;
2349 finally_label->next = state->labeled_blocks;
2350 state->labeled_blocks = finally_label;
2351 state->num_finalizers++;
2353 generate_bytecode_insns (try_block, target, state);
2354 if (state->labeled_blocks != finally_label)
2356 state->labeled_blocks = finally_label->next;
2357 end_label = get_jcf_label_here (state);
2359 if (end_label == start_label)
2361 state->num_finalizers--;
2362 define_jcf_label (finally_label, state);
2363 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2367 return_link = build_decl (VAR_DECL, NULL_TREE,
2368 return_address_type_node);
2369 finished_label = gen_jcf_label (state);
2372 if (CAN_COMPLETE_NORMALLY (try_block))
2374 emit_jsr (finally_label, state);
2375 emit_goto (finished_label, state);
2378 /* Handle exceptions. */
2380 exception_type = build_pointer_type (throwable_type_node);
2381 exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2382 localvar_alloc (return_link, state);
2383 handler = alloc_handler (start_label, end_label, state);
2384 handler->type = NULL_TREE;
2385 localvar_alloc (exception_decl, state);
2387 emit_store (exception_decl, state);
2388 emit_jsr (finally_label, state);
2389 emit_load (exception_decl, state);
2391 OP1 (OPCODE_athrow);
2394 /* The finally block. First save return PC into return_link. */
2395 define_jcf_label (finally_label, state);
2397 emit_store (return_link, state);
2399 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2400 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2401 localvar_free (exception_decl, state);
2402 localvar_free (return_link, state);
2403 define_jcf_label (finished_label, state);
2407 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2409 OP1 (OPCODE_athrow);
2411 case NEW_ARRAY_INIT:
2413 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2414 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2415 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2416 HOST_WIDE_INT length = java_array_type_length (array_type);
2417 if (target == IGNORE_TARGET)
2419 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2420 generate_bytecode_insns (TREE_VALUE (values), target, state);
2423 push_int_const (length, state);
2426 if (JPRIMITIVE_TYPE_P (element_type))
2428 int atype = encode_newarray_type (element_type);
2429 OP1 (OPCODE_newarray);
2434 int index = find_class_constant (&state->cpool,
2435 TREE_TYPE (element_type));
2436 OP1 (OPCODE_anewarray);
2440 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2441 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2443 int save_SP = state->code_SP;
2444 emit_dup (1, 0, state);
2445 push_int_const (offset, state);
2447 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2450 state->code_SP = save_SP;
2454 case NEW_CLASS_EXPR:
2456 tree class = TREE_TYPE (TREE_TYPE (exp));
2457 int need_result = target != IGNORE_TARGET;
2458 int index = find_class_constant (&state->cpool, class);
2464 NOTE_PUSH (1 + need_result);
2466 /* ... fall though ... */
2469 tree f = TREE_OPERAND (exp, 0);
2470 tree x = TREE_OPERAND (exp, 1);
2471 int save_SP = state->code_SP;
2473 if (TREE_CODE (f) == ADDR_EXPR)
2474 f = TREE_OPERAND (f, 0);
2475 if (f == soft_newarray_node)
2477 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2478 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2479 STACK_TARGET, state);
2481 OP1 (OPCODE_newarray);
2485 else if (f == soft_multianewarray_node)
2489 int index = find_class_constant (&state->cpool,
2490 TREE_TYPE (TREE_TYPE (exp)));
2491 x = TREE_CHAIN (x); /* Skip class argument. */
2492 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2493 for (idim = ndims; --idim >= 0; )
2496 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2499 OP1 (OPCODE_multianewarray);
2504 else if (f == soft_anewarray_node)
2506 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2507 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2508 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2510 OP1 (OPCODE_anewarray);
2514 else if (f == soft_monitorenter_node
2515 || f == soft_monitorexit_node
2518 if (f == soft_monitorenter_node)
2519 op = OPCODE_monitorenter;
2520 else if (f == soft_monitorexit_node)
2521 op = OPCODE_monitorexit;
2524 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2530 else if (exp == soft_exceptioninfo_call_node)
2532 NOTE_PUSH (1); /* Pushed by exception system. */
2535 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2537 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2539 nargs = state->code_SP - save_SP;
2540 state->code_SP = save_SP;
2541 if (f == soft_fmod_node)
2548 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2549 NOTE_POP (1); /* Pop implicit this. */
2550 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2552 tree saved_context = NULL_TREE;
2553 int index, interface = 0;
2555 if (METHOD_STATIC (f))
2556 OP1 (OPCODE_invokestatic);
2557 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2558 || METHOD_PRIVATE (f))
2559 OP1 (OPCODE_invokespecial);
2560 else if (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (f))))
2562 OP1 (OPCODE_invokeinterface);
2566 OP1 (OPCODE_invokevirtual);
2569 saved_context = DECL_CONTEXT (f);
2571 TREE_TYPE (TREE_TYPE (TREE_VALUE (TREE_OPERAND (exp, 1))));
2573 index = find_methodref_index (&state->cpool, f);
2577 DECL_CONTEXT (f) = saved_context;
2584 f = TREE_TYPE (TREE_TYPE (f));
2585 if (TREE_CODE (f) != VOID_TYPE)
2587 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2588 if (target == IGNORE_TARGET)
2589 emit_pop (size, state);
2599 error("internal error in generate_bytecode_insn - tree code not implemented: %s",
2600 tree_code_name [(int) TREE_CODE (exp)]);
/* Resolve all pending branch/switch relocations for the method being
   assembled in STATE, and rewrite every basic block's bytecode chunk
   into its final form.  On exit, state->code_length holds the final
   code size.  Relocation lists are stored in reverse order per block.  */
2605 perform_relocations (state)
2606 struct jcf_partial *state;
2608 struct jcf_block *block;
2609 struct jcf_relocation *reloc;
2613 /* Before we start, the pc field of each block is an upper bound on
2614 the block's start pc (it may be less, if previous blocks need less
2615 than their maximum).
2617 The minimum size of each block is in the block's chunk->size. */
2619 /* First, figure out the actual locations of each block. */
2622 for (block = state->blocks; block != NULL; block = block->next)
2624 int block_size = block->v.chunk->size;
2628 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2629 Assumes relocations are in reverse order. */
2630 reloc = block->u.relocations;
2631 while (reloc != NULL
2632 && reloc->kind == OPCODE_goto_w
2633 && reloc->label->pc == block->next->pc
2634 && reloc->offset + 2 == block_size)
2636 reloc = reloc->next;
2637 block->u.relocations = reloc;
/* Drop the 3-byte goto instruction (1 opcode byte + 2 offset bytes).  */
2638 block->v.chunk->size -= 3;
2643 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2645 if (reloc->kind == SWITCH_ALIGN_RELOC)
2647 /* We assume this is the first relocation in this block,
2648 so we know its final pc. */
2649 int where = pc + reloc->offset;
/* tableswitch/lookupswitch operands must be 4-byte aligned.  */
2650 int pad = ((where + 3) & ~3) - where;
2653 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2655 int delta = reloc->label->pc - (pc + reloc->offset - 1);
/* If the branch target is out of 16-bit range, the instruction must
   be expanded: goto/jsr grow by 2 bytes (to the _w form); conditional
   branches grow by 5 (ifNCOND around a goto_w) — see the rewrite pass
   below.  */
2656 int expand = reloc->kind > 0 ? 2 : 5;
2660 if (delta >= -32768 && delta <= 32767)
2666 block_size += expand;
/* Second pass: rewrite each block's chunk into its final buffer,
   applying the relocations computed above.  */
2672 for (block = state->blocks; block != NULL; block = block->next)
2674 struct chunk *chunk = block->v.chunk;
2675 int old_size = chunk->size;
2676 int next_pc = block->next == NULL ? pc : block->next->pc;
2677 int new_size = next_pc - block->pc;
2678 unsigned char *new_ptr;
2679 unsigned char *old_buffer = chunk->data;
2680 unsigned char *old_ptr = old_buffer + old_size;
2681 if (new_size != old_size)
2683 chunk->data = (unsigned char *)
2684 obstack_alloc (state->chunk_obstack, new_size);
2685 chunk->size = new_size;
2687 new_ptr = chunk->data + new_size;
2689 /* We do the relocations from back to front, because
2690 the relocations are in reverse order. */
2691 for (reloc = block->u.relocations; ; reloc = reloc->next)
2693 /* new_ptr and old_ptr point into the old and new buffers,
2694 respectively. (If no relocations cause the buffer to
2695 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2696 The bytes at higher adress have been copied and relocations
2697 handled; those at lower addresses remain to process. */
2699 /* Lower old index of piece to be copied with no relocation.
2700 I.e. high index of the first piece that does need relocation. */
2701 int start = reloc == NULL ? 0
2702 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2703 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2705 : reloc->offset + 2;
2708 int n = (old_ptr - old_buffer) - start;
/* Copy the unrelocated tail piece verbatim.  */
2712 memcpy (new_ptr, old_ptr, n);
2713 if (old_ptr == old_buffer)
2716 new_offset = new_ptr - chunk->data;
/* Back up over the operand we are about to rewrite: 2 bytes for a
   short branch (kind == -1), otherwise 4.  */
2717 new_offset -= (reloc->kind == -1 ? 2 : 4);
2718 if (reloc->kind == 0)
2721 value = GET_u4 (old_ptr);
2723 else if (reloc->kind == BLOCK_START_RELOC)
2729 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2731 int where = block->pc + reloc->offset;
2732 int pad = ((where + 3) & ~3) - where;
2740 value = GET_u2 (old_ptr);
/* Convert the stored value into a pc-relative branch offset.  */
2742 value += reloc->label->pc - (block->pc + new_offset);
/* Emit the offset big-endian, low byte first since we write backward.  */
2743 *--new_ptr = (unsigned char) value; value >>= 8;
2744 *--new_ptr = (unsigned char) value; value >>= 8;
2745 if (reloc->kind != -1)
2747 *--new_ptr = (unsigned char) value; value >>= 8;
2748 *--new_ptr = (unsigned char) value;
2750 if (reloc->kind > BLOCK_START_RELOC)
2752 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2754 *--new_ptr = reloc->kind;
2756 else if (reloc->kind < -1)
2758 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2760 *--new_ptr = OPCODE_goto_w;
/* The negated kind encodes the inverted conditional opcode.  */
2763 *--new_ptr = - reloc->kind;
/* Sanity check: the rewrite must have consumed the entire new buffer.  */
2766 if (new_ptr != chunk->data)
2769 state->code_length = pc;
/* Initialize STATE for writing a class file, using obstack WORK for
   chunk allocation.  Sets up the constant pool and the localvars and
   bytecode buffers; per-method fields are set by init_jcf_method.  */
2773 init_jcf_state (state, work)
2774 struct jcf_partial *state;
2775 struct obstack *work;
2777 state->chunk_obstack = work;
2778 state->first = state->chunk = NULL;
2779 CPOOL_INIT (&state->cpool);
2780 BUFFER_INIT (&state->localvars);
2781 BUFFER_INIT (&state->bytecode);
/* Reset the per-method fields of STATE before generating code for
   METHOD: block lists, line-number and local-variable bookkeeping,
   exception handlers, and the bytecode/localvars buffers.  */
2785 init_jcf_method (state, method)
2786 struct jcf_partial *state;
2789 state->current_method = method;
2790 state->blocks = state->last_block = NULL;
2791 state->linenumber_count = 0;
2792 state->first_lvar = state->last_lvar = NULL;
2793 state->lvar_count = 0;
2794 state->labeled_blocks = NULL;
2795 state->code_length = 0;
2796 BUFFER_RESET (&state->bytecode);
2797 BUFFER_RESET (&state->localvars);
2799 state->code_SP_max = 0;
2800 state->handlers = NULL;
2801 state->last_handler = NULL;
2802 state->num_handlers = 0;
2803 state->num_finalizers = 0;
2804 state->return_value_decl = NULL_TREE;
/* Release all resources held by STATE: finish the constant pool and
   free everything allocated on the chunk obstack since state->first.  */
2808 release_jcf_state (state)
2809 struct jcf_partial *state;
2811 CPOOL_FINISH (&state->cpool);
2812 obstack_free (state->chunk_obstack, state->first);
2815 /* Generate and return a list of chunks containing the class CLAS
2816 in the .class file representation. The list can be written to a
2817 .class file using write_chunks. Allocate chunks from obstack WORK. */
2819 static struct chunk *
2820 generate_classfile (clas, state)
2822 struct jcf_partial *state;
2824 struct chunk *cpool_chunk;
2825 const char *source_file, *s;
2828 char *fields_count_ptr;
2829 int fields_count = 0;
2830 char *methods_count_ptr;
2831 int methods_count = 0;
2832 static tree SourceFile_node = NULL_TREE;
/* total_supers counts superclass + implemented interfaces; Object has none.  */
2835 = clas == object_type_node ? 0
2836 : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));
/* Emit the fixed 8-byte header: magic number and class file version 45.3.  */
2838 ptr = append_chunk (NULL, 8, state);
2839 PUT4 (0xCafeBabe); /* Magic number */
2840 PUT2 (3); /* Minor version */
2841 PUT2 (45); /* Major version */
/* Reserve an empty chunk for the constant pool; its contents are only
   known (and filled in) at the very end, once everything has been
   interned into state->cpool.  */
2843 append_chunk (NULL, 0, state);
2844 cpool_chunk = state->chunk;
2846 /* Next allocate the chunk containing access_flags through fields_count. */
2847 if (clas == object_type_node)
2850 i = 8 + 2 * total_supers;
2851 ptr = append_chunk (NULL, i, state);
2852 i = get_access_flags (TYPE_NAME (clas));
2853 if (! (i & ACC_INTERFACE))
2855 PUT2 (i); /* access_flags */
2856 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2857 if (clas == object_type_node)
2859 PUT2(0); /* super_class */
2860 PUT2(0); /* interfaces_count */
2864 tree basetypes = TYPE_BINFO_BASETYPES (clas);
2865 tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
2866 int j = find_class_constant (&state->cpool, base);
2867 PUT2 (j); /* super_class */
2868 PUT2 (total_supers - 1); /* interfaces_count */
/* Basetype 0 is the superclass; the rest are implemented interfaces.  */
2869 for (i = 1; i < total_supers; i++)
2871 base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
2872 j = find_class_constant (&state->cpool, base);
/* Remember where fields_count goes; back-patched after the field loop.  */
2876 fields_count_ptr = ptr;
/* Emit the field_info entries.  */
2878 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
2880 int have_value, attr_count = 0;
/* Skip compiler-generated and unnamed fields.  */
2881 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
2883 ptr = append_chunk (NULL, 8, state);
2884 i = get_access_flags (part); PUT2 (i);
2885 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
2886 i = find_utf8_constant (&state->cpool,
2887 build_java_signature (TREE_TYPE (part)));
/* A ConstantValue attribute is only emitted for static fields whose
   initializer is a string, a non-pointer integer, or a real constant.  */
2889 have_value = DECL_INITIAL (part) != NULL_TREE
2890 && FIELD_STATIC (part)
2891 && (TREE_CODE (DECL_INITIAL (part)) == STRING_CST
2892 || (TREE_CODE (DECL_INITIAL (part)) == INTEGER_CST
2893 && TREE_CODE (TREE_TYPE (DECL_INITIAL (part))) != POINTER_TYPE)
2894 || TREE_CODE (DECL_INITIAL (part)) == REAL_CST);
2898 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part))
2901 PUT2 (attr_count); /* attributes_count */
2904 tree init = DECL_INITIAL (part);
2905 static tree ConstantValue_node = NULL_TREE;
2906 ptr = append_chunk (NULL, 8, state);
2907 if (ConstantValue_node == NULL_TREE)
2908 ConstantValue_node = get_identifier ("ConstantValue");
2909 i = find_utf8_constant (&state->cpool, ConstantValue_node);
2910 PUT2 (i); /* attribute_name_index */
2911 PUT4 (2); /* attribute_length */
2912 i = find_constant_index (init, state); PUT2 (i);
2914 /* Emit the "Synthetic" attribute for val$<x> and this$<n> fields. */
2915 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part))
2916 ptr = append_synthetic_attribute (state);
/* Back-patch the field count now that it is known.  */
2919 ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
2921 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
/* Emit the method_info entries.  */
2924 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
2926 struct jcf_block *block;
2927 tree function_body = DECL_FUNCTION_BODY (part);
2928 tree body = function_body == NULL_TREE ? NULL_TREE
2929 : BLOCK_EXPR_BODY (function_body);
2930 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
2932 tree type = TREE_TYPE (part);
2933 tree save_function = current_function_decl;
2934 int synthetic_p = 0;
2935 current_function_decl = part;
2936 ptr = append_chunk (NULL, 8, state);
2937 i = get_access_flags (part); PUT2 (i);
2938 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
2939 i = find_utf8_constant (&state->cpool, build_java_signature (type));
/* Attribute count: one Code attribute if there is a body, one
   Exceptions attribute if there is a throws clause.  */
2941 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
2943 /* Make room for the Synthetic attribute (of zero length.) */
2944 if (DECL_FINIT_P (part)
2945 || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
2946 || TYPE_DOT_CLASS (clas) == part)
2952 PUT2 (i); /* attributes_count */
2955 ptr = append_synthetic_attribute (state);
2957 if (body != NULL_TREE)
2959 int code_attributes_count = 0;
2960 static tree Code_node = NULL_TREE;
2963 struct jcf_handler *handler;
2964 if (Code_node == NULL_TREE)
2965 Code_node = get_identifier ("Code");
2966 ptr = append_chunk (NULL, 14, state);
2967 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
/* Generate the bytecode for the method body.  */
2969 init_jcf_method (state, part);
2970 get_jcf_label_here (state); /* Force a first block. */
2971 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2972 localvar_alloc (t, state);
2973 generate_bytecode_insns (body, IGNORE_TARGET, state);
/* If control can fall off the end of a void method, emit an
   explicit return.  */
2974 if (CAN_COMPLETE_NORMALLY (body))
2976 if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
2979 OP1 (OPCODE_return);
2981 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2982 localvar_free (t, state);
2983 if (state->return_value_decl != NULL_TREE)
2984 localvar_free (state->return_value_decl, state);
2985 finish_jcf_block (state);
2986 perform_relocations (state);
/* Compute the Code attribute_length: fixed header + code +
   exception table + nested attributes.  */
2989 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
2990 if (state->linenumber_count > 0)
2992 code_attributes_count++;
2993 i += 8 + 4 * state->linenumber_count;
2995 if (state->lvar_count > 0)
2997 code_attributes_count++;
2998 i += 8 + 10 * state->lvar_count;
3000 UNSAFE_PUT4 (i); /* attribute_length */
3001 UNSAFE_PUT2 (state->code_SP_max); /* max_stack */
3002 UNSAFE_PUT2 (localvar_max); /* max_locals */
3003 UNSAFE_PUT4 (state->code_length);
3005 /* Emit the exception table. */
3006 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
3007 PUT2 (state->num_handlers); /* exception_table_length */
3008 handler = state->handlers;
3009 for (; handler != NULL; handler = handler->next)
3012 PUT2 (handler->start_label->pc);
3013 PUT2 (handler->end_label->pc);
3014 PUT2 (handler->handler_label->pc);
/* A NULL type means a catch-all (finally) handler.  */
3015 if (handler->type == NULL_TREE)
3018 type_index = find_class_constant (&state->cpool,
3023 ptr = append_chunk (NULL, 2, state);
3024 PUT2 (code_attributes_count);
3026 /* Write the LineNumberTable attribute. */
3027 if (state->linenumber_count > 0)
3029 static tree LineNumberTable_node = NULL_TREE;
3030 ptr = append_chunk (NULL,
3031 8 + 4 * state->linenumber_count, state);
3032 if (LineNumberTable_node == NULL_TREE)
3033 LineNumberTable_node = get_identifier ("LineNumberTable");
3034 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3035 PUT2 (i); /* attribute_name_index */
3036 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
3037 i = state->linenumber_count; PUT2 (i);
3038 for (block = state->blocks; block != NULL; block = block->next)
3040 int line = block->linenumber;
3049 /* Write the LocalVariableTable attribute. */
3050 if (state->lvar_count > 0)
3052 static tree LocalVariableTable_node = NULL_TREE;
3053 struct localvar_info *lvar = state->first_lvar;
3054 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3055 if (LocalVariableTable_node == NULL_TREE)
3056 LocalVariableTable_node = get_identifier("LocalVariableTable");
3057 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3058 PUT2 (i); /* attribute_name_index */
3059 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
3060 i = state->lvar_count; PUT2 (i);
3061 for ( ; lvar != NULL; lvar = lvar->next)
3063 tree name = DECL_NAME (lvar->decl);
3064 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
3065 i = lvar->start_label->pc; PUT2 (i);
/* length = end pc - start pc.  */
3066 i = lvar->end_label->pc - i; PUT2 (i);
3067 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3068 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
3069 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
/* Write the Exceptions attribute for a declared throws clause.  */
3073 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3075 tree t = DECL_FUNCTION_THROWS (part);
3076 int throws_count = list_length (t);
3077 static tree Exceptions_node = NULL_TREE;
3078 if (Exceptions_node == NULL_TREE)
3079 Exceptions_node = get_identifier ("Exceptions");
3080 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3081 i = find_utf8_constant (&state->cpool, Exceptions_node);
3082 PUT2 (i); /* attribute_name_index */
3083 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
3084 i = throws_count; PUT2 (i);
3085 for (; t != NULL_TREE; t = TREE_CHAIN (t))
3087 i = find_class_constant (&state->cpool, TREE_VALUE (t));
3092 current_function_decl = save_function;
/* Back-patch the method count.  */
3094 ptr = methods_count_ptr; UNSAFE_PUT2 (methods_count);
/* Strip any leading directory components from the source file name
   for the SourceFile attribute.  */
3096 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3097 for (s = source_file; ; s++)
3102 if (ch == '/' || ch == '\\')
3105 ptr = append_chunk (NULL, 10, state);
/* Class attribute count: SourceFile always, plus InnerClasses and/or
   the gcj marker attribute as applicable.  */
3107 i = 1; /* Source file always exists as an attribute */
3108 if (INNER_CLASS_TYPE_P (clas) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas)))
3110 if (clas == object_type_node)
3112 PUT2 (i); /* attributes_count */
3114 /* generate the SourceFile attribute. */
3115 if (SourceFile_node == NULL_TREE)
3117 SourceFile_node = get_identifier ("SourceFile");
/* Register the cached identifier as a GC root since it persists
   across invocations.  */
3118 ggc_add_tree_root (&SourceFile_node, 1);
3121 i = find_utf8_constant (&state->cpool, SourceFile_node);
3122 PUT2 (i); /* attribute_name_index */
3124 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3126 append_gcj_attribute (state, clas);
3127 append_innerclasses_attribute (state, clas);
3129 /* Now finally generate the contents of the constant pool chunk. */
3130 i = count_constant_pool_bytes (&state->cpool);
3131 ptr = obstack_alloc (state->chunk_obstack, i);
3132 cpool_chunk->data = ptr;
3133 cpool_chunk->size = i;
3134 write_constant_pool (&state->cpool, ptr, i);
3135 return state->first;
/* Append a zero-length "Synthetic" attribute chunk (2-byte name index
   plus 4-byte length of 0) and return a pointer past it.  Used for
   compiler-generated fields and methods.  */
3138 static unsigned char *
3139 append_synthetic_attribute (state)
3140 struct jcf_partial *state;
3142 static tree Synthetic_node = NULL_TREE;
3143 unsigned char *ptr = append_chunk (NULL, 6, state);
3146 if (Synthetic_node == NULL_TREE)
3148 Synthetic_node = get_identifier ("Synthetic");
/* Cached identifier persists across calls; protect it from GC.  */
3149 ggc_add_tree_root (&Synthetic_node, 1);
3151 i = find_utf8_constant (&state->cpool, Synthetic_node);
3152 PUT2 (i); /* Attribute string index */
3153 PUT4 (0); /* Attribute length */
/* Append the zero-length "gnu.gcj.gcj-compiled" marker attribute to
   CLASS's attribute list (skipped for java.lang.Object), so the gcj
   runtime can recognize classes compiled by gcj.  */
3159 append_gcj_attribute (state, class)
3160 struct jcf_partial *state;
3166 if (class != object_type_node)
3169 ptr = append_chunk (NULL, 6, state); /* 2+4 */
3170 i = find_utf8_constant (&state->cpool,
3171 get_identifier ("gnu.gcj.gcj-compiled"));
3172 PUT2 (i); /* Attribute string index */
3173 PUT4 (0); /* Attribute length */
/* Append an "InnerClasses" attribute for CLASS, listing every inner
   class visible from it: CLASS itself and its enclosing chain (if it
   is an inner class), plus the inner classes it directly declares.
   The attribute length and entry count are back-patched at the end.  */
3177 append_innerclasses_attribute (state, class)
3178 struct jcf_partial *state;
3181 static tree InnerClasses_node = NULL_TREE;
3182 tree orig_decl = TYPE_NAME (class);
3185 unsigned char *ptr, *length_marker, *number_marker;
/* Nothing to do unless CLASS is inner or declares inner classes.  */
3187 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3190 ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3192 if (InnerClasses_node == NULL_TREE)
3194 InnerClasses_node = get_identifier ("InnerClasses");
/* Cached identifier persists across calls; protect it from GC.  */
3195 ggc_add_tree_root (&InnerClasses_node, 1);
3197 i = find_utf8_constant (&state->cpool, InnerClasses_node);
3199 length_marker = ptr; PUT4 (0); /* length, to be later patched */
3200 number_marker = ptr; PUT2 (0); /* number of classes, tblp */
3202 /* Generate the entries: all inner classes visible from the one we
3203 process: itself, up and down. */
3204 while (class && INNER_CLASS_TYPE_P (class))
3208 decl = TYPE_NAME (class);
/* Scan backward from the end of the mangled name to the last '$'
   to recover the simple (unqualified) inner-class name.  */
3209 n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3210 IDENTIFIER_LENGTH (DECL_NAME (decl));
3212 while (n[-1] != '$')
3214 append_innerclasses_attribute_entry (state, decl, get_identifier (n));
/* Walk up to the enclosing class.  */
3217 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
/* Entries for the inner classes declared directly inside us.  */
3221 for (current = DECL_INNER_CLASS_LIST (decl);
3222 current; current = TREE_CHAIN (current))
3224 append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3225 TREE_VALUE (current));
/* Back-patch attribute_length (8 bytes per entry + 2-byte count)
   and number_of_classes.  */
3229 ptr = length_marker; PUT4 (8*length+2);
3230 ptr = number_marker; PUT2 (length);
/* Append one 8-byte InnerClasses table entry for DECL with simple
   name NAME: inner_class_info_index, outer_class_info_index,
   inner_name_index, and inner_class_access_flags.  */
3234 append_innerclasses_attribute_entry (state, decl, name)
3235 struct jcf_partial *state;
3239 int ocii = 0, ini = 0;
3240 unsigned char *ptr = append_chunk (NULL, 8, state);
3242 icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3244 /* Sun's implementation seems to generate ocii to 0 for inner
3245 classes (which aren't considered members of the class they're
3246 in.) The specs are saying that if the class is anonymous,
3247 inner_name_index must be zero. */
3248 if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl)))
3250 ocii = find_class_constant (&state->cpool,
3251 TREE_TYPE (DECL_CONTEXT (decl)));
3252 ini = find_utf8_constant (&state->cpool, name);
3254 icaf = get_access_flags (decl);
3256 PUT2 (icii); PUT2 (ocii); PUT2 (ini); PUT2 (icaf);
/* Compute the output `.class' file name for CLAS, creating any needed
   output subdirectories along the way.  If jcf_write_base_directory is
   set, the name is placed under it, mirroring the package structure;
   otherwise it goes into the directory of the corresponding .java
   file.  Returns a freshly xmalloc'd string owned by the caller.  */
3260 make_class_file_name (clas)
3263 const char *dname, *cname, *slash;
/* Map the class name to a relative path: '.' becomes DIR_SEPARATOR.  */
3267 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3268 "", '.', DIR_SEPARATOR,
3270 if (jcf_write_base_directory == NULL)
3272 /* Make sure we put the class file into the .java file's
3273 directory, and not into some subdirectory thereof. */
3275 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3276 slash = strrchr (dname, DIR_SEPARATOR);
/* Strip the package path from CNAME, keeping only the base name.  */
3282 t = strrchr (cname, DIR_SEPARATOR);
3288 dname = jcf_write_base_directory;
3289 slash = dname + strlen (dname);
/* Join DNAME (up to SLASH) and CNAME with a separator in between;
   +2 covers the separator and the terminating NUL.  */
3292 r = xmalloc (slash - dname + strlen (cname) + 2);
3293 strncpy (r, dname, slash - dname);
3294 r[slash - dname] = DIR_SEPARATOR;
3295 strcpy (&r[slash - dname + 1], cname);
3297 /* We try to make new directories when we need them. We only do
3298 this for directories which "might not" exist. For instance, we
3299 assume the `-d' directory exists, but we don't assume that any
3300 subdirectory below it exists. It might be worthwhile to keep
3301 track of which directories we've created to avoid gratuitous
3303 dname = r + (slash - dname) + 1;
3306 char *s = strchr (dname, DIR_SEPARATOR);
/* If the directory doesn't exist, try to create it.  */
3310 if (stat (r, &sb) == -1
3311 /* Try to make it. */
3312 && mkdir (r, 0755) == -1)
3313 fatal_io_error ("can't create directory %s", r);
3316 /* Skip consecutive separators. */
3317 for (dname = s + 1; *dname && *dname == DIR_SEPARATOR; ++dname)
3324 /* Write out the contens of a class (RECORD_TYPE) CLAS, as a .class file.
3325 The output .class file name is make_class_file_name(CLAS). */
/* Write out CLAS (a RECORD_TYPE) as a .class file, to the path
   computed by make_class_file_name.  Generates the chunk list with
   generate_classfile and streams it with write_chunks; open and close
   failures are fatal I/O errors.  The state and its obstack storage
   are released before returning.  */
3328 write_classfile (clas)
3331 struct obstack *work = &temporary_obstack;
3332 struct jcf_partial state[1];
3333 char *class_file_name = make_class_file_name (clas);
3334 struct chunk *chunks;
3336 if (class_file_name != NULL)
3338 FILE *stream = fopen (class_file_name, "wb");
/* Fixed ungrammatical error message ("can't to open" -> "can't open").  */
3340 fatal_io_error ("can't open %s", class_file_name);
3342 jcf_dependency_add_target (class_file_name);
3343 init_jcf_state (state, work);
3344 chunks = generate_classfile (clas, state);
3345 write_chunks (stream, chunks);
/* fclose also flushes; a failure here means the file is incomplete.  */
3346 if (fclose (stream))
3347 fatal_io_error ("can't close %s", class_file_name);
3348 free (class_file_name);
3350 release_jcf_state (state);
3354 string concatenation
3355 synchronized statement