1 /* Handle exceptions for GNU compiler for the Java(TM) language.
2 Copyright (C) 1997, 1998, 1999, 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
30 #include "java-tree.h"
32 #include "java-opcodes.h"
36 #include "java-except.h"
37 #include "eh-common.h"
/* Forward declarations for the static helpers defined below.
   NOTE(review): this chunk appears truncated — the embedded original
   line numbers are non-contiguous, so some declarations (e.g. the
   tail of the find_handler_in_range prototype) are elided.  */
40 static void expand_start_java_handler PARAMS ((struct eh_range *));
41 static void expand_end_java_handler PARAMS ((struct eh_range *));
42 static struct eh_range *find_handler_in_range PARAMS ((int, struct eh_range *,
44 static void link_handler PARAMS ((struct eh_range *, struct eh_range *));
45 static void check_start_handlers PARAMS ((struct eh_range *, int));
/* Obstack used for permanent allocations (see oballoc calls below);
   defined elsewhere in the compiler.  */
47 extern struct obstack permanent_obstack;
/* Per-method exception-range state.  current_try_block and
   eh_range_freelist are exported; their users are outside this chunk.  */
49 struct eh_range *current_method_handlers;
51 struct eh_range *current_try_block = NULL;
53 struct eh_range *eh_range_freelist = NULL;
55 /* These variables are used to speed up find_handler.  The cache
   records the [cache_range_start, cache_range_end) pc interval whose
   innermost handler was last computed (see find_handler_in_range).  */
57 static int cache_range_start, cache_range_end;
58 static struct eh_range *cache_range;
59 static struct eh_range *cache_next_child;
61 /* A dummy range that represents the entire method.  All real ranges
   are linked underneath it (see handle_nested_ranges / add_handler).  */
63 struct eh_range whole_range;
65 #if defined(DEBUG_JAVA_BINDING_LEVELS)
69 extern void indent ();
73 /* Search for the most specific eh_range containing PC.
74 Assume PC is within RANGE.
75 CHILD is a list of children of RANGE such that any
76 previous children have end_pc values that are too low.
   Recurses into the first child whose [start_pc, end_pc) interval
   contains PC; as a side effect, refreshes the find_handler cache
   (cache_range_start/cache_range_end/cache_next_child) so repeated
   lookups at nearby pcs can skip the tree walk.
   NOTE(review): interior lines are elided in this chunk (the `pc'
   parameter declaration and several braces/returns are missing).  */
78 static struct eh_range *
79 find_handler_in_range (pc, range, child)
81 struct eh_range *range;
82 register struct eh_range *child;
/* Walk the sibling list; children are ordered by start_pc.  */
84 for (; child != NULL; child = child->next_sibling)
86 if (pc < child->start_pc)
88 if (pc < child->end_pc)
/* PC lies inside this child: the answer is somewhere below it.  */
89 return find_handler_in_range (pc, child, child->first_child);
/* No child contains PC: RANGE itself is the innermost range.
   Cache the gap [pc, next-child-start) for future lookups.  */
92 cache_range_start = pc;
93 cache_next_child = child;
94 cache_range_end = child == NULL ? range->end_pc : child->start_pc;
98 /* Find the inner-most handler that contains PC.
   Fast path: if PC falls inside the cached interval, answer from the
   cache; otherwise advance along the cached range's siblings and
   delegate to find_handler_in_range.
   NOTE(review): the function signature and several body lines are
   elided in this chunk — do not modify without the full source.  */
105 if (pc >= cache_range_start)
108 if (pc < cache_range_end)
/* PC is past the cached range; skip finished siblings.  */
110 while (pc >= h->end_pc)
112 cache_next_child = h->next_sibling;
119 cache_next_child = h->first_child;
121 return find_handler_in_range (pc, h, cache_next_child);
124 /* Recursive helper routine for handle_nested_ranges.  Splice RANGE
   into the tree rooted at OUTER, preserving proper nesting: identical
   ranges merge their handler lists, an enclosing range is inserted
   between OUTER and its parent, and a partially overlapping range is
   split in two and both halves are re-linked from the top.
   NOTE(review): many interior lines (braces, returns, loop bodies)
   are elided in this chunk.  */
127 link_handler (range, outer)
128 struct eh_range *range, *outer;
130 struct eh_range **ptr;
/* Exact same extent: just concatenate the handler chains.  */
132 if (range->start_pc == outer->start_pc && range->end_pc == outer->end_pc)
134 outer->handlers = chainon (outer->handlers, range->handlers);
138 /* If the new range completely encloses the `outer' range, then insert it
139 between the outer range and its parent. */
140 if (range->start_pc <= outer->start_pc && range->end_pc >= outer->end_pc)
142 range->outer = outer->outer;
143 range->next_sibling = NULL;
144 range->first_child = outer;
/* Find the slot in the parent's child list that points at OUTER,
   so it can be redirected to RANGE.  */
146 struct eh_range **pr = &(outer->outer->first_child);
148 pr = &(*pr)->next_sibling;
151 outer->outer = range;
155 /* Handle overlapping ranges by splitting the new range. */
156 if (range->start_pc < outer->start_pc || range->end_pc > outer->end_pc)
/* h gets the part of RANGE that sticks out of OUTER; RANGE is
   trimmed to the overlapping part.  */
159 = (struct eh_range *) oballoc (sizeof (struct eh_range));
160 if (range->start_pc < outer->start_pc)
162 h->start_pc = range->start_pc;
163 h->end_pc = outer->start_pc;
164 range->start_pc = outer->start_pc;
168 h->start_pc = outer->end_pc;
169 h->end_pc = range->end_pc;
170 range->end_pc = outer->end_pc;
172 h->first_child = NULL;
/* The split-off piece shares RANGE's (single) handler entry.  */
174 h->handlers = build_tree_list (TREE_PURPOSE (range->handlers),
175 TREE_VALUE (range->handlers));
176 h->next_sibling = NULL;
177 /* Restart both from the top to avoid having to make this
178 function smart about reentrancy. */
179 link_handler (h, &whole_range);
180 link_handler (range, &whole_range);
/* RANGE nests strictly inside OUTER: insert it among OUTER's
   children, kept sorted by start_pc.  */
184 ptr = &outer->first_child;
185 for (;; ptr = &(*ptr)->next_sibling)
187 if (*ptr == NULL || range->end_pc <= (*ptr)->start_pc)
189 range->next_sibling = *ptr;
190 range->first_child = NULL;
191 range->outer = outer;
195 else if (range->start_pc < (*ptr)->end_pc)
/* RANGE overlaps this child: recurse to resolve against it.  */
197 link_handler (range, *ptr);
200 /* end_pc > (*ptr)->start_pc && start_pc >= (*ptr)->end_pc. */
204 /* The first pass of exception range processing (calling add_handler)
205 constructs a linked list of exception ranges. We turn this into
206 the data structure expected by the rest of the code, and also
207 ensure that exception ranges are properly nested.
   Detaches whole_range's flat child list and re-links each element
   one at a time via link_handler, which enforces nesting.
   NOTE(review): return type line and braces are elided in this chunk.  */
210 handle_nested_ranges ()
212 struct eh_range *ptr, *next;
214 ptr = whole_range.first_child;
215 whole_range.first_child = NULL;
216 for (; ptr; ptr = next)
/* Save the successor first: link_handler rewrites next_sibling.  */
218 next = ptr->next_sibling;
219 ptr->next_sibling = NULL;
220 link_handler (ptr, &whole_range);
225 /* Called to re-initialize the exception machinery for a new method.
   Resets whole_range to span the method's entire bytecode (end_pc is
   one past the last valid pc) and invalidates the find_handler cache
   by pushing cache_range_start past any plausible pc.  */
228 method_init_exceptions ()
230 whole_range.start_pc = 0;
231 whole_range.end_pc = DECL_CODE_LENGTH (current_function_decl) + 1;
232 whole_range.outer = NULL;
233 whole_range.first_child = NULL;
234 whole_range.next_sibling = NULL;
/* Sentinel: no pc reaches 0xFFFFFF, so the cache never hits until
   find_handler repopulates it.  */
235 cache_range_start = 0xFFFFFF;
236 java_set_exception_lang_code ();
/* Tag the back end's exception tables as Java (language code and
   table-format version) so the runtime can recognize them.  */
240 java_set_exception_lang_code ()
242 set_exception_lang_code (EH_LANG_Java);
243 set_exception_version_code (1);
246 /* Add an exception range. If we already have an exception range
247 which has the same handler and label, and the new range overlaps
248 that one, then we simply extend the existing range. Some bytecode
249 obfuscators generate seemingly nonoverlapping exception ranges
250 which, when coalesced, do in fact nest correctly.
252 This constructs an ordinary linked list which handle_nested_ranges()
253 later turns into the data structure we actually want.
255 We expect the input to come in order of increasing START_PC. This
256 function doesn't attempt to detect the case where two previously
257 added disjoint ranges could be coalesced by a new range; that is
258 what the sorting counteracts.
   NOTE(review): several lines (return type, HANDLER/TYPE parameter
   declarations, braces, and some field initializations) are elided
   in this chunk.  */
261 add_handler (start_pc, end_pc, handler, type)
262 int start_pc, end_pc;
266 struct eh_range *ptr, *prev = NULL, *h;
/* Look for an existing overlapping range with the same handler
   label and catch type; extend it instead of adding a new node.  */
268 for (ptr = whole_range.first_child; ptr; ptr = ptr->next_sibling)
270 if (start_pc >= ptr->start_pc
271 && start_pc <= ptr->end_pc
272 && TREE_PURPOSE (ptr->handlers) == type
273 && TREE_VALUE (ptr->handlers) == handler)
275 /* Already found an overlapping range, so coalesce. */
276 ptr->end_pc = MAX (ptr->end_pc, end_pc);
/* No match: allocate a fresh node on the permanent obstack and
   append it to whole_range's flat child list.  */
282 h = (struct eh_range *) oballoc (sizeof (struct eh_range));
283 h->start_pc = start_pc;
285 h->first_child = NULL;
287 h->handlers = build_tree_list (type, handler);
288 h->next_sibling = NULL;
292 whole_range.first_child = h;
294 prev->next_sibling = h;
298 /* If there are any handlers for this range, issue start of region.
   Opens an EH region in the back end (expand_eh_region_start),
   allocating on the permanent obstack.
   NOTE(review): the signature's return-type line, braces, and the
   matching pop_obstacks are elided in this chunk.  */
300 expand_start_java_handler (range)
301 struct eh_range *range;
303 #if defined(DEBUG_JAVA_BINDING_LEVELS)
305 fprintf (stderr, "expand start handler pc %d --> %d\n",
306 current_pc, range->end_pc);
307 #endif /* defined(DEBUG_JAVA_BINDING_LEVELS) */
309 push_obstacks (&permanent_obstack, &permanent_obstack);
310 expand_eh_region_start ();
/* Build the match_info expression for one Java exception-table entry.
   NOTE(review): return-type line, TYPE parameter declaration, the
   `exp' declaration, and the trailing return/pop_obstacks are elided
   in this chunk.  */
315 prepare_eh_table_type (type)
320 /* The "type" (match_info) in a (Java) exception table is one of:
321 * a) NULL - meaning match any type in a try-finally.
322 * b) a pointer to a (compiled) class (low-order bit 0).
323 * c) a pointer to the Utf8Const name of the class, plus one
324 * (which yields a value with low-order bit 1). */
326 push_obstacks (&permanent_obstack, &permanent_obstack);
327 if (type == NULL_TREE)
328 exp = null_pointer_node;
329 else if (is_compiled_class (type))
330 exp = build_class_ref (type);
/* Case (c): tag a Utf8Const name reference by adding one, setting
   the low bit for the runtime to distinguish it from a class ptr.  */
333 (PLUS_EXPR, ptr_type_node,
334 build_utf8_ref (build_internal_class_name (type)),
340 /* If there are any handlers for this range, issue end of range,
341 and then all handler blocks.
   Closes the EH region opened by expand_start_java_handler and emits
   one catch handler per entry on range->handlers: each handler
   matches its catch type, then jumps to the handler's bytecode label.
   NOTE(review): braces and the closing pop_obstacks are elided in
   this chunk.  */
343 expand_end_java_handler (range)
344 struct eh_range *range;
346 tree handler = range->handlers;
/* Close any binding levels still open inside the try body.  */
347 force_poplevels (range->start_pc);
348 push_obstacks (&permanent_obstack, &permanent_obstack);
349 expand_start_all_catch ();
351 for ( ; handler != NULL_TREE; handler = TREE_CHAIN (handler))
/* TREE_PURPOSE is the catch type (NULL_TREE = catch-all);
   TREE_VALUE is the label of the handler's bytecode.  */
353 start_catch_handler (prepare_eh_table_type (TREE_PURPOSE (handler)));
354 /* Push the thrown object on the top of the stack */
355 expand_goto (TREE_VALUE (handler));
356 expand_resume_after_catch ();
357 end_catch_handler ();
359 expand_end_all_catch ();
360 #if defined(DEBUG_JAVA_BINDING_LEVELS)
362 fprintf (stderr, "expand end handler pc %d <-- %d\n",
363 current_pc, range->start_pc);
364 #endif /* defined(DEBUG_JAVA_BINDING_LEVELS) */
367 /* Recursive helper routine for maybe_start_try.
   Walks outward-first (so outer regions open before inner ones) and
   expands the start of every not-yet-expanded range beginning at PC.
   NOTE(review): return-type line, PC parameter declaration, and the
   line setting range->expanded are elided in this chunk.  */
370 check_start_handlers (range, pc)
371 struct eh_range *range;
374 if (range != NULL_EH_RANGE && range->start_pc == pc)
/* Recurse first: outermost region must be opened first.  */
376 check_start_handlers (range->outer, pc);
377 if (!range->expanded)
378 expand_start_java_handler (range);
/* The innermost range most recently opened; maybe_end_try pops it.  */
383 static struct eh_range *current_range;
385 /* Emit any start-of-try-range starting at start_pc and ending after
   end_pc.  Finds the innermost handler at start_pc, then climbs out
   of any range that both starts here and ends before end_pc, and
   opens all ranges starting at start_pc via check_start_handlers.
   NOTE(review): return type, parameter declarations, and an early
   flag check are elided in this chunk.  */
389 maybe_start_try (start_pc, end_pc)
393 struct eh_range *range;
397 range = find_handler (start_pc);
398 while (range != NULL_EH_RANGE && range->start_pc == start_pc
399 && range->end_pc < end_pc)
400 range = range->outer;
402 current_range = range;
403 check_start_handlers (range, start_pc);
406 /* Emit any end-of-try-range ending at end_pc and starting before
   start_pc.  Pops current_range outward, closing each region whose
   extent fits inside [start_pc, end_pc].
   NOTE(review): return type, parameter declarations, and an early
   flag check are elided in this chunk.  */
410 maybe_end_try (start_pc, end_pc)
417 while (current_range != NULL_EH_RANGE && current_range->end_pc <= end_pc
418 && current_range->start_pc >= start_pc)
420 expand_end_java_handler (current_range);
421 current_range = current_range->outer;
425 /* Emit the handler labels and their code.
   Flushes the accumulated catch-clause insns into the function body
   before a final label at the function end.
   NOTE(review): the function's signature line and surrounding jump/
   label emission lines are elided in this chunk.  */
432 rtx funcend = gen_label_rtx ();
435 emit_insns (catch_clauses);
/* Clauses are consumed; reset so they are not emitted twice.  */
436 catch_clauses = NULL_RTX;
437 expand_leftover_cleanups ();
439 emit_label (funcend);
443 /* Resume executing at the statement immediately after the end of an
   exception handler: jump to the label on top of the shared
   caught_return_label_stack.
   NOTE(review): the signature's return-type line and braces are
   elided in this chunk.  */
447 expand_resume_after_catch ()
449 expand_goto (top_label_entry (&caught_return_label_stack));