java-except.h (struct eh_range): Removed unused `next' member.
* java-except.h (struct eh_range): Removed unused `next' member.
* verify.c (verify_jvm_instructions): Call check_nested_ranges
after adding all exception handlers.  Sort exception ranges in
order of start PC.
(struct pc_index): New structure.
(start_pc_cmp): New function.
* except.c (add_handler): Return `void'.  Don't call link_handler;
instead construct an ordinary linked list and do range coalescing.
(check_nested_ranges): New function.
(link_handler): Changed interface to allow merging of eh_ranges.
Split overlapping ranges.  Return `void'.

From-SVN: r26995
4 changed files with 191 additions and 56 deletions
ChangeLog:
@@ -1,3 +1,18 @@
+1999-05-14  Tom Tromey  <tromey@cygnus.com>
+
+        * java-except.h (struct eh_range): Removed unused `next' member.
+        * verify.c (verify_jvm_instructions): Call check_nested_ranges
+        after adding all exception handlers.  Sort exception ranges in
+        order of start PC.
+        (struct pc_index): New structure.
+        (start_pc_cmp): New function.
+        * except.c (add_handler): Return `void'.  Don't call link_handler;
+        instead construct an ordinary linked list and do range
+        coalescing.
+        (check_nested_ranges): New function.
+        (link_handler): Changed interface to allow merging of eh_ranges.
+        Split overlapping ranges.  Return `void'.
+
 Mon May 17 19:20:24 1999  Alexandre Petit-Bianco  <apbianco@cygnus.com>
 
         * parse.y (constructor_block_end:): New rule, tagged <node>.
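A note on the shape of the problem (not part of the commit): exception ranges are half-open [start_pc, end_pc) intervals over the bytecode, and the reworked link_handler has to decide whether a new range is disjoint from, nested in, enclosing, or merely overlapping an established one. A minimal standalone C sketch of that classification, with illustrative names only:

#include <stdio.h>

/* Illustrative only: classify how a candidate PC range relates to an
   already-established range.  The real compiler works on struct eh_range
   nodes; plain ints keep the example self-contained.  */
enum relation { DISJOINT, NESTED, ENCLOSES, OVERLAPS };

static enum relation
classify (int start, int end, int outer_start, int outer_end)
{
  if (end <= outer_start || start >= outer_end)
    return DISJOINT;                 /* no PC in common */
  if (start >= outer_start && end <= outer_end)
    return NESTED;                   /* fits entirely inside */
  if (start <= outer_start && end >= outer_end)
    return ENCLOSES;                 /* surrounds the established range */
  return OVERLAPS;                   /* crosses one boundary: must be split */
}

int
main (void)
{
  /* An overlapping pair like this is what the new splitting code handles.  */
  printf ("%d\n", classify (4, 12, 0, 8));   /* prints 3 (OVERLAPS) */
  printf ("%d\n", classify (2, 6, 0, 8));    /* prints 1 (NESTED) */
  return 0;
}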
except.c:
@@ -108,51 +108,102 @@ find_handler (pc)
   return find_handler_in_range (pc, h, cache_next_child);
 }
 
-#if 0
-  first_child;
-  next_sibling;
-  outer;
-#endif
-
-/* Recursive helper routine for add_handler. */
+/* Recursive helper routine for check_nested_ranges. */
 
-static int
-link_handler (start_pc, end_pc, handler, type, outer)
-     int start_pc, end_pc;
-     tree handler;
-     tree type;
-     struct eh_range *outer;
+static void
+link_handler (range, outer)
+     struct eh_range *range, *outer;
 {
   struct eh_range **ptr;
-  if (start_pc < outer->start_pc || end_pc > outer->end_pc)
-    return 0; /* invalid or non-nested exception range */
-  if (start_pc == outer->start_pc && end_pc == outer->end_pc)
+
+  if (range->start_pc == outer->start_pc && range->end_pc == outer->end_pc)
     {
-      outer->handlers = tree_cons (type, handler, outer->handlers);
-      return 1;
+      outer->handlers = chainon (range->handlers, outer->handlers);
+      return;
     }
+
+  /* If the new range completely encloses the `outer' range, then insert it
+     between the outer range and its parent. */
+  if (range->start_pc <= outer->start_pc && range->end_pc >= outer->end_pc)
+    {
+      range->outer = outer->outer;
+      range->next_sibling = NULL;
+      range->first_child = outer;
+      outer->outer->first_child = range;
+      outer->outer = range;
+      return;
+    }
+
+  /* Handle overlapping ranges by splitting the new range. */
+  if (range->start_pc < outer->start_pc || range->end_pc > outer->end_pc)
+    {
+      struct eh_range *h
+        = (struct eh_range *) oballoc (sizeof (struct eh_range));
+      if (range->start_pc < outer->start_pc)
+        {
+          h->start_pc = range->start_pc;
+          h->end_pc = outer->start_pc;
+          range->start_pc = outer->start_pc;
+        }
+      else
+        {
+          h->start_pc = outer->end_pc;
+          h->end_pc = range->end_pc;
+          range->end_pc = outer->end_pc;
+        }
+      h->first_child = NULL;
+      h->outer = NULL;
+      h->handlers = build_tree_list (TREE_PURPOSE (range->handlers),
+                                     TREE_VALUE (range->handlers));
+      h->next_sibling = NULL;
+      /* Restart both from the top to avoid having to make this
+         function smart about reentrancy. */
+      link_handler (h, &whole_range);
+      link_handler (range, &whole_range);
+      return;
+    }
+
   ptr = &outer->first_child;
   for (;; ptr = &(*ptr)->next_sibling)
     {
-      if (*ptr == NULL || end_pc <= (*ptr)->start_pc)
+      if (*ptr == NULL || range->end_pc <= (*ptr)->start_pc)
         {
-          struct eh_range *h = (struct eh_range *)
-            oballoc (sizeof (struct eh_range));
-          h->start_pc = start_pc;
-          h->end_pc = end_pc;
-          h->next_sibling = *ptr;
-          h->first_child = NULL;
-          h->outer = outer;
-          h->handlers = build_tree_list (type, handler);
-          *ptr = h;
-          return 1;
+          range->next_sibling = *ptr;
+          range->first_child = NULL;
+          range->outer = outer;
+          *ptr = range;
+          return;
         }
-      else if (start_pc < (*ptr)->end_pc)
-        return link_handler (start_pc, end_pc, handler, type, *ptr);
+      else if (range->start_pc < (*ptr)->end_pc)
+        {
+          link_handler (range, *ptr);
+          return;
+        }
       /* end_pc > (*ptr)->start_pc && start_pc >= (*ptr)->end_pc. */
     }
 }
 
+/* The first pass of exception range processing (calling add_handler)
+   constructs a linked list of exception ranges.  We turn this into
+   the data structure expected by the rest of the code, and also
+   ensure that exception ranges are properly nested.  */
+
+void
+handle_nested_ranges ()
+{
+  struct eh_range *ptr, *next;
+
+  ptr = whole_range.first_child;
+  whole_range.first_child = NULL;
+  for (; ptr; ptr = next)
+    {
+      next = ptr->next_sibling;
+      ptr->next_sibling = NULL;
+      link_handler (ptr, &whole_range);
+    }
+}
+
+
 /* Called to re-initialize the exception machinery for a new method. */
 
 void
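The "Handle overlapping ranges by splitting" branch above trims the incoming range to the part that nests inside the established range and spins the remainder off as a separate range, then re-links both from the top. A simplified standalone sketch of just the splitting arithmetic (plain structs and illustrative names; the real code works on eh_range nodes and tree handler lists):

#include <stdio.h>

struct range { int start_pc, end_pc; };

/* Split NEW_R against ESTABLISHED when they overlap without nesting:
   the piece outside ESTABLISHED goes into *SPILL, and NEW_R is trimmed
   so that it lies entirely inside ESTABLISHED.  Mirrors the idea of the
   new link_handler code, not its exact interface.  */
static void
split_range (struct range *new_r, const struct range *established,
             struct range *spill)
{
  if (new_r->start_pc < established->start_pc)
    {
      spill->start_pc = new_r->start_pc;
      spill->end_pc = established->start_pc;
      new_r->start_pc = established->start_pc;
    }
  else
    {
      spill->start_pc = established->end_pc;
      spill->end_pc = new_r->end_pc;
      new_r->end_pc = established->end_pc;
    }
}

int
main (void)
{
  struct range established = { 0, 8 };
  struct range incoming = { 4, 12 };
  struct range spill;

  split_range (&incoming, &established, &spill);
  /* incoming is now [4,8), nested in [0,8); spill is [8,12), disjoint.  */
  printf ("[%d,%d) and [%d,%d)\n",
          incoming.start_pc, incoming.end_pc, spill.start_pc, spill.end_pc);
  return 0;
}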
@@ -174,13 +225,54 @@ java_set_exception_lang_code ()
   set_exception_version_code (1);
 }
 
-int
+/* Add an exception range.  If we already have an exception range
+   which has the same handler and label, and the new range overlaps
+   that one, then we simply extend the existing range.  Some bytecode
+   obfuscators generate seemingly nonoverlapping exception ranges
+   which, when coalesced, do in fact nest correctly.
+
+   This constructs an ordinary linked list which check_nested_ranges()
+   later turns into the data structure we actually want.
+
+   We expect the input to come in order of increasing START_PC.  This
+   function doesn't attempt to detect the case where two previously
+   added disjoint ranges could be coalesced by a new range; that is
+   what the sorting counteracts.  */
+
+void
 add_handler (start_pc, end_pc, handler, type)
      int start_pc, end_pc;
      tree handler;
      tree type;
 {
-  return link_handler (start_pc, end_pc, handler, type, &whole_range);
+  struct eh_range *ptr, *prev = NULL, *h;
+
+  for (ptr = whole_range.first_child; ptr; ptr = ptr->next_sibling)
+    {
+      if (start_pc >= ptr->start_pc
+          && start_pc <= ptr->end_pc
+          && TREE_PURPOSE (ptr->handlers) == type
+          && TREE_VALUE (ptr->handlers) == handler)
+        {
+          /* Already found an overlapping range, so coalesce.  */
+          ptr->end_pc = MAX (ptr->end_pc, end_pc);
+          return;
+        }
+      prev = ptr;
+    }
+
+  h = (struct eh_range *) oballoc (sizeof (struct eh_range));
+  h->start_pc = start_pc;
+  h->end_pc = end_pc;
+  h->first_child = NULL;
+  h->outer = NULL;
+  h->handlers = build_tree_list (type, handler);
+  h->next_sibling = NULL;
+
+  if (prev == NULL)
+    whole_range.first_child = h;
+  else
+    prev->next_sibling = h;
 }
 
 
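The comment above add_handler describes the coalescing rule: an entry whose start PC falls inside an already-recorded range with the same handler and catch type simply extends that range, which is why the caller feeds entries in order of increasing start PC. A standalone sketch of that first pass, with a plain handler id standing in for the handler/type trees (illustrative code, not the compiler's):

#include <stdio.h>

#define MAX(a, b) ((a) > (b) ? (a) : (b))

struct entry { int start_pc, end_pc, handler_id; };

/* First pass over entries sorted by start_pc: if the new entry starts
   inside an existing entry with the same handler, extend that entry;
   otherwise keep it as a new one.  Returns the surviving count.  */
static int
coalesce (struct entry *e, int n)
{
  int kept = 0, i, j;
  for (i = 0; i < n; ++i)
    {
      for (j = 0; j < kept; ++j)
        if (e[i].handler_id == e[j].handler_id
            && e[i].start_pc >= e[j].start_pc
            && e[i].start_pc <= e[j].end_pc)
          {
            e[j].end_pc = MAX (e[j].end_pc, e[i].end_pc);
            break;
          }
      if (j == kept)
        e[kept++] = e[i];
    }
  return kept;
}

int
main (void)
{
  /* Sorted by start_pc; the first and last entry share a handler and
     touch, so they coalesce into [0,10), and [2,4) then nests inside.  */
  struct entry table[] = { { 0, 6, 1 }, { 2, 4, 2 }, { 6, 10, 1 } };
  int n = coalesce (table, 3), i;

  for (i = 0; i < n; ++i)
    printf ("[%d,%d) -> handler %d\n",
            table[i].start_pc, table[i].end_pc, table[i].handler_id);
  return 0;
}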
java-except.h:
@@ -47,11 +47,6 @@ struct eh_range
 
   /* The next child of outer, in address order. */
   struct eh_range *next_sibling;
-
-#if 0
-  /* Next handler, sorted by ascending start_pc then descending end_pc. */
-  tree next;
-#endif
 };
 
 /* A dummy range that represents the entire method. */
@@ -69,6 +64,8 @@ extern void maybe_start_try PROTO ((int));
 
 extern void maybe_end_try PROTO ((int));
 
-extern int add_handler PROTO ((int, int, tree, tree));
+extern void add_handler PROTO ((int, int, tree, tree));
+
+extern void handle_nested_ranges PROTO ((void));
 
 extern void expand_resume_after_catch PROTO ((void));
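For orientation (not part of the commit): the first_child / next_sibling / outer links declared in this header describe a nesting tree of ranges, with siblings kept in address order. A simplified standalone sketch of how such a tree answers "which is the innermost range covering this PC", in the spirit of find_handler_in_range but with stand-in types:

#include <stdio.h>

/* Simplified stand-in for struct eh_range: just the PCs and linkage.  */
struct node
{
  int start_pc, end_pc;
  struct node *outer, *first_child, *next_sibling;
};

/* Walk downward, preferring the deepest child that covers PC.  */
static struct node *
innermost (struct node *n, int pc)
{
  struct node *child;
  if (pc < n->start_pc || pc >= n->end_pc)
    return NULL;
  for (child = n->first_child; child; child = child->next_sibling)
    {
      struct node *hit = innermost (child, pc);
      if (hit)
        return hit;
    }
  return n;
}

int
main (void)
{
  /* whole [0,100) -> child [10,50) -> grandchild [20,30).  */
  struct node whole = { 0, 100, NULL, NULL, NULL };
  struct node child = { 10, 50, NULL, NULL, NULL };
  struct node grand = { 20, 30, NULL, NULL, NULL };

  whole.first_child = &child;  child.outer = &whole;
  child.first_child = &grand;  grand.outer = &child;

  printf ("%d\n", innermost (&whole, 25)->start_pc);   /* prints 20 */
  printf ("%d\n", innermost (&whole, 40)->start_pc);   /* prints 10 */
  return 0;
}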
verify.c:
@@ -300,6 +300,24 @@ type_stack_dup (size, offset)
     }
 }
 
+/* This keeps track of a start PC and corresponding initial index.  */
+struct pc_index
+{
+  int start_pc;
+  int index;
+};
+
+/* A helper that is used when sorting exception ranges.  */
+static int
+start_pc_cmp (xp, yp)
+     const GENERIC_PTR xp;
+     const GENERIC_PTR yp;
+{
+  struct pc_index *x = (struct pc_index *) xp;
+  struct pc_index *y = (struct pc_index *) yp;
+  return x->start_pc - y->start_pc;
+}
+
 /* This causes the next iteration to ignore the next instruction
    and look for some other unhandled instruction. */
 #define INVALIDATE_PC (prevpc = -1, oldpc = PC, PC = INVALID_PC)
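struct pc_index and start_pc_cmp are the usual sort-by-key-and-keep-the-original-index trick, so the exception table can be visited in start-PC order without rearranging the raw bytes. A standalone ANSI C sketch of the same pattern (GENERIC_PTR in the old sources is essentially a portability spelling of void *):

#include <stdio.h>
#include <stdlib.h>

struct pc_index
{
  int start_pc;
  int index;
};

/* qsort comparator: ascending start_pc.  Subtraction is safe here because
   bytecode PCs are small non-negative numbers, so it cannot overflow.  */
static int
start_pc_cmp (const void *xp, const void *yp)
{
  const struct pc_index *x = (const struct pc_index *) xp;
  const struct pc_index *y = (const struct pc_index *) yp;
  return x->start_pc - y->start_pc;
}

int
main (void)
{
  /* Pretend these are the start PCs of four exception table entries.  */
  int start_pcs[] = { 40, 5, 22, 5 };
  struct pc_index starts[4];
  int i;

  for (i = 0; i < 4; ++i)
    {
      starts[i].start_pc = start_pcs[i];
      starts[i].index = i;
    }
  qsort (starts, 4, sizeof (struct pc_index), start_pc_cmp);

  /* Entries are now visited in order of increasing start PC; the two
     equal PCs may come out in either order since qsort is not stable.  */
  for (i = 0; i < 4; ++i)
    printf ("entry %d at pc %d\n", starts[i].index, starts[i].start_pc);
  return 0;
}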
@@ -341,6 +359,8 @@ verify_jvm_instructions (jcf, byte_ops, length)
   struct eh_range *prev_eh_ranges = NULL_EH_RANGE;
   struct eh_range *eh_ranges;
   tree return_type = TREE_TYPE (TREE_TYPE (current_function_decl));
+  struct pc_index *starts;
+  int eh_count;
 
   jint int_value = -1;
 
@@ -349,17 +369,28 @@ verify_jvm_instructions (jcf, byte_ops, length)
   /* Handle the exception table. */
   method_init_exceptions ();
   JCF_SEEK (jcf, DECL_CODE_OFFSET (current_function_decl) + length);
-  i = JCF_readu2 (jcf);
+  eh_count = JCF_readu2 (jcf);
 
-  /* We read the exception backwards. */
-  p = jcf->read_ptr + 8 * i;
-  while (--i >= 0)
+  /* We read the exception handlers in order of increasing start PC.
+     To do this we first read and sort the start PCs.  */
+  starts = (struct pc_index *) xmalloc (eh_count * sizeof (struct pc_index));
+  for (i = 0; i < eh_count; ++i)
     {
-      int start_pc = GET_u2 (p-8);
-      int end_pc = GET_u2 (p-6);
-      int handler_pc = GET_u2 (p-4);
-      int catch_type = GET_u2 (p-2);
-      p -= 8;
+      starts[i].start_pc = GET_u2 (jcf->read_ptr + 8 * i);
+      starts[i].index = i;
+    }
+  qsort (starts, eh_count, sizeof (struct pc_index), start_pc_cmp);
+
+  for (i = 0; i < eh_count; ++i)
+    {
+      int start_pc, end_pc, handler_pc, catch_type;
+
+      p = jcf->read_ptr + 8 * starts[i].index;
+
+      start_pc = GET_u2 (p);
+      end_pc = GET_u2 (p+2);
+      handler_pc = GET_u2 (p+4);
+      catch_type = GET_u2 (p+6);
 
       if (start_pc < 0 || start_pc >= length
           || end_pc < 0 || end_pc > length || start_pc >= end_pc
@@ -370,21 +401,21 @@ verify_jvm_instructions (jcf, byte_ops, length)
          || ! (instruction_bits [handler_pc] & BCODE_INSTRUCTION_START))
        {
          error ("bad pc in exception_table");
+         free (starts);
          return 0;
        }
 
-      if (! add_handler (start_pc, end_pc,
-                         lookup_label (handler_pc),
-                         catch_type == 0 ? NULL_TREE
-                         : get_class_constant (jcf, catch_type)))
-        {
-          error ("overlapping exception ranges are not supported");
-          return 0;
-        }
+      add_handler (start_pc, end_pc,
+                   lookup_label (handler_pc),
+                   catch_type == 0 ? NULL_TREE
+                   : get_class_constant (jcf, catch_type));
 
       instruction_bits [handler_pc] |= BCODE_EXCEPTION_TARGET;
     }
 
+  free (starts);
+  handle_nested_ranges ();
+
   for (PC = 0;;)
     {
       int index;
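Each exception_table entry in a class file is four big-endian u2 fields (start_pc, end_pc, handler_pc, catch_type), eight bytes per entry, which is what the GET_u2 (p), GET_u2 (p+2), ... reads above decode. A standalone sketch of that decoding, with a local read_u2 helper standing in for the JCF macros (hypothetical data, not taken from a real class file):

#include <stdio.h>

/* Big-endian 16-bit read, the byte order used for class-file data.  */
static unsigned int
read_u2 (const unsigned char *p)
{
  return (p[0] << 8) | p[1];
}

int
main (void)
{
  /* Two hand-built exception_table entries:
     [0, 10) -> handler at 20, catch_type #7, and
     [4, 8)  -> handler at 30, any type (catch_type 0).  */
  static const unsigned char table[] = {
    0, 0,  0, 10,  0, 20,  0, 7,
    0, 4,  0, 8,   0, 30,  0, 0,
  };
  int eh_count = 2, i;

  for (i = 0; i < eh_count; ++i)
    {
      const unsigned char *p = table + 8 * i;
      printf ("start %u end %u handler %u catch_type %u\n",
              read_u2 (p), read_u2 (p + 2), read_u2 (p + 4), read_u2 (p + 6));
    }
  return 0;
}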