Diffstat (limited to 'gnu/usr.bin/gcc/except.c')
-rw-r--r--	gnu/usr.bin/gcc/except.c	2319
1 file changed, 2319 insertions, 0 deletions
diff --git a/gnu/usr.bin/gcc/except.c b/gnu/usr.bin/gcc/except.c
new file mode 100644
index 00000000000..d0922668a4d
--- /dev/null
+++ b/gnu/usr.bin/gcc/except.c
@@ -0,0 +1,2319 @@
+/* Implements exception handling.
+ Copyright (C) 1989, 92-96, 1997 Free Software Foundation, Inc.
+ Contributed by Mike Stump <mrs@cygnus.com>.
+
+This file is part of GNU CC.
+
+GNU CC is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 2, or (at your option)
+any later version.
+
+GNU CC is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with GNU CC; see the file COPYING. If not, write to
+the Free Software Foundation, 59 Temple Place - Suite 330,
+Boston, MA 02111-1307, USA. */
+
+
+/* An exception is an event that can be signaled from within a
+ function. This event can then be "caught" or "trapped" by the
+ callers of this function. This potentially allows program flow to
+ be transferred to any arbitrary code associated with a function call
+ several levels up the stack.
+
+ The intended use for this mechanism is for signaling "exceptional
+ events" in an out-of-band fashion, hence its name. The C++ language
+ (and many other OO-styled or functional languages) practically
+ requires such a mechanism, as otherwise it becomes very difficult
+ or even impossible to signal failure conditions in complex
+ situations. The traditional C++ example is when an error occurs in
+ the process of constructing an object; without such a mechanism, it
+ is impossible to signal that the error has occurred without adding global
+ state variables and error checks around every object construction.
+
+ The act of causing this event to occur is referred to as "throwing
+ an exception". (Alternate terms include "raising an exception" or
+ "signaling an exception".) The term "throw" is used because control
+ is returned to the callers of the function that is signaling the
+ exception, and thus there is the concept of "throwing" the
+ exception up the call stack.
+
+ There are two major codegen options for exception handling: a PC
+ range table approach and a setjmp/longjmp based scheme. The flag
+ -fsjlj-exceptions selects the setjmp/longjmp approach, which is the
+ default; -fno-sjlj-exceptions selects the PC range table approach.
+ Although this is a compile time flag, an entire application must be
+ compiled with the same codegen option. We will first discuss the PC
+ range table approach, and after that the setjmp/longjmp based
+ approach.
+
+ It is appropriate to speak of the "context of a throw". This
+ context refers to the address where the exception is thrown from,
+ and is used to determine which exception region will handle the
+ exception.
+
+ A region of code within a function can be marked such that if it
+ contains the context of a throw, control will be passed to a
+ designated "exception handler". These areas are known as "exception
+ regions". Exception regions cannot overlap, but they can be nested
+ to arbitrary depth. Also, exception regions cannot cross function
+ boundaries.
+
+ Exception handlers can either be specified by the user (which we
+ will call a "user-defined handler") or generated by the compiler
+ (which we will designate as a "cleanup"). Cleanups are used to
+ perform tasks such as destruction of objects allocated on the
+ stack.
+
+ In the current implementation, cleanups are handled by allocating an
+ exception region for the area that the cleanup is designated for,
+ and the handler for the region performs the cleanup and then
+ rethrows the exception to the outer exception region. From the
+ standpoint of the current implementation, there is little
+ distinction made between a cleanup and a user-defined handler, and
+ the phrase "exception handler" can be used to refer to either one
+ equally well. (The section "Future Directions" below discusses how
+ this will change).
+
+ Each object file that is compiled with exception handling contains
+ a static array of exception handlers named __EXCEPTION_TABLE__.
+ Each entry contains the starting and ending addresses of the
+ exception region, and the address of the handler designated for
+ that region.
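+
+ Conceptually (an illustrative sketch only; the field names here are
+ invented, and the real layout is defined by libgcc2.c and by
+ output_exception_table_entry below), each entry can be pictured as:
+
+	struct eh_table_entry
+	{
+	  void *region_start;	/* first address covered by the region */
+	  void *region_end;	/* first address past the region */
+	  void *handler;	/* the handler designated for the region */
+	};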
+
+ If the target does not use the DWARF 2 frame unwind information, at
+ program startup each object file invokes a function named
+ __register_exceptions with the address of its local
+ __EXCEPTION_TABLE__. __register_exceptions is defined in libgcc2.c, and
+ is responsible for recording all of the exception regions into one list
+ (which is kept in a static variable named exception_table_list).
+
+ On targets that support crtstuff.c, the unwind information
+ is stored in a section named .eh_frame and the information for the
+ entire shared object or program is registered with a call to
+ __register_frame_info. On other targets, the information for each
+ translation unit is registered from the file generated by collect2.
+ __register_frame_info is defined in frame.c, and is responsible for
+ recording all of the unwind regions into one list (which is kept in a
+ static variable named unwind_table_list).
+
+ The function __throw is actually responsible for doing the
+ throw. On machines that have unwind info support, __throw is generated
+ by code in libgcc2.c, otherwise __throw is generated on a
+ per-object-file basis for each source file compiled with
+ -fexceptions by the C++ frontend. Before __throw is invoked,
+ the current context of the throw needs to be placed in the global
+ variable __eh_pc.
+
+ __throw attempts to find the appropriate exception handler for the
+ PC value stored in __eh_pc by calling __find_first_exception_table_match
+ (which is defined in libgcc2.c). If __find_first_exception_table_match
+ finds a relevant handler, __throw transfers control directly to it.
+
+ If a handler for the context being thrown from can't be found, __throw
+ walks the stack (see Walking the stack below) up the dynamic call chain
+ to continue searching for an appropriate exception handler, based upon
+ the caller of the function it last sought an exception handler for. It
+ stops when either an exception handler is found or the top of the call
+ chain is reached.
+
+ If no handler is found, an external library function named
+ __terminate is called. If a handler is found, then we restart
+ our search for a handler at the end of the call chain, and repeat
+ the search process, but instead of just walking up the call chain,
+ we unwind the call chain as we walk up it.
+
+ Internal implementation details:
+
+ To associate a user-defined handler with a block of statements, the
+ function expand_start_try_stmts is used to mark the start of the
+ block of statements with which the handler is to be associated
+ (which is known as a "try block"). All statements that appear
+ afterwards will be associated with the try block.
+
+ A call to expand_start_all_catch marks the end of the try block,
+ and also marks the start of the "catch block" (the user-defined
+ handler) associated with the try block.
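+
+ As an illustrative sketch only (f and handle are placeholders, and
+ the exact calls a given frontend makes may differ), a C++ fragment
+ such as
+
+	try { f (); } catch (...) { handle (); }
+
+ would be expanded roughly as
+
+	expand_start_try_stmts ();
+	  ... code for f () ...
+	expand_start_all_catch ();
+	  ... code for handle () ...
+	expand_end_all_catch ();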
+
+ This user-defined handler will be invoked for *every* exception
+ thrown with the context of the try block. It is up to the handler
+ to decide whether or not it wishes to handle any given exception,
+ as there is currently no mechanism in this implementation for doing
+ this. (There are plans for conditionally processing an exception
+ based on its "type", which will provide a language-independent
+ mechanism).
+
+ If the handler chooses not to process the exception (perhaps by
+ looking at an "exception type" or some other additional data
+ supplied with the exception), it can fall through to the end of the
+ handler. expand_end_all_catch and expand_leftover_cleanups
+ add additional code to the end of each handler to take care of
+ rethrowing to the outer exception handler.
+
+ The handler also has the option to continue with "normal flow of
+ code", or in other words to resume executing at the statement
+ immediately after the end of the exception region. The variable
+ caught_return_label_stack contains a stack of labels, and jumping
+ to the topmost entry's label via expand_goto will resume normal
+ flow to the statement immediately after the end of the exception
+ region. If the handler falls through to the end, the exception will
+ be rethrown to the outer exception region.
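+
+ For instance (a sketch of what a frontend might emit at that point),
+ resuming normal flow from within a handler amounts to
+
+	expand_goto (top_label_entry (&caught_return_label_stack));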
+
+ The instructions for the catch block are kept as a separate
+ sequence, and will be emitted at the end of the function along with
+ the handlers specified via expand_eh_region_end. The end of the
+ catch block is marked with expand_end_all_catch.
+
+ Any data associated with the exception must currently be handled by
+ some external mechanism maintained in the frontend. For example,
+ the C++ exception mechanism passes an arbitrary value along with
+ the exception, and this is handled in the C++ frontend by using a
+ global variable to hold the value. (This will be changing in the
+ future.)
+
+ The mechanism in C++ for handling data associated with the
+ exception is clearly not thread-safe. For a thread-based
+ environment, another mechanism must be used (possibly using a
+ per-thread allocation mechanism if the size of the area that needs
+ to be allocated isn't known at compile time.)
+
+ Internally-generated exception regions (cleanups) are marked by
+ calling expand_eh_region_start to mark the start of the region,
+ and expand_eh_region_end (handler) is used to both designate the
+ end of the region and to associate a specified handler/cleanup with
+ the region. The rtl code in HANDLER will be invoked whenever an
+ exception occurs in the region between the calls to
+ expand_eh_region_start and expand_eh_region_end. After HANDLER is
+ executed, additional code is emitted to handle rethrowing the
+ exception to the outer exception handler. The code for HANDLER will
+ be emitted at the end of the function.
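+
+ Schematically (a simplified sketch; CLEANUP stands for whatever tree
+ the frontend builds for the cleanup action), protecting a stretch of
+ code with a cleanup looks like
+
+	expand_eh_region_start ();
+	  ... emit the code to be protected ...
+	expand_eh_region_end (cleanup);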
+
+ TARGET_EXPRs can also be used to designate exception regions. A
+ TARGET_EXPR gives an unwind-protect style interface commonly used
+ in functional languages such as LISP. The associated expression is
+ evaluated, and whether or not it (or any of the functions that it
+ calls) throws an exception, the protect expression is always
+ invoked. This implementation takes care of the details of
+ associating an exception table entry with the expression and
+ generating the necessary code (it actually emits the protect
+ expression twice, once for normal flow and once for the exception
+ case). As for the other handlers, the code for the exception case
+ will be emitted at the end of the function.
+
+ Cleanups can also be specified by using add_partial_entry (handler)
+ and end_protect_partials. add_partial_entry creates the start of
+ a new exception region; HANDLER will be invoked if an exception is
+ thrown with the context of the region between the calls to
+ add_partial_entry and end_protect_partials. end_protect_partials is
+ used to mark the end of these regions. add_partial_entry can be
+ called as many times as needed before calling end_protect_partials.
+ However, end_protect_partials should only be invoked once for each
+ group of calls to add_partial_entry as the entries are queued
+ and all of the outstanding entries are processed simultaneously
+ when end_protect_partials is invoked. Similarly to the other
+ handlers, the code for HANDLER will be emitted at the end of the
+ function.
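+
+ For example (a sketch only; handler_1 and handler_2 are arbitrary
+ cleanup trees), several partial regions can be opened and then
+ closed together:
+
+	add_partial_entry (handler_1);
+	  ... code protected by handler_1 ...
+	add_partial_entry (handler_2);
+	  ... code protected by handler_1 and handler_2 ...
+	end_protect_partials ();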
+
+ The generated RTL for an exception region includes
+ NOTE_INSN_EH_REGION_BEG and NOTE_INSN_EH_REGION_END notes that mark
+ the start and end of the exception region. A unique label is also
+ generated at the start of the exception region, which is available
+ by looking at the ehstack variable. The topmost entry corresponds
+ to the current region.
+
+ In the current implementation, an exception can only be thrown from
+ a function call (since the mechanism used to actually throw an
+ exception involves calling __throw). If an exception region is
+ created but no function calls occur within that region, the region
+ can be safely optimized away (along with its exception handlers)
+ since no exceptions can ever be caught in that region. This
+ optimization is performed unless -fasynchronous-exceptions is
+ given. If the user wishes to throw from a signal handler, or other
+ asynchronous place, -fasynchronous-exceptions should be used when
+ compiling for maximally correct code, at the cost of additional
+ exception regions. Using -fasynchronous-exceptions only produces
+ code that is reasonably safe in such situations, but a correct
+ program cannot rely upon this working. It can be used in failsafe
+ code, where trying to continue on and proceeding with potentially
+ incorrect results is better than halting the program.
+
+
+ Walking the stack:
+
+ The stack is walked by starting with a pointer to the current
+ frame and finding the pointer to the caller's frame. The unwind info
+ tells __throw how to find it.
+
+ Unwinding the stack:
+
+ When we use the term unwinding the stack, we mean undoing the
+ effects of the function prologue in a controlled fashion so that we
+ still have the flow of control. Otherwise, we could just return
+ (jump to the normal end of function epilogue).
+
+ This is done in __throw in libgcc2.c when we know that a handler exists
+ in a frame higher up the call stack than its immediate caller.
+
+ To unwind, we find the unwind data associated with the frame, if any.
+ If we don't find any, we call the library routine __terminate. If we do
+ find it, we use the information to copy the saved register values from
+ that frame into the register save area in the frame for __throw, return
+ into a stub which updates the stack pointer, and jump to the handler.
+ The normal function epilogue for __throw handles restoring the saved
+ values into registers.
+
+ When unwinding, we use this method if we know it will
+ work (if DWARF2_UNWIND_INFO is defined). Otherwise, we know that
+ an inline unwinder will have been emitted for any function that
+ __unwind_function cannot unwind. The inline unwinder appears as a
+ normal exception handler for the entire function, for any function
+ that we know cannot be unwound by __unwind_function. We inform the
+ compiler of whether a function can be unwound with
+ __unwind_function by having DOESNT_NEED_UNWINDER evaluate to true
+ when the unwinder isn't needed. __unwind_function is used as an
+ action of last resort. If no other method can be used for
+ unwinding, __unwind_function is used. If it cannot unwind, it
+ should call __terminate.
+
+ By default, if the target-specific backend doesn't supply a definition
+ for __unwind_function and doesn't support DWARF2_UNWIND_INFO, inlined
+ unwinders will be used instead. The main tradeoff here is in text space
+ utilization. Obviously, if inline unwinders have to be generated
+ repeatedly, this uses much more space than if a single routine is used.
+
+ However, it is simply not possible on some platforms to write a
+ generalized routine for doing stack unwinding without having some
+ form of additional data associated with each function. The current
+ implementation can encode this data in the form of additional
+ machine instructions or as static data in tabular form. The latter
+ is called the unwind data.
+
+ The backend macro DOESNT_NEED_UNWINDER is used to conditionalize whether
+ or not per-function unwinders are needed. If DOESNT_NEED_UNWINDER is
+ defined and has a non-zero value, a per-function unwinder is not emitted
+ for the current function. If the static unwind data is supported, then
+ a per-function unwinder is not emitted.
+
+ On some platforms it is possible that neither __unwind_function
+ nor inlined unwinders are available. For these platforms it is not
+ possible to throw through a function call, and abort will be
+ invoked instead of performing the throw.
+
+ The reason the unwind data may be needed is that on some platforms
+ the order and types of data stored on the stack can vary depending
+ on the type of function, its arguments and returned values, and the
+ compilation options used (optimization versus non-optimization,
+ -fomit-frame-pointer, processor variations, etc).
+
+ Unfortunately, this also means that throwing through functions that
+ aren't compiled with exception handling support will still not be
+ possible on some platforms. This problem is currently being
+ investigated, but no solutions have been found that do not imply
+ some unacceptable performance penalties.
+
+ Future directions:
+
+ Currently __throw makes no differentiation between cleanups and
+ user-defined exception regions. While this makes the implementation
+ simple, it also implies that it is impossible to determine if a
+ user-defined exception handler exists for a given exception without
+ completely unwinding the stack in the process. This is undesirable
+ from the standpoint of debugging, as ideally it would be possible
+ to trap unhandled exceptions in the debugger before the process of
+ unwinding has even started.
+
+ This problem can be solved by marking user-defined handlers in a
+ special way (probably by adding additional bits to exception_table_list).
+ A two-pass scheme could then be used by __throw to iterate
+ through the table. The first pass would search for a relevant
+ user-defined handler for the current context of the throw, and if
+ one is found, the second pass would then invoke all needed cleanups
+ before jumping to the user-defined handler.
+
+ Many languages (including C++ and Ada) make execution of a
+ user-defined handler conditional on the "type" of the exception
+ thrown. (The type of the exception is actually the type of the data
+ that is thrown with the exception.) It will thus be necessary for
+ __throw to be able to determine if a given user-defined
+ exception handler will actually be executed, given the type of
+ exception.
+
+ One scheme is to add additional information to exception_table_list
+ as to the types of exceptions accepted by each handler. __throw
+ can do the type comparisons and then determine if the handler is
+ actually going to be executed.
+
+ There is currently no significant level of debugging support
+ available, other than to place a breakpoint on __throw. While
+ this is sufficient in most cases, it would be helpful to be able to
+ know where a given exception was going to be thrown to before it is
+ actually thrown, and to be able to choose between stopping before
+ every exception region (including cleanups), or just user-defined
+ exception regions. This should be possible to do in the two-pass
+ scheme by adding additional labels to __throw for appropriate
+ breakpoints, and additional debugger commands could be added to
+ query various state variables to determine what actions are to be
+ performed next.
+
+ Another major problem that is being worked on is the issue with stack
+ unwinding on various platforms. Currently the only platforms that have
+ support for the generation of a generic unwinder are the SPARC and MIPS.
+ All other ports require per-function unwinders, which produce large
+ amounts of code bloat.
+
+ For setjmp/longjmp based exception handling, some of the details
+ are as above, but there are some differences. This section
+ discusses them.
+
+ We don't use NOTE_INSN_EH_REGION_{BEG,END} pairs. We don't
+ optimize EH regions yet. We don't have to worry about machine
+ specific issues with unwinding the stack, as we rely upon longjmp
+ for all the machine specific details. There is no variable context
+ of a throw, just the one implied by the dynamic handler stack
+ pointed to by the dynamic handler chain. There is no exception
+ table, and no calls to __register_exceptions. __sjthrow is used
+ instead of __throw, and it works by using the dynamic handler
+ chain, and longjmp. -fasynchronous-exceptions has no effect, as
+ the elimination of trivial exception regions is not yet performed.
+
+ A frontend can set protect_cleanup_actions_with_terminate when all
+ the cleanup actions should be protected with an EH region that
+ calls terminate when an unhandled exception is thrown. C++ does
+ this, Ada does not. */
+
+
+#include "config.h"
+#include "defaults.h"
+#include <stdio.h>
+#include "rtl.h"
+#include "tree.h"
+#include "flags.h"
+#include "except.h"
+#include "function.h"
+#include "insn-flags.h"
+#include "expr.h"
+#include "insn-codes.h"
+#include "regs.h"
+#include "hard-reg-set.h"
+#include "insn-config.h"
+#include "recog.h"
+#include "output.h"
+
+/* One to use setjmp/longjmp method of generating code for exception
+ handling. */
+
+int exceptions_via_longjmp = 2;
+
+/* One to enable asynchronous exception support. */
+
+int asynchronous_exceptions = 0;
+
+/* One to protect cleanup actions with a handler that calls
+ __terminate, zero otherwise. */
+
+int protect_cleanup_actions_with_terminate = 0;
+
+/* A list of labels used for exception handlers. Created by
+ find_exception_handler_labels for the optimization passes. */
+
+rtx exception_handler_labels;
+
+/* Nonzero means that __throw was invoked.
+
+ This is used by the C++ frontend to know if code needs to be emitted
+ for __throw or not. */
+
+int throw_used;
+
+/* The dynamic handler chain. Nonzero if the function has already
+ fetched a pointer to the dynamic handler chain for exception
+ handling. */
+
+rtx current_function_dhc;
+
+/* The dynamic cleanup chain. Nonzero if the function has already
+ fetched a pointer to the dynamic cleanup chain for exception
+ handling. */
+
+rtx current_function_dcc;
+
+/* A stack used for keeping track of the currently active exception
+ handling region. As each exception region is started, an entry
+ describing the region is pushed onto this stack. The current
+ region can be found by looking at the top of the stack, and as we
+ exit regions, the corresponding entries are popped.
+
+ Entries cannot overlap; they can be nested. So there is only one
+ entry at most that corresponds to the current instruction, and that
+ is the entry on the top of the stack. */
+
+static struct eh_stack ehstack;
+
+/* A queue used for tracking which exception regions have closed but
+ whose handlers have not yet been expanded. Regions are emitted in
+ groups in an attempt to improve paging performance.
+
+ As we exit a region, we enqueue a new entry. The entries are then
+ dequeued during expand_leftover_cleanups and expand_start_all_catch.
+
+ We should redo things so that we either take RTL for the handler,
+ or we expand the handler expressed as a tree immediately at region
+ end time. */
+
+static struct eh_queue ehqueue;
+
+/* Insns for all of the exception handlers for the current function.
+ They are currently emitted by the frontend code. */
+
+rtx catch_clauses;
+
+/* A TREE_CHAINed list of handlers for regions that are not yet
+ closed. The TREE_VALUE of each entry contains the handler for the
+ corresponding entry on the ehstack. */
+
+static tree protect_list;
+
+/* Stacks to keep track of various labels. */
+
+/* Keeps track of the label to resume to should one want to resume
+ normal control flow out of a handler (instead of, say, returning to
+ the caller of the current function or exiting the program). */
+
+struct label_node *caught_return_label_stack = NULL;
+
+/* Keeps track of the label used as the context of a throw to rethrow an
+ exception to the outer exception region. */
+
+struct label_node *outer_context_label_stack = NULL;
+
+/* A random data area for the front end's own use. */
+
+struct label_node *false_label_stack = NULL;
+
+/* The rtx and the tree for the saved PC value. */
+
+rtx eh_saved_pc_rtx;
+tree eh_saved_pc;
+
+rtx expand_builtin_return_addr PROTO((enum built_in_function, int, rtx));
+
+/* Various support routines to manipulate the various data structures
+ used by the exception handling code. */
+
+/* Push a label entry onto the given STACK. */
+
+void
+push_label_entry (stack, rlabel, tlabel)
+ struct label_node **stack;
+ rtx rlabel;
+ tree tlabel;
+{
+ struct label_node *newnode
+ = (struct label_node *) xmalloc (sizeof (struct label_node));
+
+ if (rlabel)
+ newnode->u.rlabel = rlabel;
+ else
+ newnode->u.tlabel = tlabel;
+ newnode->chain = *stack;
+ *stack = newnode;
+}
+
+/* Pop a label entry from the given STACK. */
+
+rtx
+pop_label_entry (stack)
+ struct label_node **stack;
+{
+ rtx label;
+ struct label_node *tempnode;
+
+ if (! *stack)
+ return NULL_RTX;
+
+ tempnode = *stack;
+ label = tempnode->u.rlabel;
+ *stack = (*stack)->chain;
+ free (tempnode);
+
+ return label;
+}
+
+/* Return the top element of the given STACK. */
+
+tree
+top_label_entry (stack)
+ struct label_node **stack;
+{
+ if (! *stack)
+ return NULL_TREE;
+
+ return (*stack)->u.tlabel;
+}
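+
+/* Usage note (illustrative only, mirroring expand_start_all_catch and
+ expand_end_all_catch below): a resume label is typically saved and
+ later discarded with
+
+	push_label_entry (&caught_return_label_stack, NULL_RTX, label);
+	...
+	pop_label_entry (&caught_return_label_stack);  */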
+
+/* Make a copy of ENTRY using xmalloc to allocate the space. */
+
+static struct eh_entry *
+copy_eh_entry (entry)
+ struct eh_entry *entry;
+{
+ struct eh_entry *newentry;
+
+ newentry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
+ bcopy ((char *) entry, (char *) newentry, sizeof (struct eh_entry));
+
+ return newentry;
+}
+
+/* Push a new eh_node entry onto STACK. */
+
+static void
+push_eh_entry (stack)
+ struct eh_stack *stack;
+{
+ struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
+ struct eh_entry *entry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
+
+ entry->outer_context = gen_label_rtx ();
+ entry->exception_handler_label = gen_label_rtx ();
+ entry->finalization = NULL_TREE;
+
+ node->entry = entry;
+ node->chain = stack->top;
+ stack->top = node;
+}
+
+/* Pop an entry from the given STACK. */
+
+static struct eh_entry *
+pop_eh_entry (stack)
+ struct eh_stack *stack;
+{
+ struct eh_node *tempnode;
+ struct eh_entry *tempentry;
+
+ tempnode = stack->top;
+ tempentry = tempnode->entry;
+ stack->top = stack->top->chain;
+ free (tempnode);
+
+ return tempentry;
+}
+
+/* Enqueue an ENTRY onto the given QUEUE. */
+
+static void
+enqueue_eh_entry (queue, entry)
+ struct eh_queue *queue;
+ struct eh_entry *entry;
+{
+ struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
+
+ node->entry = entry;
+ node->chain = NULL;
+
+ if (queue->head == NULL)
+ {
+ queue->head = node;
+ }
+ else
+ {
+ queue->tail->chain = node;
+ }
+ queue->tail = node;
+}
+
+/* Dequeue an entry from the given QUEUE. */
+
+static struct eh_entry *
+dequeue_eh_entry (queue)
+ struct eh_queue *queue;
+{
+ struct eh_node *tempnode;
+ struct eh_entry *tempentry;
+
+ if (queue->head == NULL)
+ return NULL;
+
+ tempnode = queue->head;
+ queue->head = queue->head->chain;
+
+ tempentry = tempnode->entry;
+ free (tempnode);
+
+ return tempentry;
+}
+
+/* Routine to see if exception handling is turned on.
+ DO_WARN is non-zero if we want to inform the user that exception
+ handling is turned off.
+
+ This is used to ensure that -fexceptions has been specified if the
+ compiler tries to use any exception-specific functions. */
+
+int
+doing_eh (do_warn)
+ int do_warn;
+{
+ if (! flag_exceptions)
+ {
+ static int warned = 0;
+ if (! warned && do_warn)
+ {
+ error ("exception handling disabled, use -fexceptions to enable");
+ warned = 1;
+ }
+ return 0;
+ }
+ return 1;
+}
+
+/* Given a return address in ADDR, determine the address we should use
+ to find the corresponding EH region. */
+
+rtx
+eh_outer_context (addr)
+ rtx addr;
+{
+ /* First mask out any unwanted bits. */
+#ifdef MASK_RETURN_ADDR
+ expand_and (addr, MASK_RETURN_ADDR, addr);
+#endif
+
+ /* Then adjust to find the real return address. */
+#if defined (RETURN_ADDR_OFFSET)
+ addr = plus_constant (addr, RETURN_ADDR_OFFSET);
+#endif
+
+ return addr;
+}
+
+/* Start a new exception region for a region of code that has a
+ cleanup action and push the HANDLER for the region onto
+ protect_list. All of the regions created with add_partial_entry
+ will be ended when end_protect_partials is invoked. */
+
+void
+add_partial_entry (handler)
+ tree handler;
+{
+ expand_eh_region_start ();
+
+ /* Make sure the entry is on the correct obstack. */
+ push_obstacks_nochange ();
+ resume_temporary_allocation ();
+
+ /* Because this is a cleanup action, we may have to protect the handler
+ with __terminate. */
+ handler = protect_with_terminate (handler);
+
+ protect_list = tree_cons (NULL_TREE, handler, protect_list);
+ pop_obstacks ();
+}
+
+/* Get a reference to the dynamic handler chain. It points to the
+ pointer to the next element in the dynamic handler chain. It ends
+ when there are no more elements in the dynamic handler chain, when
+ the value is &top_elt from libgcc2.c. Immediately after the
+ pointer is an area suitable for setjmp/longjmp when
+ DONT_USE_BUILTIN_SETJMP is defined, and an area suitable for
+ __builtin_setjmp/__builtin_longjmp when DONT_USE_BUILTIN_SETJMP
+ isn't defined.
+
+ This routine is here to facilitate the porting of this code to
+ systems with threads. One can either replace the routine we emit a
+ call for here in libgcc2.c, or one can modify this routine to work
+ with their thread system. */
+
+rtx
+get_dynamic_handler_chain ()
+{
+#if 0
+ /* Do this once we figure out how to get this to the front of the
+ function, and we really only want one per real function, not one
+ per inlined function. */
+ if (current_function_dhc == 0)
+ {
+ rtx dhc, insns;
+ start_sequence ();
+
+ dhc = emit_library_call_value (get_dynamic_handler_chain_libfunc,
+ NULL_RTX, 1,
+ Pmode, 0);
+ current_function_dhc = copy_to_reg (dhc);
+ insns = get_insns ();
+ end_sequence ();
+ emit_insns_before (insns, get_first_nonparm_insn ());
+ }
+#else
+ rtx dhc;
+ dhc = emit_library_call_value (get_dynamic_handler_chain_libfunc,
+ NULL_RTX, 1,
+ Pmode, 0);
+ current_function_dhc = copy_to_reg (dhc);
+#endif
+
+ /* We don't want a copy of the dhc, but rather, the single dhc. */
+ return gen_rtx (MEM, Pmode, current_function_dhc);
+}
+
+/* Get a reference to the dynamic cleanup chain. It points to the
+ pointer to the next element in the dynamic cleanup chain.
+ Immediately after the pointer, are two Pmode variables, one for a
+ pointer to a function that performs the cleanup action, and the
+ second, the argument to pass to that function. */
+
+rtx
+get_dynamic_cleanup_chain ()
+{
+ rtx dhc, dcc;
+
+ dhc = get_dynamic_handler_chain ();
+ dcc = plus_constant (dhc, GET_MODE_SIZE (Pmode));
+
+ current_function_dcc = copy_to_reg (dcc);
+
+ /* We don't want a copy of the dcc, but rather, the single dcc. */
+ return gen_rtx (MEM, Pmode, current_function_dcc);
+}
+
+/* Generate code to evaluate X and jump to LABEL if the value is nonzero.
+ LABEL is an rtx of code CODE_LABEL, in this function. */
+
+void
+jumpif_rtx (x, label)
+ rtx x;
+ rtx label;
+{
+ jumpif (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
+}
+
+/* Generate code to evaluate X and jump to LABEL if the value is zero.
+ LABEL is an rtx of code CODE_LABEL, in this function. */
+
+void
+jumpifnot_rtx (x, label)
+ rtx x;
+ rtx label;
+{
+ jumpifnot (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
+}
+
+/* Start a dynamic cleanup on the EH runtime dynamic cleanup stack.
+ We just need to create an element for the cleanup list, and push it
+ into the chain.
+
+ A dynamic cleanup is a cleanup action implied by the presence of an
+ element on the EH runtime dynamic cleanup stack that is to be
+ performed when an exception is thrown. The cleanup action is
+ performed by __sjthrow when an exception is thrown. Only certain
+ actions can be optimized into dynamic cleanup actions. For the
+ restrictions on what actions can be performed using this routine,
+ see expand_eh_region_start_tree. */
+
+static void
+start_dynamic_cleanup (func, arg)
+ tree func;
+ tree arg;
+{
+ rtx dhc, dcc;
+ rtx new_func, new_arg;
+ rtx x, buf;
+ int size;
+
+ /* We allocate enough room for a pointer to the function, and
+ one argument. */
+ size = 2;
+
+ /* XXX, FIXME: The stack space allocated this way is too long lived,
+ but there is no allocation routine that allocates at the level of
+ the last binding contour. */
+ buf = assign_stack_local (BLKmode,
+ GET_MODE_SIZE (Pmode)*(size+1),
+ 0);
+
+ buf = change_address (buf, Pmode, NULL_RTX);
+
+ /* Store dcc into the first word of the newly allocated buffer. */
+
+ dcc = get_dynamic_cleanup_chain ();
+ emit_move_insn (buf, dcc);
+
+ /* Store func and arg into the cleanup list element. */
+
+ new_func = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
+ GET_MODE_SIZE (Pmode)));
+ new_arg = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
+ GET_MODE_SIZE (Pmode)*2));
+ x = expand_expr (func, new_func, Pmode, 0);
+ if (x != new_func)
+ emit_move_insn (new_func, x);
+
+ x = expand_expr (arg, new_arg, Pmode, 0);
+ if (x != new_arg)
+ emit_move_insn (new_arg, x);
+
+ /* Update the cleanup chain. */
+
+ emit_move_insn (dcc, XEXP (buf, 0));
+}
+
+/* Emit RTL to start a dynamic handler on the EH runtime dynamic
+ handler stack. This should only be used by expand_eh_region_start
+ or expand_eh_region_start_tree. */
+
+static void
+start_dynamic_handler ()
+{
+ rtx dhc, dcc;
+ rtx x, arg, buf;
+ int size;
+
+#ifndef DONT_USE_BUILTIN_SETJMP
+ /* The number of Pmode words for the setjmp buffer, when using the
+ builtin setjmp/longjmp, see expand_builtin, case
+ BUILT_IN_LONGJMP. */
+ size = 5;
+#else
+#ifdef JMP_BUF_SIZE
+ size = JMP_BUF_SIZE;
+#else
+ /* This should be large enough for most systems; if it is not,
+ JMP_BUF_SIZE should be defined with the proper value. It will
+ also tend to be larger than necessary for most systems; a more
+ optimal port will define JMP_BUF_SIZE. */
+ size = FIRST_PSEUDO_REGISTER+2;
+#endif
+#endif
+ /* XXX, FIXME: The stack space allocated this way is too long lived,
+ but there is no allocation routine that allocates at the level of
+ the last binding contour. */
+ arg = assign_stack_local (BLKmode,
+ GET_MODE_SIZE (Pmode)*(size+1),
+ 0);
+
+ arg = change_address (arg, Pmode, NULL_RTX);
+
+ /* Store dhc into the first word of the newly allocated buffer. */
+
+ dhc = get_dynamic_handler_chain ();
+ dcc = gen_rtx (MEM, Pmode, plus_constant (XEXP (arg, 0),
+ GET_MODE_SIZE (Pmode)));
+ emit_move_insn (arg, dhc);
+
+ /* Zero out the start of the cleanup chain. */
+ emit_move_insn (dcc, const0_rtx);
+
+ /* The jmpbuf starts two words into the area allocated. */
+ buf = plus_constant (XEXP (arg, 0), GET_MODE_SIZE (Pmode)*2);
+
+#ifdef DONT_USE_BUILTIN_SETJMP
+ x = emit_library_call_value (setjmp_libfunc, NULL_RTX, 1, SImode, 1,
+ buf, Pmode);
+#else
+ x = expand_builtin_setjmp (buf, NULL_RTX);
+#endif
+
+ /* If we come back here for a catch, transfer control to the
+ handler. */
+
+ jumpif_rtx (x, ehstack.top->entry->exception_handler_label);
+
+ /* We are committed to this, so update the handler chain. */
+
+ emit_move_insn (dhc, XEXP (arg, 0));
+}
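+
+/* For reference, the buffer built by start_dynamic_handler is laid out
+ as follows (an informal sketch, not a declared type):
+
+	word 0:    pointer to the previous element in the handler chain
+	word 1:    head of the dynamic cleanup chain (initially zero)
+	word 2...: the setjmp (or __builtin_setjmp) buffer  */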
+
+/* Start an exception handling region for the given cleanup action.
+ All instructions emitted after this point are considered to be part
+ of the region until expand_eh_region_end is invoked. CLEANUP is
+ the cleanup action to perform. The return value is true if the
+ exception region was optimized away. In that case,
+ expand_eh_region_end does not need to be called for this cleanup,
+ nor should it be.
+
+ This routine notices one particular common case in C++ code
+ generation, and optimizes it so as to not need the exception
+ region. It works by creating a dynamic cleanup action instead of
+ using an exception region. */
+
+int
+expand_eh_region_start_tree (decl, cleanup)
+ tree decl;
+ tree cleanup;
+{
+ rtx note;
+
+ /* This is the old code. */
+ if (! doing_eh (0))
+ return 0;
+
+ /* The optimization only applies to actions protected with
+ terminate, and only applies if we are using the setjmp/longjmp
+ codegen method. */
+ if (exceptions_via_longjmp
+ && protect_cleanup_actions_with_terminate)
+ {
+ tree func, arg;
+ tree args;
+
+ /* Ignore any UNSAVE_EXPR. */
+ if (TREE_CODE (cleanup) == UNSAVE_EXPR)
+ cleanup = TREE_OPERAND (cleanup, 0);
+
+ /* Further, it only applies if the action is a call, if there
+ are 2 arguments, and if the second argument is 2. */
+
+ if (TREE_CODE (cleanup) == CALL_EXPR
+ && (args = TREE_OPERAND (cleanup, 1))
+ && (func = TREE_OPERAND (cleanup, 0))
+ && (arg = TREE_VALUE (args))
+ && (args = TREE_CHAIN (args))
+
+ /* is the second argument 2? */
+ && TREE_CODE (TREE_VALUE (args)) == INTEGER_CST
+ && TREE_INT_CST_LOW (TREE_VALUE (args)) == 2
+ && TREE_INT_CST_HIGH (TREE_VALUE (args)) == 0
+
+ /* Make sure there are no other arguments. */
+ && TREE_CHAIN (args) == NULL_TREE)
+ {
+ /* Arrange for returns and gotos to pop the entry we make on the
+ dynamic cleanup stack. */
+ expand_dcc_cleanup (decl);
+ start_dynamic_cleanup (func, arg);
+ return 1;
+ }
+ }
+
+ expand_eh_region_start_for_decl (decl);
+
+ return 0;
+}
+
+/* Just like expand_eh_region_start, except if a cleanup action is
+ entered on the cleanup chain, the TREE_PURPOSE of the element put
+ on the chain is DECL. DECL should be the associated VAR_DECL, if
+ any, otherwise it should be NULL_TREE. */
+
+void
+expand_eh_region_start_for_decl (decl)
+ tree decl;
+{
+ rtx note;
+
+ /* This is the old code. */
+ if (! doing_eh (0))
+ return;
+
+ if (exceptions_via_longjmp)
+ {
+ /* We need a new block to record the start and end of the
+ dynamic handler chain. We could always do this, but we
+ really want to permit jumping into such a block, and we want
+ to avoid any errors or performance impact in the SJ EH code
+ for now. */
+ expand_start_bindings (0);
+
+ /* But we don't need or want a new temporary level. */
+ pop_temp_slots ();
+
+ /* Mark this block as created by expand_eh_region_start. This
+ is so that we can pop the block with expand_end_bindings
+ automatically. */
+ mark_block_as_eh_region ();
+
+ /* Arrange for returns and gotos to pop the entry we make on the
+ dynamic handler stack. */
+ expand_dhc_cleanup (decl);
+ }
+
+ if (exceptions_via_longjmp == 0)
+ note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_BEG);
+ push_eh_entry (&ehstack);
+ if (exceptions_via_longjmp == 0)
+ NOTE_BLOCK_NUMBER (note)
+ = CODE_LABEL_NUMBER (ehstack.top->entry->exception_handler_label);
+ if (exceptions_via_longjmp)
+ start_dynamic_handler ();
+}
+
+/* Start an exception handling region. All instructions emitted after
+ this point are considered to be part of the region until
+ expand_eh_region_end is invoked. */
+
+void
+expand_eh_region_start ()
+{
+ expand_eh_region_start_for_decl (NULL_TREE);
+}
+
+/* End an exception handling region. The information about the region
+ is found on the top of ehstack.
+
+ HANDLER is either the cleanup for the exception region, or if we're
+ marking the end of a try block, HANDLER is integer_zero_node.
+
+ HANDLER will be transformed to rtl when expand_leftover_cleanups
+ is invoked. */
+
+void
+expand_eh_region_end (handler)
+ tree handler;
+{
+ struct eh_entry *entry;
+
+ if (! doing_eh (0))
+ return;
+
+ entry = pop_eh_entry (&ehstack);
+
+ if (exceptions_via_longjmp == 0)
+ {
+ rtx label;
+ rtx note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_END);
+ NOTE_BLOCK_NUMBER (note) = CODE_LABEL_NUMBER (entry->exception_handler_label);
+
+ label = gen_label_rtx ();
+ emit_jump (label);
+
+ /* Emit a label marking the end of this exception region that
+ is used for rethrowing into the outer context. */
+ emit_label (entry->outer_context);
+
+ /* Put in something that takes up space, as otherwise the end
+ address for this EH region could have the exact same address as
+ its outer region. This would cause us to miss the fact that
+ resuming exception handling with this PC value would be inside
+ the outer region. */
+ emit_insn (gen_nop ());
+ emit_barrier ();
+ emit_label (label);
+ }
+
+ entry->finalization = handler;
+
+ enqueue_eh_entry (&ehqueue, entry);
+
+ /* If we have already started ending the bindings, don't recurse.
+ This only happens when exceptions_via_longjmp is true. */
+ if (is_eh_region ())
+ {
+ /* Because we don't need or want a new temporary level and
+ because we didn't create one in expand_eh_region_start,
+ create a fake one now to avoid removing one in
+ expand_end_bindings. */
+ push_temp_slots ();
+
+ mark_block_as_not_eh_region ();
+
+ /* Maybe do this to prevent jumping in and so on... */
+ expand_end_bindings (NULL_TREE, 0, 0);
+ }
+}
+
+/* If we are using the setjmp/longjmp EH codegen method, we emit a
+ call to __sjthrow.
+
+ Otherwise, we emit a call to __throw and note that we threw
+ something, so we know we need to generate the necessary code for
+ __throw.
+
+ Before invoking throw, the __eh_pc variable must have been set up
+ to contain the PC being thrown from. This address is used by
+ __throw to determine which exception region (if any) is
+ responsible for handling the exception. */
+
+void
+emit_throw ()
+{
+ if (exceptions_via_longjmp)
+ {
+ emit_library_call (sjthrow_libfunc, 0, VOIDmode, 0);
+ }
+ else
+ {
+#ifdef JUMP_TO_THROW
+ emit_indirect_jump (throw_libfunc);
+#else
+#ifndef DWARF2_UNWIND_INFO
+ /* Prevent assemble_external from doing anything with this symbol. */
+ SYMBOL_REF_USED (throw_libfunc) = 1;
+#endif
+ emit_library_call (throw_libfunc, 0, VOIDmode, 0);
+#endif
+ throw_used = 1;
+ }
+ emit_barrier ();
+}
+
+/* An internal throw with an indirect CONTEXT we want to throw from.
+ CONTEXT evaluates to the context of the throw. */
+
+static void
+expand_internal_throw_indirect (context)
+ rtx context;
+{
+ assemble_external (eh_saved_pc);
+ emit_move_insn (eh_saved_pc_rtx, context);
+ emit_throw ();
+}
+
+/* An internal throw with a direct CONTEXT we want to throw from.
+ CONTEXT must be a label; its address will be used as the context of
+ the throw. */
+
+void
+expand_internal_throw (context)
+ rtx context;
+{
+ expand_internal_throw_indirect (gen_rtx (LABEL_REF, Pmode, context));
+}
+
+/* Called from expand_exception_blocks and expand_end_catch_block to
+ emit any pending handlers/cleanups queued from expand_eh_region_end. */
+
+void
+expand_leftover_cleanups ()
+{
+ struct eh_entry *entry;
+
+ while ((entry = dequeue_eh_entry (&ehqueue)) != 0)
+ {
+ rtx prev;
+
+ /* A leftover try block. Shouldn't be one here. */
+ if (entry->finalization == integer_zero_node)
+ abort ();
+
+ /* Output the label for the start of the exception handler. */
+ emit_label (entry->exception_handler_label);
+
+#ifdef HAVE_exception_receiver
+ if (! exceptions_via_longjmp)
+ if (HAVE_exception_receiver)
+ emit_insn (gen_exception_receiver ());
+#endif
+
+#ifdef HAVE_nonlocal_goto_receiver
+ if (! exceptions_via_longjmp)
+ if (HAVE_nonlocal_goto_receiver)
+ emit_insn (gen_nonlocal_goto_receiver ());
+#endif
+
+ /* And now generate the insns for the handler. */
+ expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
+
+ prev = get_last_insn ();
+ if (prev == NULL || GET_CODE (prev) != BARRIER)
+ {
+ if (exceptions_via_longjmp)
+ emit_throw ();
+ else
+ {
+ /* The below can be optimized away, and we could just
+ fall into the next EH handler, if we are certain they
+ are nested. */
+ /* Emit code to throw to the outer context if we fall off
+ the end of the handler. */
+ expand_internal_throw (entry->outer_context);
+ }
+ }
+
+ do_pending_stack_adjust ();
+ free (entry);
+ }
+}
+
+/* Called at the start of a block of try statements. */
+void
+expand_start_try_stmts ()
+{
+ if (! doing_eh (1))
+ return;
+
+ expand_eh_region_start ();
+}
+
+/* Generate RTL for the start of a group of catch clauses.
+
+ It is responsible for starting a new instruction sequence for the
+ instructions in the catch block, and expanding the handlers for the
+ internally-generated exception regions nested within the try block
+ corresponding to this catch block. */
+
+void
+expand_start_all_catch ()
+{
+ struct eh_entry *entry;
+ tree label;
+
+ if (! doing_eh (1))
+ return;
+
+ push_label_entry (&outer_context_label_stack,
+ ehstack.top->entry->outer_context, NULL_TREE);
+
+ /* End the try block. */
+ expand_eh_region_end (integer_zero_node);
+
+ emit_line_note (input_filename, lineno);
+ label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
+
+ /* The label for the exception handling block that we will save.
+ This is Lresume in the documentation. */
+ expand_label (label);
+
+ if (exceptions_via_longjmp == 0)
+ {
+ /* Put in something that takes up space, as otherwise the end
+ address for the EH region could have the exact same address as
+ the outer region, causing us to miss the fact that resuming
+ exception handling with this PC value would be inside the outer
+ region. */
+ emit_insn (gen_nop ());
+ }
+
+ /* Push the label that points to where normal flow is resumed onto
+ the top of the label stack. */
+ push_label_entry (&caught_return_label_stack, NULL_RTX, label);
+
+ /* Start a new sequence for all the catch blocks. We will add this
+ to the global sequence catch_clauses when we have completed all
+ the handlers in this handler-seq. */
+ start_sequence ();
+
+ while (1)
+ {
+ rtx prev;
+
+ entry = dequeue_eh_entry (&ehqueue);
+ /* Emit the label for the exception handler for this region, and
+ expand the code for the handler.
+
+ Note that a catch region is handled as a side-effect here;
+ for a try block, entry->finalization will contain
+ integer_zero_node, so no code will be generated in the
+ expand_expr call below. But, the label for the handler will
+ still be emitted, so any code emitted after this point will
+ end up being the handler. */
+ emit_label (entry->exception_handler_label);
+
+#ifdef HAVE_exception_receiver
+ if (! exceptions_via_longjmp)
+ if (HAVE_exception_receiver)
+ emit_insn (gen_exception_receiver ());
+#endif
+
+#ifdef HAVE_nonlocal_goto_receiver
+ if (! exceptions_via_longjmp)
+ if (HAVE_nonlocal_goto_receiver)
+ emit_insn (gen_nonlocal_goto_receiver ());
+#endif
+
+ /* When we get down to the matching entry for this try block, stop. */
+ if (entry->finalization == integer_zero_node)
+ {
+ /* Don't forget to free this entry. */
+ free (entry);
+ break;
+ }
+
+ /* And now generate the insns for the handler. */
+ expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
+
+ prev = get_last_insn ();
+ if (prev == NULL || GET_CODE (prev) != BARRIER)
+ {
+ if (exceptions_via_longjmp)
+ emit_throw ();
+ else
+ {
+ /* Code to throw out to outer context when we fall off end
+ of the handler. We can't do this here for catch blocks,
+ so it's done in expand_end_all_catch instead.
+
+ The below can be optimized away (and we could just fall
+ into the next EH handler) if we are certain they are
+ nested. */
+
+ expand_internal_throw (entry->outer_context);
+ }
+ }
+ do_pending_stack_adjust ();
+ free (entry);
+ }
+}
+
+/* Finish up the catch block. At this point all the insns for the
+ catch clauses have already been generated, so we only have to add
+ them to the catch_clauses list. We also want to make sure that if
+ we fall off the end of the catch clauses that we rethrow to the
+ outer EH region. */
+
+void
+expand_end_all_catch ()
+{
+ rtx new_catch_clause;
+
+ if (! doing_eh (1))
+ return;
+
+ if (exceptions_via_longjmp)
+ emit_throw ();
+ else
+ {
+ /* Code to throw out to outer context, if we fall off end of catch
+ handlers. This is rethrow (Lresume, same id, same obj) in the
+ documentation. We use Lresume because we know that it will throw
+ to the correct context.
+
+ In other words, if the catch handler doesn't exit or return, we
+ do a "throw" (using the address of Lresume as the point being
+ thrown from) so that the outer EH region can then try to process
+ the exception. */
+
+ expand_internal_throw (outer_context_label_stack->u.rlabel);
+ }
+
+ /* Now we have the complete catch sequence. */
+ new_catch_clause = get_insns ();
+ end_sequence ();
+
+ /* This level of catch blocks is done, so set up the successful
+ catch jump label for the next layer of catch blocks. */
+ pop_label_entry (&caught_return_label_stack);
+ pop_label_entry (&outer_context_label_stack);
+
+ /* Add the new sequence of catches to the main one for this function. */
+ push_to_sequence (catch_clauses);
+ emit_insns (new_catch_clause);
+ catch_clauses = get_insns ();
+ end_sequence ();
+
+ /* Here we fall through into the continuation code. */
+}
+
+/* End all the pending exception regions on protect_list. The handlers
+ will be emitted when expand_leftover_cleanups is invoked. */
+
+void
+end_protect_partials ()
+{
+ while (protect_list)
+ {
+ expand_eh_region_end (TREE_VALUE (protect_list));
+ protect_list = TREE_CHAIN (protect_list);
+ }
+}
+
+/* Arrange for __terminate to be called if there is an unhandled throw
+ from within E. */
+
+tree
+protect_with_terminate (e)
+ tree e;
+{
+ /* We only need to do this when using setjmp/longjmp EH and the
+ language requires it, as otherwise we protect all of the handlers
+ at once, if we need to. */
+ if (exceptions_via_longjmp && protect_cleanup_actions_with_terminate)
+ {
+ tree handler, result;
+
+ /* All cleanups must be on the function_obstack. */
+ push_obstacks_nochange ();
+ resume_temporary_allocation ();
+
+ handler = make_node (RTL_EXPR);
+ TREE_TYPE (handler) = void_type_node;
+ RTL_EXPR_RTL (handler) = const0_rtx;
+ TREE_SIDE_EFFECTS (handler) = 1;
+ start_sequence_for_rtl_expr (handler);
+
+ emit_library_call (terminate_libfunc, 0, VOIDmode, 0);
+ emit_barrier ();
+
+ RTL_EXPR_SEQUENCE (handler) = get_insns ();
+ end_sequence ();
+
+ result = build (TRY_CATCH_EXPR, TREE_TYPE (e), e, handler);
+ TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
+ TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
+ TREE_READONLY (result) = TREE_READONLY (e);
+
+ pop_obstacks ();
+
+ e = result;
+ }
+
+ return e;
+}
+
+/* The exception table that we build that is used for looking up and
+ dispatching exceptions, the current number of entries, and its
+ maximum size before we have to extend it.
+
+ The number in eh_table is the code label number of the exception
+ handler for the region. This is added by add_eh_table_entry and
+ used by output_exception_table_entry. */
+
+static int *eh_table;
+static int eh_table_size;
+static int eh_table_max_size;
+
+/* Note the need for an exception table entry for region N. If we
+ don't need to output an explicit exception table, avoid all of the
+ extra work.
+
+ Called from final_scan_insn when a NOTE_INSN_EH_REGION_BEG is seen.
+ N is the NOTE_BLOCK_NUMBER of the note, which comes from the code
+ label number of the exception handler for the region. */
+
+void
+add_eh_table_entry (n)
+ int n;
+{
+#ifndef OMIT_EH_TABLE
+ if (eh_table_size >= eh_table_max_size)
+ {
+ if (eh_table)
+ {
+ eh_table_max_size += eh_table_max_size>>1;
+
+ if (eh_table_max_size < 0)
+ abort ();
+
+ eh_table = (int *) xrealloc (eh_table,
+ eh_table_max_size * sizeof (int));
+ }
+ else
+ {
+ eh_table_max_size = 252;
+ eh_table = (int *) xmalloc (eh_table_max_size * sizeof (int));
+ }
+ }
+ eh_table[eh_table_size++] = n;
+#endif
+}
+
+/* Return a non-zero value if we need to output an exception table.
+
+ On some platforms, we don't have to output a table explicitly.
+ A zero return from this routine does not mean we don't have one. */
+
+int
+exception_table_p ()
+{
+ if (eh_table)
+ return 1;
+
+ return 0;
+}
+
+/* 1 if we need a static constructor to register EH table info. */
+
+int
+register_exception_table_p ()
+{
+#if defined (DWARF2_UNWIND_INFO)
+ return 0;
+#endif
+
+ return exception_table_p ();
+}
+
+/* Output the entry of the exception table corresponding to the
+ exception region numbered N to file FILE.
+
+ N is the code label number corresponding to the handler of the
+ region. */
+
+static void
+output_exception_table_entry (file, n)
+ FILE *file;
+ int n;
+{
+ char buf[256];
+ rtx sym;
+
+ ASM_GENERATE_INTERNAL_LABEL (buf, "LEHB", n);
+ sym = gen_rtx (SYMBOL_REF, Pmode, buf);
+ assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
+
+ ASM_GENERATE_INTERNAL_LABEL (buf, "LEHE", n);
+ sym = gen_rtx (SYMBOL_REF, Pmode, buf);
+ assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
+
+ ASM_GENERATE_INTERNAL_LABEL (buf, "L", n);
+ sym = gen_rtx (SYMBOL_REF, Pmode, buf);
+ assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
+
+ putc ('\n', file); /* blank line */
+}
+
+/* Output the exception table if we have and need one. */
+
+void
+output_exception_table ()
+{
+ int i;
+ extern FILE *asm_out_file;
+
+ if (! doing_eh (0) || ! eh_table)
+ return;
+
+ exception_section ();
+
+ /* Beginning marker for table. */
+ assemble_align (GET_MODE_ALIGNMENT (ptr_mode));
+ assemble_label ("__EXCEPTION_TABLE__");
+
+ for (i = 0; i < eh_table_size; ++i)
+ output_exception_table_entry (asm_out_file, eh_table[i]);
+
+ free (eh_table);
+
+ /* Ending marker for table. */
+ assemble_label ("__EXCEPTION_END__");
+ assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
+ assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
+ assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
+ putc ('\n', asm_out_file); /* blank line */
+}
+
+/* Generate code to initialize the exception table at program startup
+ time. */
+
+void
+register_exception_table ()
+{
+ emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__register_exceptions"), 0,
+ VOIDmode, 1,
+ gen_rtx (SYMBOL_REF, Pmode, "__EXCEPTION_TABLE__"),
+ Pmode);
+}
+
+/* Emit the RTL for the start of the per-function unwinder for the
+ current function. See emit_unwinder for further information.
+
+ DOESNT_NEED_UNWINDER is a target-specific macro that determines if
+ the current function actually needs a per-function unwinder or not.
+ By default, all functions need one. */
+
+void
+start_eh_unwinder ()
+{
+#ifdef DOESNT_NEED_UNWINDER
+ if (DOESNT_NEED_UNWINDER)
+ return;
+#endif
+
+ /* If we are using the setjmp/longjmp implementation, we don't need a
+ per function unwinder. */
+
+ if (exceptions_via_longjmp)
+ return;
+
+#ifdef DWARF2_UNWIND_INFO
+ return;
+#endif
+
+ expand_eh_region_start ();
+}
+
+/* Emit insns for the end of the per-function unwinder for the
+ current function. */
+
+void
+end_eh_unwinder ()
+{
+ tree expr;
+ rtx return_val_rtx, ret_val, label, end, insns;
+
+ if (! doing_eh (0))
+ return;
+
+#ifdef DOESNT_NEED_UNWINDER
+ if (DOESNT_NEED_UNWINDER)
+ return;
+#endif
+
+ /* If we are using the setjmp/longjmp implementation, we don't need a
+ per function unwinder. */
+
+ if (exceptions_via_longjmp)
+ return;
+
+#ifdef DWARF2_UNWIND_INFO
+ return;
+#else /* DWARF2_UNWIND_INFO */
+
+ assemble_external (eh_saved_pc);
+
+ expr = make_node (RTL_EXPR);
+ TREE_TYPE (expr) = void_type_node;
+ RTL_EXPR_RTL (expr) = const0_rtx;
+ TREE_SIDE_EFFECTS (expr) = 1;
+ start_sequence_for_rtl_expr (expr);
+
+ /* ret_val will contain the address of the code where the call
+ to the current function occurred. */
+ ret_val = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
+ 0, hard_frame_pointer_rtx);
+ return_val_rtx = copy_to_reg (ret_val);
+
+ /* Get the address we need to use to determine what exception
+ handler should be invoked, and store it in __eh_pc. */
+ return_val_rtx = eh_outer_context (return_val_rtx);
+ return_val_rtx = expand_binop (Pmode, sub_optab, return_val_rtx, GEN_INT (1),
+ NULL_RTX, 0, OPTAB_LIB_WIDEN);
+ emit_move_insn (eh_saved_pc_rtx, return_val_rtx);
+
+ /* Either set things up so we do a return directly to __throw, or
+ we return here instead. */
+#ifdef JUMP_TO_THROW
+ emit_move_insn (ret_val, throw_libfunc);
+#else
+ label = gen_label_rtx ();
+ emit_move_insn (ret_val, gen_rtx (LABEL_REF, Pmode, label));
+#endif
+
+#ifdef RETURN_ADDR_OFFSET
+ return_val_rtx = plus_constant (ret_val, -RETURN_ADDR_OFFSET);
+ if (return_val_rtx != ret_val)
+ emit_move_insn (ret_val, return_val_rtx);
+#endif
+
+ end = gen_label_rtx ();
+ emit_jump (end);
+
+ RTL_EXPR_SEQUENCE (expr) = get_insns ();
+ end_sequence ();
+
+ expand_eh_region_end (expr);
+
+ emit_jump (end);
+
+#ifndef JUMP_TO_THROW
+ emit_label (label);
+ emit_throw ();
+#endif
+
+ expand_leftover_cleanups ();
+
+ emit_label (end);
+
+#ifdef HAVE_return
+ if (HAVE_return)
+ {
+ emit_jump_insn (gen_return ());
+ emit_barrier ();
+ }
+#endif
+#endif /* DWARF2_UNWIND_INFO */
+}
+
+/* If necessary, emit insns for the per function unwinder for the
+ current function. Called after all the code that needs unwind
+ protection is output.
+
+ The unwinder takes care of catching any exceptions that have not
+ been previously caught within the function, unwinding the stack to
+ the next frame, and rethrowing using the address of the current
+ function's caller as the context of the throw.
+
+ On some platforms __throw can do this by itself (or with the help
+   of __unwind_function), so the per-function unwinder is
+ unnecessary.
+
+ We cannot place the unwinder into the function until after we know
+ we are done inlining, as we don't want to have more than one
+ unwinder per non-inlined function. */
+
+void
+emit_unwinder ()
+{
+ rtx insns, insn;
+
+ start_sequence ();
+ start_eh_unwinder ();
+ insns = get_insns ();
+ end_sequence ();
+
+ /* We place the start of the exception region associated with the
+     per-function unwinder at the top of the function. */
+ if (insns)
+ emit_insns_after (insns, get_insns ());
+
+ start_sequence ();
+ end_eh_unwinder ();
+ insns = get_insns ();
+ end_sequence ();
+
+ /* And we place the end of the exception region before the USE and
+ CLOBBER insns that may come at the end of the function. */
+ if (insns == 0)
+ return;
+
+ insn = get_last_insn ();
+ while (GET_CODE (insn) == NOTE
+ || (GET_CODE (insn) == INSN
+ && (GET_CODE (PATTERN (insn)) == USE
+ || GET_CODE (PATTERN (insn)) == CLOBBER)))
+ insn = PREV_INSN (insn);
+
+ if (GET_CODE (insn) == CODE_LABEL
+ && GET_CODE (PREV_INSN (insn)) == BARRIER)
+ {
+ insn = PREV_INSN (insn);
+ }
+ else
+ {
+ rtx label = gen_label_rtx ();
+ emit_label_after (label, insn);
+ insn = emit_jump_insn_after (gen_jump (label), insn);
+ insn = emit_barrier_after (insn);
+ }
+
+ emit_insns_after (insns, insn);
+}
+
+/* Scan the current insns and build a list of handler labels. The
+ resulting list is placed in the global variable exception_handler_labels.
+
+ It is called after the last exception handling region is added to
+ the current function (when the rtl is almost all built for the
+ current function) and before the jump optimization pass. */
+
+void
+find_exception_handler_labels ()
+{
+ rtx insn;
+ int max_labelno = max_label_num ();
+ int min_labelno = get_first_label_num ();
+ rtx *labels;
+
+ exception_handler_labels = NULL_RTX;
+
+  /* If we aren't doing exception handling, there are no handler labels
+     to find. */
+ if (! doing_eh (0))
+ return;
+
+ /* Generate a handy reference to each label. */
+
+ /* We call xmalloc here instead of alloca; we did the latter in the past,
+ but found that it can sometimes end up being asked to allocate space
+ for more than 1 million labels. */
+ labels = (rtx *) xmalloc ((max_labelno - min_labelno) * sizeof (rtx));
+ bzero ((char *) labels, (max_labelno - min_labelno) * sizeof (rtx));
+
+ /* Arrange for labels to be indexed directly by CODE_LABEL_NUMBER. */
+ labels -= min_labelno;
+
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ {
+ if (GET_CODE (insn) == CODE_LABEL)
+ if (CODE_LABEL_NUMBER (insn) >= min_labelno
+ && CODE_LABEL_NUMBER (insn) < max_labelno)
+ labels[CODE_LABEL_NUMBER (insn)] = insn;
+ }
+
+ /* For each start of a region, add its label to the list. */
+
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ {
+ if (GET_CODE (insn) == NOTE
+ && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
+ {
+ rtx label = NULL_RTX;
+
+ if (NOTE_BLOCK_NUMBER (insn) >= min_labelno
+ && NOTE_BLOCK_NUMBER (insn) < max_labelno)
+ {
+ label = labels[NOTE_BLOCK_NUMBER (insn)];
+
+ if (label)
+ exception_handler_labels
+ = gen_rtx (EXPR_LIST, VOIDmode,
+ label, exception_handler_labels);
+ else
+ warning ("didn't find handler for EH region %d",
+ NOTE_BLOCK_NUMBER (insn));
+ }
+ else
+ warning ("mismatched EH region %d", NOTE_BLOCK_NUMBER (insn));
+ }
+ }
+
+ free (labels + min_labelno);
+}
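+
+/* The list built above is a chain of EXPR_LIST nodes; XEXP (node, 0) is
+   the handler's CODE_LABEL and XEXP (node, 1) is the next node.
+   Consumers of it (e.g. check_exception_handler_labels and scan_region
+   below) visit each handler label like this:
+
+	rtx x;
+	for (x = exception_handler_labels; x; x = XEXP (x, 1))
+	  ... examine XEXP (x, 0) ...
+
+   New entries are consed onto the front, so the list runs from the most
+   recently seen region back to the first.  */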
+
+/* Perform sanity checking on the exception_handler_labels list.
+
+   This may be called after find_exception_handler_labels has built the
+   list of exception handlers for the current function, and before we
+   finish processing the function. */
+
+void
+check_exception_handler_labels ()
+{
+ rtx insn, handler;
+
+ /* If we aren't doing exception handling, there isn't much to check. */
+ if (! doing_eh (0))
+ return;
+
+  /* Ensure that each handler label on the list has a CODE_LABEL in the
+     insn chain with the same CODE_LABEL_NUMBER, and that it is the
+     handler label itself. */
+
+ for (handler = exception_handler_labels;
+ handler;
+ handler = XEXP (handler, 1))
+ {
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ {
+ if (GET_CODE (insn) == CODE_LABEL)
+ {
+ if (CODE_LABEL_NUMBER (insn)
+ == CODE_LABEL_NUMBER (XEXP (handler, 0)))
+ {
+ if (insn != XEXP (handler, 0))
+ warning ("mismatched handler %d",
+ CODE_LABEL_NUMBER (insn));
+ break;
+ }
+ }
+ }
+ if (insn == NULL_RTX)
+ warning ("handler not found %d",
+ CODE_LABEL_NUMBER (XEXP (handler, 0)));
+ }
+
+ /* Now go through and make sure that for each region there is a
+ corresponding label. */
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ {
+ if (GET_CODE (insn) == NOTE
+ && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
+ || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
+ {
+ for (handler = exception_handler_labels;
+ handler;
+ handler = XEXP (handler, 1))
+ {
+ if (CODE_LABEL_NUMBER (XEXP (handler, 0))
+ == NOTE_BLOCK_NUMBER (insn))
+ break;
+ }
+ if (handler == NULL_RTX)
+ warning ("region exists, no handler %d",
+ NOTE_BLOCK_NUMBER (insn));
+ }
+ }
+}
+
+/* This group of functions initializes the exception handling data
+ structures at the start of the compilation, initializes the data
+ structures at the start of a function, and saves and restores the
+ exception handling data structures for the start/end of a nested
+ function. */
+
+/* Toplevel initialization for EH things. */
+
+void
+init_eh ()
+{
+ /* Generate rtl to reference the variable in which the PC of the
+ current context is saved. */
+ tree type = build_pointer_type (make_node (VOID_TYPE));
+
+ eh_saved_pc = build_decl (VAR_DECL, get_identifier ("__eh_pc"), type);
+ DECL_EXTERNAL (eh_saved_pc) = 1;
+ TREE_PUBLIC (eh_saved_pc) = 1;
+ make_decl_rtl (eh_saved_pc, NULL_PTR, 1);
+ eh_saved_pc_rtx = DECL_RTL (eh_saved_pc);
+}
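+
+/* The VAR_DECL built above merely references a variable supplied by the
+   runtime; in C it corresponds roughly to
+
+	extern void *__eh_pc;
+
+   the location end_eh_unwinder above stores the context of the throw
+   into.  */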
+
+/* Initialize the per-function EH information. */
+
+void
+init_eh_for_function ()
+{
+ ehstack.top = 0;
+ ehqueue.head = ehqueue.tail = 0;
+ catch_clauses = NULL_RTX;
+ false_label_stack = 0;
+ caught_return_label_stack = 0;
+ protect_list = NULL_TREE;
+ current_function_dhc = NULL_RTX;
+ current_function_dcc = NULL_RTX;
+}
+
+/* Save some of the per-function EH info into the save area denoted by
+ P.
+
+ This is currently called from save_stmt_status. */
+
+void
+save_eh_status (p)
+ struct function *p;
+{
+ p->ehstack = ehstack;
+ p->ehqueue = ehqueue;
+ p->catch_clauses = catch_clauses;
+ p->false_label_stack = false_label_stack;
+ p->caught_return_label_stack = caught_return_label_stack;
+ p->protect_list = protect_list;
+ p->dhc = current_function_dhc;
+ p->dcc = current_function_dcc;
+
+ init_eh ();
+}
+
+/* Restore the per-function EH info saved into the area denoted by P.
+
+ This is currently called from restore_stmt_status. */
+
+void
+restore_eh_status (p)
+ struct function *p;
+{
+ protect_list = p->protect_list;
+ caught_return_label_stack = p->caught_return_label_stack;
+ false_label_stack = p->false_label_stack;
+ catch_clauses = p->catch_clauses;
+ ehqueue = p->ehqueue;
+ ehstack = p->ehstack;
+ current_function_dhc = p->dhc;
+ current_function_dcc = p->dcc;
+}
+
+/* This section is for the exception handling specific optimization
+ pass. First are the internal routines, and then the main
+ optimization pass. */
+
+/* Determine if the given INSN can throw an exception. */
+
+static int
+can_throw (insn)
+ rtx insn;
+{
+ /* Calls can always potentially throw exceptions. */
+ if (GET_CODE (insn) == CALL_INSN)
+ return 1;
+
+ if (asynchronous_exceptions)
+ {
+      /* Since asynchronous exceptions are enabled, everything but NOTEs
+	 and CODE_LABELs could throw. */
+ if (GET_CODE (insn) != NOTE && GET_CODE (insn) != CODE_LABEL)
+ return 1;
+ }
+
+ return 0;
+}
+
+/* Scan an exception region looking for the matching end and then
+   remove it if possible. INSN is the start of the region, N is the
+   region number, and DELETE_OUTER points to a flag that is cleared if
+   anything in this region can throw.
+
+   Regions are removed only if nothing within them can throw an
+   exception. This is determined by invoking can_throw on each insn
+   within the region; if can_throw returns true for any insn, the
+   region may catch an exception and must be kept.
+
+ Returns the NOTE_INSN_EH_REGION_END corresponding to this region, or
+ calls abort if it can't find one.
+
+   Can abort if INSN is not a NOTE_INSN_EH_REGION_BEG note, or if N doesn't
+ correspond to the region number, or if DELETE_OUTER is NULL. */
+
+static rtx
+scan_region (insn, n, delete_outer)
+ rtx insn;
+ int n;
+ int *delete_outer;
+{
+ rtx start = insn;
+
+ /* Assume we can delete the region. */
+ int delete = 1;
+
+ if (! (GET_CODE (insn) == NOTE
+ && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
+ && NOTE_BLOCK_NUMBER (insn) == n
+ && delete_outer != NULL))
+ abort ();
+
+ insn = NEXT_INSN (insn);
+
+ /* Look for the matching end. */
+ while (! (GET_CODE (insn) == NOTE
+ && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
+ {
+ /* If anything can throw, we can't remove the region. */
+ if (delete && can_throw (insn))
+ {
+ delete = 0;
+ }
+
+ /* Watch out for and handle nested regions. */
+ if (GET_CODE (insn) == NOTE
+ && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
+ {
+ insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &delete);
+ }
+
+ insn = NEXT_INSN (insn);
+ }
+
+ /* The _BEG/_END NOTEs must match and nest. */
+ if (NOTE_BLOCK_NUMBER (insn) != n)
+ abort ();
+
+  /* If anything in this exception region can throw, the enclosing region
+     can throw too, so it must not be removed. */
+ if (! delete)
+ *delete_outer = 0;
+ else
+ {
+ /* Delete the start and end of the region. */
+ delete_insn (start);
+ delete_insn (insn);
+
+ /* Only do this part if we have built the exception handler
+ labels. */
+ if (exception_handler_labels)
+ {
+ rtx x, *prev = &exception_handler_labels;
+
+ /* Find it in the list of handlers. */
+ for (x = exception_handler_labels; x; x = XEXP (x, 1))
+ {
+ rtx label = XEXP (x, 0);
+ if (CODE_LABEL_NUMBER (label) == n)
+ {
+ /* If we are the last reference to the handler,
+ delete it. */
+ if (--LABEL_NUSES (label) == 0)
+ delete_insn (label);
+
+ if (optimize)
+ {
+ /* Remove it from the list of exception handler
+ labels, if we are optimizing. If we are not, then
+ leave it in the list, as we are not really going to
+ remove the region. */
+ *prev = XEXP (x, 1);
+ XEXP (x, 1) = 0;
+ XEXP (x, 0) = 0;
+ }
+
+ break;
+ }
+ prev = &XEXP (x, 1);
+ }
+ }
+ }
+ return insn;
+}
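+
+/* For example, given a region such as
+
+	NOTE_INSN_EH_REGION_BEG 42
+	  ... only insns for which can_throw is false ...
+	NOTE_INSN_EH_REGION_END 42
+
+   both NOTEs are deleted, and when optimizing, handler label 42 is
+   dropped from exception_handler_labels (and deleted once its use count
+   reaches zero).  The region number 42 is just a made-up example.  */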
+
+/* Perform optimizations specific to exception handling code.
+
+   We look for exception regions in which nothing can throw and remove
+   them. The jump optimization code will remove the handler if nothing
+   else uses it. */
+
+void
+exception_optimize ()
+{
+ rtx insn, regions = NULL_RTX;
+ int n;
+
+ /* The below doesn't apply to setjmp/longjmp EH. */
+ if (exceptions_via_longjmp)
+ return;
+
+ /* Remove empty regions. */
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ {
+ if (GET_CODE (insn) == NOTE
+ && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
+ {
+ /* Since scan_region will return the NOTE_INSN_EH_REGION_END
+ insn, we will indirectly skip through all the insns
+	       in between. We are also guaranteed that the value of insn
+ returned will be valid, as otherwise scan_region won't
+ return. */
+ insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &n);
+ }
+ }
+}
+
+/* Various hooks for the DWARF 2 __throw routine. */
+
+/* Do any necessary initialization to access arbitrary stack frames.
+ On the SPARC, this means flushing the register windows. */
+
+void
+expand_builtin_unwind_init ()
+{
+ /* Set this so all the registers get saved in our frame; we need to be
+ able to copy the saved values for any registers from frames we unwind. */
+ current_function_has_nonlocal_label = 1;
+
+#ifdef SETUP_FRAME_ADDRESSES
+ SETUP_FRAME_ADDRESSES ();
+#endif
+}
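+
+/* In source form a use of this looks roughly like
+
+	__builtin_unwind_init ();
+
+   placed at the top of a function (such as the runtime's __throw) that
+   is going to walk and restore the registers of arbitrary stack frames;
+   it forces every call-saved register out to this frame so the values
+   can be found on the stack.  */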
+
+/* Given a value extracted from the return address register or stack slot,
+ return the actual address encoded in that value. */
+
+rtx
+expand_builtin_extract_return_addr (addr_tree)
+ tree addr_tree;
+{
+ rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
+ return eh_outer_context (addr);
+}
+
+/* Given an actual address in addr_tree, do any necessary encoding
+ and return the value to be stored in the return address register or
+ stack slot so the epilogue will return to that address. */
+
+rtx
+expand_builtin_frob_return_addr (addr_tree)
+ tree addr_tree;
+{
+ rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
+#ifdef RETURN_ADDR_OFFSET
+ addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
+#endif
+ return addr;
+}
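+
+/* A rough picture of how the two builtins above pair up, where NEW_PC
+   stands for whatever address the unwinder wants to return to:
+
+	void *pc = __builtin_extract_return_addr (__builtin_return_address (0));
+	void *raw = __builtin_frob_return_addr (new_pc);
+
+   The first decodes the raw register/slot value into the real code
+   address; the second performs the inverse encoding, so that storing RAW
+   back into the return address register or slot makes the epilogue
+   return to NEW_PC.  */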
+
+/* Given an actual address in addr_tree, set the return address register up
+ so the epilogue will return to that address. If the return address is
+ not in a register, do nothing. */
+
+void
+expand_builtin_set_return_addr_reg (addr_tree)
+ tree addr_tree;
+{
+ rtx tmp;
+ rtx ra = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
+ 0, hard_frame_pointer_rtx);
+
+ if (GET_CODE (ra) != REG || REGNO (ra) >= FIRST_PSEUDO_REGISTER)
+ return;
+
+ tmp = force_operand (expand_builtin_frob_return_addr (addr_tree), ra);
+ if (tmp != ra)
+ emit_move_insn (ra, tmp);
+}
+
+/* Choose two registers for communication between the main body of
+ __throw and the stub for adjusting the stack pointer. The first register
+ is used to pass the address of the exception handler; the second register
+ is used to pass the stack pointer offset.
+
+ For register 1 we use the return value register for a void *.
+ For register 2 we use the static chain register if it exists and is
+ different from register 1, otherwise some arbitrary call-clobbered
+ register. */
+
+static void
+eh_regs (r1, r2, outgoing)
+ rtx *r1, *r2;
+ int outgoing;
+{
+ rtx reg1, reg2;
+
+#ifdef FUNCTION_OUTGOING_VALUE
+ if (outgoing)
+ reg1 = FUNCTION_OUTGOING_VALUE (build_pointer_type (void_type_node),
+ current_function_decl);
+ else
+#endif
+ reg1 = FUNCTION_VALUE (build_pointer_type (void_type_node),
+ current_function_decl);
+
+#ifdef STATIC_CHAIN_REGNUM
+ if (outgoing)
+ reg2 = static_chain_incoming_rtx;
+ else
+ reg2 = static_chain_rtx;
+ if (REGNO (reg2) == REGNO (reg1))
+#endif /* STATIC_CHAIN_REGNUM */
+ reg2 = NULL_RTX;
+
+ if (reg2 == NULL_RTX)
+ {
+ int i;
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
+ if (call_used_regs[i] && ! fixed_regs[i] && i != REGNO (reg1))
+ {
+ reg2 = gen_rtx (REG, Pmode, i);
+ break;
+ }
+
+ if (reg2 == NULL_RTX)
+ abort ();
+ }
+
+ *r1 = reg1;
+ *r2 = reg2;
+}
+
+/* Emit inside of __throw a stub which adjusts the stack pointer and jumps
+ to the exception handler. __throw will set up the necessary values
+ and then return to the stub. */
+
+rtx
+expand_builtin_eh_stub ()
+{
+ rtx stub_start = gen_label_rtx ();
+ rtx after_stub = gen_label_rtx ();
+ rtx handler, offset, temp;
+
+ emit_jump (after_stub);
+ emit_label (stub_start);
+
+ eh_regs (&handler, &offset, 0);
+
+ adjust_stack (offset);
+ emit_indirect_jump (handler);
+
+ emit_label (after_stub);
+ return gen_rtx (LABEL_REF, Pmode, stub_start);
+}
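+
+/* Conceptually the stub emitted above is nothing more than
+
+	sp += offset;
+	goto *handler;
+
+   with HANDLER and OFFSET arriving in the two registers chosen by
+   eh_regs; expand_builtin_set_eh_regs below loads those registers before
+   __throw returns into the stub.  */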
+
+/* Set up the registers for passing the handler address and stack offset
+ to the stub above. */
+
+void
+expand_builtin_set_eh_regs (handler, offset)
+ tree handler, offset;
+{
+ rtx reg1, reg2;
+
+ eh_regs (&reg1, &reg2, 1);
+
+ store_expr (offset, reg2, 0);
+ store_expr (handler, reg1, 0);
+
+ /* These will be used by the stub. */
+ emit_insn (gen_rtx (USE, VOIDmode, reg1));
+ emit_insn (gen_rtx (USE, VOIDmode, reg2));
+}