author     mrs <mrs@138bc75d-0d04-0410-961f-82ee72b054a4>   1997-04-23 20:04:25 +0000
committer  mrs <mrs@138bc75d-0d04-0410-961f-82ee72b054a4>   1997-04-23 20:04:25 +0000
commit     694ec51983be9bfc22d051e98e22e185c6b00651 (patch)
tree       ed1bd25525ef36f94c97fb911c74203e02095a25
parent     c446d93c756b52b25588a6816d1728f4852d2068 (diff)
download   ppe42-gcc-694ec51983be9bfc22d051e98e22e185c6b00651.tar.gz
           ppe42-gcc-694ec51983be9bfc22d051e98e22e185c6b00651.zip
Add setjmp/longjmp exception handling.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@13968 138bc75d-0d04-0410-961f-82ee72b054a4
-rw-r--r--  gcc/calls.c      24
-rw-r--r--  gcc/except.h     64
-rw-r--r--  gcc/expr.c      535
-rw-r--r--  gcc/expr.h       15
-rw-r--r--  gcc/flags.h       4
-rw-r--r--  gcc/function.h    3
-rw-r--r--  gcc/libgcc2.c   166
-rw-r--r--  gcc/stmt.c      394
-rw-r--r--  gcc/toplev.c      6
-rw-r--r--  gcc/tree.def     23
-rw-r--r--  gcc/tree.h        7
11 files changed, 665 insertions, 576 deletions
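The patch below implements setjmp/longjmp ("sjlj") exception handling inside GCC, but the core runtime idea can be seen in isolation: each active handler is a setjmp buffer pushed onto a per-thread chain, and a throw is a longjmp to the innermost one (compare __sjthrow in the gcc/libgcc2.c hunk). What follows is a minimal standalone sketch of that idea only; the names are illustrative and are not part of this patch.

#include <setjmp.h>
#include <stdio.h>
#include <stdlib.h>

struct handler { struct handler *prev; jmp_buf buf; };

static struct handler *handler_chain;   /* innermost active handler */

static void throw_exception (void)
{
  struct handler *h = handler_chain;
  if (h == 0)
    abort ();                   /* no handler left: terminate */
  handler_chain = h->prev;      /* pop before transferring control */
  longjmp (h->buf, 1);
}

int main (void)
{
  struct handler h;
  h.prev = handler_chain;
  handler_chain = &h;
  if (setjmp (h.buf) == 0)      /* enter the "try" region */
    throw_exception ();         /* raise: unwinds to the setjmp above */
  else
    printf ("caught\n");        /* the "catch" side */
  handler_chain = h.prev;       /* a normal exit would also pop the handler */
  return 0;
}

GCC's actual runtime keeps each handler as a raw array of words on the stack and threads a per-region cleanup chain through it, as the libgcc2.c changes below show.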
diff --git a/gcc/calls.c b/gcc/calls.c
index aef3386c6f1..9b741b316bf 100644
--- a/gcc/calls.c
+++ b/gcc/calls.c
@@ -590,7 +590,6 @@ expand_call (exp, target, ignore)
int old_pending_adj = 0;
int old_stack_arg_under_construction;
int old_inhibit_defer_pop = inhibit_defer_pop;
- tree old_cleanups = cleanups_this_call;
rtx call_fusage = 0;
register tree p;
register int i, j;
@@ -720,17 +719,6 @@ expand_call (exp, target, ignore)
/* If inlining succeeded, return. */
if ((HOST_WIDE_INT) temp != -1)
{
- if (flag_short_temps)
- {
- /* Perform all cleanups needed for the arguments of this
- call (i.e. destructors in C++). It is ok if these
- destructors clobber RETURN_VALUE_REG, because the
- only time we care about this is when TARGET is that
- register. But in C++, we take care to never return
- that register directly. */
- expand_cleanups_to (old_cleanups);
- }
-
#ifdef ACCUMULATE_OUTGOING_ARGS
/* If the outgoing argument list must be preserved, push
the stack before executing the inlined function if it
@@ -1979,8 +1967,9 @@ expand_call (exp, target, ignore)
/* If value type not void, return an rtx for the value. */
- /* If there are cleanups to be called, don't use a hard reg as target. */
- if (cleanups_this_call != old_cleanups
+ /* If there are cleanups to be called, don't use a hard reg as target.
+ We need to double check this and see if it matters anymore. */
+ if (any_pending_cleanups ()
&& target && REG_P (target)
&& REGNO (target) < FIRST_PSEUDO_REGISTER)
target = 0;
@@ -2153,13 +2142,6 @@ expand_call (exp, target, ignore)
}
#endif
- if (flag_short_temps)
- {
- /* Perform all cleanups needed for the arguments of this call
- (i.e. destructors in C++). */
- expand_cleanups_to (old_cleanups);
- }
-
/* If size of args is variable or this was a constructor call for a stack
argument, restore saved stack-pointer value. */
diff --git a/gcc/except.h b/gcc/except.h
index 7c31e950fee..e939afa88c7 100644
--- a/gcc/except.h
+++ b/gcc/except.h
@@ -82,8 +82,35 @@ struct eh_queue {
};
+/* Start an exception handling region. All instructions emitted after
+ this point are considered to be part of the region until
+ expand_eh_region_end () is invoked. */
+
extern void expand_eh_region_start PROTO((void));
+/* Start an exception handling region for the given cleanup action.
+ All instructions emitted after this point are considered to be part
+ of the region until expand_eh_region_end () is invoked. CLEANUP is
+ the cleanup action to perform. The return value is true if the
+ exception region was optimized away. In that case,
+ expand_eh_region_end does not need to be called for this cleanup,
+ nor should it be.
+
+ This routine notices one particular common case in C++ code
+ generation, and optimizes it so as to not need the exception
+ region. */
+
+extern int expand_eh_region_start_tree PROTO((tree));
+
+/* End an exception handling region. The information about the region
+ is found on the top of ehstack.
+
+ HANDLER is either the cleanup for the exception region, or if we're
+ marking the end of a try block, HANDLER is integer_zero_node.
+
+ HANDLER will be transformed to rtl when expand_leftover_cleanups ()
+ is invoked. */
+
extern void expand_eh_region_end PROTO((tree));
/* Push RLABEL or TLABEL onto LABELSTACK. Only one of RLABEL or TLABEL
@@ -101,16 +128,6 @@ extern rtx pop_label_entry PROTO((struct label_node **labelstack));
extern tree top_label_entry PROTO((struct label_node **labelstack));
-/* The stack used to keep track of the exception region corresponding to
- the current instruction. */
-
-extern struct eh_stack ehstack;
-
-/* A queue used to track closed exception regions whose handlers have
- not been emitted yet. */
-
-extern struct eh_queue ehqueue;
-
/* A set of insns for the catch clauses in the current function. They
will be emitted at the end of the current function. */
@@ -233,3 +250,30 @@ extern rtx eh_saved_pc_rtx;
unnecessary exception regions. Invoked from jump_optimize (). */
extern void exception_optimize PROTO((void));
+
+/* Get the dynamic handler chain. */
+extern rtx get_dynamic_handler_chain PROTO((void));
+
+/* Get the dynamic cleanup chain. */
+extern rtx get_dynamic_cleanup_chain PROTO((void));
+
+/* Throw an exception. */
+
+extern void emit_throw PROTO((void));
+
+/* One to use the setjmp/longjmp method of generating code. */
+
+extern int exceptions_via_longjmp;
+
+/* One to enable asynchronous exception support. */
+
+extern int asynchronous_exceptions;
+
+/* One to protect cleanup actions with a handler that calls
+ __terminate, zero otherwise. */
+
+extern int protect_cleanup_actions_with_terminate;
+
+#ifdef TREE_CODE
+extern tree protect_with_terminate PROTO((tree));
+#endif
diff --git a/gcc/expr.c b/gcc/expr.c
index 505ea7cab20..b3babf6403f 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -94,10 +94,6 @@ int pending_stack_adjust;
and in other cases as well. */
int inhibit_defer_pop;
-/* A list of all cleanups which belong to the arguments of
- function calls being expanded by expand_call. */
-tree cleanups_this_call;
-
/* When temporaries are created by TARGET_EXPRs, they are created at
this level of temp_slot_level, so that they can remain allocated
until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
@@ -211,7 +207,6 @@ static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
-static tree defer_cleanups_to PROTO((tree));
extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
@@ -357,7 +352,6 @@ init_expr ()
pending_stack_adjust = 0;
inhibit_defer_pop = 0;
- cleanups_this_call = 0;
saveregs_value = 0;
apply_args_value = 0;
forced_labels = 0;
@@ -375,14 +369,12 @@ save_expr_status (p)
p->pending_stack_adjust = pending_stack_adjust;
p->inhibit_defer_pop = inhibit_defer_pop;
- p->cleanups_this_call = cleanups_this_call;
p->saveregs_value = saveregs_value;
p->apply_args_value = apply_args_value;
p->forced_labels = forced_labels;
pending_stack_adjust = 0;
inhibit_defer_pop = 0;
- cleanups_this_call = 0;
saveregs_value = 0;
apply_args_value = 0;
forced_labels = 0;
@@ -397,7 +389,6 @@ restore_expr_status (p)
{
pending_stack_adjust = p->pending_stack_adjust;
inhibit_defer_pop = p->inhibit_defer_pop;
- cleanups_this_call = p->cleanups_this_call;
saveregs_value = p->saveregs_value;
apply_args_value = p->apply_args_value;
forced_labels = p->forced_labels;
@@ -3014,17 +3005,6 @@ store_expr (exp, target, want_value)
For non-BLKmode, it is more efficient not to do this. */
rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
- rtx flag = NULL_RTX;
- tree left_cleanups = NULL_TREE;
- tree right_cleanups = NULL_TREE;
- tree old_cleanups = cleanups_this_call;
-
- /* Used to save a pointer to the place to put the setting of
- the flag that indicates if this side of the conditional was
- taken. We backpatch the code, if we find out later that we
- have any conditional cleanups that need to be performed. */
- rtx dest_right_flag = NULL_RTX;
- rtx dest_left_flag = NULL_RTX;
emit_queue ();
target = protect_from_queue (target, 1);
@@ -3032,75 +3012,20 @@ store_expr (exp, target, want_value)
do_pending_stack_adjust ();
NO_DEFER_POP;
jumpifnot (TREE_OPERAND (exp, 0), lab1);
+ start_cleanup_deferal ();
store_expr (TREE_OPERAND (exp, 1), target, 0);
- dest_left_flag = get_last_insn ();
- /* Handle conditional cleanups, if any. */
- left_cleanups = defer_cleanups_to (old_cleanups);
+ end_cleanup_deferal ();
emit_queue ();
emit_jump_insn (gen_jump (lab2));
emit_barrier ();
emit_label (lab1);
+ start_cleanup_deferal ();
store_expr (TREE_OPERAND (exp, 2), target, 0);
- dest_right_flag = get_last_insn ();
- /* Handle conditional cleanups, if any. */
- right_cleanups = defer_cleanups_to (old_cleanups);
+ end_cleanup_deferal ();
emit_queue ();
emit_label (lab2);
OK_DEFER_POP;
- /* Add back in any conditional cleanups. */
- if (left_cleanups || right_cleanups)
- {
- tree new_cleanups;
- tree cond;
- rtx last;
-
- /* Now that we know that a flag is needed, go back and add in the
- setting of the flag. */
-
- flag = gen_reg_rtx (word_mode);
-
- /* Do the left side flag. */
- last = get_last_insn ();
- /* Flag left cleanups as needed. */
- emit_move_insn (flag, const1_rtx);
- /* ??? deprecated, use sequences instead. */
- reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
-
- /* Do the right side flag. */
- last = get_last_insn ();
- /* Flag left cleanups as needed. */
- emit_move_insn (flag, const0_rtx);
- /* ??? deprecated, use sequences instead. */
- reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
-
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- /* convert flag, which is an rtx, into a tree. */
- cond = make_node (RTL_EXPR);
- TREE_TYPE (cond) = integer_type_node;
- RTL_EXPR_RTL (cond) = flag;
- RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
- cond = save_expr (cond);
-
- if (! left_cleanups)
- left_cleanups = integer_zero_node;
- if (! right_cleanups)
- right_cleanups = integer_zero_node;
- new_cleanups = build (COND_EXPR, void_type_node,
- truthvalue_conversion (cond),
- left_cleanups, right_cleanups);
- new_cleanups = fold (new_cleanups);
-
- pop_obstacks ();
-
- /* Now add in the conditionalized cleanups. */
- cleanups_this_call
- = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
- expand_eh_region_start ();
- }
return want_value ? target : NULL_RTX;
}
else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
@@ -5187,7 +5112,7 @@ expand_expr (exp, target, tmode, modifier)
int vars_need_expansion = 0;
/* Need to open a binding contour here because
- if there are any cleanups they most be contained here. */
+ if there are any cleanups they must be contained here. */
expand_start_bindings (0);
/* Mark the corresponding BLOCK for output in its proper place. */
@@ -5831,30 +5756,28 @@ expand_expr (exp, target, tmode, modifier)
{
RTL_EXPR_RTL (exp)
= expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
- cleanups_this_call
- = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
+ expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
+
/* That's it for this cleanup. */
TREE_OPERAND (exp, 2) = 0;
- expand_eh_region_start ();
}
return RTL_EXPR_RTL (exp);
case CLEANUP_POINT_EXPR:
{
extern int temp_slot_level;
- tree old_cleanups = cleanups_this_call;
- int old_temp_level = target_temp_slot_level;
- push_temp_slots ();
+ /* Start a new binding layer that will keep track of all cleanup
+ actions to be performed. */
+ expand_start_bindings (0);
+
target_temp_slot_level = temp_slot_level;
+
op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
/* If we're going to use this value, load it up now. */
if (! ignore)
op0 = force_not_mem (op0);
- expand_cleanups_to (old_cleanups);
preserve_temp_slots (op0);
- free_temp_slots ();
- pop_temp_slots ();
- target_temp_slot_level = old_temp_level;
+ expand_end_bindings (NULL_TREE, 0, 0);
}
return op0;
@@ -6556,17 +6479,6 @@ expand_expr (exp, target, tmode, modifier)
}
{
- rtx flag = NULL_RTX;
- tree left_cleanups = NULL_TREE;
- tree right_cleanups = NULL_TREE;
-
- /* Used to save a pointer to the place to put the setting of
- the flag that indicates if this side of the conditional was
- taken. We backpatch the code, if we find out later that we
- have any conditional cleanups that need to be performed. */
- rtx dest_right_flag = NULL_RTX;
- rtx dest_left_flag = NULL_RTX;
-
/* Note that COND_EXPRs whose type is a structure or union
are required to be constructed to contain assignments of
a temporary variable, so that we can evaluate them here
@@ -6577,7 +6489,6 @@ expand_expr (exp, target, tmode, modifier)
tree singleton = 0;
tree binary_op = 0, unary_op = 0;
- tree old_cleanups = cleanups_this_call;
/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
convert it to our mode, if necessary. */
@@ -6705,7 +6616,6 @@ expand_expr (exp, target, tmode, modifier)
NO_DEFER_POP;
op0 = gen_label_rtx ();
- flag = gen_reg_rtx (word_mode);
if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
{
if (temp != 0)
@@ -6724,14 +6634,12 @@ expand_expr (exp, target, tmode, modifier)
else
expand_expr (singleton,
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
- dest_left_flag = get_last_insn ();
if (singleton == TREE_OPERAND (exp, 1))
jumpif (TREE_OPERAND (exp, 0), op0);
else
jumpifnot (TREE_OPERAND (exp, 0), op0);
- /* Allows cleanups up to here. */
- old_cleanups = cleanups_this_call;
+ start_cleanup_deferal ();
if (binary_op && temp == 0)
/* Just touch the other operand. */
expand_expr (TREE_OPERAND (binary_op, 1),
@@ -6746,43 +6654,7 @@ expand_expr (exp, target, tmode, modifier)
make_tree (type, temp)),
temp, 0);
op1 = op0;
- dest_right_flag = get_last_insn ();
- }
-#if 0
- /* This is now done in jump.c and is better done there because it
- produces shorter register lifetimes. */
-
- /* Check for both possibilities either constants or variables
- in registers (but not the same as the target!). If so, can
- save branches by assigning one, branching, and assigning the
- other. */
- else if (temp && GET_MODE (temp) != BLKmode
- && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
- || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
- || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
- && DECL_RTL (TREE_OPERAND (exp, 1))
- && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
- && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
- && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
- || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
- || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
- && DECL_RTL (TREE_OPERAND (exp, 2))
- && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
- && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
- {
- if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
- temp = gen_reg_rtx (mode);
- store_expr (TREE_OPERAND (exp, 2), temp, 0);
- dest_left_flag = get_last_insn ();
- jumpifnot (TREE_OPERAND (exp, 0), op0);
-
- /* Allows cleanups up to here. */
- old_cleanups = cleanups_this_call;
- store_expr (TREE_OPERAND (exp, 1), temp, 0);
- op1 = op0;
- dest_right_flag = get_last_insn ();
}
-#endif
/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
comparison operator. If we have one of these cases, set the
output to A, branch on A (cse will merge these two references),
@@ -6798,14 +6670,11 @@ expand_expr (exp, target, tmode, modifier)
if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
temp = gen_reg_rtx (mode);
store_expr (TREE_OPERAND (exp, 1), temp, 0);
- dest_left_flag = get_last_insn ();
jumpif (TREE_OPERAND (exp, 0), op0);
- /* Allows cleanups up to here. */
- old_cleanups = cleanups_this_call;
+ start_cleanup_deferal ();
store_expr (TREE_OPERAND (exp, 2), temp, 0);
op1 = op0;
- dest_right_flag = get_last_insn ();
}
else if (temp
&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
@@ -6818,102 +6687,42 @@ expand_expr (exp, target, tmode, modifier)
if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
temp = gen_reg_rtx (mode);
store_expr (TREE_OPERAND (exp, 2), temp, 0);
- dest_left_flag = get_last_insn ();
jumpifnot (TREE_OPERAND (exp, 0), op0);
- /* Allows cleanups up to here. */
- old_cleanups = cleanups_this_call;
+ start_cleanup_deferal ();
store_expr (TREE_OPERAND (exp, 1), temp, 0);
op1 = op0;
- dest_right_flag = get_last_insn ();
}
else
{
op1 = gen_label_rtx ();
jumpifnot (TREE_OPERAND (exp, 0), op0);
- /* Allows cleanups up to here. */
- old_cleanups = cleanups_this_call;
+ start_cleanup_deferal ();
if (temp != 0)
store_expr (TREE_OPERAND (exp, 1), temp, 0);
else
expand_expr (TREE_OPERAND (exp, 1),
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
- dest_left_flag = get_last_insn ();
-
- /* Handle conditional cleanups, if any. */
- left_cleanups = defer_cleanups_to (old_cleanups);
-
+ end_cleanup_deferal ();
emit_queue ();
emit_jump_insn (gen_jump (op1));
emit_barrier ();
emit_label (op0);
+ start_cleanup_deferal ();
if (temp != 0)
store_expr (TREE_OPERAND (exp, 2), temp, 0);
else
expand_expr (TREE_OPERAND (exp, 2),
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
- dest_right_flag = get_last_insn ();
}
- /* Handle conditional cleanups, if any. */
- right_cleanups = defer_cleanups_to (old_cleanups);
+ end_cleanup_deferal ();
emit_queue ();
emit_label (op1);
OK_DEFER_POP;
- /* Add back in, any conditional cleanups. */
- if (left_cleanups || right_cleanups)
- {
- tree new_cleanups;
- tree cond;
- rtx last;
-
- /* Now that we know that a flag is needed, go back and add in the
- setting of the flag. */
-
- /* Do the left side flag. */
- last = get_last_insn ();
- /* Flag left cleanups as needed. */
- emit_move_insn (flag, const1_rtx);
- /* ??? deprecated, use sequences instead. */
- reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
-
- /* Do the right side flag. */
- last = get_last_insn ();
- /* Flag left cleanups as needed. */
- emit_move_insn (flag, const0_rtx);
- /* ??? deprecated, use sequences instead. */
- reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
-
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- /* convert flag, which is an rtx, into a tree. */
- cond = make_node (RTL_EXPR);
- TREE_TYPE (cond) = integer_type_node;
- RTL_EXPR_RTL (cond) = flag;
- RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
- cond = save_expr (cond);
-
- if (! left_cleanups)
- left_cleanups = integer_zero_node;
- if (! right_cleanups)
- right_cleanups = integer_zero_node;
- new_cleanups = build (COND_EXPR, void_type_node,
- truthvalue_conversion (cond),
- left_cleanups, right_cleanups);
- new_cleanups = fold (new_cleanups);
-
- pop_obstacks ();
-
- /* Now add in the conditionalized cleanups. */
- cleanups_this_call
- = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
- expand_eh_region_start ();
- }
return temp;
}
@@ -6996,13 +6805,7 @@ expand_expr (exp, target, tmode, modifier)
store_expr (exp1, target, 0);
- if (cleanups)
- {
- cleanups_this_call = tree_cons (NULL_TREE,
- cleanups,
- cleanups_this_call);
- expand_eh_region_start ();
- }
+ expand_decl_cleanup (NULL_TREE, cleanups);
return target;
}
@@ -7285,6 +7088,33 @@ expand_expr (exp, target, tmode, modifier)
return target;
}
+ case TRY_CATCH_EXPR:
+ {
+ tree handler = TREE_OPERAND (exp, 1);
+
+ expand_eh_region_start ();
+
+ op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
+
+ expand_eh_region_end (handler);
+
+ return op0;
+ }
+
+ case POPDCC_EXPR:
+ {
+ rtx dcc = get_dynamic_cleanup_chain ();
+ emit_move_insn (dcc, validize_mem (gen_rtx (MEM, Pmode, dcc)));
+ return const0_rtx;
+ }
+
+ case POPDHC_EXPR:
+ {
+ rtx dhc = get_dynamic_handler_chain ();
+ emit_move_insn (dhc, validize_mem (gen_rtx (MEM, Pmode, dhc)));
+ return const0_rtx;
+ }
+
case ERROR_MARK:
op0 = CONST0_RTX (tmode);
if (op0 != 0)
@@ -9925,68 +9755,6 @@ do_pending_stack_adjust ()
pending_stack_adjust = 0;
}
}
-
-/* Defer the expansion all cleanups up to OLD_CLEANUPS.
- Returns the cleanups to be performed. */
-
-static tree
-defer_cleanups_to (old_cleanups)
- tree old_cleanups;
-{
- tree new_cleanups = NULL_TREE;
- tree cleanups = cleanups_this_call;
- tree last = NULL_TREE;
-
- while (cleanups_this_call != old_cleanups)
- {
- expand_eh_region_end (TREE_VALUE (cleanups_this_call));
- last = cleanups_this_call;
- cleanups_this_call = TREE_CHAIN (cleanups_this_call);
- }
-
- if (last)
- {
- /* Remove the list from the chain of cleanups. */
- TREE_CHAIN (last) = NULL_TREE;
-
- /* reverse them so that we can build them in the right order. */
- cleanups = nreverse (cleanups);
-
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- while (cleanups)
- {
- if (new_cleanups)
- new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
- TREE_VALUE (cleanups), new_cleanups);
- else
- new_cleanups = TREE_VALUE (cleanups);
-
- cleanups = TREE_CHAIN (cleanups);
- }
-
- pop_obstacks ();
- }
-
- return new_cleanups;
-}
-
-/* Expand all cleanups up to OLD_CLEANUPS.
- Needed here, and also for language-dependent calls. */
-
-void
-expand_cleanups_to (old_cleanups)
- tree old_cleanups;
-{
- while (cleanups_this_call != old_cleanups)
- {
- expand_eh_region_end (TREE_VALUE (cleanups_this_call));
- expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
- cleanups_this_call = TREE_CHAIN (cleanups_this_call);
- }
-}
/* Expand conditional expressions. */
@@ -10131,131 +9899,21 @@ do_jump (exp, if_false_label, if_true_label)
break;
case TRUTH_ANDIF_EXPR:
- {
- rtx seq1, seq2;
- tree cleanups, old_cleanups;
-
- if (if_false_label == 0)
- if_false_label = drop_through_label = gen_label_rtx ();
- start_sequence ();
- do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
- seq1 = get_insns ();
- end_sequence ();
-
- old_cleanups = cleanups_this_call;
- start_sequence ();
- do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
- seq2 = get_insns ();
- cleanups = defer_cleanups_to (old_cleanups);
- end_sequence ();
-
- if (cleanups)
- {
- rtx flag = gen_reg_rtx (word_mode);
- tree new_cleanups;
- tree cond;
-
- /* Flag cleanups as not needed. */
- emit_move_insn (flag, const0_rtx);
- emit_insns (seq1);
-
- /* Flag cleanups as needed. */
- emit_move_insn (flag, const1_rtx);
- emit_insns (seq2);
-
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- /* convert flag, which is an rtx, into a tree. */
- cond = make_node (RTL_EXPR);
- TREE_TYPE (cond) = integer_type_node;
- RTL_EXPR_RTL (cond) = flag;
- RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
- cond = save_expr (cond);
-
- new_cleanups = build (COND_EXPR, void_type_node,
- truthvalue_conversion (cond),
- cleanups, integer_zero_node);
- new_cleanups = fold (new_cleanups);
-
- pop_obstacks ();
-
- /* Now add in the conditionalized cleanups. */
- cleanups_this_call
- = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
- expand_eh_region_start ();
- }
- else
- {
- emit_insns (seq1);
- emit_insns (seq2);
- }
- }
+ if (if_false_label == 0)
+ if_false_label = drop_through_label = gen_label_rtx ();
+ do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
+ start_cleanup_deferal ();
+ do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
+ end_cleanup_deferal ();
break;
case TRUTH_ORIF_EXPR:
- {
- rtx seq1, seq2;
- tree cleanups, old_cleanups;
-
- if (if_true_label == 0)
- if_true_label = drop_through_label = gen_label_rtx ();
- start_sequence ();
- do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
- seq1 = get_insns ();
- end_sequence ();
-
- old_cleanups = cleanups_this_call;
- start_sequence ();
- do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
- seq2 = get_insns ();
- cleanups = defer_cleanups_to (old_cleanups);
- end_sequence ();
-
- if (cleanups)
- {
- rtx flag = gen_reg_rtx (word_mode);
- tree new_cleanups;
- tree cond;
-
- /* Flag cleanups as not needed. */
- emit_move_insn (flag, const0_rtx);
- emit_insns (seq1);
-
- /* Flag cleanups as needed. */
- emit_move_insn (flag, const1_rtx);
- emit_insns (seq2);
-
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- /* convert flag, which is an rtx, into a tree. */
- cond = make_node (RTL_EXPR);
- TREE_TYPE (cond) = integer_type_node;
- RTL_EXPR_RTL (cond) = flag;
- RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
- cond = save_expr (cond);
-
- new_cleanups = build (COND_EXPR, void_type_node,
- truthvalue_conversion (cond),
- cleanups, integer_zero_node);
- new_cleanups = fold (new_cleanups);
-
- pop_obstacks ();
-
- /* Now add in the conditionalized cleanups. */
- cleanups_this_call
- = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
- expand_eh_region_start ();
- }
- else
- {
- emit_insns (seq1);
- emit_insns (seq2);
- }
- }
+ if (if_true_label == 0)
+ if_true_label = drop_through_label = gen_label_rtx ();
+ do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
+ start_cleanup_deferal ();
+ do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
+ end_cleanup_deferal ();
break;
case COMPOUND_EXPR:
@@ -10311,18 +9969,12 @@ do_jump (exp, if_false_label, if_true_label)
else
{
- rtx seq1, seq2;
- tree cleanups_left_side, cleanups_right_side, old_cleanups;
-
register rtx label1 = gen_label_rtx ();
drop_through_label = gen_label_rtx ();
do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
- /* We need to save the cleanups for the lhs and rhs separately.
- Keep track of the cleanups seen before the lhs. */
- old_cleanups = cleanups_this_call;
- start_sequence ();
+ start_cleanup_deferal ();
/* Now the THEN-expression. */
do_jump (TREE_OPERAND (exp, 1),
if_false_label ? if_false_label : drop_through_label,
@@ -10330,71 +9982,12 @@ do_jump (exp, if_false_label, if_true_label)
/* In case the do_jump just above never jumps. */
do_pending_stack_adjust ();
emit_label (label1);
- seq1 = get_insns ();
- /* Now grab the cleanups for the lhs. */
- cleanups_left_side = defer_cleanups_to (old_cleanups);
- end_sequence ();
- /* And keep track of where we start before the rhs. */
- old_cleanups = cleanups_this_call;
- start_sequence ();
/* Now the ELSE-expression. */
do_jump (TREE_OPERAND (exp, 2),
if_false_label ? if_false_label : drop_through_label,
if_true_label ? if_true_label : drop_through_label);
- seq2 = get_insns ();
- /* Grab the cleanups for the rhs. */
- cleanups_right_side = defer_cleanups_to (old_cleanups);
- end_sequence ();
-
- if (cleanups_left_side || cleanups_right_side)
- {
- /* Make the cleanups for the THEN and ELSE clauses
- conditional based on which half is executed. */
- rtx flag = gen_reg_rtx (word_mode);
- tree new_cleanups;
- tree cond;
-
- /* Set the flag to 0 so that we know we executed the lhs. */
- emit_move_insn (flag, const0_rtx);
- emit_insns (seq1);
-
- /* Set the flag to 1 so that we know we executed the rhs. */
- emit_move_insn (flag, const1_rtx);
- emit_insns (seq2);
-
- /* Make sure the cleanup lives on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- /* Now, build up a COND_EXPR that tests the value of the
- flag, and then either do the cleanups for the lhs or the
- rhs. */
- cond = make_node (RTL_EXPR);
- TREE_TYPE (cond) = integer_type_node;
- RTL_EXPR_RTL (cond) = flag;
- RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
- cond = save_expr (cond);
-
- new_cleanups = build (COND_EXPR, void_type_node,
- truthvalue_conversion (cond),
- cleanups_right_side, cleanups_left_side);
- new_cleanups = fold (new_cleanups);
-
- pop_obstacks ();
-
- /* Now add in the conditionalized cleanups. */
- cleanups_this_call
- = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
- expand_eh_region_start ();
- }
- else
- {
- /* No cleanups were needed, so emit the two sequences
- directly. */
- emit_insns (seq1);
- emit_insns (seq2);
- }
+ end_cleanup_deferal ();
}
break;
@@ -11336,7 +10929,7 @@ bc_load_memory (type, decl)
else
abort ();
else
- /* See corresponding comment in bc_store_memory(). */
+ /* See corresponding comment in bc_store_memory. */
if (TYPE_MODE (type) == BLKmode
|| TYPE_MODE (type) == VOIDmode)
return;
@@ -12012,7 +11605,7 @@ bc_load_bit_field (offset, size, unsignedp)
/* Load: sign-extend if signed, else zero-extend */
bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
-}
+}
/* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
diff --git a/gcc/expr.h b/gcc/expr.h
index 573acdf7b0b..ee954ef43b7 100644
--- a/gcc/expr.h
+++ b/gcc/expr.h
@@ -108,12 +108,6 @@ extern tree nonlocal_labels;
These are the arguments to function calls that have already returned. */
extern int pending_stack_adjust;
-/* A list of all cleanups which belong to the arguments of
- function calls being expanded by expand_call. */
-#ifdef TREE_CODE /* Don't lose if tree.h not included. */
-extern tree cleanups_this_call;
-#endif
-
/* When temporaries are created by TARGET_EXPRs, they are created at
this level of temp_slot_level, so that they can remain allocated
until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
@@ -359,6 +353,12 @@ extern rtx memset_libfunc;
extern rtx bzero_libfunc;
extern rtx throw_libfunc;
+extern rtx sjthrow_libfunc;
+extern rtx sjpopnthrow_libfunc;
+extern rtx terminate_libfunc;
+extern rtx setjmp_libfunc;
+extern rtx longjmp_libfunc;
+extern rtx get_dynamic_handler_chain_libfunc;
extern rtx eqhf2_libfunc;
extern rtx nehf2_libfunc;
@@ -705,9 +705,6 @@ extern void clear_pending_stack_adjust PROTO((void));
extern void do_pending_stack_adjust PROTO((void));
#ifdef TREE_CODE
-/* Expand all cleanups up to OLD_CLEANUPS. */
-extern void expand_cleanups_to PROTO((tree));
-
/* Generate code to evaluate EXP and jump to LABEL if the value is zero. */
extern void jumpifnot PROTO((tree, rtx));
diff --git a/gcc/flags.h b/gcc/flags.h
index 82c10854fc8..ce4b96a8dd4 100644
--- a/gcc/flags.h
+++ b/gcc/flags.h
@@ -308,10 +308,6 @@ extern int flag_schedule_insns_after_reload;
extern int flag_delayed_branch;
-/* Nonzero means to run cleanups after CALL_EXPRs. */
-
-extern int flag_short_temps;
-
/* Nonzero means pretend it is OK to examine bits of target floats,
even if that isn't true. The resulting code will have incorrect constants,
but the same series of instructions that the native compiler would make. */
diff --git a/gcc/function.h b/gcc/function.h
index 4bc8e3feba7..63c26ac5816 100644
--- a/gcc/function.h
+++ b/gcc/function.h
@@ -135,11 +135,12 @@ struct function
struct label_node *false_label_stack;
struct label_node *caught_return_label_stack;
tree protect_list;
+ rtx dhc;
+ rtx dcc;
/* For expr.c. */
int pending_stack_adjust;
int inhibit_defer_pop;
- tree cleanups_this_call;
rtx saveregs_value;
rtx apply_args_value;
rtx forced_labels;
diff --git a/gcc/libgcc2.c b/gcc/libgcc2.c
index 338707029ce..725161865dd 100644
--- a/gcc/libgcc2.c
+++ b/gcc/libgcc2.c
@@ -3102,6 +3102,172 @@ EH_TABLE_LOOKUP
#else
+void
+__default_terminate ()
+{
+ abort ();
+}
+
+void (*__terminate_func)() = __default_terminate;
+
+void
+__terminate ()
+{
+ (*__terminate_func)();
+}
+
+/* Calls to __sjthrow are generated by the compiler when an exception
+ is raised when using the setjmp/longjmp exception handling codegen
+ method. */
+
+extern longjmp (void *, int);
+
+extern void *__eh_type;
+
+static void *top_elt[2];
+void **__dynamic_handler_chain = top_elt;
+
+/* Routine to get the head of the current thread's dynamic handler chain
+ use for exception handling.
+
+ TODO: make thread safe. */
+
+void ***
+__get_dynamic_handler_chain ()
+{
+ return &__dynamic_handler_chain;
+}
+
+/* This is used to throw an exception when the setjmp/longjmp codegen
+ method is used for exception handling.
+
+ We call __terminate if there are no handlers left (we know this
+ when the dynamic handler chain is top_elt). Otherwise we run the
+ cleanup actions off the dynamic cleanup stack, pop the top of
+ the dynamic handler chain, and use longjmp to transfer back to the
+ associated handler. */
+
+void
+__sjthrow ()
+{
+ void ***dhc = __get_dynamic_handler_chain ();
+ void *jmpbuf;
+ void (*func)(void *, int);
+ void *arg;
+ void ***cleanup;
+
+ /* The cleanup chain is one word into the buffer. Get the cleanup
+ chain. */
+ cleanup = (void***)&(*dhc)[1];
+
+ /* If there are any cleanups in the chain, run them now. */
+ if (cleanup[0])
+ {
+ double store[200];
+ void **buf = (void**)store;
+ buf[1] = 0;
+ buf[0] = (*dhc);
+
+ /* try { */
+ if (! setjmp (&buf[2]))
+ {
+ *dhc = buf;
+ while (cleanup[0])
+ {
+ func = (void(*)(void*, int))cleanup[0][1];
+ arg = (void*)cleanup[0][2];
+
+ /* Update this before running the cleanup. */
+ cleanup[0] = (void **)cleanup[0][0];
+
+ (*func)(arg, 2);
+ }
+ *dhc = buf[0];
+ }
+ /* catch (...) */
+ else
+ {
+ __terminate ();
+ }
+ }
+
+ /* We must call terminate if we try to rethrow an exception when
+ there is no exception currently active, or when there are no
+ handlers left. */
+ if (! __eh_type || (*dhc) == top_elt)
+ __terminate ();
+
+ /* Find the jmpbuf associated with the top element of the dynamic
+ handler chain. The jmpbuf starts two words into the buffer. */
+ jmpbuf = &(*dhc)[2];
+
+ /* Then we pop the top element off the dynamic handler chain. */
+ *dhc = (void**)(*dhc)[0];
+
+ /* And then we jump to the handler. */
+
+#ifdef USE_BUILTIN_SETJMP
+ __builtin_longjmp (jmpbuf, 1);
+#else
+ longjmp (jmpbuf, 1);
+#endif
+}
+
+/* Run cleanups on the dynamic cleanup stack for the current dynamic
+ handler, then pop the handler off the dynamic handler stack, and
+ then throw. This is used to skip the first handler, and transfer
+ control to the next handler in the dynamic handler stack. */
+
+void
+__sjpopnthrow ()
+{
+ void ***dhc = __get_dynamic_handler_chain ();
+ void *jmpbuf;
+ void (*func)(void *, int);
+ void *arg;
+ void ***cleanup;
+
+ /* The cleanup chain is one word into the buffer. Get the cleanup
+ chain. */
+ cleanup = (void***)&(*dhc)[1];
+
+ /* If there are any cleanups in the chain, run them now. */
+ if (cleanup[0])
+ {
+ double store[200];
+ void **buf = (void**)store;
+ buf[1] = 0;
+ buf[0] = (*dhc);
+
+ /* try { */
+ if (! setjmp (&buf[2]))
+ {
+ *dhc = buf;
+ while (cleanup[0])
+ {
+ func = (void(*)(void*, int))cleanup[0][1];
+ arg = (void*)cleanup[0][2];
+
+ /* Update this before running the cleanup. */
+ cleanup[0] = (void **)cleanup[0][0];
+
+ (*func)(arg, 2);
+ }
+ *dhc = buf[0];
+ }
+ /* catch (...) */
+ else
+ {
+ __terminate ();
+ }
+ }
+
+ /* Then we pop the top element off the dynamic handler chain. */
+ *dhc = (void**)(*dhc)[0];
+
+ __sjthrow ();
+}
+
typedef struct {
void *start;
void *end;
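For reference, the buffers that __sjthrow and __sjpopnthrow walk above have a fixed word layout: word 0 links to the previously active handler, word 1 heads the dynamic cleanup chain for the region, and the jmpbuf begins at word 2; each cleanup record is likewise a {next, function, argument} triple. Below is a typed sketch of that layout; the struct and function names are illustrative, not GCC's, and the nested setjmp that sends a throwing cleanup to __terminate is omitted.

#include <setjmp.h>
#include <stdlib.h>

struct dynamic_cleanup              /* one record on the dynamic cleanup chain */
{
  struct dynamic_cleanup *next;     /* word 0: next cleanup                    */
  void (*func) (void *, int);       /* word 1: the cleanup action              */
  void *arg;                        /* word 2: its argument                    */
};

struct dynamic_handler              /* one record on the dynamic handler chain */
{
  struct dynamic_handler *prev;     /* word 0: previously active handler       */
  struct dynamic_cleanup *cleanups; /* word 1: cleanups guarding this region   */
  jmp_buf jmpbuf;                   /* words 2...: target of a throw           */
};

static struct dynamic_handler *chain;   /* model of __dynamic_handler_chain */

/* Outline of what __sjthrow does, minus the protection that routes a
   cleanup which itself throws to __terminate.  */
static void model_throw (void)
{
  struct dynamic_handler *h = chain;
  struct dynamic_cleanup *c;

  if (h == 0)
    abort ();                       /* no handlers left: __terminate */

  while ((c = h->cleanups) != 0)
    {
      h->cleanups = c->next;        /* unlink before running, as __sjthrow does */
      (*c->func) (c->arg, 2);
    }

  chain = h->prev;                  /* pop the handler ...            */
  longjmp (h->jmpbuf, 1);           /* ... and transfer control to it */
}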
diff --git a/gcc/stmt.c b/gcc/stmt.c
index a3f42d5a0cc..04eaaff5475 100644
--- a/gcc/stmt.c
+++ b/gcc/stmt.c
@@ -129,12 +129,10 @@ extern rtx arg_pointer_save_area;
/* Chain of all RTL_EXPRs that have insns in them. */
extern tree rtl_expr_chain;
-#if 0 /* Turned off because 0 seems to work just as well. */
-/* Cleanup lists are required for binding levels regardless of whether
- that binding level has cleanups or not. This node serves as the
- cleanup list whenever an empty list is required. */
-static tree empty_cleanup_list;
-#endif
+/* Stack allocation level in which temporaries for TARGET_EXPRs live. */
+extern int target_temp_slot_level;
+
+extern int temp_slot_level;
/* Functions and data structures for expanding case statements. */
@@ -262,7 +260,7 @@ struct nesting
as they were at the locus where this block appears.
There is an element for each containing block,
ordered innermost containing block first.
- The tail of this list can be 0 (was empty_cleanup_list),
+ The tail of this list can be 0,
if all remaining elements would be empty lists.
The element's TREE_VALUE is the cleanup-list of that block,
which may be null. */
@@ -274,6 +272,28 @@ struct nesting
int function_call_count;
/* Bytecode specific: stack level to restore stack to on exit. */
int bc_stack_level;
+ /* Nonzero if this is associated with an EH region. */
+ int exception_region;
+ /* The saved target_temp_slot_level from our outer block.
+ We may reset target_temp_slot_level to be the level of
+ this block; if that is done, target_temp_slot_level
+ reverts to the saved target_temp_slot_level at the very
+ end of the block. */
+ int target_temp_slot_level;
+ /* True if we are currently emitting insns in an area of
+ output code that is controlled by a conditional
+ expression. This is used by the cleanup handling code to
+ generate conditional cleanup actions. */
+ int conditional_code;
+ /* A place to move the start of the exception region for any
+ of the conditional cleanups; it must be at the end or after
+ the start of the last unconditional cleanup, and before any
+ conditional branch points. */
+ rtx last_unconditional_cleanup;
+ /* When in a conditional context, this is the specific
+ cleanup list associated with last_unconditional_cleanup,
+ where we place the conditionalized cleanups. */
+ tree *cleanup_ptr;
} block;
/* For switch (C) or case (Pascal) statements,
and also for dummies (see `expand_start_case_dummy'). */
@@ -391,7 +411,7 @@ struct goto_fixup
rtx stack_level;
/* List of lists of cleanup expressions to be run by this goto.
There is one element for each block that this goto is within.
- The tail of this list can be 0 (was empty_cleanup_list),
+ The tail of this list can be 0,
if all remaining elements would be empty.
The TREE_VALUE contains the cleanup list of that block as of the
time this goto was seen.
@@ -1021,11 +1041,7 @@ expand_fixup (tree_label, rtl_label, last_insn)
fixup->block_start_count = block_start_count;
fixup->stack_level = 0;
fixup->cleanup_list_list
- = (((block->data.block.outer_cleanups
-#if 0
- && block->data.block.outer_cleanups != empty_cleanup_list
-#endif
- )
+ = ((block->data.block.outer_cleanups
|| block->data.block.cleanups)
? tree_cons (NULL_TREE, block->data.block.cleanups,
block->data.block.outer_cleanups)
@@ -1301,7 +1317,7 @@ bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
/* Emit code to restore the stack and continue */
bc_emit_bytecode_labeldef (f->label);
- /* Save stack_depth across call, since bc_adjust_stack () will alter
+ /* Save stack_depth across call, since bc_adjust_stack will alter
the perceived stack depth via the instructions generated. */
if (f->bc_stack_level >= 0)
@@ -2918,10 +2934,7 @@ expand_return (retval)
result_reg_mode = tmpmode;
result_reg = gen_reg_rtx (result_reg_mode);
- /* Now that the value is in pseudos, copy it to the result reg(s). */
- expand_cleanups_to (NULL_TREE);
emit_queue ();
- free_temp_slots ();
for (i = 0; i < n_regs; i++)
emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
result_pseudos[i]);
@@ -2940,10 +2953,7 @@ expand_return (retval)
val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
val = force_not_mem (val);
- expand_cleanups_to (NULL_TREE);
emit_queue ();
- /* All temporaries have now been used. */
- free_temp_slots ();
/* Return the calculated value, doing cleanups first. */
expand_value_return (val);
}
@@ -2952,9 +2962,7 @@ expand_return (retval)
/* No cleanups or no hard reg used;
calculate value into hard return reg. */
expand_expr (retval, const0_rtx, VOIDmode, 0);
- expand_cleanups_to (NULL_TREE);
emit_queue ();
- free_temp_slots ();
expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
}
}
@@ -3061,22 +3069,13 @@ expand_start_bindings (exit_flag)
thisblock->data.block.stack_level = 0;
thisblock->data.block.cleanups = 0;
thisblock->data.block.function_call_count = 0;
-#if 0
- if (block_stack)
- {
- if (block_stack->data.block.cleanups == NULL_TREE
- && (block_stack->data.block.outer_cleanups == NULL_TREE
- || block_stack->data.block.outer_cleanups == empty_cleanup_list))
- thisblock->data.block.outer_cleanups = empty_cleanup_list;
- else
- thisblock->data.block.outer_cleanups
- = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
- block_stack->data.block.outer_cleanups);
- }
- else
- thisblock->data.block.outer_cleanups = 0;
-#endif
-#if 1
+ thisblock->data.block.exception_region = 0;
+ thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
+
+ thisblock->data.block.conditional_code = 0;
+ thisblock->data.block.last_unconditional_cleanup = note;
+ thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
+
if (block_stack
&& !(block_stack->data.block.cleanups == NULL_TREE
&& block_stack->data.block.outer_cleanups == NULL_TREE))
@@ -3085,7 +3084,6 @@ expand_start_bindings (exit_flag)
block_stack->data.block.outer_cleanups);
else
thisblock->data.block.outer_cleanups = 0;
-#endif
thisblock->data.block.label_chain = 0;
thisblock->data.block.innermost_stack_block = stack_block_stack;
thisblock->data.block.first_insn = note;
@@ -3101,6 +3099,91 @@ expand_start_bindings (exit_flag)
}
}
+/* Specify the scope of temporaries created by TARGET_EXPRs. Similar
+ to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
+ expand_expr are made. After we end the region, we know that all
+ temporaries that were created by TARGET_EXPRs will be destroyed
+ and their space freed for reuse. */
+
+void
+expand_start_target_temps ()
+{
+ /* This is so that even if the result is preserved, the space
+ allocated will be freed, as we know that it is no longer in use. */
+ push_temp_slots ();
+
+ /* Start a new binding layer that will keep track of all cleanup
+ actions to be performed. */
+ expand_start_bindings (0);
+
+ target_temp_slot_level = temp_slot_level;
+}
+
+void
+expand_end_target_temps ()
+{
+ expand_end_bindings (NULL_TREE, 0, 0);
+
+ /* This is so that even if the result is preserved, the space
+ allocated will be freed, as we know that it is no longer in use. */
+ pop_temp_slots ();
+}
+
+/* Mark top block of block_stack as an implicit binding for an
+ exception region. This is used to prevent infinite recursion when
+ ending a binding with expand_end_bindings. It is only ever called
+ by expand_eh_region_start, as that is the only way to create a
+ block stack for an exception region. */
+
+void
+mark_block_as_eh_region ()
+{
+ block_stack->data.block.exception_region = 1;
+ if (block_stack->next
+ && block_stack->next->data.block.conditional_code)
+ {
+ block_stack->data.block.conditional_code
+ = block_stack->next->data.block.conditional_code;
+ block_stack->data.block.last_unconditional_cleanup
+ = block_stack->next->data.block.last_unconditional_cleanup;
+ block_stack->data.block.cleanup_ptr
+ = block_stack->next->data.block.cleanup_ptr;
+ }
+}
+
+/* True if we are currently emitting insns in an area of output code
+ that is controlled by a conditional expression. This is used by
+ the cleanup handling code to generate conditional cleanup actions. */
+
+int
+conditional_context ()
+{
+ return block_stack && block_stack->data.block.conditional_code;
+}
+
+/* Mark top block of block_stack as not for an implicit binding for an
+ exception region. This is only ever done by expand_eh_region_end
+ to let expand_end_bindings know that it is being called explicitly
+ to end just the binding layer associated with the exception
+ region; otherwise expand_end_bindings would try to end all
+ implicit binding layers for exception regions, and then
+ one normal binding layer. */
+
+void
+mark_block_as_not_eh_region ()
+{
+ block_stack->data.block.exception_region = 0;
+}
+
+/* True if the top block of block_stack was marked as for an exception
+ region by mark_block_as_eh_region. */
+
+int
+is_eh_region ()
+{
+ return block_stack && block_stack->data.block.exception_region;
+}
+
/* Given a pointer to a BLOCK node, save a pointer to the most recently
generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
BLOCK node. */
@@ -3128,15 +3211,34 @@ expand_end_bindings (vars, mark_ends, dont_jump_in)
int mark_ends;
int dont_jump_in;
{
- register struct nesting *thisblock = block_stack;
+ register struct nesting *thisblock;
register tree decl;
+ while (block_stack->data.block.exception_region)
+ {
+ /* Because we don't need or want a new temporary level and
+ because we didn't create one in expand_eh_region_start,
+ create a fake one now to avoid removing one in
+ expand_end_bindings. */
+ push_temp_slots ();
+
+ block_stack->data.block.exception_region = 0;
+
+ expand_end_bindings (NULL_TREE, 0, 0);
+ }
+
if (output_bytecode)
{
bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
return;
}
+ /* Since expand_eh_region_start does an expand_start_bindings, we
+ have to first end all the bindings that were created by
+ expand_eh_region_start. */
+
+ thisblock = block_stack;
+
if (warn_unused)
for (decl = vars; decl; decl = TREE_CHAIN (decl))
if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
@@ -3362,6 +3464,9 @@ expand_end_bindings (vars, mark_ends, dont_jump_in)
use_variable (rtl);
}
+ /* Restore the temporary level of TARGET_EXPRs. */
+ target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
+
/* Restore block_stack level for containing block. */
stack_block_stack = thisblock->data.block.innermost_stack_block;
@@ -3614,7 +3719,7 @@ bc_expand_decl (decl, cleanup)
else if (DECL_SIZE (decl) == 0)
/* Variable with incomplete type. The stack offset herein will be
- fixed later in expand_decl_init (). */
+ fixed later in expand_decl_init. */
DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
else if (TREE_CONSTANT (DECL_SIZE (decl)))
@@ -3765,7 +3870,8 @@ bc_expand_decl_init (decl)
We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
CLEANUP multiple times, and have the correct semantics. This
- happens in exception handling, and for non-local gotos.
+ happens in exception handling, and for gotos, returns, and breaks that
+ leave the current scope.
If CLEANUP is nonzero and DECL is zero, we record a cleanup
that is not associated with any particular variable. */
@@ -3784,16 +3890,164 @@ expand_decl_cleanup (decl, cleanup)
if (cleanup != 0)
{
+ tree t;
+ rtx seq;
+ tree *cleanups = &thisblock->data.block.cleanups;
+ int cond_context = conditional_context ();
+
+ if (cond_context)
+ {
+ rtx flag = gen_reg_rtx (word_mode);
+ rtx set_flag_0;
+ tree cond;
+
+ start_sequence ();
+ emit_move_insn (flag, const0_rtx);
+ set_flag_0 = get_insns ();
+ end_sequence ();
+
+ thisblock->data.block.last_unconditional_cleanup
+ = emit_insns_after (set_flag_0,
+ thisblock->data.block.last_unconditional_cleanup);
+
+ emit_move_insn (flag, const1_rtx);
+
+ /* All cleanups must be on the function_obstack. */
+ push_obstacks_nochange ();
+ resume_temporary_allocation ();
+
+ cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
+ DECL_RTL (cond) = flag;
+
+ /* Conditionalize the cleanup. */
+ cleanup = build (COND_EXPR, void_type_node,
+ truthvalue_conversion (cond),
+ cleanup, integer_zero_node);
+ cleanup = fold (cleanup);
+
+ pop_obstacks ();
+
+ cleanups = thisblock->data.block.cleanup_ptr;
+ }
+
+ /* All cleanups must be on the function_obstack. */
+ push_obstacks_nochange ();
+ resume_temporary_allocation ();
cleanup = unsave_expr (cleanup);
+ pop_obstacks ();
+
+ t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
+
+ if (! cond_context)
+ /* If this block has a cleanup, it belongs in stack_block_stack. */
+ stack_block_stack = thisblock;
+
+ if (cond_context)
+ {
+ start_sequence ();
+ }
- thisblock->data.block.cleanups
- = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
- /* If this block has a cleanup, it belongs in stack_block_stack. */
- stack_block_stack = thisblock;
- expand_eh_region_start ();
+ /* If this was optimized so that there is no exception region for the
+ cleanup, then mark the TREE_LIST node, so that we can later tell
+ if we need to call expand_eh_region_end. */
+ if (expand_eh_region_start_tree (cleanup))
+ TREE_ADDRESSABLE (t) = 1;
+
+ if (cond_context)
+ {
+ seq = get_insns ();
+ end_sequence ();
+ thisblock->data.block.last_unconditional_cleanup
+ = emit_insns_after (seq,
+ thisblock->data.block.last_unconditional_cleanup);
+ }
+ else
+ {
+ thisblock->data.block.last_unconditional_cleanup
+ = get_last_insn ();
+ thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
+ }
}
return 1;
}
+
+/* Arrange for the top element of the dynamic cleanup chain to be
+ popped if we exit the current binding contour. If the current
+ contour is left via an exception, then __sjthrow will pop the top
+ element off the dynamic cleanup chain. The code that avoids doing
+ the action we push into the cleanup chain in the exceptional case
+ is contained in expand_cleanups.
+
+ This routine is only used by expand_eh_region_start, and that is
+ the only way in which an exception region should be started. This
+ routine is only used when using the setjmp/longjmp codegen method
+ for exception handling. */
+
+int
+expand_dcc_cleanup ()
+{
+ struct nesting *thisblock = block_stack;
+ tree cleanup;
+
+ /* Error if we are not in any block. */
+ if (thisblock == 0)
+ return 0;
+
+ /* Record the cleanup for the dynamic handler chain. */
+
+ /* All cleanups must be on the function_obstack. */
+ push_obstacks_nochange ();
+ resume_temporary_allocation ();
+ cleanup = make_node (POPDCC_EXPR);
+ pop_obstacks ();
+
+ /* Add the cleanup in a manner similar to expand_decl_cleanup. */
+ thisblock->data.block.cleanups
+ = temp_tree_cons (NULL_TREE, cleanup, thisblock->data.block.cleanups);
+
+ /* If this block has a cleanup, it belongs in stack_block_stack. */
+ stack_block_stack = thisblock;
+ return 1;
+}
+
+/* Arrange for the top element of the dynamic handler chain to be
+ popped if we exit the current binding contour. If the current
+ contour is left via an exception, then __sjthrow will pop the
+ top element off the dynamic handler chain. The code that avoids
+ doing the action we push into the handler chain in the exceptional
+ case is contained in expand_cleanups.
+
+ This routine is only used by expand_eh_region_start, and that is
+ the only way in which an exception region should be started. This
+ routine is only used when using the setjmp/longjmp codegen method
+ for exception handling. */
+
+int
+expand_dhc_cleanup ()
+{
+ struct nesting *thisblock = block_stack;
+ tree cleanup;
+
+ /* Error if we are not in any block. */
+ if (thisblock == 0)
+ return 0;
+
+ /* Record the cleanup for the dynamic handler chain. */
+
+ /* All cleanups must be on the function_obstack. */
+ push_obstacks_nochange ();
+ resume_temporary_allocation ();
+ cleanup = make_node (POPDHC_EXPR);
+ pop_obstacks ();
+
+ /* Add the cleanup in a manner similar to expand_decl_cleanup. */
+ thisblock->data.block.cleanups
+ = temp_tree_cons (NULL_TREE, cleanup, thisblock->data.block.cleanups);
+
+ /* If this block has a cleanup, it belongs in stack_block_stack. */
+ stack_block_stack = thisblock;
+ return 1;
+}
/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
DECL_ELTS is the list of elements that belong to DECL's type.
@@ -3891,7 +4145,19 @@ expand_cleanups (list, dont_do, in_fixup, reachable)
else
{
if (! in_fixup)
- expand_eh_region_end (TREE_VALUE (tail));
+ {
+ tree cleanup = TREE_VALUE (tail);
+
+ /* See expand_d{h,c}c_cleanup for why we avoid this. */
+ if (TREE_CODE (cleanup) != POPDHC_EXPR
+ && TREE_CODE (cleanup) != POPDCC_EXPR
+ /* See expand_eh_region_start_tree for this case. */
+ && ! TREE_ADDRESSABLE (tail))
+ {
+ cleanup = protect_with_terminate (cleanup);
+ expand_eh_region_end (cleanup);
+ }
+ }
if (reachable)
{
@@ -3910,6 +4176,29 @@ expand_cleanups (list, dont_do, in_fixup, reachable)
}
}
+/* Mark the context we are emitting RTL for as a conditional
+ context, so that any cleanup actions we register with
+ expand_decl_init will be properly conditionalized when those
+ cleanup actions are later performed. Must be called before any
+ expression (tree) is expanded that is within a conditional context. */
+
+void
+start_cleanup_deferal ()
+{
+ ++block_stack->data.block.conditional_code;
+}
+
+/* Mark the end of a conditional region of code. Because cleanup
+ deferals may be nested, we may still be in a conditional region
+ after we end the currently deferred cleanups; only after we end all
+ deferred cleanups are we back in unconditional code. */
+
+void
+end_cleanup_deferal ()
+{
+ --block_stack->data.block.conditional_code;
+}
+
/* Move all cleanups from the current block_stack
to the containing block_stack, where they are assumed to
have been created. If anything can cause a temporary to
@@ -3953,11 +4242,7 @@ any_pending_cleanups (this_contour)
if (this_contour && block_stack->data.block.cleanups != NULL)
return 1;
if (block_stack->data.block.cleanups == 0
- && (block_stack->data.block.outer_cleanups == 0
-#if 0
- || block_stack->data.block.outer_cleanups == empty_cleanup_list
-#endif
- ))
+ && block_stack->data.block.outer_cleanups == 0)
return 0;
for (block = block_stack->next; block; block = block->next)
@@ -4101,7 +4386,7 @@ case_index_expr_type ()
If VALUE is a duplicate or overlaps, return 2 and do nothing
except store the (first) duplicate node in *DUPLICATE.
If VALUE is out of range, return 3 and do nothing.
- If we are jumping into the scope of a cleaup or var-sized array, return 5.
+ If we are jumping into the scope of a cleanup or var-sized array, return 5.
Return 0 on success.
Extended to handle range statements. */
@@ -6186,4 +6471,3 @@ unroll_block_trees ()
reorder_blocks (block_vector, block, get_insns ());
}
-
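The conditional-cleanup support added to expand_decl_cleanup above can be pictured in terms of the code it is meant to emit: a word-mode flag is cleared at the block's last unconditional point, set where the cleanup is actually registered inside the conditional, and the recorded cleanup is wrapped in a COND_EXPR that tests the flag. Below is a plain-C model of that runtime behaviour, with invented names; it is a sketch of the effect, not the compiler code itself.

#include <stdio.h>

static int temp_constructed;           /* the word_mode flag                  */

static void cleanup_temp (void)        /* the registered cleanup action       */
{
  if (temp_constructed)                /* the COND_EXPR guard on the flag     */
    printf ("destroying the temporary\n");
}

static void maybe_make_temp (int cond)
{
  temp_constructed = 0;                /* flag := 0 at the last unconditional
                                          point, before any branches          */
  if (cond)
    {
      /* ... construct the temporary here ... */
      temp_constructed = 1;            /* flag := 1 where the cleanup is
                                          registered                          */
    }
}

int main (void)
{
  maybe_make_temp (0);
  cleanup_temp ();                     /* flag is clear: nothing to destroy   */
  maybe_make_temp (1);
  cleanup_temp ();                     /* flag is set: the cleanup runs       */
  return 0;
}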
diff --git a/gcc/toplev.c b/gcc/toplev.c
index 67aae1bb11c..855aaca571f 100644
--- a/gcc/toplev.c
+++ b/gcc/toplev.c
@@ -528,10 +528,6 @@ int flag_shared_data;
int flag_delayed_branch;
-/* Nonzero means to run cleanups after CALL_EXPRs. */
-
-int flag_short_temps;
-
/* Nonzero if we are compiling pure (sharable) code.
Value is 1 if we are doing reasonable (i.e. simple
offset into offset table) pic. Value is 2 if we can
@@ -646,6 +642,8 @@ struct { char *string; int *variable; int on_value;} f_options[] =
{"pic", &flag_pic, 1},
{"PIC", &flag_pic, 2},
{"exceptions", &flag_exceptions, 1},
+ {"sjlj-exceptions", &exceptions_via_longjmp, 1},
+ {"asynchronous-exceptions", &asynchronous_exceptions, 1},
{"profile-arcs", &profile_arc_flag, 1},
{"test-coverage", &flag_test_coverage, 1},
{"branch-probabilities", &flag_branch_probabilities, 1},
diff --git a/gcc/tree.def b/gcc/tree.def
index 8ba266a4efa..e77adeb5c65 100644
--- a/gcc/tree.def
+++ b/gcc/tree.def
@@ -458,7 +458,10 @@ DEFTREECODE (METHOD_CALL_EXPR, "method_call_expr", "e", 4)
manages to act on the proper value.
The cleanup is executed by the first enclosing CLEANUP_POINT_EXPR, if
it exists, otherwise it is the responsibility of the caller to manually
- call expand_cleanups_to, as needed. */
+ call expand_start_target_temps/expand_end_target_temps, as needed.
+
+ This differs from TRY_CATCH_EXPR in that operand 2 is always
+ evaluated when cleanups are run, even if no exception is thrown. */
DEFTREECODE (WITH_CLEANUP_EXPR, "with_cleanup_expr", "e", 3)
/* Specify a cleanup point.
@@ -689,6 +692,24 @@ DEFTREECODE (PREDECREMENT_EXPR, "predecrement_expr", "e", 2)
DEFTREECODE (PREINCREMENT_EXPR, "preincrement_expr", "e", 2)
DEFTREECODE (POSTDECREMENT_EXPR, "postdecrement_expr", "e", 2)
DEFTREECODE (POSTINCREMENT_EXPR, "postincrement_expr", "e", 2)
+
+/* Evaluate operand 1. If and only if an exception is thrown during
+ the evaluation of operand 1, evaluate operand 2.
+
+ This differs from WITH_CLEANUP_EXPR, in that operand 2 is never
+ evaluated unless an exception is thrown. */
+DEFTREECODE (TRY_CATCH_EXPR, "try_catch_expr", "e", 2)
+
+/* Pop the top element off the dynamic handler chain. Used in
+ conjunction with setjmp/longjmp based exception handling, see
+ except.c for more details. This is meant to be used only by the
+ exception handling backend, expand_dhc_cleanup specifically. */
+DEFTREECODE (POPDHC_EXPR, "popdhc_expr", "s", 0)
+
+/* Pop the top element off the dynamic cleanup chain. Used in
+ conjunction with exception handling. This is meant to be used
+ only by the exception handling backend. */
+DEFTREECODE (POPDCC_EXPR, "popdcc_expr", "s", 0)
/* These types of expressions have no useful value,
and always have side effects. */
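The comments added above distinguish TRY_CATCH_EXPR from WITH_CLEANUP_EXPR purely by which exits evaluate operand 2: a cleanup runs on every exit from the region, while a TRY_CATCH handler runs only on the exceptional exit. The following plain-C analogy uses an error return in place of a real throw; it illustrates the two semantics rather than the tree codes themselves.

#include <stdio.h>

static int do_work (int fail) { return fail ? -1 : 0; }

/* WITH_CLEANUP_EXPR-like: operand 2 runs on every exit from the region.  */
static int with_cleanup (int fail)
{
  int rc = do_work (fail);
  printf ("cleanup runs (rc = %d)\n", rc);
  return rc;
}

/* TRY_CATCH_EXPR-like: operand 2 runs only on the exceptional exit.  */
static int try_catch (int fail)
{
  int rc = do_work (fail);
  if (rc != 0)
    printf ("handler runs only on failure\n");
  return rc;
}

int main (void)
{
  with_cleanup (0);
  with_cleanup (1);
  try_catch (0);
  try_catch (1);
  return 0;
}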
diff --git a/gcc/tree.h b/gcc/tree.h
index ec1ce349a45..e0ae9c91036 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -1690,7 +1690,14 @@ extern void expand_null_return PROTO((void));
extern void expand_return PROTO((tree));
extern void expand_start_bindings PROTO((int));
extern void expand_end_bindings PROTO((tree, int, int));
+extern void start_cleanup_deferal PROTO((void));
+extern void end_cleanup_deferal PROTO((void));
+extern void mark_block_as_eh_region PROTO((void));
+extern void mark_block_as_not_eh_region PROTO((void));
+extern int is_eh_region PROTO((void));
+extern int conditional_context PROTO((void));
extern tree last_cleanup_this_contour PROTO((void));
+extern int expand_dhc_cleanup PROTO((void));
extern void expand_start_case PROTO((int, tree, tree,
char *));
extern void expand_end_case PROTO((tree));