tree-eh.c (do_return_redirection): Remove return_value_p parameter.

* tree-eh.c (do_return_redirection): Remove return_value_p
	parameter.  Change all callers.
	(lower_try_finally_nofallthru): Remove local return_val.
	(lower_try_finally_onedest): Likewise.
	(lower_try_finally_copy): Likewise.
	(lower_try_finally_switch): Likewise.

From-SVN: r180459
Ian Lance Taylor, 2011-10-25 22:59:40 +00:00 (committed by Ian Lance Taylor)
parent d7115452b2
commit 8d686507c0
2 changed files with 34 additions and 60 deletions

gcc/ChangeLog

@@ -1,3 +1,12 @@
+2011-10-25  Ian Lance Taylor  <iant@google.com>
+
+	* tree-eh.c (do_return_redirection): Remove return_value_p
+	parameter.  Change all callers.
+	(lower_try_finally_nofallthru): Remove local return_val.
+	(lower_try_finally_onedest): Likewise.
+	(lower_try_finally_copy): Likewise.
+	(lower_try_finally_switch): Likewise.
+
 2011-10-25  H.J. Lu  <hongjiu.lu@intel.com>
 
 	* config/i386/mmx.md (*mmx_maskmovq): Replace :SI with :P and

gcc/tree-eh.c

@@ -711,33 +711,18 @@ verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
 #define verify_norecord_switch_expr(state, switch_expr)
 #endif
 
-/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
-   whatever is needed to finish the return.  If MOD is non-null, insert it
-   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
-   variable to be used in manipulating the value returned from the function.  */
+/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  If MOD is
+   non-null, insert it before the new branch.  */
 
 static void
-do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
-		       tree *return_value_p)
+do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
 {
-  tree ret_expr;
   gimple x;
 
   /* In the case of a return, the queue node must be a gimple statement.  */
   gcc_assert (!q->is_label);
 
-  ret_expr = gimple_return_retval (q->stmt.g);
-
-  if (ret_expr)
-    {
-      if (!*return_value_p)
-	*return_value_p = ret_expr;
-      else
-	gcc_assert (*return_value_p == ret_expr);
-      q->cont_stmt = q->stmt.g;
-      /* The nasty part about redirecting the return value is that the
-	 return value itself is to be computed before the FINALLY block
-	 is executed.  e.g.
+  /* Note that the return value may have already been computed, e.g.,
 
 		int x;
 		int foo (void)
@@ -750,26 +735,10 @@ do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
 		  }
 		}
 
-	  should return 0, not 1.  Arrange for this to happen by copying
-	  computed the return value into a local temporary.  This also
-	  allows us to redirect multiple return statements through the
-	  same destination block; whether this is a net win or not really
-	  depends, I guess, but it does make generation of the switch in
-	  lower_try_finally_switch easier.  */
+     should return 0, not 1.  We don't have to do anything to make
+     this happens because the return value has been placed in the
+     RESULT_DECL already.  */
 
-      if (TREE_CODE (ret_expr) == RESULT_DECL)
-	{
-	  if (!*return_value_p)
-	    *return_value_p = ret_expr;
-	  else
-	    gcc_assert (*return_value_p == ret_expr);
-	  q->cont_stmt = q->stmt.g;
-	}
-      else
-	  gcc_unreachable ();
-    }
-  else
-      /* If we don't return a value, all return statements are the same.  */
-      q->cont_stmt = q->stmt.g;
+  q->cont_stmt = q->stmt.g;
 
   if (!q->repl_stmt)
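
The comment's claim that foo () must return 0, not 1, is easy to check with a standalone C sketch of what the lowering does. This is illustrative code only, not GCC internals: the local "result" is introduced here as a stand-in for the function's RESULT_DECL, and the try/finally of the comment's example is written out by hand as "capture the value, then run the finally body".

/* Standalone illustration (not GCC code): the value of "return x;" is
   captured before the finally body runs, so foo () returns 0 even though
   the finally body increments x.  */

#include <stdio.h>

static int x;

static int
foo (void)
{
  int result;     /* stand-in for the RESULT_DECL */

  x = 0;
  result = x;     /* "return x;" -- the value is computed first ...        */
  x++;            /* ... then the finally body runs, too late to change it */
  return result;
}

int
main (void)
{
  printf ("%d\n", foo ());   /* prints 0 */
  return 0;
}

With the value already sitting in the RESULT_DECL, do_return_redirection no longer needs the return_value_p cache; it just records q->stmt.g as the continuation statement, exactly as the surviving "+  q->cont_stmt = q->stmt.g;" line above shows.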
@@ -1041,7 +1010,7 @@ static void
 lower_try_finally_nofallthru (struct leh_state *state,
 			      struct leh_tf_state *tf)
 {
-  tree lab, return_val;
+  tree lab;
   gimple x;
   gimple_seq finally;
   struct goto_queue_node *q, *qe;
@@ -1055,12 +1024,11 @@ lower_try_finally_nofallthru (struct leh_state *state,
   x = gimple_build_label (lab);
   gimple_seq_add_stmt (&tf->top_p_seq, x);
 
-  return_val = NULL;
   q = tf->goto_queue;
   qe = q + tf->goto_queue_active;
   for (; q < qe; ++q)
     if (q->index < 0)
-      do_return_redirection (q, lab, NULL, &return_val);
+      do_return_redirection (q, lab, NULL);
     else
       do_goto_redirection (q, lab, NULL, tf);
 
@@ -1126,9 +1094,8 @@ lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
   if (tf->may_return)
     {
       /* Reachable by return expressions only.  Redirect them.  */
-      tree return_val = NULL;
       for (; q < qe; ++q)
-	do_return_redirection (q, finally_label, NULL, &return_val);
+	do_return_redirection (q, finally_label, NULL);
       replace_goto_queue (tf);
     }
   else
@@ -1197,7 +1164,6 @@ lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
   if (tf->goto_queue)
     {
      struct goto_queue_node *q, *qe;
-      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
@@ -1230,7 +1196,7 @@ lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
 	      = create_artificial_label (tf_loc);
 
 	  if (index == return_index)
-	    do_return_redirection (q, lab, NULL, &return_val);
+	    do_return_redirection (q, lab, NULL);
 	  else
 	    do_goto_redirection (q, lab, NULL, tf);
 
@@ -1257,7 +1223,7 @@ lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
 	  lab = labels[index].label;
 
 	  if (index == return_index)
-	    do_return_redirection (q, lab, NULL, &return_val);
+	    do_return_redirection (q, lab, NULL);
 	  else
 	    do_goto_redirection (q, lab, NULL, tf);
 	}
@@ -1280,7 +1246,6 @@ static void
 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
 {
   struct goto_queue_node *q, *qe;
-  tree return_val = NULL;
   tree finally_tmp, finally_label;
   int return_index, eh_index, fallthru_index;
   int nlabels, ndests, j, last_case_index;
@@ -1401,7 +1366,7 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
 				   build_int_cst (integer_type_node,
 						  return_index));
 	  gimple_seq_add_stmt (&mod, x);
-	  do_return_redirection (q, finally_label, mod, &return_val);
+	  do_return_redirection (q, finally_label, mod);
 	  switch_id = return_index;
 	}
       else
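
For readers unfamiliar with this pass, the last hunk shows the shape lower_try_finally_switch produces: each redirected return stores a selector (return_index) into finally_tmp before branching to the shared finally block, and a switch on that selector afterwards routes control to the original destination. The sketch below is a hand-written C model of that control flow, not GCC code; the names lowered, finally_tmp, result, RETURN_DEST, GOTO_DEST and after_try are made up for illustration.

#include <stdio.h>

enum { RETURN_DEST, GOTO_DEST };   /* selector values, like return_index */

static int x;

static int
lowered (int take_goto_path)
{
  int finally_tmp;   /* plays the role of finally_tmp in tree-eh.c */
  int result = 0;    /* plays the role of the RESULT_DECL */

  x = 0;
  if (take_goto_path)
    {
      finally_tmp = GOTO_DEST;    /* remember where to resume afterwards */
      goto finally_label;
    }

  result = x;                     /* "return x;" -- value captured first */
  finally_tmp = RETURN_DEST;
  goto finally_label;

 finally_label:
  x++;                            /* the shared finally body */
  switch (finally_tmp)            /* route control to the original destination */
    {
    case RETURN_DEST: goto do_return;
    default:          goto after_try;
    }

 after_try:
  result = 100;                   /* code originally reached by the goto */
                                  /* fall through to the return */
 do_return:
  return result;
}

int
main (void)
{
  printf ("%d %d\n", lowered (0), lowered (1));   /* prints "0 100" */
  return 0;
}

The patch does not change this control-flow shape; it only drops the redundant bookkeeping of the returned value, which the RESULT_DECL already carries.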