re PR tree-optimization/46590 (long compile time with -O2 and many loops)

PR tree-optimization/46590
	* cfgexpand.c: Revert last change (r183305).
	* gimplify.c (gimplify_bind_expr): Add clobbers for all non-gimple
	regs.
	* tree-eh.c (cleanup_empty_eh): Try to optimize clobbers before
	checking for emptiness.

From-SVN: r183566
This commit is contained in:
Author: Michael Matz <matz@suse.de> — 2012-01-26 15:50:33 +00:00 (committed by Michael Matz)
parent bfb676c121
commit 81bfd1977a
4 changed files with 34 additions and 30 deletions

View File

@ -1,3 +1,12 @@
2012-01-26 Michael Matz <matz@suse.de>
PR tree-optimization/46590
* cfgexpand.c: Revert last change (r183305).
* gimplify.c (gimplify_bind_expr): Add clobbers for all non-gimple
regs.
* tree-eh.c (cleanup_empty_eh): Try to optimize clobbers before
checking for emptiness.
2012-01-26 Jakub Jelinek <jakub@redhat.com>
PR middle-end/51895

View File

@ -440,12 +440,11 @@ visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
/* Helper routine for add_scope_conflicts, calculating the active partitions /* Helper routine for add_scope_conflicts, calculating the active partitions
at the end of BB, leaving the result in WORK. We're called to generate at the end of BB, leaving the result in WORK. We're called to generate
conflicts when OLD_CONFLICTS is non-null, otherwise we're just tracking conflicts when FOR_CONFLICT is true, otherwise we're just tracking
liveness. If we generate conflicts then OLD_CONFLICTS stores the bits liveness. */
for which we generated conflicts already. */
static void static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bitmap old_conflicts) add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{ {
edge e; edge e;
edge_iterator ei; edge_iterator ei;
@ -482,7 +481,7 @@ add_scope_conflicts_1 (basic_block bb, bitmap work, bitmap old_conflicts)
} }
else if (!is_gimple_debug (stmt)) else if (!is_gimple_debug (stmt))
{ {
if (old_conflicts if (for_conflict
&& visit == visit_op) && visit == visit_op)
{ {
/* If this is the first real instruction in this BB we need /* If this is the first real instruction in this BB we need
@ -490,27 +489,16 @@ add_scope_conflicts_1 (basic_block bb, bitmap work, bitmap old_conflicts)
Unlike classical liveness for named objects we can't Unlike classical liveness for named objects we can't
rely on seeing a def/use of the names we're interested in. rely on seeing a def/use of the names we're interested in.
There might merely be indirect loads/stores. We'd not add any There might merely be indirect loads/stores. We'd not add any
conflicts for such partitions. We know that we generated conflicts for such partitions. */
conflicts between all partitions in old_conflicts already,
so we need to generate only the new ones, avoiding to
repeatedly pay the O(N^2) cost for each basic block. */
bitmap_iterator bi; bitmap_iterator bi;
unsigned i; unsigned i;
EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
EXECUTE_IF_AND_COMPL_IN_BITMAP (work, old_conflicts, 0, i, bi)
{ {
unsigned j; unsigned j;
bitmap_iterator bj; bitmap_iterator bj;
/* First the conflicts between new and old_conflicts. */ EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
EXECUTE_IF_SET_IN_BITMAP (old_conflicts, 0, j, bj)
add_stack_var_conflict (i, j);
/* Then the conflicts between only the new members. */
EXECUTE_IF_AND_COMPL_IN_BITMAP (work, old_conflicts, i + 1,
j, bj)
add_stack_var_conflict (i, j); add_stack_var_conflict (i, j);
} }
/* And remember for the next basic block. */
bitmap_ior_into (old_conflicts, work);
visit = visit_conflict; visit = visit_conflict;
} }
walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit); walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
@ -527,7 +515,6 @@ add_scope_conflicts (void)
basic_block bb; basic_block bb;
bool changed; bool changed;
bitmap work = BITMAP_ALLOC (NULL); bitmap work = BITMAP_ALLOC (NULL);
bitmap old_conflicts;
/* We approximate the live range of a stack variable by taking the first /* We approximate the live range of a stack variable by taking the first
mention of its name as starting point(s), and by the end-of-scope mention of its name as starting point(s), and by the end-of-scope
@ -549,18 +536,15 @@ add_scope_conflicts (void)
FOR_EACH_BB (bb) FOR_EACH_BB (bb)
{ {
bitmap active = (bitmap)bb->aux; bitmap active = (bitmap)bb->aux;
add_scope_conflicts_1 (bb, work, NULL); add_scope_conflicts_1 (bb, work, false);
if (bitmap_ior_into (active, work)) if (bitmap_ior_into (active, work))
changed = true; changed = true;
} }
} }
old_conflicts = BITMAP_ALLOC (NULL);
FOR_EACH_BB (bb) FOR_EACH_BB (bb)
add_scope_conflicts_1 (bb, work, old_conflicts); add_scope_conflicts_1 (bb, work, true);
BITMAP_FREE (old_conflicts);
BITMAP_FREE (work); BITMAP_FREE (work);
FOR_ALL_BB (bb) FOR_ALL_BB (bb)
BITMAP_FREE (bb->aux); BITMAP_FREE (bb->aux);

View File

@ -1231,7 +1231,7 @@ gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
&& !DECL_HAS_VALUE_EXPR_P (t) && !DECL_HAS_VALUE_EXPR_P (t)
/* Only care for variables that have to be in memory. Others /* Only care for variables that have to be in memory. Others
will be rewritten into SSA names, hence moved to the top-level. */ will be rewritten into SSA names, hence moved to the top-level. */
&& needs_to_live_in_memory (t)) && !is_gimple_reg (t))
{ {
tree clobber = build_constructor (TREE_TYPE (t), NULL); tree clobber = build_constructor (TREE_TYPE (t), NULL);
TREE_THIS_VOLATILE (clobber) = 1; TREE_THIS_VOLATILE (clobber) = 1;

View File

@ -4056,6 +4056,7 @@ cleanup_empty_eh (eh_landing_pad lp)
edge_iterator ei; edge_iterator ei;
edge e, e_out; edge e, e_out;
bool has_non_eh_pred; bool has_non_eh_pred;
bool ret = false;
int new_lp_nr; int new_lp_nr;
/* There can be zero or one edges out of BB. This is the quickest test. */ /* There can be zero or one edges out of BB. This is the quickest test. */
@ -4070,6 +4071,16 @@ cleanup_empty_eh (eh_landing_pad lp)
default: default:
return false; return false;
} }
resx = last_stmt (bb);
if (resx && is_gimple_resx (resx))
{
if (stmt_can_throw_external (resx))
optimize_clobbers (bb);
else if (sink_clobbers (bb))
ret = true;
}
gsi = gsi_after_labels (bb); gsi = gsi_after_labels (bb);
/* Make sure to skip debug statements. */ /* Make sure to skip debug statements. */
@ -4081,9 +4092,9 @@ cleanup_empty_eh (eh_landing_pad lp)
{ {
/* For the degenerate case of an infinite loop bail out. */ /* For the degenerate case of an infinite loop bail out. */
if (infinite_empty_loop_p (e_out)) if (infinite_empty_loop_p (e_out))
return false; return ret;
return cleanup_empty_eh_unsplit (bb, e_out, lp); return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
} }
/* The block should consist only of a single RESX statement, modulo a /* The block should consist only of a single RESX statement, modulo a
@ -4096,7 +4107,7 @@ cleanup_empty_eh (eh_landing_pad lp)
resx = gsi_stmt (gsi); resx = gsi_stmt (gsi);
} }
if (!is_gimple_resx (resx)) if (!is_gimple_resx (resx))
return false; return ret;
gcc_assert (gsi_one_before_end_p (gsi)); gcc_assert (gsi_one_before_end_p (gsi));
/* Determine if there are non-EH edges, or resx edges into the handler. */ /* Determine if there are non-EH edges, or resx edges into the handler. */
@ -4172,7 +4183,7 @@ cleanup_empty_eh (eh_landing_pad lp)
return true; return true;
} }
return false; return ret;
succeed: succeed:
if (dump_file && (dump_flags & TDF_DETAILS)) if (dump_file && (dump_flags & TDF_DETAILS))