re PR tree-optimization/90316 (large compile time increase in opt / alias stmt walking for Go example)

2019-05-07  Richard Biener  <rguenther@suse.de>

	PR tree-optimization/90316
	* tree-ssa-alias.h (get_continuation_for_phi): Take walking
	limit by reference.
	(walk_non_aliased_vuses): Take walking limit argument.
	* tree-ssa-alias.c (maybe_skip_until): Take limit and abort
	walking if it is reached instead of just counting.
	(get_continuation_for_phi): Likewise.
	(walk_non_aliased_vuses): Likewise, instead of leaving counter
	limiting to the callback.
	* tree-ssa-sccvn.c (vn_reference_lookup_2): Adjust.
	(vn_reference_lookup_3): Likewise.
	(vn_reference_lookup_pieces): Likewise.
	(vn_reference_lookup): Likewise.
	* tree-ssa-pre.c (translate_vuse_through_block): Limit walking.
	* tree-ssa-scopedtables.c (vuse_eq): Adjust.
	(avail_exprs_stack::lookup_avail_expr): Likewise.

From-SVN: r270940
Richard Biener 2019-05-07 11:17:00 +00:00
parent bca0a3216d
commit 3cf8b3e341
6 changed files with 61 additions and 43 deletions
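In short, the patch replaces the walker-internal query counter (previously checked against PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS inside the callbacks) with a caller-seeded budget that is passed by reference, decremented as the walk disambiguates statements, and causes the walk to give up once it reaches zero. The sketch below only illustrates that pattern with made-up types (stmt, walk_chain); it is not GCC's actual API, where the walkers operate on gimple statements and ao_ref references.

// Minimal sketch of the walking-budget pattern introduced here.  All names
// (stmt, walk_chain) are illustrative stand-ins for GCC's real machinery.
#include <cstdio>

struct stmt
{
  int id;
  bool clobbers;   /* stand-in for stmt_may_clobber_ref_p_1 */
  stmt *prev;      /* stand-in for following the virtual use-def chain */
};

/* The budget is taken by reference: the walk charges each disambiguation
   against it and aborts once it is exhausted, so the caller both bounds
   the work and can see how much budget remains afterwards.  */
static stmt *
walk_chain (stmt *from, unsigned &limit)
{
  for (stmt *s = from; s; s = s->prev)
    {
      if (limit == 0)
	return nullptr;   /* budget exhausted: give up instead of walking on */
      --limit;            /* one alias query charged to the caller's budget */
      if (s->clobbers)
	return s;         /* found a possibly clobbering statement */
    }
  return nullptr;         /* reached the end of the chain */
}

int
main ()
{
  stmt a = { 1, false, nullptr };
  stmt b = { 2, true, &a };
  stmt c = { 3, false, &b };
  unsigned limit = 2;     /* caller seeds the budget once ...  */
  stmt *hit = walk_chain (&c, limit);
  /* ... and can inspect what is left of it after the walk.  */
  std::printf ("hit=%d remaining=%u\n", hit ? hit->id : -1, limit);
  return 0;
}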

diff --git a/gcc/ChangeLog b/gcc/ChangeLog

@@ -1,4 +1,23 @@
-2019-05-03  Jan Hubicka  <hubicka@ucw.cz>
+2019-05-07  Richard Biener  <rguenther@suse.de>
+
+	PR tree-optimization/90316
+	* tree-ssa-alias.h (get_continuation_for_phi): Take walking
+	limit by reference.
+	(walk_non_aliased_vuses): Take walking limit argument.
+	* tree-ssa-alias.c (maybe_skip_until): Take limit and abort
+	walking if it is reached instead of just counting.
+	(get_continuation_for_phi): Likewise.
+	(walk_non_aliased_vuses): Likewise, instead of leaving counter
+	limiting to the callback.
+	* tree-ssa-sccvn.c (vn_reference_lookup_2): Adjust.
+	(vn_reference_lookup_3): Likewise.
+	(vn_reference_lookup_pieces): Likewise.
+	(vn_reference_lookup): Likewise.
+	* tree-ssa-pre.c (translate_vuse_through_block): Limit walking.
+	* tree-ssa-scopedtables.c (vuse_eq): Adjust.
+	(avail_exprs_stack::lookup_avail_expr): Likewise.
+
+2019-05-07  Jan Hubicka  <hubicka@ucw.cz>
 
 	* tree-ssa-alias.c (aliasing_component_refs_p): Continue looking
 	for comparable types in the second direction even if first one

diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c

@@ -2601,7 +2601,7 @@ stmt_kills_ref_p (gimple *stmt, tree ref)
 static bool
 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
-ao_ref *ref, tree vuse, unsigned int *cnt, bitmap *visited,
+ao_ref *ref, tree vuse, unsigned int &limit, bitmap *visited,
 bool abort_on_visited,
 void *(*translate)(ao_ref *, tree, void *, bool *),
 void *data)
@@ -2636,7 +2636,7 @@ maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
 /* An already visited PHI node ends the walk successfully. */
 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
 return !abort_on_visited;
-vuse = get_continuation_for_phi (def_stmt, ref, cnt,
+vuse = get_continuation_for_phi (def_stmt, ref, limit,
 visited, abort_on_visited,
 translate, data);
 if (!vuse)
@@ -2648,7 +2648,9 @@ maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
 else
 {
 /* A clobbering statement or the end of the IL ends it failing. */
-++*cnt;
+if ((int)limit <= 0)
+return false;
+--limit;
 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
 {
 bool disambiguate_only = true;
@@ -2676,12 +2678,13 @@ maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
 /* Starting from a PHI node for the virtual operand of the memory reference
 REF find a continuation virtual operand that allows to continue walking
 statements dominating PHI skipping only statements that cannot possibly
-clobber REF. Increments *CNT for each alias disambiguation done.
+clobber REF. Decrements LIMIT for each alias disambiguation done
+and aborts the walk, returning NULL_TREE if it reaches zero.
 Returns NULL_TREE if no suitable virtual operand can be found. */
 tree
 get_continuation_for_phi (gimple *phi, ao_ref *ref,
-unsigned int *cnt, bitmap *visited,
+unsigned int &limit, bitmap *visited,
 bool abort_on_visited,
 void *(*translate)(ao_ref *, tree, void *, bool *),
 void *data)
@@ -2723,7 +2726,7 @@ get_continuation_for_phi (gimple *phi, ao_ref *ref,
 arg1 = PHI_ARG_DEF (phi, i);
 if (arg1 == arg0)
 ;
-else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, cnt, visited,
+else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, limit, visited,
 abort_on_visited,
 /* Do not translate when walking over
 backedges. */
@@ -2759,18 +2762,22 @@ get_continuation_for_phi (gimple *phi, ao_ref *ref,
 implement optimistic value-numbering for example. Note that the
 VUSE argument is assumed to be valueized already.
+LIMIT specifies the number of alias queries we are allowed to do,
+the walk stops when it reaches zero and NULL is returned. LIMIT
+is decremented by the number of alias queries (plus adjustments
+done by the callbacks) upon return.
 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
 void *
 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
-void *(*walker)(ao_ref *, tree, unsigned int, void *),
+void *(*walker)(ao_ref *, tree, void *),
 void *(*translate)(ao_ref *, tree, void *, bool *),
 tree (*valueize)(tree),
-void *data)
+unsigned &limit, void *data)
 {
 bitmap visited = NULL;
 void *res;
-unsigned int cnt = 0;
 bool translated = false;
 timevar_push (TV_ALIAS_STMT_WALK);
@@ -2780,7 +2787,7 @@ walk_non_aliased_vuses (ao_ref *ref, tree vuse,
 gimple *def_stmt;
 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
-res = (*walker) (ref, vuse, cnt, data);
+res = (*walker) (ref, vuse, data);
 /* Abort walk. */
 if (res == (void *)-1)
 {
@@ -2804,11 +2811,15 @@ walk_non_aliased_vuses (ao_ref *ref, tree vuse,
 if (gimple_nop_p (def_stmt))
 break;
 else if (gimple_code (def_stmt) == GIMPLE_PHI)
-vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
+vuse = get_continuation_for_phi (def_stmt, ref, limit,
 &visited, translated, translate, data);
 else
 {
-cnt++;
+if ((int)limit <= 0)
+{
+res = NULL;
+break;
+}
 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
 {
 if (!translate)
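Note on the hunks above: the walker callback no longer receives the running count and no longer returns (void *)-1 itself to enforce the SCCVN limit; walk_non_aliased_vuses now owns the budget and bails out with NULL once it is used up. The following stand-alone sketch models that control flow with invented types (node, find_match); it is a readability paraphrase, not the GCC implementation.

// Self-contained model of the new walk_non_aliased_vuses control flow.
// "node" and "find_match" are invented stand-ins for GCC's gimple/ao_ref
// machinery; the callback convention (NULL = keep walking, (void *) -1 =
// abort, anything else = result) follows the code above.
#include <cstdio>

struct node
{
  const char *name;
  bool clobbers;
  node *defining;   /* analogous to stepping to the defining statement */
};

static void *
find_match (node *n, void *data)
{
  return n == data ? data : NULL;
}

static void *
walk (node *start, void *(*walker) (node *, void *),
      unsigned &limit, void *data)
{
  for (node *n = start; n; n = n->defining)
    {
      void *res = walker (n, data);
      if (res == (void *) -1)
	return NULL;       /* callback asked to abort the walk */
      if (res)
	return res;        /* lookup succeeded */
      if (limit == 0)
	return NULL;       /* budget exhausted: the walk itself gives up */
      --limit;             /* each disambiguation consumes budget */
      if (n->clobbers)
	return NULL;       /* a clobber we cannot look through */
    }
  return NULL;
}

int
main ()
{
  node a = { "a", false, nullptr };
  node b = { "b", false, &a };
  node c = { "c", false, &b };
  unsigned limit = 8;
  void *res = walk (&c, find_match, limit, &a);
  std::printf ("%s\n", res ? static_cast<node *> (res)->name : "not found");
  return 0;
}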

diff --git a/gcc/tree-ssa-alias.h b/gcc/tree-ssa-alias.h

@@ -132,15 +132,13 @@ extern bool call_may_clobber_ref_p_1 (gcall *, ao_ref *);
 extern bool stmt_kills_ref_p (gimple *, tree);
 extern bool stmt_kills_ref_p (gimple *, ao_ref *);
 extern tree get_continuation_for_phi (gimple *, ao_ref *,
-unsigned int *, bitmap *, bool,
+unsigned int &, bitmap *, bool,
 void *(*)(ao_ref *, tree, void *, bool *),
 void *);
 extern void *walk_non_aliased_vuses (ao_ref *, tree,
-void *(*)(ao_ref *, tree,
-unsigned int, void *),
+void *(*)(ao_ref *, tree, void *),
 void *(*)(ao_ref *, tree, void *, bool *),
-tree (*)(tree),
-void *);
+tree (*)(tree), unsigned &, void *);
 extern int walk_aliased_vdefs (ao_ref *, tree,
 bool (*)(ao_ref *, tree, void *),
 void *, bitmap *,

diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c

@@ -1151,6 +1151,7 @@ translate_vuse_through_block (vec<vn_reference_op_s> operands,
 if (gimple_bb (phi) != phiblock)
 return vuse;
+unsigned int cnt = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
 use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
 /* Use the alias-oracle to find either the PHI node in this block,
@@ -1159,8 +1160,10 @@ translate_vuse_through_block (vec<vn_reference_op_s> operands,
 if (gimple_code (phi) == GIMPLE_PHI)
 e = find_edge (block, phiblock);
 else if (use_oracle)
-while (!stmt_may_clobber_ref_p_1 (phi, &ref))
+while (cnt > 0
+&& !stmt_may_clobber_ref_p_1 (phi, &ref))
 {
+--cnt;
 vuse = gimple_vuse (phi);
 phi = SSA_NAME_DEF_STMT (vuse);
 if (gimple_bb (phi) != phiblock)
@@ -1179,10 +1182,9 @@ translate_vuse_through_block (vec<vn_reference_op_s> operands,
 if (use_oracle)
 {
 bitmap visited = NULL;
-unsigned int cnt;
 /* Try to find a vuse that dominates this phi node by skipping
 non-clobbering statements. */
-vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
+vuse = get_continuation_for_phi (phi, &ref, cnt, &visited, false,
 NULL, NULL);
 if (visited)
 BITMAP_FREE (visited);

diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c

@@ -1671,19 +1671,12 @@ vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
 with the current VUSE and performs the expression lookup. */
 static void *
-vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
-unsigned int cnt, void *vr_)
+vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
 {
 vn_reference_t vr = (vn_reference_t)vr_;
 vn_reference_s **slot;
 hashval_t hash;
-/* This bounds the stmt walks we perform on reference lookups
-to O(1) instead of O(N) where N is the number of dominating
-stores. */
-if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
-return (void *)-1;
 if (last_vuse_ptr)
 *last_vuse_ptr = vuse;
@@ -2023,8 +2016,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
 last_vuse_ptr = NULL;
 tree saved_vuse = vr->vuse;
 hashval_t saved_hashcode = vr->hashcode;
-void *res = vn_reference_lookup_2 (ref,
-gimple_vuse (def_stmt), 0, vr);
+void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), vr);
 /* Need to restore vr->vuse and vr->hashcode. */
 vr->vuse = saved_vuse;
 vr->hashcode = saved_hashcode;
@@ -2671,13 +2663,14 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
 && vr1.vuse)
 {
 ao_ref r;
+unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
 vn_walk_kind = kind;
 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
 *vnresult =
 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
 vn_reference_lookup_2,
 vn_reference_lookup_3,
-vuse_valueize, &vr1);
+vuse_valueize, limit, &vr1);
 gcc_checking_assert (vr1.operands == shared_lookup_references);
 }
@@ -2720,6 +2713,7 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
 {
 vn_reference_t wvnresult;
 ao_ref r;
+unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
 /* Make sure to use a valueized reference if we valueized anything.
 Otherwise preserve the full reference for advanced TBAA. */
 if (!valuezied_anything
@@ -2733,7 +2727,7 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
 vn_reference_lookup_2,
 vn_reference_lookup_3,
-vuse_valueize, &vr1);
+vuse_valueize, limit, &vr1);
 gcc_checking_assert (vr1.operands == shared_lookup_references);
 if (wvnresult)
 {

diff --git a/gcc/tree-ssa-scopedtables.c b/gcc/tree-ssa-scopedtables.c

@@ -100,19 +100,12 @@ avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
 the desired memory state. */
 static void *
-vuse_eq (ao_ref *, tree vuse1, unsigned int cnt, void *data)
+vuse_eq (ao_ref *, tree vuse1, void *data)
 {
 tree vuse2 = (tree) data;
 if (vuse1 == vuse2)
 return data;
-/* This bounds the stmt walks we perform on reference lookups
-to O(1) instead of O(N) where N is the number of dominating
-stores leading to a candidate. We re-use the SCCVN param
-for this as it is basically the same complexity. */
-if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
-return (void *)-1;
 return NULL;
 }
@@ -299,13 +292,14 @@ avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p)
 up the virtual use-def chain using walk_non_aliased_vuses.
 But don't do this when removing expressions from the hash. */
 ao_ref ref;
+unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
 if (!(vuse1 && vuse2
 && gimple_assign_single_p (stmt)
 && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
 && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
 ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
-&& walk_non_aliased_vuses (&ref, vuse2,
-vuse_eq, NULL, NULL, vuse1) != NULL))
+&& walk_non_aliased_vuses (&ref, vuse2, vuse_eq, NULL, NULL,
+limit, vuse1) != NULL))
 {
 if (insert)
 {