Fix PHI ordering problems in the path solver.

After auditing the PHI range calculations, I'm not convinced we've
caught all the corner cases.  They haven't shown up in the wild (yet),
but better safe than sorry.

We shouldn't write anything to the cache or trigger additional
lookups while calculating a PHI, as this may cause ordering problems.
We should resolve the PHI with either the cache as it stands, or by
asking for ranges on entry to the path.  I've documented this
restriction in a comment above ssa_range_in_phi.

There was one dubious case where we called fold_range from
ssa_range_in_phi.  It avoided triggering lookups mostly by luck:
fold_range resolves a PHI by calling range_on_edge, which
path_range_query sets up to pick up global ranges by default.  This is
fragile, so I've rewritten the call to use cached or global ranges
explicitly.

Also, ssa_range_in_phi should avoid the cache when the arg is defined
in the PHI's block, since using the cache there could create an
ordering problem.  We already have a similar check when calculating
relations in PHIs.
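
To illustrate the kind of ordering hazard this guards against, here is
a made-up GIMPLE fragment (names and block numbers are invented for
illustration; they are not from the patch or a testcase):

    <bb 3> :                        ;; loop header, reached via edge 4->3
      # i_1 = PHI <0(2), i_7(4)>
      # j_2 = PHI <0(2), i_1(4)>    ;; arg i_1 is defined in this block
      ...

    <bb 4> :                        ;; latch
      i_7 = i_1 + 1;
      goto <bb 3>;

When solving a path that enters bb 3 from bb 4, j_2's back-edge
argument i_1 must be resolved to the value i_1 had on entry to the
block, not to whatever the cache holds after i_1's own PHI has just
been recomputed for this block; otherwise the result would depend on
the order in which the PHIs are processed.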

Tested on x86-64 and ppc64le Linux.

gcc/ChangeLog:

	* gimple-range-path.cc (path_range_query::internal_range_of_expr):
	Remove useless code.
	(path_range_query::ssa_defined_in_bb): New.
	(path_range_query::ssa_range_in_phi): Avoid fold_range call that
	could trigger additional lookups.
	Do not use the cache for ARGs defined in this block.
	(path_range_query::compute_ranges_in_block): Use ssa_defined_in_bb.
	(path_range_query::maybe_register_phi_relation): Same.
	(path_range_query::range_of_stmt): Adjust comment.
	* gimple-range-path.h (ssa_defined_in_bb): New.
Author: Aldy Hernandez
Date:   2021-11-15 09:56:56 +01:00
Parent: 540d92ae9b
Commit: fcdf49a0ad

2 changed files with 42 additions and 20 deletions

gcc/gimple-range-path.cc

@@ -202,8 +202,8 @@ path_range_query::internal_range_of_expr (irange &r, tree name, gimple *stmt)
       return true;
     }
 
-  basic_block bb = stmt ? gimple_bb (stmt) : exit_bb ();
-  if (stmt && range_defined_in_block (r, name, bb))
+  if (stmt
+      && range_defined_in_block (r, name, gimple_bb (stmt)))
     {
       if (TREE_CODE (name) == SSA_NAME)
         r.intersect (gimple_range_global (name));
@@ -250,36 +250,62 @@ path_range_query::set_path (const vec<basic_block> &path)
   bitmap_clear (m_has_cache_entry);
 }
 
+bool
+path_range_query::ssa_defined_in_bb (tree name, basic_block bb)
+{
+  return (TREE_CODE (name) == SSA_NAME
+          && SSA_NAME_DEF_STMT (name)
+          && gimple_bb (SSA_NAME_DEF_STMT (name)) == bb);
+}
+
 // Return the range of the result of PHI in R.
+//
+// Since PHIs are calculated in parallel at the beginning of the
+// block, we must be careful to never save anything to the cache here.
+// It is the caller's responsibility to adjust the cache.  Also,
+// calculating the PHI's range must not trigger additional lookups.
 
 void
 path_range_query::ssa_range_in_phi (irange &r, gphi *phi)
 {
   tree name = gimple_phi_result (phi);
   basic_block bb = gimple_bb (phi);
+  unsigned nargs = gimple_phi_num_args (phi);
 
   if (at_entry ())
     {
       if (m_resolve && m_ranger->range_of_expr (r, name, phi))
         return;
 
-      // Try fold just in case we can resolve simple things like PHI <5(99), 6(88)>.
-      if (!fold_range (r, phi, this))
-        r.set_varying (TREE_TYPE (name));
-
+      // Try to fold the phi exclusively with global or cached values.
+      // This will get things like PHI <5(99), 6(88)>.  We do this by
+      // calling range_of_expr with no context.
+      int_range_max arg_range;
+      r.set_undefined ();
+      for (size_t i = 0; i < nargs; ++i)
+        {
+          tree arg = gimple_phi_arg_def (phi, i);
+          if (range_of_expr (arg_range, arg, /*stmt=*/NULL))
+            r.union_ (arg_range);
+          else
+            {
+              r.set_varying (TREE_TYPE (name));
+              return;
+            }
+        }
       return;
     }
 
   basic_block prev = prev_bb ();
   edge e_in = find_edge (prev, bb);
-  unsigned nargs = gimple_phi_num_args (phi);
 
   for (size_t i = 0; i < nargs; ++i)
     if (e_in == gimple_phi_arg_edge (phi, i))
      {
        tree arg = gimple_phi_arg_def (phi, i);
 
-       if (!get_cache (r, arg))
+       // Avoid using the cache for ARGs defined in this block, as
+       // that could create an ordering problem.
+       if (ssa_defined_in_bb (arg, bb) || !get_cache (r, arg))
          {
            if (m_resolve)
              {
@@ -393,10 +419,7 @@ path_range_query::compute_ranges_in_block (basic_block bb)
   EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
     {
       tree name = ssa_name (i);
-      gimple *def_stmt = SSA_NAME_DEF_STMT (name);
-      basic_block def_bb = gimple_bb (def_stmt);
-
-      if (def_bb == bb)
+      if (ssa_defined_in_bb (name, bb))
         clear_cache (name);
     }
@@ -705,8 +728,8 @@ path_range_query::range_of_stmt (irange &r, gimple *stmt, tree)
   if (!irange::supports_type_p (type))
     return false;
 
-  // If resolving unknowns, fold the statement as it would have
-  // appeared at the end of the path.
+  // If resolving unknowns, fold the statement making use of any
+  // relations along the path.
   if (m_resolve)
     {
       fold_using_range f;
@@ -714,8 +737,8 @@ path_range_query::range_of_stmt (irange &r, gimple *stmt, tree)
       if (!f.fold_stmt (r, stmt, src))
         r.set_varying (type);
     }
 
-  // Otherwise, use the global ranger to fold it as it would appear in
-  // the original IL.  This is more conservative, but faster.
+  // Otherwise, fold without relations.
   else if (!fold_range (r, stmt, this))
     r.set_varying (type);
@@ -727,11 +749,10 @@ path_range_query::maybe_register_phi_relation (gphi *phi, tree arg)
 {
   basic_block bb = gimple_bb (phi);
   tree result = gimple_phi_result (phi);
-  gimple *def = SSA_NAME_DEF_STMT (arg);
 
   // Avoid recording the equivalence if the arg is defined in this
-  // block, as that would create an ordering problem.
-  if (def && gimple_bb (def) == bb)
+  // block, as that could create an ordering problem.
+  if (ssa_defined_in_bb (arg, bb))
     return;
 
   if (dump_file && (dump_flags & TDF_DETAILS))

gcc/gimple-range-path.h

@@ -66,6 +66,7 @@ private:
   void maybe_register_phi_relation (gphi *, tree arg);
   bool add_to_imports (tree name, bitmap imports);
   bool import_p (tree name);
+  bool ssa_defined_in_bb (tree name, basic_block bb);
 
   // Path navigation.
   void set_path (const vec<basic_block> &);