make FOR_EACH_IMM_USE_STMT safe for early exits

Use a dtor to automatically remove ITER from IMM_USE list in
FOR_EACH_IMM_USE_STMT.


for  gcc/ChangeLog

	* ssa-iterators.h (end_imm_use_stmt_traverse): Forward
	declare.
	(auto_end_imm_use_stmt_traverse): New struct.
	(FOR_EACH_IMM_USE_STMT): Use it.
	(BREAK_FROM_IMM_USE_STMT, RETURN_FROM_IMM_USE_STMT): Remove,
	along with uses...
	* gimple-ssa-strength-reduction.c: ... here, ...
	* graphite-scop-detection.c: ... here, ...
	* ipa-modref.c, ipa-pure-const.c, ipa-sra.c: ... here, ...
	* tree-predcom.c, tree-ssa-ccp.c: ... here, ...
	* tree-ssa-dce.c, tree-ssa-dse.c: ... here, ...
	* tree-ssa-loop-ivopts.c, tree-ssa-math-opts.c: ... here, ...
	* tree-ssa-phiprop.c, tree-ssa.c: ... here, ...
	* tree-vect-slp.c: ... and here, ...
	* doc/tree-ssa.texi: ... and the example here.
This commit is contained in:
Alexandre Oliva 2021-01-11 23:37:59 -03:00 committed by Alexandre Oliva
parent ab88f36072
commit 640296c367
16 changed files with 54 additions and 67 deletions

View File

@ -385,20 +385,8 @@ optimization can manipulate the stmt when all the uses have been
processed. This is a little slower than the FAST version since it adds a
placeholder element and must sort through the list a bit for each statement.
This placeholder element must be also be removed if the loop is
terminated early. The macro @code{BREAK_FROM_IMM_USE_STMT} is provided
to do this :
@smallexample
FOR_EACH_IMM_USE_STMT (stmt, iterator, ssa_var)
@{
if (stmt == last_stmt)
BREAK_FROM_IMM_USE_STMT (iterator);
FOR_EACH_IMM_USE_ON_STMT (imm_use_p, iterator)
SET_USE (imm_use_p, ssa_var_2);
fold_stmt (stmt);
@}
@end smallexample
terminated early; a destructor takes care of that when leaving the
@code{FOR_EACH_IMM_USE_STMT} scope.
There are checks in @code{verify_ssa} which verify that the immediate use list
is up to date, as well as checking that an optimization didn't break from the

View File

@ -524,7 +524,7 @@ uses_consumed_by_stmt (tree name, gimple *stmt, unsigned recurse = 0)
recurse + 1))
{
retval = false;
BREAK_FROM_IMM_USE_STMT (iter);
break;
}
}

View File

@ -1262,9 +1262,7 @@ build_cross_bb_scalars_def (scop_p scop, tree def, basic_block def_bb,
&& (def_bb != gimple_bb (use_stmt) && !is_gimple_debug (use_stmt)))
{
add_write (writes, def);
/* This is required by the FOR_EACH_IMM_USE_STMT when we want to break
before all the uses have been visited. */
BREAK_FROM_IMM_USE_STMT (imm_iter);
break;
}
}

View File

@ -1597,9 +1597,7 @@ analyze_ssa_name_flags (tree name, vec<modref_lattice> &lattice, int depth,
FOR_EACH_IMM_USE_STMT (use_stmt, ui, name)
{
if (lattice[index].flags == 0)
{
BREAK_FROM_IMM_USE_STMT (ui);
}
break;
if (is_gimple_debug (use_stmt))
continue;
if (dump_file)

View File

@ -842,20 +842,20 @@ check_retval_uses (tree retval, gimple *stmt)
{
tree op2 = gimple_cond_rhs (cond);
if (!integer_zerop (op2))
RETURN_FROM_IMM_USE_STMT (use_iter, false);
return false;
}
else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
{
enum tree_code code = gimple_assign_rhs_code (ga);
if (TREE_CODE_CLASS (code) != tcc_comparison)
RETURN_FROM_IMM_USE_STMT (use_iter, false);
return false;
if (!integer_zerop (gimple_assign_rhs2 (ga)))
RETURN_FROM_IMM_USE_STMT (use_iter, false);
return false;
}
else if (is_gimple_debug (use_stmt))
;
else if (use_stmt != stmt)
RETURN_FROM_IMM_USE_STMT (use_iter, false);
return false;
return true;
}

View File

@ -850,7 +850,7 @@ isra_track_scalar_value_uses (function *fun, cgraph_node *node, tree name,
|| (arg_count = gimple_call_num_args (call)) == 0)
{
res = -1;
BREAK_FROM_IMM_USE_STMT (imm_iter);
break;
}
cgraph_edge *cs = node->get_edge (stmt);
@ -874,7 +874,7 @@ isra_track_scalar_value_uses (function *fun, cgraph_node *node, tree name,
|| all_uses != simple_uses)
{
res = -1;
BREAK_FROM_IMM_USE_STMT (imm_iter);
break;
}
res += all_uses;
}
@ -891,7 +891,7 @@ isra_track_scalar_value_uses (function *fun, cgraph_node *node, tree name,
if (TREE_CODE (lhs) != SSA_NAME)
{
res = -1;
BREAK_FROM_IMM_USE_STMT (imm_iter);
break;
}
gcc_assert (!gimple_vdef (stmt));
if (bitmap_set_bit (analyzed, SSA_NAME_VERSION (lhs)))
@ -901,7 +901,7 @@ isra_track_scalar_value_uses (function *fun, cgraph_node *node, tree name,
if (tmp < 0)
{
res = tmp;
BREAK_FROM_IMM_USE_STMT (imm_iter);
break;
}
res += tmp;
}
@ -909,7 +909,7 @@ isra_track_scalar_value_uses (function *fun, cgraph_node *node, tree name,
else
{
res = -1;
BREAK_FROM_IMM_USE_STMT (imm_iter);
break;
}
}
return res;
@ -1016,7 +1016,7 @@ ptr_parm_has_nonarg_uses (cgraph_node *node, function *fun, tree parm,
|| (arg_count = gimple_call_num_args (call)) == 0)
{
ret = true;
BREAK_FROM_IMM_USE_STMT (ui);
break;
}
cgraph_edge *cs = node->get_edge (stmt);
@ -1062,7 +1062,7 @@ ptr_parm_has_nonarg_uses (cgraph_node *node, function *fun, tree parm,
if (uses_ok != all_uses)
{
ret = true;
BREAK_FROM_IMM_USE_STMT (ui);
break;
}
}
@ -1975,7 +1975,7 @@ ssa_name_only_returned_p (tree name, bitmap analyzed)
if (t != name)
{
res = false;
BREAK_FROM_IMM_USE_STMT (imm_iter);
break;
}
}
else if ((is_gimple_assign (stmt) && !gimple_has_volatile_ops (stmt))
@ -1991,20 +1991,20 @@ ssa_name_only_returned_p (tree name, bitmap analyzed)
if (TREE_CODE (lhs) != SSA_NAME)
{
res = false;
BREAK_FROM_IMM_USE_STMT (imm_iter);
break;
}
gcc_assert (!gimple_vdef (stmt));
if (bitmap_set_bit (analyzed, SSA_NAME_VERSION (lhs))
&& !ssa_name_only_returned_p (lhs, analyzed))
{
res = false;
BREAK_FROM_IMM_USE_STMT (imm_iter);
break;
}
}
else
{
res = false;
BREAK_FROM_IMM_USE_STMT (imm_iter);
break;
}
}
return res;

View File

@ -77,29 +77,32 @@ struct imm_use_iterator
!end_readonly_imm_use_p (&(ITER)); \
(void) ((DEST) = next_readonly_imm_use (&(ITER))))
/* Use this iterator to visit each stmt which has a use of SSAVAR. */
/* Forward declare for use in the class below. */
static inline void end_imm_use_stmt_traverse (imm_use_iterator *);
/* Arrange to automatically call, upon destruction, end_imm_use_stmt_traverse
with a given pointer to imm_use_iterator. */
struct auto_end_imm_use_stmt_traverse
{
imm_use_iterator *imm;
auto_end_imm_use_stmt_traverse (imm_use_iterator *imm)
: imm (imm) {}
~auto_end_imm_use_stmt_traverse ()
{ end_imm_use_stmt_traverse (imm); }
};
/* Use this iterator to visit each stmt which has a use of SSAVAR. The
destructor of the auto_end_imm_use_stmt_traverse object deals with removing
ITER from SSAVAR's IMM_USE list even when leaving the scope early. */
#define FOR_EACH_IMM_USE_STMT(STMT, ITER, SSAVAR) \
for ((STMT) = first_imm_use_stmt (&(ITER), (SSAVAR)); \
for (struct auto_end_imm_use_stmt_traverse \
auto_end_imm_use_stmt_traverse \
((((STMT) = first_imm_use_stmt (&(ITER), (SSAVAR))), \
&(ITER))); \
!end_imm_use_stmt_p (&(ITER)); \
(void) ((STMT) = next_imm_use_stmt (&(ITER))))
/* Use this to terminate the FOR_EACH_IMM_USE_STMT loop early. Failure to
do so will result in leaving an iterator marker node in the immediate
use list, and nothing good will come from that. */
#define BREAK_FROM_IMM_USE_STMT(ITER) \
{ \
end_imm_use_stmt_traverse (&(ITER)); \
break; \
}
/* Similarly for return. */
#define RETURN_FROM_IMM_USE_STMT(ITER, VAL) \
{ \
end_imm_use_stmt_traverse (&(ITER)); \
return (VAL); \
}
/* Use this iterator in combination with FOR_EACH_IMM_USE_STMT to
get access to each occurrence of ssavar on the stmt returned by
that iterator.  For instance:

View File

@ -2367,7 +2367,7 @@ base_names_in_chain_on (class loop *loop, tree name, tree var)
&& flow_bb_inside_loop_p (loop, gimple_bb (stmt)))
{
phi = stmt;
BREAK_FROM_IMM_USE_STMT (iter);
break;
}
}
if (!phi)

View File

@ -3020,7 +3020,7 @@ optimize_atomic_bit_test_and (gimple_stmt_iterator *gsip,
}
use_bool = false;
BREAK_FROM_IMM_USE_STMT (iter);
break;
}
tree new_lhs = make_ssa_name (TREE_TYPE (lhs));

View File

@ -1516,7 +1516,7 @@ eliminate_unnecessary_stmts (void)
|| gimple_plf (stmt, STMT_NECESSARY))
{
found = true;
BREAK_FROM_IMM_USE_STMT (iter);
break;
}
}
if (found)

View File

@ -693,7 +693,7 @@ dse_optimize_redundant_stores (gimple *stmt)
{
/* Limit stmt walking. */
if (++cnt > param_dse_max_alias_queries_per_store)
BREAK_FROM_IMM_USE_STMT (ui);
break;
/* If USE_STMT stores 0 into one or more of the same locations
as STMT and STMT would kill USE_STMT, then we can just remove
@ -712,7 +712,7 @@ dse_optimize_redundant_stores (gimple *stmt)
ao_ref write;
if (!initialize_ao_ref_for_dse (use_stmt, &write))
BREAK_FROM_IMM_USE_STMT (ui)
break;
if (valid_ao_ref_for_dse (&write)
&& stmt_kills_ref_p (stmt, &write))
@ -805,7 +805,7 @@ dse_classify_store (ao_ref *ref, gimple *stmt,
if (++cnt > param_dse_max_alias_queries_per_store)
{
fail = true;
BREAK_FROM_IMM_USE_STMT (ui);
break;
}
/* We have visited ourselves already so ignore STMT for the
@ -852,7 +852,7 @@ dse_classify_store (ao_ref *ref, gimple *stmt,
}
fail = true;
BREAK_FROM_IMM_USE_STMT (ui);
break;
}
/* If this is a store, remember it as we possibly need to walk the
defs uses. */

View File

@ -7627,7 +7627,7 @@ remove_unused_ivs (struct ivopts_data *data, bitmap toremove)
count++;
if (count > 1)
BREAK_FROM_IMM_USE_STMT (imm_iter);
break;
}
if (!count)

View File

@ -1285,7 +1285,7 @@ execute_cse_sincos_1 (tree name)
and, in subsequent rounds, that the built_in type is the same
type, or a compatible type. */
if (type != t && !types_compatible_p (type, t))
RETURN_FROM_IMM_USE_STMT (use_iter, false);
return false;
}
if (seen_cos + seen_sin + seen_cexpi <= 1)
return false;

View File

@ -119,7 +119,7 @@ phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
&& !dominated_by_p (CDI_DOMINATORS, gimple_bb (use_stmt), bb))
{
ok = false;
BREAK_FROM_IMM_USE_STMT (ui2);
break;
}
}

View File

@ -607,7 +607,7 @@ release_defs_bitset (bitmap toremove)
}
if (!remove_now)
BREAK_FROM_IMM_USE_STMT (uit);
break;
}
if (remove_now)

View File

@ -3857,7 +3857,7 @@ vect_bb_slp_mark_live_stmts (bb_vec_info bb_vinfo, slp_tree node,
mark_visited = false;
else
STMT_VINFO_LIVE_P (stmt_info) = false;
BREAK_FROM_IMM_USE_STMT (use_iter);
break;
}
}
/* We have to verify whether we can insert the lane extract
@ -4124,7 +4124,7 @@ vect_bb_slp_scalar_cost (vec_info *vinfo,
(vect_stmt_to_vectorize (use_stmt_info)))
{
(*life)[i] = true;
BREAK_FROM_IMM_USE_STMT (use_iter);
break;
}
}
}