modref: Comment spelling fixes

This fixes some spelling mistakes in ipa-modref*.

2022-03-02  Jakub Jelinek  <jakub@redhat.com>

	* ipa-modref-tree.cc (modref_access_node::contains,
	modref_access_node::closer_pair_p, modref_access_node::insert,
	modref_access_node::insert_kill): Comment spelling fixes.
	* ipa-modref.cc: Likewise.
	(modref_summary::finalize, ignore_nondeterminism_p,
	class modref_access_analysis,
	modref_access_analysis::set_side_effects,
	modref_access_analysis::set_nondeterministic,
	modref_access_analysis::record_global_memory_load,
	modref_access_analysis::propagate, modref_access_analysis::analyze,
	struct escape_point, class modref_lattice, modref_lattice::merge,
	modref_lattice::merge_deref, class modref_eaf_analysis,
	modref_eaf_analysis::merge_call_lhs_flags,
	modref_eaf_analysis::analyze_ssa_name, modref_eaf_analysis::propagate,
	modref_eaf_analysis::record_escape_points, remap_kills,
	update_escape_summary, remove_useless_summaries,
	ipa_merge_modref_summary_after_inlining, pass_ipa_modref::execute):
	Likewise.
	* ipa-modref.h (struct modref_summary, interposable_eaf_flags):
	Likewise.
	* ipa-modref-tree.h (enum modref_special_parms,
	struct modref_access_node): Likewise.
Jakub Jelinek 2022-03-02 10:25:25 +01:00
parent 2f1fa70db5
commit 02c808938e
4 changed files with 54 additions and 54 deletions

diff --git a/gcc/ipa-modref-tree.cc b/gcc/ipa-modref-tree.cc

@@ -71,13 +71,13 @@ modref_access_node::contains (const modref_access_node &a) const
 /* Accesses are never below parm_offset, so look
 for smaller offset.
 If access ranges are known still allow merging
-when bit offsets comparsion passes. */
+when bit offsets comparison passes. */
 if (!known_le (parm_offset, a.parm_offset)
 && !range_info_useful_p ())
 return false;
 /* We allow negative aoffset_adj here in case
 there is an useful range. This is because adding
-a.offset may result in non-ngative offset again.
+a.offset may result in non-negative offset again.
 Ubsan fails on val << LOG_BITS_PER_UNIT where val
 is negative. */
 aoffset_adj = (a.parm_offset - parm_offset)
@@ -89,7 +89,7 @@ modref_access_node::contains (const modref_access_node &a) const
 if (!a.range_info_useful_p ())
 return false;
 /* Sizes of stores are used to check that object is big enough
-to fit the store, so smaller or unknown sotre is more general
+to fit the store, so smaller or unknown store is more general
 than large store. */
 if (known_size_p (size)
 && (!known_size_p (a.size)
@@ -266,7 +266,7 @@ modref_access_node::closer_pair_p (const modref_access_node &a1,
 gcc_unreachable ();
-/* Now compute distnace of the intervals. */
+/* Now compute distance of the intervals. */
 poly_int64 dist1, dist2;
 if (known_le (offseta1, offsetb1))
 {
@@ -509,7 +509,7 @@ modref_access_node::stream_in (struct lto_input_block *ib)
 If RECORD_ADJUSTMENTs is true avoid too many interval extensions.
 Return true if record was changed.
-Reutrn 0 if nothing changed, 1 if insert was successful and -1
+Return 0 if nothing changed, 1 if insert was successful and -1
 if entries should be collapsed. */
 int
 modref_access_node::insert (vec <modref_access_node, va_gc> *&accesses,
@@ -800,7 +800,7 @@ modref_access_node::insert_kill (vec<modref_access_node> &kills,
 gcc_checking_assert (a.useful_for_kill_p ());
 /* See if we have corresponding entry already or we can merge with
-neighbouring entry. */
+neighboring entry. */
 FOR_EACH_VEC_ELT (kills, index, a2)
 {
 if (a2->contains_for_kills (a))
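
The comment fixed in the @@ -509,7 hunk documents a tri-state contract for modref_access_node::insert: 0 when nothing changed, 1 on a successful insert, and -1 when the accesses vector should be collapsed. A minimal standalone sketch of that contract, using an invented plain-interval record rather than GCC's real modref_access_node:

#include <cstddef>
#include <vector>

/* Invented stand-in for modref_access_node; the real type tracks a
   parameter index, offsets and sizes rather than one plain interval.  */
struct interval { long lo, hi; };

/* Return 0 if nothing changed, 1 if insert was successful and -1 if
   the caller should collapse the entries into one catch-all record.  */
int
insert_interval (std::vector<interval> &v, interval a, std::size_t max_entries)
{
  for (const interval &b : v)
    if (b.lo <= a.lo && a.hi <= b.hi)
      return 0;   /* Already covered: nothing changed.  */
  if (v.size () >= max_entries)
    return -1;    /* Too many entries: signal collapse.  */
  v.push_back (a);
  return 1;       /* Inserted successfully.  */
}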

diff --git a/gcc/ipa-modref-tree.h b/gcc/ipa-modref-tree.h

@@ -50,8 +50,8 @@ enum modref_special_parms {
 MODREF_RETSLOT_PARM = -3,
 /* Used for bases that points to memory that escapes from function. */
 MODREF_GLOBAL_MEMORY_PARM = -4,
-/* Used in modref_parm_map to tak references which can be removed
-from the summary during summary update since they now points to loca
+/* Used in modref_parm_map to take references which can be removed
+from the summary during summary update since they now points to local
 memory. */
 MODREF_LOCAL_MEMORY_PARM = -5
 };
@@ -101,7 +101,7 @@ struct GTY(()) modref_access_node
 bool range_info_useful_p () const;
 /* Return tree corresponding to parameter of the range in STMT. */
 tree get_call_arg (const gcall *stmt) const;
-/* Build ao_ref corresponding to the access and return true if succesful. */
+/* Build ao_ref corresponding to the access and return true if successful. */
 bool get_ao_ref (const gcall *stmt, class ao_ref *ref) const;
 /* Stream access to OB. */
 void stream_out (struct output_block *ob) const;
@@ -109,7 +109,7 @@ struct GTY(()) modref_access_node
 static modref_access_node stream_in (struct lto_input_block *ib);
 /* Insert A into vector ACCESSES. Limit size of vector to MAX_ACCESSES and
 if RECORD_ADJUSTMENT is true keep track of adjustment counts.
-Return 0 if nothing changed, 1 is insertion suceeded and -1 if failed. */
+Return 0 if nothing changed, 1 is insertion succeeded and -1 if failed. */
 static int insert (vec <modref_access_node, va_gc> *&accesses,
 modref_access_node a, size_t max_accesses,
 bool record_adjustments);
@@ -173,7 +173,7 @@ struct GTY((user)) modref_ref_node
 if (every_access)
 return false;
-/* Only the following kind of paramters needs to be tracked.
+/* Only the following kind of parameters needs to be tracked.
 We do not track return slots because they are seen as a direct store
 in the caller. */
 gcc_checking_assert (a.parm_index >= 0
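
The enum in the first hunk above reserves negative parm_index values as sentinels for special bases, while non-negative values name real parameters. A toy illustration of dispatching on that encoding (the function and strings are invented; only the -3/-4/-5 values mirror the enum shown):

/* Toy dispatcher over the sentinel encoding: non-negative values name a
   real parameter, reserved negatives name special bases.  */
const char *
describe_parm_index (int parm_index)
{
  if (parm_index >= 0)
    return "ordinary function parameter";
  switch (parm_index)
    {
    case -3: return "return slot (MODREF_RETSLOT_PARM)";
    case -4: return "escaped global memory (MODREF_GLOBAL_MEMORY_PARM)";
    case -5: return "local memory (MODREF_LOCAL_MEMORY_PARM)";
    default: return "other special base";
    }
}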

diff --git a/gcc/ipa-modref.cc b/gcc/ipa-modref.cc

@@ -37,7 +37,7 @@ along with GCC; see the file COPYING3. If not see
 The following information is computed
 1) load/store access tree described in ipa-modref-tree.h
 This is used by tree-ssa-alias to disambiguate load/stores
-2) EAF flags used by points-to analysis (in tree-ssa-structlias).
+2) EAF flags used by points-to analysis (in tree-ssa-structalias).
 and defined in tree-core.h.
 and stored to optimization_summaries.
@@ -50,7 +50,7 @@ along with GCC; see the file COPYING3. If not see
 necessary because gimple_call_fnspec performs additional
 analysis except for looking callee fndecl.
 - escape_summary holds escape points for given call edge.
-That is a vector recording what function parmaeters
+That is a vector recording what function parameters
 may escape to a function call (and with what parameter index). */
 #include "config.h"
@@ -680,7 +680,7 @@ modref_summary::finalize (tree fun)
 global_memory_written = !stores || stores->global_access_p ();
 /* We can do DSE if we know function has no side effects and
-we can analyse all stores. Disable dse if there are too many
+we can analyze all stores. Disable dse if there are too many
 stores to try. */
 if (side_effects || global_memory_written || writes_errno)
 try_dse = false;
@@ -788,7 +788,7 @@ get_modref_function_summary (gcall *call, bool *interposed)
 namespace {
-/* Return true if ECF flags says that nondeterminsm can be ignored. */
+/* Return true if ECF flags says that nondeterminism can be ignored. */
 static bool
 ignore_nondeterminism_p (tree caller, int flags)
@@ -966,23 +966,23 @@ private:
 void propagate ();
 /* Summary being computed.
-We work eitehr with m_summary or m_summary_lto. Never on both. */
+We work either with m_summary or m_summary_lto. Never on both. */
 modref_summary *m_summary;
 modref_summary_lto *m_summary_lto;
-/* Recursive calls needs simplisitc dataflow after analysis finished.
+/* Recursive calls needs simplistic dataflow after analysis finished.
 Collect all calls into this vector during analysis and later process
 them in propagate. */
 auto_vec <gimple *, 32> m_recursive_calls;
-/* ECF flags of function being analysed. */
+/* ECF flags of function being analyzed. */
 int m_ecf_flags;
 /* True if IPA propagation will be done later. */
 bool m_ipa;
-/* Set true if statement currently analysed is known to be
+/* Set true if statement currently analyzed is known to be
 executed each time function is called. */
 bool m_always_executed;
 };
-/* Set side_effects flag and return if someting changed. */
+/* Set side_effects flag and return if something changed. */
 bool
 modref_access_analysis::set_side_effects ()
@@ -1002,7 +1002,7 @@ modref_access_analysis::set_side_effects ()
 return changed;
 }
-/* Set nondeterministic flag and return if someting changed. */
+/* Set nondeterministic flag and return if something changed. */
 bool
 modref_access_analysis::set_nondeterministic ()
@@ -1211,7 +1211,7 @@ modref_access_analysis::record_unknown_store ()
 return changed;
 }
-/* Record unknown load from gloal memory. */
+/* Record unknown load from global memory. */
 bool
 modref_access_analysis::record_global_memory_load ()
@@ -1228,7 +1228,7 @@ modref_access_analysis::record_global_memory_load ()
 return changed;
 }
-/* Record unknown store from gloal memory. */
+/* Record unknown store from global memory. */
 bool
 modref_access_analysis::record_global_memory_store ()
@@ -1838,7 +1838,7 @@ modref_access_analysis::analyze_stmt (gimple *stmt, bool always_executed)
 }
 }
-/* Propagate load/stres acress recursive calls. */
+/* Propagate load/stores across recursive calls. */
 void
 modref_access_analysis::propagate ()
@@ -1885,7 +1885,7 @@ modref_access_analysis::analyze ()
 !gsi_end_p (si); gsi_next_nondebug (&si))
 {
 /* NULL memory accesses terminates BB. These accesses are known
-to trip undefined behaviour. gimple-ssa-isolate-paths turns them
+to trip undefined behavior. gimple-ssa-isolate-paths turns them
 to volatile accesses and adds builtin_trap call which would
 confuse us otherwise. */
 if (infer_nonnull_range_by_dereference (gsi_stmt (si),
@@ -1899,7 +1899,7 @@ modref_access_analysis::analyze ()
 }
 analyze_stmt (gsi_stmt (si), always_executed);
-/* Avoid doing useles work. */
+/* Avoid doing useless work. */
 if ((!m_summary || !m_summary->useful_p (m_ecf_flags, false))
 && (!m_summary_lto
 || !m_summary_lto->useful_p (m_ecf_flags, false)))
@@ -1914,7 +1914,7 @@ modref_access_analysis::analyze ()
 if (!summary_useful)
 break;
 }
-/* In non-IPA mode we need to perform iterative datafow on recursive calls.
+/* In non-IPA mode we need to perform iterative dataflow on recursive calls.
 This needs to be done after all other side effects are computed. */
 if (summary_useful)
 {
@@ -1990,13 +1990,13 @@ struct escape_point
 /* Argument it escapes to. */
 int arg;
 /* Flags already known about the argument (this can save us from recording
-esape points if local analysis did good job already). */
+escape points if local analysis did good job already). */
 eaf_flags_t min_flags;
-/* Does value escape directly or indiretly? */
+/* Does value escape directly or indirectly? */
 bool direct;
 };
-/* Lattice used during the eaf flags analsysis dataflow. For a given SSA name
+/* Lattice used during the eaf flags analysis dataflow. For a given SSA name
 we aim to compute its flags and escape points. We also use the lattice
 to dynamically build dataflow graph to propagate on. */
@@ -2019,7 +2019,7 @@ public:
 Only remember them and do the merging at IPA propagation time. */
 vec <escape_point, va_heap, vl_ptr> escape_points;
-/* Representation of a graph for dataaflow. This graph is built on-demand
+/* Representation of a graph for dataflow. This graph is built on-demand
 using modref_eaf_analysis::analyze_ssa and later solved by
 modref_eaf_analysis::propagate.
 Each edge represents the fact that flags of current lattice should be
@@ -2140,7 +2140,7 @@ modref_lattice::merge (int f)
 if ((flags & f) != flags)
 {
 flags &= f;
-/* Prune obvoiusly useless flags;
+/* Prune obviously useless flags;
 We do not have ECF_FLAGS handy which is not big problem since
 we will do final flags cleanup before producing summary.
 Merging should be fast so it can work well with dataflow. */
@@ -2152,7 +2152,7 @@ modref_lattice::merge (int f)
 return false;
 }
-/* Merge in WITH. Return true if anyting changed. */
+/* Merge in WITH. Return true if anything changed. */
 bool
 modref_lattice::merge (const modref_lattice &with)
@@ -2173,7 +2173,7 @@ modref_lattice::merge (const modref_lattice &with)
 }
 /* Merge in deref of WITH. If IGNORE_STORES is true do not consider
-stores. Return true if anyting changed. */
+stores. Return true if anything changed. */
 bool
 modref_lattice::merge_deref (const modref_lattice &with, bool ignore_stores)
@@ -2218,12 +2218,12 @@ modref_lattice::merge_direct_store ()
 }
 /* Analyzer of EAF flags.
-This is genrally dataflow problem over the SSA graph, however we only
+This is generally dataflow problem over the SSA graph, however we only
 care about flags of few selected ssa names (arguments, return slot and
 static chain). So we first call analyze_ssa_name on all relevant names
 and perform a DFS walk to discover SSA names where flags needs to be
 determined. For acyclic graphs we try to determine final flags during
-this walk. Once cycles or recursin depth is met we enlist SSA names
+this walk. Once cycles or recursion depth is met we enlist SSA names
 for dataflow which is done by propagate call.
 After propagation the flags can be obtained using get_ssa_name_flags. */
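
The analyzer description in the hunk above amounts to: seed lattices for the interesting SSA names, build the dataflow graph on demand, then iterate merges to a fixed point. A schematic driver in that spirit (every type and name here is invented for illustration; the real propagate walks the SSA graph in RPO):

#include <vector>

/* Invented node: a flag set plus edges to the nodes it merges from.  */
struct node { unsigned flags; std::vector<int> preds; };

/* Iterate merges until no lattice changes.  Because a merge can only
   clear bits, the loop is guaranteed to terminate.  */
void
propagate_flags (std::vector<node> &graph)
{
  bool changed = true;
  while (changed)
    {
      changed = false;
      for (node &n : graph)
        for (int p : n.preds)
          {
            unsigned merged = n.flags & graph[p].flags;
            if (merged != n.flags)
              {
                n.flags = merged;
                changed = true;
              }
          }
    }
}
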
@@ -2233,7 +2233,7 @@ class modref_eaf_analysis
 public:
 /* Mark NAME as relevant for analysis. */
 void analyze_ssa_name (tree name, bool deferred = false);
-/* Dataflow slover. */
+/* Dataflow solver. */
 void propagate ();
 /* Return flags computed earlier for NAME. */
 int get_ssa_name_flags (tree name)
@@ -2260,7 +2260,7 @@ public:
 m_lattice[i].release ();
 }
 private:
-/* If true, we produce analysis for IPA mode. In this case escape points ar
+/* If true, we produce analysis for IPA mode. In this case escape points are
 collected. */
 bool m_ipa;
 /* Depth of recursion of analyze_ssa_name. */
@@ -2276,7 +2276,7 @@ private:
 };
-/* Call statements may return tgeir parameters. Consider argument number
+/* Call statements may return their parameters. Consider argument number
 ARG of USE_STMT and determine flags that can needs to be cleared
 in case pointer possibly indirectly references from ARG I is returned.
 If DIRECT is true consider direct returns and if INDIRECT consider
@@ -2425,7 +2425,7 @@ modref_eaf_analysis::analyze_ssa_name (tree name, bool deferred)
 print_gimple_stmt (dump_file, use_stmt, 0);
 }
 /* If we see a direct non-debug use, clear unused bit.
-All dereferneces should be accounted below using deref_flags. */
+All dereferences should be accounted below using deref_flags. */
 m_lattice[index].merge (~EAF_UNUSED);
 /* Gimple return may load the return value.
@@ -2499,7 +2499,7 @@ modref_eaf_analysis::analyze_ssa_name (tree name, bool deferred)
 the callee's return slot is returned it means that
 arg is written to itself which is an escape.
 Since we do not track the memory it is written to we
-need to give up on analysisng it. */
+need to give up on analyzing it. */
 if (!isretslot)
 {
 if (!(call_flags & (EAF_NOT_RETURNED_DIRECTLY
@@ -2768,7 +2768,7 @@ modref_eaf_analysis::propagate ()
 rpo.safe_grow (m_names_to_propagate.length (), true);
 stack.reserve_exact (m_names_to_propagate.length ());
-/* We reuse known flag for RPO DFS walk bookeeping. */
+/* We reuse known flag for RPO DFS walk bookkeeping. */
 if (flag_checking)
 FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)
 gcc_assert (!m_lattice[index].known && m_lattice[index].changed);
@@ -2813,7 +2813,7 @@ modref_eaf_analysis::propagate ()
 }
 }
-/* Perform itrative dataflow. */
+/* Perform iterative dataflow. */
 while (changed)
 {
 changed = false;
@@ -2890,9 +2890,9 @@ modref_eaf_analysis::record_escape_points (tree name, int parm_index, int flags)
 /* Determine EAF flags for function parameters
 and fill in SUMMARY/SUMMARY_LTO. If IPA is true work in IPA mode
-where we also collect scape points.
+where we also collect escape points.
 PAST_FLAGS, PAST_RETSLOT_FLAGS, PAST_STATIC_CHAIN_FLAGS can be
-used to preserve flags from prevoius (IPA) run for cases where
+used to preserve flags from previous (IPA) run for cases where
 late optimizations changed code in a way we can no longer analyze
 it easily. */
@@ -4043,7 +4043,7 @@ remap_arg_flags (auto_vec <eaf_flags_t> &arg_flags, clone_info *info)
 }
 }
-/* Update kills accrdoing to the parm map MAP. */
+/* Update kills according to the parm map MAP. */
 static void
 remap_kills (vec <modref_access_node> &kills, const vec <int> &map)
@@ -4359,7 +4359,7 @@ update_escape_summary_1 (cgraph_edge *e,
 escape_summaries->remove (e);
 }
-/* Update escape map fo NODE. */
+/* Update escape map for NODE. */
 static void
 update_escape_summary (cgraph_node *node,
@@ -4632,7 +4632,7 @@ propagate_unknown_call (cgraph_node *node,
 return changed;
 }
-/* Maybe remove summaies of NODE pointed to by CUR_SUMMARY_PTR
+/* Maybe remove summaries of NODE pointed to by CUR_SUMMARY_PTR
 and CUR_SUMMARY_LTO_PTR if they are useless according to ECF_FLAGS. */
 static void
@@ -5311,8 +5311,8 @@ ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
 }
 /* Now merge escape summaries.
-For every escape to the callee we need to merge calle flags
-and remap calees escapes. */
+For every escape to the callee we need to merge callee flags
+and remap callee's escapes. */
 class escape_summary *sum = escape_summaries->get (edge);
 int max_escape = -1;
 escape_entry *ee;
@@ -5482,7 +5482,7 @@ pass_ipa_modref::execute (function *)
 delete escape_summaries;
 escape_summaries = NULL;
-/* If we posibly made constructors const/pure we may need to remove
+/* If we possibly made constructors const/pure we may need to remove
 them. */
 return pureconst ? TODO_remove_functions : 0;
 }
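
The context of the @@ -2140,7 hunk above shows the monotone merge at the heart of this dataflow: flags &= f, reporting whether anything changed. A self-contained sketch of the same pattern (the type width and names are placeholders, not GCC's):

typedef unsigned short eaf_flags_t;  /* Placeholder width, not GCC's.  */

struct toy_lattice
{
  eaf_flags_t flags;

  /* Drop every bit not present in F; return true if anything changed,
     which is what drives another round of propagation.  */
  bool merge (eaf_flags_t f)
  {
    if ((flags & f) != flags)
      {
        flags &= f;
        return true;
      }
    return false;
  }
};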

diff --git a/gcc/ipa-modref.h b/gcc/ipa-modref.h

@@ -51,7 +51,7 @@ struct GTY(()) modref_summary
 it is still useful for CSE. */
 unsigned calls_interposable : 1;
-/* Flags coputed by finalize method. */
+/* Flags computed by finalize method. */
 /* Total number of accesses in loads tree. */
 unsigned int load_accesses;
@@ -101,7 +101,7 @@ static const int implicit_retslot_eaf_flags
 | EAF_NOT_RETURNED_INDIRECTLY;
 /* If function does not bind to current def (i.e. it is inline in comdat
-section), the modref analysis may not match the behaviour of function
+section), the modref analysis may not match the behavior of function
 which will be later symbol interposed to. All side effects must match
 however it is possible that the other function body contains more loads
 which may trap.
@@ -120,7 +120,7 @@ interposable_eaf_flags (int modref_flags, int flags)
 | EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY
 | EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER;
 }
-/* We can not deterine that value is not read at all. */
+/* We can not determine that value is not read at all. */
 if ((modref_flags & EAF_NO_DIRECT_READ) && !(flags & EAF_NO_DIRECT_READ))
 modref_flags &= ~EAF_NO_DIRECT_READ;
 if ((modref_flags & EAF_NO_INDIRECT_READ) && !(flags & EAF_NO_INDIRECT_READ))
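
The final hunk's context shows the masking pattern interposable_eaf_flags applies: a flag derived from the analyzed body survives only if the declaration-level FLAGS also guarantee it, since the body actually executed after symbol interposition may behave differently. A minimal sketch with invented flag constants:

/* Invented stand-ins for EAF_NO_DIRECT_READ / EAF_NO_INDIRECT_READ.  */
enum
{
  NO_DIRECT_READ = 1 << 0,
  NO_INDIRECT_READ = 1 << 1
};

/* Keep a flag computed from the analyzed body only when FLAGS, known
   from the declaration, also guarantee it; an interposing body might
   read the value even though the analyzed one does not.  */
int
interposable_flags_sketch (int modref_flags, int flags)
{
  if ((modref_flags & NO_DIRECT_READ) && !(flags & NO_DIRECT_READ))
    modref_flags &= ~NO_DIRECT_READ;
  if ((modref_flags & NO_INDIRECT_READ) && !(flags & NO_INDIRECT_READ))
    modref_flags &= ~NO_INDIRECT_READ;
  return modref_flags;
}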