Commonize ipa-pta constraint generation for calls

Commonize the three paths to produce constraints for function call
and make it more flexible, so we can implement new features more easily.  The main
idea is to not special case pure and const since we can now describe all of
pure/const via their EAF flags (implicit_const_eaf_flags and
implicit_pure_eaf_flags) and info on existence of global memory loads/stores in
function which is readily available in the modref tree.

While rewriting the function, I dropped some of the optimizations in the way we
generate constraints. Some of them we may want to add back, but I think the
constraint solver should be fast to get rid of them quickly, so it looks like
a bit of premature optimization.

We now always produce one additional PTA variable (callescape) for things that
escape into function call and thus can be stored to parameters or global memory
(if modified). This is no longer the same as global escape in case function is
not reading global memory. It is also not same as call use, since we now
understand the fact that interposable functions may use parameter in a way that
is not relevant for PTA (so we cannot optimize out stores initializing the
memory, but we can be safe about the fact that the pointers stored do not escape).

Compared to previous code we now handle correctly EAF_NOT_RETURNED in all cases
(previously we did so only when all parameters had the flag) and also handle
NOCLOBBER in more cases (since we make difference between global escape and
call escape). Because I commonized code handling args and static chains, we
could now easily extend modref to also track flags for static chain and return
slot which I plan to do next.

Otherwise I put some effort into producing constraints that produce similar
solutions as before (so it is harder to debug differences). For example if
global memory is written one can simply move callescape to escape rather than
making everything escape by its own constraints, but it affects ipa-pta
testcases.

gcc/ChangeLog:

	* ipa-modref-tree.h (modref_tree::global_access_p): New member
	function.
	* ipa-modref.c:
	(implicit_const_eaf_flags, implicit_pure_eaf_flags,
	ignore_stores_eaf_flags): Move to ipa-modref.h
	(remove_useless_eaf_flags): Remove early exit on NOCLOBBER.
	(modref_summary::global_memory_read_p): New member function.
	(modref_summary::global_memory_written_p): New member function.
	* ipa-modref.h (modref_summary::global_memory_read_p,
	modref_summary::global_memory_written_p): Declare.
	(implicit_const_eaf_flags, implicit_pure_eaf_flags,
	ignore_stores_eaf_flags): Move here.
	* tree-ssa-structalias.c: Include ipa-modref-tree.h, ipa-modref.h
	and attr-fnspec.h.
	(handle_rhs_call): Rewrite.
	(handle_call_arg): New function.
	(determine_global_memory_access): New function.
	(handle_const_call): Remove
	(handle_pure_call): Remove
	(find_func_aliases_for_call): Update use of handle_rhs_call.
	(compute_points_to_sets): Handle global memory accesses
	selectively.

gcc/testsuite/ChangeLog:

	* gcc.dg/torture/ssa-pta-fn-1.c: Fix template; add noipa.
	* gcc.dg/tree-ssa/pta-callused.c: Fix template.
This commit is contained in:
Jan Hubicka 2021-10-11 18:43:26 +02:00
parent 0de8c2f810
commit 008e7397da
6 changed files with 323 additions and 301 deletions

View File

@ -148,7 +148,8 @@ struct GTY(()) modref_access_node
poly_int64 offset1, poly_int64 size1, poly_int64 max_size1,
bool record_adjustments)
{
if (known_eq (offset, offset1)
if (known_eq (parm_offset, parm_offset1)
&& known_eq (offset, offset1)
&& known_eq (size, size1)
&& known_eq (max_size, max_size1))
return;
@ -577,6 +578,10 @@ struct GTY((user)) modref_ref_node
}
(*accesses)[best1].forced_merge (best2 < 0 ? a : (*accesses)[best2],
record_adjustments);
/* Check that merging indeed merged ranges. */
gcc_checking_assert ((*accesses)[best1].contains (best2 < 0 ? a : (*accesses)[best2]));
/*if (best2 >= 0)
accesses->unordered_remove (best2);*/
if (!(*accesses)[best1].useful_p ())
{
collapse ();
@ -1012,6 +1017,31 @@ struct GTY((user)) modref_tree
return NULL;
}
/* Return true if tree contains access to global memory.  That is the
   case when the tree collapsed at any level (every_base, every_ref or
   every_access is set) or when some recorded access is not tied to a
   function parameter (parm_index < 0).  */
bool global_access_p ()
{
size_t i, j, k;
modref_base_node <T> *base_node;
modref_ref_node <T> *ref_node;
modref_access_node *access_node;
/* A fully collapsed tree stands for "may access anything", which
   includes global memory.  */
if (every_base)
return true;
FOR_EACH_VEC_SAFE_ELT (bases, i, base_node)
{
/* A collapsed set of refs within this base covers globals, too.  */
if (base_node->every_ref)
return true;
FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
{
if (ref_node->every_access)
return true;
/* Accesses with negative parm_index are not based on a function
   parameter and thus count as global memory accesses.  */
FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
if (access_node->parm_index < 0)
return true;
}
}
return false;
}
/* Return ggc allocated instance. We explicitly call destructors via
ggc_delete and do not want finalizers to be registered and
called at the garbage collection time. */

View File

@ -85,6 +85,7 @@ along with GCC; see the file COPYING3. If not see
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "attribs.h"
namespace {
@ -280,17 +281,6 @@ modref_summary::~modref_summary ()
ggc_delete (stores);
}
/* All flags that are implied by the ECF_CONST functions. */
const int implicit_const_eaf_flags = EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE
| EAF_NODIRECTESCAPE | EAF_NOREAD;
/* All flags that are implied by the ECF_PURE function. */
const int implicit_pure_eaf_flags = EAF_NOCLOBBER | EAF_NOESCAPE
| EAF_NODIRECTESCAPE;
/* All flags implied when we know we can ignore stores (i.e. when handling
call to noreturn). */
const int ignore_stores_eaf_flags = EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE
| EAF_NODIRECTESCAPE;
/* Remove all flags from EAF_FLAGS that are implied by ECF_FLAGS and not
useful to track. If returns_void is true moreover clear
EAF_NOT_RETURNED. */
@ -305,10 +295,6 @@ remove_useless_eaf_flags (int eaf_flags, int ecf_flags, bool returns_void)
eaf_flags &= ~implicit_pure_eaf_flags;
else if ((ecf_flags & ECF_NORETURN) || returns_void)
eaf_flags &= ~EAF_NOT_RETURNED;
/* Only NOCLOBBER or DIRECT flags alone are not useful (see comments
in tree-ssa-alias.c). Give up earlier. */
if ((eaf_flags & ~(EAF_DIRECT | EAF_NOCLOBBER)) == 0)
return 0;
return eaf_flags;
}
@ -345,6 +331,26 @@ modref_summary::useful_p (int ecf_flags, bool check_flags)
return stores && !stores->every_base;
}
/* Return true if the function may read global memory, that is when no
   loads summary was computed or the loads summary contains a global
   memory access.  */
bool
modref_summary::global_memory_read_p ()
{
  /* A missing summary gives us no information, so conservatively
     assume global memory is read.  */
  return !loads || loads->global_access_p ();
}
/* Return true if the function may write global memory, that is when no
   stores summary was computed or the stores summary contains a global
   memory access.  */
bool
modref_summary::global_memory_written_p ()
{
  /* A missing summary gives us no information, so conservatively
     assume global memory is written.  */
  return !stores || stores->global_access_p ();
}
/* Single function summary used for LTO. */
typedef modref_tree <tree> modref_records_lto;
@ -2016,7 +2022,8 @@ analyze_function (function *f, bool ipa)
DECL_PURE_P (current_function_decl) ? " (pure)" : "");
/* Don't analyze this function if it's compiled with -fno-strict-aliasing. */
if (!flag_ipa_modref)
if (!flag_ipa_modref
|| lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl)))
return;
/* Compute no-LTO summaries when local optimization is going to happen. */

View File

@ -37,10 +37,23 @@ struct GTY(()) modref_summary
~modref_summary ();
void dump (FILE *);
bool useful_p (int ecf_flags, bool check_flags = true);
bool global_memory_read_p ();
bool global_memory_written_p ();
};
modref_summary *get_modref_function_summary (cgraph_node *func);
void ipa_modref_c_finalize ();
void ipa_merge_modref_summary_after_inlining (cgraph_edge *e);
/* All flags that are implied by the ECF_CONST functions. */
static const int implicit_const_eaf_flags = EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE
| EAF_NODIRECTESCAPE | EAF_NOREAD;
/* All flags that are implied by the ECF_PURE function. */
static const int implicit_pure_eaf_flags = EAF_NOCLOBBER | EAF_NOESCAPE
| EAF_NODIRECTESCAPE;
/* All flags implied when we know we can ignore stores (i.e. when handling
call to noreturn). */
static const int ignore_stores_eaf_flags = EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE
| EAF_NODIRECTESCAPE;
#endif

View File

@ -6,13 +6,13 @@ extern void abort (void);
int *g;
int dummy;
int * __attribute__((noinline,const))
int * __attribute__((noinline,const,noipa))
foo_const(int *p) { return p; }
int * __attribute__((noinline,pure))
int * __attribute__((noinline,pure,noipa))
foo_pure(int *p) { return p + dummy; }
int * __attribute__((noinline))
int * __attribute__((noinline,noipa))
foo_normal(int *p) { g = p; return p; }
void test_const(void)
@ -58,4 +58,4 @@ int main()
/* { dg-final { scan-tree-dump "q_const_. = { NONLOCAL i }" "alias" } } */
/* { dg-final { scan-tree-dump "q_pure_. = { ESCAPED NONLOCAL i }" "alias" } } */
/* { dg-final { scan-tree-dump "q_normal_. = { ESCAPED NONLOCAL }" "alias" } } */
/* { dg-final { scan-tree-dump "q_normal_. = { ESCAPED NONLOCAL i }" "alias" } } */

View File

@ -22,5 +22,5 @@ int bar (int b)
return *foo (&q);
}
/* { dg-final { scan-tree-dump "CALLUSED\\(\[0-9\]+\\) = { f.* i q }" "alias" } } */
/* { dg-final { scan-tree-dump "CALLUSED\\(\[0-9\]+\\) = { NONLOCAL f.* i q }" "alias" } } */

View File

@ -44,6 +44,9 @@
#include "tree-ssa.h"
#include "tree-cfg.h"
#include "gimple-range.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "attr-fnspec.h"
/* The idea behind this analyzer is to generate set constraints from the
program, then solve the resulting constraints in order to generate the
@ -4048,98 +4051,203 @@ get_function_part_constraint (varinfo_t fi, unsigned part)
return c;
}
/* For non-IPA mode, generate constraints necessary for a call on the
RHS. */
/* Produce constraints for argument ARG of call STMT with eaf flags
FLAGS. RESULTS is an array holding constraints for the return value.
CALLESCAPE_ID is the variable where call local escapes are added.
WRITES_GLOBAL_MEMORY is true if the callee may write global memory. */
static void
handle_rhs_call (gcall *stmt, vec<ce_s> *results)
handle_call_arg (gcall *stmt, tree arg, vec<ce_s> *results, int flags,
int callescape_id, bool writes_global_memory)
{
struct constraint_expr rhsc;
unsigned i;
bool returns_uses = false;
/* If the argument is not used we can ignore it.
Similarly the argument is invisible to us if it is not clobbered, does not
escape, is not read and can not be returned. */
if ((flags & EAF_UNUSED)
|| ((flags & (EAF_NOCLOBBER | EAF_NOESCAPE | EAF_NOREAD
| EAF_NOT_RETURNED))
== (EAF_NOCLOBBER | EAF_NOESCAPE | EAF_NOREAD
| EAF_NOT_RETURNED)))
return;
for (i = 0; i < gimple_call_num_args (stmt); ++i)
varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
tem->is_reg_var = true;
make_constraint_to (tem->id, arg);
make_any_offset_constraints (tem);
if (!(flags & EAF_DIRECT))
make_transitive_closure_constraints (tem);
if (!(flags & EAF_NOT_RETURNED))
{
struct constraint_expr cexpr;
cexpr.var = tem->id;
cexpr.type = SCALAR;
cexpr.offset = 0;
results->safe_push (cexpr);
}
if (!(flags & EAF_NOREAD))
{
varinfo_t uses = get_call_use_vi (stmt);
make_copy_constraint (uses, tem->id);
}
if (!(flags & EAF_NOCLOBBER))
{
struct constraint_expr lhs, rhs;
/* *arg = callescape. */
lhs.type = DEREF;
lhs.var = tem->id;
lhs.offset = 0;
rhs.type = SCALAR;
rhs.var = callescape_id;
rhs.offset = 0;
process_constraint (new_constraint (lhs, rhs));
/* callclobbered = arg. */
make_copy_constraint (get_call_clobber_vi (stmt), tem->id);
}
if (!(flags & (EAF_NOESCAPE | EAF_NODIRECTESCAPE)))
{
struct constraint_expr lhs, rhs;
/* callescape = arg; */
lhs.var = callescape_id;
lhs.offset = 0;
lhs.type = SCALAR;
rhs.var = tem->id;
rhs.offset = 0;
rhs.type = SCALAR;
process_constraint (new_constraint (lhs, rhs));
if (writes_global_memory)
make_escape_constraint (arg);
}
else if (!(flags & EAF_NOESCAPE))
{
struct constraint_expr lhs, rhs;
/* callescape = *(arg + UNKNOWN); */
lhs.var = callescape_id;
lhs.offset = 0;
lhs.type = SCALAR;
rhs.var = tem->id;
rhs.offset = UNKNOWN_OFFSET;
rhs.type = DEREF;
process_constraint (new_constraint (lhs, rhs));
if (writes_global_memory)
make_indirect_escape_constraint (tem);
}
}
/* Determine global memory access of call STMT and update
   WRITES_GLOBAL_MEMORY, READS_GLOBAL_MEMORY and USES_GLOBAL_MEMORY.
   Each pointer may be NULL when the caller is not interested in that
   property; the flags are only ever refined from true to false.  */
static void
determine_global_memory_access (gcall *stmt,
bool *writes_global_memory,
bool *reads_global_memory,
bool *uses_global_memory)
{
tree callee;
cgraph_node *node;
modref_summary *summary;
/* We need to determine reads to set uses. */
gcc_assert (!uses_global_memory || reads_global_memory);
/* First consult the modref summary of the callee, if one exists.  */
if ((callee = gimple_call_fndecl (stmt)) != NULL_TREE
&& (node = cgraph_node::get (callee)) != NULL
&& (summary = get_modref_function_summary (node)))
{
if (writes_global_memory && *writes_global_memory)
*writes_global_memory = summary->global_memory_written_p ();
if (reads_global_memory && *reads_global_memory)
*reads_global_memory = summary->global_memory_read_p ();
/* Uses may only be dropped when the summary is known to describe
   the definition that will actually be called (no interposition).  */
if (reads_global_memory && uses_global_memory
&& !*reads_global_memory && node->binds_to_current_def_p ())
*uses_global_memory = false;
}
/* Second, try to refine any flags still set from the fnspec string.  */
if ((writes_global_memory && *writes_global_memory)
|| (uses_global_memory && *uses_global_memory)
|| (reads_global_memory && *reads_global_memory))
{
attr_fnspec fnspec = gimple_call_fnspec (stmt);
if (fnspec.known_p ())
{
if (writes_global_memory
&& !fnspec.global_memory_written_p ())
*writes_global_memory = false;
if (reads_global_memory && !fnspec.global_memory_read_p ())
{
*reads_global_memory = false;
/* Not reading global memory implies not using it either.  */
if (uses_global_memory)
*uses_global_memory = false;
}
}
}
}
/* For non-IPA mode, generate constraints necessary for a call on the
RHS and collect return value constraint to RESULTS to be used later in
handle_lhs_call.
IMPLICIT_EAF_FLAGS are added to each function argument. If
WRITES_GLOBAL_MEMORY is true function is assumed to possibly write to global
memory. Similar for READS_GLOBAL_MEMORY. */
static void
handle_rhs_call (gcall *stmt, vec<ce_s> *results,
int implicit_eaf_flags,
bool writes_global_memory,
bool reads_global_memory)
{
determine_global_memory_access (stmt, &writes_global_memory,
&reads_global_memory,
NULL);
varinfo_t callescape = new_var_info (NULL_TREE, "callescape", true);
/* If function can use global memory, add it to callescape
and to possible return values. If not we can still use/return addresses
of global symbols. */
struct constraint_expr lhs, rhs;
lhs.type = SCALAR;
lhs.var = callescape->id;
lhs.offset = 0;
rhs.type = reads_global_memory ? SCALAR : ADDRESSOF;
rhs.var = nonlocal_id;
rhs.offset = 0;
process_constraint (new_constraint (lhs, rhs));
results->safe_push (rhs);
varinfo_t uses = get_call_use_vi (stmt);
make_copy_constraint (uses, callescape->id);
for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
{
tree arg = gimple_call_arg (stmt, i);
int flags = gimple_call_arg_flags (stmt, i);
/* If the argument is not used we can ignore it.
Similarly the argument is invisible to us if it is not clobbered, does not
escape, is not read and can not be returned. */
if ((flags & EAF_UNUSED)
|| ((flags & (EAF_NOCLOBBER | EAF_NOESCAPE | EAF_NOREAD
| EAF_NOT_RETURNED))
== (EAF_NOCLOBBER | EAF_NOESCAPE | EAF_NOREAD
| EAF_NOT_RETURNED)))
continue;
/* As we compute ESCAPED context-insensitive we do not gain
any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
set. The argument would still get clobbered through the
escape solution. */
if ((flags & EAF_NOCLOBBER)
&& (flags & (EAF_NOESCAPE | EAF_NODIRECTESCAPE)))
{
varinfo_t uses = get_call_use_vi (stmt);
varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
tem->is_reg_var = true;
make_constraint_to (tem->id, arg);
make_any_offset_constraints (tem);
if (!(flags & EAF_DIRECT))
make_transitive_closure_constraints (tem);
make_copy_constraint (uses, tem->id);
/* TODO: This is overly conservative when some parameters are
returned while others are not. */
if (!(flags & EAF_NOT_RETURNED))
returns_uses = true;
if (!(flags & (EAF_NOESCAPE | EAF_DIRECT)))
make_indirect_escape_constraint (tem);
}
else if (flags & (EAF_NOESCAPE | EAF_NODIRECTESCAPE))
{
struct constraint_expr lhs, rhs;
varinfo_t uses = get_call_use_vi (stmt);
varinfo_t clobbers = get_call_clobber_vi (stmt);
varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
tem->is_reg_var = true;
make_constraint_to (tem->id, arg);
make_any_offset_constraints (tem);
if (!(flags & EAF_DIRECT))
make_transitive_closure_constraints (tem);
make_copy_constraint (uses, tem->id);
if (!(flags & EAF_NOT_RETURNED))
returns_uses = true;
make_copy_constraint (clobbers, tem->id);
/* Add *tem = nonlocal, do not add *tem = callused as
EAF_NOESCAPE parameters do not escape to other parameters
and all other uses appear in NONLOCAL as well. */
lhs.type = DEREF;
lhs.var = tem->id;
lhs.offset = 0;
rhs.type = SCALAR;
rhs.var = nonlocal_id;
rhs.offset = 0;
process_constraint (new_constraint (lhs, rhs));
if (!(flags & (EAF_NOESCAPE | EAF_DIRECT)))
make_indirect_escape_constraint (tem);
}
else
make_escape_constraint (arg);
}
/* If we added to the calls uses solution make sure we account for
pointers to it to be returned. */
if (returns_uses)
{
rhsc.var = get_call_use_vi (stmt)->id;
rhsc.offset = UNKNOWN_OFFSET;
rhsc.type = SCALAR;
results->safe_push (rhsc);
handle_call_arg (stmt, arg, results,
flags | implicit_eaf_flags,
callescape->id, writes_global_memory);
}
/* The static chain escapes as well. */
if (gimple_call_chain (stmt))
make_escape_constraint (gimple_call_chain (stmt));
handle_call_arg (stmt, gimple_call_chain (stmt), results,
implicit_eaf_flags,
callescape->id, writes_global_memory);
/* And if we applied NRV the address of the return slot escapes as well. */
if (gimple_call_return_slot_opt_p (stmt)
@ -4147,20 +4255,17 @@ handle_rhs_call (gcall *stmt, vec<ce_s> *results)
&& TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
{
auto_vec<ce_s> tmpc;
struct constraint_expr lhsc, *c;
get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
lhsc.var = escaped_id;
lhsc.offset = 0;
lhsc.type = SCALAR;
FOR_EACH_VEC_ELT (tmpc, i, c)
process_constraint (new_constraint (lhsc, *c));
}
struct constraint_expr *c;
unsigned i;
/* Regular functions return nonlocal memory. */
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = SCALAR;
results->safe_push (rhsc);
get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
make_constraints_to (callescape->id, tmpc);
if (writes_global_memory)
make_constraints_to (escaped_id, tmpc);
FOR_EACH_VEC_ELT (tmpc, i, c)
results->safe_push (*c);
}
}
/* For non-IPA mode, generate constraints necessary for a call
@ -4227,160 +4332,6 @@ handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> &rhsc,
process_all_all_constraints (lhsc, rhsc);
}
/* For non-IPA mode, generate constraints necessary for a call of a
const function that returns a pointer in the statement STMT.
Return value constraints are appended to RESULTS.  Only uses (of the
static chain and the NRV return slot) and possibly returned arguments
are modeled; no clobber constraints are produced.  */
static void
handle_const_call (gcall *stmt, vec<ce_s> *results)
{
struct constraint_expr rhsc;
unsigned int k;
bool need_uses = false;
/* Treat nested const functions the same as pure functions as far
as the static chain is concerned. */
if (gimple_call_chain (stmt))
{
varinfo_t uses = get_call_use_vi (stmt);
make_constraint_to (uses->id, gimple_call_chain (stmt));
need_uses = true;
}
/* And if we applied NRV the address of the return slot escapes as well. */
if (gimple_call_return_slot_opt_p (stmt)
&& gimple_call_lhs (stmt) != NULL_TREE
&& TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
{
varinfo_t uses = get_call_use_vi (stmt);
auto_vec<ce_s> tmpc;
get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
make_constraints_to (uses->id, tmpc);
need_uses = true;
}
if (need_uses)
{
varinfo_t uses = get_call_use_vi (stmt);
make_any_offset_constraints (uses);
make_transitive_closure_constraints (uses);
/* The call may return anything reachable from the call-use set.  */
rhsc.var = uses->id;
rhsc.offset = 0;
rhsc.type = SCALAR;
results->safe_push (rhsc);
}
/* May return offsetted arguments. */
varinfo_t tem = NULL;
for (k = 0; k < gimple_call_num_args (stmt); ++k)
{
int flags = gimple_call_arg_flags (stmt, k);
/* If the argument is not used or not returned we can ignore it. */
if (flags & (EAF_UNUSED | EAF_NOT_RETURNED))
continue;
/* Lazily create one temporary collecting all possibly returned
   arguments.  */
if (!tem)
{
tem = new_var_info (NULL_TREE, "callarg", true);
tem->is_reg_var = true;
}
tree arg = gimple_call_arg (stmt, k);
auto_vec<ce_s> argc;
get_constraint_for_rhs (arg, &argc);
make_constraints_to (tem->id, argc);
}
if (tem)
{
ce_s ce;
/* UNKNOWN_OFFSET models possible pointer adjustment of the
   returned argument.  */
ce.type = SCALAR;
ce.var = tem->id;
ce.offset = UNKNOWN_OFFSET;
results->safe_push (ce);
}
/* May return addresses of globals. */
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = ADDRESSOF;
results->safe_push (rhsc);
}
/* For non-IPA mode, generate constraints necessary for a call to a
pure function in statement STMT.  Return value constraints are
appended to RESULTS.  Memory reached from the arguments, the static
chain and the NRV return slot is added to the call-use set only; no
clobber constraints are produced.  */
static void
handle_pure_call (gcall *stmt, vec<ce_s> *results)
{
struct constraint_expr rhsc;
unsigned i;
varinfo_t uses = NULL;
bool record_uses = false;
/* Memory reached from pointer arguments is call-used. */
for (i = 0; i < gimple_call_num_args (stmt); ++i)
{
tree arg = gimple_call_arg (stmt, i);
int flags = gimple_call_arg_flags (stmt, i);
/* If the argument is not used we can ignore it. */
if ((flags & EAF_UNUSED)
|| (flags & (EAF_NOT_RETURNED | EAF_NOREAD))
== (EAF_NOT_RETURNED | EAF_NOREAD))
continue;
/* Lazily create the call-use variable with any-offset and
   transitive closure constraints.  */
if (!uses)
{
uses = get_call_use_vi (stmt);
make_any_offset_constraints (uses);
make_transitive_closure_constraints (uses);
}
make_constraint_to (uses->id, arg);
/* Arguments that may be returned make the uses set part of the
   return value solution.  */
if (!(flags & EAF_NOT_RETURNED))
record_uses = true;
}
/* The static chain is used as well. */
if (gimple_call_chain (stmt))
{
if (!uses)
{
uses = get_call_use_vi (stmt);
make_any_offset_constraints (uses);
make_transitive_closure_constraints (uses);
}
make_constraint_to (uses->id, gimple_call_chain (stmt));
record_uses = true;
}
/* And if we applied NRV the address of the return slot. */
if (gimple_call_return_slot_opt_p (stmt)
&& gimple_call_lhs (stmt) != NULL_TREE
&& TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
{
if (!uses)
{
uses = get_call_use_vi (stmt);
make_any_offset_constraints (uses);
make_transitive_closure_constraints (uses);
}
auto_vec<ce_s> tmpc;
get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
make_constraints_to (uses->id, tmpc);
record_uses = true;
}
/* Pure functions may return call-used and nonlocal memory. */
if (record_uses)
{
rhsc.var = uses->id;
rhsc.offset = 0;
rhsc.type = SCALAR;
results->safe_push (rhsc);
}
/* Nonlocal memory may always be returned.  */
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = SCALAR;
results->safe_push (rhsc);
}
/* Return the varinfo for the callee of CALL. */
@ -4931,13 +4882,13 @@ find_func_aliases_for_call (struct function *fn, gcall *t)
if (flags & (ECF_CONST|ECF_NOVOPS))
{
if (gimple_call_lhs (t))
handle_const_call (t, &rhsc);
handle_rhs_call (t, &rhsc, implicit_const_eaf_flags, false, false);
}
/* Pure functions can return addresses in and of memory
reachable from their arguments, but they are not an escape
point for reachable memory of their arguments. */
else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
handle_pure_call (t, &rhsc);
handle_rhs_call (t, &rhsc, implicit_pure_eaf_flags, true, false);
/* If the call is to a replaceable operator delete and results
from a delete expression as opposed to a direct call to
such operator, then the effects for PTA (in particular
@ -4947,7 +4898,7 @@ find_func_aliases_for_call (struct function *fn, gcall *t)
&& gimple_call_from_new_or_delete (t))
;
else
handle_rhs_call (t, &rhsc);
handle_rhs_call (t, &rhsc, 0, true, true);
if (gimple_call_lhs (t))
handle_lhs_call (t, gimple_call_lhs (t),
gimple_call_return_flags (t), rhsc, fndecl);
@ -7582,43 +7533,64 @@ compute_points_to_sets (void)
pt = gimple_call_use_set (stmt);
if (gimple_call_flags (stmt) & ECF_CONST)
memset (pt, 0, sizeof (struct pt_solution));
else if ((vi = lookup_call_use_vi (stmt)) != NULL)
{
*pt = find_what_var_points_to (cfun->decl, vi);
/* Escaped (and thus nonlocal) variables are always
implicitly used by calls. */
/* ??? ESCAPED can be empty even though NONLOCAL
always escaped. */
pt->nonlocal = 1;
pt->escaped = 1;
}
else
{
/* If there is nothing special about this call then
we have made everything that is used also escape. */
*pt = cfun->gimple_df->escaped;
pt->nonlocal = 1;
bool uses_global_memory = true;
bool reads_global_memory = true;
determine_global_memory_access (stmt, NULL,
&reads_global_memory,
&uses_global_memory);
if (!uses_global_memory)
;
else if ((vi = lookup_call_use_vi (stmt)) != NULL)
{
*pt = find_what_var_points_to (cfun->decl, vi);
/* Escaped (and thus nonlocal) variables are always
implicitly used by calls. */
/* ??? ESCAPED can be empty even though NONLOCAL
always escaped. */
pt->nonlocal = uses_global_memory;
pt->escaped = uses_global_memory;
}
else if (uses_global_memory)
{
/* If there is nothing special about this call then
we have made everything that is used also escape. */
*pt = cfun->gimple_df->escaped;
pt->nonlocal = 1;
}
}
pt = gimple_call_clobber_set (stmt);
if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
memset (pt, 0, sizeof (struct pt_solution));
else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
{
*pt = find_what_var_points_to (cfun->decl, vi);
/* Escaped (and thus nonlocal) variables are always
implicitly clobbered by calls. */
/* ??? ESCAPED can be empty even though NONLOCAL
always escaped. */
pt->nonlocal = 1;
pt->escaped = 1;
}
else
{
/* If there is nothing special about this call then
we have made everything that is used also escape. */
*pt = cfun->gimple_df->escaped;
pt->nonlocal = 1;
bool writes_global_memory = true;
determine_global_memory_access (stmt, &writes_global_memory,
NULL, NULL);
if (!writes_global_memory)
;
else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
{
*pt = find_what_var_points_to (cfun->decl, vi);
/* Escaped (and thus nonlocal) variables are always
implicitly clobbered by calls. */
/* ??? ESCAPED can be empty even though NONLOCAL
always escaped. */
pt->nonlocal = writes_global_memory;
pt->escaped = writes_global_memory;
}
else if (writes_global_memory)
{
/* If there is nothing special about this call then
we have made everything that is used also escape. */
*pt = cfun->gimple_df->escaped;
pt->nonlocal = 1;
}
}
}
}