cgraph.c (cgraph_edge_cannot_lead_to_return): Also check if caller is noreturn.

* cgraph.c (cgraph_edge_cannot_lead_to_return): Also check
	if caller is noreturn.
	* ipa-reference.c (analyze_function): Use ipa_ref_cannot_lead_to_return
	* ipa-ref.h (ipa_ref_cannot_lead_to_return): New function.
	* ipa-ref.c (ipa_ref_cannot_lead_to_return): New function.
	* ipa-pure-const.c (check_decl): Add IPA parameter.
	(state_from_flags): New function.
	(better_state, worse_state): New functions.
	(check_call): When in IPA mode, do not care about callees.
	(check_load, check_store): Update.
	(check_ipa_load, check_ipa_store): New.
	(check_stmt): When in IPA mode, use IPA checkers.
	(analyze_function): Use state_from_flags.
	(propagate): Check indirect edges and references.

From-SVN: r160380
This commit is contained in:
Jan Hubicka 2010-06-07 17:33:34 +02:00 committed by Jan Hubicka
parent dba16b83d4
commit f10ea64027
9 changed files with 344 additions and 124 deletions

View File

@ -1,3 +1,20 @@
2010-06-07 Jan Hubicka <jh@suse.cz>
* cgraph.c (cgraph_edge_cannot_lead_to_return): Also check
if caller is noreturn.
* ipa-reference.c (analyze_function): Use ipa_ref_cannot_lead_to_return
* ipa-ref.h (ipa_ref_cannot_lead_to_return): New function.
* ipa-ref.c (ipa_ref_cannot_lead_to_return): New function.
* ipa-pure-const.c (check_decl): Add IPA parameter.
(state_from_flags): New function.
(better_state, worse_state): New functions.
(check_call): When in IPA mode, do not care about callees.
(check_load, check_store): Update.
(check_ipa_load, check_ipa_store): New.
(check_stmt): When in IPA mode, use IPA checkers.
(analyze_function): Use state_from_flags.
(propagate): Check indirect edges and references.
2010-06-07 Kazu Hirata <kazu@codesourcery.com>
PR rtl-optimization/44404

View File

@ -2606,6 +2606,8 @@ cgraph_node_cannot_return (struct cgraph_node *node)
bool
cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
{
if (cgraph_node_cannot_return (e->caller))
return true;
if (e->indirect_unknown_callee)
{
int flags = e->indirect_info->ecf_flags;

View File

@ -227,7 +227,7 @@ set_function_state (struct cgraph_node *node, funct_state s)
static inline void
check_decl (funct_state local,
tree t, bool checking_write)
tree t, bool checking_write, bool ipa)
{
/* Do not want to do anything with volatile except mark any
function that uses one to be not const or pure. */
@ -253,6 +253,11 @@ check_decl (funct_state local,
return;
}
/* In IPA mode we are not interested in checking actual loads and stores;
they will be processed at propagation time using ipa_ref. */
if (ipa)
return;
/* Since we have dealt with the locals and params cases above, if we
are CHECKING_WRITE, this cannot be a pure or constant
function. */
@ -333,6 +338,78 @@ check_op (funct_state local, tree t, bool checking_write)
}
}
/* Compute a pure/const STATE and LOOPING flag from the ECF FLAGS of a
   call or declaration.  CANNOT_LEAD_TO_RETURN is true when execution
   of the call can never lead to a return from the caller, in which
   case side effects may be ignored and the call treated as looping
   pure.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
		  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      /* Side effects of a call that never returns are not observable
	 through the caller's return value, so treat it as looping
	 pure.  */
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      /* Fixed typo in dump message ("neihter" -> "neither").  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING better of the two variants.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
	      enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      /* LOOPING of an IPA_NEITHER function carries no information,
	 so replace it outright; otherwise keep the weaker flag.  */
      if (*state == IPA_NEITHER)
	*looping = looping2;
      else
	*looping = MIN (*looping, looping2);
      /* Bug fix: actually store the better state.  The original body
	 only updated LOOPING, so STATE was never improved despite the
	 documented contract above.  */
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}
/* Merge STATE/LOOPING with STATE2/LOOPING2, keeping in STATE and
   LOOPING the worse (more conservative) of the two variants.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2)
{
  if (*state < state2)
    *state = state2;
  if (looping2 && !*looping)
    *looping = true;
}
/* Check the parameters of a function call to CALL_EXPR to see if
there are any references in the parameters that are not allowed for
pure or const functions. Also check to see if this is either an
@ -422,8 +499,10 @@ check_call (funct_state local, gimple call, bool ipa)
Look to see if there are any bits available for the callee (such as by
declaration or because it is builtin) and process solely on the basis of
those bits. */
else if (!ipa || !callee_t)
else if (!ipa)
{
enum pure_const_state_e call_state;
bool call_looping;
if (possibly_throws && cfun->can_throw_non_call_exceptions)
{
if (dump_file)
@ -442,68 +521,61 @@ check_call (funct_state local, gimple call, bool ipa)
}
local->can_throw = true;
}
if (flags & ECF_CONST)
{
if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
{
if (dump_file)
fprintf (dump_file, " calls looping pure.\n");
local->looping = true;
}
}
else if (flags & ECF_PURE)
{
if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
{
if (dump_file)
fprintf (dump_file, " calls looping const.\n");
local->looping = true;
}
if (dump_file)
fprintf (dump_file, " pure function call in not const\n");
if (local->pure_const_state == IPA_CONST)
local->pure_const_state = IPA_PURE;
}
else if ((flags & (ECF_NORETURN | ECF_NOTHROW))
== (ECF_NORETURN | ECF_NOTHROW)
|| (!flag_exceptions && (flags & ECF_NORETURN)))
{
if (dump_file)
fprintf (dump_file, " noreturn nothrow call is looping pure\n");
if (local->pure_const_state == IPA_CONST)
local->pure_const_state = IPA_PURE;
local->looping = true;
}
else
{
if (dump_file)
fprintf (dump_file, " uknown function call is not const/pure\n");
local->pure_const_state = IPA_NEITHER;
local->looping = true;
}
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " checking flags for call:");
state_from_flags (&call_state, &call_looping, flags,
((flags & (ECF_NORETURN | ECF_NOTHROW))
== (ECF_NORETURN | ECF_NOTHROW))
|| (!flag_exceptions && (flags & ECF_NORETURN)));
worse_state (&local->pure_const_state, &local->looping,
call_state, call_looping);
}
/* Direct functions calls are handled by IPA propagation. */
}
/* Wrapper around check_decl for loads. */
/* Wrapper around check_decl for loads in local mode. */
static bool
check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, false);
check_decl ((funct_state)data, op, false, false);
else
check_op ((funct_state)data, op, false);
return false;
}
/* Wrapper around check_decl for stores. */
/* Wrapper around check_decl for stores in local mode. */
static bool
check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, true);
check_decl ((funct_state)data, op, true, false);
else
check_op ((funct_state)data, op, true);
return false;
}
/* Wrapper around check_decl/check_op for loads in IPA mode;
   declarations are checked with the IPA flag set so actual loads are
   deferred to propagation time.  */

static bool
check_ipa_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  funct_state local = (funct_state) data;

  if (DECL_P (op))
    check_decl (local, op, false, true);
  else
    check_op (local, op, false);

  return false;
}
/* Wrapper around check_decl for stores in ipa mode. */
static bool
check_ipa_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, true, true);
else
check_op ((funct_state)data, op, true);
return false;
@ -527,7 +599,9 @@ check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
}
/* Look for loads and stores. */
walk_stmt_load_store_ops (stmt, local, check_load, check_store);
walk_stmt_load_store_ops (stmt, local,
ipa ? check_ipa_load : check_load,
ipa ? check_ipa_store : check_store);
if (gimple_code (stmt) != GIMPLE_CALL
&& stmt_could_throw_p (stmt))
@ -669,21 +743,15 @@ end:
}
}
if (TREE_READONLY (decl))
{
l->pure_const_state = IPA_CONST;
l->state_previously_known = IPA_CONST;
if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
l->looping = false, l->looping_previously_known = false;
}
if (DECL_PURE_P (decl))
{
if (l->pure_const_state != IPA_CONST)
l->pure_const_state = IPA_PURE;
l->state_previously_known = IPA_PURE;
if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
l->looping = false, l->looping_previously_known = false;
}
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " checking previously known:");
state_from_flags (&l->state_previously_known, &l->looping_previously_known,
flags_from_decl_or_type (fn->decl),
cgraph_node_cannot_return (fn));
better_state (&l->pure_const_state, &l->looping,
l->state_previously_known,
l->looping_previously_known);
if (TREE_NOTHROW (decl))
l->can_throw = false;
@ -953,6 +1021,7 @@ self_recursive_p (struct cgraph_node *node)
return false;
}
/* Produce the global information by performing a transitive closure
on the local information that was produced by generate_summary.
Note that there is no function_transform pass since this only
@ -995,9 +1064,13 @@ propagate (void)
/* Find the worst state for any node in the cycle. */
w = node;
while (w)
while (w && pure_const_state != IPA_NEITHER)
{
struct cgraph_edge *e;
struct cgraph_edge *ie;
int i;
struct ipa_ref *ref;
funct_state w_l = get_function_state (w);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Visiting %s/%i state:%s looping %i\n",
@ -1005,16 +1078,19 @@ propagate (void)
w->uid,
pure_const_names[w_l->pure_const_state],
w_l->looping);
if (pure_const_state < w_l->pure_const_state)
pure_const_state = w_l->pure_const_state;
if (w_l->looping)
looping = true;
/* First merge in function body properties. */
worse_state (&pure_const_state, &looping,
w_l->pure_const_state, w_l->looping);
if (pure_const_state == IPA_NEITHER)
break;
/* For overwritable nodes we can not assume anything. */
if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
{
looping |= w_l->looping_previously_known;
if (pure_const_state < w_l->state_previously_known)
pure_const_state = w_l->state_previously_known;
worse_state (&pure_const_state, &looping,
w_l->state_previously_known,
w_l->looping_previously_known);
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file,
@ -1022,16 +1098,18 @@ propagate (void)
pure_const_names[w_l->state_previously_known],
w_l->looping_previously_known);
}
break;
}
if (pure_const_state == IPA_NEITHER)
break;
count++;
/* We consider recursive cycles as possibly infinite.
This might be relaxed since infinite recursion leads to stack
overflow. */
if (count > 1)
looping = true;
/* Now walk the edges and merge in callee properties. */
for (e = w->callees; e; e = e->next_callee)
{
struct cgraph_node *y = e->callee;
@ -1059,10 +1137,9 @@ propagate (void)
&& cgraph_edge_cannot_lead_to_return (e))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file,
" Ignoring side effects -> pure, looping\n");
}
fprintf (dump_file,
" Ignoring side effects"
" -> pure, looping\n");
edge_state = IPA_PURE;
edge_looping = true;
}
@ -1073,55 +1150,78 @@ propagate (void)
}
}
else
{
int flags = flags_from_decl_or_type (y->decl);
state_from_flags (&edge_state, &edge_looping,
flags_from_decl_or_type (y->decl),
cgraph_edge_cannot_lead_to_return (e));
if (flags & ECF_LOOPING_CONST_OR_PURE)
{
edge_looping = true;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " unavailable looping");
}
if (flags & ECF_CONST)
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " const\n");
}
else if (flags & ECF_PURE)
{
edge_state = IPA_PURE;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " pure\n");
}
else if (cgraph_edge_cannot_lead_to_return (e))
{
edge_state = IPA_PURE;
edge_looping = true;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " ignoring side effects->pure looping\n");
}
else
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " neihter\n");
edge_state = IPA_NEITHER;
edge_looping = true;
}
}
/* Merge the results with what we already know.
When we found function to be NEITHER, but we know
it is looping pure const, be sure to set the looping flag. */
pure_const_state = MAX (pure_const_state, MIN (edge_state,
w_l->state_previously_known));
if (edge_state > w_l->state_previously_known)
looping = MAX (looping, w_l->looping_previously_known);
else
looping = MAX (looping, MIN (edge_looping,
w_l->looping_previously_known));
/* Merge the results with what we already know. */
better_state (&edge_state, &edge_looping,
w_l->state_previously_known,
w_l->looping_previously_known);
worse_state (&pure_const_state, &looping,
edge_state, edge_looping);
if (pure_const_state == IPA_NEITHER)
break;
}
if (pure_const_state == IPA_NEITHER)
break;
/* Now process the indirect call. */
for (ie = node->indirect_calls; ie; ie = ie->next_callee)
{
enum pure_const_state_e edge_state = IPA_CONST;
bool edge_looping = false;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Indirect call");
state_from_flags (&edge_state, &edge_looping,
ie->indirect_info->ecf_flags,
cgraph_edge_cannot_lead_to_return (ie));
/* Merge the results with what we already know. */
better_state (&edge_state, &edge_looping,
w_l->state_previously_known,
w_l->looping_previously_known);
worse_state (&pure_const_state, &looping,
edge_state, edge_looping);
if (pure_const_state == IPA_NEITHER)
break;
}
if (pure_const_state == IPA_NEITHER)
break;
/* And finally all loads and stores. */
for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
{
enum pure_const_state_e ref_state = IPA_CONST;
bool ref_looping = false;
switch (ref->use)
{
case IPA_REF_LOAD:
/* readonly reads are safe. */
if (TREE_READONLY (ipa_ref_varpool_node (ref)->decl))
break;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " nonreadonly global var read\n");
ref_state = IPA_PURE;
break;
case IPA_REF_STORE:
if (ipa_ref_cannot_lead_to_return (ref))
break;
ref_state = IPA_NEITHER;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " global var write\n");
break;
case IPA_REF_ADDR:
break;
}
better_state (&ref_state, &ref_looping,
w_l->state_previously_known,
w_l->looping_previously_known);
worse_state (&pure_const_state, &looping,
ref_state, ref_looping);
if (pure_const_state == IPA_NEITHER)
break;
}
w_info = (struct ipa_dfs_info *) w->aux;
w = w_info->next_cycle;
}
@ -1220,7 +1320,7 @@ propagate (void)
w = node;
while (w)
{
struct cgraph_edge *e;
struct cgraph_edge *e, *ie;
funct_state w_l = get_function_state (w);
if (w_l->can_throw
@ -1247,6 +1347,9 @@ propagate (void)
else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
can_throw = true;
}
for (ie = node->indirect_calls; ie; ie = ie->next_callee)
if (ie->can_throw_external)
can_throw = true;
w_info = (struct ipa_dfs_info *) w->aux;
w = w_info->next_cycle;
}

View File

@ -233,3 +233,11 @@ ipa_clone_refering (struct cgraph_node *dest_node,
dest_node, dest_varpool_node,
ref->use, ref->stmt);
}
/* Return true when execution of REF can never lead to a return from
   the function containing it, i.e. when the referring cgraph node
   cannot return.  */
bool
ipa_ref_cannot_lead_to_return (struct ipa_ref *ref)
{
return cgraph_node_cannot_return (ipa_ref_refering_node (ref));
}

View File

@ -88,4 +88,5 @@ void ipa_dump_references (FILE *, struct ipa_ref_list *);
void ipa_dump_refering (FILE *, struct ipa_ref_list *);
void ipa_clone_references (struct cgraph_node *, struct varpool_node *, struct ipa_ref_list *);
void ipa_clone_refering (struct cgraph_node *, struct varpool_node *, struct ipa_ref_list *);
bool ipa_ref_cannot_lead_to_return (struct ipa_ref *);

View File

@ -439,6 +439,8 @@ analyze_function (struct cgraph_node *fn)
bitmap_set_bit (local->statics_read, DECL_UID (var));
break;
case IPA_REF_STORE:
if (ipa_ref_cannot_lead_to_return (ref))
break;
bitmap_set_bit (local->statics_written, DECL_UID (var));
break;
case IPA_REF_ADDR:

View File

@ -512,8 +512,9 @@ cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
FIXME: This can not be done in between gimplify and omp_expand since
readonly flag plays role on what is shared and what is not. Currently we do
this transformation as part of ipa-reference pass, but it would make sense
to do it before early optimizations. */
this transformation as part of whole program visibility and re-do at
ipa-reference pass (to take into account cloning), but it would
make sense to do it before early optimizations. */
void
ipa_discover_readonly_nonaddressable_vars (void)
@ -825,6 +826,8 @@ whole_program_function_and_variable_visibility (void)
fprintf (dump_file, " %s", varpool_node_name (vnode));
fprintf (dump_file, "\n\n");
}
if (optimize)
ipa_discover_readonly_nonaddressable_vars ();
return 0;
}

View File

@ -1,3 +1,7 @@
2010-06-07 Jan Hubicka <jh@suse.cz>
* gcc.dg/ipa/pure-const-1.c: New testcase.
2010-06-07 Kazu Hirata <kazu@codesourcery.com>
PR rtl-optimization/44404

View File

@ -0,0 +1,80 @@
/* { dg-do compile } */
/* { dg-options "-O3 -fdump-tree-local-pure-const1 -fdump-ipa-pure-const -fdump-tree-optimized -fno-early-inlining" } */
void abort (void);
int error_code;
static int val;
/* Identity for arguments up to 50; aborts otherwise.  The dg-final
   scans below expect the pass to find this looping pure (it may
   abort, i.e. fail to return).  */
__attribute__ ((noinline, noclone))
static int
i_am_pure1 (int value)
{
  if (value > 50)
    abort ();

  return value;
}
/* Reads only its argument and the static variable VAL; the dg-final
   scan below expects the IPA pass to find this const.  */
__attribute__ ((noinline, noclone))
static int
i_am_const2 (int a)
{
return a+val;
}
/* Trivial identity function, used as the callback target passed to
   call_callback below.  */
__attribute__ ((noinline, noclone))
int
call_me(int a)
{
return a;
}
/* Inline wrapper that invokes FN indirectly — presumably here to
   exercise the pass's handling of indirect calls once inlined into
   i_am_const3; confirm against the pure-const dump.  */
inline int
call_callback(int (*fn)(int), int a)
{
return fn(a);
}
/* Calls call_me indirectly through call_callback; the dg-final scan
   below expects the IPA pass to find this const.  Added the explicit
   `int' return type — implicit int is invalid since C99.  */
__attribute__ ((noinline, noclone))
int
i_am_const3 (int a)
{
  return call_callback (call_me, a);
}
/* Records an error code and aborts; since abort does not return,
   calls to this function cannot lead to a return from the caller.
   Added the explicit `int' return type — implicit int is invalid
   since C99 (the value is never produced; abort does not return).  */
__attribute__ ((noinline))
int
explode_badly (void)
{
  error_code = 0xbad;
  abort ();
}
/* Identity for arguments up to 50; calls the aborting helper
   explode_badly otherwise.  The dg-final scan below expects the pass
   to find this looping pure.  Added the explicit `int' return type —
   implicit int is invalid since C99.  */
__attribute__ ((noinline, noclone))
int
i_am_pure4 (int a)
{
  if (a > 50)
    explode_badly ();
  return a;
}
/* Driver: calls each candidate twice with the same argument so the
   duplicate calls can be eliminated once the functions are proven
   pure/const (the dg-final scans count one remaining call each).
   Added the explicit `int' return type — implicit int is invalid
   since C99.  */
int
test (void)
{
  int s;

  s = i_am_pure1 (5);
  s += i_am_pure1 (5);
  s += i_am_const2 (5);
  s += i_am_const2 (5);
  s += i_am_const3 (5);
  s += i_am_const3 (5);
  s += i_am_pure4 (5);
  s += i_am_pure4 (5);
  return s;
}
/* { dg-final { scan-tree-dump-times "i_am_pure1 .5" 1 "optimized"} } */
/* { dg-final { scan-tree-dump-times "i_am_const2 .5" 1 "optimized"} } */
/* { dg-final { scan-tree-dump-times "i_am_const3 .5" 1 "optimized"} } */
/* { dg-final { scan-tree-dump-times "i_am_pure4 .5" 1 "optimized"} } */
/* { dg-final { scan-tree-dump "found to be looping pure: i_am_pure1" "local-pure-const1"} } */
/* { dg-final { scan-tree-dump "found to be looping pure: i_am_pure4" "local-pure-const1"} } */
/* { dg-final { scan-ipa-dump "found to be const: i_am_const2" "pure-const"} } */
/* { dg-final { scan-ipa-dump "found to be const: i_am_const3" "pure-const"} } */
/* { dg-final { cleanup-tree-dump "local-pure-const1" } } */
/* { dg-final { cleanup-tree-dump "optimized" } } */
/* { dg-final { cleanup-ipa-dump "pure-const" } } */