predict.c (probably_never_executed): New function.

2013-10-02  Teresa Johnson  <tejohnson@google.com>

	* predict.c (probably_never_executed): New function.
	(probably_never_executed_bb_p): Invoke probably_never_executed.
	(probably_never_executed_edge_p): Ditto.
	* bb-reorder.c (find_rarely_executed_basic_blocks_and_crossing_edges):
	Treat profile insanities conservatively.

From-SVN: r203152
Author: Teresa Johnson <tejohnson@google.com>
Date:   2013-10-03 05:06:05 +00:00 (committed by Teresa Johnson)
parent 6977865936
commit 79221839a3

3 changed files with 43 additions and 14 deletions

gcc/ChangeLog

@@ -1,3 +1,11 @@
+2013-10-02  Teresa Johnson  <tejohnson@google.com>
+
+	* predict.c (probably_never_executed): New function.
+	(probably_never_executed_bb_p): Invoke probably_never_executed.
+	(probably_never_executed_edge_p): Ditto.
+	* bb-reorder.c (find_rarely_executed_basic_blocks_and_crossing_edges):
+	Treat profile insanities conservatively.
+
 2013-10-02  John David Anglin  <danglin@gcc.gnu.org>
 
 	* config.gcc (hppa*64*-*-linux*): Don't add pa/t-linux to tmake_file.

gcc/bb-reorder.c

@@ -1564,7 +1564,23 @@ find_rarely_executed_basic_blocks_and_crossing_edges (void)
   /* Mark which partition (hot/cold) each basic block belongs in.  */
   FOR_EACH_BB (bb)
     {
+      bool cold_bb = false;
       if (probably_never_executed_bb_p (cfun, bb))
         {
+          /* Handle profile insanities created by upstream optimizations
+             by also checking the incoming edge weights. If there is a non-cold
+             incoming edge, conservatively prevent this block from being split
+             into the cold section.  */
+          cold_bb = true;
+          FOR_EACH_EDGE (e, ei, bb->preds)
+            if (!probably_never_executed_edge_p (cfun, e))
+              {
+                cold_bb = false;
+                break;
+              }
+        }
+      if (cold_bb)
+        {
           BB_SET_PARTITION (bb, BB_COLD_PARTITION);
           cold_bb_count++;

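In short, the pass now marks a block cold only when the block itself and every incoming edge look never-executed; a single warm predecessor edge (a profile insanity left behind by an upstream pass) conservatively keeps the block in the hot section. Below is a minimal standalone sketch of that predicate in plain C, using hypothetical stand-ins for GCC's basic_block and edge types and for the FOR_EACH_EDGE iterator:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Hypothetical, simplified stand-ins for GCC's CFG types; the real
   code works on basic_block and edge and iterates with FOR_EACH_EDGE.  */
struct edge_s { bool probably_never_executed; };
struct bb_s {
  bool probably_never_executed;
  struct edge_s **preds;          /* incoming edges */
  size_t n_preds;
};

/* A block is partitioned cold only if it is probably never executed
   AND no incoming edge carries weight; one non-cold predecessor
   (a profile insanity left by an upstream pass) keeps it hot.  */
static bool
should_partition_cold (const struct bb_s *bb)
{
  if (!bb->probably_never_executed)
    return false;
  for (size_t i = 0; i < bb->n_preds; i++)
    if (!bb->preds[i]->probably_never_executed)
      return false;
  return true;
}

int main (void)
{
  struct edge_s cold_edge = { true }, hot_edge = { false };
  struct edge_s *preds[] = { &cold_edge, &hot_edge };
  struct bb_s bb = { true, preds, 2 };
  /* One hot incoming edge, so the block is conservatively kept hot:
     prints 0.  */
  printf ("%d\n", should_partition_cold (&bb));
  return 0;
}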
gcc/predict.c

@@ -226,23 +226,26 @@ maybe_hot_edge_p (edge e)
 }
 
-/* Return true in case BB is probably never executed.  */
-bool
-probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
+/* Return true if profile COUNT and FREQUENCY, or function FUN static
+   node frequency reflects never being executed.  */
+
+static bool
+probably_never_executed (struct function *fun,
+                         gcov_type count, int frequency)
 {
   gcc_checking_assert (fun);
   if (profile_status_for_function (fun) == PROFILE_READ)
     {
-      if ((bb->count * 4 + profile_info->runs / 2) / profile_info->runs > 0)
+      if ((count * 4 + profile_info->runs / 2) / profile_info->runs > 0)
         return false;
-      if (!bb->frequency)
+      if (!frequency)
         return true;
       if (!ENTRY_BLOCK_PTR->frequency)
         return false;
       if (ENTRY_BLOCK_PTR->count && ENTRY_BLOCK_PTR->count < REG_BR_PROB_BASE)
         {
-          return (RDIV (bb->frequency * ENTRY_BLOCK_PTR->count,
+          return (RDIV (frequency * ENTRY_BLOCK_PTR->count,
                         ENTRY_BLOCK_PTR->frequency)
                   < REG_BR_PROB_BASE / 4);
         }
@@ -256,19 +259,21 @@ probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
   return false;
 }
 
+/* Return true in case BB is probably never executed.  */
+bool
+probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
+{
+  return probably_never_executed (fun, bb->count, bb->frequency);
+}
+
 /* Return true in case edge E is probably never executed.  */
 bool
 probably_never_executed_edge_p (struct function *fun, edge e)
 {
-  gcc_checking_assert (fun);
-  if (profile_info && flag_branch_probabilities)
-    return ((e->count + profile_info->runs / 2) / profile_info->runs) == 0;
-  if ((!profile_info || !flag_branch_probabilities)
-      && (cgraph_get_node (fun->decl)->frequency
-          == NODE_FREQUENCY_UNLIKELY_EXECUTED))
-    return true;
-  return false;
+  return probably_never_executed (fun, e->count, EDGE_FREQUENCY (e));
 }
 
 /* Return true if NODE should be optimized for size.  */
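For intuition about the PROFILE_READ test above: (count * 4 + runs / 2) / runs is four times the per-run count with round-to-nearest division, so it is nonzero once the block executed at least about once per eight training runs. The sketch below works that arithmetic through concrete numbers; the program itself is hypothetical, but RDIV is written out the way GCC's basic-block.h defines it:

#include <stdio.h>

typedef long long gcov_type;    /* stand-in for GCC's gcov_type */

/* Round-to-nearest integer division, as in GCC's RDIV macro.  */
#define RDIV(X, Y) (((X) + (Y) / 2) / (Y))

/* Mirrors the PROFILE_READ check: nonzero once COUNT * 4 rounds to at
   least one whole multiple of RUNS, i.e. count >= roughly runs / 8.  */
static int
count_looks_executed (gcov_type count, gcov_type runs)
{
  return (count * 4 + runs / 2) / runs > 0;
}

int main (void)
{
  gcov_type runs = 1000;
  /* count = 300: (1200 + 500) / 1000 = 1, treated as executed.  */
  printf ("%d\n", count_looks_executed (300, runs));
  /* count = 100: (400 + 500) / 1000 = 0, probably never executed
     (subject to the later frequency checks).  */
  printf ("%d\n", count_looks_executed (100, runs));
  /* RDIV itself rounds half up: RDIV (7, 2) == 4, RDIV (5, 2) == 3.  */
  printf ("%lld %lld\n", RDIV ((gcov_type) 7, 2), RDIV ((gcov_type) 5, 2));
  return 0;
}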