diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index e614e8ee7ed..1a3aff53ed5 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,11 @@
+2013-10-02  Teresa Johnson
+
+	* predict.c (probably_never_executed): New function.
+	(probably_never_executed_bb_p): Invoke probably_never_executed.
+	(probably_never_executed_edge_p): Ditto.
+	* bb-reorder.c (find_rarely_executed_basic_blocks_and_crossing_edges):
+	Treat profile insanities conservatively.
+
 2013-10-02  John David Anglin
 
 	* config.gcc (hppa*64*-*-linux*): Don't add pa/t-linux to tmake_file.
diff --git a/gcc/bb-reorder.c b/gcc/bb-reorder.c
index b89f9855e77..c5a42d359dc 100644
--- a/gcc/bb-reorder.c
+++ b/gcc/bb-reorder.c
@@ -1564,7 +1564,23 @@ find_rarely_executed_basic_blocks_and_crossing_edges (void)
   /* Mark which partition (hot/cold) each basic block belongs in.  */
   FOR_EACH_BB (bb)
     {
-      if (probably_never_executed_bb_p (cfun, bb))
+      bool cold_bb = false;
+      if (probably_never_executed_bb_p (cfun, bb))
+        {
+          /* Handle profile insanities created by upstream optimizations
+             by also checking the incoming edge weights. If there is a non-cold
+             incoming edge, conservatively prevent this block from being split
+             into the cold section.  */
+          cold_bb = true;
+          FOR_EACH_EDGE (e, ei, bb->preds)
+            if (!probably_never_executed_edge_p (cfun, e))
+              {
+                cold_bb = false;
+                break;
+              }
+        }
+      if (cold_bb)
         {
           BB_SET_PARTITION (bb, BB_COLD_PARTITION);
           cold_bb_count++;
diff --git a/gcc/predict.c b/gcc/predict.c
index 4815e75248e..2909117ef6b 100644
--- a/gcc/predict.c
+++ b/gcc/predict.c
@@ -226,23 +226,26 @@ maybe_hot_edge_p (edge e)
 }
 
 
-/* Return true in case BB is probably never executed.  */
-bool
-probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
+/* Return true if profile COUNT and FREQUENCY, or function FUN static
+   node frequency reflects never being executed.  */
+
+static bool
+probably_never_executed (struct function *fun,
+                         gcov_type count, int frequency)
 {
   gcc_checking_assert (fun);
   if (profile_status_for_function (fun) == PROFILE_READ)
     {
-      if ((bb->count * 4 + profile_info->runs / 2) / profile_info->runs > 0)
+      if ((count * 4 + profile_info->runs / 2) / profile_info->runs > 0)
         return false;
-      if (!bb->frequency)
+      if (!frequency)
         return true;
       if (!ENTRY_BLOCK_PTR->frequency)
         return false;
       if (ENTRY_BLOCK_PTR->count && ENTRY_BLOCK_PTR->count < REG_BR_PROB_BASE)
         {
-          return (RDIV (bb->frequency * ENTRY_BLOCK_PTR->count,
+          return (RDIV (frequency * ENTRY_BLOCK_PTR->count,
                         ENTRY_BLOCK_PTR->frequency)
                   < REG_BR_PROB_BASE / 4);
         }
@@ -256,19 +259,21 @@ probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
 }
 
 
+/* Return true in case BB is probably never executed.  */
+
+bool
+probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
+{
+  return probably_never_executed (fun, bb->count, bb->frequency);
+}
+
+
 /* Return true in case edge E is probably never executed.  */
 
 bool
 probably_never_executed_edge_p (struct function *fun, edge e)
 {
-  gcc_checking_assert (fun);
-  if (profile_info && flag_branch_probabilities)
-    return ((e->count + profile_info->runs / 2) / profile_info->runs) == 0;
-  if ((!profile_info || !flag_branch_probabilities)
-      && (cgraph_get_node (fun->decl)->frequency
-          == NODE_FREQUENCY_UNLIKELY_EXECUTED))
-    return true;
-  return false;
+  return probably_never_executed (fun, e->count, EDGE_FREQUENCY (e));
 }
 
 /* Return true if NODE should be optimized for size.  */
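
For readers skimming the bb-reorder.c hunk, the following standalone C sketch models the decision it implements: a basic block is moved to the cold partition only if the block itself and every incoming edge look never-executed, so a hot incoming edge (a likely profile insanity left by an earlier transformation) keeps the block in the hot section. The toy_block/toy_edge types and the looks_never_executed threshold are illustrative stand-ins, not GCC's real basic_block, edge, or probably_never_executed interfaces.

/* Illustrative sketch only: toy stand-ins for GCC's basic_block/edge
   structures and profile counters; not the compiler's actual code.  */

#include <stdbool.h>
#include <stdio.h>

struct toy_edge { long count; };          /* profiled edge count */
struct toy_block
{
  long count;                             /* profiled block count */
  int num_preds;
  struct toy_edge *preds;                 /* incoming edges */
};

/* Hypothetical "probably never executed" test on a raw count,
   mirroring the count-scaled-by-runs rounding used in the patch.  */
static bool
looks_never_executed (long count, long runs)
{
  return (count * 4 + runs / 2) / runs == 0;
}

/* Conservative partitioning rule from the hunk: distrust a cold-looking
   block if any incoming edge carries non-cold weight.  */
static bool
should_mark_cold (const struct toy_block *bb, long runs)
{
  if (!looks_never_executed (bb->count, runs))
    return false;
  for (int i = 0; i < bb->num_preds; i++)
    if (!looks_never_executed (bb->preds[i].count, runs))
      return false;                       /* hot predecessor: keep block hot */
  return true;
}

int
main (void)
{
  struct toy_edge hot_pred = { 500 };
  struct toy_block bb = { 0, 1, &hot_pred };   /* zero count, hot predecessor */
  printf ("mark cold? %s\n", should_mark_cold (&bb, 100) ? "yes" : "no");
  return 0;
}

With a zero block count but a hot incoming edge, the sketch prints "no", which is the conservative behavior the patch adds to find_rarely_executed_basic_blocks_and_crossing_edges.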