This patch fixes PR bootstrap/57077.

Certain new uses of apply_probability are actually scaling counts up,
and the scale factor should not be treated as a probability, since its
value may exceed REG_BR_PROB_BASE. One example (from the PR) is scaling
counts up in LTO when merging profiles. Another, which I found while
preparing the patch to use the rounding divide in more places, is
inlining COMDAT functions.

Add new helper function apply_scale that does the scaling without
the probability range check. I audited the new uses of apply_probability
and changed the calls as appropriate.
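
To make the distinction concrete, here is a minimal standalone sketch
of the two helpers (simplified re-implementations for illustration
only, not part of the patch; the real definitions live in
basic-block.h, REG_BR_PROB_BASE is 10000 in GCC, and the main() driver
and the 2x scale value are made up for the example):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

typedef int64_t gcov_type;

#define REG_BR_PROB_BASE 10000
/* Rounding divide, as in GCC's RDIV.  */
#define RDIV(X, Y) (((X) + (Y) / 2) / (Y))

/* A probability must lie in [0, REG_BR_PROB_BASE].  */
static void
check_probability (int prob)
{
  assert (prob >= 0 && prob <= REG_BR_PROB_BASE);
}

/* Scale FREQ by SCALE / REG_BR_PROB_BASE; SCALE may exceed the base.  */
static gcov_type
apply_scale (gcov_type freq, int scale)
{
  return RDIV (freq * scale, REG_BR_PROB_BASE);
}

/* Like apply_scale, but SCALE must be a valid probability.  */
static gcov_type
apply_probability (gcov_type freq, int prob)
{
  check_probability (prob);
  return apply_scale (freq, prob);
}

int
main (void)
{
  /* Merging profiles can require scaling counts *up*, e.g. by 2x.  */
  int scale = 2 * REG_BR_PROB_BASE;
  printf ("%lld\n", (long long) apply_scale (1000, scale)); /* prints 2000 */
  /* apply_probability (1000, scale) would abort on the range check.  */
  return 0;
}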

2013-04-29  Teresa Johnson  <tejohnson@google.com>

	PR bootstrap/57077
	* basic-block.h (apply_scale): New function.
	(apply_probability): Use apply_scale.
	* gimple-streamer-in.c (input_bb): Ditto.
	* lto-streamer-in.c (input_cfg): Ditto.
	* lto-cgraph.c (merge_profile_summaries): Ditto.
	* tree-optimize.c (execute_fixup_cfg): Ditto.
	* tree-inline.c (copy_bb): Update comment to use
	apply_scale.
	(copy_edges_for_bb): Ditto.
	(copy_cfg_body): Ditto.

From-SVN: r198416

gcc/ChangeLog

@@ -1,3 +1,17 @@
+2013-04-29  Teresa Johnson  <tejohnson@google.com>
+
+	PR bootstrap/57077
+	* basic-block.h (apply_scale): New function.
+	(apply_probability): Use apply_scale.
+	* gimple-streamer-in.c (input_bb): Ditto.
+	* lto-streamer-in.c (input_cfg): Ditto.
+	* lto-cgraph.c (merge_profile_summaries): Ditto.
+	* tree-optimize.c (execute_fixup_cfg): Ditto.
+	* tree-inline.c (copy_bb): Update comment to use
+	apply_scale.
+	(copy_edges_for_bb): Ditto.
+	(copy_cfg_body): Ditto.
+
 2013-04-29  Tom de Vries  <tom@codesourcery.com>
 
 	* tree-ssa-tail-merge.c (find_same_succ_bb): Skip loop latch bbs.

gcc/basic-block.h

@@ -500,7 +500,7 @@ struct edge_list
 			     REG_BR_PROB_BASE)
 
 /* Compute a scale factor (or probability) suitable for scaling of
-   gcov_type values via apply_probability().  */
+   gcov_type values via apply_probability() and apply_scale().  */
 #define GCOV_COMPUTE_SCALE(num,den) \
   ((den) ? RDIV ((num) * REG_BR_PROB_BASE, (den)) : REG_BR_PROB_BASE)
@@ -952,13 +952,23 @@ combine_probabilities (int prob1, int prob2)
   return RDIV (prob1 * prob2, REG_BR_PROB_BASE);
 }
 
+/* Apply scale factor SCALE on frequency or count FREQ.  Use this
+   interface when potentially scaling up, so that SCALE is not
+   constrained to be < REG_BR_PROB_BASE.  */
+
+static inline gcov_type
+apply_scale (gcov_type freq, int scale)
+{
+  return RDIV (freq * scale, REG_BR_PROB_BASE);
+}
+
 /* Apply probability PROB on frequency or count FREQ.  */
 
 static inline gcov_type
 apply_probability (gcov_type freq, int prob)
 {
   check_probability (prob);
-  return RDIV (freq * prob, REG_BR_PROB_BASE);
+  return apply_scale (freq, prob);
 }
 
 /* Return inverse probability for PROB.  */
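
(Not part of the patch, just a worked illustration of the hunk above,
with made-up run counts: GCOV_COMPUTE_SCALE (3, 2) is
RDIV (3 * REG_BR_PROB_BASE, 2) = 15000, already larger than
REG_BR_PROB_BASE = 10000, so apply_probability would trip its range
check on it; apply_scale (100, 15000) simply returns
RDIV (100 * 15000, 10000) = 150, i.e. a 1.5x scale-up.)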

gcc/gimple-streamer-in.c

@@ -329,8 +329,8 @@ input_bb (struct lto_input_block *ib, enum LTO_tags tag,
   index = streamer_read_uhwi (ib);
   bb = BASIC_BLOCK_FOR_FUNCTION (fn, index);
 
-  bb->count = apply_probability (streamer_read_gcov_count (ib),
-                                 count_materialization_scale);
+  bb->count = apply_scale (streamer_read_gcov_count (ib),
+                           count_materialization_scale);
   bb->frequency = streamer_read_hwi (ib);
   bb->flags = streamer_read_hwi (ib);

gcc/lto-cgraph.c

@@ -1347,10 +1347,10 @@ merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
                                   file_data->profile_info.runs);
       lto_gcov_summary.sum_max
           = MAX (lto_gcov_summary.sum_max,
-                 apply_probability (file_data->profile_info.sum_max, scale));
+                 apply_scale (file_data->profile_info.sum_max, scale));
       lto_gcov_summary.sum_all
           = MAX (lto_gcov_summary.sum_all,
-                 apply_probability (file_data->profile_info.sum_all, scale));
+                 apply_scale (file_data->profile_info.sum_all, scale));
       /* Save a pointer to the profile_info with the largest
          scaled sum_all and the scale for use in merging the
          histogram.  */
@@ -1372,8 +1372,8 @@ merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
           /* Scale up the min value as we did the corresponding sum_all
              above.  Use that to find the new histogram index.  */
           gcov_type scaled_min
-              = apply_probability (saved_profile_info->histogram[h_ix].min_value,
-                                   saved_scale);
+              = apply_scale (saved_profile_info->histogram[h_ix].min_value,
+                             saved_scale);
           /* The new index may be shared with another scaled histogram entry,
              so we need to account for a non-zero histogram entry at new_ix.  */
           unsigned new_ix = gcov_histo_index (scaled_min);
@@ -1386,8 +1386,8 @@ merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
              here and place the scaled cumulative counter value in the bucket
              corresponding to the scaled minimum counter value.  */
           lto_gcov_summary.histogram[new_ix].cum_value
-              += apply_probability (saved_profile_info->histogram[h_ix].cum_value,
-                                    saved_scale);
+              += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
+                              saved_scale);
           lto_gcov_summary.histogram[new_ix].num_counters
               += saved_profile_info->histogram[h_ix].num_counters;
         }
@@ -1419,8 +1419,8 @@ merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
       if (scale == REG_BR_PROB_BASE)
         continue;
       for (edge = node->callees; edge; edge = edge->next_callee)
-        edge->count = apply_probability (edge->count, scale);
-      node->count = apply_probability (node->count, scale);
+        edge->count = apply_scale (edge->count, scale);
+      node->count = apply_scale (node->count, scale);
     }
 }
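
(A worked illustration of the re-bucketing above, with made-up numbers
rather than values from any real profile: if this file's histogram
entry h_ix has min_value 500 and saved_scale is 15000, i.e. a 1.5x
scale-up, then scaled_min = apply_scale (500, 15000)
= RDIV (500 * 15000, 10000) = 750; gcov_histo_index (750) may name a
different bucket than h_ix, and the similarly scaled cum_value is
accumulated, not assigned, at new_ix because another scaled entry may
already have landed in that bucket.)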

gcc/lto-streamer-in.c

@@ -635,8 +635,8 @@ input_cfg (struct lto_input_block *ib, struct function *fn,
       dest_index = streamer_read_uhwi (ib);
       probability = (int) streamer_read_hwi (ib);
-      count = apply_probability ((gcov_type) streamer_read_gcov_count (ib),
-                                 count_materialization_scale);
+      count = apply_scale ((gcov_type) streamer_read_gcov_count (ib),
+                           count_materialization_scale);
       edge_flags = streamer_read_uhwi (ib);
 
       dest = BASIC_BLOCK_FOR_FUNCTION (fn, dest_index);

gcc/tree-inline.c

@@ -1519,7 +1519,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
      basic_block_info automatically.  */
   copy_basic_block = create_basic_block (NULL, (void *) 0,
                                          (basic_block) prev->aux);
-  /* Update to use apply_probability().  */
+  /* Update to use apply_scale().  */
   copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
 
   /* We are going to rebuild frequencies from scratch.  These values
@@ -1891,7 +1891,7 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
             && old_edge->dest->aux != EXIT_BLOCK_PTR)
           flags |= EDGE_FALLTHRU;
         new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
-        /* Update to use apply_probability().  */
+        /* Update to use apply_scale().  */
         new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
         new_edge->probability = old_edge->probability;
       }
@@ -2278,7 +2278,7 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
           incoming_frequency += EDGE_FREQUENCY (e);
           incoming_count += e->count;
         }
-      /* Update to use apply_probability().  */
+      /* Update to use apply_scale().  */
       incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
       /* Update to use EDGE_FREQUENCY.  */
       incoming_frequency
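
(The tree-inline.c hunks only retarget the TODO comments; the scaling
itself still uses a plain truncating divide. As a made-up illustration
of why the comments now point at apply_scale: with bb->count = 1 and
count_scale = 15000, the existing 1 * 15000 / REG_BR_PROB_BASE
truncates to 1, while apply_scale (1, 15000) = RDIV (15000, 10000) = 2,
so a later switch to apply_scale at these sites would change only the
rounding, not the scale.)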

gcc/tree-optimize.c

@@ -131,15 +131,15 @@ execute_fixup_cfg (void)
                               ENTRY_BLOCK_PTR->count);
   ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
-  EXIT_BLOCK_PTR->count = apply_probability (EXIT_BLOCK_PTR->count,
-                                             count_scale);
+  EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
+                                       count_scale);
 
   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
-    e->count = apply_probability (e->count, count_scale);
+    e->count = apply_scale (e->count, count_scale);
 
   FOR_EACH_BB (bb)
     {
-      bb->count = apply_probability (bb->count, count_scale);
+      bb->count = apply_scale (bb->count, count_scale);
       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
         {
           gimple stmt = gsi_stmt (gsi);
@@ -172,7 +172,7 @@ execute_fixup_cfg (void)
             }
 
           FOR_EACH_EDGE (e, ei, bb->succs)
-            e->count = apply_probability (e->count, count_scale);
+            e->count = apply_scale (e->count, count_scale);
           /* If we have a basic block with no successors that does not
              end with a control statement or a noreturn call end it with