Account for prefetch_mod and unroll_factor in the computation of the prefetch count.

2010-06-14  Changpeng Fang  <changpeng.fang@amd.com>

	* tree-ssa-loop-prefetch.c (nothing_to_prefetch_p): New.  Return
	true if no prefetch is going to be generated for a given group.
	(estimate_prefetch_count): Use prefetch_mod and unroll_factor to
	estimate the prefetch_count.
	(loop_prefetch_arrays): Call nothing_to_prefetch_p; estimate the
	prefetch count by considering the unroll_factor and prefetch_mod
	for is_loop_prefetching_profitable.

From-SVN: r160766
Changpeng Fang 2010-06-14 20:51:26 +00:00 committed by Sebastian Pop
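
For reference, the new estimate is a ceiling division per memory reference: a reference that only needs a prefetch every prefetch_mod iterations requires ceil (unroll_factor / prefetch_mod) prefetch instructions in the unrolled loop body, whereas the old code counted exactly one prefetch per reference.  A minimal standalone sketch of that arithmetic (the helper name and the example values are illustrative only, not part of the patch):

#include <assert.h>

/* Illustrative helper, not part of the patch: number of prefetch
   instructions needed for a single memory reference in a loop body
   unrolled UNROLL_FACTOR times, when the reference only needs a
   prefetch every PREFETCH_MOD iterations.  */
static unsigned
prefetches_for_ref (unsigned unroll_factor, unsigned prefetch_mod)
{
  return (unroll_factor + prefetch_mod - 1) / prefetch_mod;
}

int
main (void)
{
  assert (prefetches_for_ref (1, 1) == 1);  /* no unrolling: one prefetch  */
  assert (prefetches_for_ref (4, 2) == 2);  /* 4x unroll, every 2nd iteration  */
  assert (prefetches_for_ref (4, 3) == 2);  /* 4x unroll, every 3rd iteration  */
  assert (prefetches_for_ref (8, 1) == 8);  /* 8x unroll, every iteration  */
  return 0;
}

This larger count is what loop_prefetch_arrays now passes on to is_loop_prefetching_profitable, so the profitability check sees the number of prefetch instructions the unrolled loop will actually contain.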

gcc/ChangeLog

@@ -1,3 +1,13 @@
+2010-06-14  Changpeng Fang  <changpeng.fang@amd.com>
+
+	* tree-ssa-loop-prefetch.c (nothing_to_prefetch_p): New.  Return
+	true if no prefetch is going to be generated for a given group.
+	(estimate_prefetch_count): Use prefetch_mod and unroll_factor to
+	estimate the prefetch_count.
+	(loop_prefetch_arrays): Call nothing_to_prefetch_p; estimate the
+	prefetch count by considering the unroll_factor and prefetch_mod
+	for is_loop_prefetching_profitable.
+
 2010-06-14  Andreas Schwab  <schwab@linux-m68k.org>
 
 	* config/m68k/m68k.c (m68k_delegitimize_address): Don't do

gcc/tree-ssa-loop-prefetch.c

@@ -994,18 +994,40 @@ schedule_prefetches (struct mem_ref_group *groups, unsigned unroll_factor,
   return any;
 }
 
-/* Estimate the number of prefetches in the given GROUPS.  */
+/* Return TRUE if no prefetch is going to be generated in the given
+   GROUPS.  */
 
-static int
-estimate_prefetch_count (struct mem_ref_group *groups)
+static bool
+nothing_to_prefetch_p (struct mem_ref_group *groups)
 {
   struct mem_ref *ref;
+
+  for (; groups; groups = groups->next)
+    for (ref = groups->refs; ref; ref = ref->next)
+      if (should_issue_prefetch_p (ref))
+        return false;
+
+  return true;
+}
+
+/* Estimate the number of prefetches in the given GROUPS.
+   UNROLL_FACTOR is the factor by which LOOP was unrolled.  */
+
+static int
+estimate_prefetch_count (struct mem_ref_group *groups, unsigned unroll_factor)
+{
+  struct mem_ref *ref;
+  unsigned n_prefetches;
   int prefetch_count = 0;
 
   for (; groups; groups = groups->next)
     for (ref = groups->refs; ref; ref = ref->next)
       if (should_issue_prefetch_p (ref))
-          prefetch_count++;
+        {
+          n_prefetches = ((unroll_factor + ref->prefetch_mod - 1)
+                          / ref->prefetch_mod);
+          prefetch_count += n_prefetches;
+        }
 
   return prefetch_count;
 }
@@ -1716,8 +1738,7 @@ loop_prefetch_arrays (struct loop *loop)
   /* Step 2: estimate the reuse effects.  */
   prune_by_reuse (refs);
 
-  prefetch_count = estimate_prefetch_count (refs);
-  if (prefetch_count == 0)
+  if (nothing_to_prefetch_p (refs))
     goto fail;
 
   determine_loop_nest_reuse (loop, refs, no_other_refs);
@@ -1733,6 +1754,12 @@ loop_prefetch_arrays (struct loop *loop)
   ninsns = tree_num_loop_insns (loop, &eni_size_weights);
   unroll_factor = determine_unroll_factor (loop, refs, ninsns, &desc,
                                            est_niter);
+
+  /* Estimate prefetch count for the unrolled loop.  */
+  prefetch_count = estimate_prefetch_count (refs, unroll_factor);
+  if (prefetch_count == 0)
+    goto fail;
+
   if (dump_file && (dump_flags & TDF_DETAILS))
     fprintf (dump_file, "Ahead %d, unroll factor %d, trip count "
              HOST_WIDE_INT_PRINT_DEC "\n"