gcov-profile: use static pool for TOPN first

gcc/ChangeLog:

	PR gcov-profile/97461
	* gcov-io.h (GCOV_PREALLOCATED_KVP): Raise the number of
	pre-allocated static counters from 16 to 64.

libgcc/ChangeLog:

	PR gcov-profile/97461
	* libgcov.h (gcov_counter_add): Use the statically pre-allocated
	counters first, as that helps when malloc wrappers are being set up.

gcc/testsuite/ChangeLog:

	PR gcov-profile/97461
	* gcc.dg/tree-prof/pr97461.c: New test.
Author: Martin Liska
Date:   2020-10-19 17:40:00 +02:00
commit 14e19b82c1
parent 21508c47f9
3 changed files with 65 additions and 19 deletions
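
For context: TOPN value profiling stores its entries in dynamically allocated gcov_kvp nodes. Before this commit, libgcov allocated them from the heap and fell back to a small static pool only when it detected that the allocator had re-entered itself. PR 97461 shows that scheme failing for a program that overrides malloc/calloc: the very first profiled call can reach libgcov's allocator before the wrapper is usable. The patch inverts the strategy: serve the first GCOV_PREALLOCATED_KVP nodes from the static pool and fall back to the heap only once the pool is spent. Below is a minimal standalone sketch of that pool-first scheme, with illustrative names (in libgcc the real globals are __gcov_kvp_pool and __gcov_kvp_pool_index); it is not the commit's code.

#include <stdlib.h>

#define POOL_SIZE 64   /* stands in for GCOV_PREALLOCATED_KVP */

struct kvp { long value; long count; struct kvp *next; };

static struct kvp pool[POOL_SIZE];   /* static storage: no heap involved */
static unsigned pool_index;

static struct kvp *
allocate_kvp (void)
{
  struct kvp *node = NULL;

  /* Pool first.  The plain load may race, so the atomically bumped
     index is checked again before the slot is handed out.  */
  if (pool_index < POOL_SIZE)
    {
      unsigned index = __atomic_fetch_add (&pool_index, 1, __ATOMIC_RELAXED);
      if (index < POOL_SIZE)
        node = &pool[index];
    }

  /* Heap fallback once the pool is spent; by then any malloc wrapper
     has long been initialized.  */
  if (node == NULL)
    node = (struct kvp *) calloc (1, sizeof (struct kvp));

  return node;
}

int main (void)
{
  /* Exercise both paths: POOL_SIZE pool hits, then one heap node.  */
  for (int i = 0; i <= POOL_SIZE; i++)
    if (allocate_kvp () == NULL)
      return 1;
  return 0;
}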

diff --git a/gcc/gcov-io.h b/gcc/gcov-io.h
--- a/gcc/gcov-io.h
+++ b/gcc/gcov-io.h
@@ -293,7 +293,7 @@ GCOV_COUNTERS
 #define GCOV_TOPN_MAXIMUM_TRACKED_VALUES 32
 
 /* Number of pre-allocated gcov_kvp structures.  */
-#define GCOV_PREALLOCATED_KVP 16
+#define GCOV_PREALLOCATED_KVP 64
 
 /* Convert a counter index to a tag.  */
 #define GCOV_TAG_FOR_COUNTER(COUNT)				\
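
GCOV_PREALLOCATED_KVP sizes the static pool that allocate_gcov_kvp (see the libgcov.h hunk below) indexes via __gcov_kvp_pool_index. The backing definitions are not part of this diff; in libgcc they are presumably declared along these lines:

struct gcov_kvp __gcov_kvp_pool[GCOV_PREALLOCATED_KVP];
unsigned __gcov_kvp_pool_index;

Going from 16 to 64 entries costs only a couple of kilobytes of static data per process, but gives early counter updates far more headroom before the first heap allocation.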

diff --git a/gcc/testsuite/gcc.dg/tree-prof/pr97461.c b/gcc/testsuite/gcc.dg/tree-prof/pr97461.c
new file mode 100644
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-prof/pr97461.c
@@ -0,0 +1,58 @@
+/* PR gcov-profile/97461 */
+/* { dg-options "-O2 -ldl" } */
+
+#define _GNU_SOURCE
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+static int malloc_depth = 0;
+
+static char memory[128 * 1024];
+static size_t memory_p = 0;
+
+void f1(void) {}
+void f2(void) {}
+
+typedef void (*fun_t)(void);
+static const fun_t funs[2] = { f1, f2, };
+
+static void * malloc_impl(size_t size) {
+  void * r = &memory[memory_p];
+  memory_p += size;
+
+  // force TOPN profile
+  funs[size % 2]();
+  return r;
+}
+
+// Override the default malloc and check that it does not get called recursively.
+void * malloc(size_t size) {
+  // Must not be called recursively. Malloc implementation does not support it.
+  if (malloc_depth != 0) __builtin_trap();
+
+  ++malloc_depth;
+  void * r = malloc_impl(size);
+  --malloc_depth;
+  return r;
+}
+
+// Called from gcov.
+void *calloc(size_t nmemb, size_t size) {
+  // Must not be called recursively. Malloc implementation does not support it.
+  if (malloc_depth != 0) __builtin_trap();
+
+  ++malloc_depth;
+  void * r = malloc_impl(size * nmemb);
+  memset(r, 0, size * nmemb);
+  --malloc_depth;
+  return r;
+}
+
+void free(void *ptr) {}
+
+int main() {
+  void * p = malloc(8);
+  return p != 0 ? 0 : 1;
+}
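
How the test exercises the bug: the tree-prof harness compiles the file with profile generation and runs it, so the indirect call funs[size % 2]() inside malloc_impl gets a TOPN counter. Updating that counter may allocate a gcov_kvp; before this patch that could mean calling calloc from inside the user's own malloc/calloc while malloc_depth was still nonzero, which the test turns into __builtin_trap(). With pool-first allocation the first updates touch no heap at all, so the overridden allocator is never re-entered.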

diff --git a/libgcc/libgcov.h b/libgcc/libgcov.h
--- a/libgcc/libgcov.h
+++ b/libgcc/libgcov.h
@@ -404,22 +404,16 @@ gcov_counter_add (gcov_type *counter, gcov_type value,
     *counter += value;
 }
 
-/* Allocate gcov_kvp from heap.  If we are recursively called, then allocate
-   it from a list of pre-allocated pool.  */
+/* Allocate gcov_kvp from statically pre-allocated pool,
+   or use heap otherwise.  */
 
 static inline struct gcov_kvp *
 allocate_gcov_kvp (void)
 {
   struct gcov_kvp *new_node = NULL;
 
-  static
-#if defined(HAVE_CC_TLS)
-__thread
-#endif
-  volatile unsigned in_recursion ATTRIBUTE_UNUSED = 0;
-
 #if !defined(IN_GCOV_TOOL) && !defined(L_gcov_merge_topn)
-  if (__builtin_expect (in_recursion, 0))
+  if (__gcov_kvp_pool_index < GCOV_PREALLOCATED_KVP)
     {
       unsigned index;
 #if GCOV_SUPPORTS_ATOMIC
@@ -430,17 +424,11 @@ __thread
 #endif
       if (index < GCOV_PREALLOCATED_KVP)
 	new_node = &__gcov_kvp_pool[index];
-      else
-	/* Do not crash in the situation.  */
-	return NULL;
     }
-  else
 #endif
-    {
-      in_recursion = 1;
-      new_node = (struct gcov_kvp *)xcalloc (1, sizeof (struct gcov_kvp));
-      in_recursion = 0;
-    }
+
+  if (new_node == NULL)
+    new_node = (struct gcov_kvp *)xcalloc (1, sizeof (struct gcov_kvp));
 
   return new_node;
 }
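
Pieced together from the two hunks, allocate_gcov_kvp after the patch reads roughly as follows. The atomic index bump sits between the hunks and is unchanged pre-existing code, so treat this as a close reconstruction rather than a verbatim quote:

static inline struct gcov_kvp *
allocate_gcov_kvp (void)
{
  struct gcov_kvp *new_node = NULL;

#if !defined(IN_GCOV_TOOL) && !defined(L_gcov_merge_topn)
  /* Fast path: hand out one of the GCOV_PREALLOCATED_KVP static nodes.  */
  if (__gcov_kvp_pool_index < GCOV_PREALLOCATED_KVP)
    {
      unsigned index;
#if GCOV_SUPPORTS_ATOMIC
      index = __atomic_fetch_add (&__gcov_kvp_pool_index, 1,
				  __ATOMIC_RELAXED);
#else
      index = __gcov_kvp_pool_index++;
#endif
      if (index < GCOV_PREALLOCATED_KVP)
	new_node = &__gcov_kvp_pool[index];
    }
#endif

  /* Pool exhausted (or pool not available, as in gcov-tool): use the heap.  */
  if (new_node == NULL)
    new_node = (struct gcov_kvp *)xcalloc (1, sizeof (struct gcov_kvp));

  return new_node;
}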