Reformat malloc to gnu style.

Author: Ondřej Bílka
Date: 2014-01-02 09:38:18 +01:00
parent 9a3c6a6ff6
commit 6c8dbf00f5
18 changed files with 3843 additions and 3559 deletions
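The change is mechanical: opening braces move onto their own line and are indented under the controlling statement, a space is inserted before the opening parenthesis of calls and declarations, and cuddled "} else {" sequences are split across lines. As a rough illustration of the transformation (a made-up clamp function, not code taken from the patch), the before/after looks like this:

/* Illustration only: the same function written first in the old
   malloc layout and then in GNU style; neither version comes from
   the glibc sources.  */
#include <stdio.h>

/* Old layout: brace on the same line, no space before '(',
   'else' cuddled against the closing brace.  */
static int
clamp_old(int x, int lo, int hi) {
  if(x < lo) {
    return lo;
  } else if(x > hi) {
    return hi;
  }
  return x;
}

/* GNU style: space before '(', braces on their own lines indented
   two spaces past the controlling statement, body two spaces more.  */
static int
clamp_gnu (int x, int lo, int hi)
{
  if (x < lo)
    {
      return lo;
    }
  else if (x > hi)
    {
      return hi;
    }
  return x;
}

int
main (void)
{
  printf ("%d %d\n", clamp_old (15, 0, 10), clamp_gnu (-3, 0, 10));
  return 0;
}

The hunks below apply this same transformation across the files listed in the ChangeLog entry.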

ChangeLog

@ -1,3 +1,37 @@
2013-01-02  Ondřej Bílka  <neleai@seznam.cz>

	* malloc/arena.c (malloc_atfork, free_atfork, ptmalloc_lock_all,
	ptmalloc_unlock_all, ptmalloc_unlock_all2, next_env_entry,
	__failing_morecore, ptmalloc_init, dump_heap, new_heap, grow_heap,
	heap_trim, _int_new_arena, get_free_list, reused_arena, arena_get2):
	Convert to GNU style.
	* malloc/hooks.c (memalign_hook_ini, __malloc_check_init,
	mem2mem_check, mem2chunk_check, top_check, realloc_check,
	memalign_check, __malloc_set_state): Likewise.
	* malloc/mallocbug.c (main): Likewise.
	* malloc/malloc.c (__malloc_assert, malloc_init_state, free_perturb,
	do_check_malloced_chunk, do_check_malloc_state, sysmalloc, systrim,
	mremap_chunk, __libc_malloc, __libc_free, __libc_realloc, _mid_memalign,
	_int_malloc, malloc_consolidate, _int_realloc, _int_memalign, mtrim,
	musable, __libc_mallopt, __posix_memalign, malloc_info): Likewise.
	* malloc/malloc.h: Likewise.
	* malloc/mcheck.c (checkhdr, unlink_blk, link_blk, freehook, mallochook,
	memalignhook, reallochook, mabort): Likewise.
	* malloc/mcheck.h: Likewise.
	* malloc/memusage.c (update_data, me, malloc, realloc, calloc, free, mmap,
	mmap64, mremap, munmap, dest): Likewise.
	* malloc/memusagestat.c (main, parse_opt, more_help): Likewise.
	* malloc/morecore.c (__default_morecore): Likewise.
	* malloc/mtrace.c (tr_break, lock_and_info, mtrace): Likewise.
	* malloc/obstack.c (_obstack_begin, _obstack_newchunk,
	_obstack_allocated_p, obstack_free, _obstack_memory_used,
	print_and_abort): Likewise.
	* malloc/obstack.h: Likewise.
	* malloc/set-freeres.c (__libc_freeres): Likewise.
	* malloc/tst-mallocstate.c (main): Likewise.
	* malloc/tst-mtrace.c (main): Likewise.
	* malloc/tst-realloc.c (do_test): Likewise.

2013-01-02  Siddhesh Poyarekar  <siddhesh@redhat.com>

	[BZ #16366]

malloc/arena.c

@ -53,7 +53,8 @@
malloc_chunks. It is allocated with mmap() and always starts at an
address aligned to HEAP_MAX_SIZE. */
typedef struct _heap_info {
typedef struct _heap_info
{
mstate ar_ptr; /* Arena for this heap. */
struct _heap_info *prev; /* Previous heap. */
size_t size; /* Current size in bytes. */
@ -156,17 +157,24 @@ malloc_atfork(size_t sz, const void *caller)
void *victim;
tsd_getspecific (arena_key, vptr);
if(vptr == ATFORK_ARENA_PTR) {
if (vptr == ATFORK_ARENA_PTR)
{
/* We are the only thread that may allocate at all. */
if(save_malloc_hook != malloc_check) {
if (save_malloc_hook != malloc_check)
{
return _int_malloc (&main_arena, sz);
} else {
}
else
{
if (top_check () < 0)
return 0;
victim = _int_malloc (&main_arena, sz + 1);
return mem2mem_check (victim, sz);
}
} else {
}
else
{
/* Suspend the thread until the `atfork' handlers have completed.
By that time, the hooks will have been reset as well, so that
mALLOc() can be used again. */
@ -216,6 +224,7 @@ ptmalloc_lock_all (void)
if (__malloc_initialized < 1)
return;
if (mutex_trylock (&list_lock))
{
void *my_arena;
@ -228,10 +237,12 @@ ptmalloc_lock_all (void)
/* This thread has to wait its turn. */
(void) mutex_lock (&list_lock);
}
for(ar_ptr = &main_arena;;) {
for (ar_ptr = &main_arena;; )
{
(void) mutex_lock (&ar_ptr->mutex);
ar_ptr = ar_ptr->next;
if(ar_ptr == &main_arena) break;
if (ar_ptr == &main_arena)
break;
}
save_malloc_hook = __malloc_hook;
save_free_hook = __free_hook;
@ -251,15 +262,19 @@ ptmalloc_unlock_all (void)
if (__malloc_initialized < 1)
return;
if (--atfork_recursive_cntr != 0)
return;
tsd_setspecific (arena_key, save_arena);
__malloc_hook = save_malloc_hook;
__free_hook = save_free_hook;
for(ar_ptr = &main_arena;;) {
for (ar_ptr = &main_arena;; )
{
(void) mutex_unlock (&ar_ptr->mutex);
ar_ptr = ar_ptr->next;
if(ar_ptr == &main_arena) break;
if (ar_ptr == &main_arena)
break;
}
(void) mutex_unlock (&list_lock);
}
@ -278,18 +293,22 @@ ptmalloc_unlock_all2 (void)
if (__malloc_initialized < 1)
return;
tsd_setspecific (arena_key, save_arena);
__malloc_hook = save_malloc_hook;
__free_hook = save_free_hook;
free_list = NULL;
for(ar_ptr = &main_arena;;) {
for (ar_ptr = &main_arena;; )
{
mutex_init (&ar_ptr->mutex);
if (ar_ptr != save_arena) {
if (ar_ptr != save_arena)
{
ar_ptr->next_free = free_list;
free_list = ar_ptr;
}
ar_ptr = ar_ptr->next;
if(ar_ptr == &main_arena) break;
if (ar_ptr == &main_arena)
break;
}
mutex_init (&list_lock);
atfork_recursive_cntr = 0;
@ -298,9 +317,7 @@ ptmalloc_unlock_all2 (void)
# else
# define ptmalloc_unlock_all2 ptmalloc_unlock_all
# endif
#endif /* !NO_THREADS */
/* Initialization routine. */
@ -353,7 +370,9 @@ libc_hidden_proto (_dl_open_hook);
static void
ptmalloc_init (void)
{
if(__malloc_initialized >= 0) return;
if (__malloc_initialized >= 0)
return;
__malloc_initialized = 0;
#ifdef SHARED
@ -433,7 +452,8 @@ ptmalloc_init (void)
}
}
}
if(s && s[0]) {
if (s && s[0])
{
__libc_mallopt (M_CHECK_ACTION, (int) (s[0] - '0'));
if (check_action != 0)
__malloc_check_init ();
@ -450,7 +470,7 @@ thread_atfork_static(ptmalloc_lock_all, ptmalloc_unlock_all, \
ptmalloc_unlock_all2)
#endif
/* Managing heaps and arenas (for concurrent threads) */
@ -469,12 +489,16 @@ dump_heap(heap_info *heap)
(char *) (heap + 1) : (char *) (heap + 1) + sizeof (struct malloc_state);
p = (mchunkptr) (((unsigned long) ptr + MALLOC_ALIGN_MASK) &
~MALLOC_ALIGN_MASK);
for(;;) {
for (;; )
{
fprintf (stderr, "chunk %p size %10lx", p, (long) p->size);
if(p == top(heap->ar_ptr)) {
if (p == top (heap->ar_ptr))
{
fprintf (stderr, " (top)\n");
break;
} else if(p->size == (0|PREV_INUSE)) {
}
else if (p->size == (0 | PREV_INUSE))
{
fprintf (stderr, " (fence)\n");
break;
}
@ -482,7 +506,6 @@ dump_heap(heap_info *heap)
p = next_chunk (p);
}
}
#endif /* MALLOC_DEBUG > 1 */
/* If consecutive mmap (0, HEAP_MAX_SIZE << 1, ...) calls return decreasing
@ -522,18 +545,22 @@ new_heap(size_t size, size_t top_pad)
mapping (on Linux, this is the case for all non-writable mappings
anyway). */
p2 = MAP_FAILED;
if(aligned_heap_area) {
if (aligned_heap_area)
{
p2 = (char *) MMAP (aligned_heap_area, HEAP_MAX_SIZE, PROT_NONE,
MAP_NORESERVE);
aligned_heap_area = NULL;
if (p2 != MAP_FAILED && ((unsigned long)p2 & (HEAP_MAX_SIZE-1))) {
if (p2 != MAP_FAILED && ((unsigned long) p2 & (HEAP_MAX_SIZE - 1)))
{
__munmap (p2, HEAP_MAX_SIZE);
p2 = MAP_FAILED;
}
}
if(p2 == MAP_FAILED) {
if (p2 == MAP_FAILED)
{
p1 = (char *) MMAP (0, HEAP_MAX_SIZE << 1, PROT_NONE, MAP_NORESERVE);
if(p1 != MAP_FAILED) {
if (p1 != MAP_FAILED)
{
p2 = (char *) (((unsigned long) p1 + (HEAP_MAX_SIZE - 1))
& ~(HEAP_MAX_SIZE - 1));
ul = p2 - p1;
@ -542,19 +569,24 @@ new_heap(size_t size, size_t top_pad)
else
aligned_heap_area = p2 + HEAP_MAX_SIZE;
__munmap (p2 + HEAP_MAX_SIZE, HEAP_MAX_SIZE - ul);
} else {
}
else
{
/* Try to take the chance that an allocation of only HEAP_MAX_SIZE
is already aligned. */
p2 = (char *) MMAP (0, HEAP_MAX_SIZE, PROT_NONE, MAP_NORESERVE);
if (p2 == MAP_FAILED)
return 0;
if((unsigned long)p2 & (HEAP_MAX_SIZE-1)) {
if ((unsigned long) p2 & (HEAP_MAX_SIZE - 1))
{
__munmap (p2, HEAP_MAX_SIZE);
return 0;
}
}
}
if(__mprotect(p2, size, PROT_READ|PROT_WRITE) != 0) {
if (__mprotect (p2, size, PROT_READ | PROT_WRITE) != 0)
{
__munmap (p2, HEAP_MAX_SIZE);
return 0;
}
@ -579,11 +611,14 @@ grow_heap(heap_info *h, long diff)
new_size = (long) h->size + diff;
if ((unsigned long) new_size > (unsigned long) HEAP_MAX_SIZE)
return -1;
if((unsigned long) new_size > h->mprotect_size) {
if ((unsigned long) new_size > h->mprotect_size)
{
if (__mprotect ((char *) h + h->mprotect_size,
(unsigned long) new_size - h->mprotect_size,
PROT_READ | PROT_WRITE) != 0)
return -2;
h->mprotect_size = new_size;
}
@ -602,6 +637,7 @@ shrink_heap(heap_info *h, long diff)
new_size = (long) h->size - diff;
if (new_size < (long) sizeof (*h))
return -1;
/* Try to re-map the extra heap space freshly to save memory, and make it
inaccessible. See malloc-sysdep.h to know when this is true. */
if (__builtin_expect (check_may_shrink_heap (), 0))
@ -609,6 +645,7 @@ shrink_heap(heap_info *h, long diff)
if ((char *) MMAP ((char *) h + new_size, diff, PROT_NONE,
MAP_FIXED) == (char *) MAP_FAILED)
return -2;
h->mprotect_size = new_size;
}
else
@ -640,7 +677,8 @@ heap_trim(heap_info *heap, size_t pad)
long new_size, top_size, extra, prev_size, misalign;
/* Can this heap go away completely? */
while(top_chunk == chunk_at_offset(heap, sizeof(*heap))) {
while (top_chunk == chunk_at_offset (heap, sizeof (*heap)))
{
prev_heap = heap->prev;
prev_size = prev_heap->size - (MINSIZE - 2 * SIZE_SZ);
p = chunk_at_offset (prev_heap, prev_size);
@ -661,7 +699,8 @@ heap_trim(heap_info *heap, size_t pad)
LIBC_PROBE (memory_heap_free, 2, heap, heap->size);
delete_heap (heap);
heap = prev_heap;
if(!prev_inuse(p)) { /* consolidate backward */
if (!prev_inuse (p)) /* consolidate backward */
{
p = prev_chunk (p);
unlink (p, bck, fwd);
}
@ -675,9 +714,11 @@ heap_trim(heap_info *heap, size_t pad)
extra = (top_size - pad - MINSIZE - 1) & ~(pagesz - 1);
if (extra < (long) pagesz)
return 0;
/* Try to shrink. */
if (shrink_heap (heap, extra) != 0)
return 0;
ar_ptr->system_mem -= extra;
arena_mem -= extra;
@ -699,7 +740,8 @@ _int_new_arena(size_t size)
h = new_heap (size + (sizeof (*h) + sizeof (*a) + MALLOC_ALIGNMENT),
mp_.top_pad);
if(!h) {
if (!h)
{
/* Maybe size is too large to fit in a single heap. So, just try
to create a minimally-sized arena and let _int_malloc() attempt
to deal with the large request via mmap_chunk(). */
@ -863,11 +905,14 @@ static mstate
arena_get_retry (mstate ar_ptr, size_t bytes)
{
LIBC_PROBE (memory_arena_retry, 2, bytes, ar_ptr);
if(ar_ptr != &main_arena) {
if (ar_ptr != &main_arena)
{
(void) mutex_unlock (&ar_ptr->mutex);
ar_ptr = &main_arena;
(void) mutex_lock (&ar_ptr->mutex);
} else {
}
else
{
/* Grab ar_ptr->next prior to releasing its lock. */
mstate prev = ar_ptr->next ? ar_ptr : 0;
(void) mutex_unlock (&ar_ptr->mutex);

malloc/hooks.c

@ -71,7 +71,8 @@ static int disallow_malloc_check;
void
__malloc_check_init (void)
{
if (disallow_malloc_check) {
if (disallow_malloc_check)
{
disallow_malloc_check = 0;
return;
}
@ -106,8 +107,10 @@ malloc_check_get_size(mchunkptr p)
for (size = chunksize (p) - 1 + (chunk_is_mmapped (p) ? 0 : SIZE_SZ);
(c = ((unsigned char *) p)[size]) != magic;
size -= c) {
if(c<=0 || size<(c+2*SIZE_SZ)) {
size -= c)
{
if (c <= 0 || size < (c + 2 * SIZE_SZ))
{
malloc_printerr (check_action, "malloc_check_get_size: memory corruption",
chunk2mem (p));
return 0;
@ -131,11 +134,14 @@ mem2mem_check(void *ptr, size_t sz)
if (!ptr)
return ptr;
p = mem2chunk (ptr);
for (i = chunksize (p) - (chunk_is_mmapped (p) ? 2 * SIZE_SZ + 1 : SIZE_SZ + 1);
i > sz;
i -= 0xFF) {
if(i-sz < 0x100) {
i -= 0xFF)
{
if (i - sz < 0x100)
{
m_ptr[i] = (unsigned char) (i - sz);
break;
}
@ -156,9 +162,12 @@ mem2chunk_check(void* mem, unsigned char **magic_p)
INTERNAL_SIZE_T sz, c;
unsigned char magic;
if(!aligned_OK(mem)) return NULL;
if (!aligned_OK (mem))
return NULL;
p = mem2chunk (mem);
if (!chunk_is_mmapped(p)) {
if (!chunk_is_mmapped (p))
{
/* Must be a chunk in conventional heap memory. */
int contig = contiguous (&main_arena);
sz = chunksize (p);
@ -170,11 +179,16 @@ mem2chunk_check(void* mem, unsigned char **magic_p)
(contig && (char *) prev_chunk (p) < mp_.sbrk_base) ||
next_chunk (prev_chunk (p)) != p)))
return NULL;
magic = MAGICBYTE (p);
for(sz += SIZE_SZ-1; (c = ((unsigned char*)p)[sz]) != magic; sz -= c) {
if(c<=0 || sz<(c+2*SIZE_SZ)) return NULL;
for (sz += SIZE_SZ - 1; (c = ((unsigned char *) p)[sz]) != magic; sz -= c)
{
if (c <= 0 || sz < (c + 2 * SIZE_SZ))
return NULL;
}
} else {
}
else
{
unsigned long offset, page_mask = GLRO (dl_pagesize) - 1;
/* mmap()ed chunks have MALLOC_ALIGNMENT or higher power-of-two
@ -189,9 +203,12 @@ mem2chunk_check(void* mem, unsigned char **magic_p)
((((unsigned long) p - p->prev_size) & page_mask) != 0) ||
((sz = chunksize (p)), ((p->prev_size + sz) & page_mask) != 0))
return NULL;
magic = MAGICBYTE (p);
for(sz -= 1; (c = ((unsigned char*)p)[sz]) != magic; sz -= c) {
if(c<=0 || sz<(c+2*SIZE_SZ)) return NULL;
for (sz -= 1; (c = ((unsigned char *) p)[sz]) != magic; sz -= c)
{
if (c <= 0 || sz < (c + 2 * SIZE_SZ))
return NULL;
}
}
((unsigned char *) p)[sz] ^= 0xFF;
@ -252,7 +269,8 @@ malloc_check(size_t sz, const void *caller)
{
void *victim;
if (sz+1 == 0) {
if (sz + 1 == 0)
{
__set_errno (ENOMEM);
return NULL;
}
@ -268,16 +286,20 @@ free_check(void* mem, const void *caller)
{
mchunkptr p;
if(!mem) return;
if (!mem)
return;
(void) mutex_lock (&main_arena.mutex);
p = mem2chunk_check (mem, NULL);
if(!p) {
if (!p)
{
(void) mutex_unlock (&main_arena.mutex);
malloc_printerr (check_action, "free(): invalid pointer", mem);
return;
}
if (chunk_is_mmapped(p)) {
if (chunk_is_mmapped (p))
{
(void) mutex_unlock (&main_arena.mutex);
munmap_chunk (p);
return;
@ -293,19 +315,24 @@ realloc_check(void* oldmem, size_t bytes, const void *caller)
void *newmem = 0;
unsigned char *magic_p;
if (bytes+1 == 0) {
if (bytes + 1 == 0)
{
__set_errno (ENOMEM);
return NULL;
}
if (oldmem == 0) return malloc_check(bytes, NULL);
if (bytes == 0) {
if (oldmem == 0)
return malloc_check (bytes, NULL);
if (bytes == 0)
{
free_check (oldmem, NULL);
return NULL;
}
(void) mutex_lock (&main_arena.mutex);
const mchunkptr oldp = mem2chunk_check (oldmem, &magic_p);
(void) mutex_unlock (&main_arena.mutex);
if(!oldp) {
if (!oldp)
{
malloc_printerr (check_action, "realloc(): invalid pointer", oldmem);
return malloc_check (bytes, NULL);
}
@ -314,7 +341,8 @@ realloc_check(void* oldmem, size_t bytes, const void *caller)
checked_request2size (bytes + 1, nb);
(void) mutex_lock (&main_arena.mutex);
if (chunk_is_mmapped(oldp)) {
if (chunk_is_mmapped (oldp))
{
#if HAVE_MREMAP
mchunkptr newp = mremap_chunk (oldp, nb);
if (newp)
@ -325,18 +353,23 @@ realloc_check(void* oldmem, size_t bytes, const void *caller)
/* Note the extra SIZE_SZ overhead. */
if (oldsize - SIZE_SZ >= nb)
newmem = oldmem; /* do nothing */
else {
else
{
/* Must alloc, copy, free. */
if (top_check () >= 0)
newmem = _int_malloc (&main_arena, bytes + 1);
if (newmem) {
if (newmem)
{
memcpy (newmem, oldmem, oldsize - 2 * SIZE_SZ);
munmap_chunk (oldp);
}
}
}
} else {
if (top_check() >= 0) {
}
else
{
if (top_check () >= 0)
{
INTERNAL_SIZE_T nb;
checked_request2size (bytes + 1, nb);
newmem = _int_realloc (&main_arena, oldp, oldsize, nb);
@ -346,7 +379,8 @@ realloc_check(void* oldmem, size_t bytes, const void *caller)
/* mem2chunk_check changed the magic byte in the old chunk.
If newmem is NULL, then the old chunk will still be used though,
so we need to invert that change here. */
if (newmem == NULL) *magic_p ^= 0xFF;
if (newmem == NULL)
*magic_p ^= 0xFF;
(void) mutex_unlock (&main_arena.mutex);
@ -358,8 +392,11 @@ memalign_check(size_t alignment, size_t bytes, const void *caller)
{
void *mem;
if (alignment <= MALLOC_ALIGNMENT) return malloc_check(bytes, NULL);
if (alignment < MINSIZE) alignment = MINSIZE;
if (alignment <= MALLOC_ALIGNMENT)
return malloc_check (bytes, NULL);
if (alignment < MINSIZE)
alignment = MINSIZE;
/* If the alignment is greater than SIZE_MAX / 2 + 1 it cannot be a
power of 2 and will cause overflow in the check below. */
@ -377,9 +414,11 @@ memalign_check(size_t alignment, size_t bytes, const void *caller)
}
/* Make sure alignment is power of 2. */
if (!powerof2(alignment)) {
if (!powerof2 (alignment))
{
size_t a = MALLOC_ALIGNMENT * 2;
while (a < alignment) a <<= 1;
while (a < alignment)
a <<= 1;
alignment = a;
}
@ -410,7 +449,8 @@ memalign_check(size_t alignment, size_t bytes, const void *caller)
#define MALLOC_STATE_MAGIC 0x444c4541l
#define MALLOC_STATE_VERSION (0 * 0x100l + 4l) /* major*0x100 + minor */
struct malloc_save_state {
struct malloc_save_state
{
long magic;
long version;
mbinptr av[NBINS * 2 + 2];
@ -444,6 +484,7 @@ __malloc_get_state(void)
ms = (struct malloc_save_state *) __libc_malloc (sizeof (*ms));
if (!ms)
return 0;
(void) mutex_lock (&main_arena.mutex);
malloc_consolidate (&main_arena);
ms->magic = MALLOC_STATE_MAGIC;
@ -452,11 +493,13 @@ __malloc_get_state(void)
ms->av[1] = 0; /* used to be binblocks, now no longer used */
ms->av[2] = top (&main_arena);
ms->av[3] = 0; /* used to be undefined */
for(i=1; i<NBINS; i++) {
for (i = 1; i < NBINS; i++)
{
b = bin_at (&main_arena, i);
if (first (b) == b)
ms->av[2 * i + 2] = ms->av[2 * i + 3] = 0; /* empty bin */
else {
else
{
ms->av[2 * i + 2] = first (b);
ms->av[2 * i + 3] = last (b);
}
@ -492,9 +535,13 @@ __malloc_set_state(void* msptr)
disallow_malloc_check = 1;
ptmalloc_init ();
if(ms->magic != MALLOC_STATE_MAGIC) return -1;
if (ms->magic != MALLOC_STATE_MAGIC)
return -1;
/* Must fail if the major version is too high. */
if((ms->version & ~0xffl) > (MALLOC_STATE_VERSION & ~0xffl)) return -2;
if ((ms->version & ~0xffl) > (MALLOC_STATE_VERSION & ~0xffl))
return -2;
(void) mutex_lock (&main_arena.mutex);
/* There are no fastchunks. */
clear_fastchunks (&main_arena);
@ -508,15 +555,20 @@ __malloc_set_state(void* msptr)
main_arena.binmap[i] = 0;
top (&main_arena) = ms->av[2];
main_arena.last_remainder = 0;
for(i=1; i<NBINS; i++) {
for (i = 1; i < NBINS; i++)
{
b = bin_at (&main_arena, i);
if(ms->av[2*i+2] == 0) {
if (ms->av[2 * i + 2] == 0)
{
assert (ms->av[2 * i + 3] == 0);
first (b) = last (b) = b;
} else {
}
else
{
if (ms->version >= 3 &&
(i < NSMALLBINS || (largebin_index (chunksize (ms->av[2 * i + 2])) == i &&
largebin_index(chunksize(ms->av[2*i+3]))==i))) {
largebin_index (chunksize (ms->av[2 * i + 3])) == i)))
{
first (b) = ms->av[2 * i + 2];
last (b) = ms->av[2 * i + 3];
/* Make sure the links to the bins within the heap are correct. */
@ -524,7 +576,9 @@ __malloc_set_state(void* msptr)
last (b)->fd = b;
/* Set bit in binblocks. */
mark_bin (&main_arena, i);
} else {
}
else
{
/* Oops, index computation from chunksize must have changed.
Link the whole list into unsorted_chunks. */
first (b) = last (b) = b;
@ -536,11 +590,14 @@ __malloc_set_state(void* msptr)
}
}
}
if (ms->version < 3) {
if (ms->version < 3)
{
/* Clear fd_nextsize and bk_nextsize fields. */
b = unsorted_chunks (&main_arena)->fd;
while (b != unsorted_chunks(&main_arena)) {
if (!in_smallbin_range(chunksize(b))) {
while (b != unsorted_chunks (&main_arena))
{
if (!in_smallbin_range (chunksize (b)))
{
b->fd_nextsize = NULL;
b->bk_nextsize = NULL;
}
@ -560,13 +617,15 @@ __malloc_set_state(void* msptr)
mp_.mmapped_mem = ms->mmapped_mem;
mp_.max_mmapped_mem = ms->max_mmapped_mem;
/* add version-dependent code here */
if (ms->version >= 1) {
if (ms->version >= 1)
{
/* Check whether it is safe to enable malloc checking, or whether
it is necessary to disable it. */
if (ms->using_malloc_checking && !using_malloc_checking &&
!disallow_malloc_check)
__malloc_check_init ();
else if (!ms->using_malloc_checking && using_malloc_checking) {
else if (!ms->using_malloc_checking && using_malloc_checking)
{
__malloc_hook = NULL;
__free_hook = NULL;
__realloc_hook = NULL;
@ -574,7 +633,8 @@ __malloc_set_state(void* msptr)
using_malloc_checking = 0;
}
}
if (ms->version >= 4) {
if (ms->version >= 4)
{
mp_.arena_test = ms->arena_test;
mp_.arena_max = ms->arena_max;
narenas = ms->narenas;

malloc/malloc.c (diff suppressed because it is too large)

malloc/malloc.h

@ -168,5 +168,4 @@ extern void __malloc_check_init (void) __THROW __MALLOC_DEPRECATED;
__END_DECLS
#endif /* malloc.h */

malloc/mcheck.c

@ -69,8 +69,7 @@ static int pedantic;
# define flood memset
#else
static void flood (__ptr_t, int, size_t);
static void
flood (ptr, val, size)
static void flood (ptr, val, size)
__ptr_t ptr;
int val;
size_t size;
@ -372,8 +371,7 @@ mabort (enum mcheck_status status)
#define malloc_opt_barrier(x) \
({ __typeof (x) __x = x; __asm ("" : "+m" (__x)); __x; })
int
mcheck (func)
int mcheck (func)
void (*func)(enum mcheck_status);
{
abortfunc = (func != NULL) ? func : &mabort;
@ -404,8 +402,7 @@ mcheck (func)
libc_hidden_def (mcheck)
#endif
int
mcheck_pedantic (func)
int mcheck_pedantic (func)
void (*func)(enum mcheck_status);
{
int res = mcheck (func);

malloc/mcheck.h

@ -57,5 +57,4 @@ extern void mtrace (void) __THROW;
extern void muntrace (void) __THROW;
__END_DECLS
#endif /* mcheck.h */

malloc/memusage.c

@ -334,6 +334,7 @@ malloc (size_t len)
{
if (initialized == -1)
return NULL;
me ();
}
@ -385,6 +386,7 @@ realloc (void *old, size_t len)
{
if (initialized == -1)
return NULL;
me ();
}
@ -404,6 +406,7 @@ realloc (void *old, size_t len)
if (real->magic != MAGIC)
/* This is no memory allocated here. */
return (*reallocp)(old, len);
old_len = real->length;
}
@ -477,6 +480,7 @@ calloc (size_t n, size_t len)
{
if (initialized == -1)
return NULL;
me ();
}
@ -526,6 +530,7 @@ free (void *ptr)
{
if (initialized == -1)
return;
me ();
}
@ -577,6 +582,7 @@ mmap (void *start, size_t len, int prot, int flags, int fd, off_t offset)
{
if (initialized == -1)
return NULL;
me ();
}
@ -629,6 +635,7 @@ mmap64 (void *start, size_t len, int prot, int flags, int fd, off64_t offset)
{
if (initialized == -1)
return NULL;
me ();
}
@ -686,6 +693,7 @@ mremap (void *start, size_t old_len, size_t len, int flags, ...)
{
if (initialized == -1)
return NULL;
me ();
}
@ -746,6 +754,7 @@ munmap (void *start, size_t len)
{
if (initialized == -1)
return -1;
me ();
}
@ -785,6 +794,7 @@ dest (void)
/* If we haven't done anything here just return. */
if (not_me)
return;
/* If we should call any of the memory functions don't do any profiling. */
not_me = true;

malloc/memusagestat.c

@ -564,7 +564,9 @@ more_help (int key, const char *text, void *input)
For bug reporting instructions, please see:\n\
%s.\n"), REPORT_BUGS_TO) < 0)
return NULL;
return tp;
default:
break;
}

malloc/morecore.c

@ -47,6 +47,7 @@ __default_morecore (ptrdiff_t increment)
void *result = (void *) __sbrk (increment);
if (result == (void *) -1)
return NULL;
return result;
}
libc_hidden_def (__default_morecore)

malloc/mtrace.c

@ -79,8 +79,7 @@ libc_hidden_def (tr_break)
static void tr_where (const __ptr_t, Dl_info *) __THROW internal_function;
static void
internal_function
tr_where (caller, info)
internal_function tr_where (caller, info)
const __ptr_t caller;
Dl_info *info;
{
@ -131,8 +130,7 @@ lock_and_info (const __ptr_t caller, Dl_info *mem)
static void tr_freehook (__ptr_t, const __ptr_t) __THROW;
static void
tr_freehook (ptr, caller)
static void tr_freehook (ptr, caller)
__ptr_t ptr;
const __ptr_t caller;
{
@ -160,8 +158,7 @@ tr_freehook (ptr, caller)
}
static __ptr_t tr_mallochook (size_t, const __ptr_t) __THROW;
static __ptr_t
tr_mallochook (size, caller)
static __ptr_t tr_mallochook (size, caller)
size_t size;
const __ptr_t caller;
{
@ -191,8 +188,7 @@ tr_mallochook (size, caller)
static __ptr_t tr_reallochook (__ptr_t, size_t, const __ptr_t)
__THROW;
static __ptr_t
tr_reallochook (ptr, size, caller)
static __ptr_t tr_reallochook (ptr, size, caller)
__ptr_t ptr;
size_t size;
const __ptr_t caller;
@ -244,8 +240,7 @@ tr_reallochook (ptr, size, caller)
static __ptr_t tr_memalignhook (size_t, size_t,
const __ptr_t) __THROW;
static __ptr_t
tr_memalignhook (alignment, size, caller)
static __ptr_t tr_memalignhook (alignment, size, caller)
size_t alignment, size;
const __ptr_t caller;
{

malloc/obstack.c

@ -139,7 +139,7 @@ compat_symbol (libc, _obstack_compat, _obstack, GLIBC_2_0);
(*(void (*)(void *))(h)->freefun)((old_chunk)); \
} while (0)
/* Initialize an obstack H for use. Specify chunk size SIZE (0 means default).
Objects start on multiples of ALIGNMENT (0 means use default).
CHUNKFUN is the function to use to allocate chunks,
@ -343,7 +343,7 @@ _obstack_allocated_p (struct obstack *h, void *obj)
}
return lp != 0;
}
/* Free objects in obstack H, including OBJ and everything allocate
more recently than OBJ. If OBJ is zero, free everything in H. */
@ -384,7 +384,7 @@ obstack_free (struct obstack *h, void *obj)
called by non-GCC compilers. */
strong_alias (obstack_free, _obstack_free)
# endif
int
_obstack_memory_used (struct obstack *h)
{
@ -397,7 +397,7 @@ _obstack_memory_used (struct obstack *h)
}
return nbytes;
}
/* Define the error handler. */
# ifdef _LIBC
# include <libintl.h>
@ -435,5 +435,4 @@ print_and_abort (void)
# endif
exit (obstack_exit_failure);
}
#endif /* !ELIDE_CODE */

malloc/obstack.h

@ -107,7 +107,7 @@ Summary:
#ifdef __cplusplus
extern "C" {
#endif
/* We need the type of a pointer subtraction. If __PTRDIFF_TYPE__ is
defined, as with GNU C, use that; that way we don't pollute the
namespace with <stddef.h>'s symbols. Otherwise, include <stddef.h>
@ -187,7 +187,7 @@ extern int _obstack_memory_used (struct obstack *);
void obstack_free (struct obstack *__obstack, void *__glibc_block);
/* Error handler called when `obstack_chunk_alloc' failed to allocate
more memory. This can be set to a user defined function which
should either abort gracefully or use longjump - but shouldn't
@ -196,7 +196,7 @@ extern void (*obstack_alloc_failed_handler) (void);
/* Exit value used when `print_and_abort' is used. */
extern int obstack_exit_failure;
/* Pointer to beginning of object being allocated or to be allocated next.
Note that this might not be the final address of the object
because a new chunk might be needed to hold the final size. */
@ -247,7 +247,7 @@ extern int obstack_exit_failure;
#define obstack_blank_fast(h, n) ((h)->next_free += (n))
#define obstack_memory_used(h) _obstack_memory_used (h)
#if defined __GNUC__
/* NextStep 2.0 cc is really gcc 1.93 but it defines __GNUC__ = 2 and
does not implement __extension__. But that compiler doesn't define
@ -399,7 +399,7 @@ __extension__ \
if (__obj > (void *) __o->chunk && __obj < (void *) __o->chunk_limit) \
__o->next_free = __o->object_base = (char *) __obj; \
else (obstack_free) (__o, __obj); })
#else /* not __GNUC__ */
# define obstack_object_size(h) \
@ -497,11 +497,9 @@ __extension__ \
? (((h)->next_free = (h)->object_base \
= (h)->temp.tempint + (char *) (h)->chunk), 0) \
: ((obstack_free) ((h), (h)->temp.tempint + (char *) (h)->chunk), 0)))
#endif /* not __GNUC__ */
#ifdef __cplusplus
} /* C++ */
#endif
#endif /* obstack.h */