* include/atomic.h: Define catomic_and if not already defined.

* sysdeps/x86_64/bits/atomic.h: Define catomic_and.
	* sysdeps/i386/i486/bits/atomic.h: Likewise.
This commit is contained in:
Ulrich Drepper 2009-02-08 23:50:23 +00:00
parent 6dd6a580df
commit 6c03cd11e9
4 changed files with 64 additions and 27 deletions

View File

@@ -1,3 +1,9 @@
2009-02-08 Ulrich Drepper <drepper@redhat.com>
* include/atomic.h: Define catomic_and if not already defined.
* sysdeps/x86_64/bits/atomic.h: Define catomic_and.
* sysdeps/i386/i486/bits/atomic.h: Likewise.
2009-02-07 Ulrich Drepper <drepper@redhat.com>
* malloc/malloc.c (_int_free): Second argument is now mchunkptr.

View File

@@ -1,5 +1,5 @@
/* Internal macros for atomic operations for GNU C Library.
Copyright (C) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
Copyright (C) 2002-2006, 2009 Free Software Foundation, Inc.
This file is part of the GNU C Library.
Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.
@@ -415,6 +415,22 @@
} while (0)
#endif
/* Atomically *mem &= mask.  Generic fallback built on a
   catomic_compare_and_exchange_bool_acq retry loop; the catomic_*
   family may avoid the bus-lock prefix when the process is known to be
   single-threaded (see the multiple_threads checks in the per-arch
   bits/atomic.h variants below).  */
#ifndef catomic_and
# define catomic_and(mem, mask) \
do { \
__typeof (*(mem)) __atg20_old; \
__typeof (mem) __atg20_memp = (mem); \
__typeof (*(mem)) __atg20_mask = (mask); \
\
do \
__atg20_old = (*__atg20_memp); \
while (__builtin_expect \
(catomic_compare_and_exchange_bool_acq (__atg20_memp, \
__atg20_old & __atg20_mask,\
__atg20_old), 0)); \
} while (0)
#endif
/* Atomically *mem &= mask and return the old value of *mem. */
#ifndef atomic_and_val
# define atomic_and_val(mem, mask) \

View File

@@ -1,4 +1,4 @@
/* Copyright (C) 2002, 2003, 2004, 2006, 2007 Free Software Foundation, Inc.
/* Copyright (C) 2002-2004, 2006, 2007, 2009 Free Software Foundation, Inc.
This file is part of the GNU C Library.
Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.
@@ -500,24 +500,34 @@ typedef uintmax_t uatomic_max_t;
#define atomic_delay() asm ("rep; nop")
#define atomic_and(mem, mask) \
#define __arch_and_body(lock, mem, mask) \
do { \
if (sizeof (*mem) == 1) \
__asm __volatile (LOCK_PREFIX "andb %b1, %0" \
__asm __volatile (lock "andb %b1, %0" \
: "=m" (*mem) \
: "iq" (mask), "m" (*mem)); \
: "iq" (mask), "m" (*mem), \
"i" (offsetof (tcbhead_t, multiple_threads))); \
else if (sizeof (*mem) == 2) \
__asm __volatile (LOCK_PREFIX "andw %w1, %0" \
__asm __volatile (lock "andw %w1, %0" \
: "=m" (*mem) \
: "ir" (mask), "m" (*mem)); \
: "ir" (mask), "m" (*mem), \
"i" (offsetof (tcbhead_t, multiple_threads))); \
else if (sizeof (*mem) == 4) \
__asm __volatile (LOCK_PREFIX "andl %1, %0" \
__asm __volatile (lock "andl %1, %0" \
: "=m" (*mem) \
: "ir" (mask), "m" (*mem)); \
: "ir" (mask), "m" (*mem), \
"i" (offsetof (tcbhead_t, multiple_threads))); \
else \
abort (); \
} while (0)
#define __arch_cprefix \
"cmpl $0, %%gs:%P3\n\tje 0f\n\tlock\n0:\t"
#define atomic_and(mem, mask) __arch_and_body (LOCK_PREFIX, mem, mask)
#define catomic_and(mem, mask) __arch_and_body (__arch_cprefix, mem, mask)
#define __arch_or_body(lock, mem, mask) \
do { \
@@ -542,7 +552,4 @@ typedef uintmax_t uatomic_max_t;
#define atomic_or(mem, mask) __arch_or_body (LOCK_PREFIX, mem, mask)
#define __arch_or_cprefix \
"cmpl $0, %%gs:%P3\n\tje 0f\n\tlock\n0:\t"
#define catomic_or(mem, mask) __arch_or_body (__arch_or_cprefix, mem, mask)
#define catomic_or(mem, mask) __arch_or_body (__arch_cprefix, mem, mask)

View File

@@ -1,4 +1,4 @@
/* Copyright (C) 2002, 2003, 2004, 2006, 2007 Free Software Foundation, Inc.
/* Copyright (C) 2002-2004, 2006, 2007, 2009 Free Software Foundation, Inc.
This file is part of the GNU C Library.
Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.
@@ -437,26 +437,37 @@ typedef uintmax_t uatomic_max_t;
#define atomic_delay() asm ("rep; nop")
#define atomic_and(mem, mask) \
#define __arch_and_body(lock, mem, mask) \
do { \
if (sizeof (*mem) == 1) \
__asm __volatile (LOCK_PREFIX "andb %b1, %0" \
__asm __volatile (lock "andb %b1, %0" \
: "=m" (*mem) \
: "iq" (mask), "m" (*mem)); \
: "iq" (mask), "m" (*mem), \
"i" (offsetof (tcbhead_t, multiple_threads))); \
else if (sizeof (*mem) == 2) \
__asm __volatile (LOCK_PREFIX "andw %w1, %0" \
__asm __volatile (lock "andw %w1, %0" \
: "=m" (*mem) \
: "ir" (mask), "m" (*mem)); \
: "ir" (mask), "m" (*mem), \
"i" (offsetof (tcbhead_t, multiple_threads))); \
else if (sizeof (*mem) == 4) \
__asm __volatile (LOCK_PREFIX "andl %1, %0" \
__asm __volatile (lock "andl %1, %0" \
: "=m" (*mem) \
: "ir" (mask), "m" (*mem)); \
: "ir" (mask), "m" (*mem), \
"i" (offsetof (tcbhead_t, multiple_threads))); \
else \
__asm __volatile (LOCK_PREFIX "andq %q1, %0" \
__asm __volatile (lock "andq %q1, %0" \
: "=m" (*mem) \
: "ir" (mask), "m" (*mem)); \
: "ir" (mask), "m" (*mem), \
"i" (offsetof (tcbhead_t, multiple_threads))); \
} while (0)
#define __arch_cprefix \
"cmpl $0, %%fs:%P3\n\tje 0f\n\tlock\n0:\t"
#define atomic_and(mem, mask) __arch_and_body (LOCK_PREFIX, mem, mask)
#define catomic_and(mem, mask) __arch_and_body (__arch_cprefix, mem, mask)
#define __arch_or_body(lock, mem, mask) \
do { \
@@ -484,7 +495,4 @@ typedef uintmax_t uatomic_max_t;
#define atomic_or(mem, mask) __arch_or_body (LOCK_PREFIX, mem, mask)
#define __arch_or_cprefix \
"cmpl $0, %%fs:%P3\n\tje 0f\n\tlock\n0:\t"
#define catomic_or(mem, mask) __arch_or_body (__arch_or_cprefix, mem, mask)
#define catomic_or(mem, mask) __arch_or_body (__arch_cprefix, mem, mask)