[PATCH] ARM SMP: Use exclusive load/store for __xchg

Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
Author: Russell King, 2005-07-26 19:39:31 +01:00 (committed by Russell King)
parent 6b6a93c687
commit 9560782f9a
1 changed file with 59 additions and 30 deletions
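The patch below switches __xchg() to the ARMv6 exclusive-access instructions: ldrex/ldrexb loads the old value and arms the exclusive monitor for that address, strex/strexb attempts the store and writes 0 to a status register only if no other observer touched the location, and the loop retries otherwise. A minimal standalone sketch of the word-sized loop (hypothetical name xchg_u32_sketch; assumes an ARMv6+ target and GCC-style inline assembly, not part of the patch itself):

	/* Sketch: atomically exchange *ptr with x on ARMv6+, mirroring the
	 * ldrex/strex retry loop this patch introduces. */
	static inline unsigned long xchg_u32_sketch(unsigned long x,
						    volatile unsigned long *ptr)
	{
		unsigned long ret, tmp;

		asm volatile(
		"1:	ldrex	%0, [%3]\n"	/* ret = *ptr, arm exclusive monitor */
		"	strex	%1, %2, [%3]\n"	/* try *ptr = x; tmp = 0 on success */
		"	teq	%1, #0\n"	/* did anyone intervene? */
		"	bne	1b"		/* yes: retry from the load */
			: "=&r" (ret), "=&r" (tmp)
			: "r" (x), "r" (ptr)
			: "memory", "cc");
		return ret;
	}

The early-clobber "=&r" constraints keep ret and tmp out of the registers holding x and ptr, since both outputs are written before the inputs are last used.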

@@ -323,12 +323,8 @@ do { \
  * NOTE that this solution won't work on an SMP system, so explicitly
  * forbid it here.
  */
-#ifdef CONFIG_SMP
-#error SMP is not supported on SA1100/SA110
-#else
 #define swp_is_buggy
 #endif
-#endif
 
 static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
 {
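For CPUs where swp itself is unreliable (the SA1100/SA110 case that defines swp_is_buggy), the fallback rearranged by the next hunk disables interrupts around a plain load/store pair. That is atomic only against code on the same CPU, which is why the NOTE above rules it out for SMP: a second processor is unaffected by the first one's IRQ mask and can write *ptr between the two accesses. A sketch of that fallback shape (illustrative name xchg_up_sketch; assumes kernel context where local_irq_save()/local_irq_restore() are available):

	/* UP-only exchange: safe against local IRQs, not against other CPUs. */
	static inline unsigned long xchg_up_sketch(unsigned long x,
						   volatile unsigned long *ptr)
	{
		unsigned long flags, ret;

		local_irq_save(flags);	/* masks interrupts on this CPU only */
		ret = *ptr;		/* another CPU could store to *ptr here... */
		*ptr = x;		/* ...so the pair is not atomic on SMP */
		local_irq_restore(flags);
		return ret;
	}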
@@ -337,35 +333,68 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
 #ifdef swp_is_buggy
 	unsigned long flags;
 #endif
+#if __LINUX_ARM_ARCH__ >= 6
+	unsigned int tmp;
+#endif
 
 	switch (size) {
-#ifdef swp_is_buggy
-	case 1:
-		local_irq_save(flags);
-		ret = *(volatile unsigned char *)ptr;
-		*(volatile unsigned char *)ptr = x;
-		local_irq_restore(flags);
-		break;
-
-	case 4:
-		local_irq_save(flags);
-		ret = *(volatile unsigned long *)ptr;
-		*(volatile unsigned long *)ptr = x;
-		local_irq_restore(flags);
-		break;
-#else
-	case 1:	__asm__ __volatile__ ("swpb %0, %1, [%2]"
-					: "=&r" (ret)
-					: "r" (x), "r" (ptr)
-					: "memory", "cc");
-		break;
-	case 4:	__asm__ __volatile__ ("swp %0, %1, [%2]"
-					: "=&r" (ret)
-					: "r" (x), "r" (ptr)
-					: "memory", "cc");
-		break;
+#if __LINUX_ARM_ARCH__ >= 6
+	case 1:
+		asm volatile("@	__xchg1\n"
+		"1:	ldrexb	%0, [%3]\n"
+		"	strexb	%1, %2, [%3]\n"
+		"	teq	%1, #0\n"
+		"	bne	1b"
+			: "=&r" (ret), "=&r" (tmp)
+			: "r" (x), "r" (ptr)
+			: "memory", "cc");
+		break;
+	case 4:
+		asm volatile("@	__xchg4\n"
+		"1:	ldrex	%0, [%3]\n"
+		"	strex	%1, %2, [%3]\n"
+		"	teq	%1, #0\n"
+		"	bne	1b"
+			: "=&r" (ret), "=&r" (tmp)
+			: "r" (x), "r" (ptr)
+			: "memory", "cc");
+		break;
+#elif defined(swp_is_buggy)
+#ifdef CONFIG_SMP
+#error SMP is not supported on this platform
+#endif
+	case 1:
+		local_irq_save(flags);
+		ret = *(volatile unsigned char *)ptr;
+		*(volatile unsigned char *)ptr = x;
+		local_irq_restore(flags);
+		break;
+
+	case 4:
+		local_irq_save(flags);
+		ret = *(volatile unsigned long *)ptr;
+		*(volatile unsigned long *)ptr = x;
+		local_irq_restore(flags);
+		break;
+#else
+	case 1:
+		asm volatile("@	__xchg1\n"
+		"	swpb	%0, %1, [%2]"
+			: "=&r" (ret)
+			: "r" (x), "r" (ptr)
+			: "memory", "cc");
+		break;
+	case 4:
+		asm volatile("@	__xchg4\n"
+		"	swp	%0, %1, [%2]"
+			: "=&r" (ret)
+			: "r" (x), "r" (ptr)
+			: "memory", "cc");
+		break;
 #endif
-	default: __bad_xchg(ptr, size), ret = 0;
+	default:
+		__bad_xchg(ptr, size), ret = 0;
+		break;
 	}
 
 	return ret;
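
Callers do not invoke __xchg() directly: the xchg() macro in this header passes sizeof(*(ptr)) as the size argument, so the switch above selects the byte or word path and the result is cast back to the pointee type. A toy use of those semantics (hypothetical names lock_word/toy_spin_lock; assumes kernel context where xchg() is visible):

	/* Trivial test-and-set lock built on xchg(); the unsigned long
	 * operand is 4 bytes on 32-bit ARM, selecting case 4 above. */
	static unsigned long lock_word;	/* 0 = unlocked, 1 = locked */

	static void toy_spin_lock(void)
	{
		while (xchg(&lock_word, 1) != 0)
			;	/* spin until the old value was 0 (lock acquired) */
	}

	static void toy_spin_unlock(void)
	{
		xchg(&lock_word, 0);	/* atomically release; old value ignored */
	}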