ARM: v6k: select cmpxchg code sequences according to V6 variants

If CONFIG_CPU_V6 is enabled, we must avoid the byte/halfword/doubleword
exclusive operations, which aren't implemented before V6K.  Use the
generic versions (or omit them) instead.

If CONFIG_CPU_V6 is not set, but CONFIG_CPU_32v6K is enabled, we have
the K extensions, so use these new instructions.

Acked-by: Tony Lindgren <tony@atomide.com>
Tested-by: Sourav Poddar <sourav.poddar@ti.com>
Tested-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
This commit is contained in:
Russell King 2011-01-17 15:42:42 +00:00
parent 7db44c75a2
commit 4ed67a5359
1 changed file with 9 additions and 8 deletions

View File

@ -347,6 +347,7 @@ void cpu_idle_wait(void);
#include <asm-generic/cmpxchg-local.h> #include <asm-generic/cmpxchg-local.h>
#if __LINUX_ARM_ARCH__ < 6 #if __LINUX_ARM_ARCH__ < 6
/* min ARCH < ARMv6 */
#ifdef CONFIG_SMP #ifdef CONFIG_SMP
#error "SMP is not supported on this platform" #error "SMP is not supported on this platform"
@ -365,7 +366,7 @@ void cpu_idle_wait(void);
#include <asm-generic/cmpxchg.h> #include <asm-generic/cmpxchg.h>
#endif #endif
#else /* __LINUX_ARM_ARCH__ >= 6 */ #else /* min ARCH >= ARMv6 */
extern void __bad_cmpxchg(volatile void *ptr, int size); extern void __bad_cmpxchg(volatile void *ptr, int size);
@ -379,7 +380,7 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
unsigned long oldval, res; unsigned long oldval, res;
switch (size) { switch (size) {
#ifdef CONFIG_CPU_32v6K #ifndef CONFIG_CPU_V6 /* min ARCH >= ARMv6K */
case 1: case 1:
do { do {
asm volatile("@ __cmpxchg1\n" asm volatile("@ __cmpxchg1\n"
@ -404,7 +405,7 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
: "memory", "cc"); : "memory", "cc");
} while (res); } while (res);
break; break;
#endif /* CONFIG_CPU_32v6K */ #endif
case 4: case 4:
do { do {
asm volatile("@ __cmpxchg4\n" asm volatile("@ __cmpxchg4\n"
@ -450,12 +451,12 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
unsigned long ret; unsigned long ret;
switch (size) { switch (size) {
#ifndef CONFIG_CPU_32v6K #ifdef CONFIG_CPU_V6 /* min ARCH == ARMv6 */
case 1: case 1:
case 2: case 2:
ret = __cmpxchg_local_generic(ptr, old, new, size); ret = __cmpxchg_local_generic(ptr, old, new, size);
break; break;
#endif /* !CONFIG_CPU_32v6K */ #endif
default: default:
ret = __cmpxchg(ptr, old, new, size); ret = __cmpxchg(ptr, old, new, size);
} }
@ -469,7 +470,7 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
(unsigned long)(n), \ (unsigned long)(n), \
sizeof(*(ptr)))) sizeof(*(ptr))))
#ifdef CONFIG_CPU_32v6K #ifndef CONFIG_CPU_V6 /* min ARCH >= ARMv6K */
/* /*
* Note : ARMv7-M (currently unsupported by Linux) does not support * Note : ARMv7-M (currently unsupported by Linux) does not support
@ -524,11 +525,11 @@ static inline unsigned long long __cmpxchg64_mb(volatile void *ptr,
(unsigned long long)(o), \ (unsigned long long)(o), \
(unsigned long long)(n))) (unsigned long long)(n)))
#else /* !CONFIG_CPU_32v6K */ #else /* min ARCH = ARMv6 */
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n)) #define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif /* CONFIG_CPU_32v6K */ #endif
#endif /* __LINUX_ARM_ARCH__ >= 6 */ #endif /* __LINUX_ARM_ARCH__ >= 6 */