2008-06-01  Paul Brook  <paul@codesourcery.com>
	    Zack Weinberg  <zack@codesourcery.com>
	    Daniel Jacobowitz  <dan@codesourcery.com>

	* sysdeps/arm/nptl/pthread_spin_lock.S,
	sysdeps/arm/nptl/pthread_spin_trylock.S: Delete.
	* sysdeps/arm/nptl/pthread_spin_lock.c,
	sysdeps/arm/nptl/pthread_spin_trylock.c: New files using
	atomic_compare_and_exchange_val_acq to take spinlocks.
	* sysdeps/unix/sysv/linux/arm/nptl/bits/atomic.h (lll_trylock,
	lll_cond_trylock): Use atomic_compare_and_exchange_val_acq.
	(__lll_trylock, __lll_cond_trylock): Delete.
	* sysdeps/unix/sysv/linux/arm/nptl/bits/atomic.h
	(atomic_exchange_acq): Delete.
	(atomic_full_barrier): Define.
	(__arch_compare_and_exchange_val_32_acq): Use named operands.
	* sysdeps/unix/sysv/linux/arm/eabi/configure.in: Update
	arch_minimum_kernel to 2.6.16.
	* sysdeps/unix/sysv/linux/arm/eabi/configure: Regenerated.
This commit is contained in:
Daniel Jacobowitz 2008-06-02 01:57:03 +00:00
parent 83d53ff1aa
commit 1ba025a9a2
7 changed files with 61 additions and 90 deletions

View File

@@ -1,3 +1,23 @@
2008-06-01 Paul Brook <paul@codesourcery.com>
Zack Weinberg <zack@codesourcery.com>
Daniel Jacobowitz <dan@codesourcery.com>
* sysdeps/arm/nptl/pthread_spin_lock.S,
sysdeps/arm/nptl/pthread_spin_trylock.S: Delete.
* sysdeps/arm/nptl/pthread_spin_lock.c,
sysdeps/arm/nptl/pthread_spin_trylock.c: New files using
atomic_compare_and_exchange_val_acq to take spinlocks.
* sysdeps/unix/sysv/linux/arm/nptl/bits/atomic.h (lll_trylock,
lll_cond_trylock): Use atomic_compare_and_exchange_val_acq.
(__lll_trylock, __lll_cond_trylock): Delete.
* sysdeps/unix/sysv/linux/arm/nptl/bits/atomic.h
(atomic_exchange_acq): Delete.
(atomic_full_barrier): Define.
(__arch_compare_and_exchange_val_32_acq): Use named operands.
* sysdeps/unix/sysv/linux/arm/eabi/configure.in: Update
arch_minimum_kernel to 2.6.16.
* sysdeps/unix/sysv/linux/arm/eabi/configure: Regenerated.
2008-04-21 Daniel Jacobowitz <dan@codesourcery.com>
* sysdeps/unix/sysv/linux/arm/check_pf.c: Update from generic

View File

@@ -1,4 +1,4 @@
/* Copyright (C) 2005 Free Software Foundation, Inc.
/* Copyright (C) 2008 Free Software Foundation, Inc.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
@@ -16,19 +16,15 @@
Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
02111-1307 USA. */
#define _ERRNO_H 1
#include <bits/errno.h>
#include <atomic.h>
#include "pthreadP.h"
#include <sysdep.h>
/* NOTE(review): this span is a rendered diff hunk, not compilable source.
   The C lines are the NEW pthread_spin_lock implementation (from
   pthread_spin_lock.c); the assembly lines are the OLD
   pthread_spin_trylock.S body deleted by this same commit (see the
   ChangeLog entry above).  The two are interleaved by the diff viewer.  */
int
pthread_spin_lock (pthread_spinlock_t *lock)
{
/* Attempt the atomic compare-and-exchange; while it fails, spin with
   plain reads of *lock until it looks free, then retry the CAS.  */
while (atomic_compare_and_exchange_val_acq (lock, 1, 0) != 0)
while (*lock != 0)
;
/* --- deleted ARM assembly (old pthread_spin_trylock.S) follows --- */
.text
.align 4
ENTRY (pthread_spin_trylock)
mov r1, #1
swp r2, r1, [r0]
teq r2, #0
moveq r0, #0
movne r0, #EBUSY
PSEUDO_RET_NOERRNO
END (pthread_spin_trylock)
/* --- end deleted assembly; the new C function resumes here --- */
return 0;
}

View File

@@ -1,4 +1,4 @@
/* Copyright (C) 2005 Free Software Foundation, Inc.
/* Copyright (C) 2008 Free Software Foundation, Inc.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
@@ -16,16 +16,12 @@
Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
02111-1307 USA. */
#include <sysdep.h>
#include <errno.h>
#include <atomic.h>
#include "pthreadP.h"
.text
.align 4
ENTRY (pthread_spin_lock)
mov r1, #1
1: swp r2, r1, [r0]
teq r2, #0
bne 1b
mov r0, #0
PSEUDO_RET_NOERRNO
END (pthread_spin_lock)
/* Try once to acquire LOCK without blocking.  Returns 0 when the lock
   was free and is now taken, EBUSY when it was already held (the CAS
   observed a non-zero old value).  */
int
pthread_spin_trylock (pthread_spinlock_t *lock)
{
  if (atomic_compare_and_exchange_val_acq (lock, 1, 0) != 0)
    return EBUSY;

  return 0;
}

View File

@@ -1,5 +1,5 @@
# This file is generated from configure.in by Autoconf. DO NOT EDIT!
# Local configure fragment for sysdeps/unix/sysv/linux/arm/eabi.
arch_minimum_kernel=2.6.14
arch_minimum_kernel=2.6.16
libc_cv_gcc_unwind_find_fde=no

View File

@@ -1,5 +1,5 @@
GLIBC_PROVIDES dnl See aclocal.m4 in the top level source directory.
# Local configure fragment for sysdeps/unix/sysv/linux/arm/eabi.
arch_minimum_kernel=2.6.14
arch_minimum_kernel=2.6.16
libc_cv_gcc_unwind_find_fde=no

View File

@@ -37,22 +37,12 @@ typedef uintmax_t uatomic_max_t;
/* Declared but not defined in view; presumably exists to provoke a link
   error for unsupported atomic operand sizes — TODO confirm (this macro
   itself aborts at run time rather than calling it).  */
void __arm_link_error (void);
/* NOTE(review): OLD SWP-based exchange, deleted by this commit per the
   ChangeLog ("(atomic_exchange_acq): Delete").  SWPB/SWP atomically swap
   a register with a 1-byte/4-byte memory operand; any other size falls
   through to abort ().  Shown here as removed hunk lines.  */
#define atomic_exchange_acq(mem, newvalue) \
({ __typeof (*mem) result; \
if (sizeof (*mem) == 1) \
__asm__ __volatile__ ("swpb %0, %1, [%2]" \
: "=&r,&r" (result) \
: "r,0" (newvalue), "r,r" (mem) : "memory"); \
else if (sizeof (*mem) == 4) \
__asm__ __volatile__ ("swp %0, %1, [%2]" \
: "=&r,&r" (result) \
: "r,0" (newvalue), "r,r" (mem) : "memory"); \
else \
{ \
result = 0; \
abort (); \
} \
result; })
/* Full memory barrier.  NOTE(review): this calls through the fixed
   address 0xffff0fa0, which looks like the ARM kernel "kuser" memory
   barrier helper (consistent with the ChangeLog bumping
   arch_minimum_kernel to 2.6.16) — confirm against the kernel's kuser
   helper ABI.  Clobbers ip, lr, and the condition codes.  */
#define atomic_full_barrier() \
__asm__ __volatile__ \
("mov\tip, #0xffff0fff\n\t" \
"mov\tlr, pc\n\t" \
"add\tpc, ip, #(0xffff0fa0 - 0xffff0fff)" \
: : : "ip", "lr", "cc", "memory");
/* Atomic compare and exchange. This sequence relies on the kernel to
provide a compare and exchange operation which is atomic on the
@@ -76,18 +66,19 @@ void __arm_link_error (void);
register __typeof (oldval) a_tmp asm ("r3"); \
register __typeof (oldval) a_oldval2 asm ("r4") = (oldval); \
__asm__ __volatile__ \
("0:\tldr\t%1,[%3]\n\t" \
"cmp\t%1, %4\n\t" \
("0:\tldr\t%[tmp],[%[ptr]]\n\t" \
"cmp\t%[tmp], %[old2]\n\t" \
"bne\t1f\n\t" \
"mov\t%0, %4\n\t" \
"mov\t%1, #0xffff0fff\n\t" \
"mov\t%[old], %[old2]\n\t" \
"mov\t%[tmp], #0xffff0fff\n\t" \
"mov\tlr, pc\n\t" \
"add\tpc, %1, #(0xffff0fc0 - 0xffff0fff)\n\t" \
"add\tpc, %[tmp], #(0xffff0fc0 - 0xffff0fff)\n\t" \
"bcc\t0b\n\t" \
"mov\t%1, %4\n\t" \
"mov\t%[tmp], %[old2]\n\t" \
"1:" \
: "=&r" (a_oldval), "=&r" (a_tmp) \
: "r" (a_newval), "r" (a_ptr), "r" (a_oldval2) \
: [old] "=&r" (a_oldval), [tmp] "=&r" (a_tmp) \
: [new] "r" (a_newval), [ptr] "r" (a_ptr), \
[old2] "r" (a_oldval2) \
: "ip", "lr", "cc", "memory"); \
a_tmp; })

View File

@@ -126,43 +126,11 @@
})
/* OLD SWP-based trylock, deleted by this commit in favor of
   atomic_compare_and_exchange_val_acq (per the ChangeLog above).
   Per the asm's own annotations: returns 0 when the lock was taken,
   non-zero when it was already held.  */
static inline int __attribute__((always_inline))
__lll_trylock (int *futex)
{
/* 1 is the "locked" value this variant tries to install.  */
int flag = 1, old;
asm volatile (
"\tswp %[old], %[flag], [%[futex]] @ try to take the lock\n"
"\tcmp %[old], #1 @ check old lock value\n"
"\tmovlo %[flag], #0 @ if we got it, return 0\n"
"\tswphi %[flag], %[old], [%[futex]] @ if it was contested,\n"
" @ restore the contested flag,\n"
" @ and check whether that won."
: [futex] "+&r" (futex), [flag] "+&r" (flag), [old] "=&r" (old)
: : "memory" );
return flag;
}
#define lll_trylock(lock) __lll_trylock (&(lock))
/* OLD SWP-based conditional trylock, also deleted by this commit.
   Identical to __lll_trylock except it installs 2 rather than 1;
   NOTE(review): 2 presumably marks the lock as contended (waiters
   present) — confirm against the futex value conventions in
   lowlevellock.h.  */
static inline int __attribute__((always_inline))
__lll_cond_trylock (int *futex)
{
/* 2 is the value this variant tries to install.  */
int flag = 2, old;
asm volatile (
"\tswp %[old], %[flag], [%[futex]] @ try to take the lock\n"
"\tcmp %[old], #1 @ check old lock value\n"
"\tmovlo %[flag], #0 @ if we got it, return 0\n"
"\tswphi %[flag], %[old], [%[futex]] @ if it was contested,\n"
" @ restore the contested flag,\n"
" @ and check whether that won."
: [futex] "+&r" (futex), [flag] "+&r" (flag), [old] "=&r" (old)
: : "memory" );
return flag;
}
#define lll_cond_trylock(lock) __lll_cond_trylock (&(lock))
/* NEW trylock: implemented directly with the generic CAS, replacing the
   deleted SWP versions above.  The CAS returns the old lock value, so
   the expression is 0 exactly when the lock was free and acquired.  */
#define lll_trylock(lock) \
atomic_compare_and_exchange_val_acq(&(lock), 1, 0)
/* Same, but installs 2 (matching the old __lll_cond_trylock).  */
#define lll_cond_trylock(lock) \
atomic_compare_and_exchange_val_acq(&(lock), 2, 0)
/* Robust variant: stores ID, yields non-zero when the CAS failed.  */
#define __lll_robust_trylock(futex, id) \
(atomic_compare_and_exchange_val_acq (futex, id, 0) != 0)