author		Ted Unangst <tedu@cvs.openbsd.org>	2013-06-01 20:47:41 +0000
committer	Ted Unangst <tedu@cvs.openbsd.org>	2013-06-01 20:47:41 +0000
commit		2fbfcceb960fb807aa9b99a0597dedde67f9d6f8 (patch)
tree		8898346871e5f91b8f2a10e9d30e44dbbf668349 /lib/librthread/arch
parent		751ab61a6dd86c9f4339695dcb646ee54892c289 (diff)
cleanup and consolidate the spinlock_lock (what a name!) code.
it's now atomic_lock to better reflect its usage, and librthread now
features a new spinlock that's really a ticket lock.
thrsleep can handle both types of lock via a flag in the clock arg.
(temp back compat hack)
remove some old stuff that's accumulated along the way and no longer used.
some feedback from dlg, who is concerned with all things ticket lock.
(you need to boot a new kernel before installing librthread)
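Background on the "spinlock that's really a ticket lock": a ticket lock hands each waiter a ticket from an atomically incremented counter and grants the lock in FIFO order as the owner counter catches up, so waiters cannot starve each other. Below is a minimal C sketch of the idea using GCC's __sync builtins; the struct and function names are hypothetical, and this is not librthread's actual code, which also needs memory barriers and the thrsleep integration mentioned above.

#include <sched.h>

/* Hypothetical ticket lock, for illustration only. */
struct ticket_lock {
	volatile unsigned int next;	/* next ticket to hand out */
	volatile unsigned int owner;	/* ticket currently holding the lock */
};

static void
ticket_lock_enter(struct ticket_lock *l)
{
	/* Atomically take a ticket, then wait until it is served. */
	unsigned int me = __sync_fetch_and_add(&l->next, 1);

	while (l->owner != me)
		sched_yield();	/* a real lock might spin or thrsleep */
}

static void
ticket_lock_leave(struct ticket_lock *l)
{
	/* Serve the next waiter in FIFO order; real code needs a
	 * release barrier here. */
	l->owner++;
}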
Diffstat (limited to 'lib/librthread/arch')
-rw-r--r--	lib/librthread/arch/alpha/_atomic_lock.S	 4
-rw-r--r--	lib/librthread/arch/amd64/_atomic_lock.c	12
-rw-r--r--	lib/librthread/arch/arm/_atomic_lock.c		14
-rw-r--r--	lib/librthread/arch/hppa/_atomic_lock.c		10
-rw-r--r--	lib/librthread/arch/hppa64/_atomic_lock.c	10
-rw-r--r--	lib/librthread/arch/i386/_atomic_lock.c		12
-rw-r--r--	lib/librthread/arch/m68k/_atomic_lock.c		14
-rw-r--r--	lib/librthread/arch/m88k/_atomic_lock.c		12
-rw-r--r--	lib/librthread/arch/mips64/_atomic_lock.c	12
-rw-r--r--	lib/librthread/arch/powerpc/_atomic_lock.c	12
-rw-r--r--	lib/librthread/arch/sh/_atomic_lock.c		15
-rw-r--r--	lib/librthread/arch/sparc/_atomic_lock.c	12
-rw-r--r--	lib/librthread/arch/sparc64/_atomic_lock.c	12
-rw-r--r--	lib/librthread/arch/vax/_atomic_lock.c		21
14 files changed, 79 insertions, 93 deletions
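Every per-architecture file in the diff below implements the same contract: _atomic_lock() atomically stores the locked value into the lock word and returns nonzero if the lock was already held, i.e. a test-and-set. The following portable C11 sketch of that contract is for orientation only; the real lock type and locked/unlocked values come from each port's <machine/spinlock.h>, and differ per architecture (sparc's ldstub, for example, sets the byte to all ones).

#include <stdatomic.h>

typedef atomic_int example_atomic_lock_t;	/* per-port type varies */

#define EXAMPLE_UNLOCKED	0		/* illustrative values only */
#define EXAMPLE_LOCKED		1

int
example_atomic_lock(volatile example_atomic_lock_t *lock)
{
	/* Test-and-set: store the locked value, report the old state. */
	return (atomic_exchange(lock, EXAMPLE_LOCKED) != EXAMPLE_UNLOCKED);
}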
diff --git a/lib/librthread/arch/alpha/_atomic_lock.S b/lib/librthread/arch/alpha/_atomic_lock.S
index 41481216c4a..a0b2993cc80 100644
--- a/lib/librthread/arch/alpha/_atomic_lock.S
+++ b/lib/librthread/arch/alpha/_atomic_lock.S
@@ -1,4 +1,4 @@
-/* $OpenBSD: _atomic_lock.S,v 1.1 2005/12/04 05:47:38 brad Exp $ */
+/* $OpenBSD: _atomic_lock.S,v 1.2 2013/06/01 20:47:40 tedu Exp $ */
 /* David Leonard, <d@csee.uq.edu.au>. Public domain. */
 
 #include <machine/asm.h>
@@ -8,7 +8,7 @@
 LEAF(_atomic_lock,1)
 	/* NOTE: using ldl_l/stl_c instead of ldq_l and ldq_c as machine/spinlock.h
-	   defines _spinlock_lock_t as int */
+	   defines _atomic_lock_t as int */
 0:	ldl_l	v0, 0(a0)	/* read existing lock value */
 	mov	1, t0		/* locked value to store */
 	stl_c	t0, 0(a0)	/* attempt to store, status in t0 */
diff --git a/lib/librthread/arch/amd64/_atomic_lock.c b/lib/librthread/arch/amd64/_atomic_lock.c
index 9f60c785f40..f3527aaf081 100644
--- a/lib/librthread/arch/amd64/_atomic_lock.c
+++ b/lib/librthread/arch/amd64/_atomic_lock.c
@@ -1,4 +1,4 @@
-/* $OpenBSD: _atomic_lock.c,v 1.3 2009/06/01 22:52:38 guenther Exp $ */
+/* $OpenBSD: _atomic_lock.c,v 1.4 2013/06/01 20:47:40 tedu Exp $ */
 
 /* David Leonard, <d@csee.uq.edu.au>. Public domain. */
 
@@ -6,21 +6,21 @@
  * Atomic lock for amd64 -- taken from i386 code.
  */
 
-#include <spinlock.h>
+#include <machine/spinlock.h>
 
 int
-_atomic_lock(volatile _spinlock_lock_t *lock)
+_atomic_lock(volatile _atomic_lock_t *lock)
 {
-	_spinlock_lock_t old;
+	_atomic_lock_t old;
 
 	/*
 	 * Use the eXCHanGe instruction to swap the lock value with
 	 * a local variable containing the locked state.
 	 */
-	old = _SPINLOCK_LOCKED;
+	old = _ATOMIC_LOCK_LOCKED;
 	__asm__("xchg %0,(%2)"
 	    : "=r" (old)
 	    : "0" (old), "r" (lock));
-	return (old != _SPINLOCK_UNLOCKED);
+	return (old != _ATOMIC_LOCK_UNLOCKED);
 }
diff --git a/lib/librthread/arch/arm/_atomic_lock.c b/lib/librthread/arch/arm/_atomic_lock.c
index 48bca7da276..6a5956d26d7 100644
--- a/lib/librthread/arch/arm/_atomic_lock.c
+++ b/lib/librthread/arch/arm/_atomic_lock.c
@@ -1,4 +1,4 @@
-/* $OpenBSD: _atomic_lock.c,v 1.3 2013/01/23 20:49:55 patrick Exp $ */
+/* $OpenBSD: _atomic_lock.c,v 1.4 2013/06/01 20:47:40 tedu Exp $ */
 
 /*
  * Copyright (c) 2004 Dale Rahn. All rights reserved.
@@ -28,12 +28,12 @@
  * Atomic lock for arm
  */
 
-#include <spinlock.h>
+#include <machine/spinlock.h>
 
 int
-_atomic_lock(volatile _spinlock_lock_t *lock)
+_atomic_lock(volatile _atomic_lock_t *lock)
 {
-	_spinlock_lock_t old;
+	_atomic_lock_t old;
 
 #ifdef ARM_V7PLUS_LOCKS
 	uint32_t scratch = 0;
@@ -43,12 +43,12 @@ _atomic_lock(volatile _spinlock_lock_t *lock)
 	    "	cmp	%2, #0		\n"
 	    "	bne	1b		\n"
 	    : "+r" (old), "+r" (lock), "+r" (scratch)
-	    : "r" (_SPINLOCK_LOCKED));
+	    : "r" (_ATOMIC_LOCK_LOCKED));
 #else
 	__asm__("swp %0, %2, [%1]"
 	    : "=r" (old), "=r" (lock)
-	    : "r" (_SPINLOCK_LOCKED), "1" (lock) );
+	    : "r" (_ATOMIC_LOCK_LOCKED), "1" (lock) );
 #endif
 
-	return (old != _SPINLOCK_UNLOCKED);
+	return (old != _ATOMIC_LOCK_UNLOCKED);
 }
diff --git a/lib/librthread/arch/hppa/_atomic_lock.c b/lib/librthread/arch/hppa/_atomic_lock.c
index c01fa9f78f7..cdde57c2a0e 100644
--- a/lib/librthread/arch/hppa/_atomic_lock.c
+++ b/lib/librthread/arch/hppa/_atomic_lock.c
@@ -1,4 +1,4 @@
-/* $OpenBSD: _atomic_lock.c,v 1.6 2012/03/03 14:42:33 miod Exp $ */
+/* $OpenBSD: _atomic_lock.c,v 1.7 2013/06/01 20:47:40 tedu Exp $ */
 /*
  * Copyright (c) 2005 Marco Peereboom <marco@openbsd.org>
  *
@@ -15,16 +15,16 @@
  * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  */
 
-#include <spinlock.h>
+#include <machine/spinlock.h>
 #ifdef DIAGNOSTIC
 #include <stdio.h>
 #include <stdlib.h>
 #endif
 
 int
-_atomic_lock(volatile _spinlock_lock_t *lock)
+_atomic_lock(volatile _atomic_lock_t *lock)
 {
-	volatile _spinlock_lock_t old;
+	volatile _atomic_lock_t old;
 
 #ifdef DIAGNOSTIC
 	if ((unsigned long)lock & 0xf) {
@@ -37,5 +37,5 @@ _atomic_lock(volatile _spinlock_lock_t *lock)
 	    : "=&r" (old), "+m" (lock)
 	    : "r" (lock));
 
-	return (old == _SPINLOCK_LOCKED);
+	return (old == _ATOMIC_LOCK_LOCKED);
 }
diff --git a/lib/librthread/arch/hppa64/_atomic_lock.c b/lib/librthread/arch/hppa64/_atomic_lock.c
index 756af207428..886d2d132a0 100644
--- a/lib/librthread/arch/hppa64/_atomic_lock.c
+++ b/lib/librthread/arch/hppa64/_atomic_lock.c
@@ -1,4 +1,4 @@
-/* $OpenBSD: _atomic_lock.c,v 1.1 2012/04/13 14:38:22 jsing Exp $ */
+/* $OpenBSD: _atomic_lock.c,v 1.2 2013/06/01 20:47:40 tedu Exp $ */
 /*
  * Copyright (c) 2005 Marco Peereboom <marco@openbsd.org>
  *
@@ -15,16 +15,16 @@
  * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  */
 
-#include <spinlock.h>
+#include <machine/spinlock.h>
 #ifdef DIAGNOSTIC
 #include <stdio.h>
 #include <stdlib.h>
 #endif
 
 int
-_atomic_lock(volatile _spinlock_lock_t *lock)
+_atomic_lock(volatile _atomic_lock_t *lock)
 {
-	volatile _spinlock_lock_t old;
+	volatile _atomic_lock_t old;
 
 #ifdef DIAGNOSTIC
 	if ((unsigned long)lock & 0xf) {
@@ -37,5 +37,5 @@ _atomic_lock(volatile _spinlock_lock_t *lock)
 	    : "=&r" (old), "+m" (lock)
 	    : "r" (lock));
 
-	return (old == _SPINLOCK_LOCKED);
+	return (old == _ATOMIC_LOCK_LOCKED);
 }
diff --git a/lib/librthread/arch/i386/_atomic_lock.c b/lib/librthread/arch/i386/_atomic_lock.c
index 2a42259680e..b765644fb82 100644
--- a/lib/librthread/arch/i386/_atomic_lock.c
+++ b/lib/librthread/arch/i386/_atomic_lock.c
@@ -1,25 +1,25 @@
-/* $OpenBSD: _atomic_lock.c,v 1.3 2010/12/03 19:44:22 miod Exp $ */
+/* $OpenBSD: _atomic_lock.c,v 1.4 2013/06/01 20:47:40 tedu Exp $ */
 
 /* David Leonard, <d@csee.uq.edu.au>. Public domain. */
 
 /*
  * Atomic lock for i386
  */
 
-#include <spinlock.h>
+#include <machine/spinlock.h>
 
 int
-_atomic_lock(volatile _spinlock_lock_t *lock)
+_atomic_lock(volatile _atomic_lock_t *lock)
 {
-	_spinlock_lock_t old;
+	_atomic_lock_t old;
 
 	/*
 	 * Use the eXCHanGe instruction to swap the lock value with
 	 * a local variable containing the locked state.
 	 */
-	old = _SPINLOCK_LOCKED;
+	old = _ATOMIC_LOCK_LOCKED;
 	__asm__("xchg %0,(%2)"
 	    : "=r" (old)
 	    : "0" (old), "r" (lock));
-	return (old != _SPINLOCK_UNLOCKED);
+	return (old != _ATOMIC_LOCK_UNLOCKED);
 }
diff --git a/lib/librthread/arch/m68k/_atomic_lock.c b/lib/librthread/arch/m68k/_atomic_lock.c
index 50f5dcab8a0..6d1bdacfc37 100644
--- a/lib/librthread/arch/m68k/_atomic_lock.c
+++ b/lib/librthread/arch/m68k/_atomic_lock.c
@@ -1,16 +1,16 @@
-/* $OpenBSD: _atomic_lock.c,v 1.3 2008/10/02 23:29:26 deraadt Exp $ */
+/* $OpenBSD: _atomic_lock.c,v 1.4 2013/06/01 20:47:40 tedu Exp $ */
 
 /* David Leonard, <d@csee.uq.edu.au>. Public domain. */
 
 /*
  * Atomic lock for m68k
  */
 
-#include <spinlock.h>
+#include <machine/spinlock.h>
 
 int
-_atomic_lock(volatile _spinlock_lock_t *lock)
+_atomic_lock(volatile _atomic_lock_t *lock)
 {
-	_spinlock_lock_t old;
+	_atomic_lock_t old;
 
 	/*
 	 * The Compare And Swap instruction (mc68020 and above)
@@ -29,10 +29,10 @@ _atomic_lock(volatile _spinlock_lock_t *lock)
 	 *	if Z then Du -> <ea>
 	 *	else <ea> -> Dc;
 	 */
-	old = _SPINLOCK_UNLOCKED;
+	old = _ATOMIC_LOCK_UNLOCKED;
 	__asm__("casl %0, %2, %1" : "=d" (old), "=m" (*lock)
-				  : "d" (_SPINLOCK_LOCKED),
+				  : "d" (_ATOMIC_LOCK_LOCKED),
 				    "0" (old), "1" (*lock) : "cc");
-	return (old != _SPINLOCK_UNLOCKED);
+	return (old != _ATOMIC_LOCK_UNLOCKED);
 }
diff --git a/lib/librthread/arch/m88k/_atomic_lock.c b/lib/librthread/arch/m88k/_atomic_lock.c
index e1ff84b76c4..445ee7ac065 100644
--- a/lib/librthread/arch/m88k/_atomic_lock.c
+++ b/lib/librthread/arch/m88k/_atomic_lock.c
@@ -1,4 +1,4 @@
-/* $OpenBSD: _atomic_lock.c,v 1.3 2013/01/05 11:20:55 miod Exp $ */
+/* $OpenBSD: _atomic_lock.c,v 1.4 2013/06/01 20:47:40 tedu Exp $ */
 
 /*
  * Copyright (c) 2003, Miodrag Vallat.
@@ -29,16 +29,16 @@
  * Atomic lock for m88k
  */
 
-#include <spinlock.h>
+#include <machine/spinlock.h>
 
 int
-_atomic_lock(volatile _spinlock_lock_t *lock)
+_atomic_lock(volatile _atomic_lock_t *lock)
 {
-	_spinlock_lock_t old;
+	_atomic_lock_t old;
 
-	old = _SPINLOCK_LOCKED;
+	old = _ATOMIC_LOCK_LOCKED;
 	__asm__ __volatile__
 	    ("xmem %0, %2, %%r0" : "=r" (old) : "0" (old), "r" (lock));
 
-	return (old != _SPINLOCK_UNLOCKED);
+	return (old != _ATOMIC_LOCK_UNLOCKED);
 }
diff --git a/lib/librthread/arch/mips64/_atomic_lock.c b/lib/librthread/arch/mips64/_atomic_lock.c
index 3b000eb0914..e02abd40af4 100644
--- a/lib/librthread/arch/mips64/_atomic_lock.c
+++ b/lib/librthread/arch/mips64/_atomic_lock.c
@@ -1,16 +1,16 @@
-/* $OpenBSD: _atomic_lock.c,v 1.5 2013/05/06 00:23:49 guenther Exp $ */
+/* $OpenBSD: _atomic_lock.c,v 1.6 2013/06/01 20:47:40 tedu Exp $ */
 
 /*
  * Atomic lock for mips
  * Written by Miodrag Vallat <miod@openbsd.org> - placed in the public domain.
 */
 
-#include "spinlock.h"
+#include <machine/spinlock.h>
 
 int
-_atomic_lock(volatile _spinlock_lock_t *lock)
+_atomic_lock(volatile _atomic_lock_t *lock)
 {
-	_spinlock_lock_t old;
+	_atomic_lock_t old;
 
 	__asm__ __volatile__ (
 	".set noreorder\n"
@@ -20,8 +20,8 @@ _atomic_lock(volatile _spinlock_lock_t *lock)
 	"	addi	%2, $0, %3\n"
 	".set reorder\n"
 		: "=&r"(old)
-		: "r"(lock), "r"(_SPINLOCK_LOCKED), "i"(_SPINLOCK_LOCKED)
+		: "r"(lock), "r"(_ATOMIC_LOCK_LOCKED), "i"(_SPINLOCK_LOCKED)
 		: "memory");
 
-	return (old != _SPINLOCK_UNLOCKED);
+	return (old != _ATOMIC_LOCK_UNLOCKED);
 }
diff --git a/lib/librthread/arch/powerpc/_atomic_lock.c b/lib/librthread/arch/powerpc/_atomic_lock.c
index 68035588c22..705f682114c 100644
--- a/lib/librthread/arch/powerpc/_atomic_lock.c
+++ b/lib/librthread/arch/powerpc/_atomic_lock.c
@@ -1,4 +1,4 @@
-/* $OpenBSD: _atomic_lock.c,v 1.4 2008/10/01 14:59:18 drahn Exp $ */
+/* $OpenBSD: _atomic_lock.c,v 1.5 2013/06/01 20:47:40 tedu Exp $ */
 /*
  * Copyright (c) 1998 Dale Rahn <drahn@openbsd.org>
  *
@@ -19,22 +19,22 @@
  * Atomic lock for powerpc
  */
 
-#include <spinlock.h>
+#include <machine/spinlock.h>
 
 int
-_atomic_lock(volatile _spinlock_lock_t *lock)
+_atomic_lock(volatile _atomic_lock_t *lock)
 {
-	_spinlock_lock_t old;
+	_atomic_lock_t old;
 
 	__asm__("1: lwarx 0,0,%1	\n"
 	    "	stwcx. %2,0,%1		\n"
 	    "	bne- 1b			\n"
 	    "	mr %0, 0		\n"
 	    : "=r" (old), "=r" (lock)
-	    : "r" (_SPINLOCK_LOCKED), "1" (lock) : "0"
+	    : "r" (_ATOMIC_LOCK_LOCKED), "1" (lock) : "0"
 	    );
 
-	return (old != _SPINLOCK_UNLOCKED);
+	return (old != _ATOMIC_LOCK_UNLOCKED);
 
 	/*
 	 * Dale <drahn@openbsd.org> says:
diff --git a/lib/librthread/arch/sh/_atomic_lock.c b/lib/librthread/arch/sh/_atomic_lock.c
index ec68b6d09ef..4dca89705db 100644
--- a/lib/librthread/arch/sh/_atomic_lock.c
+++ b/lib/librthread/arch/sh/_atomic_lock.c
@@ -1,4 +1,4 @@
-/* $OpenBSD: _atomic_lock.c,v 1.2 2008/06/26 05:42:05 ray Exp $ */
+/* $OpenBSD: _atomic_lock.c,v 1.3 2013/06/01 20:47:40 tedu Exp $ */
 
 /*-
  * Copyright (c) 2002 The NetBSD Foundation, Inc.
@@ -29,12 +29,12 @@
  * POSSIBILITY OF SUCH DAMAGE.
  */
 
-#include "spinlock.h"
+#include <machine/spinlock.h>
 
 int
-_atomic_lock(volatile _spinlock_lock_t *lock)
+_atomic_lock(volatile _atomic_lock_t *lock)
 {
-	_spinlock_lock_t old;
+	_atomic_lock_t old;
 
 	__asm volatile(
 		"	tas.b	%0	\n"
@@ -44,10 +44,3 @@ _atomic_lock(volatile _spinlock_lock_t *lock)
 
 	return (old == 0);
 }
-
-int
-_atomic_is_locked(volatile _spinlock_lock_t *lock)
-{
-
-	return (*lock != _SPINLOCK_UNLOCKED);
-}
diff --git a/lib/librthread/arch/sparc/_atomic_lock.c b/lib/librthread/arch/sparc/_atomic_lock.c
index 036a7abb6e9..9c95d05e844 100644
--- a/lib/librthread/arch/sparc/_atomic_lock.c
+++ b/lib/librthread/arch/sparc/_atomic_lock.c
@@ -1,23 +1,23 @@
-/* $OpenBSD: _atomic_lock.c,v 1.4 2011/10/13 05:41:06 guenther Exp $ */
+/* $OpenBSD: _atomic_lock.c,v 1.5 2013/06/01 20:47:40 tedu Exp $ */
 
 /* David Leonard, <d@csee.uq.edu.au>. Public domain. */
 
 /*
  * Atomic lock for sparc
 */
 
-#include <spinlock.h>
+#include <machine/spinlock.h>
 
 int
-_atomic_lock(volatile _spinlock_lock_t * lock)
+_atomic_lock(volatile _atomic_lock_t * lock)
 {
-	_spinlock_lock_t old;
+	_atomic_lock_t old;
 
 	/*
 	 * "  ldstub [address], reg_rd
 	 *
 	 * The atomic load-store instructions copy a byte from memory
 	 * into r[rd]m then rewrite the addressed byte in memory to all
-	 * ones [_SPINLOCK_LOCKED]. The operation is performed
+	 * ones [_ATOMIC_LOCK_LOCKED]. The operation is performed
 	 * atomically, that is, without allowing intervening interrupts
 	 * or deferred traps. In a multiprocessor system, two or more
 	 * processors executing atomic load-store unsigned byte [...]
@@ -37,5 +37,5 @@ _atomic_lock(volatile _spinlock_lock_t * lock)
 	 */
 	__asm__("ldstub [%1], %0" : "=&r" (old) : "r" (lock) : "memory");
 
-	return (old == _SPINLOCK_LOCKED);
+	return (old == _ATOMIC_LOCK_LOCKED);
 }
diff --git a/lib/librthread/arch/sparc64/_atomic_lock.c b/lib/librthread/arch/sparc64/_atomic_lock.c
index 750d9e5aa14..e18426b848e 100644
--- a/lib/librthread/arch/sparc64/_atomic_lock.c
+++ b/lib/librthread/arch/sparc64/_atomic_lock.c
@@ -1,23 +1,23 @@
-/* $OpenBSD: _atomic_lock.c,v 1.4 2011/10/13 05:41:06 guenther Exp $ */
+/* $OpenBSD: _atomic_lock.c,v 1.5 2013/06/01 20:47:40 tedu Exp $ */
 
 /* David Leonard, <d@csee.uq.edu.au>. Public domain. */
 
 /*
  * Atomic lock for sparc64
 */
 
-#include <spinlock.h>
+#include <machine/spinlock.h>
 
 int
-_atomic_lock(volatile _spinlock_lock_t * lock)
+_atomic_lock(volatile _atomic_lock_t * lock)
 {
-	_spinlock_lock_t old;
+	_atomic_lock_t old;
 
 	/*
 	 * "  ldstub [address], reg_rd
 	 *
 	 * The atomic load-store instructions copy a byte from memory
 	 * into r[rd]m then rewrite the addressed byte in memory to all
-	 * ones [_SPINLOCK_LOCKED]. The operation is performed
+	 * ones [_ATOMIC_LOCK_LOCKED]. The operation is performed
 	 * atomically, that is, without allowing intervening interrupts
 	 * or deferred traps. In a multiprocessor system, two or more
 	 * processors executing atomic load-store unsigned byte [...]
@@ -37,5 +37,5 @@ _atomic_lock(volatile _spinlock_lock_t * lock)
 	 */
 	__asm__("ldstub [%1], %0" : "=&r" (old) : "r" (lock) : "memory");
 
-	return (old == _SPINLOCK_LOCKED);
+	return (old == _ATOMIC_LOCK_LOCKED);
 }
diff --git a/lib/librthread/arch/vax/_atomic_lock.c b/lib/librthread/arch/vax/_atomic_lock.c
index ac8b8bf704a..7357d01f1b6 100644
--- a/lib/librthread/arch/vax/_atomic_lock.c
+++ b/lib/librthread/arch/vax/_atomic_lock.c
@@ -1,16 +1,16 @@
-/* $OpenBSD: _atomic_lock.c,v 1.2 2006/01/05 22:33:24 marc Exp $ */
+/* $OpenBSD: _atomic_lock.c,v 1.3 2013/06/01 20:47:40 tedu Exp $ */
 
 /*
  * Atomic lock for vax
 * Written by Miodrag Vallat <miod@openbsd.org> - placed in the public domain.
 */
 
-#include <spinlock.h>
+#include <machine/spinlock.h>
 
 int
-_atomic_lock(volatile _spinlock_lock_t *lock)
+_atomic_lock(volatile _atomic_lock_t *lock)
 {
-	_spinlock_lock_t old;
+	_atomic_lock_t old;
 
 	/*
 	 * The Branch on Bit Set and Set Interlocked instruction
@@ -24,19 +24,12 @@ _atomic_lock(volatile _spinlock_lock_t *lock)
 	 * ``Control instructions''.
 	 */
 	__asm__ (
-		"movl $1, %1\n"		/* _SPINLOCK_LOCKED */
+		"movl $1, %1\n"		/* _ATOMIC_LOCK_LOCKED */
 		"bbssi $0, %0, 1f\n"
-		"movl $0, %1\n"		/* _SPINLOCK_UNLOCKED */
+		"movl $0, %1\n"		/* _ATOMIC_LOCK_UNLOCKED */
 		"1:	\n"
 		: "=m" (*lock), "=r" (old) : "0" (*lock)
 	);
 
-	return (old != _SPINLOCK_UNLOCKED);
-}
-
-int
-_atomic_is_locked(volatile _spinlock_lock_t *lock)
-{
-
-	return (*lock != _SPINLOCK_UNLOCKED);
+	return (old != _ATOMIC_LOCK_UNLOCKED);
 }