summaryrefslogtreecommitdiff
path: root/lib/libc_r/arch/alpha/_atomic_lock.c
diff options
context:
space:
mode:
authorDavid Leonard <d@cvs.openbsd.org>1998-11-20 11:15:39 +0000
committerDavid Leonard <d@cvs.openbsd.org>1998-11-20 11:15:39 +0000
commit394c7a9821726b84f284c0c4385b1a9198afa0b0 (patch)
treee3fcaf31862eb53986f206217f7986fe433c6ce7 /lib/libc_r/arch/alpha/_atomic_lock.c
parentd2d530d679e5709dfdaa5ea40bea4a4d25694930 (diff)
Move atomic_lock code from asm to C with inline asm;
Add m68k, mips and sparc. (needs more careful checking) Add 'slow_atomic_lock' for crippled archs.
Diffstat (limited to 'lib/libc_r/arch/alpha/_atomic_lock.c')
-rw-r--r--lib/libc_r/arch/alpha/_atomic_lock.c33
1 files changed, 33 insertions, 0 deletions
diff --git a/lib/libc_r/arch/alpha/_atomic_lock.c b/lib/libc_r/arch/alpha/_atomic_lock.c
new file mode 100644
index 00000000000..74575fc11d7
--- /dev/null
+++ b/lib/libc_r/arch/alpha/_atomic_lock.c
@@ -0,0 +1,33 @@
+/* $OpenBSD: _atomic_lock.c,v 1.1 1998/11/20 11:15:35 d Exp $ */
+/* Atomic lock for alpha */
+
+#include "spinlock.h"
+
+/*
+ * _atomic_lock(lock): attempt to acquire the spinlock word at *lock
+ * using the Alpha load-locked/store-conditional pair (ldq_l/stq_c),
+ * retrying until the conditional store succeeds.
+ *
+ * Returns the PREVIOUS value of *lock: 0 means the lock was free and
+ * this caller now holds it; non-zero means it was already held.
+ */
+register_t
+_atomic_lock(volatile register_t * lock)
+{
+ register_t old;
+ register_t new;
+ int success;
+
+ do {
+ /* load the value of the thread-lock (lock mem on load) */
+ __asm__( "ldq_l %0, %1" : "=r"(old) : "m"(*lock) );
+ if (old)
+ new = old; /* in-use: put it back */
+ else
+ new = 1; /* free: store a 1 in the lock */
+
+ /*
+ * NOTE(review): success is passed below as a write-only output
+ * ("=r"), yet the asm only writes %1 on the stq_c-success path;
+ * the compiler may consider this C-side `success = 0` a dead
+ * store and hand the asm an uninitialized register, leaving
+ * success garbage when stq_c fails. A "+r" constraint (or an
+ * unconditional write of %1 inside the asm) looks safer — confirm.
+ */
+ success = 0;
+ /* store the new value of the thrd-lock (unlock mem on store) */
+ /*
+ * XXX may need to add large branch forward for main line
+ * branch prediction to be right :(
+ */
+ /*
+ * NOTE(review): stq_c architecturally clobbers its source
+ * register (%2, holding `new`) with the 0/1 success flag, but
+ * `new` is declared as an input-only operand ("r"(new)); the
+ * compiler is entitled to assume it is unchanged afterwards.
+ * Harmless here only because `new` is dead after the asm and
+ * recomputed each iteration — verify before reusing this pattern.
+ */
+ __asm__( "stq_c %2, %0; beq %2, 1f; mov 1,%1; 1:"
+ : "=m"(*lock), "=r"(success)
+ : "r"(new) );
+ } while (!success);
+
+ return old;
+}