Diffstat (limited to 'lib/libpthread/arch/amd64/_atomic_lock.c')
-rw-r--r--  lib/libpthread/arch/amd64/_atomic_lock.c | 16 ++++++++++++++--
1 file changed, 14 insertions(+), 2 deletions(-)
diff --git a/lib/libpthread/arch/amd64/_atomic_lock.c b/lib/libpthread/arch/amd64/_atomic_lock.c
index 087186e2858..70bdb487381 100644
--- a/lib/libpthread/arch/amd64/_atomic_lock.c
+++ b/lib/libpthread/arch/amd64/_atomic_lock.c
@@ -1,5 +1,6 @@
+/* $OpenBSD: _atomic_lock.c,v 1.2 2004/02/25 03:48:36 deraadt Exp $ */
/*
- * Atomic lock for amd64
+ * Atomic lock for amd64 -- taken from i386 code.
*/
#include "spinlock.h"
@@ -7,5 +8,16 @@
int
_atomic_lock(volatile _spinlock_lock_t *lock)
{
- /* dummy for now */
+ _spinlock_lock_t old;
+
+ /*
+ * Use the eXCHanGe instruction to swap the lock value with
+ * a local variable containing the locked state.
+ */
+ old = _SPINLOCK_LOCKED;
+ __asm__("xchg %0,%1"
+ : "=r" (old), "=m" (*lock)
+ : "0" (old), "1" (*lock));
+
+ return (old != _SPINLOCK_UNLOCKED);
}
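
For readers unfamiliar with the test-and-set idiom the patch adopts from the i386 code, the sketch below is a minimal, self-contained illustration of the same xchg-based technique. It is not the OpenBSD implementation: the names spin_lock_t, SPIN_LOCKED, SPIN_UNLOCKED, spin_try_lock(), spin_lock() and spin_unlock() are hypothetical stand-ins for _spinlock_lock_t, _SPINLOCK_LOCKED, _SPINLOCK_UNLOCKED and _atomic_lock(), whose real definitions live in libpthread's spinlock.h and are not reproduced here.

/*
 * Standalone sketch of an xchg-based test-and-set spinlock (amd64,
 * GCC/Clang inline asm).  All names here are illustrative stand-ins,
 * not OpenBSD's.
 */
#include <stdio.h>

typedef volatile unsigned long spin_lock_t;

#define SPIN_UNLOCKED	0UL
#define SPIN_LOCKED	1UL

/* Returns nonzero if the lock was already held, i.e. acquisition failed. */
static int
spin_try_lock(spin_lock_t *lock)
{
	unsigned long old = SPIN_LOCKED;

	/*
	 * xchg atomically swaps the register holding SPIN_LOCKED with the
	 * word in memory; the previous contents of *lock end up in `old'.
	 * On x86, xchg with a memory operand is implicitly locked, so no
	 * explicit `lock' prefix is required.
	 */
	__asm__ volatile("xchg %0,%1"
	    : "+r" (old), "+m" (*lock)
	    :
	    : "memory");

	return (old != SPIN_UNLOCKED);
}

static void
spin_lock(spin_lock_t *lock)
{
	/* Spin until the value observed by xchg was the unlocked state. */
	while (spin_try_lock(lock))
		__asm__ volatile("pause");	/* spin-wait hint to the CPU */
}

static void
spin_unlock(spin_lock_t *lock)
{
	/* A plain store suffices on x86; the pointer is volatile-qualified. */
	*lock = SPIN_UNLOCKED;
}

int
main(void)
{
	spin_lock_t lock = SPIN_UNLOCKED;

	spin_lock(&lock);
	printf("lock acquired\n");
	spin_unlock(&lock);
	return (0);
}

The return convention mirrors _atomic_lock() above: a nonzero result means the lock was already held, so a caller keeps retrying until xchg hands back the unlocked value.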