| author | Jonathan Gray <jsg@cvs.openbsd.org> | 2019-08-17 06:07:23 +0000 |
|---|---|---|
| committer | Jonathan Gray <jsg@cvs.openbsd.org> | 2019-08-17 06:07:23 +0000 |
| commit | 4c9a1f2cf0dcad85f9c74e32303ce5fabaa94d5b (patch) | |
| tree | 01ea9cc607972d94bde202cf9e5a00267afbc332 /sys/dev | |
| parent | 369ae9472ce0277fd1b95e1400d2c6975254e6f2 (diff) | |
change drm memory barriers to be closer to what linux does on
amd64 and i386
ok kettenis@
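For context, the device-facing barriers touched by this commit are what DRM drivers use to order descriptor writes against the MMIO doorbell write that tells the hardware to look at them. A minimal hypothetical sketch of that pattern (the struct and function names are illustrative, not from this commit or the tree):

```c
/* Hypothetical doorbell submission; illustrative only. */
#include <sys/types.h>

struct ring {
	volatile uint32_t *descs;	/* descriptors the device will DMA-read */
	volatile uint32_t *doorbell;	/* MMIO doorbell register */
};

void
ring_submit(struct ring *r, int slot, uint32_t desc)
{
	r->descs[slot] = desc;	/* publish the descriptor */
	wmb();			/* make the store visible before ringing the doorbell */
	*r->doorbell = slot;	/* device fetches the descriptor */
}
```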
Diffstat (limited to 'sys/dev')
-rw-r--r--  sys/dev/pci/drm/include/linux/atomic.h  |  26
1 file changed, 19 insertions(+), 7 deletions(-)
```diff
diff --git a/sys/dev/pci/drm/include/linux/atomic.h b/sys/dev/pci/drm/include/linux/atomic.h
index 66e53126f5b..5e23da1490e 100644
--- a/sys/dev/pci/drm/include/linux/atomic.h
+++ b/sys/dev/pci/drm/include/linux/atomic.h
@@ -1,4 +1,4 @@
-/* $OpenBSD: atomic.h,v 1.4 2019/07/25 02:42:44 jsg Exp $ */
+/* $OpenBSD: atomic.h,v 1.5 2019/08/17 06:07:22 jsg Exp $ */
 /**
  * \file drm_atomic.h
  * Atomic operations used in the DRM which may or may not be provided by the OS.
@@ -370,17 +370,29 @@ find_next_bit(volatile void *p, int max, int b)
 	     (b) = find_next_zero_bit((p), (max), (b) + 1))
 
 #if defined(__i386__)
-#define rmb()	__asm __volatile("lock; addl $0,0(%%esp)" : : : "memory");
-#define wmb()	__asm __volatile("" : : : "memory");
-#define mb()	__asm __volatile("lock; addl $0,0(%%esp)" : : : "memory");
+#define rmb()	__asm __volatile("lock; addl $0,-4(%%esp)" : : : "memory", "cc")
+#define wmb()	__asm __volatile("lock; addl $0,-4(%%esp)" : : : "memory", "cc")
+#define mb()	__asm __volatile("lock; addl $0,-4(%%esp)" : : : "memory", "cc")
+#define smp_mb()	__asm __volatile("lock; addl $0,-4(%%esp)" : : : "memory", "cc")
+#define smp_rmb()	__asm __volatile("" : : : "memory")
+#define smp_wmb()	__asm __volatile("" : : : "memory")
+#define __smp_store_mb(var, value)	do { (void)xchg(&var, value); } while (0)
+#define smp_mb__after_atomic()	do { } while (0)
+#define smp_mb__before_atomic()	do { } while (0)
 #elif defined(__alpha__)
 #define rmb()	alpha_mb();
 #define wmb()	alpha_wmb();
 #define mb()	alpha_mb();
 #elif defined(__amd64__)
-#define rmb()	__asm __volatile("lock; addl $0,0(%%rsp)" : : : "memory");
-#define wmb()	__asm __volatile("" : : : "memory");
-#define mb()	__asm __volatile("lock; addl $0,0(%%rsp)" : : : "memory");
+#define rmb()	__asm __volatile("lfence" : : : "memory")
+#define wmb()	__asm __volatile("sfence" : : : "memory")
+#define mb()	__asm __volatile("mfence" : : : "memory")
+#define smp_mb()	__asm __volatile("lock; addl $0,-4(%%rsp)" : : : "memory", "cc");
+#define smp_rmb()	__asm __volatile("" : : : "memory")
+#define smp_wmb()	__asm __volatile("" : : : "memory")
+#define __smp_store_mb(var, value)	do { (void)xchg(&var, value); } while (0)
+#define smp_mb__after_atomic()	do { } while (0)
+#define smp_mb__before_atomic()	do { } while (0)
 #elif defined(__aarch64__)
 #define rmb()	__membar("dsb ld")
 #define wmb()	__membar("dsb st")
```
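The patch separates device-facing barriers from CPU-only barriers, mirroring Linux: on amd64, mb()/rmb()/wmb() become real fence instructions (mfence/lfence/sfence), which also order stores to write-combining MMIO, while the new smp_* macros only have to order ordinary memory accesses between CPUs. Because x86's TSO model already keeps loads ordered with loads and stores with stores, smp_rmb() and smp_wmb() reduce to compiler barriers, and only smp_mb() needs a locked instruction; the added "cc" clobber reflects that lock; addl rewrites the flags register. A minimal sketch of the CPU-to-CPU case the smp_* macros cover, as a hypothetical producer/consumer pair (names and values are illustrative, not from this commit):

```c
/*
 * Hypothetical producer/consumer pair illustrating the new smp_*
 * barriers; not part of this commit.
 */
static int payload;
static volatile int ready;

void
producer(void)
{
	payload = 42;
	smp_wmb();		/* order the payload store before the flag store */
	ready = 1;
}

int
consumer(void)
{
	while (!ready)
		;		/* spin until the producer publishes */
	smp_rmb();		/* order the flag load before the payload load */
	return payload;		/* observes 42 */
}
```

On amd64 both barriers here compile to nothing but a compiler barrier, which is exactly the cost saving over the old code that used a locked add (or nothing at all) for every barrier flavor.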