/*	$OpenBSD: lock.h,v 1.3 2009/11/21 23:28:14 syuu Exp $	*/

/* public domain */

#ifndef	_MIPS64_LOCK_H_
#define	_MIPS64_LOCK_H_

typedef volatile u_int __cpu_simple_lock_t;

#define	__SIMPLELOCK_LOCKED	1
#define	__SIMPLELOCK_UNLOCKED	0

/* Initialize a spinlock to the released state. */
static __inline__ void
__cpu_simple_lock_init(__cpu_simple_lock_t *l)
{
	*l = __SIMPLELOCK_UNLOCKED;
}

/*
 * Spin until the lock is acquired.  The ll/sc pair retries the store
 * until it succeeds; the outer loop retries until the lock was observed
 * free at the moment we claimed it.
 */
static __inline__ void
__cpu_simple_lock(__cpu_simple_lock_t *l)
{
	__cpu_simple_lock_t old, new;

	do {
		__asm__ __volatile__
		    ("1:\tll\t%0, %1\n"		/* old = *l (load linked) */
		     "\tli\t%2, %3\n"		/* new = LOCKED (sc clobbers it) */
		     "\tsc\t%2, %1\n"		/* try *l = new; %2 = 1 on success */
		     "\tbeqz\t%2, 1b\n"		/* reservation lost, retry */
		     "\t nop"
		     : "=&r" (old), "+m" (*l), "=&r" (new)
		     : "i" (__SIMPLELOCK_LOCKED)
		     : "memory");
	} while (old != __SIMPLELOCK_UNLOCKED);
}

/*
 * Try to acquire the lock once: return nonzero if the lock was free
 * and is now held by the caller.
 */
static __inline__ int
__cpu_simple_lock_try(__cpu_simple_lock_t *l)
{
	__cpu_simple_lock_t old, new;

	__asm__ __volatile__
	    ("1:\tll\t%0, %1\n"		/* old = *l (load linked) */
	     "\tli\t%2, %3\n"		/* new = LOCKED (sc clobbers it) */
	     "\tsc\t%2, %1\n"		/* try *l = new; %2 = 1 on success */
	     "\tbeqz\t%2, 1b\n"		/* reservation lost, retry */
	     "\t nop"
	     : "=&r" (old), "+m" (*l), "=&r" (new)
	     : "i" (__SIMPLELOCK_LOCKED)
	     : "memory");
	return (old == __SIMPLELOCK_UNLOCKED);
}

/* Release the lock by storing the released value. */
static __inline__ void
__cpu_simple_unlock(__cpu_simple_lock_t *l)
{
	*l = __SIMPLELOCK_UNLOCKED;
}

#define	rw_cas	__cpu_cas

/*
 * 64-bit compare-and-swap: if *addr == old, atomically store new.
 * Returns 0 if the swap was performed, 1 if *addr did not match old.
 * Exported as rw_cas() for the MI rwlock code.
 */
static __inline int
__cpu_cas(volatile unsigned long *addr, unsigned long old, unsigned long new)
{
	int success, scratch0, scratch1;

	__asm volatile(
		".set noreorder\n"
		"1:\n"
		"lld	%0, (%5)\n"	/* scratch0 = *addr (load linked) */
		"bne	%0, %3, 2f\n"	/* value does not match old: fail */
		"move	%1, %4\n"	/*  (delay slot) scratch1 = new */
		"scd	%1, (%5)\n"	/* try *addr = new; %1 = 1 on success */
		"beqz	%1, 1b\n"	/* reservation lost, retry */
		"move	%2, $0\n"	/*  (delay slot) success = 0 */
		"j	3f\n"
		"nop\n"
		"2:\n"
		"daddi	%2, $0, 1\n"	/* mismatch: success = 1 */
		"3:\n"
		".set reorder\n"
		: "=&r"(scratch0), "=&r"(scratch1), "=&r"(success)
		: "r"(old), "r"(new), "r"(addr)
		: "memory");

	return success;
}
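
/*
 * Illustrative sketch, not part of the original header: a typical
 * compare-and-swap retry loop built on __cpu_cas(), which returns 0
 * when the swap was performed.  The names are hypothetical.
 *
 *	volatile unsigned long example_counter;		// hypothetical shared word
 *
 *	void
 *	example_increment(void)
 *	{
 *		unsigned long old;
 *
 *		do {
 *			old = example_counter;
 *		} while (__cpu_cas(&example_counter, old, old + 1) != 0);
 *	}
 */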
#endif /* _MIPS64_LOCK_H_ */