RetroArch
spinlock.h
Go to the documentation of this file.
1 #ifndef __SPINLOCK_H__
2 #define __SPINLOCK_H__
3 
4 #include <gctypes.h>
5 #include <lwp_threads.h>
6 
7 typedef struct {
9 } spinlock_t;
10 
11 #define SPIN_LOCK_UNLOCKED (spinlock_t){0}
12 
13 #define spin_lock_init(x) do { *(x) = SPIN_LOCK_UNLOCKED; }while(0)
14 
/*
 * Atomically set *atomic to 1 and return the value it held beforehand:
 * 0 means this call won the word, non-zero means it was already set.
 * Uses a PowerPC lwarx/stwcx. (load-reserved / store-conditional) retry
 * loop; the trailing isync acts as an acquire barrier so subsequent
 * accesses are not executed ahead of the successful store.
 */
static __inline__ u32 _test_and_set(u32 *atomic)
{
	register u32 ret;

	__asm__ __volatile__ ("1: lwarx %0,0,%1\n"	/* ret = *atomic, establish reservation */
	" cmpwi 0,%0,0\n"	/* already non-zero? */
	" bne- 2f\n"	/* yes: skip the store, return old value */
	" stwcx. %2,0,%1\n"	/* conditionally store 1 */
	" bne- 1b\n"	/* reservation lost: retry */
	" isync\n"	/* acquire barrier on success */
	"2:" : "=&r"(ret)
	: "r"(atomic), "r"(1)
	: "cr0", "memory");

	return ret;
}
31 
/*
 * Atomically increment *pint and return the NEW (incremented) value.
 * lwarx/stwcx. retry loop; isync orders subsequent accesses after the
 * successful update.
 */
static __inline__ u32 atomic_inc(u32 *pint)
{
	register u32 ret;
	__asm__ __volatile__(
	"1: lwarx %0,0,%1\n\
	addi %0,%0,1\n\
	stwcx. %0,0,%1\n\
	bne- 1b\n\
	isync\n"
	: "=&r"(ret) : "r"(pint)
	: "cr0", "memory");
	return ret;
}
45 
/*
 * Atomically decrement *pint and return the NEW (decremented) value.
 * Mirror of atomic_inc(): lwarx/stwcx. retry loop with addi %0,%0,-1.
 */
static __inline__ u32 atomic_dec(u32 *pint)
{
	register u32 ret;
	__asm__ __volatile__(
	"1: lwarx %0,0,%1\n\
	addi %0,%0,-1\n\
	stwcx. %0,0,%1\n\
	bne- 1b\n\
	isync\n"
	: "=&r"(ret) : "r"(pint)
	: "cr0", "memory");
	return ret;
}
59 
/*
 * Busy-wait until *lock is acquired (lock word set to 1).
 * While the lock is held, spins on a plain lwzx read (label 2) so no
 * reservation traffic is generated; only when the word is observed zero
 * does it attempt the lwarx/stwcx. acquire (label 1).  isync is the
 * acquire barrier.  Does not disable interrupts — see spin_lock_irqsave.
 */
static __inline__ void spin_lock(spinlock_t *lock)
{
	register u32 tmp;

	__asm__ __volatile__(
	"b 1f # spin_lock\n\
2: lwzx %0,0,%1\n\
	cmpwi 0,%0,0\n\
	bne+ 2b\n\
1: lwarx %0,0,%1\n\
	cmpwi 0,%0,0\n\
	bne- 2b\n\
	stwcx. %2,0,%1\n\
	bne- 2b\n\
	isync"
	: "=&r"(tmp)
	: "r"(lock), "r"(1)
	: "cr0", "memory");
}
79 
80 static __inline__ void spin_lock_irqsave(spinlock_t *lock,register u32 *p_isr_level)
81 {
82  register u32 level;
83  register u32 tmp;
84 
86 
87  __asm__ __volatile__(
88  " b 1f # spin_lock\n\
89  2: lwzx %0,0,%1\n\
90  cmpwi 0,%0,0\n\
91  bne+ 2b\n\
92  1: lwarx %0,0,%1\n\
93  cmpwi 0,%0,0\n\
94  bne- 2b\n\
95  stwcx. %2,0,%1\n\
96  bne- 2b\n\
97  isync"
98  : "=&r"(tmp)
99  : "r"(lock), "r"(1)
100  : "cr0", "memory");
101 
102  *p_isr_level = level;
103 }
104 
/*
 * Release *lock.  eieio orders all earlier stores (the critical section)
 * before the unlock store that follows; the plain store of 0 then makes
 * the lock visible as free.
 */
static __inline__ void spin_unlock(spinlock_t *lock)
{
	__asm__ __volatile__("eieio # spin_unlock": : :"memory");
	lock->lock = 0;
}
110 
/*
 * Release *lock and restore the interrupt state previously saved by
 * spin_lock_irqsave().  eieio orders critical-section stores before the
 * unlock store; _CPU_ISR_Restore re-enables interrupts per isr_level.
 */
static __inline__ void spin_unlock_irqrestore(spinlock_t *lock,register u32 isr_level)
{
	__asm__ __volatile__(
	"eieio # spin_unlock"
	: : :"memory");
	lock->lock = 0;

	_CPU_ISR_Restore(isr_level);
}
120 
121 typedef struct {
123 } rwlock_t;
124 
125 #define RW_LOCK_UNLOCKED (rwlock_t){0}
126 
127 #define read_lock_init(lp) do { *(lp) = RW_LOCK_UNLOCKED; }while(0)
128 
/*
 * Acquire *rw for reading: atomically increment the reader count.
 * While a writer holds the lock (word negative) it spins on a plain
 * lwzx read (label 1); otherwise it increments with lwarx/addic./stwcx.
 * and retries if the incremented value is still <= 0 (writer present)
 * or the reservation was lost.  isync is the acquire barrier.
 */
static __inline__ void read_lock(rwlock_t *rw)
{
	register u32 tmp;

	__asm__ __volatile__(
	"b 2f # read_lock\n\
1: lwzx %0,0,%1\n\
	cmpwi 0,%0,0\n\
	blt+ 1b\n\
2: lwarx %0,0,%1\n\
	addic. %0,%0,1\n\
	ble- 1b\n\
	stwcx. %0,0,%1\n\
	bne- 2b\n\
	isync"
	: "=&r"(tmp)
	: "r"(&rw->lock)
	: "cr0", "memory");
}
148 
/*
 * Release a read hold: atomically decrement the reader count.
 * eieio first orders the critical-section accesses before the
 * decrement; lwarx/addic/stwcx. retries until the store succeeds.
 */
static __inline__ void read_unlock(rwlock_t *rw)
{
	register u32 tmp;

	__asm__ __volatile__(
	"eieio # read_unlock\n\
1: lwarx %0,0,%1\n\
	addic %0,%0,-1\n\
	stwcx. %0,0,%1\n\
	bne- 1b"
	: "=&r"(tmp)
	: "r"(&rw->lock)
	: "cr0", "memory");
}
163 
/*
 * Acquire *rw for writing: wait until the word is 0 (no readers, no
 * writer), then atomically store -1 to mark exclusive ownership.
 * Spins on a plain lwzx read (label 1) while the lock is busy, then
 * attempts the lwarx/stwcx. store of -1 (label 2).  isync is the
 * acquire barrier.
 */
static __inline__ void write_lock(rwlock_t *rw)
{
	register u32 tmp;

	__asm__ __volatile__(
	"b 2f # write_lock\n\
1: lwzx %0,0,%1\n\
	cmpwi 0,%0,0\n\
	bne+ 1b\n\
2: lwarx %0,0,%1\n\
	cmpwi 0,%0,0\n\
	bne- 1b\n\
	stwcx. %2,0,%1\n\
	bne- 2b\n\
	isync"
	: "=&r"(tmp)
	: "r"(&rw->lock), "r"(-1)
	: "cr0", "memory");
}
183 
/*
 * Release a write hold: eieio orders the critical-section stores, then
 * the plain store of 0 marks the lock free for readers and writers.
 */
static __inline__ void write_unlock(rwlock_t *rw)
{
	__asm__ __volatile__("eieio # write_unlock": : :"memory");
	rw->lock = 0;
}
189 
190 
191 #endif
Definition: spinlock.h:7
static __inline__ void write_lock(rwlock_t *rw)
Definition: spinlock.h:164
#define _CPU_ISR_Disable(_isr_cookie)
Definition: gx_gfx.c:47
__asm__(".arm\n" ".align 4\n" ".globl co_switch_arm\n" ".globl _co_switch_arm\n" "co_switch_arm:\n" "_co_switch_arm:\n" " stmia r1!, {r4, r5, r6, r7, r8, r9, r10, r11, sp, lr}\n" " ldmia r0!, {r4, r5, r6, r7, r8, r9, r10, r11, sp, pc}\n")
#define _CPU_ISR_Restore(_isr_cookie)
Definition: gx_gfx.c:62
Data type definitions.
static __inline__ void spin_unlock(spinlock_t *lock)
Definition: spinlock.h:105
vu32 lock
Definition: spinlock.h:122
static __inline__ void spin_lock(spinlock_t *lock)
Definition: spinlock.h:60
static __inline__ u32 atomic_dec(u32 *pint)
Definition: spinlock.h:46
static l_mem atomic(lua_State *L)
Definition: lgc.c:982
Definition: spinlock.h:121
static __inline__ void spin_lock_irqsave(spinlock_t *lock, register u32 *p_isr_level)
Definition: spinlock.h:80
GLint level
Definition: glext.h:6293
static __inline__ void spin_unlock_irqrestore(spinlock_t *lock, register u32 isr_level)
Definition: spinlock.h:111
static __inline__ void read_lock(rwlock_t *rw)
Definition: spinlock.h:129
static __inline__ u32 atomic_inc(u32 *pint)
Definition: spinlock.h:32
vu32 lock
Definition: spinlock.h:8
volatile u32 vu32
32bit unsigned volatile integer
Definition: gctypes.h:29
static __inline__ u32 _test_and_set(u32 *atomic)
Definition: spinlock.h:15
static __inline__ void write_unlock(rwlock_t *rw)
Definition: spinlock.h:184
uint32_t u32
32bit unsigned integer
Definition: gctypes.h:19
static __inline__ void read_unlock(rwlock_t *rw)
Definition: spinlock.h:149