MIPS: Select CONFIG_ARCH_USE_CMPXCHG_LOCKREF for MIPS64
[linux-drm-fsl-dcu.git] arch/mips/include/asm/spinlock.h
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1999, 2000, 06 Ralf Baechle (ralf@linux-mips.org)
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 */
#ifndef _ASM_SPINLOCK_H
#define _ASM_SPINLOCK_H

#include <linux/compiler.h>

#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/war.h>

/*
 * Your basic SMP spinlocks, allowing only a single CPU anywhere.
 *
 * Simple spin lock operations.  There are two variants: one clears IRQs
 * on the local processor, the other does not.
 *
 * These are fair FIFO ticket locks.
 *
 * (The type definitions are in asm/spinlock_types.h.)
 */


/*
 * Ticket locks conceptually consist of two parts: one indicating the current
 * head of the queue, and the other indicating the current tail.  The lock is
 * acquired by atomically noting the tail and incrementing it by one (thus
 * adding ourselves to the queue and noting our position), then waiting until
 * the head becomes equal to the initial value of the tail.
 */

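/*
 * For reference, a minimal sketch of the same ticket-lock idea using C11
 * <stdatomic.h> (illustrative only -- the names below are hypothetical and
 * this is not kernel code; the real implementation that follows uses MIPS
 * ll/sc on a single packed 32-bit word instead):
 *
 *      typedef struct {
 *              _Atomic unsigned short head;    // serving_now
 *              _Atomic unsigned short tail;    // next free ticket
 *      } ticket_lock_sketch;
 *
 *      static void ticket_lock(ticket_lock_sketch *l)
 *      {
 *              // atomically take the next ticket, noting our position
 *              unsigned short me = atomic_fetch_add(&l->tail, 1);
 *              // wait until the head reaches our ticket
 *              while (atomic_load(&l->head) != me)
 *                      ;
 *      }
 *
 *      static void ticket_unlock(ticket_lock_sketch *l)
 *      {
 *              atomic_fetch_add(&l->head, 1);  // serve the next waiter
 *      }
 */
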
static inline int arch_spin_is_locked(arch_spinlock_t *lock)
{
        u32 counters = ACCESS_ONCE(lock->lock);

        /* One half is serving_now, the other the next ticket; held iff they differ. */
        return ((counters >> 16) ^ counters) & 0xffff;
}

static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
{
        return lock.h.serving_now == lock.h.ticket;
}

#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)
#define arch_spin_unlock_wait(x) \
        while (arch_spin_is_locked(x)) { cpu_relax(); }

static inline int arch_spin_is_contended(arch_spinlock_t *lock)
{
        u32 counters = ACCESS_ONCE(lock->lock);

        /* Contended iff more than one ticket is outstanding beyond serving_now. */
        return (((counters >> 16) - counters) & 0xffff) > 1;
}
#define arch_spin_is_contended  arch_spin_is_contended

static inline void arch_spin_lock(arch_spinlock_t *lock)
{
        int my_ticket;
        int tmp;
        int inc = 0x10000;

        /*
         * Take a ticket by atomically adding 1 << 16 to the lock word,
         * then spin (in the out-of-line subsection) until serving_now
         * catches up with our ticket; the delay between polls is
         * proportional to our distance from the head of the queue.  The
         * first variant uses branch-likely (beqzl) to work around the
         * R10000 ll/sc erratum.
         */
        if (R10000_LLSC_WAR) {
                __asm__ __volatile__ (
                "       .set push               # arch_spin_lock        \n"
                "       .set noreorder                                  \n"
                "                                                       \n"
                "1:     ll      %[ticket], %[ticket_ptr]                \n"
                "       addu    %[my_ticket], %[ticket], %[inc]         \n"
                "       sc      %[my_ticket], %[ticket_ptr]             \n"
                "       beqzl   %[my_ticket], 1b                        \n"
                "        nop                                            \n"
                "       srl     %[my_ticket], %[ticket], 16             \n"
                "       andi    %[ticket], %[ticket], 0xffff            \n"
                "       bne     %[ticket], %[my_ticket], 4f             \n"
                "        subu   %[ticket], %[my_ticket], %[ticket]      \n"
                "2:                                                     \n"
                "       .subsection 2                                   \n"
                "4:     andi    %[ticket], %[ticket], 0xffff            \n"
                "       sll     %[ticket], 5                            \n"
                "                                                       \n"
                "6:     bnez    %[ticket], 6b                           \n"
                "        subu   %[ticket], 1                            \n"
                "                                                       \n"
                "       lhu     %[ticket], %[serving_now_ptr]           \n"
                "       beq     %[ticket], %[my_ticket], 2b             \n"
                "        subu   %[ticket], %[my_ticket], %[ticket]      \n"
                "       b       4b                                      \n"
                "        subu   %[ticket], %[ticket], 1                 \n"
                "       .previous                                       \n"
                "       .set pop                                        \n"
                : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
                  [serving_now_ptr] "+m" (lock->h.serving_now),
                  [ticket] "=&r" (tmp),
                  [my_ticket] "=&r" (my_ticket)
                : [inc] "r" (inc));
        } else {
                __asm__ __volatile__ (
                "       .set push               # arch_spin_lock        \n"
                "       .set noreorder                                  \n"
                "                                                       \n"
                "1:     ll      %[ticket], %[ticket_ptr]                \n"
                "       addu    %[my_ticket], %[ticket], %[inc]         \n"
                "       sc      %[my_ticket], %[ticket_ptr]             \n"
                "       beqz    %[my_ticket], 1b                        \n"
                "        srl    %[my_ticket], %[ticket], 16             \n"
                "       andi    %[ticket], %[ticket], 0xffff            \n"
                "       bne     %[ticket], %[my_ticket], 4f             \n"
                "        subu   %[ticket], %[my_ticket], %[ticket]      \n"
                "2:                                                     \n"
                "       .subsection 2                                   \n"
                "4:     andi    %[ticket], %[ticket], 0xffff            \n"
                "       sll     %[ticket], 5                            \n"
                "                                                       \n"
                "6:     bnez    %[ticket], 6b                           \n"
                "        subu   %[ticket], 1                            \n"
                "                                                       \n"
                "       lhu     %[ticket], %[serving_now_ptr]           \n"
                "       beq     %[ticket], %[my_ticket], 2b             \n"
                "        subu   %[ticket], %[my_ticket], %[ticket]      \n"
                "       b       4b                                      \n"
                "        subu   %[ticket], %[ticket], 1                 \n"
                "       .previous                                       \n"
                "       .set pop                                        \n"
                : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
                  [serving_now_ptr] "+m" (lock->h.serving_now),
                  [ticket] "=&r" (tmp),
                  [my_ticket] "=&r" (my_ticket)
                : [inc] "r" (inc));
        }

        smp_llsc_mb();
}

static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
        unsigned int serving_now = lock->h.serving_now + 1;

        /* Order critical-section stores before handing the lock over. */
        wmb();
        lock->h.serving_now = (u16)serving_now;
        /* Push the store out so spinning CPUs see it promptly. */
        nudge_writes();
}

static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)
{
        int tmp, tmp2, tmp3;
        int inc = 0x10000;

        /*
         * Take a ticket only if nobody is ahead of us (next ticket ==
         * now_serving); returns 1 on success, 0 if the lock was
         * contended.
         */
        if (R10000_LLSC_WAR) {
                __asm__ __volatile__ (
                "       .set push               # arch_spin_trylock     \n"
                "       .set noreorder                                  \n"
                "                                                       \n"
                "1:     ll      %[ticket], %[ticket_ptr]                \n"
                "       srl     %[my_ticket], %[ticket], 16             \n"
                "       andi    %[now_serving], %[ticket], 0xffff       \n"
                "       bne     %[my_ticket], %[now_serving], 3f        \n"
                "        addu   %[ticket], %[ticket], %[inc]            \n"
                "       sc      %[ticket], %[ticket_ptr]                \n"
                "       beqzl   %[ticket], 1b                           \n"
                "        li     %[ticket], 1                            \n"
                "2:                                                     \n"
                "       .subsection 2                                   \n"
                "3:     b       2b                                      \n"
                "        li     %[ticket], 0                            \n"
                "       .previous                                       \n"
                "       .set pop                                        \n"
                : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
                  [ticket] "=&r" (tmp),
                  [my_ticket] "=&r" (tmp2),
                  [now_serving] "=&r" (tmp3)
                : [inc] "r" (inc));
        } else {
                __asm__ __volatile__ (
                "       .set push               # arch_spin_trylock     \n"
                "       .set noreorder                                  \n"
                "                                                       \n"
                "1:     ll      %[ticket], %[ticket_ptr]                \n"
                "       srl     %[my_ticket], %[ticket], 16             \n"
                "       andi    %[now_serving], %[ticket], 0xffff       \n"
                "       bne     %[my_ticket], %[now_serving], 3f        \n"
                "        addu   %[ticket], %[ticket], %[inc]            \n"
                "       sc      %[ticket], %[ticket_ptr]                \n"
                "       beqz    %[ticket], 1b                           \n"
                "        li     %[ticket], 1                            \n"
                "2:                                                     \n"
                "       .subsection 2                                   \n"
                "3:     b       2b                                      \n"
                "        li     %[ticket], 0                            \n"
                "       .previous                                       \n"
                "       .set pop                                        \n"
                : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
                  [ticket] "=&r" (tmp),
                  [my_ticket] "=&r" (tmp2),
                  [now_serving] "=&r" (tmp3)
                : [inc] "r" (inc));
        }

        smp_llsc_mb();

        return tmp;
}

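/*
 * Callers normally reach this through the generic spin_trylock()
 * wrappers rather than directly; a hypothetical direct use would be:
 *
 *      if (arch_spin_trylock(&lock)) {
 *              // got the lock: critical section
 *              arch_spin_unlock(&lock);
 *      } else {
 *              // contended: back off or do something else
 *      }
 */
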
/*
 * Read-write spinlocks, allowing multiple readers but only one writer.
 *
 * NOTE! It is quite common to have readers in interrupts but no interrupt
 * writers.  For those circumstances we can "mix" irq-safe locks - any writer
 * needs to take an irq-safe write-lock, but readers can take non-irqsafe
 * read-locks.
 */

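/*
 * For reference, a minimal sketch of this encoding with C11 <stdatomic.h>
 * and <limits.h> (illustrative only; the names are hypothetical): the lock
 * word counts readers, and a writer claims the whole word by setting the
 * sign bit, exactly as the ll/sc code below does with lui 0x8000.
 *
 *      typedef struct { _Atomic int lock; } rwlock_sketch;
 *
 *      static void read_lock_sketch(rwlock_sketch *rw)
 *      {
 *              int old;
 *              do {
 *                      while ((old = atomic_load(&rw->lock)) < 0)
 *                              ;               // writer active
 *              } while (!atomic_compare_exchange_weak(&rw->lock,
 *                                                     &old, old + 1));
 *      }
 *
 *      static void write_lock_sketch(rwlock_sketch *rw)
 *      {
 *              int expected = 0;               // no readers, no writer
 *              while (!atomic_compare_exchange_weak(&rw->lock,
 *                                                   &expected, INT_MIN))
 *                      expected = 0;           // INT_MIN == sign bit set
 *      }
 */
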
/*
 * read_can_lock - would read_trylock() succeed?
 * @rw: the rwlock in question.
 *
 * The lock word counts readers; a writer sets the sign bit, so any
 * negative value means a writer holds the lock.
 */
#define arch_read_can_lock(rw)  ((rw)->lock >= 0)

/*
 * write_can_lock - would write_trylock() succeed?
 * @rw: the rwlock in question.
 */
#define arch_write_can_lock(rw) (!(rw)->lock)

static inline void arch_read_lock(arch_rwlock_t *rw)
{
        unsigned int tmp;

        /*
         * Spin while a writer (sign bit set) holds the lock, then
         * atomically bump the reader count.
         */
        if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    noreorder       # arch_read_lock        \n"
                "1:     ll      %1, %2                                  \n"
                "       bltz    %1, 1b                                  \n"
                "        addu   %1, 1                                   \n"
                "       sc      %1, %0                                  \n"
                "       beqzl   %1, 1b                                  \n"
                "        nop                                            \n"
                "       .set    reorder                                 \n"
                : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
                : GCC_OFF_SMALL_ASM() (rw->lock)
                : "memory");
        } else {
                do {
                        __asm__ __volatile__(
                        "1:     ll      %1, %2  # arch_read_lock        \n"
                        "       bltz    %1, 1b                          \n"
                        "        addu   %1, 1                           \n"
                        "2:     sc      %1, %0                          \n"
                        : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
                        : GCC_OFF_SMALL_ASM() (rw->lock)
                        : "memory");
                } while (unlikely(!tmp));
        }

        smp_llsc_mb();
}

static inline void arch_read_unlock(arch_rwlock_t *rw)
{
        unsigned int tmp;

        smp_mb__before_llsc();

        /* Atomically drop our reader count. */
        if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "1:     ll      %1, %2          # arch_read_unlock      \n"
                "       addiu   %1, -1                                  \n"
                "       sc      %1, %0                                  \n"
                "       beqzl   %1, 1b                                  \n"
                : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
                : GCC_OFF_SMALL_ASM() (rw->lock)
                : "memory");
        } else {
                do {
                        __asm__ __volatile__(
                        "1:     ll      %1, %2  # arch_read_unlock      \n"
                        "       addiu   %1, -1                          \n"
                        "       sc      %1, %0                          \n"
                        : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
                        : GCC_OFF_SMALL_ASM() (rw->lock)
                        : "memory");
                } while (unlikely(!tmp));
        }
}

static inline void arch_write_lock(arch_rwlock_t *rw)
{
        unsigned int tmp;

        /*
         * Wait until the lock word is zero (no readers, no writer),
         * then claim it by setting the sign bit (lui 0x8000 loads
         * 0x80000000).
         */
        if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    noreorder       # arch_write_lock       \n"
                "1:     ll      %1, %2                                  \n"
                "       bnez    %1, 1b                                  \n"
                "        lui    %1, 0x8000                              \n"
                "       sc      %1, %0                                  \n"
                "       beqzl   %1, 1b                                  \n"
                "        nop                                            \n"
                "       .set    reorder                                 \n"
                : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
                : GCC_OFF_SMALL_ASM() (rw->lock)
                : "memory");
        } else {
                do {
                        __asm__ __volatile__(
                        "1:     ll      %1, %2  # arch_write_lock       \n"
                        "       bnez    %1, 1b                          \n"
                        "        lui    %1, 0x8000                      \n"
                        "2:     sc      %1, %0                          \n"
                        : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
                        : GCC_OFF_SMALL_ASM() (rw->lock)
                        : "memory");
                } while (unlikely(!tmp));
        }

        smp_llsc_mb();
}

static inline void arch_write_unlock(arch_rwlock_t *rw)
{
        smp_mb__before_llsc();

        /* The writer is exclusive, so a plain store of 0 releases the lock. */
        __asm__ __volatile__(
        "                               # arch_write_unlock     \n"
        "       sw      $0, %0                                  \n"
        : "=m" (rw->lock)
        : "m" (rw->lock)
        : "memory");
}

static inline int arch_read_trylock(arch_rwlock_t *rw)
{
        unsigned int tmp;
        int ret;

        /* Returns 1 if the reader count was bumped, 0 if a writer held the lock. */
        if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    noreorder       # arch_read_trylock     \n"
                "       li      %2, 0                                   \n"
                "1:     ll      %1, %3                                  \n"
                "       bltz    %1, 2f                                  \n"
                "        addu   %1, 1                                   \n"
                "       sc      %1, %0                                  \n"
                "       .set    reorder                                 \n"
                "       beqzl   %1, 1b                                  \n"
                "        nop                                            \n"
                __WEAK_LLSC_MB
                "       li      %2, 1                                   \n"
                "2:                                                     \n"
                : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
                : GCC_OFF_SMALL_ASM() (rw->lock)
                : "memory");
        } else {
                __asm__ __volatile__(
                "       .set    noreorder       # arch_read_trylock     \n"
                "       li      %2, 0                                   \n"
                "1:     ll      %1, %3                                  \n"
                "       bltz    %1, 2f                                  \n"
                "        addu   %1, 1                                   \n"
                "       sc      %1, %0                                  \n"
                "       beqz    %1, 1b                                  \n"
                "        nop                                            \n"
                "       .set    reorder                                 \n"
                __WEAK_LLSC_MB
                "       li      %2, 1                                   \n"
                "2:                                                     \n"
                : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
                : GCC_OFF_SMALL_ASM() (rw->lock)
                : "memory");
        }

        return ret;
}

static inline int arch_write_trylock(arch_rwlock_t *rw)
{
        unsigned int tmp;
        int ret;

        /* Returns 1 if the lock was free and has been claimed, 0 otherwise. */
        if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    noreorder       # arch_write_trylock    \n"
                "       li      %2, 0                                   \n"
                "1:     ll      %1, %3                                  \n"
                "       bnez    %1, 2f                                  \n"
                "        lui    %1, 0x8000                              \n"
                "       sc      %1, %0                                  \n"
                "       beqzl   %1, 1b                                  \n"
                "        nop                                            \n"
                __WEAK_LLSC_MB
                "       li      %2, 1                                   \n"
                "       .set    reorder                                 \n"
                "2:                                                     \n"
                : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
                : GCC_OFF_SMALL_ASM() (rw->lock)
                : "memory");
        } else {
                do {
                        __asm__ __volatile__(
                        "       ll      %1, %3  # arch_write_trylock    \n"
                        "       li      %2, 0                           \n"
                        "       bnez    %1, 2f                          \n"
                        "       lui     %1, 0x8000                      \n"
                        "       sc      %1, %0                          \n"
                        "       li      %2, 1                           \n"
                        "2:                                             \n"
                        : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp),
                          "=&r" (ret)
                        : GCC_OFF_SMALL_ASM() (rw->lock)
                        : "memory");
                } while (unlikely(!tmp));

                smp_llsc_mb();
        }

        return ret;
}

#define arch_read_lock_flags(lock, flags) arch_read_lock(lock)
#define arch_write_lock_flags(lock, flags) arch_write_lock(lock)

#define arch_spin_relax(lock)   cpu_relax()
#define arch_read_relax(lock)   cpu_relax()
#define arch_write_relax(lock)  cpu_relax()

#endif /* _ASM_SPINLOCK_H */