MIPS: Whitespace cleanup.
[linux-drm-fsl-dcu.git] arch/mips/include/asm/cmpxchg.h
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/war.h>

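/*
 * Atomically exchange the 32-bit word at @m with @val and return the old
 * value.  On LL/SC-capable CPUs the load-linked (ll) / store-conditional
 * (sc) pair retries until the store succeeds: sc leaves 1 in its operand
 * register on success and 0 if the link was broken by another access.
 * The beqzl (branch-likely) retry is the workaround for the R10000 ll/sc
 * errata (R10000_LLSC_WAR); other LL/SC CPUs retry with a plain C loop.
 * CPUs without ll/sc fall back to disabling interrupts, which is atomic
 * only on uniprocessor systems.  smp_mb__before_llsc() and smp_llsc_mb()
 * make the exchange fully ordered on SMP.
 */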
static inline unsigned long __xchg_u32(volatile int *m, unsigned int val)
{
        __u32 retval;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long dummy;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %3                  # xchg_u32      \n"
                "       .set    mips0                                   \n"
                "       move    %2, %z4                                 \n"
                "       .set    mips3                                   \n"
                "       sc      %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (retval), "=m" (*m), "=&r" (dummy)
                : "R" (*m), "Jr" (val)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long dummy;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       ll      %0, %3          # xchg_u32      \n"
                        "       .set    mips0                           \n"
                        "       move    %2, %z4                         \n"
                        "       .set    mips3                           \n"
                        "       sc      %2, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (retval), "=m" (*m), "=&r" (dummy)
                        : "R" (*m), "Jr" (val)
                        : "memory");
                } while (unlikely(!dummy));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                retval = *m;
                *m = val;
                raw_local_irq_restore(flags);   /* implies memory barrier  */
        }

        smp_llsc_mb();

        return retval;
}

#ifdef CONFIG_64BIT
static inline __u64 __xchg_u64(volatile __u64 *m, __u64 val)
{
        __u64 retval;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long dummy;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %3                  # xchg_u64      \n"
                "       move    %2, %z4                                 \n"
                "       scd     %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (retval), "=m" (*m), "=&r" (dummy)
                : "R" (*m), "Jr" (val)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long dummy;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       lld     %0, %3          # xchg_u64      \n"
                        "       move    %2, %z4                         \n"
                        "       scd     %2, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (retval), "=m" (*m), "=&r" (dummy)
                        : "R" (*m), "Jr" (val)
                        : "memory");
                } while (unlikely(!dummy));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                retval = *m;
                *m = val;
                raw_local_irq_restore(flags);   /* implies memory barrier  */
        }

        smp_llsc_mb();

        return retval;
}
#else
extern __u64 __xchg_u64_unsupported_on_32bit_kernels(volatile __u64 *m, __u64 val);
#define __xchg_u64 __xchg_u64_unsupported_on_32bit_kernels
#endif
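
/*
 * __xchg_u64_unsupported_on_32bit_kernels() is deliberately declared but
 * never defined, so an 8-byte xchg() on a 32-bit kernel fails at link
 * time instead of silently misbehaving -- the same trick as
 * __cmpxchg_called_with_bad_pointer() below.
 */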

static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
        switch (size) {
        case 4:
                return __xchg_u32(ptr, x);
        case 8:
                return __xchg_u64(ptr, x);
        }

        return x;
}

#define xchg(ptr, x)                                                    \
({                                                                      \
        BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc);                            \
                                                                        \
        ((__typeof__(*(ptr)))                                           \
                __xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));     \
})
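
/*
 * The BUILD_BUG_ON mask above rejects any operand size with bits outside
 * 0xc set, so 1- and 2-byte xchg() fails at compile time; only 4- and
 * 8-byte operands reach __xchg().
 *
 * A minimal usage sketch (hypothetical caller, not part of this header):
 *
 *      unsigned int old_state = xchg(&state_word, NEW_STATE);
 */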

#define __HAVE_ARCH_CMPXCHG 1

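/*
 * Emit one compare-and-exchange LL/SC sequence.  @ld and @st are the
 * load-linked/store-conditional mnemonics for the operand width
 * ("ll"/"sc" or "lld"/"scd").  The sequence uses $1, the assembler
 * temporary, as scratch; ".set noat" stops the assembler from using $1
 * behind our back.  As in __xchg_u32() above, the beqzl (branch-likely)
 * retry is the R10000 errata workaround; other LL/SC CPUs use a plain
 * beqz.
 */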
#define __cmpxchg_asm(ld, st, m, old, new)                              \
({                                                                      \
        __typeof(*(m)) __ret;                                           \
                                                                        \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                      \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    mips3                           \n"     \
                "1:     " ld "  %0, %2          # __cmpxchg_asm \n"     \
                "       bne     %0, %z3, 2f                     \n"     \
                "       .set    mips0                           \n"     \
                "       move    $1, %z4                         \n"     \
                "       .set    mips3                           \n"     \
                "       " st "  $1, %1                          \n"     \
                "       beqzl   $1, 1b                          \n"     \
                "2:                                             \n"     \
                "       .set    pop                             \n"     \
                : "=&r" (__ret), "=R" (*m)                              \
                : "R" (*m), "Jr" (old), "Jr" (new)                      \
                : "memory");                                            \
        } else if (kernel_uses_llsc) {                                  \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    mips3                           \n"     \
                "1:     " ld "  %0, %2          # __cmpxchg_asm \n"     \
                "       bne     %0, %z3, 2f                     \n"     \
                "       .set    mips0                           \n"     \
                "       move    $1, %z4                         \n"     \
                "       .set    mips3                           \n"     \
                "       " st "  $1, %1                          \n"     \
                "       beqz    $1, 1b                          \n"     \
                "       .set    pop                             \n"     \
                "2:                                             \n"     \
                : "=&r" (__ret), "=R" (*m)                              \
                : "R" (*m), "Jr" (old), "Jr" (new)                      \
                : "memory");                                            \
        } else {                                                        \
                unsigned long __flags;                                  \
                                                                        \
                raw_local_irq_save(__flags);                            \
                __ret = *m;                                             \
                if (__ret == old)                                       \
                        *m = new;                                       \
                raw_local_irq_restore(__flags);                         \
        }                                                               \
                                                                        \
        __ret;                                                          \
})

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern void __cmpxchg_called_with_bad_pointer(void);

#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier)             \
({                                                                      \
        __typeof__(ptr) __ptr = (ptr);                                  \
        __typeof__(*(ptr)) __old = (old);                               \
        __typeof__(*(ptr)) __new = (new);                               \
        __typeof__(*(ptr)) __res = 0;                                   \
                                                                        \
        pre_barrier;                                                    \
                                                                        \
        switch (sizeof(*(__ptr))) {                                     \
        case 4:                                                         \
                __res = __cmpxchg_asm("ll", "sc", __ptr, __old, __new); \
                break;                                                  \
        case 8:                                                         \
                if (sizeof(long) == 8) {                                \
                        __res = __cmpxchg_asm("lld", "scd", __ptr,      \
                                           __old, __new);               \
                        break;                                          \
                }                                                       \
                /* fall through: no 8-byte cmpxchg on 32-bit kernels */ \
        default:                                                        \
                __cmpxchg_called_with_bad_pointer();                    \
                break;                                                  \
        }                                                               \
                                                                        \
        post_barrier;                                                   \
                                                                        \
        __res;                                                          \
})

#define cmpxchg(ptr, old, new)          __cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
#define cmpxchg_local(ptr, old, new)    __cmpxchg(ptr, old, new, , )
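
/*
 * cmpxchg() is fully ordered via the barriers passed above;
 * cmpxchg_local() omits them and is only atomic with respect to the
 * local CPU.
 *
 * A minimal usage sketch (hypothetical caller, not part of this header):
 * take a lock word only if it is currently free.
 *
 *      int got_it = (cmpxchg(&lock_word, 0, 1) == 0);
 */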

#define cmpxchg64(ptr, o, n)                                            \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg((ptr), (o), (n));                                       \
  })

#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)                                      \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg_local((ptr), (o), (n));                                 \
  })
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif
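
/*
 * On 32-bit kernels there is no 8-byte LL/SC, so cmpxchg64_local() falls
 * back to the generic helper, which disables interrupts around a plain
 * load/compare/store and is therefore atomic only on the local CPU.
 * cmpxchg64() itself still needs 64-bit LL/SC: on a 32-bit kernel it
 * falls through to __cmpxchg_called_with_bad_pointer() at link time.
 */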

#endif /* __ASM_CMPXCHG_H */