/*
 * arch/powerpc/include/asm/cmpxchg.h
 * (karo-tx-linux.git blob, at commit "powerpc: Make {cmp}xchg* and
 *  their atomic_ versions fully ordered")
 */
#ifndef _ASM_POWERPC_CMPXCHG_H_
#define _ASM_POWERPC_CMPXCHG_H_

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>

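/*
 * A note on ordering (assumption based on asm/synch.h at this commit):
 * PPC_ATOMIC_ENTRY_BARRIER and PPC_ATOMIC_EXIT_BARRIER should both
 * expand to a full "sync" on SMP builds (and to nothing on UP), which
 * is what makes the plain, non-_local primitives below fully ordered,
 * per the commit "powerpc: Make {cmp}xchg* and their atomic_ versions
 * fully ordered".
 */
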
/*
 * Atomic exchange
 *
 * Changes the memory location '*ptr' to be val and returns
 * the previous value stored there.
 */
static __always_inline unsigned long
__xchg_u32(volatile void *p, unsigned long val)
{
        unsigned long prev;

        __asm__ __volatile__(
        PPC_ATOMIC_ENTRY_BARRIER
"1:     lwarx   %0,0,%2 \n"
        PPC405_ERR77(0,%2)
"       stwcx.  %3,0,%2 \n\
        bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        : "=&r" (prev), "+m" (*(volatile unsigned int *)p)
        : "r" (p), "r" (val)
        : "cc", "memory");

        return prev;
}
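
/*
 * The body above is the classic load-reserve/store-conditional retry
 * loop: lwarx loads *p and takes a reservation on it, stwcx. stores
 * val only if the reservation is still intact, and bne- loops back on
 * failure.  A rough C-level sketch (load_linked/store_conditional are
 * illustrative names, not real kernel helpers):
 *
 *	do {
 *		prev = load_linked(p);			// lwarx
 *	} while (!store_conditional(p, val));		// stwcx. + bne- 1b
 *
 * PPC405_ERR77 inserts a dcbt before the store-conditional on
 * CONFIG_IBM405_ERR77 kernels (the erratum #77 workaround) and
 * expands to nothing everywhere else.
 */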

/*
 * Atomic exchange, "local" flavour
 *
 * Same as __xchg_u32(), but without the entry/exit barriers: the
 * exchange itself is still atomic, but no ordering against other
 * CPUs is provided, matching the weaker xchg_local() contract.
 */
static __always_inline unsigned long
__xchg_u32_local(volatile void *p, unsigned long val)
{
        unsigned long prev;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%2 \n"
        PPC405_ERR77(0,%2)
"       stwcx.  %3,0,%2 \n\
        bne-    1b"
        : "=&r" (prev), "+m" (*(volatile unsigned int *)p)
        : "r" (p), "r" (val)
        : "cc", "memory");

        return prev;
}

#ifdef CONFIG_PPC64
static __always_inline unsigned long
__xchg_u64(volatile void *p, unsigned long val)
{
        unsigned long prev;

        __asm__ __volatile__(
        PPC_ATOMIC_ENTRY_BARRIER
"1:     ldarx   %0,0,%2 \n"
        PPC405_ERR77(0,%2)
"       stdcx.  %3,0,%2 \n\
        bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        : "=&r" (prev), "+m" (*(volatile unsigned long *)p)
        : "r" (p), "r" (val)
        : "cc", "memory");

        return prev;
}

static __always_inline unsigned long
__xchg_u64_local(volatile void *p, unsigned long val)
{
        unsigned long prev;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%2 \n"
        PPC405_ERR77(0,%2)
"       stdcx.  %3,0,%2 \n\
        bne-    1b"
        : "=&r" (prev), "+m" (*(volatile unsigned long *)p)
        : "r" (p), "r" (val)
        : "cc", "memory");

        return prev;
}
#endif

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid xchg().
 */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
__xchg(volatile void *ptr, unsigned long x, unsigned int size)
{
        switch (size) {
        case 4:
                return __xchg_u32(ptr, x);
#ifdef CONFIG_PPC64
        case 8:
                return __xchg_u64(ptr, x);
#endif
        }
        __xchg_called_with_bad_pointer();
        return x;
}
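
/*
 * Since __xchg() is __always_inline and sizeof(*(ptr)) is a
 * compile-time constant, the switch collapses to a single direct call
 * to the right-sized helper.  Only an unsupported size leaves behind
 * a reference to the undefined __xchg_called_with_bad_pointer(),
 * turning an invalid xchg() into a link-time error instead of a
 * silent runtime bug.
 */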

static __always_inline unsigned long
__xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
{
        switch (size) {
        case 4:
                return __xchg_u32_local(ptr, x);
#ifdef CONFIG_PPC64
        case 8:
                return __xchg_u64_local(ptr, x);
#endif
        }
        __xchg_called_with_bad_pointer();
        return x;
}
#define xchg(ptr,x)                                                          \
  ({                                                                         \
     __typeof__(*(ptr)) _x_ = (x);                                           \
     (__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
  })
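
/*
 * Illustrative use of xchg() (variable names are hypothetical):
 *
 *	unsigned int flag = 0;
 *	unsigned int old;
 *
 *	old = xchg(&flag, 1);	// old == 0, flag == 1, atomically
 */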

#define xchg_local(ptr,x)                                                    \
  ({                                                                         \
     __typeof__(*(ptr)) _x_ = (x);                                           \
     (__typeof__(*(ptr))) __xchg_local((ptr),                                \
                (unsigned long)_x_, sizeof(*(ptr)));                         \
  })

/*
 * Compare and exchange - if *p == old, set it to new,
 * and return the old value of *p.
 */

static __always_inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
{
        unsigned int prev;

        __asm__ __volatile__ (
        PPC_ATOMIC_ENTRY_BARRIER
"1:     lwarx   %0,0,%2         # __cmpxchg_u32\n\
        cmpw    0,%0,%3\n\
        bne-    2f\n"
        PPC405_ERR77(0,%2)
"       stwcx.  %4,0,%2\n\
        bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        "\n\
2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc", "memory");

        return prev;
}
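
/*
 * The body above is compare-and-swap: lwarx loads and reserves *p,
 * cmpw checks the loaded value against 'old', bne- 2f bails out on a
 * mismatch (returning the value actually seen), and stwcx./bne- 1b
 * retries if the reservation was lost between load and store.  As a
 * rough, non-atomic C sketch of the semantics:
 *
 *	prev = *p;
 *	if (prev == old)
 *		*p = new;
 *	return prev;
 */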

static __always_inline unsigned long
__cmpxchg_u32_local(volatile unsigned int *p, unsigned long old,
                        unsigned long new)
{
        unsigned int prev;

        __asm__ __volatile__ (
"1:     lwarx   %0,0,%2         # __cmpxchg_u32_local\n\
        cmpw    0,%0,%3\n\
        bne-    2f\n"
        PPC405_ERR77(0,%2)
"       stwcx.  %4,0,%2\n\
        bne-    1b"
        "\n\
2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc", "memory");

        return prev;
}

#ifdef CONFIG_PPC64
static __always_inline unsigned long
__cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
{
        unsigned long prev;

        __asm__ __volatile__ (
        PPC_ATOMIC_ENTRY_BARRIER
"1:     ldarx   %0,0,%2         # __cmpxchg_u64\n\
        cmpd    0,%0,%3\n\
        bne-    2f\n\
        stdcx.  %4,0,%2\n\
        bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        "\n\
2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc", "memory");

        return prev;
}

static __always_inline unsigned long
__cmpxchg_u64_local(volatile unsigned long *p, unsigned long old,
                        unsigned long new)
{
        unsigned long prev;

        __asm__ __volatile__ (
"1:     ldarx   %0,0,%2         # __cmpxchg_u64_local\n\
        cmpd    0,%0,%3\n\
        bne-    2f\n\
        stdcx.  %4,0,%2\n\
        bne-    1b"
        "\n\
2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc", "memory");

        return prev;
}
#endif

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
          unsigned int size)
{
        switch (size) {
        case 4:
                return __cmpxchg_u32(ptr, old, new);
#ifdef CONFIG_PPC64
        case 8:
                return __cmpxchg_u64(ptr, old, new);
#endif
        }
        __cmpxchg_called_with_bad_pointer();
        return old;
}

static __always_inline unsigned long
__cmpxchg_local(volatile void *ptr, unsigned long old, unsigned long new,
          unsigned int size)
{
        switch (size) {
        case 4:
                return __cmpxchg_u32_local(ptr, old, new);
#ifdef CONFIG_PPC64
        case 8:
                return __cmpxchg_u64_local(ptr, old, new);
#endif
        }
        __cmpxchg_called_with_bad_pointer();
        return old;
}

#define cmpxchg(ptr, o, n)                                               \
  ({                                                                     \
     __typeof__(*(ptr)) _o_ = (o);                                       \
     __typeof__(*(ptr)) _n_ = (n);                                       \
     (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,           \
                                    (unsigned long)_n_, sizeof(*(ptr))); \
  })
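
/*
 * Illustrative use of cmpxchg() (names are hypothetical): the
 * canonical retry loop for a lock-free read-modify-write, here
 * doubling a shared counter.
 *
 *	unsigned int old, new;
 *
 *	do {
 *		old = counter;
 *		new = old * 2;
 *	} while (cmpxchg(&counter, old, new) != old);
 */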

#define cmpxchg_local(ptr, o, n)                                         \
  ({                                                                     \
     __typeof__(*(ptr)) _o_ = (o);                                       \
     __typeof__(*(ptr)) _n_ = (n);                                       \
     (__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_,     \
                                    (unsigned long)_n_, sizeof(*(ptr))); \
  })

#ifdef CONFIG_PPC64
#define cmpxchg64(ptr, o, n)                                            \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg((ptr), (o), (n));                                       \
  })
#define cmpxchg64_local(ptr, o, n)                                      \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg_local((ptr), (o), (n));                                 \
  })
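/*
 * Mapping _relaxed onto _local below is sound on PPC64: as seen in
 * __cmpxchg_u64_local() above, the _local variant is the same
 * ldarx/stdcx. sequence with no barriers, i.e. exactly an atomic but
 * unordered (relaxed) cmpxchg on this architecture.
 */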
#define cmpxchg64_relaxed       cmpxchg64_local
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CMPXCHG_H_ */