#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#include <linux/types.h>

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		{ (i) }

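/*
 * Usage sketch (illustrative only; "nr_widgets" is a hypothetical
 * variable, not something defined here): ATOMIC_INIT initializes an
 * atomic_t at compile time,
 *
 *	static atomic_t nr_widgets = ATOMIC_INIT(0);
 *
 * while run-time (re)initialization uses atomic_set() below.
 */
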
static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

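/*
 * All of the read-modify-write operations below use the same
 * load-reserve/store-conditional retry loop: lwarx loads the counter
 * and places a reservation on it, stwcx. stores the new value only if
 * the reservation is still intact, and bne- loops on failure.  As a
 * rough C sketch (load_and_reserve() and store_conditional() are
 * hypothetical stand-ins for lwarx/stwcx., not real helpers):
 *
 *	do {
 *		old = load_and_reserve(&v->counter);
 *	} while (!store_conditional(&v->counter, old + a));
 *
 * PPC405_ERR77() expands to a workaround instruction on 405-class
 * cores and to nothing elsewhere.
 */
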
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

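/*
 * Usage sketch for atomic_cmpxchg() (illustrative only; clamp_add() is
 * a hypothetical helper, not part of this header): the classic
 * compare-and-swap retry loop.
 *
 *	static int clamp_add(atomic_t *v, int a, int max)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);
 *			new = old + a;
 *			if (new > max)
 *				return 0;
 *		} while (atomic_cmpxchg(v, old, new) != old);
 *
 *		return 1;
 *	}
 */
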
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))

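/*
 * The self-referential #define above is intentional: it makes
 * "#ifdef atomic_inc_not_zero" true, so generic code can see that this
 * architecture provides its own optimized version and skip the
 * fallback built on atomic_add_unless().
 */
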
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

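/*
 * Usage sketch for atomic_dec_and_test() (illustrative only; struct
 * widget and widget_destroy() are hypothetical): the classic
 * reference-count release pattern, where whoever drops the last
 * reference frees the object.
 *
 *	void widget_put(struct widget *w)
 *	{
 *		if (atomic_dec_and_test(&w->refcount))
 *			widget_destroy(w);
 *	}
 */
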
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

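/*
 * Callers test the sign of the result: a negative return value means
 * *v was not positive and no decrement took place, e.g. (illustrative
 * only):
 *
 *	if (atomic_dec_if_positive(&resource_count) < 0)
 *		return -EAGAIN;
 */
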
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

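/*
 * These are full barriers because atomic_inc()/atomic_dec() themselves
 * imply no memory ordering; code that needs ordering around the void
 * operations brackets them with these macros.
 */
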
#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

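/*
 * The 64-bit variants below mirror the 32-bit ones, substituting
 * ld/std for the plain accesses and ldarx/stdcx. for the reservation
 * loops.  The PPC405_ERR77 workaround does not appear here, since the
 * 405 is a 32-bit core and this block is only compiled for
 * __powerpc64__.
 */
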
static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add happened (@v was not @u), and zero
 * otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */