#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__
#include <linux/types.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)		{ (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
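/*
 * Usage sketch (illustrative only; "wrapped" is a hypothetical counter):
 * starting at -1, the first increment reaches zero, so the test fires
 * exactly once.
 *
 *	static atomic_t wrapped = ATOMIC_INIT(-1);
 *
 *	if (atomic_inc_and_test(&wrapped))
 *		pr_debug("counter reached zero\n");
 */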
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
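/*
 * Usage sketch (illustrative only): a lock-free "raise to at least" helper
 * built on atomic_cmpxchg().  atomic_set_max() is a made-up name, not a
 * kernel API.
 *
 *	static inline void atomic_set_max(atomic_t *v, int new)
 *	{
 *		int old = atomic_read(v);
 *
 *		while (old < new) {
 *			int seen = atomic_cmpxchg(v, old, new);
 *			if (seen == old)
 *				break;		// we won the race
 *			old = seen;		// lost; retry with fresh value
 *		}
 *	}
 */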
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
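/*
 * Usage sketch (illustrative only; "obj" and "refs" are hypothetical
 * names): the lookup-side half of a refcounted object, which must not
 * pin an object whose last reference has already been dropped.
 *
 *	if (!atomic_inc_not_zero(&obj->refs))
 *		obj = NULL;	// racing with final put; treat as not found
 */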
#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_dec_if_positive atomic_dec_if_positive
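/*
 * Usage sketch (illustrative only; "free_slots" is a hypothetical
 * counter): claim one slot without ever driving the count below zero.
 * A negative return means the count was already 0 and is unchanged.
 *
 *	if (atomic_dec_if_positive(&free_slots) < 0)
 *		return -EAGAIN;	// no slot available
 */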
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()
#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
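/*
 * Usage sketch (illustrative only; "in_flight" and "done" are
 * hypothetical): a 64-bit counter biased to -N while N items are
 * outstanding, so the completion that brings it back to zero fires once.
 *
 *	if (atomic64_inc_and_test(&in_flight))
 *		complete(&done);	// last outstanding item finished
 */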
static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the addition was performed, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}
/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */