/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
#include <linux/config.h>
#include <linux/spinlock.h>

#include <asm/cpu-features.h>
#include <asm/war.h>		/* R10000_LLSC_WAR */

extern spinlock_t atomic_lock;

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		((v)->counter = (i))

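/*
 * A usage sketch, illustrative only (the counter name is hypothetical):
 *
 *	static atomic_t nr_widgets = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_widgets, 5);
 *	printk("%d widgets\n", atomic_read(&nr_widgets));
 *
 * atomic_read() and atomic_set() compile to plain loads and stores of
 * the volatile counter; no LL/SC sequence or lock is involved.
 */
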
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		/* R10000 errata workaround: retry with branch-likely (beqzl) */
		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		/* no LL/SC on this CPU: fall back to the global spinlock */
		spin_lock_irqsave(&atomic_lock, flags);
		v->counter += i;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}
}

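/*
 * What the LL/SC loop above computes, as C-like pseudocode; this is a
 * sketch only, since plain C cannot express the atomicity, and
 * store_conditional() is a fictional stand-in for the sc instruction:
 *
 *	do {
 *		temp = v->counter;				// ll
 *		temp += i;					// addu
 *	} while (!store_conditional(&v->counter, temp));	// sc + beqz
 *
 * sc stores the register only if no other CPU has written the word
 * since the ll, and leaves a success flag in the register, so the loop
 * retries until the read-modify-write has happened atomically.
 */
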
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		v->counter -= i;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result += i;
		v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if the result is greater than or
 * equal to zero.  The new value is stored back only in that case; the
 * function returns @v minus @i either way, so a negative return value
 * means @v was left unchanged.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))

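/*
 * atomic_cmpxchg() replaces the counter with @n only if it still holds
 * @o and returns the old value either way.  A typical compare-and-swap
 * update loop, as a sketch (compute_update() is hypothetical):
 *
 *	atomic_t v = ATOMIC_INIT(0);
 *	int old, new;
 *
 *	do {
 *		old = atomic_read(&v);
 *		new = compute_update(old);	// any function of old
 *	} while (atomic_cmpxchg(&v, old, new) != old);
 */
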
/*
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

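/*
 * atomic_inc_not_zero() is the classic "take a reference only if the
 * object is still live" primitive: once a refcount has reached zero
 * the object may already be being freed, so the increment must not
 * happen.  A sketch (struct and field names hypothetical):
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;	// already dying, no reference taken
 *	return obj;
 */
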
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

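/*
 * The *_return forms are useful when the updated value itself is
 * needed, e.g. for handing out sequence numbers.  A sketch (next_id
 * is hypothetical):
 *
 *	static atomic_t next_id = ATOMIC_INIT(0);
 *
 *	int id = atomic_inc_return(&next_id);	// 1, 2, 3, ... per call
 */
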
/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

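/*
 * atomic_dec_and_test() is the usual way to drop a reference: exactly
 * one CPU observes the counter reach zero and performs the cleanup.
 * A sketch (names hypothetical):
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		kfree(obj);	// last reference is gone
 */
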
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

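/*
 * Because atomic_dec_if_positive() stores the decremented value only
 * when it does not go negative, it can implement a counting-semaphore
 * style "try to take one unit".  A sketch (free_slots is hypothetical):
 *
 *	if (atomic_dec_if_positive(&free_slots) < 0)
 *		return -EBUSY;	// nothing taken, counter unchanged
 */
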
/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)	((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		v->counter += i;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		v->counter -= i;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result += i;
		v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if the result is greater than or
 * equal to zero.  The new value is stored back only in that case; the
 * function returns @v minus @i either way, so a negative return value
 * means @v was left unchanged.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * ones.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

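/*
 * A sketch of how the barriers above are used: when lockless code must
 * make earlier stores visible before a non-returning atomic op (names
 * hypothetical):
 *
 *	obj->state = OBJ_DEAD;
 *	smp_mb__before_atomic_dec();	// order the store before the dec
 *	atomic_dec(&obj->refcnt);
 */
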
#endif /* _ASM_ATOMIC_H */