#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static __always_inline int atomic_read(const atomic_t *v)
{
	return ACCESS_ONCE((v)->counter);
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static __always_inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
}

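/*
 * A minimal usage sketch (illustrative only, not part of this header;
 * the counter name nr_events is hypothetical): static initialization
 * with ATOMIC_INIT() plus run-time set and read.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_events, 0);
 *	pr_info("events: %d\n", atomic_read(&nr_events));
 */
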
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __always_inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

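/*
 * A minimal usage sketch (illustrative; nr_reserved and nr_pages are
 * hypothetical): adjusting a statistics counter.  Both helpers return
 * void; use the *_return() variants further down when the updated value
 * is needed.
 *
 *	atomic_add(nr_pages, &nr_reserved);
 *	atomic_sub(nr_pages, &nr_reserved);
 */
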
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline int atomic_sub_and_test(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e");
}

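/*
 * A hedged sketch (illustrative; struct foo, f and foo_release() are
 * hypothetical): dropping several references at once and freeing the
 * object when the count hits zero.
 *
 *	if (atomic_sub_and_test(nr_refs, &f->refcnt))
 *		foo_release(f);
 */
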
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline int atomic_dec_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");
}

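/*
 * A minimal sketch of the common reference-count "put" pattern
 * (illustrative; struct foo and foo_release() are hypothetical):
 *
 *	static void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcnt))
 *			foo_release(f);
 *	}
 */
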
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline int atomic_inc_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e");
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline int atomic_add_negative(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s");
}

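/*
 * An illustrative sketch (pool_balance and nr_taken are hypothetical):
 * detecting, in a single atomic step, when a signed balance drops below
 * zero.
 *
 *	if (atomic_add_negative(-nr_taken, &pool_balance))
 *		... the pool is now over-committed ...
 */
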
/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline int atomic_add_return(int i, atomic_t *v)
{
	return i + xadd(&v->counter, i);
}

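/*
 * Note: xadd() exchanges-and-adds, returning the value v->counter held
 * *before* the addition, so adding @i back on top yields the new value.
 */
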
/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

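/*
 * A minimal sketch (illustrative; next_id is hypothetical): handing out
 * unique, monotonically increasing identifiers.
 *
 *	int id = atomic_inc_return(&next_id);
 */
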
static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}

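/*
 * A hedged usage note: atomic_cmpxchg() returns the value @v held before
 * the operation, so the swap succeeded iff the return value equals @old.
 * __atomic_add_unless() below is a worked example of the usual retry
 * loop built on top of it.
 */
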
/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}

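/*
 * Illustrative note: this is the arch backend for the generic
 * atomic_add_unless() and atomic_inc_not_zero() helpers in
 * <linux/atomic.h>, e.g. taking a reference only while the object is
 * still live (f is hypothetical):
 *
 *	if (atomic_inc_not_zero(&f->refcnt))
 *		... got a reference ...
 */
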
/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer to type short int
 *
 * Atomically adds 1 to @v
 * Returns the new value of @v
 */
static __always_inline short int atomic_inc_short(short int *v)
{
	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
	return *v;
}

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "andl %0,%1"			\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "orl %0,%1"			\
		     : : "r" ((unsigned)(mask)), "m" (*(addr))	\
		     : "memory")

#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif

#endif /* _ASM_X86_ATOMIC_H */