#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }
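/*
 * Illustrative use (not part of this header): ATOMIC_INIT() is for
 * compile-time initialization, atomic_set() for runtime:
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 */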
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static inline int atomic_read(const atomic_t *v)
{
	return (*(volatile int *)&(v)->counter);
}
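/*
 * Note: the cast through a volatile pointer forces the compiler to
 * perform a real load from memory on every call instead of reusing a
 * cached value; no LOCK prefix is needed because aligned 32-bit loads
 * are already atomic on x86.
 */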
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
}
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}
/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}
/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}
/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
#ifdef CONFIG_M386
	int __i;
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	return i + xadd(&v->counter, i);

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	raw_local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	raw_local_irq_restore(flags);
	return i + __i;
#endif
}
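/*
 * Unlike atomic_add(), atomic_add_return() hands back the post-add
 * value. Illustrative use (hypothetical 'next_id' counter):
 *
 *	int id = atomic_add_return(1, &next_id);
 *
 * 'id' holds the value of the counter after this increment.
 */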
/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)	(atomic_add_return(1, v))
#define atomic_dec_return(v)	(atomic_sub_return(1, v))
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}
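/*
 * atomic_cmpxchg() returns the value actually found in @v, so success
 * is checked as "return value == old". Illustrative use (hypothetical
 * IDLE/BUSY state values):
 *
 *	if (atomic_cmpxchg(&state, IDLE, BUSY) == IDLE)
 *		do_transition();
 */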
static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}
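/*
 * Illustrative use (hypothetical 'pending' counter): atomically fetch
 * and reset, so no concurrent increments are lost:
 *
 *	int batch = atomic_xchg(&pending, 0);
 */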
/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
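/*
 * This is the canonical cmpxchg() retry loop: reread on failure until
 * the update lands or @u is seen. Generic code layers
 * atomic_add_unless() and atomic_inc_not_zero() on top of it (the
 * latter is effectively atomic_add_unless(v, 1, 0)).
 */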
/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer of type short int
 *
 * Atomically adds 1 to @v
 * Returns the new value of @v
 */
static inline short int atomic_inc_short(short int *v)
{
	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
	return *v;
}
#ifdef CONFIG_X86_64
/**
 * atomic_or_long - OR of two long integers
 * @v1: pointer to type unsigned long
 * @v2: pointer to type unsigned long
 *
 * Atomically ORs @v2 into @v1 (the 'orq' instruction is 64-bit only,
 * hence the CONFIG_X86_64 guard).
 */
static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
{
	asm(LOCK_PREFIX "orq %1, %0" : "+m" (*v1) : "r" (v2));
}
#endif
/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "andl %0,%1"			\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")
#define atomic_set_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "orl %0,%1"			\
		     : : "r" ((unsigned)(mask)), "m" (*(addr))	\
		     : "memory")
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif

#endif /* _ASM_X86_ATOMIC_H */