/*
 * Copyright IBM Corp. 1999, 2009
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *	      Denis Joseph Barrow,
 *	      Arnd Bergmann <arndb@de.ibm.com>,
 *
 * Atomic operations that C can't guarantee us.
 * Useful for resource counting etc.
 * s390 uses 'Compare And Swap' for atomicity in SMP environments.
 */
#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)	{ (i) }
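
/*
 * Two implementations of the basic atomic read-modify-write primitive:
 * machines with the z196 interlocked-access facility can use the
 * load-and-* instructions (lao/lan/laa), which perform the update
 * atomically in a single instruction; older machines fall back to a
 * compare-and-swap (cs) retry loop.
 */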
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

#define __ATOMIC_OR	"lao"
#define __ATOMIC_AND	"lan"
#define __ATOMIC_ADD	"laa"

#define __ATOMIC_LOOP(ptr, op_val, op_string)				\
({									\
	int old_val;							\
									\
	typecheck(atomic_t *, ptr);					\
	asm volatile(							\
		op_string "	%0,%2,%1\n"				\
		: "=d" (old_val), "+Q" ((ptr)->counter)			\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})
#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define __ATOMIC_OR	"or"
#define __ATOMIC_AND	"nr"
#define __ATOMIC_ADD	"ar"

#define __ATOMIC_LOOP(ptr, op_val, op_string)				\
({									\
	int old_val, new_val;						\
									\
	typecheck(atomic_t *, ptr);					\
	asm volatile(							\
		"	l	%0,%2\n"				\
		"0:	lr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	cs	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val), "+Q" ((ptr)->counter)\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})
#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
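
/*
 * Aligned 32-bit loads and stores are atomic on s390, so atomic_read()
 * and atomic_set() only need a single load (l) / store (st) instruction.
 */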
static inline int atomic_read(const atomic_t *v)
{
	int c;

	asm volatile(
		"	l	%0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}
static inline void atomic_set(atomic_t *v, int i)
{
	asm volatile(
		"	st	%1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}
static inline int atomic_add_return(int i, atomic_t *v)
{
	return __ATOMIC_LOOP(v, i, __ATOMIC_ADD) + i;
}
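
/*
 * With the z196 interlocked-access facility, "add immediate" (asi)
 * updates the counter atomically without needing the value in a
 * register, so constant increments in the signed 8-bit immediate
 * range [-128, 127] take the fast path below.
 */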
static inline void atomic_add(int i, atomic_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		asm volatile(
			"asi	%0,%1\n"
			: "+Q" (v->counter)
			: "i" (i)
			: "cc", "memory");
	} else {
		atomic_add_return(i, v);
	}
#else
	atomic_add_return(i, v);
#endif
}
#define atomic_add_negative(_i, _v)	(atomic_add_return(_i, _v) < 0)
#define atomic_inc(_v)			atomic_add(1, _v)
#define atomic_inc_return(_v)		atomic_add_return(1, _v)
#define atomic_inc_and_test(_v)		(atomic_add_return(1, _v) == 0)
#define atomic_sub(_i, _v)		atomic_add(-(int)(_i), _v)
#define atomic_sub_return(_i, _v)	atomic_add_return(-(int)(_i), _v)
#define atomic_sub_and_test(_i, _v)	(atomic_sub_return(_i, _v) == 0)
#define atomic_dec(_v)			atomic_sub(1, _v)
#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
#define atomic_dec_and_test(_v)		(atomic_sub_return(1, _v) == 0)
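
/*
 * atomic_clear_mask() clears and atomic_set_mask() sets the bits given
 * in mask, using an atomic AND with ~mask / OR with mask on the counter.
 */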
static inline void atomic_clear_mask(unsigned long mask, atomic_t *v)
{
	__ATOMIC_LOOP(v, ~mask, __ATOMIC_AND);
}

static inline void atomic_set_mask(unsigned long mask, atomic_t *v)
{
	__ATOMIC_LOOP(v, mask, __ATOMIC_OR);
}
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
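
/*
 * cs (compare and swap) compares "old" with the counter and, only if
 * they match, stores "new"; the value found in memory is returned
 * either way.
 */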
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	asm volatile(
		"	cs	%0,%2,%1"
		: "+d" (old), "+Q" (v->counter)
		: "d" (new)
		: "cc", "memory");
	return old;
}
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}

#undef __ATOMIC_LOOP
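
/*
 * The atomic64_t operations mirror the 32-bit ones. On 64-bit kernels
 * they use the g-suffixed instructions (laag/lang/laog or a csg loop);
 * on 31-bit kernels they are emulated with a 64-bit register pair and
 * compare double and swap (cds).
 */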
#define ATOMIC64_INIT(i)  { (i) }

#ifdef CONFIG_64BIT

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
#define __ATOMIC64_OR	"laog"
#define __ATOMIC64_AND	"lang"
#define __ATOMIC64_ADD	"laag"

#define __ATOMIC64_LOOP(ptr, op_val, op_string)				\
({									\
	long long old_val;						\
									\
	typecheck(atomic64_t *, ptr);					\
	asm volatile(							\
		op_string "	%0,%2,%1\n"				\
		: "=d" (old_val), "+Q" ((ptr)->counter)			\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})
#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define __ATOMIC64_OR	"ogr"
#define __ATOMIC64_AND	"ngr"
#define __ATOMIC64_ADD	"agr"

#define __ATOMIC64_LOOP(ptr, op_val, op_string)				\
({									\
	long long old_val, new_val;					\
									\
	typecheck(atomic64_t *, ptr);					\
	asm volatile(							\
		"	lg	%0,%2\n"				\
		"0:	lgr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	csg	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val), "+Q" ((ptr)->counter)\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})
#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
static inline long long atomic64_read(const atomic64_t *v)
{
	long long c;

	asm volatile(
		"	lg	%0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}
static inline void atomic64_set(atomic64_t *v, long long i)
{
	asm volatile(
		"	stg	%1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}
static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	return __ATOMIC64_LOOP(v, i, __ATOMIC64_ADD) + i;
}
static inline void atomic64_clear_mask(unsigned long mask, atomic64_t *v)
{
	__ATOMIC64_LOOP(v, ~mask, __ATOMIC64_AND);
}

static inline void atomic64_set_mask(unsigned long mask, atomic64_t *v)
{
	__ATOMIC64_LOOP(v, mask, __ATOMIC64_OR);
}
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
static inline long long atomic64_cmpxchg(atomic64_t *v,
					 long long old, long long new)
{
	asm volatile(
		"	csg	%0,%2,%1"
		: "+d" (old), "+Q" (v->counter)
		: "d" (new)
		: "cc", "memory");
	return old;
}

#undef __ATOMIC64_LOOP
#else /* CONFIG_64BIT */
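
/*
 * 31-bit kernel: atomic64_t is emulated with an even/odd register pair.
 * lm/stm load and store the pair, and cds (compare double and swap)
 * provides the atomic 64-bit update.
 */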
typedef struct {
	long long counter;
} atomic64_t;

static inline long long atomic64_read(const atomic64_t *v)
{
	register_pair rp;

	asm volatile(
		"	lm	%0,%N0,%1"
		: "=&d" (rp) : "Q" (v->counter));
	return rp.pair;
}
static inline void atomic64_set(atomic64_t *v, long long i)
{
	register_pair rp = {.pair = i};

	asm volatile(
		"	stm	%1,%N1,%0"
		: "=Q" (v->counter) : "d" (rp));
}
static inline long long atomic64_xchg(atomic64_t *v, long long new)
{
	register_pair rp_new = {.pair = new};
	register_pair rp_old;

	asm volatile(
		"	lm	%0,%N0,%1\n"
		"0:	cds	%0,%2,%1\n"
		"	jl	0b\n"
		: "=&d" (rp_old), "+Q" (v->counter)
		: "d" (rp_new)
		: "cc");
	return rp_old.pair;
}
static inline long long atomic64_cmpxchg(atomic64_t *v,
					 long long old, long long new)
{
	register_pair rp_old = {.pair = old};
	register_pair rp_new = {.pair = new};

	asm volatile(
		"	cds	%0,%2,%1"
		: "+&d" (rp_old), "+Q" (v->counter)
		: "d" (rp_new)
		: "cc");
	return rp_old.pair;
}
static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	long long old, new;

	do {
		old = atomic64_read(v);
		new = old + i;
	} while (atomic64_cmpxchg(v, old, new) != old);
	return new;
}
static inline void atomic64_set_mask(unsigned long long mask, atomic64_t *v)
{
	long long old, new;

	do {
		old = atomic64_read(v);
		new = old | mask;
	} while (atomic64_cmpxchg(v, old, new) != old);
}
static inline void atomic64_clear_mask(unsigned long long mask, atomic64_t *v)
{
	long long old, new;

	do {
		old = atomic64_read(v);
		new = old & ~mask;
	} while (atomic64_cmpxchg(v, old, new) != old);
}
#endif /* CONFIG_64BIT */
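
/*
 * As with atomic_add(), constant operands in the signed 8-bit immediate
 * range can use "add immediate" (agsi) on z196 or newer machines.
 */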
static inline void atomic64_add(long long i, atomic64_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		asm volatile(
			"agsi	%0,%1\n"
			: "+Q" (v->counter)
			: "i" (i)
			: "cc", "memory");
	} else {
		atomic64_add_return(i, v);
	}
#else
	atomic64_add_return(i, v);
#endif
}
static inline int atomic64_add_unless(atomic64_t *v, long long i, long long u)
{
	long long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic64_cmpxchg(v, c, c + i);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}
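
/*
 * Decrement the counter unless the result would be negative; returns
 * the decremented value, or a negative value if the counter was not
 * changed.
 */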
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long c, old, dec;

	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
#define atomic64_add_negative(_i, _v)	(atomic64_add_return(_i, _v) < 0)
#define atomic64_inc(_v)		atomic64_add(1, _v)
#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_and_test(_v)	(atomic64_add_return(1, _v) == 0)
#define atomic64_sub_return(_i, _v)	atomic64_add_return(-(long long)(_i), _v)
#define atomic64_sub(_i, _v)		atomic64_add(-(long long)(_i), _v)
#define atomic64_sub_and_test(_i, _v)	(atomic64_sub_return(_i, _v) == 0)
#define atomic64_dec(_v)		atomic64_sub(1, _v)
#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
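
/*
 * Barrier hooks used by common code around atomic increments and
 * decrements; mapped to full memory barriers here.
 */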
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

#endif /* __ARCH_S390_ATOMIC__ */