1 /* Atomic operations usable in machine independent code */
2 #ifndef _LINUX_ATOMIC_H
3 #define _LINUX_ATOMIC_H
4 #include <asm/atomic.h>
5 #include <asm/barrier.h>
8 * Relaxed variants of xchg, cmpxchg and some atomic operations.
10 * We support four variants:
12 * - Fully ordered: The default implementation, no suffix required.
13 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
14 * - Release: Provides RELEASE semantics, _release suffix.
15 * - Relaxed: No ordering guarantees, _relaxed suffix.
17 * For compound atomics performing both a load and a store, ACQUIRE
18 * semantics apply only to the load and RELEASE semantics only to the
19 * store portion of the operation. Note that a failed cmpxchg_acquire
20 * does -not- imply any memory ordering constraints.
22 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
#ifndef atomic_read_acquire
#define  atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define  atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * Besides, if an arch has a special barrier for acquire/release, it could
 * implement its own __atomic_op_* and use the same framework for building
 * variants
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret  = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif
/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define  atomic_add_return_relaxed	atomic_add_return
#define  atomic_add_return_acquire	atomic_add_return
#define  atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define  atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define  atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define  atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define  atomic_inc_return_relaxed	atomic_inc_return
#define  atomic_inc_return_acquire	atomic_inc_return
#define  atomic_inc_return_release	atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define  atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define  atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define  atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define  atomic_sub_return_relaxed	atomic_sub_return
#define  atomic_sub_return_acquire	atomic_sub_return
#define  atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define  atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define  atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define  atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define  atomic_dec_return_relaxed	atomic_dec_return
#define  atomic_dec_return_acquire	atomic_dec_return
#define  atomic_dec_return_release	atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define  atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define  atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define  atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */
/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)					\
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)						\
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

/* No arch-provided fetch_inc at all: build every variant from fetch_add */
#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)					\
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)						\
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)					\
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)						\
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

/* No arch-provided fetch_dec at all: build every variant from fetch_sub */
#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)					\
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)						\
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */
/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed	atomic_fetch_or
#define atomic_fetch_or_acquire	atomic_fetch_or
#define atomic_fetch_or_release	atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)					\
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)						\
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)					\
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)						\
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

/*
 * atomic_fetch_andnot variants only exist when the arch provides
 * atomic_andnot; otherwise the generic fallbacks further down apply.
 */
#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
#define atomic_fetch_andnot_acquire	atomic_fetch_andnot
#define atomic_fetch_andnot_release	atomic_fetch_andnot

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)				\
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)					\
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)					\
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)						\
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */
/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define  atomic_xchg_relaxed		atomic_xchg
#define  atomic_xchg_acquire		atomic_xchg
#define  atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define  atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define  atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define  atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define  atomic_cmpxchg_relaxed		atomic_cmpxchg
#define  atomic_cmpxchg_acquire		atomic_cmpxchg
#define  atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define  atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define  atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define  atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define  cmpxchg_relaxed		cmpxchg
#define  cmpxchg_acquire		cmpxchg
#define  cmpxchg_release		cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define  cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define  cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define  cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define  cmpxchg64_relaxed		cmpxchg64
#define  cmpxchg64_acquire		cmpxchg64
#define  cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define  cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define  cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define  cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define  xchg_relaxed			xchg
#define  xchg_acquire			xchg
#define  xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define  xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define  xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define  xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */
496 * atomic_add_unless - add unless the number is already a given value
497 * @v: pointer of type atomic_t
498 * @a: the amount to add to v...
499 * @u: ...unless v is equal to u.
501 * Atomically adds @a to @v, so long as @v was not already @u.
502 * Returns non-zero if @v was not @u, and zero otherwise.
504 static inline int atomic_add_unless(atomic_t *v, int a, int u)
506 return __atomic_add_unless(v, a, u) != u;
510 * atomic_inc_not_zero - increment unless the number is zero
511 * @v: pointer of type atomic_t
513 * Atomically increments @v by 1, so long as @v is non-zero.
514 * Returns non-zero if @v was non-zero, and zero otherwise.
516 #ifndef atomic_inc_not_zero
517 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
520 #ifndef atomic_andnot
521 static inline void atomic_andnot(int i, atomic_t *v)
526 static inline int atomic_fetch_andnot(int i, atomic_t *v)
528 return atomic_fetch_and(~i, v);
531 static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
533 return atomic_fetch_and_relaxed(~i, v);
536 static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
538 return atomic_fetch_and_acquire(~i, v);
541 static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
543 return atomic_fetch_and_release(~i, v);
548 * atomic_inc_not_zero_hint - increment if not null
549 * @v: pointer of type atomic_t
550 * @hint: probable value of the atomic before the increment
552 * This version of atomic_inc_not_zero() gives a hint of probable
553 * value of the atomic. This helps processor to not read the memory
554 * before doing the atomic read/modify/write cycle, lowering
555 * number of bus transactions on some arches.
557 * Returns: 0 if increment was not done, 1 otherwise.
559 #ifndef atomic_inc_not_zero_hint
560 static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint)
564 /* sanity test, should be removed by compiler if hint is a constant */
566 return atomic_inc_not_zero(v);
569 val = atomic_cmpxchg(v, c, c + 1);
579 #ifndef atomic_inc_unless_negative
580 static inline int atomic_inc_unless_negative(atomic_t *p)
583 for (v = 0; v >= 0; v = v1) {
584 v1 = atomic_cmpxchg(p, v, v + 1);
592 #ifndef atomic_dec_unless_positive
593 static inline int atomic_dec_unless_positive(atomic_t *p)
596 for (v = 0; v <= 0; v = v1) {
597 v1 = atomic_cmpxchg(p, v, v - 1);
606 * atomic_dec_if_positive - decrement by 1 if old value positive
607 * @v: pointer of type atomic_t
609 * The function returns the old value of *v minus 1, even if
610 * the atomic variable, v, was not decremented.
612 #ifndef atomic_dec_if_positive
613 static inline int atomic_dec_if_positive(atomic_t *v)
619 if (unlikely(dec < 0))
621 old = atomic_cmpxchg((v), c, dec);
622 if (likely(old == c))
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define  atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define  atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define  atomic64_add_return_relaxed	atomic64_add_return
#define  atomic64_add_return_acquire	atomic64_add_return
#define  atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define  atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define  atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define  atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define  atomic64_inc_return_relaxed	atomic64_inc_return
#define  atomic64_inc_return_acquire	atomic64_inc_return
#define  atomic64_inc_return_release	atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define  atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define  atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define  atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */

/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define  atomic64_sub_return_relaxed	atomic64_sub_return
#define  atomic64_sub_return_acquire	atomic64_sub_return
#define  atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define  atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define  atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define  atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define  atomic64_dec_return_relaxed	atomic64_dec_return
#define  atomic64_dec_return_acquire	atomic64_dec_return
#define  atomic64_dec_return_release	atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define  atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define  atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define  atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */
/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)					\
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)						\
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

/* No arch-provided fetch_inc at all: build every variant from fetch_add */
#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)					\
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)						\
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)					\
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)						\
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

/* No arch-provided fetch_dec at all: build every variant from fetch_sub */
#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)					\
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)						\
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */
/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)					\
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)						\
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)					\
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)						\
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
#define atomic64_fetch_andnot_release	atomic64_fetch_andnot

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)				\
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)					\
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)					\
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)						\
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */
/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define  atomic64_xchg_relaxed		atomic64_xchg
#define  atomic64_xchg_acquire		atomic64_xchg
#define  atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define  atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define  atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define  atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define  atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define  atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define  atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define  atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define  atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define  atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */
999 #ifndef atomic64_andnot
1000 static inline void atomic64_andnot(long long i, atomic64_t *v)
1002 atomic64_and(~i, v);
1005 static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
1007 return atomic64_fetch_and(~i, v);
1010 static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
1012 return atomic64_fetch_and_relaxed(~i, v);
1015 static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
1017 return atomic64_fetch_and_acquire(~i, v);
1020 static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
1022 return atomic64_fetch_and_release(~i, v);
1026 #include <asm-generic/atomic-long.h>
1028 #endif /* _LINUX_ATOMIC_H */