1 #ifndef TOOLS_ASM_X86_CMPXCHG_H
2 #define TOOLS_ASM_X86_CMPXCHG_H
4 #include <linux/compiler.h>
7  * Non-existent functions to indicate usage errors at link time
8  * (or compile-time if the compiler implements __compiletime_error()).
10 extern void __cmpxchg_wrong_size(void)
11 __compiletime_error("Bad argument size for cmpxchg");
14  * Constants for operation sizes. On 32-bit, the 64-bit size is set to
15  * -1 because sizeof will never return -1, thereby making those switch
16  * case statements guaranteed dead code which the compiler will
17  * eliminate, and allowing the "missing symbol in the default case" to
18  * indicate a usage error.
20 #define __X86_CASE_B 1
21 #define __X86_CASE_W 2
22 #define __X86_CASE_L 4
24 #define __X86_CASE_Q 8
26 #define __X86_CASE_Q -1 /* sizeof will never return -1 */
30 * Atomic compare and exchange. Compare OLD with MEM, if identical,
31 * store NEW in MEM. Return the initial value in MEM. Success is
32 * indicated by comparing RETURN with OLD.
34 #define __raw_cmpxchg(ptr, old, new, size, lock) \
36 __typeof__(*(ptr)) __ret; \
37 __typeof__(*(ptr)) __old = (old); \
38 __typeof__(*(ptr)) __new = (new); \
42 volatile u8 *__ptr = (volatile u8 *)(ptr); \
43 asm volatile(lock "cmpxchgb %2,%1" \
44 : "=a" (__ret), "+m" (*__ptr) \
45 : "q" (__new), "0" (__old) \
51 volatile u16 *__ptr = (volatile u16 *)(ptr); \
52 asm volatile(lock "cmpxchgw %2,%1" \
53 : "=a" (__ret), "+m" (*__ptr) \
54 : "r" (__new), "0" (__old) \
60 volatile u32 *__ptr = (volatile u32 *)(ptr); \
61 asm volatile(lock "cmpxchgl %2,%1" \
62 : "=a" (__ret), "+m" (*__ptr) \
63 : "r" (__new), "0" (__old) \
69 volatile u64 *__ptr = (volatile u64 *)(ptr); \
70 asm volatile(lock "cmpxchgq %2,%1" \
71 : "=a" (__ret), "+m" (*__ptr) \
72 : "r" (__new), "0" (__old) \
77 __cmpxchg_wrong_size(); \
82 #define __cmpxchg(ptr, old, new, size) \
83 __raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)
85 #define cmpxchg(ptr, old, new) \
86 __cmpxchg(ptr, old, new, sizeof(*(ptr)))
89 #endif /* TOOLS_ASM_X86_CMPXCHG_H */