/*
 * tools/arch/x86/include/asm/cmpxchg.h
 *
 * Tools copy of the x86 atomic compare-and-exchange helpers.
 */
1 #ifndef TOOLS_ASM_X86_CMPXCHG_H
2 #define TOOLS_ASM_X86_CMPXCHG_H
3
4 #include <linux/compiler.h>
5
/*
 * Non-existent function used to flag usage errors at link time
 * (or at compile time, if the compiler implements __compiletime_error()).
 * It is referenced only from the unreachable "default" case of the size
 * switch in __raw_cmpxchg() below, so a correctly-sized cmpxchg never
 * emits a call to it.
 */
extern void __cmpxchg_wrong_size(void)
        __compiletime_error("Bad argument size for cmpxchg");
12
/*
 * Constants for operand sizes. On 32-bit, the 64-bit size is set to
 * -1 because sizeof will never return -1, thereby making that switch
 * case statement guaranteed dead code which the compiler will
 * eliminate, and allowing the "missing symbol in the default case" to
 * indicate a usage error.
 */
#define __X86_CASE_B    1
#define __X86_CASE_W    2
#define __X86_CASE_L    4
#ifdef __x86_64__
#define __X86_CASE_Q    8
#else
#define __X86_CASE_Q    -1              /* sizeof will never return -1 */
#endif
28
/*
 * Atomic compare and exchange.  Compare OLD with *MEM; if they are
 * identical, store NEW in *MEM.  Returns the value that was in *MEM
 * before the operation, so success is indicated by comparing the
 * RETURN value with OLD.
 *
 * @ptr:  pointer to the memory operand (1/2/4/8 bytes, selected by @size)
 * @old:  expected current value
 * @new:  value to store on a successful compare
 * @size: sizeof(*(ptr)); dispatches to the matching cmpxchg{b,w,l,q}
 * @lock: instruction prefix string, e.g. LOCK_PREFIX for an SMP-safe op
 *
 * The "0" input constraint ties __old to the same register as output
 * operand 0 ("=a"), i.e. %eax/%rax, which is where CMPXCHG expects the
 * comparand and leaves the previous memory value.  The "memory" clobber
 * makes the asm a compiler-level barrier.
 *
 * NOTE(review): the u8/u16/u32/u64 types are not defined by this header's
 * sole include (<linux/compiler.h>); presumably <linux/types.h> is pulled
 * in by users — verify against callers.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)                        \
({                                                                      \
        __typeof__(*(ptr)) __ret;                                       \
        __typeof__(*(ptr)) __old = (old);                               \
        __typeof__(*(ptr)) __new = (new);                               \
        switch (size) {                                                 \
        case __X86_CASE_B:                                              \
        {                                                               \
                volatile u8 *__ptr = (volatile u8 *)(ptr);              \
                /* "q" constraint: byte-addressable register */         \
                asm volatile(lock "cmpxchgb %2,%1"                      \
                             : "=a" (__ret), "+m" (*__ptr)              \
                             : "q" (__new), "0" (__old)                 \
                             : "memory");                               \
                break;                                                  \
        }                                                               \
        case __X86_CASE_W:                                              \
        {                                                               \
                volatile u16 *__ptr = (volatile u16 *)(ptr);            \
                asm volatile(lock "cmpxchgw %2,%1"                      \
                             : "=a" (__ret), "+m" (*__ptr)              \
                             : "r" (__new), "0" (__old)                 \
                             : "memory");                               \
                break;                                                  \
        }                                                               \
        case __X86_CASE_L:                                              \
        {                                                               \
                volatile u32 *__ptr = (volatile u32 *)(ptr);            \
                asm volatile(lock "cmpxchgl %2,%1"                      \
                             : "=a" (__ret), "+m" (*__ptr)              \
                             : "r" (__new), "0" (__old)                 \
                             : "memory");                               \
                break;                                                  \
        }                                                               \
        case __X86_CASE_Q:                                              \
        {                                                               \
                /* dead code on 32-bit: __X86_CASE_Q is -1 there */     \
                volatile u64 *__ptr = (volatile u64 *)(ptr);            \
                asm volatile(lock "cmpxchgq %2,%1"                      \
                             : "=a" (__ret), "+m" (*__ptr)              \
                             : "r" (__new), "0" (__old)                 \
                             : "memory");                               \
                break;                                                  \
        }                                                               \
        default:                                                        \
                /* link/compile-time error for unsupported sizes */     \
                __cmpxchg_wrong_size();                                 \
        }                                                               \
        __ret;                                                          \
})
81
/*
 * __cmpxchg() - size-dispatched compare-and-exchange with the bus-lock
 * prefix applied, making the operation atomic across CPUs.
 * NOTE(review): LOCK_PREFIX is not defined in this header; presumably it
 * comes from the including environment — verify against callers.
 */
#define __cmpxchg(ptr, old, new, size)                                  \
        __raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

/*
 * cmpxchg() - public entry point: the operand size is inferred from
 * the pointed-to type via sizeof(*(ptr)).
 */
#define cmpxchg(ptr, old, new)                                          \
        __cmpxchg(ptr, old, new, sizeof(*(ptr)))
87
88
89 #endif  /* TOOLS_ASM_X86_CMPXCHG_H */