From: Jeremy Fitzhardinge
Date: Fri, 24 Jun 2011 01:19:19 +0000 (-0700)
Subject: x86: Add xadd helper macro
X-Git-Tag: next-20110810~12^2~2^2~1
X-Git-Url: https://git.karo-electronics.de/?a=commitdiff_plain;h=847c73e8042e565a2cc4934c84103ab82e0eac42;p=karo-tx-linux.git

x86: Add xadd helper macro

Signed-off-by: Jeremy Fitzhardinge
Link: http://lkml.kernel.org/r/ce03e48f4b70a2a31accf32c8b41b781674e57c3.1308878118.git.jeremy.fitzhardinge@citrix.com
Signed-off-by: H. Peter Anvin
---

diff --git a/arch/x86/include/asm/cmpxchg_32.h b/arch/x86/include/asm/cmpxchg_32.h
index 284a6e8f7ce1..30f0318bccdd 100644
--- a/arch/x86/include/asm/cmpxchg_32.h
+++ b/arch/x86/include/asm/cmpxchg_32.h
@@ -280,4 +280,25 @@ static inline unsigned long cmpxchg_386(volatile void *ptr, unsigned long old,
 
 #endif
 
+#define xadd(ptr, inc)						\
+	do {							\
+		switch (sizeof(*(ptr))) {			\
+		case 1:						\
+			asm volatile (LOCK_PREFIX "xaddb %b0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;					\
+		case 2:						\
+			asm volatile (LOCK_PREFIX "xaddw %w0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;					\
+		case 4:						\
+			asm volatile (LOCK_PREFIX "xaddl %0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;					\
+		}						\
+	} while(0)
+
 #endif /* _ASM_X86_CMPXCHG_32_H */
diff --git a/arch/x86/include/asm/cmpxchg_64.h b/arch/x86/include/asm/cmpxchg_64.h
index 423ae58aa020..62da1ffc9a31 100644
--- a/arch/x86/include/asm/cmpxchg_64.h
+++ b/arch/x86/include/asm/cmpxchg_64.h
@@ -151,4 +151,30 @@ extern void __cmpxchg_wrong_size(void);
 	cmpxchg_local((ptr), (o), (n));					\
 })
 
+#define xadd(ptr, inc)						\
+	do {							\
+		switch (sizeof(*(ptr))) {			\
+		case 1:						\
+			asm volatile (LOCK_PREFIX "xaddb %b0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;					\
+		case 2:						\
+			asm volatile (LOCK_PREFIX "xaddw %w0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;					\
+		case 4:						\
+			asm volatile (LOCK_PREFIX "xaddl %0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;					\
+		case 8:						\
+			asm volatile (LOCK_PREFIX "xaddq %q0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;					\
+		}						\
+	} while(0)
+
 #endif /* _ASM_X86_CMPXCHG_64_H */