From 847c73e8042e565a2cc4934c84103ab82e0eac42 Mon Sep 17 00:00:00 2001
From: Jeremy Fitzhardinge
Date: Thu, 23 Jun 2011 18:19:19 -0700
Subject: [PATCH] x86: Add xadd helper macro

Signed-off-by: Jeremy Fitzhardinge
Link: http://lkml.kernel.org/r/ce03e48f4b70a2a31accf32c8b41b781674e57c3.1308878118.git.jeremy.fitzhardinge@citrix.com
Signed-off-by: H. Peter Anvin
---
 arch/x86/include/asm/cmpxchg_32.h | 21 +++++++++++++++++++++
 arch/x86/include/asm/cmpxchg_64.h | 26 ++++++++++++++++++++++++++
 2 files changed, 47 insertions(+)

diff --git a/arch/x86/include/asm/cmpxchg_32.h b/arch/x86/include/asm/cmpxchg_32.h
index 284a6e8f7ce1..30f0318bccdd 100644
--- a/arch/x86/include/asm/cmpxchg_32.h
+++ b/arch/x86/include/asm/cmpxchg_32.h
@@ -280,4 +280,25 @@ static inline unsigned long cmpxchg_386(volatile void *ptr, unsigned long old,
 
 #endif
 
+#define xadd(ptr, inc)							\
+	do {								\
+		switch (sizeof(*(ptr))) {				\
+		case 1:							\
+			asm volatile (LOCK_PREFIX "xaddb %b0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case 2:							\
+			asm volatile (LOCK_PREFIX "xaddw %w0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case 4:							\
+			asm volatile (LOCK_PREFIX "xaddl %0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		}							\
+	} while(0)
+
 #endif /* _ASM_X86_CMPXCHG_32_H */
diff --git a/arch/x86/include/asm/cmpxchg_64.h b/arch/x86/include/asm/cmpxchg_64.h
index 423ae58aa020..62da1ffc9a31 100644
--- a/arch/x86/include/asm/cmpxchg_64.h
+++ b/arch/x86/include/asm/cmpxchg_64.h
@@ -151,4 +151,30 @@ extern void __cmpxchg_wrong_size(void);
 	cmpxchg_local((ptr), (o), (n));					\
 })
 
+#define xadd(ptr, inc)							\
+	do {								\
+		switch (sizeof(*(ptr))) {				\
+		case 1:							\
+			asm volatile (LOCK_PREFIX "xaddb %b0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case 2:							\
+			asm volatile (LOCK_PREFIX "xaddw %w0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case 4:							\
+			asm volatile (LOCK_PREFIX "xaddl %0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case 8:							\
+			asm volatile (LOCK_PREFIX "xaddq %q0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		}							\
+	} while(0)
+
 #endif /* _ASM_X86_CMPXCHG_64_H */
-- 
2.39.5