MIPS: cmpxchg: Drop __xchg_u{32,64} functions
author    Paul Burton <paul.burton@imgtec.com>
          Sat, 10 Jun 2017 00:26:37 +0000 (17:26 -0700)
committer Ralf Baechle <ralf@linux-mips.org>
          Thu, 29 Jun 2017 00:42:25 +0000 (02:42 +0200)
The __xchg_u32() & __xchg_u64() functions now add very little value.
This patch therefore removes them, by:

  - Moving memory barriers out of them & into xchg(), which also removes
    the duplication & readies us to support xchg_relaxed() if we wish to.

  - Calling __xchg_asm() directly from __xchg().

  - Checking that CONFIG_64BIT is enabled directly in the size=8 case of
    __xchg() (a stand-alone sketch of the resulting shape follows this
    list).

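For orientation, here is a minimal stand-alone sketch of the layout the
rework produces. This is plain user-space C, not part of the patch and not
kernel code: the names model_xchg() and model___xchg() are hypothetical,
and the GCC/Clang builtins __atomic_exchange_n() and __atomic_thread_fence()
stand in for the ll/sc based __xchg_asm() and for
smp_mb__before_llsc()/smp_llsc_mb() respectively.

/*
 * Stand-alone model of the reworked layout: the barriers live in the
 * outer macro, and the size dispatch calls the low-level exchange
 * directly, mirroring xchg()/__xchg() after this patch.
 */
#include <stdint.h>
#include <stdio.h>

/* Size dispatch, analogous to __xchg(): no barriers in here. */
static inline unsigned long model___xchg(volatile void *ptr, unsigned long x,
					 int size)
{
	switch (size) {
	case 4:
		return __atomic_exchange_n((volatile uint32_t *)ptr,
					   (uint32_t)x, __ATOMIC_RELAXED);
	case 8:
		return __atomic_exchange_n((volatile uint64_t *)ptr,
					   (uint64_t)x, __ATOMIC_RELAXED);
	default:
		return 0; /* the kernel forces a build/link error instead */
	}
}

/* Wrapper analogous to xchg(): the full barriers live out here. */
#define model_xchg(ptr, x)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	__atomic_thread_fence(__ATOMIC_SEQ_CST); /* smp_mb__before_llsc() */ \
	__res = (__typeof__(*(ptr)))					\
		model___xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \
	__atomic_thread_fence(__ATOMIC_SEQ_CST); /* smp_llsc_mb() */	\
									\
	__res;								\
})

int main(void)
{
	uint32_t flag = 0;
	uint32_t old = model_xchg(&flag, 1u);

	printf("old=%u new=%u\n", old, flag);
	return 0;
}
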
Signed-off-by: Paul Burton <paul.burton@imgtec.com>
Cc: linux-mips@linux-mips.org
Patchwork: https://patchwork.linux-mips.org/patch/16352/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
arch/mips/include/asm/cmpxchg.h

index fe652c3e5d8cba5496c00531bea0db45b5a3f184..e9c1e97bc29da63ff50099b08e021dcd6c30c04f 100644
@@ -70,40 +70,18 @@ extern unsigned long __xchg_called_with_bad_pointer(void)
        __ret;                                                          \
 })
 
-static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
-{
-       __u32 retval;
-
-       smp_mb__before_llsc();
-       retval = __xchg_asm("ll", "sc", m, val);
-       smp_llsc_mb();
-
-       return retval;
-}
-
-#ifdef CONFIG_64BIT
-static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
-{
-       __u64 retval;
-
-       smp_mb__before_llsc();
-       retval = __xchg_asm("lld", "scd", m, val);
-       smp_llsc_mb();
-
-       return retval;
-}
-#else
-extern __u64 __xchg_u64_unsupported_on_32bit_kernels(volatile __u64 * m, __u64 val);
-#define __xchg_u64 __xchg_u64_unsupported_on_32bit_kernels
-#endif
-
 static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
 {
        switch (size) {
        case 4:
-               return __xchg_u32(ptr, x);
+               return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);
+
        case 8:
-               return __xchg_u64(ptr, x);
+               if (!IS_ENABLED(CONFIG_64BIT))
+                       return __xchg_called_with_bad_pointer();
+
+               return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);
+
        default:
                return __xchg_called_with_bad_pointer();
        }
@@ -111,10 +89,18 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
 
 #define xchg(ptr, x)                                                   \
 ({                                                                     \
+       __typeof__(*(ptr)) __res;                                       \
+                                                                       \
        BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc);                            \
                                                                        \
-       ((__typeof__(*(ptr)))                                           \
-               __xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));     \
+       smp_mb__before_llsc();                                          \
+                                                                       \
+       __res = (__typeof__(*(ptr)))                                    \
+               __xchg((unsigned long)(x), (ptr), sizeof(*(ptr)));      \
+                                                                       \
+       smp_llsc_mb();                                                  \
+                                                                       \
+       __res;                                                          \
 })
 
 #define __cmpxchg_asm(ld, st, m, old, new)                             \
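
A note on the size check retained in the xchg() hunk above:
BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc) only accepts operand sizes whose bits
fall within 0xc, i.e. 4 or 8 bytes, rejecting 1 and 2 byte operands at
compile time; any other size that slips past the mask still hits the
link-time __xchg_called_with_bad_pointer() error in __xchg(). Below is a
small stand-alone illustration, where the hypothetical check_xchg_size()
macro uses _Static_assert in place of the kernel's BUILD_BUG_ON.

/* Stand-alone illustration of the 0xc size mask; _Static_assert plays
 * the role of BUILD_BUG_ON here.
 */
#include <stdint.h>

#define check_xchg_size(ptr)						\
	_Static_assert((sizeof(*(ptr)) & ~0xcUL) == 0,			\
		       "unsupported xchg() operand size")

static volatile uint32_t word;
static volatile uint64_t dword;
static volatile uint16_t half;

void size_checks(void)
{
	check_xchg_size(&word);   /* 4: 0b0100 & ~0b1100 == 0, accepted   */
	check_xchg_size(&dword);  /* 8: 0b1000 & ~0b1100 == 0, accepted   */
	/* check_xchg_size(&half);   2: 0b0010 & ~0b1100 != 0, build error */
	(void)half;
}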