git.karo-electronics.de Git - karo-tx-linux.git/commitdiff
xtensa: implement robust futex atomic uaccess ops
author: Baruch Siach <baruch@tkos.co.il>
Mon, 18 Nov 2013 04:46:43 +0000 (06:46 +0200)
committer: Chris Zankel <chris@zankel.net>
Tue, 14 Jan 2014 18:19:51 +0000 (10:19 -0800)
This enables the set_robust_list(2) system call.

Signed-off-by: Baruch Siach <baruch@tkos.co.il>
Signed-off-by: Chris Zankel <chris@zankel.net>
arch/xtensa/include/asm/Kbuild
arch/xtensa/include/asm/futex.h [new file with mode: 0644]

index 228d6aee3a16528f0e5972371e5a1fd6feac9a79..5851db291583face7d032c2929dd5c91b58204d6 100644 (file)
@@ -8,7 +8,6 @@ generic-y += emergency-restart.h
 generic-y += errno.h
 generic-y += exec.h
 generic-y += fcntl.h
-generic-y += futex.h
 generic-y += hardirq.h
 generic-y += ioctl.h
 generic-y += irq_regs.h
diff --git a/arch/xtensa/include/asm/futex.h b/arch/xtensa/include/asm/futex.h
new file mode 100644 (file)
index 0000000..b39531b
--- /dev/null
@@ -0,0 +1,147 @@
+/*
+ * Atomic futex routines
+ *
+ * Based on the PowerPC implementation
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * Copyright (C) 2013 TangoTec Ltd.
+ *
+ * Baruch Siach <baruch@tkos.co.il>
+ */
+
+#ifndef _ASM_XTENSA_FUTEX_H
+#define _ASM_XTENSA_FUTEX_H
+
+#ifdef __KERNEL__
+
+#include <linux/futex.h>
+#include <linux/uaccess.h>
+#include <linux/errno.h>
+
+/*
+ * Atomic read-modify-write on a futex word in user memory.
+ *
+ * 1: loads *uaddr into %0 (oldval); "insn" computes the new value into
+ * %1 from %0 and %4 (oparg); the loaded value is placed in scompare1 so
+ * that 2: s32c1i stores the new value only if *uaddr is still unchanged,
+ * retrying from 1: otherwise.  On success %1 (ret) is set to 0.
+ *
+ * The .fixup/__ex_table entries route a user-access fault at 1: or 2:
+ * to label 5:, which loads the address of 3: (stored at 4:), sets ret
+ * to %3 (-EFAULT) and jumps back past the asm body.
+ *
+ * NOTE(review): "__volatile" is the GCC-accepted alias of
+ * "__volatile__"; behavior is identical.
+ */
+#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
+       __asm__ __volatile(                             \
+       "1:     l32i    %0, %2, 0\n"                    \
+               insn "\n"                               \
+       "       wsr     %0, scompare1\n"                \
+       "2:     s32c1i  %1, %2, 0\n"                    \
+       "       bne     %1, %0, 1b\n"                   \
+       "       movi    %1, 0\n"                        \
+       "3:\n"                                          \
+       "       .section .fixup,\"ax\"\n"               \
+       "       .align 4\n"                             \
+       "4:     .long   3b\n"                           \
+       "5:     l32r    %0, 4b\n"                       \
+       "       movi    %1, %3\n"                       \
+       "       jx      %0\n"                           \
+       "       .previous\n"                            \
+       "       .section __ex_table,\"a\"\n"            \
+       "       .long 1b,5b,2b,5b\n"                    \
+       "       .previous\n"                            \
+       : "=&r" (oldval), "=&r" (ret)                   \
+       : "r" (uaddr), "I" (-EFAULT), "r" (oparg)       \
+       : "memory")
+
+/*
+ * Decode and execute an encoded FUTEX_WAKE_OP operation on *uaddr.
+ *
+ * encoded_op packs: op (bits 31-28), cmp (27-24), oparg (23-12,
+ * sign-extended), cmparg (11-0, sign-extended).  The op is applied
+ * atomically to *uaddr via __futex_atomic_op; the fetched old value is
+ * then tested against cmparg with the requested comparison.
+ *
+ * Returns the boolean comparison result (0/1) on success, -EFAULT if
+ * uaddr is not valid writable user memory or the access faults, and
+ * -ENOSYS for unknown ops / unknown comparisons / CPUs without s32c1i.
+ */
+static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
+{
+       int op = (encoded_op >> 28) & 7;
+       int cmp = (encoded_op >> 24) & 15;
+       /* Arithmetic shifts sign-extend the two 12-bit argument fields. */
+       int oparg = (encoded_op << 8) >> 20;
+       int cmparg = (encoded_op << 20) >> 20;
+       int oldval = 0, ret;
+       /* FUTEX_OP_OPARG_SHIFT: treat oparg as a shift count, not a value. */
+       if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
+               oparg = 1 << oparg;
+
+       if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
+               return -EFAULT;
+
+#if !XCHAL_HAVE_S32C1I
+       /* No compare-and-store instruction: atomic ops are unsupported. */
+       return -ENOSYS;
+#endif
+
+       /* Faults inside the asm must reach the fixup code, not the
+        * page-fault handler's retry path. */
+       pagefault_disable();
+
+       /* In each insn: %1 = new value, %0 = fetched old value,
+        * %4 = oparg (pre-inverted for ANDN). */
+       switch (op) {
+       case FUTEX_OP_SET:
+               __futex_atomic_op("mov %1, %4", ret, oldval, uaddr, oparg);
+               break;
+       case FUTEX_OP_ADD:
+               __futex_atomic_op("add %1, %0, %4", ret, oldval, uaddr,
+                               oparg);
+               break;
+       case FUTEX_OP_OR:
+               __futex_atomic_op("or %1, %0, %4", ret, oldval, uaddr,
+                               oparg);
+               break;
+       case FUTEX_OP_ANDN:
+               __futex_atomic_op("and %1, %0, %4", ret, oldval, uaddr,
+                               ~oparg);
+               break;
+       case FUTEX_OP_XOR:
+               __futex_atomic_op("xor %1, %0, %4", ret, oldval, uaddr,
+                               oparg);
+               break;
+       default:
+               ret = -ENOSYS;
+       }
+
+       pagefault_enable();
+
+       if (ret)
+               return ret;
+
+       /* Compare the pre-operation value against cmparg. */
+       switch (cmp) {
+       case FUTEX_OP_CMP_EQ: return (oldval == cmparg);
+       case FUTEX_OP_CMP_NE: return (oldval != cmparg);
+       case FUTEX_OP_CMP_LT: return (oldval < cmparg);
+       case FUTEX_OP_CMP_GE: return (oldval >= cmparg);
+       case FUTEX_OP_CMP_LE: return (oldval <= cmparg);
+       case FUTEX_OP_CMP_GT: return (oldval > cmparg);
+       }
+
+       return -ENOSYS;
+}
+
+static inline int
+futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
+                             u32 oldval, u32 newval)
+{
+       int ret = 0;
+       u32 prev;
+
+       if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
+               return -EFAULT;
+
+#if !XCHAL_HAVE_S32C1I
+       return -ENOSYS;
+#endif
+
+       __asm__ __volatile__ (
+       "       # futex_atomic_cmpxchg_inatomic\n"
+       "1:     l32i    %1, %3, 0\n"
+       "       mov     %0, %5\n"
+       "       wsr     %1, scompare1\n"
+       "2:     s32c1i  %0, %3, 0\n"
+       "3:\n"
+       "       .section .fixup,\"ax\"\n"
+       "       .align 4\n"
+       "4:     .long   3b\n"
+       "5:     l32r    %1, 4b\n"
+       "       movi    %0, %6\n"
+       "       jx      %1\n"
+       "       .previous\n"
+       "       .section __ex_table,\"a\"\n"
+       "       .long 1b,5b,2b,5b\n"
+       "       .previous\n"
+       : "+r" (ret), "=&r" (prev), "+m" (*uaddr)
+       : "r" (uaddr), "r" (oldval), "r" (newval), "I" (-EFAULT)
+       : "memory");
+
+       *uval = prev;
+       return ret;
+}
+
+#endif /* __KERNEL__ */
+#endif /* _ASM_XTENSA_FUTEX_H */