Merge branch 'master' into csb1725
arch/mips/include/asm/bitops.h
index b0ce7ca2851f8fc32413e407942cc7367d6643d9..50b4ef288c53f0be1a78d9de79e40e2337a3f76d 100644
@@ -73,30 +73,26 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
                : "ir" (1UL << bit), "m" (*m));
 #ifdef CONFIG_CPU_MIPSR2
        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
-               __asm__ __volatile__(
-               "1:     " __LL "%0, %1                  # set_bit       \n"
-               "       " __INS "%0, %4, %2, 1                          \n"
-               "       " __SC "%0, %1                                  \n"
-               "       beqz    %0, 2f                                  \n"
-               "       .subsection 2                                   \n"
-               "2:     b       1b                                      \n"
-               "       .previous                                       \n"
-               : "=&r" (temp), "=m" (*m)
-               : "ir" (bit), "m" (*m), "r" (~0));
+               do {
+                       __asm__ __volatile__(
+                       "       " __LL "%0, %1          # set_bit       \n"
+                       "       " __INS "%0, %3, %2, 1                  \n"
+                       "       " __SC "%0, %1                          \n"
+                       : "=&r" (temp), "+m" (*m)
+                       : "ir" (bit), "r" (~0));
+               } while (unlikely(!temp));
 #endif /* CONFIG_CPU_MIPSR2 */
        } else if (kernel_uses_llsc) {
-               __asm__ __volatile__(
-               "       .set    mips3                                   \n"
-               "1:     " __LL "%0, %1                  # set_bit       \n"
-               "       or      %0, %2                                  \n"
-               "       " __SC  "%0, %1                                 \n"
-               "       beqz    %0, 2f                                  \n"
-               "       .subsection 2                                   \n"
-               "2:     b       1b                                      \n"
-               "       .previous                                       \n"
-               "       .set    mips0                                   \n"
-               : "=&r" (temp), "=m" (*m)
-               : "ir" (1UL << bit), "m" (*m));
+               do {
+                       __asm__ __volatile__(
+                       "       .set    mips3                           \n"
+                       "       " __LL "%0, %1          # set_bit       \n"
+                       "       or      %0, %2                          \n"
+                       "       " __SC  "%0, %1                         \n"
+                       "       .set    mips0                           \n"
+                       : "=&r" (temp), "+m" (*m)
+                       : "ir" (1UL << bit));
+               } while (unlikely(!temp));
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
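
The pattern this hunk establishes for the whole file: the retry branch
moves out of the asm and into C. Before, a failed store-conditional
branched into ".subsection 2" and jumped back; now the sc result is
tested by the compiler, which can lay out the unlikely retry path
itself. Two side effects are visible in the constraints: the separate
"=m" (*m) output and "m" (*m) input collapse into one read-write
"+m" (*m) operand, and the operand numbers shift down accordingly
(the __INS source that was %4 is now %3). A minimal sketch of the
resulting shape, with "ll"/"sc" standing in for the __LL/__SC macros
(which expand to ll/sc or lld/scd depending on word size):

	do {
		__asm__ __volatile__(
		"	ll	%0, %1		\n"	/* load-linked       */
		"	or	%0, %2		\n"	/* set the bit       */
		"	sc	%0, %1		\n"	/* store-conditional */
		: "=&r" (temp), "+m" (*m)
		: "ir" (1UL << bit));
	} while (unlikely(!temp));	/* temp == 0: sc lost the reservation */
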
@@ -134,34 +130,30 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
                "       " __SC "%0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "=m" (*m)
-               : "ir" (~(1UL << bit)), "m" (*m));
+               : "=&r" (temp), "+m" (*m)
+               : "ir" (~(1UL << bit)));
 #ifdef CONFIG_CPU_MIPSR2
        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
-               __asm__ __volatile__(
-               "1:     " __LL "%0, %1                  # clear_bit     \n"
-               "       " __INS "%0, $0, %2, 1                          \n"
-               "       " __SC "%0, %1                                  \n"
-               "       beqz    %0, 2f                                  \n"
-               "       .subsection 2                                   \n"
-               "2:     b       1b                                      \n"
-               "       .previous                                       \n"
-               : "=&r" (temp), "=m" (*m)
-               : "ir" (bit), "m" (*m));
+               do {
+                       __asm__ __volatile__(
+                       "       " __LL "%0, %1          # clear_bit     \n"
+                       "       " __INS "%0, $0, %2, 1                  \n"
+                       "       " __SC "%0, %1                          \n"
+                       : "=&r" (temp), "+m" (*m)
+                       : "ir" (bit));
+               } while (unlikely(!temp));
 #endif /* CONFIG_CPU_MIPSR2 */
        } else if (kernel_uses_llsc) {
-               __asm__ __volatile__(
-               "       .set    mips3                                   \n"
-               "1:     " __LL "%0, %1                  # clear_bit     \n"
-               "       and     %0, %2                                  \n"
-               "       " __SC "%0, %1                                  \n"
-               "       beqz    %0, 2f                                  \n"
-               "       .subsection 2                                   \n"
-               "2:     b       1b                                      \n"
-               "       .previous                                       \n"
-               "       .set    mips0                                   \n"
-               : "=&r" (temp), "=m" (*m)
-               : "ir" (~(1UL << bit)), "m" (*m));
+               do {
+                       __asm__ __volatile__(
+                       "       .set    mips3                           \n"
+                       "       " __LL "%0, %1          # clear_bit     \n"
+                       "       and     %0, %2                          \n"
+                       "       " __SC "%0, %1                          \n"
+                       "       .set    mips0                           \n"
+                       : "=&r" (temp), "+m" (*m)
+                       : "ir" (~(1UL << bit)));
+               } while (unlikely(!temp));
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
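
Note on the MIPSR2 clear_bit variant: it needs no mask operand at all,
because "ins %0, $0, %2, 1" inserts a 1-bit field taken from $0 (the
hard-wired zero register) at position bit. In C terms the instruction
computes, roughly:

	temp &= ~(1UL << bit);	/* what "ins temp, $0, bit, 1" does */

This is why the "r" (~0) source operand that set_bit's __INS needs
(it inserts a one) has no counterpart here.
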
@@ -213,24 +205,22 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
                "       " __SC  "%0, %1                         \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    mips0                           \n"
-               : "=&r" (temp), "=m" (*m)
-               : "ir" (1UL << bit), "m" (*m));
+               : "=&r" (temp), "+m" (*m)
+               : "ir" (1UL << bit));
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;
 
-               __asm__ __volatile__(
-               "       .set    mips3                           \n"
-               "1:     " __LL "%0, %1          # change_bit    \n"
-               "       xor     %0, %2                          \n"
-               "       " __SC  "%0, %1                         \n"
-               "       beqz    %0, 2f                          \n"
-               "       .subsection 2                           \n"
-               "2:     b       1b                              \n"
-               "       .previous                               \n"
-               "       .set    mips0                           \n"
-               : "=&r" (temp), "=m" (*m)
-               : "ir" (1UL << bit), "m" (*m));
+               do {
+                       __asm__ __volatile__(
+                       "       .set    mips3                           \n"
+                       "       " __LL "%0, %1          # change_bit    \n"
+                       "       xor     %0, %2                          \n"
+                       "       " __SC  "%0, %1                         \n"
+                       "       .set    mips0                           \n"
+                       : "=&r" (temp), "+m" (*m)
+                       : "ir" (1UL << bit));
+               } while (unlikely(!temp));
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
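
change_bit has no CONFIG_CPU_MIPSR2 special case: toggling is already a
single xor against the mask, so this hunk only restructures the retry
loop and adopts the "+m" constraint. The truncated else branch is the
untouched fallback for CPUs without ll/sc; for context, it is roughly
the following interrupt-masking sequence (a sketch of the surrounding
file, not part of this patch):

	a += nr >> SZLONG_LOG;
	mask = 1UL << bit;
	raw_local_irq_save(flags);
	*a ^= mask;
	raw_local_irq_restore(flags);
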
@@ -272,30 +262,26 @@ static inline int test_and_set_bit(unsigned long nr,
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "=m" (*m), "=&r" (res)
-               : "r" (1UL << bit), "m" (*m)
+               : "=&r" (temp), "+m" (*m), "=&r" (res)
+               : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;
 
-               __asm__ __volatile__(
-               "       .set    push                                    \n"
-               "       .set    noreorder                               \n"
-               "       .set    mips3                                   \n"
-               "1:     " __LL "%0, %1          # test_and_set_bit      \n"
-               "       or      %2, %0, %3                              \n"
-               "       " __SC  "%2, %1                                 \n"
-               "       beqz    %2, 2f                                  \n"
-               "        and    %2, %0, %3                              \n"
-               "       .subsection 2                                   \n"
-               "2:     b       1b                                      \n"
-               "        nop                                            \n"
-               "       .previous                                       \n"
-               "       .set    pop                                     \n"
-               : "=&r" (temp), "=m" (*m), "=&r" (res)
-               : "r" (1UL << bit), "m" (*m)
-               : "memory");
+               do {
+                       __asm__ __volatile__(
+                       "       .set    mips3                           \n"
+                       "       " __LL "%0, %1  # test_and_set_bit      \n"
+                       "       or      %2, %0, %3                      \n"
+                       "       " __SC  "%2, %1                         \n"
+                       "       .set    mips0                           \n"
+                       : "=&r" (temp), "+m" (*m), "=&r" (res)
+                       : "r" (1UL << bit)
+                       : "memory");
+               } while (unlikely(!res));
+
+               res = temp & (1UL << bit);
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
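
In the test_and_* routines the old code computed the return value in
the branch delay slot ("beqz %2, 2f" followed by "and %2, %0, %3"
under .set noreorder). With the branch gone, the delay-slot trick goes
too: the loop spins while res (the sc result) is zero, and only then
is res reused for the answer, recomputed in plain C:

	} while (unlikely(!res));	/* res == 0: sc failed, go again   */

	res = temp & (1UL << bit);	/* res reused: the bit's old value */

Dropping the delay-slot "and" is also what lets the
.set push/noreorder/pop bracketing and the nop after "b 1b" disappear.
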
@@ -340,30 +326,26 @@ static inline int test_and_set_bit_lock(unsigned long nr,
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "=m" (*m), "=&r" (res)
-               : "r" (1UL << bit), "m" (*m)
+               : "=&r" (temp), "+m" (*m), "=&r" (res)
+               : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;
 
-               __asm__ __volatile__(
-               "       .set    push                                    \n"
-               "       .set    noreorder                               \n"
-               "       .set    mips3                                   \n"
-               "1:     " __LL "%0, %1          # test_and_set_bit      \n"
-               "       or      %2, %0, %3                              \n"
-               "       " __SC  "%2, %1                                 \n"
-               "       beqz    %2, 2f                                  \n"
-               "        and    %2, %0, %3                              \n"
-               "       .subsection 2                                   \n"
-               "2:     b       1b                                      \n"
-               "        nop                                            \n"
-               "       .previous                                       \n"
-               "       .set    pop                                     \n"
-               : "=&r" (temp), "=m" (*m), "=&r" (res)
-               : "r" (1UL << bit), "m" (*m)
-               : "memory");
+               do {
+                       __asm__ __volatile__(
+                       "       .set    mips3                           \n"
+                       "       " __LL "%0, %1  # test_and_set_bit      \n"
+                       "       or      %2, %0, %3                      \n"
+                       "       " __SC  "%2, %1                         \n"
+                       "       .set    mips0                           \n"
+                       : "=&r" (temp), "+m" (*m), "=&r" (res)
+                       : "r" (1UL << bit)
+                       : "memory");
+               } while (unlikely(!res));
+
+               res = temp & (1UL << bit);
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
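
test_and_set_bit_lock gets the identical rewrite; its new loop is
byte-for-byte the test_and_set_bit one above (including the inherited
"# test_and_set_bit" comment string). The _lock variant differs in the
memory ordering it provides around these lines, not in the loop. A
hedged usage sketch of the pair it forms with clear_bit_unlock(),
assuming the usual kernel semantics of the two helpers:

	while (test_and_set_bit_lock(0, &word))
		cpu_relax();		/* spin until the bit reads clear */
	/* ... critical section ... */
	clear_bit_unlock(0, &word);	/* release with store ordering */
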
@@ -410,49 +392,43 @@ static inline int test_and_clear_bit(unsigned long nr,
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "=m" (*m), "=&r" (res)
-               : "r" (1UL << bit), "m" (*m)
+               : "=&r" (temp), "+m" (*m), "=&r" (res)
+               : "r" (1UL << bit)
                : "memory");
 #ifdef CONFIG_CPU_MIPSR2
        } else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;
 
-               __asm__ __volatile__(
-               "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
-               "       " __EXT "%2, %0, %3, 1                          \n"
-               "       " __INS "%0, $0, %3, 1                          \n"
-               "       " __SC  "%0, %1                                 \n"
-               "       beqz    %0, 2f                                  \n"
-               "       .subsection 2                                   \n"
-               "2:     b       1b                                      \n"
-               "       .previous                                       \n"
-               : "=&r" (temp), "=m" (*m), "=&r" (res)
-               : "ir" (bit), "m" (*m)
-               : "memory");
+               do {
+                       __asm__ __volatile__(
+                       "       " __LL  "%0, %1 # test_and_clear_bit    \n"
+                       "       " __EXT "%2, %0, %3, 1                  \n"
+                       "       " __INS "%0, $0, %3, 1                  \n"
+                       "       " __SC  "%0, %1                         \n"
+                       : "=&r" (temp), "+m" (*m), "=&r" (res)
+                       : "ir" (bit)
+                       : "memory");
+               } while (unlikely(!temp));
 #endif
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;
 
-               __asm__ __volatile__(
-               "       .set    push                                    \n"
-               "       .set    noreorder                               \n"
-               "       .set    mips3                                   \n"
-               "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
-               "       or      %2, %0, %3                              \n"
-               "       xor     %2, %3                                  \n"
-               "       " __SC  "%2, %1                                 \n"
-               "       beqz    %2, 2f                                  \n"
-               "        and    %2, %0, %3                              \n"
-               "       .subsection 2                                   \n"
-               "2:     b       1b                                      \n"
-               "        nop                                            \n"
-               "       .previous                                       \n"
-               "       .set    pop                                     \n"
-               : "=&r" (temp), "=m" (*m), "=&r" (res)
-               : "r" (1UL << bit), "m" (*m)
-               : "memory");
+               do {
+                       __asm__ __volatile__(
+                       "       .set    mips3                           \n"
+                       "       " __LL  "%0, %1 # test_and_clear_bit    \n"
+                       "       or      %2, %0, %3                      \n"
+                       "       xor     %2, %3                          \n"
+                       "       " __SC  "%2, %1                         \n"
+                       "       .set    mips0                           \n"
+                       : "=&r" (temp), "+m" (*m), "=&r" (res)
+                       : "r" (1UL << bit)
+                       : "memory");
+               } while (unlikely(!res));
+
+               res = temp & (1UL << bit);
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
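
Two details are specific to test_and_clear_bit. The MIPSR2 variant
extracts the old bit into res with __EXT before clearing it with
__INS, so its loop tests the sc result in temp and needs no
recomputation afterwards. The generic variant clears the bit with an
or/xor pair instead of a precomputed inverted mask; in C terms:

	res = temp | mask;	/* force the bit on ...               */
	res = res ^ mask;	/* ... then flip it off: temp & ~mask */

which keeps the single "r" (1UL << bit) operand shared with the other
test_and_* routines.
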
@@ -499,30 +475,26 @@ static inline int test_and_change_bit(unsigned long nr,
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "=m" (*m), "=&r" (res)
-               : "r" (1UL << bit), "m" (*m)
+               : "=&r" (temp), "+m" (*m), "=&r" (res)
+               : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;
 
-               __asm__ __volatile__(
-               "       .set    push                                    \n"
-               "       .set    noreorder                               \n"
-               "       .set    mips3                                   \n"
-               "1:     " __LL  "%0, %1         # test_and_change_bit   \n"
-               "       xor     %2, %0, %3                              \n"
-               "       " __SC  "\t%2, %1                               \n"
-               "       beqz    %2, 2f                                  \n"
-               "        and    %2, %0, %3                              \n"
-               "       .subsection 2                                   \n"
-               "2:     b       1b                                      \n"
-               "        nop                                            \n"
-               "       .previous                                       \n"
-               "       .set    pop                                     \n"
-               : "=&r" (temp), "=m" (*m), "=&r" (res)
-               : "r" (1UL << bit), "m" (*m)
-               : "memory");
+               do {
+                       __asm__ __volatile__(
+                       "       .set    mips3                           \n"
+                       "       " __LL  "%0, %1 # test_and_change_bit   \n"
+                       "       xor     %2, %0, %3                      \n"
+                       "       " __SC  "\t%2, %1                       \n"
+                       "       .set    mips0                           \n"
+                       : "=&r" (temp), "+m" (*m), "=&r" (res)
+                       : "r" (1UL << bit)
+                       : "memory");
+               } while (unlikely(!res));
+
+               res = temp & (1UL << bit);
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
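
test_and_change_bit follows the same recipe: xor builds the toggled
word in res, the loop spins on the sc outcome, and the final and
recovers the bit's old value. An illustrative worked example, with
bit = 3 and *m initially 0b1010:

	/* temp = 0b1010; res = temp ^ 0b1000 = 0b0010; sc stores 0b0010 */
	/* res  = temp & 0b1000 = 0b1000: nonzero, the bit was set       */

(The "\t%2, %1" after __SC, a literal tab where the other routines
spell out the spacing, is carried over unchanged from the old code.)
Net effect of the patch: every ll/sc loop in this file now leaves
branch placement to the compiler instead of hand-routing the retry
path through ".subsection 2".
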