From 15e7e5c1ebf556cd620c9b091e121091ac760f6d Mon Sep 17 00:00:00 2001
From: Will Deacon
Date: Wed, 5 Jun 2013 11:27:26 +0100
Subject: [PATCH] ARM: 7749/1: spinlock: retry trylock operation if strex
 fails on free lock

An exclusive store instruction may fail for reasons other than lock
contention (e.g. a cache eviction during the critical section) so, in
line with other architectures using similar exclusive instructions
(alpha, mips, powerpc), retry the trylock operation if the lock appears
to be free but the strex reported failure.

Reported-by: Tony Thompson
Signed-off-by: Will Deacon
Signed-off-by: Russell King
---
 arch/arm/include/asm/spinlock.h | 25 ++++++++++++++-----------
 1 file changed, 14 insertions(+), 11 deletions(-)

diff --git a/arch/arm/include/asm/spinlock.h b/arch/arm/include/asm/spinlock.h
index 6220e9fdf4c7..f8b8965666e9 100644
--- a/arch/arm/include/asm/spinlock.h
+++ b/arch/arm/include/asm/spinlock.h
@@ -97,19 +97,22 @@ static inline void arch_spin_lock(arch_spinlock_t *lock)
 
 static inline int arch_spin_trylock(arch_spinlock_t *lock)
 {
-        unsigned long tmp;
+        unsigned long contended, res;
         u32 slock;
 
-        __asm__ __volatile__(
-"        ldrex        %0, [%2]\n"
-"        subs        %1, %0, %0, ror #16\n"
-"        addeq        %0, %0, %3\n"
-"        strexeq        %1, %0, [%2]"
-        : "=&r" (slock), "=&r" (tmp)
-        : "r" (&lock->slock), "I" (1 << TICKET_SHIFT)
-        : "cc");
-
-        if (tmp == 0) {
+        do {
+                __asm__ __volatile__(
+                "        ldrex        %0, [%3]\n"
+                "        mov        %2, #0\n"
+                "        subs        %1, %0, %0, ror #16\n"
+                "        addeq        %0, %0, %4\n"
+                "        strexeq        %2, %0, [%3]"
+                : "=&r" (slock), "=&r" (contended), "=r" (res)
+                : "r" (&lock->slock), "I" (1 << TICKET_SHIFT)
+                : "cc");
+        } while (res);
+
+        if (!contended) {
                 smp_mb();
                 return 1;
         } else {
-- 
2.39.5
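
[Editor's note, not part of the patch: a minimal portable sketch of the same
idea, assuming C11 <stdatomic.h>. atomic_compare_exchange_weak() is permitted
to fail spuriously, just as strex may, so a trylock built on it must
distinguish "genuinely contended" from "exchange failed on a free-looking
lock" and retry only in the latter case. The ticket_lock type and
ticket_trylock() names are hypothetical, chosen for illustration.]

#include <stdatomic.h>
#include <stdbool.h>

struct ticket_lock {
        atomic_uint slock;  /* owner in low 16 bits, next ticket in high 16 */
};

static bool ticket_trylock(struct ticket_lock *lock)
{
        unsigned int old = atomic_load_explicit(&lock->slock,
                                                memory_order_relaxed);

        for (;;) {
                unsigned int owner = old & 0xffff;
                unsigned int next  = old >> 16;

                if (owner != next)
                        return false;  /* genuinely held: report contention */

                /*
                 * Lock looks free: try to take the next ticket. A weak CAS
                 * may fail spuriously; on failure 'old' is reloaded, so we
                 * loop and re-check instead of reporting contention.
                 */
                if (atomic_compare_exchange_weak_explicit(&lock->slock,
                                &old, old + (1u << 16),
                                memory_order_acquire,
                                memory_order_relaxed))
                        return true;
        }
}

[The do/while (res) loop in the patch plays the same role: res is zeroed
before the conditional store, and strexeq only executes when the lock looked
free, so a non-zero res can only mean a failed store on a free lock; retrying
therefore cannot spin on a held lock.]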