From: Christophe Leroy <christophe.leroy@csgroup.eu>
commit 18db466a9a306406dab3b134014d9f6ed642471c upstream.
Commit 9401f4e46cf6 ("powerpc: Use lwarx/ldarx directly instead of PPC_LWARX/LDARX macros") properly handled the eh field of lwarx in asm/bitops.h but failed to clear it for PPC32 in asm/simple_spinlock.h.
So, do as in arch_atomic_try_cmpxchg_lock(): set it to 1 on PPC64 but to 0 on PPC32. For that, use IS_ENABLED(CONFIG_PPC64), which returns 1 when CONFIG_PPC64 is set and 0 otherwise.
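For illustration only (this snippet is not part of the patch, and the
function name is made up for the example), the resulting pattern is a
compile-time constant fed through an "n" (immediate) constraint into
the eh field of lwarx:

	/* Sketch of the fixed pattern: eh is 1 on PPC64 and 0 on PPC32. */
	static inline unsigned long trylock_sketch(unsigned int *p, unsigned long token)
	{
		unsigned long tmp;
		unsigned int eh = IS_ENABLED(CONFIG_PPC64);

		__asm__ __volatile__(
	"1:	lwarx	%0,0,%2,%[eh]\n"	/* reserve; EH hint is PPC64-only */
	"	cmpwi	0,%0,0\n"		/* already held? */
	"	bne-	2f\n"
	"	stwcx.	%1,0,%2\n"		/* try to store the lock token */
	"	bne-	1b\n"			/* lost reservation, retry */
	"2:"					/* real code adds PPC_ACQUIRE_BARRIER */
		: "=&r" (tmp)
		: "r" (token), "r" (p), [eh] "n" (eh)
		: "cr0", "memory");

		return tmp;			/* 0 means the lock was taken */
	}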
Fixes: 9401f4e46cf6 ("powerpc: Use lwarx/ldarx directly instead of PPC_LWARX/LDARX macros")
Cc: stable@vger.kernel.org # v5.15+
Reported-by: Pali Rohár <pali@kernel.org>
Signed-off-by: Christophe Leroy <christophe.leroy@csgroup.eu>
Tested-by: Pali Rohár <pali@kernel.org>
Reviewed-by: Segher Boessenkool <segher@kernel.crashing.org>
[mpe: Use symbolic names, use 'n' constraint per Segher]
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/a1176e19e627dd6a1b8d24c6c457a8ab874b7d12.165943093...
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
---
 arch/powerpc/include/asm/simple_spinlock.h | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)
--- a/arch/powerpc/include/asm/simple_spinlock.h
+++ b/arch/powerpc/include/asm/simple_spinlock.h
@@ -48,10 +48,11 @@ static inline int arch_spin_is_locked(ar
 static inline unsigned long __arch_spin_trylock(arch_spinlock_t *lock)
 {
 	unsigned long tmp, token;
+	unsigned int eh = IS_ENABLED(CONFIG_PPC64);
 
 	token = LOCK_TOKEN;
 	__asm__ __volatile__(
-"1:	lwarx		%0,0,%2,1\n\
+"1:	lwarx		%0,0,%2,%[eh]\n\
 	cmpwi		0,%0,0\n\
 	bne-		2f\n\
 	stwcx.		%1,0,%2\n\
@@ -59,7 +60,7 @@ static inline unsigned long __arch_spin_
 	PPC_ACQUIRE_BARRIER
 "2:"
 	: "=&r" (tmp)
-	: "r" (token), "r" (&lock->slock)
+	: "r" (token), "r" (&lock->slock), [eh] "n" (eh)
 	: "cr0", "memory");
 
 	return tmp;
@@ -177,9 +178,10 @@ static inline void arch_spin_unlock(arch
 static inline long __arch_read_trylock(arch_rwlock_t *rw)
 {
 	long tmp;
+	unsigned int eh = IS_ENABLED(CONFIG_PPC64);
 
 	__asm__ __volatile__(
-"1:	lwarx		%0,0,%1,1\n"
+"1:	lwarx		%0,0,%1,%[eh]\n"
 	__DO_SIGN_EXTEND
 "	addic.		%0,%0,1\n\
 	ble-		2f\n"
@@ -187,7 +189,7 @@ static inline long __arch_read_trylock(a
 	bne-		1b\n"
 	PPC_ACQUIRE_BARRIER
 "2:"	: "=&r" (tmp)
-	: "r" (&rw->lock)
+	: "r" (&rw->lock), [eh] "n" (eh)
 	: "cr0", "xer", "memory");
 
 	return tmp;
@@ -200,17 +202,18 @@ static inline long __arch_read_trylock(a
 static inline long __arch_write_trylock(arch_rwlock_t *rw)
 {
 	long tmp, token;
+	unsigned int eh = IS_ENABLED(CONFIG_PPC64);
 
 	token = WRLOCK_TOKEN;
 	__asm__ __volatile__(
-"1:	lwarx		%0,0,%2,1\n\
+"1:	lwarx		%0,0,%2,%[eh]\n\
 	cmpwi		0,%0,0\n\
 	bne-		2f\n"
 "	stwcx.		%1,0,%2\n\
 	bne-		1b\n"
 	PPC_ACQUIRE_BARRIER
 "2:"	: "=&r" (tmp)
-	: "r" (token), "r" (&rw->lock)
+	: "r" (token), "r" (&rw->lock), [eh] "n" (eh)
 	: "cr0", "memory");
 
 	return tmp;