From: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
This reverts commit d754a529a8be55f009c6679d772c472c1632cd5b which was commit 3c1d3f0979721a39dd2980c97466127ce65aa130 upstream.
This breaks the build, as this tree does not contain the <asm/sync.h> / __SYNC() infrastructure that the change relies on, and should be reverted.
Cc: Guenter Roeck <linux@roeck-us.net>
Cc: Paul Burton <paul.burton@mips.com>
Cc: linux-mips@vger.kernel.org
Cc: Huacai Chen <chenhc@lemote.com>
Cc: Jiaxun Yang <jiaxun.yang@flygoat.com>
Cc: linux-kernel@vger.kernel.org
Cc: Sasha Levin <sashal@kernel.org>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
---
 arch/mips/include/asm/barrier.h | 13 ++++++-------
 arch/mips/include/asm/futex.h   | 15 ++++++++-------
 2 files changed, 14 insertions(+), 14 deletions(-)
--- a/arch/mips/include/asm/barrier.h
+++ b/arch/mips/include/asm/barrier.h
@@ -218,14 +218,13 @@
  * ordering will be done by smp_llsc_mb() and friends.
  */
 #if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
-# define __WEAK_LLSC_MB		sync
-# define smp_llsc_mb() \
-	__asm__ __volatile__(__stringify(__WEAK_LLSC_MB) : : :"memory")
-# define __LLSC_CLOBBER
+#define __WEAK_LLSC_MB		"	sync	\n"
+#define smp_llsc_mb()	__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
+#define __LLSC_CLOBBER
 #else
-# define __WEAK_LLSC_MB
-# define smp_llsc_mb()		do { } while (0)
-# define __LLSC_CLOBBER		"memory"
+#define __WEAK_LLSC_MB		"		\n"
+#define smp_llsc_mb()		do { } while (0)
+#define __LLSC_CLOBBER		"memory"
 #endif
 
 #ifdef CONFIG_CPU_CAVIUM_OCTEON
--- a/arch/mips/include/asm/futex.h
+++ b/arch/mips/include/asm/futex.h
@@ -16,7 +16,6 @@
 #include <asm/barrier.h>
 #include <asm/compiler.h>
 #include <asm/errno.h>
-#include <asm/sync.h>
 #include <asm/war.h>
 
 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
@@ -33,7 +32,7 @@
 		"	.set	arch=r4000			\n"	\
 		"2:	sc	$1, %2				\n"	\
 		"	beqzl	$1, 1b				\n"	\
-		__stringify(__WEAK_LLSC_MB)				\
+		__WEAK_LLSC_MB						\
 		"3:						\n"	\
 		"	.insn					\n"	\
 		"	.set	pop				\n"	\
@@ -51,19 +50,19 @@
 		"i" (-EFAULT)						\
 		: "memory");						\
 	} else if (cpu_has_llsc) {					\
+		loongson_llsc_mb();					\
 		__asm__ __volatile__(					\
 		"	.set	push				\n"	\
 		"	.set	noat				\n"	\
 		"	.set	push				\n"	\
 		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
-		"	" __SYNC(full, loongson3_war) "		\n"	\
 		"1:	"user_ll("%1", "%4")" # __futex_atomic_op\n"	\
 		"	.set	pop				\n"	\
 		"	" insn	"				\n"	\
 		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
 		"2:	"user_sc("$1", "%2")"			\n"	\
 		"	beqz	$1, 1b				\n"	\
-		__stringify(__WEAK_LLSC_MB)				\
+		__WEAK_LLSC_MB						\
 		"3:						\n"	\
 		"	.insn					\n"	\
 		"	.set	pop				\n"	\
@@ -148,7 +147,7 @@ futex_atomic_cmpxchg_inatomic(u32 *uval,
 		"	.set	arch=r4000				\n"
 		"2:	sc	$1, %2					\n"
 		"	beqzl	$1, 1b					\n"
-		__stringify(__WEAK_LLSC_MB)
+		__WEAK_LLSC_MB
 		"3:							\n"
 		"	.insn						\n"
 		"	.set	pop					\n"
@@ -165,13 +164,13 @@ futex_atomic_cmpxchg_inatomic(u32 *uval,
 		"i" (-EFAULT)
 		: "memory");
 	} else if (cpu_has_llsc) {
+		loongson_llsc_mb();
 		__asm__ __volatile__(
 		"# futex_atomic_cmpxchg_inatomic			\n"
 		"	.set	push					\n"
 		"	.set	noat					\n"
 		"	.set	push					\n"
 		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
-		"	" __SYNC(full, loongson3_war) "			\n"
 		"1:	"user_ll("%1", "%3")"				\n"
 		"	bne	%1, %z4, 3f				\n"
 		"	.set	pop					\n"
@@ -179,7 +178,8 @@ futex_atomic_cmpxchg_inatomic(u32 *uval,
 		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
 		"2:	"user_sc("$1", "%2")"				\n"
 		"	beqz	$1, 1b					\n"
-		"3: " __SYNC_ELSE(full, loongson3_war, __WEAK_LLSC_MB) "\n"
+		__WEAK_LLSC_MB
+		"3:							\n"
 		"	.insn						\n"
 		"	.set	pop					\n"
 		"	.section .fixup,\"ax\"				\n"
@@ -194,6 +194,7 @@ futex_atomic_cmpxchg_inatomic(u32 *uval,
 		: GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
 		  "i" (-EFAULT)
 		: "memory");
+		loongson_llsc_mb();
 	} else
 		return -ENOSYS;
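
A note for anyone carrying backports on top of this tree: the two halves of
the upstream commit are tightly coupled. The old barrier.h defines
__WEAK_LLSC_MB as a ready-made string literal, while the upstream rework
defines it as a bare token that each user must pass through __stringify().
The sketch below (illustrative only, not part of the patch; the _OLD/_NEW
suffixes are invented here, in the tree both are called __WEAK_LLSC_MB)
shows the two usage patterns side by side. Mixing them, as a partial
backport would, either stringifies the quote characters themselves or
pastes a bare token into an asm template, and neither assembles:

/* Illustrative sketch only; builds with a MIPS compiler. */
#define __stringify_1(x)	#x
#define __stringify(x)		__stringify_1(x)

/* Old scheme (what this revert restores): a ready-made string. */
#define __WEAK_LLSC_MB_OLD	"	sync	\n"

/* New upstream scheme: a bare token, stringified at each use site. */
#define __WEAK_LLSC_MB_NEW	sync

static inline void smp_llsc_mb_old(void)
{
	/* The string literal concatenates directly into the template. */
	__asm__ __volatile__(__WEAK_LLSC_MB_OLD : : : "memory");
}

static inline void smp_llsc_mb_new(void)
{
	/* The bare token must go through __stringify() first. */
	__asm__ __volatile__(__stringify(__WEAK_LLSC_MB_NEW) : : : "memory");
}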
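
Likewise, the restored futex.h places the Loongson3 workaround barrier
outside the inline asm via loongson_llsc_mb(), where the upstream rework
emits it inside the asm body with __SYNC(full, loongson3_war). A minimal
sketch of the restored shape (illustrative only; the stand-in
loongson_llsc_mb() definition assumes CONFIG_CPU_LOONGSON3_WORKAROUNDS=y,
and atomic_add_sketch() is a made-up example, not a kernel function):

/* Stand-in for the kernel helper; illustrative only. */
#define loongson_llsc_mb()	__asm__ __volatile__("sync" : : : "memory")

/* Made-up example of the restored pattern: the workaround barrier sits
 * before the LL/SC loop instead of being emitted inside the asm body. */
static inline void atomic_add_sketch(int *p, int v)
{
	int tmp;

	loongson_llsc_mb();	/* Loongson3 workaround before the ll */
	__asm__ __volatile__(
	"1:	ll	%0, %1		\n"
	"	addu	%0, %2		\n"
	"	sc	%0, %1		\n"
	"	beqz	%0, 1b		\n"
	: "=&r" (tmp), "+R" (*p)
	: "r" (v)
	: "memory");
}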