MIPS: retire "asm/llsc.h"
author Huang Pei <huangpei@loongson.cn>
Wed, 15 Dec 2021 08:45:00 +0000 (16:45 +0800)
committer Thomas Bogendoerfer <tsbogend@alpha.franken.de>
Wed, 5 Jan 2022 10:16:35 +0000 (11:16 +0100)
All that "asm/llsc.h" does is help build LL/SC inline asm strings, and
the mnemonics it provides can be stringified directly from "asm/asm.h"
instead (see the sketch after the summary below).

+. Since "asm/asm.h" has all we need, retire "asm/llsc.h"

+. Remove the now-unused header file, together with the "asm/war.h" and
   "asm/isa-rev.h" includes that no longer need to be pulled in directly
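
For illustration only, a minimal userspace sketch of the mechanism (not
part of this patch): the mnemonics are bare tokens, and __stringify()
(same shape as in <linux/stringify.h>) turns them into the string
literals the inline-asm templates are concatenated from.  LONG_LL and
SC_BEQZ below are stand-ins; the real selection lives in "asm/asm.h"
and depends on _MIPS_SZLONG and the R10000 ll/sc workaround:

    #include <stdio.h>

    /* same shape as <linux/stringify.h> */
    #define __stringify_1(x...)    #x
    #define __stringify(x...)      __stringify_1(x)

    /* stand-ins for the asm/asm.h selections */
    #define LONG_LL     lld         /* `ll' on 32-bit kernels */
    #define SC_BEQZ     beqz        /* beqzl/beqzc depending on config */

    int main(void)
    {
            /* adjacent string literals concatenate, as in the headers */
            const char *tpl = "1:\t" __stringify(LONG_LL) "\t%0, %1\n"
                              "\t"   __stringify(SC_BEQZ) "\t%0, 1b\n";
            puts(tpl);  /* prints "1: lld %0, %1" then "beqz %0, 1b" */
            return 0;
    }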

Inspired-by: Maciej W. Rozycki <macro@orcam.me.uk>
Signed-off-by: Huang Pei <huangpei@loongson.cn>
Signed-off-by: Thomas Bogendoerfer <tsbogend@alpha.franken.de>
arch/mips/include/asm/asm.h
arch/mips/include/asm/atomic.h
arch/mips/include/asm/bitops.h
arch/mips/include/asm/cmpxchg.h
arch/mips/include/asm/kvm_host.h
arch/mips/include/asm/llsc.h [deleted file]

index f3302b13d3e0d3ad256c461cc573a982a01a2386..6ffdd4b5e1d0606a1a451f7ec0f8a284834230c6 100644 (file)
@@ -222,6 +222,8 @@ symbol              =       value
 #define LONG_SRLV      srlv
 #define LONG_SRA       sra
 #define LONG_SRAV      srav
+#define LONG_INS       ins
+#define LONG_EXT       ext
 
 #ifdef __ASSEMBLY__
 #define LONG           .word
@@ -249,6 +251,8 @@ symbol              =       value
 #define LONG_SRLV      dsrlv
 #define LONG_SRA       dsra
 #define LONG_SRAV      dsrav
+#define LONG_INS       dins
+#define LONG_EXT       dext
 
 #ifdef __ASSEMBLY__
 #define LONG           .dword
index a0b9e7c1e4fce24344e98d95c536bdb8f1f750f4..712fb5a6a5682b26d0e78a862796deecc4130b05 100644 (file)
 
 #include <linux/irqflags.h>
 #include <linux/types.h>
+#include <asm/asm.h>
 #include <asm/barrier.h>
 #include <asm/compiler.h>
 #include <asm/cpu-features.h>
 #include <asm/cmpxchg.h>
-#include <asm/llsc.h>
 #include <asm/sync.h>
-#include <asm/war.h>
 
 #define ATOMIC_OPS(pfx, type)                                          \
 static __always_inline type arch_##pfx##_read(const pfx##_t *v)                \
@@ -74,7 +73,7 @@ static __inline__ void arch_##pfx##_##op(type i, pfx##_t * v)         \
        "1:     " #ll " %0, %1          # " #pfx "_" #op "      \n"     \
        "       " #asm_op " %0, %2                              \n"     \
        "       " #sc " %0, %1                                  \n"     \
-       "\t" __SC_BEQZ "%0, 1b                                  \n"     \
+       "\t" __stringify(SC_BEQZ) "     %0, 1b                  \n"     \
        "       .set    pop                                     \n"     \
        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)            \
        : "Ir" (i) : __LLSC_CLOBBER);                                   \
@@ -104,7 +103,7 @@ arch_##pfx##_##op##_return_relaxed(type i, pfx##_t * v)                     \
        "1:     " #ll " %1, %2          # " #pfx "_" #op "_return\n"    \
        "       " #asm_op " %0, %1, %3                          \n"     \
        "       " #sc " %0, %2                                  \n"     \
-       "\t" __SC_BEQZ "%0, 1b                                  \n"     \
+       "\t" __stringify(SC_BEQZ) "     %0, 1b                  \n"     \
        "       " #asm_op " %0, %1, %3                          \n"     \
        "       .set    pop                                     \n"     \
        : "=&r" (result), "=&r" (temp),                                 \
@@ -137,7 +136,7 @@ arch_##pfx##_fetch_##op##_relaxed(type i, pfx##_t * v)                      \
        "1:     " #ll " %1, %2          # " #pfx "_fetch_" #op "\n"     \
        "       " #asm_op " %0, %1, %3                          \n"     \
        "       " #sc " %0, %2                                  \n"     \
-       "\t" __SC_BEQZ "%0, 1b                                  \n"     \
+       "\t" __stringify(SC_BEQZ) "     %0, 1b                  \n"     \
        "       .set    pop                                     \n"     \
        "       move    %0, %1                                  \n"     \
        : "=&r" (result), "=&r" (temp),                                 \
@@ -237,7 +236,7 @@ static __inline__ type arch_##pfx##_sub_if_positive(type i, pfx##_t * v)    \
        "       .set    push                                    \n"     \
        "       .set    " MIPS_ISA_LEVEL "                      \n"     \
        "       " #sc " %1, %2                                  \n"     \
-       "       " __SC_BEQZ "%1, 1b                             \n"     \
+       "       " __stringify(SC_BEQZ) "        %1, 1b          \n"     \
        "2:     " __SYNC(full, loongson3_war) "                 \n"     \
        "       .set    pop                                     \n"     \
        : "=&r" (result), "=&r" (temp),                                 \
index dc2a6234dd3c7ca88164c51718421b67226ca3b4..3812082b82956508d4a09e39ea48d0b24ce725f6 100644 (file)
 #include <linux/bits.h>
 #include <linux/compiler.h>
 #include <linux/types.h>
+#include <asm/asm.h>
 #include <asm/barrier.h>
 #include <asm/byteorder.h>             /* sigh ... */
 #include <asm/compiler.h>
 #include <asm/cpu-features.h>
-#include <asm/isa-rev.h>
-#include <asm/llsc.h>
 #include <asm/sgidefs.h>
-#include <asm/war.h>
 
 #define __bit_op(mem, insn, inputs...) do {                    \
        unsigned long __temp;                                   \
        "       .set            push                    \n"     \
        "       .set            " MIPS_ISA_LEVEL "      \n"     \
        "       " __SYNC(full, loongson3_war) "         \n"     \
-       "1:     " __LL          "%0, %1                 \n"     \
+       "1:     " __stringify(LONG_LL)  "       %0, %1  \n"     \
        "       " insn          "                       \n"     \
-       "       " __SC          "%0, %1                 \n"     \
-       "       " __SC_BEQZ     "%0, 1b                 \n"     \
+       "       " __stringify(LONG_SC)  "       %0, %1  \n"     \
+       "       " __stringify(SC_BEQZ)  "       %0, 1b  \n"     \
        "       .set            pop                     \n"     \
        : "=&r"(__temp), "+" GCC_OFF_SMALL_ASM()(mem)           \
        : inputs                                                \
        "       .set            push                    \n"     \
        "       .set            " MIPS_ISA_LEVEL "      \n"     \
        "       " __SYNC(full, loongson3_war) "         \n"     \
-       "1:     " __LL          ll_dst ", %2            \n"     \
+       "1:     " __stringify(LONG_LL) " "      ll_dst ", %2\n" \
        "       " insn          "                       \n"     \
-       "       " __SC          "%1, %2                 \n"     \
-       "       " __SC_BEQZ     "%1, 1b                 \n"     \
+       "       " __stringify(LONG_SC)  "       %1, %2  \n"     \
+       "       " __stringify(SC_BEQZ)  "       %1, 1b  \n"     \
        "       .set            pop                     \n"     \
        : "=&r"(__orig), "=&r"(__temp),                         \
          "+" GCC_OFF_SMALL_ASM()(mem)                          \
@@ -98,7 +96,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
        }
 
        if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
-               __bit_op(*m, __INS "%0, %3, %2, 1", "i"(bit), "r"(~0));
+               __bit_op(*m, __stringify(LONG_INS) " %0, %3, %2, 1", "i"(bit), "r"(~0));
                return;
        }
 
@@ -126,7 +124,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
        }
 
        if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit)) {
-               __bit_op(*m, __INS "%0, $0, %2, 1", "i"(bit));
+               __bit_op(*m, __stringify(LONG_INS) " %0, $0, %2, 1", "i"(bit));
                return;
        }
 
@@ -234,8 +232,8 @@ static inline int test_and_clear_bit(unsigned long nr,
                res = __mips_test_and_clear_bit(nr, addr);
        } else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
                res = __test_bit_op(*m, "%1",
-                                   __EXT "%0, %1, %3, 1;"
-                                   __INS "%1, $0, %3, 1",
+                                   __stringify(LONG_EXT) " %0, %1, %3, 1;"
+                                   __stringify(LONG_INS) " %1, $0, %3, 1",
                                    "i"(bit));
        } else {
                orig = __test_bit_op(*m, "%0",
index 66a8b293fd80b6b1d6ebe591538e08da703b256e..7ec9493b28614f631ea2bccef15de758d6bd0eac 100644 (file)
 
 #include <linux/bug.h>
 #include <linux/irqflags.h>
+#include <asm/asm.h>
 #include <asm/compiler.h>
-#include <asm/llsc.h>
 #include <asm/sync.h>
-#include <asm/war.h>
 
 /*
  * These functions doesn't exist, so if they are called you'll either:
@@ -48,7 +47,7 @@ extern unsigned long __xchg_called_with_bad_pointer(void)
                "       move    $1, %z3                         \n"     \
                "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"     \
                "       " st "  $1, %1                          \n"     \
-               "\t" __SC_BEQZ  "$1, 1b                         \n"     \
+               "\t" __stringify(SC_BEQZ)       "       $1, 1b  \n"     \
                "       .set    pop                             \n"     \
                : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)           \
                : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)                  \
@@ -127,7 +126,7 @@ unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
                "       move    $1, %z4                         \n"     \
                "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"     \
                "       " st "  $1, %1                          \n"     \
-               "\t" __SC_BEQZ  "$1, 1b                         \n"     \
+               "\t" __stringify(SC_BEQZ)       "       $1, 1b  \n"     \
                "       .set    pop                             \n"     \
                "2:     " __SYNC(full, loongson3_war) "         \n"     \
                : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)           \
@@ -282,7 +281,7 @@ static inline unsigned long __cmpxchg64(volatile void *ptr,
        /* Attempt to store new at ptr */
        "       scd     %L1, %2                         \n"
        /* If we failed, loop! */
-       "\t" __SC_BEQZ "%L1, 1b                         \n"
+       "\t" __stringify(SC_BEQZ) "     %L1, 1b         \n"
        "2:     " __SYNC(full, loongson3_war) "         \n"
        "       .set    pop                             \n"
        : "=&r"(ret),
index 696f6b00937762cba91438eeb9ea5828d8bcf089..999bdd4f25b4fee241ee8c997c11044141ff6744 100644 (file)
@@ -20,6 +20,7 @@
 #include <linux/threads.h>
 #include <linux/spinlock.h>
 
+#include <asm/asm.h>
 #include <asm/inst.h>
 #include <asm/mipsregs.h>
 
@@ -379,9 +380,9 @@ static inline void _kvm_atomic_set_c0_guest_reg(unsigned long *reg,
                __asm__ __volatile__(
                "       .set    push                            \n"
                "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
-               "       " __LL "%0, %1                          \n"
+               "       "__stringify(LONG_LL)   " %0, %1        \n"
                "       or      %0, %2                          \n"
-               "       " __SC  "%0, %1                         \n"
+               "       "__stringify(LONG_SC)   " %0, %1        \n"
                "       .set    pop                             \n"
                : "=&r" (temp), "+m" (*reg)
                : "r" (val));
@@ -396,9 +397,9 @@ static inline void _kvm_atomic_clear_c0_guest_reg(unsigned long *reg,
                __asm__ __volatile__(
                "       .set    push                            \n"
                "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
-               "       " __LL "%0, %1                          \n"
+               "       "__stringify(LONG_LL)   " %0, %1        \n"
                "       and     %0, %2                          \n"
-               "       " __SC  "%0, %1                         \n"
+               "       "__stringify(LONG_SC)   " %0, %1        \n"
                "       .set    pop                             \n"
                : "=&r" (temp), "+m" (*reg)
                : "r" (~val));
@@ -414,10 +415,10 @@ static inline void _kvm_atomic_change_c0_guest_reg(unsigned long *reg,
                __asm__ __volatile__(
                "       .set    push                            \n"
                "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
-               "       " __LL "%0, %1                          \n"
+               "       "__stringify(LONG_LL)   " %0, %1        \n"
                "       and     %0, %2                          \n"
                "       or      %0, %3                          \n"
-               "       " __SC  "%0, %1                         \n"
+               "       "__stringify(LONG_SC)   " %0, %1        \n"
                "       .set    pop                             \n"
                : "=&r" (temp), "+m" (*reg)
                : "r" (~change), "r" (val & change));
diff --git a/arch/mips/include/asm/llsc.h b/arch/mips/include/asm/llsc.h
deleted file mode 100644 (file)
index ec09fe5..0000000
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * This file is subject to the terms and conditions of the GNU General Public
- * License.  See the file "COPYING" in the main directory of this archive
- * for more details.
- *
- * Macros for 32/64-bit neutral inline assembler
- */
-
-#ifndef __ASM_LLSC_H
-#define __ASM_LLSC_H
-
-#include <asm/isa-rev.h>
-
-#if _MIPS_SZLONG == 32
-#define __LL           "ll     "
-#define __SC           "sc     "
-#define __INS          "ins    "
-#define __EXT          "ext    "
-#elif _MIPS_SZLONG == 64
-#define __LL           "lld    "
-#define __SC           "scd    "
-#define __INS          "dins   "
-#define __EXT          "dext   "
-#endif
-
-/*
- * Using a branch-likely instruction to check the result of an sc instruction
- * works around a bug present in R10000 CPUs prior to revision 3.0 that could
- * cause ll-sc sequences to execute non-atomically.
- */
-#ifdef CONFIG_WAR_R10000_LLSC
-# define __SC_BEQZ "beqzl      "
-#elif MIPS_ISA_REV >= 6
-# define __SC_BEQZ "beqzc      "
-#else
-# define __SC_BEQZ "beqz       "
-#endif
-
-#endif /* __ASM_LLSC_H  */
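
For reference, a sketch of the bare-token equivalent that "asm/asm.h"
is assumed to carry after this change (implied by the
__stringify(SC_BEQZ) uses above; it mirrors the deleted __SC_BEQZ
selection, including the R10000 branch-likely workaround):

    #ifdef CONFIG_WAR_R10000_LLSC
    # define SC_BEQZ    beqzl
    #elif MIPS_ISA_REV >= 6
    # define SC_BEQZ    beqzc
    #else
    # define SC_BEQZ    beqz
    #endif

so that __stringify(SC_BEQZ) yields the same "beqzl"/"beqzc"/"beqz"
strings the deleted header used to provide.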