atomics/treewide: Make atomic_fetch_add_unless() optional
author    Mark Rutland <mark.rutland@arm.com>
          Thu, 21 Jun 2018 12:13:09 +0000 (13:13 +0100)
committer Ingo Molnar <mingo@kernel.org>
          Thu, 21 Jun 2018 12:22:33 +0000 (14:22 +0200)

Several architectures have a near-identical implementation based on
atomic_read() and atomic_cmpxchg(), which we can instead define once in
<linux/atomic.h>. Let's do so, using something close to the existing
x86 implementation with try_cmpxchg().

Where an architecture provides its own atomic_fetch_add_unless(), it
must define a preprocessor symbol for it. The instrumented atomics are
updated accordingly.
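
For reference, the opt-out is purely a preprocessor convention: an
architecture that supplies its own implementation also #defines the
symbol to itself, and the generic header only emits the fallback when
the symbol is absent. A condensed sketch of the pattern, mirroring the
include/linux/atomic.h hunk below:

	/* <linux/atomic.h>: only used if no arch implementation exists */
	#ifndef atomic_fetch_add_unless
	static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
	{
		int c = atomic_read(v);

		do {
			if (unlikely(c == u))
				break;
		} while (!atomic_try_cmpxchg(v, &c, c + a));

		return c;
	}
	#endif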

Note that arch/arc's existing atomic_fetch_add_unless() had redundant
barriers, as these are already present in its atomic_cmpxchg()
implementation.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Geert Uytterhoeven <geert@linux-m68k.org>
Reviewed-by: Will Deacon <will.deacon@arm.com>
Acked-by: Geert Uytterhoeven <geert@linux-m68k.org>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Palmer Dabbelt <palmer@sifive.com>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Vineet Gupta <vgupta@synopsys.com>
Link: https://lore.kernel.org/lkml/20180621121321.4761-7-mark.rutland@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
21 files changed:
arch/alpha/include/asm/atomic.h
arch/arc/include/asm/atomic.h
arch/arm/include/asm/atomic.h
arch/arm64/include/asm/atomic.h
arch/h8300/include/asm/atomic.h
arch/hexagon/include/asm/atomic.h
arch/ia64/include/asm/atomic.h
arch/m68k/include/asm/atomic.h
arch/mips/include/asm/atomic.h
arch/parisc/include/asm/atomic.h
arch/powerpc/include/asm/atomic.h
arch/riscv/include/asm/atomic.h
arch/s390/include/asm/atomic.h
arch/sh/include/asm/atomic.h
arch/sparc/include/asm/atomic_32.h
arch/sparc/include/asm/atomic_64.h
arch/x86/include/asm/atomic.h
arch/xtensa/include/asm/atomic.h
include/asm-generic/atomic-instrumented.h
include/asm-generic/atomic.h
include/linux/atomic.h

index eb0f25e4c5ddbde21a6e8d6aac6f1acbf925f3a2..4a800a3424a383a977855465541aeae8165e6e7a 100644
@@ -235,7 +235,7 @@ static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
        smp_mb();
        return old;
 }
-
+#define atomic_fetch_add_unless atomic_fetch_add_unless
 
 /**
  * atomic64_add_unless - add unless the number is a given value
index 1406825b5e7d2d86784dcda005458d85cd9b721b..60da80481c5d00375cbc8ac1fd40e137ee330811 100644
@@ -308,34 +308,6 @@ ATOMIC_OPS(xor, ^=, CTOP_INST_AXOR_DI_R2_R2_R3)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-/**
- * atomic_fetch_add_unless - add unless the number is a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v
- */
-#define atomic_fetch_add_unless(v, a, u)                                       \
-({                                                                     \
-       int c, old;                                                     \
-                                                                       \
-       /*                                                              \
-        * Explicit full memory barrier needed before/after as          \
-        * LLOCK/SCOND thmeselves don't provide any such semantics      \
-        */                                                             \
-       smp_mb();                                                       \
-                                                                       \
-       c = atomic_read(v);                                             \
-       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c)\
-               c = old;                                                \
-                                                                       \
-       smp_mb();                                                       \
-                                                                       \
-       c;                                                              \
-})
-
 #define atomic_inc(v)                  atomic_add(1, v)
 #define atomic_dec(v)                  atomic_sub(1, v)
 
index 02f3894faa48bdfe2eae52ae0feddbdc7d962d5a..74460aa00fa06095b707b136c1e6857c326377ba 100644
@@ -156,6 +156,7 @@ static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 
        return oldval;
 }
+#define atomic_fetch_add_unless                atomic_fetch_add_unless
 
 #else /* ARM_ARCH_6 */
 
@@ -215,16 +216,6 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
        return ret;
 }
 
-static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-       int c, old;
-
-       c = atomic_read(v);
-       while (c != u && (old = atomic_cmpxchg((v), c, c + a)) != c)
-               c = old;
-       return c;
-}
-
 #endif /* __LINUX_ARM_ARCH__ */
 
 #define ATOMIC_OPS(op, c_op, asm_op)                                   \
index ad50412889c5198b08e0b945d55cd8bf8090ccb4..22c8c43d66890418b6c9d35dbb8c5171350e8384 100644
 #define atomic_dec_and_test(v)         (atomic_dec_return(v) == 0)
 #define atomic_sub_and_test(i, v)      (atomic_sub_return((i), (v)) == 0)
 #define atomic_add_negative(i, v)      (atomic_add_return((i), (v)) < 0)
-#define atomic_fetch_add_unless(v, a, u)       ___atomic_add_unless(v, a, u,)
 #define atomic_andnot                  atomic_andnot
 
 /*
index 5c856887fdf2fe45c0d8943c77d2d53a18f016b2..7103649463080795c4c2f663aa0d82361f4887fb 100644
@@ -106,5 +106,6 @@ static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
        arch_local_irq_restore(flags);
        return ret;
 }
+#define atomic_fetch_add_unless                atomic_fetch_add_unless
 
 #endif /* __ARCH_H8300_ATOMIC __ */
index d2feeba93c4457bbd6a7163d94c8e7594d5391a1..86c67e9adbfa0ddfc138eceb67b7b601045f7854 100644
@@ -196,6 +196,7 @@ static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
        );
        return __oldval;
 }
+#define atomic_fetch_add_unless atomic_fetch_add_unless
 
 #define atomic_inc(v) atomic_add(1, (v))
 #define atomic_dec(v) atomic_sub(1, (v))
index 93d48b8232200db0c77f4194ebe6be0978bf4e33..cfe44086338e4d98b75a15aa76a67fa14afbab2e 100644
@@ -215,22 +215,6 @@ ATOMIC64_FETCH_OP(xor, ^)
        (cmpxchg(&((v)->counter), old, new))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
 
-static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-       int c, old;
-       c = atomic_read(v);
-       for (;;) {
-               if (unlikely(c == (u)))
-                       break;
-               old = atomic_cmpxchg((v), c, c + (a));
-               if (likely(old == c))
-                       break;
-               c = old;
-       }
-       return c;
-}
-
-
 static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
 {
        long c, old;
index 8022d9ea1213f2117da4ba2e0d26461ddd06e0e0..596882cda2246db2f1aeb84c9e3a42b6cf07eacc 100644
@@ -211,19 +211,4 @@ static inline int atomic_add_negative(int i, atomic_t *v)
        return c != 0;
 }
 
-static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-       int c, old;
-       c = atomic_read(v);
-       for (;;) {
-               if (unlikely(c == (u)))
-                       break;
-               old = atomic_cmpxchg((v), c, c + (a));
-               if (likely(old == c))
-                       break;
-               c = old;
-       }
-       return c;
-}
-
 #endif /* __ARCH_M68K_ATOMIC __ */
index 502e691c63937ab63227ec29d07f70e1f3dd5050..794734e730d9a32eba608d23219de5fcdf0cec34 100644
@@ -274,30 +274,6 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
 
-/**
- * atomic_fetch_add_unless - add unless the number is a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v.
- */
-static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-       int c, old;
-       c = atomic_read(v);
-       for (;;) {
-               if (unlikely(c == (u)))
-                       break;
-               old = atomic_cmpxchg((v), c, c + (a));
-               if (likely(old == c))
-                       break;
-               c = old;
-       }
-       return c;
-}
-
 #define atomic_dec_return(v) atomic_sub_return(1, (v))
 #define atomic_inc_return(v) atomic_add_return(1, (v))
 
index 3fd0243bf40526dbf13cd2094879a0236df5d04f..b2b6261d05e7b9451440f371baf20da2299ddc62 100644
@@ -77,30 +77,6 @@ static __inline__ int atomic_read(const atomic_t *v)
 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
-/**
- * atomic_fetch_add_unless - add unless the number is a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v.
- */
-static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-       int c, old;
-       c = atomic_read(v);
-       for (;;) {
-               if (unlikely(c == (u)))
-                       break;
-               old = atomic_cmpxchg((v), c, c + (a));
-               if (likely(old == c))
-                       break;
-               c = old;
-       }
-       return c;
-}
-
 #define ATOMIC_OP(op, c_op)                                            \
 static __inline__ void atomic_##op(int i, atomic_t *v)                 \
 {                                                                      \
index e59620ee4f6b5c6498e92ca0a1f0467b77ead5a7..b5646c079c1694e2603e88f1ca275ff36d4c46ff 100644
@@ -248,6 +248,7 @@ static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 
        return t;
 }
+#define atomic_fetch_add_unless atomic_fetch_add_unless
 
 /**
  * atomic_inc_not_zero - increment unless the number is zero
index 18259e90f57e8f405bd035b830f1291edf7a9afb..5f161daefcd23ba83db25cf189741d3568185d5d 100644
@@ -349,6 +349,7 @@ static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
                : "memory");
        return prev;
 }
+#define atomic_fetch_add_unless atomic_fetch_add_unless
 
 #ifndef CONFIG_GENERIC_ATOMIC64
 static __always_inline long __atomic64_add_unless(atomic64_t *v, long a, long u)
index 66dac30a4fe1e8bb41b01d30c54a15895360bfef..26c6b713a7a34db7fb02405ad9d7229cc84f8701 100644
@@ -90,21 +90,6 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
        return __atomic_cmpxchg(&v->counter, old, new);
 }
 
-static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-       int c, old;
-       c = atomic_read(v);
-       for (;;) {
-               if (unlikely(c == u))
-                       break;
-               old = atomic_cmpxchg(v, c, c + a);
-               if (likely(old == c))
-                       break;
-               c = old;
-       }
-       return c;
-}
-
 #define ATOMIC64_INIT(i)  { (i) }
 
 static inline long atomic64_read(const atomic64_t *v)
index ef45931ebac5a691b7cd36d843603ed19fcad575..422fac764ca1c91aa5c16f79269e690810c3802f 100644
 #define atomic_xchg(v, new)            (xchg(&((v)->counter), new))
 #define atomic_cmpxchg(v, o, n)                (cmpxchg(&((v)->counter), (o), (n)))
 
-/**
- * atomic_fetch_add_unless - add unless the number is a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v.
- */
-static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-       int c, old;
-       c = atomic_read(v);
-       for (;;) {
-               if (unlikely(c == (u)))
-                       break;
-               old = atomic_cmpxchg((v), c, c + (a));
-               if (likely(old == c))
-                       break;
-               c = old;
-       }
-
-       return c;
-}
-
 #endif /* CONFIG_CPU_J2 */
 
 #endif /* __ASM_SH_ATOMIC_H */
index a58f4b43bcc74e6edb91b6718e5c1b9993fe63f2..9d7a15acc0c519d3c3702e82931e50988cf96f05 100644
@@ -30,6 +30,8 @@ int atomic_xchg(atomic_t *, int);
 int atomic_fetch_add_unless(atomic_t *, int, int);
 void atomic_set(atomic_t *, int);
 
+#define atomic_fetch_add_unless        atomic_fetch_add_unless
+
 #define atomic_set_release(v, i)       atomic_set((v), (i))
 
 #define atomic_read(v)          READ_ONCE((v)->counter)
index 07830a316464aa7269f9850ad4fb44528908415f..e4f1c93db31ffadd9b6e72d8bfe3983b9584e19e 100644
@@ -89,21 +89,6 @@ static inline int atomic_xchg(atomic_t *v, int new)
        return xchg(&v->counter, new);
 }
 
-static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-       int c, old;
-       c = atomic_read(v);
-       for (;;) {
-               if (unlikely(c == (u)))
-                       break;
-               old = atomic_cmpxchg((v), c, c + (a));
-               if (likely(old == c))
-                       break;
-               c = old;
-       }
-       return c;
-}
-
 #define atomic64_cmpxchg(v, o, n) \
        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
index 84ed0bd76aefa67b5a5be6e7f1cf75aa70ccb739..616327ac9d39ccd146b808434d47ed5a2a09a7ca 100644
@@ -253,27 +253,6 @@ static inline int arch_atomic_fetch_xor(int i, atomic_t *v)
        return val;
 }
 
-/**
- * arch_atomic_fetch_add_unless - add unless the number is already a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as @v was not already @u.
- * Returns the old value of @v.
- */
-static __always_inline int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-       int c = arch_atomic_read(v);
-
-       do {
-               if (unlikely(c == u))
-                       break;
-       } while (!arch_atomic_try_cmpxchg(v, &c, c + a));
-
-       return c;
-}
-
 #ifdef CONFIG_X86_32
 # include <asm/atomic64_32.h>
 #else
index 4188e56c06c9e8a04508d7f4aeb40606ddffed0d..f4c9f82c40c64ffa01a80e8e1db0d2d1f569c4ed 100644
@@ -274,30 +274,6 @@ ATOMIC_OPS(xor)
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
-/**
- * atomic_fetch_add_unless - add unless the number is a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v.
- */
-static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-       int c, old;
-       c = atomic_read(v);
-       for (;;) {
-               if (unlikely(c == (u)))
-                       break;
-               old = atomic_cmpxchg((v), c, c + (a));
-               if (likely(old == c))
-                       break;
-               c = old;
-       }
-       return c;
-}
-
 #endif /* __KERNEL__ */
 
 #endif /* _XTENSA_ATOMIC_H */
index 83bb88d791c43fbc4dd2a25c3d96fa9c020bd09b..1f9b2a767d3c22609fdc91648eafc0880cf0961b 100644
@@ -84,12 +84,14 @@ static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 ne
 }
 #endif
 
+#ifdef arch_atomic_fetch_add_unless
+#define atomic_fetch_add_unless atomic_fetch_add_unless
 static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_add_unless(v, a, u);
 }
-
+#endif
 
 static __always_inline bool atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
 {
index 10051ed6d08858284359ddc3d446b07ed5576401..757e458212208c3a5693b94de08f7da6a518299e 100644
@@ -221,15 +221,4 @@ static inline void atomic_dec(atomic_t *v)
 #define atomic_xchg(ptr, v)            (xchg(&(ptr)->counter, (v)))
 #define atomic_cmpxchg(v, old, new)    (cmpxchg(&((v)->counter), (old), (new)))
 
-#ifndef atomic_fetch_add_unless
-static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
-       int c, old;
-       c = atomic_read(v);
-       while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
-               c = old;
-       return c;
-}
-#endif
-
 #endif /* __ASM_GENERIC_ATOMIC_H */
index ae3f30923d05b31e9999c31d093018b6d78ee7a7..b89ba36cab9412e61c9ea116362a65c603da2611 100644
 #endif
 #endif /* xchg_relaxed */
 
+/**
+ * atomic_fetch_add_unless - add unless the number is already a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, if @v was not already @u.
+ * Returns the original value of @v.
+ */
+#ifndef atomic_fetch_add_unless
+static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
+{
+       int c = atomic_read(v);
+
+       do {
+               if (unlikely(c == u))
+                       break;
+       } while (!atomic_try_cmpxchg(v, &c, c + a));
+
+       return c;
+}
+#endif
+
 /**
  * atomic_add_unless - add unless the number is already a given value
  * @v: pointer of type atomic_t