locking/barriers: Use '__unqual_scalar_typeof' for load-acquire macros
author Will Deacon <will@kernel.org>
Thu, 19 Dec 2019 16:22:56 +0000 (16:22 +0000)
committer Will Deacon <will@kernel.org>
Thu, 16 Apr 2020 11:28:34 +0000 (12:28 +0100)
Passing volatile-qualified pointers to the asm-generic implementations of
the load-acquire macros results in a re-load from the stack, because the
temporary result variable inherits the volatile qualifier via the use of
'typeof()'.

Define these temporary variables using '__unqual_scalar_typeof()' to drop
the volatile qualifier in the case that they are scalar types.
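
As a standalone userspace sketch (not kernel code) of the effect being
relied on here: '__demo_unqual_typeof' below is a hypothetical, simplified
stand-in for '__unqual_scalar_typeof()' that handles only 'int'. The
controlling expression of _Generic() undergoes lvalue conversion, so its
qualifiers are dropped and a 'volatile int' operand selects the 'int' arm.
Builds with GCC or Clang ('typeof' is a GNU extension):

  #include <stdio.h>

  #define __demo_unqual_typeof(x)	typeof(_Generic((x),	\
  		int:		(int)0,			\
  		default:	(x)))

  static volatile int flag = 1;

  int main(void)
  {
  	/* 'volatile int': every use of 'v' is re-read from its stack slot. */
  	typeof(flag) v = flag;

  	/* plain 'int': 'u' is free to live in a register. */
  	__demo_unqual_typeof(flag) u = flag;

  	printf("%d %d\n", v, u);
  	return 0;
  }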

Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Arnd Bergmann <arnd@arndb.de>
Signed-off-by: Will Deacon <will@kernel.org>
include/asm-generic/barrier.h

index 85b28eb80b11fc4c72d2b88c9fd79e71f42cbdbf..2eacaf7d62f6123210f0412045e7feab2134cf48 100644
@@ -128,10 +128,10 @@ do {                                                                      \
 #ifndef __smp_load_acquire
 #define __smp_load_acquire(p)                                          \
 ({                                                                     \
-       typeof(*p) ___p1 = READ_ONCE(*p);                               \
+       __unqual_scalar_typeof(*p) ___p1 = READ_ONCE(*p);               \
        compiletime_assert_atomic_type(*p);                             \
        __smp_mb();                                                     \
-       ___p1;                                                          \
+       (typeof(*p))___p1;                                              \
 })
 #endif
 
@@ -183,10 +183,10 @@ do {                                                                      \
 #ifndef smp_load_acquire
 #define smp_load_acquire(p)                                            \
 ({                                                                     \
-       typeof(*p) ___p1 = READ_ONCE(*p);                               \
+       __unqual_scalar_typeof(*p) ___p1 = READ_ONCE(*p);               \
        compiletime_assert_atomic_type(*p);                             \
        barrier();                                                      \
-       ___p1;                                                          \
+       (typeof(*p))___p1;                                              \
 })
 #endif
 
@@ -229,14 +229,14 @@ do {                                                                      \
 #ifndef smp_cond_load_relaxed
 #define smp_cond_load_relaxed(ptr, cond_expr) ({               \
        typeof(ptr) __PTR = (ptr);                              \
-       typeof(*ptr) VAL;                                       \
+       __unqual_scalar_typeof(*ptr) VAL;                       \
        for (;;) {                                              \
                VAL = READ_ONCE(*__PTR);                        \
                if (cond_expr)                                  \
                        break;                                  \
                cpu_relax();                                    \
        }                                                       \
-       VAL;                                                    \
+       (typeof(*ptr))VAL;                                      \
 })
 #endif
 
@@ -250,10 +250,10 @@ do {                                                                      \
  */
 #ifndef smp_cond_load_acquire
 #define smp_cond_load_acquire(ptr, cond_expr) ({               \
-       typeof(*ptr) _val;                                      \
+       __unqual_scalar_typeof(*ptr) _val;                      \
        _val = smp_cond_load_relaxed(ptr, cond_expr);           \
        smp_acquire__after_ctrl_dep();                          \
-       _val;                                                   \
+       (typeof(*ptr))_val;                                     \
 })
 #endif
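
For context, a hedged sketch of the kind of call site this helps
(hypothetical code, not part of the patch), assuming a kernel build where
<asm/barrier.h> provides these macros. With the change above, the macro's
internal temporary is a plain 'int' rather than 'volatile int', while the
final '(typeof(*ptr))' cast keeps the result type callers already expect:

  #include <asm/barrier.h>

  /* Hypothetical flag that happens to be declared volatile. */
  static volatile int published_flag;

  static inline int wait_for_flag(void)
  {
  	/*
  	 * Previously the temporary inside the macro inherited 'volatile'
  	 * and was spilled to and re-read from the stack; now it can be
  	 * kept in a register, and the acquire ordering is unchanged.
  	 */
  	return smp_cond_load_acquire(&published_flag, VAL != 0);
  }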