Commit 10223c52 authored by Will Deacon

arm64: barrier: Use '__unqual_scalar_typeof' for acquire/release macros

Passing volatile-qualified pointers to the arm64 implementations of the
load-acquire/store-release macros results in a re-load from the stack
and a bunch of associated stack-protector churn due to the temporary
result variable inheriting the volatile semantics thanks to the use of
'typeof()'.

Define these temporary variables using '__unqual_scalar_typeof' to drop
the volatile qualifier in the case that they are scalar types.

Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Arnd Bergmann <arnd@arndb.de>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Will Deacon <will@kernel.org>
parent 54988727
...@@ -76,8 +76,8 @@ static inline unsigned long array_index_mask_nospec(unsigned long idx, ...@@ -76,8 +76,8 @@ static inline unsigned long array_index_mask_nospec(unsigned long idx,
#define __smp_store_release(p, v) \ #define __smp_store_release(p, v) \
do { \ do { \
typeof(p) __p = (p); \ typeof(p) __p = (p); \
union { typeof(*p) __val; char __c[1]; } __u = \ union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u = \
{ .__val = (__force typeof(*p)) (v) }; \ { .__val = (__force __unqual_scalar_typeof(*p)) (v) }; \
compiletime_assert_atomic_type(*p); \ compiletime_assert_atomic_type(*p); \
kasan_check_write(__p, sizeof(*p)); \ kasan_check_write(__p, sizeof(*p)); \
switch (sizeof(*p)) { \ switch (sizeof(*p)) { \
...@@ -110,7 +110,7 @@ do { \ ...@@ -110,7 +110,7 @@ do { \
#define __smp_load_acquire(p) \ #define __smp_load_acquire(p) \
({ \ ({ \
union { typeof(*p) __val; char __c[1]; } __u; \ union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u; \
typeof(p) __p = (p); \ typeof(p) __p = (p); \
compiletime_assert_atomic_type(*p); \ compiletime_assert_atomic_type(*p); \
kasan_check_read(__p, sizeof(*p)); \ kasan_check_read(__p, sizeof(*p)); \
...@@ -136,33 +136,33 @@ do { \ ...@@ -136,33 +136,33 @@ do { \
: "Q" (*__p) : "memory"); \ : "Q" (*__p) : "memory"); \
break; \ break; \
} \ } \
__u.__val; \ (typeof(*p))__u.__val; \
}) })
/*
 * smp_cond_load_relaxed() - spin until *ptr satisfies cond_expr, with
 * relaxed (unordered) loads.
 *
 * @ptr:       pointer to the variable to poll
 * @cond_expr: expression evaluated each iteration; may reference VAL,
 *             the most recently loaded value of *ptr
 *
 * Returns the value of *ptr that satisfied cond_expr.
 *
 * VAL is declared with __unqual_scalar_typeof() so that a
 * volatile-qualified @ptr does not make the temporary volatile (which
 * would force needless re-loads from the stack); the result is cast
 * back to typeof(*ptr) so callers see the original type.
 * __cmpwait_relaxed() parks the CPU (WFE) until *ptr may have changed,
 * rather than busy-spinning.
 */
#define smp_cond_load_relaxed(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	__unqual_scalar_typeof(*ptr) VAL;				\
	for (;;) {							\
		VAL = READ_ONCE(*__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	(typeof(*ptr))VAL;						\
})
/*
 * smp_cond_load_acquire() - spin until *ptr satisfies cond_expr, with
 * acquire ordering on each load.
 *
 * @ptr:       pointer to the variable to poll
 * @cond_expr: expression evaluated each iteration; may reference VAL,
 *             the most recently loaded value of *ptr
 *
 * Returns the value of *ptr that satisfied cond_expr; the final load
 * carries acquire semantics, so accesses after this macro cannot be
 * reordered before it.
 *
 * As with smp_cond_load_relaxed(), VAL uses __unqual_scalar_typeof()
 * to shed any volatile qualifier from @ptr, and the result is cast
 * back to typeof(*ptr). __cmpwait_relaxed() waits (WFE) for a change
 * to *ptr instead of busy-spinning.
 */
#define smp_cond_load_acquire(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	__unqual_scalar_typeof(*ptr) VAL;				\
	for (;;) {							\
		VAL = smp_load_acquire(__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	(typeof(*ptr))VAL;						\
})
#include <asm-generic/barrier.h> #include <asm-generic/barrier.h>
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment