Commit 0fbbf07c authored by Mathieu Desnoyers, committed by Shuah Khan

selftests/rseq: Fix arm64 buggy load-acquire/store-release macros

The arm64 load-acquire/store-release macros from the Linux kernel rseq
selftests are buggy. Replace them with a working implementation.
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
Cc: Catalin Marinas <catalin.marinas@arm.com>
Cc: Will Deacon <will@kernel.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Signed-off-by: Shuah Khan <skhan@linuxfoundation.org>
parent d6aaa23a
...@@ -27,59 +27,61 @@ ...@@ -27,59 +27,61 @@
#define rseq_smp_load_acquire(p) \ #define rseq_smp_load_acquire(p) \
__extension__ ({ \ __extension__ ({ \
__typeof(*p) ____p1; \ union { rseq_unqual_scalar_typeof(*(p)) __val; char __c[sizeof(*(p))]; } __u; \
switch (sizeof(*p)) { \ switch (sizeof(*(p))) { \
case 1: \ case 1: \
asm volatile ("ldarb %w0, %1" \ __asm__ __volatile__ ("ldarb %w0, %1" \
: "=r" (*(__u8 *)p) \ : "=r" (*(__u8 *)__u.__c) \
: "Q" (*p) : "memory"); \ : "Q" (*(p)) : "memory"); \
break; \ break; \
case 2: \ case 2: \
asm volatile ("ldarh %w0, %1" \ __asm__ __volatile__ ("ldarh %w0, %1" \
: "=r" (*(__u16 *)p) \ : "=r" (*(__u16 *)__u.__c) \
: "Q" (*p) : "memory"); \ : "Q" (*(p)) : "memory"); \
break; \ break; \
case 4: \ case 4: \
asm volatile ("ldar %w0, %1" \ __asm__ __volatile__ ("ldar %w0, %1" \
: "=r" (*(__u32 *)p) \ : "=r" (*(__u32 *)__u.__c) \
: "Q" (*p) : "memory"); \ : "Q" (*(p)) : "memory"); \
break; \ break; \
case 8: \ case 8: \
asm volatile ("ldar %0, %1" \ __asm__ __volatile__ ("ldar %0, %1" \
: "=r" (*(__u64 *)p) \ : "=r" (*(__u64 *)__u.__c) \
: "Q" (*p) : "memory"); \ : "Q" (*(p)) : "memory"); \
break; \ break; \
} \ } \
____p1; \ (rseq_unqual_scalar_typeof(*(p)))__u.__val; \
}) })
/* A control dependency followed by rmb() provides acquire ordering. */
#define rseq_smp_acquire__after_ctrl_dep()	rseq_smp_rmb()
#define rseq_smp_store_release(p, v) \ #define rseq_smp_store_release(p, v) \
do { \ do { \
switch (sizeof(*p)) { \ union { rseq_unqual_scalar_typeof(*(p)) __val; char __c[sizeof(*(p))]; } __u = \
{ .__val = (rseq_unqual_scalar_typeof(*(p))) (v) }; \
switch (sizeof(*(p))) { \
case 1: \ case 1: \
asm volatile ("stlrb %w1, %0" \ __asm__ __volatile__ ("stlrb %w1, %0" \
: "=Q" (*p) \ : "=Q" (*(p)) \
: "r" ((__u8)v) \ : "r" (*(__u8 *)__u.__c) \
: "memory"); \ : "memory"); \
break; \ break; \
case 2: \ case 2: \
asm volatile ("stlrh %w1, %0" \ __asm__ __volatile__ ("stlrh %w1, %0" \
: "=Q" (*p) \ : "=Q" (*(p)) \
: "r" ((__u16)v) \ : "r" (*(__u16 *)__u.__c) \
: "memory"); \ : "memory"); \
break; \ break; \
case 4: \ case 4: \
asm volatile ("stlr %w1, %0" \ __asm__ __volatile__ ("stlr %w1, %0" \
: "=Q" (*p) \ : "=Q" (*(p)) \
: "r" ((__u32)v) \ : "r" (*(__u32 *)__u.__c) \
: "memory"); \ : "memory"); \
break; \ break; \
case 8: \ case 8: \
asm volatile ("stlr %1, %0" \ __asm__ __volatile__ ("stlr %1, %0" \
: "=Q" (*p) \ : "=Q" (*(p)) \
: "r" ((__u64)v) \ : "r" (*(__u64 *)__u.__c) \
: "memory"); \ : "memory"); \
break; \ break; \
} \ } \
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment