Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

selftests/rseq: Fix arm64 buggy load-acquire/store-release macros

The arm64 load-acquire/store-release macros from the Linux kernel rseq
selftests are buggy. Replace them by a working implementation.

Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
Cc: Catalin Marinas <catalin.marinas@arm.com>
Cc: Will Deacon <will@kernel.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Signed-off-by: Shuah Khan <skhan@linuxfoundation.org>

authored by

Mathieu Desnoyers and committed by
Shuah Khan
0fbbf07c d6aaa23a

+30 -28
+30 -28
tools/testing/selftests/rseq/rseq-arm64.h
 #define rseq_smp_load_acquire(p)					\
 __extension__ ({							\
-	__typeof(*p) ____p1;						\
-	switch (sizeof(*p)) {						\
+	union { rseq_unqual_scalar_typeof(*(p)) __val; char __c[sizeof(*(p))]; } __u; \
+	switch (sizeof(*(p))) {						\
 	case 1:								\
-		asm volatile ("ldarb %w0, %1"				\
-			: "=r" (*(__u8 *)p)				\
-			: "Q" (*p) : "memory");				\
+		__asm__ __volatile__ ("ldarb %w0, %1"			\
+			: "=r" (*(__u8 *)__u.__c)			\
+			: "Q" (*(p)) : "memory");			\
 		break;							\
 	case 2:								\
-		asm volatile ("ldarh %w0, %1"				\
-			: "=r" (*(__u16 *)p)				\
-			: "Q" (*p) : "memory");				\
+		__asm__ __volatile__ ("ldarh %w0, %1"			\
+			: "=r" (*(__u16 *)__u.__c)			\
+			: "Q" (*(p)) : "memory");			\
 		break;							\
 	case 4:								\
-		asm volatile ("ldar %w0, %1"				\
-			: "=r" (*(__u32 *)p)				\
-			: "Q" (*p) : "memory");				\
+		__asm__ __volatile__ ("ldar %w0, %1"			\
+			: "=r" (*(__u32 *)__u.__c)			\
+			: "Q" (*(p)) : "memory");			\
 		break;							\
 	case 8:								\
-		asm volatile ("ldar %0, %1"				\
-			: "=r" (*(__u64 *)p)				\
-			: "Q" (*p) : "memory");				\
+		__asm__ __volatile__ ("ldar %0, %1"			\
+			: "=r" (*(__u64 *)__u.__c)			\
+			: "Q" (*(p)) : "memory");			\
 		break;							\
 	}								\
-	____p1;								\
+	(rseq_unqual_scalar_typeof(*(p)))__u.__val;			\
 })
 
 #define rseq_smp_acquire__after_ctrl_dep()	rseq_smp_rmb()
 
 #define rseq_smp_store_release(p, v)					\
 do {									\
-	switch (sizeof(*p)) {						\
+	union { rseq_unqual_scalar_typeof(*(p)) __val; char __c[sizeof(*(p))]; } __u = \
+		{ .__val = (rseq_unqual_scalar_typeof(*(p))) (v) };	\
+	switch (sizeof(*(p))) {						\
 	case 1:								\
-		asm volatile ("stlrb %w1, %0"				\
-			: "=Q" (*p)					\
-			: "r" ((__u8)v)					\
+		__asm__ __volatile__ ("stlrb %w1, %0"			\
+			: "=Q" (*(p))					\
+			: "r" (*(__u8 *)__u.__c)			\
 			: "memory");					\
 		break;							\
 	case 2:								\
-		asm volatile ("stlrh %w1, %0"				\
-			: "=Q" (*p)					\
-			: "r" ((__u16)v)				\
+		__asm__ __volatile__ ("stlrh %w1, %0"			\
+			: "=Q" (*(p))					\
+			: "r" (*(__u16 *)__u.__c)			\
 			: "memory");					\
 		break;							\
 	case 4:								\
-		asm volatile ("stlr %w1, %0"				\
-			: "=Q" (*p)					\
-			: "r" ((__u32)v)				\
+		__asm__ __volatile__ ("stlr %w1, %0"			\
+			: "=Q" (*(p))					\
+			: "r" (*(__u32 *)__u.__c)			\
 			: "memory");					\
 		break;							\
 	case 8:								\
-		asm volatile ("stlr %1, %0"				\
-			: "=Q" (*p)					\
-			: "r" ((__u64)v)				\
+		__asm__ __volatile__ ("stlr %1, %0"			\
+			: "=Q" (*(p))					\
+			: "r" (*(__u64 *)__u.__c)			\
 			: "memory");					\
 		break;							\
 	}								\
[diff truncated in source view]