Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

tools/virtio: Fix arm64 ringtest compilation error

Add a cpu_relax() implementation for arm64 instead of falling through to
the default assert(0), and include the missing <assert.h> header. Also,
add smp_wmb() and smp_mb() implementations for arm64.

The compilation errors are shown below; also define __always_inline when
it is not already defined, to avoid the "expected ';'" errors.

$ make
cc -Wall -pthread -O2 -ggdb -flto -fwhole-program -c -o ring.o ring.c
In file included from ring.c:10:
main.h: In function ‘busy_wait’:
main.h:99:21: warning: implicit declaration of function ‘assert’
[-Wimplicit-function-declaration]
99 | #define cpu_relax() assert(0)
| ^~~~~~
main.h:107:17: note: in expansion of macro ‘cpu_relax’
107 | cpu_relax();
| ^~~~~~~~~
main.h:12:1: note: ‘assert’ is defined in header ‘<assert.h>’; did you
forget to ‘#include <assert.h>’?
11 | #include <stdbool.h>
+++ |+#include <assert.h>
12 |
main.h: At top level:
main.h:143:23: error: expected ‘;’ before ‘void’
143 | static __always_inline
| ^
| ;
144 | void __read_once_size(const volatile void *p, void *res, int
size)
| ~~~~
main.h:158:23: error: expected ‘;’ before ‘void’
158 | static __always_inline void __write_once_size(volatile void *p,
void *res, int size)
| ^~~~~
| ;
make: *** [<builtin>: ring.o] Error 1

Signed-off-by: Rong Tao <rongtao@cestc.cn>
Message-Id: <tencent_F53E159DD7925174445D830DA19FACF44B07@qq.com>
Signed-off-by: Michael S. Tsirkin <mst@redhat.com>

authored by

Rong Tao and committed by
Michael S. Tsirkin
57380fd1 a90e8608

+11
+11
tools/virtio/ringtest/main.h
··· 8 8 #ifndef MAIN_H 9 9 #define MAIN_H 10 10 11 + #include <assert.h> 11 12 #include <stdbool.h> 12 13 13 14 extern int param; ··· 96 95 #define cpu_relax() asm ("rep; nop" ::: "memory") 97 96 #elif defined(__s390x__) 98 97 #define cpu_relax() barrier() 98 + #elif defined(__aarch64__) 99 + #define cpu_relax() asm ("yield" ::: "memory") 99 100 #else 100 101 #define cpu_relax() assert(0) 101 102 #endif ··· 115 112 116 113 #if defined(__x86_64__) || defined(__i386__) 117 114 #define smp_mb() asm volatile("lock; addl $0,-132(%%rsp)" ::: "memory", "cc") 115 + #elif defined(__aarch64__) 116 + #define smp_mb() asm volatile("dmb ish" ::: "memory") 118 117 #else 119 118 /* 120 119 * Not using __ATOMIC_SEQ_CST since gcc docs say they are only synchronized ··· 141 136 142 137 #if defined(__i386__) || defined(__x86_64__) || defined(__s390x__) 143 138 #define smp_wmb() barrier() 139 + #elif defined(__aarch64__) 140 + #define smp_wmb() asm volatile("dmb ishst" ::: "memory") 144 141 #else 145 142 #define smp_wmb() smp_release() 143 + #endif 144 + 145 + #ifndef __always_inline 146 + #define __always_inline inline __attribute__((always_inline)) 146 147 #endif 147 148 148 149 static __always_inline