/* tools/include/linux/compiler.h at v5.10 */
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _TOOLS_LINUX_COMPILER_H_
#define _TOOLS_LINUX_COMPILER_H_

#ifdef __GNUC__
#include <linux/compiler-gcc.h>
#endif

#ifndef __compiletime_error
# define __compiletime_error(message)
#endif

#ifdef __OPTIMIZE__
# define __compiletime_assert(condition, msg, prefix, suffix)		\
	do {								\
		extern void prefix ## suffix(void) __compiletime_error(msg); \
		if (!(condition))					\
			prefix ## suffix();				\
	} while (0)
#else
# define __compiletime_assert(condition, msg, prefix, suffix) do { } while (0)
#endif

#define _compiletime_assert(condition, msg, prefix, suffix) \
	__compiletime_assert(condition, msg, prefix, suffix)

/**
 * compiletime_assert - break build and emit msg if condition is false
 * @condition: a compile-time constant condition to check
 * @msg: a message to emit if condition is false
 *
 * In the tradition of POSIX assert, this macro will break the build if the
 * supplied condition is *false*, emitting the supplied error message if the
 * compiler has support to do so.
 */
#define compiletime_assert(condition, msg) \
	_compiletime_assert(condition, msg, __compiletime_assert_, __COUNTER__)

/* Optimization barrier */
/* The "volatile" is due to gcc bugs */
#define barrier() __asm__ __volatile__("": : :"memory")

#ifndef __always_inline
# define __always_inline inline __attribute__((always_inline))
#endif

#ifndef noinline
#define noinline
#endif

/* Are two types/vars the same type (ignoring qualifiers)? */
#ifndef __same_type
# define __same_type(a, b) __builtin_types_compatible_p(typeof(a), typeof(b))
#endif

#ifdef __ANDROID__
/*
 * FIXME: Big hammer to get rid of tons of:
 *   "warning: always_inline function might not be inlinable"
 *
 * At least on android-ndk-r12/platforms/android-24/arch-arm
 */
#undef __always_inline
#define __always_inline inline
#endif

#define __user
#define __rcu
#define __read_mostly

#ifndef __attribute_const__
# define __attribute_const__
#endif

#ifndef __maybe_unused
# define __maybe_unused __attribute__((unused))
#endif

#ifndef __used
# define __used __attribute__((__unused__))
#endif

#ifndef __packed
# define __packed __attribute__((__packed__))
#endif

#ifndef __force
# define __force
#endif

#ifndef __weak
# define __weak __attribute__((weak))
#endif

#ifndef likely
# define likely(x) __builtin_expect(!!(x), 1)
#endif

#ifndef unlikely
# define unlikely(x) __builtin_expect(!!(x), 0)
#endif

#ifndef __init
# define __init
#endif

#ifndef noinline
# define noinline
#endif

#include <linux/types.h>
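/*
 * Illustrative usage of the macros above (editor's sketch, not part of the
 * upstream header). compiletime_assert() breaks the build only when its
 * condition is compile-time false, __same_type() compares types while
 * ignoring qualifiers, and likely()/unlikely() are pure branch-probability
 * hints to the optimizer:
 *
 *	compiletime_assert(sizeof(__u64) == 8, "__u64 must be 8 bytes");
 *
 *	int x = 0;
 *	compiletime_assert(__same_type(x, int), "x must be an int");
 *
 *	if (unlikely(x != 0))		// hint: error path is rarely taken
 *		handle_error(x);	// hypothetical error handler
 */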
/*
 * The following functions are taken from kernel sources and
 * break aliasing rules in their original form.
 *
 * While the kernel is compiled with -fno-strict-aliasing,
 * perf uses -Wstrict-aliasing=3, which makes the build fail
 * under gcc 4.4.
 *
 * Using an extra __may_alias__ type allows aliasing
 * in this case.
 */
typedef __u8  __attribute__((__may_alias__))  __u8_alias_t;
typedef __u16 __attribute__((__may_alias__)) __u16_alias_t;
typedef __u32 __attribute__((__may_alias__)) __u32_alias_t;
typedef __u64 __attribute__((__may_alias__)) __u64_alias_t;

static __always_inline void __read_once_size(const volatile void *p, void *res, int size)
{
	switch (size) {
	case 1: *(__u8_alias_t  *) res = *(volatile __u8_alias_t  *) p; break;
	case 2: *(__u16_alias_t *) res = *(volatile __u16_alias_t *) p; break;
	case 4: *(__u32_alias_t *) res = *(volatile __u32_alias_t *) p; break;
	case 8: *(__u64_alias_t *) res = *(volatile __u64_alias_t *) p; break;
	default:
		barrier();
		__builtin_memcpy((void *)res, (const void *)p, size);
		barrier();
	}
}

static __always_inline void __write_once_size(volatile void *p, void *res, int size)
{
	switch (size) {
	case 1: *(volatile __u8_alias_t  *) p = *(__u8_alias_t  *) res; break;
	case 2: *(volatile __u16_alias_t *) p = *(__u16_alias_t *) res; break;
	case 4: *(volatile __u32_alias_t *) p = *(__u32_alias_t *) res; break;
	case 8: *(volatile __u64_alias_t *) p = *(__u64_alias_t *) res; break;
	default:
		barrier();
		__builtin_memcpy((void *)p, (const void *)res, size);
		barrier();
	}
}

/*
 * Prevent the compiler from merging or refetching reads or writes. The
 * compiler is also forbidden from reordering successive instances of
 * READ_ONCE and WRITE_ONCE, but only when the compiler is aware of some
 * particular ordering. One way to make the compiler aware of ordering is to
 * put the two invocations of READ_ONCE or WRITE_ONCE in different C
 * statements.
 *
 * These two macros will also work on aggregate data types like structs or
 * unions. If the size of the accessed data type exceeds the word size of
 * the machine (e.g., 32 bits or 64 bits), READ_ONCE() and WRITE_ONCE() will
 * fall back to memcpy and print a compile-time warning.
 *
 * Their two major use cases are: (1) mediating communication between
 * process-level code and irq/NMI handlers, all running on the same CPU,
 * and (2) ensuring that the compiler does not fold, spindle, or otherwise
 * mutilate accesses that either do not require ordering or that interact
 * with an explicit memory barrier or atomic instruction that provides the
 * required ordering.
 */

#define READ_ONCE(x)					\
({							\
	union { typeof(x) __val; char __c[1]; } __u =	\
		{ .__c = { 0 } };			\
	__read_once_size(&(x), __u.__c, sizeof(x));	\
	__u.__val;					\
})

#define WRITE_ONCE(x, val)				\
({							\
	union { typeof(x) __val; char __c[1]; } __u =	\
		{ .__val = (val) };			\
	__write_once_size(&(x), __u.__c, sizeof(x));	\
	__u.__val;					\
})

#ifndef __fallthrough
# define __fallthrough
#endif

/* Indirect macros required for expanded argument pasting, e.g. __LINE__. */
#define ___PASTE(a, b) a##b
#define __PASTE(a, b) ___PASTE(a, b)

#endif /* _TOOLS_LINUX_COMPILER_H_ */
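/*
 * Illustrative READ_ONCE()/WRITE_ONCE() usage (editor's sketch, not part of
 * the upstream header): a single-writer flag shared with an interrupt or
 * signal handler. The macros force one real load/store per access and keep
 * the compiler from caching the flag in a register across the loop. Note
 * they are compiler barriers only; they do not order accesses across CPUs
 * the way smp_rmb()/smp_wmb() do:
 *
 *	static int done;		// hypothetical shared flag
 *
 *	void handler(void)		// writer side
 *	{
 *		WRITE_ONCE(done, 1);
 *	}
 *
 *	void wait_for_done(void)	// reader side
 *	{
 *		while (!READ_ONCE(done))
 *			;		// spin until the handler sets the flag
 *	}
 */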