Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux
1
fork

Configure Feed

Select the types of activity you want to include in your feed.

at v6.19-rc3 69 lines 1.9 kB view raw
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020 Google LLC.
 */
#ifndef __ASM_RWONCE_H
#define __ASM_RWONCE_H

/* The override below applies only to LTO C builds; assembly is excluded. */
#if defined(CONFIG_LTO) && !defined(__ASSEMBLER__)

#include <linux/compiler_types.h>
#include <asm/alternative-macros.h>

/* The vDSO build cannot use the ALTERNATIVE patching machinery. */
#ifndef BUILD_VDSO

/*
 * Emit an acquire load of the stringized operand list "regs" with size
 * suffix "sfx" (b, h, or empty).  The ALTERNATIVE() pair starts out as
 * LDAR (load-acquire) and is patched to LDAPR (RCpc load-acquire) on CPUs
 * that have the ARM64_HAS_LDAPR capability; the .arch_extension directive
 * lets the assembler accept LDAPR regardless of the build's base ISA.
 */
#define __LOAD_RCPC(sfx, regs...)					\
	ALTERNATIVE(							\
		"ldar"	#sfx "\t" #regs,				\
	".arch_extension rcpc\n"					\
		"ldapr"	#sfx "\t" #regs,				\
	ARM64_HAS_LDAPR)

/*
 * When building with LTO, there is an increased risk of the compiler
 * converting an address dependency headed by a READ_ONCE() invocation
 * into a control dependency and consequently allowing for harmful
 * reordering by the CPU.
 *
 * Ensure that such transformations are harmless by overriding the generic
 * READ_ONCE() definition with one that provides RCpc acquire semantics
 * when building with LTO.
 */
#define __READ_ONCE(x)							\
({									\
	typeof(&(x)) __x = &(x);					\
	/* Set to 0 below if sizeof(x) has no single-load encoding. */	\
	int atomic = 1;							\
	/* __val strips qualifiers (e.g. volatile) from the result;	\
	 * the asm writes the loaded value through the __c alias.	\
	 */								\
	union { __unqual_scalar_typeof(*__x) __val; char __c[1]; } __u;	\
	switch (sizeof(x)) {						\
	case 1:								\
		asm volatile(__LOAD_RCPC(b, %w0, %1)			\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*__x) : "memory");			\
		break;							\
	case 2:								\
		asm volatile(__LOAD_RCPC(h, %w0, %1)			\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*__x) : "memory");			\
		break;							\
	case 4:								\
		asm volatile(__LOAD_RCPC(, %w0, %1)			\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*__x) : "memory");			\
		break;							\
	case 8:								\
		/* 64-bit uses the full %0 register, no size suffix. */	\
		asm volatile(__LOAD_RCPC(, %0, %1)			\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*__x) : "memory");			\
		break;							\
	default:							\
		/* Not 1/2/4/8 bytes: fall back to a plain volatile	\
		 * access with no acquire semantics (see below).	\
		 */							\
		atomic = 0;						\
	}								\
	atomic ? (typeof(*__x))__u.__val : (*(volatile typeof(__x))__x);\
})

#endif	/* !BUILD_VDSO */
#endif	/* CONFIG_LTO && !__ASSEMBLER__ */

/* Generic implementation; presumably it picks up the __READ_ONCE()
 * override above when that macro is already defined.
 */
#include <asm-generic/rwonce.h>

#endif	/* __ASM_RWONCE_H */