Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

ARM: barrier: allow options to be passed to memory barrier instructions

On ARMv7, the memory barrier instructions take an optional `option'
field which can be used to constrain the effects of a memory barrier
based on shareability and access type.

This patch allows the caller to pass these options if required, and
updates the smp_*() barriers to request inner-shareable barriers,
affecting only stores for the _wmb variant. wmb() is also changed to
use the -st version of dsb.

Reported-by: Albin Tonnerre <albin.tonnerre@arm.com>
Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>

+18 -18
+2 -2
arch/arm/include/asm/assembler.h
···
220 220   #ifdef CONFIG_SMP
221 221   #if __LINUX_ARM_ARCH__ >= 7
222 222   	.ifeqs "\mode","arm"
223     - 	ALT_SMP(dmb)
    223 + 	ALT_SMP(dmb ish)
224 224   	.else
225     - 	ALT_SMP(W(dmb))
    225 + 	ALT_SMP(W(dmb) ish)
226 226   	.endif
227 227   #elif __LINUX_ARM_ARCH__ == 6
228 228   	ALT_SMP(mcr p15, 0, r0, c7, c10, 5)	@ dmb
+16 -16
arch/arm/include/asm/barrier.h
···
 14  14   #endif
 15  15   
 16  16   #if __LINUX_ARM_ARCH__ >= 7
 17      - #define isb() __asm__ __volatile__ ("isb" : : : "memory")
 18      - #define dsb() __asm__ __volatile__ ("dsb" : : : "memory")
 19      - #define dmb() __asm__ __volatile__ ("dmb" : : : "memory")
     17 + #define isb(option) __asm__ __volatile__ ("isb " #option : : : "memory")
     18 + #define dsb(option) __asm__ __volatile__ ("dsb " #option : : : "memory")
     19 + #define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")
 20  20   #elif defined(CONFIG_CPU_XSC3) || __LINUX_ARM_ARCH__ == 6
 21      - #define isb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
     21 + #define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
 22  22   				    : : "r" (0) : "memory")
 23      - #define dsb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
     23 + #define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
 24  24   				    : : "r" (0) : "memory")
 25      - #define dmb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
     25 + #define dmb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
 26  26   				    : : "r" (0) : "memory")
 27  27   #elif defined(CONFIG_CPU_FA526)
 28      - #define isb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
     28 + #define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
 29  29   				    : : "r" (0) : "memory")
 30      - #define dsb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
     30 + #define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
 31  31   				    : : "r" (0) : "memory")
 32      - #define dmb() __asm__ __volatile__ ("" : : : "memory")
     32 + #define dmb(x) __asm__ __volatile__ ("" : : : "memory")
 33  33   #else
 34      - #define isb() __asm__ __volatile__ ("" : : : "memory")
 35      - #define dsb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
     34 + #define isb(x) __asm__ __volatile__ ("" : : : "memory")
     35 + #define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
 36  36   				    : : "r" (0) : "memory")
 37      - #define dmb() __asm__ __volatile__ ("" : : : "memory")
     37 + #define dmb(x) __asm__ __volatile__ ("" : : : "memory")
 38  38   #endif
 39  39   
 40  40   #ifdef CONFIG_ARCH_HAS_BARRIERS
···
 42  42   #elif defined(CONFIG_ARM_DMA_MEM_BUFFERABLE) || defined(CONFIG_SMP)
 43  43   #define mb()		do { dsb(); outer_sync(); } while (0)
 44  44   #define rmb()		dsb()
 45      - #define wmb()		mb()
     45 + #define wmb()		do { dsb(st); outer_sync(); } while (0)
 46  46   #else
 47  47   #define mb()		barrier()
 48  48   #define rmb()		barrier()
···
 54  54   #define smp_rmb()	barrier()
 55  55   #define smp_wmb()	barrier()
 56  56   #else
 57      - #define smp_mb()	dmb()
 58      - #define smp_rmb()	dmb()
 59      - #define smp_wmb()	dmb()
     57 + #define smp_mb()	dmb(ish)
     58 + #define smp_rmb()	smp_mb()
     59 + #define smp_wmb()	dmb(ishst)
 60  60   #endif
 61  61   
 62  62   #define read_barrier_depends()		do { } while(0)