Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

mips: define __smp_xxx

This defines __smp_xxx barriers for mips,
for use by virtualization.

smp_xxx barriers are removed as they are
defined correctly by asm-generic/barrier.h

Note: the only exception is smp_mb__before_llsc which is mips-specific.
We define both the __smp_mb__before_llsc variant (for use in
asm/barrier.h) and smp_mb__before_llsc (for use elsewhere on this
architecture).

Signed-off-by: Michael S. Tsirkin <mst@redhat.com>
Acked-by: Arnd Bergmann <arnd@arndb.de>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>

+14 -12
+14 -12
arch/mips/include/asm/barrier.h
··· 85 85 #define wmb() fast_wmb() 86 86 #define rmb() fast_rmb() 87 87 88 - #if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP) 88 + #if defined(CONFIG_WEAK_ORDERING) 89 89 # ifdef CONFIG_CPU_CAVIUM_OCTEON 90 - # define smp_mb() __sync() 91 - # define smp_rmb() barrier() 92 - # define smp_wmb() __syncw() 90 + # define __smp_mb() __sync() 91 + # define __smp_rmb() barrier() 92 + # define __smp_wmb() __syncw() 93 93 # else 94 - # define smp_mb() __asm__ __volatile__("sync" : : :"memory") 95 - # define smp_rmb() __asm__ __volatile__("sync" : : :"memory") 96 - # define smp_wmb() __asm__ __volatile__("sync" : : :"memory") 94 + # define __smp_mb() __asm__ __volatile__("sync" : : :"memory") 95 + # define __smp_rmb() __asm__ __volatile__("sync" : : :"memory") 96 + # define __smp_wmb() __asm__ __volatile__("sync" : : :"memory") 97 97 # endif 98 98 #else 99 - #define smp_mb() barrier() 100 - #define smp_rmb() barrier() 101 - #define smp_wmb() barrier() 99 + #define __smp_mb() barrier() 100 + #define __smp_rmb() barrier() 101 + #define __smp_wmb() barrier() 102 102 #endif 103 103 104 104 #if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP) ··· 111 111 112 112 #ifdef CONFIG_CPU_CAVIUM_OCTEON 113 113 #define smp_mb__before_llsc() smp_wmb() 114 + #define __smp_mb__before_llsc() __smp_wmb() 114 115 /* Cause previous writes to become visible on all CPUs as soon as possible */ 115 116 #define nudge_writes() __asm__ __volatile__(".set push\n\t" \ 116 117 ".set arch=octeon\n\t" \ ··· 119 118 ".set pop" : : : "memory") 120 119 #else 121 120 #define smp_mb__before_llsc() smp_llsc_mb() 121 + #define __smp_mb__before_llsc() smp_llsc_mb() 122 122 #define nudge_writes() mb() 123 123 #endif 124 124 125 - #define smp_mb__before_atomic() smp_mb__before_llsc() 126 - #define smp_mb__after_atomic() smp_llsc_mb() 125 + #define __smp_mb__before_atomic() __smp_mb__before_llsc() 126 + #define __smp_mb__after_atomic() smp_llsc_mb() 127 127 128 128 #include 
<asm-generic/barrier.h> 129 129