Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

s390/cmpxchg,percpu: implement cmpxchg_double()

Signed-off-by: Heiko Carstens <heiko.carstens@de.ibm.com>
Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>

Authored by Heiko Carstens and committed by Martin Schwidefsky (commit hashes: b1d6b40c, ba6f5c2a).

+83 -1
+1
arch/s390/Kconfig
# arch/s390/Kconfig (excerpt) -- this commit adds HAVE_CMPXCHG_DOUBLE to
# advertise a native cmpxchg_double() implementation to generic code.
	select HAVE_MEMBLOCK
	select HAVE_MEMBLOCK_NODE_MAP
	select HAVE_CMPXCHG_LOCAL
	select HAVE_CMPXCHG_DOUBLE
	select ARCH_DISCARD_MEMBLOCK
	select BUILDTIME_EXTABLE_SORT
	select ARCH_INLINE_SPIN_TRYLOCK
+61
arch/s390/include/asm/cmpxchg.h
/*
 * arch/s390/include/asm/cmpxchg.h -- hunks added by this commit,
 * reconstructed from the diff; unchanged spans are marked as elided.
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/mmdebug.h>	/* new: VM_BUG_ON() used by cmpxchg_double() */
#include <linux/types.h>
#include <linux/bug.h>		/* new: BUILD_BUG_ON() used by cmpxchg_double() */

extern void __xchg_called_with_bad_pointer(void);

/* ... pre-existing xchg()/cmpxchg() definitions elided (untouched) ... */

/*
 * Issue one compare-double-and-swap instruction: atomically compare the
 * adjacent pair {*(p1), *(p2)} with {o1, o2} and, if equal, store
 * {n1, n2}.  insn is "cds" (4-byte operands) or "cdsg" (8-byte operands);
 * per z/Architecture these take their operands in even/odd register
 * pairs, hence the explicit bindings to r2/r3 and r4/r5 below.
 * ipm/srl extract the condition code; the expression evaluates to 1
 * (!cc, i.e. cc == 0) when the exchange happened, 0 otherwise.
 */
#define __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, insn)		\
({									\
	register __typeof__(*(p1)) __old1 asm("2") = (o1);		\
	register __typeof__(*(p2)) __old2 asm("3") = (o2);		\
	register __typeof__(*(p1)) __new1 asm("4") = (n1);		\
	register __typeof__(*(p2)) __new2 asm("5") = (n2);		\
	int cc;								\
	asm volatile(							\
		insn   " %[old],%[new],%[ptr]\n"			\
		"	ipm	%[cc]\n"				\
		"	srl	%[cc],28"				\
		: [cc] "=d" (cc), [old] "+d" (__old1), "+d" (__old2)	\
		: [new] "d" (__new1), "d" (__new2),			\
		  [ptr] "Q" (*(p1)), "Q" (*(p2))			\
		: "memory", "cc");					\
	!cc;								\
})

/* 4-byte and 8-byte instantiations of the operation above. */
#define __cmpxchg_double_4(p1, p2, o1, o2, n1, n2)			\
	__cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cds")

#define __cmpxchg_double_8(p1, p2, o1, o2, n1, n2)			\
	__cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cdsg")

/*
 * Deliberately never defined: referencing it from the default case below
 * turns an unsupported operand size into a link-time error (same idiom
 * as __xchg_called_with_bad_pointer() above).
 */
extern void __cmpxchg_double_called_with_bad_pointer(void);

/*
 * Size-dispatching core: select cds/cdsg by sizeof(*(p1)).
 * NOTE(review): __ret stays uninitialized on the default path, which is
 * fine only because that path cannot link -- see the extern above.
 */
#define __cmpxchg_double(p1, p2, o1, o2, n1, n2)			\
({									\
	int __ret;							\
	switch (sizeof(*(p1))) {					\
	case 4:								\
		__ret = __cmpxchg_double_4(p1, p2, o1, o2, n1, n2);	\
		break;							\
	case 8:								\
		__ret = __cmpxchg_double_8(p1, p2, o1, o2, n1, n2);	\
		break;							\
	default:							\
		__cmpxchg_double_called_with_bad_pointer();		\
	}								\
	__ret;								\
})

/*
 * Public cmpxchg_double(): both operands must be long-sized (enforced at
 * compile time) and the two locations must be adjacent in memory,
 * p2 == p1 + 1 (enforced by VM_BUG_ON on debug builds), since the
 * instruction operates on one naturally-contiguous double-width operand.
 * Evaluates to non-zero on successful exchange.
 */
#define cmpxchg_double(p1, p2, o1, o2, n1, n2)				\
({									\
	__typeof__(p1) __p1 = (p1);					\
	__typeof__(p2) __p2 = (p2);					\
	int __ret;							\
	BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));			\
	BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));			\
	VM_BUG_ON((unsigned long)((__p1) + 1) != (unsigned long)(__p2));\
	if (sizeof(long) == 4)						\
		__ret = __cmpxchg_double_4(__p1, __p2, o1, o2, n1, n2);	\
	else								\
		__ret = __cmpxchg_double_8(__p1, __p2, o1, o2, n1, n2);	\
	__ret;								\
})

/* Tell generic code (e.g. slub) that cmpxchg_double() is native here. */
#define system_has_cmpxchg_double()	1

#include <asm-generic/cmpxchg-local.h>

/* ... remainder of file unchanged ... */
+21 -1
arch/s390/include/asm/percpu.h
/*
 * arch/s390/include/asm/percpu.h -- hunks touched by this commit,
 * reconstructed from the diff; unchanged context is elided.  The
 * arch_this_cpu_cmpxchg() define line was modified only to fix the
 * whitespace before its line-continuation backslash; its body (which
 * runs past the visible diff context) is unchanged and elided here.
 */

#define this_cpu_xor_4(pcp, val)	arch_this_cpu_to_op(pcp, val, ^)
#define this_cpu_xor_8(pcp, val)	arch_this_cpu_to_op(pcp, val, ^)

/* ... arch_this_cpu_cmpxchg()/this_cpu_cmpxchg_*() elided (untouched) ... */

#define this_cpu_xchg_4(pcp, nval)	arch_this_cpu_xchg(pcp, nval)
#ifdef CONFIG_64BIT
#define this_cpu_xchg_8(pcp, nval)	arch_this_cpu_xchg(pcp, nval)
#endif

/*
 * Per-cpu double-word cmpxchg.  pcp1/pcp2 are two per-cpu variables that
 * must be adjacent in memory (__cmpxchg_double() VM_BUG_ONs otherwise).
 * Preemption is disabled so that both __this_cpu_ptr() translations and
 * the exchange refer to the same CPU's data; the compare-and-swap itself
 * is performed atomically by __cmpxchg_double().
 * Evaluates to non-zero when the exchange succeeded.
 */
#define arch_this_cpu_cmpxchg_double(pcp1, pcp2, o1, o2, n1, n2)	\
({									\
	typeof(pcp1) o1__ = (o1), n1__ = (n1);				\
	typeof(pcp2) o2__ = (o2), n2__ = (n2);				\
	typeof(pcp1) *p1__;						\
	typeof(pcp2) *p2__;						\
	int ret__;							\
	preempt_disable();						\
	p1__ = __this_cpu_ptr(&(pcp1));					\
	p2__ = __this_cpu_ptr(&(pcp2));					\
	ret__ = __cmpxchg_double(p1__, p2__, o1__, o2__, n1__, n2__);	\
	preempt_enable();						\
	ret__;								\
})

/*
 * Hook the generic this_cpu_cmpxchg_double_{4,8} entry points up to the
 * macro above; the 8-byte variant exists only on 64-bit builds.
 */
#define this_cpu_cmpxchg_double_4 arch_this_cpu_cmpxchg_double
#ifdef CONFIG_64BIT
#define this_cpu_cmpxchg_double_8 arch_this_cpu_cmpxchg_double
#endif

#include <asm-generic/percpu.h>