[PATCH] mm: fill arch atomic64 gaps

alpha, sparc64, x86_64 are each missing some primitives from their atomic64
support: fill in the gaps I've noticed by extrapolating from the existing asm,
following the groupings in each file. powerpc and parisc, however, still lack
atomic64 support entirely.

Signed-off-by: Hugh Dickins <hugh@veritas.com>
Cc: Richard Henderson <rth@twiddle.net>
Cc: Ivan Kokshaysky <ink@jurassic.park.msu.ru>
Cc: "David S. Miller" <davem@davemloft.net>
Cc: Andi Kleen <ak@muc.de>
Cc: Nick Piggin <nickpiggin@yahoo.com.au>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>

authored by Hugh Dickins and committed by Linus Torvalds 7c72aaf2 7ce774b4

+44 -15
+5 -2
include/asm-alpha/atomic.h
··· 118 118 return result; 119 119 } 120 120 121 - #define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0) 122 - 123 121 static __inline__ long atomic64_add_return(long i, atomic64_t * v) 124 122 { 125 123 long temp, result; ··· 187 189 }) 188 190 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0) 189 191 192 + #define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0) 193 + #define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0) 194 + 190 195 #define atomic_dec_return(v) atomic_sub_return(1,(v)) 191 196 #define atomic64_dec_return(v) atomic64_sub_return(1,(v)) 192 197 ··· 200 199 #define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0) 201 200 202 201 #define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0) 202 + #define atomic64_inc_and_test(v) (atomic64_add_return(1, (v)) == 0) 203 + 203 204 #define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0) 204 205 #define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0) 205 206
+1
include/asm-sparc64/atomic.h
··· 54 54 * other cases. 55 55 */ 56 56 #define atomic_inc_and_test(v) (atomic_inc_return(v) == 0) 57 + #define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0) 57 58 58 59 #define atomic_sub_and_test(i, v) (atomic_sub_ret(i, v) == 0) 59 60 #define atomic64_sub_and_test(i, v) (atomic64_sub_ret(i, v) == 0)
+38 -13
include/asm-x86_64/atomic.h
··· 160 160 161 161 /** 162 162 * atomic_add_negative - add and test if negative 163 - * @v: pointer of type atomic_t 164 163 * @i: integer value to add 164 + * @v: pointer of type atomic_t 165 165 * 166 166 * Atomically adds @i to @v and returns true 167 167 * if the result is negative, or false when ··· 177 177 :"ir" (i), "m" (v->counter) : "memory"); 178 178 return c; 179 179 } 180 + 181 + /** 182 + * atomic_add_return - add and return 183 + * @i: integer value to add 184 + * @v: pointer of type atomic_t 185 + * 186 + * Atomically adds @i to @v and returns @i + @v 187 + */ 188 + static __inline__ int atomic_add_return(int i, atomic_t *v) 189 + { 190 + int __i = i; 191 + __asm__ __volatile__( 192 + LOCK "xaddl %0, %1;" 193 + :"=r"(i) 194 + :"m"(v->counter), "0"(i)); 195 + return i + __i; 196 + } 197 + 198 + static __inline__ int atomic_sub_return(int i, atomic_t *v) 199 + { 200 + return atomic_add_return(-i,v); 201 + } 202 + 203 + #define atomic_inc_return(v) (atomic_add_return(1,v)) 204 + #define atomic_dec_return(v) (atomic_sub_return(1,v)) 180 205 181 206 /* An 64bit atomic type */ 182 207 ··· 345 320 346 321 /** 347 322 * atomic64_add_negative - add and test if negative 348 - * @v: pointer to atomic64_t 349 323 * @i: integer value to add 324 + * @v: pointer to type atomic64_t 350 325 * 351 326 * Atomically adds @i to @v and returns true 352 327 * if the result is negative, or false when 353 328 * result is greater than or equal to zero. 
354 329 */ 355 - static __inline__ long atomic64_add_negative(long i, atomic64_t *v) 330 + static __inline__ int atomic64_add_negative(long i, atomic64_t *v) 356 331 { 357 332 unsigned char c; 358 333 ··· 364 339 } 365 340 366 341 /** 367 - * atomic_add_return - add and return 368 - * @v: pointer of type atomic_t 342 + * atomic64_add_return - add and return 369 343 * @i: integer value to add 344 + * @v: pointer to type atomic64_t 370 345 * 371 346 * Atomically adds @i to @v and returns @i + @v 372 347 */ 373 - static __inline__ int atomic_add_return(int i, atomic_t *v) 348 + static __inline__ long atomic64_add_return(long i, atomic64_t *v) 374 349 { 375 - int __i = i; 350 + long __i = i; 376 351 __asm__ __volatile__( 377 - LOCK "xaddl %0, %1;" 352 + LOCK "xaddq %0, %1;" 378 353 :"=r"(i) 379 354 :"m"(v->counter), "0"(i)); 380 355 return i + __i; 381 356 } 382 357 383 - static __inline__ int atomic_sub_return(int i, atomic_t *v) 358 + static __inline__ long atomic64_sub_return(long i, atomic64_t *v) 384 359 { 385 - return atomic_add_return(-i,v); 360 + return atomic64_add_return(-i,v); 386 361 } 362 + 363 + #define atomic64_inc_return(v) (atomic64_add_return(1,v)) 364 + #define atomic64_dec_return(v) (atomic64_sub_return(1,v)) 387 365 388 366 #define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new)) 389 367 ··· 408 380 c != (u); \ 409 381 }) 410 382 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0) 411 - 412 - #define atomic_inc_return(v) (atomic_add_return(1,v)) 413 - #define atomic_dec_return(v) (atomic_sub_return(1,v)) 414 383 415 384 /* These are x86-specific, used by some header files */ 416 385 #define atomic_clear_mask(mask, addr) \