/* atomic.h: Thankfully the V9 is at least reasonable for this
 * stuff.
 *
 * Copyright (C) 1996, 1997, 2000 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_ATOMIC__
#define __ARCH_SPARC64_ATOMIC__

#include <linux/types.h>
#include <asm/system.h>

typedef struct { volatile int counter; } atomic_t;
typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC_INIT(i) { (i) }
#define ATOMIC64_INIT(i) { (i) }

#define atomic_read(v) ((v)->counter)
#define atomic64_read(v) ((v)->counter)

#define atomic_set(v, i) (((v)->counter) = i)
#define atomic64_set(v, i) (((v)->counter) = i)

extern void atomic_add(int, atomic_t *);
extern void atomic64_add(int, atomic64_t *);
extern void atomic_sub(int, atomic_t *);
extern void atomic64_sub(int, atomic64_t *);

extern int atomic_add_ret(int, atomic_t *);
extern int atomic64_add_ret(int, atomic64_t *);
extern int atomic_sub_ret(int, atomic_t *);
extern int atomic64_sub_ret(int, atomic64_t *);

#define atomic_dec_return(v) atomic_sub_ret(1, v)
#define atomic64_dec_return(v) atomic64_sub_ret(1, v)

#define atomic_inc_return(v) atomic_add_ret(1, v)
#define atomic64_inc_return(v) atomic64_add_ret(1, v)

#define atomic_sub_return(i, v) atomic_sub_ret(i, v)
#define atomic64_sub_return(i, v) atomic64_sub_ret(i, v)

#define atomic_add_return(i, v) atomic_add_ret(i, v)
#define atomic64_add_return(i, v) atomic64_add_ret(i, v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

#define atomic_sub_and_test(i, v) (atomic_sub_ret(i, v) == 0)
#define atomic64_sub_and_test(i, v) (atomic64_sub_ret(i, v) == 0)

#define atomic_dec_and_test(v) (atomic_sub_ret(1, v) == 0)
#define atomic64_dec_and_test(v) (atomic64_sub_ret(1, v) == 0)

#define atomic_inc(v) atomic_add(1, v)
#define atomic64_inc(v) atomic64_add(1, v)

#define atomic_dec(v) atomic_sub(1, v)
#define atomic64_dec(v) atomic64_sub(1, v)

#define atomic_add_negative(i, v) (atomic_add_ret(i, v) < 0)
#define atomic64_add_negative(i, v) (atomic64_add_ret(i, v) < 0)

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
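
/* Usage sketch (illustrative only, not part of this header):
 * atomic_inc_not_zero() is the usual building block for "take a
 * reference only if the object is still alive".  A hypothetical
 * helper could look like:
 *
 *	static inline int obj_tryget(struct obj *p)
 *	{
 *		return atomic_inc_not_zero(&p->refcnt);
 *	}
 *
 * The cmpxchg() retry loop in atomic_add_unless() above guarantees
 * the increment never fires once the counter has reached zero, so a
 * dying object is never resurrected.
 */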

/* Atomic operations are already serializing */
#ifdef CONFIG_SMP
#define smp_mb__before_atomic_dec() membar_storeload_loadload();
#define smp_mb__after_atomic_dec() membar_storeload_storestore();
#define smp_mb__before_atomic_inc() membar_storeload_loadload();
#define smp_mb__after_atomic_inc() membar_storeload_storestore();
#else
#define smp_mb__before_atomic_dec() barrier()
#define smp_mb__after_atomic_dec() barrier()
#define smp_mb__before_atomic_inc() barrier()
#define smp_mb__after_atomic_inc() barrier()
#endif

#include <asm-generic/atomic.h>

#endif /* !(__ARCH_SPARC64_ATOMIC__) */
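
/* Illustrative pairing (a sketch of the intended call-site pattern; the
 * obj fields are hypothetical): atomic_dec() returns no value and so does
 * not imply a memory barrier, which is why the hooks above exist.  A
 * caller that must order a prior store before the decrement writes:
 *
 *	obj->dead = 1;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->ref_count);
 *
 * On SMP these expand to real membar instructions; on UP a compiler
 * barrier() suffices.
 */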