/* $Id: atomic.h,v 1.3 2001/07/25 16:15:19 bjornw Exp $ */

#ifndef __ASM_CRIS_ATOMIC__
#define __ASM_CRIS_ATOMIC__

#include <linux/compiler.h>

#include <asm/system.h>
#include <asm/arch/atomic.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)  { (i) }

#define atomic_read(v) ((v)->counter)
#define atomic_set(v,i) (((v)->counter) = (i))

/* These should be written in asm but we do it in C for now. */

static inline void atomic_add(int i, volatile atomic_t *v)
{
	unsigned long flags;
	cris_atomic_save(v, flags);
	v->counter += i;
	cris_atomic_restore(v, flags);
}

static inline void atomic_sub(int i, volatile atomic_t *v)
{
	unsigned long flags;
	cris_atomic_save(v, flags);
	v->counter -= i;
	cris_atomic_restore(v, flags);
}

static inline int atomic_add_return(int i, volatile atomic_t *v)
{
	unsigned long flags;
	int retval;
	cris_atomic_save(v, flags);
	retval = (v->counter += i);
	cris_atomic_restore(v, flags);
	return retval;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static inline int atomic_sub_return(int i, volatile atomic_t *v)
{
	unsigned long flags;
	int retval;
	cris_atomic_save(v, flags);
	retval = (v->counter -= i);
	cris_atomic_restore(v, flags);
	return retval;
}

static inline int atomic_sub_and_test(int i, volatile atomic_t *v)
{
	int retval;
	unsigned long flags;
	cris_atomic_save(v, flags);
	retval = (v->counter -= i) == 0;
	cris_atomic_restore(v, flags);
	return retval;
}

static inline void atomic_inc(volatile atomic_t *v)
{
	unsigned long flags;
	cris_atomic_save(v, flags);
	(v->counter)++;
	cris_atomic_restore(v, flags);
}

static inline void atomic_dec(volatile atomic_t *v)
{
	unsigned long flags;
	cris_atomic_save(v, flags);
	(v->counter)--;
	cris_atomic_restore(v, flags);
}

static inline int atomic_inc_return(volatile atomic_t *v)
{
	unsigned long flags;
	int retval;
	cris_atomic_save(v, flags);
	retval = ++(v->counter);
	cris_atomic_restore(v, flags);
	return retval;
}

static inline int atomic_dec_return(volatile atomic_t *v)
{
	unsigned long flags;
	int retval;
	cris_atomic_save(v, flags);
	retval = --(v->counter);
	cris_atomic_restore(v, flags);
	return retval;
}

static inline int atomic_dec_and_test(volatile atomic_t *v)
{
	int retval;
	unsigned long flags;
	cris_atomic_save(v, flags);
	retval = --(v->counter) == 0;
	cris_atomic_restore(v, flags);
	return retval;
}

static inline int atomic_inc_and_test(volatile atomic_t *v)
{
	int retval;
	unsigned long flags;
	cris_atomic_save(v, flags);
	retval = ++(v->counter) == 0;
	cris_atomic_restore(v, flags);
	return retval;
}

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	cris_atomic_save(v, flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	cris_atomic_restore(v, flags);
	return ret;
}

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	cris_atomic_save(v, flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	cris_atomic_restore(v, flags);
	return ret != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()    barrier()
#define smp_mb__after_atomic_dec()     barrier()
#define smp_mb__before_atomic_inc()    barrier()
#define smp_mb__after_atomic_inc()     barrier()

#include <asm-generic/atomic.h>

#endif
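
For illustration only (not part of the header above): a minimal userspace sketch of the atomic_add_unless()/atomic_inc_not_zero() semantics, using a C11 <stdatomic.h> compare-and-swap loop in place of the CRIS cris_atomic_save()/cris_atomic_restore() critical section. The names demo_add_unless and demo_inc_not_zero are hypothetical, invented for this sketch.

/* Illustrative userspace analogue of atomic_add_unless(); not kernel code.
 * The whole test-and-update must be one atomic step -- here that is done
 * with a compare-and-swap loop instead of the CRIS interrupt/lock scheme. */
#include <stdatomic.h>
#include <stdio.h>

/* Add 'a' to *v unless *v == u; return nonzero iff the add happened. */
static int demo_add_unless(atomic_int *v, int a, int u)
{
	int old = atomic_load(v);

	while (old != u) {
		/* On failure, 'old' is reloaded with the current value
		 * and we retry; on success the add has taken effect. */
		if (atomic_compare_exchange_weak(v, &old, old + a))
			return 1;
	}
	return 0;	/* counter already held the forbidden value u */
}

#define demo_inc_not_zero(v) demo_add_unless((v), 1, 0)

int main(void)
{
	atomic_int refcount = 1;

	/* Succeeds: refcount was 1, becomes 2. */
	printf("grab while live: %d\n", demo_inc_not_zero(&refcount));

	atomic_store(&refcount, 0);

	/* Fails: a refcount that has reached 0 must not be revived. */
	printf("grab after drop: %d\n", demo_inc_not_zero(&refcount));
	return 0;
}

As in the kernel macro, demo_inc_not_zero() is the refcount-grabbing idiom: it only increments a counter that has not yet dropped to zero, which is why the return value reports whether the add actually happened.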