/* Source: Linux kernel v2.6.16, include/asm-v850/atomic.h (132 lines, 3.0 kB) */
1/* 2 * include/asm-v850/atomic.h -- Atomic operations 3 * 4 * Copyright (C) 2001,02 NEC Corporation 5 * Copyright (C) 2001,02 Miles Bader <miles@gnu.org> 6 * 7 * This file is subject to the terms and conditions of the GNU General 8 * Public License. See the file COPYING in the main directory of this 9 * archive for more details. 10 * 11 * Written by Miles Bader <miles@gnu.org> 12 */ 13 14#ifndef __V850_ATOMIC_H__ 15#define __V850_ATOMIC_H__ 16 17#include <linux/config.h> 18 19#include <asm/system.h> 20 21#ifdef CONFIG_SMP 22#error SMP not supported 23#endif 24 25typedef struct { int counter; } atomic_t; 26 27#define ATOMIC_INIT(i) { (i) } 28 29#ifdef __KERNEL__ 30 31#define atomic_read(v) ((v)->counter) 32#define atomic_set(v,i) (((v)->counter) = (i)) 33 34static inline int atomic_add_return (int i, volatile atomic_t *v) 35{ 36 unsigned long flags; 37 int res; 38 39 local_irq_save (flags); 40 res = v->counter + i; 41 v->counter = res; 42 local_irq_restore (flags); 43 44 return res; 45} 46 47static __inline__ int atomic_sub_return (int i, volatile atomic_t *v) 48{ 49 unsigned long flags; 50 int res; 51 52 local_irq_save (flags); 53 res = v->counter - i; 54 v->counter = res; 55 local_irq_restore (flags); 56 57 return res; 58} 59 60static __inline__ void atomic_clear_mask (unsigned long mask, unsigned long *addr) 61{ 62 unsigned long flags; 63 64 local_irq_save (flags); 65 *addr &= ~mask; 66 local_irq_restore (flags); 67} 68 69#endif 70 71#define atomic_add(i, v) atomic_add_return ((i), (v)) 72#define atomic_sub(i, v) atomic_sub_return ((i), (v)) 73 74#define atomic_dec_return(v) atomic_sub_return (1, (v)) 75#define atomic_inc_return(v) atomic_add_return (1, (v)) 76#define atomic_inc(v) atomic_inc_return (v) 77#define atomic_dec(v) atomic_dec_return (v) 78 79/* 80 * atomic_inc_and_test - increment and test 81 * @v: pointer of type atomic_t 82 * 83 * Atomically increments @v by 1 84 * and returns true if the result is zero, or false for all 85 * other cases. 
86 */ 87#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0) 88 89#define atomic_sub_and_test(i,v) (atomic_sub_return ((i), (v)) == 0) 90#define atomic_dec_and_test(v) (atomic_sub_return (1, (v)) == 0) 91#define atomic_add_negative(i,v) (atomic_add_return ((i), (v)) < 0) 92 93static inline int atomic_cmpxchg(atomic_t *v, int old, int new) 94{ 95 int ret; 96 unsigned long flags; 97 98 local_irq_save(flags); 99 ret = v->counter; 100 if (likely(ret == old)) 101 v->counter = new; 102 local_irq_restore(flags); 103 104 return ret; 105} 106 107#define atomic_xchg(v, new) (xchg(&((v)->counter), new)) 108 109static inline int atomic_add_unless(atomic_t *v, int a, int u) 110{ 111 int ret; 112 unsigned long flags; 113 114 local_irq_save(flags); 115 ret = v->counter; 116 if (ret != u) 117 v->counter += a; 118 local_irq_restore(flags); 119 120 return ret != u; 121} 122 123#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0) 124 125/* Atomic operations are already serializing on ARM */ 126#define smp_mb__before_atomic_dec() barrier() 127#define smp_mb__after_atomic_dec() barrier() 128#define smp_mb__before_atomic_inc() barrier() 129#define smp_mb__after_atomic_inc() barrier() 130 131#include <asm-generic/atomic.h> 132#endif /* __V850_ATOMIC_H__ */