/*
 * include/asm-v850/atomic.h -- Atomic operations
 *
 * Copyright (C) 2001,02  NEC Corporation
 * Copyright (C) 2001,02  Miles Bader <miles@gnu.org>
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License.  See the file COPYING in the main directory of this
 * archive for more details.
 *
 * Written by Miles Bader <miles@gnu.org>
 */

#ifndef __V850_ATOMIC_H__
#define __V850_ATOMIC_H__


#include <asm/system.h>

#ifdef CONFIG_SMP
#error SMP not supported
#endif

typedef struct { int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

#ifdef __KERNEL__

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

/* The v850 port is uniprocessor-only (see the CONFIG_SMP check above),
   so disabling interrupts around the read-modify-write sequence is
   enough to make these operations atomic.  */
static inline int atomic_add_return (int i, volatile atomic_t *v)
{
	unsigned long flags;
	int res;

	local_irq_save (flags);
	res = v->counter + i;
	v->counter = res;
	local_irq_restore (flags);

	return res;
}

static inline int atomic_sub_return (int i, volatile atomic_t *v)
{
	unsigned long flags;
	int res;

	local_irq_save (flags);
	res = v->counter - i;
	v->counter = res;
	local_irq_restore (flags);

	return res;
}

static inline void atomic_clear_mask (unsigned long mask, unsigned long *addr)
{
	unsigned long flags;

	local_irq_save (flags);
	*addr &= ~mask;
	local_irq_restore (flags);
}

#endif /* __KERNEL__ */

#define atomic_add(i, v)	atomic_add_return ((i), (v))
#define atomic_sub(i, v)	atomic_sub_return ((i), (v))

#define atomic_dec_return(v)	atomic_sub_return (1, (v))
#define atomic_inc_return(v)	atomic_add_return (1, (v))
#define atomic_inc(v)		atomic_inc_return (v)
#define atomic_dec(v)		atomic_dec_return (v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)		(atomic_inc_return (v) == 0)

#define atomic_sub_and_test(i,v)	(atomic_sub_return ((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return (1, (v)) == 0)
#define atomic_add_negative(i,v)	(atomic_add_return ((i), (v)) < 0)

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);

	return ret;
}

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/* Atomically adds @a to @v, unless @v was already @u; returns nonzero
   if the add was performed.  */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);

	return ret != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

/* Atomic operations are already serializing on the v850 (uniprocessor
   only), so a compiler barrier suffices here.  */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>
#endif /* __V850_ATOMIC_H__ */
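
/*
 * Usage sketch (editorial addition, not part of the original header):
 * a hypothetical reference-counted object built on the primitives
 * above, showing the intended pairing of atomic_inc_not_zero() and
 * atomic_dec_and_test().  The names my_obj, my_obj_new, my_obj_get,
 * and my_obj_put are illustrative only; kmalloc()/kfree() assume a
 * kernel context with <linux/slab.h> available.  Guarded with #if 0
 * so it is never compiled as part of this header.
 */
#if 0
struct my_obj {
	atomic_t refcount;
	/* ... payload ... */
};

static inline struct my_obj *my_obj_new (void)
{
	struct my_obj *obj = kmalloc (sizeof *obj, GFP_KERNEL);
	if (obj)
		atomic_set (&obj->refcount, 1);	/* caller holds the first ref */
	return obj;
}

static inline int my_obj_get (struct my_obj *obj)
{
	/* Fails (returns 0) if the count has already dropped to zero,
	   i.e. the object is being torn down, so a stale pointer can
	   never be revived.  */
	return atomic_inc_not_zero (&obj->refcount);
}

static inline void my_obj_put (struct my_obj *obj)
{
	/* atomic_dec_and_test() returns true exactly when the count
	   reaches zero, so exactly one caller frees the object.  */
	if (atomic_dec_and_test (&obj->refcount))
		kfree (obj);
}
#endif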