#ifndef __ASM_SH_ATOMIC_H
#define __ASM_SH_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	( (atomic_t) { (i) } )

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		((v)->counter = (i))

#include <asm/system.h>

/*
 * On SH these operations are implemented by briefly disabling local
 * interrupts around a plain read-modify-write, which is sufficient to
 * make them atomic on uniprocessor systems.
 */

static __inline__ void atomic_add(int i, atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	*(long *)v += i;
	local_irq_restore(flags);
}

static __inline__ void atomic_sub(int i, atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	*(long *)v -= i;
	local_irq_restore(flags);
}

static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	unsigned long temp, flags;

	local_irq_save(flags);
	temp = *(long *)v;
	temp += i;
	*(long *)v = temp;
	local_irq_restore(flags);

	return temp;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long temp, flags;

	local_irq_save(flags);
	temp = *(long *)v;
	temp -= i;
	*(long *)v = temp;
	local_irq_restore(flags);

	return temp;
}

#define atomic_dec_return(v)	atomic_sub_return(1, (v))
#define atomic_inc_return(v)	atomic_add_return(1, (v))

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)

#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)

#define atomic_inc(v)			atomic_add(1, (v))
#define atomic_dec(v)			atomic_sub(1, (v))

static __inline__ void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	*(long *)v &= ~mask;
	local_irq_restore(flags);
}

static __inline__ void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	*(long *)v |= mask;
	local_irq_restore(flags);
}

/* Atomic operations are already serializing on SH */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#endif /* __ASM_SH_ATOMIC_H */
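/*
 * Usage sketch (not part of the original header): a minimal example of the
 * "resource counting" use mentioned in the comment at the top of this file.
 * The structure and helper names below (struct sh_res, sh_res_init,
 * sh_res_get, sh_res_put, sh_res_release) are hypothetical and exist only
 * for illustration, so the whole block is guarded out with #if 0.
 */
#if 0	/* illustrative only, never compiled */

struct sh_res {
	atomic_t refcount;		/* number of active users */
	/* ... resource data ... */
};

/* Assumed to exist elsewhere: frees the resource once it is unused. */
extern void sh_res_release(struct sh_res *r);

static inline void sh_res_init(struct sh_res *r)
{
	atomic_set(&r->refcount, 1);	/* creator starts with one reference */
}

static inline void sh_res_get(struct sh_res *r)
{
	atomic_inc(&r->refcount);	/* take an additional reference */
}

static inline void sh_res_put(struct sh_res *r)
{
	/* Drop a reference; release the object when the count reaches zero. */
	if (atomic_dec_and_test(&r->refcount))
		sh_res_release(r);
}

#endif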