/*
 * Generic C implementation of atomic counter operations. Usable on
 * UP systems only. Do not include in machine independent code.
 *
 * Originally implemented for MN10300.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#ifdef CONFIG_SMP
/* Force people to define core atomics */
# if !defined(atomic_add_return) || !defined(atomic_sub_return) || \
     !defined(atomic_clear_mask) || !defined(atomic_set_mask)
#  error "SMP requires a little arch-specific magic"
# endif
#endif

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

#ifdef __KERNEL__

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#ifndef atomic_read
#define atomic_read(v)	(*(volatile int *)&(v)->counter)
#endif

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i) (((v)->counter) = (i))

#include <linux/irqflags.h>
#include <asm/system.h>

/**
 * atomic_add_return - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns the result
 */
#ifndef atomic_add_return
static inline int atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int temp;

	raw_local_irq_save(flags); /* Don't trace it in an irqsoff handler */
	temp = v->counter;
	temp += i;
	v->counter = temp;
	raw_local_irq_restore(flags);

	return temp;
}
#endif

/**
 * atomic_sub_return - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns the result
 */
#ifndef atomic_sub_return
static inline int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long flags;
	int temp;

	raw_local_irq_save(flags); /* Don't trace it in an irqsoff handler */
	temp = v->counter;
	temp -= i;
	v->counter = temp;
	raw_local_irq_restore(flags);

	return temp;
}
#endif

/* Returns true if the result of adding @i to @v is negative. */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

/* Non-value-returning variants, built on the *_return primitives above. */
static inline void atomic_add(int i, atomic_t *v)
{
	atomic_add_return(i, v);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	atomic_sub_return(i, v);
}

static inline void atomic_inc(atomic_t *v)
{
	atomic_add_return(1, v);
}

static inline void atomic_dec(atomic_t *v)
{
	atomic_sub_return(1, v);
}

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return(v) == 0)
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)

#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))

#define cmpxchg_local(ptr, o, n)					       \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))

#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

/**
 * __atomic_add_unless - add to atomic variable unless it has a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to @v...
 * @u: ...unless @v is equal to @u
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
		c = old;
	return c;
}

/**
 * atomic_clear_mask - Atomically clear bits in atomic variable
 * @mask: Mask of the bits to be cleared
 * @v: pointer of type atomic_t
 *
 * Atomically clears the bits set in @mask from @v
 */
#ifndef atomic_clear_mask
static inline void atomic_clear_mask(unsigned long mask, atomic_t *v)
{
	unsigned long flags;

	mask = ~mask;
	raw_local_irq_save(flags); /* Don't trace it in an irqsoff handler */
	v->counter &= mask;
	raw_local_irq_restore(flags);
}
#endif

/**
 * atomic_set_mask - Atomically set bits in atomic variable
 * @mask: Mask of the bits to be set
 * @v: pointer of type atomic_t
 *
 * Atomically sets the bits set in @mask in @v
 */
#ifndef atomic_set_mask
static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags); /* Don't trace it in an irqsoff handler */
	v->counter |= mask;
	raw_local_irq_restore(flags);
}
#endif

/* Assume that atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#endif /* __KERNEL__ */
#endif /* __ASM_GENERIC_ATOMIC_H */
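For context, a usage sketch follows; it is not part of the header above. It shows the typical consumer of these helpers, a simple reference count on a UP build, using only operations defined by this file plus kzalloc()/kfree() from <linux/slab.h>. The names my_buf, my_buf_alloc, my_buf_get and my_buf_put are invented for illustration and are not part of any kernel API.

#include <linux/atomic.h>	/* pulls in the arch atomic.h, possibly this file */
#include <linux/slab.h>

/* Hypothetical reference-counted object. */
struct my_buf {
	atomic_t	refcount;
	void		*data;
};

static struct my_buf *my_buf_alloc(size_t size)
{
	struct my_buf *b = kzalloc(sizeof(*b), GFP_KERNEL);

	if (!b)
		return NULL;
	b->data = kzalloc(size, GFP_KERNEL);
	if (!b->data) {
		kfree(b);
		return NULL;
	}
	atomic_set(&b->refcount, 1);	/* caller holds the first reference */
	return b;
}

static void my_buf_get(struct my_buf *b)
{
	atomic_inc(&b->refcount);	/* take an additional reference */
}

static void my_buf_put(struct my_buf *b)
{
	/* Drop one reference; free the object when the count reaches zero. */
	if (atomic_dec_and_test(&b->refcount)) {
		kfree(b->data);
		kfree(b);
	}
}

Because the generic atomic_inc()/atomic_dec_and_test() above are only interrupt-safe (they disable local interrupts rather than using SMP-safe instructions), this pattern is correct only on uniprocessor configurations, which is exactly the restriction stated in the header's top comment.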