#ifndef __ARCH_X86_64_ATOMIC__
#define __ARCH_X86_64_ATOMIC__

#include <linux/config.h>

/* atomic_t should be 32 bit signed type */

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc..
 */

#ifdef CONFIG_SMP
#define LOCK "lock ; "
#else
#define LOCK ""
#endif

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		(((v)->counter) = (i))

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK "addl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK "subl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "subl %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK "incl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK "decl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "decl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}
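
/*
 * Usage sketch (editor's addition, not part of the original header):
 * the classic refcount-release pattern built on atomic_dec_and_test().
 * The locked decrement guarantees that exactly one CPU observes the
 * counter reaching zero, so exactly one caller frees the object.
 * "struct foo", foo_put() and the kfree() call are hypothetical names
 * used for illustration only.
 *
 *	struct foo {
 *		atomic_t refcnt;
 *	};
 *
 *	void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcnt))
 *			kfree(f);
 *	}
 */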

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "incl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "addl %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/* A 64-bit atomic type */

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v)	((v)->counter)

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)	(((v)->counter) = (i))

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK "addq %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK "subq %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic64_sub_and_test(long i, atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "subq %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic64_inc(atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK "incq %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic64_dec(atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK "decq %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic64_dec_and_test(atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "decq %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic64_inc_and_test(atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "incq %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @v: pointer to atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ long atomic64_add_negative(long i, atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "addq %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}
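
/*
 * Usage sketch (editor's addition, not part of the original header):
 * a 64-bit event counter, e.g. bytes transferred, where a 32-bit
 * atomic_t could plausibly wrap. "nr_bytes" and "len" are hypothetical
 * names. Note that atomic64_read() is a plain load and, as documented
 * above, implies no memory barrier.
 *
 *	static atomic64_t nr_bytes = ATOMIC64_INIT(0);
 *
 *	atomic64_add(len, &nr_bytes);
 *	...
 *	printk("total bytes: %ld\n", atomic64_read(&nr_bytes));
 */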

/**
 * atomic_add_return - add and return
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int __i = i;
	__asm__ __volatile__(
		LOCK "xaddl %0, %1;"
		:"=r"(i)
		:"m"(v->counter), "0"(i));
	/*
	 * xaddl leaves the old value of v->counter in %0 (i),
	 * so i + __i is the value after the addition.
	 */
	return i + __i;
}

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i,v);
}

#define atomic_inc_return(v)	(atomic_add_return(1,v))
#define atomic_dec_return(v)	(atomic_sub_return(1,v))

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr) \
__asm__ __volatile__(LOCK "andl %0,%1" \
: : "r" (~(mask)),"m" (*addr) : "memory")

#define atomic_set_mask(mask, addr) \
__asm__ __volatile__(LOCK "orl %0,%1" \
: : "r" ((unsigned)mask),"m" (*(addr)) : "memory")

/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#endif