/*
 * Copyright (C) 2004-2006 Atmel Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef __ASM_AVR32_BITOPS_H
#define __ASM_AVR32_BITOPS_H

#include <asm/byteorder.h>
#include <asm/system.h>

/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()

/*
 * All the atomic operations below use the same retry loop: "ssrf 5"
 * sets the lock (L) flag in the status register, "stcond" performs the
 * store only if L is still set, and any intervening interrupt clears L,
 * so "brne 1b" retries until the load/modify/store sequence has run
 * without interruption.
 */

/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 *
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(int nr, volatile void *addr)
{
	unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
	unsigned long tmp;

	if (__builtin_constant_p(nr)) {
		/*
		 * Single-instruction sbr form; reduce @nr modulo
		 * BITS_PER_LONG so the immediate stays in 0..31.
		 */
		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%0, %2\n"
			"	sbr	%0, %3\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p)
			: "m"(*p), "i"(nr % BITS_PER_LONG)
			: "cc");
	} else {
		unsigned long mask = 1UL << (nr % BITS_PER_LONG);

		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%0, %2\n"
			"	or	%0, %3\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p)
			: "m"(*p), "r"(mask)
			: "cc");
	}
}

/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_clear_bit() and/or
 * smp_mb__after_clear_bit() in order to ensure changes are visible on
 * other processors.
 */
static inline void clear_bit(int nr, volatile void *addr)
{
	unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
	unsigned long tmp;

	if (__builtin_constant_p(nr)) {
		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%0, %2\n"
			"	cbr	%0, %3\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p)
			: "m"(*p), "i"(nr % BITS_PER_LONG)
			: "cc");
	} else {
		unsigned long mask = 1UL << (nr % BITS_PER_LONG);

		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%0, %2\n"
			"	andn	%0, %3\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p)
			: "m"(*p), "r"(mask)
			: "cc");
	}
}

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(int nr, volatile void *addr)
{
	unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
	unsigned long mask = 1UL << (nr % BITS_PER_LONG);
	unsigned long tmp;

	asm volatile(
		"1:	ssrf	5\n"
		"	ld.w	%0, %2\n"
		"	eor	%0, %3\n"
		"	stcond	%1, %0\n"
		"	brne	1b"
		: "=&r"(tmp), "=o"(*p)
		: "m"(*p), "r"(mask)
		: "cc");
}
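/*
 * Illustrative usage sketch, not part of the original header: how a
 * caller might combine the atomic operations above.  The flag names
 * and the dev_flags word are hypothetical, invented for this example,
 * and the block is guarded out so it is never compiled.
 */
#if 0
#define EXAMPLE_RX_PENDING	0	/* hypothetical flag bits */
#define EXAMPLE_TX_BUSY		1

static unsigned long dev_flags;		/* one word holds bits 0..31 */

static void example_mark_rx(void)
{
	set_bit(EXAMPLE_RX_PENDING, &dev_flags);	/* atomic RMW */
}

static void example_release_tx(void)
{
	/*
	 * clear_bit() is atomic but is not a barrier: when the bit acts
	 * as a lock, order the preceding stores explicitly.
	 */
	smp_mb__before_clear_bit();
	clear_bit(EXAMPLE_TX_BUSY, &dev_flags);
}
#endif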
/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(int nr, volatile void *addr)
{
	unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
	unsigned long mask = 1UL << (nr % BITS_PER_LONG);
	unsigned long tmp, old;

	if (__builtin_constant_p(nr)) {
		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%0, %3\n"
			"	mov	%2, %0\n"
			"	sbr	%0, %4\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p), "=&r"(old)
			: "m"(*p), "i"(nr % BITS_PER_LONG)
			: "memory", "cc");
	} else {
		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%2, %3\n"
			"	or	%0, %2, %4\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p), "=&r"(old)
			: "m"(*p), "r"(mask)
			: "memory", "cc");
	}

	return (old & mask) != 0;
}

/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(int nr, volatile void *addr)
{
	unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
	unsigned long mask = 1UL << (nr % BITS_PER_LONG);
	unsigned long tmp, old;

	if (__builtin_constant_p(nr)) {
		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%0, %3\n"
			"	mov	%2, %0\n"
			"	cbr	%0, %4\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p), "=&r"(old)
			: "m"(*p), "i"(nr % BITS_PER_LONG)
			: "memory", "cc");
	} else {
		asm volatile(
			"1:	ssrf	5\n"
			"	ld.w	%0, %3\n"
			"	mov	%2, %0\n"
			"	andn	%0, %4\n"
			"	stcond	%1, %0\n"
			"	brne	1b"
			: "=&r"(tmp), "=o"(*p), "=&r"(old)
			: "m"(*p), "r"(mask)
			: "memory", "cc");
	}

	return (old & mask) != 0;
}

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(int nr, volatile void *addr)
{
	unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
	unsigned long mask = 1UL << (nr % BITS_PER_LONG);
	unsigned long tmp, old;

	asm volatile(
		"1:	ssrf	5\n"
		"	ld.w	%2, %3\n"
		"	eor	%0, %2, %4\n"
		"	stcond	%1, %0\n"
		"	brne	1b"
		: "=&r"(tmp), "=o"(*p), "=&r"(old)
		: "m"(*p), "r"(mask)
		: "memory", "cc");

	return (old & mask) != 0;
}

#include <asm-generic/bitops/non-atomic.h>

/* Find First bit Set (counting from 0; result undefined for word == 0) */
static inline unsigned long __ffs(unsigned long word)
{
	unsigned long result;

	asm("brev %1\n\t"
	    "clz %0,%1"
	    : "=r"(result), "=&r"(word)
	    : "1"(word));
	return result;
}

/* Find First Zero (counting from 0; result undefined if all bits are set) */
static inline unsigned long ffz(unsigned long word)
{
	return __ffs(~word);
}

/* Find Last bit Set (counting from 1; fls(0) == 0) */
static inline int fls(unsigned long word)
{
	unsigned long result;

	asm("clz %0,%1" : "=r"(result) : "r"(word));
	return 32 - result;
}

unsigned long find_first_zero_bit(const unsigned long *addr,
				  unsigned long size);
unsigned long find_next_zero_bit(const unsigned long *addr,
				 unsigned long size,
				 unsigned long offset);
unsigned long find_first_bit(const unsigned long *addr,
			     unsigned long size);
unsigned long find_next_bit(const unsigned long *addr,
			    unsigned long size,
			    unsigned long offset);
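/*
 * Illustrative usage sketch, not part of the original header: a
 * trylock-style flag built on test_and_set_bit(), plus the values the
 * scanning helpers above produce.  All names here are hypothetical,
 * and the block is guarded out so it is never compiled.
 */
#if 0
static unsigned long example_lock_word;

static int example_trylock(void)
{
	/* test_and_set_bit() returns the old bit: 0 means we got it. */
	return test_and_set_bit(0, &example_lock_word) == 0;
}

static void example_scan_values(void)
{
	/* __ffs(0x0000b000) == 12	lowest set bit, counted from 0   */
	/* ffz(0xfffffffe)   == 0	lowest clear bit, counted from 0 */
	/* fls(0x0000b000)   == 16	highest set bit, counted from 1  */
	/* fls(0)            == 0					 */
}
#endif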
/*
 * ffs: find first bit set.  This is defined the same way as the libc
 * and compiler builtin ffs routines, and therefore differs in spirit
 * from the above ffz (man ffs).
 *
 * The difference is that bit numbering starts at 1, and if no bit is
 * set, the function returns 0.
 */
static inline int ffs(unsigned long word)
{
	if (word == 0)
		return 0;
	return __ffs(word) + 1;
}

#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>

#include <asm-generic/bitops/ext2-non-atomic.h>
#include <asm-generic/bitops/ext2-atomic.h>
#include <asm-generic/bitops/minix-le.h>

#endif /* __ASM_AVR32_BITOPS_H */
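/*
 * Illustrative note, not part of the original header: ffs() vs __ffs().
 * ffs() matches the libc/compiler builtin, numbering bits from 1 and
 * returning 0 when no bit is set; __ffs() numbers bits from 0 and its
 * result is undefined for a zero argument.  Worked values:
 *
 *	ffs(0)        == 0
 *	ffs(1)        == 1
 *	ffs(0x8000)   == 16
 *	__ffs(0x8000) == 15
 */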