/* bitops.h: bit operations for the Fujitsu FR-V CPUs
 *
 * For an explanation of how atomic ops work in this arch, see:
 * Documentation/fujitsu/frv/atomic-ops.txt
 *
 * Copyright (C) 2004 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#include <linux/compiler.h>
#include <asm/byteorder.h>
#include <asm/system.h>
#include <asm/atomic.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/ffz.h>

/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()

static inline int test_and_clear_bit(int nr, volatile void *addr)
{
	volatile unsigned long *ptr = addr;
	unsigned long mask = 1UL << (nr & 31);
	ptr += nr >> 5;
	return (atomic_test_and_ANDNOT_mask(mask, ptr) & mask) != 0;
}

static inline int test_and_set_bit(int nr, volatile void *addr)
{
	volatile unsigned long *ptr = addr;
	unsigned long mask = 1UL << (nr & 31);
	ptr += nr >> 5;
	return (atomic_test_and_OR_mask(mask, ptr) & mask) != 0;
}

static inline int test_and_change_bit(int nr, volatile void *addr)
{
	volatile unsigned long *ptr = addr;
	unsigned long mask = 1UL << (nr & 31);
	ptr += nr >> 5;
	return (atomic_test_and_XOR_mask(mask, ptr) & mask) != 0;
}

static inline void clear_bit(int nr, volatile void *addr)
{
	test_and_clear_bit(nr, addr);
}

static inline void set_bit(int nr, volatile void *addr)
{
	test_and_set_bit(nr, addr);
}

static inline void change_bit(int nr, volatile void * addr)
{
	test_and_change_bit(nr, addr);
}

static inline void __clear_bit(int nr, volatile void * addr)
{
	volatile unsigned long *a = addr;
	int mask;

	a += nr >> 5;
	mask = 1 << (nr & 31);
	*a &= ~mask;
}

static inline void __set_bit(int nr, volatile void * addr)
{
	volatile unsigned long *a = addr;
	int mask;

	a += nr >> 5;
	mask = 1 << (nr & 31);
	*a |= mask;
}

static inline void __change_bit(int nr, volatile void *addr)
{
	volatile unsigned long *a = addr;
	int mask;

	a += nr >> 5;
	mask = 1 << (nr & 31);
	*a ^= mask;
}

static inline int __test_and_clear_bit(int nr, volatile void * addr)
{
	volatile unsigned long *a = addr;
	int mask, retval;

	a += nr >> 5;
	mask = 1 << (nr & 31);
	retval = (mask & *a) != 0;
	*a &= ~mask;
	return retval;
}

static inline int __test_and_set_bit(int nr, volatile void * addr)
{
	volatile unsigned long *a = addr;
	int mask, retval;

	a += nr >> 5;
	mask = 1 << (nr & 31);
	retval = (mask & *a) != 0;
	*a |= mask;
	return retval;
}

static inline int __test_and_change_bit(int nr, volatile void * addr)
{
	volatile unsigned long *a = addr;
	int mask, retval;

	a += nr >> 5;
	mask = 1 << (nr & 31);
	retval = (mask & *a) != 0;
	*a ^= mask;
	return retval;
}
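
/*
 * Usage sketch (illustrative; the identifiers in the snippet below are
 * hypothetical and not defined by this header): the double-underscore
 * variants above are non-atomic and rely on external serialisation,
 * e.g. a spinlock, while the plain variants are atomic.  clear_bit()
 * doesn't provide a barrier for the compiler, so bracket it with
 * smp_mb__before_clear_bit()/smp_mb__after_clear_bit() where ordering
 * against surrounding accesses matters:
 *
 *	spin_lock(&foo_lock);
 *	if (!__test_and_set_bit(nr, foo_map))
 *		do_something();
 *	spin_unlock(&foo_lock);
 *
 *	smp_mb__before_clear_bit();
 *	clear_bit(FOO_PENDING, &foo_flags);
 *	smp_mb__after_clear_bit();
 */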

/*
 * This routine doesn't need to be atomic.
 */
static inline int __constant_test_bit(int nr, const volatile void * addr)
{
	return ((1UL << (nr & 31)) & (((const volatile unsigned int *) addr)[nr >> 5])) != 0;
}

static inline int __test_bit(int nr, const volatile void * addr)
{
	int	*a = (int *) addr;
	int	mask;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	return ((mask & *a) != 0);
}

#define test_bit(nr,addr) \
(__builtin_constant_p(nr) ? \
 __constant_test_bit((nr),(addr)) : \
 __test_bit((nr),(addr)))

#include <asm-generic/bitops/find.h>

/**
 * fls - find last bit set
 * @x: the word to search
 *
 * This is defined the same way as ffs:
 * - return 32..1 to indicate bit 31..0 most significant bit set
 * - return 0 to indicate no bits set
 */
#define fls(x)						\
({							\
	int bit;					\
							\
	asm("	subcc	%1,gr0,gr0,icc0		\n"	\
	    "	ckne	icc0,cc4		\n"	\
	    "	cscan.p	%1,gr0,%0	,cc4,#1	\n"	\
	    "	csub	%0,%0,%0	,cc4,#0	\n"	\
	    "	csub	%2,%0,%0	,cc4,#1	\n"	\
	    : "=&r"(bit)				\
	    : "r"(x), "r"(32)				\
	    : "icc0", "cc4"				\
	    );						\
							\
	bit;						\
})

/**
 * fls64 - find last bit set in a 64-bit value
 * @n: the value to search
 *
 * This is defined the same way as ffs:
 * - return 64..1 to indicate bit 63..0 most significant bit set
 * - return 0 to indicate no bits set
 */
static inline __attribute__((const))
int fls64(u64 n)
{
	union {
		u64 ll;
		struct { u32 h, l; };
	} _;
	int bit, x, y;

	_.ll = n;

	asm("	subcc.p		%3,gr0,gr0,icc0		\n"
	    "	subcc		%4,gr0,gr0,icc1		\n"
	    "	ckne		icc0,cc4		\n"
	    "	ckne		icc1,cc5		\n"
	    "	norcr		cc4,cc5,cc6		\n"
	    "	csub.p		%0,%0,%0	,cc6,1	\n"
	    "	orcr		cc5,cc4,cc4		\n"
	    "	andcr		cc4,cc5,cc4		\n"
	    "	cscan.p		%3,gr0,%0	,cc4,0	\n"
	    "	setlos		#64,%1			\n"
	    "	cscan.p		%4,gr0,%0	,cc4,1	\n"
	    "	setlos		#32,%2			\n"
	    "	csub.p		%1,%0,%0	,cc4,0	\n"
	    "	csub		%2,%0,%0	,cc4,1	\n"
	    : "=&r"(bit), "=r"(x), "=r"(y)
	    : "0r"(_.h), "r"(_.l)
	    : "icc0", "icc1", "cc4", "cc5", "cc6"
	    );
	return bit;
}

/**
 * ffs - find first bit set
 * @x: the word to search
 *
 * - return 32..1 to indicate bit 31..0 least significant bit set
 * - return 0 to indicate no bits set
 */
static inline __attribute__((const))
int ffs(int x)
{
	/* Note: (x & -x) gives us a mask that is the least significant
	 * (rightmost) 1-bit of the value in x.
	 */
	return fls(x & -x);
}

/**
 * __ffs - find first bit set
 * @x: the word to search
 *
 * - return 31..0 to indicate bit 31..0 least significant bit set
 * - if no bits are set in x, the result is undefined
 */
static inline __attribute__((const))
int __ffs(unsigned long x)
{
	int bit;
	asm("scan %1,gr0,%0" : "=r"(bit) : "r"(x & -x));
	return 31 - bit;
}

#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>

#include <asm-generic/bitops/ext2-non-atomic.h>

#define ext2_set_bit_atomic(lock,nr,addr)	test_and_set_bit  ((nr) ^ 0x18, (addr))
#define ext2_clear_bit_atomic(lock,nr,addr)	test_and_clear_bit((nr) ^ 0x18, (addr))

#include <asm-generic/bitops/minix-le.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */
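
/*
 * Illustrative examples of the conventions above: for x = 0x00a0,
 * x & -x == 0x0020, so ffs(0x00a0) == fls(0x0020) == 6 and
 * __ffs(0x00a0) == 5, while fls(0x00a0) == 8; ffs(), fls() and fls64()
 * all return 0 when no bits are set, whereas __ffs(0) is undefined.
 *
 * The ext2 helpers XOR the bit number with 0x18 because ext2 bitmaps
 * use little-endian bit numbering: flipping the two bits that select
 * the byte within a 32-bit word maps an ext2 bit number onto the
 * corresponding bit of the big-endian word the atomic operations act on.
 */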