include/linux/bitops.h at v3.2-rc6
#ifndef _LINUX_BITOPS_H
#define _LINUX_BITOPS_H
#include <asm/types.h>

#ifdef __KERNEL__
#define BIT(nr)			(1UL << (nr))
#define BIT_MASK(nr)		(1UL << ((nr) % BITS_PER_LONG))
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
#define BITS_PER_BYTE		8
#define BITS_TO_LONGS(nr)	DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(long))
#endif

extern unsigned int __sw_hweight8(unsigned int w);
extern unsigned int __sw_hweight16(unsigned int w);
extern unsigned int __sw_hweight32(unsigned int w);
extern unsigned long __sw_hweight64(__u64 w);

/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope
 */
#include <asm/bitops.h>

#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

static __inline__ int get_bitmask_order(unsigned int count)
{
	int order;

	order = fls(count);
	return order;	/* We could be slightly more clever with -1 here... */
}

static __inline__ int get_count_order(unsigned int count)
{
	int order;

	order = fls(count) - 1;
	if (count & (count - 1))
		order++;
	return order;
}

static inline unsigned long hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
}

/**
 * rol32 - rotate a 32-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 rol32(__u32 word, unsigned int shift)
{
	return (word << shift) | (word >> (32 - shift));
}

/**
 * ror32 - rotate a 32-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 ror32(__u32 word, unsigned int shift)
{
	return (word >> shift) | (word << (32 - shift));
}

/**
 * rol16 - rotate a 16-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u16 rol16(__u16 word, unsigned int shift)
{
	return (word << shift) | (word >> (16 - shift));
}

/**
 * ror16 - rotate a 16-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u16 ror16(__u16 word, unsigned int shift)
{
	return (word >> shift) | (word << (16 - shift));
}

/**
 * rol8 - rotate an 8-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u8 rol8(__u8 word, unsigned int shift)
{
	return (word << shift) | (word >> (8 - shift));
}

/**
 * ror8 - rotate an 8-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u8 ror8(__u8 word, unsigned int shift)
{
	return (word >> shift) | (word << (8 - shift));
}

/**
 * sign_extend32 - sign extend a 32-bit value using specified bit as sign-bit
 * @value: value to sign extend
 * @index: 0 based bit index (0<=index<32) to sign bit
 */
static inline __s32 sign_extend32(__u32 value, int index)
{
	__u8 shift = 31 - index;
	return (__s32)(value << shift) >> shift;
}

static inline unsigned fls_long(unsigned long l)
{
	if (sizeof(l) == 4)
		return fls(l);
	return fls64(l);
}

/**
 * __ffs64 - find first set bit in a 64 bit word
 * @word: The 64 bit word
 *
 * On 64 bit arches this is a synonym for __ffs.
 * The result is not defined if no bits are set, so check that @word
 * is non-zero before calling this.
 */
static inline unsigned long __ffs64(u64 word)
{
#if BITS_PER_LONG == 32
	if (((u32)word) == 0UL)
		return __ffs((u32)(word >> 32)) + 32;
#elif BITS_PER_LONG != 64
#error BITS_PER_LONG not 32 or 64
#endif
	return __ffs((unsigned long)word);
}

#ifdef __KERNEL__

#ifndef find_last_bit
/**
 * find_last_bit - find the last set bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit number of the last set bit, or @size if no bits are set.
 */
extern unsigned long find_last_bit(const unsigned long *addr,
				   unsigned long size);
#endif

#endif /* __KERNEL__ */
#endif
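
The header above only defines the arithmetic; as a rough illustration, the standalone userspace sketch below re-creates the BIT()/BIT_MASK()/BIT_WORD() expressions and the sign_extend32() formula with standard C types so it compiles without <asm/bitops.h>. The BITS_PER_LONG stand-in, the sign_extend32_demo() name, and the main() driver are assumptions added for the demo; they are not part of the kernel header.

/*
 * Standalone demo: mirrors the bit-index arithmetic and the sign-extension
 * formula from the header above using only standard C. Not kernel code.
 */
#include <stdio.h>
#include <stdint.h>
#include <limits.h>

#define BITS_PER_LONG	(CHAR_BIT * sizeof(long))	/* stand-in for the kernel constant */
#define BIT(nr)		(1UL << (nr))
#define BIT_MASK(nr)	(1UL << ((nr) % BITS_PER_LONG))	/* mask within its word */
#define BIT_WORD(nr)	((nr) / BITS_PER_LONG)		/* which word holds bit nr */

/* Same formula as the kernel's sign_extend32(): shift the chosen sign bit
 * up to bit 31, then use an arithmetic right shift of the signed value
 * (implementation-defined in ISO C, but what the kernel relies on). */
static inline int32_t sign_extend32_demo(uint32_t value, int index)
{
	uint8_t shift = 31 - index;
	return (int32_t)(value << shift) >> shift;
}

int main(void)
{
	unsigned int nr = 70;	/* a bit index beyond the first word on 64-bit */

	printf("BIT(5)        = %#lx\n", BIT(5));
	printf("BIT_WORD(%u)  = %lu\n", nr, (unsigned long)BIT_WORD(nr));
	printf("BIT_MASK(%u)  = %#lx\n", nr, (unsigned long)BIT_MASK(nr));

	/* 0xfff read as a 12-bit two's-complement number is -1 */
	printf("sign_extend32(0xfff, 11) = %d\n", sign_extend32_demo(0xfff, 11));
	return 0;
}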