/*
 * Linux kernel mirror (for testing)
 * git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
 * kernel / os / linux
 */
1/*
2 * include/asm-xtensa/bitops.h
3 *
 * Atomic operations that C can't guarantee us. Useful for resource counting etc.
5 *
6 * This file is subject to the terms and conditions of the GNU General Public
7 * License. See the file "COPYING" in the main directory of this archive
8 * for more details.
9 *
10 * Copyright (C) 2001 - 2007 Tensilica Inc.
11 */
12
13#ifndef _XTENSA_BITOPS_H
14#define _XTENSA_BITOPS_H
15
16#ifdef __KERNEL__
17
18#ifndef _LINUX_BITOPS_H
19#error only <linux/bitops.h> can be included directly
20#endif
21
22#include <asm/processor.h>
23#include <asm/byteorder.h>
24
25#ifdef CONFIG_SMP
26# error SMP not supported on this architecture
27#endif
28
/* UP-only architecture (CONFIG_SMP is rejected above), so a compiler
 * barrier is all that is needed around clear_bit(). */
#define smp_mb__before_clear_bit() barrier()
#define smp_mb__after_clear_bit() barrier()
31
32#include <asm-generic/bitops/non-atomic.h>
33
34#if XCHAL_HAVE_NSA
35
36static inline unsigned long __cntlz (unsigned long x)
37{
38 int lz;
39 asm ("nsau %0, %1" : "=r" (lz) : "r" (x));
40 return lz;
41}
42
/*
 * ffz: Find first zero in word. Undefined if no zero exists.
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */

static inline int ffz(unsigned long x)
{
	unsigned long inverted = ~x;

	/* The lowest set bit of ~x is the lowest clear bit of x. */
	return 31 - __cntlz(inverted & -inverted);
}
52
/*
 * __ffs: Find first bit set in word. Return 0 for bit 0
 */

static inline int __ffs(unsigned long x)
{
	unsigned long lowest_bit = x & -x;	/* isolate the lowest set bit */

	return 31 - __cntlz(lowest_bit);
}
61
/*
 * ffs: Find first bit set in word. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */

static inline int ffs(unsigned long x)
{
	unsigned long lowest_bit = x & -x;

	/* 1-based result; nsau(0) == 32, so ffs(0) correctly gives 0. */
	return 32 - __cntlz(lowest_bit);
}
72
/*
 * fls: Find last (most-significant) bit set in word.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */

static inline int fls (unsigned int x)
{
	unsigned long leading = __cntlz(x);

	return 32 - leading;
}
82
/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	unsigned long leading = __cntlz(word);

	return 31 - leading;
}
93#else
94
95/* Use the generic implementation if we don't have the nsa/nsau instructions. */
96
97# include <asm-generic/bitops/ffs.h>
98# include <asm-generic/bitops/__ffs.h>
99# include <asm-generic/bitops/ffz.h>
100# include <asm-generic/bitops/fls.h>
101# include <asm-generic/bitops/__fls.h>
102
103#endif
104
105#include <asm-generic/bitops/fls64.h>
106
107#if XCHAL_HAVE_S32C1I
108
/*
 * set_bit - atomically set a bit in memory
 * @bit: bit number; word index is bit / 32, bit within the word is bit % 32
 * @p: base address of the bitmap
 *
 * Implemented as an S32C1I compare-and-swap retry loop: load the word,
 * latch the observed value in SCOMPARE1, attempt to store the ORed
 * value, and retry if the word changed between the load and the store.
 */
static inline void set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;	/* step to the word holding the target bit */

	__asm__ __volatile__(
	"1: l32i %1, %3, 0\n"		/* value = *p */
	" wsr %1, scompare1\n"		/* comparand for s32c1i */
	" or %0, %1, %2\n"		/* tmp = value | mask */
	" s32c1i %0, %3, 0\n"		/* store tmp if *p still == value; %0 <- old *p */
	" bne %0, %1, 1b\n"		/* raced with another writer: retry */
	: "=&a" (tmp), "=&a" (value)
	: "a" (mask), "a" (p)
	: "memory");
}
126
/*
 * clear_bit - atomically clear a bit in memory
 * @bit: bit number; word index is bit / 32, bit within the word is bit % 32
 * @p: base address of the bitmap
 *
 * Same S32C1I retry loop as set_bit(), but ANDs with the inverted mask
 * to drop the target bit.
 */
static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;	/* step to the word holding the target bit */

	__asm__ __volatile__(
	"1: l32i %1, %3, 0\n"		/* value = *p */
	" wsr %1, scompare1\n"		/* comparand for s32c1i */
	" and %0, %1, %2\n"		/* tmp = value & ~mask */
	" s32c1i %0, %3, 0\n"		/* store tmp if *p still == value; %0 <- old *p */
	" bne %0, %1, 1b\n"		/* raced with another writer: retry */
	: "=&a" (tmp), "=&a" (value)
	: "a" (~mask), "a" (p)
	: "memory");
}
144
/*
 * change_bit - atomically toggle a bit in memory
 * @bit: bit number; word index is bit / 32, bit within the word is bit % 32
 * @p: base address of the bitmap
 *
 * Same S32C1I retry loop as set_bit(), but XORs with the mask to flip
 * the target bit.
 */
static inline void change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;	/* step to the word holding the target bit */

	__asm__ __volatile__(
	"1: l32i %1, %3, 0\n"		/* value = *p */
	" wsr %1, scompare1\n"		/* comparand for s32c1i */
	" xor %0, %1, %2\n"		/* tmp = value ^ mask */
	" s32c1i %0, %3, 0\n"		/* store tmp if *p still == value; %0 <- old *p */
	" bne %0, %1, 1b\n"		/* raced with another writer: retry */
	: "=&a" (tmp), "=&a" (value)
	: "a" (mask), "a" (p)
	: "memory");
}
162
/*
 * test_and_set_bit - atomically set a bit and return its old value
 * @bit: bit number; word index is bit / 32, bit within the word is bit % 32
 * @p: base address of the bitmap
 *
 * S32C1I retry loop as in set_bit(). On loop exit %0 == %1 (the bne
 * did not branch), so tmp holds the word's value before the store,
 * and tmp & mask is the bit's old state (nonzero if it was set).
 */
static inline int
test_and_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;	/* step to the word holding the target bit */

	__asm__ __volatile__(
	"1: l32i %1, %3, 0\n"		/* value = *p */
	" wsr %1, scompare1\n"		/* comparand for s32c1i */
	" or %0, %1, %2\n"		/* tmp = value | mask */
	" s32c1i %0, %3, 0\n"		/* store tmp if *p still == value; %0 <- old *p */
	" bne %0, %1, 1b\n"		/* raced with another writer: retry */
	: "=&a" (tmp), "=&a" (value)
	: "a" (mask), "a" (p)
	: "memory");

	return tmp & mask;	/* old state of the bit */
}
183
/*
 * test_and_clear_bit - atomically clear a bit and return its old value
 * @bit: bit number; word index is bit / 32, bit within the word is bit % 32
 * @p: base address of the bitmap
 *
 * S32C1I retry loop as in clear_bit(). On loop exit tmp holds the
 * word's value before the store, so tmp & mask is the bit's old state.
 */
static inline int
test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;	/* step to the word holding the target bit */

	__asm__ __volatile__(
	"1: l32i %1, %3, 0\n"		/* value = *p */
	" wsr %1, scompare1\n"		/* comparand for s32c1i */
	" and %0, %1, %2\n"		/* tmp = value & ~mask */
	" s32c1i %0, %3, 0\n"		/* store tmp if *p still == value; %0 <- old *p */
	" bne %0, %1, 1b\n"		/* raced with another writer: retry */
	: "=&a" (tmp), "=&a" (value)
	: "a" (~mask), "a" (p)
	: "memory");

	return tmp & mask;	/* old state of the bit */
}
204
/*
 * test_and_change_bit - atomically toggle a bit and return its old value
 * @bit: bit number; word index is bit / 32, bit within the word is bit % 32
 * @p: base address of the bitmap
 *
 * S32C1I retry loop as in change_bit(). On loop exit tmp holds the
 * word's value before the store, so tmp & mask is the bit's old state.
 */
static inline int
test_and_change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;	/* step to the word holding the target bit */

	__asm__ __volatile__(
	"1: l32i %1, %3, 0\n"		/* value = *p */
	" wsr %1, scompare1\n"		/* comparand for s32c1i */
	" xor %0, %1, %2\n"		/* tmp = value ^ mask */
	" s32c1i %0, %3, 0\n"		/* store tmp if *p still == value; %0 <- old *p */
	" bne %0, %1, 1b\n"		/* raced with another writer: retry */
	: "=&a" (tmp), "=&a" (value)
	: "a" (mask), "a" (p)
	: "memory");

	return tmp & mask;	/* old state of the bit */
}
225
226#else
227
228#include <asm-generic/bitops/atomic.h>
229
230#endif /* XCHAL_HAVE_S32C1I */
231
232#include <asm-generic/bitops/find.h>
233#include <asm-generic/bitops/le.h>
234
235#include <asm-generic/bitops/ext2-atomic-setbit.h>
236
237#include <asm-generic/bitops/hweight.h>
238#include <asm-generic/bitops/lock.h>
239#include <asm-generic/bitops/sched.h>
240
241#endif /* __KERNEL__ */
242
243#endif /* _XTENSA_BITOPS_H */