/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07 Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 1999, 2000 Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/byteorder.h>		/* sigh ... */
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/llsc.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit(unsigned long nr,
			    volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
				 volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
			      volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
			       volatile unsigned long *addr);


/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1			# set_bit	\n"
		"	or	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m)
		: __LLSC_CLOBBER);
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
		loongson_llsc_mb();
		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1		# set_bit	\n"
			"	" __INS "%0, %3, %2, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (bit), "r" (~0)
			: __LLSC_CLOBBER);
		} while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
	} else if (kernel_uses_llsc) {
		loongson_llsc_mb();
		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# set_bit	\n"
			"	or	%0, %2				\n"
			"	" __SC	"%0, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (1UL << bit)
			: __LLSC_CLOBBER);
		} while (unlikely(!temp));
	} else
		__mips_set_bit(nr, addr);
}

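/*
 * Usage sketch (illustrative only; "device_flags" and the bit number are
 * made-up names, not kernel API):
 *
 *	static unsigned long device_flags;
 *
 *	set_bit(0, &device_flags);	- atomic OR of bit 0 into the word
 *
 * set_bit() guarantees atomicity of the read-modify-write but provides no
 * memory barrier; pair it with smp_mb__before_atomic() or
 * smp_mb__after_atomic() when ordering against other accesses matters.
 */
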
/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1			# clear_bit	\n"
		"	and	%0, %2					\n"
		"	" __SC "%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (~(1UL << bit))
		: __LLSC_CLOBBER);
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
		loongson_llsc_mb();
		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1		# clear_bit	\n"
			"	" __INS "%0, $0, %2, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (bit)
			: __LLSC_CLOBBER);
		} while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
	} else if (kernel_uses_llsc) {
		loongson_llsc_mb();
		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# clear_bit	\n"
			"	and	%0, %2				\n"
			"	" __SC "%0, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (~(1UL << bit))
			: __LLSC_CLOBBER);
		} while (unlikely(!temp));
	} else
		__mips_clear_bit(nr, addr);
}

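/*
 * Sketch of the barrier pairing described above (flag names are
 * illustrative):
 *
 *	set_bit(DATA_READY, &flags);		- publisher
 *	...
 *	smp_mb__before_atomic();		- order prior stores ...
 *	clear_bit(IN_PROGRESS, &flags);		- ... before this clear
 */
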
/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation. It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_atomic();
	clear_bit(nr, addr);
}

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1			# change_bit	\n"
		"	xor	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (1UL << bit)
		: __LLSC_CLOBBER);
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		loongson_llsc_mb();
		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# change_bit	\n"
			"	xor	%0, %2				\n"
			"	" __SC	"%0, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (1UL << bit)
			: __LLSC_CLOBBER);
		} while (unlikely(!temp));
	} else
		__mips_change_bit(nr, addr);
}

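/*
 * Illustrative use: atomically toggling a state bit that other CPUs may
 * also be updating (the name POLARITY_BIT is made up):
 *
 *	change_bit(POLARITY_BIT, &state);	- safe concurrent toggle
 */
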
/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: __LLSC_CLOBBER);
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		loongson_llsc_mb();
		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1	# test_and_set_bit	\n"
			"	or	%2, %0, %3			\n"
			"	" __SC	"%2, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: __LLSC_CLOBBER);
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_set_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}

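/*
 * Typical claim-once pattern (illustrative; do_one_time_init() is a
 * hypothetical helper):
 *
 *	if (!test_and_set_bit(INITIALIZED, &obj_flags))
 *		do_one_time_init(obj);	- only the first claimant runs this
 *
 * The implied barrier orders the initialisation against later callers
 * that observe the bit as already set.
 */
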
/*
 * test_and_set_bit_lock - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies acquire ordering semantics
 * after the memory operation.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1		# test_and_set_bit_lock	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+m" (*m), "=&r" (res)
		: "r" (1UL << bit)
		: __LLSC_CLOBBER);
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		loongson_llsc_mb();
		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1	# test_and_set_bit_lock	\n"
			"	or	%2, %0, %3			\n"
			"	" __SC	"%2, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: __LLSC_CLOBBER);
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_set_bit_lock(nr, addr);

	smp_llsc_mb();

	return res != 0;
}
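
/*
 * Together with clear_bit_unlock() above, this yields a simple bit
 * spinlock (sketch; BIT_LOCK and word are made-up names, and
 * <linux/bit_spinlock.h> provides a ready-made form of this pattern):
 *
 *	while (test_and_set_bit_lock(BIT_LOCK, &word))
 *		cpu_relax();			- acquire
 *	... critical section ...
 *	clear_bit_unlock(BIT_LOCK, &word);	- release
 */
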
/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1		# test_and_clear_bit	\n"
		"	or	%2, %0, %3				\n"
		"	xor	%2, %3					\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: __LLSC_CLOBBER);
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		loongson_llsc_mb();
		do {
			__asm__ __volatile__(
			"	" __LL	"%0, %1 # test_and_clear_bit	\n"
			"	" __EXT "%2, %0, %3, 1			\n"
			"	" __INS "%0, $0, %3, 1			\n"
			"	" __SC	"%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "ir" (bit)
			: __LLSC_CLOBBER);
		} while (unlikely(!temp));
#endif
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		loongson_llsc_mb();
		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL	"%0, %1 # test_and_clear_bit	\n"
			"	or	%2, %0, %3			\n"
			"	xor	%2, %3				\n"
			"	" __SC	"%2, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: __LLSC_CLOBBER);
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_clear_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}

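/*
 * Typical consume-a-pending-event pattern (illustrative names):
 *
 *	if (test_and_clear_bit(PENDING, &irq_flags))
 *		handle_event();		- runs at most once per set_bit()
 */
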
/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1		# test_and_change_bit	\n"
		"	xor	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: __LLSC_CLOBBER);
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		loongson_llsc_mb();
		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL	"%0, %1 # test_and_change_bit	\n"
			"	xor	%2, %0, %3			\n"
			"	" __SC	"\t%2, %1			\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: __LLSC_CLOBBER);
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_change_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}

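/*
 * Illustrative: toggle a bit and learn its pre-toggle state in one
 * atomic step (LED_BIT is a made-up name):
 *
 *	was_on = test_and_change_bit(LED_BIT, &led_state);
 */
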
#include <asm-generic/bitops/non-atomic.h>

/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit_unlock() is non-atomic and implies release semantics before
 * the memory operation. It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_llsc();
	__clear_bit(nr, addr);
	nudge_writes();
}

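/*
 * Intended-use sketch (illustrative): releasing a bit lock when the
 * holder is the only possible writer of the word, so the cheaper
 * non-atomic read-modify-write is safe:
 *
 *	... critical section; no other CPU writes *addr ...
 *	__clear_bit_unlock(BIT_LOCK, &word);
 */
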
/*
 * Return the bit position (0..BITS_PER_LONG-1) of the most significant
 * 1 bit in a word.
 * Undefined if no 1 bit exists, so callers should check against 0 first.
 */
static __always_inline unsigned long __fls(unsigned long word)
{
	int num;

	if (BITS_PER_LONG == 32 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 31 - num;
	}

	if (BITS_PER_LONG == 64 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	dclz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 63 - num;
	}

	num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
	if (!(word & (~0ul << 32))) {
		num -= 32;
		word <<= 32;
	}
#endif
	if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
		num -= 16;
		word <<= 16;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
		num -= 8;
		word <<= 8;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
		num -= 4;
		word <<= 4;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
		num -= 2;
		word <<= 2;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-1))))
		num -= 1;
	return num;
}

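/*
 * Worked examples: __fls(1) == 0, __fls(0x80000000) == 31 and, on a
 * 64-bit kernel, __fls(1UL << 63) == 63.  __fls(0) is undefined.
 */
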
/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns 0..SZLONG-1
 * Undefined if no bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __ffs(unsigned long word)
{
	return __fls(word & -word);
}

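/*
 * (word & -word) isolates the least significant set bit (two's
 * complement), so __fls() of the result is exactly the index of the
 * first set bit.  Worked examples: __ffs(0x08) == 3, __ffs(1) == 0;
 * __ffs(0) is undefined.
 */
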
/*
 * fls - find last bit set.
 * @x: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(unsigned int x)
{
	int r;

	if (!__builtin_constant_p(x) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (x)
		: "r" (x));

		return 32 - x;
	}

	r = 32;
	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}

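/*
 * On CPUs with clz this is a single instruction: for x != 0,
 * fls(x) == 32 - clz(x), e.g. clz(0x00f00000) == 8 so fls() == 24;
 * clz(0) == 32, so the same identity also yields fls(0) == 0.
 */
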
#include <asm-generic/bitops/fls64.h>

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as the libc and compiler builtin
 * ffs routines, therefore it differs in spirit from ffz (man ffs):
 * ffs numbers bits from 1 and returns 0 when no bit is set.
 */
static inline int ffs(int word)
{
	if (!word)
		return 0;

	return fls(word & -word);
}

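/*
 * Worked examples: ffs(0) == 0, ffs(1) == 1, ffs(0x40) == 7; the
 * (word & -word) step reduces the problem to fls() of a one-bit value.
 */
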
#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>
#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */