/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#ifndef __ASSEMBLY__

#include <linux/types.h>
#include <linux/compiler.h>
#include <asm/barrier.h>
23 * Hardware assisted read-modify-write using ARC700 LLOCK/SCOND insns.
24 * The Kconfig glue ensures that in SMP, this is only set if the container
25 * SoC/platform has cross-core coherent LLOCK/SCOND
27 #if defined(CONFIG_ARC_HAS_LLSC)
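
/*
 * All LLSC-based atomics below follow the same retry pattern: LLOCK loads
 * the word and arms a reservation on its address; SCOND stores back only if
 * the reservation is still intact, otherwise it fails and the BNZ loops back
 * to retry. The bit is thus flipped as a lock-free atomic read-modify-write.
 */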
static inline void set_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned int temp;

        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        __asm__ __volatile__(
        "1:     llock   %0, [%1]        \n"
        "       bset    %0, %0, %2      \n"
        "       scond   %0, [%1]        \n"
        "       bnz     1b              \n"
        : "=&r"(temp)
        : "r"(m), "ir"(nr)
        : "cc");
}
static inline void clear_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned int temp;

        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        __asm__ __volatile__(
        "1:     llock   %0, [%1]        \n"
        "       bclr    %0, %0, %2      \n"
        "       scond   %0, [%1]        \n"
        "       bnz     1b              \n"
        : "=&r"(temp)
        : "r"(m), "ir"(nr)
        : "cc");
}
static inline void change_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned int temp;

        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        __asm__ __volatile__(
        "1:     llock   %0, [%1]        \n"
        "       bxor    %0, %0, %2      \n"
        "       scond   %0, [%1]        \n"
        "       bnz     1b              \n"
        : "=&r"(temp)
        : "r"(m), "ir"(nr)
        : "cc");
}
/*
 * Semantically:
 *    Test the bit
 *    if clear
 *        set it and return 0 (old value)
 *    else
 *        return 1 (old value).
 *
 * Since ARC lacks an equivalent h/w primitive, the bit is set unconditionally
 * and the old value of the bit is returned.
 */
static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned long old, temp;

        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        __asm__ __volatile__(
        "1:     llock   %0, [%2]        \n"
        "       bset    %1, %0, %3      \n"
        "       scond   %1, [%2]        \n"
        "       bnz     1b              \n"
        : "=&r"(old), "=&r"(temp)
        : "r"(m), "ir"(nr)
        : "cc");

        return (old & (1 << nr)) != 0;
}
static inline int
test_and_clear_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned int old, temp;

        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        __asm__ __volatile__(
        "1:     llock   %0, [%2]        \n"
        "       bclr    %1, %0, %3      \n"
        "       scond   %1, [%2]        \n"
        "       bnz     1b              \n"
        : "=&r"(old), "=&r"(temp)
        : "r"(m), "ir"(nr)
        : "cc");

        return (old & (1 << nr)) != 0;
}
static inline int
test_and_change_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned int old, temp;

        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        __asm__ __volatile__(
        "1:     llock   %0, [%2]        \n"
        "       bxor    %1, %0, %3      \n"
        "       scond   %1, [%2]        \n"
        "       bnz     1b              \n"
        : "=&r"(old), "=&r"(temp)
        : "r"(m), "ir"(nr)
        : "cc");

        return (old & (1 << nr)) != 0;
}
#else   /* !CONFIG_ARC_HAS_LLSC */
/*
 * Non hardware assisted Atomic-R-M-W
 * Locking would change to irq-disabling only (UP) and spinlocks (SMP)
 *
 * There's "significant" micro-optimization in writing our own variants of
 * bitops (over generic variants)
 *
 * (1) The generic APIs have "signed" @nr while we have it "unsigned"
 *     This avoids extra code being generated for pointer arithmetic, since
 *     the compiler is otherwise "not sure" that the index is NOT -ve
 * (2) Utilize the fact that ARCompact bit fiddling insns (BSET/BCLR/ASL) etc
 *     only consider the bottom 5 bits of @nr, so NO need to mask them off.
 *     (GCC quirk: however for a constant @nr we still need to do the masking
 *     at compile time)
 */
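
/*
 * Illustration of (2): for nr = 37 the word is selected with m += 37 >> 5
 * (i.e. m += 1), and the remaining shift/bset operand 37 is truncated by the
 * hardware to 37 & 0x1f = 5, so bit 5 of the second word - exactly bit 37 of
 * the bitmap - is affected without any explicit masking instruction.
 */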
static inline void set_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned long temp, flags;
        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        bitops_lock(flags);

        temp = *m;
        *m = temp | (1UL << nr);

        bitops_unlock(flags);
}
static inline void clear_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned long temp, flags;
        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        bitops_lock(flags);

        temp = *m;
        *m = temp & ~(1UL << nr);

        bitops_unlock(flags);
}
static inline void change_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned long temp, flags;
        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        bitops_lock(flags);

        temp = *m;
        *m = temp ^ (1UL << nr);

        bitops_unlock(flags);
}
static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned long old, flags;
        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        bitops_lock(flags);

        old = *m;
        *m = old | (1 << nr);

        bitops_unlock(flags);

        return (old & (1 << nr)) != 0;
}
static inline int
test_and_clear_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned long old, flags;
        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        bitops_lock(flags);

        old = *m;
        *m = old & ~(1 << nr);

        bitops_unlock(flags);

        return (old & (1 << nr)) != 0;
}
static inline int
test_and_change_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned long old, flags;
        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        bitops_lock(flags);

        old = *m;
        *m = old ^ (1 << nr);

        bitops_unlock(flags);

        return (old & (1 << nr)) != 0;
}
#endif /* CONFIG_ARC_HAS_LLSC */
/***************************************
 * Non atomic variants
 **************************************/
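
/*
 * The __xxx_bit() variants below do the same read-modify-write without any
 * locking or LLOCK/SCOND; they are only safe when the caller already has
 * exclusive access to the word (e.g. under a lock, or on data not yet
 * visible to other CPUs/interrupt handlers).
 */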
static inline void __set_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned long temp;
        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        temp = *m;
        *m = temp | (1UL << nr);
}
static inline void __clear_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned long temp;
        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        temp = *m;
        *m = temp & ~(1UL << nr);
}
static inline void __change_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned long temp;
        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        temp = *m;
        *m = temp ^ (1UL << nr);
}
static inline int
__test_and_set_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned long old;
        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        old = *m;
        *m = old | (1 << nr);

        return (old & (1 << nr)) != 0;
}
static inline int
__test_and_clear_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned long old;
        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        old = *m;
        *m = old & ~(1 << nr);

        return (old & (1 << nr)) != 0;
}
static inline int
__test_and_change_bit(unsigned long nr, volatile unsigned long *m)
{
        unsigned long old;
        m += nr >> 5;

        if (__builtin_constant_p(nr))
                nr &= 0x1f;

        old = *m;
        *m = old ^ (1 << nr);

        return (old & (1 << nr)) != 0;
}
/*
 * This routine doesn't need to be atomic.
 */
static inline int
__constant_test_bit(unsigned int nr, const volatile unsigned long *addr)
{
        return ((1UL << (nr & 31)) &
                (((const volatile unsigned int *)addr)[nr >> 5])) != 0;
}
static inline int
__test_bit(unsigned int nr, const volatile unsigned long *addr)
{
        unsigned long mask;

        addr += nr >> 5;

        /* ARC700 only considers 5 bits in bit-fiddling insn */
        mask = 1 << nr;

        return ((mask & *addr) != 0);
}
#define test_bit(nr, addr)      (__builtin_constant_p(nr) ? \
                                        __constant_test_bit((nr), (addr)) : \
                                        __test_bit((nr), (addr)))
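
/*
 * test_bit() dispatch: for a compile-time constant @nr, __constant_test_bit()
 * lets the compiler fold the word index (nr >> 5) and the mask (1 << (nr & 31))
 * into immediates; for a runtime @nr, __test_bit() leans on the shift
 * instruction's implicit 5-bit truncation of @nr instead of masking it
 * explicitly.
 */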
/*
 * Count the number of zeros, starting from MSB
 * Helper for fls( ) friends
 * This is a pure count, so (1-32) or (0-31) doesn't apply
 * It could be 0 to 32, based on num of 0's in there
 * clz(0x8000_0000) = 0, clz(0xFFFF_FFFF) = 0, clz(0) = 32, clz(1) = 31
 */
static inline __attribute__ ((const)) int clz(unsigned int x)
{
        unsigned int res;

        __asm__ __volatile__(
        "       norm.f  %0, %1          \n"
        "       mov.n   %0, 0           \n"
        "       add.p   %0, %0, 1       \n"
        : "=r"(res)
        : "r"(x)
        : "cc");

        return res;
}
static inline int constant_fls(int x)
{
        int r = 32;

        if (!x)
                return 0;
        if (!(x & 0xffff0000u)) {
                x <<= 16;
                r -= 16;
        }
        if (!(x & 0xff000000u)) {
                x <<= 8;
                r -= 8;
        }
        if (!(x & 0xf0000000u)) {
                x <<= 4;
                r -= 4;
        }
        if (!(x & 0xc0000000u)) {
                x <<= 2;
                r -= 2;
        }
        if (!(x & 0x80000000u)) {
                x <<= 1;
                r -= 1;
        }
        return r;
}
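
/*
 * Worked example of the binary search above: constant_fls(0x500)
 *   0x500      & 0xffff0000 == 0  ->  x <<= 16 (0x05000000), r = 16
 *   0x05000000 & 0xff000000 != 0  ->  no change
 *   0x05000000 & 0xf0000000 == 0  ->  x <<= 4  (0x50000000), r = 12
 *   0x50000000 & 0xc0000000 != 0  ->  no change
 *   0x50000000 & 0x80000000 == 0  ->  x <<= 1,               r = 11
 * returns 11, i.e. the highest set bit of 0x500 is bit 10.
 */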
/*
 * fls = Find Last Set in word
 * @result: [1-32]
 * fls(1) = 1, fls(0x80000000) = 32, fls(0) = 0
 */
static inline __attribute__ ((const)) int fls(unsigned long x)
{
        if (__builtin_constant_p(x))
                return constant_fls(x);

        return 32 - clz(x);
}
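
/*
 * For a runtime value the same result comes from the NORM-based clz():
 * e.g. fls(0xF0) = 32 - clz(0xF0) = 32 - 24 = 8 (highest set bit is bit 7).
 */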
/*
 * __fls: Similar to fls, but zero based (0-31)
 */
static inline __attribute__ ((const)) int __fls(unsigned long x)
{
        if (!x)
                return 0;

        return fls(x) - 1;
}
/*
 * ffs = Find First Set in word (LSB to MSB)
 * @result: [1-32], 0 if all 0's
 */
#define ffs(x)  ({ unsigned long __t = (x); fls(__t & -__t); })
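
/*
 * __t & -__t isolates the lowest set bit (in two's complement, -__t keeps the
 * lowest set bit and complements everything above it), so fls() of that
 * single-bit value gives its 1-based position:
 * e.g. 0x50 & -0x50 = 0x10 and fls(0x10) = 5, hence ffs(0x50) = 5.
 */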
/*
 * __ffs: Similar to ffs, but zero based (0-31)
 */
static inline __attribute__ ((const)) int __ffs(unsigned long word)
{
        if (!word)
                return word;

        return ffs(word) - 1;
}
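
/*
 * Note: unlike the generic __ffs(), which is undefined for an all-zero word,
 * this version explicitly returns 0 in that case.
 */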
/*
 * ffz = Find First Zero in word.
 * @return:[0-31], 32 if all 1's
 */
#define ffz(x)  __ffs(~(x))
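
/*
 * e.g. ffz(0x0000000F) = __ffs(0xFFFFFFF0) = 4: bit 4 is the lowest clear
 * bit of the original value.
 */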
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/lock.h>

#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>
#endif /* !__ASSEMBLY__ */

#endif