1 | /* SPDX-License-Identifier: GPL-2.0 */ | |
2 | #ifndef __LINUX_BITMAP_H | |
3 | #define __LINUX_BITMAP_H | |
4 | ||
5 | #ifndef __ASSEMBLY__ | |
6 | ||
7 | #include <linux/types.h> | |
8 | #include <linux/bitops.h> | |
9 | #include <linux/string.h> | |
10 | #include <linux/kernel.h> | |
11 | ||
12 | /* | |
13 | * bitmaps provide bit arrays that consume one or more unsigned | |
14 | * longs. The bitmap interface and available operations are listed | |
15 | * here, in bitmap.h | |
16 | * | |
17 | * Function implementations generic to all architectures are in | |
18 | * lib/bitmap.c. Functions implementations that are architecture | |
19 | * specific are in various include/asm-<arch>/bitops.h headers | |
20 | * and other arch/<arch> specific files. | |
21 | * | |
22 | * See lib/bitmap.c for more details. | |
23 | */ | |
24 | ||
25 | /** | |
26 | * DOC: bitmap overview | |
27 | * | |
28 | * The available bitmap operations and their rough meaning in the | |
29 | * case that the bitmap is a single unsigned long are thus: | |
30 | * | |
31 | * Note that nbits should be always a compile time evaluable constant. | |
32 | * Otherwise many inlines will generate horrible code. | |
33 | * | |
34 | * :: | |
35 | * | |
36 | * bitmap_zero(dst, nbits) *dst = 0UL | |
37 | * bitmap_fill(dst, nbits) *dst = ~0UL | |
38 | * bitmap_copy(dst, src, nbits) *dst = *src | |
39 | * bitmap_and(dst, src1, src2, nbits) *dst = *src1 & *src2 | |
40 | * bitmap_or(dst, src1, src2, nbits) *dst = *src1 | *src2 | |
41 | * bitmap_xor(dst, src1, src2, nbits) *dst = *src1 ^ *src2 | |
42 | * bitmap_andnot(dst, src1, src2, nbits) *dst = *src1 & ~(*src2) | |
43 | * bitmap_complement(dst, src, nbits) *dst = ~(*src) | |
44 | * bitmap_equal(src1, src2, nbits) Are *src1 and *src2 equal? | |
45 | * bitmap_intersects(src1, src2, nbits) Do *src1 and *src2 overlap? | |
46 | * bitmap_subset(src1, src2, nbits) Is *src1 a subset of *src2? | |
47 | * bitmap_empty(src, nbits) Are all bits zero in *src? | |
48 | * bitmap_full(src, nbits) Are all bits set in *src? | |
49 | * bitmap_weight(src, nbits) Hamming Weight: number set bits | |
50 | * bitmap_set(dst, pos, nbits) Set specified bit area | |
51 | * bitmap_clear(dst, pos, nbits) Clear specified bit area | |
52 | * bitmap_find_next_zero_area(buf, len, pos, n, mask) Find bit free area | |
53 | * bitmap_find_next_zero_area_off(buf, len, pos, n, mask) as above | |
54 | * bitmap_shift_right(dst, src, n, nbits) *dst = *src >> n | |
55 | * bitmap_shift_left(dst, src, n, nbits) *dst = *src << n | |
56 | * bitmap_remap(dst, src, old, new, nbits) *dst = map(old, new)(src) | |
57 | * bitmap_bitremap(oldbit, old, new, nbits) newbit = map(old, new)(oldbit) | |
58 | * bitmap_onto(dst, orig, relmap, nbits) *dst = orig relative to relmap | |
59 | * bitmap_fold(dst, orig, sz, nbits) dst bits = orig bits mod sz | |
60 | * bitmap_parse(buf, buflen, dst, nbits) Parse bitmap dst from kernel buf | |
61 | * bitmap_parse_user(ubuf, ulen, dst, nbits) Parse bitmap dst from user buf | |
62 | * bitmap_parselist(buf, dst, nbits) Parse bitmap dst from kernel buf | |
63 | * bitmap_parselist_user(buf, dst, nbits) Parse bitmap dst from user buf | |
64 | * bitmap_find_free_region(bitmap, bits, order) Find and allocate bit region | |
65 | * bitmap_release_region(bitmap, pos, order) Free specified bit region | |
66 | * bitmap_allocate_region(bitmap, pos, order) Allocate specified bit region | |
67 | * bitmap_from_u32array(dst, nbits, buf, nwords) *dst = *buf (nwords 32b words) | |
 * bitmap_to_u32array(buf, nwords, src, nbits) *buf = *src (nwords 32b words)
69 | * | |
70 | */ | |
71 | ||
72 | /** | |
73 | * DOC: bitmap bitops | |
74 | * | |
75 | * Also the following operations in asm/bitops.h apply to bitmaps.:: | |
76 | * | |
77 | * set_bit(bit, addr) *addr |= bit | |
78 | * clear_bit(bit, addr) *addr &= ~bit | |
79 | * change_bit(bit, addr) *addr ^= bit | |
80 | * test_bit(bit, addr) Is bit set in *addr? | |
81 | * test_and_set_bit(bit, addr) Set bit and return old value | |
82 | * test_and_clear_bit(bit, addr) Clear bit and return old value | |
83 | * test_and_change_bit(bit, addr) Change bit and return old value | |
84 | * find_first_zero_bit(addr, nbits) Position first zero bit in *addr | |
85 | * find_first_bit(addr, nbits) Position first set bit in *addr | |
86 | * find_next_zero_bit(addr, nbits, bit) Position next zero bit in *addr >= bit | |
87 | * find_next_bit(addr, nbits, bit) Position next set bit in *addr >= bit | |
88 | * | |
89 | */ | |
90 | ||
91 | /** | |
92 | * DOC: declare bitmap | |
93 | * The DECLARE_BITMAP(name,bits) macro, in linux/types.h, can be used | |
94 | * to declare an array named 'name' of just enough unsigned longs to | |
95 | * contain all bit positions from 0 to 'bits' - 1. | |
96 | */ | |
97 | ||
98 | /* | |
99 | * lib/bitmap.c provides these functions: | |
100 | */ | |
101 | ||
102 | extern int __bitmap_empty(const unsigned long *bitmap, unsigned int nbits); | |
103 | extern int __bitmap_full(const unsigned long *bitmap, unsigned int nbits); | |
104 | extern int __bitmap_equal(const unsigned long *bitmap1, | |
105 | const unsigned long *bitmap2, unsigned int nbits); | |
106 | extern void __bitmap_complement(unsigned long *dst, const unsigned long *src, | |
107 | unsigned int nbits); | |
108 | extern void __bitmap_shift_right(unsigned long *dst, const unsigned long *src, | |
109 | unsigned int shift, unsigned int nbits); | |
110 | extern void __bitmap_shift_left(unsigned long *dst, const unsigned long *src, | |
111 | unsigned int shift, unsigned int nbits); | |
112 | extern int __bitmap_and(unsigned long *dst, const unsigned long *bitmap1, | |
113 | const unsigned long *bitmap2, unsigned int nbits); | |
114 | extern void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1, | |
115 | const unsigned long *bitmap2, unsigned int nbits); | |
116 | extern void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1, | |
117 | const unsigned long *bitmap2, unsigned int nbits); | |
118 | extern int __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1, | |
119 | const unsigned long *bitmap2, unsigned int nbits); | |
120 | extern int __bitmap_intersects(const unsigned long *bitmap1, | |
121 | const unsigned long *bitmap2, unsigned int nbits); | |
122 | extern int __bitmap_subset(const unsigned long *bitmap1, | |
123 | const unsigned long *bitmap2, unsigned int nbits); | |
124 | extern int __bitmap_weight(const unsigned long *bitmap, unsigned int nbits); | |
125 | extern void __bitmap_set(unsigned long *map, unsigned int start, int len); | |
126 | extern void __bitmap_clear(unsigned long *map, unsigned int start, int len); | |
127 | ||
128 | extern unsigned long bitmap_find_next_zero_area_off(unsigned long *map, | |
129 | unsigned long size, | |
130 | unsigned long start, | |
131 | unsigned int nr, | |
132 | unsigned long align_mask, | |
133 | unsigned long align_offset); | |
134 | ||
135 | /** | |
136 | * bitmap_find_next_zero_area - find a contiguous aligned zero area | |
137 | * @map: The address to base the search on | |
138 | * @size: The bitmap size in bits | |
139 | * @start: The bitnumber to start searching at | |
140 | * @nr: The number of zeroed bits we're looking for | |
141 | * @align_mask: Alignment mask for zero area | |
142 | * | |
143 | * The @align_mask should be one less than a power of 2; the effect is that | |
144 | * the bit offset of all zero areas this function finds is multiples of that | |
145 | * power of 2. A @align_mask of 0 means no alignment is required. | |
146 | */ | |
147 | static inline unsigned long | |
148 | bitmap_find_next_zero_area(unsigned long *map, | |
149 | unsigned long size, | |
150 | unsigned long start, | |
151 | unsigned int nr, | |
152 | unsigned long align_mask) | |
153 | { | |
154 | return bitmap_find_next_zero_area_off(map, size, start, nr, | |
155 | align_mask, 0); | |
156 | } | |
157 | ||
158 | extern int __bitmap_parse(const char *buf, unsigned int buflen, int is_user, | |
159 | unsigned long *dst, int nbits); | |
160 | extern int bitmap_parse_user(const char __user *ubuf, unsigned int ulen, | |
161 | unsigned long *dst, int nbits); | |
162 | extern int bitmap_parselist(const char *buf, unsigned long *maskp, | |
163 | int nmaskbits); | |
164 | extern int bitmap_parselist_user(const char __user *ubuf, unsigned int ulen, | |
165 | unsigned long *dst, int nbits); | |
166 | extern void bitmap_remap(unsigned long *dst, const unsigned long *src, | |
167 | const unsigned long *old, const unsigned long *new, unsigned int nbits); | |
168 | extern int bitmap_bitremap(int oldbit, | |
169 | const unsigned long *old, const unsigned long *new, int bits); | |
170 | extern void bitmap_onto(unsigned long *dst, const unsigned long *orig, | |
171 | const unsigned long *relmap, unsigned int bits); | |
172 | extern void bitmap_fold(unsigned long *dst, const unsigned long *orig, | |
173 | unsigned int sz, unsigned int nbits); | |
174 | extern int bitmap_find_free_region(unsigned long *bitmap, unsigned int bits, int order); | |
175 | extern void bitmap_release_region(unsigned long *bitmap, unsigned int pos, int order); | |
176 | extern int bitmap_allocate_region(unsigned long *bitmap, unsigned int pos, int order); | |
177 | extern unsigned int bitmap_from_u32array(unsigned long *bitmap, | |
178 | unsigned int nbits, | |
179 | const u32 *buf, | |
180 | unsigned int nwords); | |
181 | extern unsigned int bitmap_to_u32array(u32 *buf, | |
182 | unsigned int nwords, | |
183 | const unsigned long *bitmap, | |
184 | unsigned int nbits); | |
185 | #ifdef __BIG_ENDIAN | |
186 | extern void bitmap_copy_le(unsigned long *dst, const unsigned long *src, unsigned int nbits); | |
187 | #else | |
188 | #define bitmap_copy_le bitmap_copy | |
189 | #endif | |
190 | extern unsigned int bitmap_ord_to_pos(const unsigned long *bitmap, unsigned int ord, unsigned int nbits); | |
191 | extern int bitmap_print_to_pagebuf(bool list, char *buf, | |
192 | const unsigned long *maskp, int nmaskbits); | |
193 | ||
194 | #define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) & (BITS_PER_LONG - 1))) | |
195 | #define BITMAP_LAST_WORD_MASK(nbits) (~0UL >> (-(nbits) & (BITS_PER_LONG - 1))) | |
196 | ||
197 | /* | |
198 | * The static inlines below do not handle constant nbits==0 correctly, | |
199 | * so make such users (should any ever turn up) call the out-of-line | |
200 | * versions. | |
201 | */ | |
202 | #define small_const_nbits(nbits) \ | |
203 | (__builtin_constant_p(nbits) && (nbits) <= BITS_PER_LONG && (nbits) > 0) | |
204 | ||
205 | static inline void bitmap_zero(unsigned long *dst, unsigned int nbits) | |
206 | { | |
207 | if (small_const_nbits(nbits)) | |
208 | *dst = 0UL; | |
209 | else { | |
210 | unsigned int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long); | |
211 | memset(dst, 0, len); | |
212 | } | |
213 | } | |
214 | ||
215 | static inline void bitmap_fill(unsigned long *dst, unsigned int nbits) | |
216 | { | |
217 | unsigned int nlongs = BITS_TO_LONGS(nbits); | |
218 | if (!small_const_nbits(nbits)) { | |
219 | unsigned int len = (nlongs - 1) * sizeof(unsigned long); | |
220 | memset(dst, 0xff, len); | |
221 | } | |
222 | dst[nlongs - 1] = BITMAP_LAST_WORD_MASK(nbits); | |
223 | } | |
224 | ||
225 | static inline void bitmap_copy(unsigned long *dst, const unsigned long *src, | |
226 | unsigned int nbits) | |
227 | { | |
228 | if (small_const_nbits(nbits)) | |
229 | *dst = *src; | |
230 | else { | |
231 | unsigned int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long); | |
232 | memcpy(dst, src, len); | |
233 | } | |
234 | } | |
235 | ||
236 | static inline int bitmap_and(unsigned long *dst, const unsigned long *src1, | |
237 | const unsigned long *src2, unsigned int nbits) | |
238 | { | |
239 | if (small_const_nbits(nbits)) | |
240 | return (*dst = *src1 & *src2 & BITMAP_LAST_WORD_MASK(nbits)) != 0; | |
241 | return __bitmap_and(dst, src1, src2, nbits); | |
242 | } | |
243 | ||
244 | static inline void bitmap_or(unsigned long *dst, const unsigned long *src1, | |
245 | const unsigned long *src2, unsigned int nbits) | |
246 | { | |
247 | if (small_const_nbits(nbits)) | |
248 | *dst = *src1 | *src2; | |
249 | else | |
250 | __bitmap_or(dst, src1, src2, nbits); | |
251 | } | |
252 | ||
253 | static inline void bitmap_xor(unsigned long *dst, const unsigned long *src1, | |
254 | const unsigned long *src2, unsigned int nbits) | |
255 | { | |
256 | if (small_const_nbits(nbits)) | |
257 | *dst = *src1 ^ *src2; | |
258 | else | |
259 | __bitmap_xor(dst, src1, src2, nbits); | |
260 | } | |
261 | ||
262 | static inline int bitmap_andnot(unsigned long *dst, const unsigned long *src1, | |
263 | const unsigned long *src2, unsigned int nbits) | |
264 | { | |
265 | if (small_const_nbits(nbits)) | |
266 | return (*dst = *src1 & ~(*src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0; | |
267 | return __bitmap_andnot(dst, src1, src2, nbits); | |
268 | } | |
269 | ||
270 | static inline void bitmap_complement(unsigned long *dst, const unsigned long *src, | |
271 | unsigned int nbits) | |
272 | { | |
273 | if (small_const_nbits(nbits)) | |
274 | *dst = ~(*src); | |
275 | else | |
276 | __bitmap_complement(dst, src, nbits); | |
277 | } | |
278 | ||
279 | #ifdef __LITTLE_ENDIAN | |
280 | #define BITMAP_MEM_ALIGNMENT 8 | |
281 | #else | |
282 | #define BITMAP_MEM_ALIGNMENT (8 * sizeof(unsigned long)) | |
283 | #endif | |
284 | #define BITMAP_MEM_MASK (BITMAP_MEM_ALIGNMENT - 1) | |
285 | ||
286 | static inline int bitmap_equal(const unsigned long *src1, | |
287 | const unsigned long *src2, unsigned int nbits) | |
288 | { | |
289 | if (small_const_nbits(nbits)) | |
290 | return !((*src1 ^ *src2) & BITMAP_LAST_WORD_MASK(nbits)); | |
291 | if (__builtin_constant_p(nbits & BITMAP_MEM_MASK) && | |
292 | IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT)) | |
293 | return !memcmp(src1, src2, nbits / 8); | |
294 | return __bitmap_equal(src1, src2, nbits); | |
295 | } | |
296 | ||
297 | static inline int bitmap_intersects(const unsigned long *src1, | |
298 | const unsigned long *src2, unsigned int nbits) | |
299 | { | |
300 | if (small_const_nbits(nbits)) | |
301 | return ((*src1 & *src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0; | |
302 | else | |
303 | return __bitmap_intersects(src1, src2, nbits); | |
304 | } | |
305 | ||
306 | static inline int bitmap_subset(const unsigned long *src1, | |
307 | const unsigned long *src2, unsigned int nbits) | |
308 | { | |
309 | if (small_const_nbits(nbits)) | |
310 | return ! ((*src1 & ~(*src2)) & BITMAP_LAST_WORD_MASK(nbits)); | |
311 | else | |
312 | return __bitmap_subset(src1, src2, nbits); | |
313 | } | |
314 | ||
315 | static inline int bitmap_empty(const unsigned long *src, unsigned nbits) | |
316 | { | |
317 | if (small_const_nbits(nbits)) | |
318 | return ! (*src & BITMAP_LAST_WORD_MASK(nbits)); | |
319 | ||
320 | return find_first_bit(src, nbits) == nbits; | |
321 | } | |
322 | ||
323 | static inline int bitmap_full(const unsigned long *src, unsigned int nbits) | |
324 | { | |
325 | if (small_const_nbits(nbits)) | |
326 | return ! (~(*src) & BITMAP_LAST_WORD_MASK(nbits)); | |
327 | ||
328 | return find_first_zero_bit(src, nbits) == nbits; | |
329 | } | |
330 | ||
331 | static __always_inline int bitmap_weight(const unsigned long *src, unsigned int nbits) | |
332 | { | |
333 | if (small_const_nbits(nbits)) | |
334 | return hweight_long(*src & BITMAP_LAST_WORD_MASK(nbits)); | |
335 | return __bitmap_weight(src, nbits); | |
336 | } | |
337 | ||
338 | static __always_inline void bitmap_set(unsigned long *map, unsigned int start, | |
339 | unsigned int nbits) | |
340 | { | |
341 | if (__builtin_constant_p(nbits) && nbits == 1) | |
342 | __set_bit(start, map); | |
343 | else if (__builtin_constant_p(start & BITMAP_MEM_MASK) && | |
344 | IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) && | |
345 | __builtin_constant_p(nbits & BITMAP_MEM_MASK) && | |
346 | IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT)) | |
347 | memset((char *)map + start / 8, 0xff, nbits / 8); | |
348 | else | |
349 | __bitmap_set(map, start, nbits); | |
350 | } | |
351 | ||
352 | static __always_inline void bitmap_clear(unsigned long *map, unsigned int start, | |
353 | unsigned int nbits) | |
354 | { | |
355 | if (__builtin_constant_p(nbits) && nbits == 1) | |
356 | __clear_bit(start, map); | |
357 | else if (__builtin_constant_p(start & BITMAP_MEM_MASK) && | |
358 | IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) && | |
359 | __builtin_constant_p(nbits & BITMAP_MEM_MASK) && | |
360 | IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT)) | |
361 | memset((char *)map + start / 8, 0, nbits / 8); | |
362 | else | |
363 | __bitmap_clear(map, start, nbits); | |
364 | } | |
365 | ||
366 | static inline void bitmap_shift_right(unsigned long *dst, const unsigned long *src, | |
367 | unsigned int shift, unsigned int nbits) | |
368 | { | |
369 | if (small_const_nbits(nbits)) | |
370 | *dst = (*src & BITMAP_LAST_WORD_MASK(nbits)) >> shift; | |
371 | else | |
372 | __bitmap_shift_right(dst, src, shift, nbits); | |
373 | } | |
374 | ||
375 | static inline void bitmap_shift_left(unsigned long *dst, const unsigned long *src, | |
376 | unsigned int shift, unsigned int nbits) | |
377 | { | |
378 | if (small_const_nbits(nbits)) | |
379 | *dst = (*src << shift) & BITMAP_LAST_WORD_MASK(nbits); | |
380 | else | |
381 | __bitmap_shift_left(dst, src, shift, nbits); | |
382 | } | |
383 | ||
384 | static inline int bitmap_parse(const char *buf, unsigned int buflen, | |
385 | unsigned long *maskp, int nmaskbits) | |
386 | { | |
387 | return __bitmap_parse(buf, buflen, 0, maskp, nmaskbits); | |
388 | } | |
389 | ||
390 | /** | |
391 | * BITMAP_FROM_U64() - Represent u64 value in the format suitable for bitmap. | |
392 | * @n: u64 value | |
393 | * | |
394 | * Linux bitmaps are internally arrays of unsigned longs, i.e. 32-bit | |
395 | * integers in 32-bit environment, and 64-bit integers in 64-bit one. | |
396 | * | |
397 | * There are four combinations of endianness and length of the word in linux | |
398 | * ABIs: LE64, BE64, LE32 and BE32. | |
399 | * | |
400 | * On 64-bit kernels 64-bit LE and BE numbers are naturally ordered in | |
401 | * bitmaps and therefore don't require any special handling. | |
402 | * | |
403 | * On 32-bit kernels 32-bit LE ABI orders lo word of 64-bit number in memory | |
404 | * prior to hi, and 32-bit BE orders hi word prior to lo. The bitmap on the | |
405 | * other hand is represented as an array of 32-bit words and the position of | |
406 | * bit N may therefore be calculated as: word #(N/32) and bit #(N%32) in that | |
407 | * word. For example, bit #42 is located at 10th position of 2nd word. | |
408 | * It matches 32-bit LE ABI, and we can simply let the compiler store 64-bit | |
409 | * values in memory as it usually does. But for BE we need to swap hi and lo | |
410 | * words manually. | |
411 | * | |
412 | * With all that, the macro BITMAP_FROM_U64() does explicit reordering of hi and | |
413 | * lo parts of u64. For LE32 it does nothing, and for BE environment it swaps | |
414 | * hi and lo words, as is expected by bitmap. | |
415 | */ | |
416 | #if __BITS_PER_LONG == 64 | |
417 | #define BITMAP_FROM_U64(n) (n) | |
418 | #else | |
419 | #define BITMAP_FROM_U64(n) ((unsigned long) ((u64)(n) & ULONG_MAX)), \ | |
420 | ((unsigned long) ((u64)(n) >> 32)) | |
421 | #endif | |
422 | ||
423 | /** | |
424 | * bitmap_from_u64 - Check and swap words within u64. | |
425 | * @mask: source bitmap | |
426 | * @dst: destination bitmap | |
427 | * | |
428 | * In 32-bit Big Endian kernel, when using ``(u32 *)(&val)[*]`` | |
429 | * to read u64 mask, we will get the wrong word. | |
430 | * That is ``(u32 *)(&val)[0]`` gets the upper 32 bits, | |
431 | * but we expect the lower 32-bits of u64. | |
432 | */ | |
433 | static inline void bitmap_from_u64(unsigned long *dst, u64 mask) | |
434 | { | |
435 | dst[0] = mask & ULONG_MAX; | |
436 | ||
437 | if (sizeof(mask) > sizeof(unsigned long)) | |
438 | dst[1] = mask >> 32; | |
439 | } | |
440 | ||
441 | #endif /* __ASSEMBLY__ */ | |
442 | ||
443 | #endif /* __LINUX_BITMAP_H */ |