1 /*
2 * lib/bitmap.c
3 * Helper functions for bitmap.h.
4 *
5 * This source code is licensed under the GNU General Public License,
6 * Version 2. See the file COPYING for more details.
7 */
8 #include <linux/export.h>
9 #include <linux/thread_info.h>
10 #include <linux/ctype.h>
11 #include <linux/errno.h>
12 #include <linux/bitmap.h>
13 #include <linux/bitops.h>
14 #include <linux/bug.h>
15
16 #include <asm/page.h>
17 #include <asm/uaccess.h>
18
19 /*
20  * bitmaps provide an array of bits, implemented using an
21 * array of unsigned longs. The number of valid bits in a
22 * given bitmap does _not_ need to be an exact multiple of
23 * BITS_PER_LONG.
24 *
25 * The possible unused bits in the last, partially used word
26 * of a bitmap are 'don't care'. The implementation makes
27 * no particular effort to keep them zero. It ensures that
28 * their value will not affect the results of any operation.
29 * The bitmap operations that return Boolean (bitmap_empty,
30 * for example) or scalar (bitmap_weight, for example) results
31 * carefully filter out these unused bits from impacting their
32 * results.
33 *
34 * These operations actually hold to a slightly stronger rule:
35 * if you don't input any bitmaps to these ops that have some
36 * unused bits set, then they won't output any set unused bits
37 * in output bitmaps.
38 *
39 * The byte ordering of bitmaps is more natural on little
40 * endian architectures. See the big-endian headers
41 * include/asm-ppc64/bitops.h and include/asm-s390/bitops.h
42 * for the best explanations of this ordering.
43 */
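/*
 * Illustrative sketch (not part of the original source): on a 64-bit
 * machine a 70-bit bitmap occupies two unsigned longs, and the top 58
 * bits of the second word are "don't care".  The Boolean/scalar ops
 * filter those bits out, so the following reports a weight of 70 no
 * matter what the unused bits of map[1] happen to hold:
 *
 *	DECLARE_BITMAP(map, 70);
 *
 *	bitmap_zero(map, 70);
 *	bitmap_set(map, 0, 70);
 *	pr_info("weight = %d\n", bitmap_weight(map, 70));
 */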
44
45 int __bitmap_equal(const unsigned long *bitmap1,
46 const unsigned long *bitmap2, unsigned int bits)
47 {
48 unsigned int k, lim = bits/BITS_PER_LONG;
49 for (k = 0; k < lim; ++k)
50 if (bitmap1[k] != bitmap2[k])
51 return 0;
52
53 if (bits % BITS_PER_LONG)
54 if ((bitmap1[k] ^ bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
55 return 0;
56
57 return 1;
58 }
59 EXPORT_SYMBOL(__bitmap_equal);
60
61 void __bitmap_complement(unsigned long *dst, const unsigned long *src, unsigned int bits)
62 {
63 unsigned int k, lim = bits/BITS_PER_LONG;
64 for (k = 0; k < lim; ++k)
65 dst[k] = ~src[k];
66
67 if (bits % BITS_PER_LONG)
68 dst[k] = ~src[k];
69 }
70 EXPORT_SYMBOL(__bitmap_complement);
71
72 /**
73 * __bitmap_shift_right - logical right shift of the bits in a bitmap
74 * @dst : destination bitmap
75 * @src : source bitmap
76 * @shift : shift by this many bits
77 * @nbits : bitmap size, in bits
78 *
79 * Shifting right (dividing) means moving bits in the MS -> LS bit
80 * direction. Zeros are fed into the vacated MS positions and the
81 * LS bits shifted off the bottom are lost.
82 */
83 void __bitmap_shift_right(unsigned long *dst, const unsigned long *src,
84 unsigned shift, unsigned nbits)
85 {
86 unsigned k, lim = BITS_TO_LONGS(nbits);
87 unsigned off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
88 unsigned long mask = BITMAP_LAST_WORD_MASK(nbits);
89 for (k = 0; off + k < lim; ++k) {
90 unsigned long upper, lower;
91
92 /*
93 * If shift is not word aligned, take lower rem bits of
94 * word above and make them the top rem bits of result.
95 */
96 if (!rem || off + k + 1 >= lim)
97 upper = 0;
98 else {
99 upper = src[off + k + 1];
100 if (off + k + 1 == lim - 1)
101 upper &= mask;
102 upper <<= (BITS_PER_LONG - rem);
103 }
104 lower = src[off + k];
105 if (off + k == lim - 1)
106 lower &= mask;
107 lower >>= rem;
108 dst[k] = lower | upper;
109 }
110 if (off)
111 memset(&dst[lim - off], 0, off*sizeof(unsigned long));
112 }
113 EXPORT_SYMBOL(__bitmap_shift_right);
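/*
 * Usage sketch (illustrative, not in the original source): shifting a
 * small bitmap right via the bitmap_shift_right() wrapper in bitmap.h,
 * which falls through to __bitmap_shift_right() for multiword bitmaps.
 *
 *	DECLARE_BITMAP(src, 16);
 *	DECLARE_BITMAP(dst, 16);
 *
 *	bitmap_zero(src, 16);
 *	bitmap_set(src, 12, 4);			(bits 12..15 set)
 *	bitmap_shift_right(dst, src, 4, 16);	(dst now has bits 8..11)
 */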
114
115
116 /**
117 * __bitmap_shift_left - logical left shift of the bits in a bitmap
118 * @dst : destination bitmap
119 * @src : source bitmap
120 * @shift : shift by this many bits
121 * @nbits : bitmap size, in bits
122 *
123 * Shifting left (multiplying) means moving bits in the LS -> MS
124 * direction. Zeros are fed into the vacated LS bit positions
125 * and those MS bits shifted off the top are lost.
126 */
127
128 void __bitmap_shift_left(unsigned long *dst, const unsigned long *src,
129 unsigned int shift, unsigned int nbits)
130 {
131 int k;
132 unsigned int lim = BITS_TO_LONGS(nbits);
133 unsigned int off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
134 for (k = lim - off - 1; k >= 0; --k) {
135 unsigned long upper, lower;
136
137 /*
138 * If shift is not word aligned, take upper rem bits of
139 * word below and make them the bottom rem bits of result.
140 */
141 if (rem && k > 0)
142 lower = src[k - 1] >> (BITS_PER_LONG - rem);
143 else
144 lower = 0;
145 upper = src[k] << rem;
146 dst[k + off] = lower | upper;
147 }
148 if (off)
149 memset(dst, 0, off*sizeof(unsigned long));
150 }
151 EXPORT_SYMBOL(__bitmap_shift_left);
152
153 int __bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
154 const unsigned long *bitmap2, unsigned int bits)
155 {
156 unsigned int k;
157 unsigned int lim = bits/BITS_PER_LONG;
158 unsigned long result = 0;
159
160 for (k = 0; k < lim; k++)
161 result |= (dst[k] = bitmap1[k] & bitmap2[k]);
162 if (bits % BITS_PER_LONG)
163 result |= (dst[k] = bitmap1[k] & bitmap2[k] &
164 BITMAP_LAST_WORD_MASK(bits));
165 return result != 0;
166 }
167 EXPORT_SYMBOL(__bitmap_and);
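/*
 * Usage sketch (illustrative, not in the original source; maska and
 * maskb are assumed to be 128-bit bitmaps declared elsewhere): the
 * non-zero return value of the AND answers "do these two masks overlap,
 * and what is the overlap?" in a single call via the bitmap_and()
 * wrapper:
 *
 *	DECLARE_BITMAP(overlap, 128);
 *
 *	if (bitmap_and(overlap, maska, maskb, 128))
 *		pr_info("first common bit: %lu\n",
 *			find_first_bit(overlap, 128));
 */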
168
169 void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
170 const unsigned long *bitmap2, unsigned int bits)
171 {
172 unsigned int k;
173 unsigned int nr = BITS_TO_LONGS(bits);
174
175 for (k = 0; k < nr; k++)
176 dst[k] = bitmap1[k] | bitmap2[k];
177 }
178 EXPORT_SYMBOL(__bitmap_or);
179
180 void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
181 const unsigned long *bitmap2, unsigned int bits)
182 {
183 unsigned int k;
184 unsigned int nr = BITS_TO_LONGS(bits);
185
186 for (k = 0; k < nr; k++)
187 dst[k] = bitmap1[k] ^ bitmap2[k];
188 }
189 EXPORT_SYMBOL(__bitmap_xor);
190
191 int __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
192 const unsigned long *bitmap2, unsigned int bits)
193 {
194 unsigned int k;
195 unsigned int lim = bits/BITS_PER_LONG;
196 unsigned long result = 0;
197
198 for (k = 0; k < lim; k++)
199 result |= (dst[k] = bitmap1[k] & ~bitmap2[k]);
200 if (bits % BITS_PER_LONG)
201 result |= (dst[k] = bitmap1[k] & ~bitmap2[k] &
202 BITMAP_LAST_WORD_MASK(bits));
203 return result != 0;
204 }
205 EXPORT_SYMBOL(__bitmap_andnot);
206
207 int __bitmap_intersects(const unsigned long *bitmap1,
208 const unsigned long *bitmap2, unsigned int bits)
209 {
210 unsigned int k, lim = bits/BITS_PER_LONG;
211 for (k = 0; k < lim; ++k)
212 if (bitmap1[k] & bitmap2[k])
213 return 1;
214
215 if (bits % BITS_PER_LONG)
216 if ((bitmap1[k] & bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
217 return 1;
218 return 0;
219 }
220 EXPORT_SYMBOL(__bitmap_intersects);
221
222 int __bitmap_subset(const unsigned long *bitmap1,
223 const unsigned long *bitmap2, unsigned int bits)
224 {
225 unsigned int k, lim = bits/BITS_PER_LONG;
226 for (k = 0; k < lim; ++k)
227 if (bitmap1[k] & ~bitmap2[k])
228 return 0;
229
230 if (bits % BITS_PER_LONG)
231 if ((bitmap1[k] & ~bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
232 return 0;
233 return 1;
234 }
235 EXPORT_SYMBOL(__bitmap_subset);
236
237 int __bitmap_weight(const unsigned long *bitmap, unsigned int bits)
238 {
239 unsigned int k, lim = bits/BITS_PER_LONG;
240 int w = 0;
241
242 for (k = 0; k < lim; k++)
243 w += hweight_long(bitmap[k]);
244
245 if (bits % BITS_PER_LONG)
246 w += hweight_long(bitmap[k] & BITMAP_LAST_WORD_MASK(bits));
247
248 return w;
249 }
250 EXPORT_SYMBOL(__bitmap_weight);
251
252 void bitmap_set(unsigned long *map, unsigned int start, int len)
253 {
254 unsigned long *p = map + BIT_WORD(start);
255 const unsigned int size = start + len;
256 int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
257 unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);
258
259 while (len - bits_to_set >= 0) {
260 *p |= mask_to_set;
261 len -= bits_to_set;
262 bits_to_set = BITS_PER_LONG;
263 mask_to_set = ~0UL;
264 p++;
265 }
266 if (len) {
267 mask_to_set &= BITMAP_LAST_WORD_MASK(size);
268 *p |= mask_to_set;
269 }
270 }
271 EXPORT_SYMBOL(bitmap_set);
272
273 void bitmap_clear(unsigned long *map, unsigned int start, int len)
274 {
275 unsigned long *p = map + BIT_WORD(start);
276 const unsigned int size = start + len;
277 int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
278 unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);
279
280 while (len - bits_to_clear >= 0) {
281 *p &= ~mask_to_clear;
282 len -= bits_to_clear;
283 bits_to_clear = BITS_PER_LONG;
284 mask_to_clear = ~0UL;
285 p++;
286 }
287 if (len) {
288 mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
289 *p &= ~mask_to_clear;
290 }
291 }
292 EXPORT_SYMBOL(bitmap_clear);
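/*
 * Usage sketch (illustrative, not in the original source): bitmap_set()
 * and bitmap_clear() operate on an arbitrary [start, start + len) run of
 * bits, crossing word boundaries as needed:
 *
 *	DECLARE_BITMAP(map, 256);
 *
 *	bitmap_zero(map, 256);
 *	bitmap_set(map, 60, 10);	(bits 60..69, spans two words)
 *	bitmap_clear(map, 64, 2);	(bits 64 and 65 back to zero)
 */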
293
294 /**
295 * bitmap_find_next_zero_area_off - find a contiguous aligned zero area
296 * @map: The address to base the search on
297 * @size: The bitmap size in bits
298 * @start: The bitnumber to start searching at
299 * @nr: The number of zeroed bits we're looking for
300 * @align_mask: Alignment mask for zero area
301 * @align_offset: Alignment offset for zero area.
302 *
303 * The @align_mask should be one less than a power of 2; the effect is that
304 * the bit offset of all zero areas this function finds plus @align_offset
305  * is a multiple of that power of 2.
306 */
307 unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
308 unsigned long size,
309 unsigned long start,
310 unsigned int nr,
311 unsigned long align_mask,
312 unsigned long align_offset)
313 {
314 unsigned long index, end, i;
315 again:
316 index = find_next_zero_bit(map, size, start);
317
318 /* Align allocation */
319 index = __ALIGN_MASK(index + align_offset, align_mask) - align_offset;
320
321 end = index + nr;
322 if (end > size)
323 return end;
324 i = find_next_bit(map, end, index);
325 if (i < end) {
326 start = i + 1;
327 goto again;
328 }
329 return index;
330 }
331 EXPORT_SYMBOL(bitmap_find_next_zero_area_off);
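/*
 * Usage sketch (illustrative, not in the original source; map is assumed
 * to be a 256-bit bitmap): the common bitmap_find_next_zero_area()
 * wrapper in bitmap.h passes an align_offset of 0; an align_mask of 7
 * asks for an 8-bit-aligned free area:
 *
 *	unsigned long pos;
 *
 *	pos = bitmap_find_next_zero_area(map, 256, 0, 16, 7);
 *	if (pos >= 256)
 *		return -ENOMEM;		(no aligned 16-bit hole found)
 *	bitmap_set(map, pos, 16);
 */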
332
333 /*
334 * Bitmap printing & parsing functions: first version by Nadia Yvette Chambers,
335 * second version by Paul Jackson, third by Joe Korty.
336 */
337
338 #define CHUNKSZ 32
339 #define nbits_to_hold_value(val) fls(val)
340 #define BASEDEC 10 /* fancier cpuset lists input in decimal */
341
342 /**
343 * __bitmap_parse - convert an ASCII hex string into a bitmap.
344 * @buf: pointer to buffer containing string.
345 * @buflen: buffer size in bytes. If string is smaller than this
346 * then it must be terminated with a \0.
347 * @is_user: location of buffer, 0 indicates kernel space
348 * @maskp: pointer to bitmap array that will contain result.
349 * @nmaskbits: size of bitmap, in bits.
350 *
351 * Commas group hex digits into chunks. Each chunk defines exactly 32
352 * bits of the resultant bitmask. No chunk may specify a value larger
353 * than 32 bits (%-EOVERFLOW), and if a chunk specifies a smaller value
354 * then leading 0-bits are prepended. %-EINVAL is returned for illegal
355 * characters and for grouping errors such as "1,,5", ",44", "," and "".
356  * Leading and trailing whitespace is accepted, but not embedded whitespace.
357 */
358 int __bitmap_parse(const char *buf, unsigned int buflen,
359 int is_user, unsigned long *maskp,
360 int nmaskbits)
361 {
362 int c, old_c, totaldigits, ndigits, nchunks, nbits;
363 u32 chunk;
364 const char __user __force *ubuf = (const char __user __force *)buf;
365
366 bitmap_zero(maskp, nmaskbits);
367
368 nchunks = nbits = totaldigits = c = 0;
369 do {
370 chunk = 0;
371 ndigits = totaldigits;
372
373 /* Get the next chunk of the bitmap */
374 while (buflen) {
375 old_c = c;
376 if (is_user) {
377 if (__get_user(c, ubuf++))
378 return -EFAULT;
379 }
380 else
381 c = *buf++;
382 buflen--;
383 if (isspace(c))
384 continue;
385
386 /*
387 * If the last character was a space and the current
388 * character isn't '\0', we've got embedded whitespace.
389 * This is a no-no, so throw an error.
390 */
391 if (totaldigits && c && isspace(old_c))
392 return -EINVAL;
393
394 /* A '\0' or a ',' signal the end of the chunk */
395 if (c == '\0' || c == ',')
396 break;
397
398 if (!isxdigit(c))
399 return -EINVAL;
400
401 /*
402 * Make sure there are at least 4 free bits in 'chunk'.
403 * If not, this hexdigit will overflow 'chunk', so
404 * throw an error.
405 */
406 if (chunk & ~((1UL << (CHUNKSZ - 4)) - 1))
407 return -EOVERFLOW;
408
409 chunk = (chunk << 4) | hex_to_bin(c);
410 totaldigits++;
411 }
412 if (ndigits == totaldigits)
413 return -EINVAL;
414 if (nchunks == 0 && chunk == 0)
415 continue;
416
417 __bitmap_shift_left(maskp, maskp, CHUNKSZ, nmaskbits);
418 *maskp |= chunk;
419 nchunks++;
420 nbits += (nchunks == 1) ? nbits_to_hold_value(chunk) : CHUNKSZ;
421 if (nbits > nmaskbits)
422 return -EOVERFLOW;
423 } while (buflen && c == ',');
424
425 return 0;
426 }
427 EXPORT_SYMBOL(__bitmap_parse);
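/*
 * Input format sketch (illustrative, not in the original source): each
 * comma-separated chunk is up to 32 hex bits, most-significant chunk
 * first, so "3,fffc0000" sets bits 18..33 of a 64-bit mask when parsed
 * through the bitmap_parse() wrapper:
 *
 *	DECLARE_BITMAP(mask, 64);
 *	int err;
 *
 *	err = bitmap_parse("3,fffc0000", 10, mask, 64);
 */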
428
429 /**
430 * bitmap_parse_user - convert an ASCII hex string in a user buffer into a bitmap
431 *
432 * @ubuf: pointer to user buffer containing string.
433 * @ulen: buffer size in bytes. If string is smaller than this
434 * then it must be terminated with a \0.
435 * @maskp: pointer to bitmap array that will contain result.
436 * @nmaskbits: size of bitmap, in bits.
437 *
438 * Wrapper for __bitmap_parse(), providing it with user buffer.
439 *
440 * We cannot have this as an inline function in bitmap.h because it needs
441 * linux/uaccess.h to get the access_ok() declaration and this causes
442 * cyclic dependencies.
443 */
444 int bitmap_parse_user(const char __user *ubuf,
445 unsigned int ulen, unsigned long *maskp,
446 int nmaskbits)
447 {
448 if (!access_ok(VERIFY_READ, ubuf, ulen))
449 return -EFAULT;
450 return __bitmap_parse((const char __force *)ubuf,
451 ulen, 1, maskp, nmaskbits);
452
453 }
454 EXPORT_SYMBOL(bitmap_parse_user);
455
456 /**
457 * bitmap_print_to_pagebuf - convert bitmap to list or hex format ASCII string
458  * @list: indicates whether the bitmap must be formatted as a list
459 * @buf: page aligned buffer into which string is placed
460 * @maskp: pointer to bitmap to convert
461 * @nmaskbits: size of bitmap, in bits
462 *
463  * Output format is a comma-separated list of decimal numbers and
464  * ranges if @list is true, or hex digits grouped into comma-separated
465  * sets of 8 digits per set. Returns the number of characters written to @buf.
466 *
467 * It is assumed that @buf is a pointer into a PAGE_SIZE area and that
468 * sufficient storage remains at @buf to accommodate the
469 * bitmap_print_to_pagebuf() output.
470 */
471 int bitmap_print_to_pagebuf(bool list, char *buf, const unsigned long *maskp,
472 int nmaskbits)
473 {
474 ptrdiff_t len = PTR_ALIGN(buf + PAGE_SIZE - 1, PAGE_SIZE) - buf;
475 int n = 0;
476
477 if (len > 1)
478 n = list ? scnprintf(buf, len, "%*pbl\n", nmaskbits, maskp) :
479 scnprintf(buf, len, "%*pb\n", nmaskbits, maskp);
480 return n;
481 }
482 EXPORT_SYMBOL(bitmap_print_to_pagebuf);
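/*
 * Usage sketch (illustrative, not in the original source): a typical
 * sysfs ->show() handler hands its PAGE_SIZE buffer straight to this
 * helper; the "dev_mask" bitmap is a hypothetical stand-in for whatever
 * the driver tracks:
 *
 *	static ssize_t mask_show(struct device *dev,
 *				 struct device_attribute *attr, char *buf)
 *	{
 *		return bitmap_print_to_pagebuf(true, buf, dev_mask, 64);
 *	}
 */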
483
484 /**
485 * __bitmap_parselist - convert list format ASCII string to bitmap
486 * @buf: read nul-terminated user string from this buffer
487 * @buflen: buffer size in bytes. If string is smaller than this
488 * then it must be terminated with a \0.
489 * @is_user: location of buffer, 0 indicates kernel space
490 * @maskp: write resulting mask here
491 * @nmaskbits: number of bits in mask to be written
492 *
493 * Input format is a comma-separated list of decimal numbers and
494 * ranges. Consecutively set bits are shown as two hyphen-separated
495 * decimal numbers, the smallest and largest bit numbers set in
496 * the range.
497 *
498 * Returns 0 on success, -errno on invalid input strings.
499 * Error values:
500 * %-EINVAL: second number in range smaller than first
501 * %-EINVAL: invalid character in string
502 * %-ERANGE: bit number specified too large for mask
503 */
504 static int __bitmap_parselist(const char *buf, unsigned int buflen,
505 int is_user, unsigned long *maskp,
506 int nmaskbits)
507 {
508 unsigned a, b;
509 int c, old_c, totaldigits, ndigits;
510 const char __user __force *ubuf = (const char __user __force *)buf;
511 int at_start, in_range;
512
513 totaldigits = c = 0;
514 bitmap_zero(maskp, nmaskbits);
515 do {
516 at_start = 1;
517 in_range = 0;
518 a = b = 0;
519 ndigits = totaldigits;
520
521 /* Get the next cpu# or a range of cpu#'s */
522 while (buflen) {
523 old_c = c;
524 if (is_user) {
525 if (__get_user(c, ubuf++))
526 return -EFAULT;
527 } else
528 c = *buf++;
529 buflen--;
530 if (isspace(c))
531 continue;
532
533 /* A '\0' or a ',' signal the end of a cpu# or range */
534 if (c == '\0' || c == ',')
535 break;
536 /*
537 			 * whitespace between digits is not allowed,
538 			 * but leading or trailing whitespace is ok.
539 			 * When old_c is whitespace and totaldigits == ndigits,
540 			 * the whitespace is leading, which is allowed.
541 			 * Trailing whitespace never reaches this check: the
542 			 * next non-space character is ',' or '\0', so the
543 			 * break above exits the loop before we get here.
544 */
545 if ((totaldigits != ndigits) && isspace(old_c))
546 return -EINVAL;
547
548 if (c == '-') {
549 if (at_start || in_range)
550 return -EINVAL;
551 b = 0;
552 in_range = 1;
553 at_start = 1;
554 continue;
555 }
556
557 if (!isdigit(c))
558 return -EINVAL;
559
560 b = b * 10 + (c - '0');
561 if (!in_range)
562 a = b;
563 at_start = 0;
564 totaldigits++;
565 }
566 if (ndigits == totaldigits)
567 continue;
568 		/* if no digit follows '-', it's an error */
569 if (at_start && in_range)
570 return -EINVAL;
571 if (!(a <= b))
572 return -EINVAL;
573 if (b >= nmaskbits)
574 return -ERANGE;
575 while (a <= b) {
576 set_bit(a, maskp);
577 a++;
578 }
579 } while (buflen && c == ',');
580 return 0;
581 }
582
583 int bitmap_parselist(const char *bp, unsigned long *maskp, int nmaskbits)
584 {
585 char *nl = strchrnul(bp, '\n');
586 int len = nl - bp;
587
588 return __bitmap_parselist(bp, len, 0, maskp, nmaskbits);
589 }
590 EXPORT_SYMBOL(bitmap_parselist);
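/*
 * Input format sketch (illustrative, not in the original source): list
 * format is the one used by cpuset and IRQ affinity files, e.g.
 * "0-3,8,12-15" sets bits 0,1,2,3,8,12,13,14,15:
 *
 *	DECLARE_BITMAP(cpus, 32);
 *	int err;
 *
 *	err = bitmap_parselist("0-3,8,12-15", cpus, 32);
 */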
591
592
593 /**
594  * bitmap_parselist_user() - convert list format ASCII string in user buffer to bitmap
595 *
596 * @ubuf: pointer to user buffer containing string.
597 * @ulen: buffer size in bytes. If string is smaller than this
598 * then it must be terminated with a \0.
599 * @maskp: pointer to bitmap array that will contain result.
600 * @nmaskbits: size of bitmap, in bits.
601 *
602 * Wrapper for bitmap_parselist(), providing it with user buffer.
603 *
604 * We cannot have this as an inline function in bitmap.h because it needs
605 * linux/uaccess.h to get the access_ok() declaration and this causes
606 * cyclic dependencies.
607 */
608 int bitmap_parselist_user(const char __user *ubuf,
609 unsigned int ulen, unsigned long *maskp,
610 int nmaskbits)
611 {
612 if (!access_ok(VERIFY_READ, ubuf, ulen))
613 return -EFAULT;
614 return __bitmap_parselist((const char __force *)ubuf,
615 ulen, 1, maskp, nmaskbits);
616 }
617 EXPORT_SYMBOL(bitmap_parselist_user);
618
619
620 /**
621 * bitmap_pos_to_ord - find ordinal of set bit at given position in bitmap
622 * @buf: pointer to a bitmap
623 * @pos: a bit position in @buf (0 <= @pos < @nbits)
624 * @nbits: number of valid bit positions in @buf
625 *
626 * Map the bit at position @pos in @buf (of length @nbits) to the
627 * ordinal of which set bit it is. If it is not set or if @pos
628 * is not a valid bit position, map to -1.
629 *
630 * If for example, just bits 4 through 7 are set in @buf, then @pos
631 * values 4 through 7 will get mapped to 0 through 3, respectively,
632 * and other @pos values will get mapped to -1. When @pos value 7
633 * gets mapped to (returns) @ord value 3 in this example, that means
634 * that bit 7 is the 3rd (starting with 0th) set bit in @buf.
635 *
636  * The bit positions 0 through @nbits-1 are valid positions in @buf.
637 */
638 static int bitmap_pos_to_ord(const unsigned long *buf, unsigned int pos, unsigned int nbits)
639 {
640 if (pos >= nbits || !test_bit(pos, buf))
641 return -1;
642
643 return __bitmap_weight(buf, pos);
644 }
645
646 /**
647 * bitmap_ord_to_pos - find position of n-th set bit in bitmap
648 * @buf: pointer to bitmap
649 * @ord: ordinal bit position (n-th set bit, n >= 0)
650 * @nbits: number of valid bit positions in @buf
651 *
652 * Map the ordinal offset of bit @ord in @buf to its position in @buf.
653 * Value of @ord should be in range 0 <= @ord < weight(buf). If @ord
654 * >= weight(buf), returns @nbits.
655 *
656 * If for example, just bits 4 through 7 are set in @buf, then @ord
657 * values 0 through 3 will get mapped to 4 through 7, respectively,
658 * and all other @ord values returns @nbits. When @ord value 3
659 * gets mapped to (returns) @pos value 7 in this example, that means
660 * that the 3rd set bit (starting with 0th) is at position 7 in @buf.
661 *
662 * The bit positions 0 through @nbits-1 are valid positions in @buf.
663 */
664 unsigned int bitmap_ord_to_pos(const unsigned long *buf, unsigned int ord, unsigned int nbits)
665 {
666 unsigned int pos;
667
668 for (pos = find_first_bit(buf, nbits);
669 pos < nbits && ord;
670 pos = find_next_bit(buf, nbits, pos + 1))
671 ord--;
672
673 return pos;
674 }
675
676 /**
677 * bitmap_remap - Apply map defined by a pair of bitmaps to another bitmap
678 * @dst: remapped result
679 * @src: subset to be remapped
680 * @old: defines domain of map
681 * @new: defines range of map
682 * @nbits: number of bits in each of these bitmaps
683 *
684 * Let @old and @new define a mapping of bit positions, such that
685 * whatever position is held by the n-th set bit in @old is mapped
686 * to the n-th set bit in @new. In the more general case, allowing
687 * for the possibility that the weight 'w' of @new is less than the
688 * weight of @old, map the position of the n-th set bit in @old to
689 * the position of the m-th set bit in @new, where m == n % w.
690 *
691 * If either of the @old and @new bitmaps are empty, or if @src and
692 * @dst point to the same location, then this routine copies @src
693 * to @dst.
694 *
695 * The positions of unset bits in @old are mapped to themselves
696  * (the identity map).
697 *
698 * Apply the above specified mapping to @src, placing the result in
699 * @dst, clearing any bits previously set in @dst.
700 *
701  * For example, let's say that @old has bits 4 through 7 set, and
702 * @new has bits 12 through 15 set. This defines the mapping of bit
703 * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, and of all other
704 * bit positions unchanged. So if say @src comes into this routine
705 * with bits 1, 5 and 7 set, then @dst should leave with bits 1,
706 * 13 and 15 set.
707 */
708 void bitmap_remap(unsigned long *dst, const unsigned long *src,
709 const unsigned long *old, const unsigned long *new,
710 unsigned int nbits)
711 {
712 unsigned int oldbit, w;
713
714 if (dst == src) /* following doesn't handle inplace remaps */
715 return;
716 bitmap_zero(dst, nbits);
717
718 w = bitmap_weight(new, nbits);
719 for_each_set_bit(oldbit, src, nbits) {
720 int n = bitmap_pos_to_ord(old, oldbit, nbits);
721
722 if (n < 0 || w == 0)
723 set_bit(oldbit, dst); /* identity map */
724 else
725 set_bit(bitmap_ord_to_pos(new, n % w, nbits), dst);
726 }
727 }
728 EXPORT_SYMBOL(bitmap_remap);
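/*
 * Usage sketch (illustrative, not in the original source), mirroring the
 * example in the comment above: with @old = bits 4-7 and @new = bits
 * 12-15, a @src of {1, 5, 7} remaps to {1, 13, 15}:
 *
 *	DECLARE_BITMAP(old, 16);
 *	DECLARE_BITMAP(new, 16);
 *	DECLARE_BITMAP(src, 16);
 *	DECLARE_BITMAP(dst, 16);
 *
 *	bitmap_zero(old, 16);  bitmap_set(old, 4, 4);
 *	bitmap_zero(new, 16);  bitmap_set(new, 12, 4);
 *	bitmap_zero(src, 16);  bitmap_set(src, 1, 1);
 *	bitmap_set(src, 5, 1); bitmap_set(src, 7, 1);
 *	bitmap_remap(dst, src, old, new, 16);
 */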
729
730 /**
731 * bitmap_bitremap - Apply map defined by a pair of bitmaps to a single bit
732 * @oldbit: bit position to be mapped
733 * @old: defines domain of map
734 * @new: defines range of map
735 * @bits: number of bits in each of these bitmaps
736 *
737 * Let @old and @new define a mapping of bit positions, such that
738 * whatever position is held by the n-th set bit in @old is mapped
739 * to the n-th set bit in @new. In the more general case, allowing
740 * for the possibility that the weight 'w' of @new is less than the
741 * weight of @old, map the position of the n-th set bit in @old to
742 * the position of the m-th set bit in @new, where m == n % w.
743 *
744 * The positions of unset bits in @old are mapped to themselves
745  * (the identity map).
746 *
747 * Apply the above specified mapping to bit position @oldbit, returning
748 * the new bit position.
749 *
750  * For example, let's say that @old has bits 4 through 7 set, and
751 * @new has bits 12 through 15 set. This defines the mapping of bit
752 * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, and of all other
753 * bit positions unchanged. So if say @oldbit is 5, then this routine
754 * returns 13.
755 */
756 int bitmap_bitremap(int oldbit, const unsigned long *old,
757 const unsigned long *new, int bits)
758 {
759 int w = bitmap_weight(new, bits);
760 int n = bitmap_pos_to_ord(old, oldbit, bits);
761 if (n < 0 || w == 0)
762 return oldbit;
763 else
764 return bitmap_ord_to_pos(new, n % w, bits);
765 }
766 EXPORT_SYMBOL(bitmap_bitremap);
767
768 /**
769 * bitmap_onto - translate one bitmap relative to another
770 * @dst: resulting translated bitmap
771 * @orig: original untranslated bitmap
772 * @relmap: bitmap relative to which translated
773 * @bits: number of bits in each of these bitmaps
774 *
775 * Set the n-th bit of @dst iff there exists some m such that the
776 * n-th bit of @relmap is set, the m-th bit of @orig is set, and
777 * the n-th bit of @relmap is also the m-th _set_ bit of @relmap.
778  * (If you understood the previous sentence the first time you
779 * read it, you're overqualified for your current job.)
780 *
781 * In other words, @orig is mapped onto (surjectively) @dst,
782 * using the map { <n, m> | the n-th bit of @relmap is the
783 * m-th set bit of @relmap }.
784 *
785 * Any set bits in @orig above bit number W, where W is the
786 * weight of (number of set bits in) @relmap are mapped nowhere.
787 * In particular, if for all bits m set in @orig, m >= W, then
788 * @dst will end up empty. In situations where the possibility
789 * of such an empty result is not desired, one way to avoid it is
790 * to use the bitmap_fold() operator, below, to first fold the
791 * @orig bitmap over itself so that all its set bits x are in the
792 * range 0 <= x < W. The bitmap_fold() operator does this by
793 * setting the bit (m % W) in @dst, for each bit (m) set in @orig.
794 *
795 * Example [1] for bitmap_onto():
796 * Let's say @relmap has bits 30-39 set, and @orig has bits
797 * 1, 3, 5, 7, 9 and 11 set. Then on return from this routine,
798 * @dst will have bits 31, 33, 35, 37 and 39 set.
799 *
800 * When bit 0 is set in @orig, it means turn on the bit in
801 * @dst corresponding to whatever is the first bit (if any)
802 * that is turned on in @relmap. Since bit 0 was off in the
803 * above example, we leave off that bit (bit 30) in @dst.
804 *
805 * When bit 1 is set in @orig (as in the above example), it
806 * means turn on the bit in @dst corresponding to whatever
807 * is the second bit that is turned on in @relmap. The second
808 * bit in @relmap that was turned on in the above example was
809 * bit 31, so we turned on bit 31 in @dst.
810 *
811 * Similarly, we turned on bits 33, 35, 37 and 39 in @dst,
812 * because they were the 4th, 6th, 8th and 10th set bits
813 * set in @relmap, and the 4th, 6th, 8th and 10th bits of
814 * @orig (i.e. bits 3, 5, 7 and 9) were also set.
815 *
816 * When bit 11 is set in @orig, it means turn on the bit in
817 * @dst corresponding to whatever is the twelfth bit that is
818 * turned on in @relmap. In the above example, there were
819  *	only ten bits turned on in @relmap (30..39), so the fact that
820  *	bit 11 was set in @orig had no effect on @dst.
821 *
822 * Example [2] for bitmap_fold() + bitmap_onto():
823 * Let's say @relmap has these ten bits set:
824 * 40 41 42 43 45 48 53 61 74 95
825 * (for the curious, that's 40 plus the first ten terms of the
826 * Fibonacci sequence.)
827 *
828  *	Further, let's say we use the following code, invoking
829 * bitmap_fold() then bitmap_onto, as suggested above to
830 * avoid the possibility of an empty @dst result:
831 *
832 * unsigned long *tmp; // a temporary bitmap's bits
833 *
834 * bitmap_fold(tmp, orig, bitmap_weight(relmap, bits), bits);
835 * bitmap_onto(dst, tmp, relmap, bits);
836 *
837 * Then this table shows what various values of @dst would be, for
838 * various @orig's. I list the zero-based positions of each set bit.
839 * The tmp column shows the intermediate result, as computed by
840 * using bitmap_fold() to fold the @orig bitmap modulo ten
841 * (the weight of @relmap).
842 *
843 * @orig tmp @dst
844 * 0 0 40
845 * 1 1 41
846 * 9 9 95
847 * 10 0 40 (*)
848 * 1 3 5 7 1 3 5 7 41 43 48 61
849 * 0 1 2 3 4 0 1 2 3 4 40 41 42 43 45
850 * 0 9 18 27 0 9 8 7 40 61 74 95
851 * 0 10 20 30 0 40
852 * 0 11 22 33 0 1 2 3 40 41 42 43
853 * 0 12 24 36 0 2 4 6 40 42 45 53
854 * 78 102 211 1 2 8 41 42 74 (*)
855 *
856 * (*) For these marked lines, if we hadn't first done bitmap_fold()
857 * into tmp, then the @dst result would have been empty.
858 *
859 * If either of @orig or @relmap is empty (no set bits), then @dst
860 * will be returned empty.
861 *
862 * If (as explained above) the only set bits in @orig are in positions
863 * m where m >= W, (where W is the weight of @relmap) then @dst will
864 * once again be returned empty.
865 *
866 * All bits in @dst not set by the above rule are cleared.
867 */
868 void bitmap_onto(unsigned long *dst, const unsigned long *orig,
869 const unsigned long *relmap, unsigned int bits)
870 {
871 unsigned int n, m; /* same meaning as in above comment */
872
873 if (dst == orig) /* following doesn't handle inplace mappings */
874 return;
875 bitmap_zero(dst, bits);
876
877 /*
878 * The following code is a more efficient, but less
879 * obvious, equivalent to the loop:
880 * for (m = 0; m < bitmap_weight(relmap, bits); m++) {
881 * n = bitmap_ord_to_pos(orig, m, bits);
882 * if (test_bit(m, orig))
883 * set_bit(n, dst);
884 * }
885 */
886
887 m = 0;
888 for_each_set_bit(n, relmap, bits) {
889 /* m == bitmap_pos_to_ord(relmap, n, bits) */
890 if (test_bit(m, orig))
891 set_bit(n, dst);
892 m++;
893 }
894 }
895 EXPORT_SYMBOL(bitmap_onto);
896
897 /**
898 * bitmap_fold - fold larger bitmap into smaller, modulo specified size
899 * @dst: resulting smaller bitmap
900 * @orig: original larger bitmap
901 * @sz: specified size
902 * @nbits: number of bits in each of these bitmaps
903 *
904 * For each bit oldbit in @orig, set bit oldbit mod @sz in @dst.
905 * Clear all other bits in @dst. See further the comment and
906 * Example [2] for bitmap_onto() for why and how to use this.
907 */
908 void bitmap_fold(unsigned long *dst, const unsigned long *orig,
909 unsigned int sz, unsigned int nbits)
910 {
911 unsigned int oldbit;
912
913 if (dst == orig) /* following doesn't handle inplace mappings */
914 return;
915 bitmap_zero(dst, nbits);
916
917 for_each_set_bit(oldbit, orig, nbits)
918 set_bit(oldbit % sz, dst);
919 }
920 EXPORT_SYMBOL(bitmap_fold);
921
922 /*
923 * Common code for bitmap_*_region() routines.
924 * bitmap: array of unsigned longs corresponding to the bitmap
925 * pos: the beginning of the region
926 * order: region size (log base 2 of number of bits)
927 * reg_op: operation(s) to perform on that region of bitmap
928 *
929 * Can set, verify and/or release a region of bits in a bitmap,
930 * depending on which combination of REG_OP_* flag bits is set.
931 *
932 * A region of a bitmap is a sequence of bits in the bitmap, of
933 * some size '1 << order' (a power of two), aligned to that same
934 * '1 << order' power of two.
935 *
936 * Returns 1 if REG_OP_ISFREE succeeds (region is all zero bits).
937  * Returns 0 in all other cases and for all other reg_op values.
938 */
939
940 enum {
941 REG_OP_ISFREE, /* true if region is all zero bits */
942 REG_OP_ALLOC, /* set all bits in region */
943 REG_OP_RELEASE, /* clear all bits in region */
944 };
945
946 static int __reg_op(unsigned long *bitmap, unsigned int pos, int order, int reg_op)
947 {
948 int nbits_reg; /* number of bits in region */
949 int index; /* index first long of region in bitmap */
950 int offset; /* bit offset region in bitmap[index] */
951 int nlongs_reg; /* num longs spanned by region in bitmap */
952 int nbitsinlong; /* num bits of region in each spanned long */
953 unsigned long mask; /* bitmask for one long of region */
954 int i; /* scans bitmap by longs */
955 int ret = 0; /* return value */
956
957 /*
958 * Either nlongs_reg == 1 (for small orders that fit in one long)
959 * or (offset == 0 && mask == ~0UL) (for larger multiword orders.)
960 */
961 nbits_reg = 1 << order;
962 index = pos / BITS_PER_LONG;
963 offset = pos - (index * BITS_PER_LONG);
964 nlongs_reg = BITS_TO_LONGS(nbits_reg);
965 nbitsinlong = min(nbits_reg, BITS_PER_LONG);
966
967 /*
968 * Can't do "mask = (1UL << nbitsinlong) - 1", as that
969 * overflows if nbitsinlong == BITS_PER_LONG.
970 */
971 mask = (1UL << (nbitsinlong - 1));
972 mask += mask - 1;
973 mask <<= offset;
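	/*
	 * Worked example (added for illustration): with order = 2 and
	 * pos = 8, nbits_reg = 4, offset = 8 and nbitsinlong = 4, so the
	 * two steps above give mask = 0x8 -> 0xf, and the shift yields
	 * mask = 0xf00, i.e. the four bits of the region within
	 * bitmap[index].
	 */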
974
975 switch (reg_op) {
976 case REG_OP_ISFREE:
977 for (i = 0; i < nlongs_reg; i++) {
978 if (bitmap[index + i] & mask)
979 goto done;
980 }
981 ret = 1; /* all bits in region free (zero) */
982 break;
983
984 case REG_OP_ALLOC:
985 for (i = 0; i < nlongs_reg; i++)
986 bitmap[index + i] |= mask;
987 break;
988
989 case REG_OP_RELEASE:
990 for (i = 0; i < nlongs_reg; i++)
991 bitmap[index + i] &= ~mask;
992 break;
993 }
994 done:
995 return ret;
996 }
997
998 /**
999 * bitmap_find_free_region - find a contiguous aligned mem region
1000 * @bitmap: array of unsigned longs corresponding to the bitmap
1001 * @bits: number of bits in the bitmap
1002 * @order: region size (log base 2 of number of bits) to find
1003 *
1004 * Find a region of free (zero) bits in a @bitmap of @bits bits and
1005 * allocate them (set them to one). Only consider regions of length
1006 * a power (@order) of two, aligned to that power of two, which
1007 * makes the search algorithm much faster.
1008 *
1009 * Return the bit offset in bitmap of the allocated region,
1010 * or -errno on failure.
1011 */
1012 int bitmap_find_free_region(unsigned long *bitmap, unsigned int bits, int order)
1013 {
1014 unsigned int pos, end; /* scans bitmap by regions of size order */
1015
1016 for (pos = 0 ; (end = pos + (1U << order)) <= bits; pos = end) {
1017 if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
1018 continue;
1019 __reg_op(bitmap, pos, order, REG_OP_ALLOC);
1020 return pos;
1021 }
1022 return -ENOMEM;
1023 }
1024 EXPORT_SYMBOL(bitmap_find_free_region);
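/*
 * Usage sketch (illustrative, not in the original source): grab an
 * aligned block of 8 bits (order 3) and give it back later:
 *
 *	DECLARE_BITMAP(map, 128);
 *	int pos;
 *
 *	bitmap_zero(map, 128);
 *	pos = bitmap_find_free_region(map, 128, 3);
 *	if (pos < 0)
 *		return pos;			(no free aligned region)
 *	...
 *	bitmap_release_region(map, pos, 3);
 */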
1025
1026 /**
1027 * bitmap_release_region - release allocated bitmap region
1028 * @bitmap: array of unsigned longs corresponding to the bitmap
1029 * @pos: beginning of bit region to release
1030 * @order: region size (log base 2 of number of bits) to release
1031 *
1032  * This is the complement to bitmap_find_free_region() and releases
1033 * the found region (by clearing it in the bitmap).
1034 *
1035 * No return value.
1036 */
1037 void bitmap_release_region(unsigned long *bitmap, unsigned int pos, int order)
1038 {
1039 __reg_op(bitmap, pos, order, REG_OP_RELEASE);
1040 }
1041 EXPORT_SYMBOL(bitmap_release_region);
1042
1043 /**
1044 * bitmap_allocate_region - allocate bitmap region
1045 * @bitmap: array of unsigned longs corresponding to the bitmap
1046 * @pos: beginning of bit region to allocate
1047 * @order: region size (log base 2 of number of bits) to allocate
1048 *
1049 * Allocate (set bits in) a specified region of a bitmap.
1050 *
1051 * Return 0 on success, or %-EBUSY if specified region wasn't
1052 * free (not all bits were zero).
1053 */
1054 int bitmap_allocate_region(unsigned long *bitmap, unsigned int pos, int order)
1055 {
1056 if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
1057 return -EBUSY;
1058 return __reg_op(bitmap, pos, order, REG_OP_ALLOC);
1059 }
1060 EXPORT_SYMBOL(bitmap_allocate_region);
1061
1062 /**
1063 * bitmap_copy_le - copy a bitmap, putting the bits into little-endian order.
1064 * @dst: destination buffer
1065 * @src: bitmap to copy
1066 * @nbits: number of bits in the bitmap
1067 *
1068  * Requires nbits % BITS_PER_LONG == 0.
1069 */
1070 #ifdef __BIG_ENDIAN
1071 void bitmap_copy_le(unsigned long *dst, const unsigned long *src, unsigned int nbits)
1072 {
1073 unsigned int i;
1074
1075 for (i = 0; i < nbits/BITS_PER_LONG; i++) {
1076 if (BITS_PER_LONG == 64)
1077 dst[i] = cpu_to_le64(src[i]);
1078 else
1079 dst[i] = cpu_to_le32(src[i]);
1080 }
1081 }
1082 EXPORT_SYMBOL(bitmap_copy_le);
1083 #endif