/*
 * arch/s390/include/asm/bitops.h
 *
 * s390/bitops: use the flogr instruction to implement __ffs, ffs,
 * __fls, fls and fls64.
 */
/*
 * Copyright IBM Corp. 1999,2013
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *
 * The description below was taken in large parts from the powerpc
 * bitops header file:
 * Within a word, bits are numbered LSB first. Lots of places make
 * this assumption by directly testing bits with (val & (1<<nr)).
 * This can cause confusion for large (> 1 word) bitmaps on a
 * big-endian system because, unlike little endian, the number of each
 * bit depends on the word size.
 *
 * The bitop functions are defined to work on unsigned longs, so for an
 * s390x system the bits end up numbered:
 *   |63..............0|127............64|191...........128|255...........192|
 * and on s390:
 *   |31.....0|63....32|95....64|127...96|159..128|191..160|223..192|255..224|
 *
 * There are a few little-endian macros used mostly for filesystem
 * bitmaps, these work on similar bit array layouts, but
 * byte-oriented:
 *   |7...0|15...8|23...16|31...24|39...32|47...40|55...48|63...56|
 *
 * The main difference is that bits 3-5 (64b) or 3-4 (32b) in the bit
 * number field need to be reversed compared to the big-endian bit
 * fields. This can be achieved by XOR with 0x38 (64b) or 0x18 (32b).
 *
 * We also have special functions which work with an MSB0 encoding:
 * on an s390x system the bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 * and on s390:
 *   |0.....31|32....63|64....95|96...127|128..159|160..191|192..223|224..255|
 *
 * The main difference is that bits 0-63 (64b) or 0-31 (32b) in the bit
 * number field need to be reversed compared to the LSB0 encoded bit
 * fields. This can be achieved by XOR with 0x3f (64b) or 0x1f (32b).
 *
 */
40
41 #ifndef _S390_BITOPS_H
42 #define _S390_BITOPS_H
43
44 #ifndef _LINUX_BITOPS_H
45 #error only <linux/bitops.h> can be included directly
46 #endif
47
48 #include <linux/typecheck.h>
49 #include <linux/compiler.h>
50
#ifndef CONFIG_64BIT

/* 31-bit: 32-bit logical instructions used inside the CS retry loop. */
#define __BITOPS_OR		"or"
#define __BITOPS_AND		"nr"
#define __BITOPS_XOR		"xr"

/*
 * __BITOPS_LOOP - atomically apply @__op_string to *@__addr with
 * operand @__val and yield the previous value of *@__addr.
 *
 * Classic compare-and-swap retry loop: load the old word, compute the
 * new one in a scratch register, and CS it back; if another CPU
 * modified the word in between, CS fails (condition code "low") and
 * we branch back to retry.
 */
#define __BITOPS_LOOP(__addr, __val, __op_string)		\
({								\
	unsigned long __old, __new;				\
								\
	typecheck(unsigned long *, (__addr));			\
	asm volatile(						\
		"	l	%0,%2\n"			\
		"0:	lr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	cs	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new), "+Q" (*(__addr))\
		: "d" (__val)					\
		: "cc");					\
	__old;							\
})

#else /* CONFIG_64BIT */

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

/*
 * z196 or newer: the interlocked-access facility provides
 * load-and-{or,and,xor} instructions which perform the whole
 * read-modify-write atomically in one instruction — no retry loop.
 */
#define __BITOPS_OR		"laog"
#define __BITOPS_AND		"lang"
#define __BITOPS_XOR		"laxg"

/*
 * __BITOPS_LOOP - single-instruction variant; the instruction itself
 * returns the old value of *@__addr in %0.
 */
#define __BITOPS_LOOP(__addr, __val, __op_string)		\
({								\
	unsigned long __old;					\
								\
	typecheck(unsigned long *, (__addr));			\
	asm volatile(						\
		__op_string "	%0,%2,%1\n"			\
		: "=d" (__old), "+Q" (*(__addr))		\
		: "d" (__val)					\
		: "cc");					\
	__old;							\
})

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

/* Pre-z196 64-bit: 64-bit logical instructions in a CSG retry loop. */
#define __BITOPS_OR		"ogr"
#define __BITOPS_AND		"ngr"
#define __BITOPS_XOR		"xgr"

/*
 * __BITOPS_LOOP - same compare-and-swap retry loop as the 31-bit
 * variant, using the 64-bit lg/lgr/csg instructions.
 */
#define __BITOPS_LOOP(__addr, __val, __op_string)		\
({								\
	unsigned long __old, __new;				\
								\
	typecheck(unsigned long *, (__addr));			\
	asm volatile(						\
		"	lg	%0,%2\n"			\
		"0:	lgr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	csg	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new), "+Q" (*(__addr))\
		: "d" (__val)					\
		: "cc");					\
	__old;							\
})

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#endif /* CONFIG_64BIT */
121
122 #define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)
123
124 static inline unsigned long *
125 __bitops_word(unsigned long nr, volatile unsigned long *ptr)
126 {
127 unsigned long addr;
128
129 addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3);
130 return (unsigned long *)addr;
131 }
132
133 static inline unsigned char *
134 __bitops_byte(unsigned long nr, volatile unsigned long *ptr)
135 {
136 return ((unsigned char *)ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
137 }
138
/*
 * set_bit - atomically set bit @nr in the bitmap at @ptr.
 *
 * Bits are numbered LSB-first within each unsigned long.
 * For a compile-time-constant @nr on zEC12 or newer, a single OI
 * (or-immediate) on the containing byte is used instead of the
 * word-wide atomic loop.
 * NOTE(review): this relies on the immediate byte ops being
 * interlocked on zEC12 (interlocked-access facility 2) — confirm
 * against the Principles of Operation.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"oi	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc");
		return;
	}
#endif
	/* Fallback: atomic OR of the single-bit mask into the word. */
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__BITOPS_LOOP(addr, mask, __BITOPS_OR);
}
159
/*
 * clear_bit - atomically clear bit @nr in the bitmap at @ptr.
 *
 * For a compile-time-constant @nr on zEC12 or newer, a single NI
 * (and-immediate) on the containing byte is used; otherwise the
 * word-wide atomic AND with the inverted mask.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"ni	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (~(1 << (nr & 7)))
			: "cc");
		return;
	}
#endif
	/* Fallback: atomic AND with all bits set except bit nr. */
	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	__BITOPS_LOOP(addr, mask, __BITOPS_AND);
}
180
/*
 * change_bit - atomically toggle bit @nr in the bitmap at @ptr.
 *
 * For a compile-time-constant @nr on zEC12 or newer, a single XI
 * (xor-immediate) on the containing byte is used; otherwise the
 * word-wide atomic XOR.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"xi	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc");
		return;
	}
#endif
	/* Fallback: atomic XOR of the single-bit mask into the word. */
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__BITOPS_LOOP(addr, mask, __BITOPS_XOR);
}
201
202 static inline int
203 test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
204 {
205 unsigned long *addr = __bitops_word(nr, ptr);
206 unsigned long old, mask;
207
208 mask = 1UL << (nr & (BITS_PER_LONG - 1));
209 old = __BITOPS_LOOP(addr, mask, __BITOPS_OR);
210 barrier();
211 return (old & mask) != 0;
212 }
213
214 static inline int
215 test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
216 {
217 unsigned long *addr = __bitops_word(nr, ptr);
218 unsigned long old, mask;
219
220 mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
221 old = __BITOPS_LOOP(addr, mask, __BITOPS_AND);
222 barrier();
223 return (old & ~mask) != 0;
224 }
225
226 static inline int
227 test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
228 {
229 unsigned long *addr = __bitops_word(nr, ptr);
230 unsigned long old, mask;
231
232 mask = 1UL << (nr & (BITS_PER_LONG - 1));
233 old = __BITOPS_LOOP(addr, mask, __BITOPS_XOR);
234 barrier();
235 return (old & mask) != 0;
236 }
237
/*
 * __set_bit - non-atomic variant of set_bit; caller must serialize
 * access to the bitmap.  Operates on the containing byte only.
 */
static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char mask = 1 << (nr & 7);

	*byte |= mask;
}
244
/*
 * __clear_bit - non-atomic variant of clear_bit; caller must
 * serialize access to the bitmap.
 */
static inline void
__clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char mask = 1 << (nr & 7);

	*byte &= ~mask;
}
252
/*
 * __change_bit - non-atomic variant of change_bit; caller must
 * serialize access to the bitmap.
 */
static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char mask = 1 << (nr & 7);

	*byte ^= mask;
}
259
/*
 * __test_and_set_bit - non-atomic test_and_set_bit; caller must
 * serialize access.  Returns the previous value of bit @nr.
 */
static inline int
__test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char mask = 1 << (nr & 7);
	unsigned char prev = *byte;

	*byte = prev | mask;
	return (prev & mask) != 0;
}
270
/*
 * __test_and_clear_bit - non-atomic test_and_clear_bit; caller must
 * serialize access.  Returns the previous value of bit @nr.
 */
static inline int
__test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char mask = 1 << (nr & 7);
	unsigned char prev = *byte;

	*byte = prev & ~mask;
	return (prev & mask) != 0;
}
281
/*
 * __test_and_change_bit - non-atomic test_and_change_bit; caller must
 * serialize access.  Returns the previous value of bit @nr.
 */
static inline int
__test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char mask = 1 << (nr & 7);
	unsigned char prev = *byte;

	*byte = prev ^ mask;
	return (prev & mask) != 0;
}
292
293 static inline int test_bit(unsigned long nr, const volatile unsigned long *ptr)
294 {
295 const volatile unsigned char *addr;
296
297 addr = ((const volatile unsigned char *)ptr);
298 addr += (nr ^ (BITS_PER_LONG - 8)) >> 3;
299 return (*addr >> (nr & 7)) & 1;
300 }
301
302 /*
303 * ATTENTION:
304 * find_first_bit_left() and find_next_bit_left() use MSB0 encoding.
305 */
306 unsigned long find_first_bit_left(const unsigned long *addr, unsigned long size);
307 unsigned long find_next_bit_left(const unsigned long *addr, unsigned long size,
308 unsigned long offset);
309
310 #ifdef CONFIG_HAVE_MARCH_Z9_109_FEATURES
311
/**
 * __flogr - find leftmost one
 * @word - The word to search
 *
 * Returns the bit number of the most significant bit set,
 * where the most significant bit has bit number 0.
 * If no bit is set this function returns 64.
 */
static inline unsigned char __flogr(unsigned long word)
{
	if (__builtin_constant_p(word)) {
		/*
		 * Constant input: let the compiler fold a branchless
		 * binary search — test the upper half at each width,
		 * shift the word left and accumulate the bit count
		 * when the upper half is empty.
		 */
		unsigned long bit = 0;

		if (!word)
			return 64;
		if (!(word & 0xffffffff00000000UL)) {
			word <<= 32;
			bit += 32;
		}
		if (!(word & 0xffff000000000000UL)) {
			word <<= 16;
			bit += 16;
		}
		if (!(word & 0xff00000000000000UL)) {
			word <<= 8;
			bit += 8;
		}
		if (!(word & 0xf000000000000000UL)) {
			word <<= 4;
			bit += 4;
		}
		if (!(word & 0xc000000000000000UL)) {
			word <<= 2;
			bit += 2;
		}
		if (!(word & 0x8000000000000000UL)) {
			word <<= 1;
			bit += 1;
		}
		return bit;
	} else {
		/*
		 * FLOGR writes its result into an even/odd register
		 * pair: the bit number lands in the even register and
		 * the odd register is clobbered, so both r4 and r5
		 * must be named as outputs even though only r4 ("bit")
		 * is used.
		 */
		register unsigned long bit asm("4") = word;
		register unsigned long out asm("5");

		asm volatile(
			"	flogr	%[bit],%[bit]\n"
			: [bit] "+d" (bit), [out] "=d" (out) : : "cc");
		return bit;
	}
}
362
363 /**
364 * __ffs - find first bit in word.
365 * @word: The word to search
366 *
367 * Undefined if no bit exists, so code should check against 0 first.
368 */
369 static inline unsigned long __ffs(unsigned long word)
370 {
371 return __flogr(-word & word) ^ (BITS_PER_LONG - 1);
372 }
373
374 /**
375 * ffs - find first bit set
376 * @word: the word to search
377 *
378 * This is defined the same way as the libc and
379 * compiler builtin ffs routines (man ffs).
380 */
381 static inline int ffs(int word)
382 {
383 unsigned long mask = 2 * BITS_PER_LONG - 1;
384 unsigned int val = (unsigned int)word;
385
386 return (1 + (__flogr(-val & val) ^ (BITS_PER_LONG - 1))) & mask;
387 }
388
389 /**
390 * __fls - find last (most-significant) set bit in a long word
391 * @word: the word to search
392 *
393 * Undefined if no set bit exists, so code should check against 0 first.
394 */
395 static inline unsigned long __fls(unsigned long word)
396 {
397 return __flogr(word) ^ (BITS_PER_LONG - 1);
398 }
399
400 /**
401 * fls64 - find last set bit in a 64-bit word
402 * @word: the word to search
403 *
404 * This is defined in a similar way as the libc and compiler builtin
405 * ffsll, but returns the position of the most significant set bit.
406 *
407 * fls64(value) returns 0 if value is 0 or the position of the last
408 * set bit if value is nonzero. The last (most significant) bit is
409 * at position 64.
410 */
411 static inline int fls64(unsigned long word)
412 {
413 unsigned long mask = 2 * BITS_PER_LONG - 1;
414
415 return (1 + (__flogr(word) ^ (BITS_PER_LONG - 1))) & mask;
416 }
417
/**
 * fls - find last (most-significant) bit set
 * @word: the word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int word)
{
	/* Zero-extend so a negative int cannot set bits 32..63. */
	unsigned int w = (unsigned int)word;

	return fls64(w);
}
429
430 #else /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */
431
432 #include <asm-generic/bitops/__ffs.h>
433 #include <asm-generic/bitops/ffs.h>
434 #include <asm-generic/bitops/__fls.h>
435 #include <asm-generic/bitops/fls.h>
436 #include <asm-generic/bitops/fls64.h>
437
438 #endif /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */
439
440 #include <asm-generic/bitops/ffz.h>
441 #include <asm-generic/bitops/find.h>
442 #include <asm-generic/bitops/hweight.h>
443 #include <asm-generic/bitops/lock.h>
444 #include <asm-generic/bitops/sched.h>
445 #include <asm-generic/bitops/le.h>
446 #include <asm-generic/bitops/ext2-atomic-setbit.h>
447
448 #endif /* _S390_BITOPS_H */