]>
Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | #ifndef _S390_BITOPS_H |
2 | #define _S390_BITOPS_H | |
3 | ||
4 | /* | |
5 | * include/asm-s390/bitops.h | |
6 | * | |
7 | * S390 version | |
8 | * Copyright (C) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation | |
9 | * Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com) | |
10 | * | |
11 | * Derived from "include/asm-i386/bitops.h" | |
12 | * Copyright (C) 1992, Linus Torvalds | |
13 | * | |
14 | */ | |
15 | #include <linux/config.h> | |
16 | #include <linux/compiler.h> | |
17 | ||
18 | /* | |
19 | * 32 bit bitops format: | |
20 | * bit 0 is the LSB of *addr; bit 31 is the MSB of *addr; | |
21 | * bit 32 is the LSB of *(addr+4). That combined with the | |
22 | * big endian byte order on S390 give the following bit | |
23 | * order in memory: | |
24 | * 1f 1e 1d 1c 1b 1a 19 18 17 16 15 14 13 12 11 10 \ | |
25 | * 0f 0e 0d 0c 0b 0a 09 08 07 06 05 04 03 02 01 00 | |
26 | * after that follows the next long with bit numbers | |
27 | * 3f 3e 3d 3c 3b 3a 39 38 37 36 35 34 33 32 31 30 | |
28 | * 2f 2e 2d 2c 2b 2a 29 28 27 26 25 24 23 22 21 20 | |
29 | * The reason for this bit ordering is the fact that | |
30 | * in the architecture independent code bits operations | |
31 | * of the form "flags |= (1 << bitnr)" are used INTERMIXED | |
32 | * with operation of the form "set_bit(bitnr, flags)". | |
33 | * | |
34 | * 64 bit bitops format: | |
35 | * bit 0 is the LSB of *addr; bit 63 is the MSB of *addr; | |
36 | * bit 64 is the LSB of *(addr+8). That combined with the | |
37 | * big endian byte order on S390 give the following bit | |
38 | * order in memory: | |
39 | * 3f 3e 3d 3c 3b 3a 39 38 37 36 35 34 33 32 31 30 | |
40 | * 2f 2e 2d 2c 2b 2a 29 28 27 26 25 24 23 22 21 20 | |
41 | * 1f 1e 1d 1c 1b 1a 19 18 17 16 15 14 13 12 11 10 | |
42 | * 0f 0e 0d 0c 0b 0a 09 08 07 06 05 04 03 02 01 00 | |
43 | * after that follows the next long with bit numbers | |
44 | * 7f 7e 7d 7c 7b 7a 79 78 77 76 75 74 73 72 71 70 | |
45 | * 6f 6e 6d 6c 6b 6a 69 68 67 66 65 64 63 62 61 60 | |
46 | * 5f 5e 5d 5c 5b 5a 59 58 57 56 55 54 53 52 51 50 | |
47 | * 4f 4e 4d 4c 4b 4a 49 48 47 46 45 44 43 42 41 40 | |
48 | * The reason for this bit ordering is the fact that | |
49 | * in the architecture independent code bits operations | |
50 | * of the form "flags |= (1 << bitnr)" are used INTERMIXED | |
51 | * with operation of the form "set_bit(bitnr, flags)". | |
52 | */ | |
53 | ||
/* set ALIGN_CS to 1 if the SMP safe bit operations should
 * align the address to 4 byte boundary. It seems to work
 * without the alignment.
 */
#ifdef __KERNEL__
/* Kernel build: rely on naturally aligned bitmaps, skip the extra work. */
#define ALIGN_CS 0
#else
/* Non-kernel build: always align, and the CS based ops are mandatory. */
#define ALIGN_CS 1
#ifndef CONFIG_SMP
#error "bitops won't work without CONFIG_SMP"
#endif
#endif
66 | ||
/* bitmap tables from arch/S390/kernel/bitmap.S */
extern const char _oi_bitmap[];	/* single-bit OR masks, indexed by bit 0..7 (see __set_bit) */
extern const char _ni_bitmap[];	/* single-bit clear (AND) masks, indexed by bit 0..7 */
extern const char _zb_findmap[];	/* first zero bit position for each byte value (see ffz) */
extern const char _sb_findmap[];	/* first set bit position for each byte value (see __ffs) */
72 | ||
#ifndef __s390x__

/* 31/32-bit mode: bitops work on 32-bit words. */
#define __BITOPS_ALIGN		3	/* address bits inside one word */
#define __BITOPS_WORDSIZE	32	/* bits per unsigned long */
#define __BITOPS_OR		"or"	/* 32-bit register or */
#define __BITOPS_AND		"nr"	/* 32-bit register and */
#define __BITOPS_XOR		"xr"	/* 32-bit register xor */

/*
 * Atomic read-modify-write loop built on compare-and-swap (CS):
 * load the word at __addr, compute __new = __old <op> __val, then
 * retry via CS ("jl 0b" loops while another CPU changed the word).
 * After the loop __old holds the word's value just before the
 * successful update and __new the value that was stored.
 */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string) \
	__asm__ __volatile__(" l %0,0(%4)\n" \
			     "0: lr %1,%0\n" \
			     __op_string " %1,%3\n" \
			     " cs %0,%1,0(%4)\n" \
			     " jl 0b" \
			     : "=&d" (__old), "=&d" (__new), \
			       "=m" (*(unsigned long *) __addr) \
			     : "d" (__val), "a" (__addr), \
			       "m" (*(unsigned long *) __addr) : "cc" );

#else /* __s390x__ */

/* 64-bit mode: same scheme using the 64-bit instructions (CSG etc.). */
#define __BITOPS_ALIGN		7
#define __BITOPS_WORDSIZE	64
#define __BITOPS_OR		"ogr"
#define __BITOPS_AND		"ngr"
#define __BITOPS_XOR		"xgr"

/* 64-bit variant of the CS retry loop above. */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string) \
	__asm__ __volatile__(" lg %0,0(%4)\n" \
			     "0: lgr %1,%0\n" \
			     __op_string " %1,%3\n" \
			     " csg %0,%1,0(%4)\n" \
			     " jl 0b" \
			     : "=&d" (__old), "=&d" (__new), \
			       "=m" (*(unsigned long *) __addr) \
			     : "d" (__val), "a" (__addr), \
			       "m" (*(unsigned long *) __addr) : "cc" );

#endif /* __s390x__ */

/* Number of longs needed to hold "bits" bits. */
#define __BITOPS_WORDS(bits) (((bits)+__BITOPS_WORDSIZE-1)/__BITOPS_WORDSIZE)
/* Compiler-only barrier: forces memory to be considered clobbered. */
#define __BITOPS_BARRIER() __asm__ __volatile__ ( "" : : : "memory" )
115 | ||
116 | #ifdef CONFIG_SMP | |
/*
 * SMP safe set_bit routine based on compare and swap (CS)
 */
static inline void set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
#if ALIGN_CS == 1
	nr += (addr & __BITOPS_ALIGN) << 3;	/* add alignment to bit number */
	addr ^= addr & __BITOPS_ALIGN;		/* align address to 8 */
#endif
	/* calculate address for CS: byte offset of the word holding bit nr */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make OR mask */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
}
136 | ||
/*
 * SMP safe clear_bit routine based on compare and swap (CS)
 */
static inline void clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
#if ALIGN_CS == 1
	nr += (addr & __BITOPS_ALIGN) << 3;	/* add alignment to bit number */
	addr ^= addr & __BITOPS_ALIGN;		/* align address to 8 */
#endif
	/* calculate address for CS: byte offset of the word holding bit nr */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make AND mask: all ones except the bit to clear */
	mask = ~(1UL << (nr & (__BITOPS_WORDSIZE - 1)));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
}
156 | ||
/*
 * SMP safe change_bit routine based on compare and swap (CS)
 */
static inline void change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
#if ALIGN_CS == 1
	nr += (addr & __BITOPS_ALIGN) << 3;	/* add alignment to bit number */
	addr ^= addr & __BITOPS_ALIGN;		/* align address to 8 */
#endif
	/* calculate address for CS: byte offset of the word holding bit nr */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make XOR mask */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
}
176 | ||
/*
 * SMP safe test_and_set_bit routine based on compare and swap (CS).
 * Returns the previous value of the bit (nonzero if it was set).
 */
static inline int
test_and_set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
#if ALIGN_CS == 1
	nr += (addr & __BITOPS_ALIGN) << 3;	/* add alignment to bit number */
	addr ^= addr & __BITOPS_ALIGN;		/* align address to 8 */
#endif
	/* calculate address for CS: byte offset of the word holding bit nr */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make OR/test mask */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
	__BITOPS_BARRIER();
	/* old is the word before the update; extract the tested bit */
	return (old & mask) != 0;
}
199 | ||
/*
 * SMP safe test_and_clear_bit routine based on compare and swap (CS).
 * Returns the previous value of the bit (nonzero if it was set).
 */
static inline int
test_and_clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
#if ALIGN_CS == 1
	nr += (addr & __BITOPS_ALIGN) << 3;	/* add alignment to bit number */
	addr ^= addr & __BITOPS_ALIGN;		/* align address to 8 */
#endif
	/* calculate address for CS: byte offset of the word holding bit nr */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make AND/test mask */
	mask = ~(1UL << (nr & (__BITOPS_WORDSIZE - 1)));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
	__BITOPS_BARRIER();
	/* old and new differ only in the cleared bit, so this tests it */
	return (old ^ new) != 0;
}
222 | ||
/*
 * SMP safe test_and_change_bit routine based on compare and swap (CS).
 * Returns the previous value of the bit (nonzero if it was set).
 */
static inline int
test_and_change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
#if ALIGN_CS == 1
	nr += (addr & __BITOPS_ALIGN) << 3;	/* add alignment to bit number */
	addr ^= addr & __BITOPS_ALIGN;		/* align address to 8 */
#endif
	/* calculate address for CS: byte offset of the word holding bit nr */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make XOR/test mask */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
	__BITOPS_BARRIER();
	return (old & mask) != 0;
}
245 | #endif /* CONFIG_SMP */ | |
246 | ||
/*
 * fast, non-SMP set_bit routine
 */
static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	/*
	 * Byte address of bit nr: XOR with __BITOPS_WORDSIZE-8 maps the
	 * LSB-first bit number onto the big-endian byte position inside
	 * the long (see the bit-order comment at the top of the file).
	 */
	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	/* OR one byte with the single-bit mask _oi_bitmap[nr & 7] */
	asm volatile("oc 0(1,%1),0(%2)"
		     : "=m" (*(char *) addr)
		     : "a" (addr), "a" (_oi_bitmap + (nr & 7)),
		       "m" (*(char *) addr) : "cc" );
}
260 | ||
/*
 * set_bit for a compile-time constant bit number: the one-byte mask is
 * known at compile time, so a single or-immediate (OI) instruction is
 * used instead of the table-driven OC in __set_bit.
 */
static inline void
__constant_set_bit(const unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	/* byte address of bit nr, same mapping as in __set_bit */
	addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	switch (nr&7) {	/* bit inside the byte selects the immediate mask */
	case 0:
		asm volatile ("oi 0(%1),0x01" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 1:
		asm volatile ("oi 0(%1),0x02" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 2:
		asm volatile ("oi 0(%1),0x04" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 3:
		asm volatile ("oi 0(%1),0x08" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 4:
		asm volatile ("oi 0(%1),0x10" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 5:
		asm volatile ("oi 0(%1),0x20" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 6:
		asm volatile ("oi 0(%1),0x40" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 7:
		asm volatile ("oi 0(%1),0x80" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	}
}
302 | ||
/* Pick the immediate variant when nr is a compile-time constant. */
#define set_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_set_bit((nr),(addr)) : \
 __set_bit((nr),(addr)) )
307 | ||
/*
 * fast, non-SMP clear_bit routine
 */
static inline void
__clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	/* byte address of bit nr, same mapping as in __set_bit */
	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	/* AND one byte with the clear mask _ni_bitmap[nr & 7] */
	asm volatile("nc 0(1,%1),0(%2)"
		     : "=m" (*(char *) addr)
		     : "a" (addr), "a" (_ni_bitmap + (nr & 7)),
		       "m" (*(char *) addr) : "cc" );
}
322 | ||
/*
 * clear_bit for a compile-time constant bit number: uses a single
 * and-immediate (NI) instruction with the inverted one-bit mask.
 */
static inline void
__constant_clear_bit(const unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	/* byte address of bit nr, same mapping as in __set_bit */
	addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	switch (nr&7) {	/* bit inside the byte selects the immediate mask */
	case 0:
		asm volatile ("ni 0(%1),0xFE" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 1:
		asm volatile ("ni 0(%1),0xFD": "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 2:
		asm volatile ("ni 0(%1),0xFB" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 3:
		asm volatile ("ni 0(%1),0xF7" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 4:
		asm volatile ("ni 0(%1),0xEF" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 5:
		asm volatile ("ni 0(%1),0xDF" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 6:
		asm volatile ("ni 0(%1),0xBF" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 7:
		asm volatile ("ni 0(%1),0x7F" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	}
}
364 | ||
/* Pick the immediate variant when nr is a compile-time constant. */
#define clear_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_clear_bit((nr),(addr)) : \
 __clear_bit((nr),(addr)) )
369 | ||
/*
 * fast, non-SMP change_bit routine
 */
static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	/* byte address of bit nr, same mapping as in __set_bit */
	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	/* XOR one byte with the single-bit mask _oi_bitmap[nr & 7] */
	asm volatile("xc 0(1,%1),0(%2)"
		     : "=m" (*(char *) addr)
		     : "a" (addr), "a" (_oi_bitmap + (nr & 7)),
		       "m" (*(char *) addr) : "cc" );
}
383 | ||
/*
 * change_bit for a compile-time constant bit number: uses a single
 * exclusive-or-immediate (XI) instruction with the one-bit mask.
 */
static inline void
__constant_change_bit(const unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	/* byte address of bit nr, same mapping as in __set_bit */
	addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	switch (nr&7) {	/* bit inside the byte selects the immediate mask */
	case 0:
		asm volatile ("xi 0(%1),0x01" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 1:
		asm volatile ("xi 0(%1),0x02" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 2:
		asm volatile ("xi 0(%1),0x04" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 3:
		asm volatile ("xi 0(%1),0x08" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 4:
		asm volatile ("xi 0(%1),0x10" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 5:
		asm volatile ("xi 0(%1),0x20" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 6:
		asm volatile ("xi 0(%1),0x40" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	case 7:
		asm volatile ("xi 0(%1),0x80" : "=m" (*(char *) addr)
			      : "a" (addr), "m" (*(char *) addr) : "cc" );
		break;
	}
}
425 | ||
/* Pick the immediate variant when nr is a compile-time constant. */
#define change_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_change_bit((nr),(addr)) : \
 __change_bit((nr),(addr)) )
430 | ||
/*
 * fast, non-SMP test_and_set_bit routine.
 * Reads the byte first, then sets the bit; not safe against
 * concurrent writers (non-atomic by design).
 */
static inline int
test_and_set_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	/* byte address of bit nr, same mapping as in __set_bit */
	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	ch = *(unsigned char *) addr;	/* remember old byte for the test */
	asm volatile("oc 0(1,%1),0(%2)"
		     : "=m" (*(char *) addr)
		     : "a" (addr), "a" (_oi_bitmap + (nr & 7)),
		       "m" (*(char *) addr) : "cc", "memory" );
	return (ch >> (nr & 7)) & 1;	/* previous value of the bit */
}
#define __test_and_set_bit(X,Y)		test_and_set_bit_simple(X,Y)
449 | ||
/*
 * fast, non-SMP test_and_clear_bit routine.
 * Reads the byte first, then clears the bit; non-atomic by design.
 */
static inline int
test_and_clear_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	/* byte address of bit nr, same mapping as in __set_bit */
	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	ch = *(unsigned char *) addr;	/* remember old byte for the test */
	asm volatile("nc 0(1,%1),0(%2)"
		     : "=m" (*(char *) addr)
		     : "a" (addr), "a" (_ni_bitmap + (nr & 7)),
		       "m" (*(char *) addr) : "cc", "memory" );
	return (ch >> (nr & 7)) & 1;	/* previous value of the bit */
}
#define __test_and_clear_bit(X,Y)	test_and_clear_bit_simple(X,Y)
468 | ||
/*
 * fast, non-SMP test_and_change_bit routine.
 * Reads the byte first, then flips the bit; non-atomic by design.
 */
static inline int
test_and_change_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	/* byte address of bit nr, same mapping as in __set_bit */
	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	ch = *(unsigned char *) addr;	/* remember old byte for the test */
	asm volatile("xc 0(1,%1),0(%2)"
		     : "=m" (*(char *) addr)
		     : "a" (addr), "a" (_oi_bitmap + (nr & 7)),
		       "m" (*(char *) addr) : "cc", "memory" );
	return (ch >> (nr & 7)) & 1;	/* previous value of the bit */
}
#define __test_and_change_bit(X,Y)	test_and_change_bit_simple(X,Y)
487 | ||
#ifdef CONFIG_SMP
/* SMP: map the generic names to the interlocked CS based versions. */
#define set_bit             set_bit_cs
#define clear_bit           clear_bit_cs
#define change_bit          change_bit_cs
#define test_and_set_bit    test_and_set_bit_cs
#define test_and_clear_bit  test_and_clear_bit_cs
#define test_and_change_bit test_and_change_bit_cs
#else
/* UP: the cheaper non-interlocked versions are sufficient. */
#define set_bit             set_bit_simple
#define clear_bit           clear_bit_simple
#define change_bit          change_bit_simple
#define test_and_set_bit    test_and_set_bit_simple
#define test_and_clear_bit  test_and_clear_bit_simple
#define test_and_change_bit test_and_change_bit_simple
#endif
503 | ||
504 | ||
505 | /* | |
506 | * This routine doesn't need to be atomic. | |
507 | */ | |
508 | ||
509 | static inline int __test_bit(unsigned long nr, const volatile unsigned long *ptr) | |
510 | { | |
511 | unsigned long addr; | |
512 | unsigned char ch; | |
513 | ||
514 | addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3); | |
515 | ch = *(volatile unsigned char *) addr; | |
516 | return (ch >> (nr & 7)) & 1; | |
517 | } | |
518 | ||
519 | static inline int | |
520 | __constant_test_bit(unsigned long nr, const volatile unsigned long *addr) { | |
521 | return (((volatile char *) addr) | |
522 | [(nr^(__BITOPS_WORDSIZE-8))>>3] & (1<<(nr&7))); | |
523 | } | |
524 | ||
/* Pick the constant variant when nr is known at compile time. */
#define test_bit(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_test_bit((nr),(addr)) : \
 __test_bit((nr),(addr)) )
529 | ||
530 | #ifndef __s390x__ | |
531 | ||
/*
 * Find-bit routines..
 */

/*
 * find_first_zero_bit - index of the first zero bit in the bitmap.
 * @addr: start of the bitmap
 * @size: number of valid bits
 *
 * Returns size if every bit is set.  The assembly scans one 32-bit
 * word at a time for a word != -1, then narrows down the position
 * halfword- and byte-wise with TML and finishes with a _zb_findmap
 * lookup for the bit inside the final byte.
 */
static inline int
find_first_zero_bit(const unsigned long * addr, unsigned int size)
{
	/* dummy type so gcc treats the whole bitmap as a memory input */
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long cmp, count;
	unsigned int res;

	if (!size)
		return 0;
	__asm__(" lhi %1,-1\n"
		" lr %2,%3\n"
		" slr %0,%0\n"
		" ahi %2,31\n"
		" srl %2,5\n"
		"0: c %1,0(%0,%4)\n"
		" jne 1f\n"
		" ahi %0,4\n"
		" brct %2,0b\n"
		" lr %0,%3\n"
		" j 4f\n"
		"1: l %2,0(%0,%4)\n"
		" sll %0,3\n"
		" lhi %1,0xff\n"
		" tml %2,0xffff\n"
		" jno 2f\n"
		" ahi %0,16\n"
		" srl %2,16\n"
		"2: tml %2,0x00ff\n"
		" jno 3f\n"
		" ahi %0,8\n"
		" srl %2,8\n"
		"3: nr %2,%1\n"
		" ic %2,0(%2,%5)\n"
		" alr %0,%2\n"
		"4:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (addr), "a" (&_zb_findmap),
		  "m" (*(addrtype *) addr) : "cc" );
	/* clamp: the scan may run past size within the last word */
	return (res < size) ? res : size;
}
575 | ||
/*
 * find_first_bit - index of the first set bit in the bitmap.
 * @addr: start of the bitmap
 * @size: number of valid bits
 *
 * Returns size if no bit is set.  Same structure as
 * find_first_zero_bit, but scanning for a word != 0 and using the
 * _sb_findmap table for the final byte.
 */
static inline int
find_first_bit(const unsigned long * addr, unsigned int size)
{
	/* dummy type so gcc treats the whole bitmap as a memory input */
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long cmp, count;
	unsigned int res;

	if (!size)
		return 0;
	__asm__(" slr %1,%1\n"
		" lr %2,%3\n"
		" slr %0,%0\n"
		" ahi %2,31\n"
		" srl %2,5\n"
		"0: c %1,0(%0,%4)\n"
		" jne 1f\n"
		" ahi %0,4\n"
		" brct %2,0b\n"
		" lr %0,%3\n"
		" j 4f\n"
		"1: l %2,0(%0,%4)\n"
		" sll %0,3\n"
		" lhi %1,0xff\n"
		" tml %2,0xffff\n"
		" jnz 2f\n"
		" ahi %0,16\n"
		" srl %2,16\n"
		"2: tml %2,0x00ff\n"
		" jnz 3f\n"
		" ahi %0,8\n"
		" srl %2,8\n"
		"3: nr %2,%1\n"
		" ic %2,0(%2,%5)\n"
		" alr %0,%2\n"
		"4:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (addr), "a" (&_sb_findmap),
		  "m" (*(addrtype *) addr) : "cc" );
	/* clamp: the scan may run past size within the last word */
	return (res < size) ? res : size;
}
616 | ||
/*
 * find_next_zero_bit - first zero bit at or after position offset.
 * Handles a partial first word inline, then falls back to
 * find_first_zero_bit for the remaining full words.
 */
static inline int
find_next_zero_bit (const unsigned long * addr, int size, int offset)
{
	unsigned long * p = ((unsigned long *) addr) + (offset >> 5);
	unsigned long bitvec, reg;
	int set, bit = offset & 31, res;

	if (bit) {
		/*
		 * Look for zero in first word
		 */
		bitvec = (*p) >> bit;
		/* narrow down halfword/byte-wise, then _zb_findmap lookup */
		__asm__(" slr %0,%0\n"
			" lhi %2,0xff\n"
			" tml %1,0xffff\n"
			" jno 0f\n"
			" ahi %0,16\n"
			" srl %1,16\n"
			"0: tml %1,0x00ff\n"
			" jno 1f\n"
			" ahi %0,8\n"
			" srl %1,8\n"
			"1: nr %1,%2\n"
			" ic %1,0(%1,%3)\n"
			" alr %0,%1"
			: "=&d" (set), "+a" (bitvec), "=&d" (reg)
			: "a" (&_zb_findmap) : "cc" );
		if (set < (32 - bit))
			return set + offset;
		/* no zero in the partial word, continue with the next one */
		offset += 32 - bit;
		p++;
	}
	/*
	 * No zero yet, search remaining full words for a zero
	 */
	res = find_first_zero_bit (p, size - 32 * (p - (unsigned long *) addr));
	return (offset + res);
}
655 | ||
/*
 * find_next_bit - first set bit at or after position offset.
 * Handles a partial first word inline, then falls back to
 * find_first_bit for the remaining full words.
 */
static inline int
find_next_bit (const unsigned long * addr, int size, int offset)
{
	unsigned long * p = ((unsigned long *) addr) + (offset >> 5);
	unsigned long bitvec, reg;
	int set, bit = offset & 31, res;

	if (bit) {
		/*
		 * Look for set bit in first word
		 */
		bitvec = (*p) >> bit;
		/* narrow down halfword/byte-wise, then _sb_findmap lookup */
		__asm__(" slr %0,%0\n"
			" lhi %2,0xff\n"
			" tml %1,0xffff\n"
			" jnz 0f\n"
			" ahi %0,16\n"
			" srl %1,16\n"
			"0: tml %1,0x00ff\n"
			" jnz 1f\n"
			" ahi %0,8\n"
			" srl %1,8\n"
			"1: nr %1,%2\n"
			" ic %1,0(%1,%3)\n"
			" alr %0,%1"
			: "=&d" (set), "+a" (bitvec), "=&d" (reg)
			: "a" (&_sb_findmap) : "cc" );
		if (set < (32 - bit))
			return set + offset;
		/* no set bit in the partial word, continue with next one */
		offset += 32 - bit;
		p++;
	}
	/*
	 * No set bit yet, search remaining full words for a bit
	 */
	res = find_first_bit (p, size - 32 * (p - (unsigned long *) addr));
	return (offset + res);
}
694 | ||
695 | #else /* __s390x__ */ | |
696 | ||
/*
 * Find-bit routines..
 */

/*
 * 64-bit find_first_zero_bit: scans 64-bit words for one != -1, then
 * narrows down 32/16/8 bits at a time and finishes with a
 * _zb_findmap lookup.  Returns size if every bit is set.
 */
static inline unsigned long
find_first_zero_bit(const unsigned long * addr, unsigned long size)
{
	/* dummy type so gcc treats the whole bitmap as a memory input */
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long res, cmp, count;

	if (!size)
		return 0;
	__asm__(" lghi %1,-1\n"
		" lgr %2,%3\n"
		" slgr %0,%0\n"
		" aghi %2,63\n"
		" srlg %2,%2,6\n"
		"0: cg %1,0(%0,%4)\n"
		" jne 1f\n"
		" aghi %0,8\n"
		" brct %2,0b\n"
		" lgr %0,%3\n"
		" j 5f\n"
		"1: lg %2,0(%0,%4)\n"
		" sllg %0,%0,3\n"
		" clr %2,%1\n"
		" jne 2f\n"
		" aghi %0,32\n"
		" srlg %2,%2,32\n"
		"2: lghi %1,0xff\n"
		" tmll %2,0xffff\n"
		" jno 3f\n"
		" aghi %0,16\n"
		" srl %2,16\n"
		"3: tmll %2,0x00ff\n"
		" jno 4f\n"
		" aghi %0,8\n"
		" srl %2,8\n"
		"4: ngr %2,%1\n"
		" ic %2,0(%2,%5)\n"
		" algr %0,%2\n"
		"5:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (addr), "a" (&_zb_findmap),
		  "m" (*(addrtype *) addr) : "cc" );
	/* clamp: the scan may run past size within the last word */
	return (res < size) ? res : size;
}
743 | ||
/*
 * 64-bit find_first_bit: scans 64-bit words for one != 0, then
 * narrows down 32/16/8 bits at a time and finishes with a
 * _sb_findmap lookup.  Returns size if no bit is set.
 */
static inline unsigned long
find_first_bit(const unsigned long * addr, unsigned long size)
{
	/* dummy type so gcc treats the whole bitmap as a memory input */
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long res, cmp, count;

	if (!size)
		return 0;
	__asm__(" slgr %1,%1\n"
		" lgr %2,%3\n"
		" slgr %0,%0\n"
		" aghi %2,63\n"
		" srlg %2,%2,6\n"
		"0: cg %1,0(%0,%4)\n"
		" jne 1f\n"
		" aghi %0,8\n"
		" brct %2,0b\n"
		" lgr %0,%3\n"
		" j 5f\n"
		"1: lg %2,0(%0,%4)\n"
		" sllg %0,%0,3\n"
		" clr %2,%1\n"
		" jne 2f\n"
		" aghi %0,32\n"
		" srlg %2,%2,32\n"
		"2: lghi %1,0xff\n"
		" tmll %2,0xffff\n"
		" jnz 3f\n"
		" aghi %0,16\n"
		" srl %2,16\n"
		"3: tmll %2,0x00ff\n"
		" jnz 4f\n"
		" aghi %0,8\n"
		" srl %2,8\n"
		"4: ngr %2,%1\n"
		" ic %2,0(%2,%5)\n"
		" algr %0,%2\n"
		"5:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (addr), "a" (&_sb_findmap),
		  "m" (*(addrtype *) addr) : "cc" );
	/* clamp: the scan may run past size within the last word */
	return (res < size) ? res : size;
}
787 | ||
/*
 * 64-bit find_next_zero_bit: first zero bit at or after offset.
 * Handles a partial first word inline, then falls back to
 * find_first_zero_bit for the remaining full words.
 */
static inline unsigned long
find_next_zero_bit (const unsigned long * addr, unsigned long size, unsigned long offset)
{
	unsigned long * p = ((unsigned long *) addr) + (offset >> 6);
	unsigned long bitvec, reg;
	unsigned long set, bit = offset & 63, res;

	if (bit) {
		/*
		 * Look for zero in first word
		 */
		bitvec = (*p) >> bit;
		/* narrow down 32/16/8 bits, then _zb_findmap lookup */
		__asm__(" lhi %2,-1\n"
			" slgr %0,%0\n"
			" clr %1,%2\n"
			" jne 0f\n"
			" aghi %0,32\n"
			" srlg %1,%1,32\n"
			"0: lghi %2,0xff\n"
			" tmll %1,0xffff\n"
			" jno 1f\n"
			" aghi %0,16\n"
			" srlg %1,%1,16\n"
			"1: tmll %1,0x00ff\n"
			" jno 2f\n"
			" aghi %0,8\n"
			" srlg %1,%1,8\n"
			"2: ngr %1,%2\n"
			" ic %1,0(%1,%3)\n"
			" algr %0,%1"
			: "=&d" (set), "+a" (bitvec), "=&d" (reg)
			: "a" (&_zb_findmap) : "cc" );
		if (set < (64 - bit))
			return set + offset;
		/* no zero in the partial word, continue with next one */
		offset += 64 - bit;
		p++;
	}
	/*
	 * No zero yet, search remaining full words for a zero
	 */
	res = find_first_zero_bit (p, size - 64 * (p - (unsigned long *) addr));
	return (offset + res);
}
831 | ||
/*
 * 64-bit find_next_bit: first set bit at or after offset.
 * Handles a partial first word inline, then falls back to
 * find_first_bit for the remaining full words.
 */
static inline unsigned long
find_next_bit (const unsigned long * addr, unsigned long size, unsigned long offset)
{
	unsigned long * p = ((unsigned long *) addr) + (offset >> 6);
	unsigned long bitvec, reg;
	unsigned long set, bit = offset & 63, res;

	if (bit) {
		/*
		 * Look for set bit in first word
		 */
		bitvec = (*p) >> bit;
		/* narrow down 32/16/8 bits, then _sb_findmap lookup */
		__asm__(" slgr %0,%0\n"
			" ltr %1,%1\n"
			" jnz 0f\n"
			" aghi %0,32\n"
			" srlg %1,%1,32\n"
			"0: lghi %2,0xff\n"
			" tmll %1,0xffff\n"
			" jnz 1f\n"
			" aghi %0,16\n"
			" srlg %1,%1,16\n"
			"1: tmll %1,0x00ff\n"
			" jnz 2f\n"
			" aghi %0,8\n"
			" srlg %1,%1,8\n"
			"2: ngr %1,%2\n"
			" ic %1,0(%1,%3)\n"
			" algr %0,%1"
			: "=&d" (set), "+a" (bitvec), "=&d" (reg)
			: "a" (&_sb_findmap) : "cc" );
		if (set < (64 - bit))
			return set + offset;
		/* no set bit in the partial word, continue with next one */
		offset += 64 - bit;
		p++;
	}
	/*
	 * No set bit yet, search remaining full words for a bit
	 */
	res = find_first_bit (p, size - 64 * (p - (unsigned long *) addr));
	return (offset + res);
}
874 | ||
875 | #endif /* __s390x__ */ | |
876 | ||
877 | /* | |
878 | * ffz = Find First Zero in word. Undefined if no zero exists, | |
879 | * so code should check against ~0UL first.. | |
880 | */ | |
881 | static inline unsigned long ffz(unsigned long word) | |
882 | { | |
883 | unsigned long bit = 0; | |
884 | ||
885 | #ifdef __s390x__ | |
886 | if (likely((word & 0xffffffff) == 0xffffffff)) { | |
887 | word >>= 32; | |
888 | bit += 32; | |
889 | } | |
890 | #endif | |
891 | if (likely((word & 0xffff) == 0xffff)) { | |
892 | word >>= 16; | |
893 | bit += 16; | |
894 | } | |
895 | if (likely((word & 0xff) == 0xff)) { | |
896 | word >>= 8; | |
897 | bit += 8; | |
898 | } | |
899 | return bit + _zb_findmap[word & 0xff]; | |
900 | } | |
901 | ||
902 | /* | |
903 | * __ffs = find first bit in word. Undefined if no bit exists, | |
904 | * so code should check against 0UL first.. | |
905 | */ | |
906 | static inline unsigned long __ffs (unsigned long word) | |
907 | { | |
908 | unsigned long bit = 0; | |
909 | ||
910 | #ifdef __s390x__ | |
911 | if (likely((word & 0xffffffff) == 0)) { | |
912 | word >>= 32; | |
913 | bit += 32; | |
914 | } | |
915 | #endif | |
916 | if (likely((word & 0xffff) == 0)) { | |
917 | word >>= 16; | |
918 | bit += 16; | |
919 | } | |
920 | if (likely((word & 0xff) == 0)) { | |
921 | word >>= 8; | |
922 | bit += 8; | |
923 | } | |
924 | return bit + _sb_findmap[word & 0xff]; | |
925 | } | |
926 | ||
/*
 * Every architecture must define this function. It's the fastest
 * way of searching a 140-bit bitmap where the first 100 bits are
 * unlikely to be set. It's guaranteed that at least one of the 140
 * bits is cleared.
 */
static inline int sched_find_first_bit(unsigned long *b)
{
	/* the generic word-wise scan above is already the fast path here */
	return find_first_bit(b, 140);
}
937 | ||
/*
 * ffs: find first bit set. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */
#define ffs(x) generic_ffs(x)

/*
 * fls: find last bit set.
 */
#define fls(x) generic_fls(x)
949 | ||
/*
 * hweightN: returns the hamming weight (i.e. the number
 * of bits set) of a N-bit word
 */
/*
 * Use an unsigned long long temporary: on 31-bit s390, unsigned long
 * is only 32 bits wide, so the previous "unsigned long __x" silently
 * truncated the upper half of a 64-bit argument and "__x>>32" was a
 * shift by the full width of the type (undefined behaviour).  On
 * 64-bit builds the behaviour is unchanged.
 */
#define hweight64(x)						\
({								\
	unsigned long long __x = (x);				\
	unsigned int __w;					\
	__w = generic_hweight32((unsigned int) __x);		\
	__w += generic_hweight32((unsigned int) (__x>>32));	\
	__w;							\
})
#define hweight32(x) generic_hweight32(x)
#define hweight16(x) generic_hweight16(x)
#define hweight8(x) generic_hweight8(x)
965 | ||
966 | ||
967 | #ifdef __KERNEL__ | |
968 | ||
/*
 * ATTENTION: intel byte ordering convention for ext2 and minix !!
 * bit 0 is the LSB of addr; bit 31 is the MSB of addr;
 * bit 32 is the LSB of (addr+4).
 * That combined with the little endian byte order of Intel gives the
 * following bit order in memory:
 *    07 06 05 04 03 02 01 00 15 14 13 12 11 10 09 08 \
 *    23 22 21 20 19 18 17 16 31 30 29 28 27 26 25 24
 *
 * The XOR with (__BITOPS_WORDSIZE - 8) (i.e. 56 on 64 bit, 24 on
 * 31 bit) inverts the byte-index bits of the bit number while
 * leaving the bit-within-byte offset (low 3 bits) intact: this
 * reverses the byte order within one machine word and so maps the
 * little-endian ext2 bit number onto the native big-endian bit
 * numbering used by the test_and_* primitives.
 */

#define ext2_set_bit(nr, addr)       \
	test_and_set_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
#define ext2_set_bit_atomic(lock, nr, addr)       \
	test_and_set_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
#define ext2_clear_bit(nr, addr)     \
	test_and_clear_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
#define ext2_clear_bit_atomic(lock, nr, addr)     \
	test_and_clear_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
#define ext2_test_bit(nr, addr)      \
	test_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
989 | ||
990 | #ifndef __s390x__ | |
991 | ||
/*
 * Find the first zero bit in a little-endian (ext2-style) bitmap.
 * Returns the bit number of the first zero bit, or 'size' if the
 * bitmap contains no zero bit (and 0 for a zero-sized bitmap).
 *
 * Strategy: scan 32-bit words comparing against all-ones (cl) until
 * one differs, then narrow down inside that word with tmh/tml tests
 * and finish via the _zb_findmap byte table.
 * Register roles (from the constraints): %0 = res, %1 = cmp,
 * %2 = count, %3 = size, %4 = vaddr, %5 = &_zb_findmap.
 */
static inline int
ext2_find_first_zero_bit(void *vaddr, unsigned int size)
{
	/* dummy type so "m" covers the whole bitmap for the optimizer */
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long cmp, count;
	unsigned int res;

	if (!size)
		return 0;
	__asm__(" lhi %1,-1\n"            /* cmp = 0xffffffff */
		" lr %2,%3\n"
		" ahi %2,31\n"
		" srl %2,5\n"             /* count = number of 32-bit words */
		" slr %0,%0\n"            /* res = byte offset, starts at 0 */
		"0: cl %1,0(%0,%4)\n"     /* word all ones? */
		" jne 1f\n"
		" ahi %0,4\n"
		" brct %2,0b\n"           /* loop over all words */
		" lr %0,%3\n"             /* nothing found: res = size */
		" j 4f\n"
		"1: l %2,0(%0,%4)\n"      /* reload the word with a zero bit */
		" sll %0,3\n"             /* byte offset -> bit offset */
		" ahi %0,24\n"
		" lhi %1,0xff\n"
		" tmh %2,0xffff\n"        /* upper halfword all ones? */
		" jo 2f\n"
		" ahi %0,-16\n"
		" srl %2,16\n"
		"2: tml %2,0xff00\n"      /* upper byte of halfword all ones? */
		" jo 3f\n"
		" ahi %0,-8\n"
		" srl %2,8\n"
		"3: nr %2,%1\n"           /* isolate final byte */
		" ic %2,0(%2,%5)\n"       /* table lookup: first zero in byte */
		" alr %0,%2\n"
		"4:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (vaddr), "a" (&_zb_findmap),
		  "m" (*(addrtype *) vaddr) : "cc" );
	/* clamp: the last word may extend past 'size' */
	return (res < size) ? res : size;
}
1033 | ||
/*
 * Find the next zero bit at or after 'offset' in a little-endian
 * (ext2-style) bitmap of 'size' bits.  Returns 'size' if there is
 * no zero bit in range.
 */
static inline int
ext2_find_next_zero_bit(void *vaddr, unsigned int size, unsigned offset)
{
	unsigned long *addr = vaddr;
	unsigned long *p = addr + (offset >> 5);	/* word holding 'offset' */
	unsigned long word, reg;
	unsigned int bit = offset & 31UL, res;

	if (offset >= size)
		return size;

	if (bit) {
		/*
		 * Partial first word: ic/icm assemble the 32-bit word
		 * byte-reversed (a little-endian load), so ext2 bit
		 * numbers increase from bit 0 of 'word'.
		 */
		__asm__(" ic %0,0(%1)\n"
			" icm %0,2,1(%1)\n"
			" icm %0,4,2(%1)\n"
			" icm %0,8,3(%1)"
			: "=&a" (word) : "a" (p) : "cc" );
		word >>= bit;		/* discard bits below 'offset' */
		res = bit;
		/* Look for zero in first longword */
		__asm__(" lhi %2,0xff\n"
			" tml %1,0xffff\n"    /* zero in low halfword? */
			" jno 0f\n"
			" ahi %0,16\n"
			" srl %1,16\n"
			"0: tml %1,0x00ff\n"  /* zero in low byte? */
			" jno 1f\n"
			" ahi %0,8\n"
			" srl %1,8\n"
			"1: nr %1,%2\n"       /* isolate byte, table lookup */
			" ic %1,0(%1,%3)\n"
			" alr %0,%1"
			: "+&d" (res), "+&a" (word), "=&d" (reg)
			: "a" (&_zb_findmap) : "cc" );
		/* res >= 32 means the remainder of this word was all ones */
		if (res < 32)
			return (p - addr)*32 + res;
		p++;
	}
	/* No zero yet, search remaining full bytes for a zero */
	res = ext2_find_first_zero_bit (p, size - 32 * (p - addr));
	return (p - addr) * 32 + res;
}
1076 | ||
1077 | #else /* __s390x__ */ | |
1078 | ||
/*
 * 64-bit variant: find the first zero bit in a little-endian
 * (ext2-style) bitmap.  Returns the bit number of the first zero
 * bit, or 'size' if none (and 0 for a zero-sized bitmap).
 *
 * Strategy: scan 64-bit doublewords against all-ones (clg); once one
 * differs, a 32-bit compare (cl) picks the half that holds the zero,
 * then tmlh/tmll narrow to the byte and _zb_findmap finishes.
 * Register roles: %0 = res, %1 = cmp, %2 = count, %3 = size,
 * %4 = vaddr, %5 = &_zb_findmap.
 */
static inline unsigned long
ext2_find_first_zero_bit(void *vaddr, unsigned long size)
{
	/* dummy type so "m" covers the whole bitmap for the optimizer */
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long res, cmp, count;

	if (!size)
		return 0;
	__asm__(" lghi %1,-1\n"             /* cmp = all ones */
		" lgr %2,%3\n"
		" aghi %2,63\n"
		" srlg %2,%2,6\n"           /* count = number of doublewords */
		" slgr %0,%0\n"             /* res = byte offset, starts at 0 */
		"0: clg %1,0(%0,%4)\n"      /* doubleword all ones? */
		" jne 1f\n"
		" aghi %0,8\n"
		" brct %2,0b\n"             /* loop over all doublewords */
		" lgr %0,%3\n"              /* nothing found: res = size */
		" j 5f\n"
		"1: cl %1,0(%0,%4)\n"       /* zero in the first 4 bytes? */
		" jne 2f\n"
		" aghi %0,4\n"              /* no: advance to second word */
		"2: l %2,0(%0,%4)\n"
		" sllg %0,%0,3\n"           /* byte offset -> bit offset */
		" aghi %0,24\n"
		" lghi %1,0xff\n"
		" tmlh %2,0xffff\n"         /* upper halfword all ones? */
		" jo 3f\n"
		" aghi %0,-16\n"
		" srl %2,16\n"
		"3: tmll %2,0xff00\n"       /* upper byte all ones? */
		" jo 4f\n"
		" aghi %0,-8\n"
		" srl %2,8\n"
		"4: ngr %2,%1\n"            /* isolate final byte */
		" ic %2,0(%2,%5)\n"         /* table lookup: first zero */
		" algr %0,%2\n"
		"5:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (vaddr), "a" (&_zb_findmap),
		  "m" (*(addrtype *) vaddr) : "cc" );
	/* clamp: the last doubleword may extend past 'size' */
	return (res < size) ? res : size;
}
1122 | ||
/*
 * 64-bit variant: find the next zero bit at or after 'offset' in a
 * little-endian (ext2-style) bitmap of 'size' bits.  Returns 'size'
 * if there is no zero bit in range.
 */
static inline unsigned long
ext2_find_next_zero_bit(void *vaddr, unsigned long size, unsigned long offset)
{
	unsigned long *addr = vaddr;
	unsigned long *p = addr + (offset >> 6);	/* doubleword holding 'offset' */
	unsigned long word, reg;
	unsigned long bit = offset & 63UL, res;

	if (offset >= size)
		return size;

	if (bit) {
		__asm__("   lrvg %0,%1" /* load reversed, neat instruction */
			: "=a" (word) : "m" (*p) );
		word >>= bit;		/* discard bits below 'offset' */
		res = bit;
		/* Look for zero in first 8 byte word */
		/*
		 * Unrolled scan of up to four 16-bit chunks; the 0:/1:
		 * labels are fall-through only, every "zero found" exit
		 * branches to 2f for the byte/table narrowing.
		 */
		__asm__(" lghi %2,0xff\n"
			" tmll %1,0xffff\n"   /* zero in low halfword? */
			" jno 2f\n"
			" ahi %0,16\n"
			" srlg %1,%1,16\n"
			"0: tmll %1,0xffff\n"
			" jno 2f\n"
			" ahi %0,16\n"
			" srlg %1,%1,16\n"
			"1: tmll %1,0xffff\n"
			" jno 2f\n"
			" ahi %0,16\n"
			" srl %1,16\n"
			"2: tmll %1,0x00ff\n" /* zero in low byte? */
			" jno 3f\n"
			" ahi %0,8\n"
			" srl %1,8\n"
			"3: ngr %1,%2\n"      /* isolate byte, table lookup */
			" ic %1,0(%1,%3)\n"
			" alr %0,%1"
			: "+&d" (res), "+a" (word), "=&d" (reg)
			: "a" (&_zb_findmap) : "cc" );
		/* res >= 64 means the remainder of this word was all ones */
		if (res < 64)
			return (p - addr)*64 + res;
		p++;
	}
	/* No zero yet, search remaining full bytes for a zero */
	res = ext2_find_first_zero_bit (p, size - 64 * (p - addr));
	return (p - addr) * 64 + res;
}
1170 | ||
1171 | #endif /* __s390x__ */ | |
1172 | ||
/* Bitmap functions for the minix filesystem. */
/* FIXME !!! */
/*
 * NOTE(review): unlike the ext2_* macros above these pass 'nr'
 * straight through without the little-endian byte swizzle —
 * presumably what the FIXME refers to; confirm against the minix
 * filesystem's expected bit ordering before relying on these.
 */
#define minix_test_and_set_bit(nr,addr) \
	test_and_set_bit(nr,(unsigned long *)addr)
#define minix_set_bit(nr,addr) \
	set_bit(nr,(unsigned long *)addr)
#define minix_test_and_clear_bit(nr,addr) \
	test_and_clear_bit(nr,(unsigned long *)addr)
#define minix_test_bit(nr,addr) \
	test_bit(nr,(unsigned long *)addr)
#define minix_find_first_zero_bit(addr,size) \
	find_first_zero_bit(addr,size)
1185 | ||
1186 | #endif /* __KERNEL__ */ | |
1187 | ||
1188 | #endif /* _S390_BITOPS_H */ |