#ifndef _LINUX_BITOPS_H
#define _LINUX_BITOPS_H
#include <asm/types.h>

#ifdef __KERNEL__
#define BIT(nr) (1UL << (nr))
#define BIT_ULL(nr) (1ULL << (nr))
#define BIT_MASK(nr) (1UL << ((nr) % BITS_PER_LONG))
#define BIT_WORD(nr) ((nr) / BITS_PER_LONG)
#define BIT_ULL_MASK(nr) (1ULL << ((nr) % BITS_PER_LONG_LONG))
#define BIT_ULL_WORD(nr) ((nr) / BITS_PER_LONG_LONG)
#define BITS_PER_BYTE 8
#define BITS_TO_LONGS(nr) DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(long))
#endif

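/*
 * Illustrative examples (added here, not part of the original header):
 * BIT() builds a single-bit mask, while BIT_MASK()/BIT_WORD() locate a bit
 * inside an array of unsigned longs. Assuming BITS_PER_LONG == 64:
 *
 *	BIT(5)       == 0x20UL
 *	BIT_WORD(70) == 1		(bit 70 lives in the second long)
 *	BIT_MASK(70) == (1UL << 6)	(its position within that long)
 */
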
/*
 * Create a contiguous bitmask starting at bit position @l and ending at
 * position @h. For example,
 * GENMASK_ULL(39, 21) gives us the 64-bit vector 0x000000ffffe00000.
 */
#define GENMASK(h, l) \
	(((~0UL) << (l)) & (~0UL >> (BITS_PER_LONG - 1 - (h))))

#define GENMASK_ULL(h, l) \
	(((~0ULL) << (l)) & (~0ULL >> (BITS_PER_LONG_LONG - 1 - (h))))

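/*
 * Illustrative example (added, not from the original header): GENMASK(h, l)
 * sets bits l..h inclusive, so GENMASK(7, 4) == 0xf0UL, and
 * GENMASK_ULL(39, 21) == 0x000000ffffe00000ULL, matching the comment above.
 */
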
extern unsigned int __sw_hweight8(unsigned int w);
extern unsigned int __sw_hweight16(unsigned int w);
extern unsigned int __sw_hweight32(unsigned int w);
extern unsigned long __sw_hweight64(__u64 w);

/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope
 */
#include <asm/bitops.h>

#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size)); \
	     (bit) < (size); \
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

/* same as for_each_set_bit() but start the search at the current value of @bit */
#define for_each_set_bit_from(bit, addr, size) \
	for ((bit) = find_next_bit((addr), (size), (bit)); \
	     (bit) < (size); \
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

#define for_each_clear_bit(bit, addr, size) \
	for ((bit) = find_first_zero_bit((addr), (size)); \
	     (bit) < (size); \
	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))

/* same as for_each_clear_bit() but start the search at the current value of @bit */
#define for_each_clear_bit_from(bit, addr, size) \
	for ((bit) = find_next_zero_bit((addr), (size), (bit)); \
	     (bit) < (size); \
	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))

static __inline__ int get_bitmask_order(unsigned int count)
{
	int order;

	order = fls(count);
	return order;	/* We could be slightly more clever with -1 here... */
}

static __inline__ int get_count_order(unsigned int count)
{
	int order;

	order = fls(count) - 1;
	if (count & (count - 1))
		order++;
	return order;
}

static inline unsigned long hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
}

/**
 * rol64 - rotate a 64-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u64 rol64(__u64 word, unsigned int shift)
{
	return (word << shift) | (word >> (64 - shift));
}

/**
 * ror64 - rotate a 64-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u64 ror64(__u64 word, unsigned int shift)
{
	return (word >> shift) | (word << (64 - shift));
}

/**
 * rol32 - rotate a 32-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 rol32(__u32 word, unsigned int shift)
{
	return (word << shift) | (word >> (32 - shift));
}

/**
 * ror32 - rotate a 32-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 ror32(__u32 word, unsigned int shift)
{
	return (word >> shift) | (word << (32 - shift));
}

/**
 * rol16 - rotate a 16-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u16 rol16(__u16 word, unsigned int shift)
{
	return (word << shift) | (word >> (16 - shift));
}

/**
 * ror16 - rotate a 16-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u16 ror16(__u16 word, unsigned int shift)
{
	return (word >> shift) | (word << (16 - shift));
}

/**
 * rol8 - rotate an 8-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u8 rol8(__u8 word, unsigned int shift)
{
	return (word << shift) | (word >> (8 - shift));
}

/**
 * ror8 - rotate an 8-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u8 ror8(__u8 word, unsigned int shift)
{
	return (word >> shift) | (word << (8 - shift));
}

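/*
 * Illustrative examples (added; they assume 0 < @shift < word width, since a
 * zero shift would make the complementary shift equal the word width):
 *
 *	rol32(0x80000001, 1) == 0x00000003
 *	ror32(0x80000001, 1) == 0xc0000000
 *	rol8(0x81, 4)        == 0x18
 */
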
/**
 * sign_extend32 - sign extend a 32-bit value using specified bit as sign-bit
 * @value: value to sign extend
 * @index: 0 based bit index (0<=index<32) to sign bit
 */
static inline __s32 sign_extend32(__u32 value, int index)
{
	__u8 shift = 31 - index;
	return (__s32)(value << shift) >> shift;
}

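/*
 * Illustrative example (added): treating bit 7 as the sign bit,
 * sign_extend32(0x80, 7) == -128 and sign_extend32(0x7f, 7) == 127.
 */
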
static inline unsigned fls_long(unsigned long l)
{
	if (sizeof(l) == 4)
		return fls(l);
	return fls64(l);
}

/**
 * __ffs64 - find first set bit in a 64 bit word
 * @word: The 64 bit word
 *
 * On 64 bit arches this is a synonym for __ffs.
 * The result is not defined if no bits are set, so check that @word
 * is non-zero before calling this.
 */
static inline unsigned long __ffs64(u64 word)
{
#if BITS_PER_LONG == 32
	if (((u32)word) == 0UL)
		return __ffs((u32)(word >> 32)) + 32;
#elif BITS_PER_LONG != 64
#error BITS_PER_LONG not 32 or 64
#endif
	return __ffs((unsigned long)word);
}

#ifdef __KERNEL__

#ifndef set_mask_bits
#define set_mask_bits(ptr, _mask, _bits) \
({ \
	const typeof(*ptr) mask = (_mask), bits = (_bits); \
	typeof(*ptr) old, new; \
	\
	do { \
		old = ACCESS_ONCE(*ptr); \
		new = (old & ~mask) | bits; \
	} while (cmpxchg(ptr, old, new) != old); \
	\
	new; \
})
#endif

#ifndef find_last_bit
/**
 * find_last_bit - find the last set bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit number of the last set bit, or size.
 */
extern unsigned long find_last_bit(const unsigned long *addr,
				   unsigned long size);
#endif

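/*
 * Illustrative example (added): with a single-long bitmap whose value is
 * 0x09, find_last_bit(&map, BITS_PER_LONG) returns 3; if no bit is set, the
 * @size argument is returned instead.
 */
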
#endif /* __KERNEL__ */
#endif