]>
Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | /* |
2 | * Copyright (2004) Linus Torvalds | |
3 | * | |
4 | * Author: Zwane Mwaikambo <zwane@fsmlabs.com> | |
5 | * | |
fb1c8f93 IM |
6 | * Copyright (2004, 2005) Ingo Molnar |
7 | * | |
8 | * This file contains the spinlock/rwlock implementations for the | |
9 | * SMP and the DEBUG_SPINLOCK cases. (UP-nondebug inlines them) | |
0cb91a22 AK |
10 | * |
11 | * Note that some architectures have special knowledge about the | |
12 | * stack frames of these functions in their profile_pc. If you | |
13 | * change anything significant here that could change the stack | |
14 | * frame contact the architecture maintainers. | |
1da177e4 LT |
15 | */ |
16 | ||
1da177e4 LT |
17 | #include <linux/linkage.h> |
18 | #include <linux/preempt.h> | |
19 | #include <linux/spinlock.h> | |
20 | #include <linux/interrupt.h> | |
8a25d5de | 21 | #include <linux/debug_locks.h> |
1da177e4 LT |
22 | #include <linux/module.h> |
23 | ||
#ifndef _spin_trylock
/*
 * Out-of-line _spin_trylock(): delegates to the generic __spin_trylock()
 * inline and returns its int result.  Compiled out when the arch/config
 * already #defines its own _spin_trylock.  Exported for modules.
 */
int __lockfunc _spin_trylock(spinlock_t *lock)
{
	return __spin_trylock(lock);
}
EXPORT_SYMBOL(_spin_trylock);
#endif
1da177e4 | 31 | |
#ifndef _read_trylock
/*
 * Out-of-line _read_trylock(): delegates to the generic __read_trylock()
 * inline; built only when no arch-specific version is #defined.
 */
int __lockfunc _read_trylock(rwlock_t *lock)
{
	return __read_trylock(lock);
}
EXPORT_SYMBOL(_read_trylock);
#endif
1da177e4 | 39 | |
#ifndef _write_trylock
/*
 * Out-of-line _write_trylock(): delegates to the generic
 * __write_trylock() inline; built only when no arch-specific version
 * is #defined.
 */
int __lockfunc _write_trylock(rwlock_t *lock)
{
	return __write_trylock(lock);
}
EXPORT_SYMBOL(_write_trylock);
#endif
1da177e4 | 47 | |
8a25d5de IM |
48 | /* |
49 | * If lockdep is enabled then we use the non-preemption spin-ops | |
50 | * even on CONFIG_PREEMPT, because lockdep assumes that interrupts are | |
51 | * not re-enabled during lock-acquire (which the preempt-spin-ops do): | |
52 | */ | |
95c354fe | 53 | #if !defined(CONFIG_GENERIC_LOCKBREAK) || defined(CONFIG_DEBUG_LOCK_ALLOC) |
1da177e4 | 54 | |
#ifndef _read_lock
/*
 * Out-of-line _read_lock(): wraps the generic __read_lock() inline.
 * Compiled out when the arch/config provides its own _read_lock.
 */
void __lockfunc _read_lock(rwlock_t *lock)
{
	__read_lock(lock);
}
EXPORT_SYMBOL(_read_lock);
#endif
1da177e4 | 62 | |
#ifndef _spin_lock_irqsave
/*
 * Out-of-line _spin_lock_irqsave(): wraps __spin_lock_irqsave() and
 * returns the saved IRQ flags for the matching irqrestore.
 */
unsigned long __lockfunc _spin_lock_irqsave(spinlock_t *lock)
{
	return __spin_lock_irqsave(lock);
}
EXPORT_SYMBOL(_spin_lock_irqsave);
#endif
1da177e4 | 70 | |
#ifndef _spin_lock_irq
/* Out-of-line _spin_lock_irq(): wraps the generic __spin_lock_irq() inline. */
void __lockfunc _spin_lock_irq(spinlock_t *lock)
{
	__spin_lock_irq(lock);
}
EXPORT_SYMBOL(_spin_lock_irq);
#endif
1da177e4 | 78 | |
#ifndef _spin_lock_bh
/* Out-of-line _spin_lock_bh(): wraps the generic __spin_lock_bh() inline. */
void __lockfunc _spin_lock_bh(spinlock_t *lock)
{
	__spin_lock_bh(lock);
}
EXPORT_SYMBOL(_spin_lock_bh);
#endif
1da177e4 | 86 | |
#ifndef _read_lock_irqsave
/*
 * Out-of-line _read_lock_irqsave(): wraps __read_lock_irqsave() and
 * returns the saved IRQ flags.
 */
unsigned long __lockfunc _read_lock_irqsave(rwlock_t *lock)
{
	return __read_lock_irqsave(lock);
}
EXPORT_SYMBOL(_read_lock_irqsave);
#endif
1da177e4 | 94 | |
#ifndef _read_lock_irq
/* Out-of-line _read_lock_irq(): wraps the generic __read_lock_irq() inline. */
void __lockfunc _read_lock_irq(rwlock_t *lock)
{
	__read_lock_irq(lock);
}
EXPORT_SYMBOL(_read_lock_irq);
#endif
1da177e4 | 102 | |
#ifndef _read_lock_bh
/* Out-of-line _read_lock_bh(): wraps the generic __read_lock_bh() inline. */
void __lockfunc _read_lock_bh(rwlock_t *lock)
{
	__read_lock_bh(lock);
}
EXPORT_SYMBOL(_read_lock_bh);
#endif
1da177e4 | 110 | |
#ifndef _write_lock_irqsave
/*
 * Out-of-line _write_lock_irqsave(): wraps __write_lock_irqsave() and
 * returns the saved IRQ flags.
 */
unsigned long __lockfunc _write_lock_irqsave(rwlock_t *lock)
{
	return __write_lock_irqsave(lock);
}
EXPORT_SYMBOL(_write_lock_irqsave);
#endif
1da177e4 | 118 | |
#ifndef _write_lock_irq
/* Out-of-line _write_lock_irq(): wraps the generic __write_lock_irq() inline. */
void __lockfunc _write_lock_irq(rwlock_t *lock)
{
	__write_lock_irq(lock);
}
EXPORT_SYMBOL(_write_lock_irq);
#endif
1da177e4 | 126 | |
#ifndef _write_lock_bh
/* Out-of-line _write_lock_bh(): wraps the generic __write_lock_bh() inline. */
void __lockfunc _write_lock_bh(rwlock_t *lock)
{
	__write_lock_bh(lock);
}
EXPORT_SYMBOL(_write_lock_bh);
#endif
1da177e4 | 134 | |
#ifndef _spin_lock
/*
 * Out-of-line _spin_lock(): wraps the generic __spin_lock() inline.
 * Compiled out when the arch/config provides its own _spin_lock.
 */
void __lockfunc _spin_lock(spinlock_t *lock)
{
	__spin_lock(lock);
}
EXPORT_SYMBOL(_spin_lock);
#endif
1da177e4 | 142 | |
#ifndef _write_lock
/* Out-of-line _write_lock(): wraps the generic __write_lock() inline. */
void __lockfunc _write_lock(rwlock_t *lock)
{
	__write_lock(lock);
}
EXPORT_SYMBOL(_write_lock);
#endif
1da177e4 LT |
150 | |
151 | #else /* CONFIG_PREEMPT: */ | |
152 | ||
153 | /* | |
154 | * This could be a long-held lock. We both prepare to spin for a long | |
155 | * time (making _this_ CPU preemptable if possible), and we also signal | |
156 | * towards that other CPU that it should break the lock ASAP. | |
157 | * | |
158 | * (We do this in a function because inlining it would be excessive.) | |
159 | */ | |
160 | ||
/*
 * BUILD_LOCK_OPS(op, locktype) - generate the preemption-friendly lock
 * operations for one lock flavour.  @op is the operation prefix (spin,
 * read, write) and @locktype the matching type tag (spinlock, rwlock);
 * token pasting emits _##op##_lock, _##op##_lock_irqsave,
 * _##op##_lock_irq and _##op##_lock_bh, each EXPORT_SYMBOL'ed.
 *
 * Acquire loop: trylock with preemption (and, for irqsave, IRQs)
 * disabled; on failure drop those again so we stay preemptible, set
 * ->break_lock to signal the holder (see the comment above), and spin
 * via _raw_##op##_relax() while the lock is unavailable and
 * ->break_lock stays set, then retry the trylock.
 */
#define BUILD_LOCK_OPS(op, locktype)					\
void __lockfunc _##op##_lock(locktype##_t *lock)			\
{									\
	for (;;) {							\
		preempt_disable();					\
		if (likely(_raw_##op##_trylock(lock)))			\
			break;						\
		preempt_enable();					\
									\
		if (!(lock)->break_lock)				\
			(lock)->break_lock = 1;				\
		while (!op##_can_lock(lock) && (lock)->break_lock)	\
			_raw_##op##_relax(&lock->raw_lock);		\
	}								\
	(lock)->break_lock = 0;						\
}									\
									\
EXPORT_SYMBOL(_##op##_lock);						\
									\
unsigned long __lockfunc _##op##_lock_irqsave(locktype##_t *lock)	\
{									\
	unsigned long flags;						\
									\
	for (;;) {							\
		preempt_disable();					\
		local_irq_save(flags);					\
		if (likely(_raw_##op##_trylock(lock)))			\
			break;						\
		local_irq_restore(flags);				\
		preempt_enable();					\
									\
		if (!(lock)->break_lock)				\
			(lock)->break_lock = 1;				\
		while (!op##_can_lock(lock) && (lock)->break_lock)	\
			_raw_##op##_relax(&lock->raw_lock);		\
	}								\
	(lock)->break_lock = 0;						\
	return flags;							\
}									\
									\
EXPORT_SYMBOL(_##op##_lock_irqsave);					\
									\
void __lockfunc _##op##_lock_irq(locktype##_t *lock)			\
{									\
	_##op##_lock_irqsave(lock);					\
}									\
									\
EXPORT_SYMBOL(_##op##_lock_irq);					\
									\
void __lockfunc _##op##_lock_bh(locktype##_t *lock)			\
{									\
	unsigned long flags;						\
									\
	/* */								\
	/* Careful: we must exclude softirqs too, hence the */		\
	/* irq-disabling. We use the generic preemption-aware */	\
	/* function: */							\
	/**/								\
	flags = _##op##_lock_irqsave(lock);				\
	local_bh_disable();						\
	local_irq_restore(flags);					\
}									\
									\
EXPORT_SYMBOL(_##op##_lock_bh)

/*
 * Build preemption-friendly versions of the following
 * lock-spinning functions:
 *
 *         _[spin|read|write]_lock()
 *         _[spin|read|write]_lock_irq()
 *         _[spin|read|write]_lock_irqsave()
 *         _[spin|read|write]_lock_bh()
 */
BUILD_LOCK_OPS(spin, spinlock);
BUILD_LOCK_OPS(read, rwlock);
BUILD_LOCK_OPS(write, rwlock);
238 | ||
239 | #endif /* CONFIG_PREEMPT */ | |
240 | ||
8a25d5de IM |
241 | #ifdef CONFIG_DEBUG_LOCK_ALLOC |
242 | ||
/*
 * Lockdep-aware spin lock with an explicit nesting @subclass: disable
 * preemption, annotate the acquire on lock->dep_map (spin_acquire with
 * _RET_IP_ as the caller site), then take the lock through the
 * LOCK_CONTENDED() helper using the raw trylock/lock pair.
 */
void __lockfunc _spin_lock_nested(spinlock_t *lock, int subclass)
{
	preempt_disable();
	spin_acquire(&lock->dep_map, subclass, 0, _RET_IP_);
	LOCK_CONTENDED(lock, _raw_spin_trylock, _raw_spin_lock);
}
EXPORT_SYMBOL(_spin_lock_nested);
b7d39aff | 250 | |
/*
 * As _spin_lock_nested() but IRQ-saving: IRQs are disabled before
 * preemption and the saved flags are returned for the matching
 * irqrestore.  &flags is passed through LOCK_CONTENDED_FLAGS so the
 * arch-specific _raw_spin_lock_flags() variant can be used for the
 * contended path.
 */
unsigned long __lockfunc _spin_lock_irqsave_nested(spinlock_t *lock, int subclass)
{
	unsigned long flags;

	local_irq_save(flags);
	preempt_disable();
	spin_acquire(&lock->dep_map, subclass, 0, _RET_IP_);
	LOCK_CONTENDED_FLAGS(lock, _raw_spin_trylock, _raw_spin_lock,
				_raw_spin_lock_flags, &flags);
	return flags;
}
EXPORT_SYMBOL(_spin_lock_irqsave_nested);
8a25d5de | 263 | |
/*
 * Acquire @lock while telling lockdep that it nests under @nest_lock
 * (spin_acquire_nest() is given the outer lock's lockdep map), then
 * take the lock via LOCK_CONTENDED().
 */
void __lockfunc _spin_lock_nest_lock(spinlock_t *lock,
				     struct lockdep_map *nest_lock)
{
	preempt_disable();
	spin_acquire_nest(&lock->dep_map, 0, 0, nest_lock, _RET_IP_);
	LOCK_CONTENDED(lock, _raw_spin_trylock, _raw_spin_lock);
}
EXPORT_SYMBOL(_spin_lock_nest_lock);
272 | ||
8a25d5de IM |
273 | #endif |
274 | ||
#ifndef _spin_unlock
/*
 * Out-of-line _spin_unlock(): wraps the generic __spin_unlock() inline.
 * Compiled out when the arch/config provides its own _spin_unlock.
 */
void __lockfunc _spin_unlock(spinlock_t *lock)
{
	__spin_unlock(lock);
}
EXPORT_SYMBOL(_spin_unlock);
#endif
1da177e4 | 282 | |
#ifndef _write_unlock
/* Out-of-line _write_unlock(): wraps the generic __write_unlock() inline. */
void __lockfunc _write_unlock(rwlock_t *lock)
{
	__write_unlock(lock);
}
EXPORT_SYMBOL(_write_unlock);
#endif
1da177e4 | 290 | |
#ifndef _read_unlock
/* Out-of-line _read_unlock(): wraps the generic __read_unlock() inline. */
void __lockfunc _read_unlock(rwlock_t *lock)
{
	__read_unlock(lock);
}
EXPORT_SYMBOL(_read_unlock);
#endif
1da177e4 | 298 | |
#ifndef _spin_unlock_irqrestore
/*
 * Out-of-line _spin_unlock_irqrestore(): releases the lock and restores
 * the IRQ @flags previously returned by _spin_lock_irqsave().
 */
void __lockfunc _spin_unlock_irqrestore(spinlock_t *lock, unsigned long flags)
{
	__spin_unlock_irqrestore(lock, flags);
}
EXPORT_SYMBOL(_spin_unlock_irqrestore);
#endif
1da177e4 | 306 | |
#ifndef _spin_unlock_irq
/* Out-of-line _spin_unlock_irq(): wraps the generic __spin_unlock_irq() inline. */
void __lockfunc _spin_unlock_irq(spinlock_t *lock)
{
	__spin_unlock_irq(lock);
}
EXPORT_SYMBOL(_spin_unlock_irq);
#endif
1da177e4 | 314 | |
#ifndef _spin_unlock_bh
/* Out-of-line _spin_unlock_bh(): wraps the generic __spin_unlock_bh() inline. */
void __lockfunc _spin_unlock_bh(spinlock_t *lock)
{
	__spin_unlock_bh(lock);
}
EXPORT_SYMBOL(_spin_unlock_bh);
#endif
1da177e4 | 322 | |
#ifndef _read_unlock_irqrestore
/*
 * Out-of-line _read_unlock_irqrestore(): releases the read lock and
 * restores the IRQ @flags from the matching _read_lock_irqsave().
 */
void __lockfunc _read_unlock_irqrestore(rwlock_t *lock, unsigned long flags)
{
	__read_unlock_irqrestore(lock, flags);
}
EXPORT_SYMBOL(_read_unlock_irqrestore);
#endif
1da177e4 | 330 | |
#ifndef _read_unlock_irq
/* Out-of-line _read_unlock_irq(): wraps the generic __read_unlock_irq() inline. */
void __lockfunc _read_unlock_irq(rwlock_t *lock)
{
	__read_unlock_irq(lock);
}
EXPORT_SYMBOL(_read_unlock_irq);
#endif
1da177e4 | 338 | |
#ifndef _read_unlock_bh
/* Out-of-line _read_unlock_bh(): wraps the generic __read_unlock_bh() inline. */
void __lockfunc _read_unlock_bh(rwlock_t *lock)
{
	__read_unlock_bh(lock);
}
EXPORT_SYMBOL(_read_unlock_bh);
#endif
1da177e4 | 346 | |
#ifndef _write_unlock_irqrestore
/*
 * Out-of-line _write_unlock_irqrestore(): releases the write lock and
 * restores the IRQ @flags from the matching _write_lock_irqsave().
 */
void __lockfunc _write_unlock_irqrestore(rwlock_t *lock, unsigned long flags)
{
	__write_unlock_irqrestore(lock, flags);
}
EXPORT_SYMBOL(_write_unlock_irqrestore);
#endif
1da177e4 | 354 | |
#ifndef _write_unlock_irq
/* Out-of-line _write_unlock_irq(): wraps the generic __write_unlock_irq() inline. */
void __lockfunc _write_unlock_irq(rwlock_t *lock)
{
	__write_unlock_irq(lock);
}
EXPORT_SYMBOL(_write_unlock_irq);
#endif
1da177e4 | 362 | |
#ifndef _write_unlock_bh
/* Out-of-line _write_unlock_bh(): wraps the generic __write_unlock_bh() inline. */
void __lockfunc _write_unlock_bh(rwlock_t *lock)
{
	__write_unlock_bh(lock);
}
EXPORT_SYMBOL(_write_unlock_bh);
#endif
1da177e4 | 370 | |
#ifndef _spin_trylock_bh
/*
 * Out-of-line _spin_trylock_bh(): delegates to the generic
 * __spin_trylock_bh() inline and returns its int result.
 */
int __lockfunc _spin_trylock_bh(spinlock_t *lock)
{
	return __spin_trylock_bh(lock);
}
EXPORT_SYMBOL(_spin_trylock_bh);
#endif
1da177e4 | 378 | |
0764d23c | 379 | notrace int in_lock_functions(unsigned long addr) |
1da177e4 LT |
380 | { |
381 | /* Linker adds these: start and end of __lockfunc functions */ | |
382 | extern char __lock_text_start[], __lock_text_end[]; | |
383 | ||
384 | return addr >= (unsigned long)__lock_text_start | |
385 | && addr < (unsigned long)__lock_text_end; | |
386 | } | |
387 | EXPORT_SYMBOL(in_lock_functions); |