/*
 * Copyright (2004) Linus Torvalds
 *
 * Author: Zwane Mwaikambo <zwane@fsmlabs.com>
 *
 * Copyright (2004, 2005) Ingo Molnar
 *
 * This file contains the spinlock/rwlock implementations for the
 * SMP and the DEBUG_SPINLOCK cases. (UP-nondebug inlines them)
 */
#include <linux/linkage.h>
#include <linux/preempt.h>
#include <linux/spinlock.h>
#include <linux/interrupt.h>
#include <linux/module.h>
19 * Generic declaration of the raw read_trylock() function,
20 * architectures are supposed to optimize this:
22 int __lockfunc
generic__raw_read_trylock(raw_rwlock_t
*lock
)
24 __raw_read_lock(lock
);
27 EXPORT_SYMBOL(generic__raw_read_trylock
);
29 int __lockfunc
_spin_trylock(spinlock_t
*lock
)
32 if (_raw_spin_trylock(lock
))
38 EXPORT_SYMBOL(_spin_trylock
);
40 int __lockfunc
_read_trylock(rwlock_t
*lock
)
43 if (_raw_read_trylock(lock
))
49 EXPORT_SYMBOL(_read_trylock
);
51 int __lockfunc
_write_trylock(rwlock_t
*lock
)
54 if (_raw_write_trylock(lock
))
60 EXPORT_SYMBOL(_write_trylock
);
62 #if !defined(CONFIG_PREEMPT) || !defined(CONFIG_SMP)
64 void __lockfunc
_read_lock(rwlock_t
*lock
)
69 EXPORT_SYMBOL(_read_lock
);
71 unsigned long __lockfunc
_spin_lock_irqsave(spinlock_t
*lock
)
75 local_irq_save(flags
);
77 _raw_spin_lock_flags(lock
, &flags
);
80 EXPORT_SYMBOL(_spin_lock_irqsave
);
82 void __lockfunc
_spin_lock_irq(spinlock_t
*lock
)
88 EXPORT_SYMBOL(_spin_lock_irq
);
90 void __lockfunc
_spin_lock_bh(spinlock_t
*lock
)
96 EXPORT_SYMBOL(_spin_lock_bh
);
98 unsigned long __lockfunc
_read_lock_irqsave(rwlock_t
*lock
)
102 local_irq_save(flags
);
104 _raw_read_lock(lock
);
107 EXPORT_SYMBOL(_read_lock_irqsave
);
109 void __lockfunc
_read_lock_irq(rwlock_t
*lock
)
113 _raw_read_lock(lock
);
115 EXPORT_SYMBOL(_read_lock_irq
);
117 void __lockfunc
_read_lock_bh(rwlock_t
*lock
)
121 _raw_read_lock(lock
);
123 EXPORT_SYMBOL(_read_lock_bh
);
125 unsigned long __lockfunc
_write_lock_irqsave(rwlock_t
*lock
)
129 local_irq_save(flags
);
131 _raw_write_lock(lock
);
134 EXPORT_SYMBOL(_write_lock_irqsave
);
136 void __lockfunc
_write_lock_irq(rwlock_t
*lock
)
140 _raw_write_lock(lock
);
142 EXPORT_SYMBOL(_write_lock_irq
);
144 void __lockfunc
_write_lock_bh(rwlock_t
*lock
)
148 _raw_write_lock(lock
);
150 EXPORT_SYMBOL(_write_lock_bh
);
152 void __lockfunc
_spin_lock(spinlock_t
*lock
)
155 _raw_spin_lock(lock
);
158 EXPORT_SYMBOL(_spin_lock
);
160 void __lockfunc
_write_lock(rwlock_t
*lock
)
163 _raw_write_lock(lock
);
166 EXPORT_SYMBOL(_write_lock
);
168 #else /* CONFIG_PREEMPT: */
171 * This could be a long-held lock. We both prepare to spin for a long
172 * time (making _this_ CPU preemptable if possible), and we also signal
173 * towards that other CPU that it should break the lock ASAP.
175 * (We do this in a function because inlining it would be excessive.)
178 #define BUILD_LOCK_OPS(op, locktype) \
179 void __lockfunc _##op##_lock(locktype##_t *lock) \
183 if (likely(_raw_##op##_trylock(lock))) \
187 if (!(lock)->break_lock) \
188 (lock)->break_lock = 1; \
189 while (!op##_can_lock(lock) && (lock)->break_lock) \
192 (lock)->break_lock = 0; \
195 EXPORT_SYMBOL(_##op##_lock); \
197 unsigned long __lockfunc _##op##_lock_irqsave(locktype##_t *lock) \
199 unsigned long flags; \
203 local_irq_save(flags); \
204 if (likely(_raw_##op##_trylock(lock))) \
206 local_irq_restore(flags); \
209 if (!(lock)->break_lock) \
210 (lock)->break_lock = 1; \
211 while (!op##_can_lock(lock) && (lock)->break_lock) \
214 (lock)->break_lock = 0; \
218 EXPORT_SYMBOL(_##op##_lock_irqsave); \
220 void __lockfunc _##op##_lock_irq(locktype##_t *lock) \
222 _##op##_lock_irqsave(lock); \
225 EXPORT_SYMBOL(_##op##_lock_irq); \
227 void __lockfunc _##op##_lock_bh(locktype##_t *lock) \
229 unsigned long flags; \
232 /* Careful: we must exclude softirqs too, hence the */ \
233 /* irq-disabling. We use the generic preemption-aware */ \
236 flags = _##op##_lock_irqsave(lock); \
237 local_bh_disable(); \
238 local_irq_restore(flags); \
241 EXPORT_SYMBOL(_##op##_lock_bh)
244 * Build preemption-friendly versions of the following
245 * lock-spinning functions:
247 * _[spin|read|write]_lock()
248 * _[spin|read|write]_lock_irq()
249 * _[spin|read|write]_lock_irqsave()
250 * _[spin|read|write]_lock_bh()
252 BUILD_LOCK_OPS(spin
, spinlock
);
253 BUILD_LOCK_OPS(read
, rwlock
);
254 BUILD_LOCK_OPS(write
, rwlock
);
256 #endif /* CONFIG_PREEMPT */
258 void __lockfunc
_spin_unlock(spinlock_t
*lock
)
260 _raw_spin_unlock(lock
);
263 EXPORT_SYMBOL(_spin_unlock
);
265 void __lockfunc
_write_unlock(rwlock_t
*lock
)
267 _raw_write_unlock(lock
);
270 EXPORT_SYMBOL(_write_unlock
);
272 void __lockfunc
_read_unlock(rwlock_t
*lock
)
274 _raw_read_unlock(lock
);
277 EXPORT_SYMBOL(_read_unlock
);
279 void __lockfunc
_spin_unlock_irqrestore(spinlock_t
*lock
, unsigned long flags
)
281 _raw_spin_unlock(lock
);
282 local_irq_restore(flags
);
285 EXPORT_SYMBOL(_spin_unlock_irqrestore
);
287 void __lockfunc
_spin_unlock_irq(spinlock_t
*lock
)
289 _raw_spin_unlock(lock
);
293 EXPORT_SYMBOL(_spin_unlock_irq
);
295 void __lockfunc
_spin_unlock_bh(spinlock_t
*lock
)
297 _raw_spin_unlock(lock
);
298 preempt_enable_no_resched();
301 EXPORT_SYMBOL(_spin_unlock_bh
);
303 void __lockfunc
_read_unlock_irqrestore(rwlock_t
*lock
, unsigned long flags
)
305 _raw_read_unlock(lock
);
306 local_irq_restore(flags
);
309 EXPORT_SYMBOL(_read_unlock_irqrestore
);
311 void __lockfunc
_read_unlock_irq(rwlock_t
*lock
)
313 _raw_read_unlock(lock
);
317 EXPORT_SYMBOL(_read_unlock_irq
);
319 void __lockfunc
_read_unlock_bh(rwlock_t
*lock
)
321 _raw_read_unlock(lock
);
322 preempt_enable_no_resched();
325 EXPORT_SYMBOL(_read_unlock_bh
);
327 void __lockfunc
_write_unlock_irqrestore(rwlock_t
*lock
, unsigned long flags
)
329 _raw_write_unlock(lock
);
330 local_irq_restore(flags
);
333 EXPORT_SYMBOL(_write_unlock_irqrestore
);
335 void __lockfunc
_write_unlock_irq(rwlock_t
*lock
)
337 _raw_write_unlock(lock
);
341 EXPORT_SYMBOL(_write_unlock_irq
);
343 void __lockfunc
_write_unlock_bh(rwlock_t
*lock
)
345 _raw_write_unlock(lock
);
346 preempt_enable_no_resched();
349 EXPORT_SYMBOL(_write_unlock_bh
);
351 int __lockfunc
_spin_trylock_bh(spinlock_t
*lock
)
355 if (_raw_spin_trylock(lock
))
358 preempt_enable_no_resched();
362 EXPORT_SYMBOL(_spin_trylock_bh
);
/*
 * Return nonzero if @addr lies inside the __lockfunc text section,
 * i.e. inside one of the lock-spinning functions in this file.
 */
int in_lock_functions(unsigned long addr)
{
	/* Linker adds these: start and end of __lockfunc functions */
	extern char __lock_text_start[], __lock_text_end[];

	return addr >= (unsigned long)__lock_text_start
	&& addr < (unsigned long)__lock_text_end;
}
EXPORT_SYMBOL(in_lock_functions);