]>
Commit | Line | Data |
---|---|---|
1a4d82fc JJ |
1 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT |
2 | // file at the top-level directory of this distribution and at | |
3 | // http://rust-lang.org/COPYRIGHT. | |
4 | // | |
5 | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or | |
6 | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license | |
7 | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your | |
8 | // option. This file may not be copied, modified, or distributed | |
9 | // except according to those terms. | |
10 | ||
11 | use prelude::v1::*; | |
12 | ||
13 | use cell::UnsafeCell; | |
9346a6ac | 14 | use fmt; |
1a4d82fc | 15 | use marker; |
b039eaaf | 16 | use mem; |
1a4d82fc | 17 | use ops::{Deref, DerefMut}; |
b039eaaf | 18 | use ptr; |
9346a6ac | 19 | use sys_common::poison::{self, LockResult, TryLockError, TryLockResult}; |
1a4d82fc JJ |
20 | use sys_common::rwlock as sys; |
21 | ||
/// A reader-writer lock
///
/// This type of lock allows a number of readers or at most one writer at any
/// point in time. The write portion of this lock typically allows modification
/// of the underlying data (exclusive access) and the read portion of this lock
/// typically allows for read-only access (shared access).
///
/// The priority policy of the lock is dependent on the underlying operating
/// system's implementation, and this type does not guarantee that any
/// particular policy will be used.
///
/// The type parameter `T` represents the data that this lock protects. It is
/// required that `T` satisfies `Send` to be shared across threads and `Sync` to
/// allow concurrent access through readers. The RAII guards returned from the
/// locking methods implement `Deref` (and `DerefMut` for the `write` methods)
/// to allow access to the contents of the lock.
///
/// # Poisoning
///
/// RwLocks, like Mutexes, will become poisoned on panics. Note, however, that
/// an RwLock may only be poisoned if a panic occurs while it is locked
/// exclusively (write mode). If a panic occurs in any reader, then the lock
/// will not be poisoned.
///
/// # Examples
///
/// ```
/// use std::sync::RwLock;
///
/// let lock = RwLock::new(5);
///
/// // many reader locks can be held at once
/// {
///     let r1 = lock.read().unwrap();
///     let r2 = lock.read().unwrap();
///     assert_eq!(*r1, 5);
///     assert_eq!(*r2, 5);
/// } // read locks are dropped at this point
///
/// // only one write lock may be held, however
/// {
///     let mut w = lock.write().unwrap();
///     *w += 1;
///     assert_eq!(*w, 6);
/// } // write lock is dropped here
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RwLock<T: ?Sized> {
    // Boxed so the OS-level lock keeps a stable address even when the
    // `RwLock` value itself is moved (the `sys` lock may not be movable).
    inner: Box<StaticRwLock>,
    // The protected data; `UnsafeCell` allows mutation through `&self`
    // once the appropriate raw lock is held.
    data: UnsafeCell<T>,
}
73 | ||
// SAFETY: an `RwLock` may be sent/shared across threads when the contained
// data is both `Send` (ownership can move with the lock) and `Sync` (readers
// on multiple threads may alias it through read guards).
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: ?Sized + Send + Sync> Send for RwLock<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: ?Sized + Send + Sync> Sync for RwLock<T> {}
1a4d82fc JJ |
78 | |
/// Structure representing a statically allocated RwLock.
///
/// This structure is intended to be used inside of a `static` and will provide
/// automatic global access as well as lazy initialization. The internal
/// resources of this RwLock, however, must be manually deallocated.
///
/// # Examples
///
/// ```
/// #![feature(static_rwlock)]
///
/// use std::sync::{StaticRwLock, RW_LOCK_INIT};
///
/// static LOCK: StaticRwLock = RW_LOCK_INIT;
///
/// {
///     let _g = LOCK.read().unwrap();
///     // ... shared read access
/// }
/// {
///     let _g = LOCK.write().unwrap();
///     // ... exclusive write access
/// }
/// unsafe { LOCK.destroy() } // free all resources
/// ```
#[unstable(feature = "static_rwlock",
           reason = "may be merged with RwLock in the future",
           issue = "27717")]
pub struct StaticRwLock {
    // Raw platform reader-writer lock.
    lock: sys::RWLock,
    // Tracks whether a writer panicked while holding the lock.
    poison: poison::Flag,
}
111 | ||
/// Constant initialization for a statically-initialized rwlock.
// Kept for backwards compatibility; `StaticRwLock::new()` is the
// preferred spelling now that `const fn` constructors exist.
#[unstable(feature = "static_rwlock",
           reason = "may be merged with RwLock in the future",
           issue = "27717")]
pub const RW_LOCK_INIT: StaticRwLock = StaticRwLock::new();
1a4d82fc JJ |
117 | |
/// RAII structure used to release the shared read access of a lock when
/// dropped.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RwLockReadGuard<'a, T: ?Sized + 'a> {
    // Double-underscore names discourage direct field access from
    // pattern matches outside this module.
    __lock: &'a StaticRwLock,
    __data: &'a UnsafeCell<T>,
}

// Guards must be released on the thread that acquired the raw lock, so
// sending one to another thread is explicitly disallowed.
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: ?Sized> !marker::Send for RwLockReadGuard<'a, T> {}
85aaf69f | 129 | |
1a4d82fc JJ |
/// RAII structure used to release the exclusive write access of a lock when
/// dropped.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RwLockWriteGuard<'a, T: ?Sized + 'a> {
    __lock: &'a StaticRwLock,
    __data: &'a UnsafeCell<T>,
    // Poison guard: consulted on drop to mark the lock poisoned if this
    // guard is dropped during a panic.
    __poison: poison::Guard,
}

// Like the read guard, a write guard must be released on the acquiring
// thread, so `Send` is explicitly opted out of.
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: ?Sized> !marker::Send for RwLockWriteGuard<'a, T> {}
85aaf69f | 142 | |
c34b1796 | 143 | impl<T> RwLock<T> { |
85aaf69f SL |
144 | /// Creates a new instance of an `RwLock<T>` which is unlocked. |
145 | /// | |
146 | /// # Examples | |
147 | /// | |
148 | /// ``` | |
149 | /// use std::sync::RwLock; | |
150 | /// | |
151 | /// let lock = RwLock::new(5); | |
152 | /// ``` | |
153 | #[stable(feature = "rust1", since = "1.0.0")] | |
1a4d82fc | 154 | pub fn new(t: T) -> RwLock<T> { |
62682a34 | 155 | RwLock { inner: box StaticRwLock::new(), data: UnsafeCell::new(t) } |
1a4d82fc | 156 | } |
d9579d0f | 157 | } |
1a4d82fc | 158 | |
impl<T: ?Sized> RwLock<T> {
    /// Locks this rwlock with shared read access, blocking the current thread
    /// until it can be acquired.
    ///
    /// The calling thread will be blocked until there are no more writers which
    /// hold the lock. There may be other readers currently inside the lock when
    /// this method returns. This method does not provide any guarantees with
    /// respect to the ordering of whether contentious readers or writers will
    /// acquire the lock first.
    ///
    /// Returns an RAII guard which will release this thread's shared access
    /// once it is dropped.
    ///
    /// # Failure
    ///
    /// This function will return an error if the RwLock is poisoned. An RwLock
    /// is poisoned whenever a writer panics while holding an exclusive lock.
    /// The failure will occur immediately after the lock has been acquired.
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn read(&self) -> LockResult<RwLockReadGuard<T>> {
        // Acquire the raw lock first; the guard constructor then checks the
        // poison flag, so a poisoned Err still carries a valid, held guard.
        unsafe { self.inner.lock.read() }
        RwLockReadGuard::new(&*self.inner, &self.data)
    }

    /// Attempts to acquire this rwlock with shared read access.
    ///
    /// If the access could not be granted at this time, then `Err` is returned.
    /// Otherwise, an RAII guard is returned which will release the shared access
    /// when it is dropped.
    ///
    /// This function does not block.
    ///
    /// This function does not provide any guarantees with respect to the ordering
    /// of whether contentious readers or writers will acquire the lock first.
    ///
    /// # Failure
    ///
    /// This function will return an error if the RwLock is poisoned. An RwLock
    /// is poisoned whenever a writer panics while holding an exclusive lock. An
    /// error will only be returned if the lock would have otherwise been
    /// acquired.
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn try_read(&self) -> TryLockResult<RwLockReadGuard<T>> {
        if unsafe { self.inner.lock.try_read() } {
            // Lock acquired; `try!` converts a poison error into
            // `TryLockError::Poisoned` while keeping the held guard inside.
            Ok(try!(RwLockReadGuard::new(&*self.inner, &self.data)))
        } else {
            Err(TryLockError::WouldBlock)
        }
    }

    /// Locks this rwlock with exclusive write access, blocking the current
    /// thread until it can be acquired.
    ///
    /// This function will not return while other writers or other readers
    /// currently have access to the lock.
    ///
    /// Returns an RAII guard which will drop the write access of this rwlock
    /// when dropped.
    ///
    /// # Failure
    ///
    /// This function will return an error if the RwLock is poisoned. An RwLock
    /// is poisoned whenever a writer panics while holding an exclusive lock.
    /// An error will be returned when the lock is acquired.
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn write(&self) -> LockResult<RwLockWriteGuard<T>> {
        // Same pattern as `read`: take the raw lock, then let the guard
        // constructor report poisoning.
        unsafe { self.inner.lock.write() }
        RwLockWriteGuard::new(&*self.inner, &self.data)
    }

    /// Attempts to lock this rwlock with exclusive write access.
    ///
    /// If the lock could not be acquired at this time, then `Err` is returned.
    /// Otherwise, an RAII guard is returned which will release the lock when
    /// it is dropped.
    ///
    /// This function does not block.
    ///
    /// This function does not provide any guarantees with respect to the ordering
    /// of whether contentious readers or writers will acquire the lock first.
    ///
    /// # Failure
    ///
    /// This function will return an error if the RwLock is poisoned. An RwLock
    /// is poisoned whenever a writer panics while holding an exclusive lock. An
    /// error will only be returned if the lock would have otherwise been
    /// acquired.
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn try_write(&self) -> TryLockResult<RwLockWriteGuard<T>> {
        if unsafe { self.inner.lock.try_write() } {
            Ok(try!(RwLockWriteGuard::new(&*self.inner, &self.data)))
        } else {
            Err(TryLockError::WouldBlock)
        }
    }

    /// Determines whether the lock is poisoned.
    ///
    /// If another thread is active, the lock can still become poisoned at any
    /// time. You should not trust a `false` value for program correctness
    /// without additional synchronization.
    #[inline]
    #[stable(feature = "sync_poison", since = "1.2.0")]
    pub fn is_poisoned(&self) -> bool {
        self.inner.poison.get()
    }

    /// Consumes this `RwLock`, returning the underlying data.
    ///
    /// # Failure
    ///
    /// This function will return an error if the RwLock is poisoned. An RwLock
    /// is poisoned whenever a writer panics while holding an exclusive lock. An
    /// error will only be returned if the lock would have otherwise been
    /// acquired.
    #[stable(feature = "rwlock_into_inner", since = "1.6.0")]
    pub fn into_inner(self) -> LockResult<T> where T: Sized {
        // We know statically that there are no outstanding references to
        // `self` so there's no need to lock the inner StaticRwLock.
        //
        // To get the inner value, we'd like to call `data.into_inner()`,
        // but because `RwLock` impl-s `Drop`, we can't move out of it, so
        // we'll have to destructure it manually instead.
        unsafe {
            // Like `let RwLock { inner, data } = self`.
            let (inner, data) = {
                let RwLock { ref inner, ref data } = self;
                (ptr::read(inner), ptr::read(data))
            };
            // `forget` prevents `Drop` from running (and double-destroying
            // the raw lock); we destroy it manually below instead.
            mem::forget(self);
            inner.lock.destroy();  // Keep in sync with the `Drop` impl.

            poison::map_result(inner.poison.borrow(), |_| data.into_inner())
        }
    }

    /// Returns a mutable reference to the underlying data.
    ///
    /// Since this call borrows the `RwLock` mutably, no actual locking needs to
    /// take place---the mutable borrow statically guarantees no locks exist.
    ///
    /// # Failure
    ///
    /// This function will return an error if the RwLock is poisoned. An RwLock
    /// is poisoned whenever a writer panics while holding an exclusive lock. An
    /// error will only be returned if the lock would have otherwise been
    /// acquired.
    #[stable(feature = "rwlock_get_mut", since = "1.6.0")]
    pub fn get_mut(&mut self) -> LockResult<&mut T> {
        // We know statically that there are no other references to `self`, so
        // there's no need to lock the inner StaticRwLock.
        let data = unsafe { &mut *self.data.get() };
        poison::map_result(self.inner.poison.borrow(), |_| data )
    }
}
318 | ||
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Drop for RwLock<T> {
    #[unsafe_destructor_blind_to_params]
    fn drop(&mut self) {
        // Release the OS resources backing the raw lock.
        // IMPORTANT: This code needs to be kept in sync with `RwLock::into_inner`.
        unsafe { self.inner.lock.destroy() }
    }
}
327 | ||
c34b1796 | 328 | #[stable(feature = "rust1", since = "1.0.0")] |
d9579d0f | 329 | impl<T: ?Sized + fmt::Debug> fmt::Debug for RwLock<T> { |
c34b1796 AL |
330 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
331 | match self.try_read() { | |
d9579d0f | 332 | Ok(guard) => write!(f, "RwLock {{ data: {:?} }}", &*guard), |
c34b1796 | 333 | Err(TryLockError::Poisoned(err)) => { |
d9579d0f | 334 | write!(f, "RwLock {{ data: Poisoned({:?}) }}", &**err.get_ref()) |
c34b1796 AL |
335 | }, |
336 | Err(TryLockError::WouldBlock) => write!(f, "RwLock {{ <locked> }}") | |
337 | } | |
338 | } | |
339 | } | |
340 | ||
1a4d82fc JJ |
// Zero-sized stand-in data for guards handed out by `StaticRwLock`: the
// static lock protects no real data, so its guards point at this shared cell.
struct Dummy(UnsafeCell<()>);
// SAFETY: the payload is `()`, so concurrent access through the cell is
// harmless — NOTE(review): presumably sound because `()` carries no state.
unsafe impl Sync for Dummy {}
static DUMMY: Dummy = Dummy(UnsafeCell::new(()));
1a4d82fc | 344 | |
#[unstable(feature = "static_rwlock",
           reason = "may be merged with RwLock in the future",
           issue = "27717")]
impl StaticRwLock {
    /// Creates a new rwlock.
    pub const fn new() -> StaticRwLock {
        StaticRwLock {
            lock: sys::RWLock::new(),
            poison: poison::Flag::new(),
        }
    }

    /// Locks this rwlock with shared read access, blocking the current thread
    /// until it can be acquired.
    ///
    /// See `RwLock::read`.
    #[inline]
    pub fn read(&'static self) -> LockResult<RwLockReadGuard<'static, ()>> {
        // Guards on a static lock carry the shared DUMMY cell as their data.
        unsafe { self.lock.read() }
        RwLockReadGuard::new(self, &DUMMY.0)
    }

    /// Attempts to acquire this lock with shared read access.
    ///
    /// See `RwLock::try_read`.
    #[inline]
    pub fn try_read(&'static self)
                    -> TryLockResult<RwLockReadGuard<'static, ()>> {
        if unsafe { self.lock.try_read() } {
            Ok(try!(RwLockReadGuard::new(self, &DUMMY.0)))
        } else {
            Err(TryLockError::WouldBlock)
        }
    }

    /// Locks this rwlock with exclusive write access, blocking the current
    /// thread until it can be acquired.
    ///
    /// See `RwLock::write`.
    #[inline]
    pub fn write(&'static self) -> LockResult<RwLockWriteGuard<'static, ()>> {
        unsafe { self.lock.write() }
        RwLockWriteGuard::new(self, &DUMMY.0)
    }

    /// Attempts to lock this rwlock with exclusive write access.
    ///
    /// See `RwLock::try_write`.
    #[inline]
    pub fn try_write(&'static self)
                     -> TryLockResult<RwLockWriteGuard<'static, ()>> {
        if unsafe { self.lock.try_write() } {
            Ok(try!(RwLockWriteGuard::new(self, &DUMMY.0)))
        } else {
            Err(TryLockError::WouldBlock)
        }
    }

    /// Deallocates all resources associated with this static lock.
    ///
    /// This method is unsafe to call as there is no guarantee that there are no
    /// active users of the lock, and this also doesn't prevent any future users
    /// of this lock. This method is required to be called to not leak memory on
    /// all platforms.
    pub unsafe fn destroy(&'static self) {
        self.lock.destroy()
    }
}
413 | ||
d9579d0f | 414 | impl<'rwlock, T: ?Sized> RwLockReadGuard<'rwlock, T> { |
1a4d82fc JJ |
415 | fn new(lock: &'rwlock StaticRwLock, data: &'rwlock UnsafeCell<T>) |
416 | -> LockResult<RwLockReadGuard<'rwlock, T>> { | |
417 | poison::map_result(lock.poison.borrow(), |_| { | |
418 | RwLockReadGuard { | |
419 | __lock: lock, | |
420 | __data: data, | |
1a4d82fc JJ |
421 | } |
422 | }) | |
423 | } | |
424 | } | |
85aaf69f | 425 | |
d9579d0f | 426 | impl<'rwlock, T: ?Sized> RwLockWriteGuard<'rwlock, T> { |
1a4d82fc JJ |
427 | fn new(lock: &'rwlock StaticRwLock, data: &'rwlock UnsafeCell<T>) |
428 | -> LockResult<RwLockWriteGuard<'rwlock, T>> { | |
429 | poison::map_result(lock.poison.borrow(), |guard| { | |
430 | RwLockWriteGuard { | |
431 | __lock: lock, | |
432 | __data: data, | |
433 | __poison: guard, | |
1a4d82fc JJ |
434 | } |
435 | }) | |
436 | } | |
437 | } | |
438 | ||
85aaf69f | 439 | #[stable(feature = "rust1", since = "1.0.0")] |
d9579d0f | 440 | impl<'rwlock, T: ?Sized> Deref for RwLockReadGuard<'rwlock, T> { |
1a4d82fc JJ |
441 | type Target = T; |
442 | ||
443 | fn deref(&self) -> &T { unsafe { &*self.__data.get() } } | |
444 | } | |
d9579d0f | 445 | |
85aaf69f | 446 | #[stable(feature = "rust1", since = "1.0.0")] |
d9579d0f | 447 | impl<'rwlock, T: ?Sized> Deref for RwLockWriteGuard<'rwlock, T> { |
1a4d82fc JJ |
448 | type Target = T; |
449 | ||
450 | fn deref(&self) -> &T { unsafe { &*self.__data.get() } } | |
451 | } | |
d9579d0f | 452 | |
85aaf69f | 453 | #[stable(feature = "rust1", since = "1.0.0")] |
d9579d0f | 454 | impl<'rwlock, T: ?Sized> DerefMut for RwLockWriteGuard<'rwlock, T> { |
1a4d82fc JJ |
455 | fn deref_mut(&mut self) -> &mut T { |
456 | unsafe { &mut *self.__data.get() } | |
457 | } | |
458 | } | |
459 | ||
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: ?Sized> Drop for RwLockReadGuard<'a, T> {
    fn drop(&mut self) {
        // Readers never poison the lock, so just release shared access.
        unsafe { self.__lock.lock.read_unlock(); }
    }
}
466 | ||
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: ?Sized> Drop for RwLockWriteGuard<'a, T> {
    fn drop(&mut self) {
        // Record poisoning first (marks the lock poisoned if this drop is
        // running during a panic), then release exclusive access.
        self.__lock.poison.done(&self.__poison);
        unsafe { self.__lock.lock.write_unlock(); }
    }
}
474 | ||
#[cfg(test)]
mod tests {
    #![allow(deprecated)] // rand

    use prelude::v1::*;

    use rand::{self, Rng};
    use sync::mpsc::channel;
    use thread;
    use sync::{Arc, RwLock, StaticRwLock, TryLockError};
    use sync::atomic::{AtomicUsize, Ordering};

    // Non-`Copy` payload used to verify move semantics of
    // `into_inner`/`get_mut`.
    #[derive(Eq, PartialEq, Debug)]
    struct NonCopy(i32);

    // Basic lock/unlock cycles on a heap-allocated lock.
    #[test]
    fn smoke() {
        let l = RwLock::new(());
        drop(l.read().unwrap());
        drop(l.write().unwrap());
        drop((l.read().unwrap(), l.read().unwrap()));
        drop(l.write().unwrap());
    }

    // Same cycles on a `StaticRwLock`, including manual destruction.
    #[test]
    fn static_smoke() {
        static R: StaticRwLock = StaticRwLock::new();
        drop(R.read().unwrap());
        drop(R.write().unwrap());
        drop((R.read().unwrap(), R.read().unwrap()));
        drop(R.write().unwrap());
        unsafe { R.destroy(); }
    }

    // Stress test: N threads each take M randomly-chosen read/write locks.
    #[test]
    fn frob() {
        static R: StaticRwLock = StaticRwLock::new();
        const N: usize = 10;
        const M: usize = 1000;

        let (tx, rx) = channel::<()>();
        for _ in 0..N {
            let tx = tx.clone();
            thread::spawn(move|| {
                let mut rng = rand::thread_rng();
                for _ in 0..M {
                    if rng.gen_weighted_bool(N) {
                        drop(R.write().unwrap());
                    } else {
                        drop(R.read().unwrap());
                    }
                }
                drop(tx);
            });
        }
        drop(tx);
        // Wait for all senders to drop (channel closes), then clean up.
        let _ = rx.recv();
        unsafe { R.destroy(); }
    }

    // A panicking writer poisons the lock for subsequent readers.
    #[test]
    fn test_rw_arc_poison_wr() {
        let arc = Arc::new(RwLock::new(1));
        let arc2 = arc.clone();
        let _: Result<(), _> = thread::spawn(move|| {
            let _lock = arc2.write().unwrap();
            panic!();
        }).join();
        assert!(arc.read().is_err());
    }

    // A panicking writer poisons the lock for subsequent writers too.
    #[test]
    fn test_rw_arc_poison_ww() {
        let arc = Arc::new(RwLock::new(1));
        assert!(!arc.is_poisoned());
        let arc2 = arc.clone();
        let _: Result<(), _> = thread::spawn(move|| {
            let _lock = arc2.write().unwrap();
            panic!();
        }).join();
        assert!(arc.write().is_err());
        assert!(arc.is_poisoned());
    }

    // A panicking reader does NOT poison the lock for readers.
    #[test]
    fn test_rw_arc_no_poison_rr() {
        let arc = Arc::new(RwLock::new(1));
        let arc2 = arc.clone();
        let _: Result<(), _> = thread::spawn(move|| {
            let _lock = arc2.read().unwrap();
            panic!();
        }).join();
        let lock = arc.read().unwrap();
        assert_eq!(*lock, 1);
    }

    // A panicking reader does NOT poison the lock for writers.
    #[test]
    fn test_rw_arc_no_poison_rw() {
        let arc = Arc::new(RwLock::new(1));
        let arc2 = arc.clone();
        let _: Result<(), _> = thread::spawn(move|| {
            let _lock = arc2.read().unwrap();
            panic!()
        }).join();
        let lock = arc.write().unwrap();
        assert_eq!(*lock, 1);
    }

    // Readers must never observe the writer's transient `-1` value:
    // exclusive access makes each increment appear atomic to them.
    #[test]
    fn test_rw_arc() {
        let arc = Arc::new(RwLock::new(0));
        let arc2 = arc.clone();
        let (tx, rx) = channel();

        thread::spawn(move|| {
            let mut lock = arc2.write().unwrap();
            for _ in 0..10 {
                let tmp = *lock;
                *lock = -1;
                thread::yield_now();
                *lock = tmp + 1;
            }
            tx.send(()).unwrap();
        });

        // Readers try to catch the writer in the act
        let mut children = Vec::new();
        for _ in 0..5 {
            let arc3 = arc.clone();
            children.push(thread::spawn(move|| {
                let lock = arc3.read().unwrap();
                assert!(*lock >= 0);
            }));
        }

        // Wait for children to pass their asserts
        for r in children {
            assert!(r.join().is_ok());
        }

        // Wait for writer to finish
        rx.recv().unwrap();
        let lock = arc.read().unwrap();
        assert_eq!(*lock, 10);
    }

    // The lock can still be re-acquired by a destructor running during
    // unwinding in the panicking thread (write is released before drop).
    #[test]
    fn test_rw_arc_access_in_unwind() {
        let arc = Arc::new(RwLock::new(1));
        let arc2 = arc.clone();
        let _ = thread::spawn(move|| -> () {
            struct Unwinder {
                i: Arc<RwLock<isize>>,
            }
            impl Drop for Unwinder {
                fn drop(&mut self) {
                    let mut lock = self.i.write().unwrap();
                    *lock += 1;
                }
            }
            let _u = Unwinder { i: arc2 };
            panic!();
        }).join();
        let lock = arc.read().unwrap();
        assert_eq!(*lock, 2);
    }

    // `RwLock<[i32]>` (unsized payload) works through a fat reference.
    #[test]
    fn test_rwlock_unsized() {
        let rw: &RwLock<[i32]> = &RwLock::new([1, 2, 3]);
        {
            let b = &mut *rw.write().unwrap();
            b[0] = 4;
            b[2] = 5;
        }
        let comp: &[i32] = &[4, 2, 5];
        assert_eq!(&*rw.read().unwrap(), comp);
    }

    // `try_write` must fail with `WouldBlock` while a read guard is live.
    #[test]
    fn test_rwlock_try_write() {
        use mem::drop;

        let lock = RwLock::new(0isize);
        let read_guard = lock.read().unwrap();

        let write_result = lock.try_write();
        match write_result {
            Err(TryLockError::WouldBlock) => (),
            Ok(_) => assert!(false, "try_write should not succeed while read_guard is in scope"),
            Err(_) => assert!(false, "unexpected error"),
        }

        drop(read_guard);
    }

    // `into_inner` moves the payload out of an unpoisoned lock.
    #[test]
    fn test_into_inner() {
        let m = RwLock::new(NonCopy(10));
        assert_eq!(m.into_inner().unwrap(), NonCopy(10));
    }

    // `into_inner` must transfer ownership: the payload is dropped exactly
    // once, and only when the extracted value goes out of scope.
    #[test]
    fn test_into_inner_drop() {
        struct Foo(Arc<AtomicUsize>);
        impl Drop for Foo {
            fn drop(&mut self) {
                self.0.fetch_add(1, Ordering::SeqCst);
            }
        }
        let num_drops = Arc::new(AtomicUsize::new(0));
        let m = RwLock::new(Foo(num_drops.clone()));
        assert_eq!(num_drops.load(Ordering::SeqCst), 0);
        {
            let _inner = m.into_inner().unwrap();
            assert_eq!(num_drops.load(Ordering::SeqCst), 0);
        }
        assert_eq!(num_drops.load(Ordering::SeqCst), 1);
    }

    // `into_inner` on a poisoned lock returns Err, but the error still
    // yields the (intact) payload.
    #[test]
    fn test_into_inner_poison() {
        let m = Arc::new(RwLock::new(NonCopy(10)));
        let m2 = m.clone();
        let _ = thread::spawn(move || {
            let _lock = m2.write().unwrap();
            panic!("test panic in inner thread to poison RwLock");
        }).join();

        assert!(m.is_poisoned());
        match Arc::try_unwrap(m).unwrap().into_inner() {
            Err(e) => assert_eq!(e.into_inner(), NonCopy(10)),
            Ok(x) => panic!("into_inner of poisoned RwLock is Ok: {:?}", x),
        }
    }

    // `get_mut` gives lock-free mutable access through `&mut self`.
    #[test]
    fn test_get_mut() {
        let mut m = RwLock::new(NonCopy(10));
        *m.get_mut().unwrap() = NonCopy(20);
        assert_eq!(m.into_inner().unwrap(), NonCopy(20));
    }

    // `get_mut` on a poisoned lock returns Err carrying the reference.
    #[test]
    fn test_get_mut_poison() {
        let m = Arc::new(RwLock::new(NonCopy(10)));
        let m2 = m.clone();
        let _ = thread::spawn(move || {
            let _lock = m2.write().unwrap();
            panic!("test panic in inner thread to poison RwLock");
        }).join();

        assert!(m.is_poisoned());
        match Arc::try_unwrap(m).unwrap().get_mut() {
            Err(e) => assert_eq!(*e.into_inner(), NonCopy(10)),
            Ok(x) => panic!("get_mut of poisoned RwLock is Ok: {:?}", x),
        }
    }
}