//! The Windows implementation of mutexes is a little odd and it may not be
//! immediately obvious what's going on. The primary oddness is that SRWLock is
//! used instead of CriticalSection, and this is done because:
//!
//! 1. SRWLock is several times faster than CriticalSection according to
//!    benchmarks performed on both Windows 8 and Windows 7.
//!
//! 2. CriticalSection allows recursive locking while SRWLock deadlocks. The
//!    Unix implementation deadlocks so consistency is preferred. See #19962 for
//!    more details.
//!
//! 3. While CriticalSection is fair and SRWLock is not, the current Rust policy
//!    is that there are no guarantees of fairness.
//!
//! The downside of this approach, however, is that SRWLock is not available on
//! Windows XP, so we continue to have a fallback implementation where
//! CriticalSection is used and we keep track of who's holding the mutex to
//! detect recursive locks.
22 use crate::cell
::{Cell, UnsafeCell}
;
23 use crate::mem
::{self, MaybeUninit}
;
24 use crate::sync
::atomic
::{AtomicUsize, Ordering}
;
// NOTE(review): the struct headers and some fields in this span were lost in the
// paste-mangle; they are reconstructed from the visible usages (`raw`, `new`,
// `inner`, and the `Inner { remutex, held }` literal) — confirm against upstream.
pub struct Mutex {
    // This is either directly an SRWLOCK (if supported), or a Box<Inner> otherwise.
    lock: AtomicUsize,
}

// SAFETY: the shared word is only ever manipulated atomically or through the
// Win32 lock primitives, so the mutex may be moved to and used from any thread.
unsafe impl Send for Mutex {}
unsafe impl Sync for Mutex {}

// Fallback state used when SRWLock is unavailable (see module docs): a
// CriticalSection-based reentrant mutex plus a flag to detect recursion.
struct Inner {
    remutex: ReentrantMutex,
    // Whether the mutex is currently held; used to turn a recursive lock
    // attempt into a panic instead of silently succeeding.
    held: Cell<bool>,
}
// Which locking implementation is in use for this process.
// NOTE(review): the enum body was elided by the paste-mangle; the two variants
// are reconstructed from the visible `Kind::SRWLock` / `Kind::CriticalSection`
// match arms — confirm against upstream.
#[derive(Clone, Copy)]
enum Kind {
    SRWLock,
    CriticalSection,
}
/// Returns the raw `SRWLOCK` pointer backing `m`.
///
/// Only meaningful when the SRWLock implementation is in use; the same storage
/// word otherwise holds a `Box<Inner>` pointer (see `Mutex::inner`).
pub unsafe fn raw(m: &Mutex) -> c::PSRWLOCK {
    // The SRWLOCK is stored directly inside the AtomicUsize word, so it must fit.
    debug_assert!(mem::size_of::<c::SRWLOCK>() <= mem::size_of_val(&m.lock));
    &m.lock as *const _ as *mut _
}
    /// Creates a mutex in the zero state, which is valid for both
    /// implementations (SRWLock directly, or "fallback not yet allocated").
    pub const fn new() -> Mutex {
        Mutex {
            // This works because SRWLOCK_INIT is 0 (wrapped in a struct), so we are also properly
            // initializing an SRWLOCK here.
            lock: AtomicUsize::new(0),
        }
    }
    /// No-op: `new` already produces a fully usable mutex.
    pub unsafe fn init(&mut self) {}
62 pub unsafe fn lock(&self) {
64 Kind
::SRWLock
=> c
::AcquireSRWLockExclusive(raw(self)),
65 Kind
::CriticalSection
=> {
66 let inner
= &*self.inner();
68 if inner
.held
.replace(true) {
69 // It was already locked, so we got a recursive lock which we do not want.
70 inner
.remutex
.unlock();
71 panic
!("cannot recursively lock a mutex");
76 pub unsafe fn try_lock(&self) -> bool
{
78 Kind
::SRWLock
=> c
::TryAcquireSRWLockExclusive(raw(self)) != 0,
79 Kind
::CriticalSection
=> {
80 let inner
= &*self.inner();
81 if !inner
.remutex
.try_lock() {
83 } else if inner
.held
.replace(true) {
84 // It was already locked, so we got a recursive lock which we do not want.
85 inner
.remutex
.unlock();
93 pub unsafe fn unlock(&self) {
95 Kind
::SRWLock
=> c
::ReleaseSRWLockExclusive(raw(self)),
96 Kind
::CriticalSection
=> {
97 let inner
= &*(self.lock
.load(Ordering
::SeqCst
) as *const Inner
);
98 inner
.held
.set(false);
99 inner
.remutex
.unlock();
103 pub unsafe fn destroy(&self) {
106 Kind
::CriticalSection
=> match self.lock
.load(Ordering
::SeqCst
) {
108 n
=> Box
::from_raw(n
as *mut Inner
).remutex
.destroy(),
113 unsafe fn inner(&self) -> *const Inner
{
114 match self.lock
.load(Ordering
::SeqCst
) {
116 n
=> return n
as *const _
,
118 let inner
= box Inner { remutex: ReentrantMutex::uninitialized(), held: Cell::new(false) }
;
119 inner
.remutex
.init();
120 let inner
= Box
::into_raw(inner
);
121 match self.lock
.compare_and_swap(0, inner
as usize, Ordering
::SeqCst
) {
124 Box
::from_raw(inner
).remutex
.destroy();
132 if c
::AcquireSRWLockExclusive
::is_available() { Kind::SRWLock }
else { Kind::CriticalSection }
/// A reentrant mutex backed by a Win32 `CRITICAL_SECTION`, which permits
/// repeated acquisition by the thread that already owns it.
pub struct ReentrantMutex {
    // Delayed initialization: the CRITICAL_SECTION is created by `init`, so the
    // storage starts uninitialized; UnsafeCell allows mutation through `&self`.
    inner: MaybeUninit<UnsafeCell<c::CRITICAL_SECTION>>,
}

// SAFETY: all mutation of the CRITICAL_SECTION goes through the Win32
// Enter/Leave/Delete APIs, which are designed for cross-thread use.
unsafe impl Send for ReentrantMutex {}
unsafe impl Sync for ReentrantMutex {}
impl ReentrantMutex {
    /// Creates storage for a reentrant mutex without creating the underlying
    /// CRITICAL_SECTION; `init` must be called before any lock operation.
    pub const fn uninitialized() -> ReentrantMutex {
        ReentrantMutex { inner: MaybeUninit::uninit() }
    }
147 pub unsafe fn init(&self) {
148 c
::InitializeCriticalSection(UnsafeCell
::raw_get(self.inner
.as_ptr()));
151 pub unsafe fn lock(&self) {
152 c
::EnterCriticalSection(UnsafeCell
::raw_get(self.inner
.as_ptr()));
156 pub unsafe fn try_lock(&self) -> bool
{
157 c
::TryEnterCriticalSection(UnsafeCell
::raw_get(self.inner
.as_ptr())) != 0
160 pub unsafe fn unlock(&self) {
161 c
::LeaveCriticalSection(UnsafeCell
::raw_get(self.inner
.as_ptr()));
164 pub unsafe fn destroy(&self) {
165 c
::DeleteCriticalSection(UnsafeCell
::raw_get(self.inner
.as_ptr()));