3 //! The Windows implementation of mutexes is a little odd and it may not be
4 //! immediately obvious what's going on. The primary oddness is that SRWLock is
5 //! used instead of CriticalSection, and this is done because:
7 //! 1. SRWLock is several times faster than CriticalSection according to
8 //! benchmarks performed on both Windows 8 and Windows 7.
//! 2. CriticalSection allows recursive locking while SRWLock deadlocks. The
//!    Unix implementation deadlocks, so consistency is preferred. See #19962
//!    for more details.
14 //! 3. While CriticalSection is fair and SRWLock is not, the current Rust policy
15 //! is that there are no guarantees of fairness.
17 //! The downside of this approach, however, is that SRWLock is not available on
18 //! Windows XP, so we continue to have a fallback implementation where
19 //! CriticalSection is used and we keep track of who's holding the mutex to
20 //! detect recursive locks.
use crate::cell::UnsafeCell;
use crate::mem::{self, MaybeUninit};
use crate::sync::atomic::{AtomicUsize, Ordering};
use crate::sys::c;
use crate::sys::compat;
pub struct Mutex {
    // When SRWLock is in use this word *is* the lock: 0 == SRWLOCK_INIT (see
    // the comment in `new`). On the CriticalSection fallback it instead holds
    // a pointer (as usize) to a lazily allocated `ReentrantMutex`, where 0
    // means "not allocated yet" (see `remutex`).
    lock: AtomicUsize,
    // Fallback path only: whether this mutex is currently held, used by
    // `flag_locked` to detect (and panic on) recursive locking.
    held: UnsafeCell<bool>,
}
// SAFETY: the underlying OS primitives (SRWLock / CriticalSection) may be used
// from any thread; the `held` flag is only touched on the fallback path, which
// appears to serialize access through the CriticalSection — NOTE(review):
// confirm every `held` access happens while that lock is owned.
unsafe impl Send for Mutex {}
unsafe impl Sync for Mutex {}
#[derive(Clone, Copy)]
enum Kind {
    // Discriminants deliberately start at 1: `kind()` caches the probed value
    // in a static `AtomicUsize` whose 0 state means "not yet determined".
    SRWLock = 1,
    CriticalSection = 2,
}
43 pub unsafe fn raw(m
: &Mutex
) -> c
::PSRWLOCK
{
44 debug_assert
!(mem
::size_of
::<c
::SRWLOCK
>() <= mem
::size_of_val(&m
.lock
));
45 &m
.lock
as *const _
as *mut _
49 pub const fn new() -> Mutex
{
51 // This works because SRWLOCK_INIT is 0 (wrapped in a struct), so we are also properly
52 // initializing an SRWLOCK here.
53 lock
: AtomicUsize
::new(0),
54 held
: UnsafeCell
::new(false),
// No-op: `new` already produces a fully usable value for both strategies
// (SRWLOCK is zero-initialized; the fallback allocates lazily in `remutex`).
pub unsafe fn init(&mut self) {}
59 pub unsafe fn lock(&self) {
61 Kind
::SRWLock
=> c
::AcquireSRWLockExclusive(raw(self)),
62 Kind
::CriticalSection
=> {
63 let re
= self.remutex();
65 if !self.flag_locked() {
67 panic
!("cannot recursively lock a mutex");
72 pub unsafe fn try_lock(&self) -> bool
{
74 Kind
::SRWLock
=> c
::TryAcquireSRWLockExclusive(raw(self)) != 0,
75 Kind
::CriticalSection
=> {
76 let re
= self.remutex();
77 if !(*re
).try_lock() {
79 } else if self.flag_locked() {
88 pub unsafe fn unlock(&self) {
89 *self.held
.get() = false;
91 Kind
::SRWLock
=> c
::ReleaseSRWLockExclusive(raw(self)),
92 Kind
::CriticalSection
=> (*self.remutex()).unlock(),
95 pub unsafe fn destroy(&self) {
98 Kind
::CriticalSection
=> match self.lock
.load(Ordering
::SeqCst
) {
101 Box
::from_raw(n
as *mut ReentrantMutex
).destroy();
107 unsafe fn remutex(&self) -> *mut ReentrantMutex
{
108 match self.lock
.load(Ordering
::SeqCst
) {
110 n
=> return n
as *mut _
,
112 let re
= box ReentrantMutex
::uninitialized();
114 let re
= Box
::into_raw(re
);
115 match self.lock
.compare_and_swap(0, re
as usize, Ordering
::SeqCst
) {
118 Box
::from_raw(re
).destroy();
124 unsafe fn flag_locked(&self) -> bool
{
125 if *self.held
.get() {
128 *self.held
.get() = true;
135 static KIND
: AtomicUsize
= AtomicUsize
::new(0);
137 let val
= KIND
.load(Ordering
::SeqCst
);
138 if val
== Kind
::SRWLock
as usize {
139 return Kind
::SRWLock
;
140 } else if val
== Kind
::CriticalSection
as usize {
141 return Kind
::CriticalSection
;
144 let ret
= match compat
::lookup("kernel32", "AcquireSRWLockExclusive") {
145 None
=> Kind
::CriticalSection
,
146 Some(..) => Kind
::SRWLock
,
148 KIND
.store(ret
as usize, Ordering
::SeqCst
);
152 pub struct ReentrantMutex
{
153 inner
: UnsafeCell
<MaybeUninit
<c
::CRITICAL_SECTION
>>,
// SAFETY: CRITICAL_SECTION is an OS synchronization object designed for use
// across threads; all access goes through the kernel32 Enter/Leave calls.
unsafe impl Send for ReentrantMutex {}
unsafe impl Sync for ReentrantMutex {}
159 impl ReentrantMutex
{
160 pub const fn uninitialized() -> ReentrantMutex
{
161 ReentrantMutex { inner: UnsafeCell::new(MaybeUninit::uninit()) }
164 pub unsafe fn init(&self) {
165 c
::InitializeCriticalSection((&mut *self.inner
.get()).as_mut_ptr());
168 pub unsafe fn lock(&self) {
169 c
::EnterCriticalSection((&mut *self.inner
.get()).as_mut_ptr());
173 pub unsafe fn try_lock(&self) -> bool
{
174 c
::TryEnterCriticalSection((&mut *self.inner
.get()).as_mut_ptr()) != 0
177 pub unsafe fn unlock(&self) {
178 c
::LeaveCriticalSection((&mut *self.inner
.get()).as_mut_ptr());
181 pub unsafe fn destroy(&self) {
182 c
::DeleteCriticalSection((&mut *self.inner
.get()).as_mut_ptr());