//! Thread local storage

#![unstable(feature = "thread_local_internals", issue = "none")]

use crate::error::Error;
use crate::fmt;

/// A thread local storage key which owns its contents.
///
/// This key uses the fastest possible implementation available to it for the
/// target platform. It is instantiated with the [`thread_local!`] macro and the
/// primary method is the [`with`] method.
///
/// The [`with`] method yields a reference to the contained value which cannot be
/// sent across threads or escape the given closure.
///
/// # Initialization and Destruction
///
/// Initialization is dynamically performed on the first call to [`with`]
/// within a thread, and values that implement [`Drop`] get destructed when a
/// thread exits. Some caveats apply, which are explained below.
///
/// A `LocalKey`'s initializer cannot recursively depend on itself, and using
/// a `LocalKey` in this way will cause the initializer to infinitely recurse
/// on the first call to `with`.
///
/// # Examples
///
/// ```
/// use std::cell::RefCell;
/// use std::thread;
///
/// thread_local!(static FOO: RefCell<u32> = RefCell::new(1));
///
/// FOO.with(|f| {
///     assert_eq!(*f.borrow(), 1);
///     *f.borrow_mut() = 2;
/// });
///
/// // each thread starts out with the initial value of 1
/// let t = thread::spawn(move || {
///     FOO.with(|f| {
///         assert_eq!(*f.borrow(), 1);
///         *f.borrow_mut() = 3;
///     });
/// });
///
/// // wait for the thread to complete and bail out on panic
/// t.join().unwrap();
///
/// // we retain our original value of 2 despite the child thread
/// FOO.with(|f| {
///     assert_eq!(*f.borrow(), 2);
/// });
/// ```
///
/// # Platform-specific behavior
///
/// Note that a "best effort" is made to ensure that destructors for types
/// stored in thread local storage are run, but not all platforms can guarantee
/// that destructors will be run for all types in thread local storage. For
/// example, there are a number of known caveats where destructors are not run:
///
/// 1. On Unix systems when pthread-based TLS is being used, destructors will
///    not be run for TLS values on the main thread when it exits. Note that the
///    application will exit immediately after the main thread exits as well.
/// 2. On all platforms it's possible for TLS to re-initialize other TLS slots
///    during destruction. Some platforms ensure that this cannot happen
///    infinitely by preventing re-initialization of any slot that has been
///    destroyed, but not all platforms have this guard. Those platforms that do
///    not guard typically have a synthetic limit after which point no more
///    destructors are run.
///
/// [`with`]: LocalKey::with
#[stable(feature = "rust1", since = "1.0.0")]
pub struct LocalKey<T: 'static> {
    // This outer `LocalKey<T>` type is what's going to be stored in statics,
    // but actual data inside will sometimes be tagged with #[thread_local].
    // It's not valid for a true static to reference a #[thread_local] static,
    // so we get around that by exposing an accessor through a layer of function
    // indirection (this thunk).
    //
    // Note that the thunk is itself unsafe because the returned lifetime of the
    // slot where data lives, `'static`, is not actually valid. The lifetime
    // here is actually slightly shorter than the currently running thread!
    //
    // Although this is an extra layer of indirection, it should in theory be
    // trivially devirtualizable by LLVM because the value of `inner` never
    // changes and the constant should be readonly within a crate. This mainly
    // only runs into problems when TLS statics are exported across crates.
    inner: unsafe fn() -> Option<&'static T>,
}

#[stable(feature = "std_debug", since = "1.16.0")]
impl<T: 'static> fmt::Debug for LocalKey<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.pad("LocalKey { .. }")
    }
}

/// Declare a new thread local storage key of type [`std::thread::LocalKey`].
///
/// # Syntax
///
/// The macro wraps any number of static declarations and makes them thread local.
/// Visibility and attributes for each static are allowed. Example:
///
/// ```
/// use std::cell::RefCell;
/// thread_local! {
///     pub static FOO: RefCell<u32> = RefCell::new(1);
///
///     #[allow(unused)]
///     static BAR: RefCell<f32> = RefCell::new(1.0);
/// }
/// # fn main() {}
/// ```
///
/// See the [`LocalKey` documentation][`std::thread::LocalKey`] for more
/// information.
///
/// [`std::thread::LocalKey`]: crate::thread::LocalKey
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow_internal_unstable(thread_local_internals)]
macro_rules! thread_local {
    // empty (base case for the recursion)
    () => {};

    // process multiple declarations
    ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr; $($rest:tt)*) => (
        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
        $crate::thread_local!($($rest)*);
    );

    // handle a single declaration
    ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr) => (
        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
    );
}

#[doc(hidden)]
#[unstable(feature = "thread_local_internals", reason = "should not be necessary", issue = "none")]
#[macro_export]
#[allow_internal_unstable(thread_local_internals, cfg_target_thread_local, thread_local)]
#[allow_internal_unsafe]
macro_rules! __thread_local_inner {
    (@key $t:ty, $init:expr) => {
        {
            #[inline]
            fn __init() -> $t { $init }

            unsafe fn __getit() -> $crate::option::Option<&'static $t> {
                #[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
                static __KEY: $crate::thread::__StaticLocalKeyInner<$t> =
                    $crate::thread::__StaticLocalKeyInner::new();

                #[thread_local]
                #[cfg(all(
                    target_thread_local,
                    not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
                ))]
                static __KEY: $crate::thread::__FastLocalKeyInner<$t> =
                    $crate::thread::__FastLocalKeyInner::new();

                #[cfg(all(
                    not(target_thread_local),
                    not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
                ))]
                static __KEY: $crate::thread::__OsLocalKeyInner<$t> =
                    $crate::thread::__OsLocalKeyInner::new();

                // FIXME: remove the #[allow(...)] marker when macros don't
                // raise warnings for missing/extraneous unsafe blocks anymore.
                // See https://github.com/rust-lang/rust/issues/74838.
                #[allow(unused_unsafe)]
                unsafe { __KEY.get(__init) }
            }

            unsafe {
                $crate::thread::LocalKey::new(__getit)
            }
        }
    };
    ($(#[$attr:meta])* $vis:vis $name:ident, $t:ty, $init:expr) => {
        $(#[$attr])* $vis const $name: $crate::thread::LocalKey<$t> =
            $crate::__thread_local_inner!(@key $t, $init);
    }
}

/// An error returned by [`LocalKey::try_with`](struct.LocalKey.html#method.try_with).
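///
/// # Examples
///
/// A value can no longer be accessed once its destructor has started running.
/// The sketch below (the `HANDLE` and `DuringDrop` names are illustrative
/// only) mirrors this module's own tests: calling `try_with` from inside the
/// value's destructor yields this error instead of a panic.
///
/// ```
/// use std::thread;
///
/// struct DuringDrop;
///
/// impl Drop for DuringDrop {
///     fn drop(&mut self) {
///         // The value is currently being destroyed, so `try_with` reports
///         // an `AccessError` rather than panicking.
///         assert!(HANDLE.try_with(|_| ()).is_err());
///     }
/// }
///
/// thread_local!(static HANDLE: DuringDrop = DuringDrop);
///
/// thread::spawn(|| {
///     // Touch the key so its value exists and its destructor will run.
///     HANDLE.with(|_| ());
/// })
/// .join()
/// .unwrap();
/// ```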
#[stable(feature = "thread_local_try_with", since = "1.26.0")]
#[derive(Clone, Copy, Eq, PartialEq)]
pub struct AccessError {
    _private: (),
}

#[stable(feature = "thread_local_try_with", since = "1.26.0")]
impl fmt::Debug for AccessError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("AccessError").finish()
    }
}

#[stable(feature = "thread_local_try_with", since = "1.26.0")]
impl fmt::Display for AccessError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt("already destroyed", f)
    }
}

#[stable(feature = "thread_local_try_with", since = "1.26.0")]
impl Error for AccessError {}

impl<T: 'static> LocalKey<T> {
    #[doc(hidden)]
    #[unstable(
        feature = "thread_local_internals",
        reason = "recently added to create a key",
        issue = "none"
    )]
    pub const unsafe fn new(inner: unsafe fn() -> Option<&'static T>) -> LocalKey<T> {
        LocalKey { inner }
    }

    /// Acquires a reference to the value in this TLS key.
    ///
    /// This will lazily initialize the value if this thread has not referenced
    /// this key yet.
    ///
    /// # Panics
    ///
    /// This function will `panic!()` if the key currently has its
    /// destructor running, and it **may** panic if the destructor has
    /// previously been run for this thread.
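    ///
    /// # Examples
    ///
    /// A minimal sketch of the usual pattern, using a hypothetical `COUNTER`
    /// key declared with `thread_local!`:
    ///
    /// ```
    /// use std::cell::Cell;
    ///
    /// thread_local!(static COUNTER: Cell<u32> = Cell::new(0));
    ///
    /// // Each call hands the closure a shared reference to this thread's value.
    /// COUNTER.with(|c| c.set(c.get() + 1));
    /// COUNTER.with(|c| assert_eq!(c.get(), 1));
    /// ```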
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn with<F, R>(&'static self, f: F) -> R
    where
        F: FnOnce(&T) -> R,
    {
        self.try_with(f).expect(
            "cannot access a Thread Local Storage value \
             during or after destruction",
        )
    }

    /// Acquires a reference to the value in this TLS key.
    ///
    /// This will lazily initialize the value if this thread has not referenced
    /// this key yet. If the key has been destroyed (which may happen if this is called
    /// in a destructor), this function will return an [`AccessError`](struct.AccessError.html).
    ///
    /// # Panics
    ///
    /// This function will still `panic!()` if the key is uninitialized and the
    /// key's initializer panics.
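    ///
    /// # Examples
    ///
    /// A minimal sketch, again using a hypothetical `COUNTER` key; outside of
    /// destruction the call simply succeeds:
    ///
    /// ```
    /// use std::cell::Cell;
    ///
    /// thread_local!(static COUNTER: Cell<u32> = Cell::new(0));
    ///
    /// // `try_with` returns `Ok` while the value is accessible.
    /// assert_eq!(COUNTER.try_with(|c| c.get()), Ok(0));
    /// ```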
    #[stable(feature = "thread_local_try_with", since = "1.26.0")]
    #[inline]
    pub fn try_with<F, R>(&'static self, f: F) -> Result<R, AccessError>
    where
        F: FnOnce(&T) -> R,
    {
        unsafe {
            let thread_local = (self.inner)().ok_or(AccessError { _private: () })?;
            Ok(f(thread_local))
        }
    }
}

mod lazy {
    use crate::cell::UnsafeCell;
    use crate::hint;
    use crate::mem;

    pub struct LazyKeyInner<T> {
        inner: UnsafeCell<Option<T>>,
    }

    impl<T> LazyKeyInner<T> {
        pub const fn new() -> LazyKeyInner<T> {
            LazyKeyInner { inner: UnsafeCell::new(None) }
        }

        pub unsafe fn get(&self) -> Option<&'static T> {
            (*self.inner.get()).as_ref()
        }

        pub unsafe fn initialize<F: FnOnce() -> T>(&self, init: F) -> &'static T {
            // Execute the initialization up front, *then* move it into our slot,
            // just in case initialization fails.
            let value = init();
            let ptr = self.inner.get();

            // Note that this could in theory just be `*ptr = Some(value)`, but
            // the compiler will currently codegen that pattern as something like:
            //
            //      ptr::drop_in_place(ptr)
            //      ptr::write(ptr, Some(value))
            //
            // Due to this pattern it's possible for the destructor of the value in
            // `ptr` (e.g., if this is being recursively initialized) to re-access
            // TLS, in which case there will be a `&` and `&mut` pointer to the same
            // value (an aliasing violation). To avoid setting the "I'm running a
            // destructor" flag we just use `mem::replace`, which should sequence the
            // operations a little differently and make this safe to call.
            let _ = mem::replace(&mut *ptr, Some(value));

            // After storing `Some` we want to get a reference to the contents of
            // what we just stored. While we could use `unwrap` here, and it should
            // always work, it empirically doesn't seem to always get optimized away,
            // which means that using something like `try_with` can pull in
            // panicking code and cause a large size bloat.
            match *ptr {
                Some(ref x) => x,
                None => hint::unreachable_unchecked(),
            }
        }

        #[allow(unused)]
        pub unsafe fn take(&mut self) -> Option<T> {
            (*self.inner.get()).take()
        }
    }
}

/// On some platforms like wasm32 there are no threads, so there is no need to
/// generate thread locals and we can instead just use plain statics!
#[doc(hidden)]
#[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
pub mod statik {
    use super::lazy::LazyKeyInner;
    use crate::fmt;

    pub struct Key<T> {
        inner: LazyKeyInner<T>,
    }

    unsafe impl<T> Sync for Key<T> {}

    impl<T> fmt::Debug for Key<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.pad("Key { .. }")
        }
    }

    impl<T> Key<T> {
        pub const fn new() -> Key<T> {
            Key { inner: LazyKeyInner::new() }
        }

        pub unsafe fn get(&self, init: fn() -> T) -> Option<&'static T> {
            let value = match self.inner.get() {
                Some(ref value) => value,
                None => self.inner.initialize(init),
            };
            Some(value)
        }
    }
}

#[doc(hidden)]
#[cfg(target_thread_local)]
pub mod fast {
    use super::lazy::LazyKeyInner;
    use crate::cell::Cell;
    use crate::fmt;
    use crate::mem;
    use crate::sys::thread_local_dtor::register_dtor;

    #[derive(Copy, Clone)]
    enum DtorState {
        Unregistered,
        Registered,
        RunningOrHasRun,
    }

    // This data structure has been carefully constructed so that the fast path
    // only contains one branch on x86. That optimization is necessary to avoid
    // duplicated TLS lookups on OSX.
    //
    // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
    pub struct Key<T> {
        // If `LazyKeyInner::get` returns `None`, that indicates either:
        //   * The value has never been initialized
        //   * The value is being recursively initialized
        //   * The value has already been destroyed or is being destroyed
        // To determine which kind of `None`, check `dtor_state`.
        //
        // This is very optimizer friendly for the fast path - initialized but
        // not yet dropped.
        inner: LazyKeyInner<T>,

        // Metadata to keep track of the state of the destructor. Remember that
        // this variable is thread-local, not global.
        dtor_state: Cell<DtorState>,
    }

    impl<T> fmt::Debug for Key<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.pad("Key { .. }")
        }
    }

    impl<T> Key<T> {
        pub const fn new() -> Key<T> {
            Key { inner: LazyKeyInner::new(), dtor_state: Cell::new(DtorState::Unregistered) }
        }

        pub unsafe fn get<F: FnOnce() -> T>(&self, init: F) -> Option<&'static T> {
            match self.inner.get() {
                Some(val) => Some(val),
                None => self.try_initialize(init),
            }
        }

        // `try_initialize` is only called once per fast thread local variable,
        // except in corner cases where thread_local dtors reference other
        // thread_local's, or it is being recursively initialized.
        //
        // macOS: Inlining this function can cause two `tlv_get_addr` calls to
        // be performed for every call to `Key::get`. The #[cold] hint makes
        // that less likely.
        // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
        #[cold]
        unsafe fn try_initialize<F: FnOnce() -> T>(&self, init: F) -> Option<&'static T> {
            if !mem::needs_drop::<T>() || self.try_register_dtor() {
                Some(self.inner.initialize(init))
            } else {
                None
            }
        }

        // `try_register_dtor` is only called once per fast thread local
        // variable, except in corner cases where thread_local dtors reference
        // other thread_local's, or it is being recursively initialized.
        unsafe fn try_register_dtor(&self) -> bool {
            match self.dtor_state.get() {
                DtorState::Unregistered => {
                    // dtor registration happens before initialization.
                    register_dtor(self as *const _ as *mut u8, destroy_value::<T>);
                    self.dtor_state.set(DtorState::Registered);
                    true
                }
                DtorState::Registered => {
                    // recursively initialized
                    true
                }
                DtorState::RunningOrHasRun => false,
            }
        }
    }

    unsafe extern "C" fn destroy_value<T>(ptr: *mut u8) {
        let ptr = ptr as *mut Key<T>;

        // Right before we run the user destructor be sure to set the
        // `Option<T>` to `None`, and `dtor_state` to `RunningOrHasRun`. This
        // causes future calls to `get` to run `try_initialize` again, which
        // will now fail, and return `None`.
        let value = (*ptr).inner.take();
        (*ptr).dtor_state.set(DtorState::RunningOrHasRun);
        drop(value);
    }
}

#[doc(hidden)]
pub mod os {
    use super::lazy::LazyKeyInner;
    use crate::cell::Cell;
    use crate::fmt;
    use crate::marker;
    use crate::ptr;
    use crate::sys_common::thread_local_key::StaticKey as OsStaticKey;

    pub struct Key<T> {
        // OS-TLS key that we'll use to key off.
        os: OsStaticKey,
        marker: marker::PhantomData<Cell<T>>,
    }

    impl<T> fmt::Debug for Key<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.pad("Key { .. }")
        }
    }

    unsafe impl<T> Sync for Key<T> {}

    struct Value<T: 'static> {
        inner: LazyKeyInner<T>,
        key: &'static Key<T>,
    }

    impl<T: 'static> Key<T> {
        pub const fn new() -> Key<T> {
            Key { os: OsStaticKey::new(Some(destroy_value::<T>)), marker: marker::PhantomData }
        }

        pub unsafe fn get(&'static self, init: fn() -> T) -> Option<&'static T> {
            let ptr = self.os.get() as *mut Value<T>;
            if ptr as usize > 1 {
                if let Some(ref value) = (*ptr).inner.get() {
                    return Some(value);
                }
            }
            self.try_initialize(init)
        }

        // `try_initialize` is only called once per OS thread local variable,
        // except in corner cases where thread_local dtors reference other
        // thread_local's, or it is being recursively initialized.
        unsafe fn try_initialize(&'static self, init: fn() -> T) -> Option<&'static T> {
            let ptr = self.os.get() as *mut Value<T>;
            if ptr as usize == 1 {
                // destructor is running
                return None;
            }

            let ptr = if ptr.is_null() {
                // If the lookup returned null, we haven't initialized our own
                // local copy, so do that now.
                let ptr: Box<Value<T>> = box Value { inner: LazyKeyInner::new(), key: self };
                let ptr = Box::into_raw(ptr);
                self.os.set(ptr as *mut u8);
                ptr
            } else {
                // recursive initialization
                ptr
            };

            Some((*ptr).inner.initialize(init))
        }
    }

    unsafe extern "C" fn destroy_value<T: 'static>(ptr: *mut u8) {
        // The OS TLS ensures that this key contains a NULL value when this
        // destructor starts to run. We set it back to a sentinel value of 1 to
        // ensure that any future calls to `get` for this thread will return
        // `None`.
        //
        // Note that to prevent an infinite loop we reset it back to null right
        // before we return from the destructor ourselves.
        let ptr = Box::from_raw(ptr as *mut Value<T>);
        let key = ptr.key;
        key.os.set(1 as *mut u8);
        drop(ptr);
        key.os.set(ptr::null_mut());
    }
}

#[cfg(all(test, not(target_os = "emscripten")))]
mod tests {
    use crate::cell::{Cell, UnsafeCell};
    use crate::sync::mpsc::{channel, Sender};
    use crate::thread;

    struct Foo(Sender<()>);

    impl Drop for Foo {
        fn drop(&mut self) {
            let Foo(ref s) = *self;
            s.send(()).unwrap();
        }
    }

    #[test]
    fn smoke_no_dtor() {
        thread_local!(static FOO: Cell<i32> = Cell::new(1));

        FOO.with(|f| {
            assert_eq!(f.get(), 1);
            f.set(2);
        });
        let (tx, rx) = channel();
        let _t = thread::spawn(move || {
            FOO.with(|f| {
                assert_eq!(f.get(), 1);
            });
            tx.send(()).unwrap();
        });
        rx.recv().unwrap();

        FOO.with(|f| {
            assert_eq!(f.get(), 2);
        });
    }

    #[test]
    fn states() {
        struct Foo;
        impl Drop for Foo {
            fn drop(&mut self) {
                assert!(FOO.try_with(|_| ()).is_err());
            }
        }
        thread_local!(static FOO: Foo = Foo);

        thread::spawn(|| {
            assert!(FOO.try_with(|_| ()).is_ok());
        })
        .join()
        .ok()
        .expect("thread panicked");
    }

    #[test]
    fn smoke_dtor() {
        thread_local!(static FOO: UnsafeCell<Option<Foo>> = UnsafeCell::new(None));

        let (tx, rx) = channel();
        let _t = thread::spawn(move || unsafe {
            let mut tx = Some(tx);
            FOO.with(|f| {
                *f.get() = Some(Foo(tx.take().unwrap()));
            });
        });
        rx.recv().unwrap();
    }

    #[test]
    fn circular() {
        struct S1;
        struct S2;
        thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));
        thread_local!(static K2: UnsafeCell<Option<S2>> = UnsafeCell::new(None));
        static mut HITS: u32 = 0;

        impl Drop for S1 {
            fn drop(&mut self) {
                unsafe {
                    HITS += 1;
                    if K2.try_with(|_| ()).is_err() {
                        assert_eq!(HITS, 3);
                    } else {
                        if HITS == 1 {
                            K2.with(|s| *s.get() = Some(S2));
                        } else {
                            assert_eq!(HITS, 3);
                        }
                    }
                }
            }
        }
        impl Drop for S2 {
            fn drop(&mut self) {
                unsafe {
                    HITS += 1;
                    assert!(K1.try_with(|_| ()).is_ok());
                    assert_eq!(HITS, 2);
                    K1.with(|s| *s.get() = Some(S1));
                }
            }
        }

        thread::spawn(move || {
            drop(S1);
        })
        .join()
        .ok()
        .expect("thread panicked");
    }

    #[test]
    fn self_referential() {
        struct S1;
        thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));

        impl Drop for S1 {
            fn drop(&mut self) {
                assert!(K1.try_with(|_| ()).is_err());
            }
        }

        thread::spawn(move || unsafe {
            K1.with(|s| *s.get() = Some(S1));
        })
        .join()
        .ok()
        .expect("thread panicked");
    }

    // Note that this test will deadlock if TLS destructors aren't run (this
    // requires the destructor to be run to pass the test).
    #[test]
    fn dtors_in_dtors_in_dtors() {
        struct S1(Sender<()>);
        thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));
        thread_local!(static K2: UnsafeCell<Option<Foo>> = UnsafeCell::new(None));

        impl Drop for S1 {
            fn drop(&mut self) {
                let S1(ref tx) = *self;
                unsafe {
                    let _ = K2.try_with(|s| *s.get() = Some(Foo(tx.clone())));
                }
            }
        }

        let (tx, rx) = channel();
        let _t = thread::spawn(move || unsafe {
            let mut tx = Some(tx);
            K1.with(|s| *s.get() = Some(S1(tx.take().unwrap())));
        });
        rx.recv().unwrap();
    }
}

#[cfg(test)]
mod dynamic_tests {
    use crate::cell::RefCell;
    use crate::collections::HashMap;

    #[test]
    fn smoke() {
        fn square(i: i32) -> i32 {
            i * i
        }
        thread_local!(static FOO: i32 = square(3));

        FOO.with(|f| {
            assert_eq!(*f, 9);
        });
    }

    #[test]
    fn hashmap() {
        fn map() -> RefCell<HashMap<i32, i32>> {
            let mut m = HashMap::new();
            m.insert(1, 2);
            RefCell::new(m)
        }
        thread_local!(static FOO: RefCell<HashMap<i32, i32>> = map());

        FOO.with(|map| {
            assert_eq!(map.borrow()[&1], 2);
        });
    }

    #[test]
    fn refcell_vec() {
        thread_local!(static FOO: RefCell<Vec<u32>> = RefCell::new(vec![1, 2, 3]));

        FOO.with(|vec| {
            assert_eq!(vec.borrow().len(), 3);
            vec.borrow_mut().push(4);
            assert_eq!(vec.borrow()[3], 4);
        });
    }
}