// src/libstd/thread/local.rs (rustc 1.43.0)
//! Thread local storage

#![unstable(feature = "thread_local_internals", issue = "none")]

use crate::error::Error;
use crate::fmt;

/// A thread local storage key which owns its contents.
///
/// This key uses the fastest possible implementation available to it for the
/// target platform. It is instantiated with the [`thread_local!`] macro and the
/// primary method is the [`with`] method.
///
/// The [`with`] method yields a reference to the contained value which cannot be
/// sent across threads or escape the given closure.
///
/// # Initialization and Destruction
///
/// Initialization is dynamically performed on the first call to [`with`]
/// within a thread, and values that implement [`Drop`] get destructed when a
/// thread exits. Some caveats apply, which are explained below.
///
/// A `LocalKey`'s initializer cannot recursively depend on itself, and using
/// a `LocalKey` in this way will cause the initializer to infinitely recurse
/// on the first call to `with`.
///
/// # Examples
///
/// ```
/// use std::cell::RefCell;
/// use std::thread;
///
/// thread_local!(static FOO: RefCell<u32> = RefCell::new(1));
///
/// FOO.with(|f| {
///     assert_eq!(*f.borrow(), 1);
///     *f.borrow_mut() = 2;
/// });
///
/// // each thread starts out with the initial value of 1
/// let t = thread::spawn(move || {
///     FOO.with(|f| {
///         assert_eq!(*f.borrow(), 1);
///         *f.borrow_mut() = 3;
///     });
/// });
///
/// // wait for the thread to complete and bail out on panic
/// t.join().unwrap();
///
/// // we retain our original value of 2 despite the child thread
/// FOO.with(|f| {
///     assert_eq!(*f.borrow(), 2);
/// });
/// ```
///
/// # Platform-specific behavior
///
/// Note that a "best effort" is made to ensure that destructors for types
/// stored in thread local storage are run, but not all platforms can guarantee
/// that destructors will be run for all types in thread local storage. For
/// example, there are a number of known caveats where destructors are not run:
///
/// 1. On Unix systems when pthread-based TLS is being used, destructors will
///    not be run for TLS values on the main thread when it exits. Note that the
///    application will exit immediately after the main thread exits as well.
/// 2. On all platforms it's possible for TLS to re-initialize other TLS slots
///    during destruction. Some platforms ensure that this cannot happen
///    infinitely by preventing re-initialization of any slot that has been
///    destroyed, but not all platforms have this guard. Those platforms that do
///    not guard typically have a synthetic limit after which point no more
///    destructors are run.
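///
/// A minimal sketch of the destructor behavior (the `DropLogger` type here is
/// purely illustrative):
///
/// ```
/// use std::thread;
///
/// struct DropLogger;
///
/// impl Drop for DropLogger {
///     fn drop(&mut self) {
///         println!("thread local value dropped");
///     }
/// }
///
/// thread_local!(static LOGGER: DropLogger = DropLogger);
///
/// thread::spawn(|| {
///     // Touch the key so it is initialized on this thread; its destructor
///     // then runs when the thread exits (subject to the caveats above).
///     LOGGER.with(|_| ());
/// }).join().unwrap();
/// ```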
///
/// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
/// [`thread_local!`]: ../../std/macro.thread_local.html
/// [`Drop`]: ../../std/ops/trait.Drop.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct LocalKey<T: 'static> {
    // This outer `LocalKey<T>` type is what's going to be stored in statics,
    // but actual data inside will sometimes be tagged with #[thread_local].
    // It's not valid for a true static to reference a #[thread_local] static,
    // so we get around that by exposing an accessor through a layer of function
    // indirection (this thunk).
    //
    // Note that the thunk is itself unsafe because the returned lifetime of the
    // slot where data lives, `'static`, is not actually valid. The lifetime
    // here is actually slightly shorter than the currently running thread!
    //
    // Although this is an extra layer of indirection, it should in theory be
    // trivially devirtualizable by LLVM because the value of `inner` never
    // changes and the constant should be readonly within a crate. This mainly
    // only runs into problems when TLS statics are exported across crates.
    inner: unsafe fn() -> Option<&'static T>,
}

#[stable(feature = "std_debug", since = "1.16.0")]
impl<T: 'static> fmt::Debug for LocalKey<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.pad("LocalKey { .. }")
    }
}

/// Declare a new thread local storage key of type [`std::thread::LocalKey`].
///
/// # Syntax
///
/// The macro wraps any number of static declarations and makes them thread local.
/// Publicity and attributes for each static are allowed. Example:
///
/// ```
/// use std::cell::RefCell;
/// thread_local! {
///     pub static FOO: RefCell<u32> = RefCell::new(1);
///
///     #[allow(unused)]
///     static BAR: RefCell<f32> = RefCell::new(1.0);
/// }
/// # fn main() {}
/// ```
///
/// See [LocalKey documentation][`std::thread::LocalKey`] for more
/// information.
///
/// [`std::thread::LocalKey`]: ../std/thread/struct.LocalKey.html
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow_internal_unstable(thread_local_internals)]
macro_rules! thread_local {
    // empty (base case for the recursion)
    () => {};

    // process multiple declarations
    ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr; $($rest:tt)*) => (
        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
        $crate::thread_local!($($rest)*);
    );

    // handle a single declaration
    ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr) => (
        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
    );
}

#[doc(hidden)]
#[unstable(feature = "thread_local_internals", reason = "should not be necessary", issue = "none")]
#[macro_export]
#[allow_internal_unstable(thread_local_internals, cfg_target_thread_local, thread_local)]
#[allow_internal_unsafe]
macro_rules! __thread_local_inner {
    (@key $t:ty, $init:expr) => {
        {
            #[inline]
            fn __init() -> $t { $init }

            unsafe fn __getit() -> $crate::option::Option<&'static $t> {
                #[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
                static __KEY: $crate::thread::__StaticLocalKeyInner<$t> =
                    $crate::thread::__StaticLocalKeyInner::new();

                #[thread_local]
                #[cfg(all(
                    target_thread_local,
                    not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
                ))]
                static __KEY: $crate::thread::__FastLocalKeyInner<$t> =
                    $crate::thread::__FastLocalKeyInner::new();

                #[cfg(all(
                    not(target_thread_local),
                    not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
                ))]
                static __KEY: $crate::thread::__OsLocalKeyInner<$t> =
                    $crate::thread::__OsLocalKeyInner::new();

                __KEY.get(__init)
            }

            unsafe {
                $crate::thread::LocalKey::new(__getit)
            }
        }
    };
    ($(#[$attr:meta])* $vis:vis $name:ident, $t:ty, $init:expr) => {
        $(#[$attr])* $vis const $name: $crate::thread::LocalKey<$t> =
            $crate::__thread_local_inner!(@key $t, $init);
    }
}
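
// A rough sketch of what the macros above produce: a declaration like
// `thread_local!(static FOO: u32 = 1)` expands to approximately
//
//     const FOO: LocalKey<u32> = {
//         fn __init() -> u32 { 1 }
//         unsafe fn __getit() -> Option<&'static u32> {
//             // one of the __StaticLocalKeyInner / __FastLocalKeyInner /
//             // __OsLocalKeyInner statics, selected by the cfg attributes above
//             __KEY.get(__init)
//         }
//         unsafe { LocalKey::new(__getit) }
//     };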

/// An error returned by [`LocalKey::try_with`](struct.LocalKey.html#method.try_with).
#[stable(feature = "thread_local_try_with", since = "1.26.0")]
#[derive(Clone, Copy, Eq, PartialEq)]
pub struct AccessError {
    _private: (),
}

#[stable(feature = "thread_local_try_with", since = "1.26.0")]
impl fmt::Debug for AccessError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("AccessError").finish()
    }
}

#[stable(feature = "thread_local_try_with", since = "1.26.0")]
impl fmt::Display for AccessError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt("already destroyed", f)
    }
}

#[stable(feature = "thread_local_try_with", since = "1.26.0")]
impl Error for AccessError {}

impl<T: 'static> LocalKey<T> {
    #[doc(hidden)]
    #[unstable(
        feature = "thread_local_internals",
        reason = "recently added to create a key",
        issue = "none"
    )]
    pub const unsafe fn new(inner: unsafe fn() -> Option<&'static T>) -> LocalKey<T> {
        LocalKey { inner }
    }

    /// Acquires a reference to the value in this TLS key.
    ///
    /// This will lazily initialize the value if this thread has not referenced
    /// this key yet.
    ///
    /// # Panics
    ///
    /// This function will `panic!()` if the key currently has its
    /// destructor running, and it **may** panic if the destructor has
    /// previously been run for this thread.
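    ///
    /// # Examples
    ///
    /// A minimal sketch of using `with` (the `COUNTER` key below is purely
    /// illustrative):
    ///
    /// ```
    /// use std::cell::Cell;
    ///
    /// thread_local!(static COUNTER: Cell<u32> = Cell::new(0));
    ///
    /// COUNTER.with(|c| c.set(c.get() + 1));
    /// COUNTER.with(|c| assert_eq!(c.get(), 1));
    /// ```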
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn with<F, R>(&'static self, f: F) -> R
    where
        F: FnOnce(&T) -> R,
    {
        self.try_with(f).expect(
            "cannot access a Thread Local Storage value \
             during or after destruction",
        )
    }

    /// Acquires a reference to the value in this TLS key.
    ///
    /// This will lazily initialize the value if this thread has not referenced
    /// this key yet. If the key has been destroyed (which may happen if this is called
    /// in a destructor), this function will return an [`AccessError`](struct.AccessError.html).
    ///
    /// # Panics
    ///
    /// This function will still `panic!()` if the key is uninitialized and the
    /// key's initializer panics.
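    ///
    /// # Examples
    ///
    /// A minimal sketch of `try_with` on a live key (the `FOO` key is purely
    /// illustrative); when called after the key has been destroyed it returns
    /// `Err(AccessError)` instead, as exercised by the tests below.
    ///
    /// ```
    /// thread_local!(static FOO: u32 = 42);
    ///
    /// assert_eq!(FOO.try_with(|v| *v), Ok(42));
    /// ```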
    #[stable(feature = "thread_local_try_with", since = "1.26.0")]
    pub fn try_with<F, R>(&'static self, f: F) -> Result<R, AccessError>
    where
        F: FnOnce(&T) -> R,
    {
        unsafe {
            let thread_local = (self.inner)().ok_or(AccessError { _private: () })?;
            Ok(f(thread_local))
        }
    }
}

mod lazy {
    use crate::cell::UnsafeCell;
    use crate::hint;
    use crate::mem;

    pub struct LazyKeyInner<T> {
        inner: UnsafeCell<Option<T>>,
    }

    impl<T> LazyKeyInner<T> {
        pub const fn new() -> LazyKeyInner<T> {
            LazyKeyInner { inner: UnsafeCell::new(None) }
        }

        pub unsafe fn get(&self) -> Option<&'static T> {
            (*self.inner.get()).as_ref()
        }

        pub unsafe fn initialize<F: FnOnce() -> T>(&self, init: F) -> &'static T {
            // Execute the initialization up front, *then* move it into our slot,
            // just in case initialization fails.
            let value = init();
            let ptr = self.inner.get();

            // Note that this could in theory just be `*ptr = Some(value)`, but
            // the compiler will currently codegen that pattern with something like:
            //
            //      ptr::drop_in_place(ptr)
            //      ptr::write(ptr, Some(value))
            //
            // Due to this pattern it's possible for the destructor of the value in
            // `ptr` (e.g., if this is being recursively initialized) to re-access
            // TLS, in which case there will be a `&` and `&mut` pointer to the same
            // value (an aliasing violation). To avoid setting the "I'm running a
            // destructor" flag we just use `mem::replace` which should sequence the
            // operations a little differently and make this safe to call.
            mem::replace(&mut *ptr, Some(value));

            // After storing `Some` we want to get a reference to the contents of
            // what we just stored. While we could use `unwrap` here and it should
            // always work it empirically doesn't seem to always get optimized away,
            // which means that using something like `try_with` can pull in
            // panicking code and cause a large size bloat.
            match *ptr {
                Some(ref x) => x,
                None => hint::unreachable_unchecked(),
            }
        }

        #[allow(unused)]
        pub unsafe fn take(&mut self) -> Option<T> {
            (*self.inner.get()).take()
        }
    }
}

/// On some platforms like wasm32 there are no threads, so there is no need to
/// generate thread locals and we can instead just use plain statics!
#[doc(hidden)]
#[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
pub mod statik {
    use super::lazy::LazyKeyInner;
    use crate::fmt;

    pub struct Key<T> {
        inner: LazyKeyInner<T>,
    }

    unsafe impl<T> Sync for Key<T> {}

    impl<T> fmt::Debug for Key<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.pad("Key { .. }")
        }
    }

    impl<T> Key<T> {
        pub const fn new() -> Key<T> {
            Key { inner: LazyKeyInner::new() }
        }

        pub unsafe fn get(&self, init: fn() -> T) -> Option<&'static T> {
            let value = match self.inner.get() {
                Some(ref value) => value,
                None => self.inner.initialize(init),
            };
            Some(value)
        }
    }
}

#[doc(hidden)]
#[cfg(target_thread_local)]
pub mod fast {
    use super::lazy::LazyKeyInner;
    use crate::cell::Cell;
    use crate::fmt;
    use crate::mem;
    use crate::sys::fast_thread_local::register_dtor;

    #[derive(Copy, Clone)]
    enum DtorState {
        Unregistered,
        Registered,
        RunningOrHasRun,
    }
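
    // State transitions, roughly: a key starts out `Unregistered`; the first
    // initialization of a value that needs dropping registers the destructor
    // and moves to `Registered`; once the destructor runs the state becomes
    // `RunningOrHasRun`, after which `get` returns `None` rather than
    // re-initializing.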

    // This data structure has been carefully constructed so that the fast path
    // only contains one branch on x86. That optimization is necessary to avoid
    // duplicated tls lookups on OSX.
    //
    // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
    pub struct Key<T> {
        // If `LazyKeyInner::get` returns `None`, that indicates either:
        //   * The value has never been initialized
        //   * The value is being recursively initialized
        //   * The value has already been destroyed or is being destroyed
        // To determine which kind of `None`, check `dtor_state`.
        //
        // This is very optimizer friendly for the fast path - initialized but
        // not yet dropped.
        inner: LazyKeyInner<T>,

        // Metadata to keep track of the state of the destructor. Remember that
        // this variable is thread-local, not global.
        dtor_state: Cell<DtorState>,
    }

    impl<T> fmt::Debug for Key<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.pad("Key { .. }")
        }
    }

    impl<T> Key<T> {
        pub const fn new() -> Key<T> {
            Key { inner: LazyKeyInner::new(), dtor_state: Cell::new(DtorState::Unregistered) }
        }

        pub unsafe fn get<F: FnOnce() -> T>(&self, init: F) -> Option<&'static T> {
            match self.inner.get() {
                Some(val) => Some(val),
                None => self.try_initialize(init),
            }
        }

        // `try_initialize` is only called once per fast thread local variable,
        // except in corner cases where thread_local dtors reference other
        // thread_local's, or it is being recursively initialized.
        //
        // Macos: Inlining this function can cause two `tlv_get_addr` calls to
        // be performed for every call to `Key::get`. The #[cold] hint makes
        // that less likely.
        // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
        #[cold]
        unsafe fn try_initialize<F: FnOnce() -> T>(&self, init: F) -> Option<&'static T> {
            if !mem::needs_drop::<T>() || self.try_register_dtor() {
                Some(self.inner.initialize(init))
            } else {
                None
            }
        }

        // `try_register_dtor` is only called once per fast thread local
        // variable, except in corner cases where thread_local dtors reference
        // other thread_local's, or it is being recursively initialized.
        unsafe fn try_register_dtor(&self) -> bool {
            match self.dtor_state.get() {
                DtorState::Unregistered => {
                    // dtor registration happens before initialization.
                    register_dtor(self as *const _ as *mut u8, destroy_value::<T>);
                    self.dtor_state.set(DtorState::Registered);
                    true
                }
                DtorState::Registered => {
                    // recursively initialized
                    true
                }
                DtorState::RunningOrHasRun => false,
            }
        }
    }

    unsafe extern "C" fn destroy_value<T>(ptr: *mut u8) {
        let ptr = ptr as *mut Key<T>;

        // Right before we run the user destructor be sure to set the
        // `Option<T>` to `None`, and `dtor_state` to `RunningOrHasRun`. This
        // causes future calls to `get` to run `try_initialize` again, which
        // will now fail, and return `None`.
        let value = (*ptr).inner.take();
        (*ptr).dtor_state.set(DtorState::RunningOrHasRun);
        drop(value);
    }
}

#[doc(hidden)]
pub mod os {
    use super::lazy::LazyKeyInner;
    use crate::cell::Cell;
    use crate::fmt;
    use crate::marker;
    use crate::ptr;
    use crate::sys_common::thread_local::StaticKey as OsStaticKey;

    pub struct Key<T> {
        // OS-TLS key that we'll use to key off.
        os: OsStaticKey,
        marker: marker::PhantomData<Cell<T>>,
    }

    impl<T> fmt::Debug for Key<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.pad("Key { .. }")
        }
    }

    unsafe impl<T> Sync for Key<T> {}

    struct Value<T: 'static> {
        inner: LazyKeyInner<T>,
        key: &'static Key<T>,
    }
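
    // Informally, the raw pointer stored in the OS key for a thread encodes
    // three states: null means the thread has not initialized its `Value<T>`
    // (or the destructor has already completed), the sentinel value `1` means
    // the destructor is currently running, and any other value is a pointer
    // to the live, boxed `Value<T>`.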

    impl<T: 'static> Key<T> {
        pub const fn new() -> Key<T> {
            Key { os: OsStaticKey::new(Some(destroy_value::<T>)), marker: marker::PhantomData }
        }

        pub unsafe fn get(&'static self, init: fn() -> T) -> Option<&'static T> {
            let ptr = self.os.get() as *mut Value<T>;
            if ptr as usize > 1 {
                if let Some(ref value) = (*ptr).inner.get() {
                    return Some(value);
                }
            }
            self.try_initialize(init)
        }

        // `try_initialize` is only called once per os thread local variable,
        // except in corner cases where thread_local dtors reference other
        // thread_local's, or it is being recursively initialized.
        unsafe fn try_initialize(&'static self, init: fn() -> T) -> Option<&'static T> {
            let ptr = self.os.get() as *mut Value<T>;
            if ptr as usize == 1 {
                // destructor is running
                return None;
            }

            let ptr = if ptr.is_null() {
                // If the lookup returned null, we haven't initialized our own
                // local copy, so do that now.
                let ptr: Box<Value<T>> = box Value { inner: LazyKeyInner::new(), key: self };
                let ptr = Box::into_raw(ptr);
                self.os.set(ptr as *mut u8);
                ptr
            } else {
                // recursive initialization
                ptr
            };

            Some((*ptr).inner.initialize(init))
        }
    }

    unsafe extern "C" fn destroy_value<T: 'static>(ptr: *mut u8) {
        // The OS TLS ensures that this key contains a NULL value when this
        // destructor starts to run. We set it back to a sentinel value of 1 to
        // ensure that any future calls to `get` for this thread will return
        // `None`.
        //
        // Note that to prevent an infinite loop we reset it back to null right
        // before we return from the destructor ourselves.
        let ptr = Box::from_raw(ptr as *mut Value<T>);
        let key = ptr.key;
        key.os.set(1 as *mut u8);
        drop(ptr);
        key.os.set(ptr::null_mut());
    }
}

#[cfg(all(test, not(target_os = "emscripten")))]
mod tests {
    use crate::cell::{Cell, UnsafeCell};
    use crate::sync::mpsc::{channel, Sender};
    use crate::thread;

    struct Foo(Sender<()>);

    impl Drop for Foo {
        fn drop(&mut self) {
            let Foo(ref s) = *self;
            s.send(()).unwrap();
        }
    }

    #[test]
    fn smoke_no_dtor() {
        thread_local!(static FOO: Cell<i32> = Cell::new(1));

        FOO.with(|f| {
            assert_eq!(f.get(), 1);
            f.set(2);
        });
        let (tx, rx) = channel();
        let _t = thread::spawn(move || {
            FOO.with(|f| {
                assert_eq!(f.get(), 1);
            });
            tx.send(()).unwrap();
        });
        rx.recv().unwrap();

        FOO.with(|f| {
            assert_eq!(f.get(), 2);
        });
    }

    #[test]
    fn states() {
        struct Foo;
        impl Drop for Foo {
            fn drop(&mut self) {
                assert!(FOO.try_with(|_| ()).is_err());
            }
        }
        thread_local!(static FOO: Foo = Foo);

        thread::spawn(|| {
            assert!(FOO.try_with(|_| ()).is_ok());
        })
        .join()
        .ok()
        .expect("thread panicked");
    }

    #[test]
    fn smoke_dtor() {
        thread_local!(static FOO: UnsafeCell<Option<Foo>> = UnsafeCell::new(None));

        let (tx, rx) = channel();
        let _t = thread::spawn(move || unsafe {
            let mut tx = Some(tx);
            FOO.with(|f| {
                *f.get() = Some(Foo(tx.take().unwrap()));
            });
        });
        rx.recv().unwrap();
    }

    #[test]
    fn circular() {
        struct S1;
        struct S2;
        thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));
        thread_local!(static K2: UnsafeCell<Option<S2>> = UnsafeCell::new(None));
        static mut HITS: u32 = 0;

        impl Drop for S1 {
            fn drop(&mut self) {
                unsafe {
                    HITS += 1;
                    if K2.try_with(|_| ()).is_err() {
                        assert_eq!(HITS, 3);
                    } else {
                        if HITS == 1 {
                            K2.with(|s| *s.get() = Some(S2));
                        } else {
                            assert_eq!(HITS, 3);
                        }
                    }
                }
            }
        }
        impl Drop for S2 {
            fn drop(&mut self) {
                unsafe {
                    HITS += 1;
                    assert!(K1.try_with(|_| ()).is_ok());
                    assert_eq!(HITS, 2);
                    K1.with(|s| *s.get() = Some(S1));
                }
            }
        }

        thread::spawn(move || {
            drop(S1);
        })
        .join()
        .ok()
        .expect("thread panicked");
    }

661 fn self_referential() {
662 struct S1;
62682a34 663 thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));
1a4d82fc
JJ
664
665 impl Drop for S1 {
666 fn drop(&mut self) {
0531ce1d 667 assert!(K1.try_with(|_| ()).is_err());
1a4d82fc
JJ
668 }
669 }
670
60c5eb7d 671 thread::spawn(move || unsafe {
1a4d82fc 672 K1.with(|s| *s.get() = Some(S1));
60c5eb7d
XL
673 })
674 .join()
675 .ok()
676 .expect("thread panicked");
1a4d82fc
JJ
677 }
678
7453a54e 679 // Note that this test will deadlock if TLS destructors aren't run (this
9fa01778 680 // requires the destructor to be run to pass the test).
1a4d82fc
JJ
681 #[test]
682 fn dtors_in_dtors_in_dtors() {
683 struct S1(Sender<()>);
62682a34
SL
684 thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));
685 thread_local!(static K2: UnsafeCell<Option<Foo>> = UnsafeCell::new(None));
1a4d82fc
JJ
686
687 impl Drop for S1 {
688 fn drop(&mut self) {
689 let S1(ref tx) = *self;
690 unsafe {
0531ce1d 691 let _ = K2.try_with(|s| *s.get() = Some(Foo(tx.clone())));
1a4d82fc
JJ
692 }
693 }
694 }
695
696 let (tx, rx) = channel();
60c5eb7d 697 let _t = thread::spawn(move || unsafe {
1a4d82fc
JJ
698 let mut tx = Some(tx);
699 K1.with(|s| *s.get() = Some(S1(tx.take().unwrap())));
700 });
701 rx.recv().unwrap();
702 }
703}
704
#[cfg(test)]
mod dynamic_tests {
    use crate::cell::RefCell;
    use crate::collections::HashMap;

    #[test]
    fn smoke() {
        fn square(i: i32) -> i32 {
            i * i
        }
        thread_local!(static FOO: i32 = square(3));

        FOO.with(|f| {
            assert_eq!(*f, 9);
        });
    }

    #[test]
    fn hashmap() {
        fn map() -> RefCell<HashMap<i32, i32>> {
            let mut m = HashMap::new();
            m.insert(1, 2);
            RefCell::new(m)
        }
        thread_local!(static FOO: RefCell<HashMap<i32, i32>> = map());

        FOO.with(|map| {
            assert_eq!(map.borrow()[&1], 2);
        });
    }

    #[test]
    fn refcell_vec() {
        thread_local!(static FOO: RefCell<Vec<u32>> = RefCell::new(vec![1, 2, 3]));

        FOO.with(|vec| {
            assert_eq!(vec.borrow().len(), 3);
            vec.borrow_mut().push(4);
            assert_eq!(vec.borrow()[3], 4);
        });
    }
}