//! Thread local storage

#![unstable(feature = "thread_local_internals", issue = "0")]

use crate::fmt;

/// A thread local storage key which owns its contents.
///
/// This key uses the fastest possible implementation available to it for the
/// target platform. It is instantiated with the [`thread_local!`] macro and the
/// primary method is the [`with`] method.
///
/// The [`with`] method yields a reference to the contained value which cannot be
/// sent across threads or escape the given closure.
///
/// # Initialization and Destruction
///
/// Initialization is dynamically performed on the first call to [`with`]
/// within a thread, and values that implement [`Drop`] get destructed when a
/// thread exits. Some caveats apply, which are explained below.
///
/// A `LocalKey`'s initializer cannot recursively depend on itself, and using
/// a `LocalKey` in this way will cause the initializer to infinitely recurse
/// on the first call to `with`.
///
/// # Examples
///
/// ```
/// use std::cell::RefCell;
/// use std::thread;
///
/// thread_local!(static FOO: RefCell<u32> = RefCell::new(1));
///
/// FOO.with(|f| {
///     assert_eq!(*f.borrow(), 1);
///     *f.borrow_mut() = 2;
/// });
///
/// // each thread starts out with the initial value of 1
/// let t = thread::spawn(move|| {
///     FOO.with(|f| {
///         assert_eq!(*f.borrow(), 1);
///         *f.borrow_mut() = 3;
///     });
/// });
///
/// // wait for the thread to complete and bail out on panic
/// t.join().unwrap();
///
/// // we retain our original value of 2 despite the child thread
/// FOO.with(|f| {
///     assert_eq!(*f.borrow(), 2);
/// });
/// ```
///
/// # Platform-specific behavior
///
/// Note that a "best effort" is made to ensure that destructors for types
/// stored in thread local storage are run, but not all platforms can guarantee
/// that destructors will be run for all types in thread local storage. For
/// example, there are a number of known caveats where destructors are not run:
///
/// 1. On Unix systems when pthread-based TLS is being used, destructors will
///    not be run for TLS values on the main thread when it exits. Note that the
///    application will exit immediately after the main thread exits as well.
/// 2. On all platforms it's possible for TLS to re-initialize other TLS slots
///    during destruction. Some platforms ensure that this cannot happen
///    infinitely by preventing re-initialization of any slot that has been
///    destroyed, but not all platforms have this guard. Those platforms that do
///    not guard typically have a synthetic limit after which point no more
///    destructors are run.
///
/// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
/// [`thread_local!`]: ../../std/macro.thread_local.html
/// [`Drop`]: ../../std/ops/trait.Drop.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct LocalKey<T: 'static> {
    // This outer `LocalKey<T>` type is what's going to be stored in statics,
    // but actual data inside will sometimes be tagged with #[thread_local].
    // It's not valid for a true static to reference a #[thread_local] static,
    // so we get around that by exposing an accessor through a layer of function
    // indirection (this thunk).
    //
    // Note that the thunk is itself unsafe because the returned lifetime of the
    // slot where data lives, `'static`, is not actually valid. The lifetime
    // here is actually slightly shorter than the currently running thread!
    //
    // Although this is an extra layer of indirection, it should in theory be
    // trivially devirtualizable by LLVM because the value of `inner` never
    // changes and the constant should be readonly within a crate. This mainly
    // only runs into problems when TLS statics are exported across crates.
    inner: unsafe fn() -> Option<&'static T>,
}

#[stable(feature = "std_debug", since = "1.16.0")]
impl<T: 'static> fmt::Debug for LocalKey<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.pad("LocalKey { .. }")
    }
}

/// Declare a new thread local storage key of type [`std::thread::LocalKey`].
///
/// # Syntax
///
/// The macro wraps any number of static declarations and makes them thread local.
/// Publicity and attributes for each static are allowed. Example:
///
/// ```
/// use std::cell::RefCell;
/// thread_local! {
///     pub static FOO: RefCell<u32> = RefCell::new(1);
///
///     #[allow(unused)]
///     static BAR: RefCell<f32> = RefCell::new(1.0);
/// }
/// # fn main() {}
/// ```
///
/// See [LocalKey documentation][`std::thread::LocalKey`] for more
/// information.
///
/// [`std::thread::LocalKey`]: ../std/thread/struct.LocalKey.html
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow_internal_unstable(thread_local_internals)]
macro_rules! thread_local {
    // empty (base case for the recursion)
    () => {};

    // process multiple declarations
    ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr; $($rest:tt)*) => (
        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
        $crate::thread_local!($($rest)*);
    );

    // handle a single declaration
    ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr) => (
        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
    );
}

#[doc(hidden)]
#[unstable(feature = "thread_local_internals",
           reason = "should not be necessary",
           issue = "0")]
#[macro_export]
#[allow_internal_unstable(thread_local_internals, cfg_target_thread_local, thread_local)]
#[allow_internal_unsafe]
macro_rules! __thread_local_inner {
    (@key $(#[$attr:meta])* $vis:vis $name:ident, $t:ty, $init:expr) => {
        {
            #[inline]
            fn __init() -> $t { $init }

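            // Exactly one of the three `__KEY` statics inside `__getit` below
            // is compiled in, selected by `cfg`: the plain-static
            // implementation for wasm32 without atomics, the
            // `#[thread_local]`-based fast implementation when the target
            // supports it, and the OS-based fallback otherwise.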
            unsafe fn __getit() -> $crate::option::Option<&'static $t> {
                #[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
                static __KEY: $crate::thread::__StaticLocalKeyInner<$t> =
                    $crate::thread::__StaticLocalKeyInner::new();

                #[thread_local]
                #[cfg(all(
                    target_thread_local,
                    not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
                ))]
                static __KEY: $crate::thread::__FastLocalKeyInner<$t> =
                    $crate::thread::__FastLocalKeyInner::new();

                #[cfg(all(
                    not(target_thread_local),
                    not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
                ))]
                static __KEY: $crate::thread::__OsLocalKeyInner<$t> =
                    $crate::thread::__OsLocalKeyInner::new();

                __KEY.get(__init)
            }

            unsafe {
                $crate::thread::LocalKey::new(__getit)
            }
        }
    };
    ($(#[$attr:meta])* $vis:vis $name:ident, $t:ty, $init:expr) => {
        $(#[$attr])* $vis const $name: $crate::thread::LocalKey<$t> =
            $crate::__thread_local_inner!(@key $(#[$attr])* $vis $name, $t, $init);
    }
}

/// An error returned by [`LocalKey::try_with`](struct.LocalKey.html#method.try_with).
#[stable(feature = "thread_local_try_with", since = "1.26.0")]
pub struct AccessError {
    _private: (),
}

#[stable(feature = "thread_local_try_with", since = "1.26.0")]
impl fmt::Debug for AccessError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("AccessError").finish()
    }
}

#[stable(feature = "thread_local_try_with", since = "1.26.0")]
impl fmt::Display for AccessError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt("already destroyed", f)
    }
}

impl<T: 'static> LocalKey<T> {
    #[doc(hidden)]
    #[unstable(feature = "thread_local_internals",
               reason = "recently added to create a key",
               issue = "0")]
    pub const unsafe fn new(inner: unsafe fn() -> Option<&'static T>) -> LocalKey<T> {
        LocalKey {
            inner,
        }
    }

    /// Acquires a reference to the value in this TLS key.
    ///
    /// This will lazily initialize the value if this thread has not referenced
    /// this key yet.
    ///
    /// # Panics
    ///
    /// This function will `panic!()` if the key currently has its
    /// destructor running, and it **may** panic if the destructor has
    /// previously been run for this thread.
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn with<F, R>(&'static self, f: F) -> R
        where F: FnOnce(&T) -> R {
        self.try_with(f).expect("cannot access a TLS value during or \
                                 after it is destroyed")
    }

    /// Acquires a reference to the value in this TLS key.
    ///
    /// This will lazily initialize the value if this thread has not referenced
    /// this key yet. If the key has been destroyed (which may happen if this is called
    /// in a destructor), this function will return an [`AccessError`](struct.AccessError.html).
    ///
    /// # Panics
    ///
    /// This function will still `panic!()` if the key is uninitialized and the
    /// key's initializer panics.
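    ///
    /// # Examples
    ///
    /// A minimal sketch of handling the fallible path (the `COUNTER` key below
    /// is purely illustrative, not an item provided by the standard library):
    ///
    /// ```
    /// use std::cell::Cell;
    ///
    /// thread_local!(static COUNTER: Cell<u32> = Cell::new(0));
    ///
    /// // Unlike `with`, `try_with` reports an `AccessError` instead of
    /// // panicking if the value has already been destroyed, e.g. when it is
    /// // called from another thread-local's destructor during thread exit.
    /// let result = COUNTER.try_with(|c| {
    ///     c.set(c.get() + 1);
    ///     c.get()
    /// });
    /// assert_eq!(result.unwrap(), 1);
    /// ```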
    #[stable(feature = "thread_local_try_with", since = "1.26.0")]
    pub fn try_with<F, R>(&'static self, f: F) -> Result<R, AccessError>
    where
        F: FnOnce(&T) -> R,
    {
        unsafe {
            let thread_local = (self.inner)().ok_or(AccessError {
                _private: (),
            })?;
            Ok(f(thread_local))
        }
    }
}

mod lazy {
    use crate::cell::UnsafeCell;
    use crate::mem;
    use crate::hint;

    pub struct LazyKeyInner<T> {
        inner: UnsafeCell<Option<T>>,
    }

    impl<T> LazyKeyInner<T> {
        pub const fn new() -> LazyKeyInner<T> {
            LazyKeyInner {
                inner: UnsafeCell::new(None),
            }
        }

        pub unsafe fn get(&self) -> Option<&'static T> {
            (*self.inner.get()).as_ref()
        }

        pub unsafe fn initialize<F: FnOnce() -> T>(&self, init: F) -> &'static T {
            // Execute the initialization up front, *then* move it into our slot,
            // just in case initialization fails.
            let value = init();
            let ptr = self.inner.get();

            // Note that this can in theory just be `*ptr = Some(value)`, but
            // the compiler will currently codegen that pattern with something like:
            //
            //      ptr::drop_in_place(ptr)
            //      ptr::write(ptr, Some(value))
            //
            // Due to this pattern it's possible for the destructor of the value in
            // `ptr` (e.g., if this is being recursively initialized) to re-access
            // TLS, in which case there will be a `&` and `&mut` pointer to the same
            // value (an aliasing violation). To avoid setting the "I'm running a
            // destructor" flag we just use `mem::replace` which should sequence the
            // operations a little differently and make this safe to call.
            mem::replace(&mut *ptr, Some(value));

            // After storing `Some` we want to get a reference to the contents of
            // what we just stored. While we could use `unwrap` here and it should
            // always work it empirically doesn't seem to always get optimized away,
            // which means that using something like `try_with` can pull in
            // panicking code and cause a large size bloat.
            match *ptr {
                Some(ref x) => x,
                None => hint::unreachable_unchecked(),
            }
        }

        #[allow(unused)]
        pub unsafe fn take(&mut self) -> Option<T> {
            (*self.inner.get()).take()
        }
    }
}

/// On some platforms like wasm32 there are no threads, so there is no need to
/// generate thread locals and we can instead just use plain statics!
#[doc(hidden)]
#[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
pub mod statik {
    use super::lazy::LazyKeyInner;
    use crate::fmt;

    pub struct Key<T> {
        inner: LazyKeyInner<T>,
    }

    unsafe impl<T> Sync for Key<T> { }

    impl<T> fmt::Debug for Key<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.pad("Key { .. }")
        }
    }

    impl<T> Key<T> {
        pub const fn new() -> Key<T> {
            Key {
                inner: LazyKeyInner::new(),
            }
        }

        pub unsafe fn get(&self, init: fn() -> T) -> Option<&'static T> {
            let value = match self.inner.get() {
                Some(ref value) => value,
                None => self.inner.initialize(init),
            };
            Some(value)
        }
    }
}

#[doc(hidden)]
#[cfg(target_thread_local)]
pub mod fast {
    use super::lazy::LazyKeyInner;
    use crate::cell::Cell;
    use crate::fmt;
    use crate::mem;
    use crate::sys::fast_thread_local::register_dtor;

    #[derive(Copy, Clone)]
    enum DtorState {
        Unregistered,
        Registered,
        RunningOrHasRun,
    }

    // This data structure has been carefully constructed so that the fast path
    // only contains one branch on x86. That optimization is necessary to avoid
    // duplicated tls lookups on OSX.
    //
    // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
    pub struct Key<T> {
        // If `LazyKeyInner::get` returns `None`, that indicates either:
        //   * The value has never been initialized
        //   * The value is being recursively initialized
        //   * The value has already been destroyed or is being destroyed
        // To determine which kind of `None`, check `dtor_state`.
        //
        // This is very optimizer friendly for the fast path - initialized but
        // not yet dropped.
        inner: LazyKeyInner<T>,

        // Metadata to keep track of the state of the destructor. Remember that
        // this variable is thread-local, not global.
        dtor_state: Cell<DtorState>,
    }

    impl<T> fmt::Debug for Key<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.pad("Key { .. }")
        }
    }

    impl<T> Key<T> {
        pub const fn new() -> Key<T> {
            Key {
                inner: LazyKeyInner::new(),
                dtor_state: Cell::new(DtorState::Unregistered),
            }
        }

        pub unsafe fn get<F: FnOnce() -> T>(&self, init: F) -> Option<&'static T> {
            match self.inner.get() {
                Some(val) => Some(val),
                None => self.try_initialize(init),
            }
        }

        // `try_initialize` is only called once per fast thread local variable,
        // except in corner cases where thread_local dtors reference other
        // thread_local's, or it is being recursively initialized.
        //
        // Macos: Inlining this function can cause two `tlv_get_addr` calls to
        // be performed for every call to `Key::get`. The #[cold] hint makes
        // that less likely.
        // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
        #[cold]
        unsafe fn try_initialize<F: FnOnce() -> T>(&self, init: F) -> Option<&'static T> {
            if !mem::needs_drop::<T>() || self.try_register_dtor() {
                Some(self.inner.initialize(init))
            } else {
                None
            }
        }

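        // `dtor_state` transitions, in summary: `Unregistered` becomes
        // `Registered` in `try_register_dtor` the first time a `T` that needs
        // drop is initialized on this thread, and `Registered` becomes
        // `RunningOrHasRun` in `destroy_value` when the thread exits. A key in
        // the `RunningOrHasRun` state refuses to register again, so `get`
        // returns `None` from then on.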
        // `try_register_dtor` is only called once per fast thread local
        // variable, except in corner cases where thread_local dtors reference
        // other thread_local's, or it is being recursively initialized.
        unsafe fn try_register_dtor(&self) -> bool {
            match self.dtor_state.get() {
                DtorState::Unregistered => {
                    // dtor registration happens before initialization.
                    register_dtor(self as *const _ as *mut u8,
                                  destroy_value::<T>);
                    self.dtor_state.set(DtorState::Registered);
                    true
                }
                DtorState::Registered => {
                    // recursively initialized
                    true
                }
                DtorState::RunningOrHasRun => {
                    false
                }
            }
        }
    }

    unsafe extern fn destroy_value<T>(ptr: *mut u8) {
        let ptr = ptr as *mut Key<T>;

        // Right before we run the user destructor be sure to set the
        // `Option<T>` to `None`, and `dtor_state` to `RunningOrHasRun`. This
        // causes future calls to `get` to run `try_initialize` again,
        // which will now fail, and return `None`.
        let value = (*ptr).inner.take();
        (*ptr).dtor_state.set(DtorState::RunningOrHasRun);
        drop(value);
    }
}

#[doc(hidden)]
pub mod os {
    use super::lazy::LazyKeyInner;
    use crate::cell::Cell;
    use crate::fmt;
    use crate::marker;
    use crate::ptr;
    use crate::sys_common::thread_local::StaticKey as OsStaticKey;

    pub struct Key<T> {
        // OS-TLS key that we'll use to key off.
        os: OsStaticKey,
        marker: marker::PhantomData<Cell<T>>,
    }

    impl<T> fmt::Debug for Key<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.pad("Key { .. }")
        }
    }

    unsafe impl<T> Sync for Key<T> { }

    struct Value<T: 'static> {
        inner: LazyKeyInner<T>,
        key: &'static Key<T>,
    }

    impl<T: 'static> Key<T> {
        pub const fn new() -> Key<T> {
            Key {
                os: OsStaticKey::new(Some(destroy_value::<T>)),
                marker: marker::PhantomData
            }
        }

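        // The pointer stored in the OS key encodes three states: null means
        // this thread has not yet allocated its `Value<T>`, the sentinel value
        // `1` means the destructor is running (or has run) so `get` must
        // return `None`, and any other value is a pointer to this thread's
        // heap-allocated `Value<T>`.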
        pub unsafe fn get(&'static self, init: fn() -> T) -> Option<&'static T> {
            let ptr = self.os.get() as *mut Value<T>;
            if ptr as usize > 1 {
                match (*ptr).inner.get() {
                    Some(ref value) => return Some(value),
                    None => {},
                }
            }
            self.try_initialize(init)
        }

        // `try_initialize` is only called once per os thread local variable,
        // except in corner cases where thread_local dtors reference other
        // thread_local's, or it is being recursively initialized.
        unsafe fn try_initialize(&'static self, init: fn() -> T) -> Option<&'static T> {
            let ptr = self.os.get() as *mut Value<T>;
            if ptr as usize == 1 {
                // destructor is running
                return None
            }

            let ptr = if ptr.is_null() {
                // If the lookup returned null, we haven't initialized our own
                // local copy, so do that now.
                let ptr: Box<Value<T>> = box Value {
                    inner: LazyKeyInner::new(),
                    key: self,
                };
                let ptr = Box::into_raw(ptr);
                self.os.set(ptr as *mut u8);
                ptr
            } else {
                // recursive initialization
                ptr
            };

            Some((*ptr).inner.initialize(init))
        }
    }

    unsafe extern fn destroy_value<T: 'static>(ptr: *mut u8) {
        // The OS TLS ensures that this key contains a NULL value when this
        // destructor starts to run. We set it back to a sentinel value of 1 to
        // ensure that any future calls to `get` for this thread will return
        // `None`.
        //
        // Note that to prevent an infinite loop we reset it back to null right
        // before we return from the destructor ourselves.
        let ptr = Box::from_raw(ptr as *mut Value<T>);
        let key = ptr.key;
        key.os.set(1 as *mut u8);
        drop(ptr);
        key.os.set(ptr::null_mut());
    }
}

#[cfg(all(test, not(target_os = "emscripten")))]
mod tests {
    use crate::sync::mpsc::{channel, Sender};
    use crate::cell::{Cell, UnsafeCell};
    use crate::thread;

    struct Foo(Sender<()>);

    impl Drop for Foo {
        fn drop(&mut self) {
            let Foo(ref s) = *self;
            s.send(()).unwrap();
        }
    }

    #[test]
    fn smoke_no_dtor() {
        thread_local!(static FOO: Cell<i32> = Cell::new(1));

        FOO.with(|f| {
            assert_eq!(f.get(), 1);
            f.set(2);
        });
        let (tx, rx) = channel();
        let _t = thread::spawn(move|| {
            FOO.with(|f| {
                assert_eq!(f.get(), 1);
            });
            tx.send(()).unwrap();
        });
        rx.recv().unwrap();

        FOO.with(|f| {
            assert_eq!(f.get(), 2);
        });
    }

    #[test]
    fn states() {
        struct Foo;
        impl Drop for Foo {
            fn drop(&mut self) {
                assert!(FOO.try_with(|_| ()).is_err());
            }
        }
        thread_local!(static FOO: Foo = Foo);

        thread::spawn(|| {
            assert!(FOO.try_with(|_| ()).is_ok());
        }).join().ok().expect("thread panicked");
    }

    #[test]
    fn smoke_dtor() {
        thread_local!(static FOO: UnsafeCell<Option<Foo>> = UnsafeCell::new(None));

        let (tx, rx) = channel();
        let _t = thread::spawn(move|| unsafe {
            let mut tx = Some(tx);
            FOO.with(|f| {
                *f.get() = Some(Foo(tx.take().unwrap()));
            });
        });
        rx.recv().unwrap();
    }

    #[test]
    fn circular() {
        struct S1;
        struct S2;
        thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));
        thread_local!(static K2: UnsafeCell<Option<S2>> = UnsafeCell::new(None));
        static mut HITS: u32 = 0;

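        // Expected sequence: the `S1` dropped below bumps HITS to 1 and stores
        // an `S2` in `K2`; at thread exit `K2`'s destructor drops that `S2`
        // (HITS == 2), which stores an `S1` in `K1`; finally `K1`'s destructor
        // drops that `S1` (HITS == 3), at which point `K2` is already gone.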
        impl Drop for S1 {
            fn drop(&mut self) {
                unsafe {
                    HITS += 1;
                    if K2.try_with(|_| ()).is_err() {
                        assert_eq!(HITS, 3);
                    } else {
                        if HITS == 1 {
                            K2.with(|s| *s.get() = Some(S2));
                        } else {
                            assert_eq!(HITS, 3);
                        }
                    }
                }
            }
        }
        impl Drop for S2 {
            fn drop(&mut self) {
                unsafe {
                    HITS += 1;
                    assert!(K1.try_with(|_| ()).is_ok());
                    assert_eq!(HITS, 2);
                    K1.with(|s| *s.get() = Some(S1));
                }
            }
        }

        thread::spawn(move|| {
            drop(S1);
        }).join().ok().expect("thread panicked");
    }

    #[test]
    fn self_referential() {
        struct S1;
        thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));

        impl Drop for S1 {
            fn drop(&mut self) {
                assert!(K1.try_with(|_| ()).is_err());
            }
        }

        thread::spawn(move|| unsafe {
            K1.with(|s| *s.get() = Some(S1));
        }).join().ok().expect("thread panicked");
    }

    // Note that this test will deadlock if TLS destructors aren't run (this
    // requires the destructor to be run to pass the test).
    #[test]
    fn dtors_in_dtors_in_dtors() {
        struct S1(Sender<()>);
        thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));
        thread_local!(static K2: UnsafeCell<Option<Foo>> = UnsafeCell::new(None));

        impl Drop for S1 {
            fn drop(&mut self) {
                let S1(ref tx) = *self;
                unsafe {
                    let _ = K2.try_with(|s| *s.get() = Some(Foo(tx.clone())));
                }
            }
        }

        let (tx, rx) = channel();
        let _t = thread::spawn(move|| unsafe {
            let mut tx = Some(tx);
            K1.with(|s| *s.get() = Some(S1(tx.take().unwrap())));
        });
        rx.recv().unwrap();
    }
}

#[cfg(test)]
mod dynamic_tests {
    use crate::cell::RefCell;
    use crate::collections::HashMap;

    #[test]
    fn smoke() {
        fn square(i: i32) -> i32 { i * i }
        thread_local!(static FOO: i32 = square(3));

        FOO.with(|f| {
            assert_eq!(*f, 9);
        });
    }

    #[test]
    fn hashmap() {
        fn map() -> RefCell<HashMap<i32, i32>> {
            let mut m = HashMap::new();
            m.insert(1, 2);
            RefCell::new(m)
        }
        thread_local!(static FOO: RefCell<HashMap<i32, i32>> = map());

        FOO.with(|map| {
            assert_eq!(map.borrow()[&1], 2);
        });
    }

    #[test]
    fn refcell_vec() {
        thread_local!(static FOO: RefCell<Vec<u32>> = RefCell::new(vec![1, 2, 3]));

        FOO.with(|vec| {
            assert_eq!(vec.borrow().len(), 3);
            vec.borrow_mut().push(4);
            assert_eq!(vec.borrow()[3], 4);
        });
    }
}