1 //! Thread local storage
3 #![unstable(feature = "thread_local_internals", issue = "none")]
5 use crate::error
::Error
;
8 /// A thread local storage key which owns its contents.
10 /// This key uses the fastest possible implementation available to it for the
11 /// target platform. It is instantiated with the [`thread_local!`] macro and the
12 /// primary method is the [`with`] method.
14 /// The [`with`] method yields a reference to the contained value which cannot be
15 /// sent across threads or escape the given closure.
17 /// # Initialization and Destruction
19 /// Initialization is dynamically performed on the first call to [`with`]
20 /// within a thread, and values that implement [`Drop`] get destructed when a
21 /// thread exits. Some caveats apply, which are explained below.
23 /// A `LocalKey`'s initializer cannot recursively depend on itself, and using
24 /// a `LocalKey` in this way will cause the initializer to infinitely recurse
25 /// on the first call to `with`.
30 /// use std::cell::RefCell;
33 /// thread_local!(static FOO: RefCell<u32> = RefCell::new(1));
36 /// assert_eq!(*f.borrow(), 1);
37 /// *f.borrow_mut() = 2;
40 /// // each thread starts out with the initial value of 1
41 /// let t = thread::spawn(move|| {
43 /// assert_eq!(*f.borrow(), 1);
44 /// *f.borrow_mut() = 3;
48 /// // wait for the thread to complete and bail out on panic
49 /// t.join().unwrap();
51 /// // we retain our original value of 2 despite the child thread
53 /// assert_eq!(*f.borrow(), 2);
57 /// # Platform-specific behavior
59 /// Note that a "best effort" is made to ensure that destructors for types
60 /// stored in thread local storage are run, but not all platforms can guarantee
61 /// that destructors will be run for all types in thread local storage. For
62 /// example, there are a number of known caveats where destructors are not run:
64 /// 1. On Unix systems when pthread-based TLS is being used, destructors will
65 /// not be run for TLS values on the main thread when it exits. Note that the
66 /// application will exit immediately after the main thread exits as well.
67 /// 2. On all platforms it's possible for TLS to re-initialize other TLS slots
68 /// during destruction. Some platforms ensure that this cannot happen
69 /// infinitely by preventing re-initialization of any slot that has been
70 /// destroyed, but not all platforms have this guard. Those platforms that do
71 /// not guard typically have a synthetic limit after which point no more
72 /// destructors are run.
74 /// [`with`]: LocalKey::with
75 #[stable(feature = "rust1", since = "1.0.0")]
76 pub struct LocalKey
<T
: '
static> {
77 // This outer `LocalKey<T>` type is what's going to be stored in statics,
78 // but actual data inside will sometimes be tagged with #[thread_local].
79 // It's not valid for a true static to reference a #[thread_local] static,
80 // so we get around that by exposing an accessor through a layer of function
81 // indirection (this thunk).
83 // Note that the thunk is itself unsafe because the returned lifetime of the
84 // slot where data lives, `'static`, is not actually valid. The lifetime
85 // here is actually slightly shorter than the currently running thread!
87 // Although this is an extra layer of indirection, it should in theory be
88 // trivially devirtualizable by LLVM because the value of `inner` never
89 // changes and the constant should be readonly within a crate. This mainly
90 // only runs into problems when TLS statics are exported across crates.
91 inner
: unsafe fn() -> Option
<&'
static T
>,
94 #[stable(feature = "std_debug", since = "1.16.0")]
95 impl<T
: '
static> fmt
::Debug
for LocalKey
<T
> {
96 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
97 f
.pad("LocalKey { .. }")
101 /// Declare a new thread local storage key of type [`std::thread::LocalKey`].
105 /// The macro wraps any number of static declarations and makes them thread local.
106 /// Publicity and attributes for each static are allowed. Example:
109 /// use std::cell::RefCell;
111 /// pub static FOO: RefCell<u32> = RefCell::new(1);
114 /// static BAR: RefCell<f32> = RefCell::new(1.0);
119 /// See [`LocalKey` documentation][`std::thread::LocalKey`] for more
122 /// [`std::thread::LocalKey`]: crate::thread::LocalKey
124 #[stable(feature = "rust1", since = "1.0.0")]
125 #[allow_internal_unstable(thread_local_internals)]
126 macro_rules
! thread_local
{
127 // empty (base case for the recursion)
130 // process multiple declarations
131 ($
(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr; $($rest:tt)*) => (
132 $
crate::__thread_local_inner
!($
(#[$attr])* $vis $name, $t, $init);
133 $
crate::thread_local
!($
($rest
)*);
136 // handle a single declaration
137 ($
(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr) => (
138 $
crate::__thread_local_inner
!($
(#[$attr])* $vis $name, $t, $init);
143 #[unstable(feature = "thread_local_internals", reason = "should not be necessary", issue = "none")]
145 #[allow_internal_unstable(thread_local_internals, cfg_target_thread_local, thread_local)]
146 #[allow_internal_unsafe]
147 macro_rules
! __thread_local_inner
{
148 (@key $t
:ty
, $init
:expr
) => {
151 fn __init() -> $t { $init }
153 unsafe fn __getit() -> $
crate::option
::Option
<&'
static $t
> {
154 #[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
155 static __KEY
: $
crate::thread
::__StaticLocalKeyInner
<$t
> =
156 $
crate::thread
::__StaticLocalKeyInner
::new();
161 not(all(target_arch
= "wasm32", not(target_feature
= "atomics"))),
163 static __KEY
: $
crate::thread
::__FastLocalKeyInner
<$t
> =
164 $
crate::thread
::__FastLocalKeyInner
::new();
167 not(target_thread_local
),
168 not(all(target_arch
= "wasm32", not(target_feature
= "atomics"))),
170 static __KEY
: $
crate::thread
::__OsLocalKeyInner
<$t
> =
171 $
crate::thread
::__OsLocalKeyInner
::new();
173 // FIXME: remove the #[allow(...)] marker when macros don't
174 // raise warning for missing/extraneous unsafe blocks anymore.
175 // See https://github.com/rust-lang/rust/issues/74838.
176 #[allow(unused_unsafe)]
177 unsafe { __KEY.get(__init) }
181 $
crate::thread
::LocalKey
::new(__getit
)
185 ($
(#[$attr:meta])* $vis:vis $name:ident, $t:ty, $init:expr) => {
186 $
(#[$attr])* $vis const $name: $crate::thread::LocalKey<$t> =
187 $
crate::__thread_local_inner
!(@key $t
, $init
);
191 /// An error returned by [`LocalKey::try_with`](struct.LocalKey.html#method.try_with).
192 #[stable(feature = "thread_local_try_with", since = "1.26.0")]
193 #[derive(Clone, Copy, Eq, PartialEq)]
194 pub struct AccessError
{
198 #[stable(feature = "thread_local_try_with", since = "1.26.0")]
199 impl fmt
::Debug
for AccessError
{
200 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
201 f
.debug_struct("AccessError").finish()
205 #[stable(feature = "thread_local_try_with", since = "1.26.0")]
206 impl fmt
::Display
for AccessError
{
207 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
208 fmt
::Display
::fmt("already destroyed", f
)
212 #[stable(feature = "thread_local_try_with", since = "1.26.0")]
213 impl Error
for AccessError {}
215 impl<T
: '
static> LocalKey
<T
> {
218 feature
= "thread_local_internals",
219 reason
= "recently added to create a key",
222 pub const unsafe fn new(inner
: unsafe fn() -> Option
<&'
static T
>) -> LocalKey
<T
> {
226 /// Acquires a reference to the value in this TLS key.
228 /// This will lazily initialize the value if this thread has not referenced
233 /// This function will `panic!()` if the key currently has its
234 /// destructor running, and it **may** panic if the destructor has
235 /// previously been run for this thread.
236 #[stable(feature = "rust1", since = "1.0.0")]
237 pub fn with
<F
, R
>(&'
static self, f
: F
) -> R
241 self.try_with(f
).expect(
242 "cannot access a Thread Local Storage value \
243 during or after destruction",
247 /// Acquires a reference to the value in this TLS key.
249 /// This will lazily initialize the value if this thread has not referenced
250 /// this key yet. If the key has been destroyed (which may happen if this is called
251 /// in a destructor), this function will return an [`AccessError`](struct.AccessError.html).
255 /// This function will still `panic!()` if the key is uninitialized and the
256 /// key's initializer panics.
257 #[stable(feature = "thread_local_try_with", since = "1.26.0")]
259 pub fn try_with
<F
, R
>(&'
static self, f
: F
) -> Result
<R
, AccessError
>
264 let thread_local
= (self.inner
)().ok_or(AccessError { _private: () }
)?
;
271 use crate::cell
::UnsafeCell
;
/// Lazily-initialized storage cell shared by the per-target TLS key
/// implementations. All accessors are `unsafe`: the `'static` lifetimes
/// they hand out are really "until this thread's TLS is destroyed".
pub struct LazyKeyInner<T> {
    inner: UnsafeCell<Option<T>>,
}

impl<T> LazyKeyInner<T> {
    pub const fn new() -> LazyKeyInner<T> {
        LazyKeyInner { inner: UnsafeCell::new(None) }
    }

    /// Returns a reference to the value if it has been initialized.
    ///
    /// # Safety
    ///
    /// The caller must ensure the returned reference does not outlive the
    /// thread's copy of the slot.
    pub unsafe fn get(&self) -> Option<&'static T> {
        (*self.inner.get()).as_ref()
    }

    /// Runs `init` and stores its result in the slot, returning a reference
    /// to the stored value.
    pub unsafe fn initialize<F: FnOnce() -> T>(&self, init: F) -> &'static T {
        // Execute the initialization up front, *then* move it into our slot,
        // just in case initialization fails.
        let value = init();
        let ptr = self.inner.get();

        // note that this can in theory just be `*ptr = Some(value)`, but due to
        // the compiler will currently codegen that pattern with something like:
        //
        //      ptr::drop_in_place(ptr)
        //      ptr::write(ptr, Some(value))
        //
        // Due to this pattern it's possible for the destructor of the value in
        // `ptr` (e.g., if this is being recursively initialized) to re-access
        // TLS, in which case there will be a `&` and `&mut` pointer to the same
        // value (an aliasing violation). To avoid setting the "I'm running a
        // destructor" flag we just use `mem::replace` which should sequence the
        // operations a little differently and make this safe to call.
        let _ = mem::replace(&mut *ptr, Some(value));

        // After storing `Some` we want to get a reference to the contents of
        // what we just stored. While we could use `unwrap` here and it should
        // always work it empirically doesn't seem to always get optimized away,
        // which means that using something like `try_with` can pull in
        // panicking code and cause a large size bloat.
        match *ptr {
            Some(ref x) => x,
            None => hint::unreachable_unchecked(),
        }
    }

    /// Moves the value out of the slot (used by destructors), leaving `None`.
    pub unsafe fn take(&mut self) -> Option<T> {
        (*self.inner.get()).take()
    }
}
326 /// On some platforms like wasm32 there's no threads, so no need to generate
327 /// thread locals and we can instead just use plain statics!
329 #[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
331 use super::lazy
::LazyKeyInner
;
335 inner
: LazyKeyInner
<T
>,
338 unsafe impl<T
> Sync
for Key
<T
> {}
340 impl<T
> fmt
::Debug
for Key
<T
> {
341 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
347 pub const fn new() -> Key
<T
> {
348 Key { inner: LazyKeyInner::new() }
351 pub unsafe fn get(&self, init
: fn() -> T
) -> Option
<&'
static T
> {
352 let value
= match self.inner
.get() {
353 Some(ref value
) => value
,
354 None
=> self.inner
.initialize(init
),
362 #[cfg(target_thread_local)]
364 use super::lazy
::LazyKeyInner
;
365 use crate::cell
::Cell
;
368 use crate::sys
::thread_local_dtor
::register_dtor
;
370 #[derive(Copy, Clone)]
377 // This data structure has been carefully constructed so that the fast path
378 // only contains one branch on x86. That optimization is necessary to avoid
379 // duplicated tls lookups on OSX.
381 // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
383 // If `LazyKeyInner::get` returns `None`, that indicates either:
384 // * The value has never been initialized
385 // * The value is being recursively initialized
386 // * The value has already been destroyed or is being destroyed
387 // To determine which kind of `None`, check `dtor_state`.
389 // This is very optimizer friendly for the fast path - initialized but
391 inner
: LazyKeyInner
<T
>,
393 // Metadata to keep track of the state of the destructor. Remember that
394 // this variable is thread-local, not global.
395 dtor_state
: Cell
<DtorState
>,
398 impl<T
> fmt
::Debug
for Key
<T
> {
399 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
405 pub const fn new() -> Key
<T
> {
406 Key { inner: LazyKeyInner::new(), dtor_state: Cell::new(DtorState::Unregistered) }
409 pub unsafe fn get
<F
: FnOnce() -> T
>(&self, init
: F
) -> Option
<&'
static T
> {
410 match self.inner
.get() {
411 Some(val
) => Some(val
),
412 None
=> self.try_initialize(init
),
416 // `try_initialize` is only called once per fast thread local variable,
417 // except in corner cases where thread_local dtors reference other
418 // thread_local's, or it is being recursively initialized.
420 // Macos: Inlining this function can cause two `tlv_get_addr` calls to
421 // be performed for every call to `Key::get`. The #[cold] hint makes
423 // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
425 unsafe fn try_initialize
<F
: FnOnce() -> T
>(&self, init
: F
) -> Option
<&'
static T
> {
426 if !mem
::needs_drop
::<T
>() || self.try_register_dtor() {
427 Some(self.inner
.initialize(init
))
433 // `try_register_dtor` is only called once per fast thread local
434 // variable, except in corner cases where thread_local dtors reference
435 // other thread_local's, or it is being recursively initialized.
436 unsafe fn try_register_dtor(&self) -> bool
{
437 match self.dtor_state
.get() {
438 DtorState
::Unregistered
=> {
439 // dtor registration happens before initialization.
440 register_dtor(self as *const _
as *mut u8, destroy_value
::<T
>);
441 self.dtor_state
.set(DtorState
::Registered
);
444 DtorState
::Registered
=> {
445 // recursively initialized
448 DtorState
::RunningOrHasRun
=> false,
453 unsafe extern "C" fn destroy_value
<T
>(ptr
: *mut u8) {
454 let ptr
= ptr
as *mut Key
<T
>;
456 // Right before we run the user destructor be sure to set the
457 // `Option<T>` to `None`, and `dtor_state` to `RunningOrHasRun`. This
458 // causes future calls to `get` to run `try_initialize_drop` again,
459 // which will now fail, and return `None`.
460 let value
= (*ptr
).inner
.take();
461 (*ptr
).dtor_state
.set(DtorState
::RunningOrHasRun
);
468 use super::lazy
::LazyKeyInner
;
469 use crate::cell
::Cell
;
473 use crate::sys_common
::thread_local_key
::StaticKey
as OsStaticKey
;
476 // OS-TLS key that we'll use to key off.
478 marker
: marker
::PhantomData
<Cell
<T
>>,
481 impl<T
> fmt
::Debug
for Key
<T
> {
482 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
487 unsafe impl<T
> Sync
for Key
<T
> {}
489 struct Value
<T
: '
static> {
490 inner
: LazyKeyInner
<T
>,
491 key
: &'
static Key
<T
>,
494 impl<T
: '
static> Key
<T
> {
495 pub const fn new() -> Key
<T
> {
496 Key { os: OsStaticKey::new(Some(destroy_value::<T>)), marker: marker::PhantomData }
499 pub unsafe fn get(&'
static self, init
: fn() -> T
) -> Option
<&'
static T
> {
500 let ptr
= self.os
.get() as *mut Value
<T
>;
501 if ptr
as usize > 1 {
502 if let Some(ref value
) = (*ptr
).inner
.get() {
506 self.try_initialize(init
)
509 // `try_initialize` is only called once per os thread local variable,
510 // except in corner cases where thread_local dtors reference other
511 // thread_local's, or it is being recursively initialized.
512 unsafe fn try_initialize(&'
static self, init
: fn() -> T
) -> Option
<&'
static T
> {
513 let ptr
= self.os
.get() as *mut Value
<T
>;
514 if ptr
as usize == 1 {
515 // destructor is running
519 let ptr
= if ptr
.is_null() {
520 // If the lookup returned null, we haven't initialized our own
521 // local copy, so do that now.
522 let ptr
: Box
<Value
<T
>> = box Value { inner: LazyKeyInner::new(), key: self }
;
523 let ptr
= Box
::into_raw(ptr
);
524 self.os
.set(ptr
as *mut u8);
527 // recursive initialization
531 Some((*ptr
).inner
.initialize(init
))
535 unsafe extern "C" fn destroy_value
<T
: '
static>(ptr
: *mut u8) {
536 // The OS TLS ensures that this key contains a NULL value when this
537 // destructor starts to run. We set it back to a sentinel value of 1 to
538 // ensure that any future calls to `get` for this thread will return
541 // Note that to prevent an infinite loop we reset it back to null right
542 // before we return from the destructor ourselves.
543 let ptr
= Box
::from_raw(ptr
as *mut Value
<T
>);
545 key
.os
.set(1 as *mut u8);
547 key
.os
.set(ptr
::null_mut());
#[cfg(all(test, not(target_os = "emscripten")))]
mod tests {
    use crate::cell::{Cell, UnsafeCell};
    use crate::sync::mpsc::{channel, Sender};
    use crate::thread;

    // Signals on a channel when dropped, so tests can observe TLS
    // destructor execution from another thread.
    struct Foo(Sender<()>);

    impl Drop for Foo {
        fn drop(&mut self) {
            let Foo(ref s) = *self;
            s.send(()).unwrap();
        }
    }

    #[test]
    fn smoke_no_dtor() {
        thread_local!(static FOO: Cell<i32> = Cell::new(1));

        FOO.with(|f| {
            assert_eq!(f.get(), 1);
            f.set(2);
        });
        let (tx, rx) = channel();
        let _t = thread::spawn(move || {
            // each thread starts from the initial value, not the parent's
            FOO.with(|f| {
                assert_eq!(f.get(), 1);
            });
            tx.send(()).unwrap();
        });
        rx.recv().unwrap();

        FOO.with(|f| {
            assert_eq!(f.get(), 2);
        });
    }

    #[test]
    fn states() {
        struct Foo;
        impl Drop for Foo {
            fn drop(&mut self) {
                // during destruction the key must report itself destroyed
                assert!(FOO.try_with(|_| ()).is_err());
            }
        }
        thread_local!(static FOO: Foo = Foo);

        thread::spawn(|| {
            assert!(FOO.try_with(|_| ()).is_ok());
        })
        .join()
        .expect("thread panicked");
    }

    #[test]
    fn smoke_dtor() {
        thread_local!(static FOO: UnsafeCell<Option<Foo>> = UnsafeCell::new(None));

        let (tx, rx) = channel();
        let _t = thread::spawn(move || unsafe {
            let mut tx = Some(tx);
            FOO.with(|f| {
                *f.get() = Some(Foo(tx.take().unwrap()));
            });
        });
        // deadlocks here unless the dtor ran and sent on the channel
        rx.recv().unwrap();
    }

    #[test]
    fn circular() {
        struct S1;
        struct S2;
        thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));
        thread_local!(static K2: UnsafeCell<Option<S2>> = UnsafeCell::new(None));
        static mut HITS: u32 = 0;

        impl Drop for S1 {
            fn drop(&mut self) {
                unsafe {
                    HITS += 1;
                    if K2.try_with(|_| ()).is_err() {
                        assert_eq!(HITS, 3);
                    } else {
                        if HITS == 1 {
                            K2.with(|s| *s.get() = Some(S2));
                        } else {
                            assert_eq!(HITS, 3);
                        }
                    }
                }
            }
        }
        impl Drop for S2 {
            fn drop(&mut self) {
                unsafe {
                    HITS += 1;
                    assert!(K1.try_with(|_| ()).is_ok());
                    assert_eq!(HITS, 2);
                    K1.with(|s| *s.get() = Some(S1));
                }
            }
        }

        thread::spawn(move || {
            drop(S1);
        })
        .join()
        .expect("thread panicked");
    }

    #[test]
    fn self_referential() {
        struct S1;
        thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));

        impl Drop for S1 {
            fn drop(&mut self) {
                assert!(K1.try_with(|_| ()).is_err());
            }
        }

        thread::spawn(move || unsafe {
            K1.with(|s| *s.get() = Some(S1));
        })
        .join()
        .expect("thread panicked");
    }

    // Note that this test will deadlock if TLS destructors aren't run (this
    // requires the destructor to be run to pass the test).
    #[test]
    fn dtors_in_dtors_in_dtors() {
        struct S1(Sender<()>);
        thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));
        thread_local!(static K2: UnsafeCell<Option<Foo>> = UnsafeCell::new(None));

        impl Drop for S1 {
            fn drop(&mut self) {
                let S1(ref tx) = *self;
                unsafe {
                    let _ = K2.try_with(|s| *s.get() = Some(Foo(tx.clone())));
                }
            }
        }

        let (tx, rx) = channel();
        let _t = thread::spawn(move || unsafe {
            let mut tx = Some(tx);
            K1.with(|s| *s.get() = Some(S1(tx.take().unwrap())));
        });
        rx.recv().unwrap();
    }
}
#[cfg(all(test, not(target_os = "emscripten")))]
mod dynamic_tests {
    use crate::cell::RefCell;
    use crate::collections::HashMap;

    #[test]
    fn smoke() {
        // initializer may call arbitrary functions
        fn square(i: i32) -> i32 {
            i * i
        }
        thread_local!(static FOO: i32 = square(3));

        FOO.with(|f| {
            assert_eq!(*f, 9);
        });
    }

    #[test]
    fn hashmap() {
        fn map() -> RefCell<HashMap<i32, i32>> {
            let mut m = HashMap::new();
            m.insert(1, 2);
            RefCell::new(m)
        }
        thread_local!(static FOO: RefCell<HashMap<i32, i32>> = map());

        FOO.with(|map| {
            assert_eq!(map.borrow()[&1], 2);
        });
    }

    #[test]
    fn refcell_vec() {
        thread_local!(static FOO: RefCell<Vec<u32>> = RefCell::new(vec![1, 2, 3]));

        FOO.with(|vec| {
            assert_eq!(vec.borrow().len(), 3);
            vec.borrow_mut().push(4);
            assert_eq!(vec.borrow()[3], 4);
        });
    }
}