]> git.proxmox.com Git - rustc.git/blob - library/std/src/thread/local.rs
New upstream version 1.48.0~beta.8+dfsg1
[rustc.git] / library / std / src / thread / local.rs
1 //! Thread local storage
2
3 #![unstable(feature = "thread_local_internals", issue = "none")]
4
5 #[cfg(all(test, not(target_os = "emscripten")))]
6 mod tests;
7
8 #[cfg(test)]
9 mod dynamic_tests;
10
11 use crate::error::Error;
12 use crate::fmt;
13
/// A thread local storage key which owns its contents.
///
/// This key uses the fastest possible implementation available to it for the
/// target platform. It is instantiated with the [`thread_local!`] macro and the
/// primary method is the [`with`] method.
///
/// The [`with`] method yields a reference to the contained value which cannot be
/// sent across threads or escape the given closure.
///
/// # Initialization and Destruction
///
/// Initialization is dynamically performed on the first call to [`with`]
/// within a thread, and values that implement [`Drop`] get destructed when a
/// thread exits. Some caveats apply, which are explained below.
///
/// A `LocalKey`'s initializer cannot recursively depend on itself, and using
/// a `LocalKey` in this way will cause the initializer to infinitely recurse
/// on the first call to `with`.
///
/// # Examples
///
/// ```
/// use std::cell::RefCell;
/// use std::thread;
///
/// thread_local!(static FOO: RefCell<u32> = RefCell::new(1));
///
/// FOO.with(|f| {
///     assert_eq!(*f.borrow(), 1);
///     *f.borrow_mut() = 2;
/// });
///
/// // each thread starts out with the initial value of 1
/// let t = thread::spawn(move|| {
///     FOO.with(|f| {
///         assert_eq!(*f.borrow(), 1);
///         *f.borrow_mut() = 3;
///     });
/// });
///
/// // wait for the thread to complete and bail out on panic
/// t.join().unwrap();
///
/// // we retain our original value of 2 despite the child thread
/// FOO.with(|f| {
///     assert_eq!(*f.borrow(), 2);
/// });
/// ```
///
/// # Platform-specific behavior
///
/// Note that a "best effort" is made to ensure that destructors for types
/// stored in thread local storage are run, but not all platforms can guarantee
/// that destructors will be run for all types in thread local storage. For
/// example, there are a number of known caveats where destructors are not run:
///
/// 1. On Unix systems when pthread-based TLS is being used, destructors will
///    not be run for TLS values on the main thread when it exits. Note that the
///    application will exit immediately after the main thread exits as well.
/// 2. On all platforms it's possible for TLS to re-initialize other TLS slots
///    during destruction. Some platforms ensure that this cannot happen
///    infinitely by preventing re-initialization of any slot that has been
///    destroyed, but not all platforms have this guard. Those platforms that do
///    not guard typically have a synthetic limit after which point no more
///    destructors are run.
///
/// [`with`]: LocalKey::with
#[stable(feature = "rust1", since = "1.0.0")]
pub struct LocalKey<T: 'static> {
    // This outer `LocalKey<T>` type is what's going to be stored in statics,
    // but actual data inside will sometimes be tagged with #[thread_local].
    // It's not valid for a true static to reference a #[thread_local] static,
    // so we get around that by exposing an accessor through a layer of function
    // indirection (this thunk).
    //
    // Note that the thunk is itself unsafe because the returned lifetime of the
    // slot where data lives, `'static`, is not actually valid. The lifetime
    // here is actually slightly shorter than the currently running thread!
    //
    // Although this is an extra layer of indirection, it should in theory be
    // trivially devirtualizable by LLVM because the value of `inner` never
    // changes and the constant should be readonly within a crate. This mainly
    // only runs into problems when TLS statics are exported across crates.
    //
    // Returns `None` when the slot is unavailable, i.e. during or after the
    // destructor of the value has run for this thread.
    inner: unsafe fn() -> Option<&'static T>,
}
99
100 #[stable(feature = "std_debug", since = "1.16.0")]
101 impl<T: 'static> fmt::Debug for LocalKey<T> {
102 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
103 f.pad("LocalKey { .. }")
104 }
105 }
106
/// Declare a new thread local storage key of type [`std::thread::LocalKey`].
///
/// # Syntax
///
/// The macro wraps any number of static declarations and makes them thread local.
/// Publicity and attributes for each static are allowed. Example:
///
/// ```
/// use std::cell::RefCell;
/// thread_local! {
///     pub static FOO: RefCell<u32> = RefCell::new(1);
///
///     #[allow(unused)]
///     static BAR: RefCell<f32> = RefCell::new(1.0);
/// }
/// # fn main() {}
/// ```
///
/// See [`LocalKey` documentation][`std::thread::LocalKey`] for more
/// information.
///
/// [`std::thread::LocalKey`]: crate::thread::LocalKey
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow_internal_unstable(thread_local_internals)]
macro_rules! thread_local {
    // empty (base case for the recursion)
    () => {};

    // process multiple declarations: expand the first one, then recurse on
    // the remaining token stream
    ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr; $($rest:tt)*) => (
        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
        $crate::thread_local!($($rest)*);
    );

    // handle a single declaration (no trailing semicolon, so it cannot match
    // the multi-declaration arm above)
    ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr) => (
        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
    );
}
147
#[doc(hidden)]
#[unstable(feature = "thread_local_internals", reason = "should not be necessary", issue = "none")]
#[macro_export]
#[allow_internal_unstable(thread_local_internals, cfg_target_thread_local, thread_local)]
#[allow_internal_unsafe]
macro_rules! __thread_local_inner {
    // Expands to the `LocalKey` value itself. Exactly one of the three
    // `__KEY` statics below is compiled in, selected by target cfg:
    //   * wasm32 without atomics: plain static (no real threads exist)
    //   * `#[thread_local]`-capable targets: the "fast" implementation
    //   * everything else: OS-provided TLS (e.g. pthread keys)
    (@key $t:ty, $init:expr) => {
        {
            #[inline]
            fn __init() -> $t { $init }

            unsafe fn __getit() -> $crate::option::Option<&'static $t> {
                #[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
                static __KEY: $crate::thread::__StaticLocalKeyInner<$t> =
                    $crate::thread::__StaticLocalKeyInner::new();

                #[thread_local]
                #[cfg(all(
                    target_thread_local,
                    not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
                ))]
                static __KEY: $crate::thread::__FastLocalKeyInner<$t> =
                    $crate::thread::__FastLocalKeyInner::new();

                #[cfg(all(
                    not(target_thread_local),
                    not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
                ))]
                static __KEY: $crate::thread::__OsLocalKeyInner<$t> =
                    $crate::thread::__OsLocalKeyInner::new();

                // FIXME: remove the #[allow(...)] marker when macros don't
                // raise warning for missing/extraneous unsafe blocks anymore.
                // See https://github.com/rust-lang/rust/issues/74838.
                #[allow(unused_unsafe)]
                unsafe { __KEY.get(__init) }
            }

            unsafe {
                $crate::thread::LocalKey::new(__getit)
            }
        }
    };
    // Entry point used by `thread_local!`: emits the user-visible `const`
    // item and delegates the initializer expression to the `@key` arm.
    ($(#[$attr:meta])* $vis:vis $name:ident, $t:ty, $init:expr) => {
        $(#[$attr])* $vis const $name: $crate::thread::LocalKey<$t> =
            $crate::__thread_local_inner!(@key $t, $init);
    }
}
196
/// An error returned by [`LocalKey::try_with`].
#[stable(feature = "thread_local_try_with", since = "1.26.0")]
#[derive(Clone, Copy, Eq, PartialEq)]
pub struct AccessError {
    // Prevents construction outside this module; the error carries no data.
    _private: (),
}

#[stable(feature = "thread_local_try_with", since = "1.26.0")]
impl fmt::Debug for AccessError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("AccessError").finish()
    }
}

#[stable(feature = "thread_local_try_with", since = "1.26.0")]
impl fmt::Display for AccessError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Going through `Display::fmt` for `str` (rather than `write_str`)
        // keeps the formatter's width/alignment flags working.
        fmt::Display::fmt("already destroyed", f)
    }
}

// No overrides needed: `Display`/`Debug` above provide the description.
#[stable(feature = "thread_local_try_with", since = "1.26.0")]
impl Error for AccessError {}
220
impl<T: 'static> LocalKey<T> {
    #[doc(hidden)]
    #[unstable(
        feature = "thread_local_internals",
        reason = "recently added to create a key",
        issue = "none"
    )]
    #[rustc_const_unstable(feature = "thread_local_internals", issue = "none")]
    // Only called by the `thread_local!` macro expansion. Unsafe because the
    // `'static` lifetime returned by `inner` is a lie: the data actually
    // lives at most as long as the current thread (see the field docs).
    pub const unsafe fn new(inner: unsafe fn() -> Option<&'static T>) -> LocalKey<T> {
        LocalKey { inner }
    }

    /// Acquires a reference to the value in this TLS key.
    ///
    /// This will lazily initialize the value if this thread has not referenced
    /// this key yet.
    ///
    /// # Panics
    ///
    /// This function will `panic!()` if the key currently has its
    /// destructor running, and it **may** panic if the destructor has
    /// previously been run for this thread.
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn with<F, R>(&'static self, f: F) -> R
    where
        F: FnOnce(&T) -> R,
    {
        // Delegates to `try_with` and turns its `AccessError` into a panic.
        self.try_with(f).expect(
            "cannot access a Thread Local Storage value \
             during or after destruction",
        )
    }

    /// Acquires a reference to the value in this TLS key.
    ///
    /// This will lazily initialize the value if this thread has not referenced
    /// this key yet. If the key has been destroyed (which may happen if this is called
    /// in a destructor), this function will return an [`AccessError`].
    ///
    /// # Panics
    ///
    /// This function will still `panic!()` if the key is uninitialized and the
    /// key's initializer panics.
    #[stable(feature = "thread_local_try_with", since = "1.26.0")]
    #[inline]
    pub fn try_with<F, R>(&'static self, f: F) -> Result<R, AccessError>
    where
        F: FnOnce(&T) -> R,
    {
        // SAFETY: the reference handed to `f` cannot escape the closure, so
        // the not-quite-'static lifetime returned by the thunk is never
        // observable by safe code.
        unsafe {
            let thread_local = (self.inner)().ok_or(AccessError { _private: () })?;
            Ok(f(thread_local))
        }
    }
}
276
/// Lazily-initialized `Option<T>` cell shared by all three TLS back-ends.
/// All soundness obligations are pushed onto callers via `unsafe` methods.
mod lazy {
    use crate::cell::UnsafeCell;
    use crate::hint;
    use crate::mem;

    pub struct LazyKeyInner<T> {
        // `None` until `initialize` runs; `take` resets it to `None`.
        inner: UnsafeCell<Option<T>>,
    }

    impl<T> LazyKeyInner<T> {
        pub const fn new() -> LazyKeyInner<T> {
            LazyKeyInner { inner: UnsafeCell::new(None) }
        }

        pub unsafe fn get(&self) -> Option<&'static T> {
            // SAFETY: The caller must ensure no reference is ever handed out to
            // the inner cell nor mutable reference to the Option<T> inside said
            // cell. This makes it safe to hand out a reference, though the
            // lifetime of 'static is itself unsafe, making the get method unsafe.
            unsafe { (*self.inner.get()).as_ref() }
        }

        /// The caller must ensure that no reference is active: this method
        /// needs unique access.
        pub unsafe fn initialize<F: FnOnce() -> T>(&self, init: F) -> &'static T {
            // Execute the initialization up front, *then* move it into our slot,
            // just in case initialization fails.
            let value = init();
            let ptr = self.inner.get();

            // SAFETY:
            //
            // note that this can in theory just be `*ptr = Some(value)`, but
            // the compiler will currently codegen that pattern with something
            // like:
            //
            //      ptr::drop_in_place(ptr)
            //      ptr::write(ptr, Some(value))
            //
            // Due to this pattern it's possible for the destructor of the value in
            // `ptr` (e.g., if this is being recursively initialized) to re-access
            // TLS, in which case there will be a `&` and `&mut` pointer to the same
            // value (an aliasing violation). To avoid setting the "I'm running a
            // destructor" flag we just use `mem::replace` which should sequence the
            // operations a little differently and make this safe to call.
            //
            // The precondition also ensures that we are the only one accessing
            // `self` at the moment so replacing is fine.
            unsafe {
                let _ = mem::replace(&mut *ptr, Some(value));
            }

            // SAFETY: With the call to `mem::replace` it is guaranteed there is
            // a `Some` behind `ptr`, not a `None` so `unreachable_unchecked`
            // will never be reached.
            unsafe {
                // After storing `Some` we want to get a reference to the contents of
                // what we just stored. While we could use `unwrap` here and it should
                // always work it empirically doesn't seem to always get optimized away,
                // which means that using something like `try_with` can pull in
                // panicking code and cause a large size bloat.
                match *ptr {
                    Some(ref x) => x,
                    None => hint::unreachable_unchecked(),
                }
            }
        }

        /// The other methods hand out references while taking &self.
        /// As such, callers of this method must ensure no `&` and `&mut` are
        /// available and used at the same time.
        #[allow(unused)]
        pub unsafe fn take(&mut self) -> Option<T> {
            // SAFETY: See doc comment for this method.
            unsafe { (*self.inner.get()).take() }
        }
    }
}
354
/// On some platforms like wasm32 there's no threads, so no need to generate
/// thread locals and we can instead just use plain statics!
#[doc(hidden)]
#[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
pub mod statik {
    use super::lazy::LazyKeyInner;
    use crate::fmt;

    pub struct Key<T> {
        inner: LazyKeyInner<T>,
    }

    // SAFETY (informal): on this single-threaded target there is no
    // cross-thread sharing to worry about, so `Sync` is fine even though
    // `LazyKeyInner` uses interior mutability.
    unsafe impl<T> Sync for Key<T> {}

    impl<T> fmt::Debug for Key<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.pad("Key { .. }")
        }
    }

    impl<T> Key<T> {
        pub const fn new() -> Key<T> {
            Key { inner: LazyKeyInner::new() }
        }

        // Always returns `Some` on this back-end: with no threads there are
        // no thread-exit destructors, so the value is never torn down.
        pub unsafe fn get(&self, init: fn() -> T) -> Option<&'static T> {
            // SAFETY: The caller must ensure no reference is ever handed out to
            // the inner cell nor mutable reference to the Option<T> inside said
            // cell. This makes it safe to hand out a reference, though the
            // lifetime of 'static is itself unsafe, making the get method unsafe.
            let value = unsafe {
                match self.inner.get() {
                    Some(ref value) => value,
                    None => self.inner.initialize(init),
                }
            };

            Some(value)
        }
    }
}
396
/// The "fast" implementation, backed by the `#[thread_local]` attribute on
/// targets that support it. Destructors are registered lazily via the
/// platform's `register_dtor` mechanism.
#[doc(hidden)]
#[cfg(target_thread_local)]
pub mod fast {
    use super::lazy::LazyKeyInner;
    use crate::cell::Cell;
    use crate::fmt;
    use crate::mem;
    use crate::sys::thread_local_dtor::register_dtor;

    #[derive(Copy, Clone)]
    enum DtorState {
        Unregistered,
        Registered,
        RunningOrHasRun,
    }

    // This data structure has been carefully constructed so that the fast path
    // only contains one branch on x86. That optimization is necessary to avoid
    // duplicated tls lookups on OSX.
    //
    // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
    pub struct Key<T> {
        // If `LazyKeyInner::get` returns `None`, that indicates either:
        //   * The value has never been initialized
        //   * The value is being recursively initialized
        //   * The value has already been destroyed or is being destroyed
        // To determine which kind of `None`, check `dtor_state`.
        //
        // This is very optimizer friendly for the fast path - initialized but
        // not yet dropped.
        inner: LazyKeyInner<T>,

        // Metadata to keep track of the state of the destructor. Remember that
        // this variable is thread-local, not global.
        dtor_state: Cell<DtorState>,
    }

    impl<T> fmt::Debug for Key<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.pad("Key { .. }")
        }
    }

    impl<T> Key<T> {
        pub const fn new() -> Key<T> {
            Key { inner: LazyKeyInner::new(), dtor_state: Cell::new(DtorState::Unregistered) }
        }

        pub unsafe fn get<F: FnOnce() -> T>(&self, init: F) -> Option<&'static T> {
            // SAFETY: See the definitions of `LazyKeyInner::get` and
            // `try_initialize` for more information.
            //
            // The caller must ensure no mutable references are ever active to
            // the inner cell or the inner T when this is called.
            // The `try_initialize` is dependent on the passed `init` function
            // for this.
            unsafe {
                match self.inner.get() {
                    Some(val) => Some(val),
                    None => self.try_initialize(init),
                }
            }
        }

        // `try_initialize` is only called once per fast thread local variable,
        // except in corner cases where thread_local dtors reference other
        // thread_local's, or it is being recursively initialized.
        //
        // macOS: Inlining this function can cause two `tlv_get_addr` calls to
        // be performed for every call to `Key::get`.
        // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
        #[inline(never)]
        unsafe fn try_initialize<F: FnOnce() -> T>(&self, init: F) -> Option<&'static T> {
            // Types without a `Drop` impl never need a registered destructor,
            // so they can be initialized even after dtors started running.
            // SAFETY: See comment above (this function's doc).
            if !mem::needs_drop::<T>() || unsafe { self.try_register_dtor() } {
                // SAFETY: See comment above (this function's doc).
                Some(unsafe { self.inner.initialize(init) })
            } else {
                None
            }
        }

        // `try_register_dtor` is only called once per fast thread local
        // variable, except in corner cases where thread_local dtors reference
        // other thread_local's, or it is being recursively initialized.
        //
        // Returns `false` iff the destructor is running or has already run,
        // in which case the value must not be (re)initialized.
        unsafe fn try_register_dtor(&self) -> bool {
            match self.dtor_state.get() {
                DtorState::Unregistered => {
                    // SAFETY: dtor registration happens before initialization.
                    // Passing `self` as a pointer while using `destroy_value<T>`
                    // is safe because the function will build a pointer to a
                    // Key<T>, which is the type of self and so find the correct
                    // size.
                    unsafe { register_dtor(self as *const _ as *mut u8, destroy_value::<T>) };
                    self.dtor_state.set(DtorState::Registered);
                    true
                }
                DtorState::Registered => {
                    // recursively initialized
                    true
                }
                DtorState::RunningOrHasRun => false,
            }
        }
    }

    unsafe extern "C" fn destroy_value<T>(ptr: *mut u8) {
        let ptr = ptr as *mut Key<T>;

        // SAFETY:
        //
        // The pointer `ptr` has been built just above and comes from
        // `try_register_dtor` where it is originally a Key<T> coming from `self`,
        // making it non-null and of the correct type.
        //
        // Right before we run the user destructor be sure to set the
        // `Option<T>` to `None`, and `dtor_state` to `RunningOrHasRun`. This
        // causes future calls to `get` to run `try_initialize` again,
        // which will now fail, and return `None`.
        unsafe {
            let value = (*ptr).inner.take();
            (*ptr).dtor_state.set(DtorState::RunningOrHasRun);
            drop(value);
        }
    }
}
523
/// Fallback implementation backed by OS-provided TLS keys (e.g. pthread
/// keys). The per-thread slot holds a pointer with three states:
///   * null  - not yet initialized for this thread
///   * 1     - sentinel: the destructor is currently running
///   * other - heap pointer to this thread's `Value<T>`
#[doc(hidden)]
pub mod os {
    use super::lazy::LazyKeyInner;
    use crate::cell::Cell;
    use crate::fmt;
    use crate::marker;
    use crate::ptr;
    use crate::sys_common::thread_local_key::StaticKey as OsStaticKey;

    pub struct Key<T> {
        // OS-TLS key that we'll use to key off.
        os: OsStaticKey,
        // `Cell<T>` marks this !Sync-by-content; the manual `Sync` impl below
        // is what actually allows sharing the key (not the values) across threads.
        marker: marker::PhantomData<Cell<T>>,
    }

    impl<T> fmt::Debug for Key<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.pad("Key { .. }")
        }
    }

    unsafe impl<T> Sync for Key<T> {}

    struct Value<T: 'static> {
        inner: LazyKeyInner<T>,
        // Back-pointer so `destroy_value` can reset the OS slot for this key.
        key: &'static Key<T>,
    }

    impl<T: 'static> Key<T> {
        #[rustc_const_unstable(feature = "thread_local_internals", issue = "none")]
        pub const fn new() -> Key<T> {
            Key { os: OsStaticKey::new(Some(destroy_value::<T>)), marker: marker::PhantomData }
        }

        /// It is a requirement for the caller to ensure that no mutable
        /// reference is active when this method is called.
        pub unsafe fn get(&'static self, init: fn() -> T) -> Option<&'static T> {
            // SAFETY: See the documentation for this method.
            let ptr = unsafe { self.os.get() as *mut Value<T> };
            // `> 1` excludes both null (uninitialized) and the sentinel value
            // 1 (destructor running).
            if ptr as usize > 1 {
                // SAFETY: the check ensured the pointer is safe (its destructor
                // is not running) + it is coming from a trusted source (self).
                if let Some(ref value) = unsafe { (*ptr).inner.get() } {
                    return Some(value);
                }
            }
            // SAFETY: At this point we are sure we have no value and so
            // initializing (or trying to) is safe.
            unsafe { self.try_initialize(init) }
        }

        // `try_initialize` is only called once per os thread local variable,
        // except in corner cases where thread_local dtors reference other
        // thread_local's, or it is being recursively initialized.
        unsafe fn try_initialize(&'static self, init: fn() -> T) -> Option<&'static T> {
            // SAFETY: No mutable references are ever handed out meaning getting
            // the value is ok.
            let ptr = unsafe { self.os.get() as *mut Value<T> };
            if ptr as usize == 1 {
                // destructor is running
                return None;
            }

            let ptr = if ptr.is_null() {
                // If the lookup returned null, we haven't initialized our own
                // local copy, so do that now.
                let ptr: Box<Value<T>> = box Value { inner: LazyKeyInner::new(), key: self };
                let ptr = Box::into_raw(ptr);
                // SAFETY: At this point we are sure there is no value inside
                // ptr so setting it will not affect anyone else.
                unsafe {
                    self.os.set(ptr as *mut u8);
                }
                ptr
            } else {
                // recursive initialization
                ptr
            };

            // SAFETY: ptr has been ensured as non-null just above and so can be
            // dereferenced safely.
            unsafe { Some((*ptr).inner.initialize(init)) }
        }
    }

    unsafe extern "C" fn destroy_value<T: 'static>(ptr: *mut u8) {
        // SAFETY:
        //
        // The OS TLS ensures that this key contains a NULL value when this
        // destructor starts to run. We set it back to a sentinel value of 1 to
        // ensure that any future calls to `get` for this thread will return
        // `None`.
        //
        // Note that to prevent an infinite loop we reset it back to null right
        // before we return from the destructor ourselves.
        unsafe {
            let ptr = Box::from_raw(ptr as *mut Value<T>);
            let key = ptr.key;
            key.os.set(1 as *mut u8);
            drop(ptr);
            key.os.set(ptr::null_mut());
        }
    }
}
627 }