]> git.proxmox.com Git - rustc.git/blob - library/std/src/thread/local.rs
New upstream version 1.47.0~beta.2+dfsg1
[rustc.git] / library / std / src / thread / local.rs
1 //! Thread local storage
2
3 #![unstable(feature = "thread_local_internals", issue = "none")]
4
5 use crate::error::Error;
6 use crate::fmt;
7
/// A thread local storage key which owns its contents.
///
/// This key uses the fastest possible implementation available to it for the
/// target platform. It is instantiated with the [`thread_local!`] macro and the
/// primary method is the [`with`] method.
///
/// The [`with`] method yields a reference to the contained value which cannot be
/// sent across threads or escape the given closure.
///
/// # Initialization and Destruction
///
/// Initialization is dynamically performed on the first call to [`with`]
/// within a thread, and values that implement [`Drop`] get destructed when a
/// thread exits. Some caveats apply, which are explained below.
///
/// A `LocalKey`'s initializer cannot recursively depend on itself, and using
/// a `LocalKey` in this way will cause the initializer to infinitely recurse
/// on the first call to `with`.
///
/// # Examples
///
/// ```
/// use std::cell::RefCell;
/// use std::thread;
///
/// thread_local!(static FOO: RefCell<u32> = RefCell::new(1));
///
/// FOO.with(|f| {
///     assert_eq!(*f.borrow(), 1);
///     *f.borrow_mut() = 2;
/// });
///
/// // each thread starts out with the initial value of 1
/// let t = thread::spawn(move|| {
///     FOO.with(|f| {
///         assert_eq!(*f.borrow(), 1);
///         *f.borrow_mut() = 3;
///     });
/// });
///
/// // wait for the thread to complete and bail out on panic
/// t.join().unwrap();
///
/// // we retain our original value of 2 despite the child thread
/// FOO.with(|f| {
///     assert_eq!(*f.borrow(), 2);
/// });
/// ```
///
/// # Platform-specific behavior
///
/// Note that a "best effort" is made to ensure that destructors for types
/// stored in thread local storage are run, but not all platforms can guarantee
/// that destructors will be run for all types in thread local storage. For
/// example, there are a number of known caveats where destructors are not run:
///
/// 1. On Unix systems when pthread-based TLS is being used, destructors will
///    not be run for TLS values on the main thread when it exits. Note that the
///    application will exit immediately after the main thread exits as well.
/// 2. On all platforms it's possible for TLS to re-initialize other TLS slots
///    during destruction. Some platforms ensure that this cannot happen
///    infinitely by preventing re-initialization of any slot that has been
///    destroyed, but not all platforms have this guard. Those platforms that do
///    not guard typically have a synthetic limit after which point no more
///    destructors are run.
///
/// [`with`]: LocalKey::with
#[stable(feature = "rust1", since = "1.0.0")]
pub struct LocalKey<T: 'static> {
    // This outer `LocalKey<T>` type is what's going to be stored in statics,
    // but actual data inside will sometimes be tagged with #[thread_local].
    // It's not valid for a true static to reference a #[thread_local] static,
    // so we get around that by exposing an accessor through a layer of function
    // indirection (this thunk).
    //
    // Note that the thunk is itself unsafe because the returned lifetime of the
    // slot where data lives, `'static`, is not actually valid. The lifetime
    // here is actually slightly shorter than the currently running thread!
    //
    // Although this is an extra layer of indirection, it should in theory be
    // trivially devirtualizable by LLVM because the value of `inner` never
    // changes and the constant should be readonly within a crate. This mainly
    // only runs into problems when TLS statics are exported across crates.
    inner: unsafe fn() -> Option<&'static T>,
}
93
#[stable(feature = "std_debug", since = "1.16.0")]
impl<T: 'static> fmt::Debug for LocalKey<T> {
    // Deliberately opaque: the contained value belongs to whichever thread
    // reads it, so `Debug` cannot safely (or meaningfully) show it here.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.pad("LocalKey { .. }")
    }
}
100
/// Declare a new thread local storage key of type [`std::thread::LocalKey`].
///
/// # Syntax
///
/// The macro wraps any number of static declarations and makes them thread local.
/// Publicity and attributes for each static are allowed. Example:
///
/// ```
/// use std::cell::RefCell;
/// thread_local! {
///     pub static FOO: RefCell<u32> = RefCell::new(1);
///
///     #[allow(unused)]
///     static BAR: RefCell<f32> = RefCell::new(1.0);
/// }
/// # fn main() {}
/// ```
///
/// See [`LocalKey` documentation][`std::thread::LocalKey`] for more
/// information.
///
/// [`std::thread::LocalKey`]: crate::thread::LocalKey
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow_internal_unstable(thread_local_internals)]
macro_rules! thread_local {
    // empty (base case for the recursion)
    () => {};

    // process multiple declarations: peel off the first `static` (note the
    // trailing `;`) and recurse on the rest
    ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr; $($rest:tt)*) => (
        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
        $crate::thread_local!($($rest)*);
    );

    // handle a single declaration (no trailing semicolon)
    ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr) => (
        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
    );
}
141
#[doc(hidden)]
#[unstable(feature = "thread_local_internals", reason = "should not be necessary", issue = "none")]
#[macro_export]
#[allow_internal_unstable(thread_local_internals, cfg_target_thread_local, thread_local)]
#[allow_internal_unsafe]
macro_rules! __thread_local_inner {
    // Expands to the `LocalKey` *value* for one declaration: selects one of
    // the three platform implementations via `cfg` and wraps its getter in
    // the `__getit` thunk stored inside `LocalKey`.
    (@key $t:ty, $init:expr) => {
        {
            #[inline]
            fn __init() -> $t { $init }

            unsafe fn __getit() -> $crate::option::Option<&'static $t> {
                // wasm32 without atomics is single-threaded, so a plain
                // lazily-initialized static suffices.
                #[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
                static __KEY: $crate::thread::__StaticLocalKeyInner<$t> =
                    $crate::thread::__StaticLocalKeyInner::new();

                // Targets with native `#[thread_local]` support: fast path.
                #[thread_local]
                #[cfg(all(
                    target_thread_local,
                    not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
                ))]
                static __KEY: $crate::thread::__FastLocalKeyInner<$t> =
                    $crate::thread::__FastLocalKeyInner::new();

                // Everything else falls back to the OS TLS key API.
                #[cfg(all(
                    not(target_thread_local),
                    not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
                ))]
                static __KEY: $crate::thread::__OsLocalKeyInner<$t> =
                    $crate::thread::__OsLocalKeyInner::new();

                // FIXME: remove the #[allow(...)] marker when macros don't
                // raise warning for missing/extraneous unsafe blocks anymore.
                // See https://github.com/rust-lang/rust/issues/74838.
                #[allow(unused_unsafe)]
                unsafe { __KEY.get(__init) }
            }

            unsafe {
                $crate::thread::LocalKey::new(__getit)
            }
        }
    };
    // Expands a full `static NAME: T = init` declaration into a `LocalKey`
    // constant, forwarding attributes and visibility.
    ($(#[$attr:meta])* $vis:vis $name:ident, $t:ty, $init:expr) => {
        $(#[$attr])* $vis const $name: $crate::thread::LocalKey<$t> =
            $crate::__thread_local_inner!(@key $t, $init);
    }
}
190
/// An error returned by [`LocalKey::try_with`](struct.LocalKey.html#method.try_with).
///
/// Indicates the key's value has been (or is being) destroyed on this thread.
#[stable(feature = "thread_local_try_with", since = "1.26.0")]
#[derive(Clone, Copy, Eq, PartialEq)]
pub struct AccessError {
    // Private field: prevents construction outside this module and leaves
    // room to add fields later without breaking users.
    _private: (),
}
197
#[stable(feature = "thread_local_try_with", since = "1.26.0")]
impl fmt::Debug for AccessError {
    // Renders as `AccessError` with no fields (the `_private` field is an
    // implementation detail).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("AccessError").finish()
    }
}
204
#[stable(feature = "thread_local_try_with", since = "1.26.0")]
impl fmt::Display for AccessError {
    // Delegates to `str`'s `Display` (which respects formatter width/fill).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt("already destroyed", f)
    }
}
211
// No source/cause: destruction-order errors carry no further context.
#[stable(feature = "thread_local_try_with", since = "1.26.0")]
impl Error for AccessError {}
214
215 impl<T: 'static> LocalKey<T> {
216 #[doc(hidden)]
217 #[unstable(
218 feature = "thread_local_internals",
219 reason = "recently added to create a key",
220 issue = "none"
221 )]
222 pub const unsafe fn new(inner: unsafe fn() -> Option<&'static T>) -> LocalKey<T> {
223 LocalKey { inner }
224 }
225
226 /// Acquires a reference to the value in this TLS key.
227 ///
228 /// This will lazily initialize the value if this thread has not referenced
229 /// this key yet.
230 ///
231 /// # Panics
232 ///
233 /// This function will `panic!()` if the key currently has its
234 /// destructor running, and it **may** panic if the destructor has
235 /// previously been run for this thread.
236 #[stable(feature = "rust1", since = "1.0.0")]
237 pub fn with<F, R>(&'static self, f: F) -> R
238 where
239 F: FnOnce(&T) -> R,
240 {
241 self.try_with(f).expect(
242 "cannot access a Thread Local Storage value \
243 during or after destruction",
244 )
245 }
246
247 /// Acquires a reference to the value in this TLS key.
248 ///
249 /// This will lazily initialize the value if this thread has not referenced
250 /// this key yet. If the key has been destroyed (which may happen if this is called
251 /// in a destructor), this function will return an [`AccessError`](struct.AccessError.html).
252 ///
253 /// # Panics
254 ///
255 /// This function will still `panic!()` if the key is uninitialized and the
256 /// key's initializer panics.
257 #[stable(feature = "thread_local_try_with", since = "1.26.0")]
258 #[inline]
259 pub fn try_with<F, R>(&'static self, f: F) -> Result<R, AccessError>
260 where
261 F: FnOnce(&T) -> R,
262 {
263 unsafe {
264 let thread_local = (self.inner)().ok_or(AccessError { _private: () })?;
265 Ok(f(thread_local))
266 }
267 }
268 }
269
mod lazy {
    use crate::cell::UnsafeCell;
    use crate::hint;
    use crate::mem;

    /// Lazily-initialized `Option<T>` slot shared by the `statik`, `fast`
    /// and `os` TLS implementations below.
    pub struct LazyKeyInner<T> {
        inner: UnsafeCell<Option<T>>,
    }

    impl<T> LazyKeyInner<T> {
        /// Creates an empty (uninitialized) slot.
        pub const fn new() -> LazyKeyInner<T> {
            LazyKeyInner { inner: UnsafeCell::new(None) }
        }

        /// Returns a reference to the value if it has been initialized.
        ///
        /// NOTE(review): the returned `'static` lifetime is a deliberate lie —
        /// the value really lives only as long as the current thread (see the
        /// comment on `LocalKey::inner`).
        pub unsafe fn get(&self) -> Option<&'static T> {
            (*self.inner.get()).as_ref()
        }

        /// Runs `init`, stores its result in the slot, and returns a
        /// reference to the freshly stored value.
        pub unsafe fn initialize<F: FnOnce() -> T>(&self, init: F) -> &'static T {
            // Execute the initialization up front, *then* move it into our slot,
            // just in case initialization fails.
            let value = init();
            let ptr = self.inner.get();

            // Note that this could in theory just be `*ptr = Some(value)`, but
            // the compiler will currently codegen that pattern with something like:
            //
            //      ptr::drop_in_place(ptr)
            //      ptr::write(ptr, Some(value))
            //
            // Due to this pattern it's possible for the destructor of the value in
            // `ptr` (e.g., if this is being recursively initialized) to re-access
            // TLS, in which case there will be a `&` and `&mut` pointer to the same
            // value (an aliasing violation). To avoid setting the "I'm running a
            // destructor" flag we just use `mem::replace` which should sequence the
            // operations a little differently and make this safe to call.
            let _ = mem::replace(&mut *ptr, Some(value));

            // After storing `Some` we want to get a reference to the contents of
            // what we just stored. While we could use `unwrap` here and it should
            // always work it empirically doesn't seem to always get optimized away,
            // which means that using something like `try_with` can pull in
            // panicking code and cause a large size bloat.
            match *ptr {
                Some(ref x) => x,
                // SAFETY: the slot was unconditionally set to `Some` just above.
                None => hint::unreachable_unchecked(),
            }
        }

        /// Moves the value (if any) out of the slot, leaving `None` behind so
        /// later `get` calls observe the destroyed state. Used on dtor paths.
        #[allow(unused)]
        pub unsafe fn take(&mut self) -> Option<T> {
            (*self.inner.get()).take()
        }
    }
}
325
/// On some platforms like wasm32 there's no threads, so no need to generate
/// thread locals and we can instead just use plain statics!
#[doc(hidden)]
#[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
pub mod statik {
    use super::lazy::LazyKeyInner;
    use crate::fmt;

    /// Single-threaded "TLS" key: just a lazily-initialized static.
    pub struct Key<T> {
        inner: LazyKeyInner<T>,
    }

    // NOTE(review): this blanket `Sync` looks sound only because this module
    // is compiled solely for single-threaded wasm32 targets (see `cfg` above).
    unsafe impl<T> Sync for Key<T> {}

    impl<T> fmt::Debug for Key<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.pad("Key { .. }")
        }
    }

    impl<T> Key<T> {
        /// Creates an empty, uninitialized key.
        pub const fn new() -> Key<T> {
            Key { inner: LazyKeyInner::new() }
        }

        /// Returns the value, running `init` on first access. Never returns
        /// `None` here (no destructor state on this target), but keeps the
        /// `Option` signature shared with the other implementations.
        pub unsafe fn get(&self, init: fn() -> T) -> Option<&'static T> {
            let value = match self.inner.get() {
                Some(ref value) => value,
                None => self.inner.initialize(init),
            };
            Some(value)
        }
    }
}
360
#[doc(hidden)]
#[cfg(target_thread_local)]
pub mod fast {
    use super::lazy::LazyKeyInner;
    use crate::cell::Cell;
    use crate::fmt;
    use crate::mem;
    use crate::sys::thread_local_dtor::register_dtor;

    /// Where this key is in its destructor life-cycle. Per-thread state,
    /// since the `Key` itself lives in `#[thread_local]` storage.
    #[derive(Copy, Clone)]
    enum DtorState {
        Unregistered,
        Registered,
        RunningOrHasRun,
    }

    // This data structure has been carefully constructed so that the fast path
    // only contains one branch on x86. That optimization is necessary to avoid
    // duplicated tls lookups on OSX.
    //
    // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
    pub struct Key<T> {
        // If `LazyKeyInner::get` returns `None`, that indicates either:
        //   * The value has never been initialized
        //   * The value is being recursively initialized
        //   * The value has already been destroyed or is being destroyed
        // To determine which kind of `None`, check `dtor_state`.
        //
        // This is very optimizer friendly for the fast path - initialized but
        // not yet dropped.
        inner: LazyKeyInner<T>,

        // Metadata to keep track of the state of the destructor. Remember that
        // this variable is thread-local, not global.
        dtor_state: Cell<DtorState>,
    }

    impl<T> fmt::Debug for Key<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.pad("Key { .. }")
        }
    }

    impl<T> Key<T> {
        /// Creates an empty key with no registered destructor.
        pub const fn new() -> Key<T> {
            Key { inner: LazyKeyInner::new(), dtor_state: Cell::new(DtorState::Unregistered) }
        }

        /// Returns the value, initializing it via `init` on first access.
        /// Returns `None` once the destructor is running or has already run.
        pub unsafe fn get<F: FnOnce() -> T>(&self, init: F) -> Option<&'static T> {
            match self.inner.get() {
                Some(val) => Some(val),
                None => self.try_initialize(init),
            }
        }

        // `try_initialize` is only called once per fast thread local variable,
        // except in corner cases where thread_local dtors reference other
        // thread_local's, or it is being recursively initialized.
        //
        // Macos: Inlining this function can cause two `tlv_get_addr` calls to
        // be performed for every call to `Key::get`. The #[cold] hint makes
        // that less likely.
        // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
        #[cold]
        unsafe fn try_initialize<F: FnOnce() -> T>(&self, init: F) -> Option<&'static T> {
            // Types without a `Drop` impl never need destructor registration,
            // so they can always be (re-)initialized.
            if !mem::needs_drop::<T>() || self.try_register_dtor() {
                Some(self.inner.initialize(init))
            } else {
                None
            }
        }

        // `try_register_dtor` is only called once per fast thread local
        // variable, except in corner cases where thread_local dtors reference
        // other thread_local's, or it is being recursively initialized.
        unsafe fn try_register_dtor(&self) -> bool {
            match self.dtor_state.get() {
                DtorState::Unregistered => {
                    // dtor registration happens before initialization.
                    register_dtor(self as *const _ as *mut u8, destroy_value::<T>);
                    self.dtor_state.set(DtorState::Registered);
                    true
                }
                DtorState::Registered => {
                    // recursively initialized
                    true
                }
                // Destructor is running (or ran): refuse to re-initialize.
                DtorState::RunningOrHasRun => false,
            }
        }
    }

    /// Destructor handed to the platform TLS machinery; `ptr` is the
    /// `#[thread_local]` `Key<T>` itself.
    unsafe extern "C" fn destroy_value<T>(ptr: *mut u8) {
        let ptr = ptr as *mut Key<T>;

        // Right before we run the user destructor be sure to set the
        // `Option<T>` to `None`, and `dtor_state` to `RunningOrHasRun`. This
        // causes future calls to `get` to run `try_initialize` again,
        // which will now fail, and return `None`.
        let value = (*ptr).inner.take();
        (*ptr).dtor_state.set(DtorState::RunningOrHasRun);
        drop(value);
    }
}
465
#[doc(hidden)]
pub mod os {
    use super::lazy::LazyKeyInner;
    use crate::cell::Cell;
    use crate::fmt;
    use crate::marker;
    use crate::ptr;
    use crate::sys_common::thread_local_key::StaticKey as OsStaticKey;

    /// Fallback TLS implementation backed by the OS's native TLS key API,
    /// used on targets without `#[thread_local]` support.
    pub struct Key<T> {
        // OS-TLS key that we'll use to key off.
        os: OsStaticKey,
        // PhantomData<Cell<T>> keeps the auto traits conservative; the
        // manual `Sync` impl below re-enables cross-thread sharing.
        marker: marker::PhantomData<Cell<T>>,
    }

    impl<T> fmt::Debug for Key<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.pad("Key { .. }")
        }
    }

    // NOTE(review): sound because each thread only ever dereferences its own
    // per-thread `Value<T>` allocation obtained through the OS key.
    unsafe impl<T> Sync for Key<T> {}

    /// Per-thread heap allocation holding the lazily-initialized value plus a
    /// pointer back to the owning key (needed inside the destructor).
    struct Value<T: 'static> {
        inner: LazyKeyInner<T>,
        key: &'static Key<T>,
    }

    impl<T: 'static> Key<T> {
        /// Creates a key whose per-thread values are destroyed by
        /// `destroy_value` at thread exit.
        pub const fn new() -> Key<T> {
            Key { os: OsStaticKey::new(Some(destroy_value::<T>)), marker: marker::PhantomData }
        }

        /// Returns the value, initializing it via `init` on first access.
        /// Returns `None` while/after the destructor runs on this thread.
        pub unsafe fn get(&'static self, init: fn() -> T) -> Option<&'static T> {
            // Stored pointer encoding: null = uninitialized, 1 = destructor
            // running sentinel (see `destroy_value`), otherwise a real
            // `Value<T>` pointer.
            let ptr = self.os.get() as *mut Value<T>;
            if ptr as usize > 1 {
                if let Some(ref value) = (*ptr).inner.get() {
                    return Some(value);
                }
            }
            self.try_initialize(init)
        }

        // `try_initialize` is only called once per os thread local variable,
        // except in corner cases where thread_local dtors reference other
        // thread_local's, or it is being recursively initialized.
        unsafe fn try_initialize(&'static self, init: fn() -> T) -> Option<&'static T> {
            let ptr = self.os.get() as *mut Value<T>;
            if ptr as usize == 1 {
                // destructor is running
                return None;
            }

            let ptr = if ptr.is_null() {
                // If the lookup returned null, we haven't initialized our own
                // local copy, so do that now.
                let ptr: Box<Value<T>> = box Value { inner: LazyKeyInner::new(), key: self };
                let ptr = Box::into_raw(ptr);
                self.os.set(ptr as *mut u8);
                ptr
            } else {
                // recursive initialization
                ptr
            };

            Some((*ptr).inner.initialize(init))
        }
    }

    /// Destructor registered with the OS TLS key; `ptr` is this thread's
    /// boxed `Value<T>`.
    unsafe extern "C" fn destroy_value<T: 'static>(ptr: *mut u8) {
        // The OS TLS ensures that this key contains a NULL value when this
        // destructor starts to run. We set it back to a sentinel value of 1 to
        // ensure that any future calls to `get` for this thread will return
        // `None`.
        //
        // Note that to prevent an infinite loop we reset it back to null right
        // before we return from the destructor ourselves.
        let ptr = Box::from_raw(ptr as *mut Value<T>);
        let key = ptr.key;
        key.os.set(1 as *mut u8);
        drop(ptr);
        key.os.set(ptr::null_mut());
    }
}
550
#[cfg(all(test, not(target_os = "emscripten")))]
mod tests {
    use crate::cell::{Cell, UnsafeCell};
    use crate::sync::mpsc::{channel, Sender};
    use crate::thread;

    // Sends on its channel when dropped, letting tests observe that a TLS
    // destructor actually ran.
    struct Foo(Sender<()>);

    impl Drop for Foo {
        fn drop(&mut self) {
            let Foo(ref s) = *self;
            s.send(()).unwrap();
        }
    }

    // Each thread gets its own fresh copy; writes don't leak across threads.
    #[test]
    fn smoke_no_dtor() {
        thread_local!(static FOO: Cell<i32> = Cell::new(1));

        FOO.with(|f| {
            assert_eq!(f.get(), 1);
            f.set(2);
        });
        let (tx, rx) = channel();
        let _t = thread::spawn(move || {
            FOO.with(|f| {
                assert_eq!(f.get(), 1);
            });
            tx.send(()).unwrap();
        });
        rx.recv().unwrap();

        FOO.with(|f| {
            assert_eq!(f.get(), 2);
        });
    }

    // `try_with` succeeds during normal use but fails inside the value's own
    // destructor (the key is already in the destroyed state by then).
    #[test]
    fn states() {
        struct Foo;
        impl Drop for Foo {
            fn drop(&mut self) {
                assert!(FOO.try_with(|_| ()).is_err());
            }
        }
        thread_local!(static FOO: Foo = Foo);

        thread::spawn(|| {
            assert!(FOO.try_with(|_| ()).is_ok());
        })
        .join()
        .ok()
        .expect("thread panicked");
    }

    // A value with a destructor is dropped at thread exit (signalled over the
    // channel by `Foo::drop`).
    #[test]
    fn smoke_dtor() {
        thread_local!(static FOO: UnsafeCell<Option<Foo>> = UnsafeCell::new(None));

        let (tx, rx) = channel();
        let _t = thread::spawn(move || unsafe {
            let mut tx = Some(tx);
            FOO.with(|f| {
                *f.get() = Some(Foo(tx.take().unwrap()));
            });
        });
        rx.recv().unwrap();
    }

    // Destructors may re-initialize *other* TLS slots; this checks that the
    // K1 <-> K2 dtor chain terminates after exactly three drops.
    #[test]
    fn circular() {
        struct S1;
        struct S2;
        thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));
        thread_local!(static K2: UnsafeCell<Option<S2>> = UnsafeCell::new(None));
        // Safe enough here: only touched from the one spawned thread.
        static mut HITS: u32 = 0;

        impl Drop for S1 {
            fn drop(&mut self) {
                unsafe {
                    HITS += 1;
                    if K2.try_with(|_| ()).is_err() {
                        assert_eq!(HITS, 3);
                    } else {
                        if HITS == 1 {
                            K2.with(|s| *s.get() = Some(S2));
                        } else {
                            assert_eq!(HITS, 3);
                        }
                    }
                }
            }
        }
        impl Drop for S2 {
            fn drop(&mut self) {
                unsafe {
                    HITS += 1;
                    assert!(K1.try_with(|_| ()).is_ok());
                    assert_eq!(HITS, 2);
                    K1.with(|s| *s.get() = Some(S1));
                }
            }
        }

        thread::spawn(move || {
            drop(S1);
        })
        .join()
        .ok()
        .expect("thread panicked");
    }

    // A destructor cannot re-access its own (already destroyed) slot.
    #[test]
    fn self_referential() {
        struct S1;
        thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));

        impl Drop for S1 {
            fn drop(&mut self) {
                assert!(K1.try_with(|_| ()).is_err());
            }
        }

        thread::spawn(move || unsafe {
            K1.with(|s| *s.get() = Some(S1));
        })
        .join()
        .ok()
        .expect("thread panicked");
    }

    // Note that this test will deadlock if TLS destructors aren't run (this
    // requires the destructor to be run to pass the test).
    #[test]
    fn dtors_in_dtors_in_dtors() {
        struct S1(Sender<()>);
        thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));
        thread_local!(static K2: UnsafeCell<Option<Foo>> = UnsafeCell::new(None));

        impl Drop for S1 {
            fn drop(&mut self) {
                let S1(ref tx) = *self;
                unsafe {
                    // Best-effort: K2 may already be destroyed, hence try_with.
                    let _ = K2.try_with(|s| *s.get() = Some(Foo(tx.clone())));
                }
            }
        }

        let (tx, rx) = channel();
        let _t = thread::spawn(move || unsafe {
            let mut tx = Some(tx);
            K1.with(|s| *s.get() = Some(S1(tx.take().unwrap())));
        });
        rx.recv().unwrap();
    }
}
707
#[cfg(test)]
mod dynamic_tests {
    use crate::cell::RefCell;
    use crate::collections::HashMap;

    // Initializer may be an arbitrary function call, evaluated lazily.
    #[test]
    fn smoke() {
        fn square(i: i32) -> i32 {
            i * i
        }
        thread_local!(static FOO: i32 = square(3));

        FOO.with(|value| assert_eq!(*value, 9));
    }

    // Initializer may build a non-trivial collection.
    #[test]
    fn hashmap() {
        fn map() -> RefCell<HashMap<i32, i32>> {
            let mut m = HashMap::new();
            m.insert(1, 2);
            RefCell::new(m)
        }
        thread_local!(static FOO: RefCell<HashMap<i32, i32>> = map());

        FOO.with(|table| assert_eq!(table.borrow()[&1], 2));
    }

    // The stored value can be mutated in place through a RefCell.
    #[test]
    fn refcell_vec() {
        thread_local!(static FOO: RefCell<Vec<u32>> = RefCell::new(vec![1, 2, 3]));

        FOO.with(|cell| {
            assert_eq!(cell.borrow().len(), 3);
            cell.borrow_mut().push(4);
            assert_eq!(cell.borrow()[3], 4);
        });
    }
}