]> git.proxmox.com Git - rustc.git/blob - vendor/tracing-subscriber/src/thread.rs
New upstream version 1.46.0~beta.2+dfsg1
[rustc.git] / vendor / tracing-subscriber / src / thread.rs
1 use crate::sync::RwLock;
2 use std::sync::atomic::{AtomicUsize, Ordering};
3 use std::{
4 cell::{Cell, UnsafeCell},
5 fmt,
6 marker::PhantomData,
7 };
/// A sparse, lock-guarded slot map keyed by thread [`Id`]: slot `i` is only
/// ever read or written by the thread whose `Id` is `i`.
pub(crate) struct Local<T> {
    // TODO(eliza): this once used a `crossbeam_util::ShardedRwLock`. We may
    // eventually wish to replace it with a sharded lock implementation on top
    // of our internal `RwLock` wrapper type. If possible, we should profile
    // this first to determine if it's necessary.
    //
    // The write lock is only taken when growing the vector or installing a new
    // slot; per-slot mutation happens through `UnsafeCell` under the read
    // lock, relying on the per-thread index for exclusivity.
    inner: RwLock<Inner<T>>,
}
15
16 type Inner<T> = Vec<Option<UnsafeCell<T>>>;
17
/// Uniquely identifies a thread.
#[repr(transparent)]
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub(crate) struct Id {
    // Monotonically assigned index (see `Id::new_thread`); `std::usize::MAX`
    // is reserved as the "poisoned" sentinel (see `Id::poisoned`).
    id: usize,
    // `UnsafeCell<()>` is `!Sync`, so this marker prevents sharing an `&Id`
    // across threads. NOTE(review): despite the field name, it does not make
    // `Id` `!Send` — `UnsafeCell<()>` is still `Send`; confirm whether that
    // was the intent.
    _not_send: PhantomData<UnsafeCell<()>>,
}
25
26 // === impl Local ===
27
28 impl<T> Local<T> {
29 pub(crate) fn new() -> Self {
30 let len = Id::current().as_usize();
31 // Preallocate up to the current thread ID, so we don't have to inside
32 // the lock.
33 let mut data = Vec::with_capacity(len);
34 data.resize_with(len, || None);
35 Local {
36 inner: RwLock::new(data),
37 }
38 }
39
40 pub(crate) fn with_or_else<O>(
41 &self,
42 new: impl FnOnce() -> T,
43 f: impl FnOnce(&mut T) -> O,
44 ) -> Option<O> {
45 let i = Id::current().as_usize();
46 let mut f = Some(f);
47 self.try_with_index(i, |item| f.take().expect("called twice")(item))
48 .or_else(move || {
49 self.new_thread(i, new);
50 self.try_with_index(i, |item| f.take().expect("called twice")(item))
51 })
52 }
53
54 fn try_with_index<O>(&self, i: usize, f: impl FnOnce(&mut T) -> O) -> Option<O> {
55 let lock = try_lock!(self.inner.read(), else return None);
56 let slot = lock.get(i)?.as_ref()?;
57 let item = unsafe { &mut *slot.get() };
58 Some(f(item))
59 }
60
61 #[cold]
62 fn new_thread(&self, i: usize, new: impl FnOnce() -> T) {
63 let mut lock = try_lock!(self.inner.write());
64 let this = &mut *lock;
65 this.resize_with(i + 1, || None);
66 this[i] = Some(UnsafeCell::new(new()));
67 }
68 }
69
70 impl<T: Default> Local<T> {
71 #[inline]
72 pub(crate) fn with<O>(&self, f: impl FnOnce(&mut T) -> O) -> Option<O> {
73 self.with_or_else(T::default, f)
74 }
75 }
76
77 unsafe impl<T> Sync for Local<T> {}
78
79 impl<T: fmt::Debug> fmt::Debug for Local<T> {
80 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
81 let id = Id::current();
82 self.try_with_index(id.as_usize(), |local| {
83 f.debug_struct("Local")
84 .field("thread", &id)
85 .field("local", &*local)
86 .finish()
87 })
88 .unwrap_or_else(|| {
89 f.debug_struct("Local")
90 .field("thread", &id)
91 .field("local", &format_args!("<uninitialized>"))
92 .finish()
93 })
94 }
95 }
96
97 // === impl Id ===
98
99 impl Id {
100 pub(crate) fn current() -> Self {
101 thread_local! {
102 static MY_ID: Cell<Option<Id>> = Cell::new(None);
103 }
104
105 MY_ID
106 .try_with(|my_id| my_id.get().unwrap_or_else(|| Self::new_thread(my_id)))
107 .unwrap_or_else(|_| Self::poisoned())
108 }
109
110 pub(crate) fn as_usize(self) -> usize {
111 self.id
112 }
113
114 #[cold]
115 fn new_thread(local: &Cell<Option<Id>>) -> Self {
116 static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
117 let id = NEXT_ID.fetch_add(1, Ordering::AcqRel);
118 let tid = Self {
119 id,
120 _not_send: PhantomData,
121 };
122 local.set(Some(tid));
123 tid
124 }
125
126 #[cold]
127 fn poisoned() -> Self {
128 Self {
129 id: std::usize::MAX,
130 _not_send: PhantomData,
131 }
132 }
133
134 /// Returns true if the local thread ID was accessed while unwinding.
135 pub(crate) fn is_poisoned(self) -> bool {
136 self.id == std::usize::MAX
137 }
138 }
139
140 impl fmt::Debug for Id {
141 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
142 if self.is_poisoned() {
143 f.debug_tuple("Id")
144 .field(&format_args!("<poisoned>"))
145 .finish()
146 } else {
147 f.debug_tuple("Id").field(&self.id).finish()
148 }
149 }
150 }