]> git.proxmox.com Git - rustc.git/blame - compiler/rustc_middle/src/ty/list.rs
New upstream version 1.63.0+dfsg1
[rustc.git] / compiler / rustc_middle / src / ty / list.rs
CommitLineData
f9f354fc 1use crate::arena::Arena;
f9f354fc 2use rustc_serialize::{Encodable, Encoder};
f035d41b
XL
3use std::alloc::Layout;
4use std::cmp::Ordering;
f9f354fc
XL
5use std::fmt;
6use std::hash::{Hash, Hasher};
7use std::iter;
8use std::mem;
9use std::ops::Deref;
10use std::ptr;
11use std::slice;
12
a2a8927a
XL
/// `List<T>` is a bit like `&[T]`, but with some critical differences.
/// - IMPORTANT: Every `List<T>` is *required* to have unique contents. The
///   type's correctness relies on this, *but it does not enforce it*.
///   Therefore, any code that creates a `List<T>` must ensure uniqueness
///   itself. In practice this is achieved by interning.
/// - The length is stored within the `List<T>`, so `&List<Ty>` is a thin
///   pointer.
/// - Because of this, you cannot get a `List<T>` that is a sub-list of another
///   `List<T>`. You can get a sub-slice `&[T]`, however.
/// - `List<T>` can be used with `CopyTaggedPtr`, which is useful within
///   structs whose size must be minimized.
/// - Because of the uniqueness assumption, we can use the address of a
///   `List<T>` for faster equality comparisons and hashing.
/// - `T` must be `Copy`. This lets `List<T>` be stored in a dropless arena and
///   iterators return a `T` rather than a `&T`.
/// - `T` must not be zero-sized.
// `repr(C)` fixes the field order (`len` first, then the elements), which the
// manual allocation in `from_arena` and the static in `empty()` rely on.
#[repr(C)]
pub struct List<T> {
    // Number of elements stored inline after this header.
    len: usize,

    /// Although this claims to be a zero-length array, in practice `len`
    /// elements are actually present.
    data: [T; 0],

    // Unsized tail (see `OpaqueListContents`): makes `List<T>` `!Sized` so it
    // can only exist behind a reference, while keeping that reference thin.
    opaque: OpaqueListContents,
}
39
a2a8927a
XL
// NOTE: this uses the (unstable) `extern_types` feature; an extern type has
// no size or alignment known to Rust, which is what keeps `&List<T>` thin.
extern "C" {
    /// A dummy type used to force `List` to be unsized while not requiring
    /// references to it be wide pointers.
    type OpaqueListContents;
}
45
a2a8927a
XL
46impl<T> List<T> {
47 /// Returns a reference to the (unique, static) empty list.
48 #[inline(always)]
49 pub fn empty<'a>() -> &'a List<T> {
50 #[repr(align(64))]
51 struct MaxAlign;
52
53 assert!(mem::align_of::<T>() <= mem::align_of::<MaxAlign>());
54
55 #[repr(C)]
56 struct InOrder<T, U>(T, U);
57
58 // The empty slice is static and contains a single `0` usize (for the
59 // length) that is 64-byte aligned, thus featuring the necessary
60 // trailing padding for elements with up to 64-byte alignment.
61 static EMPTY_SLICE: InOrder<usize, MaxAlign> = InOrder(0, MaxAlign);
62 unsafe { &*(&EMPTY_SLICE as *const _ as *const List<T>) }
63 }
04454e1e
FG
64
65 pub fn len(&self) -> usize {
66 self.len
67 }
a2a8927a 68}
f9f354fc
XL
69
impl<T: Copy> List<T> {
    /// Allocates a list from `arena` and copies the contents of `slice` into it.
    ///
    /// WARNING: the contents *must be unique*, such that no list with these
    /// contents has been previously created. If not, operations such as `eq`
    /// and `hash` might give incorrect results.
    ///
    /// Panics if `T` is `Drop`, or `T` is zero-sized, or the slice is empty
    /// (because the empty list exists statically, and is available via
    /// `empty()`).
    #[inline]
    pub(super) fn from_arena<'tcx>(arena: &'tcx Arena<'tcx>, slice: &[T]) -> &'tcx List<T> {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(!slice.is_empty());

        // Compute the layout of a `usize` header followed (after padding) by
        // the elements — this matches the `repr(C)` field order of `List`.
        let (layout, _offset) =
            Layout::new::<usize>().extend(Layout::for_value::<[T]>(slice)).unwrap();
        let mem = arena.dropless.alloc_raw(layout) as *mut List<T>;
        // SAFETY: `mem` points to a fresh allocation of the layout computed
        // above; `addr_of_mut!` avoids creating references to uninitialized
        // memory while the header and elements are written.
        unsafe {
            // Write the length
            ptr::addr_of_mut!((*mem).len).write(slice.len());

            // Write the elements
            ptr::addr_of_mut!((*mem).data)
                .cast::<T>()
                .copy_from_nonoverlapping(slice.as_ptr(), slice.len());

            &*mem
        }
    }

    // If this method didn't exist, we would use `slice.iter` due to
    // deref coercion.
    //
    // This would be weird, as `self.into_iter` iterates over `T` directly.
    #[inline(always)]
    pub fn iter(&self) -> <&'_ List<T> as IntoIterator>::IntoIter {
        self.into_iter()
    }
}
111
112impl<T: fmt::Debug> fmt::Debug for List<T> {
113 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
114 (**self).fmt(f)
115 }
116}
117
3dfed10e
XL
118impl<S: Encoder, T: Encodable<S>> Encodable<S> for List<T> {
119 #[inline]
923072b8
FG
120 fn encode(&self, s: &mut S) {
121 (**self).encode(s);
f9f354fc
XL
122 }
123}
124
a2a8927a
XL
125impl<T: PartialEq> PartialEq for List<T> {
126 #[inline]
127 fn eq(&self, other: &List<T>) -> bool {
128 // Pointer equality implies list equality (due to the unique contents
129 // assumption).
130 ptr::eq(self, other)
131 }
132}
133
// Pointer-identity equality is reflexive/symmetric/transitive, so `Eq` holds.
impl<T: Eq> Eq for List<T> {}
135
f9f354fc
XL
136impl<T> Ord for List<T>
137where
138 T: Ord,
139{
140 fn cmp(&self, other: &List<T>) -> Ordering {
a2a8927a
XL
141 // Pointer equality implies list equality (due to the unique contents
142 // assumption), but the contents must be compared otherwise.
f9f354fc
XL
143 if self == other { Ordering::Equal } else { <[T] as Ord>::cmp(&**self, &**other) }
144 }
145}
146
147impl<T> PartialOrd for List<T>
148where
149 T: PartialOrd,
150{
151 fn partial_cmp(&self, other: &List<T>) -> Option<Ordering> {
a2a8927a
XL
152 // Pointer equality implies list equality (due to the unique contents
153 // assumption), but the contents must be compared otherwise.
f9f354fc
XL
154 if self == other {
155 Some(Ordering::Equal)
156 } else {
157 <[T] as PartialOrd>::partial_cmp(&**self, &**other)
158 }
159 }
160}
161
f9f354fc
XL
162impl<T> Hash for List<T> {
163 #[inline]
164 fn hash<H: Hasher>(&self, s: &mut H) {
a2a8927a
XL
165 // Pointer hashing is sufficient (due to the unique contents
166 // assumption).
f9f354fc
XL
167 (self as *const List<T>).hash(s)
168 }
169}
170
171impl<T> Deref for List<T> {
172 type Target = [T];
173 #[inline(always)]
174 fn deref(&self) -> &[T] {
175 self.as_ref()
176 }
177}
178
impl<T> AsRef<[T]> for List<T> {
    #[inline(always)]
    fn as_ref(&self) -> &[T] {
        // SAFETY: `data` marks the start of the trailing elements, and the
        // allocation in `from_arena` stores `len` initialized `T`s there
        // (the static empty list has `len == 0`, so nothing is read).
        unsafe { slice::from_raw_parts(self.data.as_ptr(), self.len) }
    }
}
185
186impl<'a, T: Copy> IntoIterator for &'a List<T> {
187 type Item = T;
188 type IntoIter = iter::Copied<<&'a [T] as IntoIterator>::IntoIter>;
189 #[inline(always)]
190 fn into_iter(self) -> Self::IntoIter {
191 self[..].iter().copied()
192 }
193}
194
a2a8927a
XL
// SAFETY: a `List<T>` is only ever exposed behind a shared reference and has
// no interior mutability visible here, so sharing it across threads is sound
// whenever `T: Sync`.
unsafe impl<T: Sync> Sync for List<T> {}
196
unsafe impl<'a, T: 'a> rustc_data_structures::tagged_ptr::Pointer for &'a List<T> {
    // Number of low address bits available for a tag: the leading `usize`
    // `len` field forces at least `align_of::<usize>()` alignment, so that
    // many trailing zeros are guaranteed.
    const BITS: usize = std::mem::align_of::<usize>().trailing_zeros() as usize;

    #[inline]
    fn into_usize(self) -> usize {
        self as *const List<T> as usize
    }

    #[inline]
    unsafe fn from_usize(ptr: usize) -> &'a List<T> {
        // Caller contract: `ptr` must be an address previously produced by
        // `into_usize` (with any tag bits cleared) and still live for `'a`.
        &*(ptr as *const List<T>)
    }

    unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
        // `Self` is `&'a List<T>` which impls `Copy`, so this is fine.
        let ptr = Self::from_usize(ptr);
        f(&ptr)
    }
}