// compiler/rustc_middle/src/ty/consts/int.rs (upstream version 1.49.0~beta.4)
use rustc_apfloat::ieee::{Double, Single};
use rustc_apfloat::Float;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use rustc_target::abi::{Size, TargetDataLayout};
use std::convert::{TryFrom, TryInto};
use std::fmt;

#[derive(Copy, Clone)]
/// A type for representing any integer. Only used for printing.
pub struct ConstInt {
    /// The "untyped" variant of `ConstInt`.
    int: ScalarInt,
    /// Whether the value is of a signed integer type.
    signed: bool,
    /// Whether the value is a `usize` or `isize` type.
    is_ptr_sized_integral: bool,
}

impl ConstInt {
    pub fn new(int: ScalarInt, signed: bool, is_ptr_sized_integral: bool) -> Self {
        Self { int, signed, is_ptr_sized_integral }
    }
}

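// Illustrative examples of the output of the `Debug` impl below (a sketch derived from its
// match arms; all examples assume `is_ptr_sized_integral == false`):
//
//     data = 42,   size = 4, signed:   "{:?}" -> "42",      "{:#?}" -> "42_i32"
//     data = 0x80, size = 1, signed:   "{:?}" -> "i8::MIN"
//     data = 0xff, size = 1, unsigned: "{:?}" -> "u8::MAX"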
impl std::fmt::Debug for ConstInt {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Self { int, signed, is_ptr_sized_integral } = *self;
        let size = int.size().bytes();
        let raw = int.data;
        if signed {
            let bit_size = size * 8;
            let min = 1u128 << (bit_size - 1);
            let max = min - 1;
            if raw == min {
                match (size, is_ptr_sized_integral) {
                    (_, true) => write!(fmt, "isize::MIN"),
                    (1, _) => write!(fmt, "i8::MIN"),
                    (2, _) => write!(fmt, "i16::MIN"),
                    (4, _) => write!(fmt, "i32::MIN"),
                    (8, _) => write!(fmt, "i64::MIN"),
                    (16, _) => write!(fmt, "i128::MIN"),
                    _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed),
                }
            } else if raw == max {
                match (size, is_ptr_sized_integral) {
                    (_, true) => write!(fmt, "isize::MAX"),
                    (1, _) => write!(fmt, "i8::MAX"),
                    (2, _) => write!(fmt, "i16::MAX"),
                    (4, _) => write!(fmt, "i32::MAX"),
                    (8, _) => write!(fmt, "i64::MAX"),
                    (16, _) => write!(fmt, "i128::MAX"),
                    _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed),
                }
            } else {
                match size {
                    1 => write!(fmt, "{}", raw as i8)?,
                    2 => write!(fmt, "{}", raw as i16)?,
                    4 => write!(fmt, "{}", raw as i32)?,
                    8 => write!(fmt, "{}", raw as i64)?,
                    16 => write!(fmt, "{}", raw as i128)?,
                    _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed),
                }
                if fmt.alternate() {
                    match (size, is_ptr_sized_integral) {
                        (_, true) => write!(fmt, "_isize")?,
                        (1, _) => write!(fmt, "_i8")?,
                        (2, _) => write!(fmt, "_i16")?,
                        (4, _) => write!(fmt, "_i32")?,
                        (8, _) => write!(fmt, "_i64")?,
                        (16, _) => write!(fmt, "_i128")?,
                        _ => bug!(),
                    }
                }
                Ok(())
            }
        } else {
            let max = Size::from_bytes(size).truncate(u128::MAX);
            if raw == max {
                match (size, is_ptr_sized_integral) {
                    (_, true) => write!(fmt, "usize::MAX"),
                    (1, _) => write!(fmt, "u8::MAX"),
                    (2, _) => write!(fmt, "u16::MAX"),
                    (4, _) => write!(fmt, "u32::MAX"),
                    (8, _) => write!(fmt, "u64::MAX"),
                    (16, _) => write!(fmt, "u128::MAX"),
                    _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed),
                }
            } else {
                match size {
                    1 => write!(fmt, "{}", raw as u8)?,
                    2 => write!(fmt, "{}", raw as u16)?,
                    4 => write!(fmt, "{}", raw as u32)?,
                    8 => write!(fmt, "{}", raw as u64)?,
                    16 => write!(fmt, "{}", raw as u128)?,
                    _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed),
                }
                if fmt.alternate() {
                    match (size, is_ptr_sized_integral) {
                        (_, true) => write!(fmt, "_usize")?,
                        (1, _) => write!(fmt, "_u8")?,
                        (2, _) => write!(fmt, "_u16")?,
                        (4, _) => write!(fmt, "_u32")?,
                        (8, _) => write!(fmt, "_u64")?,
                        (16, _) => write!(fmt, "_u128")?,
                        _ => bug!(),
                    }
                }
                Ok(())
            }
        }
    }
}

/// The raw bytes of a simple value.
///
/// This is a packed struct in order to allow this type to be optimally embedded in enums
/// (like Scalar).
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[repr(packed)]
pub struct ScalarInt {
    /// The first `size` bytes of `data` are the value.
    /// Do not try to read more or fewer bytes than that. The remaining bytes must be 0.
    data: u128,
    size: u8,
}

// Cannot derive these, as the derives take references to the fields, and we
// can't take references to fields of packed structs.
impl<CTX> crate::ty::HashStable<CTX> for ScalarInt {
    fn hash_stable(&self, hcx: &mut CTX, hasher: &mut crate::ty::StableHasher) {
        // Using a block `{self.data}` here to force a copy instead of using `self.data`
        // directly, because `hash_stable` takes `&self` and would thus borrow `self.data`.
        // Since `Self` is a packed struct, that would create a possibly unaligned reference,
        // which is UB.
        { self.data }.hash_stable(hcx, hasher);
        self.size.hash_stable(hcx, hasher);
    }
}

impl<S: Encoder> Encodable<S> for ScalarInt {
    fn encode(&self, s: &mut S) -> Result<(), S::Error> {
        s.emit_u128(self.data)?;
        s.emit_u8(self.size)
    }
}

impl<D: Decoder> Decodable<D> for ScalarInt {
    fn decode(d: &mut D) -> Result<ScalarInt, D::Error> {
        Ok(ScalarInt { data: d.read_u128()?, size: d.read_u8()? })
    }
}

impl ScalarInt {
    pub const TRUE: ScalarInt = ScalarInt { data: 1_u128, size: 1 };

    pub const FALSE: ScalarInt = ScalarInt { data: 0_u128, size: 1 };

    pub const ZST: ScalarInt = ScalarInt { data: 0_u128, size: 0 };

    #[inline]
    pub fn size(self) -> Size {
        Size::from_bytes(self.size)
    }

    /// Make sure the `data` fits in `size`.
    /// This is guaranteed by all constructors here, but this check has saved us from
    /// bugs many times in the past, so keeping it around is definitely worth it.
    #[inline(always)]
    fn check_data(self) {
        // Using a block `{self.data}` here to force a copy instead of using `self.data`
        // directly, because `debug_assert_eq` takes references to its arguments and formatting
        // arguments and would thus borrow `self.data`. Since `Self`
        // is a packed struct, that would create a possibly unaligned reference, which
        // is UB.
        debug_assert_eq!(
            self.size().truncate(self.data),
            { self.data },
            "Scalar value {:#x} exceeds size of {} bytes",
            { self.data },
            self.size
        );
    }

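    /// A value of the given `size` with all data bits set to zero.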
    #[inline]
    pub fn null(size: Size) -> Self {
        Self { data: 0, size: size.bytes() as u8 }
    }

    #[inline]
    pub fn is_null(self) -> bool {
        self.data == 0
    }

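    /// Applies `f_int` to the value, which is asserted (against `dl.pointer_size`) to be
    /// pointer-sized, and re-packs the result at the same size.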
    pub(crate) fn ptr_sized_op<E>(
        self,
        dl: &TargetDataLayout,
        f_int: impl FnOnce(u64) -> Result<u64, E>,
    ) -> Result<Self, E> {
        assert_eq!(u64::from(self.size), dl.pointer_size.bytes());
        Ok(Self::try_from_uint(f_int(u64::try_from(self.data).unwrap())?, self.size()).unwrap())
    }

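    /// Tries to pack `i` into a `ScalarInt` of the given `size`. Returns `None` if the value
    /// does not fit, i.e. if truncating it to `size` bytes would change it
    /// (e.g. 300 does not fit into a single byte).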
    #[inline]
    pub fn try_from_uint(i: impl Into<u128>, size: Size) -> Option<Self> {
        let data = i.into();
        if size.truncate(data) == data {
            Some(Self { data, size: size.bytes() as u8 })
        } else {
            None
        }
    }

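    /// Signed counterpart of `try_from_uint`: stores the two's complement representation
    /// truncated to `size` bytes, and returns `None` if sign-extending that back does not
    /// reproduce `i` (i.e. `i` is out of range for a signed integer of that size).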
    #[inline]
    pub fn try_from_int(i: impl Into<i128>, size: Size) -> Option<Self> {
        let i = i.into();
        // `into` performed sign extension, we have to truncate
        let truncated = size.truncate(i as u128);
        if size.sign_extend(truncated) as i128 == i {
            Some(Self { data: truncated, size: size.bytes() as u8 })
        } else {
            None
        }
    }

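    /// Like `to_bits`, but calls `bug!` instead of returning an `Err` when the sizes mismatch.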
    #[inline]
    pub fn assert_bits(self, target_size: Size) -> u128 {
        self.to_bits(target_size).unwrap_or_else(|size| {
            bug!("expected int of size {}, but got size {}", target_size.bytes(), size.bytes())
        })
    }

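    /// Returns the raw value if `target_size` matches the size of this `ScalarInt`,
    /// and `Err` with the actual size otherwise. `target_size` must not be zero:
    /// a ZST has no bits to look at.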
    #[inline]
    pub fn to_bits(self, target_size: Size) -> Result<u128, Size> {
        assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
        if target_size.bytes() == u64::from(self.size) {
            self.check_data();
            Ok(self.data)
        } else {
            Err(self.size())
        }
    }
}

macro_rules! from {
    ($($ty:ty),*) => {
        $(
            impl From<$ty> for ScalarInt {
                #[inline]
                fn from(u: $ty) -> Self {
                    Self {
                        data: u128::from(u),
                        size: std::mem::size_of::<$ty>() as u8,
                    }
                }
            }
        )*
    }
}

macro_rules! try_from {
    ($($ty:ty),*) => {
        $(
            impl TryFrom<ScalarInt> for $ty {
                type Error = Size;
                #[inline]
                fn try_from(int: ScalarInt) -> Result<Self, Size> {
                    // The `unwrap` cannot fail because to_bits (if it succeeds)
                    // is guaranteed to return a value that fits into the size.
                    int.to_bits(Size::from_bytes(std::mem::size_of::<$ty>()))
                        .map(|u| u.try_into().unwrap())
                }
            }
        )*
    }
}

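// The invocations below generate `From<$ty> for ScalarInt` and `TryFrom<ScalarInt> for $ty`
// impls for each listed type. Illustrative round trip (a sketch of what the generated impls do):
//
//     let x = ScalarInt::from(42u32);                          // data = 42, size = 4
//     assert_eq!(u32::try_from(x), Ok(42));                    // matching size: bits returned
//     assert_eq!(u64::try_from(x), Err(Size::from_bytes(4)));  // size mismatch: actual size returned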
from!(u8, u16, u32, u64, u128, bool);
try_from!(u8, u16, u32, u64, u128);

impl From<char> for ScalarInt {
    #[inline]
    fn from(c: char) -> Self {
        Self { data: c as u128, size: std::mem::size_of::<char>() as u8 }
    }
}

impl TryFrom<ScalarInt> for char {
    type Error = Size;
    #[inline]
    fn try_from(int: ScalarInt) -> Result<Self, Size> {
        int.to_bits(Size::from_bytes(std::mem::size_of::<char>()))
            .map(|u| char::from_u32(u.try_into().unwrap()).unwrap())
    }
}

impl From<Single> for ScalarInt {
    #[inline]
    fn from(f: Single) -> Self {
        // We trust apfloat to give us properly truncated data.
        Self { data: f.to_bits(), size: 4 }
    }
}

impl TryFrom<ScalarInt> for Single {
    type Error = Size;
    #[inline]
    fn try_from(int: ScalarInt) -> Result<Self, Size> {
        int.to_bits(Size::from_bytes(4)).map(Self::from_bits)
    }
}

impl From<Double> for ScalarInt {
    #[inline]
    fn from(f: Double) -> Self {
        // We trust apfloat to give us properly truncated data.
        Self { data: f.to_bits(), size: 8 }
    }
}

impl TryFrom<ScalarInt> for Double {
    type Error = Size;
    #[inline]
    fn try_from(int: ScalarInt) -> Result<Self, Size> {
        int.to_bits(Size::from_bytes(8)).map(Self::from_bits)
    }
}

impl fmt::Debug for ScalarInt {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.size == 0 {
            self.check_data();
            write!(f, "<ZST>")
        } else {
            // Dispatch to LowerHex below.
            write!(f, "0x{:x}", self)
        }
    }
}

impl fmt::LowerHex for ScalarInt {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.check_data();
        // Format as hex number wide enough to fit any value of the given `size`.
        // So data=20, size=1 will be "0x14", but with size=4 it'll be "0x00000014".
        // Using a block `{self.data}` here to force a copy instead of using `self.data`
        // directly, because `write!` takes references to its formatting arguments and
        // would thus borrow `self.data`. Since `Self`
        // is a packed struct, that would create a possibly unaligned reference, which
        // is UB.
        write!(f, "{:01$x}", { self.data }, self.size as usize * 2)
    }
}

impl fmt::UpperHex for ScalarInt {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.check_data();
        // Format as hex number wide enough to fit any value of the given `size`.
        // So data=20, size=1 will be "0x14", but with size=4 it'll be "0x00000014".
        // Using a block `{self.data}` here to force a copy instead of using `self.data`
        // directly, because `write!` takes references to its formatting arguments and
        // would thus borrow `self.data`. Since `Self`
        // is a packed struct, that would create a possibly unaligned reference, which
        // is UB.
        write!(f, "{:01$X}", { self.data }, self.size as usize * 2)
    }
}