// vendor/kstring/src/string.rs
use std::{borrow::Cow, fmt};

use crate::stack::StackString;
use crate::KStringCowBase;
use crate::KStringRef;

pub(crate) type StdString = std::string::String;

/// A UTF-8 encoded, immutable string.
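///
/// # Example
///
/// A minimal usage sketch; the doctest assumes the crate's upstream name, `kstring`:
///
/// ```
/// let s = kstring::KString::from_ref("example");
/// assert_eq!(s, "example");
/// ```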
pub type KString = KStringBase<crate::backend::DefaultStr>;

/// A UTF-8 encoded, immutable string.
#[derive(Clone)]
#[repr(transparent)]
pub struct KStringBase<B> {
    inner: KStringInner<B>,
}

impl<B> KStringBase<B> {
    pub const EMPTY: Self = KStringBase::from_static("");

    /// Create a new empty `KStringBase`.
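    ///
    /// # Example
    ///
    /// A small sketch (assumes the crate name `kstring`):
    ///
    /// ```
    /// let s = kstring::KString::new();
    /// assert_eq!(s, "");
    /// ```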
    #[inline]
    #[must_use]
    pub fn new() -> Self {
        Self::EMPTY
    }

    /// Create a reference to `'static` data.
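    ///
    /// # Example
    ///
    /// A sketch; the `'static` borrow is stored without copying:
    ///
    /// ```
    /// let s = kstring::KString::from_static("hello");
    /// assert_eq!(s, "hello");
    /// ```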
    #[inline]
    #[must_use]
    pub const fn from_static(other: &'static str) -> Self {
        Self {
            inner: KStringInner::from_static(other),
        }
    }

    /// Create an inline string, if possible.
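    ///
    /// # Example
    ///
    /// A sketch; the inline capacity is platform- and feature-dependent, but a
    /// few bytes always fit on common targets:
    ///
    /// ```
    /// assert!(kstring::KString::try_inline("abc").is_some());
    /// ```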
    #[inline]
    #[must_use]
    pub fn try_inline(other: &str) -> Option<Self> {
        KStringInner::try_inline(other).map(|inner| Self { inner })
    }
}

impl<B: crate::backend::HeapStr> KStringBase<B> {
    /// Create an owned `KStringBase`.
    #[inline]
    #[must_use]
    pub fn from_boxed(other: crate::backend::BoxedStr) -> Self {
        Self {
            inner: KStringInner::from_boxed(other),
        }
    }

    /// Create an owned `KStringBase`.
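    ///
    /// # Example
    ///
    /// A sketch; short inputs are copied inline and the allocation dropped,
    /// longer ones move into the backend:
    ///
    /// ```
    /// let s = kstring::KString::from_string(String::from("hello"));
    /// assert_eq!(s, "hello");
    /// ```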
    #[inline]
    #[must_use]
    pub fn from_string(other: StdString) -> Self {
        Self {
            inner: KStringInner::from_string(other),
        }
    }

    /// Create an owned `KStringBase` optimally from a reference.
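    ///
    /// # Example
    ///
    /// A sketch; "optimally" here means short inputs are inlined rather than
    /// heap-allocated:
    ///
    /// ```
    /// let s = kstring::KString::from_ref("example");
    /// assert_eq!(s.as_str(), "example");
    /// ```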
    #[inline]
    #[must_use]
    pub fn from_ref(other: &str) -> Self {
        Self {
            inner: KStringInner::from_ref(other),
        }
    }

    /// Get a reference to the `KStringBase`.
    #[inline]
    #[must_use]
    pub fn as_ref(&self) -> KStringRef<'_> {
        self.inner.as_ref()
    }

    /// Extracts a string slice containing the entire `KStringBase`.
    #[inline]
    #[must_use]
    pub fn as_str(&self) -> &str {
        self.inner.as_str()
    }

    /// Convert to a mutable string type, cloning the data if necessary.
    #[inline]
    #[must_use]
    pub fn into_string(self) -> StdString {
        String::from(self.into_boxed_str())
    }

    /// Convert to an owned boxed string slice, cloning the data if necessary.
    #[inline]
    #[must_use]
    pub fn into_boxed_str(self) -> crate::backend::BoxedStr {
        self.inner.into_boxed_str()
    }

    /// Convert to a `Cow<'static, str>`.
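    ///
    /// # Example
    ///
    /// A sketch; `'static` singletons come back borrowed, everything else owned:
    ///
    /// ```
    /// use std::borrow::Cow;
    /// let s = kstring::KString::from_static("static");
    /// assert!(matches!(s.into_cow_str(), Cow::Borrowed(_)));
    /// ```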
    #[inline]
    #[must_use]
    pub fn into_cow_str(self) -> Cow<'static, str> {
        self.inner.into_cow_str()
    }
}

impl<B: crate::backend::HeapStr> std::ops::Deref for KStringBase<B> {
    type Target = str;

    #[inline]
    fn deref(&self) -> &str {
        self.as_str()
    }
}

impl<B: crate::backend::HeapStr> Eq for KStringBase<B> {}

impl<B: crate::backend::HeapStr> PartialEq<KStringBase<B>> for KStringBase<B> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        PartialEq::eq(self.as_str(), other.as_str())
    }
}

impl<B: crate::backend::HeapStr> PartialEq<str> for KStringBase<B> {
    #[inline]
    fn eq(&self, other: &str) -> bool {
        PartialEq::eq(self.as_str(), other)
    }
}

impl<'s, B: crate::backend::HeapStr> PartialEq<&'s str> for KStringBase<B> {
    #[inline]
    fn eq(&self, other: &&str) -> bool {
        PartialEq::eq(self.as_str(), *other)
    }
}

impl<B: crate::backend::HeapStr> PartialEq<StdString> for KStringBase<B> {
    #[inline]
    fn eq(&self, other: &StdString) -> bool {
        PartialEq::eq(self.as_str(), other.as_str())
    }
}

impl<B: crate::backend::HeapStr> Ord for KStringBase<B> {
    #[inline]
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.as_str().cmp(other.as_str())
    }
}

impl<B: crate::backend::HeapStr> PartialOrd for KStringBase<B> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        self.as_str().partial_cmp(other.as_str())
    }
}

impl<B: crate::backend::HeapStr> std::hash::Hash for KStringBase<B> {
    #[inline]
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.as_str().hash(state);
    }
}

impl<B: crate::backend::HeapStr> fmt::Debug for KStringBase<B> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.as_str().fmt(f)
    }
}

impl<B: crate::backend::HeapStr> fmt::Display for KStringBase<B> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self.as_str(), f)
    }
}

impl<B: crate::backend::HeapStr> AsRef<str> for KStringBase<B> {
    #[inline]
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}

impl<B: crate::backend::HeapStr> AsRef<[u8]> for KStringBase<B> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_bytes()
    }
}

impl<B: crate::backend::HeapStr> AsRef<std::ffi::OsStr> for KStringBase<B> {
    #[inline]
    fn as_ref(&self) -> &std::ffi::OsStr {
        (&**self).as_ref()
    }
}

impl<B: crate::backend::HeapStr> AsRef<std::path::Path> for KStringBase<B> {
    #[inline]
    fn as_ref(&self) -> &std::path::Path {
        std::path::Path::new(self)
    }
}

impl<B: crate::backend::HeapStr> std::borrow::Borrow<str> for KStringBase<B> {
    #[inline]
    fn borrow(&self) -> &str {
        self.as_str()
    }
}

impl<B: crate::backend::HeapStr> Default for KStringBase<B> {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl<'s, B: crate::backend::HeapStr> From<KStringRef<'s>> for KStringBase<B> {
    #[inline]
    fn from(other: KStringRef<'s>) -> Self {
        other.to_owned()
    }
}

impl<'s, B: crate::backend::HeapStr> From<&'s KStringRef<'s>> for KStringBase<B> {
    #[inline]
    fn from(other: &'s KStringRef<'s>) -> Self {
        other.to_owned()
    }
}

impl<'s, B: crate::backend::HeapStr> From<KStringCowBase<'s, B>> for KStringBase<B> {
    #[inline]
    fn from(other: KStringCowBase<'s, B>) -> Self {
        other.into_owned()
    }
}

impl<'s, B: crate::backend::HeapStr> From<&'s KStringCowBase<'s, B>> for KStringBase<B> {
    #[inline]
    fn from(other: &'s KStringCowBase<'s, B>) -> Self {
        other.clone().into_owned()
    }
}

impl<B: crate::backend::HeapStr> From<StdString> for KStringBase<B> {
    #[inline]
    fn from(other: StdString) -> Self {
        Self::from_string(other)
    }
}

impl<'s, B: crate::backend::HeapStr> From<&'s StdString> for KStringBase<B> {
    #[inline]
    fn from(other: &'s StdString) -> Self {
        Self::from_ref(other)
    }
}

impl<B: crate::backend::HeapStr> From<crate::backend::BoxedStr> for KStringBase<B> {
    #[inline]
    fn from(other: crate::backend::BoxedStr) -> Self {
        Self::from_boxed(other)
    }
}

impl<'s, B: crate::backend::HeapStr> From<&'s crate::backend::BoxedStr> for KStringBase<B> {
    #[inline]
    fn from(other: &'s crate::backend::BoxedStr) -> Self {
        Self::from_ref(other)
    }
}

impl<B: crate::backend::HeapStr> From<&'static str> for KStringBase<B> {
    #[inline]
    fn from(other: &'static str) -> Self {
        Self::from_static(other)
    }
}

impl<B: crate::backend::HeapStr> std::str::FromStr for KStringBase<B> {
    type Err = std::convert::Infallible;
    #[inline]
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(Self::from_ref(s))
    }
}

#[cfg(feature = "serde")]
impl<B: crate::backend::HeapStr> serde::Serialize for KStringBase<B> {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(self.as_str())
    }
}

#[cfg(feature = "serde")]
impl<'de, B: crate::backend::HeapStr> serde::Deserialize<'de> for KStringBase<B> {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserializer.deserialize_string(StringVisitor(std::marker::PhantomData))
    }
}

#[cfg(feature = "serde")]
struct StringVisitor<B>(std::marker::PhantomData<B>);

#[cfg(feature = "serde")]
impl<'de, B: crate::backend::HeapStr> serde::de::Visitor<'de> for StringVisitor<B> {
    type Value = KStringBase<B>;

    fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        formatter.write_str("a string")
    }

    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(Self::Value::from_ref(v))
    }

    fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(Self::Value::from_string(v))
    }

    fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        match std::str::from_utf8(v) {
            Ok(s) => Ok(Self::Value::from_ref(s)),
            Err(_) => Err(serde::de::Error::invalid_value(
                serde::de::Unexpected::Bytes(v),
                &self,
            )),
        }
    }

    fn visit_byte_buf<E>(self, v: Vec<u8>) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        match String::from_utf8(v) {
            Ok(s) => Ok(Self::Value::from_string(s)),
            Err(e) => Err(serde::de::Error::invalid_value(
                serde::de::Unexpected::Bytes(&e.into_bytes()),
                &self,
            )),
        }
    }
}

use inner::KStringInner;

#[cfg(not(feature = "unsafe"))]
mod inner {
    use super::*;

    pub(super) enum KStringInner<B> {
        Singleton(&'static str),
        Inline(StackString<CAPACITY>),
        Owned(B),
    }

    impl<B> KStringInner<B> {
        /// Create a reference to `'static` data.
        #[inline]
        pub const fn from_static(other: &'static str) -> Self {
            Self::Singleton(other)
        }

        #[inline]
        pub fn try_inline(other: &str) -> Option<Self> {
            StackString::try_new(other).map(Self::Inline)
        }
    }

    impl<B: crate::backend::HeapStr> KStringInner<B> {
        #[inline]
        pub(super) fn from_boxed(other: crate::backend::BoxedStr) -> Self {
            #[allow(clippy::useless_conversion)]
            Self::Owned(B::from_boxed_str(other))
        }

        #[inline]
        pub(super) fn from_string(other: StdString) -> Self {
            if (0..=CAPACITY).contains(&other.len()) {
                let inline = StackString::new(other.as_str());
                Self::Inline(inline)
            } else {
                Self::from_boxed(other.into_boxed_str())
            }
        }

        #[inline]
        pub(super) fn from_ref(other: &str) -> Self {
            if (0..=CAPACITY).contains(&other.len()) {
                let inline = StackString::new(other);
                Self::Inline(inline)
            } else {
                Self::Owned(B::from_str(other))
            }
        }

        #[inline]
        pub(super) fn as_ref(&self) -> KStringRef<'_> {
            match self {
                Self::Singleton(s) => KStringRef::from_static(s),
                Self::Inline(s) => KStringRef::from_ref(s.as_str()),
                Self::Owned(s) => KStringRef::from_ref(s.as_str()),
            }
        }

        #[inline]
        pub(super) fn as_str(&self) -> &str {
            match self {
                Self::Singleton(s) => s,
                Self::Inline(s) => s.as_str(),
                Self::Owned(s) => s.as_str(),
            }
        }

        #[inline]
        pub(super) fn into_boxed_str(self) -> crate::backend::BoxedStr {
            match self {
                Self::Singleton(s) => crate::backend::BoxedStr::from(s),
                Self::Inline(s) => crate::backend::BoxedStr::from(s.as_str()),
                Self::Owned(s) => crate::backend::BoxedStr::from(s.as_str()),
            }
        }

        /// Convert to a `Cow<'static, str>`.
        #[inline]
        pub(super) fn into_cow_str(self) -> Cow<'static, str> {
            match self {
                Self::Singleton(s) => Cow::Borrowed(s),
                Self::Inline(s) => Cow::Owned(s.as_str().into()),
                Self::Owned(s) => Cow::Owned(s.as_str().into()),
            }
        }
    }

    // Explicit rather than derived to avoid inlining, which cuts clone times in half.
    //
    // An automatically derived `clone()` has 10ns overhead while the explicit `Deref`/`as_str` has
    // none of that. Being explicit and removing the `#[inline]` attribute dropped the overhead to
    // 5ns.
    //
    // My only guess is that the `clone()` calls we delegate to are just that much bigger than
    // `as_str()` such that, when combined with a jump table, they blow the icache, slowing things
    // down.
    impl<B: Clone> Clone for KStringInner<B> {
        fn clone(&self) -> Self {
            match self {
                Self::Singleton(s) => Self::Singleton(s),
                Self::Inline(s) => Self::Inline(*s),
                Self::Owned(s) => Self::Owned(s.clone()),
            }
        }
    }

    #[allow(unused)]
    const LEN_SIZE: usize = std::mem::size_of::<crate::stack::Len>();

    #[allow(unused)]
    const TAG_SIZE: usize = std::mem::size_of::<u8>();

    #[allow(unused)]
    const MAX_CAPACITY: usize =
        std::mem::size_of::<crate::string::StdString>() - TAG_SIZE - LEN_SIZE;

    // Performance seems to slow down when trying to occupy all of the padding left by `String`'s
    // discriminant. The question is whether faster len=1-16 "allocations" outweigh going to the
    // heap for len=17-22.
    #[allow(unused)]
    const ALIGNED_CAPACITY: usize = std::mem::size_of::<crate::backend::DefaultStr>() - LEN_SIZE;

    #[cfg(feature = "max_inline")]
    const CAPACITY: usize = MAX_CAPACITY;
    #[cfg(not(feature = "max_inline"))]
    const CAPACITY: usize = ALIGNED_CAPACITY;
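
    // Compile-time sanity-check sketch: the chosen capacity must fit beside the
    // tag and length bytes. Assumes the `static_assertions` dependency (used by
    // the `unsafe` backend below) is available unconditionally.
    static_assertions::const_assert!(CAPACITY <= MAX_CAPACITY);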
}

#[cfg(feature = "unsafe")]
mod inner {
    use super::*;

    pub(super) union KStringInner<B> {
        tag: TagVariant,
        singleton: SingletonVariant,
        owned: std::mem::ManuallyDrop<OwnedVariant<B>>,
        inline: InlineVariant,
    }

    impl<B> KStringInner<B> {
        /// Create a reference to `'static` data.
        #[inline]
        pub const fn from_static(other: &'static str) -> Self {
            Self {
                singleton: SingletonVariant::new(other),
            }
        }

        #[inline]
        pub fn try_inline(other: &str) -> Option<Self> {
            StackString::try_new(other).map(|inline| Self {
                inline: InlineVariant::new(inline),
            })
        }

        #[inline]
        const fn tag(&self) -> Tag {
            unsafe {
                // SAFETY: `tag` is in the same spot in each variant
                self.tag.tag
            }
        }
    }

    impl<B: crate::backend::HeapStr> KStringInner<B> {
        #[inline]
        pub(super) fn from_boxed(other: crate::backend::BoxedStr) -> Self {
            #[allow(clippy::useless_conversion)]
            let payload = B::from_boxed_str(other);
            Self {
                owned: std::mem::ManuallyDrop::new(OwnedVariant::new(payload)),
            }
        }

        #[inline]
        pub(super) fn from_string(other: StdString) -> Self {
            if (0..=CAPACITY).contains(&other.len()) {
                let payload = unsafe {
                    // SAFETY: range check ensured this is always safe
                    StackString::new_unchecked(other.as_str())
                };
                Self {
                    inline: InlineVariant::new(payload),
                }
            } else {
                Self::from_boxed(other.into_boxed_str())
            }
        }

        #[inline]
        pub(super) fn from_ref(other: &str) -> Self {
            if (0..=CAPACITY).contains(&other.len()) {
                let payload = unsafe {
                    // SAFETY: range check ensured this is always safe
                    StackString::new_unchecked(other)
                };
                Self {
                    inline: InlineVariant::new(payload),
                }
            } else {
                #[allow(clippy::useless_conversion)]
                let payload = B::from_str(other);
                Self {
                    owned: std::mem::ManuallyDrop::new(OwnedVariant::new(payload)),
                }
            }
        }

        #[inline]
        pub(super) fn as_ref(&self) -> KStringRef<'_> {
            let tag = self.tag();
            unsafe {
                // SAFETY: `tag` ensures access to correct variant
                if tag.is_singleton() {
                    KStringRef::from_static(self.singleton.payload)
                } else if tag.is_owned() {
                    KStringRef::from_ref(self.owned.payload.as_str())
                } else {
                    debug_assert!(tag.is_inline());
                    KStringRef::from_ref(self.inline.payload.as_str())
                }
            }
        }

        #[inline]
        pub(super) fn as_str(&self) -> &str {
            let tag = self.tag();
            unsafe {
                // SAFETY: `tag` ensures access to correct variant
                if tag.is_singleton() {
                    self.singleton.payload
                } else if tag.is_owned() {
                    self.owned.payload.as_str()
                } else {
                    debug_assert!(tag.is_inline());
                    self.inline.payload.as_str()
                }
            }
        }

        #[inline]
        pub(super) fn into_boxed_str(self) -> crate::backend::BoxedStr {
            let tag = self.tag();
            unsafe {
                // SAFETY: `tag` ensures access to correct variant
                if tag.is_singleton() {
                    crate::backend::BoxedStr::from(self.singleton.payload)
                } else if tag.is_owned() {
                    crate::backend::BoxedStr::from(self.owned.payload.as_str())
                } else {
                    debug_assert!(tag.is_inline());
                    crate::backend::BoxedStr::from(self.inline.payload.as_ref())
                }
            }
        }

        /// Convert to a `Cow<'static, str>`.
        #[inline]
        pub(super) fn into_cow_str(self) -> Cow<'static, str> {
            let tag = self.tag();
            unsafe {
                // SAFETY: `tag` ensures access to correct variant
                if tag.is_singleton() {
                    Cow::Borrowed(self.singleton.payload)
                } else if tag.is_owned() {
                    Cow::Owned(self.owned.payload.as_str().into())
                } else {
                    debug_assert!(tag.is_inline());
                    Cow::Owned(self.inline.payload.as_str().into())
                }
            }
        }
    }

    // Explicit rather than derived to avoid inlining, which cuts clone times in half.
    //
    // An automatically derived `clone()` has 10ns overhead while the explicit `Deref`/`as_str` has
    // none of that. Being explicit and removing the `#[inline]` attribute dropped the overhead to
    // 5ns.
    //
    // My only guess is that the `clone()` calls we delegate to are just that much bigger than
    // `as_str()` such that, when combined with a jump table, they blow the icache, slowing things
    // down.
    impl<B: Clone> Clone for KStringInner<B> {
        fn clone(&self) -> Self {
            let tag = self.tag();
            if tag.is_owned() {
                unsafe {
                    // SAFETY: `tag` ensures access to correct variant
                    Self {
                        owned: std::mem::ManuallyDrop::new(OwnedVariant::new(
                            self.owned.payload.clone(),
                        )),
                    }
                }
            } else {
                unsafe {
                    // SAFETY: `tag` ensures access to correct variant
                    // SAFETY: non-owned types are copyable
                    std::mem::transmute_copy(self)
                }
            }
        }
    }

    impl<B> Drop for KStringInner<B> {
        fn drop(&mut self) {
            let tag = self.tag();
            if tag.is_owned() {
                unsafe {
                    // SAFETY: `tag` ensures we are using the right variant
                    std::mem::ManuallyDrop::drop(&mut self.owned)
                }
            }
        }
    }

    #[allow(unused)]
    const LEN_SIZE: usize = std::mem::size_of::<crate::stack::Len>();

    #[allow(unused)]
    const TAG_SIZE: usize = std::mem::size_of::<Tag>();

    #[allow(unused)]
    const PAYLOAD_SIZE: usize = std::mem::size_of::<crate::backend::DefaultStr>();
    type Payload = Padding<PAYLOAD_SIZE>;

    #[allow(unused)]
    const TARGET_SIZE: usize = std::mem::size_of::<Target>();
    type Target = crate::string::StdString;

    #[allow(unused)]
    const MAX_CAPACITY: usize = TARGET_SIZE - LEN_SIZE - TAG_SIZE;

    // Performance seems to slow down when trying to occupy all of the padding left by `String`'s
    // discriminant. The question is whether faster len=1-16 "allocations" outweigh going to the
    // heap for len=17-22.
    #[allow(unused)]
    const ALIGNED_CAPACITY: usize = PAYLOAD_SIZE - LEN_SIZE;

    #[cfg(feature = "max_inline")]
    const CAPACITY: usize = MAX_CAPACITY;
    #[cfg(not(feature = "max_inline"))]
    const CAPACITY: usize = ALIGNED_CAPACITY;

    const PAYLOAD_PAD_SIZE: usize = TARGET_SIZE - PAYLOAD_SIZE - TAG_SIZE;
    const INLINE_PAD_SIZE: usize = TARGET_SIZE - CAPACITY - LEN_SIZE - TAG_SIZE;

    #[derive(Copy, Clone)]
    #[repr(C)]
    struct TagVariant {
        payload: Payload,
        pad: Padding<PAYLOAD_PAD_SIZE>,
        tag: Tag,
    }
    static_assertions::assert_eq_size!(Target, TagVariant);

    #[derive(Copy, Clone)]
    #[repr(C)]
    struct SingletonVariant {
        payload: &'static str,
        pad: Padding<PAYLOAD_PAD_SIZE>,
        tag: Tag,
    }
    static_assertions::assert_eq_size!(Payload, &'static str);
    static_assertions::assert_eq_size!(Target, SingletonVariant);

    impl SingletonVariant {
        #[inline]
        const fn new(payload: &'static str) -> Self {
            Self {
                payload,
                pad: Padding::new(),
                tag: Tag::SINGLETON,
            }
        }
    }

    impl std::fmt::Debug for SingletonVariant {
        #[inline]
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            self.payload.fmt(f)
        }
    }

    #[derive(Clone)]
    #[repr(C)]
    struct OwnedVariant<B> {
        payload: B,
        pad: Padding<PAYLOAD_PAD_SIZE>,
        tag: Tag,
    }
    static_assertions::assert_eq_size!(Payload, crate::backend::DefaultStr);
    static_assertions::assert_eq_size!(Target, OwnedVariant<crate::backend::DefaultStr>);

    impl<B> OwnedVariant<B> {
        #[inline]
        const fn new(payload: B) -> Self {
            Self {
                payload,
                pad: Padding::new(),
                tag: Tag::OWNED,
            }
        }
    }

    impl<B: crate::backend::HeapStr> std::fmt::Debug for OwnedVariant<B> {
        #[inline]
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            self.payload.fmt(f)
        }
    }

    #[derive(Copy, Clone)]
    #[repr(C)]
    struct InlineVariant {
        payload: StackString<CAPACITY>,
        pad: Padding<INLINE_PAD_SIZE>,
        tag: Tag,
    }
    static_assertions::assert_eq_size!(Target, InlineVariant);

    impl InlineVariant {
        #[inline]
        const fn new(payload: StackString<CAPACITY>) -> Self {
            Self {
                payload,
                pad: Padding::new(),
                tag: Tag::INLINE,
            }
        }
    }

    impl std::fmt::Debug for InlineVariant {
        #[inline]
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            self.payload.fmt(f)
        }
    }

    #[derive(Copy, Clone, PartialEq, Eq)]
    #[repr(transparent)]
    struct Tag(u8);

    impl Tag {
        const SINGLETON: Tag = Tag(0);
        const OWNED: Tag = Tag(u8::MAX);
        const INLINE: Tag = Tag(1);

        #[inline]
        const fn is_singleton(self) -> bool {
            self.0 == Self::SINGLETON.0
        }

        #[inline]
        const fn is_owned(self) -> bool {
            self.0 == Self::OWNED.0
        }

        #[inline]
        const fn is_inline(self) -> bool {
            !self.is_singleton() && !self.is_owned()
        }
    }

    #[derive(Copy, Clone)]
    #[repr(transparent)]
    struct Padding<const L: usize>([std::mem::MaybeUninit<u8>; L]);

    impl<const L: usize> Padding<L> {
        const fn new() -> Self {
            let padding = unsafe {
                // SAFETY: Padding, never actually used
                std::mem::MaybeUninit::uninit().assume_init()
            };
            Self(padding)
        }
    }

    impl<const L: usize> Default for Padding<L> {
        fn default() -> Self {
            Self::new()
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_size() {
        println!("KString: {}", std::mem::size_of::<KString>());
    }
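
    // A few behavior sketches exercising the public API defined above; the test
    // names and probe strings are illustrative, not from upstream.
    #[test]
    fn test_roundtrip_and_eq() {
        let s = KString::from_ref("example");
        assert_eq!(s, "example");
        assert_eq!(s.clone().into_string(), String::from("example"));
        assert_eq!("example".parse::<KString>().unwrap(), s);
    }

    #[test]
    fn test_static_stays_borrowed() {
        let s = KString::from_static("static");
        assert!(matches!(s.into_cow_str(), Cow::Borrowed(_)));
    }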
}