]> git.proxmox.com Git - rustc.git/blob - library/alloc/src/collections/btree/node.rs
New upstream version 1.54.0+dfsg1
[rustc.git] / library / alloc / src / collections / btree / node.rs
1 // This is an attempt at an implementation following the ideal
2 //
3 // ```
4 // struct BTreeMap<K, V> {
5 // height: usize,
6 // root: Option<Box<Node<K, V, height>>>
7 // }
8 //
9 // struct Node<K, V, height: usize> {
10 // keys: [K; 2 * B - 1],
11 // vals: [V; 2 * B - 1],
12 // edges: [if height > 0 { Box<Node<K, V, height - 1>> } else { () }; 2 * B],
13 // parent: Option<(NonNull<Node<K, V, height + 1>>, u16)>,
14 // len: u16,
15 // }
16 // ```
17 //
18 // Since Rust doesn't actually have dependent types and polymorphic recursion,
19 // we make do with lots of unsafety.
20
21 // A major goal of this module is to avoid complexity by treating the tree as a generic (if
22 // weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
23 // this module doesn't care whether the entries are sorted, which nodes can be underfull, or
24 // even what underfull means. However, we do rely on a few invariants:
25 //
26 // - Trees must have uniform depth/height. This means that every path down to a leaf from a
27 // given node has exactly the same length.
28 // - A node of length `n` has `n` keys, `n` values, and `n + 1` edges.
29 // This implies that even an empty node has at least one edge.
30 // For a leaf node, "having an edge" only means we can identify a position in the node,
31 // since leaf edges are empty and need no data representation. In an internal node,
32 // an edge both identifies a position and contains a pointer to a child node.
33
34 use core::marker::PhantomData;
35 use core::mem::{self, MaybeUninit};
36 use core::ptr::{self, NonNull};
37 use core::slice::SliceIndex;
38
39 use crate::alloc::{Allocator, Global, Layout};
40 use crate::boxed::Box;
41
// The branching parameter of the B-tree. Each node stores at most
// `2 * B - 1` key-value pairs and, if internal, `2 * B` edges.
const B: usize = 6;
pub const CAPACITY: usize = 2 * B - 1;
// The smallest number of key-value pairs either half may keep after a split.
pub const MIN_LEN_AFTER_SPLIT: usize = B - 1;
// Index of the middle key-value pair of a full node, used when splitting.
const KV_IDX_CENTER: usize = B - 1;
// Edge indices immediately left/right of the center pair.
const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
const EDGE_IDX_RIGHT_OF_CENTER: usize = B;
48
/// The underlying representation of leaf nodes and part of the representation of internal nodes.
struct LeafNode<K, V> {
    /// We want to be covariant in `K` and `V`.
    /// A `None` parent marks the root node of the tree.
    parent: Option<NonNull<InternalNode<K, V>>>,

    /// This node's index into the parent node's `edges` array.
    /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
    /// This is only guaranteed to be initialized when `parent` is non-null.
    parent_idx: MaybeUninit<u16>,

    /// The number of keys and values this node stores.
    len: u16,

    /// The arrays storing the actual data of the node. Only the first `len` elements of each
    /// array are initialized and valid.
    keys: [MaybeUninit<K>; CAPACITY],
    vals: [MaybeUninit<V>; CAPACITY],
}
67
impl<K, V> LeafNode<K, V> {
    /// Initializes a new `LeafNode` in-place.
    ///
    /// # Safety
    /// `this` must point to an allocation valid for writes of a
    /// `LeafNode<K, V>`; its contents may be entirely uninitialized.
    unsafe fn init(this: *mut Self) {
        // As a general policy, we leave fields uninitialized if they can be, as this should
        // be both slightly faster and easier to track in Valgrind.
        unsafe {
            // parent_idx, keys, and vals are all MaybeUninit
            ptr::addr_of_mut!((*this).parent).write(None);
            ptr::addr_of_mut!((*this).len).write(0);
        }
    }

    /// Creates a new boxed `LeafNode`.
    fn new() -> Box<Self> {
        unsafe {
            let mut leaf = Box::new_uninit();
            // SAFETY: `as_mut_ptr` points into the freshly allocated box.
            LeafNode::init(leaf.as_mut_ptr());
            // SAFETY: `init` wrote every field that is not `MaybeUninit`.
            leaf.assume_init()
        }
    }
}
89
/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
/// `InternalNode` can be directly cast to a pointer to the underlying `LeafNode` portion of the
/// node, allowing code to act on leaf and internal nodes generically without having to even check
/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`,
/// which guarantees that `data` is laid out first, at offset zero.
#[repr(C)]
// gdb_providers.py uses this type name for introspection.
struct InternalNode<K, V> {
    data: LeafNode<K, V>,

    /// The pointers to the children of this node. `len + 1` of these are considered
    /// initialized and valid, except that near the end, while the tree is held
    /// through borrow type `Dying`, some of these pointers are dangling.
    edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
}
105
impl<K, V> InternalNode<K, V> {
    /// Creates a new boxed `InternalNode`.
    ///
    /// # Safety
    /// An invariant of internal nodes is that they have at least one
    /// initialized and valid edge. This function does not set up
    /// such an edge; the caller must write `edges[0]` before the node
    /// is used as part of a tree.
    unsafe fn new() -> Box<Self> {
        unsafe {
            let mut node = Box::<Self>::new_uninit();
            // We only need to initialize the data; the edges are MaybeUninit.
            LeafNode::init(ptr::addr_of_mut!((*node.as_mut_ptr()).data));
            // SAFETY: `data` is initialized above and `edges` may legally stay
            // uninitialized because the array elements are `MaybeUninit`.
            node.assume_init()
        }
    }
}
122
/// A managed, non-null pointer to a node. This is either an owned pointer to
/// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>`.
///
/// However, `BoxedNode` contains no information as to which of the two types
/// of nodes it actually contains, and, partially due to this lack of information,
/// is not a separate type and has no destructor.
/// (Casting between the two pointer types is sound thanks to `InternalNode`'s `repr(C)`.)
type BoxedNode<K, V> = NonNull<LeafNode<K, V>>;
130
// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
// However, whenever a public type wraps `NodeRef`, make sure that it has the
// correct variance.
///
/// A reference to a node.
///
/// This type has a number of parameters that control how it acts:
/// - `BorrowType`: A dummy type that describes the kind of borrow and carries a lifetime.
///    - When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`.
///    - When this is `ValMut<'a>`, the `NodeRef` acts roughly like `&'a Node`
///      with respect to keys and tree structure, but also allows many
///      mutable references to values throughout the tree to coexist.
///    - When this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
///      although insert methods allow a mutable pointer to a value to coexist.
///    - When this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`,
///      but does not have a destructor, and must be cleaned up manually.
///    - When this is `Dying`, the `NodeRef` still acts roughly like `Box<Node>`,
///      but has methods to destroy the tree bit by bit, and ordinary methods,
///      while not marked as unsafe to call, can invoke UB if called incorrectly.
///   Since any `NodeRef` allows navigating through the tree, `BorrowType`
///   effectively applies to the entire tree, not just to the node itself.
/// - `K` and `V`: These are the types of keys and values stored in the nodes.
/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
///   `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
///   `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
///   `NodeRef` could be pointing to either type of node.
///   `Type` is named `NodeType` when used outside `NodeRef`.
///
/// Both `BorrowType` and `NodeType` restrict what methods we implement, to
/// exploit static type safety. There are limitations in the way we can apply
/// such restrictions:
/// - For each type parameter, we can only define a method either generically
///   or for one particular type. For example, we cannot define a method like
///   `into_kv` generically for all `BorrowType`, or once for all types that
///   carry a lifetime, because we want it to return `&'a` references.
///   Therefore, we define it only for the least powerful type `Immut<'a>`.
/// - We cannot get implicit coercion from say `Mut<'a>` to `Immut<'a>`.
///   Therefore, we have to explicitly call `reborrow` on a more powerful
///   `NodeRef` in order to reach a method like `into_kv`.
///
/// All methods on `NodeRef` that return some kind of reference, either:
/// - Take `self` by value, and return the lifetime carried by `BorrowType`.
///   Sometimes, to invoke such a method, we need to call `reborrow_mut`.
/// - Take `self` by reference, and (implicitly) return that reference's
///   lifetime, instead of the lifetime carried by `BorrowType`. That way,
///   the borrow checker guarantees that the `NodeRef` remains borrowed as long
///   as the returned reference is used.
///   The methods supporting insert bend this rule by returning a raw pointer,
///   i.e., a reference without any lifetime.
pub struct NodeRef<BorrowType, K, V, Type> {
    /// The number of levels that the node and the level of leaves are apart, a
    /// constant of the node that cannot be entirely described by `Type`, and that
    /// the node itself does not store. We only need to store the height of the root
    /// node, and derive every other node's height from it.
    /// Must be zero if `Type` is `Leaf` and non-zero if `Type` is `Internal`.
    height: usize,
    /// The pointer to the leaf or internal node. The definition of `InternalNode`
    /// ensures that the pointer is valid either way.
    node: NonNull<LeafNode<K, V>>,
    _marker: PhantomData<(BorrowType, Type)>,
}
194
/// The root node of an owned tree.
///
/// Note that this does not have a destructor, and must be cleaned up manually
/// (e.g. via `into_dying` and `deallocate_and_ascend`).
pub type Root<K, V> = NodeRef<marker::Owned, K, V, marker::LeafOrInternal>;
199
// An `Immut` reference grants only shared access, so it can be freely
// duplicated, just like `&Node` itself.
impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
    fn clone(&self) -> Self {
        *self
    }
}
206
// SAFETY: sharing a `NodeRef` between threads exposes at most shared access
// to the keys and values, so `K: Sync` and `V: Sync` suffice, mirroring the
// requirements for sharing `&K`/`&V`.
unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}

// SAFETY: sending an `Immut` reference is like sending `&Node`, hence `Sync`
// bounds; sending `Mut`/`ValMut` is like sending `&mut Node`, and sending
// `Owned`/`Dying` transfers ownership like `Box<Node>`, hence `Send` bounds.
unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send for NodeRef<marker::Immut<'a>, K, V, Type> {}
unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::Mut<'a>, K, V, Type> {}
unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::ValMut<'a>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Dying, K, V, Type> {}
214
impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
    /// Allocates and returns a new, empty, owned leaf node.
    fn new_leaf() -> Self {
        Self::from_new_leaf(LeafNode::new())
    }

    /// Takes ownership of a freshly allocated leaf and wraps it in a `NodeRef`.
    fn from_new_leaf(leaf: Box<LeafNode<K, V>>) -> Self {
        // Leaves are at height 0; `Box::leak` hands the allocation over to
        // the (destructor-less) `NodeRef`.
        NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData }
    }
}
224
impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
    /// Creates a new internal node one level above `child`, with the child as
    /// its single (leftmost) edge.
    fn new_internal(child: Root<K, V>) -> Self {
        let mut new_node = unsafe { InternalNode::new() };
        // Discharge `InternalNode::new`'s safety obligation: give the node
        // its one required valid edge.
        new_node.edges[0].write(child.node);
        unsafe { NodeRef::from_new_internal(new_node, child.height + 1) }
    }

    /// # Safety
    /// `height` must not be zero.
    unsafe fn from_new_internal(internal: Box<InternalNode<K, V>>, height: usize) -> Self {
        debug_assert!(height > 0);
        let node = NonNull::from(Box::leak(internal)).cast();
        let mut this = NodeRef { height, node, _marker: PhantomData };
        // Make every child point back at its new parent.
        this.borrow_mut().correct_all_childrens_parent_links();
        this
    }
}
242
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Unpack a node reference that was packed as `NodeRef::parent`.
    /// The height invariant is only checked in debug builds; callers are
    /// trusted to pass the parent's true height.
    fn from_internal(node: NonNull<InternalNode<K, V>>, height: usize) -> Self {
        debug_assert!(height > 0);
        NodeRef { height, node: node.cast(), _marker: PhantomData }
    }
}
250
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Exposes the data of an internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_internal_ptr(this: &Self) -> *mut InternalNode<K, V> {
        // SAFETY: the static node type is `Internal`, so the allocation really
        // is an `InternalNode` and the cast is layout-correct (`repr(C)`).
        this.node.as_ptr() as *mut InternalNode<K, V>
    }
}
260
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Borrows exclusive access to the data of an internal node.
    fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
        let ptr = Self::as_internal_ptr(self);
        // SAFETY: borrow type `Mut` grants us exclusive access to the whole node.
        unsafe { &mut *ptr }
    }
}
268
impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the length of the node. This is the number of keys or values.
    /// The number of edges is `len() + 1`.
    /// Note that, despite being safe, calling this function can have the side effect
    /// of invalidating mutable references that unsafe code has created.
    pub fn len(&self) -> usize {
        // Crucially, we only access the `len` field here. If BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        // SAFETY: the pointer is valid for reads of the `LeafNode` prefix in
        // every borrow state.
        unsafe { usize::from((*Self::as_leaf_ptr(self)).len) }
    }

    /// Returns the number of levels that the node and leaves are apart. Zero
    /// height means the node is a leaf itself. If you picture trees with the
    /// root on top, the number says at which elevation the node appears.
    /// If you picture trees with leaves on top, the number says how high
    /// the tree extends above the node.
    pub fn height(&self) -> usize {
        self.height
    }

    /// Temporarily takes out another, immutable reference to the same node.
    pub fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Exposes the leaf portion of any leaf or internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_leaf_ptr(this: &Self) -> *mut LeafNode<K, V> {
        // The node must be valid for at least the LeafNode portion.
        // This is not a reference in the NodeRef type because we don't know if
        // it should be unique or shared.
        this.node.as_ptr()
    }
}
304
impl<BorrowType: marker::BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the parent of the current node. Returns `Ok(handle)` if the current
    /// node actually has a parent, where `handle` points to the edge of the parent
    /// that points to the current node. Returns `Err(self)` if the current node has
    /// no parent, giving back the original `NodeRef`.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub fn ascend(
        self,
    ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
        assert!(BorrowType::PERMITS_TRAVERSAL);
        // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        let leaf_ptr: *const _ = Self::as_leaf_ptr(&self);
        unsafe { (*leaf_ptr).parent }
            .as_ref()
            .map(|parent| Handle {
                // The parent is one level higher than this node.
                node: NodeRef::from_internal(*parent, self.height + 1),
                // SAFETY: `parent_idx` is initialized whenever `parent` is set.
                idx: unsafe { usize::from((*leaf_ptr).parent_idx.assume_init()) },
                _marker: PhantomData,
            })
            .ok_or(self)
    }

    pub fn first_edge(self) -> Handle<Self, marker::Edge> {
        // SAFETY: edge index 0 exists in every node, even an empty one.
        unsafe { Handle::new_edge(self, 0) }
    }

    pub fn last_edge(self) -> Handle<Self, marker::Edge> {
        let len = self.len();
        // SAFETY: a node of length `len` has `len + 1` edges, so `len` is valid.
        unsafe { Handle::new_edge(self, len) }
    }

    /// Note that `self` must be nonempty.
    pub fn first_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        // SAFETY: `len > 0` was just asserted, so index 0 holds a KV pair.
        unsafe { Handle::new_kv(self, 0) }
    }

    /// Note that `self` must be nonempty.
    pub fn last_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        // SAFETY: `len > 0` was just asserted, so `len - 1` holds a KV pair.
        unsafe { Handle::new_kv(self, len - 1) }
    }
}
355
356 impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
357 /// Could be a public implementation of PartialEq, but only used in this module.
358 fn eq(&self, other: &Self) -> bool {
359 let Self { node, height, _marker } = self;
360 if node.eq(&other.node) {
361 debug_assert_eq!(*height, other.height);
362 true
363 } else {
364 false
365 }
366 }
367 }
368
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
    /// Exposes the leaf portion of any leaf or internal node in an immutable tree.
    fn into_leaf(self) -> &'a LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&self);
        // SAFETY: there can be no mutable references into this tree borrowed as `Immut`.
        unsafe { &*ptr }
    }

    /// Borrows a view into the keys stored in the node.
    pub fn keys(&self) -> &[K] {
        let leaf = self.into_leaf();
        // SAFETY: by the node invariant, the first `len` keys are initialized,
        // and `len` never exceeds `CAPACITY`, so the range is in bounds.
        unsafe {
            MaybeUninit::slice_assume_init_ref(leaf.keys.get_unchecked(..usize::from(leaf.len)))
        }
    }
}
385
impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
    /// Similar to `ascend`, gets a reference to a node's parent node, but also
    /// deallocates the current node in the process. This is unsafe because the
    /// current node will still be accessible despite being deallocated.
    pub unsafe fn deallocate_and_ascend(
        self,
    ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>> {
        // Capture everything we need before the node's memory is freed.
        let height = self.height;
        let node = self.node;
        let ret = self.ascend().ok();
        unsafe {
            Global.deallocate(
                node.cast(),
                // Internal nodes were allocated with the larger layout that
                // includes the edge array; pick the matching layout to free.
                if height > 0 {
                    Layout::new::<InternalNode<K, V>>()
                } else {
                    Layout::new::<LeafNode<K, V>>()
                },
            );
        }
        ret
    }
}
409
impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Temporarily takes out another mutable reference to the same node. Beware, as
    /// this method is very dangerous, doubly so since it may not immediately appear
    /// dangerous.
    ///
    /// Because mutable pointers can roam anywhere around the tree, the returned
    /// pointer can easily be used to make the original pointer dangling, out of
    /// bounds, or invalid under stacked borrow rules.
    // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef`
    // that restricts the use of navigation methods on reborrowed pointers,
    // preventing this unsafety.
    unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Borrows exclusive access to the leaf portion of a leaf or internal node.
    fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }

    /// Offers exclusive access to the leaf portion of a leaf or internal node,
    /// consuming the `NodeRef` so the reference may live as long as `'a`.
    fn into_leaf_mut(mut self) -> &'a mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&mut self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }
}
439
impl<K, V, Type> NodeRef<marker::Dying, K, V, Type> {
    /// Borrows exclusive access to the leaf portion of a dying leaf or internal node.
    fn as_leaf_dying(&mut self) -> &mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }
}
448
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to an element of the key storage area.
    /// Note that the returned slot(s) may be uninitialized.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn key_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<K>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the key slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().keys.as_mut_slice().get_unchecked_mut(index) }
    }

    /// Borrows exclusive access to an element or slice of the node's value storage area.
    /// Note that the returned slot(s) may be uninitialized.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn val_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<V>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the value slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().vals.as_mut_slice().get_unchecked_mut(index) }
    }
}
478
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Borrows exclusive access to an element or slice of the node's storage area for edge contents.
    /// Note that the returned slot(s) may be uninitialized.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY + 1
    unsafe fn edge_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<BoxedNode<K, V>>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the edge slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_internal_mut().edges.as_mut_slice().get_unchecked_mut(index) }
    }
}
494
impl<'a, K, V, Type> NodeRef<marker::ValMut<'a>, K, V, Type> {
    /// Returns a shared reference to the key and an exclusive reference to the
    /// value at position `idx`, both living as long as `'a`.
    ///
    /// # Safety
    /// - The node has more than `idx` initialized elements.
    unsafe fn into_key_val_mut_at(mut self, idx: usize) -> (&'a K, &'a mut V) {
        // We only create a reference to the one element we are interested in,
        // to avoid aliasing with outstanding references to other elements,
        // in particular, those returned to the caller in earlier iterations.
        let leaf = Self::as_leaf_ptr(&mut self);
        let keys = unsafe { ptr::addr_of!((*leaf).keys) };
        let vals = unsafe { ptr::addr_of_mut!((*leaf).vals) };
        // We must coerce to unsized array pointers because of Rust issue #74679.
        let keys: *const [_] = keys;
        let vals: *mut [_] = vals;
        // SAFETY: the caller promises `idx` is within the initialized prefix.
        let key = unsafe { (&*keys.get_unchecked(idx)).assume_init_ref() };
        let val = unsafe { (&mut *vals.get_unchecked_mut(idx)).assume_init_mut() };
        (key, val)
    }
}
513
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to the length of the node.
    /// Callers must keep `len` consistent with the initialized prefix of
    /// the key/value (and edge) storage areas.
    pub fn len_mut(&mut self) -> &mut u16 {
        &mut self.as_leaf_mut().len
    }
}
520
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Rewrites the parent back-links of the children at the given edge indices.
    ///
    /// # Safety
    /// Every item returned by `range` is a valid edge index for the node.
    unsafe fn correct_childrens_parent_links<R: Iterator<Item = usize>>(&mut self, range: R) {
        for i in range {
            debug_assert!(i <= self.len());
            unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
        }
    }

    /// Rewrites the parent back-links of all `len + 1` children.
    fn correct_all_childrens_parent_links(&mut self) {
        let len = self.len();
        // SAFETY: `0..=len` covers exactly the node's valid edge indices.
        unsafe { self.correct_childrens_parent_links(0..=len) };
    }
}
536
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Sets the node's link to its parent edge,
    /// without invalidating other references to the node.
    fn set_parent_link(&mut self, parent: NonNull<InternalNode<K, V>>, parent_idx: usize) {
        let leaf = Self::as_leaf_ptr(self);
        // Raw-pointer writes keep existing references to the node usable.
        unsafe { (*leaf).parent = Some(parent) };
        unsafe { (*leaf).parent_idx.write(parent_idx as u16) };
    }
}
546
impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
    /// Clears the root's link to its parent edge, marking it as the
    /// topmost node of the tree.
    fn clear_parent_link(&mut self) {
        let mut root_node = self.borrow_mut();
        let leaf = root_node.as_leaf_mut();
        leaf.parent = None;
    }
}
555
impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
    /// Returns a new owned tree, with its own root node that is initially empty.
    pub fn new() -> Self {
        NodeRef::new_leaf().forget_type()
    }

    /// Adds a new internal node with a single edge pointing to the previous root node,
    /// make that new node the root node, and return it. This increases the height by 1
    /// and is the opposite of `pop_internal_level`.
    pub fn push_internal_level(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
        // `take_mut` moves the old root out, wraps it, and stores the wrapper back.
        super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root).forget_type());

        // `self.borrow_mut()`, except that we just forgot we're internal now:
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Removes the internal root node, using its first child as the new root node.
    /// As it is intended only to be called when the root node has only one child,
    /// no cleanup is done on any of the keys, values and other children.
    /// This decreases the height by 1 and is the opposite of `push_internal_level`.
    ///
    /// Requires exclusive access to the `Root` object but not to the root node;
    /// it will not invalidate other handles or references to the root node.
    ///
    /// Panics if there is no internal level, i.e., if the root node is a leaf.
    pub fn pop_internal_level(&mut self) {
        assert!(self.height > 0);

        // Remember the old root's allocation so it can be freed at the end.
        let top = self.node;

        // SAFETY: we asserted to be internal.
        let internal_self = unsafe { self.borrow_mut().cast_to_internal_unchecked() };
        // SAFETY: we borrowed `self` exclusively and its borrow type is exclusive.
        let internal_node = unsafe { &mut *NodeRef::as_internal_ptr(&internal_self) };
        // SAFETY: the first edge is always initialized.
        self.node = unsafe { internal_node.edges[0].assume_init_read() };
        self.height -= 1;
        self.clear_parent_link();

        unsafe {
            Global.deallocate(top.cast(), Layout::new::<InternalNode<K, V>>());
        }
    }
}
600
impl<K, V, Type> NodeRef<marker::Owned, K, V, Type> {
    /// Mutably borrows the owned root node. Unlike `reborrow_mut`, this is safe
    /// because the return value cannot be used to destroy the root, and there
    /// cannot be other references to the tree.
    pub fn borrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Slightly mutably borrows the owned root node: keys and structure stay
    /// shared, only values may be handed out mutably.
    pub fn borrow_valmut(&mut self) -> NodeRef<marker::ValMut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Irreversibly transitions to a reference that permits traversal and offers
    /// destructive methods and little else.
    pub fn into_dying(self) -> NodeRef<marker::Dying, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
620
621 impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
622 /// Adds a key-value pair to the end of the node.
623 pub fn push(&mut self, key: K, val: V) {
624 let len = self.len_mut();
625 let idx = usize::from(*len);
626 assert!(idx < CAPACITY);
627 *len += 1;
628 unsafe {
629 self.key_area_mut(idx).write(key);
630 self.val_area_mut(idx).write(val);
631 }
632 }
633 }
634
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Adds a key-value pair, and an edge to go to the right of that pair,
    /// to the end of the node.
    ///
    /// Panics if the node is full or if `edge` is not exactly one level
    /// below this node.
    pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
        assert!(edge.height == self.height - 1);

        let len = self.len_mut();
        let idx = usize::from(*len);
        assert!(idx < CAPACITY);
        *len += 1;
        // SAFETY: `idx < CAPACITY`, so `idx` is a valid KV slot and `idx + 1`
        // a valid edge slot; the new child then gets its parent link fixed up.
        unsafe {
            self.key_area_mut(idx).write(key);
            self.val_area_mut(idx).write(val);
            self.edge_area_mut(idx + 1).write(edge.node);
            Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
        }
    }
}
653
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Leaf> {
    /// Removes any static information asserting that this node is a `Leaf` node.
    /// This is always sound: `LeafOrInternal` is the less specific type.
    pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
660
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Removes any static information asserting that this node is an `Internal` node.
    /// This is always sound: `LeafOrInternal` is the less specific type.
    pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
667
668 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
669 /// Checks whether a node is an `Internal` node or a `Leaf` node.
670 pub fn force(
671 self,
672 ) -> ForceResult<
673 NodeRef<BorrowType, K, V, marker::Leaf>,
674 NodeRef<BorrowType, K, V, marker::Internal>,
675 > {
676 if self.height == 0 {
677 ForceResult::Leaf(NodeRef {
678 height: self.height,
679 node: self.node,
680 _marker: PhantomData,
681 })
682 } else {
683 ForceResult::Internal(NodeRef {
684 height: self.height,
685 node: self.node,
686 _marker: PhantomData,
687 })
688 }
689 }
690 }
691
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Unsafely asserts to the compiler the static information that this node is a `Leaf`.
    ///
    /// # Safety
    /// The node must actually be a leaf, i.e. have height zero.
    unsafe fn cast_to_leaf_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
        debug_assert!(self.height == 0);
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Unsafely asserts to the compiler the static information that this node is an `Internal`.
    ///
    /// # Safety
    /// The node must actually be internal, i.e. have non-zero height.
    unsafe fn cast_to_internal_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
        debug_assert!(self.height > 0);
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
705
/// A reference to a specific key-value pair or edge within a node. The `Node` parameter
/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key-value
/// pair) or `Edge` (signifying a handle on an edge).
///
/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
/// a child node, these represent the spaces where child pointers would go between the key-value
/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
/// to the left of the node, one between the two pairs, and one at the right of the node.
pub struct Handle<Node, Type> {
    node: Node,
    /// Position within the node: a KV index for `KV` handles,
    /// an edge index for `Edge` handles.
    idx: usize,
    _marker: PhantomData<Type>,
}
719
impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
// `Clone`able is when it is an immutable reference and therefore `Copy`.
impl<Node: Copy, Type> Clone for Handle<Node, Type> {
    fn clone(&self) -> Self {
        *self
    }
}
728
impl<Node, Type> Handle<Node, Type> {
    /// Retrieves the node that contains the edge or key-value pair this handle points to.
    pub fn into_node(self) -> Node {
        self.node
    }

    /// Returns the position of this handle in the node.
    pub fn idx(&self) -> usize {
        self.idx
    }
}
740
impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
    /// Creates a new handle to a key-value pair in `node`.
    /// Unsafe because the caller must ensure that `idx < node.len()`.
    pub unsafe fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
        debug_assert!(idx < node.len());

        Handle { node, idx, _marker: PhantomData }
    }

    pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
        // SAFETY: `idx < len` (KV invariant) implies `idx <= len`, a valid edge index.
        unsafe { Handle::new_edge(self.node, self.idx) }
    }

    pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
        // SAFETY: `idx < len` (KV invariant) implies `idx + 1 <= len`, a valid edge index.
        unsafe { Handle::new_edge(self.node, self.idx + 1) }
    }
}
758
impl<BorrowType, K, V, NodeType, HandleType> PartialEq
    for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
{
    /// Two handles are equal when they point at the same position in the same node.
    fn eq(&self, other: &Self) -> bool {
        // Destructuring (instead of field access) makes this fail to compile if a
        // field is ever added to `Handle`, forcing this impl to be revisited.
        let Self { node, idx, _marker } = self;
        node.eq(&other.node) && *idx == other.idx
    }
}
767
impl<BorrowType, K, V, NodeType, HandleType>
    Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
{
    /// Temporarily takes out another immutable handle on the same location.
    pub fn reborrow(&self) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
        Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData }
    }
}
777
impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
    /// Temporarily takes out another mutable handle on the same location. Beware, as
    /// this method is very dangerous, doubly so since it may not immediately appear
    /// dangerous.
    ///
    /// For details, see `NodeRef::reborrow_mut`.
    pub unsafe fn reborrow_mut(
        &mut self,
    ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
        Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
    }
}
791
792 impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
793 /// Creates a new handle to an edge in `node`.
794 /// Unsafe because the caller must ensure that `idx <= node.len()`.
795 pub unsafe fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
796 debug_assert!(idx <= node.len());
797
798 Handle { node, idx, _marker: PhantomData }
799 }
800
801 pub fn left_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
802 if self.idx > 0 {
803 Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) })
804 } else {
805 Err(self)
806 }
807 }
808
809 pub fn right_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
810 if self.idx < self.node.len() {
811 Ok(unsafe { Handle::new_kv(self.node, self.idx) })
812 } else {
813 Err(self)
814 }
815 }
816 }
817
/// A generic "which side" discriminator, used e.g. to say on which side of a
/// split an insertion should happen, or which sibling a balancing step involves.
pub enum LeftOrRight<T> {
    Left(T),
    Right(T),
}
822
823 /// Given an edge index where we want to insert into a node filled to capacity,
824 /// computes a sensible KV index of a split point and where to perform the insertion.
825 /// The goal of the split point is for its key and value to end up in a parent node;
826 /// the keys, values and edges to the left of the split point become the left child;
827 /// the keys, values and edges to the right of the split point become the right child.
828 fn splitpoint(edge_idx: usize) -> (usize, LeftOrRight<usize>) {
829 debug_assert!(edge_idx <= CAPACITY);
830 // Rust issue #74834 tries to explain these symmetric rules.
831 match edge_idx {
832 0..EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER - 1, LeftOrRight::Left(edge_idx)),
833 EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Left(edge_idx)),
834 EDGE_IDX_RIGHT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Right(0)),
835 _ => (KV_IDX_CENTER + 1, LeftOrRight::Right(edge_idx - (KV_IDX_CENTER + 1 + 1))),
836 }
837 }
838
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method assumes that there is enough space in the node for the new
    /// pair to fit.
    ///
    /// The returned pointer points to the inserted value.
    fn insert_fit(&mut self, key: K, val: V) -> *mut V {
        debug_assert!(self.node.len() < CAPACITY);
        let new_len = self.node.len() + 1;

        unsafe {
            // Shift the initialized prefix right by one at `idx` and drop the new
            // entries in, then commit the new length.
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            *self.node.len_mut() = new_len as u16;

            // The value just written at `idx` is now initialized.
            self.node.val_area_mut(self.idx).assume_init_mut()
        }
    }
}
858
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room.
    ///
    /// The returned pointer points to the inserted value.
    fn insert(mut self, key: K, val: V) -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
        if self.node.len() < CAPACITY {
            // Fast path: the pair fits in this node.
            let val_ptr = self.insert_fit(key, val);
            let kv = unsafe { Handle::new_kv(self.node, self.idx) };
            (InsertResult::Fit(kv), val_ptr)
        } else {
            // Full node: split around a computed middle KV, then insert into
            // whichever half the original edge index ended up in.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split();
            let mut insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    // `right` is an owned root, hence `borrow_mut` rather than reborrow.
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            let val_ptr = insertion_edge.insert_fit(key, val);
            (InsertResult::Split(result), val_ptr)
        }
    }
}
886
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Fixes the parent pointer and index in the child node that this edge
    /// links to. This is useful when the ordering of edges has been changed.
    fn correct_parent_link(self) {
        // Create backpointer without invalidating other references to the node.
        let ptr = unsafe { NonNull::new_unchecked(NodeRef::as_internal_ptr(&self.node)) };
        let idx = self.idx;
        let mut child = self.descend();
        child.set_parent_link(ptr, idx);
    }
}
898
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method assumes
    /// that there is enough space in the node for the new pair to fit.
    fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
        debug_assert!(self.node.len() < CAPACITY);
        // Uniform-depth invariant: the new child must hang one level below us.
        debug_assert!(edge.height == self.node.height - 1);
        let new_len = self.node.len() + 1;

        unsafe {
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            // The new edge goes to the RIGHT of the new KV, hence `idx + 1`,
            // and the edge area is one longer than the KV areas.
            slice_insert(self.node.edge_area_mut(..new_len + 1), self.idx + 1, edge.node);
            *self.node.len_mut() = new_len as u16;

            // All edges from the inserted one rightward shifted; fix their backpointers.
            self.node.correct_childrens_parent_links(self.idx + 1..new_len + 1);
        }
    }

    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method splits
    /// the node if there isn't enough room.
    fn insert(
        mut self,
        key: K,
        val: V,
        edge: Root<K, V>,
    ) -> InsertResult<'a, K, V, marker::Internal> {
        assert!(edge.height == self.node.height - 1);

        if self.node.len() < CAPACITY {
            self.insert_fit(key, val, edge);
            let kv = unsafe { Handle::new_kv(self.node, self.idx) };
            InsertResult::Fit(kv)
        } else {
            // Same split-then-insert dance as the leaf version above.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split();
            let mut insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            insertion_edge.insert_fit(key, val, edge);
            InsertResult::Split(result)
        }
    }
}
950
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room, and tries to
    /// insert the split off portion into the parent node recursively, until the root is reached.
    ///
    /// If the returned result is a `Fit`, its handle's node can be this edge's node or an ancestor.
    /// If the returned result is a `Split`, the `left` field will be the root node.
    /// The returned pointer points to the inserted value.
    pub fn insert_recursing(
        self,
        key: K,
        value: V,
    ) -> (InsertResult<'a, K, V, marker::LeafOrInternal>, *mut V) {
        // First insert at the leaf; if that fits, we are done.
        let (mut split, val_ptr) = match self.insert(key, value) {
            (InsertResult::Fit(handle), ptr) => {
                return (InsertResult::Fit(handle.forget_node_type()), ptr);
            }
            (InsertResult::Split(split), val_ptr) => (split.forget_node_type(), val_ptr),
        };

        // Bubble the split KV + right child up the tree until some ancestor has
        // room, or we fall off the root (in which case the caller grows the tree).
        loop {
            split = match split.left.ascend() {
                Ok(parent) => match parent.insert(split.kv.0, split.kv.1, split.right) {
                    InsertResult::Fit(handle) => {
                        return (InsertResult::Fit(handle.forget_node_type()), val_ptr);
                    }
                    InsertResult::Split(split) => split.forget_node_type(),
                },
                Err(root) => {
                    return (InsertResult::Split(SplitResult { left: root, ..split }), val_ptr);
                }
            };
        }
    }
}
986
impl<BorrowType: marker::BorrowType, K, V>
    Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>
{
    /// Finds the node pointed to by this edge.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        assert!(BorrowType::PERMITS_TRAVERSAL);
        // We need to use raw pointers to nodes because, if BorrowType is
        // marker::ValMut, there might be outstanding mutable references to
        // values that we must not invalidate. There's no worry accessing the
        // height field because that value is copied. Beware that, once the
        // node pointer is dereferenced, we access the edges array with a
        // reference (Rust issue #73987) and invalidate any other references
        // to or inside the array, should any be around.
        let parent_ptr = NodeRef::as_internal_ptr(&self.node);
        let node = unsafe { (*parent_ptr).edges.get_unchecked(self.idx).assume_init_read() };
        // Uniform depth: the child sits exactly one level below this node.
        NodeRef { node, height: self.node.height - 1, _marker: PhantomData }
    }
}
1010
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
    /// Converts this KV handle into shared references to the key and value,
    /// borrowed for the full lifetime of the underlying tree borrow.
    pub fn into_kv(self) -> (&'a K, &'a V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf();
        // SAFETY: the KV handle invariant `idx < len` guarantees both slots
        // are in bounds and initialized.
        let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() };
        let v = unsafe { leaf.vals.get_unchecked(self.idx).assume_init_ref() };
        (k, v)
    }
}
1020
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Borrows the key mutably, for the duration of this handle borrow.
    pub fn key_mut(&mut self) -> &mut K {
        // SAFETY: KV handle invariant `idx < len` means the key slot is initialized.
        unsafe { self.node.key_area_mut(self.idx).assume_init_mut() }
    }

    /// Consumes the handle and returns the value borrowed for the full tree lifetime.
    pub fn into_val_mut(self) -> &'a mut V {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf_mut();
        unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() }
    }
}
1032
impl<'a, K, V, NodeType> Handle<NodeRef<marker::ValMut<'a>, K, V, NodeType>, marker::KV> {
    /// Splits this KV into a shared key reference and a mutable value reference,
    /// the pairing that the `ValMut` borrow type exists to hand out.
    pub fn into_kv_valmut(self) -> (&'a K, &'a mut V) {
        unsafe { self.node.into_key_val_mut_at(self.idx) }
    }
}
1038
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Borrows both the key and the value mutably at the same time.
    pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
        debug_assert!(self.idx < self.node.len());
        // We cannot call separate key and value methods, because calling the second one
        // invalidates the reference returned by the first.
        unsafe {
            let leaf = self.node.as_leaf_mut();
            let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_mut();
            let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_mut();
            (key, val)
        }
    }

    /// Replaces the key and value that the KV handle refers to.
    pub fn replace_kv(&mut self, k: K, v: V) -> (K, V) {
        let (key, val) = self.kv_mut();
        // `mem::replace` swaps without dropping the old pair, which is returned intact.
        (mem::replace(key, k), mem::replace(val, v))
    }
}
1058
impl<K, V, NodeType> Handle<NodeRef<marker::Dying, K, V, NodeType>, marker::KV> {
    /// Extracts the key and value that the KV handle refers to.
    // NOTE(review): this moves the pair out by value and leaves the slots logically
    // uninitialized; only sound during tree teardown (`Dying`), where each slot is
    // read or dropped at most once — presumably enforced by the caller.
    pub fn into_key_val(mut self) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.as_leaf_dying();
        unsafe {
            let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_read();
            let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_read();
            (key, val)
        }
    }

    /// Drops the key and value that the KV handle refers to.
    #[inline]
    pub fn drop_key_val(mut self) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.as_leaf_dying();
        unsafe {
            leaf.keys.get_unchecked_mut(self.idx).assume_init_drop();
            leaf.vals.get_unchecked_mut(self.idx).assume_init_drop();
        }
    }
}
1082
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Helps implementations of `split` for a particular `NodeType`,
    /// by taking care of leaf data.
    ///
    /// Moves the KV at this handle out (returned), moves every KV to its right
    /// into `new_node`, and truncates this node to the KVs on the left.
    fn split_leaf_data(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let old_len = self.node.len();
        // Everything strictly right of `idx` goes to the new node.
        let new_len = old_len - self.idx - 1;
        new_node.len = new_len as u16;
        unsafe {
            // Read out the middle KV that will migrate to the parent.
            let k = self.node.key_area_mut(self.idx).assume_init_read();
            let v = self.node.val_area_mut(self.idx).assume_init_read();

            move_to_slice(
                self.node.key_area_mut(self.idx + 1..old_len),
                &mut new_node.keys[..new_len],
            );
            move_to_slice(
                self.node.val_area_mut(self.idx + 1..old_len),
                &mut new_node.vals[..new_len],
            );

            // Truncate: slots at `idx..old_len` are now logically uninitialized.
            *self.node.len_mut() = self.idx as u16;
            (k, v)
        }
    }
}
1109
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the key-value pairs to the left of
    ///   this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the key-value pairs to the right of this handle are put into a newly
    ///   allocated node.
    pub fn split(mut self) -> SplitResult<'a, K, V, marker::Leaf> {
        let mut new_node = LeafNode::new();

        let kv = self.split_leaf_data(&mut new_node);

        let right = NodeRef::from_new_leaf(new_node);
        SplitResult { left: self.node, kv, right }
    }

    /// Removes the key-value pair pointed to by this handle and returns it, along with the edge
    /// that the key-value pair collapsed into.
    pub fn remove(
        mut self,
    ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
        let old_len = self.node.len();
        unsafe {
            // Shift the suffix left over the removed slot and shrink the length.
            let k = slice_remove(self.node.key_area_mut(..old_len), self.idx);
            let v = slice_remove(self.node.val_area_mut(..old_len), self.idx);
            *self.node.len_mut() = (old_len - 1) as u16;
            // The edge at `idx` (formerly this KV's left edge) now marks the gap.
            ((k, v), self.left_edge())
        }
    }
}
1141
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the edges and key-value pairs to the
    ///   left of this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the edges and key-value pairs to the right of this handle are put into
    ///   a newly allocated node.
    pub fn split(mut self) -> SplitResult<'a, K, V, marker::Internal> {
        let old_len = self.node.len();
        unsafe {
            let mut new_node = InternalNode::new();
            // Reuse the leaf-data splitter for keys/values, then move the edges:
            // a node with `new_len` KVs owns `new_len + 1` edges.
            let kv = self.split_leaf_data(&mut new_node.data);
            let new_len = usize::from(new_node.data.len);
            move_to_slice(
                self.node.edge_area_mut(self.idx + 1..old_len + 1),
                &mut new_node.edges[..new_len + 1],
            );

            // The split sibling sits at the same height as this node.
            let height = self.node.height;
            let right = NodeRef::from_new_internal(new_node, height);

            SplitResult { left: self.node, kv, right }
        }
    }
}
1168
/// Represents a session for evaluating and performing a balancing operation
/// around an internal key-value pair.
pub struct BalancingContext<'a, K, V> {
    // The KV in the parent around which balancing happens.
    parent: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV>,
    // Child hanging off the edge to the left of `parent`'s KV.
    left_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    // Child hanging off the edge to the right of `parent`'s KV.
    right_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
}
1176
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Opens a balancing session around this parent KV, descending into both
    /// adjacent children.
    pub fn consider_for_balancing(self) -> BalancingContext<'a, K, V> {
        // `ptr::read` duplicates this non-Copy mutable handle so we can keep the
        // parent handle AND descend into both children. Sound here because the
        // three resulting references are used on disjoint nodes.
        let self1 = unsafe { ptr::read(&self) };
        let self2 = unsafe { ptr::read(&self) };
        BalancingContext {
            parent: self,
            left_child: self1.left_edge().descend(),
            right_child: self2.right_edge().descend(),
        }
    }
}
1188
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Chooses a balancing context involving the node as a child, thus between
    /// the KV immediately to the left or to the right in the parent node.
    /// Returns an `Err` if there is no parent.
    /// Panics if the parent is empty.
    ///
    /// Prefers the left side, to be optimal if the given node is somehow
    /// underfull, meaning here only that it has fewer elements than its left
    /// sibling and than its right sibling, if they exist. In that case,
    /// merging with the left sibling is faster, since we only need to move
    /// the node's N elements, instead of shifting them to the right and moving
    /// more than N elements in front. Stealing from the left sibling is also
    /// typically faster, since we only need to shift the node's N elements to
    /// the right, instead of shifting at least N of the sibling's elements to
    /// the left.
    pub fn choose_parent_kv(self) -> Result<LeftOrRight<BalancingContext<'a, K, V>>, Self> {
        // `ptr::read` duplicates non-Copy mutable references below; sound because
        // each duplicate ends up addressing a distinct node (parent vs. children).
        match unsafe { ptr::read(&self) }.ascend() {
            Ok(parent_edge) => match parent_edge.left_kv() {
                Ok(left_parent_kv) => Ok(LeftOrRight::Left(BalancingContext {
                    parent: unsafe { ptr::read(&left_parent_kv) },
                    left_child: left_parent_kv.left_edge().descend(),
                    right_child: self,
                })),
                Err(parent_edge) => match parent_edge.right_kv() {
                    Ok(right_parent_kv) => Ok(LeftOrRight::Right(BalancingContext {
                        parent: unsafe { ptr::read(&right_parent_kv) },
                        left_child: self,
                        right_child: right_parent_kv.right_edge().descend(),
                    })),
                    // A parent is internal and non-empty, so one of the two KV
                    // lookups must have succeeded.
                    Err(_) => unreachable!("empty internal node"),
                },
            },
            Err(root) => Err(root),
        }
    }
}
1225
impl<'a, K, V> BalancingContext<'a, K, V> {
    /// Returns the length of the child left of the parent KV.
    pub fn left_child_len(&self) -> usize {
        self.left_child.len()
    }

    /// Returns the length of the child right of the parent KV.
    pub fn right_child_len(&self) -> usize {
        self.right_child.len()
    }

    /// Consumes the context, keeping only the left child.
    pub fn into_left_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.left_child
    }

    /// Consumes the context, keeping only the right child.
    pub fn into_right_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.right_child
    }

    /// Returns whether merging is possible, i.e., whether there is enough room
    /// in a node to combine the central KV with both adjacent child nodes.
    pub fn can_merge(&self) -> bool {
        // The merged node holds left's KVs + the parent KV + right's KVs.
        self.left_child.len() + 1 + self.right_child.len() <= CAPACITY
    }
}
1249
impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
    /// Performs a merge and lets a closure decide what to return.
    ///
    /// Moves the parent KV plus the entire right child into the left child,
    /// removes the right child's edge from the parent, and frees the right
    /// child's allocation. The closure receives the shrunk parent and the
    /// grown left child and picks which one to hand back.
    fn do_merge<
        F: FnOnce(
            NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
            NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
        ) -> R,
        R,
    >(
        self,
        result: F,
    ) -> R {
        let Handle { node: mut parent_node, idx: parent_idx, _marker } = self.parent;
        let old_parent_len = parent_node.len();
        let mut left_node = self.left_child;
        let old_left_len = left_node.len();
        let mut right_node = self.right_child;
        let right_len = right_node.len();
        let new_left_len = old_left_len + 1 + right_len;

        // This is `can_merge()`; callers must have checked it.
        assert!(new_left_len <= CAPACITY);

        unsafe {
            *left_node.len_mut() = new_left_len as u16;

            // Parent key drops into the gap, then the right child's keys follow.
            let parent_key = slice_remove(parent_node.key_area_mut(..old_parent_len), parent_idx);
            left_node.key_area_mut(old_left_len).write(parent_key);
            move_to_slice(
                right_node.key_area_mut(..right_len),
                left_node.key_area_mut(old_left_len + 1..new_left_len),
            );

            // Same for the values.
            let parent_val = slice_remove(parent_node.val_area_mut(..old_parent_len), parent_idx);
            left_node.val_area_mut(old_left_len).write(parent_val);
            move_to_slice(
                right_node.val_area_mut(..right_len),
                left_node.val_area_mut(old_left_len + 1..new_left_len),
            );

            // Unlink the right child's edge from the parent and fix the
            // backpointers of the edges that shifted left.
            slice_remove(&mut parent_node.edge_area_mut(..old_parent_len + 1), parent_idx + 1);
            parent_node.correct_childrens_parent_links(parent_idx + 1..old_parent_len);
            *parent_node.len_mut() -= 1;

            if parent_node.height > 1 {
                // SAFETY: the height of the nodes being merged is one below the height
                // of the node of this edge, thus above zero, so they are internal.
                let mut left_node = left_node.reborrow_mut().cast_to_internal_unchecked();
                let mut right_node = right_node.cast_to_internal_unchecked();
                move_to_slice(
                    right_node.edge_area_mut(..right_len + 1),
                    left_node.edge_area_mut(old_left_len + 1..new_left_len + 1),
                );

                left_node.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);

                Global.deallocate(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
            } else {
                Global.deallocate(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
            }
        }
        result(parent_node, left_node)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns the shrunk parent node.
    ///
    /// Panics unless we `.can_merge()`.
    pub fn merge_tracking_parent(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
        self.do_merge(|parent, _child| parent)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns that child node.
    ///
    /// Panics unless we `.can_merge()`.
    pub fn merge_tracking_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.do_merge(|_parent, child| child)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns the edge handle in that child node
    /// where the tracked child edge ended up,
    ///
    /// Panics unless we `.can_merge()`.
    pub fn merge_tracking_child_edge(
        self,
        track_edge_idx: LeftOrRight<usize>,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        let old_left_len = self.left_child.len();
        let right_len = self.right_child.len();
        assert!(match track_edge_idx {
            LeftOrRight::Left(idx) => idx <= old_left_len,
            LeftOrRight::Right(idx) => idx <= right_len,
        });
        let child = self.merge_tracking_child();
        // Left-side edges keep their index; right-side edges shift past the
        // old left contents plus the merged-in parent KV.
        let new_idx = match track_edge_idx {
            LeftOrRight::Left(idx) => idx,
            LeftOrRight::Right(idx) => old_left_len + 1 + idx,
        };
        unsafe { Handle::new_edge(child, new_idx) }
    }

    /// Removes a key-value pair from the left child and places it in the key-value storage
    /// of the parent, while pushing the old parent key-value pair into the right child.
    /// Returns a handle to the edge in the right child corresponding to where the original
    /// edge specified by `track_right_edge_idx` ended up.
    pub fn steal_left(
        mut self,
        track_right_edge_idx: usize,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        self.bulk_steal_left(1);
        // The single stolen element shifted everything in the right child by one.
        unsafe { Handle::new_edge(self.right_child, 1 + track_right_edge_idx) }
    }

    /// Removes a key-value pair from the right child and places it in the key-value storage
    /// of the parent, while pushing the old parent key-value pair onto the left child.
    /// Returns a handle to the edge in the left child specified by `track_left_edge_idx`,
    /// which didn't move.
    pub fn steal_right(
        mut self,
        track_left_edge_idx: usize,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        self.bulk_steal_right(1);
        unsafe { Handle::new_edge(self.left_child, track_left_edge_idx) }
    }

    /// This does stealing similar to `steal_left` but steals multiple elements at once.
    pub fn bulk_steal_left(&mut self, count: usize) {
        assert!(count > 0);
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_right_len + count <= CAPACITY);
            assert!(old_left_len >= count);

            let new_left_len = old_left_len - count;
            let new_right_len = old_right_len + count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Make room for stolen elements in the right child.
                slice_shr(right_node.key_area_mut(..new_right_len), count);
                slice_shr(right_node.val_area_mut(..new_right_len), count);

                // Move elements from the left child to the right one.
                // (`count - 1` of them; the left-most stolen KV goes via the parent.)
                move_to_slice(
                    left_node.key_area_mut(new_left_len + 1..old_left_len),
                    right_node.key_area_mut(..count - 1),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len + 1..old_left_len),
                    right_node.val_area_mut(..count - 1),
                );

                // Move the left-most stolen pair to the parent.
                let k = left_node.key_area_mut(new_left_len).assume_init_read();
                let v = left_node.val_area_mut(new_left_len).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the right child.
                right_node.key_area_mut(count - 1).write(k);
                right_node.val_area_mut(count - 1).write(v);
            }

            // Internal nodes also carry edges; leaves have none to move.
            match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Make room for stolen edges.
                    slice_shr(right.edge_area_mut(..new_right_len + 1), count);

                    // Steal edges.
                    move_to_slice(
                        left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                        right.edge_area_mut(..count),
                    );

                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                // Uniform-depth invariant: siblings are always the same kind.
                _ => unreachable!(),
            }
        }
    }

    /// The symmetric clone of `bulk_steal_left`.
    pub fn bulk_steal_right(&mut self, count: usize) {
        assert!(count > 0);
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_left_len + count <= CAPACITY);
            assert!(old_right_len >= count);

            let new_left_len = old_left_len + count;
            let new_right_len = old_right_len - count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Move the right-most stolen pair to the parent.
                let k = right_node.key_area_mut(count - 1).assume_init_read();
                let v = right_node.val_area_mut(count - 1).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the left child.
                left_node.key_area_mut(old_left_len).write(k);
                left_node.val_area_mut(old_left_len).write(v);

                // Move elements from the right child to the left one.
                move_to_slice(
                    right_node.key_area_mut(..count - 1),
                    left_node.key_area_mut(old_left_len + 1..new_left_len),
                );
                move_to_slice(
                    right_node.val_area_mut(..count - 1),
                    left_node.val_area_mut(old_left_len + 1..new_left_len),
                );

                // Fill gap where stolen elements used to be.
                slice_shl(right_node.key_area_mut(..old_right_len), count);
                slice_shl(right_node.val_area_mut(..old_right_len), count);
            }

            match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Steal edges.
                    move_to_slice(
                        right.edge_area_mut(..count),
                        left.edge_area_mut(old_left_len + 1..new_left_len + 1),
                    );

                    // Fill gap where stolen edges used to be.
                    slice_shl(right.edge_area_mut(..old_right_len + 1), count);

                    left.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);
                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                // Uniform-depth invariant: siblings are always the same kind.
                _ => unreachable!(),
            }
        }
    }
}
1503
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
    /// Erases the static `Leaf` node type from this edge handle's type.
    pub fn forget_node_type(
        self,
    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
        unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
    }
}
1511
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
    /// Erases the static `Internal` node type from this edge handle's type.
    pub fn forget_node_type(
        self,
    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
        unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
    }
}
1519
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
    /// Erases the static `Leaf` node type from this KV handle's type.
    pub fn forget_node_type(
        self,
    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
        unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
    }
}
1527
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV> {
    /// Erases the static `Internal` node type from this KV handle's type.
    pub fn forget_node_type(
        self,
    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
        unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
    }
}
1535
impl<BorrowType, K, V, Type> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, Type> {
    /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
    pub fn force(
        self,
    ) -> ForceResult<
        Handle<NodeRef<BorrowType, K, V, marker::Leaf>, Type>,
        Handle<NodeRef<BorrowType, K, V, marker::Internal>, Type>,
    > {
        // Only the node reference changes type; the index and handle kind carry over.
        match self.node.force() {
            ForceResult::Leaf(node) => {
                ForceResult::Leaf(Handle { node, idx: self.idx, _marker: PhantomData })
            }
            ForceResult::Internal(node) => {
                ForceResult::Internal(Handle { node, idx: self.idx, _marker: PhantomData })
            }
        }
    }
}
1554
impl<'a, K, V, Type> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, Type> {
    /// Unsafely asserts to the compiler the static information that the handle's node is a `Leaf`.
    pub unsafe fn cast_to_leaf_unchecked(
        self,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, Type> {
        let node = unsafe { self.node.cast_to_leaf_unchecked() };
        Handle { node, idx: self.idx, _marker: PhantomData }
    }
}
1564
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
    /// Move the suffix after `self` from one node to another one. `right` must be empty.
    /// The first edge of `right` remains unchanged.
    pub fn move_suffix(
        &mut self,
        right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    ) {
        unsafe {
            // Everything strictly before this edge stays in the left node.
            let new_left_len = self.idx;
            let mut left_node = self.reborrow_mut().into_node();
            let old_left_len = left_node.len();

            // Everything from this edge onward moves to the right node.
            let new_right_len = old_left_len - new_left_len;
            let mut right_node = right.reborrow_mut();

            // `right` must be empty, and both nodes must sit at the same
            // height so that moved edges keep pointing at same-depth subtrees.
            assert!(right_node.len() == 0);
            assert!(left_node.height == right_node.height);

            if new_right_len > 0 {
                // Update both lengths first; the moves below rely on the
                // slices returned by `*_area_mut` covering the raw capacity.
                *left_node.len_mut() = new_left_len as u16;
                *right_node.len_mut() = new_right_len as u16;

                // Move the trailing keys and values into the front of `right`.
                move_to_slice(
                    left_node.key_area_mut(new_left_len..old_left_len),
                    right_node.key_area_mut(..new_right_len),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len..old_left_len),
                    right_node.val_area_mut(..new_right_len),
                );
                match (left_node.force(), right_node.force()) {
                    (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                        // For internal nodes, also move the child edges after
                        // this edge; edge 0 of `right` stays untouched, per
                        // the documented contract above.
                        move_to_slice(
                            left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                            right.edge_area_mut(1..new_right_len + 1),
                        );
                        // Moved children still point at `left` as parent;
                        // rewire them to `right`.
                        right.correct_childrens_parent_links(1..new_right_len + 1);
                    }
                    (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                    // Equal heights imply the nodes are either both leaves
                    // or both internal.
                    _ => unreachable!(),
                }
            }
        }
    }
}
1610
/// The outcome of resolving a node of unknown static type (`LeafOrInternal`)
/// into its concrete kind: either the leaf case or the internal case.
pub enum ForceResult<Leaf, Internal> {
    Leaf(Leaf),
    Internal(Internal),
}
1615
/// Result of insertion, when a node needed to expand beyond its capacity.
pub struct SplitResult<'a, K, V, NodeType> {
    /// Altered node in existing tree with elements and edges that belong to the left of `kv`.
    pub left: NodeRef<marker::Mut<'a>, K, V, NodeType>,
    /// Some key and value split off, to be inserted elsewhere.
    pub kv: (K, V),
    /// Owned, unattached, new node with elements and edges that belong to the right of `kv`.
    pub right: NodeRef<marker::Owned, K, V, NodeType>,
}
1625
1626 impl<'a, K, V> SplitResult<'a, K, V, marker::Leaf> {
1627 pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
1628 SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
1629 }
1630 }
1631
1632 impl<'a, K, V> SplitResult<'a, K, V, marker::Internal> {
1633 pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
1634 SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
1635 }
1636 }
1637
/// The outcome of inserting into a node: either the key-value pair fit
/// (yielding a handle to where it landed), or the node had to be split.
pub enum InsertResult<'a, K, V, NodeType> {
    Fit(Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV>),
    Split(SplitResult<'a, K, V, NodeType>),
}
1642
pub mod marker {
    use core::marker::PhantomData;

    // Type-level tags describing what kind of node a `NodeRef` points to.
    pub enum Leaf {}
    pub enum Internal {}
    pub enum LeafOrInternal {}

    // Type-level tags describing how a `NodeRef` borrows its node.
    pub enum Owned {}
    pub enum Dying {}
    pub struct Immut<'a>(PhantomData<&'a ()>);
    pub struct Mut<'a>(PhantomData<&'a mut ()>);
    pub struct ValMut<'a>(PhantomData<&'a mut ()>);

    pub trait BorrowType {
        // Whether node references of this borrow type allow traversing
        // to other nodes in the tree.
        const PERMITS_TRAVERSAL: bool = true;
    }
    impl BorrowType for Owned {
        // Traversal isn't needed, it happens using the result of `borrow_mut`.
        // By disabling traversal, and only creating new references to roots,
        // we know that every reference of the `Owned` type is to a root node.
        const PERMITS_TRAVERSAL: bool = false;
    }
    impl BorrowType for Dying {}
    impl<'a> BorrowType for Immut<'a> {}
    impl<'a> BorrowType for Mut<'a> {}
    impl<'a> BorrowType for ValMut<'a> {}

    // Type-level tags describing what a `Handle` points at within a node.
    pub enum KV {}
    pub enum Edge {}
}
1675
/// Inserts a value into a slice of initialized elements followed by one uninitialized element.
///
/// # Safety
/// The slice has more than `idx` elements.
unsafe fn slice_insert<T>(slice: &mut [MaybeUninit<T>], idx: usize, val: T) {
    unsafe {
        let len = slice.len();
        debug_assert!(len > idx);
        let base = slice.as_mut_ptr();
        // Number of initialized elements sitting at or after `idx` that have
        // to make room for the new value.
        let tail = len - idx - 1;
        if tail > 0 {
            // Shift the tail one slot to the right; ranges may overlap, so a
            // memmove-style copy is required.
            ptr::copy(base.add(idx), base.add(idx + 1), tail);
        }
        // Slot `idx` is now logically vacant; write the new value into it.
        (*base.add(idx)).write(val);
    }
}
1691
/// Removes and returns a value from a slice of all initialized elements, leaving behind one
/// trailing uninitialized element.
///
/// # Safety
/// The slice has more than `idx` elements.
unsafe fn slice_remove<T>(slice: &mut [MaybeUninit<T>], idx: usize) -> T {
    unsafe {
        let len = slice.len();
        debug_assert!(idx < len);
        let base = slice.as_mut_ptr();
        // Read the value out first; its slot is then treated as vacant.
        let removed = base.add(idx).read().assume_init();
        // Close the gap by shifting the tail one slot to the left. The last
        // slot is left logically uninitialized. Ranges may overlap, so a
        // memmove-style copy is required.
        let tail = len - idx - 1;
        ptr::copy(base.add(idx + 1), base.add(idx), tail);
        removed
    }
}
1707
/// Shifts the elements in a slice `distance` positions to the left.
///
/// # Safety
/// The slice has at least `distance` elements.
unsafe fn slice_shl<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    // How many elements survive the shift (the caller guarantees
    // `distance <= slice.len()`, so this cannot underflow in valid use).
    let count = slice.len() - distance;
    let base = slice.as_mut_ptr();
    // SAFETY: both the source range `[distance, len)` and the destination
    // range `[0, count)` lie within the slice; `ptr::copy` handles overlap.
    unsafe {
        ptr::copy(base.add(distance), base, count);
    }
}
1718
/// Shifts the elements in a slice `distance` positions to the right.
///
/// # Safety
/// The slice has at least `distance` elements.
unsafe fn slice_shr<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    // How many elements survive the shift (the caller guarantees
    /// `distance <= slice.len()`, so this cannot underflow in valid use).
    let count = slice.len() - distance;
    let base = slice.as_mut_ptr();
    // SAFETY: both the source range `[0, count)` and the destination range
    // `[distance, len)` lie within the slice; `ptr::copy` handles overlap.
    unsafe {
        ptr::copy(base, base.add(distance), count);
    }
}
1729
/// Moves all values from a slice of initialized elements to a slice
/// of uninitialized elements, leaving behind `src` as all uninitialized.
/// Works like `dst.copy_from_slice(src)` but does not require `T` to be `Copy`.
fn move_to_slice<T>(src: &mut [MaybeUninit<T>], dst: &mut [MaybeUninit<T>]) {
    assert!(src.len() == dst.len());
    // SAFETY: the two `&mut` borrows guarantee the slices do not overlap,
    // and the assert above guarantees `dst` has room for every element.
    unsafe {
        ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), src.len());
    }
}
1739
1740 #[cfg(test)]
1741 mod tests;