]> git.proxmox.com Git - rustc.git/blob - library/alloc/src/collections/btree/node.rs
New upstream version 1.52.0~beta.3+dfsg1
[rustc.git] / library / alloc / src / collections / btree / node.rs
1 // This is an attempt at an implementation following the ideal
2 //
3 // ```
4 // struct BTreeMap<K, V> {
5 // height: usize,
6 // root: Option<Box<Node<K, V, height>>>
7 // }
8 //
9 // struct Node<K, V, height: usize> {
10 // keys: [K; 2 * B - 1],
11 // vals: [V; 2 * B - 1],
12 // edges: [if height > 0 { Box<Node<K, V, height - 1>> } else { () }; 2 * B],
13 // parent: Option<(NonNull<Node<K, V, height + 1>>, u16)>,
14 // len: u16,
15 // }
16 // ```
17 //
18 // Since Rust doesn't actually have dependent types and polymorphic recursion,
19 // we make do with lots of unsafety.
20
21 // A major goal of this module is to avoid complexity by treating the tree as a generic (if
22 // weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
23 // this module doesn't care whether the entries are sorted, which nodes can be underfull, or
24 // even what underfull means. However, we do rely on a few invariants:
25 //
26 // - Trees must have uniform depth/height. This means that every path down to a leaf from a
27 // given node has exactly the same length.
28 // - A node of length `n` has `n` keys, `n` values, and `n + 1` edges.
29 // This implies that even an empty node has at least one edge.
30 // For a leaf node, "having an edge" only means we can identify a position in the node,
31 // since leaf edges are empty and need no data representation. In an internal node,
32 // an edge both identifies a position and contains a pointer to a child node.
33
34 use core::marker::PhantomData;
35 use core::mem::{self, MaybeUninit};
36 use core::ptr::{self, NonNull};
37 use core::slice::SliceIndex;
38
39 use crate::alloc::{Allocator, Global, Layout};
40 use crate::boxed::Box;
41
42 const B: usize = 6;
43 pub const CAPACITY: usize = 2 * B - 1;
44 pub const MIN_LEN_AFTER_SPLIT: usize = B - 1;
45 const KV_IDX_CENTER: usize = B - 1;
46 const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
47 const EDGE_IDX_RIGHT_OF_CENTER: usize = B;
48
49 /// The underlying representation of leaf nodes and part of the representation of internal nodes.
/// The underlying representation of leaf nodes and part of the representation of internal nodes.
struct LeafNode<K, V> {
    /// We want to be covariant in `K` and `V`.
    /// (`NonNull` rather than `*mut` keeps the covariance; `Option` makes the root's
    /// lack of a parent representable at no extra cost.)
    parent: Option<NonNull<InternalNode<K, V>>>,

    /// This node's index into the parent node's `edges` array.
    /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
    /// This is only guaranteed to be initialized when `parent` is non-null.
    parent_idx: MaybeUninit<u16>,

    /// The number of keys and values this node stores.
    len: u16,

    /// The arrays storing the actual data of the node. Only the first `len` elements of each
    /// array are initialized and valid.
    keys: [MaybeUninit<K>; CAPACITY],
    vals: [MaybeUninit<V>; CAPACITY],
}
67
impl<K, V> LeafNode<K, V> {
    /// Initializes a new `LeafNode` in-place.
    ///
    /// # Safety
    /// `this` must point to a writable allocation large enough for a `LeafNode<K, V>`;
    /// its contents may be entirely uninitialized.
    unsafe fn init(this: *mut Self) {
        // As a general policy, we leave fields uninitialized if they can be, as this should
        // be both slightly faster and easier to track in Valgrind.
        unsafe {
            // parent_idx, keys, and vals are all MaybeUninit
            // Use `addr_of_mut!` so no intermediate reference to the (partially
            // uninitialized) node is ever created.
            ptr::addr_of_mut!((*this).parent).write(None);
            ptr::addr_of_mut!((*this).len).write(0);
        }
    }

    /// Creates a new boxed `LeafNode`.
    fn new() -> Box<Self> {
        unsafe {
            let mut leaf = Box::new_uninit();
            LeafNode::init(leaf.as_mut_ptr());
            // SAFETY: `init` wrote every field that is not `MaybeUninit`.
            leaf.assume_init()
        }
    }
}
89
/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
/// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the
/// node, allowing code to act on leaf and internal nodes generically without having to even check
/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`,
/// which guarantees that `data` is laid out first.
#[repr(C)]
// gdb_providers.py uses this type name for introspection.
struct InternalNode<K, V> {
    data: LeafNode<K, V>,

    /// The pointers to the children of this node. `len + 1` of these are considered
    /// initialized and valid, except that near the end, while the tree is held
    /// through borrow type `Dying`, some of these pointers are dangling.
    edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
}
105
impl<K, V> InternalNode<K, V> {
    /// Creates a new boxed `InternalNode`.
    ///
    /// # Safety
    /// An invariant of internal nodes is that they have at least one
    /// initialized and valid edge. This function does not set up
    /// such an edge; the caller must write `edges[0]` before the node
    /// is used as an internal node.
    unsafe fn new() -> Box<Self> {
        unsafe {
            let mut node = Box::<Self>::new_uninit();
            // We only need to initialize the data; the edges are MaybeUninit.
            LeafNode::init(ptr::addr_of_mut!((*node.as_mut_ptr()).data));
            node.assume_init()
        }
    }
}
122
/// A managed, non-null pointer to a node. This is either an owned pointer to
/// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>`.
/// (Both can be stored as a `LeafNode` pointer because `InternalNode` is
/// `repr(C)` with its `LeafNode` portion first.)
///
/// However, `BoxedNode` contains no information as to which of the two types
/// of nodes it actually contains, and, partially due to this lack of information,
/// is not a separate type and has no destructor.
type BoxedNode<K, V> = NonNull<LeafNode<K, V>>;
130
/// The root node of an owned tree, of statically unknown node type.
///
/// Note that this does not have a destructor, and must be cleaned up manually.
pub type Root<K, V> = NodeRef<marker::Owned, K, V, marker::LeafOrInternal>;
135
impl<K, V> Root<K, V> {
    /// Returns a new owned tree, with its own root node that is initially empty.
    pub fn new() -> Self {
        // A fresh tree is a single empty leaf node at height 0.
        NodeRef::new_leaf().forget_type()
    }
}
142
impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
    /// Allocates a fresh, empty leaf node and takes ownership of it.
    fn new_leaf() -> Self {
        Self::from_new_leaf(LeafNode::new())
    }

    /// Takes over a freshly allocated leaf, leaking the box into a manually
    /// managed pointer. A leaf always sits at height 0.
    fn from_new_leaf(leaf: Box<LeafNode<K, V>>) -> Self {
        NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData }
    }
}
152
impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
    /// Allocates a new internal node one level above `child`, with `child`
    /// installed as its single (leftmost) edge.
    fn new_internal(child: Root<K, V>) -> Self {
        let mut new_node = unsafe { InternalNode::new() };
        // Establish the internal-node invariant (at least one valid edge)
        // before the node is handed out.
        new_node.edges[0].write(child.node);
        unsafe { NodeRef::from_new_internal(new_node, child.height + 1) }
    }

    /// # Safety
    /// `height` must not be zero.
    unsafe fn from_new_internal(internal: Box<InternalNode<K, V>>, height: usize) -> Self {
        debug_assert!(height > 0);
        let node = NonNull::from(Box::leak(internal)).cast();
        let mut this = NodeRef { height, node, _marker: PhantomData };
        // Make the children's parent links point at the freshly created node.
        this.borrow_mut().correct_all_childrens_parent_links();
        this
    }
}
170
impl<K, V, Type> NodeRef<marker::Owned, K, V, Type> {
    /// Mutably borrows the owned root node. Unlike `reborrow_mut`, this is safe
    /// because the return value cannot be used to destroy the root, and there
    /// cannot be other references to the tree.
    pub fn borrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Slightly mutably borrows the owned root node: the resulting `ValMut`
    /// borrow permits mutating values but not keys or tree structure.
    pub fn borrow_valmut(&mut self) -> NodeRef<marker::ValMut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Irreversibly transitions to a reference that permits traversal and offers
    /// destructive methods and little else.
    pub fn into_dying(self) -> NodeRef<marker::Dying, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
190
impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
    /// Adds a new internal node with a single edge pointing to the previous root node,
    /// make that new node the root node, and return it. This increases the height by 1
    /// and is the opposite of `pop_internal_level`.
    pub fn push_internal_level(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
        // `take_mut` lets us consume the old root by value while `self` stays borrowed.
        super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root).forget_type());

        // `self.borrow_mut()`, except that we just forgot we're internal now:
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Removes the internal root node, using its first child as the new root node.
    /// As it is intended only to be called when the root node has only one child,
    /// no cleanup is done on any of the keys, values and other children.
    /// This decreases the height by 1 and is the opposite of `push_internal_level`.
    ///
    /// Requires exclusive access to the `Root` object but not to the root node;
    /// it will not invalidate other handles or references to the root node.
    ///
    /// Panics if there is no internal level, i.e., if the root node is a leaf.
    pub fn pop_internal_level(&mut self) {
        assert!(self.height > 0);

        // Remember the old root so it can be freed after its child is stolen.
        let top = self.node;

        // SAFETY: we asserted to be internal.
        let internal_self = unsafe { self.borrow_mut().cast_to_internal_unchecked() };
        // SAFETY: we borrowed `self` exclusively and its borrow type is exclusive.
        let internal_node = unsafe { &mut *NodeRef::as_internal_ptr(&internal_self) };
        // SAFETY: the first edge is always initialized.
        self.node = unsafe { internal_node.edges[0].assume_init_read() };
        self.height -= 1;
        // The new root no longer has a parent.
        self.clear_parent_link();

        unsafe {
            // Free only the old root's allocation; its contents were moved out above.
            Global.deallocate(top.cast(), Layout::new::<InternalNode<K, V>>());
        }
    }
}
230
// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
// However, whenever a public type wraps `NodeRef`, make sure that it has the
// correct variance.
///
/// A reference to a node.
///
/// This type has a number of parameters that control how it acts:
/// - `BorrowType`: A dummy type that describes the kind of borrow and carries a lifetime.
///    - When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`.
///    - When this is `ValMut<'a>`, the `NodeRef` acts roughly like `&'a Node`
///      with respect to keys and tree structure, but also allows many
///      mutable references to values throughout the tree to coexist.
///    - When this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
///      although insert methods allow a mutable pointer to a value to coexist.
///    - When this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`,
///      but does not have a destructor, and must be cleaned up manually.
///    - When this is `Dying`, the `NodeRef` still acts roughly like `Box<Node>`,
///      but has methods to destroy the tree bit by bit, and ordinary methods,
///      while not marked as unsafe to call, can invoke UB if called incorrectly.
///   Since any `NodeRef` allows navigating through the tree, `BorrowType`
///   effectively applies to the entire tree, not just to the node itself.
/// - `K` and `V`: These are the types of keys and values stored in the nodes.
/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
///   `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
///   `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
///   `NodeRef` could be pointing to either type of node.
///   `Type` is named `NodeType` when used outside `NodeRef`.
///
/// Both `BorrowType` and `NodeType` restrict what methods we implement, to
/// exploit static type safety. There are limitations in the way we can apply
/// such restrictions:
/// - For each type parameter, we can only define a method either generically
///   or for one particular type. For example, we cannot define a method like
///   `into_kv` generically for all `BorrowType`, or once for all types that
///   carry a lifetime, because we want it to return `&'a` references.
///   Therefore, we define it only for the least powerful type `Immut<'a>`.
/// - We cannot get implicit coercion from say `Mut<'a>` to `Immut<'a>`.
///   Therefore, we have to explicitly call `reborrow` on a more powerful
///   `NodeRef` in order to reach a method like `into_kv`.
///
/// All methods on `NodeRef` that return some kind of reference, either:
/// - Take `self` by value, and return the lifetime carried by `BorrowType`.
///   Sometimes, to invoke such a method, we need to call `reborrow_mut`.
/// - Take `self` by reference, and (implicitly) return that reference's
///   lifetime, instead of the lifetime carried by `BorrowType`. That way,
///   the borrow checker guarantees that the `NodeRef` remains borrowed as long
///   as the returned reference is used.
///   The methods supporting insert bend this rule by returning a raw pointer,
///   i.e., a reference without any lifetime.
pub struct NodeRef<BorrowType, K, V, Type> {
    /// The number of levels that the node and the level of leaves are apart, a
    /// constant of the node that cannot be entirely described by `Type`, and that
    /// the node itself does not store. We only need to store the height of the root
    /// node, and derive every other node's height from it.
    /// Must be zero if `Type` is `Leaf` and non-zero if `Type` is `Internal`.
    height: usize,
    /// The pointer to the leaf or internal node. The definition of `InternalNode`
    /// ensures that the pointer is valid either way.
    node: NonNull<LeafNode<K, V>>,
    /// Binds `BorrowType` and `Type` to the reference without storing any data.
    _marker: PhantomData<(BorrowType, Type)>,
}
294
// Immutable node references are freely copyable, just like the shared
// references they stand for.
impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
    fn clone(&self) -> Self {
        *self
    }
}
301
// NOTE(review): these impls mirror the auto traits of the reference/owning type
// each `BorrowType` stands for — `Immut<'a>` like `&'a (K, V)`, `Mut`/`ValMut`
// like `&'a mut (K, V)`, and `Owned`/`Dying` like a `Box` of the node data.
unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}

unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send for NodeRef<marker::Immut<'a>, K, V, Type> {}
unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::Mut<'a>, K, V, Type> {}
unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::ValMut<'a>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Dying, K, V, Type> {}
309
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Unpack a node reference that was packed as `NodeRef::parent`.
    /// The caller supplies `height` because the node itself does not store it.
    fn from_internal(node: NonNull<InternalNode<K, V>>, height: usize) -> Self {
        debug_assert!(height > 0);
        NodeRef { height, node: node.cast(), _marker: PhantomData }
    }
}
317
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Exposes the data of an internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_internal_ptr(this: &Self) -> *mut InternalNode<K, V> {
        // SAFETY: the static node type is `Internal`, so the pointee really is
        // an `InternalNode` and the cast widens to the full node.
        this.node.as_ptr() as *mut InternalNode<K, V>
    }
}
327
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Borrows exclusive access to the data of an internal node.
    fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
        let ptr = Self::as_internal_ptr(self);
        // SAFETY: the `Mut` borrow type grants exclusive access to the node.
        unsafe { &mut *ptr }
    }
}
335
impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the length of the node. This is the number of keys or values.
    /// The number of edges is `len() + 1`.
    /// Note that, despite being safe, calling this function can have the side effect
    /// of invalidating mutable references that unsafe code has created.
    pub fn len(&self) -> usize {
        // Crucially, we only access the `len` field here. If BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        unsafe { usize::from((*Self::as_leaf_ptr(self)).len) }
    }

    /// Returns the number of levels that the node and leaves are apart. Zero
    /// height means the node is a leaf itself. If you picture trees with the
    /// root on top, the number says at which elevation the node appears.
    /// If you picture trees with leaves on top, the number says how high
    /// the tree extends above the node.
    pub fn height(&self) -> usize {
        self.height
    }

    /// Temporarily takes out another, immutable reference to the same node.
    pub fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Exposes the leaf portion of any leaf or internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_leaf_ptr(this: &Self) -> *mut LeafNode<K, V> {
        // The node must be valid for at least the LeafNode portion.
        // This is not a reference in the NodeRef type because we don't know if
        // it should be unique or shared.
        this.node.as_ptr()
    }
}
371
impl<BorrowType: marker::BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the parent of the current node. Returns `Ok(handle)` if the current
    /// node actually has a parent, where `handle` points to the edge of the parent
    /// that points to the current node. Returns `Err(self)` if the current node has
    /// no parent, giving back the original `NodeRef`.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub fn ascend(
        self,
    ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
        assert!(BorrowType::PERMITS_TRAVERSAL);
        // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        let leaf_ptr: *const _ = Self::as_leaf_ptr(&self);
        unsafe { (*leaf_ptr).parent }
            .as_ref()
            .map(|parent| Handle {
                node: NodeRef::from_internal(*parent, self.height + 1),
                // SAFETY: `parent_idx` is initialized whenever `parent` is
                // non-null (see `LeafNode::parent_idx`).
                idx: unsafe { usize::from((*leaf_ptr).parent_idx.assume_init()) },
                _marker: PhantomData,
            })
            .ok_or(self)
    }

    /// Returns the handle for the leftmost edge (position 0).
    pub fn first_edge(self) -> Handle<Self, marker::Edge> {
        unsafe { Handle::new_edge(self, 0) }
    }

    /// Returns the handle for the rightmost edge (position `len()`).
    pub fn last_edge(self) -> Handle<Self, marker::Edge> {
        let len = self.len();
        unsafe { Handle::new_edge(self, len) }
    }

    /// Note that `self` must be nonempty.
    pub fn first_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        unsafe { Handle::new_kv(self, 0) }
    }

    /// Note that `self` must be nonempty.
    pub fn last_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        unsafe { Handle::new_kv(self, len - 1) }
    }
}
422
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
    /// Exposes the leaf portion of any leaf or internal node in an immutable tree.
    fn into_leaf(self) -> &'a LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&self);
        // SAFETY: there can be no mutable references into this tree borrowed as `Immut`.
        unsafe { &*ptr }
    }

    /// Borrows a view into the keys stored in the node.
    pub fn keys(&self) -> &[K] {
        let leaf = self.into_leaf();
        unsafe {
            // SAFETY: only the first `len` keys are initialized and valid
            // (see `LeafNode::keys`), so we slice exactly that prefix.
            MaybeUninit::slice_assume_init_ref(leaf.keys.get_unchecked(..usize::from(leaf.len)))
        }
    }
}
439
impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
    /// Similar to `ascend`, gets a reference to a node's parent node, but also
    /// deallocates the current node in the process. This is unsafe because the
    /// current node will still be accessible despite being deallocated.
    pub unsafe fn deallocate_and_ascend(
        self,
    ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>> {
        let height = self.height;
        let node = self.node;
        // Capture the parent handle *before* this node's memory is freed.
        let ret = self.ascend().ok();
        unsafe {
            Global.deallocate(
                node.cast(),
                // A node is internal iff its height is non-zero; free the
                // layout matching the variant actually allocated.
                if height > 0 {
                    Layout::new::<InternalNode<K, V>>()
                } else {
                    Layout::new::<LeafNode<K, V>>()
                },
            );
        }
        ret
    }
}
463
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Unsafely asserts to the compiler the static information that this node is a `Leaf`.
    ///
    /// # Safety
    /// The node must actually be a leaf, i.e. `height == 0`.
    unsafe fn cast_to_leaf_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
        debug_assert!(self.height == 0);
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Unsafely asserts to the compiler the static information that this node is an `Internal`.
    ///
    /// # Safety
    /// The node must actually be internal, i.e. `height > 0`.
    unsafe fn cast_to_internal_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
        debug_assert!(self.height > 0);
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
477
impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Temporarily takes out another, mutable reference to the same node. Beware, as
    /// this method is very dangerous, doubly so since it may not immediately appear
    /// dangerous.
    ///
    /// Because mutable pointers can roam anywhere around the tree, the returned
    /// pointer can easily be used to make the original pointer dangling, out of
    /// bounds, or invalid under stacked borrow rules.
    // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef`
    // that restricts the use of navigation methods on reborrowed pointers,
    // preventing this unsafety.
    unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Borrows exclusive access to the leaf portion of any leaf or internal node,
    /// for the duration of the borrow of `self`.
    fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }

    /// Offers exclusive access to the leaf portion of any leaf or internal node,
    /// consuming `self` and keeping the full lifetime `'a`.
    fn into_leaf_mut(mut self) -> &'a mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&mut self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }
}
507
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to an element of the key storage area.
    /// The returned storage may be uninitialized; writing is always allowed,
    /// reading only where `LeafNode`'s initialization invariant says so.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn key_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<K>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the key slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().keys.as_mut_slice().get_unchecked_mut(index) }
    }

    /// Borrows exclusive access to an element or slice of the node's value storage area.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn val_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<V>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the value slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().vals.as_mut_slice().get_unchecked_mut(index) }
    }
}
537
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Borrows exclusive access to an element or slice of the node's storage area for edge contents.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY + 1
    /// (the `edges` array has `2 * B == CAPACITY + 1` slots).
    unsafe fn edge_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<BoxedNode<K, V>>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the edge slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_internal_mut().edges.as_mut_slice().get_unchecked_mut(index) }
    }
}
553
impl<'a, K, V, Type> NodeRef<marker::ValMut<'a>, K, V, Type> {
    /// Returns a shared reference to the key and a mutable reference to the
    /// value at `idx`, both carrying the full lifetime `'a`.
    ///
    /// # Safety
    /// - The node has more than `idx` initialized elements.
    unsafe fn into_key_val_mut_at(mut self, idx: usize) -> (&'a K, &'a mut V) {
        // We only create a reference to the one element we are interested in,
        // to avoid aliasing with outstanding references to other elements,
        // in particular, those returned to the caller in earlier iterations.
        let leaf = Self::as_leaf_ptr(&mut self);
        let keys = unsafe { ptr::addr_of!((*leaf).keys) };
        let vals = unsafe { ptr::addr_of_mut!((*leaf).vals) };
        // We must coerce to unsized array pointers because of Rust issue #74679.
        let keys: *const [_] = keys;
        let vals: *mut [_] = vals;
        let key = unsafe { (&*keys.get_unchecked(idx)).assume_init_ref() };
        let val = unsafe { (&mut *vals.get_unchecked_mut(idx)).assume_init_mut() };
        (key, val)
    }
}
572
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to the length of the node.
    /// The caller is responsible for keeping `len` consistent with the
    /// initialized prefixes of the key/value (and edge) storage areas.
    pub fn len_mut(&mut self) -> &mut u16 {
        &mut self.as_leaf_mut().len
    }
}
579
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Sets the node's link to its parent edge,
    /// without invalidating other references to the node.
    fn set_parent_link(&mut self, parent: NonNull<InternalNode<K, V>>, parent_idx: usize) {
        let leaf = Self::as_leaf_ptr(self);
        // Writing through the raw pointer avoids creating a unique reference
        // that would invalidate aliases elsewhere in the tree.
        unsafe { (*leaf).parent = Some(parent) };
        unsafe { (*leaf).parent_idx.write(parent_idx as u16) };
    }
}
589
impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
    /// Clears the root's link to its parent edge.
    /// (`parent_idx` is left as-is; it is only meaningful while `parent` is set.)
    fn clear_parent_link(&mut self) {
        let mut root_node = self.borrow_mut();
        let leaf = root_node.as_leaf_mut();
        leaf.parent = None;
    }
}
598
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
    /// Adds a key-value pair to the end of the node.
    ///
    /// # Panics
    /// Panics if the node is already full (`len() == CAPACITY`).
    pub fn push(&mut self, key: K, val: V) {
        let len = self.len_mut();
        let idx = usize::from(*len);
        assert!(idx < CAPACITY);
        *len += 1;
        unsafe {
            // SAFETY: `idx < CAPACITY` was just asserted, so both writes land
            // inside the storage arrays.
            self.key_area_mut(idx).write(key);
            self.val_area_mut(idx).write(val);
        }
    }
}
612
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Points the parent links of the children at the given edge positions
    /// back at this node.
    ///
    /// # Safety
    /// Every item returned by `range` is a valid edge index for the node.
    unsafe fn correct_childrens_parent_links<R: Iterator<Item = usize>>(&mut self, range: R) {
        for i in range {
            debug_assert!(i <= self.len());
            unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
        }
    }

    /// Fixes the parent links of all of this node's children.
    fn correct_all_childrens_parent_links(&mut self) {
        let len = self.len();
        // SAFETY: a node of length `len` has `len + 1` edges, so `0..=len`
        // are all valid edge indices.
        unsafe { self.correct_childrens_parent_links(0..=len) };
    }
}
628
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Adds a key-value pair, and an edge to go to the right of that pair,
    /// to the end of the node.
    ///
    /// # Panics
    /// Panics if the node is already full, or if `edge` does not sit exactly
    /// one level below this node (uniform-depth invariant).
    pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
        assert!(edge.height == self.height - 1);

        let len = self.len_mut();
        let idx = usize::from(*len);
        assert!(idx < CAPACITY);
        *len += 1;
        unsafe {
            self.key_area_mut(idx).write(key);
            self.val_area_mut(idx).write(val);
            self.edge_area_mut(idx + 1).write(edge.node);
            // The new child must be told who its parent is.
            Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
        }
    }
}
647
648 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
649 /// Checks whether a node is an `Internal` node or a `Leaf` node.
650 pub fn force(
651 self,
652 ) -> ForceResult<
653 NodeRef<BorrowType, K, V, marker::Leaf>,
654 NodeRef<BorrowType, K, V, marker::Internal>,
655 > {
656 if self.height == 0 {
657 ForceResult::Leaf(NodeRef {
658 height: self.height,
659 node: self.node,
660 _marker: PhantomData,
661 })
662 } else {
663 ForceResult::Internal(NodeRef {
664 height: self.height,
665 node: self.node,
666 _marker: PhantomData,
667 })
668 }
669 }
670 }
671
/// A reference to a specific key-value pair or edge within a node. The `Node` parameter
/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key-value
/// pair) or `Edge` (signifying a handle on an edge).
///
/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
/// a child node, these represent the spaces where child pointers would go between the key-value
/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
/// to the left of the node, one between the two pairs, and one at the right of the node.
pub struct Handle<Node, Type> {
    node: Node,
    /// The position of the edge or key-value pair within `node`.
    idx: usize,
    /// Binds `Type` (KV vs. Edge) without storing any data.
    _marker: PhantomData<Type>,
}
685
// A handle is exactly as cheap to copy as the node reference it wraps.
impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
// `Clone`able is when it is an immutable reference and therefore `Copy`.
impl<Node: Copy, Type> Clone for Handle<Node, Type> {
    fn clone(&self) -> Self {
        *self
    }
}
694
impl<Node, Type> Handle<Node, Type> {
    /// Retrieves the node that contains the edge or key-value pair this handle points to.
    pub fn into_node(self) -> Node {
        self.node
    }

    /// Returns the position of this handle in the node: the pair's index for a
    /// `KV` handle, or the edge's position for an `Edge` handle.
    pub fn idx(&self) -> usize {
        self.idx
    }
}
706
impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
    /// Creates a new handle to a key-value pair in `node`.
    /// Unsafe because the caller must ensure that `idx < node.len()`.
    pub unsafe fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
        debug_assert!(idx < node.len());

        Handle { node, idx, _marker: PhantomData }
    }

    /// Returns a handle to the edge immediately to the left of this pair.
    pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
        // SAFETY: a KV handle has `idx < len`, and `idx <= len` suffices for an edge.
        unsafe { Handle::new_edge(self.node, self.idx) }
    }

    /// Returns a handle to the edge immediately to the right of this pair.
    pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
        // SAFETY: `idx < len` implies `idx + 1 <= len`, a valid edge index.
        unsafe { Handle::new_edge(self.node, self.idx + 1) }
    }
}
724
725 impl<BorrowType, K, V, NodeType> NodeRef<BorrowType, K, V, NodeType> {
726 /// Could be a public implementation of PartialEq, but only used in this module.
727 fn eq(&self, other: &Self) -> bool {
728 let Self { node, height, _marker } = self;
729 if node.eq(&other.node) {
730 debug_assert_eq!(*height, other.height);
731 true
732 } else {
733 false
734 }
735 }
736 }
737
738 impl<BorrowType, K, V, NodeType, HandleType> PartialEq
739 for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
740 {
741 fn eq(&self, other: &Self) -> bool {
742 let Self { node, idx, _marker } = self;
743 node.eq(&other.node) && *idx == other.idx
744 }
745 }
746
impl<BorrowType, K, V, NodeType, HandleType>
    Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
{
    /// Temporarily takes out another, immutable handle on the same location.
    pub fn reborrow(&self) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
        // (`HandleType` is generic here), so construct the handle directly.
        Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData }
    }
}
756
impl<'a, K, V, Type> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, Type> {
    /// Unsafely asserts to the compiler the static information that the handle's node is a `Leaf`.
    ///
    /// # Safety
    /// The node this handle points to must actually be a leaf node.
    pub unsafe fn cast_to_leaf_unchecked(
        self,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, Type> {
        // SAFETY: forwards the caller's obligation that the node is a leaf.
        let node = unsafe { self.node.cast_to_leaf_unchecked() };
        Handle { node, idx: self.idx, _marker: PhantomData }
    }
}
766
impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
    /// Temporarily takes out another, mutable handle on the same location. Beware, as
    /// this method is very dangerous, doubly so since it may not immediately appear
    /// dangerous.
    ///
    /// # Safety
    /// For details, see `NodeRef::reborrow_mut`; the same requirements apply here.
    pub unsafe fn reborrow_mut(
        &mut self,
    ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
        Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
    }
}
780
781 impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
782 /// Creates a new handle to an edge in `node`.
783 /// Unsafe because the caller must ensure that `idx <= node.len()`.
784 pub unsafe fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
785 debug_assert!(idx <= node.len());
786
787 Handle { node, idx, _marker: PhantomData }
788 }
789
790 pub fn left_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
791 if self.idx > 0 {
792 Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) })
793 } else {
794 Err(self)
795 }
796 }
797
798 pub fn right_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
799 if self.idx < self.node.len() {
800 Ok(unsafe { Handle::new_kv(self.node, self.idx) })
801 } else {
802 Err(self)
803 }
804 }
805 }
806
/// Tags a value as belonging to the left or the right side of some division,
/// e.g. which half of a split node an insertion index falls into.
pub enum LeftOrRight<T> {
    Left(T),
    Right(T),
}
811
812 /// Given an edge index where we want to insert into a node filled to capacity,
813 /// computes a sensible KV index of a split point and where to perform the insertion.
814 /// The goal of the split point is for its key and value to end up in a parent node;
815 /// the keys, values and edges to the left of the split point become the left child;
816 /// the keys, values and edges to the right of the split point become the right child.
817 fn splitpoint(edge_idx: usize) -> (usize, LeftOrRight<usize>) {
818 debug_assert!(edge_idx <= CAPACITY);
819 // Rust issue #74834 tries to explain these symmetric rules.
820 match edge_idx {
821 0..EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER - 1, LeftOrRight::Left(edge_idx)),
822 EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Left(edge_idx)),
823 EDGE_IDX_RIGHT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Right(0)),
824 _ => (KV_IDX_CENTER + 1, LeftOrRight::Right(edge_idx - (KV_IDX_CENTER + 1 + 1))),
825 }
826 }
827
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method assumes that there is enough space in the node for the new
    /// pair to fit.
    ///
    /// The returned pointer points to the inserted value.
    fn insert_fit(&mut self, key: K, val: V) -> *mut V {
        debug_assert!(self.node.len() < CAPACITY);
        let new_len = self.node.len() + 1;

        unsafe {
            // SAFETY: `new_len <= CAPACITY` (debug-asserted precondition) and
            // `idx <= len` holds for an edge handle, so both shifts stay in bounds.
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            *self.node.len_mut() = new_len as u16;

            // The freshly written value now lives at `idx`.
            self.node.val_area_mut(self.idx).assume_init_mut()
        }
    }
}
847
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room.
    ///
    /// The returned pointer points to the inserted value.
    fn insert(mut self, key: K, val: V) -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
        if self.node.len() < CAPACITY {
            let val_ptr = self.insert_fit(key, val);
            let kv = unsafe { Handle::new_kv(self.node, self.idx) };
            (InsertResult::Fit(kv), val_ptr)
        } else {
            // Full node: split around a chosen middle KV, then insert into
            // whichever half the original edge index now belongs to.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split();
            let mut insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            let val_ptr = insertion_edge.insert_fit(key, val);
            (InsertResult::Split(result), val_ptr)
        }
    }
}
875
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Fixes the parent pointer and index in the child node that this edge
    /// links to. This is useful when the ordering of edges has been changed.
    fn correct_parent_link(self) {
        // Create backpointer without invalidating other references to the node.
        let ptr = unsafe { NonNull::new_unchecked(NodeRef::as_internal_ptr(&self.node)) };
        let idx = self.idx;
        let mut child = self.descend();
        child.set_parent_link(ptr, idx);
    }
}
887
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method assumes
    /// that there is enough space in the node for the new pair to fit.
    fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
        debug_assert!(self.node.len() < CAPACITY);
        debug_assert!(edge.height == self.node.height - 1);
        let new_len = self.node.len() + 1;

        unsafe {
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            // The new edge goes to the right of the new KV, hence `idx + 1`.
            slice_insert(self.node.edge_area_mut(..new_len + 1), self.idx + 1, edge.node);
            *self.node.len_mut() = new_len as u16;

            // Every edge at or after the inserted one moved one slot to the right.
            self.node.correct_childrens_parent_links(self.idx + 1..new_len + 1);
        }
    }

    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method splits
    /// the node if there isn't enough room.
    ///
    /// Panics if `edge` is not exactly one level below this node.
    fn insert(
        mut self,
        key: K,
        val: V,
        edge: Root<K, V>,
    ) -> InsertResult<'a, K, V, marker::Internal> {
        assert!(edge.height == self.node.height - 1);

        if self.node.len() < CAPACITY {
            self.insert_fit(key, val, edge);
            let kv = unsafe { Handle::new_kv(self.node, self.idx) };
            InsertResult::Fit(kv)
        } else {
            // Full node: split around a chosen middle KV, then insert into
            // whichever half the original edge index now belongs to.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split();
            let mut insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            insertion_edge.insert_fit(key, val, edge);
            InsertResult::Split(result)
        }
    }
}
939
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room, and tries to
    /// insert the split off portion into the parent node recursively, until the root is reached.
    ///
    /// If the returned result is a `Fit`, its handle's node can be this edge's node or an ancestor.
    /// If the returned result is a `Split`, the `left` field will be the root node.
    /// The returned pointer points to the inserted value.
    pub fn insert_recursing(
        self,
        key: K,
        value: V,
    ) -> (InsertResult<'a, K, V, marker::LeafOrInternal>, *mut V) {
        let (mut split, val_ptr) = match self.insert(key, value) {
            (InsertResult::Fit(handle), ptr) => {
                return (InsertResult::Fit(handle.forget_node_type()), ptr);
            }
            (InsertResult::Split(split), val_ptr) => (split.forget_node_type(), val_ptr),
        };

        // Bubble the split KV and the split-off right node up toward the root,
        // until some ancestor has room or the root itself has to split.
        loop {
            split = match split.left.ascend() {
                Ok(parent) => match parent.insert(split.kv.0, split.kv.1, split.right) {
                    InsertResult::Fit(handle) => {
                        return (InsertResult::Fit(handle.forget_node_type()), val_ptr);
                    }
                    InsertResult::Split(split) => split.forget_node_type(),
                },
                Err(root) => {
                    // No parent: report the split with the root as the left half.
                    return (InsertResult::Split(SplitResult { left: root, ..split }), val_ptr);
                }
            };
        }
    }
}
975
impl<BorrowType: marker::BorrowType, K, V>
    Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>
{
    /// Finds the node pointed to by this edge.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    ///
    /// Asserts that `BorrowType` permits traversal (panics otherwise).
    pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        assert!(BorrowType::PERMITS_TRAVERSAL);
        // We need to use raw pointers to nodes because, if BorrowType is
        // marker::ValMut, there might be outstanding mutable references to
        // values that we must not invalidate. There's no worry accessing the
        // height field because that value is copied. Beware that, once the
        // node pointer is dereferenced, we access the edges array with a
        // reference (Rust issue #73987) and invalidate any other references
        // to or inside the array, should any be around.
        let parent_ptr = NodeRef::as_internal_ptr(&self.node);
        let node = unsafe { (*parent_ptr).edges.get_unchecked(self.idx).assume_init_read() };
        // The child of an internal node sits one level below it.
        NodeRef { node, height: self.node.height - 1, _marker: PhantomData }
    }
}
999
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
    /// Borrows the key and value at this position for the full lifetime `'a`
    /// of the underlying immutable borrow.
    pub fn into_kv(self) -> (&'a K, &'a V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf();
        // SAFETY: `idx < len` is the KV-handle invariant (debug-asserted above),
        // so both slots are initialized.
        let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() };
        let v = unsafe { leaf.vals.get_unchecked(self.idx).assume_init_ref() };
        (k, v)
    }
}
1009
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Borrows the key at this position mutably, for as long as `self` is borrowed.
    pub fn key_mut(&mut self) -> &mut K {
        unsafe { self.node.key_area_mut(self.idx).assume_init_mut() }
    }

    /// Converts the handle into a mutable borrow of the value, valid for the
    /// full lifetime `'a` of the underlying mutable borrow.
    pub fn into_val_mut(self) -> &'a mut V {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf_mut();
        unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() }
    }
}
1021
impl<'a, K, V, NodeType> Handle<NodeRef<marker::ValMut<'a>, K, V, NodeType>, marker::KV> {
    /// Converts the handle into a shared key reference and a mutable value
    /// reference, as permitted by the `ValMut` borrow type.
    pub fn into_kv_valmut(self) -> (&'a K, &'a mut V) {
        unsafe { self.node.into_key_val_mut_at(self.idx) }
    }
}
1027
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Borrows the key and the value at this position mutably, at the same time.
    pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
        debug_assert!(self.idx < self.node.len());
        // We cannot call separate key and value methods, because calling the second one
        // invalidates the reference returned by the first.
        unsafe {
            let leaf = self.node.as_leaf_mut();
            let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_mut();
            let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_mut();
            (key, val)
        }
    }

    /// Replace the key and value that the KV handle refers to.
    /// Returns the key-value pair that was previously stored there.
    pub fn replace_kv(&mut self, k: K, v: V) -> (K, V) {
        let (key, val) = self.kv_mut();
        (mem::replace(key, k), mem::replace(val, v))
    }
}
1047
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Helps implementations of `split` for a particular `NodeType`,
    /// by taking care of leaf data.
    ///
    /// Reads out the KV at this handle, moves every KV to its right into
    /// `new_node`, and truncates `self.node` to end just before this KV.
    fn split_leaf_data(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let old_len = self.node.len();
        // Everything strictly to the right of the split KV goes to the new node.
        let new_len = old_len - self.idx - 1;
        new_node.len = new_len as u16;
        unsafe {
            // Extract the split KV itself; its slot is logically vacated below
            // by shrinking the node's length to `idx`.
            let k = self.node.key_area_mut(self.idx).assume_init_read();
            let v = self.node.val_area_mut(self.idx).assume_init_read();

            move_to_slice(
                self.node.key_area_mut(self.idx + 1..old_len),
                &mut new_node.keys[..new_len],
            );
            move_to_slice(
                self.node.val_area_mut(self.idx + 1..old_len),
                &mut new_node.vals[..new_len],
            );

            *self.node.len_mut() = self.idx as u16;
            (k, v)
        }
    }
}
1074
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the key-value pairs to the left of
    ///   this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the key-value pairs to the right of this handle are put into a newly
    ///   allocated node.
    pub fn split(mut self) -> SplitResult<'a, K, V, marker::Leaf> {
        let mut new_node = LeafNode::new();

        let kv = self.split_leaf_data(&mut new_node);

        let right = NodeRef::from_new_leaf(new_node);
        SplitResult { left: self.node, kv, right }
    }

    /// Removes the key-value pair pointed to by this handle and returns it, along with the edge
    /// that the key-value pair collapsed into.
    pub fn remove(
        mut self,
    ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
        let old_len = self.node.len();
        unsafe {
            let k = slice_remove(self.node.key_area_mut(..old_len), self.idx);
            let v = slice_remove(self.node.val_area_mut(..old_len), self.idx);
            *self.node.len_mut() = (old_len - 1) as u16;
            // The edges on either side of the removed KV collapse into one,
            // at the removed pair's index.
            ((k, v), self.left_edge())
        }
    }
}
1106
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the edges and key-value pairs to the
    ///   left of this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the edges and key-value pairs to the right of this handle are put into
    ///   a newly allocated node.
    pub fn split(mut self) -> SplitResult<'a, K, V, marker::Internal> {
        let old_len = self.node.len();
        unsafe {
            let mut new_node = InternalNode::new();
            // The shared helper moves keys and values and fixes both lengths.
            let kv = self.split_leaf_data(&mut new_node.data);
            let new_len = usize::from(new_node.data.len);
            // An internal node with `new_len` KVs owns `new_len + 1` edges.
            move_to_slice(
                self.node.edge_area_mut(self.idx + 1..old_len + 1),
                &mut new_node.edges[..new_len + 1],
            );

            // The new sibling sits at the same height as the split node.
            let height = self.node.height;
            let right = NodeRef::from_new_internal(new_node, height);

            SplitResult { left: self.node, kv, right }
        }
    }
}
1133
/// Represents a session for evaluating and performing a balancing operation
/// around an internal key-value pair.
pub struct BalancingContext<'a, K, V> {
    // The KV in the parent node that separates the two children below.
    parent: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV>,
    // Child node reached through the edge left of the parent KV.
    left_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    // Child node reached through the edge right of the parent KV.
    right_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
}
1141
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Builds a balancing context around this parent KV and its two child nodes.
    pub fn consider_for_balancing(self) -> BalancingContext<'a, K, V> {
        // Duplicate the handle so we can descend into both children while also
        // keeping the parent handle. NOTE(review): this relies on the three
        // copies being used for disjoint parts of the tree.
        let self1 = unsafe { ptr::read(&self) };
        let self2 = unsafe { ptr::read(&self) };
        BalancingContext {
            parent: self,
            left_child: self1.left_edge().descend(),
            right_child: self2.right_edge().descend(),
        }
    }
}
1153
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Chooses a balancing context involving the node as a child, thus between
    /// the KV immediately to the left or to the right in the parent node.
    /// Returns an `Err` if there is no parent.
    /// Panics if the parent is empty.
    ///
    /// Prefers the left side, to be optimal if the given node is somehow
    /// underfull, meaning here only that it has fewer elements than its left
    /// sibling and than its right sibling, if they exist. In that case,
    /// merging with the left sibling is faster, since we only need to move
    /// the node's N elements, instead of shifting them to the right and moving
    /// more than N elements in front. Stealing from the left sibling is also
    /// typically faster, since we only need to shift the node's N elements to
    /// the right, instead of shifting at least N of the sibling's elements to
    /// the left.
    pub fn choose_parent_kv(self) -> Result<LeftOrRight<BalancingContext<'a, K, V>>, Self> {
        // `ptr::read` duplicates `self` so the copy can be consumed by `ascend`
        // while the original doubles as one of the children below.
        match unsafe { ptr::read(&self) }.ascend() {
            Ok(parent_edge) => match parent_edge.left_kv() {
                Ok(left_parent_kv) => Ok(LeftOrRight::Left(BalancingContext {
                    parent: unsafe { ptr::read(&left_parent_kv) },
                    left_child: left_parent_kv.left_edge().descend(),
                    right_child: self,
                })),
                Err(parent_edge) => match parent_edge.right_kv() {
                    Ok(right_parent_kv) => Ok(LeftOrRight::Right(BalancingContext {
                        parent: unsafe { ptr::read(&right_parent_kv) },
                        left_child: self,
                        right_child: right_parent_kv.right_edge().descend(),
                    })),
                    // A non-root internal node always has at least one KV.
                    Err(_) => unreachable!("empty internal node"),
                },
            },
            Err(root) => Err(root),
        }
    }
}
1190
impl<'a, K, V> BalancingContext<'a, K, V> {
    /// Returns the number of elements in the left child.
    pub fn left_child_len(&self) -> usize {
        self.left_child.len()
    }

    /// Returns the number of elements in the right child.
    pub fn right_child_len(&self) -> usize {
        self.right_child.len()
    }

    /// Consumes the context, yielding the left child node.
    pub fn into_left_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.left_child
    }

    /// Consumes the context, yielding the right child node.
    pub fn into_right_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.right_child
    }

    /// Returns whether merging is possible, i.e., whether there is enough room
    /// in a node to combine the central KV with both adjacent child nodes.
    pub fn can_merge(&self) -> bool {
        self.left_child.len() + 1 + self.right_child.len() <= CAPACITY
    }
}
1214
1215 impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
    /// Performs a merge and lets a closure decide what to return.
    ///
    /// Moves the parent KV and the entire right child into the left child,
    /// deallocates the right child, and shrinks the parent by one KV and one edge.
    fn do_merge<
        F: FnOnce(
            NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
            NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
        ) -> R,
        R,
    >(
        self,
        result: F,
    ) -> R {
        let Handle { node: mut parent_node, idx: parent_idx, _marker } = self.parent;
        let old_parent_len = parent_node.len();
        let mut left_node = self.left_child;
        let old_left_len = left_node.len();
        let mut right_node = self.right_child;
        let right_len = right_node.len();
        let new_left_len = old_left_len + 1 + right_len;

        // This is the `can_merge` condition; panic rather than corrupt the node.
        assert!(new_left_len <= CAPACITY);

        unsafe {
            *left_node.len_mut() = new_left_len as u16;

            // The parent KV moves down in between the two children's contents.
            let parent_key = slice_remove(parent_node.key_area_mut(..old_parent_len), parent_idx);
            left_node.key_area_mut(old_left_len).write(parent_key);
            move_to_slice(
                right_node.key_area_mut(..right_len),
                left_node.key_area_mut(old_left_len + 1..new_left_len),
            );

            let parent_val = slice_remove(parent_node.val_area_mut(..old_parent_len), parent_idx);
            left_node.val_area_mut(old_left_len).write(parent_val);
            move_to_slice(
                right_node.val_area_mut(..right_len),
                left_node.val_area_mut(old_left_len + 1..new_left_len),
            );

            // Drop the parent's edge to the now-empty right child.
            slice_remove(&mut parent_node.edge_area_mut(..old_parent_len + 1), parent_idx + 1);
            parent_node.correct_childrens_parent_links(parent_idx + 1..old_parent_len);
            *parent_node.len_mut() -= 1;

            if parent_node.height > 1 {
                // SAFETY: the height of the nodes being merged is one below the height
                // of the node of this edge, thus above zero, so they are internal.
                let mut left_node = left_node.reborrow_mut().cast_to_internal_unchecked();
                let mut right_node = right_node.cast_to_internal_unchecked();
                move_to_slice(
                    right_node.edge_area_mut(..right_len + 1),
                    left_node.edge_area_mut(old_left_len + 1..new_left_len + 1),
                );

                left_node.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);

                Global.deallocate(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
            } else {
                Global.deallocate(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
            }
        }
        result(parent_node, left_node)
    }
1277
    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns the shrunk parent node (see `do_merge`).
    ///
    /// Panics unless we `.can_merge()`.
    pub fn merge_tracking_parent(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
        self.do_merge(|parent, _child| parent)
    }
1285
    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns that child node (see `do_merge`).
    ///
    /// Panics unless we `.can_merge()`.
    pub fn merge_tracking_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.do_merge(|_parent, child| child)
    }
1293
    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns the edge handle in that child node
    /// where the tracked child edge ended up.
    ///
    /// Panics unless we `.can_merge()`.
    pub fn merge_tracking_child_edge(
        self,
        track_edge_idx: LeftOrRight<usize>,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        let old_left_len = self.left_child.len();
        let right_len = self.right_child.len();
        // The tracked edge must exist in the child it refers to.
        assert!(match track_edge_idx {
            LeftOrRight::Left(idx) => idx <= old_left_len,
            LeftOrRight::Right(idx) => idx <= right_len,
        });
        let child = self.merge_tracking_child();
        // Left edges keep their index; right edges are shifted past the old
        // left contents plus the KV pulled down from the parent.
        let new_idx = match track_edge_idx {
            LeftOrRight::Left(idx) => idx,
            LeftOrRight::Right(idx) => old_left_len + 1 + idx,
        };
        unsafe { Handle::new_edge(child, new_idx) }
    }
1316
    /// Removes a key-value pair from the left child and places it in the key-value storage
    /// of the parent, while pushing the old parent key-value pair into the right child.
    /// Returns a handle to the edge in the right child corresponding to where the original
    /// edge specified by `track_right_edge_idx` ended up.
    pub fn steal_left(
        mut self,
        track_right_edge_idx: usize,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        self.bulk_steal_left(1);
        // One element was prepended to the right child, shifting its edges right by one.
        unsafe { Handle::new_edge(self.right_child, 1 + track_right_edge_idx) }
    }
1328
    /// Removes a key-value pair from the right child and places it in the key-value storage
    /// of the parent, while pushing the old parent key-value pair onto the left child.
    /// Returns a handle to the edge in the left child specified by `track_left_edge_idx`,
    /// which didn't move.
    pub fn steal_right(
        mut self,
        track_left_edge_idx: usize,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        self.bulk_steal_right(1);
        // The left child only grew at its right end, so the tracked index is unchanged.
        unsafe { Handle::new_edge(self.left_child, track_left_edge_idx) }
    }
1340
    /// This does stealing similar to `steal_left` but steals multiple elements at once.
    ///
    /// Panics if `count` is zero, if the left child holds fewer than `count`
    /// elements, or if the right child cannot fit `count` more elements.
    pub fn bulk_steal_left(&mut self, count: usize) {
        assert!(count > 0);
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_right_len + count <= CAPACITY);
            assert!(old_left_len >= count);

            let new_left_len = old_left_len - count;
            let new_right_len = old_right_len + count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Make room for stolen elements in the right child.
                slice_shr(right_node.key_area_mut(..new_right_len), count);
                slice_shr(right_node.val_area_mut(..new_right_len), count);

                // Move elements from the left child to the right one.
                move_to_slice(
                    left_node.key_area_mut(new_left_len + 1..old_left_len),
                    right_node.key_area_mut(..count - 1),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len + 1..old_left_len),
                    right_node.val_area_mut(..count - 1),
                );

                // Move the left-most stolen pair to the parent.
                let k = left_node.key_area_mut(new_left_len).assume_init_read();
                let v = left_node.val_area_mut(new_left_len).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the right child.
                right_node.key_area_mut(count - 1).write(k);
                right_node.val_area_mut(count - 1).write(v);
            }

            match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Make room for stolen edges.
                    slice_shr(right.edge_area_mut(..new_right_len + 1), count);

                    // Steal edges.
                    move_to_slice(
                        left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                        right.edge_area_mut(..count),
                    );

                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                // Children of one parent are at the same height, so they force alike.
                _ => unreachable!(),
            }
        }
    }
1403
    /// The symmetric clone of `bulk_steal_left`.
    ///
    /// Panics if `count` is zero, if the right child holds fewer than `count`
    /// elements, or if the left child cannot fit `count` more elements.
    pub fn bulk_steal_right(&mut self, count: usize) {
        assert!(count > 0);
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_left_len + count <= CAPACITY);
            assert!(old_right_len >= count);

            let new_left_len = old_left_len + count;
            let new_right_len = old_right_len - count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Move the right-most stolen pair to the parent.
                let k = right_node.key_area_mut(count - 1).assume_init_read();
                let v = right_node.val_area_mut(count - 1).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the left child.
                left_node.key_area_mut(old_left_len).write(k);
                left_node.val_area_mut(old_left_len).write(v);

                // Move elements from the right child to the left one.
                move_to_slice(
                    right_node.key_area_mut(..count - 1),
                    left_node.key_area_mut(old_left_len + 1..new_left_len),
                );
                move_to_slice(
                    right_node.val_area_mut(..count - 1),
                    left_node.val_area_mut(old_left_len + 1..new_left_len),
                );

                // Fill gap where stolen elements used to be.
                slice_shl(right_node.key_area_mut(..old_right_len), count);
                slice_shl(right_node.val_area_mut(..old_right_len), count);
            }

            match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Steal edges.
                    move_to_slice(
                        right.edge_area_mut(..count),
                        left.edge_area_mut(old_left_len + 1..new_left_len + 1),
                    );

                    // Fill gap where stolen edges used to be.
                    slice_shl(right.edge_area_mut(..old_right_len + 1), count);

                    left.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);
                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                // Children of one parent are at the same height, so they force alike.
                _ => unreachable!(),
            }
        }
    }
1467 }
1468
1469 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Leaf> {
1470 /// Removes any static information asserting that this node is a `Leaf` node.
1471 pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
1472 NodeRef { height: self.height, node: self.node, _marker: PhantomData }
1473 }
1474 }
1475
1476 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
1477 /// Removes any static information asserting that this node is an `Internal` node.
1478 pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
1479 NodeRef { height: self.height, node: self.node, _marker: PhantomData }
1480 }
1481 }
1482
1483 impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
1484 pub fn forget_node_type(
1485 self,
1486 ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1487 unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1488 }
1489 }
1490
1491 impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
1492 pub fn forget_node_type(
1493 self,
1494 ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1495 unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1496 }
1497 }
1498
1499 impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
1500 pub fn forget_node_type(
1501 self,
1502 ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
1503 unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
1504 }
1505 }
1506
1507 impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV> {
1508 pub fn forget_node_type(
1509 self,
1510 ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
1511 unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
1512 }
1513 }
1514
impl<BorrowType, K, V, Type> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, Type> {
    /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
    pub fn force(
        self,
    ) -> ForceResult<
        Handle<NodeRef<BorrowType, K, V, marker::Leaf>, Type>,
        Handle<NodeRef<BorrowType, K, V, marker::Internal>, Type>,
    > {
        // The index is preserved; only the node's static type information changes.
        match self.node.force() {
            ForceResult::Leaf(node) => {
                ForceResult::Leaf(Handle { node, idx: self.idx, _marker: PhantomData })
            }
            ForceResult::Internal(node) => {
                ForceResult::Internal(Handle { node, idx: self.idx, _marker: PhantomData })
            }
        }
    }
}
1533
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
    /// Move the suffix after `self` from one node to another one. `right` must be empty.
    /// The first edge of `right` remains unchanged.
    ///
    /// Panics if `right` is not empty or not at the same height as the left node.
    pub fn move_suffix(
        &mut self,
        right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    ) {
        unsafe {
            // Everything at or after this edge's index is the suffix to move.
            let new_left_len = self.idx;
            let mut left_node = self.reborrow_mut().into_node();
            let old_left_len = left_node.len();

            let new_right_len = old_left_len - new_left_len;
            let mut right_node = right.reborrow_mut();

            assert!(right_node.len() == 0);
            assert!(left_node.height == right_node.height);

            if new_right_len > 0 {
                *left_node.len_mut() = new_left_len as u16;
                *right_node.len_mut() = new_right_len as u16;

                move_to_slice(
                    left_node.key_area_mut(new_left_len..old_left_len),
                    right_node.key_area_mut(..new_right_len),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len..old_left_len),
                    right_node.val_area_mut(..new_right_len),
                );
                match (left_node.force(), right_node.force()) {
                    (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                        // Edge 0 of `right` is left alone; moved edges start at 1.
                        move_to_slice(
                            left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                            right.edge_area_mut(1..new_right_len + 1),
                        );
                        right.correct_childrens_parent_links(1..new_right_len + 1);
                    }
                    (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                    // Both nodes are at the same height (asserted above), so they force alike.
                    _ => unreachable!(),
                }
            }
        }
    }
}
1579
/// The two possible outcomes of resolving a type-erased (`LeafOrInternal`)
/// node reference or handle: the leaf-typed or internal-typed equivalent.
pub enum ForceResult<Leaf, Internal> {
    Leaf(Leaf),
    Internal(Internal),
}
1584
/// Result of insertion, when a node needed to expand beyond its capacity.
pub struct SplitResult<'a, K, V, NodeType> {
    /// Altered node in existing tree with elements and edges that belong to the left of `kv`.
    pub left: NodeRef<marker::Mut<'a>, K, V, NodeType>,
    /// Some key and value split off, to be inserted elsewhere.
    pub kv: (K, V),
    /// Owned, unattached, new node with elements and edges that belong to the right of `kv`.
    pub right: NodeRef<marker::Owned, K, V, NodeType>,
}
1594
1595 impl<'a, K, V> SplitResult<'a, K, V, marker::Leaf> {
1596 pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
1597 SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
1598 }
1599 }
1600
1601 impl<'a, K, V> SplitResult<'a, K, V, marker::Internal> {
1602 pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
1603 SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
1604 }
1605 }
1606
/// Result of an insertion attempt into a node.
pub enum InsertResult<'a, K, V, NodeType> {
    /// The pair fit into the node; carries a handle to the inserted KV.
    Fit(Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV>),
    /// The node was full and had to be split; carries the pieces
    /// (left node, middle KV, right node) for the caller to reattach.
    Split(SplitResult<'a, K, V, NodeType>),
}
1611
pub mod marker {
    use core::marker::PhantomData;

    /// Node-type tag: the node is known to be a leaf.
    pub enum Leaf {}
    /// Node-type tag: the node is known to be internal.
    pub enum Internal {}
    /// Node-type tag: the node may be either; resolve with `force`.
    pub enum LeafOrInternal {}

    /// Borrow-type tag: the tree is owned; such references are roots only
    /// (see the `BorrowType` impl below).
    pub enum Owned {}
    // NOTE(review): presumably used while the tree is being torn down —
    // confirm against callers outside this chunk.
    pub enum Dying {}
    /// Borrow-type tag: shared borrow of the tree.
    pub struct Immut<'a>(PhantomData<&'a ()>);
    /// Borrow-type tag: exclusive borrow of the tree.
    pub struct Mut<'a>(PhantomData<&'a mut ()>);
    // NOTE(review): looks like an exclusive borrow restricted to values —
    // verify against the accessors that use it.
    pub struct ValMut<'a>(PhantomData<&'a mut ()>);

    pub trait BorrowType {
        // Whether node references of this borrow type allow traversing
        // to other nodes in the tree.
        const PERMITS_TRAVERSAL: bool = true;
    }
    impl BorrowType for Owned {
        // Traversal isn't needed, it happens using the result of `borrow_mut`.
        // By disabling traversal, and only creating new references to roots,
        // we know that every reference of the `Owned` type is to a root node.
        const PERMITS_TRAVERSAL: bool = false;
    }
    impl BorrowType for Dying {}
    impl<'a> BorrowType for Immut<'a> {}
    impl<'a> BorrowType for Mut<'a> {}
    impl<'a> BorrowType for ValMut<'a> {}

    /// Handle-type tag: the handle points at a key-value pair.
    pub enum KV {}
    /// Handle-type tag: the handle points at an edge (position between/around KVs).
    pub enum Edge {}
}
1644
/// Inserts a value into a slice of initialized elements followed by one uninitialized element.
///
/// # Safety
/// The slice has more than `idx` elements.
unsafe fn slice_insert<T>(slice: &mut [MaybeUninit<T>], idx: usize, val: T) {
    unsafe {
        let len = slice.len();
        debug_assert!(len > idx);
        let base = slice.as_mut_ptr();
        // Shift the elements after `idx` one slot to the right, if any;
        // the ranges overlap, so this must be `ptr::copy` (memmove).
        if idx + 1 < len {
            ptr::copy(base.add(idx), base.add(idx + 1), len - idx - 1);
        }
        // The slot at `idx` has been vacated (or was the trailing
        // uninitialized one); claim it for the new value.
        (*base.add(idx)).write(val);
    }
}
1660
/// Removes and returns a value from a slice of all initialized elements, leaving behind one
/// trailing uninitialized element.
///
/// # Safety
/// The slice has more than `idx` elements.
unsafe fn slice_remove<T>(slice: &mut [MaybeUninit<T>], idx: usize) -> T {
    unsafe {
        let len = slice.len();
        debug_assert!(idx < len);
        let base = slice.as_mut_ptr();
        // Read the value out before the shift below overwrites its slot.
        let removed = (*base.add(idx)).assume_init_read();
        // Close the gap: shift the tail one slot to the left. The ranges
        // overlap, so this must be `ptr::copy` (memmove).
        ptr::copy(base.add(idx + 1), base.add(idx), len - idx - 1);
        removed
    }
}
1676
/// Shifts the elements in a slice `distance` positions to the left.
///
/// # Safety
/// The slice has at least `distance` elements.
unsafe fn slice_shl<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    unsafe {
        let base = slice.as_mut_ptr();
        let count = slice.len() - distance;
        // Overlapping move of the surviving tail down to the front.
        ptr::copy(base.add(distance), base, count);
    }
}
1687
/// Shifts the elements in a slice `distance` positions to the right.
///
/// # Safety
/// The slice has at least `distance` elements.
unsafe fn slice_shr<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    unsafe {
        let base = slice.as_mut_ptr();
        let count = slice.len() - distance;
        // Overlapping move of the leading elements up by `distance`.
        ptr::copy(base, base.add(distance), count);
    }
}
1698
/// Moves all values from a slice of initialized elements to a slice
/// of uninitialized elements, leaving behind `src` as all uninitialized.
/// Works like `dst.copy_from_slice(src)` but does not require `T` to be `Copy`.
fn move_to_slice<T>(src: &mut [MaybeUninit<T>], dst: &mut [MaybeUninit<T>]) {
    let count = src.len();
    assert!(count == dst.len());
    // SAFETY: the two slices come from distinct mutable borrows, hence
    // cannot overlap, and the assert guarantees `dst` holds exactly
    // `count` elements.
    unsafe { ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), count) }
}
1708
1709 #[cfg(test)]
1710 mod tests;