// library/alloc/src/collections/btree/node.rs (from rustc 1.61.0)

// This is an attempt at an implementation following the ideal
//
// ```
// struct BTreeMap<K, V> {
//     height: usize,
//     root: Option<Box<Node<K, V, height>>>
// }
//
// struct Node<K, V, height: usize> {
//     keys: [K; 2 * B - 1],
//     vals: [V; 2 * B - 1],
//     edges: [if height > 0 { Box<Node<K, V, height - 1>> } else { () }; 2 * B],
//     parent: Option<(NonNull<Node<K, V, height + 1>>, u16)>,
//     len: u16,
// }
// ```
//
// Since Rust doesn't actually have dependent types and polymorphic recursion,
// we make do with lots of unsafety.

// A major goal of this module is to avoid complexity by treating the tree as a generic (if
// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
// even what underfull means. However, we do rely on a few invariants:
//
// - Trees must have uniform depth/height. This means that every path down to a leaf from a
//   given node has exactly the same length.
// - A node of length `n` has `n` keys, `n` values, and `n + 1` edges.
//   This implies that even an empty node has at least one edge.
//   For a leaf node, "having an edge" only means we can identify a position in the node,
//   since leaf edges are empty and need no data representation. In an internal node,
//   an edge both identifies a position and contains a pointer to a child node.

use core::marker::PhantomData;
use core::mem::{self, MaybeUninit};
use core::ptr::{self, NonNull};
use core::slice::SliceIndex;

use crate::alloc::{Allocator, Global, Layout};
use crate::boxed::Box;

const B: usize = 6;
pub const CAPACITY: usize = 2 * B - 1;
pub const MIN_LEN_AFTER_SPLIT: usize = B - 1;
const KV_IDX_CENTER: usize = B - 1;
const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
const EDGE_IDX_RIGHT_OF_CENTER: usize = B;

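// A minimal compile-time sanity check (illustrative addition, not part of the
// original module): these relations follow directly from the constants above,
// and the `splitpoint` function further down relies on them.
const _: () = {
    assert!(CAPACITY == 2 * B - 1);
    assert!(MIN_LEN_AFTER_SPLIT == B - 1);
    assert!(KV_IDX_CENTER + 1 + MIN_LEN_AFTER_SPLIT == CAPACITY);
};
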
/// The underlying representation of leaf nodes and part of the representation of internal nodes.
struct LeafNode<K, V> {
    /// We want to be covariant in `K` and `V`.
    parent: Option<NonNull<InternalNode<K, V>>>,

    /// This node's index into the parent node's `edges` array.
    /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
    /// This is only guaranteed to be initialized when `parent` is non-null.
    parent_idx: MaybeUninit<u16>,

    /// The number of keys and values this node stores.
    len: u16,

    /// The arrays storing the actual data of the node. Only the first `len` elements of each
    /// array are initialized and valid.
    keys: [MaybeUninit<K>; CAPACITY],
    vals: [MaybeUninit<V>; CAPACITY],
}

impl<K, V> LeafNode<K, V> {
    /// Initializes a new `LeafNode` in-place.
    unsafe fn init(this: *mut Self) {
        // As a general policy, we leave fields uninitialized if they can be, as this should
        // be both slightly faster and easier to track in Valgrind.
        unsafe {
            // parent_idx, keys, and vals are all MaybeUninit
            ptr::addr_of_mut!((*this).parent).write(None);
            ptr::addr_of_mut!((*this).len).write(0);
        }
    }

    /// Creates a new boxed `LeafNode`.
    fn new() -> Box<Self> {
        unsafe {
            let mut leaf = Box::new_uninit();
            LeafNode::init(leaf.as_mut_ptr());
            leaf.assume_init()
        }
    }
}

/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
/// `InternalNode` can be directly cast to a pointer to the underlying `LeafNode` portion of the
/// node, allowing code to act on leaf and internal nodes generically without having to even check
/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
#[repr(C)]
// gdb_providers.py uses this type name for introspection.
struct InternalNode<K, V> {
    data: LeafNode<K, V>,

    /// The pointers to the children of this node. `len + 1` of these are considered
    /// initialized and valid, except that near the end, while the tree is held
    /// through borrow type `Dying`, some of these pointers are dangling.
    edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
}

impl<K, V> InternalNode<K, V> {
    /// Creates a new boxed `InternalNode`.
    ///
    /// # Safety
    /// An invariant of internal nodes is that they have at least one
    /// initialized and valid edge. This function does not set up
    /// such an edge.
    unsafe fn new() -> Box<Self> {
        unsafe {
            let mut node = Box::<Self>::new_uninit();
            // We only need to initialize the data; the edges are MaybeUninit.
            LeafNode::init(ptr::addr_of_mut!((*node.as_mut_ptr()).data));
            node.assume_init()
        }
    }
}
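
// Illustrative sketch (added for exposition, not part of the original module):
// the `#[repr(C)]` layout above is what makes it sound to view an `InternalNode`
// through a pointer to its leading `LeafNode`, which `as_leaf_ptr` and
// `as_internal_ptr` below rely on.
#[allow(dead_code)]
fn _internal_node_as_leaf_demo<K, V>(node: &InternalNode<K, V>) -> u16 {
    // `data` is the first field of a `#[repr(C)]` struct, so the cast stays in bounds.
    let leaf_ptr = node as *const InternalNode<K, V> as *const LeafNode<K, V>;
    unsafe { (*leaf_ptr).len }
}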

/// A managed, non-null pointer to a node. This is either an owned pointer to
/// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>`.
///
/// However, `BoxedNode` contains no information as to which of the two types
/// of nodes it actually contains, and, partially due to this lack of information,
/// is not a separate type and has no destructor.
type BoxedNode<K, V> = NonNull<LeafNode<K, V>>;

// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
// However, whenever a public type wraps `NodeRef`, make sure that it has the
// correct variance.
///
/// A reference to a node.
///
/// This type has a number of parameters that control how it acts:
/// - `BorrowType`: A dummy type that describes the kind of borrow and carries a lifetime.
///    - When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`.
///    - When this is `ValMut<'a>`, the `NodeRef` acts roughly like `&'a Node`
///      with respect to keys and tree structure, but also allows many
///      mutable references to values throughout the tree to coexist.
///    - When this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
///      although insert methods allow a mutable pointer to a value to coexist.
///    - When this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`,
///      but does not have a destructor, and must be cleaned up manually.
///    - When this is `Dying`, the `NodeRef` still acts roughly like `Box<Node>`,
///      but has methods to destroy the tree bit by bit, and ordinary methods,
///      while not marked as unsafe to call, can invoke UB if called incorrectly.
///   Since any `NodeRef` allows navigating through the tree, `BorrowType`
///   effectively applies to the entire tree, not just to the node itself.
/// - `K` and `V`: These are the types of keys and values stored in the nodes.
/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
///   `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
///   `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
///   `NodeRef` could be pointing to either type of node.
///   `Type` is named `NodeType` when used outside `NodeRef`.
///
/// Both `BorrowType` and `NodeType` restrict what methods we implement, to
/// exploit static type safety. There are limitations in the way we can apply
/// such restrictions:
/// - For each type parameter, we can only define a method either generically
///   or for one particular type. For example, we cannot define a method like
///   `into_kv` generically for all `BorrowType`, or once for all types that
///   carry a lifetime, because we want it to return `&'a` references.
///   Therefore, we define it only for the least powerful type `Immut<'a>`.
/// - We cannot get implicit coercion from say `Mut<'a>` to `Immut<'a>`.
///   Therefore, we have to explicitly call `reborrow` on a more powerful
///   `NodeRef` in order to reach a method like `into_kv`.
///
/// All methods on `NodeRef` that return some kind of reference, either:
/// - Take `self` by value, and return the lifetime carried by `BorrowType`.
///   Sometimes, to invoke such a method, we need to call `reborrow_mut`.
/// - Take `self` by reference, and (implicitly) return that reference's
///   lifetime, instead of the lifetime carried by `BorrowType`. That way,
///   the borrow checker guarantees that the `NodeRef` remains borrowed as long
///   as the returned reference is used.
///   The methods supporting insert bend this rule by returning a raw pointer,
///   i.e., a reference without any lifetime.
pub struct NodeRef<BorrowType, K, V, Type> {
    /// The number of levels that the node and the level of leaves are apart, a
    /// constant of the node that cannot be entirely described by `Type`, and that
    /// the node itself does not store. We only need to store the height of the root
    /// node, and derive every other node's height from it.
    /// Must be zero if `Type` is `Leaf` and non-zero if `Type` is `Internal`.
    height: usize,
    /// The pointer to the leaf or internal node. The definition of `InternalNode`
    /// ensures that the pointer is valid either way.
    node: NonNull<LeafNode<K, V>>,
    _marker: PhantomData<(BorrowType, Type)>,
}

/// The root node of an owned tree.
///
/// Note that this does not have a destructor, and must be cleaned up manually.
pub type Root<K, V> = NodeRef<marker::Owned, K, V, marker::LeafOrInternal>;

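// Illustrative sketch (added for exposition, not part of the original module):
// how the `BorrowType` and `Type` parameters drive the API. An owned root is
// borrowed mutably for edits, reborrowed immutably for reads, and `force`
// recovers the dynamic node type hidden behind `LeafOrInternal`.
#[allow(dead_code)]
fn _node_ref_borrow_demo<K, V>(root: &mut Root<K, V>) -> usize {
    let mut_ref = root.borrow_mut(); // NodeRef<marker::Mut<'_>, K, V, LeafOrInternal>
    let shared = mut_ref.reborrow(); // NodeRef<marker::Immut<'_>, K, V, LeafOrInternal>
    match shared.force() {
        ForceResult::Leaf(leaf) => leaf.len(),
        ForceResult::Internal(internal) => internal.len(),
    }
}
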
impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
    fn clone(&self) -> Self {
        *self
    }
}

unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}

unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send for NodeRef<marker::Immut<'a>, K, V, Type> {}
unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::Mut<'a>, K, V, Type> {}
unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::ValMut<'a>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Dying, K, V, Type> {}

impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
    pub fn new_leaf() -> Self {
        Self::from_new_leaf(LeafNode::new())
    }

    fn from_new_leaf(leaf: Box<LeafNode<K, V>>) -> Self {
        NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData }
    }
}

impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
    fn new_internal(child: Root<K, V>) -> Self {
        let mut new_node = unsafe { InternalNode::new() };
        new_node.edges[0].write(child.node);
        unsafe { NodeRef::from_new_internal(new_node, child.height + 1) }
    }

    /// # Safety
    /// `height` must not be zero.
    unsafe fn from_new_internal(internal: Box<InternalNode<K, V>>, height: usize) -> Self {
        debug_assert!(height > 0);
        let node = NonNull::from(Box::leak(internal)).cast();
        let mut this = NodeRef { height, node, _marker: PhantomData };
        this.borrow_mut().correct_all_childrens_parent_links();
        this
    }
}

impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Unpack a node reference that was packed as `NodeRef::parent`.
    fn from_internal(node: NonNull<InternalNode<K, V>>, height: usize) -> Self {
        debug_assert!(height > 0);
        NodeRef { height, node: node.cast(), _marker: PhantomData }
    }
}

impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Exposes the data of an internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_internal_ptr(this: &Self) -> *mut InternalNode<K, V> {
        // SAFETY: the static node type is `Internal`.
        this.node.as_ptr() as *mut InternalNode<K, V>
    }
}

impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Borrows exclusive access to the data of an internal node.
    fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
        let ptr = Self::as_internal_ptr(self);
        unsafe { &mut *ptr }
    }
}

impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the length of the node. This is the number of keys or values.
    /// The number of edges is `len() + 1`.
    /// Note that, despite being safe, calling this function can have the side effect
    /// of invalidating mutable references that unsafe code has created.
    pub fn len(&self) -> usize {
        // Crucially, we only access the `len` field here. If BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        unsafe { usize::from((*Self::as_leaf_ptr(self)).len) }
    }

    /// Returns the number of levels that the node and leaves are apart. Zero
    /// height means the node is a leaf itself. If you picture trees with the
    /// root on top, the number says at which elevation the node appears.
    /// If you picture trees with leaves on top, the number says how high
    /// the tree extends above the node.
    pub fn height(&self) -> usize {
        self.height
    }

    /// Temporarily takes out another, immutable reference to the same node.
    pub fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Exposes the leaf portion of any leaf or internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_leaf_ptr(this: &Self) -> *mut LeafNode<K, V> {
        // The node must be valid for at least the LeafNode portion.
        // This is not a reference in the NodeRef type because we don't know if
        // it should be unique or shared.
        this.node.as_ptr()
    }
}

impl<BorrowType: marker::BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the parent of the current node. Returns `Ok(handle)` if the current
    /// node actually has a parent, where `handle` points to the edge of the parent
    /// that points to the current node. Returns `Err(self)` if the current node has
    /// no parent, giving back the original `NodeRef`.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub fn ascend(
        self,
    ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
        assert!(BorrowType::PERMITS_TRAVERSAL);
        // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        let leaf_ptr: *const _ = Self::as_leaf_ptr(&self);
        unsafe { (*leaf_ptr).parent }
            .as_ref()
            .map(|parent| Handle {
                node: NodeRef::from_internal(*parent, self.height + 1),
                idx: unsafe { usize::from((*leaf_ptr).parent_idx.assume_init()) },
                _marker: PhantomData,
            })
            .ok_or(self)
    }

    pub fn first_edge(self) -> Handle<Self, marker::Edge> {
        unsafe { Handle::new_edge(self, 0) }
    }

    pub fn last_edge(self) -> Handle<Self, marker::Edge> {
        let len = self.len();
        unsafe { Handle::new_edge(self, len) }
    }

    /// Note that `self` must be nonempty.
    pub fn first_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        unsafe { Handle::new_kv(self, 0) }
    }

    /// Note that `self` must be nonempty.
    pub fn last_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        unsafe { Handle::new_kv(self, len - 1) }
    }
}

impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Could be a public implementation of PartialEq, but only used in this module.
    fn eq(&self, other: &Self) -> bool {
        let Self { node, height, _marker } = self;
        if node.eq(&other.node) {
            debug_assert_eq!(*height, other.height);
            true
        } else {
            false
        }
    }
}

impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
    /// Exposes the leaf portion of any leaf or internal node in an immutable tree.
    fn into_leaf(self) -> &'a LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&self);
        // SAFETY: there can be no mutable references into this tree borrowed as `Immut`.
        unsafe { &*ptr }
    }

    /// Borrows a view into the keys stored in the node.
    pub fn keys(&self) -> &[K] {
        let leaf = self.into_leaf();
        unsafe {
            MaybeUninit::slice_assume_init_ref(leaf.keys.get_unchecked(..usize::from(leaf.len)))
        }
    }
}

impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
    /// Similar to `ascend`, gets a reference to a node's parent node, but also
    /// deallocates the current node in the process. This is unsafe because the
    /// current node will still be accessible despite being deallocated.
    pub unsafe fn deallocate_and_ascend(
        self,
    ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>> {
        let height = self.height;
        let node = self.node;
        let ret = self.ascend().ok();
        unsafe {
            Global.deallocate(
                node.cast(),
                if height > 0 {
                    Layout::new::<InternalNode<K, V>>()
                } else {
                    Layout::new::<LeafNode<K, V>>()
                },
            );
        }
        ret
    }
}

impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Temporarily takes out another mutable reference to the same node. Beware, as
    /// this method is very dangerous, doubly so since it might not immediately appear
    /// dangerous.
    ///
    /// Because mutable pointers can roam anywhere around the tree, the returned
    /// pointer can easily be used to make the original pointer dangling, out of
    /// bounds, or invalid under stacked borrow rules.
    // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef`
    // that restricts the use of navigation methods on reborrowed pointers,
    // preventing this unsafety.
    unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Borrows exclusive access to the leaf portion of a leaf or internal node.
    fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }

    /// Offers exclusive access to the leaf portion of a leaf or internal node.
    fn into_leaf_mut(mut self) -> &'a mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&mut self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }
}

impl<K, V, Type> NodeRef<marker::Dying, K, V, Type> {
    /// Borrows exclusive access to the leaf portion of a dying leaf or internal node.
    fn as_leaf_dying(&mut self) -> &mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }
}

impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to an element of the key storage area.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn key_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<K>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the key slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().keys.as_mut_slice().get_unchecked_mut(index) }
    }

    /// Borrows exclusive access to an element or slice of the node's value storage area.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn val_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<V>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the value slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().vals.as_mut_slice().get_unchecked_mut(index) }
    }
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Borrows exclusive access to an element or slice of the node's storage area for edge contents.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY + 1
    unsafe fn edge_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<BoxedNode<K, V>>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the edge slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_internal_mut().edges.as_mut_slice().get_unchecked_mut(index) }
    }
}

impl<'a, K, V, Type> NodeRef<marker::ValMut<'a>, K, V, Type> {
    /// # Safety
    /// - The node has more than `idx` initialized elements.
    unsafe fn into_key_val_mut_at(mut self, idx: usize) -> (&'a K, &'a mut V) {
        // We only create a reference to the one element we are interested in,
        // to avoid aliasing with outstanding references to other elements,
        // in particular, those returned to the caller in earlier iterations.
        let leaf = Self::as_leaf_ptr(&mut self);
        let keys = unsafe { ptr::addr_of!((*leaf).keys) };
        let vals = unsafe { ptr::addr_of_mut!((*leaf).vals) };
        // We must coerce to unsized array pointers because of Rust issue #74679.
        let keys: *const [_] = keys;
        let vals: *mut [_] = vals;
        let key = unsafe { (&*keys.get_unchecked(idx)).assume_init_ref() };
        let val = unsafe { (&mut *vals.get_unchecked_mut(idx)).assume_init_mut() };
        (key, val)
    }
}

impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to the length of the node.
    pub fn len_mut(&mut self) -> &mut u16 {
        &mut self.as_leaf_mut().len
    }
}

impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// # Safety
    /// Every item returned by `range` is a valid edge index for the node.
    unsafe fn correct_childrens_parent_links<R: Iterator<Item = usize>>(&mut self, range: R) {
        for i in range {
            debug_assert!(i <= self.len());
            unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
        }
    }

    fn correct_all_childrens_parent_links(&mut self) {
        let len = self.len();
        unsafe { self.correct_childrens_parent_links(0..=len) };
    }
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Sets the node's link to its parent edge,
    /// without invalidating other references to the node.
    fn set_parent_link(&mut self, parent: NonNull<InternalNode<K, V>>, parent_idx: usize) {
        let leaf = Self::as_leaf_ptr(self);
        unsafe { (*leaf).parent = Some(parent) };
        unsafe { (*leaf).parent_idx.write(parent_idx as u16) };
    }
}

impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
    /// Clears the root's link to its parent edge.
    fn clear_parent_link(&mut self) {
        let mut root_node = self.borrow_mut();
        let leaf = root_node.as_leaf_mut();
        leaf.parent = None;
    }
}

impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
    /// Returns a new owned tree, with its own root node that is initially empty.
    pub fn new() -> Self {
        NodeRef::new_leaf().forget_type()
    }

    /// Adds a new internal node with a single edge pointing to the previous root node,
    /// makes that new node the root node, and returns it. This increases the height by 1
    /// and is the opposite of `pop_internal_level`.
    pub fn push_internal_level(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
        super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root).forget_type());

        // `self.borrow_mut()`, except that we just forgot we're internal now:
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Removes the internal root node, using its first child as the new root node.
    /// As it is intended only to be called when the root node has only one child,
    /// no cleanup is done on any of the keys, values and other children.
    /// This decreases the height by 1 and is the opposite of `push_internal_level`.
    ///
    /// Requires exclusive access to the `NodeRef` object but not to the root node;
    /// it will not invalidate other handles or references to the root node.
    ///
    /// Panics if there is no internal level, i.e., if the root node is a leaf.
    pub fn pop_internal_level(&mut self) {
        assert!(self.height > 0);

        let top = self.node;

        // SAFETY: we asserted to be internal.
        let internal_self = unsafe { self.borrow_mut().cast_to_internal_unchecked() };
        // SAFETY: we borrowed `self` exclusively and its borrow type is exclusive.
        let internal_node = unsafe { &mut *NodeRef::as_internal_ptr(&internal_self) };
        // SAFETY: the first edge is always initialized.
        self.node = unsafe { internal_node.edges[0].assume_init_read() };
        self.height -= 1;
        self.clear_parent_link();

        unsafe {
            Global.deallocate(top.cast(), Layout::new::<InternalNode<K, V>>());
        }
    }
}

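// Illustrative sketch (added for exposition, not part of the original module):
// growing and shrinking an owned tree by one level around an initially empty
// root leaf; the stored height stays consistent with the node type.
#[allow(dead_code)]
fn _root_level_demo() {
    let mut root: Root<u8, ()> = NodeRef::new();
    assert_eq!(root.height(), 0);
    root.push_internal_level();
    assert_eq!(root.height(), 1);
    root.pop_internal_level();
    assert_eq!(root.height(), 0);
    // The root has no destructor; hand it back to the allocator manually.
    let _parent = unsafe { root.into_dying().deallocate_and_ascend() };
}
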
impl<K, V, Type> NodeRef<marker::Owned, K, V, Type> {
    /// Mutably borrows the owned root node. Unlike `reborrow_mut`, this is safe
    /// because the return value cannot be used to destroy the root, and there
    /// cannot be other references to the tree.
    pub fn borrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Slightly mutably borrows the owned root node.
    pub fn borrow_valmut(&mut self) -> NodeRef<marker::ValMut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Irreversibly transitions to a reference that permits traversal and offers
    /// destructive methods and little else.
    pub fn into_dying(self) -> NodeRef<marker::Dying, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
    /// Adds a key-value pair to the end of the node, and returns
    /// a mutable reference to the inserted value.
    pub fn push(&mut self, key: K, val: V) -> &mut V {
        let len = self.len_mut();
        let idx = usize::from(*len);
        assert!(idx < CAPACITY);
        *len += 1;
        unsafe {
            self.key_area_mut(idx).write(key);
            self.val_area_mut(idx).write(val)
        }
    }
}

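// Illustrative sketch (added for exposition, not part of the original module):
// pushing into a fresh leaf root, rewriting the value through the returned
// reference, and reading the pair back through a KV handle.
#[allow(dead_code)]
fn _leaf_push_demo() {
    let mut root = NodeRef::new_leaf();
    *root.borrow_mut().push(1u8, "one") = "uno";
    let (k, v) = unsafe { Handle::new_kv(root.reborrow(), 0) }.into_kv();
    assert_eq!((*k, *v), (1u8, "uno"));
    // Clean up manually, as owned nodes have no destructor.
    let _parent = unsafe { root.forget_type().into_dying().deallocate_and_ascend() };
}
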
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Adds a key-value pair, and an edge to go to the right of that pair,
    /// to the end of the node.
    pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
        assert!(edge.height == self.height - 1);

        let len = self.len_mut();
        let idx = usize::from(*len);
        assert!(idx < CAPACITY);
        *len += 1;
        unsafe {
            self.key_area_mut(idx).write(key);
            self.val_area_mut(idx).write(val);
            self.edge_area_mut(idx + 1).write(edge.node);
            Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
        }
    }
}

impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Leaf> {
    /// Removes any static information asserting that this node is a `Leaf` node.
    pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}

impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Removes any static information asserting that this node is an `Internal` node.
    pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}

impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
    /// Checks whether a node is an `Internal` node or a `Leaf` node.
    pub fn force(
        self,
    ) -> ForceResult<
        NodeRef<BorrowType, K, V, marker::Leaf>,
        NodeRef<BorrowType, K, V, marker::Internal>,
    > {
        if self.height == 0 {
            ForceResult::Leaf(NodeRef {
                height: self.height,
                node: self.node,
                _marker: PhantomData,
            })
        } else {
            ForceResult::Internal(NodeRef {
                height: self.height,
                node: self.node,
                _marker: PhantomData,
            })
        }
    }
}

impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Unsafely asserts to the compiler the static information that this node is a `Leaf`.
    unsafe fn cast_to_leaf_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
        debug_assert!(self.height == 0);
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Unsafely asserts to the compiler the static information that this node is an `Internal`.
    unsafe fn cast_to_internal_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
        debug_assert!(self.height > 0);
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}

/// A reference to a specific key-value pair or edge within a node. The `Node` parameter
/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key-value
/// pair) or `Edge` (signifying a handle on an edge).
///
/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
/// a child node, these represent the spaces where child pointers would go between the key-value
/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
/// to the left of the node, one between the two pairs, and one at the right of the node.
pub struct Handle<Node, Type> {
    node: Node,
    idx: usize,
    _marker: PhantomData<Type>,
}

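// Illustrative sketch (added for exposition, not part of the original module):
// in a leaf of length 2 the KV handles sit at indices 0 and 1, and the edge
// handles returned by `left_edge`/`right_edge` bracket them at indices 0..=2.
#[allow(dead_code)]
fn _handle_positions_demo() {
    let mut root = NodeRef::new_leaf();
    let mut node = root.borrow_mut();
    node.push(10u8, ());
    node.push(20u8, ());
    let kv = unsafe { Handle::new_kv(root.reborrow(), 1) };
    assert_eq!(kv.idx(), 1);
    assert_eq!(kv.left_edge().idx(), 1);
    assert_eq!(kv.right_edge().idx(), 2);
    let _parent = unsafe { root.forget_type().into_dying().deallocate_and_ascend() };
}
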
impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
// `Clone`able is when it is an immutable reference and therefore `Copy`.
impl<Node: Copy, Type> Clone for Handle<Node, Type> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<Node, Type> Handle<Node, Type> {
    /// Retrieves the node that contains the edge or key-value pair this handle points to.
    pub fn into_node(self) -> Node {
        self.node
    }

    /// Returns the position of this handle in the node.
    pub fn idx(&self) -> usize {
        self.idx
    }
}

impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
    /// Creates a new handle to a key-value pair in `node`.
    /// Unsafe because the caller must ensure that `idx < node.len()`.
    pub unsafe fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
        debug_assert!(idx < node.len());

        Handle { node, idx, _marker: PhantomData }
    }

    pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
        unsafe { Handle::new_edge(self.node, self.idx) }
    }

    pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
        unsafe { Handle::new_edge(self.node, self.idx + 1) }
    }
}

impl<BorrowType, K, V, NodeType, HandleType> PartialEq
    for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
{
    fn eq(&self, other: &Self) -> bool {
        let Self { node, idx, _marker } = self;
        node.eq(&other.node) && *idx == other.idx
    }
}

impl<BorrowType, K, V, NodeType, HandleType>
    Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
{
    /// Temporarily takes out another immutable handle on the same location.
    pub fn reborrow(&self) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
        Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData }
    }
}

impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
    /// Temporarily takes out another mutable handle on the same location. Beware, as
    /// this method is very dangerous, doubly so since it might not immediately appear
    /// dangerous.
    ///
    /// For details, see `NodeRef::reborrow_mut`.
    pub unsafe fn reborrow_mut(
        &mut self,
    ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
        Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
    }
}

impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
    /// Creates a new handle to an edge in `node`.
    /// Unsafe because the caller must ensure that `idx <= node.len()`.
    pub unsafe fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
        debug_assert!(idx <= node.len());

        Handle { node, idx, _marker: PhantomData }
    }

    pub fn left_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
        if self.idx > 0 {
            Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) })
        } else {
            Err(self)
        }
    }

    pub fn right_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
        if self.idx < self.node.len() {
            Ok(unsafe { Handle::new_kv(self.node, self.idx) })
        } else {
            Err(self)
        }
    }
}

pub enum LeftOrRight<T> {
    Left(T),
    Right(T),
}

/// Given an edge index where we want to insert into a node filled to capacity,
/// computes a sensible KV index of a split point and where to perform the insertion.
/// The goal of the split point is for its key and value to end up in a parent node;
/// the keys, values and edges to the left of the split point become the left child;
/// the keys, values and edges to the right of the split point become the right child.
fn splitpoint(edge_idx: usize) -> (usize, LeftOrRight<usize>) {
    debug_assert!(edge_idx <= CAPACITY);
    // Rust issue #74834 tries to explain these symmetric rules.
    match edge_idx {
        0..EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER - 1, LeftOrRight::Left(edge_idx)),
        EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Left(edge_idx)),
        EDGE_IDX_RIGHT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Right(0)),
        _ => (KV_IDX_CENTER + 1, LeftOrRight::Right(edge_idx - (KV_IDX_CENTER + 1 + 1))),
    }
}

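// Illustrative sketch (added for exposition, not part of the original module):
// what `splitpoint` chooses for a full node (CAPACITY == 11) when the insertion
// edge is at the far left, just left/right of the center, or at the far right.
#[allow(dead_code)]
fn _splitpoint_examples() {
    assert!(matches!(splitpoint(0), (4, LeftOrRight::Left(0))));
    assert!(matches!(splitpoint(EDGE_IDX_LEFT_OF_CENTER), (5, LeftOrRight::Left(5))));
    assert!(matches!(splitpoint(EDGE_IDX_RIGHT_OF_CENTER), (5, LeftOrRight::Right(0))));
    assert!(matches!(splitpoint(CAPACITY), (6, LeftOrRight::Right(4))));
}
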
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method assumes that there is enough space in the node for the new
    /// pair to fit.
    ///
    /// The returned pointer points to the inserted value.
    fn insert_fit(&mut self, key: K, val: V) -> *mut V {
        debug_assert!(self.node.len() < CAPACITY);
        let new_len = self.node.len() + 1;

        unsafe {
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            *self.node.len_mut() = new_len as u16;

            self.node.val_area_mut(self.idx).assume_init_mut()
        }
    }
}

impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room.
    ///
    /// The returned pointer points to the inserted value.
    fn insert(mut self, key: K, val: V) -> (Option<SplitResult<'a, K, V, marker::Leaf>>, *mut V) {
        if self.node.len() < CAPACITY {
            let val_ptr = self.insert_fit(key, val);
            (None, val_ptr)
        } else {
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split();
            let mut insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            let val_ptr = insertion_edge.insert_fit(key, val);
            (Some(result), val_ptr)
        }
    }
}

impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Fixes the parent pointer and index in the child node that this edge
    /// links to. This is useful when the ordering of edges has been changed.
    fn correct_parent_link(self) {
        // Create backpointer without invalidating other references to the node.
        let ptr = unsafe { NonNull::new_unchecked(NodeRef::as_internal_ptr(&self.node)) };
        let idx = self.idx;
        let mut child = self.descend();
        child.set_parent_link(ptr, idx);
    }
}

impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method assumes
    /// that there is enough space in the node for the new pair to fit.
    fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
        debug_assert!(self.node.len() < CAPACITY);
        debug_assert!(edge.height == self.node.height - 1);
        let new_len = self.node.len() + 1;

        unsafe {
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            slice_insert(self.node.edge_area_mut(..new_len + 1), self.idx + 1, edge.node);
            *self.node.len_mut() = new_len as u16;

            self.node.correct_childrens_parent_links(self.idx + 1..new_len + 1);
        }
    }

    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method splits
    /// the node if there isn't enough room.
    fn insert(
        mut self,
        key: K,
        val: V,
        edge: Root<K, V>,
    ) -> Option<SplitResult<'a, K, V, marker::Internal>> {
        assert!(edge.height == self.node.height - 1);

        if self.node.len() < CAPACITY {
            self.insert_fit(key, val, edge);
            None
        } else {
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split();
            let mut insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            insertion_edge.insert_fit(key, val, edge);
            Some(result)
        }
    }
}

impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room, and tries to
    /// insert the split off portion into the parent node recursively, until the root is reached.
    ///
    /// If the returned result is some `SplitResult`, the `left` field will be the root node.
    /// The returned pointer points to the inserted value, which in the case of `SplitResult`
    /// is in the `left` or `right` tree.
    pub fn insert_recursing(
        self,
        key: K,
        value: V,
    ) -> (Option<SplitResult<'a, K, V, marker::LeafOrInternal>>, *mut V) {
        let (mut split, val_ptr) = match self.insert(key, value) {
            (None, val_ptr) => return (None, val_ptr),
            (Some(split), val_ptr) => (split.forget_node_type(), val_ptr),
        };

        loop {
            split = match split.left.ascend() {
                Ok(parent) => match parent.insert(split.kv.0, split.kv.1, split.right) {
                    None => return (None, val_ptr),
                    Some(split) => split.forget_node_type(),
                },
                Err(root) => return (Some(SplitResult { left: root, ..split }), val_ptr),
            };
        }
    }
}

impl<BorrowType: marker::BorrowType, K, V>
    Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>
{
    /// Finds the node pointed to by this edge.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        assert!(BorrowType::PERMITS_TRAVERSAL);
        // We need to use raw pointers to nodes because, if BorrowType is
        // marker::ValMut, there might be outstanding mutable references to
        // values that we must not invalidate. There's no worry accessing the
        // height field because that value is copied. Beware that, once the
        // node pointer is dereferenced, we access the edges array with a
        // reference (Rust issue #73987) and invalidate any other references
        // to or inside the array, should any be around.
        let parent_ptr = NodeRef::as_internal_ptr(&self.node);
        let node = unsafe { (*parent_ptr).edges.get_unchecked(self.idx).assume_init_read() };
        NodeRef { node, height: self.node.height - 1, _marker: PhantomData }
    }
}

impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
    pub fn into_kv(self) -> (&'a K, &'a V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf();
        let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() };
        let v = unsafe { leaf.vals.get_unchecked(self.idx).assume_init_ref() };
        (k, v)
    }
}

impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    pub fn key_mut(&mut self) -> &mut K {
        unsafe { self.node.key_area_mut(self.idx).assume_init_mut() }
    }

    pub fn into_val_mut(self) -> &'a mut V {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf_mut();
        unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() }
    }
}

impl<'a, K, V, NodeType> Handle<NodeRef<marker::ValMut<'a>, K, V, NodeType>, marker::KV> {
    pub fn into_kv_valmut(self) -> (&'a K, &'a mut V) {
        unsafe { self.node.into_key_val_mut_at(self.idx) }
    }
}

impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
        debug_assert!(self.idx < self.node.len());
        // We cannot call separate key and value methods, because calling the second one
        // invalidates the reference returned by the first.
        unsafe {
            let leaf = self.node.as_leaf_mut();
            let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_mut();
            let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_mut();
            (key, val)
        }
    }

    /// Replaces the key and value that the KV handle refers to.
    pub fn replace_kv(&mut self, k: K, v: V) -> (K, V) {
        let (key, val) = self.kv_mut();
        (mem::replace(key, k), mem::replace(val, v))
    }
}

impl<K, V, NodeType> Handle<NodeRef<marker::Dying, K, V, NodeType>, marker::KV> {
    /// Extracts the key and value that the KV handle refers to.
    /// # Safety
    /// The node that the handle refers to must not yet have been deallocated.
    pub unsafe fn into_key_val(mut self) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.as_leaf_dying();
        unsafe {
            let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_read();
            let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_read();
            (key, val)
        }
    }

    /// Drops the key and value that the KV handle refers to.
    /// # Safety
    /// The node that the handle refers to must not yet have been deallocated.
    #[inline]
    pub unsafe fn drop_key_val(mut self) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.as_leaf_dying();
        unsafe {
            leaf.keys.get_unchecked_mut(self.idx).assume_init_drop();
            leaf.vals.get_unchecked_mut(self.idx).assume_init_drop();
        }
    }
}

impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Helps implementations of `split` for a particular `NodeType`,
    /// by taking care of leaf data.
    fn split_leaf_data(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let old_len = self.node.len();
        let new_len = old_len - self.idx - 1;
        new_node.len = new_len as u16;
        unsafe {
            let k = self.node.key_area_mut(self.idx).assume_init_read();
            let v = self.node.val_area_mut(self.idx).assume_init_read();

            move_to_slice(
                self.node.key_area_mut(self.idx + 1..old_len),
                &mut new_node.keys[..new_len],
            );
            move_to_slice(
                self.node.val_area_mut(self.idx + 1..old_len),
                &mut new_node.vals[..new_len],
            );

            *self.node.len_mut() = self.idx as u16;
            (k, v)
        }
    }
}

impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the key-value pairs to the left of
    ///   this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the key-value pairs to the right of this handle are put into a newly
    ///   allocated node.
    pub fn split(mut self) -> SplitResult<'a, K, V, marker::Leaf> {
        let mut new_node = LeafNode::new();

        let kv = self.split_leaf_data(&mut new_node);

        let right = NodeRef::from_new_leaf(new_node);
        SplitResult { left: self.node, kv, right }
    }

    /// Removes the key-value pair pointed to by this handle and returns it, along with the edge
    /// that the key-value pair collapsed into.
    pub fn remove(
        mut self,
    ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
        let old_len = self.node.len();
        unsafe {
            let k = slice_remove(self.node.key_area_mut(..old_len), self.idx);
            let v = slice_remove(self.node.val_area_mut(..old_len), self.idx);
            *self.node.len_mut() = (old_len - 1) as u16;
            ((k, v), self.left_edge())
        }
    }
}

impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the edges and key-value pairs to the
    ///   left of this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the edges and key-value pairs to the right of this handle are put into
    ///   a newly allocated node.
    pub fn split(mut self) -> SplitResult<'a, K, V, marker::Internal> {
        let old_len = self.node.len();
        unsafe {
            let mut new_node = InternalNode::new();
            let kv = self.split_leaf_data(&mut new_node.data);
            let new_len = usize::from(new_node.data.len);
            move_to_slice(
                self.node.edge_area_mut(self.idx + 1..old_len + 1),
                &mut new_node.edges[..new_len + 1],
            );

            let height = self.node.height;
            let right = NodeRef::from_new_internal(new_node, height);

            SplitResult { left: self.node, kv, right }
        }
    }
}

fc512014
XL
1166/// Represents a session for evaluating and performing a balancing operation
1167/// around an internal key-value pair.
1168pub struct BalancingContext<'a, K, V> {
1169 parent: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV>,
1170 left_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
1171 right_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
1172}
1173
1174impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
1175 pub fn consider_for_balancing(self) -> BalancingContext<'a, K, V> {
9cc50fc6
SL
1176 let self1 = unsafe { ptr::read(&self) };
1177 let self2 = unsafe { ptr::read(&self) };
fc512014
XL
1178 BalancingContext {
1179 parent: self,
1180 left_child: self1.left_edge().descend(),
1181 right_child: self2.right_edge().descend(),
1182 }
1183 }
1184}
1185
1186impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
1187 /// Chooses a balancing context involving the node as a child, thus between
1188 /// the KV immediately to the left or to the right in the parent node.
1189 /// Returns an `Err` if there is no parent.
1190 /// Panics if the parent is empty.
1191 ///
1192 /// Prefers the left side, to be optimal if the given node is somehow
1193 /// underfull, meaning here only that it has fewer elements than its left
1194 /// sibling and than its right sibling, if they exist. In that case,
1195 /// merging with the left sibling is faster, since we only need to move
1196 /// the node's N elements, instead of shifting them to the right and moving
1197 /// more than N elements in front. Stealing from the left sibling is also
1198 /// typically faster, since we only need to shift the node's N elements to
1199 /// the right, instead of shifting at least N of the sibling's elements to
1200 /// the left.
1201 pub fn choose_parent_kv(self) -> Result<LeftOrRight<BalancingContext<'a, K, V>>, Self> {
1202 match unsafe { ptr::read(&self) }.ascend() {
1203 Ok(parent_edge) => match parent_edge.left_kv() {
1204 Ok(left_parent_kv) => Ok(LeftOrRight::Left(BalancingContext {
1205 parent: unsafe { ptr::read(&left_parent_kv) },
1206 left_child: left_parent_kv.left_edge().descend(),
1207 right_child: self,
1208 })),
1209 Err(parent_edge) => match parent_edge.right_kv() {
1210 Ok(right_parent_kv) => Ok(LeftOrRight::Right(BalancingContext {
1211 parent: unsafe { ptr::read(&right_parent_kv) },
1212 left_child: self,
1213 right_child: right_parent_kv.right_edge().descend(),
1214 })),
1215 Err(_) => unreachable!("empty internal node"),
1216 },
1217 },
1218 Err(root) => Err(root),
1219 }
1220 }
1221}
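// Decision sketch (illustration only; names and the standalone helper are made up):
// given the child's edge index within its parent and the parent's length, the
// preference above amounts to "use the KV to the left of the child's edge if there
// is one, otherwise the KV to its right":
//
// ```
// enum ParentKvChoice {
//     LeftOfChild,  // KV at `edge_idx - 1`; the child becomes the right child
//     RightOfChild, // KV at `edge_idx`; the child becomes the left child
// }
//
// fn choose(parent_len: usize, edge_idx: usize) -> ParentKvChoice {
//     if edge_idx > 0 {
//         ParentKvChoice::LeftOfChild
//     } else {
//         // `edge_idx == 0`: the only candidate is the KV to the right, which
//         // exists as long as the parent is non-empty (otherwise the real code
//         // panics with "empty internal node").
//         assert!(parent_len > 0);
//         ParentKvChoice::RightOfChild
//     }
// }
// ```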
1222
1223impl<'a, K, V> BalancingContext<'a, K, V> {
1224 pub fn left_child_len(&self) -> usize {
1225 self.left_child.len()
1226 }
1227
1228 pub fn right_child_len(&self) -> usize {
1229 self.right_child.len()
1230 }
1231
1232 pub fn into_left_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
1233 self.left_child
1234 }
1235
1236 pub fn into_right_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
1237 self.right_child
1238 }
1239
5869c6ff
XL
1240 /// Returns whether merging is possible, i.e., whether there is enough room
1241 /// in a node to combine the central KV with both adjacent child nodes.
fc512014
XL
1242 pub fn can_merge(&self) -> bool {
1243 self.left_child.len() + 1 + self.right_child.len() <= CAPACITY
1244 }
1245}
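// For orientation (numbers only, assuming the constants defined at the top of this
// file): with B = 6 the capacity is 11, so two siblings can be merged exactly when
// their combined length plus the separating KV still fits in one node.
//
// ```
// const CAPACITY: usize = 11; // 2 * B - 1 with B = 6
//
// fn can_merge(left_len: usize, right_len: usize) -> bool {
//     left_len + 1 + right_len <= CAPACITY
// }
//
// fn demo() {
//     assert!(can_merge(5, 5));  // 5 + 1 + 5 == 11: fits exactly
//     assert!(!can_merge(5, 6)); // 12 > 11: must steal instead of merging
// }
// ```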
1246
1247impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
5869c6ff
XL
1248 /// Performs a merge and lets a closure decide what to return.
1249 fn do_merge<
1250 F: FnOnce(
1251 NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
1252 NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
1253 ) -> R,
1254 R,
1255 >(
fc512014 1256 self,
5869c6ff
XL
1257 result: F,
1258 ) -> R {
fc512014
XL
1259 let Handle { node: mut parent_node, idx: parent_idx, _marker } = self.parent;
1260 let old_parent_len = parent_node.len();
1261 let mut left_node = self.left_child;
1262 let old_left_len = left_node.len();
5869c6ff 1263 let mut right_node = self.right_child;
9cc50fc6 1264 let right_len = right_node.len();
fc512014 1265 let new_left_len = old_left_len + 1 + right_len;
9cc50fc6 1266
fc512014 1267 assert!(new_left_len <= CAPACITY);
1a4d82fc 1268
9cc50fc6 1269 unsafe {
fc512014 1270 *left_node.len_mut() = new_left_len as u16;
29967ef6 1271
5869c6ff
XL
1272 let parent_key = slice_remove(parent_node.key_area_mut(..old_parent_len), parent_idx);
1273 left_node.key_area_mut(old_left_len).write(parent_key);
1274 move_to_slice(
1275 right_node.key_area_mut(..right_len),
1276 left_node.key_area_mut(old_left_len + 1..new_left_len),
dfeec247 1277 );
29967ef6 1278
5869c6ff
XL
1279 let parent_val = slice_remove(parent_node.val_area_mut(..old_parent_len), parent_idx);
1280 left_node.val_area_mut(old_left_len).write(parent_val);
1281 move_to_slice(
1282 right_node.val_area_mut(..right_len),
1283 left_node.val_area_mut(old_left_len + 1..new_left_len),
9cc50fc6
SL
1284 );
1285
5869c6ff 1286 slice_remove(&mut parent_node.edge_area_mut(..old_parent_len + 1), parent_idx + 1);
fc512014
XL
1287 parent_node.correct_childrens_parent_links(parent_idx + 1..old_parent_len);
1288 *parent_node.len_mut() -= 1;
7453a54e 1289
fc512014 1290 if parent_node.height > 1 {
3dfed10e
XL
1291 // SAFETY: the height of the nodes being merged is one below the height
1292 // of the node of this edge, thus above zero, so they are internal.
fc512014 1293 let mut left_node = left_node.reborrow_mut().cast_to_internal_unchecked();
5869c6ff
XL
1294 let mut right_node = right_node.cast_to_internal_unchecked();
1295 move_to_slice(
1296 right_node.edge_area_mut(..right_len + 1),
1297 left_node.edge_area_mut(old_left_len + 1..new_left_len + 1),
9cc50fc6
SL
1298 );
1299
fc512014 1300 left_node.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);
1a4d82fc 1301
fc512014 1302 Global.deallocate(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
9cc50fc6 1303 } else {
fc512014 1304 Global.deallocate(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
3dfed10e 1305 }
9cc50fc6 1306 }
5869c6ff
XL
1307 result(parent_node, left_node)
1308 }
1309
1310 /// Merges the parent's key-value pair and both adjacent child nodes into
1311 /// the left child node and returns the shrunk parent node.
1312 ///
1313 /// Panics unless we `.can_merge()`.
1314 pub fn merge_tracking_parent(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
1315 self.do_merge(|parent, _child| parent)
1316 }
1317
1318 /// Merges the parent's key-value pair and both adjacent child nodes into
1319 /// the left child node and returns that child node.
1320 ///
1321 /// Panics unless we `.can_merge()`.
1322 pub fn merge_tracking_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
1323 self.do_merge(|_parent, child| child)
1324 }
1325
1326 /// Merges the parent's key-value pair and both adjacent child nodes into
1327 /// the left child node and returns the edge handle in that child node
 1328 /// where the tracked child edge ended up.
1329 ///
1330 /// Panics unless we `.can_merge()`.
1331 pub fn merge_tracking_child_edge(
1332 self,
1333 track_edge_idx: LeftOrRight<usize>,
1334 ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
1335 let old_left_len = self.left_child.len();
1336 let right_len = self.right_child.len();
1337 assert!(match track_edge_idx {
1338 LeftOrRight::Left(idx) => idx <= old_left_len,
1339 LeftOrRight::Right(idx) => idx <= right_len,
1340 });
1341 let child = self.merge_tracking_child();
1342 let new_idx = match track_edge_idx {
1343 LeftOrRight::Left(idx) => idx,
1344 LeftOrRight::Right(idx) => old_left_len + 1 + idx,
1345 };
1346 unsafe { Handle::new_edge(child, new_idx) }
85aaf69f 1347 }
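// Index arithmetic sketch (hypothetical helper mirroring the `match` above): an edge
// tracked in the left child keeps its index, while an edge tracked in the right child
// is displaced by the left child's old KVs plus the KV pulled down from the parent.
//
// ```
// fn merged_edge_idx(old_left_len: usize, track_edge_idx: LeftOrRight<usize>) -> usize {
//     match track_edge_idx {
//         LeftOrRight::Left(idx) => idx,
//         LeftOrRight::Right(idx) => old_left_len + 1 + idx,
//     }
// }
//
// // E.g. with old_left_len == 4, the right child's edge 0 ends up at index 5,
// // immediately to the right of the KV that came down from the parent.
// ```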
a7813a04 1348
fc512014
XL
1349 /// Removes a key-value pair from the left child and places it in the key-value storage
1350 /// of the parent, while pushing the old parent key-value pair into the right child.
1351 /// Returns a handle to the edge in the right child corresponding to where the original
1352 /// edge specified by `track_right_edge_idx` ended up.
1353 pub fn steal_left(
1354 mut self,
1355 track_right_edge_idx: usize,
1356 ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
5869c6ff
XL
1357 self.bulk_steal_left(1);
1358 unsafe { Handle::new_edge(self.right_child, 1 + track_right_edge_idx) }
a7813a04
XL
1359 }
1360
fc512014
XL
1361 /// Removes a key-value pair from the right child and places it in the key-value storage
1362 /// of the parent, while pushing the old parent key-value pair onto the left child.
1363 /// Returns a handle to the edge in the left child specified by `track_left_edge_idx`,
1364 /// which didn't move.
1365 pub fn steal_right(
1366 mut self,
1367 track_left_edge_idx: usize,
1368 ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
5869c6ff
XL
1369 self.bulk_steal_right(1);
1370 unsafe { Handle::new_edge(self.left_child, track_left_edge_idx) }
a7813a04
XL
1371 }
1372
 1373 /// Like `steal_left`, but steals `count` key-value pairs at once.
3157f602 1374 pub fn bulk_steal_left(&mut self, count: usize) {
fc512014 1375 assert!(count > 0);
a7813a04 1376 unsafe {
fc512014
XL
1377 let left_node = &mut self.left_child;
1378 let old_left_len = left_node.len();
1379 let right_node = &mut self.right_child;
1380 let old_right_len = right_node.len();
a7813a04
XL
1381
1382 // Make sure that we may steal safely.
fc512014
XL
1383 assert!(old_right_len + count <= CAPACITY);
1384 assert!(old_left_len >= count);
3157f602 1385
fc512014
XL
1386 let new_left_len = old_left_len - count;
1387 let new_right_len = old_right_len + count;
1388 *left_node.len_mut() = new_left_len as u16;
1389 *right_node.len_mut() = new_right_len as u16;
3157f602 1390
fc512014 1391 // Move leaf data.
3157f602 1392 {
3157f602 1393 // Make room for stolen elements in the right child.
5869c6ff
XL
1394 slice_shr(right_node.key_area_mut(..new_right_len), count);
1395 slice_shr(right_node.val_area_mut(..new_right_len), count);
3157f602
XL
1396
1397 // Move elements from the left child to the right one.
5869c6ff
XL
1398 move_to_slice(
1399 left_node.key_area_mut(new_left_len + 1..old_left_len),
1400 right_node.key_area_mut(..count - 1),
1401 );
1402 move_to_slice(
1403 left_node.val_area_mut(new_left_len + 1..old_left_len),
1404 right_node.val_area_mut(..count - 1),
1405 );
3157f602
XL
1406
1407 // Move the left-most stolen pair to the parent.
5869c6ff
XL
1408 let k = left_node.key_area_mut(new_left_len).assume_init_read();
1409 let v = left_node.val_area_mut(new_left_len).assume_init_read();
1410 let (k, v) = self.parent.replace_kv(k, v);
1411
1412 // Move parent's key-value pair to the right child.
1413 right_node.key_area_mut(count - 1).write(k);
1414 right_node.val_area_mut(count - 1).write(v);
a7813a04
XL
1415 }
1416
fc512014 1417 match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
5869c6ff 1418 (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
3157f602 1419 // Make room for stolen edges.
5869c6ff 1420 slice_shr(right.edge_area_mut(..new_right_len + 1), count);
3157f602 1421
fc512014 1422 // Steal edges.
5869c6ff
XL
1423 move_to_slice(
1424 left.edge_area_mut(new_left_len + 1..old_left_len + 1),
1425 right.edge_area_mut(..count),
1426 );
1427
1428 right.correct_childrens_parent_links(0..new_right_len + 1);
dfeec247
XL
1429 }
1430 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
1b1a35ee 1431 _ => unreachable!(),
a7813a04 1432 }
3157f602
XL
1433 }
1434 }
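// Worked model (illustration only; plain sorted vectors stand in for node storage and
// the helper name is made up). Stealing `count` pairs from the left child is equivalent
// to repeating the single steal `count` times: the left child's right-most element
// replaces the parent KV, and the displaced parent KV becomes the right child's new
// left-most element.
//
// ```
// fn bulk_steal_left_model(left: &mut Vec<i32>, parent: &mut i32, right: &mut Vec<i32>, count: usize) {
//     assert!(count > 0 && left.len() >= count);
//     for _ in 0..count {
//         let stolen = left.pop().unwrap();                  // right-most key of the left child
//         let old_parent = core::mem::replace(parent, stolen);
//         right.insert(0, old_parent);                       // goes just left of the old right keys
//     }
// }
//
// fn demo() {
//     let mut left = vec![1, 2, 3, 4, 5, 6, 7];
//     let mut parent = 8;
//     let mut right = vec![9, 10, 11];
//     bulk_steal_left_model(&mut left, &mut parent, &mut right, 2);
//     assert_eq!(left, [1, 2, 3, 4, 5]);
//     assert_eq!(parent, 6);
//     assert_eq!(right, [7, 8, 9, 10, 11]);
// }
// ```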
a7813a04 1435
3157f602
XL
 1436 /// The symmetric counterpart of `bulk_steal_left`: steals `count` key-value pairs from the right child at once.
1437 pub fn bulk_steal_right(&mut self, count: usize) {
fc512014 1438 assert!(count > 0);
3157f602 1439 unsafe {
fc512014
XL
1440 let left_node = &mut self.left_child;
1441 let old_left_len = left_node.len();
1442 let right_node = &mut self.right_child;
1443 let old_right_len = right_node.len();
3157f602
XL
1444
1445 // Make sure that we may steal safely.
fc512014
XL
1446 assert!(old_left_len + count <= CAPACITY);
1447 assert!(old_right_len >= count);
3157f602 1448
fc512014
XL
1449 let new_left_len = old_left_len + count;
1450 let new_right_len = old_right_len - count;
1451 *left_node.len_mut() = new_left_len as u16;
1452 *right_node.len_mut() = new_right_len as u16;
3157f602 1453
fc512014 1454 // Move leaf data.
3157f602 1455 {
5869c6ff
XL
1456 // Move the right-most stolen pair to the parent.
1457 let k = right_node.key_area_mut(count - 1).assume_init_read();
1458 let v = right_node.val_area_mut(count - 1).assume_init_read();
1459 let (k, v) = self.parent.replace_kv(k, v);
3157f602 1460
fc512014 1461 // Move parent's key-value pair to the left child.
5869c6ff
XL
1462 left_node.key_area_mut(old_left_len).write(k);
1463 left_node.val_area_mut(old_left_len).write(v);
3157f602
XL
1464
1465 // Move elements from the right child to the left one.
5869c6ff
XL
1466 move_to_slice(
1467 right_node.key_area_mut(..count - 1),
1468 left_node.key_area_mut(old_left_len + 1..new_left_len),
1469 );
1470 move_to_slice(
1471 right_node.val_area_mut(..count - 1),
1472 left_node.val_area_mut(old_left_len + 1..new_left_len),
1473 );
3157f602 1474
fc512014 1475 // Fill gap where stolen elements used to be.
5869c6ff
XL
1476 slice_shl(right_node.key_area_mut(..old_right_len), count);
1477 slice_shl(right_node.val_area_mut(..old_right_len), count);
3157f602
XL
1478 }
1479
fc512014 1480 match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
5869c6ff 1481 (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
fc512014 1482 // Steal edges.
5869c6ff
XL
1483 move_to_slice(
1484 right.edge_area_mut(..count),
1485 left.edge_area_mut(old_left_len + 1..new_left_len + 1),
1486 );
3157f602 1487
fc512014 1488 // Fill gap where stolen edges used to be.
5869c6ff
XL
1489 slice_shl(right.edge_area_mut(..old_right_len + 1), count);
1490
1491 left.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);
1492 right.correct_childrens_parent_links(0..new_right_len + 1);
dfeec247
XL
1493 }
1494 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
1b1a35ee 1495 _ => unreachable!(),
a7813a04
XL
1496 }
1497 }
1498 }
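// The same model as for `bulk_steal_left`, mirrored (illustration only, hypothetical
// helper): the right child's left-most element replaces the parent KV, and the
// displaced parent KV is appended to the left child.
//
// ```
// fn bulk_steal_right_model(left: &mut Vec<i32>, parent: &mut i32, right: &mut Vec<i32>, count: usize) {
//     assert!(count > 0 && right.len() >= count);
//     for _ in 0..count {
//         let stolen = right.remove(0);                      // left-most key of the right child
//         let old_parent = core::mem::replace(parent, stolen);
//         left.push(old_parent);                             // goes just right of the old left keys
//     }
// }
// ```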
85aaf69f
SL
1499}
1500
74b04a01
XL
1501impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
1502 pub fn forget_node_type(
1503 self,
1504 ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1505 unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1506 }
1507}
1508
1509impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
1510 pub fn forget_node_type(
1511 self,
1512 ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1513 unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1514 }
1515}
1516
1517impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
1518 pub fn forget_node_type(
1519 self,
1520 ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
1521 unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
1522 }
1523}
1524
6a06907d 1525impl<BorrowType, K, V, Type> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, Type> {
9fa01778 1526 /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
dfeec247
XL
1527 pub fn force(
1528 self,
1529 ) -> ForceResult<
6a06907d
XL
1530 Handle<NodeRef<BorrowType, K, V, marker::Leaf>, Type>,
1531 Handle<NodeRef<BorrowType, K, V, marker::Internal>, Type>,
9cc50fc6
SL
1532 > {
1533 match self.node.force() {
dfeec247
XL
1534 ForceResult::Leaf(node) => {
1535 ForceResult::Leaf(Handle { node, idx: self.idx, _marker: PhantomData })
1536 }
1537 ForceResult::Internal(node) => {
1538 ForceResult::Internal(Handle { node, idx: self.idx, _marker: PhantomData })
1539 }
9cc50fc6 1540 }
85aaf69f
SL
1541 }
1542}
1a4d82fc 1543
cdc7bbd5
XL
1544impl<'a, K, V, Type> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, Type> {
1545 /// Unsafely asserts to the compiler the static information that the handle's node is a `Leaf`.
1546 pub unsafe fn cast_to_leaf_unchecked(
1547 self,
1548 ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, Type> {
1549 let node = unsafe { self.node.cast_to_leaf_unchecked() };
1550 Handle { node, idx: self.idx, _marker: PhantomData }
1551 }
1552}
1553
3157f602
XL
1554impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
 1555 /// Moves the suffix after `self` out of its node and into `right`, which must be empty.
1556 /// The first edge of `right` remains unchanged.
dfeec247
XL
1557 pub fn move_suffix(
1558 &mut self,
1559 right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
1560 ) {
3157f602 1561 unsafe {
fc512014 1562 let new_left_len = self.idx;
3157f602 1563 let mut left_node = self.reborrow_mut().into_node();
5869c6ff 1564 let old_left_len = left_node.len();
3157f602 1565
5869c6ff 1566 let new_right_len = old_left_len - new_left_len;
3157f602
XL
1567 let mut right_node = right.reborrow_mut();
1568
74b04a01
XL
1569 assert!(right_node.len() == 0);
1570 assert!(left_node.height == right_node.height);
3157f602 1571
fc512014 1572 if new_right_len > 0 {
fc512014
XL
1573 *left_node.len_mut() = new_left_len as u16;
1574 *right_node.len_mut() = new_right_len as u16;
3157f602 1575
5869c6ff
XL
1576 move_to_slice(
1577 left_node.key_area_mut(new_left_len..old_left_len),
1578 right_node.key_area_mut(..new_right_len),
1579 );
1580 move_to_slice(
1581 left_node.val_area_mut(new_left_len..old_left_len),
1582 right_node.val_area_mut(..new_right_len),
1583 );
74b04a01 1584 match (left_node.force(), right_node.force()) {
5869c6ff
XL
1585 (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
1586 move_to_slice(
1587 left.edge_area_mut(new_left_len + 1..old_left_len + 1),
1588 right.edge_area_mut(1..new_right_len + 1),
1589 );
1590 right.correct_childrens_parent_links(1..new_right_len + 1);
74b04a01
XL
1591 }
1592 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
1b1a35ee 1593 _ => unreachable!(),
dfeec247 1594 }
3157f602
XL
1595 }
1596 }
1597 }
1598}
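// Minimal model of the leaf-data movement (illustration only; edges and parent links
// are handled separately above, and the helper name is made up):
//
// ```
// fn move_suffix_model<T>(left: &mut Vec<T>, right: &mut Vec<T>, idx: usize) {
//     assert!(right.is_empty() && idx <= left.len());
//     right.extend(left.drain(idx..)); // `left` keeps its first `idx` elements
// }
// ```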
1599
9cc50fc6
SL
1600pub enum ForceResult<Leaf, Internal> {
1601 Leaf(Leaf),
dfeec247 1602 Internal(Internal),
9cc50fc6 1603}
85aaf69f 1604
3dfed10e 1605/// Result of insertion, when a node needed to expand beyond its capacity.
fc512014
XL
1606pub struct SplitResult<'a, K, V, NodeType> {
1607 // Altered node in existing tree with elements and edges that belong to the left of `kv`.
1608 pub left: NodeRef<marker::Mut<'a>, K, V, NodeType>,
ee023bcb 1609 // Some key and value that existed before and were split off, to be inserted elsewhere.
fc512014
XL
1610 pub kv: (K, V),
1611 // Owned, unattached, new node with elements and edges that belong to the right of `kv`.
1612 pub right: NodeRef<marker::Owned, K, V, NodeType>,
1613}
1614
1615impl<'a, K, V> SplitResult<'a, K, V, marker::Leaf> {
1616 pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
1617 SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
1618 }
1619}
1620
1621impl<'a, K, V> SplitResult<'a, K, V, marker::Internal> {
1622 pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
1623 SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
1624 }
3dfed10e
XL
1625}
1626
9cc50fc6
SL
1627pub mod marker {
1628 use core::marker::PhantomData;
1a4d82fc 1629
dfeec247
XL
1630 pub enum Leaf {}
1631 pub enum Internal {}
1632 pub enum LeafOrInternal {}
85aaf69f 1633
dfeec247 1634 pub enum Owned {}
5869c6ff 1635 pub enum Dying {}
9cc50fc6
SL
1636 pub struct Immut<'a>(PhantomData<&'a ()>);
1637 pub struct Mut<'a>(PhantomData<&'a mut ()>);
1b1a35ee 1638 pub struct ValMut<'a>(PhantomData<&'a mut ()>);
85aaf69f 1639
5869c6ff
XL
1640 pub trait BorrowType {
1641 // Whether node references of this borrow type allow traversing
1642 // to other nodes in the tree.
1643 const PERMITS_TRAVERSAL: bool = true;
1644 }
1645 impl BorrowType for Owned {
c295e0f8 1646 // Traversal isn't needed; it happens using the result of `borrow_mut`.
5869c6ff
XL
1647 // By disabling traversal, and only creating new references to roots,
1648 // we know that every reference of the `Owned` type is to a root node.
1649 const PERMITS_TRAVERSAL: bool = false;
1650 }
1651 impl BorrowType for Dying {}
1652 impl<'a> BorrowType for Immut<'a> {}
1653 impl<'a> BorrowType for Mut<'a> {}
1654 impl<'a> BorrowType for ValMut<'a> {}
1655
dfeec247
XL
1656 pub enum KV {}
1657 pub enum Edge {}
1a4d82fc 1658}
85aaf69f 1659
29967ef6
XL
1660/// Inserts a value into a slice of initialized elements followed by one uninitialized element.
1661///
1662/// # Safety
1663/// The slice has more than `idx` elements.
1664unsafe fn slice_insert<T>(slice: &mut [MaybeUninit<T>], idx: usize, val: T) {
f035d41b 1665 unsafe {
29967ef6
XL
1666 let len = slice.len();
1667 debug_assert!(len > idx);
1668 let slice_ptr = slice.as_mut_ptr();
1669 if len > idx + 1 {
1670 ptr::copy(slice_ptr.add(idx), slice_ptr.add(idx + 1), len - idx - 1);
1671 }
1672 (*slice_ptr.add(idx)).write(val);
f035d41b 1673 }
9cc50fc6
SL
1674}
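// Usage sketch (values chosen for illustration only): inserting into the initialized
// prefix of a key/value area shifts the tail one slot to the right.
//
// ```
// use core::mem::MaybeUninit;
//
// let mut area = [
//     MaybeUninit::new(1u8),
//     MaybeUninit::new(2),
//     MaybeUninit::new(4),
//     MaybeUninit::uninit(), // room for one more element
// ];
// unsafe {
//     slice_insert(&mut area[..4], 2, 3);
//     assert_eq!(area[2].assume_init_read(), 3);
//     assert_eq!(area[3].assume_init_read(), 4);
// }
// ```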
1675
29967ef6
XL
1676/// Removes and returns a value from a slice of all initialized elements, leaving behind one
1677/// trailing uninitialized element.
1678///
1679/// # Safety
1680/// The slice has more than `idx` elements.
1681unsafe fn slice_remove<T>(slice: &mut [MaybeUninit<T>], idx: usize) -> T {
f035d41b 1682 unsafe {
29967ef6
XL
1683 let len = slice.len();
1684 debug_assert!(idx < len);
1685 let slice_ptr = slice.as_mut_ptr();
1686 let ret = (*slice_ptr.add(idx)).assume_init_read();
1687 ptr::copy(slice_ptr.add(idx + 1), slice_ptr.add(idx), len - idx - 1);
f035d41b
XL
1688 ret
1689 }
9cc50fc6 1690}
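// Usage sketch (values chosen for illustration only): removing from the initialized
// prefix returns the element and shifts the rest one slot to the left, so the last
// previously initialized slot must no longer be read.
//
// ```
// use core::mem::MaybeUninit;
//
// let mut area = [MaybeUninit::new(1u8), MaybeUninit::new(2), MaybeUninit::new(3)];
// unsafe {
//     let removed = slice_remove(&mut area[..3], 0);
//     assert_eq!(removed, 1);
//     assert_eq!(area[0].assume_init_read(), 2);
//     assert_eq!(area[1].assume_init_read(), 3);
//     // area[2] is now logically uninitialized (a stale copy for u8, but it would
//     // be a moved-out value for a non-Copy type).
// }
// ```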
3dfed10e 1691
5869c6ff
XL
1692/// Shifts the elements in a slice `distance` positions to the left.
1693///
1694/// # Safety
1695/// The slice has at least `distance` elements.
1696unsafe fn slice_shl<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
1697 unsafe {
1698 let slice_ptr = slice.as_mut_ptr();
1699 ptr::copy(slice_ptr.add(distance), slice_ptr, slice.len() - distance);
1700 }
1701}
1702
1703/// Shifts the elements in a slice `distance` positions to the right.
1704///
1705/// # Safety
1706/// The slice has at least `distance` elements.
1707unsafe fn slice_shr<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
1708 unsafe {
1709 let slice_ptr = slice.as_mut_ptr();
1710 ptr::copy(slice_ptr, slice_ptr.add(distance), slice.len() - distance);
1711 }
1712}
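// Usage sketch for both shifts (illustration only): `slice_shr` opens a gap of
// `distance` stale slots at the front, and `slice_shl` closes such a gap; in both
// cases the caller is responsible for the slots that become logically uninitialized.
//
// ```
// use core::mem::MaybeUninit;
//
// let mut area = [MaybeUninit::new(1u8), MaybeUninit::new(2), MaybeUninit::new(3)];
// unsafe {
//     slice_shr(&mut area[..3], 1); // area is now [1 (stale), 1, 2]
//     assert_eq!(area[1].assume_init_read(), 1);
//     assert_eq!(area[2].assume_init_read(), 2);
//
//     slice_shl(&mut area[..3], 1); // area is now [1, 2, 2 (stale)]
//     assert_eq!(area[0].assume_init_read(), 1);
//     assert_eq!(area[1].assume_init_read(), 2);
// }
// ```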
1713
1714/// Moves all values from a slice of initialized elements to a slice
1715/// of uninitialized elements, leaving behind `src` as all uninitialized.
1716/// Works like `dst.copy_from_slice(src)` but does not require `T` to be `Copy`.
1717fn move_to_slice<T>(src: &mut [MaybeUninit<T>], dst: &mut [MaybeUninit<T>]) {
1718 assert!(src.len() == dst.len());
1719 unsafe {
1720 ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), src.len());
1721 }
1722}
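// Usage sketch (illustration only): after the move, `dst` owns the values and `src`
// must be treated as uninitialized, which is why no `Copy` bound is needed.
//
// ```
// use core::mem::MaybeUninit;
//
// let mut src = [MaybeUninit::new(Box::new(1)), MaybeUninit::new(Box::new(2))];
// let mut dst = [MaybeUninit::<Box<i32>>::uninit(), MaybeUninit::uninit()];
// move_to_slice(&mut src, &mut dst);
// unsafe {
//     assert_eq!(*dst[0].assume_init_read(), 1);
//     assert_eq!(*dst[1].assume_init_read(), 2);
// }
// // Reading from `src` now would duplicate ownership of the boxes.
// ```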
1723
3dfed10e
XL
1724#[cfg(test)]
1725mod tests;