]> git.proxmox.com Git - rustc.git/blame - src/libcollections/btree/node.rs
Imported Upstream version 1.2.0+dfsg1
[rustc.git] / src / libcollections / btree / node.rs
CommitLineData
1a4d82fc
JJ
1// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2// file at the top-level directory of this distribution and at
3// http://rust-lang.org/COPYRIGHT.
4//
5// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8// option. This file may not be copied, modified, or distributed
9// except according to those terms.
10
11// This module represents all the internal representation and logic for a B-Tree's node
12// with a safe interface, so that BTreeMap itself does not depend on any of these details.
13
14pub use self::InsertionResult::*;
15pub use self::SearchResult::*;
16pub use self::ForceResult::*;
17pub use self::TraversalItem::*;
18
19use core::prelude::*;
20
1a4d82fc 21use core::cmp::Ordering::{Greater, Less, Equal};
62682a34 22use core::intrinsics::arith_offset;
1a4d82fc 23use core::iter::Zip;
85aaf69f
SL
24use core::marker::PhantomData;
25use core::ops::{Deref, DerefMut, Index, IndexMut};
1a4d82fc 26use core::ptr::Unique;
c34b1796 27use core::{slice, mem, ptr, cmp, raw};
85aaf69f
SL
28use alloc::heap::{self, EMPTY};
29
30use borrow::Borrow;
1a4d82fc
JJ
31
32/// Represents the result of an Insertion: either the item fit, or the node had to split
33pub enum InsertionResult<K, V> {
34 /// The inserted element fit
35 Fit,
36 /// The inserted element did not fit, so the node was split
37 Split(K, V, Node<K, V>),
38}
39
40/// Represents the result of a search for a key in a single node
41pub enum SearchResult<NodeRef> {
42 /// The element was found at the given index
43 Found(Handle<NodeRef, handle::KV, handle::LeafOrInternal>),
44 /// The element wasn't found, but if it's anywhere, it must be beyond this edge
45 GoDown(Handle<NodeRef, handle::Edge, handle::LeafOrInternal>),
46}
47
48/// A B-Tree Node. We keep keys/edges/values separate to optimize searching for keys.
49#[unsafe_no_drop_flag]
50pub struct Node<K, V> {
51 // To avoid the need for multiple allocations, we allocate a single buffer with enough space
52 // for `capacity` keys, `capacity` values, and (in internal nodes) `capacity + 1` edges.
53 // Despite this, we store three separate pointers to the three "chunks" of the buffer because
54 // the performance drops significantly if the locations of the vals and edges need to be
55 // recalculated upon access.
56 //
57 // These will never be null during normal usage of a `Node`. However, to avoid the need for a
58 // drop flag, `Node::drop` zeroes `keys`, signaling that the `Node` has already been cleaned
59 // up.
60 keys: Unique<K>,
61 vals: Unique<V>,
62
85aaf69f
SL
63 // In leaf nodes, this will be None, and no space will be allocated for edges.
64 edges: Option<Unique<Node<K, V>>>,
1a4d82fc
JJ
65
66 // At any given time, there will be `_len` keys, `_len` values, and (in an internal node)
67 // `_len + 1` edges. In a leaf node, there will never be any edges.
68 //
69 // Note: instead of accessing this field directly, please call the `len()` method, which should
70 // be more stable in the face of representation changes.
85aaf69f 71 _len: usize,
1a4d82fc
JJ
72
73 // FIXME(gereeter) It shouldn't be necessary to store the capacity in every node, as it should
74 // be constant throughout the tree. Once a solution to this is found, it might be possible to
75 // also pass down the offsets into the buffer that vals and edges are stored at, removing the
76 // need for those two pointers.
77 //
78 // Note: instead of accessing this field directly, please call the `capacity()` method, which
79 // should be more stable in the face of representation changes.
85aaf69f
SL
80 _capacity: usize,
81}
82
83struct NodeSlice<'a, K: 'a, V: 'a> {
84 keys: &'a [K],
85 vals: &'a [V],
86 pub edges: &'a [Node<K, V>],
87 head_is_edge: bool,
88 tail_is_edge: bool,
89 has_edges: bool,
90}
91
92struct MutNodeSlice<'a, K: 'a, V: 'a> {
93 keys: &'a [K],
94 vals: &'a mut [V],
95 pub edges: &'a mut [Node<K, V>],
96 head_is_edge: bool,
97 tail_is_edge: bool,
98 has_edges: bool,
1a4d82fc
JJ
99}
100
/// Rounds `unrounded` up to the closest multiple of `target_alignment` that is
/// greater than or equal to it.
///
/// # Panics
///
/// Panics if `target_alignment` is not a power of two.
#[inline]
fn round_up_to_next(unrounded: usize, target_alignment: usize) -> usize {
    assert!(target_alignment.is_power_of_two());
    let mask = target_alignment - 1;
    (unrounded + mask) & !mask
}
112
#[test]
fn test_rounding() {
    // (input, expected) pairs for alignment 4, covering both sides of each step.
    let cases = [(0, 0), (1, 4), (2, 4), (3, 4), (4, 4), (5, 8)];
    for &(input, expected) in &cases {
        assert_eq!(round_up_to_next(input, 4), expected);
    }
}
122
123// Returns a tuple of (val_offset, edge_offset),
124// from the start of a mallocated array.
125#[inline]
85aaf69f
SL
126fn calculate_offsets(keys_size: usize,
127 vals_size: usize, vals_align: usize,
128 edges_align: usize)
129 -> (usize, usize) {
1a4d82fc
JJ
130 let vals_offset = round_up_to_next(keys_size, vals_align);
131 let end_of_vals = vals_offset + vals_size;
132
133 let edges_offset = round_up_to_next(end_of_vals, edges_align);
134
135 (vals_offset, edges_offset)
136}
137
138// Returns a tuple of (minimum required alignment, array_size),
139// from the start of a mallocated array.
140#[inline]
85aaf69f
SL
141fn calculate_allocation(keys_size: usize, keys_align: usize,
142 vals_size: usize, vals_align: usize,
143 edges_size: usize, edges_align: usize)
144 -> (usize, usize) {
1a4d82fc
JJ
145 let (_, edges_offset) = calculate_offsets(keys_size,
146 vals_size, vals_align,
147 edges_align);
148 let end_of_edges = edges_offset + edges_size;
149
150 let min_align = cmp::max(keys_align, cmp::max(vals_align, edges_align));
151
152 (min_align, end_of_edges)
153}
154
#[test]
fn test_offset_calculation() {
    // Full allocations: (min alignment, total size).
    assert_eq!(calculate_allocation(128, 8, 15, 1, 4, 4), (8, 148));
    assert_eq!(calculate_allocation(3, 1, 2, 1, 1, 1), (1, 6));
    assert_eq!(calculate_allocation(6, 2, 12, 4, 24, 8), (8, 48));
    // Offsets alone: (val_offset, edge_offset).
    assert_eq!(calculate_offsets(128, 15, 1, 4), (128, 144));
    assert_eq!(calculate_offsets(3, 2, 1, 1), (3, 5));
    assert_eq!(calculate_offsets(6, 12, 4, 8), (8, 24));
}
164
85aaf69f 165fn calculate_allocation_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
62682a34
SL
166 let (keys_size, keys_align) = (capacity * mem::size_of::<K>(), mem::align_of::<K>());
167 let (vals_size, vals_align) = (capacity * mem::size_of::<V>(), mem::align_of::<V>());
1a4d82fc
JJ
168 let (edges_size, edges_align) = if is_leaf {
169 (0, 1)
170 } else {
62682a34 171 ((capacity + 1) * mem::size_of::<Node<K, V>>(), mem::align_of::<Node<K, V>>())
1a4d82fc
JJ
172 };
173
174 calculate_allocation(
175 keys_size, keys_align,
176 vals_size, vals_align,
177 edges_size, edges_align
178 )
179}
180
85aaf69f 181fn calculate_offsets_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
1a4d82fc
JJ
182 let keys_size = capacity * mem::size_of::<K>();
183 let vals_size = capacity * mem::size_of::<V>();
62682a34 184 let vals_align = mem::align_of::<V>();
1a4d82fc
JJ
185 let edges_align = if is_leaf {
186 1
187 } else {
62682a34 188 mem::align_of::<Node<K, V>>()
1a4d82fc
JJ
189 };
190
191 calculate_offsets(
192 keys_size,
193 vals_size, vals_align,
194 edges_align
195 )
196}
197
/// An iterator that owns (and is responsible for dropping) the elements of a
/// slice, without owning the slice's backing allocation.
struct RawItems<T> {
    head: *const T,
    tail: *const T,
}
203
204impl<T> RawItems<T> {
205 unsafe fn from_slice(slice: &[T]) -> RawItems<T> {
206 RawItems::from_parts(slice.as_ptr(), slice.len())
207 }
208
85aaf69f 209 unsafe fn from_parts(ptr: *const T, len: usize) -> RawItems<T> {
1a4d82fc
JJ
210 if mem::size_of::<T>() == 0 {
211 RawItems {
212 head: ptr,
62682a34 213 tail: arith_offset(ptr as *const i8, len as isize) as *const T,
1a4d82fc
JJ
214 }
215 } else {
216 RawItems {
217 head: ptr,
85aaf69f 218 tail: ptr.offset(len as isize),
1a4d82fc
JJ
219 }
220 }
221 }
222
223 unsafe fn push(&mut self, val: T) {
224 ptr::write(self.tail as *mut T, val);
225
226 if mem::size_of::<T>() == 0 {
62682a34 227 self.tail = arith_offset(self.tail as *const i8, 1) as *const T;
1a4d82fc
JJ
228 } else {
229 self.tail = self.tail.offset(1);
230 }
231 }
232}
233
234impl<T> Iterator for RawItems<T> {
235 type Item = T;
236
237 fn next(&mut self) -> Option<T> {
238 if self.head == self.tail {
239 None
240 } else {
241 unsafe {
242 let ret = Some(ptr::read(self.head));
243
244 if mem::size_of::<T>() == 0 {
62682a34 245 self.head = arith_offset(self.head as *const i8, 1) as *const T;
1a4d82fc
JJ
246 } else {
247 self.head = self.head.offset(1);
248 }
249
250 ret
251 }
252 }
253 }
254}
255
256impl<T> DoubleEndedIterator for RawItems<T> {
257 fn next_back(&mut self) -> Option<T> {
258 if self.head == self.tail {
259 None
260 } else {
261 unsafe {
262 if mem::size_of::<T>() == 0 {
62682a34 263 self.tail = arith_offset(self.tail as *const i8, -1) as *const T;
1a4d82fc
JJ
264 } else {
265 self.tail = self.tail.offset(-1);
266 }
267
268 Some(ptr::read(self.tail))
269 }
270 }
271 }
272}
273
1a4d82fc
JJ
274impl<T> Drop for RawItems<T> {
275 fn drop(&mut self) {
85aaf69f 276 for _ in self.by_ref() {}
1a4d82fc
JJ
277 }
278}
279
1a4d82fc
JJ
280impl<K, V> Drop for Node<K, V> {
281 fn drop(&mut self) {
c34b1796
AL
282 if self.keys.is_null() ||
283 (unsafe { self.keys.get() as *const K as usize == mem::POST_DROP_USIZE })
284 {
85aaf69f 285 // Since we have #[unsafe_no_drop_flag], we have to watch
c34b1796 286 // out for the sentinel value being stored in self.keys. (Using
85aaf69f
SL
287 // null is technically a violation of the `Unique`
288 // requirements, though.)
1a4d82fc
JJ
289 return;
290 }
291
292 // Do the actual cleanup.
293 unsafe {
294 drop(RawItems::from_slice(self.keys()));
295 drop(RawItems::from_slice(self.vals()));
296 drop(RawItems::from_slice(self.edges()));
297
298 self.destroy();
299 }
300
85aaf69f 301 self.keys = unsafe { Unique::new(0 as *mut K) };
1a4d82fc
JJ
302 }
303}
304
305impl<K, V> Node<K, V> {
306 /// Make a new internal node. The caller must initialize the result to fix the invariant that
307 /// there are `len() + 1` edges.
85aaf69f 308 unsafe fn new_internal(capacity: usize) -> Node<K, V> {
1a4d82fc
JJ
309 let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, false);
310
311 let buffer = heap::allocate(size, alignment);
312 if buffer.is_null() { ::alloc::oom(); }
313
314 let (vals_offset, edges_offset) = calculate_offsets_generic::<K, V>(capacity, false);
315
316 Node {
85aaf69f
SL
317 keys: Unique::new(buffer as *mut K),
318 vals: Unique::new(buffer.offset(vals_offset as isize) as *mut V),
319 edges: Some(Unique::new(buffer.offset(edges_offset as isize) as *mut Node<K, V>)),
1a4d82fc
JJ
320 _len: 0,
321 _capacity: capacity,
322 }
323 }
324
325 /// Make a new leaf node
85aaf69f 326 fn new_leaf(capacity: usize) -> Node<K, V> {
1a4d82fc
JJ
327 let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, true);
328
329 let buffer = unsafe { heap::allocate(size, alignment) };
330 if buffer.is_null() { ::alloc::oom(); }
331
332 let (vals_offset, _) = calculate_offsets_generic::<K, V>(capacity, true);
333
334 Node {
85aaf69f
SL
335 keys: unsafe { Unique::new(buffer as *mut K) },
336 vals: unsafe { Unique::new(buffer.offset(vals_offset as isize) as *mut V) },
337 edges: None,
1a4d82fc
JJ
338 _len: 0,
339 _capacity: capacity,
340 }
341 }
342
343 unsafe fn destroy(&mut self) {
344 let (alignment, size) =
345 calculate_allocation_generic::<K, V>(self.capacity(), self.is_leaf());
85aaf69f 346 heap::deallocate(*self.keys as *mut u8, size, alignment);
1a4d82fc
JJ
347 }
348
349 #[inline]
350 pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
351 unsafe {(
c34b1796
AL
352 slice::from_raw_parts(*self.keys, self.len()),
353 slice::from_raw_parts(*self.vals, self.len()),
1a4d82fc
JJ
354 )}
355 }
356
357 #[inline]
358 pub fn as_slices_mut<'a>(&'a mut self) -> (&'a mut [K], &'a mut [V]) {
359 unsafe { mem::transmute(self.as_slices()) }
360 }
361
362 #[inline]
85aaf69f
SL
363 pub fn as_slices_internal<'b>(&'b self) -> NodeSlice<'b, K, V> {
364 let is_leaf = self.is_leaf();
1a4d82fc
JJ
365 let (keys, vals) = self.as_slices();
366 let edges: &[_] = if self.is_leaf() {
367 &[]
368 } else {
369 unsafe {
85aaf69f
SL
370 let data = match self.edges {
371 None => heap::EMPTY as *const Node<K,V>,
372 Some(ref p) => **p as *const Node<K,V>,
373 };
1a4d82fc 374 mem::transmute(raw::Slice {
85aaf69f 375 data: data,
1a4d82fc
JJ
376 len: self.len() + 1
377 })
378 }
379 };
85aaf69f
SL
380 NodeSlice {
381 keys: keys,
382 vals: vals,
383 edges: edges,
384 head_is_edge: true,
385 tail_is_edge: true,
386 has_edges: !is_leaf,
387 }
1a4d82fc
JJ
388 }
389
390 #[inline]
85aaf69f 391 pub fn as_slices_internal_mut<'b>(&'b mut self) -> MutNodeSlice<'b, K, V> {
1a4d82fc
JJ
392 unsafe { mem::transmute(self.as_slices_internal()) }
393 }
394
395 #[inline]
396 pub fn keys<'a>(&'a self) -> &'a [K] {
397 self.as_slices().0
398 }
399
400 #[inline]
401 pub fn keys_mut<'a>(&'a mut self) -> &'a mut [K] {
402 self.as_slices_mut().0
403 }
404
405 #[inline]
406 pub fn vals<'a>(&'a self) -> &'a [V] {
407 self.as_slices().1
408 }
409
410 #[inline]
411 pub fn vals_mut<'a>(&'a mut self) -> &'a mut [V] {
412 self.as_slices_mut().1
413 }
414
415 #[inline]
416 pub fn edges<'a>(&'a self) -> &'a [Node<K, V>] {
85aaf69f 417 self.as_slices_internal().edges
1a4d82fc
JJ
418 }
419
420 #[inline]
421 pub fn edges_mut<'a>(&'a mut self) -> &'a mut [Node<K, V>] {
85aaf69f 422 self.as_slices_internal_mut().edges
1a4d82fc
JJ
423 }
424}
425
426// FIXME(gereeter) Write an efficient clone_from
85aaf69f 427#[stable(feature = "rust1", since = "1.0.0")]
1a4d82fc
JJ
428impl<K: Clone, V: Clone> Clone for Node<K, V> {
429 fn clone(&self) -> Node<K, V> {
430 let mut ret = if self.is_leaf() {
431 Node::new_leaf(self.capacity())
432 } else {
433 unsafe { Node::new_internal(self.capacity()) }
434 };
435
436 unsafe {
437 // For failure safety
438 let mut keys = RawItems::from_parts(ret.keys().as_ptr(), 0);
439 let mut vals = RawItems::from_parts(ret.vals().as_ptr(), 0);
440 let mut edges = RawItems::from_parts(ret.edges().as_ptr(), 0);
441
85aaf69f 442 for key in self.keys() {
1a4d82fc
JJ
443 keys.push(key.clone())
444 }
85aaf69f 445 for val in self.vals() {
1a4d82fc
JJ
446 vals.push(val.clone())
447 }
85aaf69f 448 for edge in self.edges() {
1a4d82fc
JJ
449 edges.push(edge.clone())
450 }
451
452 mem::forget(keys);
453 mem::forget(vals);
454 mem::forget(edges);
455
456 ret._len = self.len();
457 }
458
459 ret
460 }
461}
462
463/// A reference to something in the middle of a `Node`. There are two `Type`s of `Handle`s,
464/// namely `KV` handles, which point to key/value pairs, and `Edge` handles, which point to edges
465/// before or after key/value pairs. Methods are provided for removing pairs, inserting into edges,
466/// accessing the stored values, and moving around the `Node`.
467///
468/// This handle is generic, and can take any sort of reference to a `Node`. The reason for this is
469/// two-fold. First of all, it reduces the amount of repetitive code, implementing functions that
470/// don't need mutability on both mutable and immutable references. Secondly and more importantly,
471/// this allows users of the `Handle` API to associate metadata with the reference. This is used in
472/// `BTreeMap` to give `Node`s temporary "IDs" that persist to when the `Node` is used in a
473/// `Handle`.
474///
475/// # A note on safety
476///
477/// Unfortunately, the extra power afforded by being generic also means that safety can technically
478/// be broken. For sensible implementations of `Deref` and `DerefMut`, these handles are perfectly
479/// safe. As long as repeatedly calling `.deref()` results in the same Node being returned each
480/// time, everything should work fine. However, if the `Deref` implementation swaps in multiple
481/// different nodes, then the indices that are assumed to be in bounds suddenly stop being so. For
482/// example:
483///
484/// ```rust,ignore
485/// struct Nasty<'a> {
85aaf69f
SL
486/// first: &'a Node<usize, usize>,
487/// second: &'a Node<usize, usize>,
1a4d82fc
JJ
488/// flag: &'a Cell<bool>,
489/// }
490///
491/// impl<'a> Deref for Nasty<'a> {
85aaf69f 492/// type Target = Node<usize, usize>;
1a4d82fc 493///
85aaf69f 494/// fn deref(&self) -> &Node<usize, usize> {
1a4d82fc
JJ
495/// if self.flag.get() {
496/// &*self.second
497/// } else {
498/// &*self.first
499/// }
500/// }
501/// }
502///
503/// fn main() {
504/// let flag = Cell::new(false);
505/// let mut small_node = Node::make_leaf_root(3);
506/// let mut large_node = Node::make_leaf_root(100);
507///
85aaf69f 508/// for i in 0..100 {
1a4d82fc
JJ
509/// // Insert to the end
510/// large_node.edge_handle(i).insert_as_leaf(i, i);
511/// }
512///
513/// let nasty = Nasty {
514/// first: &large_node,
515/// second: &small_node,
516/// flag: &flag
517/// }
518///
519/// // The handle points at index 75.
520/// let handle = Node::search(nasty, 75);
521///
522/// // Now the handle still points at index 75, but on the small node, which has no index 75.
523/// flag.set(true);
524///
525/// println!("Uninitialized memory: {:?}", handle.into_kv());
526/// }
527/// ```
c34b1796 528#[derive(Copy, Clone)]
1a4d82fc
JJ
529pub struct Handle<NodeRef, Type, NodeType> {
530 node: NodeRef,
85aaf69f
SL
531 index: usize,
532 marker: PhantomData<(Type, NodeType)>,
1a4d82fc
JJ
533}
534
pub mod handle {
    // Marker types describing what a handle points at...
    pub enum KV {}
    pub enum Edge {}

    // ...and what kind of node it points into.
    pub enum LeafOrInternal {}
    pub enum Leaf {}
    pub enum Internal {}
}
545
546impl<K: Ord, V> Node<K, V> {
547 /// Searches for the given key in the node. If it finds an exact match,
548 /// `Found` will be yielded with the matching index. If it doesn't find an exact match,
549 /// `GoDown` will be yielded with the index of the subtree the key must lie in.
550 pub fn search<Q: ?Sized, NodeRef: Deref<Target=Node<K, V>>>(node: NodeRef, key: &Q)
85aaf69f 551 -> SearchResult<NodeRef> where K: Borrow<Q>, Q: Ord {
1a4d82fc
JJ
552 // FIXME(Gankro): Tune when to search linear or binary based on B (and maybe K/V).
553 // For the B configured as of this writing (B = 6), binary search was *significantly*
85aaf69f
SL
554 // worse for usizes.
555 match node.as_slices_internal().search_linear(key) {
556 (index, true) => Found(Handle { node: node, index: index, marker: PhantomData }),
557 (index, false) => GoDown(Handle { node: node, index: index, marker: PhantomData }),
1a4d82fc
JJ
558 }
559 }
1a4d82fc
JJ
560}
561
562// Public interface
563impl <K, V> Node<K, V> {
564 /// Make a leaf root from scratch
85aaf69f 565 pub fn make_leaf_root(b: usize) -> Node<K, V> {
1a4d82fc
JJ
566 Node::new_leaf(capacity_from_b(b))
567 }
568
569 /// Make an internal root and swap it with an old root
85aaf69f 570 pub fn make_internal_root(left_and_out: &mut Node<K,V>, b: usize, key: K, value: V,
1a4d82fc
JJ
571 right: Node<K,V>) {
572 let node = mem::replace(left_and_out, unsafe { Node::new_internal(capacity_from_b(b)) });
573 left_and_out._len = 1;
574 unsafe {
575 ptr::write(left_and_out.keys_mut().get_unchecked_mut(0), key);
576 ptr::write(left_and_out.vals_mut().get_unchecked_mut(0), value);
577 ptr::write(left_and_out.edges_mut().get_unchecked_mut(0), node);
578 ptr::write(left_and_out.edges_mut().get_unchecked_mut(1), right);
579 }
580 }
581
582 /// How many key-value pairs the node contains
85aaf69f 583 pub fn len(&self) -> usize {
1a4d82fc
JJ
584 self._len
585 }
586
9346a6ac
AL
587 /// Does the node not contain any key-value pairs
588 pub fn is_empty(&self) -> bool { self.len() == 0 }
589
1a4d82fc 590 /// How many key-value pairs the node can fit
85aaf69f 591 pub fn capacity(&self) -> usize {
1a4d82fc
JJ
592 self._capacity
593 }
594
595 /// If the node has any children
596 pub fn is_leaf(&self) -> bool {
85aaf69f 597 self.edges.is_none()
1a4d82fc
JJ
598 }
599
600 /// if the node has too few elements
601 pub fn is_underfull(&self) -> bool {
602 self.len() < min_load_from_capacity(self.capacity())
603 }
604
605 /// if the node cannot fit any more elements
606 pub fn is_full(&self) -> bool {
607 self.len() == self.capacity()
608 }
609}
610
611impl<K, V, NodeRef: Deref<Target=Node<K, V>>, Type, NodeType> Handle<NodeRef, Type, NodeType> {
612 /// Returns a reference to the node that contains the pointed-to edge or key/value pair. This
613 /// is very different from `edge` and `edge_mut` because those return children of the node
614 /// returned by `node`.
615 pub fn node(&self) -> &Node<K, V> {
616 &*self.node
617 }
618}
619
620impl<K, V, NodeRef, Type, NodeType> Handle<NodeRef, Type, NodeType> where
621 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
622{
623 /// Converts a handle into one that stores the same information using a raw pointer. This can
624 /// be useful in conjunction with `from_raw` when the type system is insufficient for
625 /// determining the lifetimes of the nodes.
626 pub fn as_raw(&mut self) -> Handle<*mut Node<K, V>, Type, NodeType> {
627 Handle {
628 node: &mut *self.node as *mut _,
85aaf69f
SL
629 index: self.index,
630 marker: PhantomData,
1a4d82fc
JJ
631 }
632 }
633}
634
635impl<K, V, Type, NodeType> Handle<*mut Node<K, V>, Type, NodeType> {
636 /// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
637 /// stored with a reference. This is an unsafe inverse of `as_raw`, and together they allow
638 /// unsafely extending the lifetime of the reference to the `Node`.
639 pub unsafe fn from_raw<'a>(&'a self) -> Handle<&'a Node<K, V>, Type, NodeType> {
640 Handle {
641 node: &*self.node,
85aaf69f
SL
642 index: self.index,
643 marker: PhantomData,
1a4d82fc
JJ
644 }
645 }
646
647 /// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
648 /// stored with a mutable reference. This is an unsafe inverse of `as_raw`, and together they
649 /// allow unsafely extending the lifetime of the reference to the `Node`.
650 pub unsafe fn from_raw_mut<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, Type, NodeType> {
651 Handle {
652 node: &mut *self.node,
85aaf69f
SL
653 index: self.index,
654 marker: PhantomData,
1a4d82fc
JJ
655 }
656 }
657}
658
659impl<'a, K: 'a, V: 'a> Handle<&'a Node<K, V>, handle::Edge, handle::Internal> {
660 /// Turns the handle into a reference to the edge it points at. This is necessary because the
661 /// returned pointer has a larger lifetime than what would be returned by `edge` or `edge_mut`,
662 /// making it more suitable for moving down a chain of nodes.
663 pub fn into_edge(self) -> &'a Node<K, V> {
664 unsafe {
665 self.node.edges().get_unchecked(self.index)
666 }
667 }
668}
669
670impl<'a, K: 'a, V: 'a> Handle<&'a mut Node<K, V>, handle::Edge, handle::Internal> {
671 /// Turns the handle into a mutable reference to the edge it points at. This is necessary
672 /// because the returned pointer has a larger lifetime than what would be returned by
673 /// `edge_mut`, making it more suitable for moving down a chain of nodes.
674 pub fn into_edge_mut(self) -> &'a mut Node<K, V> {
675 unsafe {
676 self.node.edges_mut().get_unchecked_mut(self.index)
677 }
678 }
679}
680
681impl<K, V, NodeRef: Deref<Target=Node<K, V>>> Handle<NodeRef, handle::Edge, handle::Internal> {
682 // This doesn't exist because there are no uses for it,
85aaf69f 683 // but is fine to add, analogous to edge_mut.
1a4d82fc
JJ
684 //
685 // /// Returns a reference to the edge pointed-to by this handle. This should not be
686 // /// confused with `node`, which references the parent node of what is returned here.
687 // pub fn edge(&self) -> &Node<K, V>
688}
689
690pub enum ForceResult<NodeRef, Type> {
691 Leaf(Handle<NodeRef, Type, handle::Leaf>),
692 Internal(Handle<NodeRef, Type, handle::Internal>)
693}
694
695impl<K, V, NodeRef: Deref<Target=Node<K, V>>, Type> Handle<NodeRef, Type, handle::LeafOrInternal> {
696 /// Figure out whether this handle is pointing to something in a leaf node or to something in
697 /// an internal node, clarifying the type according to the result.
698 pub fn force(self) -> ForceResult<NodeRef, Type> {
699 if self.node.is_leaf() {
700 Leaf(Handle {
701 node: self.node,
85aaf69f
SL
702 index: self.index,
703 marker: PhantomData,
1a4d82fc
JJ
704 })
705 } else {
706 Internal(Handle {
707 node: self.node,
85aaf69f
SL
708 index: self.index,
709 marker: PhantomData,
1a4d82fc
JJ
710 })
711 }
712 }
713}
714impl<K, V, NodeRef> Handle<NodeRef, handle::Edge, handle::Leaf> where
715 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
716{
717 /// Tries to insert this key-value pair at the given index in this leaf node
718 /// If the node is full, we have to split it.
719 ///
720 /// Returns a *mut V to the inserted value, because the caller may want this when
721 /// they're done mutating the tree, but we don't want to borrow anything for now.
722 pub fn insert_as_leaf(mut self, key: K, value: V) ->
723 (InsertionResult<K, V>, *mut V) {
724 if !self.node.is_full() {
725 // The element can fit, just insert it
726 (Fit, unsafe { self.node.insert_kv(self.index, key, value) as *mut _ })
727 } else {
728 // The element can't fit, this node is full. Split it into two nodes.
729 let (new_key, new_val, mut new_right) = self.node.split();
730 let left_len = self.node.len();
731
732 let ptr = unsafe {
733 if self.index <= left_len {
734 self.node.insert_kv(self.index, key, value)
735 } else {
736 // We need to subtract 1 because in splitting we took out new_key and new_val.
737 // Just being in the right node means we are past left_len k/v pairs in the
738 // left node and 1 k/v pair in the parent node.
739 new_right.insert_kv(self.index - left_len - 1, key, value)
740 }
741 } as *mut _;
742
743 (Split(new_key, new_val, new_right), ptr)
744 }
745 }
746}
747
748impl<K, V, NodeRef> Handle<NodeRef, handle::Edge, handle::Internal> where
749 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
750{
751 /// Returns a mutable reference to the edge pointed-to by this handle. This should not be
752 /// confused with `node`, which references the parent node of what is returned here.
753 pub fn edge_mut(&mut self) -> &mut Node<K, V> {
754 unsafe {
755 self.node.edges_mut().get_unchecked_mut(self.index)
756 }
757 }
758
759 /// Tries to insert this key-value pair at the given index in this internal node
760 /// If the node is full, we have to split it.
761 pub fn insert_as_internal(mut self, key: K, value: V, right: Node<K, V>)
762 -> InsertionResult<K, V> {
763 if !self.node.is_full() {
764 // The element can fit, just insert it
765 unsafe {
766 self.node.insert_kv(self.index, key, value);
767 self.node.insert_edge(self.index + 1, right); // +1 to insert to the right
768 }
769 Fit
770 } else {
771 // The element can't fit, this node is full. Split it into two nodes.
772 let (new_key, new_val, mut new_right) = self.node.split();
773 let left_len = self.node.len();
774
775 if self.index <= left_len {
776 unsafe {
777 self.node.insert_kv(self.index, key, value);
778 self.node.insert_edge(self.index + 1, right); // +1 to insert to the right
779 }
780 } else {
781 unsafe {
782 // The -1 here is for the same reason as in insert_as_internal - because we
783 // split, there are actually left_len + 1 k/v pairs before the right node, with
784 // the extra 1 being put in the parent.
785 new_right.insert_kv(self.index - left_len - 1, key, value);
786 new_right.insert_edge(self.index - left_len, right);
787 }
788 }
789
790 Split(new_key, new_val, new_right)
791 }
792 }
793
794 /// Handle an underflow in this node's child. We favour handling "to the left" because we know
795 /// we're empty, but our neighbour can be full. Handling to the left means when we choose to
796 /// steal, we pop off the end of our neighbour (always fast) and "unshift" ourselves
797 /// (always slow, but at least faster since we know we're half-empty).
798 /// Handling "to the right" reverses these roles. Of course, we merge whenever possible
799 /// because we want dense nodes, and merging is about equal work regardless of direction.
800 pub fn handle_underflow(mut self) {
801 unsafe {
802 if self.index > 0 {
803 self.handle_underflow_to_left();
804 } else {
805 self.handle_underflow_to_right();
806 }
807 }
808 }
809
810 /// Right is underflowed. Tries to steal from left,
811 /// but merges left and right if left is low too.
812 unsafe fn handle_underflow_to_left(&mut self) {
813 let left_len = self.node.edges()[self.index - 1].len();
814 if left_len > min_load_from_capacity(self.node.capacity()) {
815 self.left_kv().steal_rightward();
816 } else {
817 self.left_kv().merge_children();
818 }
819 }
820
821 /// Left is underflowed. Tries to steal from the right,
822 /// but merges left and right if right is low too.
823 unsafe fn handle_underflow_to_right(&mut self) {
824 let right_len = self.node.edges()[self.index + 1].len();
825 if right_len > min_load_from_capacity(self.node.capacity()) {
826 self.right_kv().steal_leftward();
827 } else {
828 self.right_kv().merge_children();
829 }
830 }
831}
832
833impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::Edge, NodeType> where
834 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
835{
836 /// Gets the handle pointing to the key/value pair just to the left of the pointed-to edge.
837 /// This is unsafe because the handle might point to the first edge in the node, which has no
838 /// pair to its left.
839 unsafe fn left_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
840 Handle {
841 node: &mut *self.node,
85aaf69f
SL
842 index: self.index - 1,
843 marker: PhantomData,
1a4d82fc
JJ
844 }
845 }
846
847 /// Gets the handle pointing to the key/value pair just to the right of the pointed-to edge.
848 /// This is unsafe because the handle might point to the last edge in the node, which has no
849 /// pair to its right.
850 unsafe fn right_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
851 Handle {
852 node: &mut *self.node,
85aaf69f
SL
853 index: self.index,
854 marker: PhantomData,
1a4d82fc
JJ
855 }
856 }
857}
858
859impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a Node<K, V>, handle::KV, NodeType> {
860 /// Turns the handle into references to the key and value it points at. This is necessary
861 /// because the returned pointers have larger lifetimes than what would be returned by `key`
862 /// or `val`.
863 pub fn into_kv(self) -> (&'a K, &'a V) {
864 let (keys, vals) = self.node.as_slices();
865 unsafe {
866 (
867 keys.get_unchecked(self.index),
868 vals.get_unchecked(self.index)
869 )
870 }
871 }
872}
873
impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
    /// Turns the handle into mutable references to the key and value it points at. This is
    /// necessary because the returned pointers have larger lifetimes than what would be returned
    /// by `key_mut` or `val_mut`.
    pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
        let (keys, vals) = self.node.as_slices_mut();
        unsafe {
            // SAFETY: a KV handle's index is in bounds for the node's key/value
            // arrays by construction (see the `debug_assert!` in `kv_handle`).
            // Keys and values are distinct arrays, so the two &mut don't alias.
            (
                keys.get_unchecked_mut(self.index),
                vals.get_unchecked_mut(self.index)
            )
        }
    }

    /// Convert this handle into one pointing at the edge immediately to the left of the key/value
    /// pair pointed-to by this handle. This is useful because it returns a reference with larger
    /// lifetime than `left_edge`.
    pub fn into_left_edge(self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
        Handle {
            node: &mut *self.node,
            // Edge `i` sits immediately to the left of key/value pair `i`.
            index: self.index,
            marker: PhantomData,
        }
    }
}
899
// Read-only accessors for KV handles over any `Deref`-able node reference.
// The bodies below are intentionally kept as commented-out reference
// implementations (see the note); only the impl block itself is live.
impl<'a, K: 'a, V: 'a, NodeRef: Deref<Target=Node<K, V>> + 'a, NodeType> Handle<NodeRef, handle::KV,
                                                                                NodeType> {
    // These are fine to include, but are currently unneeded.
    //
    // /// Returns a reference to the key pointed-to by this handle. This doesn't return a
    // /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
    // /// handle.
    // pub fn key(&'a self) -> &'a K {
    //     unsafe { self.node.keys().get_unchecked(self.index) }
    // }
    //
    // /// Returns a reference to the value pointed-to by this handle. This doesn't return a
    // /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
    // /// handle.
    // pub fn val(&'a self) -> &'a V {
    //     unsafe { self.node.vals().get_unchecked(self.index) }
    // }
}
918
impl<'a, K: 'a, V: 'a, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType> where
    NodeRef: 'a + Deref<Target=Node<K, V>> + DerefMut,
{
    /// Returns a mutable reference to the key pointed-to by this handle. This doesn't return a
    /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
    /// handle.
    pub fn key_mut(&'a mut self) -> &'a mut K {
        // SAFETY: the handle's index is in bounds by construction (see `kv_handle`).
        unsafe { self.node.keys_mut().get_unchecked_mut(self.index) }
    }

    /// Returns a mutable reference to the value pointed-to by this handle. This doesn't return a
    /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
    /// handle.
    pub fn val_mut(&'a mut self) -> &'a mut V {
        // SAFETY: same in-bounds invariant as `key_mut`.
        unsafe { self.node.vals_mut().get_unchecked_mut(self.index) }
    }
}
936
impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType> where
    NodeRef: Deref<Target=Node<K, V>> + DerefMut,
{
    /// Gets the handle pointing to the edge immediately to the left of the key/value pair pointed
    /// to by this handle.
    pub fn left_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
        Handle {
            node: &mut *self.node,
            // Edge `i` sits to the left of key/value pair `i`.
            index: self.index,
            marker: PhantomData,
        }
    }

    /// Gets the handle pointing to the edge immediately to the right of the key/value pair pointed
    /// to by this handle.
    pub fn right_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
        Handle {
            node: &mut *self.node,
            // Edge `i + 1` sits to the right of key/value pair `i`; this is
            // always a valid edge index since a node has len() + 1 edges.
            index: self.index + 1,
            marker: PhantomData,
        }
    }
}
960
impl<K, V, NodeRef> Handle<NodeRef, handle::KV, handle::Leaf> where
    NodeRef: Deref<Target=Node<K, V>> + DerefMut,
{
    /// Removes the key/value pair at the handle's location.
    ///
    /// # Panics (in debug build)
    ///
    /// Panics if the node containing the pair is not a leaf node.
    pub fn remove_as_leaf(mut self) -> (K, V) {
        // A leaf has no edges, so `remove_kv` alone suffices here; on an
        // internal node it would have to be paired with `remove_edge`.
        unsafe { self.node.remove_kv(self.index) }
    }
}
973
impl<K, V, NodeRef> Handle<NodeRef, handle::KV, handle::Internal> where
    NodeRef: Deref<Target=Node<K, V>> + DerefMut
{
    /// Steal! Stealing is roughly analogous to a binary tree rotation.
    /// In this case, we're "rotating" right.
    ///
    /// Unsafe because it relies on the unchecked vector primitives
    /// (`pop_kv`/`insert_kv`/…); the left child must have an element (and,
    /// if internal, an edge) to spare, and the right child must have room —
    /// presumably guaranteed by the caller's underflow handling (not visible
    /// here, so confirm at call sites).
    unsafe fn steal_rightward(&mut self) {
        // Take the biggest stuff off left
        let (mut key, mut val, edge) = {
            let mut left_handle = self.left_edge();
            let left = left_handle.edge_mut();
            let (key, val) = left.pop_kv();
            // Only internal children have a trailing edge to carry over.
            let edge = if left.is_leaf() {
                None
            } else {
                Some(left.pop_edge())
            };

            (key, val, edge)
        };

        // Swap the parent's separating key-value pair with left's
        mem::swap(&mut key, self.key_mut());
        mem::swap(&mut val, self.val_mut());

        // Put them at the start of right
        let mut right_handle = self.right_edge();
        let right = right_handle.edge_mut();
        right.insert_kv(0, key, val);
        match edge {
            Some(edge) => right.insert_edge(0, edge),
            None => {}
        }
    }

    /// Steal! Stealing is roughly analogous to a binary tree rotation.
    /// In this case, we're "rotating" left.
    ///
    /// Unsafe for the same reasons as `steal_rightward`, with the roles of
    /// the two children reversed.
    unsafe fn steal_leftward(&mut self) {
        // Take the smallest stuff off right
        let (mut key, mut val, edge) = {
            let mut right_handle = self.right_edge();
            let right = right_handle.edge_mut();
            let (key, val) = right.remove_kv(0);
            // Only internal children have a leading edge to carry over.
            let edge = if right.is_leaf() {
                None
            } else {
                Some(right.remove_edge(0))
            };

            (key, val, edge)
        };

        // Swap the parent's separating key-value pair with right's
        mem::swap(&mut key, self.key_mut());
        mem::swap(&mut val, self.val_mut());

        // Put them at the end of left
        let mut left_handle = self.left_edge();
        let left = left_handle.edge_mut();
        left.push_kv(key, val);
        match edge {
            Some(edge) => left.push_edge(edge),
            None => {}
        }
    }

    /// Merge! Smooshes left and right into one node, along with the key-value
    /// pair that separated them in their parent.
    ///
    /// Unsafe because `remove_kv`/`remove_edge` are unchecked; the combined
    /// contents must fit in the left child (debug-asserted inside `absorb`).
    unsafe fn merge_children(mut self) {
        // Permanently remove right's index, and the key-value pair that separates
        // left and right
        let (key, val) = self.node.remove_kv(self.index);
        let right = self.node.remove_edge(self.index + 1);

        // Give left right's stuff.
        self.left_edge().edge_mut()
            .absorb(key, val, right);
    }
}
1052
impl<K, V> Node<K, V> {
    /// Returns the mutable handle pointing to the key/value pair at a given index.
    ///
    /// # Panics (in debug build)
    ///
    /// Panics if the given index is out of bounds.
    pub fn kv_handle(&mut self, index: usize) -> Handle<&mut Node<K, V>, handle::KV,
                                                        handle::LeafOrInternal> {
        // Necessary for correctness, but in a private module
        debug_assert!(index < self.len(), "kv_handle index out of bounds");
        Handle {
            node: self,
            index: index,
            marker: PhantomData,
        }
    }

    /// Returns a by-reference traversal over all key/value pairs and edges.
    pub fn iter<'a>(&'a self) -> Traversal<'a, K, V> {
        self.as_slices_internal().iter()
    }

    /// Returns a traversal with mutable values (keys stay immutable).
    pub fn iter_mut<'a>(&'a mut self) -> MutTraversal<'a, K, V> {
        self.as_slices_internal_mut().iter_mut()
    }

    /// Converts the node into an owning traversal that yields its contents by
    /// value and deallocates the node's buffer when dropped.
    pub fn into_iter(self) -> MoveTraversal<K, V> {
        unsafe {
            let ret = MoveTraversal {
                inner: MoveTraversalImpl {
                    keys: RawItems::from_slice(self.keys()),
                    vals: RawItems::from_slice(self.vals()),
                    edges: RawItems::from_slice(self.edges()),

                    // Keep the raw allocation pointer so the traversal's Drop
                    // impl can free the backing buffer once iteration is done.
                    ptr: Unique::new(*self.keys as *mut u8),
                    capacity: self.capacity(),
                    is_leaf: self.is_leaf()
                },
                head_is_edge: true,
                tail_is_edge: true,
                has_edges: !self.is_leaf(),
            };
            // Ownership of the buffer and its contents moved into `ret`;
            // forgetting `self` prevents a double free from Node's destructor.
            mem::forget(self);
            ret
        }
    }

    /// When a node has no keys or values and only a single edge, extract that edge.
    pub fn hoist_lone_child(&mut self) {
        // Necessary for correctness, but in a private module
        debug_assert!(self.is_empty());
        debug_assert!(!self.is_leaf());

        unsafe {
            // Move the only child out, release this node's own buffer, then
            // overwrite `*self` with the child. `ptr::read`/`ptr::write` avoid
            // running any destructor on the old `*self` beyond `destroy()`.
            let ret = ptr::read(self.edges().get_unchecked(0));
            self.destroy();
            ptr::write(self, ret);
        }
    }
}
1112
// Vector functions (all unchecked)
//
// These are the raw building blocks the rebalancing code above is made of.
// None of them checks bounds or spare capacity; every caller must uphold the
// stated pairing discipline (kv operation first, matching edge operation
// immediately after on internal nodes) so that the kv and edge arrays stay
// in sync with `_len`.
impl<K, V> Node<K, V> {
    // This must be followed by push_edge on an internal node.
    #[inline]
    unsafe fn push_kv(&mut self, key: K, val: V) {
        let len = self.len();

        // Write into the first unused slot; requires spare capacity.
        ptr::write(self.keys_mut().get_unchecked_mut(len), key);
        ptr::write(self.vals_mut().get_unchecked_mut(len), val);

        self._len += 1;
    }

    // This can only be called immediately after a call to push_kv.
    #[inline]
    unsafe fn push_edge(&mut self, edge: Node<K, V>) {
        // `push_kv` already bumped `_len`, so the new last edge lives at `len()`.
        let len = self.len();

        ptr::write(self.edges_mut().get_unchecked_mut(len), edge);
    }

    // This must be followed by insert_edge on an internal node.
    #[inline]
    unsafe fn insert_kv(&mut self, index: usize, key: K, val: V) -> &mut V {
        // Shift keys/vals at `index..len` one slot right to open a gap.
        // `ptr::copy` is a memmove, so the overlapping ranges are fine.
        ptr::copy(
            self.keys().as_ptr().offset(index as isize),
            self.keys_mut().as_mut_ptr().offset(index as isize + 1),
            self.len() - index
        );
        ptr::copy(
            self.vals().as_ptr().offset(index as isize),
            self.vals_mut().as_mut_ptr().offset(index as isize + 1),
            self.len() - index
        );

        ptr::write(self.keys_mut().get_unchecked_mut(index), key);
        ptr::write(self.vals_mut().get_unchecked_mut(index), val);

        self._len += 1;

        self.vals_mut().get_unchecked_mut(index)
    }

    // This can only be called immediately after a call to insert_kv.
    #[inline]
    unsafe fn insert_edge(&mut self, index: usize, edge: Node<K, V>) {
        // `insert_kv` already bumped `_len`, so `len() - index` covers the
        // old `len + 1 - index` trailing edges being shifted right.
        ptr::copy(
            self.edges().as_ptr().offset(index as isize),
            self.edges_mut().as_mut_ptr().offset(index as isize + 1),
            self.len() - index
        );
        ptr::write(self.edges_mut().get_unchecked_mut(index), edge);
    }

    // This must be followed by pop_edge on an internal node.
    #[inline]
    unsafe fn pop_kv(&mut self) -> (K, V) {
        // Move the last pair out; the slot is then logically uninitialized.
        let key = ptr::read(self.keys().get_unchecked(self.len() - 1));
        let val = ptr::read(self.vals().get_unchecked(self.len() - 1));

        self._len -= 1;

        (key, val)
    }

    // This can only be called immediately after a call to pop_kv.
    #[inline]
    unsafe fn pop_edge(&mut self) -> Node<K, V> {
        // `pop_kv` already decremented `_len`, so the old last edge is at
        // `len() + 1`.
        let edge = ptr::read(self.edges().get_unchecked(self.len() + 1));

        edge
    }

    // This must be followed by remove_edge on an internal node.
    #[inline]
    unsafe fn remove_kv(&mut self, index: usize) -> (K, V) {
        // Move the pair out, then close the gap by shifting the tail left.
        let key = ptr::read(self.keys().get_unchecked(index));
        let val = ptr::read(self.vals().get_unchecked(index));

        ptr::copy(
            self.keys().as_ptr().offset(index as isize + 1),
            self.keys_mut().as_mut_ptr().offset(index as isize),
            self.len() - index - 1
        );
        ptr::copy(
            self.vals().as_ptr().offset(index as isize + 1),
            self.vals_mut().as_mut_ptr().offset(index as isize),
            self.len() - index - 1
        );

        self._len -= 1;

        (key, val)
    }

    // This can only be called immediately after a call to remove_kv.
    #[inline]
    unsafe fn remove_edge(&mut self, index: usize) -> Node<K, V> {
        let edge = ptr::read(self.edges().get_unchecked(index));

        ptr::copy(
            self.edges().as_ptr().offset(index as isize + 1),
            self.edges_mut().as_mut_ptr().offset(index as isize),
            // index can be == len+1, so do the +1 first to avoid underflow.
            (self.len() + 1) - index
        );

        edge
    }
}
1223
// Private implementation details
impl<K, V> Node<K, V> {
    /// Node is full, so split it into two nodes, and yield the middle-most key-value pair
    /// because we have one too many, and our parent now has one too few
    fn split(&mut self) -> (K, V, Node<K, V>) {
        // Necessary for correctness, but in a private function
        debug_assert!(!self.is_empty());

        let mut right = if self.is_leaf() {
            Node::new_leaf(self.capacity())
        } else {
            unsafe { Node::new_internal(self.capacity()) }
        };

        unsafe {
            // The new sibling takes the upper `len / 2` entries; `self` keeps
            // the lower part minus the separator handed up to the parent.
            right._len = self.len() / 2;
            let right_offset = self.len() - right.len();
            ptr::copy_nonoverlapping(
                self.keys().as_ptr().offset(right_offset as isize),
                right.keys_mut().as_mut_ptr(),
                right.len()
            );
            ptr::copy_nonoverlapping(
                self.vals().as_ptr().offset(right_offset as isize),
                right.vals_mut().as_mut_ptr(),
                right.len()
            );
            if !self.is_leaf() {
                // Internal nodes also move the edges flanking those entries
                // (always one more edge than key/value pairs).
                ptr::copy_nonoverlapping(
                    self.edges().as_ptr().offset(right_offset as isize),
                    right.edges_mut().as_mut_ptr(),
                    right.len() + 1
                );
            }

            // The entry just below the copied range becomes the separator; it
            // is read out by value and belongs to neither half afterwards.
            let key = ptr::read(self.keys().get_unchecked(right_offset - 1));
            let val = ptr::read(self.vals().get_unchecked(right_offset - 1));

            self._len = right_offset - 1;

            (key, val, right)
        }
    }

    /// Take all the values from right, separated by the given key and value
    fn absorb(&mut self, key: K, val: V, mut right: Node<K, V>) {
        // Necessary for correctness, but in a private function
        // Just as a sanity check, make sure we can fit this guy in
        debug_assert!(self.len() + right.len() <= self.capacity());
        debug_assert!(self.is_leaf() == right.is_leaf());

        unsafe {
            let old_len = self.len();
            // The separator plus all of `right`'s entries join this node.
            self._len += right.len() + 1;

            // The separator goes directly after our old last entry.
            ptr::write(self.keys_mut().get_unchecked_mut(old_len), key);
            ptr::write(self.vals_mut().get_unchecked_mut(old_len), val);

            ptr::copy_nonoverlapping(
                right.keys().as_ptr(),
                self.keys_mut().as_mut_ptr().offset(old_len as isize + 1),
                right.len()
            );
            ptr::copy_nonoverlapping(
                right.vals().as_ptr(),
                self.vals_mut().as_mut_ptr().offset(old_len as isize + 1),
                right.len()
            );
            if !self.is_leaf() {
                ptr::copy_nonoverlapping(
                    right.edges().as_ptr(),
                    self.edges_mut().as_mut_ptr().offset(old_len as isize + 1),
                    right.len() + 1
                );
            }

            // `right`'s contents were moved out bitwise; free its buffer via
            // `destroy()` and forget it so its Drop doesn't free/drop again.
            right.destroy();
            mem::forget(right);
        }
    }
}
1305
/// Get the capacity of a node from the order of the parent B-Tree.
///
/// A tree of order `b` holds at most `2 * b - 1` entries per node, so that
/// is the size every node's arrays are allocated for.
fn capacity_from_b(b: usize) -> usize {
    b * 2 - 1
}
1310
/// Get the minimum load of a node from its capacity.
fn min_load_from_capacity(capacity: usize) -> usize {
    // Capacity is `2 * B - 1`, so halving (rounding down) yields `B - 1`.
    capacity / 2
}
1316
/// A trait for pairs of `Iterator`s, one over edges and the other over key/value pairs. This is
/// necessary, as the `MoveTraversalImpl` needs to have a destructor that deallocates the `Node`,
/// and a pair of `Iterator`s would require two independent destructors.
trait TraversalImpl {
    /// The key/value item yielded between edges.
    type Item;
    /// The edge representation: a borrowed, mutably borrowed, or owned child
    /// node, depending on the traversal flavor (see the type aliases below).
    type Edge;

    // Front/back stepping over the key/value stream.
    fn next_kv(&mut self) -> Option<Self::Item>;
    fn next_kv_back(&mut self) -> Option<Self::Item>;

    // Front/back stepping over the edge stream (kept separate so the caller
    // can interleave the two in B-Tree order).
    fn next_edge(&mut self) -> Option<Self::Edge>;
    fn next_edge_back(&mut self) -> Option<Self::Edge>;
}
1330
/// A `TraversalImpl` that actually is backed by two iterators. This works in the non-moving case,
/// as no deallocation needs to be done.
///
/// Field `.0` iterates key/value pairs; field `.1` iterates edges.
#[derive(Clone)]
struct ElemsAndEdges<Elems, Edges>(Elems, Edges);

impl<K, V, E, Elems: DoubleEndedIterator, Edges: DoubleEndedIterator>
        TraversalImpl for ElemsAndEdges<Elems, Edges>
    where Elems : Iterator<Item=(K, V)>, Edges : Iterator<Item=E>
{
    type Item = (K, V);
    type Edge = E;

    // Pure delegation to the two wrapped iterators.
    fn next_kv(&mut self) -> Option<(K, V)> { self.0.next() }
    fn next_kv_back(&mut self) -> Option<(K, V)> { self.0.next_back() }

    fn next_edge(&mut self) -> Option<E> { self.1.next() }
    fn next_edge_back(&mut self) -> Option<E> { self.1.next_back() }
}
1349
/// A `TraversalImpl` taking a `Node` by value.
struct MoveTraversalImpl<K, V> {
    // Raw cursors over the node's three arrays; items are moved out by value.
    keys: RawItems<K>,
    vals: RawItems<V>,
    edges: RawItems<Node<K, V>>,

    // For deallocation when we are done iterating.
    // `ptr` is the start of the node's backing allocation; `capacity` and
    // `is_leaf` are needed to recompute its size/alignment (leaf nodes have
    // no edge array, which changes the layout — see the Drop impl).
    ptr: Unique<u8>,
    capacity: usize,
    is_leaf: bool
}
1361
c34b1796
AL
// SAFETY(review): `MoveTraversalImpl` owns its remaining K/V/child items
// behind raw pointers, so it should be sound to share/send exactly when the
// item types are `Sync`/`Send` — which is what these bounds mirror. The raw
// pointers are what block the automatic derivation; confirm `RawItems` has
// no additional sharing.
unsafe impl<K: Sync, V: Sync> Sync for MoveTraversalImpl<K, V> {}
unsafe impl<K: Send, V: Send> Send for MoveTraversalImpl<K, V> {}
1364
85aaf69f
SL
impl<K, V> TraversalImpl for MoveTraversalImpl<K, V> {
    type Item = (K, V);
    type Edge = Node<K, V>;

    fn next_kv(&mut self) -> Option<(K, V)> {
        // Keys and values advance in lockstep. Note that both cursors are
        // polled even if the first is exhausted; `None` from either side
        // ends the stream.
        match (self.keys.next(), self.vals.next()) {
            (Some(k), Some(v)) => Some((k, v)),
            _ => None
        }
    }

    fn next_kv_back(&mut self) -> Option<(K, V)> {
        match (self.keys.next_back(), self.vals.next_back()) {
            (Some(k), Some(v)) => Some((k, v)),
            _ => None
        }
    }

    fn next_edge(&mut self) -> Option<Node<K, V>> {
        // Necessary for correctness, but in a private module
        debug_assert!(!self.is_leaf);
        self.edges.next()
    }

    fn next_edge_back(&mut self) -> Option<Node<K, V>> {
        // Necessary for correctness, but in a private module
        debug_assert!(!self.is_leaf);
        self.edges.next_back()
    }
}
1395
1a4d82fc
JJ
impl<K, V> Drop for MoveTraversalImpl<K, V> {
    fn drop(&mut self) {
        // We need to cleanup the stored values manually, as the RawItems destructor would run
        // after our deallocation. Draining each cursor runs the destructor of
        // every item the traversal never yielded.
        for _ in self.keys.by_ref() {}
        for _ in self.vals.by_ref() {}
        for _ in self.edges.by_ref() {}

        // Recompute the original allocation's layout (it depends on capacity
        // and on whether an edge array was allocated) and free the buffer.
        let (alignment, size) =
                calculate_allocation_generic::<K, V>(self.capacity, self.is_leaf);
        unsafe { heap::deallocate(*self.ptr, size, alignment) };
    }
}
1409
/// An abstraction over all the different kinds of traversals a node supports
#[derive(Clone)]
struct AbsTraversal<Impl> {
    inner: Impl,
    // Whether the front (resp. back) of the traversal currently rests on an
    // edge rather than a key/value pair; toggled as items are consumed.
    head_is_edge: bool,
    tail_is_edge: bool,
    // Leaf nodes have no edges at all, so edge positions are skipped entirely.
    has_edges: bool,
}
1418
/// A single atomic step in a traversal.
pub enum TraversalItem<K, V, E> {
    /// An element is visited. This isn't written as `Elem(K, V)` just because `opt.map(Elem)`
    /// requires the function to take a single argument. (Enum constructors are functions.)
    Elem((K, V)),
    /// An edge is followed. `E` is a borrowed, mutably borrowed, or owned
    /// child node depending on the traversal flavor.
    Edge(E),
}
1427
/// A traversal over a node's entries and edges
pub type Traversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
                                                              slice::Iter<'a, V>>,
                                                              slice::Iter<'a, Node<K, V>>>>;

/// A mutable traversal over a node's entries and edges
///
/// Note that keys are still iterated by shared reference (`slice::Iter`);
/// only values and edges are mutable, preserving the tree's ordering.
pub type MutTraversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
                                                                 slice::IterMut<'a, V>>,
                                                                 slice::IterMut<'a, Node<K, V>>>>;

/// An owning traversal over a node's entries and edges
pub type MoveTraversal<K, V> = AbsTraversal<MoveTraversalImpl<K, V>>;
1440
85aaf69f
SL
1441
1442impl<K, V, E, Impl> Iterator for AbsTraversal<Impl>
1443 where Impl: TraversalImpl<Item=(K, V), Edge=E> {
1a4d82fc
JJ
1444 type Item = TraversalItem<K, V, E>;
1445
1446 fn next(&mut self) -> Option<TraversalItem<K, V, E>> {
85aaf69f
SL
1447 self.next_edge_item().map(Edge).or_else(||
1448 self.next_kv_item().map(Elem)
1449 )
1450 }
1451}
1452
1453impl<K, V, E, Impl> DoubleEndedIterator for AbsTraversal<Impl>
1454 where Impl: TraversalImpl<Item=(K, V), Edge=E> {
1455 fn next_back(&mut self) -> Option<TraversalItem<K, V, E>> {
1456 self.next_edge_item_back().map(Edge).or_else(||
1457 self.next_kv_item_back().map(Elem)
1458 )
1459 }
1460}
1a4d82fc 1461
85aaf69f
SL
1462impl<K, V, E, Impl> AbsTraversal<Impl>
1463 where Impl: TraversalImpl<Item=(K, V), Edge=E> {
1464 /// Advances the iterator and returns the item if it's an edge. Returns None
1465 /// and does nothing if the first item is not an edge.
1466 pub fn next_edge_item(&mut self) -> Option<E> {
1467 // NB. `&& self.has_edges` might be redundant in this condition.
1468 let edge = if self.head_is_edge && self.has_edges {
1469 self.inner.next_edge()
1a4d82fc 1470 } else {
85aaf69f
SL
1471 None
1472 };
1473 self.head_is_edge = false;
1474 edge
1475 }
1476
1477 /// Advances the iterator and returns the item if it's an edge. Returns None
1478 /// and does nothing if the last item is not an edge.
1479 pub fn next_edge_item_back(&mut self) -> Option<E> {
1480 let edge = if self.tail_is_edge && self.has_edges {
1481 self.inner.next_edge_back()
1482 } else {
1483 None
1484 };
1485 self.tail_is_edge = false;
1486 edge
1487 }
1488
1489 /// Advances the iterator and returns the item if it's a key-value pair. Returns None
1490 /// and does nothing if the first item is not a key-value pair.
1491 pub fn next_kv_item(&mut self) -> Option<(K, V)> {
1492 if !self.head_is_edge {
1493 self.head_is_edge = true;
1494 self.inner.next_kv()
1495 } else {
1496 None
1497 }
1498 }
1499
1500 /// Advances the iterator and returns the item if it's a key-value pair. Returns None
1501 /// and does nothing if the last item is not a key-value pair.
1502 pub fn next_kv_item_back(&mut self) -> Option<(K, V)> {
1503 if !self.tail_is_edge {
1504 self.tail_is_edge = true;
1505 self.inner.next_kv_back()
1506 } else {
1507 None
1a4d82fc
JJ
1508 }
1509 }
1510}
1511
85aaf69f
SL
// Generates the shared slicing/iteration API for both slice flavors:
// `$NodeSlice` is the slice type (`NodeSlice` or `MutNodeSlice`), `$Traversal`
// the traversal it yields, and `$index`/`$iter` the (possibly `_mut`) methods
// used on the value/edge slices. `$as_slices_internal` is accepted but unused
// in this body — presumably kept for signature symmetry; confirm before
// removing.
macro_rules! node_slice_impl {
    ($NodeSlice:ident, $Traversal:ident,
     $as_slices_internal:ident, $index:ident, $iter:ident) => {
        impl<'a, K: Ord + 'a, V: 'a> $NodeSlice<'a, K, V> {
            /// Performs linear search in a slice. Returns a tuple of (index, is_exact_match).
            fn search_linear<Q: ?Sized>(&self, key: &Q) -> (usize, bool)
                    where K: Borrow<Q>, Q: Ord {
                for (i, k) in self.keys.iter().enumerate() {
                    match key.cmp(k.borrow()) {
                        Greater => {},
                        Equal => return (i, true),
                        Less => return (i, false),
                    }
                }
                // Key is greater than everything: it would belong past the end.
                (self.keys.len(), false)
            }

            /// Returns a sub-slice with elements starting with `min_key`.
            pub fn slice_from(self, min_key: &K) -> $NodeSlice<'a, K, V> {
                //  _______________
                // |_1_|_3_|_5_|_7_|
                // |   |   |   |   |
                // 0 0 1 1 2 2 3 3 4  index
                // |   |   |   |   |
                // \___|___|___|___/  slice_from(&0); pos = 0
                //     \___|___|___/  slice_from(&2); pos = 1
                //     |___|___|___/  slice_from(&3); pos = 1; result.head_is_edge = false
                //         \___|___/  slice_from(&4); pos = 2
                //             \___/  slice_from(&6); pos = 3
                //                \|/ slice_from(&999); pos = 4
                let (pos, pos_is_kv) = self.search_linear(min_key);
                $NodeSlice {
                    has_edges: self.has_edges,
                    edges: if !self.has_edges {
                        self.edges
                    } else {
                        self.edges.$index(pos ..)
                    },
                    keys: &self.keys[pos ..],
                    vals: self.vals.$index(pos ..),
                    // An exact match excludes the leading edge: iteration
                    // starts directly on the matching key/value pair.
                    head_is_edge: !pos_is_kv,
                    tail_is_edge: self.tail_is_edge,
                }
            }

            /// Returns a sub-slice with elements up to and including `max_key`.
            pub fn slice_to(self, max_key: &K) -> $NodeSlice<'a, K, V> {
                //  _______________
                // |_1_|_3_|_5_|_7_|
                // |   |   |   |   |
                // 0 0 1 1 2 2 3 3 4  index
                // |   |   |   |   |
                //\|/  |   |   |   |  slice_to(&0); pos = 0
                // \___/   |   |   |  slice_to(&2); pos = 1
                // \___|___|   |   |  slice_to(&3); pos = 1; result.tail_is_edge = false
                // \___|___/   |   |  slice_to(&4); pos = 2
                // \___|___|___/   |  slice_to(&6); pos = 3
                // \___|___|___|___/  slice_to(&999); pos = 4
                let (pos, pos_is_kv) = self.search_linear(max_key);
                // Inclusive upper bound: keep the matching pair itself.
                let pos = pos + if pos_is_kv { 1 } else { 0 };
                $NodeSlice {
                    has_edges: self.has_edges,
                    edges: if !self.has_edges {
                        self.edges
                    } else {
                        self.edges.$index(.. (pos + 1))
                    },
                    keys: &self.keys[..pos],
                    vals: self.vals.$index(.. pos),
                    head_is_edge: self.head_is_edge,
                    // An exact match excludes the trailing edge: iteration
                    // ends directly on the matching key/value pair.
                    tail_is_edge: !pos_is_kv,
                }
            }
        }

        impl<'a, K: 'a, V: 'a> $NodeSlice<'a, K, V> {
            /// Returns an iterator over key/value pairs and edges in a slice.
            #[inline]
            pub fn $iter(self) -> $Traversal<'a, K, V> {
                let mut edges = self.edges.$iter();
                // Skip edges at both ends, if excluded.
                if !self.head_is_edge { edges.next(); }
                if !self.tail_is_edge { edges.next_back(); }
                // The key iterator is always immutable.
                $Traversal {
                    inner: ElemsAndEdges(
                        self.keys.iter().zip(self.vals.$iter()),
                        edges
                    ),
                    head_is_edge: self.head_is_edge,
                    tail_is_edge: self.tail_is_edge,
                    has_edges: self.has_edges,
                }
            }
        }
    }
}
85aaf69f
SL
1609
// Instantiate the slice helpers for the shared and the mutable variant.
node_slice_impl!(NodeSlice, Traversal, as_slices_internal, index, iter);
node_slice_impl!(MutNodeSlice, MutTraversal, as_slices_internal_mut, index_mut, iter_mut);