1// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2// file at the top-level directory of this distribution and at
3// http://rust-lang.org/COPYRIGHT.
4//
5// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8// option. This file may not be copied, modified, or distributed
9// except according to those terms.
10
11// This module contains all the internal representation and logic for a B-Tree's node,
12// exposed through a safe interface, so that BTreeMap itself does not depend on any of these details.
13
14pub use self::InsertionResult::*;
15pub use self::SearchResult::*;
16pub use self::ForceResult::*;
17pub use self::TraversalItem::*;
18
19use core::prelude::*;
20
21use core::borrow::BorrowFrom;
22use core::cmp::Ordering::{Greater, Less, Equal};
23use core::iter::Zip;
24use core::ops::{Deref, DerefMut};
25use core::ptr::Unique;
26use core::{slice, mem, ptr, cmp, num, raw};
27use alloc::heap;
28
29/// Represents the result of an Insertion: either the item fit, or the node had to split
30pub enum InsertionResult<K, V> {
31 /// The inserted element fit
32 Fit,
33 /// The inserted element did not fit, so the node was split
34 Split(K, V, Node<K, V>),
35}
36
37/// Represents the result of a search for a key in a single node
38pub enum SearchResult<NodeRef> {
39 /// The element was found at the given index
40 Found(Handle<NodeRef, handle::KV, handle::LeafOrInternal>),
41 /// The element wasn't found, but if it's anywhere, it must be beyond this edge
42 GoDown(Handle<NodeRef, handle::Edge, handle::LeafOrInternal>),
43}
44
45/// A B-Tree Node. We keep keys/edges/values separate to optimize searching for keys.
46#[unsafe_no_drop_flag]
47pub struct Node<K, V> {
48 // To avoid the need for multiple allocations, we allocate a single buffer with enough space
49 // for `capacity` keys, `capacity` values, and (in internal nodes) `capacity + 1` edges.
50 // Despite this, we store three separate pointers to the three "chunks" of the buffer because
51 // the performance drops significantly if the locations of the vals and edges need to be
52 // recalculated upon access.
53 //
54 // These will never be null during normal usage of a `Node`. However, to avoid the need for a
55 // drop flag, `Node::drop` zeroes `keys`, signaling that the `Node` has already been cleaned
56 // up.
57 keys: Unique<K>,
58 vals: Unique<V>,
59
60 // In leaf nodes, this will be null, and no space will be allocated for edges.
61 edges: Unique<Node<K, V>>,
62
63 // At any given time, there will be `_len` keys, `_len` values, and (in an internal node)
64 // `_len + 1` edges. In a leaf node, there will never be any edges.
65 //
66 // Note: instead of accessing this field directly, please call the `len()` method, which should
67 // be more stable in the face of representation changes.
68 _len: uint,
69
70 // FIXME(gereeter) It shouldn't be necessary to store the capacity in every node, as it should
71 // be constant throughout the tree. Once a solution to this is found, it might be possible to
72 // also pass down the offsets into the buffer that vals and edges are stored at, removing the
73 // need for those two pointers.
74 //
75 // Note: instead of accessing this field directly, please call the `capacity()` method, which
76 // should be more stable in the face of representation changes.
77 _capacity: uint,
78}
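// An illustrative sketch of the single-buffer layout described above. Assuming a
// hypothetical 64-bit target with K = u32, V = u64 and a capacity of 5, the keys start at
// offset 0 and occupy 5 * 4 = 20 bytes, the values start at offset 24 (20 rounded up to
// u64's 8-byte alignment) and occupy 5 * 8 = 40 bytes, and, for an internal node only, the
// 5 + 1 = 6 edges start at the next offset that satisfies `Node<K, V>`'s alignment. A leaf
// node allocates no edge segment at all, which is what the `(0, 1)` edge size/alignment
// below stands for.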
79
80/// Rounds up to a multiple of a power of two. Returns the closest multiple
81/// of `target_alignment` that is greater than or equal to `unrounded`.
82///
83/// # Panics
84///
85/// Panics if `target_alignment` is not a power of two.
86#[inline]
87fn round_up_to_next(unrounded: uint, target_alignment: uint) -> uint {
88 assert!(num::UnsignedInt::is_power_of_two(target_alignment));
89 (unrounded + target_alignment - 1) & !(target_alignment - 1)
90}
91
92#[test]
93fn test_rounding() {
94 assert_eq!(round_up_to_next(0, 4), 0);
95 assert_eq!(round_up_to_next(1, 4), 4);
96 assert_eq!(round_up_to_next(2, 4), 4);
97 assert_eq!(round_up_to_next(3, 4), 4);
98 assert_eq!(round_up_to_next(4, 4), 4);
99 assert_eq!(round_up_to_next(5, 4), 8);
100}
101
102// Returns a tuple of (val_offset, edge_offset),
103// measured from the start of the allocated buffer.
104#[inline]
105fn calculate_offsets(keys_size: uint,
106 vals_size: uint, vals_align: uint,
107 edges_align: uint)
108 -> (uint, uint) {
109 let vals_offset = round_up_to_next(keys_size, vals_align);
110 let end_of_vals = vals_offset + vals_size;
111
112 let edges_offset = round_up_to_next(end_of_vals, edges_align);
113
114 (vals_offset, edges_offset)
115}
116
117// Returns a tuple of (minimum required alignment, total allocation size)
118// for a buffer holding the three arrays with the given sizes and alignments.
119#[inline]
120fn calculate_allocation(keys_size: uint, keys_align: uint,
121 vals_size: uint, vals_align: uint,
122 edges_size: uint, edges_align: uint)
123 -> (uint, uint) {
124 let (_, edges_offset) = calculate_offsets(keys_size,
125 vals_size, vals_align,
126 edges_align);
127 let end_of_edges = edges_offset + edges_size;
128
129 let min_align = cmp::max(keys_align, cmp::max(vals_align, edges_align));
130
131 (min_align, end_of_edges)
132}
133
134#[test]
135fn test_offset_calculation() {
136 assert_eq!(calculate_allocation(128, 8, 15, 1, 4, 4), (8, 148));
137 assert_eq!(calculate_allocation(3, 1, 2, 1, 1, 1), (1, 6));
138 assert_eq!(calculate_allocation(6, 2, 12, 4, 24, 8), (8, 48));
139 assert_eq!(calculate_offsets(128, 15, 1, 4), (128, 144));
140 assert_eq!(calculate_offsets(3, 2, 1, 1), (3, 5));
141 assert_eq!(calculate_offsets(6, 12, 4, 8), (8, 24));
142}
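// Working through the third `calculate_allocation` case above as a sanity check: the keys
// occupy bytes 0..6, the values (alignment 4) start at round_up_to_next(6, 4) = 8 and end
// at 8 + 12 = 20, the edges (alignment 8) start at round_up_to_next(20, 8) = 24 and end at
// 24 + 24 = 48, and the strictest of the three alignments is 8 -- hence the expected
// result of (8, 48).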
143
144fn calculate_allocation_generic<K, V>(capacity: uint, is_leaf: bool) -> (uint, uint) {
145 let (keys_size, keys_align) = (capacity * mem::size_of::<K>(), mem::min_align_of::<K>());
146 let (vals_size, vals_align) = (capacity * mem::size_of::<V>(), mem::min_align_of::<V>());
147 let (edges_size, edges_align) = if is_leaf {
148 (0, 1)
149 } else {
150 ((capacity + 1) * mem::size_of::<Node<K, V>>(), mem::min_align_of::<Node<K, V>>())
151 };
152
153 calculate_allocation(
154 keys_size, keys_align,
155 vals_size, vals_align,
156 edges_size, edges_align
157 )
158}
159
160fn calculate_offsets_generic<K, V>(capacity: uint, is_leaf: bool) -> (uint, uint) {
161 let keys_size = capacity * mem::size_of::<K>();
162 let vals_size = capacity * mem::size_of::<V>();
163 let vals_align = mem::min_align_of::<V>();
164 let edges_align = if is_leaf {
165 1
166 } else {
167 mem::min_align_of::<Node<K, V>>()
168 };
169
170 calculate_offsets(
171 keys_size,
172 vals_size, vals_align,
173 edges_align
174 )
175}
176
177/// An iterator over a slice that owns the elements of the slice but not the allocation.
178struct RawItems<T> {
179 head: *const T,
180 tail: *const T,
181}
182
183impl<T> RawItems<T> {
184 unsafe fn from_slice(slice: &[T]) -> RawItems<T> {
185 RawItems::from_parts(slice.as_ptr(), slice.len())
186 }
187
188 unsafe fn from_parts(ptr: *const T, len: uint) -> RawItems<T> {
189 if mem::size_of::<T>() == 0 {
190 RawItems {
191 head: ptr,
192 tail: (ptr as uint + len) as *const T,
193 }
194 } else {
195 RawItems {
196 head: ptr,
197 tail: ptr.offset(len as int),
198 }
199 }
200 }
201
202 unsafe fn push(&mut self, val: T) {
203 ptr::write(self.tail as *mut T, val);
204
205 if mem::size_of::<T>() == 0 {
206 self.tail = (self.tail as uint + 1) as *const T;
207 } else {
208 self.tail = self.tail.offset(1);
209 }
210 }
211}
212
213impl<T> Iterator for RawItems<T> {
214 type Item = T;
215
216 fn next(&mut self) -> Option<T> {
217 if self.head == self.tail {
218 None
219 } else {
220 unsafe {
221 let ret = Some(ptr::read(self.head));
222
223 if mem::size_of::<T>() == 0 {
224 self.head = (self.head as uint + 1) as *const T;
225 } else {
226 self.head = self.head.offset(1);
227 }
228
229 ret
230 }
231 }
232 }
233}
234
235impl<T> DoubleEndedIterator for RawItems<T> {
236 fn next_back(&mut self) -> Option<T> {
237 if self.head == self.tail {
238 None
239 } else {
240 unsafe {
241 if mem::size_of::<T>() == 0 {
242 self.tail = (self.tail as uint - 1) as *const T;
243 } else {
244 self.tail = self.tail.offset(-1);
245 }
246
247 Some(ptr::read(self.tail))
248 }
249 }
250 }
251}
252
253#[unsafe_destructor]
254impl<T> Drop for RawItems<T> {
255 fn drop(&mut self) {
256 for _ in *self {}
257 }
258}
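// `RawItems` doubles as a panic guard when filling a freshly allocated node, e.g. in the
// `Clone` impl further down: each cloned element is pushed into a `RawItems` pointing at
// the destination buffer, and the guard is only `mem::forget`ed once every element has
// been cloned. A rough sketch of the pattern, where `clone_one` stands in for any
// per-element operation that might panic:
//
//     let mut guard = unsafe { RawItems::from_parts(dest.as_ptr(), 0) };
//     for item in source.iter() {
//         unsafe { guard.push(clone_one(item)); } // written straight into `dest`
//     }
//     mem::forget(guard); // success: `dest` now owns the elements
//
// If `clone_one` panics partway through, `guard` is dropped, and its `Drop` impl above
// iterates over (and therefore drops) exactly the elements that were already written.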
259
260#[unsafe_destructor]
261impl<K, V> Drop for Node<K, V> {
262 fn drop(&mut self) {
263 if self.keys.0.is_null() {
264 // We have already cleaned up this node.
265 return;
266 }
267
268 // Do the actual cleanup.
269 unsafe {
270 drop(RawItems::from_slice(self.keys()));
271 drop(RawItems::from_slice(self.vals()));
272 drop(RawItems::from_slice(self.edges()));
273
274 self.destroy();
275 }
276
277 self.keys.0 = ptr::null_mut();
278 }
279}
280
281impl<K, V> Node<K, V> {
282 /// Make a new internal node. The caller must initialize the result to uphold the invariant that
283 /// there are `len() + 1` edges.
284 unsafe fn new_internal(capacity: uint) -> Node<K, V> {
285 let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, false);
286
287 let buffer = heap::allocate(size, alignment);
288 if buffer.is_null() { ::alloc::oom(); }
289
290 let (vals_offset, edges_offset) = calculate_offsets_generic::<K, V>(capacity, false);
291
292 Node {
293 keys: Unique(buffer as *mut K),
294 vals: Unique(buffer.offset(vals_offset as int) as *mut V),
295 edges: Unique(buffer.offset(edges_offset as int) as *mut Node<K, V>),
296 _len: 0,
297 _capacity: capacity,
298 }
299 }
300
301 /// Make a new leaf node
302 fn new_leaf(capacity: uint) -> Node<K, V> {
303 let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, true);
304
305 let buffer = unsafe { heap::allocate(size, alignment) };
306 if buffer.is_null() { ::alloc::oom(); }
307
308 let (vals_offset, _) = calculate_offsets_generic::<K, V>(capacity, true);
309
310 Node {
311 keys: Unique(buffer as *mut K),
312 vals: Unique(unsafe { buffer.offset(vals_offset as int) as *mut V }),
313 edges: Unique(ptr::null_mut()),
314 _len: 0,
315 _capacity: capacity,
316 }
317 }
318
319 unsafe fn destroy(&mut self) {
320 let (alignment, size) =
321 calculate_allocation_generic::<K, V>(self.capacity(), self.is_leaf());
322 heap::deallocate(self.keys.0 as *mut u8, size, alignment);
323 }
324
325 #[inline]
326 pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
327 unsafe {(
328 mem::transmute(raw::Slice {
329 data: self.keys.0 as *const K,
330 len: self.len()
331 }),
332 mem::transmute(raw::Slice {
333 data: self.vals.0 as *const V,
334 len: self.len()
335 })
336 )}
337 }
338
339 #[inline]
340 pub fn as_slices_mut<'a>(&'a mut self) -> (&'a mut [K], &'a mut [V]) {
341 unsafe { mem::transmute(self.as_slices()) }
342 }
343
344 #[inline]
345 pub fn as_slices_internal<'a>(&'a self) -> (&'a [K], &'a [V], &'a [Node<K, V>]) {
346 let (keys, vals) = self.as_slices();
347 let edges: &[_] = if self.is_leaf() {
348 &[]
349 } else {
350 unsafe {
351 mem::transmute(raw::Slice {
352 data: self.edges.0 as *const Node<K, V>,
353 len: self.len() + 1
354 })
355 }
356 };
357 (keys, vals, edges)
358 }
359
360 #[inline]
361 pub fn as_slices_internal_mut<'a>(&'a mut self) -> (&'a mut [K], &'a mut [V],
362 &'a mut [Node<K, V>]) {
363 unsafe { mem::transmute(self.as_slices_internal()) }
364 }
365
366 #[inline]
367 pub fn keys<'a>(&'a self) -> &'a [K] {
368 self.as_slices().0
369 }
370
371 #[inline]
372 pub fn keys_mut<'a>(&'a mut self) -> &'a mut [K] {
373 self.as_slices_mut().0
374 }
375
376 #[inline]
377 pub fn vals<'a>(&'a self) -> &'a [V] {
378 self.as_slices().1
379 }
380
381 #[inline]
382 pub fn vals_mut<'a>(&'a mut self) -> &'a mut [V] {
383 self.as_slices_mut().1
384 }
385
386 #[inline]
387 pub fn edges<'a>(&'a self) -> &'a [Node<K, V>] {
388 self.as_slices_internal().2
389 }
390
391 #[inline]
392 pub fn edges_mut<'a>(&'a mut self) -> &'a mut [Node<K, V>] {
393 self.as_slices_internal_mut().2
394 }
395}
396
397// FIXME(gereeter) Write an efficient clone_from
398#[stable]
399impl<K: Clone, V: Clone> Clone for Node<K, V> {
400 fn clone(&self) -> Node<K, V> {
401 let mut ret = if self.is_leaf() {
402 Node::new_leaf(self.capacity())
403 } else {
404 unsafe { Node::new_internal(self.capacity()) }
405 };
406
407 unsafe {
408 // For panic safety: if a clone() below panics, these drop whatever was already cloned
409 let mut keys = RawItems::from_parts(ret.keys().as_ptr(), 0);
410 let mut vals = RawItems::from_parts(ret.vals().as_ptr(), 0);
411 let mut edges = RawItems::from_parts(ret.edges().as_ptr(), 0);
412
413 for key in self.keys().iter() {
414 keys.push(key.clone())
415 }
416 for val in self.vals().iter() {
417 vals.push(val.clone())
418 }
419 for edge in self.edges().iter() {
420 edges.push(edge.clone())
421 }
422
423 mem::forget(keys);
424 mem::forget(vals);
425 mem::forget(edges);
426
427 ret._len = self.len();
428 }
429
430 ret
431 }
432}
433
434/// A reference to something in the middle of a `Node`. There are two `Type`s of `Handle`s,
435/// namely `KV` handles, which point to key/value pairs, and `Edge` handles, which point to edges
436/// before or after key/value pairs. Methods are provided for removing pairs, inserting into edges,
437/// accessing the stored values, and moving around the `Node`.
438///
439/// This handle is generic, and can take any sort of reference to a `Node`. The reason for this is
440/// two-fold. First, it reduces repetitive code: functions that don't need mutability are
441/// implemented once for both mutable and immutable references. Second, and more importantly,
442/// it allows users of the `Handle` API to associate metadata with the reference. This is used in
443/// `BTreeMap` to give `Node`s temporary "IDs" that persist for as long as the `Node` is used in a
444/// `Handle`.
445///
446/// # A note on safety
447///
448/// Unfortunately, the extra power afforded by being generic also means that safety can technically
449/// be broken. For sensible implementations of `Deref` and `DerefMut`, these handles are perfectly
450/// safe. As long as repeatedly calling `.deref()` results in the same Node being returned each
451/// time, everything should work fine. However, if the `Deref` implementation swaps in multiple
452/// different nodes, then the indices that are assumed to be in bounds suddenly stop being so. For
453/// example:
454///
455/// ```rust,ignore
456/// struct Nasty<'a> {
457/// first: &'a Node<uint, uint>,
458/// second: &'a Node<uint, uint>,
459/// flag: &'a Cell<bool>,
460/// }
461///
462/// impl<'a> Deref for Nasty<'a> {
463/// type Target = Node<uint, uint>;
464///
465/// fn deref(&self) -> &Node<uint, uint> {
466/// if self.flag.get() {
467/// &*self.second
468/// } else {
469/// &*self.first
470/// }
471/// }
472/// }
473///
474/// fn main() {
475/// let flag = Cell::new(false);
476/// let mut small_node = Node::make_leaf_root(3);
477/// let mut large_node = Node::make_leaf_root(100);
478///
479/// for i in range(0, 100) {
480/// // Insert to the end
481/// large_node.edge_handle(i).insert_as_leaf(i, i);
482/// }
483///
484/// let nasty = Nasty {
485/// first: &large_node,
486/// second: &small_node,
487/// flag: &flag
488/// }
489///
490/// // The handle points at index 75.
491/// let handle = Node::search(nasty, 75);
492///
493/// // Now the handle still points at index 75, but on the small node, which has no index 75.
494/// flag.set(true);
495///
496/// println!("Uninitialized memory: {:?}", handle.into_kv());
497/// }
498/// ```
499#[derive(Copy)]
500pub struct Handle<NodeRef, Type, NodeType> {
501 node: NodeRef,
502 index: uint
503}
504
505pub mod handle {
506 // Handle types.
507 pub enum KV {}
508 pub enum Edge {}
509
510 // Handle node types.
511 pub enum LeafOrInternal {}
512 pub enum Leaf {}
513 pub enum Internal {}
514}
515
516impl<K: Ord, V> Node<K, V> {
517 /// Searches for the given key in the node. If it finds an exact match,
518 /// `Found` will be yielded with the matching index. If it doesn't find an exact match,
519 /// `GoDown` will be yielded with the index of the subtree the key must lie in.
520 pub fn search<Q: ?Sized, NodeRef: Deref<Target=Node<K, V>>>(node: NodeRef, key: &Q)
521 -> SearchResult<NodeRef> where Q: BorrowFrom<K> + Ord {
522 // FIXME(Gankro): Tune when to search linear or binary based on B (and maybe K/V).
523 // For the B configured as of this writing (B = 6), binary search was *significantly*
524 // worse for uints.
525 let (found, index) = node.search_linear(key);
526 if found {
527 Found(Handle {
528 node: node,
529 index: index
530 })
531 } else {
532 GoDown(Handle {
533 node: node,
534 index: index
535 })
536 }
537 }
538
539 fn search_linear<Q: ?Sized>(&self, key: &Q) -> (bool, uint) where Q: BorrowFrom<K> + Ord {
540 for (i, k) in self.keys().iter().enumerate() {
541 match key.cmp(BorrowFrom::borrow_from(k)) {
542 Greater => {},
543 Equal => return (true, i),
544 Less => return (false, i),
545 }
546 }
547 (false, self.len())
548 }
549}
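// To make the search contract concrete, suppose a node's keys are [2, 4, 6]: searching for
// 4 yields `Found` with index 1; searching for 5 yields `GoDown` with index 2, since 5 can
// only live in the subtree hanging between keys 4 and 6; and searching for 7 yields
// `GoDown` with index `len()` = 3, pointing past the last key.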
550
551// Public interface
552impl <K, V> Node<K, V> {
553 /// Make a leaf root from scratch
554 pub fn make_leaf_root(b: uint) -> Node<K, V> {
555 Node::new_leaf(capacity_from_b(b))
556 }
557
558 /// Make an internal root and swap it with an old root
559 pub fn make_internal_root(left_and_out: &mut Node<K,V>, b: uint, key: K, value: V,
560 right: Node<K,V>) {
561 let node = mem::replace(left_and_out, unsafe { Node::new_internal(capacity_from_b(b)) });
562 left_and_out._len = 1;
563 unsafe {
564 ptr::write(left_and_out.keys_mut().get_unchecked_mut(0), key);
565 ptr::write(left_and_out.vals_mut().get_unchecked_mut(0), value);
566 ptr::write(left_and_out.edges_mut().get_unchecked_mut(0), node);
567 ptr::write(left_and_out.edges_mut().get_unchecked_mut(1), right);
568 }
569 }
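    // To spell out the result of the swap above: `left_and_out` ends up as a fresh internal
    // node containing exactly one key-value pair, whose edge 0 is the node that used to sit
    // in `left_and_out` (moved out via `mem::replace`) and whose edge 1 is `right`. Taking
    // the old root by `&mut` and replacing it in place lets the caller keep using the same
    // root slot rather than receiving a brand-new `Node`.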
570
571 /// How many key-value pairs the node contains
572 pub fn len(&self) -> uint {
573 self._len
574 }
575
576 /// How many key-value pairs the node can fit
577 pub fn capacity(&self) -> uint {
578 self._capacity
579 }
580
582 /// Whether the node has no children, i.e. is a leaf
582 pub fn is_leaf(&self) -> bool {
583 self.edges.0.is_null()
584 }
585
586 /// Whether the node has too few elements
587 pub fn is_underfull(&self) -> bool {
588 self.len() < min_load_from_capacity(self.capacity())
589 }
590
591 /// Whether the node cannot fit any more elements
592 pub fn is_full(&self) -> bool {
593 self.len() == self.capacity()
594 }
595}
596
597impl<K, V, NodeRef: Deref<Target=Node<K, V>>, Type, NodeType> Handle<NodeRef, Type, NodeType> {
598 /// Returns a reference to the node that contains the pointed-to edge or key/value pair. This
599 /// is very different from `edge` and `edge_mut` because those return children of the node
600 /// returned by `node`.
601 pub fn node(&self) -> &Node<K, V> {
602 &*self.node
603 }
604}
605
606impl<K, V, NodeRef, Type, NodeType> Handle<NodeRef, Type, NodeType> where
607 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
608{
609 /// Converts a handle into one that stores the same information using a raw pointer. This can
610 /// be useful in conjunction with `from_raw` when the type system is insufficient for
611 /// determining the lifetimes of the nodes.
612 pub fn as_raw(&mut self) -> Handle<*mut Node<K, V>, Type, NodeType> {
613 Handle {
614 node: &mut *self.node as *mut _,
615 index: self.index
616 }
617 }
618}
619
620impl<K, V, Type, NodeType> Handle<*mut Node<K, V>, Type, NodeType> {
621 /// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
622 /// stored with a reference. This is an unsafe inverse of `as_raw`, and together they allow
623 /// unsafely extending the lifetime of the reference to the `Node`.
624 pub unsafe fn from_raw<'a>(&'a self) -> Handle<&'a Node<K, V>, Type, NodeType> {
625 Handle {
626 node: &*self.node,
627 index: self.index
628 }
629 }
630
631 /// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
632 /// stored with a mutable reference. This is an unsafe inverse of `as_raw`, and together they
633 /// allow unsafely extending the lifetime of the reference to the `Node`.
634 pub unsafe fn from_raw_mut<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, Type, NodeType> {
635 Handle {
636 node: &mut *self.node,
637 index: self.index
638 }
639 }
640}
641
642impl<'a, K: 'a, V: 'a> Handle<&'a Node<K, V>, handle::Edge, handle::Internal> {
643 /// Turns the handle into a reference to the edge it points at. This is necessary because the
644 /// returned pointer has a larger lifetime than what would be returned by `edge` or `edge_mut`,
645 /// making it more suitable for moving down a chain of nodes.
646 pub fn into_edge(self) -> &'a Node<K, V> {
647 unsafe {
648 self.node.edges().get_unchecked(self.index)
649 }
650 }
651}
652
653impl<'a, K: 'a, V: 'a> Handle<&'a mut Node<K, V>, handle::Edge, handle::Internal> {
654 /// Turns the handle into a mutable reference to the edge it points at. This is necessary
655 /// because the returned pointer has a larger lifetime than what would be returned by
656 /// `edge_mut`, making it more suitable for moving down a chain of nodes.
657 pub fn into_edge_mut(self) -> &'a mut Node<K, V> {
658 unsafe {
659 self.node.edges_mut().get_unchecked_mut(self.index)
660 }
661 }
662}
663
664impl<K, V, NodeRef: Deref<Target=Node<K, V>>> Handle<NodeRef, handle::Edge, handle::Internal> {
665 // This doesn't exist because there are no uses for it,
666 // but would be fine to add, analogous to edge_mut.
667 //
668 // /// Returns a reference to the edge pointed-to by this handle. This should not be
669 // /// confused with `node`, which references the parent node of what is returned here.
670 // pub fn edge(&self) -> &Node<K, V>
671}
672
673pub enum ForceResult<NodeRef, Type> {
674 Leaf(Handle<NodeRef, Type, handle::Leaf>),
675 Internal(Handle<NodeRef, Type, handle::Internal>)
676}
677
678impl<K, V, NodeRef: Deref<Target=Node<K, V>>, Type> Handle<NodeRef, Type, handle::LeafOrInternal> {
679 /// Figure out whether this handle is pointing to something in a leaf node or to something in
680 /// an internal node, clarifying the type according to the result.
681 pub fn force(self) -> ForceResult<NodeRef, Type> {
682 if self.node.is_leaf() {
683 Leaf(Handle {
684 node: self.node,
685 index: self.index
686 })
687 } else {
688 Internal(Handle {
689 node: self.node,
690 index: self.index
691 })
692 }
693 }
694}
695impl<K, V, NodeRef> Handle<NodeRef, handle::Edge, handle::Leaf> where
696 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
697{
698 /// Tries to insert this key-value pair at the given index in this leaf node.
699 /// If the node is full, we have to split it.
700 ///
701 /// Returns a *mut V to the inserted value, because the caller may want this when
702 /// they're done mutating the tree, but we don't want to borrow anything for now.
703 pub fn insert_as_leaf(mut self, key: K, value: V) ->
704 (InsertionResult<K, V>, *mut V) {
705 if !self.node.is_full() {
706 // The element can fit, just insert it
707 (Fit, unsafe { self.node.insert_kv(self.index, key, value) as *mut _ })
708 } else {
709 // The element can't fit, this node is full. Split it into two nodes.
710 let (new_key, new_val, mut new_right) = self.node.split();
711 let left_len = self.node.len();
712
713 let ptr = unsafe {
714 if self.index <= left_len {
715 self.node.insert_kv(self.index, key, value)
716 } else {
717 // We need to subtract 1 because in splitting we took out new_key and new_val.
718 // Just being in the right node means we are past left_len k/v pairs in the
719 // left node and 1 k/v pair in the parent node.
720 new_right.insert_kv(self.index - left_len - 1, key, value)
721 }
722 } as *mut _;
723
724 (Split(new_key, new_val, new_right), ptr)
725 }
726 }
727}
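// A worked example of the index arithmetic above, using the capacity of 11 that a b = 6
// tree gets: splitting a full leaf leaves 5 pairs in the left node, sends 1 separating pair
// up to the parent, and puts the remaining 5 pairs in the new right node. An edge handle
// that pointed at index 9 of the original node therefore lands in the right node at index
// 9 - 5 - 1 = 3, since 5 pairs stayed on the left and 1 moved to the parent.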
728
729impl<K, V, NodeRef> Handle<NodeRef, handle::Edge, handle::Internal> where
730 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
731{
732 /// Returns a mutable reference to the edge pointed-to by this handle. This should not be
733 /// confused with `node`, which references the parent node of what is returned here.
734 pub fn edge_mut(&mut self) -> &mut Node<K, V> {
735 unsafe {
736 self.node.edges_mut().get_unchecked_mut(self.index)
737 }
738 }
739
740 /// Tries to insert this key-value pair at the given index in this internal node.
741 /// If the node is full, we have to split it.
742 pub fn insert_as_internal(mut self, key: K, value: V, right: Node<K, V>)
743 -> InsertionResult<K, V> {
744 if !self.node.is_full() {
745 // The element can fit, just insert it
746 unsafe {
747 self.node.insert_kv(self.index, key, value);
748 self.node.insert_edge(self.index + 1, right); // +1 to insert to the right
749 }
750 Fit
751 } else {
752 // The element can't fit, this node is full. Split it into two nodes.
753 let (new_key, new_val, mut new_right) = self.node.split();
754 let left_len = self.node.len();
755
756 if self.index <= left_len {
757 unsafe {
758 self.node.insert_kv(self.index, key, value);
759 self.node.insert_edge(self.index + 1, right); // +1 to insert to the right
760 }
761 } else {
762 unsafe {
763 // The -1 here is for the same reason as in insert_as_leaf - because we
764 // split, there are actually left_len + 1 k/v pairs before the right node, with
765 // the extra 1 being put in the parent.
766 new_right.insert_kv(self.index - left_len - 1, key, value);
767 new_right.insert_edge(self.index - left_len, right);
768 }
769 }
770
771 Split(new_key, new_val, new_right)
772 }
773 }
774
775 /// Handle an underflow in this node's child. We favour handling "to the left" because we know
776 /// we're underfull, but our neighbour can be full. Handling to the left means when we choose to
777 /// steal, we pop off the end of our neighbour (always fast) and "unshift" ourselves
778 /// (always slow, but at least faster since we know we're half-empty).
779 /// Handling "to the right" reverses these roles. Of course, we merge whenever possible
780 /// because we want dense nodes, and merging is about equal work regardless of direction.
781 pub fn handle_underflow(mut self) {
782 unsafe {
783 if self.index > 0 {
784 self.handle_underflow_to_left();
785 } else {
786 self.handle_underflow_to_right();
787 }
788 }
789 }
790
791 /// Right is underflowed. Tries to steal from left,
792 /// but merges left and right if left is low too.
793 unsafe fn handle_underflow_to_left(&mut self) {
794 let left_len = self.node.edges()[self.index - 1].len();
795 if left_len > min_load_from_capacity(self.node.capacity()) {
796 self.left_kv().steal_rightward();
797 } else {
798 self.left_kv().merge_children();
799 }
800 }
801
802 /// Left is underflowed. Tries to steal from the right,
803 /// but merges left and right if right is low too.
804 unsafe fn handle_underflow_to_right(&mut self) {
805 let right_len = self.node.edges()[self.index + 1].len();
806 if right_len > min_load_from_capacity(self.node.capacity()) {
807 self.right_kv().steal_leftward();
808 } else {
809 self.right_kv().merge_children();
810 }
811 }
812}
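// Putting numbers to the steal-or-merge decision above, again with capacity 11 and thus a
// minimum load of 5: if the underfull child's neighbour still holds more than 5 pairs, a
// single pair (plus, for internal children, one edge) is rotated through the parent's
// separating slot. If the neighbour is itself down to 5 pairs, the two children and the
// separating pair are merged into one node of at most 5 + 1 + 4 = 10 pairs, which always
// fits within the capacity of 11.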
813
814impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::Edge, NodeType> where
815 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
816{
817 /// Gets the handle pointing to the key/value pair just to the left of the pointed-to edge.
818 /// This is unsafe because the handle might point to the first edge in the node, which has no
819 /// pair to its left.
820 unsafe fn left_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
821 Handle {
822 node: &mut *self.node,
823 index: self.index - 1
824 }
825 }
826
827 /// Gets the handle pointing to the key/value pair just to the right of the pointed-to edge.
828 /// This is unsafe because the handle might point to the last edge in the node, which has no
829 /// pair to its right.
830 unsafe fn right_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
831 Handle {
832 node: &mut *self.node,
833 index: self.index
834 }
835 }
836}
837
838impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a Node<K, V>, handle::KV, NodeType> {
839 /// Turns the handle into references to the key and value it points at. This is necessary
840 /// because the returned pointers have larger lifetimes than what would be returned by `key`
841 /// or `val`.
842 pub fn into_kv(self) -> (&'a K, &'a V) {
843 let (keys, vals) = self.node.as_slices();
844 unsafe {
845 (
846 keys.get_unchecked(self.index),
847 vals.get_unchecked(self.index)
848 )
849 }
850 }
851}
852
853impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
854 /// Turns the handle into mutable references to the key and value it points at. This is
855 /// necessary because the returned pointers have larger lifetimes than what would be returned
856 /// by `key_mut` or `val_mut`.
857 pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
858 let (keys, vals) = self.node.as_slices_mut();
859 unsafe {
860 (
861 keys.get_unchecked_mut(self.index),
862 vals.get_unchecked_mut(self.index)
863 )
864 }
865 }
866
867 /// Convert this handle into one pointing at the edge immediately to the left of the key/value
868 /// pair pointed-to by this handle. This is useful because it returns a reference with larger
869 /// lifetime than `left_edge`.
870 pub fn into_left_edge(self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
871 Handle {
872 node: &mut *self.node,
873 index: self.index
874 }
875 }
876}
877
878impl<'a, K: 'a, V: 'a, NodeRef: Deref<Target=Node<K, V>> + 'a, NodeType> Handle<NodeRef, handle::KV,
879 NodeType> {
880 // These are fine to include, but are currently unneeded.
881 //
882 // /// Returns a reference to the key pointed-to by this handle. This doesn't return a
883 // /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
884 // /// handle.
885 // pub fn key(&'a self) -> &'a K {
886 // unsafe { self.node.keys().get_unchecked(self.index) }
887 // }
888 //
889 // /// Returns a reference to the value pointed-to by this handle. This doesn't return a
890 // /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
891 // /// handle.
892 // pub fn val(&'a self) -> &'a V {
893 // unsafe { self.node.vals().get_unchecked(self.index) }
894 // }
895}
896
897impl<'a, K: 'a, V: 'a, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType> where
898 NodeRef: 'a + Deref<Target=Node<K, V>> + DerefMut,
899{
900 /// Returns a mutable reference to the key pointed-to by this handle. This doesn't return a
901 /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
902 /// handle.
903 pub fn key_mut(&'a mut self) -> &'a mut K {
904 unsafe { self.node.keys_mut().get_unchecked_mut(self.index) }
905 }
906
907 /// Returns a mutable reference to the value pointed-to by this handle. This doesn't return a
908 /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
909 /// handle.
910 pub fn val_mut(&'a mut self) -> &'a mut V {
911 unsafe { self.node.vals_mut().get_unchecked_mut(self.index) }
912 }
913}
914
915impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType> where
916 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
917{
918 /// Gets the handle pointing to the edge immediately to the left of the key/value pair pointed
919 /// to by this handle.
920 pub fn left_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
921 Handle {
922 node: &mut *self.node,
923 index: self.index
924 }
925 }
926
927 /// Gets the handle pointing to the edge immediately to the right of the key/value pair pointed
928 /// to by this handle.
929 pub fn right_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
930 Handle {
931 node: &mut *self.node,
932 index: self.index + 1
933 }
934 }
935}
936
937impl<K, V, NodeRef> Handle<NodeRef, handle::KV, handle::Leaf> where
938 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
939{
940 /// Removes the key/value pair at the handle's location.
941 ///
942 /// # Panics (in debug build)
943 ///
944 /// Panics if the node containing the pair is not a leaf node.
945 pub fn remove_as_leaf(mut self) -> (K, V) {
946 unsafe { self.node.remove_kv(self.index) }
947 }
948}
949
950impl<K, V, NodeRef> Handle<NodeRef, handle::KV, handle::Internal> where
951 NodeRef: Deref<Target=Node<K, V>> + DerefMut
952{
953 /// Steal! Stealing is roughly analogous to a binary tree rotation.
954 /// In this case, we're "rotating" right.
955 unsafe fn steal_rightward(&mut self) {
956 // Take the biggest stuff off left
957 let (mut key, mut val, edge) = {
958 let mut left_handle = self.left_edge();
959 let left = left_handle.edge_mut();
960 let (key, val) = left.pop_kv();
961 let edge = if left.is_leaf() {
962 None
963 } else {
964 Some(left.pop_edge())
965 };
966
967 (key, val, edge)
968 };
969
970 // Swap the parent's separating key-value pair with left's
971 mem::swap(&mut key, self.key_mut());
972 mem::swap(&mut val, self.val_mut());
973
974 // Put them at the start of right
975 let mut right_handle = self.right_edge();
976 let right = right_handle.edge_mut();
977 right.insert_kv(0, key, val);
978 match edge {
979 Some(edge) => right.insert_edge(0, edge),
980 None => {}
981 }
982 }
983
984 /// Steal! Stealing is roughly analogous to a binary tree rotation.
985 /// In this case, we're "rotating" left.
986 unsafe fn steal_leftward(&mut self) {
987 // Take the smallest stuff off right
988 let (mut key, mut val, edge) = {
989 let mut right_handle = self.right_edge();
990 let right = right_handle.edge_mut();
991 let (key, val) = right.remove_kv(0);
992 let edge = if right.is_leaf() {
993 None
994 } else {
995 Some(right.remove_edge(0))
996 };
997
998 (key, val, edge)
999 };
1000
1001 // Swap the parent's separating key-value pair with right's
1002 mem::swap(&mut key, self.key_mut());
1003 mem::swap(&mut val, self.val_mut());
1004
1005 // Put them at the end of left
1006 let mut left_handle = self.left_edge();
1007 let left = left_handle.edge_mut();
1008 left.push_kv(key, val);
1009 match edge {
1010 Some(edge) => left.push_edge(edge),
1011 None => {}
1012 }
1013 }
1014
1015 /// Merge! Smooshes left and right into one node, along with the key-value
1016 /// pair that separated them in their parent.
1017 unsafe fn merge_children(mut self) {
1018 // Permanently remove right's index, and the key-value pair that separates
1019 // left and right
1020 let (key, val) = self.node.remove_kv(self.index);
1021 let right = self.node.remove_edge(self.index + 1);
1022
1023 // Give left right's stuff.
1024 self.left_edge().edge_mut()
1025 .absorb(key, val, right);
1026 }
1027}
1028
1029impl<K, V> Node<K, V> {
1030 /// Returns the mutable handle pointing to the key/value pair at a given index.
1031 ///
1032 /// # Panics (in debug build)
1033 ///
1034 /// Panics if the given index is out of bounds.
1035 pub fn kv_handle(&mut self, index: uint) -> Handle<&mut Node<K, V>, handle::KV,
1036 handle::LeafOrInternal> {
1037 // Necessary for correctness, but in a private module
1038 debug_assert!(index < self.len(), "kv_handle index out of bounds");
1039 Handle {
1040 node: self,
1041 index: index
1042 }
1043 }
1044
1045 pub fn iter<'a>(&'a self) -> Traversal<'a, K, V> {
1046 let is_leaf = self.is_leaf();
1047 let (keys, vals, edges) = self.as_slices_internal();
1048 Traversal {
1049 inner: ElemsAndEdges(
1050 keys.iter().zip(vals.iter()),
1051 edges.iter()
1052 ),
1053 head_is_edge: true,
1054 tail_is_edge: true,
1055 has_edges: !is_leaf,
1056 }
1057 }
1058
1059 pub fn iter_mut<'a>(&'a mut self) -> MutTraversal<'a, K, V> {
1060 let is_leaf = self.is_leaf();
1061 let (keys, vals, edges) = self.as_slices_internal_mut();
1062 MutTraversal {
1063 inner: ElemsAndEdges(
1064 keys.iter().zip(vals.iter_mut()),
1065 edges.iter_mut()
1066 ),
1067 head_is_edge: true,
1068 tail_is_edge: true,
1069 has_edges: !is_leaf,
1070 }
1071 }
1072
1073 pub fn into_iter(self) -> MoveTraversal<K, V> {
1074 unsafe {
1075 let ret = MoveTraversal {
1076 inner: MoveTraversalImpl {
1077 keys: RawItems::from_slice(self.keys()),
1078 vals: RawItems::from_slice(self.vals()),
1079 edges: RawItems::from_slice(self.edges()),
1080
1081 ptr: self.keys.0 as *mut u8,
1082 capacity: self.capacity(),
1083 is_leaf: self.is_leaf()
1084 },
1085 head_is_edge: true,
1086 tail_is_edge: true,
1087 has_edges: !self.is_leaf(),
1088 };
1089 mem::forget(self);
1090 ret
1091 }
1092 }
1093
1094 /// When a node has no keys or values and only a single edge, extract that edge.
1095 pub fn hoist_lone_child(&mut self) {
1096 // Necessary for correctness, but in a private module
1097 debug_assert!(self.len() == 0);
1098 debug_assert!(!self.is_leaf());
1099
1100 unsafe {
1101 let ret = ptr::read(self.edges().get_unchecked(0));
1102 self.destroy();
1103 ptr::write(self, ret);
1104 }
1105 }
1106}
1107
1108// Vector functions (all unchecked)
1109impl<K, V> Node<K, V> {
1110 // This must be followed by push_edge on an internal node.
1111 #[inline]
1112 unsafe fn push_kv(&mut self, key: K, val: V) {
1113 let len = self.len();
1114
1115 ptr::write(self.keys_mut().get_unchecked_mut(len), key);
1116 ptr::write(self.vals_mut().get_unchecked_mut(len), val);
1117
1118 self._len += 1;
1119 }
1120
1121 // This can only be called immediately after a call to push_kv.
1122 #[inline]
1123 unsafe fn push_edge(&mut self, edge: Node<K, V>) {
1124 let len = self.len();
1125
1126 ptr::write(self.edges_mut().get_unchecked_mut(len), edge);
1127 }
1128
1129 // This must be followed by insert_edge on an internal node.
1130 #[inline]
1131 unsafe fn insert_kv(&mut self, index: uint, key: K, val: V) -> &mut V {
1132 ptr::copy_memory(
1133 self.keys_mut().as_mut_ptr().offset(index as int + 1),
1134 self.keys().as_ptr().offset(index as int),
1135 self.len() - index
1136 );
1137 ptr::copy_memory(
1138 self.vals_mut().as_mut_ptr().offset(index as int + 1),
1139 self.vals().as_ptr().offset(index as int),
1140 self.len() - index
1141 );
1142
1143 ptr::write(self.keys_mut().get_unchecked_mut(index), key);
1144 ptr::write(self.vals_mut().get_unchecked_mut(index), val);
1145
1146 self._len += 1;
1147
1148 self.vals_mut().get_unchecked_mut(index)
1149 }
1150
1151 // This can only be called immediately after a call to insert_kv.
1152 #[inline]
1153 unsafe fn insert_edge(&mut self, index: uint, edge: Node<K, V>) {
1154 ptr::copy_memory(
1155 self.edges_mut().as_mut_ptr().offset(index as int + 1),
1156 self.edges().as_ptr().offset(index as int),
1157 self.len() - index
1158 );
1159 ptr::write(self.edges_mut().get_unchecked_mut(index), edge);
1160 }
1161
1162 // This must be followed by pop_edge on an internal node.
1163 #[inline]
1164 unsafe fn pop_kv(&mut self) -> (K, V) {
1165 let key = ptr::read(self.keys().get_unchecked(self.len() - 1));
1166 let val = ptr::read(self.vals().get_unchecked(self.len() - 1));
1167
1168 self._len -= 1;
1169
1170 (key, val)
1171 }
1172
1173 // This can only be called immediately after a call to pop_kv.
1174 #[inline]
1175 unsafe fn pop_edge(&mut self) -> Node<K, V> {
1176 let edge = ptr::read(self.edges().get_unchecked(self.len() + 1));
1177
1178 edge
1179 }
1180
1181 // This must be followed by remove_edge on an internal node.
1182 #[inline]
1183 unsafe fn remove_kv(&mut self, index: uint) -> (K, V) {
1184 let key = ptr::read(self.keys().get_unchecked(index));
1185 let val = ptr::read(self.vals().get_unchecked(index));
1186
1187 ptr::copy_memory(
1188 self.keys_mut().as_mut_ptr().offset(index as int),
1189 self.keys().as_ptr().offset(index as int + 1),
1190 self.len() - index - 1
1191 );
1192 ptr::copy_memory(
1193 self.vals_mut().as_mut_ptr().offset(index as int),
1194 self.vals().as_ptr().offset(index as int + 1),
1195 self.len() - index - 1
1196 );
1197
1198 self._len -= 1;
1199
1200 (key, val)
1201 }
1202
1203 // This can only be called immediately after a call to remove_kv.
1204 #[inline]
1205 unsafe fn remove_edge(&mut self, index: uint) -> Node<K, V> {
1206 let edge = ptr::read(self.edges().get_unchecked(index));
1207
1208 ptr::copy_memory(
1209 self.edges_mut().as_mut_ptr().offset(index as int),
1210 self.edges().as_ptr().offset(index as int + 1),
1211 self.len() - index + 1
1212 );
1213
1214 edge
1215 }
1216}
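// For instance, `insert_kv(2, key, val)` on a node currently holding 5 pairs first shifts
// the keys and values at indices 2, 3 and 4 up by one slot (the two `ptr::copy_memory`
// calls each move `len - index = 3` elements), then writes the new pair into the freed
// slot 2 and bumps `_len` to 6. The `insert_edge(3, ...)` that must follow on an internal
// node shifts edges 3, 4 and 5 the same way; note that its `self.len() - index` count
// relies on `_len` already having been incremented by `insert_kv`.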
1217
1218// Private implementation details
1219impl<K, V> Node<K, V> {
1220 /// Node is full, so split it into two nodes, and yield the middle-most key-value pair
1221 /// because we have one too many, and our parent now has one too few
1222 fn split(&mut self) -> (K, V, Node<K, V>) {
1223 // Necessary for correctness, but in a private function
1224 debug_assert!(self.len() > 0);
1225
1226 let mut right = if self.is_leaf() {
1227 Node::new_leaf(self.capacity())
1228 } else {
1229 unsafe { Node::new_internal(self.capacity()) }
1230 };
1231
1232 unsafe {
1233 right._len = self.len() / 2;
1234 let right_offset = self.len() - right.len();
1235 ptr::copy_nonoverlapping_memory(
1236 right.keys_mut().as_mut_ptr(),
1237 self.keys().as_ptr().offset(right_offset as int),
1238 right.len()
1239 );
1240 ptr::copy_nonoverlapping_memory(
1241 right.vals_mut().as_mut_ptr(),
1242 self.vals().as_ptr().offset(right_offset as int),
1243 right.len()
1244 );
1245 if !self.is_leaf() {
1246 ptr::copy_nonoverlapping_memory(
1247 right.edges_mut().as_mut_ptr(),
1248 self.edges().as_ptr().offset(right_offset as int),
1249 right.len() + 1
1250 );
1251 }
1252
1253 let key = ptr::read(self.keys().get_unchecked(right_offset - 1));
1254 let val = ptr::read(self.vals().get_unchecked(right_offset - 1));
1255
1256 self._len = right_offset - 1;
1257
1258 (key, val, right)
1259 }
1260 }
1261
1262 /// Take all the values from right, separated by the given key and value
1263 fn absorb(&mut self, key: K, val: V, mut right: Node<K, V>) {
1264 // Necessary for correctness, but in a private function
1265 // Just as a sanity check, make sure we can fit this guy in
1266 debug_assert!(self.len() + right.len() <= self.capacity());
1267 debug_assert!(self.is_leaf() == right.is_leaf());
1268
1269 unsafe {
1270 let old_len = self.len();
1271 self._len += right.len() + 1;
1272
1273 ptr::write(self.keys_mut().get_unchecked_mut(old_len), key);
1274 ptr::write(self.vals_mut().get_unchecked_mut(old_len), val);
1275
1276 ptr::copy_nonoverlapping_memory(
1277 self.keys_mut().as_mut_ptr().offset(old_len as int + 1),
1278 right.keys().as_ptr(),
1279 right.len()
1280 );
1281 ptr::copy_nonoverlapping_memory(
1282 self.vals_mut().as_mut_ptr().offset(old_len as int + 1),
1283 right.vals().as_ptr(),
1284 right.len()
1285 );
1286 if !self.is_leaf() {
1287 ptr::copy_nonoverlapping_memory(
1288 self.edges_mut().as_mut_ptr().offset(old_len as int + 1),
1289 right.edges().as_ptr(),
1290 right.len() + 1
1291 );
1292 }
1293
1294 right.destroy();
1295 mem::forget(right);
1296 }
1297 }
1298}
1299
1300/// Get the capacity of a node from the order of the parent B-Tree
1301fn capacity_from_b(b: uint) -> uint {
1302 2 * b - 1
1303}
1304
1305/// Get the minimum load of a node from its capacity
1306fn min_load_from_capacity(cap: uint) -> uint {
1307 // B - 1
1308 cap / 2
1309}
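// A small sanity check of the two helpers above, using the B = 6 mentioned in the search
// code: such a tree uses nodes with a capacity of 2 * 6 - 1 = 11 pairs, which count as
// underfull once they drop below 11 / 2 = 5 pairs.
#[test]
fn test_capacity_and_min_load() {
    assert_eq!(capacity_from_b(6), 11);
    assert_eq!(min_load_from_capacity(capacity_from_b(6)), 5);
}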
1310
1311/// A trait for pairs of `Iterator`s, one over edges and the other over key/value pairs. This is
1312/// necessary, as the `MoveTraversalImpl` needs to have a destructor that deallocates the `Node`,
1313/// and a pair of `Iterator`s would require two independent destructors.
1314trait TraversalImpl<K, V, E> {
1315 fn next_kv(&mut self) -> Option<(K, V)>;
1316 fn next_kv_back(&mut self) -> Option<(K, V)>;
1317
1318 fn next_edge(&mut self) -> Option<E>;
1319 fn next_edge_back(&mut self) -> Option<E>;
1320}
1321
1322/// A `TraversalImpl` that actually is backed by two iterators. This works in the non-moving case,
1323/// as no deallocation needs to be done.
1324struct ElemsAndEdges<Elems, Edges>(Elems, Edges);
1325
1326impl<K, V, E, Elems: DoubleEndedIterator, Edges: DoubleEndedIterator>
1327 TraversalImpl<K, V, E> for ElemsAndEdges<Elems, Edges>
1328 where Elems : Iterator<Item=(K, V)>, Edges : Iterator<Item=E>
1329{
1330
1331 fn next_kv(&mut self) -> Option<(K, V)> { self.0.next() }
1332 fn next_kv_back(&mut self) -> Option<(K, V)> { self.0.next_back() }
1333
1334 fn next_edge(&mut self) -> Option<E> { self.1.next() }
1335 fn next_edge_back(&mut self) -> Option<E> { self.1.next_back() }
1336}
1337
1338/// A `TraversalImpl` taking a `Node` by value.
1339struct MoveTraversalImpl<K, V> {
1340 keys: RawItems<K>,
1341 vals: RawItems<V>,
1342 edges: RawItems<Node<K, V>>,
1343
1344 // For deallocation when we are done iterating.
1345 ptr: *mut u8,
1346 capacity: uint,
1347 is_leaf: bool
1348}
1349
1350impl<K, V> TraversalImpl<K, V, Node<K, V>> for MoveTraversalImpl<K, V> {
1351 fn next_kv(&mut self) -> Option<(K, V)> {
1352 match (self.keys.next(), self.vals.next()) {
1353 (Some(k), Some(v)) => Some((k, v)),
1354 _ => None
1355 }
1356 }
1357
1358 fn next_kv_back(&mut self) -> Option<(K, V)> {
1359 match (self.keys.next_back(), self.vals.next_back()) {
1360 (Some(k), Some(v)) => Some((k, v)),
1361 _ => None
1362 }
1363 }
1364
1365 fn next_edge(&mut self) -> Option<Node<K, V>> {
1366 // Necessary for correctness, but in a private module
1367 debug_assert!(!self.is_leaf);
1368 self.edges.next()
1369 }
1370
1371 fn next_edge_back(&mut self) -> Option<Node<K, V>> {
1372 // Necessary for correctness, but in a private module
1373 debug_assert!(!self.is_leaf);
1374 self.edges.next_back()
1375 }
1376}
1377
1378#[unsafe_destructor]
1379impl<K, V> Drop for MoveTraversalImpl<K, V> {
1380 fn drop(&mut self) {
1381 // We need to clean up the stored values manually, as the RawItems destructor would run
1382 // after our deallocation.
1383 for _ in self.keys {}
1384 for _ in self.vals {}
1385 for _ in self.edges {}
1386
1387 let (alignment, size) =
1388 calculate_allocation_generic::<K, V>(self.capacity, self.is_leaf);
1389 unsafe { heap::deallocate(self.ptr, size, alignment) };
1390 }
1391}
1392
1393/// An abstraction over all the different kinds of traversals a node supports
1394struct AbsTraversal<Impl> {
1395 inner: Impl,
1396 head_is_edge: bool,
1397 tail_is_edge: bool,
1398 has_edges: bool,
1399}
1400
1401/// A single atomic step in a traversal. Either an element is visited, or an edge is followed
1402pub enum TraversalItem<K, V, E> {
1403 Elem(K, V),
1404 Edge(E),
1405}
1406
1407/// A traversal over a node's entries and edges
1408pub type Traversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
1409 slice::Iter<'a, V>>,
1410 slice::Iter<'a, Node<K, V>>>>;
1411
1412/// A mutable traversal over a node's entries and edges
1413pub type MutTraversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
1414 slice::IterMut<'a, V>>,
1415 slice::IterMut<'a, Node<K, V>>>>;
1416
1417/// An owning traversal over a node's entries and edges
1418pub type MoveTraversal<K, V> = AbsTraversal<MoveTraversalImpl<K, V>>;
1419
1420#[old_impl_check]
1421impl<K, V, E, Impl: TraversalImpl<K, V, E>> Iterator for AbsTraversal<Impl> {
1422 type Item = TraversalItem<K, V, E>;
1423
1424 fn next(&mut self) -> Option<TraversalItem<K, V, E>> {
1425 let head_is_edge = self.head_is_edge;
1426 self.head_is_edge = !head_is_edge;
1427
1428 if head_is_edge && self.has_edges {
1429 self.inner.next_edge().map(|node| Edge(node))
1430 } else {
1431 self.inner.next_kv().map(|(k, v)| Elem(k, v))
1432 }
1433 }
1434}
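// To illustrate the alternation above: iterating over an internal node that holds pairs
// (k0, v0), (k1, v1) and edges e0, e1, e2 yields Edge(e0), Elem(k0, v0), Edge(e1),
// Elem(k1, v1), Edge(e2), because `head_is_edge` starts out true and flips on every call.
// For a leaf node `has_edges` is false, so only the key-value pairs are produced.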
1435
1436#[old_impl_check]
1437impl<K, V, E, Impl: TraversalImpl<K, V, E>> DoubleEndedIterator for AbsTraversal<Impl> {
1438 fn next_back(&mut self) -> Option<TraversalItem<K, V, E>> {
1439 let tail_is_edge = self.tail_is_edge;
1440 self.tail_is_edge = !tail_is_edge;
1441
1442 if tail_is_edge && self.has_edges {
1443 self.inner.next_edge_back().map(|node| Edge(node))
1444 } else {
1445 self.inner.next_kv_back().map(|(k, v)| Elem(k, v))
1446 }
1447 }
1448}