diff --git a/library/alloc/src/collections/btree/append.rs b/library/alloc/src/collections/btree/append.rs
index d137d2721ee4f..091376d5d685b 100644
--- a/library/alloc/src/collections/btree/append.rs
+++ b/library/alloc/src/collections/btree/append.rs
@@ -16,7 +16,7 @@ impl<K, V> Root<K, V> {
     /// a `BTreeMap`, both iterators should produce keys in strictly ascending
     /// order, each greater than all keys in the tree, including any keys
     /// already in the tree upon entry.
-    pub fn append_from_sorted_iters<I, A: Allocator + Clone>(
+    pub(super) fn append_from_sorted_iters<I, A: Allocator + Clone>(
         &mut self,
         left: I,
         right: I,
@@ -36,8 +36,12 @@ impl<K, V> Root<K, V> {
     /// Pushes all key-value pairs to the end of the tree, incrementing a
     /// `length` variable along the way. The latter makes it easier for the
     /// caller to avoid a leak when the iterator panics.
-    pub fn bulk_push<I, A: Allocator + Clone>(&mut self, iter: I, length: &mut usize, alloc: A)
-    where
+    pub(super) fn bulk_push<I, A: Allocator + Clone>(
+        &mut self,
+        iter: I,
+        length: &mut usize,
+        alloc: A,
+    ) where
         I: Iterator<Item = (K, V)>,
     {
         let mut cur_node = self.borrow_mut().last_leaf_edge().into_node();
diff --git a/library/alloc/src/collections/btree/borrow.rs b/library/alloc/src/collections/btree/borrow.rs
index 000b9bd0fab42..e848ac3f2d192 100644
--- a/library/alloc/src/collections/btree/borrow.rs
+++ b/library/alloc/src/collections/btree/borrow.rs
@@ -11,7 +11,7 @@ use core::ptr::NonNull;
 /// the compiler to follow. A `DormantMutRef` allows you to check borrowing
 /// yourself, while still expressing its stacked nature, and encapsulating
 /// the raw pointer code needed to do this without undefined behavior.
-pub struct DormantMutRef<'a, T> {
+pub(super) struct DormantMutRef<'a, T> {
     ptr: NonNull<T>,
     _marker: PhantomData<&'a mut T>,
 }
@@ -23,7 +23,7 @@ impl<'a, T> DormantMutRef<'a, T> {
     /// Capture a unique borrow, and immediately reborrow it. For the compiler,
     /// the lifetime of the new reference is the same as the lifetime of the
     /// original reference, but you promise to use it for a shorter period.
-    pub fn new(t: &'a mut T) -> (&'a mut T, Self) {
+    pub(super) fn new(t: &'a mut T) -> (&'a mut T, Self) {
         let ptr = NonNull::from(t);
         // SAFETY: we hold the borrow throughout 'a via `_marker`, and we expose
         // only this reference, so it is unique.
@@ -37,7 +37,7 @@ impl<'a, T> DormantMutRef<'a, T> {
     ///
     /// The reborrow must have ended, i.e., the reference returned by `new` and
     /// all pointers and references derived from it, must not be used anymore.
-    pub unsafe fn awaken(self) -> &'a mut T {
+    pub(super) unsafe fn awaken(self) -> &'a mut T {
         // SAFETY: our own safety conditions imply this reference is again unique.
         unsafe { &mut *self.ptr.as_ptr() }
     }
@@ -48,7 +48,7 @@ impl<'a, T> DormantMutRef<'a, T> {
     ///
     /// The reborrow must have ended, i.e., the reference returned by `new` and
     /// all pointers and references derived from it, must not be used anymore.
-    pub unsafe fn reborrow(&mut self) -> &'a mut T {
+    pub(super) unsafe fn reborrow(&mut self) -> &'a mut T {
         // SAFETY: our own safety conditions imply this reference is again unique.
         unsafe { &mut *self.ptr.as_ptr() }
     }
@@ -59,7 +59,7 @@ impl<'a, T> DormantMutRef<'a, T> {
     ///
     /// The reborrow must have ended, i.e., the reference returned by `new` and
     /// all pointers and references derived from it, must not be used anymore.
-    pub unsafe fn reborrow_shared(&self) -> &'a T {
+    pub(super) unsafe fn reborrow_shared(&self) -> &'a T {
         // SAFETY: our own safety conditions imply this reference is again unique.
         unsafe { &*self.ptr.as_ptr() }
     }
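The `DormantMutRef` pattern above is easiest to follow from a usage sketch. Below is a self-contained miniature of the same pattern, trimmed to `new` and `awaken` (an illustration, not part of the patch):

```rust
use core::marker::PhantomData;
use core::ptr::NonNull;

struct DormantMutRef<'a, T> {
    ptr: NonNull<T>,
    _marker: PhantomData<&'a mut T>,
}

impl<'a, T> DormantMutRef<'a, T> {
    /// Capture a unique borrow and immediately reborrow it.
    fn new(t: &'a mut T) -> (&'a mut T, Self) {
        let ptr = NonNull::from(t);
        // SAFETY: the borrow is held throughout 'a via `_marker`, and this is
        // the only reference handed out, so it is unique.
        let new_ref = unsafe { &mut *ptr.as_ptr() };
        (new_ref, DormantMutRef { ptr, _marker: PhantomData })
    }

    /// SAFETY: the reference returned by `new`, and everything derived from
    /// it, must no longer be used.
    unsafe fn awaken(self) -> &'a mut T {
        unsafe { &mut *self.ptr.as_ptr() }
    }
}

fn main() {
    let mut value = 1;
    let (short_lived, dormant) = DormantMutRef::new(&mut value);
    *short_lived += 1; // the reborrow and all its descendants end here
    // SAFETY: the reborrow above has ended.
    let original = unsafe { dormant.awaken() };
    *original += 1;
    assert_eq!(value, 3);
}
```

This is the shape the `BTreeMap` entry and search code relies on when it must hand out a shorter-lived reference into the tree and later reclaim the original unique borrow.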
diff --git a/library/alloc/src/collections/btree/dedup_sorted_iter.rs b/library/alloc/src/collections/btree/dedup_sorted_iter.rs
index cd6a88f329125..6bcf0bca519af 100644
--- a/library/alloc/src/collections/btree/dedup_sorted_iter.rs
+++ b/library/alloc/src/collections/btree/dedup_sorted_iter.rs
@@ -6,7 +6,7 @@ use core::iter::Peekable;
 /// Used by [`BTreeMap::bulk_build_from_sorted_iter`][1].
 ///
 /// [1]: crate::collections::BTreeMap::bulk_build_from_sorted_iter
-pub struct DedupSortedIter<K, V, I>
+pub(super) struct DedupSortedIter<K, V, I>
 where
     I: Iterator<Item = (K, V)>,
 {
@@ -17,7 +17,7 @@ impl<K, V, I> DedupSortedIter<K, V, I>
 where
     I: Iterator<Item = (K, V)>,
 {
-    pub fn new(iter: I) -> Self {
+    pub(super) fn new(iter: I) -> Self {
         Self { iter: iter.peekable() }
     }
 }
diff --git a/library/alloc/src/collections/btree/fix.rs b/library/alloc/src/collections/btree/fix.rs
index 09edea3555ad5..b0c6759794691 100644
--- a/library/alloc/src/collections/btree/fix.rs
+++ b/library/alloc/src/collections/btree/fix.rs
@@ -57,7 +57,10 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
     ///
     /// This method does not expect ancestors to already be underfull upon entry
     /// and panics if it encounters an empty ancestor.
-    pub fn fix_node_and_affected_ancestors<A: Allocator + Clone>(mut self, alloc: A) -> bool {
+    pub(super) fn fix_node_and_affected_ancestors<A: Allocator + Clone>(
+        mut self,
+        alloc: A,
+    ) -> bool {
         loop {
             match self.fix_node_through_parent(alloc.clone()) {
                 Ok(Some(parent)) => self = parent.forget_type(),
@@ -70,7 +73,7 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
 
 impl<K, V> Root<K, V> {
     /// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty.
-    pub fn fix_top<A: Allocator + Clone>(&mut self, alloc: A) {
+    pub(super) fn fix_top<A: Allocator + Clone>(&mut self, alloc: A) {
         while self.height() > 0 && self.len() == 0 {
             self.pop_internal_level(alloc.clone());
         }
@@ -79,7 +82,7 @@ impl<K, V> Root<K, V> {
     /// Stocks up or merges away any underfull nodes on the right border of the
     /// tree. The other nodes, those that are not the root nor a rightmost edge,
     /// must already have at least MIN_LEN elements.
-    pub fn fix_right_border<A: Allocator + Clone>(&mut self, alloc: A) {
+    pub(super) fn fix_right_border<A: Allocator + Clone>(&mut self, alloc: A) {
         self.fix_top(alloc.clone());
         if self.len() > 0 {
             self.borrow_mut().last_kv().fix_right_border_of_right_edge(alloc.clone());
@@ -88,7 +91,7 @@ impl<K, V> Root<K, V> {
     }
 
     /// The symmetric clone of `fix_right_border`.
-    pub fn fix_left_border<A: Allocator + Clone>(&mut self, alloc: A) {
+    pub(super) fn fix_left_border<A: Allocator + Clone>(&mut self, alloc: A) {
         self.fix_top(alloc.clone());
         if self.len() > 0 {
             self.borrow_mut().first_kv().fix_left_border_of_left_edge(alloc.clone());
@@ -99,7 +102,7 @@ impl<K, V> Root<K, V> {
     /// Stocks up any underfull nodes on the right border of the tree.
     /// The other nodes, those that are neither the root nor a rightmost edge,
     /// must be prepared to have up to MIN_LEN elements stolen.
-    pub fn fix_right_border_of_plentiful(&mut self) {
+    pub(super) fn fix_right_border_of_plentiful(&mut self) {
         let mut cur_node = self.borrow_mut();
         while let Internal(internal) = cur_node.force() {
             // Check if rightmost child is underfull.
diff --git a/library/alloc/src/collections/btree/mem.rs b/library/alloc/src/collections/btree/mem.rs
index d738c5c47b4cc..4643c4133d55d 100644
--- a/library/alloc/src/collections/btree/mem.rs
+++ b/library/alloc/src/collections/btree/mem.rs
@@ -6,7 +6,7 @@ use core::{intrinsics, mem, ptr};
 /// If a panic occurs in the `change` closure, the entire process will be aborted.
#[allow(dead_code)] // keep as illustration and for future use #[inline] -pub fn take_mut(v: &mut T, change: impl FnOnce(T) -> T) { +pub(super) fn take_mut(v: &mut T, change: impl FnOnce(T) -> T) { replace(v, |value| (change(value), ())) } @@ -15,7 +15,7 @@ pub fn take_mut(v: &mut T, change: impl FnOnce(T) -> T) { /// /// If a panic occurs in the `change` closure, the entire process will be aborted. #[inline] -pub fn replace(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R { +pub(super) fn replace(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R { struct PanicGuard; impl Drop for PanicGuard { fn drop(&mut self) { diff --git a/library/alloc/src/collections/btree/merge_iter.rs b/library/alloc/src/collections/btree/merge_iter.rs index 7f23d93b990f5..c5b93d30a1185 100644 --- a/library/alloc/src/collections/btree/merge_iter.rs +++ b/library/alloc/src/collections/btree/merge_iter.rs @@ -4,7 +4,7 @@ use core::iter::FusedIterator; /// Core of an iterator that merges the output of two strictly ascending iterators, /// for instance a union or a symmetric difference. -pub struct MergeIterInner { +pub(super) struct MergeIterInner { a: I, b: I, peeked: Option>, @@ -40,7 +40,7 @@ where impl MergeIterInner { /// Creates a new core for an iterator merging a pair of sources. - pub fn new(a: I, b: I) -> Self { + pub(super) fn new(a: I, b: I) -> Self { MergeIterInner { a, b, peeked: None } } @@ -51,7 +51,7 @@ impl MergeIterInner { /// the sources are not strictly ascending). If neither returned option /// contains a value, iteration has finished and subsequent calls will /// return the same empty pair. - pub fn nexts Ordering>( + pub(super) fn nexts Ordering>( &mut self, cmp: Cmp, ) -> (Option, Option) @@ -85,7 +85,7 @@ impl MergeIterInner { } /// Returns a pair of upper bounds for the `size_hint` of the final iterator. - pub fn lens(&self) -> (usize, usize) + pub(super) fn lens(&self) -> (usize, usize) where I: ExactSizeIterator, { diff --git a/library/alloc/src/collections/btree/mod.rs b/library/alloc/src/collections/btree/mod.rs index b8667d09c33b3..6651480667391 100644 --- a/library/alloc/src/collections/btree/mod.rs +++ b/library/alloc/src/collections/btree/mod.rs @@ -2,13 +2,13 @@ mod append; mod borrow; mod dedup_sorted_iter; mod fix; -pub mod map; +pub(super) mod map; mod mem; mod merge_iter; mod navigate; mod node; mod remove; mod search; -pub mod set; +pub(super) mod set; mod set_val; mod split; diff --git a/library/alloc/src/collections/btree/navigate.rs b/library/alloc/src/collections/btree/navigate.rs index 14b7d4ad71f86..b2a7de74875d9 100644 --- a/library/alloc/src/collections/btree/navigate.rs +++ b/library/alloc/src/collections/btree/navigate.rs @@ -7,7 +7,7 @@ use super::node::{Handle, NodeRef, marker}; use super::search::SearchBound; use crate::alloc::Allocator; // `front` and `back` are always both `None` or both `Some`. -pub struct LeafRange { +pub(super) struct LeafRange { front: Option, marker::Edge>>, back: Option, marker::Edge>>, } @@ -25,7 +25,7 @@ impl Default for LeafRange { } impl LeafRange { - pub fn none() -> Self { + pub(super) fn none() -> Self { LeafRange { front: None, back: None } } @@ -34,7 +34,7 @@ impl LeafRange { } /// Temporarily takes out another, immutable equivalent of the same range. 
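A note on the `replace` helper in `mem.rs` above: the hunk context stops inside `PanicGuard`, so here is a self-contained sketch of the abort-on-panic technique it uses, substituting `std::process::abort` for the internal `intrinsics::abort` (a reconstruction of the pattern, not the verbatim patch):

```rust
use core::{mem, ptr};

fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
    // Aborts the process if dropped, i.e. if `change` unwinds.
    struct PanicGuard;
    impl Drop for PanicGuard {
        fn drop(&mut self) {
            std::process::abort()
        }
    }
    let guard = PanicGuard;
    // `*v` is logically moved out while `change` runs; a panic here must not
    // let unwinding observe the moved-from value, hence the guard.
    let value = unsafe { ptr::read(v) };
    let (new_value, ret) = change(value);
    unsafe { ptr::write(v, new_value) };
    // Success: defuse the guard.
    mem::forget(guard);
    ret
}

fn main() {
    let mut s = String::from("a");
    let len = replace(&mut s, |old| {
        let new = old + "b";
        let n = new.len();
        (new, n)
    });
    assert_eq!((s.as_str(), len), ("ab", 2));
}
```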
- pub fn reborrow(&self) -> LeafRange, K, V> { + pub(super) fn reborrow(&self) -> LeafRange, K, V> { LeafRange { front: self.front.as_ref().map(|f| f.reborrow()), back: self.back.as_ref().map(|b| b.reborrow()), @@ -44,24 +44,24 @@ impl LeafRange { impl<'a, K, V> LeafRange, K, V> { #[inline] - pub fn next_checked(&mut self) -> Option<(&'a K, &'a V)> { + pub(super) fn next_checked(&mut self) -> Option<(&'a K, &'a V)> { self.perform_next_checked(|kv| kv.into_kv()) } #[inline] - pub fn next_back_checked(&mut self) -> Option<(&'a K, &'a V)> { + pub(super) fn next_back_checked(&mut self) -> Option<(&'a K, &'a V)> { self.perform_next_back_checked(|kv| kv.into_kv()) } } impl<'a, K, V> LeafRange, K, V> { #[inline] - pub fn next_checked(&mut self) -> Option<(&'a K, &'a mut V)> { + pub(super) fn next_checked(&mut self) -> Option<(&'a K, &'a mut V)> { self.perform_next_checked(|kv| unsafe { ptr::read(kv) }.into_kv_valmut()) } #[inline] - pub fn next_back_checked(&mut self) -> Option<(&'a K, &'a mut V)> { + pub(super) fn next_back_checked(&mut self) -> Option<(&'a K, &'a mut V)> { self.perform_next_back_checked(|kv| unsafe { ptr::read(kv) }.into_kv_valmut()) } } @@ -124,7 +124,7 @@ impl LazyLeafHandle { } // `front` and `back` are always both `None` or both `Some`. -pub struct LazyLeafRange { +pub(super) struct LazyLeafRange { front: Option>, back: Option>, } @@ -142,12 +142,12 @@ impl<'a, K: 'a, V: 'a> Clone for LazyLeafRange, K, V> { } impl LazyLeafRange { - pub fn none() -> Self { + pub(super) fn none() -> Self { LazyLeafRange { front: None, back: None } } /// Temporarily takes out another, immutable equivalent of the same range. - pub fn reborrow(&self) -> LazyLeafRange, K, V> { + pub(super) fn reborrow(&self) -> LazyLeafRange, K, V> { LazyLeafRange { front: self.front.as_ref().map(|f| f.reborrow()), back: self.back.as_ref().map(|b| b.reborrow()), @@ -157,24 +157,24 @@ impl LazyLeafRange { impl<'a, K, V> LazyLeafRange, K, V> { #[inline] - pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) { + pub(super) unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) { unsafe { self.init_front().unwrap().next_unchecked() } } #[inline] - pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) { + pub(super) unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) { unsafe { self.init_back().unwrap().next_back_unchecked() } } } impl<'a, K, V> LazyLeafRange, K, V> { #[inline] - pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) { + pub(super) unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) { unsafe { self.init_front().unwrap().next_unchecked() } } #[inline] - pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) { + pub(super) unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) { unsafe { self.init_back().unwrap().next_back_unchecked() } } } @@ -190,7 +190,7 @@ impl LazyLeafRange { } #[inline] - pub unsafe fn deallocating_next_unchecked( + pub(super) unsafe fn deallocating_next_unchecked( &mut self, alloc: A, ) -> Handle, marker::KV> { @@ -200,7 +200,7 @@ impl LazyLeafRange { } #[inline] - pub unsafe fn deallocating_next_back_unchecked( + pub(super) unsafe fn deallocating_next_back_unchecked( &mut self, alloc: A, ) -> Handle, marker::KV> { @@ -210,7 +210,7 @@ impl LazyLeafRange { } #[inline] - pub fn deallocating_end(&mut self, alloc: A) { + pub(super) fn deallocating_end(&mut self, alloc: A) { if let Some(front) = self.take_front() { front.deallocating_end(alloc) } @@ -313,7 +313,7 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> /// 
/// The result is meaningful only if the tree is ordered by key, like the tree /// in a `BTreeMap` is. - pub fn range_search(self, range: R) -> LeafRange, K, V> + pub(super) fn range_search(self, range: R) -> LeafRange, K, V> where Q: ?Sized + Ord, K: Borrow, @@ -324,7 +324,7 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> } /// Finds the pair of leaf edges delimiting an entire tree. - pub fn full_range(self) -> LazyLeafRange, K, V> { + pub(super) fn full_range(self) -> LazyLeafRange, K, V> { full_range(self, self) } } @@ -339,7 +339,7 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> /// /// # Safety /// Do not use the duplicate handles to visit the same KV twice. - pub fn range_search(self, range: R) -> LeafRange, K, V> + pub(super) fn range_search(self, range: R) -> LeafRange, K, V> where Q: ?Sized + Ord, K: Borrow, @@ -351,7 +351,7 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree. /// The results are non-unique references allowing mutation (of values only), so must be used /// with care. - pub fn full_range(self) -> LazyLeafRange, K, V> { + pub(super) fn full_range(self) -> LazyLeafRange, K, V> { // We duplicate the root NodeRef here -- we will never visit the same KV // twice, and never end up with overlapping value references. let self2 = unsafe { ptr::read(&self) }; @@ -363,7 +363,7 @@ impl NodeRef { /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree. /// The results are non-unique references allowing massively destructive mutation, so must be /// used with the utmost care. - pub fn full_range(self) -> LazyLeafRange { + pub(super) fn full_range(self) -> LazyLeafRange { // We duplicate the root NodeRef here -- we will never access it in a way // that overlaps references obtained from the root. let self2 = unsafe { ptr::read(&self) }; @@ -377,7 +377,7 @@ impl /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV /// on the right side, which is either in the same leaf node or in an ancestor node. /// If the leaf edge is the last one in the tree, returns [`Result::Err`] with the root node. - pub fn next_kv( + pub(super) fn next_kv( self, ) -> Result< Handle, marker::KV>, @@ -398,7 +398,7 @@ impl /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV /// on the left side, which is either in the same leaf node or in an ancestor node. /// If the leaf edge is the first one in the tree, returns [`Result::Err`] with the root node. - pub fn next_back_kv( + pub(super) fn next_back_kv( self, ) -> Result< Handle, marker::KV>, @@ -627,7 +627,9 @@ impl NodeRef Handle, marker::Edge> { + pub(super) fn first_leaf_edge( + self, + ) -> Handle, marker::Edge> { let mut node = self; loop { match node.force() { @@ -640,7 +642,9 @@ impl NodeRef Handle, marker::Edge> { + pub(super) fn last_leaf_edge( + self, + ) -> Handle, marker::Edge> { let mut node = self; loop { match node.force() { @@ -651,7 +655,7 @@ impl NodeRef { +pub(super) enum Position { Leaf(NodeRef), Internal(NodeRef), InternalKV, @@ -661,7 +665,7 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> /// Visits leaf nodes and internal KVs in order of ascending keys, and also /// visits internal nodes as a whole in a depth first order, meaning that /// internal nodes precede their individual KVs and their child nodes. 
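For orientation, `range_search`, `full_range`, and the leaf-edge walkers above are the machinery behind the public `BTreeMap::range` and iteration APIs. Their observable behavior, as a safe usage example (not part of the patch):

```rust
use std::collections::BTreeMap;
use std::ops::Bound;

fn main() {
    let map = BTreeMap::from([(1, "a"), (3, "b"), (5, "c")]);
    // `range_search` resolves the two bounds to a pair of leaf edges;
    // iteration then walks from the front edge to the back edge.
    let hits: Vec<_> = map.range((Bound::Excluded(1), Bound::Included(5))).collect();
    assert_eq!(hits, [(&3, &"b"), (&5, &"c")]);
}
```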
- pub fn visit_nodes_in_order(self, mut visit: F) + pub(super) fn visit_nodes_in_order(self, mut visit: F) where F: FnMut(Position, K, V>), { @@ -693,7 +697,7 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> } /// Calculates the number of elements in a (sub)tree. - pub fn calc_length(self) -> usize { + pub(super) fn calc_length(self) -> usize { let mut result = 0; self.visit_nodes_in_order(|pos| match pos { Position::Leaf(node) => result += node.len(), @@ -708,7 +712,9 @@ impl Handle, marker::KV> { /// Returns the leaf edge closest to a KV for forward navigation. - pub fn next_leaf_edge(self) -> Handle, marker::Edge> { + pub(super) fn next_leaf_edge( + self, + ) -> Handle, marker::Edge> { match self.force() { Leaf(leaf_kv) => leaf_kv.right_edge(), Internal(internal_kv) => { @@ -719,7 +725,7 @@ impl } /// Returns the leaf edge closest to a KV for backward navigation. - pub fn next_back_leaf_edge( + pub(super) fn next_back_leaf_edge( self, ) -> Handle, marker::Edge> { match self.force() { @@ -735,7 +741,7 @@ impl impl NodeRef { /// Returns the leaf edge corresponding to the first point at which the /// given bound is true. - pub fn lower_bound( + pub(super) fn lower_bound( self, mut bound: SearchBound<&Q>, ) -> Handle, marker::Edge> @@ -758,7 +764,7 @@ impl NodeRef( + pub(super) fn upper_bound( self, mut bound: SearchBound<&Q>, ) -> Handle, marker::Edge> diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs index 4057657632ba4..6815ac1c19305 100644 --- a/library/alloc/src/collections/btree/node.rs +++ b/library/alloc/src/collections/btree/node.rs @@ -40,8 +40,8 @@ use crate::alloc::{Allocator, Layout}; use crate::boxed::Box; const B: usize = 6; -pub const CAPACITY: usize = 2 * B - 1; -pub const MIN_LEN_AFTER_SPLIT: usize = B - 1; +pub(super) const CAPACITY: usize = 2 * B - 1; +pub(super) const MIN_LEN_AFTER_SPLIT: usize = B - 1; const KV_IDX_CENTER: usize = B - 1; const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1; const EDGE_IDX_RIGHT_OF_CENTER: usize = B; @@ -179,7 +179,7 @@ type BoxedNode = NonNull>; /// as the returned reference is used. /// The methods supporting insert bend this rule by returning a raw pointer, /// i.e., a reference without any lifetime. -pub struct NodeRef { +pub(super) struct NodeRef { /// The number of levels that the node and the level of leaves are apart, a /// constant of the node that cannot be entirely described by `Type`, and that /// the node itself does not store. We only need to store the height of the root @@ -195,7 +195,7 @@ pub struct NodeRef { /// The root node of an owned tree. /// /// Note that this does not have a destructor, and must be cleaned up manually. -pub type Root = NodeRef; +pub(super) type Root = NodeRef; impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef, K, V, Type> {} impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef, K, V, Type> { @@ -213,7 +213,7 @@ unsafe impl Send for NodeRef unsafe impl Send for NodeRef {} impl NodeRef { - pub fn new_leaf(alloc: A) -> Self { + pub(super) fn new_leaf(alloc: A) -> Self { Self::from_new_leaf(LeafNode::new(alloc)) } @@ -274,7 +274,7 @@ impl NodeRef { /// The number of edges is `len() + 1`. /// Note that, despite being safe, calling this function can have the side effect /// of invalidating mutable references that unsafe code has created. - pub fn len(&self) -> usize { + pub(super) fn len(&self) -> usize { // Crucially, we only access the `len` field here. 
If BorrowType is marker::ValMut, // there might be outstanding mutable references to values that we must not invalidate. unsafe { usize::from((*Self::as_leaf_ptr(self)).len) } @@ -285,12 +285,12 @@ impl NodeRef { /// root on top, the number says at which elevation the node appears. /// If you picture trees with leaves on top, the number says how high /// the tree extends above the node. - pub fn height(&self) -> usize { + pub(super) fn height(&self) -> usize { self.height } /// Temporarily takes out another, immutable reference to the same node. - pub fn reborrow(&self) -> NodeRef, K, V, Type> { + pub(super) fn reborrow(&self) -> NodeRef, K, V, Type> { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } @@ -315,7 +315,7 @@ impl NodeRef /// /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should /// both, upon success, do nothing. - pub fn ascend( + pub(super) fn ascend( self, ) -> Result, marker::Edge>, Self> { const { @@ -335,24 +335,24 @@ impl NodeRef .ok_or(self) } - pub fn first_edge(self) -> Handle { + pub(super) fn first_edge(self) -> Handle { unsafe { Handle::new_edge(self, 0) } } - pub fn last_edge(self) -> Handle { + pub(super) fn last_edge(self) -> Handle { let len = self.len(); unsafe { Handle::new_edge(self, len) } } /// Note that `self` must be nonempty. - pub fn first_kv(self) -> Handle { + pub(super) fn first_kv(self) -> Handle { let len = self.len(); assert!(len > 0); unsafe { Handle::new_kv(self, 0) } } /// Note that `self` must be nonempty. - pub fn last_kv(self) -> Handle { + pub(super) fn last_kv(self) -> Handle { let len = self.len(); assert!(len > 0); unsafe { Handle::new_kv(self, len - 1) } @@ -381,7 +381,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { } /// Borrows a view into the keys stored in the node. - pub fn keys(&self) -> &[K] { + pub(super) fn keys(&self) -> &[K] { let leaf = self.into_leaf(); unsafe { leaf.keys.get_unchecked(..usize::from(leaf.len)).assume_init_ref() } } @@ -391,7 +391,7 @@ impl NodeRef { /// Similar to `ascend`, gets a reference to a node's parent node, but also /// deallocates the current node in the process. This is unsafe because the /// current node will still be accessible despite being deallocated. - pub unsafe fn deallocate_and_ascend( + pub(super) unsafe fn deallocate_and_ascend( self, alloc: A, ) -> Option, marker::Edge>> { @@ -443,7 +443,7 @@ impl<'a, K, V, Type> NodeRef, K, V, Type> { /// Returns a dormant copy of this node with its lifetime erased which can /// be reawakened later. - pub fn dormant(&self) -> NodeRef { + pub(super) fn dormant(&self) -> NodeRef { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } } @@ -455,7 +455,7 @@ impl NodeRef { /// /// The reborrow must have ended, i.e., the reference returned by `new` and /// all pointers and references derived from it, must not be used anymore. - pub unsafe fn awaken<'a>(self) -> NodeRef, K, V, Type> { + pub(super) unsafe fn awaken<'a>(self) -> NodeRef, K, V, Type> { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } } @@ -536,7 +536,7 @@ impl<'a, K, V, Type> NodeRef, K, V, Type> { impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { /// Borrows exclusive access to the length of the node. - pub fn len_mut(&mut self) -> &mut u16 { + pub(super) fn len_mut(&mut self) -> &mut u16 { &mut self.as_leaf_mut().len } } @@ -578,14 +578,14 @@ impl NodeRef { impl NodeRef { /// Returns a new owned tree, with its own root node that is initially empty. 
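The size constants near the top of `node.rs` (a bit further up in this diff) fix the invariants that the `fix` and `split` code maintains; spelled out with `B = 6`:

```rust
// Worked arithmetic for the node-layout constants in `node.rs`.
const B: usize = 6;
const CAPACITY: usize = 2 * B - 1; // max key-value pairs per node
const MIN_LEN_AFTER_SPLIT: usize = B - 1; // pairs kept by each half of a split

fn main() {
    assert_eq!(CAPACITY, 11);
    assert_eq!(MIN_LEN_AFTER_SPLIT, 5);
    // A full node splits into 5 (left) + 1 (bubbled up to the parent) + 5 (right),
    // and a node holding `len` pairs always has `len + 1` edges.
    assert_eq!(MIN_LEN_AFTER_SPLIT + 1 + MIN_LEN_AFTER_SPLIT, CAPACITY);
}
```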
- pub fn new(alloc: A) -> Self { + pub(super) fn new(alloc: A) -> Self { NodeRef::new_leaf(alloc).forget_type() } /// Adds a new internal node with a single edge pointing to the previous root node, /// make that new node the root node, and return it. This increases the height by 1 /// and is the opposite of `pop_internal_level`. - pub fn push_internal_level( + pub(super) fn push_internal_level( &mut self, alloc: A, ) -> NodeRef, K, V, marker::Internal> { @@ -604,7 +604,7 @@ impl NodeRef { /// it will not invalidate other handles or references to the root node. /// /// Panics if there is no internal level, i.e., if the root node is a leaf. - pub fn pop_internal_level(&mut self, alloc: A) { + pub(super) fn pop_internal_level(&mut self, alloc: A) { assert!(self.height > 0); let top = self.node; @@ -628,18 +628,18 @@ impl NodeRef { /// Mutably borrows the owned root node. Unlike `reborrow_mut`, this is safe /// because the return value cannot be used to destroy the root, and there /// cannot be other references to the tree. - pub fn borrow_mut(&mut self) -> NodeRef, K, V, Type> { + pub(super) fn borrow_mut(&mut self) -> NodeRef, K, V, Type> { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } /// Slightly mutably borrows the owned root node. - pub fn borrow_valmut(&mut self) -> NodeRef, K, V, Type> { + pub(super) fn borrow_valmut(&mut self) -> NodeRef, K, V, Type> { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } /// Irreversibly transitions to a reference that permits traversal and offers /// destructive methods and little else. - pub fn into_dying(self) -> NodeRef { + pub(super) fn into_dying(self) -> NodeRef { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } } @@ -651,7 +651,7 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Leaf> { /// # Safety /// /// The returned handle has an unbound lifetime. - pub unsafe fn push_with_handle<'b>( + pub(super) unsafe fn push_with_handle<'b>( &mut self, key: K, val: V, @@ -672,7 +672,7 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Leaf> { /// Adds a key-value pair to the end of the node, and returns /// the mutable reference of the inserted value. - pub fn push(&mut self, key: K, val: V) -> *mut V { + pub(super) fn push(&mut self, key: K, val: V) -> *mut V { // SAFETY: The unbound handle is no longer accessible. unsafe { self.push_with_handle(key, val).into_val_mut() } } @@ -681,7 +681,7 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Leaf> { impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Internal> { /// Adds a key-value pair, and an edge to go to the right of that pair, /// to the end of the node. - pub fn push(&mut self, key: K, val: V, edge: Root) { + pub(super) fn push(&mut self, key: K, val: V, edge: Root) { assert!(edge.height == self.height - 1); let len = self.len_mut(); @@ -699,21 +699,21 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Internal> { impl NodeRef { /// Removes any static information asserting that this node is a `Leaf` node. - pub fn forget_type(self) -> NodeRef { + pub(super) fn forget_type(self) -> NodeRef { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } } impl NodeRef { /// Removes any static information asserting that this node is an `Internal` node. - pub fn forget_type(self) -> NodeRef { + pub(super) fn forget_type(self) -> NodeRef { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } } impl NodeRef { /// Checks whether a node is an `Internal` node or a `Leaf` node. 
- pub fn force( + pub(super) fn force( self, ) -> ForceResult< NodeRef, @@ -737,7 +737,9 @@ impl NodeRef { impl<'a, K, V> NodeRef, K, V, marker::LeafOrInternal> { /// Unsafely asserts to the compiler the static information that this node is a `Leaf`. - pub unsafe fn cast_to_leaf_unchecked(self) -> NodeRef, K, V, marker::Leaf> { + pub(super) unsafe fn cast_to_leaf_unchecked( + self, + ) -> NodeRef, K, V, marker::Leaf> { debug_assert!(self.height == 0); NodeRef { height: self.height, node: self.node, _marker: PhantomData } } @@ -757,7 +759,7 @@ impl<'a, K, V> NodeRef, K, V, marker::LeafOrInternal> { /// a child node, these represent the spaces where child pointers would go between the key-value /// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one /// to the left of the node, one between the two pairs, and one at the right of the node. -pub struct Handle { +pub(super) struct Handle { node: Node, idx: usize, _marker: PhantomData, @@ -774,12 +776,12 @@ impl Clone for Handle { impl Handle { /// Retrieves the node that contains the edge or key-value pair this handle points to. - pub fn into_node(self) -> Node { + pub(super) fn into_node(self) -> Node { self.node } /// Returns the position of this handle in the node. - pub fn idx(&self) -> usize { + pub(super) fn idx(&self) -> usize { self.idx } } @@ -787,17 +789,17 @@ impl Handle { impl Handle, marker::KV> { /// Creates a new handle to a key-value pair in `node`. /// Unsafe because the caller must ensure that `idx < node.len()`. - pub unsafe fn new_kv(node: NodeRef, idx: usize) -> Self { + pub(super) unsafe fn new_kv(node: NodeRef, idx: usize) -> Self { debug_assert!(idx < node.len()); Handle { node, idx, _marker: PhantomData } } - pub fn left_edge(self) -> Handle, marker::Edge> { + pub(super) fn left_edge(self) -> Handle, marker::Edge> { unsafe { Handle::new_edge(self.node, self.idx) } } - pub fn right_edge(self) -> Handle, marker::Edge> { + pub(super) fn right_edge(self) -> Handle, marker::Edge> { unsafe { Handle::new_edge(self.node, self.idx + 1) } } } @@ -815,7 +817,9 @@ impl Handle, HandleType> { /// Temporarily takes out another immutable handle on the same location. - pub fn reborrow(&self) -> Handle, K, V, NodeType>, HandleType> { + pub(super) fn reborrow( + &self, + ) -> Handle, K, V, NodeType>, HandleType> { // We can't use Handle::new_kv or Handle::new_edge because we don't know our type Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData } } @@ -827,7 +831,7 @@ impl<'a, K, V, NodeType, HandleType> Handle, K, V, NodeT /// dangerous. /// /// For details, see `NodeRef::reborrow_mut`. - pub unsafe fn reborrow_mut( + pub(super) unsafe fn reborrow_mut( &mut self, ) -> Handle, K, V, NodeType>, HandleType> { // We can't use Handle::new_kv or Handle::new_edge because we don't know our type @@ -837,7 +841,9 @@ impl<'a, K, V, NodeType, HandleType> Handle, K, V, NodeT /// Returns a dormant copy of this handle which can be reawakened later. /// /// See `DormantMutRef` for more details. 
- pub fn dormant(&self) -> Handle, HandleType> { + pub(super) fn dormant( + &self, + ) -> Handle, HandleType> { Handle { node: self.node.dormant(), idx: self.idx, _marker: PhantomData } } } @@ -849,7 +855,9 @@ impl Handle(self) -> Handle, K, V, NodeType>, HandleType> { + pub(super) unsafe fn awaken<'a>( + self, + ) -> Handle, K, V, NodeType>, HandleType> { Handle { node: unsafe { self.node.awaken() }, idx: self.idx, _marker: PhantomData } } } @@ -857,13 +865,15 @@ impl Handle Handle, marker::Edge> { /// Creates a new handle to an edge in `node`. /// Unsafe because the caller must ensure that `idx <= node.len()`. - pub unsafe fn new_edge(node: NodeRef, idx: usize) -> Self { + pub(super) unsafe fn new_edge(node: NodeRef, idx: usize) -> Self { debug_assert!(idx <= node.len()); Handle { node, idx, _marker: PhantomData } } - pub fn left_kv(self) -> Result, marker::KV>, Self> { + pub(super) fn left_kv( + self, + ) -> Result, marker::KV>, Self> { if self.idx > 0 { Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) }) } else { @@ -871,7 +881,9 @@ impl Handle, mar } } - pub fn right_kv(self) -> Result, marker::KV>, Self> { + pub(super) fn right_kv( + self, + ) -> Result, marker::KV>, Self> { if self.idx < self.node.len() { Ok(unsafe { Handle::new_kv(self.node, self.idx) }) } else { @@ -880,7 +892,7 @@ impl Handle, mar } } -pub enum LeftOrRight { +pub(super) enum LeftOrRight { Left(T), Right(T), } @@ -1034,7 +1046,7 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark /// If the returned result is some `SplitResult`, the `left` field will be the root node. /// The returned pointer points to the inserted value, which in the case of `SplitResult` /// is in the `left` or `right` tree. - pub fn insert_recursing( + pub(super) fn insert_recursing( self, key: K, value: V, @@ -1078,7 +1090,7 @@ impl /// /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should /// both, upon success, do nothing. 
- pub fn descend(self) -> NodeRef { + pub(super) fn descend(self) -> NodeRef { const { assert!(BorrowType::TRAVERSAL_PERMIT); } @@ -1097,7 +1109,7 @@ impl } impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { - pub fn into_kv(self) -> (&'a K, &'a V) { + pub(super) fn into_kv(self) -> (&'a K, &'a V) { debug_assert!(self.idx < self.node.len()); let leaf = self.node.into_leaf(); let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() }; @@ -1107,17 +1119,17 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeTyp } impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { - pub fn key_mut(&mut self) -> &mut K { + pub(super) fn key_mut(&mut self) -> &mut K { unsafe { self.node.key_area_mut(self.idx).assume_init_mut() } } - pub fn into_val_mut(self) -> &'a mut V { + pub(super) fn into_val_mut(self) -> &'a mut V { debug_assert!(self.idx < self.node.len()); let leaf = self.node.into_leaf_mut(); unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() } } - pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) { + pub(super) fn into_kv_mut(self) -> (&'a mut K, &'a mut V) { debug_assert!(self.idx < self.node.len()); let leaf = self.node.into_leaf_mut(); let k = unsafe { leaf.keys.get_unchecked_mut(self.idx).assume_init_mut() }; @@ -1127,13 +1139,13 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType> } impl<'a, K, V, NodeType> Handle, K, V, NodeType>, marker::KV> { - pub fn into_kv_valmut(self) -> (&'a K, &'a mut V) { + pub(super) fn into_kv_valmut(self) -> (&'a K, &'a mut V) { unsafe { self.node.into_key_val_mut_at(self.idx) } } } impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { - pub fn kv_mut(&mut self) -> (&mut K, &mut V) { + pub(super) fn kv_mut(&mut self) -> (&mut K, &mut V) { debug_assert!(self.idx < self.node.len()); // We cannot call separate key and value methods, because calling the second one // invalidates the reference returned by the first. @@ -1146,7 +1158,7 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType> } /// Replaces the key and value that the KV handle refers to. - pub fn replace_kv(&mut self, k: K, v: V) -> (K, V) { + pub(super) fn replace_kv(&mut self, k: K, v: V) -> (K, V) { let (key, val) = self.kv_mut(); (mem::replace(key, k), mem::replace(val, v)) } @@ -1156,7 +1168,7 @@ impl Handle, marker::KV> /// Extracts the key and value that the KV handle refers to. /// # Safety /// The node that the handle refers to must not yet have been deallocated. - pub unsafe fn into_key_val(mut self) -> (K, V) { + pub(super) unsafe fn into_key_val(mut self) -> (K, V) { debug_assert!(self.idx < self.node.len()); let leaf = self.node.as_leaf_dying(); unsafe { @@ -1170,7 +1182,7 @@ impl Handle, marker::KV> /// # Safety /// The node that the handle refers to must not yet have been deallocated. #[inline] - pub unsafe fn drop_key_val(mut self) { + pub(super) unsafe fn drop_key_val(mut self) { // Run the destructor of the value even if the destructor of the key panics. struct Dropper<'a, T>(&'a mut MaybeUninit); impl Drop for Dropper<'_, T> { @@ -1229,7 +1241,10 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark /// - The key and value pointed to by this handle are extracted. /// - All the key-value pairs to the right of this handle are put into a newly /// allocated node. 
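The comment inside `kv_mut` above ("calling the second one invalidates the reference returned by the first") is the usual split-borrow constraint. A safe analog of why the node hands out both references from a single method (illustration only, with a made-up `Pair` type):

```rust
struct Pair {
    key: String,
    val: u32,
}

#[allow(dead_code)]
impl Pair {
    fn key_mut(&mut self) -> &mut String {
        &mut self.key
    }
    fn val_mut(&mut self) -> &mut u32 {
        &mut self.val
    }
    // One method handing out both references at once side-steps the conflict.
    fn kv_mut(&mut self) -> (&mut String, &mut u32) {
        (&mut self.key, &mut self.val)
    }
}

fn main() {
    let mut p = Pair { key: "k".into(), val: 0 };
    // let k = p.key_mut();
    // let v = p.val_mut(); // ERROR: this second borrow would invalidate `k`
    let (k, v) = p.kv_mut();
    k.push('!');
    *v += 1;
    assert_eq!((p.key.as_str(), p.val), ("k!", 1));
}
```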
- pub fn split(mut self, alloc: A) -> SplitResult<'a, K, V, marker::Leaf> { + pub(super) fn split( + mut self, + alloc: A, + ) -> SplitResult<'a, K, V, marker::Leaf> { let mut new_node = LeafNode::new(alloc); let kv = self.split_leaf_data(&mut new_node); @@ -1240,7 +1255,7 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark /// Removes the key-value pair pointed to by this handle and returns it, along with the edge /// that the key-value pair collapsed into. - pub fn remove( + pub(super) fn remove( mut self, ) -> ((K, V), Handle, K, V, marker::Leaf>, marker::Edge>) { let old_len = self.node.len(); @@ -1261,7 +1276,7 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, /// - The key and value pointed to by this handle are extracted. /// - All the edges and key-value pairs to the right of this handle are put into /// a newly allocated node. - pub fn split( + pub(super) fn split( mut self, alloc: A, ) -> SplitResult<'a, K, V, marker::Internal> { @@ -1285,14 +1300,14 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, /// Represents a session for evaluating and performing a balancing operation /// around an internal key-value pair. -pub struct BalancingContext<'a, K, V> { +pub(super) struct BalancingContext<'a, K, V> { parent: Handle, K, V, marker::Internal>, marker::KV>, left_child: NodeRef, K, V, marker::LeafOrInternal>, right_child: NodeRef, K, V, marker::LeafOrInternal>, } impl<'a, K, V> Handle, K, V, marker::Internal>, marker::KV> { - pub fn consider_for_balancing(self) -> BalancingContext<'a, K, V> { + pub(super) fn consider_for_balancing(self) -> BalancingContext<'a, K, V> { let self1 = unsafe { ptr::read(&self) }; let self2 = unsafe { ptr::read(&self) }; BalancingContext { @@ -1318,7 +1333,7 @@ impl<'a, K, V> NodeRef, K, V, marker::LeafOrInternal> { /// typically faster, since we only need to shift the node's N elements to /// the right, instead of shifting at least N of the sibling's elements to /// the left. - pub fn choose_parent_kv(self) -> Result>, Self> { + pub(super) fn choose_parent_kv(self) -> Result>, Self> { match unsafe { ptr::read(&self) }.ascend() { Ok(parent_edge) => match parent_edge.left_kv() { Ok(left_parent_kv) => Ok(LeftOrRight::Left(BalancingContext { @@ -1341,25 +1356,25 @@ impl<'a, K, V> NodeRef, K, V, marker::LeafOrInternal> { } impl<'a, K, V> BalancingContext<'a, K, V> { - pub fn left_child_len(&self) -> usize { + pub(super) fn left_child_len(&self) -> usize { self.left_child.len() } - pub fn right_child_len(&self) -> usize { + pub(super) fn right_child_len(&self) -> usize { self.right_child.len() } - pub fn into_left_child(self) -> NodeRef, K, V, marker::LeafOrInternal> { + pub(super) fn into_left_child(self) -> NodeRef, K, V, marker::LeafOrInternal> { self.left_child } - pub fn into_right_child(self) -> NodeRef, K, V, marker::LeafOrInternal> { + pub(super) fn into_right_child(self) -> NodeRef, K, V, marker::LeafOrInternal> { self.right_child } /// Returns whether merging is possible, i.e., whether there is enough room /// in a node to combine the central KV with both adjacent child nodes. - pub fn can_merge(&self) -> bool { + pub(super) fn can_merge(&self) -> bool { self.left_child.len() + 1 + self.right_child.len() <= CAPACITY } } @@ -1433,7 +1448,7 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { /// the left child node and returns the shrunk parent node. /// /// Panics unless we `.can_merge()`. 
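The `can_merge` check above encodes the one piece of arithmetic behind merging: the separating key-value pair comes down from the parent, so the merged node needs room for `left + 1 + right`. In numbers:

```rust
// Mirrors `left_child.len() + 1 + right_child.len() <= CAPACITY` from the diff.
const CAPACITY: usize = 11;

fn can_merge(left_len: usize, right_len: usize) -> bool {
    left_len + 1 + right_len <= CAPACITY
}

fn main() {
    assert!(can_merge(5, 5)); // two minimal nodes always merge: 5 + 1 + 5 = 11
    assert!(!can_merge(6, 5)); // one element over capacity
}
```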
- pub fn merge_tracking_parent( + pub(super) fn merge_tracking_parent( self, alloc: A, ) -> NodeRef, K, V, marker::Internal> { @@ -1444,7 +1459,7 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { /// the left child node and returns that child node. /// /// Panics unless we `.can_merge()`. - pub fn merge_tracking_child( + pub(super) fn merge_tracking_child( self, alloc: A, ) -> NodeRef, K, V, marker::LeafOrInternal> { @@ -1456,7 +1471,7 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { /// where the tracked child edge ended up, /// /// Panics unless we `.can_merge()`. - pub fn merge_tracking_child_edge( + pub(super) fn merge_tracking_child_edge( self, track_edge_idx: LeftOrRight, alloc: A, @@ -1479,7 +1494,7 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { /// of the parent, while pushing the old parent key-value pair into the right child. /// Returns a handle to the edge in the right child corresponding to where the original /// edge specified by `track_right_edge_idx` ended up. - pub fn steal_left( + pub(super) fn steal_left( mut self, track_right_edge_idx: usize, ) -> Handle, K, V, marker::LeafOrInternal>, marker::Edge> { @@ -1491,7 +1506,7 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { /// of the parent, while pushing the old parent key-value pair onto the left child. /// Returns a handle to the edge in the left child specified by `track_left_edge_idx`, /// which didn't move. - pub fn steal_right( + pub(super) fn steal_right( mut self, track_left_edge_idx: usize, ) -> Handle, K, V, marker::LeafOrInternal>, marker::Edge> { @@ -1500,7 +1515,7 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { } /// This does stealing similar to `steal_left` but steals multiple elements at once. - pub fn bulk_steal_left(&mut self, count: usize) { + pub(super) fn bulk_steal_left(&mut self, count: usize) { assert!(count > 0); unsafe { let left_node = &mut self.left_child; @@ -1563,7 +1578,7 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { } /// The symmetric clone of `bulk_steal_left`. - pub fn bulk_steal_right(&mut self, count: usize) { + pub(super) fn bulk_steal_right(&mut self, count: usize) { assert!(count > 0); unsafe { let left_node = &mut self.left_child; @@ -1628,7 +1643,7 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { } impl Handle, marker::Edge> { - pub fn forget_node_type( + pub(super) fn forget_node_type( self, ) -> Handle, marker::Edge> { unsafe { Handle::new_edge(self.node.forget_type(), self.idx) } @@ -1636,7 +1651,7 @@ impl Handle, marker::E } impl Handle, marker::Edge> { - pub fn forget_node_type( + pub(super) fn forget_node_type( self, ) -> Handle, marker::Edge> { unsafe { Handle::new_edge(self.node.forget_type(), self.idx) } @@ -1644,7 +1659,7 @@ impl Handle, marke } impl Handle, marker::KV> { - pub fn forget_node_type( + pub(super) fn forget_node_type( self, ) -> Handle, marker::KV> { unsafe { Handle::new_kv(self.node.forget_type(), self.idx) } @@ -1653,7 +1668,7 @@ impl Handle, marker::K impl Handle, Type> { /// Checks whether the underlying node is an `Internal` node or a `Leaf` node. - pub fn force( + pub(super) fn force( self, ) -> ForceResult< Handle, Type>, @@ -1672,7 +1687,7 @@ impl Handle Handle, K, V, marker::LeafOrInternal>, Type> { /// Unsafely asserts to the compiler the static information that the handle's node is a `Leaf`. 
- pub unsafe fn cast_to_leaf_unchecked( + pub(super) unsafe fn cast_to_leaf_unchecked( self, ) -> Handle, K, V, marker::Leaf>, Type> { let node = unsafe { self.node.cast_to_leaf_unchecked() }; @@ -1683,7 +1698,7 @@ impl<'a, K, V, Type> Handle, K, V, marker::LeafOrInterna impl<'a, K, V> Handle, K, V, marker::LeafOrInternal>, marker::Edge> { /// Move the suffix after `self` from one node to another one. `right` must be empty. /// The first edge of `right` remains unchanged. - pub fn move_suffix( + pub(super) fn move_suffix( &mut self, right: &mut NodeRef, K, V, marker::LeafOrInternal>, ) { @@ -1726,13 +1741,13 @@ impl<'a, K, V> Handle, K, V, marker::LeafOrInternal>, ma } } -pub enum ForceResult { +pub(super) enum ForceResult { Leaf(Leaf), Internal(Internal), } /// Result of insertion, when a node needed to expand beyond its capacity. -pub struct SplitResult<'a, K, V, NodeType> { +pub(super) struct SplitResult<'a, K, V, NodeType> { // Altered node in existing tree with elements and edges that belong to the left of `kv`. pub left: NodeRef, K, V, NodeType>, // Some key and value that existed before and were split off, to be inserted elsewhere. @@ -1742,32 +1757,32 @@ pub struct SplitResult<'a, K, V, NodeType> { } impl<'a, K, V> SplitResult<'a, K, V, marker::Leaf> { - pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> { + pub(super) fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> { SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() } } } impl<'a, K, V> SplitResult<'a, K, V, marker::Internal> { - pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> { + pub(super) fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> { SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() } } } -pub mod marker { +pub(super) mod marker { use core::marker::PhantomData; - pub enum Leaf {} - pub enum Internal {} - pub enum LeafOrInternal {} + pub(crate) enum Leaf {} + pub(crate) enum Internal {} + pub(crate) enum LeafOrInternal {} - pub enum Owned {} - pub enum Dying {} - pub enum DormantMut {} - pub struct Immut<'a>(PhantomData<&'a ()>); - pub struct Mut<'a>(PhantomData<&'a mut ()>); - pub struct ValMut<'a>(PhantomData<&'a mut ()>); + pub(crate) enum Owned {} + pub(crate) enum Dying {} + pub(crate) enum DormantMut {} + pub(crate) struct Immut<'a>(PhantomData<&'a ()>); + pub(crate) struct Mut<'a>(PhantomData<&'a mut ()>); + pub(crate) struct ValMut<'a>(PhantomData<&'a mut ()>); - pub trait BorrowType { + pub(crate) trait BorrowType { /// If node references of this borrow type allow traversing to other /// nodes in the tree, this constant is set to `true`. It can be used /// for a compile-time assertion. @@ -1786,8 +1801,8 @@ pub mod marker { impl<'a> BorrowType for ValMut<'a> {} impl BorrowType for DormantMut {} - pub enum KV {} - pub enum Edge {} + pub(crate) enum KV {} + pub(crate) enum Edge {} } /// Inserts a value into a slice of initialized elements followed by one uninitialized element. diff --git a/library/alloc/src/collections/btree/node/tests.rs b/library/alloc/src/collections/btree/node/tests.rs index 4d2fa0f094171..ecd009f11c71a 100644 --- a/library/alloc/src/collections/btree/node/tests.rs +++ b/library/alloc/src/collections/btree/node/tests.rs @@ -6,7 +6,7 @@ use crate::string::String; impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { // Asserts that the back pointer in each reachable node points to its parent. 
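On `marker::BorrowType` and its `TRAVERSAL_PERMIT` constant above, which `ascend`/`descend` consume via `const { assert!(...) }` in this diff: a self-contained sketch of that compile-time assertion pattern, with stand-in types (not the real marker module):

```rust
trait BorrowType {
    // Whether references of this borrow type may traverse to other nodes.
    const TRAVERSAL_PERMIT: bool = true;
}

#[allow(dead_code)]
enum Owned {} // stays at the root: traversal is forbidden
impl BorrowType for Owned {
    const TRAVERSAL_PERMIT: bool = false;
}

enum Immut {}
impl BorrowType for Immut {}

fn descend<B: BorrowType>() {
    // Evaluated per instantiation: a `false` permit fails compilation.
    const { assert!(B::TRAVERSAL_PERMIT) };
    // ... follow an edge to a child node ...
}

fn main() {
    descend::<Immut>(); // fine
    // descend::<Owned>(); // would be rejected at compile time
}
```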
- pub fn assert_back_pointers(self) { + pub(crate) fn assert_back_pointers(self) { if let ForceResult::Internal(node) = self.force() { for idx in 0..=node.len() { let edge = unsafe { Handle::new_edge(node, idx) }; @@ -20,7 +20,7 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> // Renders a multi-line display of the keys in order and in tree hierarchy, // picturing the tree growing sideways from its root on the left to its // leaves on the right. - pub fn dump_keys(self) -> String + pub(crate) fn dump_keys(self) -> String where K: Debug, { diff --git a/library/alloc/src/collections/btree/remove.rs b/library/alloc/src/collections/btree/remove.rs index 56f2824b782bd..9d870b86f34a0 100644 --- a/library/alloc/src/collections/btree/remove.rs +++ b/library/alloc/src/collections/btree/remove.rs @@ -10,7 +10,7 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::LeafOrInter /// the leaf edge corresponding to that former pair. It's possible this empties /// a root node that is internal, which the caller should pop from the map /// holding the tree. The caller should also decrement the map's length. - pub fn remove_kv_tracking( + pub(super) fn remove_kv_tracking( self, handle_emptied_internal_root: F, alloc: A, diff --git a/library/alloc/src/collections/btree/search.rs b/library/alloc/src/collections/btree/search.rs index 22e015edac3d2..96e5bf108024b 100644 --- a/library/alloc/src/collections/btree/search.rs +++ b/library/alloc/src/collections/btree/search.rs @@ -8,7 +8,7 @@ use SearchResult::*; use super::node::ForceResult::*; use super::node::{Handle, NodeRef, marker}; -pub enum SearchBound { +pub(super) enum SearchBound { /// An inclusive bound to look for, just like `Bound::Included(T)`. Included(T), /// An exclusive bound to look for, just like `Bound::Excluded(T)`. @@ -20,7 +20,7 @@ pub enum SearchBound { } impl SearchBound { - pub fn from_range(range_bound: Bound) -> Self { + pub(super) fn from_range(range_bound: Bound) -> Self { match range_bound { Bound::Included(t) => Included(t), Bound::Excluded(t) => Excluded(t), @@ -29,12 +29,12 @@ impl SearchBound { } } -pub enum SearchResult { +pub(super) enum SearchResult { Found(Handle, marker::KV>), GoDown(Handle, marker::Edge>), } -pub enum IndexResult { +pub(super) enum IndexResult { KV(usize), Edge(usize), } @@ -46,7 +46,7 @@ impl NodeRef( + pub(super) fn search_tree( mut self, key: &Q, ) -> SearchResult @@ -80,7 +80,7 @@ impl NodeRef( + pub(super) fn search_tree_for_bifurcation<'r, Q: ?Sized, R>( mut self, range: &'r R, ) -> Result< @@ -156,7 +156,7 @@ impl NodeRef( + pub(super) fn find_lower_bound_edge<'r, Q>( self, bound: SearchBound<&'r Q>, ) -> (Handle, SearchBound<&'r Q>) @@ -170,7 +170,7 @@ impl NodeRef( + pub(super) fn find_upper_bound_edge<'r, Q>( self, bound: SearchBound<&'r Q>, ) -> (Handle, SearchBound<&'r Q>) @@ -192,7 +192,10 @@ impl NodeRef { /// /// The result is meaningful only if the tree is ordered by key, like the tree /// in a `BTreeMap` is. - pub fn search_node(self, key: &Q) -> SearchResult + pub(super) fn search_node( + self, + key: &Q, + ) -> SearchResult where Q: Ord, K: Borrow, diff --git a/library/alloc/src/collections/btree/split.rs b/library/alloc/src/collections/btree/split.rs index c188ed1da6113..87a79e6cf3f93 100644 --- a/library/alloc/src/collections/btree/split.rs +++ b/library/alloc/src/collections/btree/split.rs @@ -8,7 +8,7 @@ use super::search::SearchResult::*; impl Root { /// Calculates the length of both trees that result from splitting up /// a given number of distinct key-value pairs. 
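The `SearchBound` logic in `search.rs` above has a handy slice analogy: within one node's sorted keys, `find_lower_bound_edge` and `find_upper_bound_edge` compute what `partition_point` computes. An illustration (analogy only, not the internal API):

```rust
fn main() {
    let keys = [1, 3, 5, 7];
    // Included(3): the first edge at which "key >= 3" becomes true.
    assert_eq!(keys.partition_point(|&k| k < 3), 1);
    // Excluded(3): the first edge at which "key > 3" becomes true.
    assert_eq!(keys.partition_point(|&k| k <= 3), 2);
    // AllIncluded selects edge 0; AllExcluded selects edge `keys.len()`.
}
```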
- pub fn calc_split_length( + pub(super) fn calc_split_length( total_num: usize, root_a: &Root, root_b: &Root, @@ -31,7 +31,11 @@ impl Root { /// and if the ordering of `Q` corresponds to that of `K`. /// If `self` respects all `BTreeMap` tree invariants, then both /// `self` and the returned tree will respect those invariants. - pub fn split_off(&mut self, key: &Q, alloc: A) -> Self + pub(super) fn split_off( + &mut self, + key: &Q, + alloc: A, + ) -> Self where K: Borrow, { diff --git a/library/alloc/src/collections/linked_list/tests.rs b/library/alloc/src/collections/linked_list/tests.rs index b7d4f8512a0f2..aa19239f6c55d 100644 --- a/library/alloc/src/collections/linked_list/tests.rs +++ b/library/alloc/src/collections/linked_list/tests.rs @@ -58,7 +58,7 @@ fn list_from(v: &[T]) -> LinkedList { v.iter().cloned().collect() } -pub fn check_links(list: &LinkedList) { +fn check_links(list: &LinkedList) { unsafe { let mut len = 0; let mut last_ptr: Option<&Node> = None; diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index b4f08debc932a..8af137f644f1c 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -88,6 +88,7 @@ #![allow(rustdoc::redundant_explicit_links)] #![warn(rustdoc::unescaped_backticks)] #![deny(ffi_unwind_calls)] +#![warn(unreachable_pub)] // // Library features: // tidy-alphabetical-start @@ -225,7 +226,7 @@ pub mod alloc; pub mod boxed; #[cfg(test)] mod boxed { - pub use std::boxed::Box; + pub(crate) use std::boxed::Box; } pub mod borrow; pub mod collections; diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index ad86bf4bf072f..b80d1fc788947 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -97,7 +97,7 @@ impl RawVec { /// `RawVec` with capacity `usize::MAX`. Useful for implementing /// delayed allocation. #[must_use] - pub const fn new() -> Self { + pub(crate) const fn new() -> Self { Self::new_in(Global) } @@ -120,7 +120,7 @@ impl RawVec { #[must_use] #[inline] #[track_caller] - pub fn with_capacity(capacity: usize) -> Self { + pub(crate) fn with_capacity(capacity: usize) -> Self { Self { inner: RawVecInner::with_capacity(capacity, T::LAYOUT), _marker: PhantomData } } @@ -129,7 +129,7 @@ impl RawVec { #[must_use] #[inline] #[track_caller] - pub fn with_capacity_zeroed(capacity: usize) -> Self { + pub(crate) fn with_capacity_zeroed(capacity: usize) -> Self { Self { inner: RawVecInner::with_capacity_zeroed_in(capacity, Global, T::LAYOUT), _marker: PhantomData, @@ -172,7 +172,7 @@ impl RawVec { /// Like `new`, but parameterized over the choice of allocator for /// the returned `RawVec`. #[inline] - pub const fn new_in(alloc: A) -> Self { + pub(crate) const fn new_in(alloc: A) -> Self { Self { inner: RawVecInner::new_in(alloc, align_of::()), _marker: PhantomData } } @@ -181,7 +181,7 @@ impl RawVec { #[cfg(not(no_global_oom_handling))] #[inline] #[track_caller] - pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { + pub(crate) fn with_capacity_in(capacity: usize, alloc: A) -> Self { Self { inner: RawVecInner::with_capacity_in(capacity, alloc, T::LAYOUT), _marker: PhantomData, @@ -191,7 +191,7 @@ impl RawVec { /// Like `try_with_capacity`, but parameterized over the choice of /// allocator for the returned `RawVec`. 
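The `#![warn(unreachable_pub)]` attribute added to `lib.rs` above is what drives every visibility change in this diff. What the lint catches, in a minimal crate:

```rust
#![warn(unreachable_pub)]

mod inner {
    // warns: `pub` here is unreachable from outside the crate
    pub fn helper() {}
    // states the effective visibility explicitly: no warning
    pub(crate) fn helper2() {}
}

fn main() {
    inner::helper();
    inner::helper2();
}
```

Hence `pub` becomes `pub(super)` in the private btree modules and `pub(crate)` in the crate-wide test-support modules.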
#[inline] - pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result { + pub(crate) fn try_with_capacity_in(capacity: usize, alloc: A) -> Result { match RawVecInner::try_with_capacity_in(capacity, alloc, T::LAYOUT) { Ok(inner) => Ok(Self { inner, _marker: PhantomData }), Err(e) => Err(e), @@ -203,7 +203,7 @@ impl RawVec { #[cfg(not(no_global_oom_handling))] #[inline] #[track_caller] - pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self { + pub(crate) fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self { Self { inner: RawVecInner::with_capacity_zeroed_in(capacity, alloc, T::LAYOUT), _marker: PhantomData, @@ -222,7 +222,7 @@ impl RawVec { /// /// Note, that the requested capacity and `self.capacity()` could differ, as /// an allocator could overallocate and return a greater memory block than requested. - pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit], A> { + pub(crate) unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit], A> { // Sanity-check one half of the safety requirement (we cannot check the other half). debug_assert!( len <= self.capacity(), @@ -247,7 +247,7 @@ impl RawVec { /// If the `ptr` and `capacity` come from a `RawVec` created via `alloc`, then this is /// guaranteed. #[inline] - pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self { + pub(crate) unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self { // SAFETY: Precondition passed to the caller unsafe { let ptr = ptr.cast(); @@ -265,7 +265,7 @@ impl RawVec { /// /// See [`RawVec::from_raw_parts_in`]. #[inline] - pub unsafe fn from_nonnull_in(ptr: NonNull, capacity: usize, alloc: A) -> Self { + pub(crate) unsafe fn from_nonnull_in(ptr: NonNull, capacity: usize, alloc: A) -> Self { // SAFETY: Precondition passed to the caller unsafe { let ptr = ptr.cast(); @@ -278,12 +278,12 @@ impl RawVec { /// `Unique::dangling()` if `capacity == 0` or `T` is zero-sized. In the former case, you must /// be careful. #[inline] - pub const fn ptr(&self) -> *mut T { + pub(crate) const fn ptr(&self) -> *mut T { self.inner.ptr() } #[inline] - pub fn non_null(&self) -> NonNull { + pub(crate) fn non_null(&self) -> NonNull { self.inner.non_null() } @@ -291,13 +291,13 @@ impl RawVec { /// /// This will always be `usize::MAX` if `T` is zero-sized. #[inline] - pub const fn capacity(&self) -> usize { + pub(crate) const fn capacity(&self) -> usize { self.inner.capacity(size_of::()) } /// Returns a shared reference to the allocator backing this `RawVec`. #[inline] - pub fn allocator(&self) -> &A { + pub(crate) fn allocator(&self) -> &A { self.inner.allocator() } @@ -323,7 +323,7 @@ impl RawVec { #[cfg(not(no_global_oom_handling))] #[inline] #[track_caller] - pub fn reserve(&mut self, len: usize, additional: usize) { + pub(crate) fn reserve(&mut self, len: usize, additional: usize) { self.inner.reserve(len, additional, T::LAYOUT) } @@ -332,12 +332,16 @@ impl RawVec { #[cfg(not(no_global_oom_handling))] #[inline(never)] #[track_caller] - pub fn grow_one(&mut self) { + pub(crate) fn grow_one(&mut self) { self.inner.grow_one(T::LAYOUT) } /// The same as `reserve`, but returns on errors instead of panicking or aborting. - pub fn try_reserve(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> { + pub(crate) fn try_reserve( + &mut self, + len: usize, + additional: usize, + ) -> Result<(), TryReserveError> { self.inner.try_reserve(len, additional, T::LAYOUT) } @@ -360,12 +364,12 @@ impl RawVec { /// Aborts on OOM. 
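The `RawVec` doc comments above (capacity `usize::MAX` for zero-sized `T`, speculative over-allocation in `reserve`, errors instead of aborts in `try_reserve`) are all observable through the public `Vec` API:

```rust
fn main() {
    // `RawVec::new` performs no allocation.
    let mut v: Vec<u32> = Vec::new();
    assert_eq!(v.capacity(), 0);

    // Zero-sized element types never allocate and report usize::MAX capacity.
    let z: Vec<()> = Vec::new();
    assert_eq!(z.capacity(), usize::MAX);

    // `reserve` may over-allocate; `try_reserve` surfaces failure as a Result.
    v.reserve(10);
    assert!(v.capacity() >= 10);
    assert!(v.try_reserve(usize::MAX).is_err());
}
```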
#[cfg(not(no_global_oom_handling))] #[track_caller] - pub fn reserve_exact(&mut self, len: usize, additional: usize) { + pub(crate) fn reserve_exact(&mut self, len: usize, additional: usize) { self.inner.reserve_exact(len, additional, T::LAYOUT) } /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting. - pub fn try_reserve_exact( + pub(crate) fn try_reserve_exact( &mut self, len: usize, additional: usize, @@ -386,7 +390,7 @@ impl RawVec { #[cfg(not(no_global_oom_handling))] #[track_caller] #[inline] - pub fn shrink_to_fit(&mut self, cap: usize) { + pub(crate) fn shrink_to_fit(&mut self, cap: usize) { self.inner.shrink_to_fit(cap, T::LAYOUT) } } diff --git a/library/alloc/src/slice.rs b/library/alloc/src/slice.rs index edc8d99f2f990..1cedead7aa243 100644 --- a/library/alloc/src/slice.rs +++ b/library/alloc/src/slice.rs @@ -85,6 +85,7 @@ use crate::vec::Vec; // functions are actually methods that are in `impl [T]` but not in // `core::slice::SliceExt` - we need to supply these functions for the // `test_permutations` test +#[allow(unreachable_pub)] // cfg(test) pub above pub(crate) mod hack { use core::alloc::Allocator; diff --git a/library/alloc/src/testing/crash_test.rs b/library/alloc/src/testing/crash_test.rs index 684bac60d9a86..8e00e4f41e5d6 100644 --- a/library/alloc/src/testing/crash_test.rs +++ b/library/alloc/src/testing/crash_test.rs @@ -11,7 +11,7 @@ use crate::fmt::Debug; // the `Debug` trait is the only thing we use from `crate /// Crash test dummies are identified and ordered by an id, so they can be used /// as keys in a BTreeMap. #[derive(Debug)] -pub struct CrashTestDummy { +pub(crate) struct CrashTestDummy { pub id: usize, cloned: AtomicUsize, dropped: AtomicUsize, @@ -20,7 +20,7 @@ pub struct CrashTestDummy { impl CrashTestDummy { /// Creates a crash test dummy design. The `id` determines order and equality of instances. - pub fn new(id: usize) -> CrashTestDummy { + pub(crate) fn new(id: usize) -> CrashTestDummy { CrashTestDummy { id, cloned: AtomicUsize::new(0), @@ -31,34 +31,34 @@ impl CrashTestDummy { /// Creates an instance of a crash test dummy that records what events it experiences /// and optionally panics. - pub fn spawn(&self, panic: Panic) -> Instance<'_> { + pub(crate) fn spawn(&self, panic: Panic) -> Instance<'_> { Instance { origin: self, panic } } /// Returns how many times instances of the dummy have been cloned. - pub fn cloned(&self) -> usize { + pub(crate) fn cloned(&self) -> usize { self.cloned.load(SeqCst) } /// Returns how many times instances of the dummy have been dropped. - pub fn dropped(&self) -> usize { + pub(crate) fn dropped(&self) -> usize { self.dropped.load(SeqCst) } /// Returns how many times instances of the dummy have had their `query` member invoked. - pub fn queried(&self) -> usize { + pub(crate) fn queried(&self) -> usize { self.queried.load(SeqCst) } } #[derive(Debug)] -pub struct Instance<'a> { +pub(crate) struct Instance<'a> { origin: &'a CrashTestDummy, panic: Panic, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum Panic { +pub(crate) enum Panic { Never, InClone, InDrop, @@ -66,12 +66,12 @@ pub enum Panic { } impl Instance<'_> { - pub fn id(&self) -> usize { + pub(crate) fn id(&self) -> usize { self.origin.id } /// Some anonymous query, the result of which is already given. 
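On `CrashTestDummy` above: the helper exists so the btree tests can inject panics at chosen moments and count events afterwards. A self-contained miniature of that testing pattern (a hypothetical `Dummy` type, not the helper itself):

```rust
use std::cell::Cell;
use std::panic::{AssertUnwindSafe, catch_unwind};

thread_local! {
    static DROPPED: Cell<usize> = Cell::new(0);
}

struct Dummy {
    panic_in_drop: bool,
}

impl Drop for Dummy {
    fn drop(&mut self) {
        // Record the event, then optionally panic, like `Panic::InDrop`.
        DROPPED.with(|d| d.set(d.get() + 1));
        if self.panic_in_drop {
            panic!("panic in `drop`");
        }
    }
}

fn main() {
    let result = catch_unwind(AssertUnwindSafe(|| {
        let _d = Dummy { panic_in_drop: true };
    }));
    assert!(result.is_err()); // the drop panicked...
    DROPPED.with(|d| assert_eq!(d.get(), 1)); // ...and was counted exactly once
}
```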
- pub fn query(&self, result: R) -> R { + pub(crate) fn query(&self, result: R) -> R { self.origin.queried.fetch_add(1, SeqCst); if self.panic == Panic::InQuery { panic!("panic in `query`"); diff --git a/library/alloc/src/testing/mod.rs b/library/alloc/src/testing/mod.rs index 7a094f8a59522..c8457daf93e5a 100644 --- a/library/alloc/src/testing/mod.rs +++ b/library/alloc/src/testing/mod.rs @@ -1,3 +1,3 @@ -pub mod crash_test; -pub mod ord_chaos; -pub mod rng; +pub(crate) mod crash_test; +pub(crate) mod ord_chaos; +pub(crate) mod rng; diff --git a/library/alloc/src/testing/ord_chaos.rs b/library/alloc/src/testing/ord_chaos.rs index 96ce7c1579046..55e1ae5e3deaa 100644 --- a/library/alloc/src/testing/ord_chaos.rs +++ b/library/alloc/src/testing/ord_chaos.rs @@ -4,7 +4,7 @@ use std::ptr; // Minimal type with an `Ord` implementation violating transitivity. #[derive(Debug)] -pub enum Cyclic3 { +pub(crate) enum Cyclic3 { A, B, C, @@ -37,16 +37,16 @@ impl Eq for Cyclic3 {} // Controls the ordering of values wrapped by `Governed`. #[derive(Debug)] -pub struct Governor { +pub(crate) struct Governor { flipped: Cell, } impl Governor { - pub fn new() -> Self { + pub(crate) fn new() -> Self { Governor { flipped: Cell::new(false) } } - pub fn flip(&self) { + pub(crate) fn flip(&self) { self.flipped.set(!self.flipped.get()); } } @@ -55,7 +55,7 @@ impl Governor { // (assuming that `T` respects total order), but can suddenly be made to invert // that total order. #[derive(Debug)] -pub struct Governed<'a, T>(pub T, pub &'a Governor); +pub(crate) struct Governed<'a, T>(pub T, pub &'a Governor); impl PartialOrd for Governed<'_, T> { fn partial_cmp(&self, other: &Self) -> Option { diff --git a/library/alloc/src/testing/rng.rs b/library/alloc/src/testing/rng.rs index ecf543bee035a..77d3348f38a5d 100644 --- a/library/alloc/src/testing/rng.rs +++ b/library/alloc/src/testing/rng.rs @@ -1,5 +1,5 @@ /// XorShiftRng -pub struct DeterministicRng { +pub(crate) struct DeterministicRng { count: usize, x: u32, y: u32, @@ -8,12 +8,12 @@ pub struct DeterministicRng { } impl DeterministicRng { - pub fn new() -> Self { + pub(crate) fn new() -> Self { DeterministicRng { count: 0, x: 0x193a6754, y: 0xa8a7d469, z: 0x97830e05, w: 0x113ba7bb } } /// Guarantees that each returned number is unique. - pub fn next(&mut self) -> u32 { + pub(crate) fn next(&mut self) -> u32 { self.count += 1; assert!(self.count <= 70029); let x = self.x;
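The diff is cut off inside `DeterministicRng::next` above. The seeds and the visible prefix are consistent with Marsaglia's xorshift128 step; below is a self-contained sketch under that assumption (the actual tail of the function is not shown in this patch, so treat the last few lines as a reconstruction):

```rust
/// XorShiftRng (xorshift128), mirroring the test helper above.
struct DeterministicRng {
    count: usize,
    x: u32,
    y: u32,
    z: u32,
    w: u32,
}

impl DeterministicRng {
    fn new() -> Self {
        DeterministicRng { count: 0, x: 0x193a6754, y: 0xa8a7d469, z: 0x97830e05, w: 0x113ba7bb }
    }

    /// The `count` assert enforces a pre-verified bound below which the
    /// returned numbers do not repeat.
    fn next(&mut self) -> u32 {
        self.count += 1;
        assert!(self.count <= 70029);
        let x = self.x;
        let t = x ^ (x << 11);
        self.x = self.y;
        self.y = self.z;
        self.z = self.w;
        let w = self.w;
        self.w = w ^ (w >> 19) ^ (t ^ (t >> 8));
        self.w
    }
}

fn main() {
    let mut rng = DeterministicRng::new();
    let a = rng.next();
    let b = rng.next();
    assert_ne!(a, b); // deterministic across runs, unique within the bound
}
```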