pub struct Forest<TT: TreeTypes, R>(/* private fields */);
Implementations
impl<T: TreeTypes, R: ReadOnlyStore<T::Link>> Forest<T, R>
impl<T: TreeTypes, R: ReadOnlyStore<T::Link>> Forest<T, R>
pub fn stream_trees<Q, S, V>(
&self,
query: Q,
trees: S
) -> impl Stream<Item = Result<(u64, T::Key, V)>> + Send
pub fn stream_trees<Q, S, V>( &self, query: Q, trees: S ) -> impl Stream<Item = Result<(u64, T::Key, V)>> + Send
Given a sequence of roots, will stream matching events in ascending order indefinitely.
This is implemented by calling stream_trees_chunked and just flattening the chunks.
pub fn stream_trees_chunked<S, Q, V, E, F>(
&self,
query: Q,
trees: S,
range: RangeInclusive<u64>,
mk_extra: &'static F
) -> impl Stream<Item = Result<FilteredChunk<(u64, T::Key, V), E>>> + Send + 'static
pub fn stream_trees_chunked<S, Q, V, E, F>( &self, query: Q, trees: S, range: RangeInclusive<u64>, mk_extra: &'static F ) -> impl Stream<Item = Result<FilteredChunk<(u64, T::Key, V), E>>> + Send + 'static
Given a sequence of roots, will stream chunks in ascending order until it arrives at range.end().
- query: the query
- trees: the stream of roots. It is assumed that trees later in this stream will be bigger
- range: the range which to stream. It is up to the caller to ensure that we have events for this range.
- mk_extra: a fn that allows computing extra info from indices. This can be useful to get progress info even if the query does not match any events
pub fn stream_trees_chunked_threaded<S, Q, V, E, F>(
&self,
query: Q,
trees: S,
range: RangeInclusive<u64>,
mk_extra: &'static F,
thread_pool: ThreadPool
) -> impl Stream<Item = Result<FilteredChunk<(u64, T::Key, V), E>>> + Send + 'static
pub fn stream_trees_chunked_threaded<S, Q, V, E, F>( &self, query: Q, trees: S, range: RangeInclusive<u64>, mk_extra: &'static F, thread_pool: ThreadPool ) -> impl Stream<Item = Result<FilteredChunk<(u64, T::Key, V), E>>> + Send + 'static
Given a sequence of roots, will stream chunks in ascending order indefinitely.
Note that this method has no way to know when the query is done. So ending this stream,
if desired, will have to be done by the caller using e.g. take_while(...).
- query: the query
- trees: the stream of roots. It is assumed that trees later in this stream will be bigger
- range: the range which to stream. It is up to the caller to ensure that we have events for this range.
- mk_extra: a fn that allows computing extra info from indices. This can be useful to get progress info even if the query does not match any events
pub fn stream_trees_chunked_reverse<S, Q, V, E, F>(
&self,
query: Q,
trees: S,
range: RangeInclusive<u64>,
mk_extra: &'static F
) -> impl Stream<Item = Result<FilteredChunk<(u64, T::Key, V), E>>> + Send + 'static
pub fn stream_trees_chunked_reverse<S, Q, V, E, F>( &self, query: Q, trees: S, range: RangeInclusive<u64>, mk_extra: &'static F ) -> impl Stream<Item = Result<FilteredChunk<(u64, T::Key, V), E>>> + Send + 'static
Given a sequence of roots, will stream chunks in reverse order until it arrives at range.start().
Values within chunks are in ascending offset order, so if you flatten them you have to reverse them first.
- query: the query
- trees: the stream of roots. It is assumed that trees later in this stream will be bigger
- range: the range which to stream. It is up to the caller to ensure that we have events for this range.
- mk_extra: a fn that allows computing extra info from indices. This can be useful to get progress info even if the query does not match any events
impl<TT: TreeTypes, R: Clone> Forest<TT, R>
impl<TT: TreeTypes, R: Clone> Forest<TT, R>
pub fn new(store: R, branch_cache: BranchCache<TT>) -> Self
pub fn transaction<W: BlockWriter<TT::Link>>( &self, f: impl FnOnce(R) -> (R, W) ) -> Transaction<TT, R, W>
impl<T: TreeTypes, R: ReadOnlyStore<T::Link>> Forest<T, R>
impl<T: TreeTypes, R: ReadOnlyStore<T::Link>> Forest<T, R>
pub fn load_stream_builder<V>( &self, secrets: Secrets, config: Config, link: T::Link ) -> Result<StreamBuilder<T, V>>
pub fn load_tree<V>( &self, secrets: Secrets, link: T::Link ) -> Result<Tree<T, V>>
pub fn dump_graph<S, V>(
&self,
tree: &Tree<T, V>,
f: impl Fn((usize, &NodeInfo<T, R>)) -> S + Clone
) -> Result<(Vec<(usize, usize)>, BTreeMap<usize, S>)>
pub fn dump_graph<S, V>( &self, tree: &Tree<T, V>, f: impl Fn((usize, &NodeInfo<T, R>)) -> S + Clone ) -> Result<(Vec<(usize, usize)>, BTreeMap<usize, S>)>
dumps the tree structure
pub fn roots<V>(&self, tree: &StreamBuilder<T, V>) -> Result<Vec<Index<T>>>
pub fn roots<V>(&self, tree: &StreamBuilder<T, V>) -> Result<Vec<Index<T>>>
sealed roots of the tree
pub fn left_roots<V>(&self, tree: &Tree<T, V>) -> Result<Vec<Tree<T, V>>>
pub fn left_roots<V>(&self, tree: &Tree<T, V>) -> Result<Vec<Tree<T, V>>>
leftmost branches of the tree as separate trees
pub fn check_invariants<V>( &self, tree: &StreamBuilder<T, V> ) -> Result<Vec<String>>
pub fn is_packed<V>(&self, tree: &Tree<T, V>) -> Result<bool>
pub fn assert_invariants<V>(&self, tree: &StreamBuilder<T, V>) -> Result<()>
pub fn stream_filtered<V: BanyanValue>( &self, tree: &Tree<T, V>, query: impl Query<T> + Clone + 'static ) -> impl Stream<Item = Result<(u64, T::Key, V)>> + 'static
pub fn iter_index<V>(
&self,
tree: &Tree<T, V>,
query: impl Query<T> + Clone + 'static
) -> impl Iterator<Item = Result<Index<T>>> + 'static
pub fn iter_index<V>( &self, tree: &Tree<T, V>, query: impl Query<T> + Clone + 'static ) -> impl Iterator<Item = Result<Index<T>>> + 'static
Returns an iterator yielding all indexes that have values matching the provided query.
pub fn iter_index_reverse<V>(
&self,
tree: &Tree<T, V>,
query: impl Query<T> + Clone + 'static
) -> impl Iterator<Item = Result<Index<T>>> + 'static
pub fn iter_index_reverse<V>( &self, tree: &Tree<T, V>, query: impl Query<T> + Clone + 'static ) -> impl Iterator<Item = Result<Index<T>>> + 'static
Returns an iterator yielding all indexes that have values matching the provided query in reverse order.
pub fn iter_filtered<V: BanyanValue>( &self, tree: &Tree<T, V>, query: impl Query<T> + Clone + 'static ) -> impl Iterator<Item = Result<(u64, T::Key, V)>> + 'static
pub fn iter_filtered_reverse<V: BanyanValue>( &self, tree: &Tree<T, V>, query: impl Query<T> + Clone + 'static ) -> impl Iterator<Item = Result<(u64, T::Key, V)>> + 'static
pub fn iter_from<V: BanyanValue>( &self, tree: &Tree<T, V> ) -> impl Iterator<Item = Result<(u64, T::Key, V)>> + 'static
pub fn iter_filtered_chunked<Q, V, E, F>( &self, tree: &Tree<T, V>, query: Q, mk_extra: &'static F ) -> impl Iterator<Item = Result<FilteredChunk<(u64, T::Key, V), E>>> + 'static
pub fn iter_filtered_chunked_reverse<Q, V, E, F>( &self, tree: &Tree<T, V>, query: Q, mk_extra: &'static F ) -> impl Iterator<Item = Result<FilteredChunk<(u64, T::Key, V), E>>> + 'static
pub fn stream_filtered_chunked<Q, V, E, F>( &self, tree: &Tree<T, V>, query: Q, mk_extra: &'static F ) -> impl Stream<Item = Result<FilteredChunk<(u64, T::Key, V), E>>> + 'static
pub fn stream_filtered_chunked_reverse<Q, V, E, F>( &self, tree: &Tree<T, V>, query: Q, mk_extra: &'static F ) -> impl Stream<Item = Result<FilteredChunk<(u64, T::Key, V), E>>> + 'static
pub fn get<V: BanyanValue>(
&self,
tree: &Tree<T, V>,
offset: u64
) -> Result<Option<(T::Key, V)>>
pub fn get<V: BanyanValue>( &self, tree: &Tree<T, V>, offset: u64 ) -> Result<Option<(T::Key, V)>>
element at index
returns Ok(None) when offset is larger than count, or when hitting a purged part of the tree. Returns an error when part of the tree should be there, but could not be read.
pub fn collect<V: BanyanValue>(
&self,
tree: &Tree<T, V>
) -> Result<Vec<Option<(T::Key, V)>>>
pub fn collect<V: BanyanValue>( &self, tree: &Tree<T, V> ) -> Result<Vec<Option<(T::Key, V)>>>
Collects all elements from a stream. Might produce an OOM for large streams.
pub fn collect_from<V: BanyanValue>(
&self,
tree: &Tree<T, V>,
offset: u64
) -> Result<Vec<Option<(T::Key, V)>>>
pub fn collect_from<V: BanyanValue>( &self, tree: &Tree<T, V>, offset: u64 ) -> Result<Vec<Option<(T::Key, V)>>>
Collects all elements from the given offset. Might produce an OOM for large streams.