pub struct AsyncQueryBuilder { /* private fields */ }
Builder for constructing async queries over the graph
Provides a fluent API for filtering and executing queries asynchronously. Supports both batch loading and streaming for memory-efficient processing.
Examples
use llm_memory_graph::query::AsyncQueryBuilder;
use llm_memory_graph::types::NodeType;
use futures::stream::StreamExt;
// Query with filters
let nodes = builder
.node_type(NodeType::Prompt)
.limit(100)
.execute()
.await?;
// Stream large result sets
let mut stream = builder.execute_stream();
while let Some(node) = stream.next().await {
// Process node...
}

Implementations

impl AsyncQueryBuilder

pub fn new(storage: Arc<dyn AsyncStorageBackend>) -> Self
Create a new async query builder
Examples
use llm_memory_graph::query::AsyncQueryBuilder;
use llm_memory_graph::storage::AsyncSledBackend;
use std::sync::Arc;
let backend = AsyncSledBackend::open("./data/graph.db").await?;
let builder = AsyncQueryBuilder::new(Arc::new(backend));

pub fn node_type(self, node_type: NodeType) -> Self
Filter by node type
Examples
let prompts = builder
.node_type(NodeType::Prompt)
.execute()
.await?;

pub fn time_range(self, start: DateTime<Utc>, end: DateTime<Utc>) -> Self
Filter by time range (inclusive)
Examples
let start = Utc::now() - chrono::Duration::hours(24);
let end = Utc::now();
let recent_nodes = builder
.time_range(start, end)
.execute()
.await?;

pub fn offset(self, offset: usize) -> Self
Skip the first N results
Examples
// Get results 11-20 (skip first 10, take next 10)
let page2 = builder
.offset(10)
.limit(10)
.execute()
.await?;

pub async fn execute(&self) -> Result<Vec<Node>>
Execute the query and return all matching nodes
This loads all results into memory. For large result sets, consider using
execute_stream() instead.
Examples
let nodes = builder.execute().await?;
println!("Found {} nodes", nodes.len());

pub fn execute_stream(&self) -> Pin<Box<dyn Stream<Item = Result<Node>> + Send + '_>>
Execute the query and return a stream of results
This is memory-efficient for large result sets as it processes nodes one at a time without loading everything into memory. The stream uses storage-level streaming to avoid loading all nodes at once.
Examples
let mut stream = builder.execute_stream();
let mut count = 0;
while let Some(result) = stream.next().await {
match result {
Ok(node) => {
// Process node without loading all into memory
count += 1;
}
Err(e) => eprintln!("Error: {}", e),
}
}
println!("Processed {} nodes", count);

pub async fn count(&self) -> Result<usize>
Count the number of matching nodes without loading them
This is more efficient than execute().await?.len() for large result sets
as it uses storage-level counting when possible.
Examples
let prompt_count = builder
.node_type(NodeType::Prompt)
.count()
.await?;
println!("Total prompts: {}", prompt_count);