use anyhow::{Context as AnyhowContext, Result};
use std::sync::Arc;
use tracing::{debug, warn};
use super::budget::SharedBudgetManager;
use super::cache::{CacheConfig, ContextCache};
use super::file_loader::FileLoader;
use super::graph::{load_relationships_parallel, RelatedChunk};
use super::token_counter::TokenCounter;
use super::types::{ContextBundle, ContextItem, ExpandOptions, LineRange};
use crate::db::SqliteStore;
use crate::profile_scope;
/// Metadata for a single indexed code chunk, used to locate and load its
/// source text when assembling context bundles.
#[derive(Debug, Clone)]
pub struct ChunkMetadata {
    /// Database id of the chunk.
    pub id: i64,
    /// File path relative to the worktree root.
    pub file_relpath: String,
    /// Base path handed to `FileLoader` when resolving `file_relpath`.
    // NOTE(review): set to an empty string for related chunks (see
    // `related_chunk_to_item`) — confirm FileLoader tolerates that.
    pub worktree_path: String,
    /// Symbol the chunk covers (function/type name), if any.
    pub symbol_name: Option<String>,
    /// Chunk kind label (e.g. "func" in the tests below).
    pub kind: String,
    /// First line of the chunk (inclusive).
    pub start_line: i32,
    /// Last line of the chunk (inclusive).
    pub end_line: i32,
    /// Declaration signature, when available.
    pub signature: Option<String>,
    /// Attached documentation text, when available.
    pub docstring: Option<String>,
}
/// Strategy interface for building a `ContextBundle` around a chunk.
#[async_trait::async_trait]
pub trait ContextAssembler: Send + Sync {
    /// Assembles a context bundle for `chunk_id`, spending at most `budget`
    /// tokens, with expansion behavior (callers/callees/tests, depth)
    /// controlled by `options`.
    async fn assemble(
        &self,
        chunk_id: i64,
        budget: usize,
        options: ExpandOptions,
    ) -> Result<ContextBundle>;
}
/// Sequential assembler: produces a bundle containing only the primary chunk,
/// backed by a result cache.
pub struct BasicContextAssembler {
    // Kept for parity with ParallelContextAssembler and future queries; not
    // read directly yet (metadata lookup is unimplemented), hence the allow.
    #[allow(dead_code)]
    store: Arc<SqliteStore>,
    /// Counts tokens of loaded chunk text for budget accounting.
    token_counter: TokenCounter,
    /// Cache of previously assembled bundles, keyed by chunk + options.
    cache: Arc<ContextCache>,
}
impl BasicContextAssembler {
    /// Creates an assembler over `store`, with caching behavior controlled by
    /// `cache_config`.
    pub fn new(store: Arc<SqliteStore>, cache_config: CacheConfig) -> Self {
        let shared_cache = ContextCache::new(Arc::clone(&store), cache_config);
        Self {
            token_counter: TokenCounter::new(),
            cache: Arc::new(shared_cache),
            store,
        }
    }

    /// Creates an assembler whose bundle cache is disabled.
    pub fn new_without_cache(store: Arc<SqliteStore>) -> Self {
        let disabled = CacheConfig {
            enabled: false,
            ..Default::default()
        };
        Self::new(store, disabled)
    }

    /// Shared handle to the underlying context cache.
    pub fn cache(&self) -> &Arc<ContextCache> {
        &self.cache
    }

    /// Looks up chunk metadata by id. Not yet ported to the SQLite backend;
    /// currently always returns an error.
    pub async fn get_chunk_metadata(&self, _chunk_id: i64) -> Result<ChunkMetadata> {
        profile_scope!("get_chunk_metadata");
        anyhow::bail!("get_chunk_metadata not yet implemented for SQLite")
    }

    /// Loads the file text covered by `metadata` and packages it as a
    /// `ContextItem` with the given `role` and human-readable `reason`.
    async fn create_context_item(
        &self,
        metadata: ChunkMetadata,
        role: &str,
        reason: &str,
    ) -> Result<ContextItem> {
        profile_scope!("create_context_item");
        let loader = FileLoader::new(&metadata.worktree_path);
        let span = LineRange::new(metadata.start_line, metadata.end_line);
        let loaded = loader.load_range(&metadata.file_relpath, span).await;
        let content = loaded.with_context(|| {
            format!(
                "Failed to load file content: {} (lines {}-{})",
                metadata.file_relpath, metadata.start_line, metadata.end_line
            )
        })?;
        let tokens = self
            .token_counter
            .count(&content)
            .context("Failed to count tokens")?;
        debug!(
            "Created context item: {} lines {}-{}, {} tokens",
            metadata.file_relpath, metadata.start_line, metadata.end_line, tokens
        );
        Ok(ContextItem {
            relpath: metadata.file_relpath,
            range: span,
            role: role.to_string(),
            reason: reason.to_string(),
            content,
            tokens,
        })
    }
}
#[async_trait::async_trait]
impl ContextAssembler for BasicContextAssembler {
    /// Assembles a bundle containing only the primary chunk, serving from the
    /// cache when possible and writing back the result on a miss.
    async fn assemble(
        &self,
        chunk_id: i64,
        budget: usize,
        options: ExpandOptions,
    ) -> Result<ContextBundle> {
        profile_scope!("context_assemble");
        debug!(
            "Assembling context for chunk {} with budget {} tokens",
            chunk_id, budget
        );
        // Fast path: reuse a previously assembled bundle for this chunk/options.
        match self.cache.get(chunk_id, &options).await? {
            Some(cached_bundle) => {
                debug!("Returning cached bundle for chunk {}", chunk_id);
                return Ok(cached_bundle);
            }
            None => debug!("Cache miss for chunk {}, assembling...", chunk_id),
        }
        let metadata = self
            .get_chunk_metadata(chunk_id)
            .await
            .context("Failed to retrieve chunk metadata")?;
        let reason = match metadata.symbol_name {
            Some(ref name) => format!("Primary chunk: {} ({})", name, metadata.kind),
            None => format!("Primary chunk ({})", metadata.kind),
        };
        let item = self
            .create_context_item(metadata, "primary", &reason)
            .await
            .context("Failed to create context item")?;
        let mut bundle = ContextBundle::new();
        // An over-budget primary chunk is still included in full; the bundle
        // is only flagged as truncated here — content is not trimmed.
        if item.tokens > budget {
            warn!(
                "Primary chunk ({} tokens) exceeds budget ({} tokens), truncating",
                item.tokens, budget
            );
            bundle.truncated = true;
        }
        bundle.add_item(item);
        debug!(
            "Assembled context bundle: {} items, {} tokens, truncated: {}",
            bundle.items.len(),
            bundle.total_tokens,
            bundle.truncated
        );
        // Write-back is best-effort: cache failures never fail assembly.
        if let Err(e) = self.cache.put(chunk_id, &options, &bundle).await {
            warn!("Failed to cache bundle for chunk {}: {}", chunk_id, e);
        }
        Ok(bundle)
    }
}
/// Concurrent assembler: loads the primary chunk plus related callers,
/// callees and tests in parallel tasks, sharing a token budget.
pub struct ParallelContextAssembler {
    /// Backing store, also used for relationship-graph queries.
    store: Arc<SqliteStore>,
    /// Counts tokens of loaded chunk text for budget accounting.
    token_counter: TokenCounter,
    /// Cache of previously assembled bundles, keyed by chunk + options.
    cache: Arc<ContextCache>,
}
impl ParallelContextAssembler {
    /// Creates a parallel assembler over `store`, with caching behavior
    /// controlled by `cache_config`.
    pub fn new(store: Arc<SqliteStore>, cache_config: CacheConfig) -> Self {
        let cache = Arc::new(ContextCache::new(store.clone(), cache_config));
        Self {
            store,
            token_counter: TokenCounter::new(),
            cache,
        }
    }

    /// Creates a parallel assembler whose bundle cache is disabled.
    pub fn new_without_cache(store: Arc<SqliteStore>) -> Self {
        let cache_config = CacheConfig {
            enabled: false,
            ..Default::default()
        };
        Self::new(store, cache_config)
    }

    /// Shared handle to the underlying context cache.
    pub fn cache(&self) -> &Arc<ContextCache> {
        &self.cache
    }

    /// Looks up chunk metadata by id. Not yet ported to the SQLite backend;
    /// currently always returns an error.
    async fn get_chunk_metadata(&self, _chunk_id: i64) -> Result<ChunkMetadata> {
        // Consistency: profile this path like BasicContextAssembler does.
        profile_scope!("get_chunk_metadata");
        anyhow::bail!("get_chunk_metadata not yet implemented for SQLite")
    }

    /// Loads the file text covered by `metadata` and packages it as a
    /// `ContextItem` with the given `role` and human-readable `reason`.
    async fn create_context_item(
        &self,
        metadata: ChunkMetadata,
        role: &str,
        reason: &str,
    ) -> Result<ContextItem> {
        profile_scope!("create_context_item");
        let file_loader = FileLoader::new(&metadata.worktree_path);
        let range = LineRange::new(metadata.start_line, metadata.end_line);
        let content = file_loader
            .load_range(&metadata.file_relpath, range)
            .await
            .with_context(|| {
                format!(
                    "Failed to load file content: {} (lines {}-{})",
                    metadata.file_relpath, metadata.start_line, metadata.end_line
                )
            })?;
        let tokens = self
            .token_counter
            .count(&content)
            .context("Failed to count tokens")?;
        debug!(
            "Created context item: {} lines {}-{}, {} tokens",
            metadata.file_relpath, metadata.start_line, metadata.end_line, tokens
        );
        Ok(ContextItem {
            relpath: metadata.file_relpath,
            range,
            role: role.to_string(),
            reason: reason.to_string(),
            content,
            tokens,
        })
    }

    /// Converts a graph `RelatedChunk` into a `ContextItem`, reserving its
    /// token cost against the shared `budget`. Returns `None` when the budget
    /// is exhausted, the reservation fails, or loading the file fails.
    async fn related_chunk_to_item(
        &self,
        chunk: RelatedChunk,
        role: &str,
        budget: &SharedBudgetManager,
    ) -> Option<ContextItem> {
        if budget.remaining() == 0 {
            debug!("Budget exhausted, skipping chunk {}", chunk.id);
            return None;
        }
        let metadata = ChunkMetadata {
            id: chunk.id,
            file_relpath: chunk.relpath.clone(),
            // NOTE(review): RelatedChunk carries no worktree path, so the
            // loader gets an empty base path here — presumably `relpath` is
            // resolvable as-is; verify against FileLoader's behavior.
            worktree_path: String::new(),
            symbol_name: chunk.symbol_name.clone(),
            kind: chunk.kind.clone(),
            start_line: chunk.start_line,
            end_line: chunk.end_line,
            signature: None,
            docstring: None,
        };
        let reason = if let Some(ref name) = chunk.symbol_name {
            format!(
                "{}: {} (depth {}, relevance {:.2})",
                role, name, chunk.depth, chunk.relevance
            )
        } else {
            format!(
                "{} (depth {}, relevance {:.2})",
                role, chunk.depth, chunk.relevance
            )
        };
        match self.create_context_item(metadata, role, &reason).await {
            Ok(item) => {
                // Reserve after loading so the reservation uses the real
                // token count; may still fail if sibling tasks consumed the
                // remaining budget in the meantime.
                if budget.try_reserve(role, item.tokens) {
                    Some(item)
                } else {
                    debug!("Insufficient budget for {} chunk {}", role, chunk.id);
                    None
                }
            }
            Err(e) => {
                warn!(
                    "Failed to create context item for chunk {}: {}",
                    chunk.id, e
                );
                None
            }
        }
    }

    /// Loads up to `max_items` related chunks concurrently, each competing
    /// for the shared `budget`. Chunks that fail to load or cannot be
    /// afforded are dropped.
    async fn load_related_items(
        &self,
        chunks: Vec<RelatedChunk>,
        role: &str,
        budget: SharedBudgetManager,
        max_items: usize,
    ) -> Vec<ContextItem> {
        let chunks_to_load: Vec<_> = chunks.into_iter().take(max_items).collect();
        let mut handles = Vec::with_capacity(chunks_to_load.len());
        for chunk in chunks_to_load {
            let budget_clone = budget.clone();
            let role_str = role.to_string();
            let self_clone = self.clone_for_parallel();
            handles.push(tokio::spawn(async move {
                self_clone
                    .related_chunk_to_item(chunk, &role_str, &budget_clone)
                    .await
            }));
        }
        let mut items = Vec::new();
        for handle in handles {
            // A JoinError (panicked/cancelled task) is treated the same as a
            // chunk that failed to load: skipped.
            if let Ok(Some(item)) = handle.await {
                items.push(item);
            }
        }
        items
    }

    /// Cheap handle clone used to move `self` into spawned tasks.
    fn clone_for_parallel(&self) -> Self {
        // Fix: delegate to the Clone impl instead of duplicating the
        // field-by-field clone logic it already implements.
        self.clone()
    }
}
impl Clone for ParallelContextAssembler {
fn clone(&self) -> Self {
Self {
store: self.store.clone(),
token_counter: self.token_counter.clone(),
cache: self.cache.clone(),
}
}
}
#[async_trait::async_trait]
impl ContextAssembler for ParallelContextAssembler {
async fn assemble(
&self,
chunk_id: i64,
budget: usize,
options: ExpandOptions,
) -> Result<ContextBundle> {
debug!(
"Assembling context for chunk {} with budget {} tokens (parallel mode)",
chunk_id, budget
);
if let Some(cached_bundle) = self.cache.get(chunk_id, &options).await? {
debug!("Returning cached bundle for chunk {}", chunk_id);
return Ok(cached_bundle);
}
debug!(
"Cache miss for chunk {}, assembling in parallel...",
chunk_id
);
let budget_mgr = SharedBudgetManager::new(budget);
let allocation = budget_mgr.allocate().unwrap();
let (metadata_result, relationships) =
tokio::join!(self.get_chunk_metadata(chunk_id), async {
if options.callers || options.callees || options.tests {
load_relationships_parallel(&self.store, chunk_id, options.max_depth).await
} else {
(Vec::new(), Vec::new(), Vec::new())
}
});
let metadata = metadata_result.context("Failed to retrieve chunk metadata")?;
let (callers, callees, tests) = relationships;
let reason = if let Some(ref name) = metadata.symbol_name {
format!("Primary chunk: {} ({})", name, metadata.kind)
} else {
format!("Primary chunk ({})", metadata.kind)
};
let primary_item = self
.create_context_item(metadata, "primary", &reason)
.await
.context("Failed to create primary context item")?;
let mut bundle = ContextBundle::new();
if !budget_mgr.try_reserve("primary", primary_item.tokens) {
warn!(
"Primary chunk ({} tokens) exceeds budget ({} tokens)",
primary_item.tokens, budget
);
bundle.truncated = true;
}
bundle.add_item(primary_item);
if budget_mgr.remaining() > 0 {
let (test_items, caller_items, callee_items) = tokio::join!(
async {
if options.tests && !tests.is_empty() {
self.load_related_items(
tests,
"test",
budget_mgr.clone(),
allocation.tests / 400, )
.await
} else {
Vec::new()
}
},
async {
if options.callers && !callers.is_empty() {
self.load_related_items(
callers,
"caller",
budget_mgr.clone(),
allocation.callers / 400,
)
.await
} else {
Vec::new()
}
},
async {
if options.callees && !callees.is_empty() {
self.load_related_items(
callees,
"callee",
budget_mgr.clone(),
allocation.callees / 400,
)
.await
} else {
Vec::new()
}
}
);
for item in test_items {
bundle.add_item(item);
}
for item in caller_items {
bundle.add_item(item);
}
for item in callee_items {
bundle.add_item(item);
}
}
debug!(
"Assembled context bundle (parallel): {} items, {} tokens, truncated: {}",
bundle.items.len(),
bundle.total_tokens,
bundle.truncated
);
if let Err(e) = self.cache.put(chunk_id, &options, &bundle).await {
warn!("Failed to cache bundle for chunk {}: {}", chunk_id, e);
}
Ok(bundle)
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Sanity-check that `ChunkMetadata` fields round-trip through a plain
    /// struct-literal construction.
    #[test]
    fn test_chunk_metadata_creation() {
        let meta = ChunkMetadata {
            id: 1,
            file_relpath: String::from("src/main.rs"),
            worktree_path: String::from("/workspace"),
            symbol_name: Some(String::from("main")),
            kind: String::from("func"),
            start_line: 1,
            end_line: 10,
            signature: Some(String::from("fn main()")),
            docstring: None,
        };
        assert_eq!(meta.id, 1);
        assert_eq!(meta.file_relpath, "src/main.rs");
        assert_eq!(meta.symbol_name.as_deref(), Some("main"));
    }
}