mod compaction;
pub mod cross_repo;
use crate::index::{CodeChunk, ResolvedEdge, Symbol};
use crate::IndexAndResolveResult;
use codemem_core::{CodememError, Edge, GraphConfig, GraphNode, NodeKind, RelationshipType};
use std::collections::{HashMap, HashSet};
/// Summary counters for one index-persist pass over a set of files.
///
/// Returned by `persist_index_results`, `persist_graph_only`, and
/// `persist_index_results_with_progress`.
#[derive(Debug, Default)]
pub struct IndexPersistResult {
/// File nodes written (one per path in `results.file_paths`).
pub files_created: usize,
/// Unique package (directory) nodes created by the package tree build.
pub packages_created: usize,
/// Symbol nodes stored (one per symbol in the index results).
pub symbols_stored: usize,
/// Chunk nodes stored for the indexed files.
pub chunks_stored: usize,
/// Reference edges persisted from resolved edges.
pub edges_resolved: usize,
/// Symbols whose embedding vectors were computed and stored
/// (0 when embeddings are unavailable or graph-only persistence is used).
pub symbols_embedded: usize,
/// Chunks whose embedding vectors were computed and stored.
pub chunks_embedded: usize,
/// Chunk nodes removed by auto-compaction (0 when compaction is disabled).
pub chunks_pruned: usize,
/// Symbol nodes removed by auto-compaction (0 when compaction is disabled).
pub symbols_pruned: usize,
}
/// Summary counters for cross-repository persistence.
///
/// NOTE(review): the fields are populated by the `cross_repo` module (not
/// visible here); descriptions below follow the field names — confirm against
/// that module.
#[derive(Debug, Default)]
pub struct CrossRepoPersistResult {
/// Packages registered for cross-repo resolution.
pub packages_registered: usize,
/// Unresolved external references recorded for later linking.
pub unresolved_refs_stored: usize,
/// Edges created pointing from this repo into others.
pub forward_edges_created: usize,
/// Edges created pointing from other repos back into this one.
pub backward_edges_created: usize,
/// HTTP/server endpoints detected in the indexed code.
pub endpoints_detected: usize,
/// Outgoing client call sites detected.
pub client_calls_detected: usize,
/// Endpoints detected from API specifications.
pub spec_endpoints_detected: usize,
/// Event channels (publish/subscribe) detected.
pub event_channels_detected: usize,
/// HTTP call edges matched to detected endpoints.
pub http_edges_matched: usize,
/// Event edges matched across publishers and subscribers.
pub event_edges_matched: usize,
}
pub fn edge_weight_for(rel: &RelationshipType, config: &GraphConfig) -> f64 {
match rel {
RelationshipType::Calls => config.calls_edge_weight,
RelationshipType::Imports => config.imports_edge_weight,
RelationshipType::Contains => config.contains_edge_weight,
RelationshipType::TypeDefinition => config.type_definition_edge_weight,
RelationshipType::Reads => config.reads_edge_weight,
RelationshipType::Writes => config.writes_edge_weight,
RelationshipType::Overrides => config.overrides_edge_weight,
RelationshipType::Implements | RelationshipType::Inherits => 0.8,
RelationshipType::DependsOn => 0.7,
RelationshipType::CoChanged => 0.6,
RelationshipType::EvolvedInto | RelationshipType::Summarizes => 0.7,
RelationshipType::PartOf => 0.4,
RelationshipType::RelatesTo | RelationshipType::SharesTheme => 0.3,
RelationshipType::HttpCalls => 0.7,
RelationshipType::PublishesTo | RelationshipType::SubscribesTo => 0.6,
RelationshipType::ModifiedBy => 0.4,
_ => 0.5,
}
}
/// Internal tally returned by `persist_graph_nodes` for the callers' summaries.
struct GraphPersistCounts {
/// Unique package (directory) nodes created during the pass.
packages_created: usize,
/// Chunk nodes stored during the pass.
chunks_stored: usize,
}
impl super::CodememEngine {
/// Persist index results without progress reporting.
///
/// Convenience wrapper around [`Self::persist_index_results_with_progress`]
/// that discards progress updates.
pub fn persist_index_results(
    &self,
    results: &IndexAndResolveResult,
    namespace: Option<&str>,
) -> Result<IndexPersistResult, CodememError> {
    let ignore_progress = |_done: usize, _total: usize| {};
    self.persist_index_results_with_progress(results, namespace, ignore_progress)
}
/// Persist only graph structure (nodes/edges) for the index results, skipping
/// embeddings entirely; the embedded counters in the result are always zero.
/// Runs auto-compaction afterwards when enabled in the chunking config.
pub fn persist_graph_only(
    &self,
    results: &IndexAndResolveResult,
    namespace: Option<&str>,
) -> Result<IndexPersistResult, CodememError> {
    let files = &results.file_paths;
    let counts = self.persist_graph_nodes(results, namespace)?;
    let mut chunks_pruned = 0;
    let mut symbols_pruned = 0;
    if self.config.chunking.auto_compact {
        let pruned = self.compact_graph(files, namespace);
        chunks_pruned = pruned.0;
        symbols_pruned = pruned.1;
    }
    Ok(IndexPersistResult {
        files_created: files.len(),
        packages_created: counts.packages_created,
        symbols_stored: results.symbols.len(),
        chunks_stored: counts.chunks_stored,
        edges_resolved: results.edges.len(),
        symbols_embedded: 0,
        chunks_embedded: 0,
        chunks_pruned,
        symbols_pruned,
    })
}
/// Persist graph structure and embeddings for the index results, invoking
/// `on_progress(done, total)` as embedding batches complete. Runs
/// auto-compaction afterwards when enabled in the chunking config.
pub fn persist_index_results_with_progress(
    &self,
    results: &IndexAndResolveResult,
    namespace: Option<&str>,
    on_progress: impl Fn(usize, usize),
) -> Result<IndexPersistResult, CodememError> {
    let files = &results.file_paths;
    let counts = self.persist_graph_nodes(results, namespace)?;
    let (symbols_embedded, chunks_embedded) =
        self.embed_and_persist(&results.symbols, &results.chunks, &results.edges, on_progress)?;
    let (chunks_pruned, symbols_pruned) = if !self.config.chunking.auto_compact {
        (0, 0)
    } else {
        self.compact_graph(files, namespace)
    };
    Ok(IndexPersistResult {
        files_created: files.len(),
        packages_created: counts.packages_created,
        symbols_stored: results.symbols.len(),
        chunks_stored: counts.chunks_stored,
        edges_resolved: results.edges.len(),
        symbols_embedded,
        chunks_embedded,
        chunks_pruned,
        symbols_pruned,
    })
}
/// Persist the full graph produced by one index pass: file nodes, the
/// package/directory tree, symbol nodes with containment edges, ast-grep
/// reference edges, optional SCIP nodes/edges (fused with the ast-grep
/// edges), and finally chunk nodes. Also removes stale symbol, SCIP, and
/// chunk entries for the files that were re-indexed.
///
/// Locking: the graph mutex is held for the bulk writes but released around
/// `cleanup_stale_symbols` — NOTE(review): presumably that helper takes its
/// own locks; confirm before restructuring.
fn persist_graph_nodes(
&self,
results: &IndexAndResolveResult,
namespace: Option<&str>,
) -> Result<GraphPersistCounts, CodememError> {
let all_symbols = &results.symbols;
let all_chunks = &results.chunks;
let seen_files = &results.file_paths;
let edges = &results.edges;
// Single shared timestamp for every edge created in this pass.
let now = chrono::Utc::now();
let ns_string = namespace.map(|s| s.to_string());
let contains_weight = edge_weight_for(&RelationshipType::Contains, &self.config.graph);
let mut graph = self.lock_graph()?;
// One File node per indexed path, keyed "file:<path>".
let file_nodes: Vec<GraphNode> = seen_files
.iter()
.map(|file_path| {
let mut payload = HashMap::new();
payload.insert(
"file_path".to_string(),
serde_json::Value::String(file_path.clone()),
);
GraphNode {
id: format!("file:{file_path}"),
kind: NodeKind::File,
label: file_path.clone(),
payload,
centrality: 0.0,
memory_id: None,
namespace: ns_string.clone(),
valid_from: None,
valid_to: None,
}
})
.collect();
self.persist_nodes_to_storage_and_graph(&file_nodes, &mut **graph);
// Directory/package hierarchy plus pkg->pkg and pkg->file containment edges.
let (dir_nodes, dir_edges, created_dirs) =
self.build_package_tree(seen_files, &ns_string, contains_weight, now);
self.persist_nodes_to_storage_and_graph(&dir_nodes, &mut **graph);
self.persist_edges_to_storage_and_graph(&dir_edges, &mut **graph);
let (sym_nodes, sym_edges) =
Self::build_symbol_nodes(all_symbols, &ns_string, contains_weight, now);
// Collect pre-existing NON-SCIP symbol node ids grouped by file, restricted
// to files touched by this pass — these are stale-symbol cleanup candidates.
// (SCIP-sourced nodes are cleaned separately below.)
let mut old_syms_by_file: HashMap<String, HashSet<String>> = HashMap::new();
for node in graph.get_all_nodes() {
if !node.id.starts_with("sym:") {
continue;
}
if matches!(
node.payload.get("source").and_then(|v| v.as_str()),
Some("scip" | "scip-synthetic")
) {
continue;
}
let Some(fp) = node.payload.get("file_path").and_then(|v| v.as_str()) else {
continue;
};
if !seen_files.contains(fp) {
continue;
}
old_syms_by_file
.entry(fp.to_string())
.or_default()
.insert(node.id);
}
// Release the graph lock before cleanup (see lock note in the doc comment).
drop(graph);
for file_path in seen_files {
// Symbols the new index produced for this file; anything in
// old_syms_by_file but not here is stale.
let new_sym_ids: HashSet<String> = sym_nodes
.iter()
.filter(|n| {
n.payload.get("file_path").and_then(|v| v.as_str()) == Some(file_path.as_str())
})
.map(|n| n.id.clone())
.collect();
let empty = HashSet::new();
let old_sym_ids = old_syms_by_file.get(file_path).unwrap_or(&empty);
// Best-effort: a failed cleanup is logged, not fatal.
if let Err(e) = self.cleanup_stale_symbols(file_path, old_sym_ids, &new_sym_ids) {
tracing::warn!("Failed to cleanup stale symbols for {file_path}: {e}");
}
}
// Re-acquire the lock for the remaining writes.
let mut graph = self.lock_graph()?;
self.persist_nodes_to_storage_and_graph(&sym_nodes, &mut **graph);
self.persist_edges_to_storage_and_graph(&sym_edges, &mut **graph);
// ast-grep reference edges between symbol nodes.
let ref_edges = Self::build_reference_edges(edges, &self.config.graph, now);
self.persist_edges_to_storage_and_graph(&ref_edges, &mut **graph);
if let Some(ref scip_build) = results.scip_build {
// SCIP re-index cleanup: any previously stored SCIP symbol node for a
// re-indexed file that is absent from the new SCIP build is stale.
let new_scip_ids: HashSet<&str> =
scip_build.nodes.iter().map(|n| n.id.as_str()).collect();
let mut stale_scip_ids = Vec::new();
for node in graph.get_all_nodes() {
if !node.id.starts_with("sym:") {
continue;
}
if !matches!(
node.payload.get("source").and_then(|v| v.as_str()),
Some("scip" | "scip-synthetic")
) {
continue;
}
if !new_scip_ids.contains(node.id.as_str()) {
if let Some(fp) = node.payload.get("file_path").and_then(|v| v.as_str()) {
if seen_files.contains(fp) {
stale_scip_ids.push(node.id.clone());
}
}
}
}
for stale_id in &stale_scip_ids {
let _ = graph.remove_node(stale_id);
let _ = self.storage.delete_graph_nodes_by_prefix(stale_id);
// Each SCIP node may have a companion "scip-doc:<qname>" memory;
// delete it alongside the node.
if let Some(qname) = stale_id.strip_prefix("sym:") {
let doc_id = format!("scip-doc:{qname}");
let _ = self.storage.delete_memory(&doc_id);
}
}
if !stale_scip_ids.is_empty() {
tracing::info!(
"Cleaned up {} stale SCIP nodes from re-index",
stale_scip_ids.len()
);
}
self.persist_nodes_to_storage_and_graph(&scip_build.nodes, &mut **graph);
// Fuse duplicate (src, dst, relationship) pairs: the SCIP edge wins
// and the superseded ast-grep edge is removed from graph and storage.
let (fused_edges, superseded_ids) = Self::fuse_edges(&ref_edges, &scip_build.edges);
for edge_id in &superseded_ids {
let _ = graph.remove_edge(edge_id);
let _ = self.storage.delete_graph_edge(edge_id);
}
self.persist_edges_to_storage_and_graph(&fused_edges, &mut **graph);
// Store SCIP-extracted memories, each linked to its node with a weak
// RelatesTo edge.
for (memory, related_node_id) in &scip_build.memories {
let _ = self.storage.insert_memory(memory);
let relates_edge = Edge {
id: format!("relates:{}->mem:{}", related_node_id, memory.id),
src: related_node_id.clone(),
dst: format!("mem:{}", memory.id),
relationship: RelationshipType::RelatesTo,
weight: 0.3,
properties: HashMap::new(),
created_at: now,
valid_from: Some(now),
valid_to: None,
};
let _ = graph.add_edge(relates_edge.clone());
let _ = self.storage.insert_graph_edges_batch(&[relates_edge]);
}
}
// Chunks are rebuilt wholesale per file: drop every stored chunk node for
// each re-indexed file, then insert the fresh set.
for file_path in seen_files {
let prefix = format!("chunk:{file_path}:");
let _ = self.storage.delete_graph_nodes_by_prefix(&prefix);
}
let (chunk_nodes, chunk_edges) =
Self::build_chunk_nodes(all_chunks, &ns_string, contains_weight, now);
let chunk_count = chunk_nodes.len();
self.persist_nodes_to_storage_and_graph(&chunk_nodes, &mut **graph);
self.persist_edges_to_storage_and_graph(&chunk_edges, &mut **graph);
drop(graph);
Ok(GraphPersistCounts {
packages_created: created_dirs,
chunks_stored: chunk_count,
})
}
/// Write `nodes` to durable storage in one batch, then mirror each node into
/// the in-memory graph. Storage failures are logged, not propagated.
fn persist_nodes_to_storage_and_graph(
    &self,
    nodes: &[GraphNode],
    graph: &mut dyn codemem_core::GraphBackend,
) {
    match self.storage.insert_graph_nodes_batch(nodes) {
        Ok(_) => {}
        Err(e) => {
            tracing::warn!("Failed to batch-insert {} graph nodes: {e}", nodes.len());
        }
    }
    nodes.iter().for_each(|node| {
        let _ = graph.add_node(node.clone());
    });
}
fn persist_edges_to_storage_and_graph(
&self,
edges: &[Edge],
graph: &mut dyn codemem_core::GraphBackend,
) {
let mut referenced_ids: std::collections::HashSet<&str> = std::collections::HashSet::new();
for edge in edges {
referenced_ids.insert(&edge.src);
referenced_ids.insert(&edge.dst);
}
let existing_ids: std::collections::HashSet<String> = referenced_ids
.iter()
.filter(|id| self.storage.get_graph_node(id).ok().flatten().is_some())
.map(|id| id.to_string())
.collect();
let valid_edges: Vec<&Edge> = edges
.iter()
.filter(|e| existing_ids.contains(&e.src) && existing_ids.contains(&e.dst))
.collect();
let skipped = edges.len() - valid_edges.len();
if skipped > 0 {
tracing::debug!("Skipped {} edges referencing non-existent nodes", skipped);
}
let owned: Vec<Edge> = valid_edges.into_iter().cloned().collect();
if let Err(e) = self.storage.insert_graph_edges_batch(&owned) {
tracing::warn!("Failed to batch-insert {} graph edges: {e}", owned.len());
}
for edge in &owned {
let _ = graph.add_edge(edge.clone());
}
}
/// Build Package nodes for every ancestor directory of the indexed files,
/// plus Contains edges pkg->pkg down the hierarchy and pkg->file from each
/// file's immediate directory. Returns (nodes, edges, unique package count).
/// Files at the repository root (no parent directory) get no containment edge.
fn build_package_tree(
    &self,
    seen_files: &HashSet<String>,
    ns_string: &Option<String>,
    contains_weight: f64,
    now: chrono::DateTime<chrono::Utc>,
) -> (Vec<GraphNode>, Vec<Edge>, usize) {
    let mut known_pkgs: HashSet<String> = HashSet::new();
    let mut known_edge_ids: HashSet<String> = HashSet::new();
    let mut nodes = Vec::new();
    let mut edges = Vec::new();
    // Shared factory for Contains edges; id format is "contains:<src>-><dst>".
    let make_contains = |id: String, src: String, dst: String| Edge {
        id,
        src,
        dst,
        relationship: RelationshipType::Contains,
        weight: contains_weight,
        valid_from: Some(now),
        valid_to: None,
        properties: HashMap::new(),
        created_at: now,
    };
    for file_path in seen_files {
        // Collect ancestor directories root-first: "a/b/c.rs" -> ["a", "a/b"].
        let mut chain: Vec<String> = Vec::new();
        let mut cursor = std::path::Path::new(file_path).parent();
        while let Some(dir) = cursor {
            let dir_str = dir.to_string_lossy().into_owned();
            if dir_str.is_empty() || dir_str == "." {
                break;
            }
            chain.push(dir_str);
            cursor = dir.parent();
        }
        chain.reverse();
        for (depth, dir_str) in chain.iter().enumerate() {
            let pkg_id = format!("pkg:{dir_str}/");
            // First sighting of this directory creates its Package node.
            if known_pkgs.insert(pkg_id.clone()) {
                nodes.push(GraphNode {
                    id: pkg_id.clone(),
                    kind: NodeKind::Package,
                    label: format!("{dir_str}/"),
                    payload: HashMap::new(),
                    centrality: 0.0,
                    memory_id: None,
                    namespace: ns_string.clone(),
                    valid_from: None,
                    valid_to: None,
                });
            }
            // The root-most ancestor has no parent package.
            if depth == 0 {
                continue;
            }
            let parent_id = format!("pkg:{}/", chain[depth - 1]);
            let edge_id = format!("contains:{parent_id}->{pkg_id}");
            if known_edge_ids.insert(edge_id.clone()) {
                edges.push(make_contains(edge_id, parent_id, pkg_id.clone()));
            }
        }
        // Link the deepest directory to the file itself.
        if let Some(deepest) = chain.last() {
            let parent_id = format!("pkg:{deepest}/");
            let file_id = format!("file:{file_path}");
            let edge_id = format!("contains:{parent_id}->{file_id}");
            edges.push(make_contains(edge_id, parent_id, file_id));
        }
    }
    let pkg_count = known_pkgs.len();
    (nodes, edges, pkg_count)
}
/// Build one graph node per symbol ("sym:<qualified_name>") plus a Contains
/// edge from the owning file node to each symbol node.
fn build_symbol_nodes(
    symbols: &[Symbol],
    ns_string: &Option<String>,
    contains_weight: f64,
    now: chrono::DateTime<chrono::Utc>,
) -> (Vec<GraphNode>, Vec<Edge>) {
    let mut nodes = Vec::with_capacity(symbols.len());
    let mut edges = Vec::with_capacity(symbols.len());
    for sym in symbols {
        let sym_id = format!("sym:{}", sym.qualified_name);
        let file_id = format!("file:{}", sym.file_path);
        nodes.push(GraphNode {
            id: sym_id.clone(),
            kind: NodeKind::from(sym.kind),
            label: sym.qualified_name.clone(),
            payload: Self::build_symbol_payload(sym),
            centrality: 0.0,
            memory_id: None,
            namespace: ns_string.clone(),
            valid_from: None,
            valid_to: None,
        });
        edges.push(Edge {
            id: format!("contains:{file_id}->{sym_id}"),
            src: file_id,
            dst: sym_id,
            relationship: RelationshipType::Contains,
            weight: contains_weight,
            valid_from: Some(now),
            valid_to: None,
            properties: HashMap::new(),
            created_at: now,
        });
    }
    (nodes, edges)
}
fn build_symbol_payload(sym: &Symbol) -> HashMap<String, serde_json::Value> {
let mut payload = HashMap::new();
payload.insert(
"symbol_kind".to_string(),
serde_json::Value::String(sym.kind.to_string()),
);
payload.insert(
"signature".to_string(),
serde_json::Value::String(sym.signature.clone()),
);
payload.insert(
"file_path".to_string(),
serde_json::Value::String(sym.file_path.clone()),
);
payload.insert("line_start".to_string(), serde_json::json!(sym.line_start));
payload.insert("line_end".to_string(), serde_json::json!(sym.line_end));
payload.insert(
"visibility".to_string(),
serde_json::Value::String(sym.visibility.to_string()),
);
if let Some(ref doc) = sym.doc_comment {
payload.insert(
"doc_comment".to_string(),
serde_json::Value::String(doc.clone()),
);
}
if !sym.parameters.is_empty() {
payload.insert(
"parameters".to_string(),
serde_json::to_value(&sym.parameters).unwrap_or_default(),
);
}
if let Some(ref ret) = sym.return_type {
payload.insert(
"return_type".to_string(),
serde_json::Value::String(ret.clone()),
);
}
if sym.is_async {
payload.insert("is_async".to_string(), serde_json::json!(true));
}
if !sym.attributes.is_empty() {
payload.insert(
"attributes".to_string(),
serde_json::to_value(&sym.attributes).unwrap_or_default(),
);
}
if !sym.throws.is_empty() {
payload.insert(
"throws".to_string(),
serde_json::to_value(&sym.throws).unwrap_or_default(),
);
}
if let Some(ref gp) = sym.generic_params {
payload.insert(
"generic_params".to_string(),
serde_json::Value::String(gp.clone()),
);
}
if sym.is_abstract {
payload.insert("is_abstract".to_string(), serde_json::json!(true));
}
if let Some(ref parent) = sym.parent {
payload.insert(
"parent".to_string(),
serde_json::Value::String(parent.clone()),
);
}
payload
}
/// Confidence contribution of the ast-grep resolution layer; also added on
/// top of a SCIP edge's confidence when the two layers agree (see fuse_edges).
const AST_GREP_BASE_CONFIDENCE: f64 = 0.10;
/// Convert resolved ast-grep edges into graph edges between "sym:" nodes.
/// Each edge's weight is the relationship's base weight scaled by the
/// resolver's confidence.
fn build_reference_edges(
    edges: &[ResolvedEdge],
    graph_config: &GraphConfig,
    now: chrono::DateTime<chrono::Utc>,
) -> Vec<Edge> {
    let mut out = Vec::with_capacity(edges.len());
    for edge in edges {
        let properties: HashMap<String, serde_json::Value> = [
            ("source".to_string(), serde_json::json!("ast-grep")),
            (
                "confidence".to_string(),
                serde_json::json!(Self::AST_GREP_BASE_CONFIDENCE),
            ),
            (
                "source_layers".to_string(),
                serde_json::json!(["ast-grep"]),
            ),
        ]
        .into_iter()
        .collect();
        out.push(Edge {
            id: format!(
                "ref:{}->{}:{}",
                edge.source_qualified_name, edge.target_qualified_name, edge.relationship
            ),
            src: format!("sym:{}", edge.source_qualified_name),
            dst: format!("sym:{}", edge.target_qualified_name),
            relationship: edge.relationship,
            weight: edge_weight_for(&edge.relationship, graph_config)
                * edge.resolution_confidence,
            valid_from: Some(now),
            valid_to: None,
            properties,
            created_at: now,
        });
    }
    out
}
/// Merge SCIP edges with ast-grep edges that describe the same
/// (src, dst, relationship) triple. When both layers agree, the SCIP edge is
/// kept with a boosted confidence and a combined source_layers property, and
/// the ast-grep edge's id is reported as superseded (for removal by the
/// caller). SCIP edges with no ast-grep counterpart pass through unchanged.
fn fuse_edges(ast_grep_edges: &[Edge], scip_edges: &[Edge]) -> (Vec<Edge>, Vec<String>) {
    let key_of = |e: &Edge| (e.src.clone(), e.dst.clone(), e.relationship.to_string());
    let mut ast_by_key: HashMap<(String, String, String), &str> = HashMap::new();
    for e in ast_grep_edges {
        ast_by_key.insert(key_of(e), e.id.as_str());
    }
    let mut superseded_ids = Vec::new();
    let mut fused = Vec::with_capacity(scip_edges.len());
    for scip_edge in scip_edges {
        let mut out = scip_edge.clone();
        if let Some(ast_id) = ast_by_key.get(&key_of(scip_edge)) {
            superseded_ids.push((*ast_id).to_string());
            // Boost confidence by the ast-grep layer's contribution; missing
            // or non-numeric confidence defaults to 0.15.
            let scip_conf = out
                .properties
                .get("confidence")
                .and_then(|v| v.as_f64())
                .unwrap_or(0.15);
            out.properties.insert(
                "confidence".to_string(),
                serde_json::json!(scip_conf + Self::AST_GREP_BASE_CONFIDENCE),
            );
            out.properties.insert(
                "source_layers".to_string(),
                serde_json::json!(["ast-grep", "scip"]),
            );
        }
        fused.push(out);
    }
    (fused, superseded_ids)
}
fn build_chunk_nodes(
chunks: &[CodeChunk],
ns_string: &Option<String>,
contains_weight: f64,
now: chrono::DateTime<chrono::Utc>,
) -> (Vec<GraphNode>, Vec<Edge>) {
let mut chunk_nodes = Vec::with_capacity(chunks.len());
let mut chunk_edges = Vec::with_capacity(chunks.len() * 2);
for chunk in chunks {
let chunk_id = format!("chunk:{}:{}", chunk.file_path, chunk.index);
let mut payload = HashMap::new();
payload.insert(
"file_path".to_string(),
serde_json::Value::String(chunk.file_path.clone()),
);
payload.insert(
"line_start".to_string(),
serde_json::json!(chunk.line_start),
);
payload.insert("line_end".to_string(), serde_json::json!(chunk.line_end));
payload.insert(
"node_kind".to_string(),
serde_json::Value::String(chunk.node_kind.clone()),
);
payload.insert(
"non_ws_chars".to_string(),
serde_json::json!(chunk.non_ws_chars),
);
if let Some(ref parent) = chunk.parent_symbol {
payload.insert(
"parent_symbol".to_string(),
serde_json::Value::String(parent.clone()),
);
}
chunk_nodes.push(GraphNode {
id: chunk_id.clone(),
kind: NodeKind::Chunk,
label: format!(
"chunk:{}:{}..{}",
chunk.file_path, chunk.line_start, chunk.line_end
),
payload,
centrality: 0.0,
memory_id: None,
namespace: ns_string.clone(),
valid_from: None,
valid_to: None,
});
let file_node_id = format!("file:{}", chunk.file_path);
chunk_edges.push(Edge {
id: format!("contains:{file_node_id}->{chunk_id}"),
src: file_node_id,
dst: chunk_id.clone(),
relationship: RelationshipType::Contains,
weight: contains_weight,
valid_from: Some(now),
valid_to: None,
properties: HashMap::new(),
created_at: now,
});
if let Some(ref parent_sym) = chunk.parent_symbol {
let parent_node_id = format!("sym:{parent_sym}");
chunk_edges.push(Edge {
id: format!("contains:{parent_node_id}->{chunk_id}"),
src: parent_node_id,
dst: chunk_id,
relationship: RelationshipType::Contains,
weight: contains_weight,
valid_from: Some(now),
valid_to: None,
properties: HashMap::new(),
created_at: now,
});
}
}
(chunk_nodes, chunk_edges)
}
/// Embed symbol and chunk texts in batches and persist the vectors to both
/// durable storage and the in-memory vector index. Returns
/// `(symbols_embedded, chunks_embedded)`. No-ops (returning zeros) when the
/// embedding backend is unavailable; per-batch failures are logged and
/// skipped rather than aborting the pass.
fn embed_and_persist(
&self,
symbols: &[Symbol],
chunks: &[CodeChunk],
edges: &[ResolvedEdge],
on_progress: impl Fn(usize, usize),
) -> Result<(usize, usize), CodememError> {
let mut symbols_embedded = 0usize;
let mut chunks_embedded = 0usize;
if !self.embeddings_ready() {
return Ok((0, 0));
}
// (id, text) pairs for symbols; text is enriched with edge context.
let sym_texts: Vec<(String, String)> = symbols
.iter()
.map(|sym| {
let id = format!("sym:{}", sym.qualified_name);
let text = self.enrich_symbol_text(sym, edges);
(id, text)
})
.collect();
let chunk_texts: Vec<(String, String)> = chunks
.iter()
.map(|chunk| {
let id = format!("chunk:{}:{}", chunk.file_path, chunk.index);
let text = self.enrich_chunk_text(chunk);
(id, text)
})
.collect();
let embed_batch_size = self.config.embedding.batch_size;
// Symbols first, then chunks — this ordering is what makes the per-batch
// symbol/chunk split below derivable from a running offset.
let all_pairs: Vec<(String, String)> = sym_texts.into_iter().chain(chunk_texts).collect();
let total = all_pairs.len();
let sym_count = symbols.len();
let mut done = 0usize;
for batch in all_pairs.chunks(embed_batch_size) {
let texts: Vec<&str> = batch.iter().map(|(_, t)| t.as_str()).collect();
let t0 = std::time::Instant::now();
// Hold the embeddings lock only for the duration of this batch call.
// If the backend disappeared mid-run, stop embedding entirely.
let embed_result = {
let emb = self.lock_embeddings()?;
match emb {
Some(emb_guard) => emb_guard.embed_batch(&texts),
None => break,
}
};
match embed_result {
Ok(embeddings) => {
let embed_ms = t0.elapsed().as_millis();
let t1 = std::time::Instant::now();
// Store to durable storage as borrowed (id, vector) pairs.
let pairs: Vec<(&str, &[f32])> = batch
.iter()
.zip(embeddings.iter())
.map(|((id, _), emb_vec)| (id.as_str(), emb_vec.as_slice()))
.collect();
if let Err(e) = self.storage.store_embeddings_batch(&pairs) {
tracing::warn!("Failed to batch-store embeddings: {e}");
}
let sqlite_ms = t1.elapsed().as_millis();
let t2 = std::time::Instant::now();
// Owned copies for the in-memory vector index.
let batch_items: Vec<(String, Vec<f32>)> = batch
.iter()
.zip(embeddings.into_iter())
.map(|((id, _), emb_vec)| (id.clone(), emb_vec))
.collect();
let batch_len = batch_items.len();
{
// Scope the vector-index lock to just the insert.
let mut vec = self.lock_vector()?;
if let Err(e) = vec.insert_batch(&batch_items) {
tracing::warn!("Failed to batch-insert into vector index: {e}");
}
}
let vector_ms = t2.elapsed().as_millis();
// The first `sym_count` pairs are symbols, so the portion of this
// batch below that offset counts toward symbols_embedded.
let syms_in_batch = batch_len.min(sym_count.saturating_sub(done));
symbols_embedded += syms_in_batch;
chunks_embedded += batch_len - syms_in_batch;
done += batch_len;
tracing::debug!(
"Embed batch {}: embed={embed_ms}ms sqlite={sqlite_ms}ms vector={vector_ms}ms",
batch_len
);
}
Err(e) => {
// One failed batch does not abort the remaining batches.
tracing::warn!("embed_batch failed for chunk of {} texts: {e}", batch.len());
}
}
on_progress(done, total);
}
self.save_index();
Ok((symbols_embedded, chunks_embedded))
}
}