use crate::{EmbeddableContent, EmbeddingConfig, Vector};
use anyhow::{anyhow, Result};
use scirs2_core::random::Random;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

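/// Configuration for a HuggingFace-style embedding model.
///
/// A sketch of overriding selected fields (all field names exist on this
/// struct; the values are illustrative only):
///
/// ```ignore
/// let config = HuggingFaceConfig {
///     model_name: "sentence-transformers/all-mpnet-base-v2".to_string(),
///     batch_size: 16,
///     ..Default::default()
/// };
/// ```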
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HuggingFaceConfig {
    pub model_name: String,
    pub cache_dir: Option<String>,
    pub device: String,
    pub batch_size: usize,
    pub max_length: usize,
    pub pooling_strategy: PoolingStrategy,
    pub trust_remote_code: bool,
}

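/// Strategy for pooling token-level embeddings into a single fixed-size vector.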
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum PoolingStrategy {
    /// Use the embedding of the leading CLS token.
    Cls,
    /// Average the token embeddings.
    Mean,
    /// Take the element-wise maximum over token embeddings.
    Max,
    /// Combine token embeddings using attention weights.
    AttentionWeighted,
}

impl Default for HuggingFaceConfig {
    fn default() -> Self {
        Self {
            model_name: "sentence-transformers/all-MiniLM-L6-v2".to_string(),
            cache_dir: None,
            device: "cpu".to_string(),
            batch_size: 32,
            max_length: 512,
            pooling_strategy: PoolingStrategy::Mean,
            trust_remote_code: false,
        }
    }
}

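/// Embedder backed by a HuggingFace sentence-transformer style model.
///
/// Note: model loading and inference are currently simulated (embeddings are
/// generated deterministically from a hash of the input text), so this type
/// exercises the API surface without downloading any model weights.
///
/// A minimal usage sketch (assumes an async runtime such as Tokio):
///
/// ```ignore
/// let mut embedder = HuggingFaceEmbedder::with_default_config()?;
/// let vector = embedder
///     .embed(&EmbeddableContent::Text("Hello, world!".to_string()))
///     .await?;
/// ```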
#[derive(Debug)]
pub struct HuggingFaceEmbedder {
    config: HuggingFaceConfig,
    model_cache: HashMap<String, ModelInfo>,
}

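/// Cached metadata about a model known to the embedder.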
#[derive(Debug, Clone)]
struct ModelInfo {
    dimensions: usize,
    max_sequence_length: usize,
    model_type: String,
    loaded: bool,
}

impl HuggingFaceEmbedder {
    /// Creates an embedder with the given configuration.
    pub fn new(config: HuggingFaceConfig) -> Result<Self> {
        Ok(Self {
            config,
            model_cache: HashMap::new(),
        })
    }

    /// Creates an embedder with the default configuration
    /// (`sentence-transformers/all-MiniLM-L6-v2` on CPU).
    pub fn with_default_config() -> Result<Self> {
        Self::new(HuggingFaceConfig::default())
    }

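    /// Ensures metadata for `model_name` is present in the local cache.
    ///
    /// Loading is currently simulated: no weights are downloaded, only the
    /// model's dimensionality and limits are recorded.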
    pub async fn load_model(&mut self, model_name: &str) -> Result<()> {
        if self.model_cache.contains_key(model_name) {
            return Ok(());
        }

        let model_info = self.get_model_info(model_name).await?;
        self.model_cache.insert(model_name.to_string(), model_info);

        tracing::info!("Loaded HuggingFace model: {}", model_name);
        Ok(())
    }

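    /// Looks up dimensionality and limits for a known model name, falling
    /// back to 768 dimensions for unrecognized models.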
    async fn get_model_info(&self, model_name: &str) -> Result<ModelInfo> {
        // Known model dimensionalities; unknown models fall back to 768.
        let dimensions = match model_name {
            "sentence-transformers/all-MiniLM-L6-v2" => 384,
            "sentence-transformers/all-mpnet-base-v2" => 768,
            "microsoft/DialoGPT-medium" => 1024,
            "bert-base-uncased" => 768,
            "distilbert-base-uncased" => 768,
            _ => 768,
        };

        Ok(ModelInfo {
            dimensions,
            max_sequence_length: self.config.max_length,
            model_type: "transformer".to_string(),
            loaded: true,
        })
    }

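    /// Embeds a batch of contents with the configured model, processing them
    /// in chunks of `batch_size`.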
    pub async fn embed_batch(&mut self, contents: &[EmbeddableContent]) -> Result<Vec<Vector>> {
        if contents.is_empty() {
            return Ok(vec![]);
        }

        let model_name = self.config.model_name.clone();
        self.load_model(&model_name).await?;

        let model_info = self
            .model_cache
            .get(&self.config.model_name)
            .ok_or_else(|| anyhow!("Model not loaded: {}", self.config.model_name))?;

        let mut embeddings = Vec::with_capacity(contents.len());

        for chunk in contents.chunks(self.config.batch_size) {
            let texts: Vec<String> = chunk
                .iter()
                .map(|content| self.content_to_text(content))
                .collect();

            let batch_embeddings = self.generate_embeddings(&texts, model_info).await?;
            embeddings.extend(batch_embeddings);
        }

        Ok(embeddings)
    }

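    /// Embeds a single content item by delegating to `embed_batch`.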
    pub async fn embed(&mut self, content: &EmbeddableContent) -> Result<Vector> {
        let embeddings = self.embed_batch(&[content.clone()]).await?;
        embeddings
            .into_iter()
            .next()
            .ok_or_else(|| anyhow!("Failed to generate embedding"))
    }

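    /// Flattens embeddable content into plain text; RDF resources are
    /// rendered as their URI, label, description, and `property: values`
    /// pairs joined by spaces.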
    fn content_to_text(&self, content: &EmbeddableContent) -> String {
        match content {
            EmbeddableContent::Text(text) => text.clone(),
            EmbeddableContent::RdfResource {
                uri,
                label,
                description,
                properties,
            } => {
                let mut text_parts = vec![uri.clone()];

                if let Some(label) = label {
                    text_parts.push(label.clone());
                }

                if let Some(desc) = description {
                    text_parts.push(desc.clone());
                }

                for (prop, values) in properties {
                    text_parts.push(format!("{}: {}", prop, values.join(", ")));
                }

                text_parts.join(" ")
            }
            EmbeddableContent::SparqlQuery(query) => query.clone(),
            EmbeddableContent::GraphPattern(pattern) => pattern.clone(),
        }
    }

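    /// Generates one embedding per input text. Currently delegates to
    /// `simulate_embedding` rather than running real model inference.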
    async fn generate_embeddings(
        &self,
        texts: &[String],
        model_info: &ModelInfo,
    ) -> Result<Vec<Vector>> {
        let mut embeddings = Vec::with_capacity(texts.len());

        // Placeholder for real transformer inference: each text gets a
        // deterministic simulated embedding of the model's dimensionality.
        for text in texts {
            let embedding = self.simulate_embedding(text, model_info.dimensions)?;
            embeddings.push(embedding);
        }

        Ok(embeddings)
    }

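    /// Produces a deterministic pseudo-embedding by seeding an RNG with a
    /// hash of the text; this stands in for real transformer inference.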
    fn simulate_embedding(&self, text: &str, dimensions: usize) -> Result<Vector> {
        use scirs2_core::random::Rng;
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};

        // Seed the RNG from a hash of the text so identical inputs always
        // produce identical embeddings.
        let mut hasher = DefaultHasher::new();
        text.hash(&mut hasher);
        let seed = hasher.finish();

        let mut rng = Random::seed(seed);

        let mut embedding = vec![0.0f32; dimensions];
        for value in embedding.iter_mut() {
            *value = rng.gen_range(-1.0..1.0);
        }

        // L2-normalize when mean pooling is configured.
        if matches!(self.config.pooling_strategy, PoolingStrategy::Mean) {
            let norm = embedding.iter().map(|x| x * x).sum::<f32>().sqrt();
            if norm > 0.0 {
                for x in &mut embedding {
                    *x /= norm;
                }
            }
        }

        Ok(Vector::new(embedding))
    }

    /// Returns the names of all models currently in the cache.
    pub fn get_cached_models(&self) -> Vec<String> {
        self.model_cache.keys().cloned().collect()
    }

    /// Clears all cached model metadata.
    pub fn clear_cache(&mut self) {
        self.model_cache.clear();
    }

    /// Returns the embedding dimensionality of a cached model, if loaded.
    pub fn get_model_dimensions(&self, model_name: &str) -> Option<usize> {
        self.model_cache.get(model_name).map(|info| info.dimensions)
    }
}

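/// Manages multiple named `HuggingFaceEmbedder` instances and routes
/// embedding requests to them by name, with a configurable default model.
///
/// A minimal usage sketch (assumes an async runtime such as Tokio):
///
/// ```ignore
/// let mut manager = HuggingFaceModelManager::new("default".to_string());
/// manager.add_model("default".to_string(), HuggingFaceConfig::default())?;
/// let vector = manager
///     .embed(&EmbeddableContent::Text("Hello".to_string()))
///     .await?;
/// ```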
#[derive(Debug)]
pub struct HuggingFaceModelManager {
    embedders: HashMap<String, HuggingFaceEmbedder>,
    default_model: String,
}

impl HuggingFaceModelManager {
    /// Creates a manager whose `embed` calls use `default_model`.
    pub fn new(default_model: String) -> Self {
        Self {
            embedders: HashMap::new(),
            default_model,
        }
    }

    /// Registers an embedder under `name` using the given configuration.
    pub fn add_model(&mut self, name: String, config: HuggingFaceConfig) -> Result<()> {
        let embedder = HuggingFaceEmbedder::new(config)?;
        self.embedders.insert(name, embedder);
        Ok(())
    }

    /// Embeds content with the named model, failing if it was never added.
    pub async fn embed_with_model(
        &mut self,
        model_name: &str,
        content: &EmbeddableContent,
    ) -> Result<Vector> {
        let embedder = self
            .embedders
            .get_mut(model_name)
            .ok_or_else(|| anyhow!("Model not found: {}", model_name))?;
        embedder.embed(content).await
    }

    /// Embeds content with the default model.
    pub async fn embed(&mut self, content: &EmbeddableContent) -> Result<Vector> {
        let default_model = self.default_model.clone();
        self.embed_with_model(&default_model, content).await
    }

    /// Lists the names of all registered models.
    pub fn list_models(&self) -> Vec<String> {
        self.embedders.keys().cloned().collect()
    }
}

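/// Maps the generic `EmbeddingConfig` onto HuggingFace-specific settings:
/// `normalize == true` selects mean pooling, otherwise CLS pooling is used,
/// and batch size and device fall back to the defaults.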
impl From<EmbeddingConfig> for HuggingFaceConfig {
    fn from(config: EmbeddingConfig) -> Self {
        Self {
            model_name: config.model_name,
            cache_dir: None,
            device: "cpu".to_string(),
            batch_size: 32,
            max_length: config.max_sequence_length,
            pooling_strategy: if config.normalize {
                PoolingStrategy::Mean
            } else {
                PoolingStrategy::Cls
            },
            trust_remote_code: false,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn test_huggingface_embedder_creation() {
        let embedder = HuggingFaceEmbedder::with_default_config();
        assert!(embedder.is_ok());
    }

    #[tokio::test]
    async fn test_model_loading() {
        let mut embedder = HuggingFaceEmbedder::with_default_config().unwrap();
        let result = embedder
            .load_model("sentence-transformers/all-MiniLM-L6-v2")
            .await;
        assert!(result.is_ok());

        let dimensions = embedder.get_model_dimensions("sentence-transformers/all-MiniLM-L6-v2");
        assert_eq!(dimensions, Some(384));
    }

    #[tokio::test]
    async fn test_text_embedding() {
        let mut embedder = HuggingFaceEmbedder::with_default_config().unwrap();
        let content = EmbeddableContent::Text("Hello, world!".to_string());

        let result = embedder.embed(&content).await;
        assert!(result.is_ok());

        let embedding = result.unwrap();
        assert_eq!(embedding.dimensions, 384);
    }

    #[tokio::test]
    async fn test_rdf_resource_embedding() {
        let mut embedder = HuggingFaceEmbedder::with_default_config().unwrap();
        let mut properties = HashMap::new();
        properties.insert("type".to_string(), vec!["Person".to_string()]);

        let content = EmbeddableContent::RdfResource {
            uri: "http://example.org/person/1".to_string(),
            label: Some("John Doe".to_string()),
            description: Some("A person in the knowledge graph".to_string()),
            properties,
        };

        let result = embedder.embed(&content).await;
        assert!(result.is_ok());
    }

    #[tokio::test]
    async fn test_batch_embedding() {
        let mut embedder = HuggingFaceEmbedder::with_default_config().unwrap();
        let contents = vec![
            EmbeddableContent::Text("First text".to_string()),
            EmbeddableContent::Text("Second text".to_string()),
            EmbeddableContent::Text("Third text".to_string()),
        ];

        let result = embedder.embed_batch(&contents).await;
        assert!(result.is_ok());

        let embeddings = result.unwrap();
        assert_eq!(embeddings.len(), 3);
    }

    #[tokio::test]
    async fn test_model_manager() {
        let mut manager = HuggingFaceModelManager::new("default".to_string());
        let config = HuggingFaceConfig::default();

        let result = manager.add_model("default".to_string(), config);
        assert!(result.is_ok());

        let models = manager.list_models();
        assert!(models.contains(&"default".to_string()));
    }

    #[test]
    fn test_config_conversion() {
        let embedding_config = EmbeddingConfig {
            model_name: "test-model".to_string(),
            dimensions: 768,
            max_sequence_length: 512,
            normalize: true,
        };

        let hf_config: HuggingFaceConfig = embedding_config.into();
        assert_eq!(hf_config.model_name, "test-model");
        assert_eq!(hf_config.max_length, 512);
        assert!(matches!(hf_config.pooling_strategy, PoolingStrategy::Mean));
    }

    #[test]
    fn test_pooling_strategies() {
        let strategies = vec![
            PoolingStrategy::Cls,
            PoolingStrategy::Mean,
            PoolingStrategy::Max,
            PoolingStrategy::AttentionWeighted,
        ];

        for strategy in strategies {
            let config = HuggingFaceConfig {
                pooling_strategy: strategy,
                ..Default::default()
            };
            assert!(matches!(
                config.pooling_strategy,
                PoolingStrategy::Cls
                    | PoolingStrategy::Mean
                    | PoolingStrategy::Max
                    | PoolingStrategy::AttentionWeighted
            ));
        }
    }
}