use ahash::AHashMap;
use terraphim_service::context::{ContextConfig, ContextManager};
use terraphim_service::llm::{ChatOptions, build_llm_from_role};
use terraphim_types::{ContextItem, ContextType, ConversationId, RoleName};
#[tokio::test]
#[ignore]
async fn test_ollama_chat_with_context_real() {
    // End-to-end test: build an Ollama LLM client from a test role, attach one
    // document context item to a conversation, and verify that a chat
    // completion grounded in that context returns a non-empty response.
    let ollama_url = "http://127.0.0.1:11434";
    let client = reqwest::Client::new();
    // Probe the tags endpoint with a short timeout so the test skips instead
    // of hanging when Ollama is unreachable, and also skips when some other
    // service answers on the port with a non-success status.
    let ollama_available = client
        .get(format!("{}/api/tags", ollama_url))
        .timeout(std::time::Duration::from_secs(2))
        .send()
        .await
        .map(|resp| resp.status().is_success())
        .unwrap_or(false);
    if !ollama_available {
        eprintln!("Skipping test: Ollama not running on {}", ollama_url);
        return;
    }
    let role = create_test_ollama_role(ollama_url);
    let llm_client = build_llm_from_role(&role).expect("Should build Ollama client");
    let mut context_manager = ContextManager::new(ContextConfig::default());
    let conversation_id = context_manager
        .create_conversation("Test Chat".to_string(), RoleName::new("Test"))
        .await
        .expect("Should create conversation");
    // Single document-type context item with metadata and a relevance score.
    let context_item = ContextItem {
        id: "rust-async-1".to_string(),
        context_type: ContextType::Document,
        title: "Rust Async Programming Guide".to_string(),
        summary: Some("Guide to async programming in Rust with tokio".to_string()),
        content: "Rust async programming uses the tokio runtime for concurrent execution. Key concepts include async/await syntax, futures, and tasks.".to_string(),
        metadata: {
            let mut map = AHashMap::new();
            map.insert("source".to_string(), "rust-doc".to_string());
            map
        },
        created_at: chrono::Utc::now(),
        relevance_score: Some(95.0),
    };
    context_manager
        .add_context(&conversation_id, context_item)
        .expect("Should add context");
    let conversation = context_manager
        .get_conversation(&conversation_id)
        .expect("Should get conversation");
    // With context injection enabled, the first message must be a system
    // message embedding the context item's title and content.
    let messages_with_context =
        terraphim_service::context::build_llm_messages_with_context(&conversation, true);
    assert!(!messages_with_context.is_empty());
    let context_message = &messages_with_context[0];
    assert_eq!(context_message["role"], "system");
    let content = context_message["content"].as_str().unwrap();
    assert!(content.contains("Context Information:"));
    assert!(content.contains("### Rust Async Programming Guide"));
    assert!(content.contains("tokio runtime"));
    // Append the user question after the system/context messages and run a
    // real completion against the local model.
    let mut messages = messages_with_context;
    messages.push(serde_json::json!({
        "role": "user",
        "content": "How do I create async tasks in Rust?"
    }));
    let chat_opts = ChatOptions {
        max_tokens: Some(512),
        temperature: Some(0.7),
    };
    let response = llm_client
        .chat_completion(messages, chat_opts)
        .await
        .expect("Chat completion should succeed");
    assert!(
        !response.is_empty(),
        "Should get non-empty response from Ollama"
    );
}
#[tokio::test]
#[ignore]
async fn test_ollama_multi_context_chat() {
    // End-to-end test with a DevOps-flavoured conversation: attach a Docker
    // context item and verify a real Ollama chat completion succeeds.
    // NOTE(review): despite the name, only one context item is added here —
    // consider adding a second item or renaming the test.
    let ollama_url = "http://127.0.0.1:11434";
    let client = reqwest::Client::new();
    // Probe with a short timeout so the test skips instead of hanging when
    // Ollama is unreachable, and require a success status so an unrelated
    // service on the port does not count as "available".
    let ollama_available = client
        .get(format!("{}/api/tags", ollama_url))
        .timeout(std::time::Duration::from_secs(2))
        .send()
        .await
        .map(|resp| resp.status().is_success())
        .unwrap_or(false);
    if !ollama_available {
        eprintln!("Skipping test: Ollama not running on {}", ollama_url);
        return;
    }
    let role = create_test_ollama_role(ollama_url);
    let llm_client = build_llm_from_role(&role).expect("Should build Ollama client");
    let mut context_manager = ContextManager::new(ContextConfig::default());
    let conversation_id = context_manager
        .create_conversation("Docker Chat".to_string(), RoleName::new("DevOps"))
        .await
        .expect("Should create conversation");
    let context_item = ContextItem {
        id: "docker-1".to_string(),
        context_type: ContextType::Document,
        title: "Docker Best Practices".to_string(),
        summary: Some("Best practices for Docker containerization".to_string()),
        content: "Docker containers provide lightweight virtualization. Best practices include multi-stage builds, minimal base images, and proper layer caching.".to_string(),
        metadata: {
            let mut map = AHashMap::new();
            map.insert("category".to_string(), "devops".to_string());
            map
        },
        created_at: chrono::Utc::now(),
        relevance_score: Some(88.0),
    };
    context_manager
        .add_context(&conversation_id, context_item)
        .expect("Should add context");
    let conversation = context_manager
        .get_conversation(&conversation_id)
        .expect("Should get conversation");
    // Build the system/context messages and append the user question.
    let messages_with_context =
        terraphim_service::context::build_llm_messages_with_context(&conversation, true);
    let mut messages = messages_with_context;
    messages.push(serde_json::json!({
        "role": "user",
        "content": "What are the best practices for Docker containers?"
    }));
    let chat_opts = ChatOptions {
        max_tokens: Some(1024),
        temperature: Some(0.8),
    };
    let response = llm_client
        .chat_completion(messages, chat_opts)
        .await
        .expect("Chat completion should succeed");
    assert!(
        !response.is_empty(),
        "Should get non-empty response from Ollama"
    );
}
#[tokio::test]
async fn test_context_formatting() {
    // Verifies that two context items of different types (Document and
    // SearchResult) are rendered into exactly one system message containing
    // the expected section headings and content snippets.
    let mut manager = ContextManager::new(ContextConfig::default());
    let convo_id = manager
        .create_conversation("Format Test".to_string(), RoleName::new("Test"))
        .await
        .expect("Should create conversation");
    // First item: a plain document with no metadata.
    let doc_item = ContextItem {
        id: "doc-1".to_string(),
        context_type: ContextType::Document,
        title: "API Documentation".to_string(),
        summary: Some("REST API documentation".to_string()),
        content: "The API supports GET, POST, PUT, and DELETE operations.".to_string(),
        metadata: AHashMap::new(),
        created_at: chrono::Utc::now(),
        relevance_score: Some(92.0),
    };
    // Second item: a search result carrying the originating query in metadata.
    let mut search_meta = AHashMap::new();
    search_meta.insert("query".to_string(), "API examples".to_string());
    let search_item = ContextItem {
        id: "search-1".to_string(),
        context_type: ContextType::SearchResult,
        title: "Search Result: API Examples".to_string(),
        summary: Some("Code examples for API usage".to_string()),
        content: "Example: curl -X GET https://api.example.com/users".to_string(),
        metadata: search_meta,
        created_at: chrono::Utc::now(),
        relevance_score: Some(85.0),
    };
    for item in [doc_item, search_item] {
        manager
            .add_context(&convo_id, item)
            .expect("Should add context");
    }
    let conversation = manager
        .get_conversation(&convo_id)
        .expect("Should get conversation");
    let messages = terraphim_service::context::build_llm_messages_with_context(&conversation, true);
    // Both items collapse into a single system message.
    assert_eq!(messages.len(), 1);
    let context_message = &messages[0];
    assert_eq!(context_message["role"], "system");
    let content = context_message["content"].as_str().unwrap();
    assert!(content.contains("Context Information:"));
    assert!(content.contains("### API Documentation"));
    assert!(content.contains("### Search Result: API Examples"));
    assert!(content.contains("GET, POST, PUT"));
    assert!(content.contains("curl -X GET"));
}
#[tokio::test]
async fn test_empty_context_handling() {
    // A conversation with no context items must produce no LLM messages,
    // whether or not context injection is requested.
    // (Removed two dead locals from the original: an unused ContextManager
    // and an unused ConversationId that played no part in the assertions.)
    let conversation =
        terraphim_types::Conversation::new("Empty Test".to_string(), RoleName::new("Test"));
    let messages = terraphim_service::context::build_llm_messages_with_context(&conversation, true);
    assert!(messages.is_empty());
    let messages =
        terraphim_service::context::build_llm_messages_with_context(&conversation, false);
    assert!(messages.is_empty());
}
fn create_test_ollama_role(base_url: &str) -> terraphim_config::Role {
    // Builds a minimal chat-enabled role pointed at a local Ollama instance.
    // Provider selection and the base URL travel in the `extra` map, which the
    // LLM builder reads when constructing the client.
    let mut extra = AHashMap::new();
    extra.insert("llm_provider".to_string(), serde_json::json!("ollama"));
    extra.insert("ollama_base_url".to_string(), serde_json::json!(base_url));
    terraphim_config::Role {
        shortname: Some("TestOllama".into()),
        name: "Test Ollama".into(),
        relevance_function: terraphim_types::RelevanceFunction::TitleScorer,
        terraphim_it: false,
        theme: "default".into(),
        kg: None,
        haystacks: vec![],
        llm_enabled: true,
        llm_api_key: None,
        llm_model: Some("gemma3:270m".to_string()),
        llm_auto_summarize: false,
        llm_chat_enabled: true,
        llm_chat_system_prompt: Some("You are a helpful assistant.".to_string()),
        llm_chat_model: Some("gemma3:270m".to_string()),
        llm_context_window: None,
        extra,
        llm_router_enabled: false,
        llm_router_config: None,
    }
}