use serde_json::json;
use vtcode_core::config::constants::models;
use vtcode_core::config::models::Provider;
use vtcode_core::config::types::VerbosityLevel;
use vtcode_core::llm::{
factory::{LLMFactory, create_provider_for_model, infer_provider},
provider::{LLMProvider, LLMRequest, Message, MessageRole, ToolDefinition},
providers::{
AnthropicProvider, GeminiProvider, LmStudioProvider, MoonshotProvider, OllamaProvider,
OpenAIProvider, OpenRouterProvider,
},
};
#[test]
fn test_provider_factory_creation() {
    // Every provider id the factory is expected to have registered.
    let expected = [
        "gemini",
        "openai",
        "anthropic",
        "copilot",
        "openrouter",
        "moonshot",
        "deepseek",
        "zai",
        "ollama",
        "lmstudio",
        "minimax",
        "huggingface",
        "openresponses",
    ];
    let registered = LLMFactory::new().list_providers();
    for name in expected {
        assert!(
            registered.contains(&name.to_string()),
            "factory should register the `{name}` provider"
        );
    }
}
#[test]
fn test_provider_auto_detection() {
    let factory = LLMFactory::new();
    // (model name, provider id the factory should infer from it)
    let cases: &[(&str, &str)] = &[
        ("gpt-oss-20b", "openai"),
        ("gpt-5-mini", "openai"),
        ("o3", "openai"),
        ("o4-mini", "openai"),
        (models::CLAUDE_SONNET_4_6, "anthropic"),
        ("claude-sonnet-4-20250514", "anthropic"),
        ("claude-opus-4-1-20250805", "anthropic"),
        ("gemini-3-flash-preview", "gemini"),
        ("gemini-3.1-pro-preview", "gemini"),
        (models::OPENROUTER_QWEN3_CODER, "openrouter"),
        // Vendor-prefixed ids route through OpenRouter, not the vendor itself.
        ("anthropic/claude-sonnet-4.6", "openrouter"),
        (models::lmstudio::META_LLAMA_31_8B_INSTRUCT, "lmstudio"),
        ("kimi-k2.5", "moonshot"),
    ];
    for (model, provider) in cases {
        assert_eq!(
            factory.provider_from_model(model),
            Some((*provider).to_string()),
            "model `{model}` should map to provider `{provider}`"
        );
    }
    // A model no provider recognizes yields no match.
    assert_eq!(factory.provider_from_model("unknown-model"), None);
}
#[test]
fn infer_provider_respects_override_and_model() {
    // An explicit override wins even when the model name suggests another provider.
    assert_eq!(
        infer_provider(Some("openai"), "gemini-3-flash-preview"),
        Some(Provider::OpenAI)
    );
    // Without an override, the provider is inferred from the model name alone.
    assert_eq!(
        infer_provider(None, models::CLAUDE_SONNET_4_6),
        Some(Provider::Anthropic)
    );
    // An unrecognized model with no override produces no inference.
    assert_eq!(infer_provider(None, "unknown-model"), None);
}
#[test]
fn test_provider_creation() {
    // (model name, API key) pairs for which provider creation must succeed.
    let ok_cases: Vec<(&str, String)> = vec![
        ("gemini-3-flash-preview", "test_key".to_string()),
        (models::GPT_OSS_20B, "test_key".to_string()),
        // Reasoning-class OpenAI model.
        ("o4-mini", "test_key".to_string()),
        (models::CLAUDE_SONNET_4_6, "test_key".to_string()),
        (models::OPENROUTER_QWEN3_CODER, "test_key".to_string()),
        ("kimi-k2.5", "test_key".to_string()),
        // Ollama runs locally and needs no API key.
        (models::ollama::DEFAULT_MODEL, String::new()),
    ];
    for (model, key) in ok_cases {
        assert!(
            create_provider_for_model(model, key, None, None).is_ok(),
            "provider creation should succeed for `{model}`"
        );
    }
    // A model that maps to no provider must fail.
    assert!(create_provider_for_model("invalid-model", "test_key".to_string(), None, None).is_err());
}
#[test]
fn test_unified_client_creation() {
    // (model name, API key, provider name the created client must report)
    let cases: Vec<(&str, String, &str)> = vec![
        ("gemini-3-flash-preview", "test_key".to_string(), "gemini"),
        ("gpt-oss-20b", "test_key".to_string(), "openai"),
        (models::CLAUDE_SONNET_4_6, "test_key".to_string(), "anthropic"),
        (
            models::OPENROUTER_QWEN3_CODER,
            "test_key".to_string(),
            "openrouter",
        ),
        ("kimi-k2.5", "test_key".to_string(), "moonshot"),
        // Local providers (Ollama, LM Studio) take an empty API key.
        (models::ollama::DEFAULT_MODEL, String::new(), "ollama"),
        (models::lmstudio::DEFAULT_MODEL, String::new(), "lmstudio"),
    ];
    for (model, key, expected) in cases {
        let client = create_provider_for_model(model, key, None, None);
        assert!(client.is_ok(), "client creation failed for `{model}`");
        if let Ok(client) = client {
            assert_eq!(client.name(), expected);
        }
    }
}
#[test]
fn test_message_creation() {
    // User messages carry the text, the User role, and no tool calls.
    let message = Message::user("Hello, world!".to_string());
    assert_eq!(message.content.as_text(), "Hello, world!");
    assert!(matches!(message.role, MessageRole::User));
    assert!(message.tool_calls.is_none());

    // Assistant messages store their text under the Assistant role.
    let message = Message::assistant("Hi there!".to_string());
    assert_eq!(message.content.as_text(), "Hi there!");
    assert!(matches!(message.role, MessageRole::Assistant));

    // System messages hold the system prompt under the System role.
    let message = Message::system("You are a helpful assistant".to_string());
    assert_eq!(message.content.as_text(), "You are a helpful assistant");
    assert!(matches!(message.role, MessageRole::System));
}
#[test]
#[ignore]
fn test_provider_supported_models() {
    // Gemini advertises at least the flash and pro preview models.
    let gemini_models = GeminiProvider::new("test_key".to_string()).supported_models();
    assert!(gemini_models.contains(&"gemini-3-flash-preview".to_string()));
    assert!(gemini_models.contains(&"gemini-3.1-pro-preview".to_string()));
    assert!(gemini_models.len() >= 2);

    // OpenAI advertises at least its oss and mini models.
    let openai_models = OpenAIProvider::new("test_key".to_string()).supported_models();
    assert!(openai_models.contains(&"gpt-oss-20b".to_string()));
    assert!(openai_models.contains(&"gpt-5-mini".to_string()));
    assert!(openai_models.len() >= 2);

    // Anthropic advertises the current Sonnet, Haiku, and Opus constants.
    let anthropic_models = AnthropicProvider::new("test_key".to_string()).supported_models();
    assert!(anthropic_models.contains(&models::CLAUDE_SONNET_4_6.to_string()));
    assert!(anthropic_models.contains(&models::CLAUDE_HAIKU_4_5.to_string()));
    assert!(anthropic_models.contains(&models::CLAUDE_OPUS_4_7.to_string()));
    assert!(anthropic_models.len() >= 3);

    // OpenRouter exposes both its own constant and vendor-prefixed ids.
    let openrouter_models = OpenRouterProvider::new("test_key".to_string()).supported_models();
    assert!(openrouter_models.contains(&models::OPENROUTER_QWEN3_CODER.to_string()));
    assert!(openrouter_models.contains(&"anthropic/claude-sonnet-4.6".to_string()));
    assert!(openrouter_models.len() >= 2);

    // Moonshot exposes exactly one model.
    let moonshot_models = MoonshotProvider::new("test_key".to_string()).supported_models();
    assert!(moonshot_models.contains(&"kimi-k2.5".to_string()));
    assert_eq!(moonshot_models.len(), 1);
}
#[test]
fn test_provider_names() {
    // Each concrete provider must report its canonical id via `name()`.
    assert_eq!(GeminiProvider::new("test_key".to_string()).name(), "gemini");
    assert_eq!(OpenAIProvider::new("test_key".to_string()).name(), "openai");
    assert_eq!(
        AnthropicProvider::new("test_key".to_string()).name(),
        "anthropic"
    );
    assert_eq!(
        OpenRouterProvider::new("test_key".to_string()).name(),
        "openrouter"
    );
    assert_eq!(
        MoonshotProvider::new("test_key".to_string()).name(),
        "moonshot"
    );
    // Local providers take an empty key.
    assert_eq!(OllamaProvider::new(String::new()).name(), "ollama");
    assert_eq!(LmStudioProvider::new(String::new()).name(), "lmstudio");
}
#[test]
#[ignore]
fn test_request_validation() {
    // Builds a minimal single-user-message request for the given model.
    fn request_for(model: &str) -> LLMRequest {
        LLMRequest {
            messages: vec![Message::user("test".to_string())],
            model: model.to_string(),
            verbosity: Some(VerbosityLevel::default()),
            ..Default::default()
        }
    }

    let gemini = GeminiProvider::new("test_key".to_string());
    let openai = OpenAIProvider::new("test_key".to_string());
    let anthropic = AnthropicProvider::new("test_key".to_string());
    let _openrouter = OpenRouterProvider::new("test_key".to_string());

    // Each provider accepts a request naming one of its own models.
    assert!(
        gemini
            .validate_request(&request_for("gemini-3-flash-preview"))
            .is_ok()
    );
    assert!(openai.validate_request(&request_for("gpt-oss-20b")).is_ok());
    assert!(
        anthropic
            .validate_request(&request_for(models::CLAUDE_SONNET_4_6))
            .is_ok()
    );
    // Legacy dated Anthropic model ids remain valid.
    assert!(
        anthropic
            .validate_request(&request_for("claude-sonnet-4-20250514"))
            .is_ok()
    );

    // Every provider rejects a model name none of them recognize.
    let invalid_request = request_for("invalid-model");
    assert!(gemini.validate_request(&invalid_request).is_err());
    assert!(openai.validate_request(&invalid_request).is_err());
    assert!(anthropic.validate_request(&invalid_request).is_err());
}
#[test]
fn test_anthropic_tool_message_handling() {
    // A conversation consisting solely of a tool-result message must validate.
    let request = LLMRequest {
        messages: vec![Message::tool_response(
            "tool_123".to_string(),
            "Tool result content".to_string(),
        )],
        model: models::CLAUDE_SONNET_4_6.to_string(),
        verbosity: Some(VerbosityLevel::default()),
        ..Default::default()
    };
    let provider = AnthropicProvider::new("test_key".to_string());
    assert!(provider.validate_request(&request).is_ok());
}
#[test]
fn test_backward_compatibility() {
    use std::str::FromStr;

    use vtcode_core::llm::make_client;
    use vtcode_core::models::ModelId;

    // The legacy `make_client` entry point still yields a usable client
    // that reports a non-empty model id.
    let model = ModelId::from_str("gemini-3-flash-preview").unwrap();
    let client = make_client("test_key".to_string(), model).expect("client should be created");
    assert!(!client.model_id().is_empty());
}
#[test]
fn test_tool_definition_creation() {
    // JSON Schema describing the tool's single required parameter.
    let schema = json!({
        "type": "object",
        "properties": {
            "location": {"type": "string", "description": "The location to get weather for"}
        },
        "required": ["location"]
    });
    let tool = ToolDefinition::function(
        "get_weather".to_string(),
        "Get weather for a location".to_string(),
        schema,
    );

    // The definition exposes name, description, and an object-typed schema.
    assert_eq!(tool.function_name(), "get_weather");
    let function = tool.function.as_ref().unwrap();
    assert_eq!(function.description, "Get weather for a location");
    assert!(function.parameters.is_object());
}