use mockito::Server;
use openrouter_rust::{
OpenRouterClient,
models::{ListModelsParams, Model, ModelsResponse, ModelsCountResponse, PublicPricing},
};
use serde_json::json;
/// `GET /models`: the client must send the bearer token and deserialize
/// every field of a single-model response.
#[tokio::test]
async fn test_list_models() {
    let mut server = Server::new_async().await;
    // Minimal but complete model record covering all fields the client parses.
    let mock_response = json!({
        "data": [
            {
                "id": "openai/gpt-4",
                "canonical_slug": "openai/gpt-4",
                "name": "GPT-4",
                "created": 1677649963.0,
                "description": "OpenAI's GPT-4 model",
                "pricing": {"prompt": {}, "completion": {}},
                "context_length": 8192.0,
                "architecture": {
                    "input_modalities": ["text"],
                    "output_modalities": ["text"]
                },
                "top_provider": {"is_moderated": true},
                "per_request_limits": {"prompt_tokens": 8192.0, "completion_tokens": 4096.0},
                "supported_parameters": ["temperature", "max_tokens"],
                "default_parameters": {}
            }
        ]
    });
    let mock = server.mock("GET", "/models")
        .match_header("authorization", "Bearer test-key")
        .with_status(200)
        .with_header("content-type", "application/json")
        .with_body(mock_response.to_string())
        .create_async()
        .await;
    let client = OpenRouterClient::builder()
        .api_key("test-key")
        .base_url(&server.url())
        .build()
        .unwrap();
    let response = client.list_models(None).await.unwrap();
    // Fail explicitly if the client never hit the mocked endpoint with the
    // matched method/path/header, rather than relying on the unwrap above.
    mock.assert_async().await;
    assert_eq!(response.data.len(), 1);
    assert_eq!(response.data[0].id, "openai/gpt-4");
    assert_eq!(response.data[0].name, "GPT-4");
    assert_eq!(response.data[0].context_length, Some(8192.0));
}
/// `GET /models?category=...`: filter params must be encoded into the query
/// string, and the request must still carry the bearer token.
#[tokio::test]
async fn test_list_models_with_params() {
    let mut server = Server::new_async().await;
    // Match path + query so the test fails if the param is dropped or renamed.
    // Also match the auth header, consistent with the other endpoint tests.
    let mock = server.mock("GET", "/models?category=programming")
        .match_header("authorization", "Bearer test-key")
        .with_status(200)
        .with_header("content-type", "application/json")
        .with_body(json!({"data": []}).to_string())
        .create_async()
        .await;
    let client = OpenRouterClient::builder()
        .api_key("test-key")
        .base_url(&server.url())
        .build()
        .unwrap();
    let params = ListModelsParams {
        category: Some("programming".to_string()),
        ..Default::default()
    };
    let response = client.list_models(Some(params)).await.unwrap();
    // Verify the mocked request was actually made.
    mock.assert_async().await;
    assert!(response.data.is_empty());
}
/// `GET /models/count`: the numeric count in the response envelope is
/// deserialized and exposed via `response.data.count`.
#[tokio::test]
async fn test_get_models_count() {
    let mut server = Server::new_async().await;
    // Stub the count endpoint; auth header must match for the mock to fire.
    let _count_mock = server
        .mock("GET", "/models/count")
        .match_header("authorization", "Bearer test-key")
        .with_status(200)
        .with_header("content-type", "application/json")
        .with_body(
            json!({
                "data": {
                    "count": 150.0
                }
            })
            .to_string(),
        )
        .create_async()
        .await;
    let client = OpenRouterClient::builder()
        .api_key("test-key")
        .base_url(&server.url())
        .build()
        .unwrap();
    let counted = client.get_models_count().await.unwrap();
    assert_eq!(counted.data.count, 150.0);
}
/// `GET /models/user`: the user-scoped model listing is fetched with the
/// bearer token and deserialized into the same model shape.
#[tokio::test]
async fn test_list_models_user() {
    let mut server = Server::new_async().await;
    // One multimodal model record in the user listing.
    let body = json!({
        "data": [
            {
                "id": "anthropic/claude-3.5-sonnet",
                "canonical_slug": "anthropic/claude-3.5-sonnet",
                "name": "Claude 3.5 Sonnet",
                "created": 1677649963.0,
                "description": "Anthropic's Claude 3.5 Sonnet",
                "pricing": {"prompt": {}, "completion": {}},
                "architecture": {
                    "input_modalities": ["text", "image"],
                    "output_modalities": ["text"]
                },
                "top_provider": {"is_moderated": false},
                "per_request_limits": {"prompt_tokens": 200000.0, "completion_tokens": 4096.0},
                "supported_parameters": ["temperature", "max_tokens"],
                "default_parameters": {}
            }
        ]
    });
    let _user_models_mock = server
        .mock("GET", "/models/user")
        .match_header("authorization", "Bearer test-key")
        .with_status(200)
        .with_header("content-type", "application/json")
        .with_body(body.to_string())
        .create_async()
        .await;
    let client = OpenRouterClient::builder()
        .api_key("test-key")
        .base_url(&server.url())
        .build()
        .unwrap();
    let listing = client.list_models_user().await.unwrap();
    assert_eq!(listing.data.len(), 1);
    assert_eq!(listing.data[0].id, "anthropic/claude-3.5-sonnet");
}
/// `ListModelsParams::default()` leaves every filter unset.
#[test]
fn test_list_models_params_default() {
    let ListModelsParams {
        category,
        supported_parameters,
        ..
    } = ListModelsParams::default();
    assert!(category.is_none());
    assert!(supported_parameters.is_none());
}