use std::fmt;
use std::str::FromStr;
use std::time::Duration;
use reqwest::blocking::Client;
use serde_json::Value;
use super::SummarizeError;
/// Maximum time to wait while establishing a TCP connection to a provider API.
const CONNECT_TIMEOUT: Duration = Duration::from_secs(30);
/// Maximum time for the whole request/response cycle; generous because LLM
/// completions can take a long time to produce.
const REQUEST_TIMEOUT: Duration = Duration::from_secs(120);
/// The LLM backends that can generate session summaries.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SummaryProviderKind {
    Anthropic,
    OpenAI,
    OpenRouter,
}

impl fmt::Display for SummaryProviderKind {
    /// Renders the provider as its lowercase configuration name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let label = match self {
            Self::Anthropic => "anthropic",
            Self::OpenAI => "openai",
            Self::OpenRouter => "openrouter",
        };
        f.write_str(label)
    }
}

impl FromStr for SummaryProviderKind {
    type Err = String;

    /// Parses a provider name case-insensitively. Unknown names produce an
    /// error message listing the accepted values.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let normalized = s.to_lowercase();
        match normalized.as_str() {
            "anthropic" => Ok(Self::Anthropic),
            "openai" => Ok(Self::OpenAI),
            "openrouter" => Ok(Self::OpenRouter),
            other => Err(format!("Unknown summary provider: '{other}'. Expected one of: anthropic, openai, openrouter")),
        }
    }
}
/// The result of a successful summarization call.
#[derive(Debug, Clone)]
pub struct SummaryResponse {
    // The summary text extracted from the provider's response payload.
    pub content: String,
}
/// A blocking LLM backend that turns a prompt pair into a summary.
pub trait SummaryProvider {
    /// Sends `system_prompt` and `user_content` to the backing model and
    /// returns the generated summary text.
    ///
    /// # Errors
    /// Implementations return a `SummarizeError` when the HTTP request cannot
    /// be sent, the server responds with a non-success status, or the
    /// response body cannot be parsed.
    fn summarize(
        &self,
        system_prompt: &str,
        user_content: &str,
    ) -> Result<SummaryResponse, SummarizeError>;
}
/// Summary provider backed by the Anthropic Messages API.
pub(crate) struct AnthropicProvider {
    client: Client,
    api_key: String,
    model: String,
}

impl AnthropicProvider {
    /// Creates a provider that authenticates with `api_key` and requests
    /// completions from `model` using the given HTTP `client`.
    pub(crate) fn new(client: Client, api_key: String, model: String) -> Self {
        Self {
            client,
            api_key,
            model,
        }
    }

    /// Builds the Messages API request body. Unlike the OpenAI chat format,
    /// Anthropic takes the system prompt as a top-level `system` field rather
    /// than a `system`-role message.
    fn build_request_body(&self, system_prompt: &str, user_content: &str) -> Value {
        serde_json::json!({
            "model": self.model,
            "max_tokens": 1024,
            "system": system_prompt,
            "messages": [
                {
                    "role": "user",
                    "content": user_content,
                }
            ]
        })
    }
}
impl SummaryProvider for AnthropicProvider {
    /// Calls the Anthropic Messages API and extracts the first text block
    /// from the response.
    ///
    /// # Errors
    /// - `SummarizeError::RequestFailed` if the HTTP request cannot be sent.
    /// - `SummarizeError::HttpError` for non-2xx responses (body included).
    /// - `SummarizeError::ParseError` if the response JSON is malformed.
    fn summarize(
        &self,
        system_prompt: &str,
        user_content: &str,
    ) -> Result<SummaryResponse, SummarizeError> {
        let body = self.build_request_body(system_prompt, user_content);
        let response = self
            .client
            .post("https://api.anthropic.com/v1/messages")
            .header("x-api-key", &self.api_key)
            .header("anthropic-version", "2023-06-01")
            .header("content-type", "application/json")
            .json(&body)
            .send()
            .map_err(|e| SummarizeError::RequestFailed(e.to_string()))?;
        let status = response.status();
        if !status.is_success() {
            let status_code = status.as_u16();
            // Best-effort capture of the error body for diagnostics.
            let body_text = response
                .text()
                .unwrap_or_else(|_| "Unknown error".to_string());
            return Err(SummarizeError::HttpError {
                status: status_code,
                body: body_text,
            });
        }
        let json: Value = response
            .json()
            .map_err(|e| SummarizeError::ParseError(e.to_string()))?;
        parse_anthropic_response(&json)
    }
}

/// Extracts `content[0].text` from an Anthropic Messages API response.
///
/// Mirrors `parse_openai_response` so both response formats are handled by
/// small, independently testable functions instead of inline chains.
fn parse_anthropic_response(json: &Value) -> Result<SummaryResponse, SummarizeError> {
    let content = json
        .get("content")
        .and_then(|c| c.as_array())
        .and_then(|arr| arr.first())
        .and_then(|item| item.get("text"))
        .and_then(|t| t.as_str())
        .ok_or_else(|| {
            SummarizeError::ParseError(
                "Missing content[0].text in Anthropic response".to_string(),
            )
        })?;
    Ok(SummaryResponse {
        content: content.to_string(),
    })
}
/// Summary provider backed by the OpenAI Chat Completions API.
pub(crate) struct OpenAIProvider {
    client: Client,
    api_key: String,
    model: String,
}

impl OpenAIProvider {
    /// Creates a provider that authenticates with `api_key` and requests
    /// completions from `model` using the given HTTP `client`.
    pub(crate) fn new(client: Client, api_key: String, model: String) -> Self {
        Self {
            client,
            api_key,
            model,
        }
    }

    /// Builds the chat-completions request body; the system prompt travels
    /// as a `system`-role message, per the OpenAI chat format.
    fn build_request_body(&self, system_prompt: &str, user_content: &str) -> Value {
        serde_json::json!({
            "model": self.model,
            "max_tokens": 1024,
            "messages": [
                {
                    "role": "system",
                    "content": system_prompt,
                },
                {
                    "role": "user",
                    "content": user_content,
                }
            ]
        })
    }
}
impl SummaryProvider for OpenAIProvider {
    /// Sends a chat-completion request to OpenAI and returns the first
    /// choice's message content as the summary.
    ///
    /// # Errors
    /// Fails with `RequestFailed` when the request cannot be sent,
    /// `HttpError` on a non-success status, or `ParseError` on bad JSON.
    fn summarize(
        &self,
        system_prompt: &str,
        user_content: &str,
    ) -> Result<SummaryResponse, SummarizeError> {
        let request_body = self.build_request_body(system_prompt, user_content);
        let response = self
            .client
            .post("https://api.openai.com/v1/chat/completions")
            .header("Authorization", format!("Bearer {}", self.api_key))
            .header("content-type", "application/json")
            .json(&request_body)
            .send()
            .map_err(|err| SummarizeError::RequestFailed(err.to_string()))?;
        let status = response.status();
        if status.is_success() {
            let json: Value = response
                .json()
                .map_err(|err| SummarizeError::ParseError(err.to_string()))?;
            parse_openai_response(&json)
        } else {
            // Best-effort capture of the error body for diagnostics.
            Err(SummarizeError::HttpError {
                status: status.as_u16(),
                body: response
                    .text()
                    .unwrap_or_else(|_| "Unknown error".to_string()),
            })
        }
    }
}
/// Summary provider backed by OpenRouter's OpenAI-compatible API.
pub(crate) struct OpenRouterProvider {
    client: Client,
    api_key: String,
    model: String,
}

impl OpenRouterProvider {
    /// Creates a provider that authenticates with `api_key` and requests
    /// completions from `model` using the given HTTP `client`.
    pub(crate) fn new(client: Client, api_key: String, model: String) -> Self {
        Self {
            client,
            api_key,
            model,
        }
    }

    /// Builds the request body in the OpenAI chat-completions format, which
    /// OpenRouter accepts unchanged.
    fn build_request_body(&self, system_prompt: &str, user_content: &str) -> Value {
        serde_json::json!({
            "model": self.model,
            "max_tokens": 1024,
            "messages": [
                {
                    "role": "system",
                    "content": system_prompt,
                },
                {
                    "role": "user",
                    "content": user_content,
                }
            ]
        })
    }
}
impl SummaryProvider for OpenRouterProvider {
    /// Sends a chat-completion request to OpenRouter and returns the first
    /// choice's message content as the summary.
    ///
    /// # Errors
    /// Fails with `RequestFailed` when the request cannot be sent,
    /// `HttpError` on a non-success status, or `ParseError` on bad JSON.
    fn summarize(
        &self,
        system_prompt: &str,
        user_content: &str,
    ) -> Result<SummaryResponse, SummarizeError> {
        let request_body = self.build_request_body(system_prompt, user_content);
        let response = self
            .client
            .post("https://openrouter.ai/api/v1/chat/completions")
            .header("Authorization", format!("Bearer {}", self.api_key))
            .header("HTTP-Referer", "https://lore.varalys.com")
            .header("content-type", "application/json")
            .json(&request_body)
            .send()
            .map_err(|err| SummarizeError::RequestFailed(err.to_string()))?;
        let status = response.status();
        if status.is_success() {
            let json: Value = response
                .json()
                .map_err(|err| SummarizeError::ParseError(err.to_string()))?;
            parse_openai_response(&json)
        } else {
            // Best-effort capture of the error body for diagnostics.
            Err(SummarizeError::HttpError {
                status: status.as_u16(),
                body: response
                    .text()
                    .unwrap_or_else(|_| "Unknown error".to_string()),
            })
        }
    }
}
/// Extracts `choices[0].message.content` from an OpenAI-compatible
/// chat-completion response (shared by the OpenAI and OpenRouter providers).
fn parse_openai_response(json: &Value) -> Result<SummaryResponse, SummarizeError> {
    // `serde_json::Value` indexing returns `Null` for missing keys, out-of-range
    // indices, and wrong value types, so this chained lookup never panics and
    // covers every malformed-shape case in one expression.
    match json["choices"][0]["message"]["content"].as_str() {
        Some(text) => Ok(SummaryResponse {
            content: text.to_string(),
        }),
        None => Err(SummarizeError::ParseError(
            "Missing choices[0].message.content in response".to_string(),
        )),
    }
}
pub fn default_model(kind: SummaryProviderKind) -> &'static str {
match kind {
SummaryProviderKind::Anthropic => "claude-haiku-4-5",
SummaryProviderKind::OpenAI => "gpt-4o-mini",
SummaryProviderKind::OpenRouter => "meta-llama/llama-3.1-8b-instruct:free",
}
}
/// Constructs the concrete provider for `kind`, falling back to
/// `default_model(kind)` when `model` is `None`.
///
/// # Panics
/// Panics if the HTTP client cannot be built (e.g. the TLS backend fails to
/// initialize) — this reflects a broken environment rather than bad input.
pub fn create_provider(
    kind: SummaryProviderKind,
    api_key: String,
    model: Option<String>,
) -> Box<dyn SummaryProvider> {
    let client = Client::builder()
        .connect_timeout(CONNECT_TIMEOUT)
        .timeout(REQUEST_TIMEOUT)
        .build()
        .expect("Failed to build HTTP client");
    let model = model.unwrap_or_else(|| default_model(kind).to_string());
    match kind {
        SummaryProviderKind::Anthropic => Box::new(AnthropicProvider::new(client, api_key, model)),
        SummaryProviderKind::OpenAI => Box::new(OpenAIProvider::new(client, api_key, model)),
        SummaryProviderKind::OpenRouter => {
            Box::new(OpenRouterProvider::new(client, api_key, model))
        }
    }
}
// Unit tests: cover provider-kind parsing/formatting, default model lookup,
// factory construction, request-body shapes, and response parsing. No tests
// perform network I/O.
#[cfg(test)]
mod tests {
    use super::*;

    // Builds a client with the same timeout configuration as `create_provider`.
    fn build_client() -> Client {
        Client::builder()
            .connect_timeout(CONNECT_TIMEOUT)
            .timeout(REQUEST_TIMEOUT)
            .build()
            .expect("Failed to build HTTP client")
    }

    // --- default_model ---

    #[test]
    fn test_default_model_anthropic() {
        assert_eq!(
            default_model(SummaryProviderKind::Anthropic),
            "claude-haiku-4-5"
        );
    }

    #[test]
    fn test_default_model_openai() {
        assert_eq!(default_model(SummaryProviderKind::OpenAI), "gpt-4o-mini");
    }

    #[test]
    fn test_default_model_openrouter() {
        assert_eq!(
            default_model(SummaryProviderKind::OpenRouter),
            "meta-llama/llama-3.1-8b-instruct:free"
        );
    }

    // --- Display / FromStr round-trips ---

    #[test]
    fn test_provider_kind_display_anthropic() {
        assert_eq!(SummaryProviderKind::Anthropic.to_string(), "anthropic");
    }

    #[test]
    fn test_provider_kind_display_openai() {
        assert_eq!(SummaryProviderKind::OpenAI.to_string(), "openai");
    }

    #[test]
    fn test_provider_kind_display_openrouter() {
        assert_eq!(SummaryProviderKind::OpenRouter.to_string(), "openrouter");
    }

    #[test]
    fn test_provider_kind_from_str_anthropic() {
        assert_eq!(
            SummaryProviderKind::from_str("anthropic").unwrap(),
            SummaryProviderKind::Anthropic
        );
    }

    #[test]
    fn test_provider_kind_from_str_openai() {
        assert_eq!(
            SummaryProviderKind::from_str("openai").unwrap(),
            SummaryProviderKind::OpenAI
        );
    }

    #[test]
    fn test_provider_kind_from_str_openrouter() {
        assert_eq!(
            SummaryProviderKind::from_str("openrouter").unwrap(),
            SummaryProviderKind::OpenRouter
        );
    }

    #[test]
    fn test_provider_kind_from_str_case_insensitive() {
        assert_eq!(
            SummaryProviderKind::from_str("ANTHROPIC").unwrap(),
            SummaryProviderKind::Anthropic
        );
        assert_eq!(
            SummaryProviderKind::from_str("OpenAI").unwrap(),
            SummaryProviderKind::OpenAI
        );
        assert_eq!(
            SummaryProviderKind::from_str("OpenRouter").unwrap(),
            SummaryProviderKind::OpenRouter
        );
    }

    #[test]
    fn test_provider_kind_from_str_unknown() {
        let err = SummaryProviderKind::from_str("gemini").unwrap_err();
        assert!(err.contains("Unknown summary provider"));
        assert!(err.contains("gemini"));
    }

    // --- create_provider factory (construction only; no requests sent) ---

    #[test]
    fn test_create_provider_anthropic_does_not_panic() {
        let _provider =
            create_provider(SummaryProviderKind::Anthropic, "test-key".to_string(), None);
    }

    #[test]
    fn test_create_provider_openai_does_not_panic() {
        let _provider = create_provider(SummaryProviderKind::OpenAI, "test-key".to_string(), None);
    }

    #[test]
    fn test_create_provider_openrouter_does_not_panic() {
        let _provider = create_provider(
            SummaryProviderKind::OpenRouter,
            "test-key".to_string(),
            None,
        );
    }

    #[test]
    fn test_create_provider_with_custom_model() {
        let _provider = create_provider(
            SummaryProviderKind::Anthropic,
            "test-key".to_string(),
            Some("claude-sonnet-4-20250514".to_string()),
        );
    }

    // --- request-body shapes ---

    #[test]
    fn test_anthropic_request_body() {
        let provider = AnthropicProvider::new(
            build_client(),
            "test-key".to_string(),
            "claude-haiku-4-5".to_string(),
        );
        let body = provider.build_request_body("Be concise.", "Summarize this session.");
        assert_eq!(body["model"], "claude-haiku-4-5");
        assert_eq!(body["max_tokens"], 1024);
        // Anthropic carries the system prompt as a top-level field.
        assert_eq!(body["system"], "Be concise.");
        let messages = body["messages"].as_array().unwrap();
        assert_eq!(messages.len(), 1);
        assert_eq!(messages[0]["role"], "user");
        assert_eq!(messages[0]["content"], "Summarize this session.");
    }

    #[test]
    fn test_openai_request_body() {
        let provider = OpenAIProvider::new(
            build_client(),
            "test-key".to_string(),
            "gpt-4o-mini".to_string(),
        );
        let body = provider.build_request_body("Be concise.", "Summarize this session.");
        assert_eq!(body["model"], "gpt-4o-mini");
        assert_eq!(body["max_tokens"], 1024);
        // OpenAI format: system prompt is the first chat message.
        let messages = body["messages"].as_array().unwrap();
        assert_eq!(messages.len(), 2);
        assert_eq!(messages[0]["role"], "system");
        assert_eq!(messages[0]["content"], "Be concise.");
        assert_eq!(messages[1]["role"], "user");
        assert_eq!(messages[1]["content"], "Summarize this session.");
    }

    #[test]
    fn test_openrouter_request_body() {
        let provider = OpenRouterProvider::new(
            build_client(),
            "test-key".to_string(),
            "meta-llama/llama-3.1-8b-instruct:free".to_string(),
        );
        let body = provider.build_request_body("Be concise.", "Summarize this session.");
        assert_eq!(body["model"], "meta-llama/llama-3.1-8b-instruct:free");
        assert_eq!(body["max_tokens"], 1024);
        let messages = body["messages"].as_array().unwrap();
        assert_eq!(messages.len(), 2);
        assert_eq!(messages[0]["role"], "system");
        assert_eq!(messages[0]["content"], "Be concise.");
        assert_eq!(messages[1]["role"], "user");
        assert_eq!(messages[1]["content"], "Summarize this session.");
    }

    // --- response parsing ---

    #[test]
    fn test_parse_openai_response_valid() {
        let json = serde_json::json!({
            "choices": [
                {
                    "message": {
                        "role": "assistant",
                        "content": "This session implemented a new feature."
                    }
                }
            ]
        });
        let result = parse_openai_response(&json).unwrap();
        assert_eq!(result.content, "This session implemented a new feature.");
    }

    #[test]
    fn test_parse_openai_response_missing_choices() {
        let json = serde_json::json!({});
        let err = parse_openai_response(&json).unwrap_err();
        match err {
            SummarizeError::ParseError(msg) => {
                assert!(msg.contains("choices[0].message.content"));
            }
            other => panic!("Expected ParseError, got: {other:?}"),
        }
    }

    #[test]
    fn test_parse_openai_response_empty_choices() {
        let json = serde_json::json!({ "choices": [] });
        let err = parse_openai_response(&json).unwrap_err();
        match err {
            SummarizeError::ParseError(_) => {}
            other => panic!("Expected ParseError, got: {other:?}"),
        }
    }

    #[test]
    fn test_parse_openai_response_missing_content() {
        let json = serde_json::json!({
            "choices": [
                {
                    "message": {
                        "role": "assistant"
                    }
                }
            ]
        });
        let err = parse_openai_response(&json).unwrap_err();
        match err {
            SummarizeError::ParseError(_) => {}
            other => panic!("Expected ParseError, got: {other:?}"),
        }
    }

    // NOTE(review): this test re-implements the Anthropic extraction chain
    // inline because the production code has no standalone parsing function
    // for that format; it exercises the lookup shape, not the provider code.
    #[test]
    fn test_parse_anthropic_response_valid() {
        let json = serde_json::json!({
            "content": [
                {
                    "type": "text",
                    "text": "This session refactored the database layer."
                }
            ]
        });
        let content = json
            .get("content")
            .and_then(|c| c.as_array())
            .and_then(|arr| arr.first())
            .and_then(|item| item.get("text"))
            .and_then(|t| t.as_str())
            .unwrap();
        assert_eq!(content, "This session refactored the database layer.");
    }

    #[test]
    fn test_timeout_constants() {
        assert_eq!(CONNECT_TIMEOUT.as_secs(), 30);
        assert_eq!(REQUEST_TIMEOUT.as_secs(), 120);
    }

    #[test]
    fn test_summary_response_debug() {
        let response = SummaryResponse {
            content: "test summary".to_string(),
        };
        let debug = format!("{response:?}");
        assert!(debug.contains("test summary"));
    }

    #[test]
    fn test_summary_response_clone() {
        let response = SummaryResponse {
            content: "test summary".to_string(),
        };
        let cloned = response.clone();
        assert_eq!(response.content, cloned.content);
    }
}