1use std::fmt;
9use std::str::FromStr;
10use std::time::Duration;
11
12use reqwest::blocking::Client;
13use serde_json::Value;
14
15use super::SummarizeError;
16
/// Maximum time allowed to establish a TCP/TLS connection to a provider API.
const CONNECT_TIMEOUT: Duration = Duration::from_secs(30);

/// Maximum time allowed for a complete request/response round trip.
/// Summaries can take a while on slow models, hence the generous 120s.
const REQUEST_TIMEOUT: Duration = Duration::from_secs(120);
22
/// Which LLM backend to use for generating summaries.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SummaryProviderKind {
    /// Anthropic's Messages API (Claude models).
    Anthropic,
    /// OpenAI's Chat Completions API.
    OpenAI,
    /// OpenRouter's OpenAI-compatible Chat Completions API.
    OpenRouter,
}
35
36impl fmt::Display for SummaryProviderKind {
37 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
38 match self {
39 SummaryProviderKind::Anthropic => write!(f, "anthropic"),
40 SummaryProviderKind::OpenAI => write!(f, "openai"),
41 SummaryProviderKind::OpenRouter => write!(f, "openrouter"),
42 }
43 }
44}
45
46impl FromStr for SummaryProviderKind {
47 type Err = String;
48
49 fn from_str(s: &str) -> Result<Self, Self::Err> {
50 match s.to_lowercase().as_str() {
51 "anthropic" => Ok(SummaryProviderKind::Anthropic),
52 "openai" => Ok(SummaryProviderKind::OpenAI),
53 "openrouter" => Ok(SummaryProviderKind::OpenRouter),
54 other => Err(format!("Unknown summary provider: '{other}'. Expected one of: anthropic, openai, openrouter")),
55 }
56 }
57}
58
/// A successful summary returned by a [`SummaryProvider`].
#[derive(Debug, Clone)]
pub struct SummaryResponse {
    // The summary text produced by the model.
    pub content: String,
}
65
/// A blocking LLM backend capable of producing a text summary.
pub trait SummaryProvider {
    /// Sends `system_prompt` and `user_content` to the backing API and
    /// returns the model's summary text.
    ///
    /// # Errors
    /// Returns a [`SummarizeError`] when the HTTP request fails, the server
    /// responds with a non-success status, or the response body cannot be
    /// parsed into the expected shape.
    fn summarize(
        &self,
        system_prompt: &str,
        user_content: &str,
    ) -> Result<SummaryResponse, SummarizeError>;
}
83
/// Summary provider backed by the Anthropic Messages API.
pub(crate) struct AnthropicProvider {
    // Pre-configured blocking HTTP client (timeouts set by the caller).
    client: Client,
    // API key, sent via the `x-api-key` header.
    api_key: String,
    // Model identifier, e.g. "claude-haiku-4-5".
    model: String,
}
95
96impl AnthropicProvider {
97 pub(crate) fn new(client: Client, api_key: String, model: String) -> Self {
99 Self {
100 client,
101 api_key,
102 model,
103 }
104 }
105
106 fn build_request_body(&self, system_prompt: &str, user_content: &str) -> Value {
108 serde_json::json!({
109 "model": self.model,
110 "max_tokens": 1024,
111 "system": system_prompt,
112 "messages": [
113 {
114 "role": "user",
115 "content": user_content,
116 }
117 ]
118 })
119 }
120}
121
122impl SummaryProvider for AnthropicProvider {
123 fn summarize(
124 &self,
125 system_prompt: &str,
126 user_content: &str,
127 ) -> Result<SummaryResponse, SummarizeError> {
128 let body = self.build_request_body(system_prompt, user_content);
129
130 let response = self
131 .client
132 .post("https://api.anthropic.com/v1/messages")
133 .header("x-api-key", &self.api_key)
134 .header("anthropic-version", "2023-06-01")
135 .header("content-type", "application/json")
136 .json(&body)
137 .send()
138 .map_err(|e| SummarizeError::RequestFailed(e.to_string()))?;
139
140 let status = response.status();
141 if !status.is_success() {
142 let status_code = status.as_u16();
143 let body_text = response
144 .text()
145 .unwrap_or_else(|_| "Unknown error".to_string());
146 return Err(SummarizeError::HttpError {
147 status: status_code,
148 body: body_text,
149 });
150 }
151
152 let json: Value = response
153 .json()
154 .map_err(|e| SummarizeError::ParseError(e.to_string()))?;
155
156 let content = json
157 .get("content")
158 .and_then(|c| c.as_array())
159 .and_then(|arr| arr.first())
160 .and_then(|item| item.get("text"))
161 .and_then(|t| t.as_str())
162 .ok_or_else(|| {
163 SummarizeError::ParseError(
164 "Missing content[0].text in Anthropic response".to_string(),
165 )
166 })?;
167
168 Ok(SummaryResponse {
169 content: content.to_string(),
170 })
171 }
172}
173
/// Summary provider backed by the OpenAI Chat Completions API.
pub(crate) struct OpenAIProvider {
    // Pre-configured blocking HTTP client (timeouts set by the caller).
    client: Client,
    // API key, sent as an `Authorization: Bearer` header.
    api_key: String,
    // Model identifier, e.g. "gpt-4o-mini".
    model: String,
}
185
186impl OpenAIProvider {
187 pub(crate) fn new(client: Client, api_key: String, model: String) -> Self {
189 Self {
190 client,
191 api_key,
192 model,
193 }
194 }
195
196 fn build_request_body(&self, system_prompt: &str, user_content: &str) -> Value {
198 serde_json::json!({
199 "model": self.model,
200 "max_tokens": 1024,
201 "messages": [
202 {
203 "role": "system",
204 "content": system_prompt,
205 },
206 {
207 "role": "user",
208 "content": user_content,
209 }
210 ]
211 })
212 }
213}
214
215impl SummaryProvider for OpenAIProvider {
216 fn summarize(
217 &self,
218 system_prompt: &str,
219 user_content: &str,
220 ) -> Result<SummaryResponse, SummarizeError> {
221 let body = self.build_request_body(system_prompt, user_content);
222
223 let response = self
224 .client
225 .post("https://api.openai.com/v1/chat/completions")
226 .header("Authorization", format!("Bearer {}", self.api_key))
227 .header("content-type", "application/json")
228 .json(&body)
229 .send()
230 .map_err(|e| SummarizeError::RequestFailed(e.to_string()))?;
231
232 let status = response.status();
233 if !status.is_success() {
234 let status_code = status.as_u16();
235 let body_text = response
236 .text()
237 .unwrap_or_else(|_| "Unknown error".to_string());
238 return Err(SummarizeError::HttpError {
239 status: status_code,
240 body: body_text,
241 });
242 }
243
244 let json: Value = response
245 .json()
246 .map_err(|e| SummarizeError::ParseError(e.to_string()))?;
247
248 parse_openai_response(&json)
249 }
250}
251
/// Summary provider backed by OpenRouter's OpenAI-compatible
/// Chat Completions API.
pub(crate) struct OpenRouterProvider {
    // Pre-configured blocking HTTP client (timeouts set by the caller).
    client: Client,
    // API key, sent as an `Authorization: Bearer` header.
    api_key: String,
    // Model identifier, e.g. "meta-llama/llama-3.1-8b-instruct:free".
    model: String,
}
266
267impl OpenRouterProvider {
268 pub(crate) fn new(client: Client, api_key: String, model: String) -> Self {
270 Self {
271 client,
272 api_key,
273 model,
274 }
275 }
276
277 fn build_request_body(&self, system_prompt: &str, user_content: &str) -> Value {
281 serde_json::json!({
282 "model": self.model,
283 "max_tokens": 1024,
284 "messages": [
285 {
286 "role": "system",
287 "content": system_prompt,
288 },
289 {
290 "role": "user",
291 "content": user_content,
292 }
293 ]
294 })
295 }
296}
297
298impl SummaryProvider for OpenRouterProvider {
299 fn summarize(
300 &self,
301 system_prompt: &str,
302 user_content: &str,
303 ) -> Result<SummaryResponse, SummarizeError> {
304 let body = self.build_request_body(system_prompt, user_content);
305
306 let response = self
307 .client
308 .post("https://openrouter.ai/api/v1/chat/completions")
309 .header("Authorization", format!("Bearer {}", self.api_key))
310 .header("HTTP-Referer", "https://lore.varalys.com")
311 .header("content-type", "application/json")
312 .json(&body)
313 .send()
314 .map_err(|e| SummarizeError::RequestFailed(e.to_string()))?;
315
316 let status = response.status();
317 if !status.is_success() {
318 let status_code = status.as_u16();
319 let body_text = response
320 .text()
321 .unwrap_or_else(|_| "Unknown error".to_string());
322 return Err(SummarizeError::HttpError {
323 status: status_code,
324 body: body_text,
325 });
326 }
327
328 let json: Value = response
329 .json()
330 .map_err(|e| SummarizeError::ParseError(e.to_string()))?;
331
332 parse_openai_response(&json)
333 }
334}
335
336fn parse_openai_response(json: &Value) -> Result<SummaryResponse, SummarizeError> {
343 let content = json
344 .get("choices")
345 .and_then(|c| c.as_array())
346 .and_then(|arr| arr.first())
347 .and_then(|choice| choice.get("message"))
348 .and_then(|msg| msg.get("content"))
349 .and_then(|c| c.as_str())
350 .ok_or_else(|| {
351 SummarizeError::ParseError("Missing choices[0].message.content in response".to_string())
352 })?;
353
354 Ok(SummaryResponse {
355 content: content.to_string(),
356 })
357}
358
359pub fn default_model(kind: SummaryProviderKind) -> &'static str {
363 match kind {
364 SummaryProviderKind::Anthropic => "claude-haiku-4-5",
365 SummaryProviderKind::OpenAI => "gpt-4o-mini",
366 SummaryProviderKind::OpenRouter => "meta-llama/llama-3.1-8b-instruct:free",
367 }
368}
369
370pub fn create_provider(
375 kind: SummaryProviderKind,
376 api_key: String,
377 model: Option<String>,
378) -> Box<dyn SummaryProvider> {
379 let client = Client::builder()
380 .connect_timeout(CONNECT_TIMEOUT)
381 .timeout(REQUEST_TIMEOUT)
382 .build()
383 .expect("Failed to build HTTP client");
384
385 let model = model.unwrap_or_else(|| default_model(kind).to_string());
386
387 match kind {
388 SummaryProviderKind::Anthropic => Box::new(AnthropicProvider::new(client, api_key, model)),
389 SummaryProviderKind::OpenAI => Box::new(OpenAIProvider::new(client, api_key, model)),
390 SummaryProviderKind::OpenRouter => {
391 Box::new(OpenRouterProvider::new(client, api_key, model))
392 }
393 }
394}
395
#[cfg(test)]
mod tests {
    use super::*;

    /// Builds an HTTP client with the same timeout settings as
    /// `create_provider` uses, for constructing providers in tests.
    fn build_client() -> Client {
        Client::builder()
            .connect_timeout(CONNECT_TIMEOUT)
            .timeout(REQUEST_TIMEOUT)
            .build()
            .expect("Failed to build HTTP client")
    }

    // --- default_model: one case per provider kind ---

    #[test]
    fn test_default_model_anthropic() {
        assert_eq!(
            default_model(SummaryProviderKind::Anthropic),
            "claude-haiku-4-5"
        );
    }

    #[test]
    fn test_default_model_openai() {
        assert_eq!(default_model(SummaryProviderKind::OpenAI), "gpt-4o-mini");
    }

    #[test]
    fn test_default_model_openrouter() {
        assert_eq!(
            default_model(SummaryProviderKind::OpenRouter),
            "meta-llama/llama-3.1-8b-instruct:free"
        );
    }

    // --- Display: lowercase names for each kind ---

    #[test]
    fn test_provider_kind_display_anthropic() {
        assert_eq!(SummaryProviderKind::Anthropic.to_string(), "anthropic");
    }

    #[test]
    fn test_provider_kind_display_openai() {
        assert_eq!(SummaryProviderKind::OpenAI.to_string(), "openai");
    }

    #[test]
    fn test_provider_kind_display_openrouter() {
        assert_eq!(SummaryProviderKind::OpenRouter.to_string(), "openrouter");
    }

    // --- FromStr: round-trips, case-insensitivity, and the error path ---

    #[test]
    fn test_provider_kind_from_str_anthropic() {
        assert_eq!(
            SummaryProviderKind::from_str("anthropic").unwrap(),
            SummaryProviderKind::Anthropic
        );
    }

    #[test]
    fn test_provider_kind_from_str_openai() {
        assert_eq!(
            SummaryProviderKind::from_str("openai").unwrap(),
            SummaryProviderKind::OpenAI
        );
    }

    #[test]
    fn test_provider_kind_from_str_openrouter() {
        assert_eq!(
            SummaryProviderKind::from_str("openrouter").unwrap(),
            SummaryProviderKind::OpenRouter
        );
    }

    #[test]
    fn test_provider_kind_from_str_case_insensitive() {
        assert_eq!(
            SummaryProviderKind::from_str("ANTHROPIC").unwrap(),
            SummaryProviderKind::Anthropic
        );
        assert_eq!(
            SummaryProviderKind::from_str("OpenAI").unwrap(),
            SummaryProviderKind::OpenAI
        );
        assert_eq!(
            SummaryProviderKind::from_str("OpenRouter").unwrap(),
            SummaryProviderKind::OpenRouter
        );
    }

    #[test]
    fn test_provider_kind_from_str_unknown() {
        let err = SummaryProviderKind::from_str("gemini").unwrap_err();
        assert!(err.contains("Unknown summary provider"));
        assert!(err.contains("gemini"));
    }

    // --- create_provider: constructs without panicking (no network calls) ---

    #[test]
    fn test_create_provider_anthropic_does_not_panic() {
        let _provider =
            create_provider(SummaryProviderKind::Anthropic, "test-key".to_string(), None);
    }

    #[test]
    fn test_create_provider_openai_does_not_panic() {
        let _provider = create_provider(SummaryProviderKind::OpenAI, "test-key".to_string(), None);
    }

    #[test]
    fn test_create_provider_openrouter_does_not_panic() {
        let _provider = create_provider(
            SummaryProviderKind::OpenRouter,
            "test-key".to_string(),
            None,
        );
    }

    #[test]
    fn test_create_provider_with_custom_model() {
        let _provider = create_provider(
            SummaryProviderKind::Anthropic,
            "test-key".to_string(),
            Some("claude-sonnet-4-20250514".to_string()),
        );
    }

    // --- Request-body shape per provider (no network involved) ---

    #[test]
    fn test_anthropic_request_body() {
        let provider = AnthropicProvider::new(
            build_client(),
            "test-key".to_string(),
            "claude-haiku-4-5".to_string(),
        );

        let body = provider.build_request_body("Be concise.", "Summarize this session.");

        assert_eq!(body["model"], "claude-haiku-4-5");
        assert_eq!(body["max_tokens"], 1024);
        // Anthropic: system prompt is a top-level field, not a message.
        assert_eq!(body["system"], "Be concise.");

        let messages = body["messages"].as_array().unwrap();
        assert_eq!(messages.len(), 1);
        assert_eq!(messages[0]["role"], "user");
        assert_eq!(messages[0]["content"], "Summarize this session.");
    }

    #[test]
    fn test_openai_request_body() {
        let provider = OpenAIProvider::new(
            build_client(),
            "test-key".to_string(),
            "gpt-4o-mini".to_string(),
        );

        let body = provider.build_request_body("Be concise.", "Summarize this session.");

        assert_eq!(body["model"], "gpt-4o-mini");
        assert_eq!(body["max_tokens"], 1024);

        // OpenAI: system prompt is the first message entry.
        let messages = body["messages"].as_array().unwrap();
        assert_eq!(messages.len(), 2);
        assert_eq!(messages[0]["role"], "system");
        assert_eq!(messages[0]["content"], "Be concise.");
        assert_eq!(messages[1]["role"], "user");
        assert_eq!(messages[1]["content"], "Summarize this session.");
    }

    #[test]
    fn test_openrouter_request_body() {
        let provider = OpenRouterProvider::new(
            build_client(),
            "test-key".to_string(),
            "meta-llama/llama-3.1-8b-instruct:free".to_string(),
        );

        let body = provider.build_request_body("Be concise.", "Summarize this session.");

        assert_eq!(body["model"], "meta-llama/llama-3.1-8b-instruct:free");
        assert_eq!(body["max_tokens"], 1024);

        // OpenRouter mirrors the OpenAI message layout.
        let messages = body["messages"].as_array().unwrap();
        assert_eq!(messages.len(), 2);
        assert_eq!(messages[0]["role"], "system");
        assert_eq!(messages[0]["content"], "Be concise.");
        assert_eq!(messages[1]["role"], "user");
        assert_eq!(messages[1]["content"], "Summarize this session.");
    }

    // --- Response parsing: valid and malformed OpenAI-style payloads ---

    #[test]
    fn test_parse_openai_response_valid() {
        let json = serde_json::json!({
            "choices": [
                {
                    "message": {
                        "role": "assistant",
                        "content": "This session implemented a new feature."
                    }
                }
            ]
        });

        let result = parse_openai_response(&json).unwrap();
        assert_eq!(result.content, "This session implemented a new feature.");
    }

    #[test]
    fn test_parse_openai_response_missing_choices() {
        let json = serde_json::json!({});
        let err = parse_openai_response(&json).unwrap_err();
        match err {
            SummarizeError::ParseError(msg) => {
                assert!(msg.contains("choices[0].message.content"));
            }
            other => panic!("Expected ParseError, got: {other:?}"),
        }
    }

    #[test]
    fn test_parse_openai_response_empty_choices() {
        let json = serde_json::json!({ "choices": [] });
        let err = parse_openai_response(&json).unwrap_err();
        match err {
            SummarizeError::ParseError(_) => {}
            other => panic!("Expected ParseError, got: {other:?}"),
        }
    }

    #[test]
    fn test_parse_openai_response_missing_content() {
        let json = serde_json::json!({
            "choices": [
                {
                    "message": {
                        "role": "assistant"
                    }
                }
            ]
        });
        let err = parse_openai_response(&json).unwrap_err();
        match err {
            SummarizeError::ParseError(_) => {}
            other => panic!("Expected ParseError, got: {other:?}"),
        }
    }

    // Mirrors the inline extraction logic in AnthropicProvider::summarize,
    // which has no standalone parse function to call directly.
    #[test]
    fn test_parse_anthropic_response_valid() {
        let json = serde_json::json!({
            "content": [
                {
                    "type": "text",
                    "text": "This session refactored the database layer."
                }
            ]
        });

        let content = json
            .get("content")
            .and_then(|c| c.as_array())
            .and_then(|arr| arr.first())
            .and_then(|item| item.get("text"))
            .and_then(|t| t.as_str())
            .unwrap();

        assert_eq!(content, "This session refactored the database layer.");
    }

    // --- Miscellaneous: constants and derived impls ---

    #[test]
    fn test_timeout_constants() {
        assert_eq!(CONNECT_TIMEOUT.as_secs(), 30);
        assert_eq!(REQUEST_TIMEOUT.as_secs(), 120);
    }

    #[test]
    fn test_summary_response_debug() {
        let response = SummaryResponse {
            content: "test summary".to_string(),
        };
        let debug = format!("{response:?}");
        assert!(debug.contains("test summary"));
    }

    #[test]
    fn test_summary_response_clone() {
        let response = SummaryResponse {
            content: "test summary".to_string(),
        };
        let cloned = response.clone();
        assert_eq!(response.content, cloned.content);
    }
}