1use std::fmt;
8
9use crate::decision::{Intelligence, Response};
10use crate::errors::{EventGraphError, Result};
11use crate::event::Event;
12use crate::types::Score;
13
/// A reasoning backend selectable by name at runtime.
///
/// Extends [`Intelligence`] with identifying metadata so callers can
/// report which backend and model produced a response.
pub trait Provider: Intelligence {
    /// Stable provider identifier (e.g. "claude-cli", "openai").
    fn name(&self) -> &str;

    /// Model identifier this provider was configured with.
    fn model(&self) -> &str;
}
24
/// Configuration for constructing a [`Provider`] via [`new`].
///
/// Empty/zero fields mean "unset" and are filled with provider-specific
/// defaults by the factory and provider constructors.
#[derive(Debug, Clone)]
pub struct Config {
    /// Provider selector, e.g. "claude-cli", "openai", "openai-compatible".
    pub provider: String,

    /// Model name; providers substitute their own default when empty.
    pub model: String,

    /// API key; empty falls back to provider-specific env vars.
    pub api_key: String,

    /// Base URL override. For "claude-cli" this is repurposed as the
    /// CLI binary path (see `ClaudeCliProvider::new`).
    pub base_url: String,

    /// Max completion tokens; 0 means "use the default" (1024, set in `new`).
    pub max_tokens: usize,

    /// Sampling temperature. For "claude-cli" this is repurposed as the
    /// max budget in USD (see `ClaudeCliProvider::new`).
    pub temperature: f64,

    /// Optional system prompt prepended to every request.
    pub system_prompt: String,
}
54
55impl Default for Config {
56 fn default() -> Self {
57 Self {
58 provider: String::new(),
59 model: String::new(),
60 api_key: String::new(),
61 base_url: String::new(),
62 max_tokens: 0,
63 temperature: 0.0,
64 system_prompt: String::new(),
65 }
66 }
67}
68
69impl fmt::Display for Config {
70 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
71 write!(f, "Config(provider={}, model={})", self.provider, self.model)
72 }
73}
74
75pub fn new(mut cfg: Config) -> Result<Box<dyn Provider>> {
79 if cfg.max_tokens == 0 {
80 cfg.max_tokens = 1024;
81 }
82
83 match cfg.provider.as_str() {
84 "claude-cli" => {
85 let p = ClaudeCliProvider::new(cfg)?;
86 Ok(Box::new(p))
87 }
88 #[cfg(feature = "intelligence")]
89 "openai-compatible" | "openai" | "xai" | "groq" | "together" | "ollama" | "azure" => {
90 let p = OpenAIProvider::new(cfg)?;
91 Ok(Box::new(p))
92 }
93 #[cfg(not(feature = "intelligence"))]
94 "openai-compatible" | "openai" | "xai" | "groq" | "together" | "ollama" | "azure" => {
95 Err(EventGraphError::GrammarViolation {
96 detail: format!(
97 "provider {:?} requires the \"intelligence\" feature (ureq dependency)",
98 cfg.provider
99 ),
100 })
101 }
102 other => Err(EventGraphError::GrammarViolation {
103 detail: format!(
104 "unknown provider: {:?} (supported: claude-cli, openai-compatible, openai, xai, groq, together, ollama, azure)",
105 other
106 ),
107 }),
108 }
109}
110
111pub fn new_claude_cli_config(model: &str) -> Config {
113 let model = if model.is_empty() { "sonnet" } else { model };
114 Config {
115 provider: "claude-cli".to_string(),
116 model: model.to_string(),
117 ..Config::default()
118 }
119}
120
121fn events_to_messages(events: &[Event]) -> String {
125 if events.is_empty() {
126 return String::new();
127 }
128 let mut buf = String::from("Event history:\n");
129 for (i, ev) in events.iter().enumerate() {
130 if i >= 20 {
131 buf.push_str(&format!("... and {} more events\n", events.len() - 20));
132 break;
133 }
134 buf.push_str(&format!(
135 "- [{}] {} by {}\n",
136 ev.event_type.value(),
137 ev.id.value(),
138 ev.source.value(),
139 ));
140 }
141 buf
142}
143
/// Maps token usage to a confidence [`Score`].
///
/// Currently a fixed heuristic: always 0.7 regardless of `_tokens_used`
/// (the parameter is kept so the signature won't change when a real
/// heuristic lands).
fn parse_confidence(_tokens_used: usize) -> Score {
    Score::new(0.7).expect("0.7 is always valid")
}
149
/// Parsed subset of the claude CLI's `--output-format json` response.
#[derive(Debug)]
struct ClaudeCliResult {
    // Main text result of the run ("" when the field is absent).
    result: String,
    // True when the CLI reports a failure.
    is_error: bool,
    // Result subtype string reported by the CLI ("" when absent).
    subtype: String,
    // Token counts from the "usage" object (0 when absent).
    input_tokens: usize,
    output_tokens: usize,
}
161
/// Provider backed by the local `claude` CLI binary.
pub struct ClaudeCliProvider {
    // Model alias passed via `--model` (e.g. "sonnet").
    model: String,
    // Spend cap passed via `--max-budget-usd`.
    max_budget: f64,
    // Optional system prompt passed via `--system-prompt`.
    system_prompt: String,
    // Name or path of the CLI binary (defaults to "claude").
    claude_path: String,
}
170
impl ClaudeCliProvider {
    /// Validates the CLI binary is reachable and captures run parameters.
    ///
    /// Config field repurposing: `cfg.base_url` is the CLI binary name or
    /// path (default "claude"), and `cfg.temperature` is the max budget in
    /// USD (default 1.0 when unset). `cfg.max_tokens` is ignored by this
    /// backend.
    ///
    /// Returns a `GrammarViolation` error when the binary cannot be found
    /// via `which` (Unix) or `where` (Windows).
    fn new(cfg: Config) -> Result<Self> {
        // Empty model means "use the default alias".
        let model = if cfg.model.is_empty() {
            "sonnet".to_string()
        } else {
            cfg.model
        };

        let claude_path = if cfg.base_url.is_empty() {
            "claude".to_string()
        } else {
            cfg.base_url
        };

        // Probe for the binary with `which` first (Unix-like systems).
        let check = std::process::Command::new("which")
            .arg(&claude_path)
            .stdout(std::process::Stdio::null())
            .stderr(std::process::Stdio::null())
            .status();

        let found = match check {
            Ok(status) => status.success(),
            // `which` itself missing — fall back to Windows' `where`.
            Err(_) => {
                match std::process::Command::new("where")
                    .arg(&claude_path)
                    .stdout(std::process::Stdio::null())
                    .stderr(std::process::Stdio::null())
                    .status()
                {
                    Ok(status) => status.success(),
                    Err(_) => false,
                }
            }
        };

        if !found {
            return Err(EventGraphError::GrammarViolation {
                detail: format!("claude CLI not found in PATH: {}", claude_path),
            });
        }

        // temperature doubles as the USD budget for this backend; a
        // non-positive value selects the 1.0 default.
        let max_budget = if cfg.temperature > 0.0 {
            cfg.temperature
        } else {
            1.0
        };

        Ok(Self {
            model,
            max_budget,
            system_prompt: cfg.system_prompt,
            claude_path,
        })
    }

    /// Parses the CLI's JSON stdout into a [`ClaudeCliResult`].
    ///
    /// Missing fields default to empty/false/0; invalid JSON yields a
    /// `GrammarViolation` error that includes the raw output for debugging.
    fn parse_result(stdout: &[u8]) -> Result<ClaudeCliResult> {
        let parsed: serde_json::Value = serde_json::from_slice(stdout).map_err(|e| {
            EventGraphError::GrammarViolation {
                detail: format!(
                    "failed to parse claude CLI JSON output: {}\nraw: {}",
                    e,
                    String::from_utf8_lossy(stdout)
                ),
            }
        })?;

        Ok(ClaudeCliResult {
            result: parsed["result"].as_str().unwrap_or("").to_string(),
            is_error: parsed["is_error"].as_bool().unwrap_or(false),
            subtype: parsed["subtype"].as_str().unwrap_or("").to_string(),
            input_tokens: parsed["usage"]["input_tokens"].as_u64().unwrap_or(0) as usize,
            output_tokens: parsed["usage"]["output_tokens"].as_u64().unwrap_or(0) as usize,
        })
    }
}
250
// Manual Debug: shows only model and claude_path, omitting max_budget
// and the (possibly long) system prompt.
impl fmt::Debug for ClaudeCliProvider {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("ClaudeCliProvider")
            .field("model", &self.model)
            .field("claude_path", &self.claude_path)
            .finish()
    }
}
259
impl Intelligence for ClaudeCliProvider {
    /// Runs one reasoning request through the claude CLI.
    ///
    /// Builds a prompt (event-history summary, "---" separator, then
    /// `prompt`), spawns the CLI in print/JSON mode, writes the prompt to
    /// its stdin, and parses the JSON response. Token usage feeds the
    /// fixed confidence heuristic in `parse_confidence`.
    fn reason(&self, prompt: &str, history: &[Event]) -> Result<Response> {
        let mut full_prompt = String::new();
        let history_text = events_to_messages(history);
        if !history_text.is_empty() {
            full_prompt.push_str(&history_text);
            full_prompt.push_str("\n---\n\n");
        }
        full_prompt.push_str(prompt);

        // "-p" with no value: the prompt itself arrives via stdin below.
        let mut args = vec![
            "-p".to_string(),
            "--output-format".to_string(),
            "json".to_string(),
            "--model".to_string(),
            self.model.clone(),
            "--max-budget-usd".to_string(),
            format!("{:.2}", self.max_budget),
            "--no-session-persistence".to_string(),
        ];
        if !self.system_prompt.is_empty() {
            args.push("--system-prompt".to_string());
            args.push(self.system_prompt.clone());
        }

        let mut cmd = std::process::Command::new(&self.claude_path);
        cmd.args(&args);
        cmd.stdin(std::process::Stdio::piped());
        cmd.stdout(std::process::Stdio::piped());
        cmd.stderr(std::process::Stdio::piped());

        // NOTE(review): presumably cleared so a nested CLI invocation does
        // not think it is running inside Claude Code — confirm.
        cmd.env_remove("CLAUDECODE");

        use std::io::Write;
        let mut child = cmd.spawn().map_err(|e| EventGraphError::GrammarViolation {
            detail: format!("failed to spawn claude CLI: {}", e),
        })?;

        // Best-effort write; errors surface later as a bad/empty response.
        // NOTE(review): stdin is fully written before stdout is drained —
        // a very large prompt could in principle deadlock if the child
        // blocks on output; confirm prompt sizes stay within pipe buffers.
        if let Some(ref mut stdin) = child.stdin {
            let _ = stdin.write_all(full_prompt.as_bytes());
        }
        // Close stdin so the CLI sees EOF and starts processing.
        drop(child.stdin.take());

        let output = child.wait_with_output().map_err(|e| {
            EventGraphError::GrammarViolation {
                detail: format!("claude CLI error: {}", e),
            }
        })?;

        if !output.status.success() {
            // Even on a non-zero exit the CLI may have emitted a usable
            // JSON result (e.g. budget exhausted mid-run) — salvage it.
            if !output.stdout.is_empty() {
                if let Ok(result) = Self::parse_result(&output.stdout) {
                    if !result.result.is_empty() {
                        let tokens_used = result.input_tokens + result.output_tokens;
                        let confidence = parse_confidence(tokens_used);
                        return Ok(Response {
                            content: result.result,
                            confidence,
                            tokens_used,
                        });
                    }
                }
            }
            return Err(EventGraphError::GrammarViolation {
                detail: format!(
                    "claude CLI error (exit {})\nstderr: {}",
                    output.status,
                    String::from_utf8_lossy(&output.stderr),
                ),
            });
        }

        let result = Self::parse_result(&output.stdout)?;

        // Exit code 0 but the payload itself flags an error.
        if result.is_error {
            return Err(EventGraphError::GrammarViolation {
                detail: format!(
                    "claude CLI returned error: {} (subtype: {})",
                    result.result, result.subtype
                ),
            });
        }

        let tokens_used = result.input_tokens + result.output_tokens;
        let confidence = parse_confidence(tokens_used);

        Ok(Response {
            content: result.result,
            confidence,
            tokens_used,
        })
    }
}
356
impl Provider for ClaudeCliProvider {
    // Fixed backend identifier; the model is whatever was configured.
    fn name(&self) -> &str { "claude-cli" }
    fn model(&self) -> &str { &self.model }
}
361
// Well-known default base URLs for the OpenAI-compatible backends.
#[cfg(feature = "intelligence")]
const OPENAI_BASE_URL: &str = "https://api.openai.com/v1";
#[cfg(feature = "intelligence")]
const XAI_BASE_URL: &str = "https://api.x.ai/v1";
#[cfg(feature = "intelligence")]
const GROQ_BASE_URL: &str = "https://api.groq.com/openai/v1";
#[cfg(feature = "intelligence")]
const TOGETHER_BASE_URL: &str = "https://api.together.xyz/v1";
// Referenced only via OLLAMA_HOST fallbacks, hence dead_code.
#[cfg(feature = "intelligence")]
#[allow(dead_code)]
const OLLAMA_BASE_URL: &str = "http://localhost:11434/v1";
376
/// Best-effort provider name inferred from a base URL.
///
/// Substrings are checked case-insensitively in priority order — notably
/// "azure" before "openai.com", so Azure OpenAI hosts classify as
/// "azure". Unmatched URLs fall back to "openai-compatible".
#[cfg_attr(not(feature = "intelligence"), allow(dead_code))]
fn infer_provider_name(base_url: &str) -> &'static str {
    // (substring, provider) pairs, highest priority first.
    const RULES: [(&str, &'static str); 8] = [
        ("azure", "azure"),
        ("openai.com", "openai"),
        ("x.ai", "xai"),
        ("groq.com", "groq"),
        ("together.xyz", "together"),
        ("localhost", "ollama"),
        ("127.0.0.1", "ollama"),
        ("fireworks", "fireworks"),
    ];
    let lower = base_url.to_lowercase();
    RULES
        .iter()
        .find(|(needle, _)| lower.contains(needle))
        .map(|&(_, name)| name)
        .unwrap_or("openai-compatible")
}
399
/// Auto-detects an OpenAI-compatible backend from the environment.
///
/// Checks, in priority order, XAI_API_KEY, GROQ_API_KEY,
/// TOGETHER_API_KEY, OPENAI_API_KEY, then OLLAMA_HOST, and returns
/// `(api_key, base_url, provider_name)`. When nothing is set, returns
/// empty strings and "openai-compatible".
#[cfg(feature = "intelligence")]
fn detect_openai_provider() -> (String, String, &'static str) {
    // Key-bearing providers, highest priority first.
    let keyed = [
        ("XAI_API_KEY", XAI_BASE_URL, "xai"),
        ("GROQ_API_KEY", GROQ_BASE_URL, "groq"),
        ("TOGETHER_API_KEY", TOGETHER_BASE_URL, "together"),
        ("OPENAI_API_KEY", OPENAI_BASE_URL, "openai"),
    ];
    for (var, url, name) in keyed {
        match std::env::var(var) {
            Ok(key) if !key.is_empty() => return (key, url.to_string(), name),
            _ => {}
        }
    }
    // Ollama needs no key, only a reachable host.
    match std::env::var("OLLAMA_HOST") {
        Ok(host) if !host.is_empty() => (String::new(), format!("{}/v1", host), "ollama"),
        _ => (String::new(), String::new(), "openai-compatible"),
    }
}
430
/// Provider for OpenAI-compatible chat-completions HTTP APIs
/// (OpenAI, xAI, Groq, Together, Ollama, Azure, or any custom base URL).
#[cfg(feature = "intelligence")]
pub struct OpenAIProvider {
    // Normalized base URL (no trailing slash).
    base_url: String,
    // Bearer token; may be empty (e.g. Ollama needs none).
    api_key: String,
    model: String,
    // 0 omits max_tokens from the request body.
    max_tokens: usize,
    // 0.0 omits temperature from the request body.
    temperature: f64,
    system_prompt: String,
    // Resolved display name (e.g. "xai"), from config or URL inference.
    provider_name: String,
}
441
#[cfg(feature = "intelligence")]
impl OpenAIProvider {
    /// Returns `key` unchanged when non-empty; otherwise the value of
    /// environment variable `var` (empty string when unset/unreadable).
    fn key_or_env(key: String, var: &str) -> String {
        if key.is_empty() {
            std::env::var(var).unwrap_or_default()
        } else {
            key
        }
    }

    /// Builds a provider from `cfg`, filling in per-provider defaults.
    ///
    /// Resolution rules:
    /// - `model` is required; an empty model is a `GrammarViolation`.
    /// - An empty `base_url` gets the provider's well-known endpoint
    ///   (or `OPENAI_BASE_URL` as the last resort).
    /// - An empty `api_key` falls back to the provider's env var.
    /// - "openai-compatible" with neither key nor URL triggers env-based
    ///   auto-detection; otherwise the name is inferred from the URL.
    /// - The final base URL has any trailing '/' stripped.
    fn new(cfg: Config) -> Result<Self> {
        if cfg.model.is_empty() {
            return Err(EventGraphError::GrammarViolation {
                detail: "openai-compatible provider requires a model".to_string(),
            });
        }

        let mut api_key = cfg.api_key;
        let mut base_url = cfg.base_url;
        // Move (not clone): the remaining cfg fields are moved
        // individually into Self below, so partial moves are fine.
        let mut provider_name = cfg.provider;

        match provider_name.as_str() {
            "openai" => {
                if base_url.is_empty() {
                    base_url = OPENAI_BASE_URL.to_string();
                }
                api_key = Self::key_or_env(api_key, "OPENAI_API_KEY");
            }
            "xai" => {
                if base_url.is_empty() {
                    base_url = XAI_BASE_URL.to_string();
                }
                api_key = Self::key_or_env(api_key, "XAI_API_KEY");
            }
            "groq" => {
                if base_url.is_empty() {
                    base_url = GROQ_BASE_URL.to_string();
                }
                api_key = Self::key_or_env(api_key, "GROQ_API_KEY");
            }
            "together" => {
                if base_url.is_empty() {
                    base_url = TOGETHER_BASE_URL.to_string();
                }
                api_key = Self::key_or_env(api_key, "TOGETHER_API_KEY");
            }
            "ollama" => {
                // Keyless backend; only the host needs resolving.
                if base_url.is_empty() {
                    let host = std::env::var("OLLAMA_HOST")
                        .unwrap_or_else(|_| "http://localhost:11434".to_string());
                    base_url = format!("{}/v1", host);
                }
            }
            "openai-compatible" => {
                // Nothing specified at all: probe the environment.
                if api_key.is_empty() && base_url.is_empty() {
                    let (detected_key, detected_url, detected_name) = detect_openai_provider();
                    api_key = detected_key;
                    base_url = detected_url;
                    provider_name = detected_name.to_string();
                }
            }
            // Azure callers must supply their own deployment URL and key.
            "azure" => {}
            _ => {}
        }

        if base_url.is_empty() {
            base_url = OPENAI_BASE_URL.to_string();
        }

        // Still generic after detection? Classify by the URL's host.
        if provider_name == "openai-compatible" {
            provider_name = infer_provider_name(&base_url).to_string();
        }

        let base_url = base_url.trim_end_matches('/').to_string();

        Ok(Self {
            base_url,
            api_key,
            model: cfg.model,
            max_tokens: cfg.max_tokens,
            temperature: cfg.temperature,
            system_prompt: cfg.system_prompt,
            provider_name,
        })
    }
}
532
// Manual Debug: shows name, model, and base_url only — api_key is
// deliberately excluded so it never leaks into logs.
#[cfg(feature = "intelligence")]
impl fmt::Debug for OpenAIProvider {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("OpenAIProvider")
            .field("provider_name", &self.provider_name)
            .field("model", &self.model)
            .field("base_url", &self.base_url)
            .finish()
    }
}
543
#[cfg(feature = "intelligence")]
impl Intelligence for OpenAIProvider {
    /// Runs one reasoning request against `{base_url}/chat/completions`.
    ///
    /// Message order: optional system prompt, event history injected as a
    /// user/assistant priming exchange, then `prompt` as the final user
    /// message. Unset max_tokens (0) / temperature (0.0) are omitted from
    /// the request body. API-level errors in the JSON body are surfaced
    /// as `GrammarViolation` errors.
    fn reason(&self, prompt: &str, history: &[Event]) -> Result<Response> {
        let mut messages = Vec::new();

        if !self.system_prompt.is_empty() {
            messages.push(serde_json::json!({
                "role": "system",
                "content": self.system_prompt,
            }));
        }

        // History goes in as a synthetic exchange so chat models treat it
        // as context rather than as the question itself.
        let history_text = events_to_messages(history);
        if !history_text.is_empty() {
            messages.push(serde_json::json!({
                "role": "user",
                "content": history_text,
            }));
            messages.push(serde_json::json!({
                "role": "assistant",
                "content": "I understand the event history. What would you like me to reason about?",
            }));
        }

        messages.push(serde_json::json!({
            "role": "user",
            "content": prompt,
        }));

        let mut req_body = serde_json::json!({
            "model": self.model,
            "messages": messages,
        });

        // 0 / 0.0 mean "unset" — leave the fields out entirely.
        if self.max_tokens > 0 {
            req_body["max_tokens"] = serde_json::json!(self.max_tokens);
        }
        if self.temperature > 0.0 {
            req_body["temperature"] = serde_json::json!(self.temperature);
        }

        let url = format!("{}/chat/completions", self.base_url);

        let mut request = ureq::post(&url)
            .header("Content-Type", "application/json");

        // Keyless backends (e.g. Ollama) send no Authorization header.
        if !self.api_key.is_empty() {
            request = request.header("Authorization", &format!("Bearer {}", self.api_key));
        }

        let mut resp = request
            .send_json(&req_body)
            .map_err(|e| EventGraphError::GrammarViolation {
                detail: format!("openai API request error: {}", e),
            })?;

        let body: serde_json::Value = resp.body_mut().read_json().map_err(|e| {
            EventGraphError::GrammarViolation {
                detail: format!("openai API response parse error: {}", e),
            }
        })?;

        // Some servers return HTTP 200 with an "error" object in the body.
        if let Some(err) = body.get("error") {
            let msg = err["message"].as_str().unwrap_or("unknown error");
            return Err(EventGraphError::GrammarViolation {
                detail: format!("openai API error: {}", msg),
            });
        }

        let choices = body["choices"].as_array().ok_or_else(|| {
            EventGraphError::GrammarViolation {
                detail: "openai API returned no choices".to_string(),
            }
        })?;

        if choices.is_empty() {
            return Err(EventGraphError::GrammarViolation {
                detail: "openai API returned no choices".to_string(),
            });
        }

        let content = choices[0]["message"]["content"]
            .as_str()
            .unwrap_or("")
            .to_string();

        let tokens_used = body["usage"]["total_tokens"].as_u64().unwrap_or(0) as usize;
        let confidence = parse_confidence(tokens_used);

        Ok(Response {
            content,
            confidence,
            tokens_used,
        })
    }
}
644
#[cfg(feature = "intelligence")]
impl Provider for OpenAIProvider {
    // Name reflects the resolved backend (e.g. "xai"), not the config value.
    fn name(&self) -> &str { &self.provider_name }
    fn model(&self) -> &str { &self.model }
}
650
#[cfg(test)]
mod tests {
    use super::*;

    // Factory rejects provider names outside the supported set.
    #[test]
    fn test_new_unknown_provider() {
        let result = new(Config {
            provider: "unknown".to_string(),
            model: "some-model".to_string(),
            ..Config::default()
        });
        assert!(result.is_err(), "expected error for unknown provider");
    }

    // Errs either way: missing model (feature on) or missing feature (off).
    #[test]
    fn test_config_requires_model_for_openai_compatible() {
        let result = new(Config {
            provider: "openai-compatible".to_string(),
            ..Config::default()
        });
        assert!(result.is_err(), "expected error when model is empty");
    }

    // Config::default leaves max_tokens at 0; `new` applies the 1024 default.
    #[test]
    fn test_default_max_tokens() {
        let cfg = Config {
            provider: "claude-cli".to_string(),
            model: "sonnet".to_string(),
            ..Config::default()
        };
        assert_eq!(cfg.max_tokens, 0, "default max_tokens should be 0 before factory");
    }

    #[test]
    fn test_new_claude_cli_config() {
        let cfg = new_claude_cli_config("haiku");
        assert_eq!(cfg.provider, "claude-cli");
        assert_eq!(cfg.model, "haiku");
    }

    // Empty model string falls back to "sonnet".
    #[test]
    fn test_new_claude_cli_config_default_model() {
        let cfg = new_claude_cli_config("");
        assert_eq!(cfg.model, "sonnet");
    }

    #[test]
    fn test_events_to_messages_empty() {
        let result = events_to_messages(&[]);
        assert!(result.is_empty());
    }

    // The confidence heuristic is currently a constant 0.7.
    #[test]
    fn test_parse_confidence_returns_0_7() {
        let score = parse_confidence(100);
        assert!((score.value() - 0.7).abs() < f64::EPSILON);
    }

    // URL → name classification, including azure-before-openai priority
    // and the generic fallback.
    #[test]
    fn test_infer_provider_name_from_urls() {
        assert_eq!(infer_provider_name("https://api.openai.com/v1"), "openai");
        assert_eq!(infer_provider_name("https://api.x.ai/v1"), "xai");
        assert_eq!(infer_provider_name("https://api.groq.com/openai/v1"), "groq");
        assert_eq!(infer_provider_name("https://api.together.xyz/v1"), "together");
        assert_eq!(infer_provider_name("http://localhost:11434/v1"), "ollama");
        assert_eq!(infer_provider_name("http://127.0.0.1:11434/v1"), "ollama");
        assert_eq!(
            infer_provider_name("https://mydeployment.azure.openai.com/v1"),
            "azure"
        );
        assert_eq!(
            infer_provider_name("https://custom.example.com/v1"),
            "openai-compatible"
        );
        assert_eq!(
            infer_provider_name("https://api.fireworks.ai/v1"),
            "fireworks"
        );
    }

    // Tests below need the HTTP provider implementation.
    #[cfg(feature = "intelligence")]
    mod openai_tests {
        use super::*;

        #[test]
        fn test_new_openai_compatible_requires_model() {
            let result = new(Config {
                provider: "openai-compatible".to_string(),
                ..Config::default()
            });
            assert!(result.is_err(), "expected error when model is empty");
        }

        // Construction alone makes no network calls, so a fake key is fine.
        #[test]
        fn test_new_openai_compatible_success() {
            let p = new(Config {
                provider: "openai-compatible".to_string(),
                model: "gpt-4o".to_string(),
                api_key: "test-key-not-real".to_string(),
                ..Config::default()
            })
            .expect("should create provider");
            assert_eq!(p.model(), "gpt-4o");
        }

        // Name resolution table: explicit provider names pass through;
        // "openai-compatible" is inferred from the base URL.
        #[test]
        fn test_openai_compatible_infers_provider_name() {
            let cases = vec![
                ("openai", "", "openai"),
                ("xai", "", "xai"),
                ("groq", "", "groq"),
                ("together", "", "together"),
                ("ollama", "", "ollama"),
                ("openai-compatible", "https://api.openai.com/v1", "openai"),
                ("openai-compatible", "https://api.x.ai/v1", "xai"),
                ("openai-compatible", "https://api.groq.com/openai/v1", "groq"),
                ("openai-compatible", "https://api.together.xyz/v1", "together"),
                ("openai-compatible", "http://localhost:11434/v1", "ollama"),
                (
                    "openai-compatible",
                    "https://mydeployment.azure.openai.com/v1",
                    "azure",
                ),
                (
                    "openai-compatible",
                    "https://custom.example.com/v1",
                    "openai-compatible",
                ),
            ];

            for (provider, base_url, want_name) in cases {
                let p = new(Config {
                    provider: provider.to_string(),
                    model: "test-model".to_string(),
                    api_key: "test-key".to_string(),
                    base_url: base_url.to_string(),
                    ..Config::default()
                })
                .unwrap_or_else(|e| {
                    panic!("unexpected error for provider={provider}, base_url={base_url}: {e}")
                });
                assert_eq!(
                    p.name(),
                    want_name,
                    "provider={provider}, base_url={base_url}: name={}, want={want_name}",
                    p.name()
                );
            }
        }

        // Exercises every Config field at once (no struct-update syntax).
        #[test]
        fn test_openai_compatible_with_all_options() {
            let p = new(Config {
                provider: "openai-compatible".to_string(),
                model: "grok-3".to_string(),
                api_key: "test-key".to_string(),
                base_url: "https://api.x.ai/v1".to_string(),
                max_tokens: 2048,
                temperature: 0.7,
                system_prompt: "You are a helpful assistant.".to_string(),
            })
            .expect("should create provider");
            assert_eq!(p.name(), "xai");
            assert_eq!(p.model(), "grok-3");
        }
    }

    // Live network test; self-skips unless OPENAI_API_KEY is set.
    #[cfg(feature = "intelligence")]
    #[test]
    fn test_integration_openai_compatible_reason() {
        let api_key = std::env::var("OPENAI_API_KEY").unwrap_or_default();
        if api_key.is_empty() {
            eprintln!("OPENAI_API_KEY not set — skipping integration test");
            return;
        }

        let p = new(Config {
            provider: "openai-compatible".to_string(),
            model: "gpt-4o-mini".to_string(),
            api_key,
            max_tokens: 100,
            ..Config::default()
        })
        .expect("should create provider");

        let resp = p.reason("Reply with exactly one word: hello", &[]);
        assert!(resp.is_ok(), "Reason failed: {:?}", resp.err());
        let resp = resp.unwrap();
        assert!(!resp.content.is_empty(), "response content is empty");
        assert!(resp.tokens_used > 0, "tokens used is 0");
    }

    // Live network test with a bad key; expects the API to reject it.
    #[cfg(feature = "intelligence")]
    #[test]
    fn test_integration_openai_compatible_invalid_key() {
        let p = new(Config {
            provider: "openai-compatible".to_string(),
            model: "gpt-4o-mini".to_string(),
            api_key: "sk-invalid-key-for-testing".to_string(),
            max_tokens: 50,
            ..Config::default()
        })
        .expect("should create provider");

        let resp = p.reason("hello", &[]);
        assert!(resp.is_err(), "expected error with invalid API key");
    }

    // Spawns the real claude CLI; self-skips unless the opt-in env var is set.
    #[test]
    fn test_integration_claude_cli_reason() {
        if std::env::var("EVENTGRAPH_TEST_CLAUDE_CLI").unwrap_or_default().is_empty() {
            eprintln!("EVENTGRAPH_TEST_CLAUDE_CLI not set — skipping Claude CLI integration test");
            return;
        }

        let p = new(new_claude_cli_config("sonnet")).expect("should create provider");
        assert_eq!(p.name(), "claude-cli");
        assert_eq!(p.model(), "sonnet");

        let resp = p.reason("Reply with exactly one word: hello", &[]);
        assert!(resp.is_ok(), "Reason failed: {:?}", resp.err());
        let resp = resp.unwrap();
        assert!(!resp.content.is_empty(), "response content is empty");
    }
}