use evolve_llm::{AnthropicHaikuClient, LlmClient, OllamaClient};
#[tokio::test]
#[ignore = "requires ANTHROPIC_API_KEY"]
async fn smoke_anthropic_round_trip() {
    // Live round-trip against the Anthropic API: send a trivial prompt and
    // check that the model echoes the requested word back.
    let client = AnthropicHaikuClient::from_env().expect("ANTHROPIC_API_KEY must be set");

    let completion = client
        .complete("Reply with exactly the word: pong", 16)
        .await
        .expect("anthropic call should succeed");

    assert!(!completion.text.is_empty(), "expected non-empty assistant text");

    // Case-insensitive match: models sometimes vary capitalization.
    let lowered = completion.text.to_lowercase();
    assert!(
        lowered.contains("pong"),
        "expected 'pong' in response, got: {}",
        completion.text,
    );
}
#[tokio::test]
#[ignore = "requires local Ollama with OLLAMA_MODEL pulled"]
async fn smoke_ollama_round_trip() {
    // Live round-trip against a locally running Ollama server; only checks
    // that some assistant text comes back (local models may not comply with
    // the exact-word instruction).
    let response = OllamaClient::local()
        .complete("Reply with exactly the word: pong", 16)
        .await
        .expect("ollama call should succeed");

    assert!(!response.text.is_empty(), "expected non-empty assistant text");
}