mod test_tools;
mod test_utils;

use std::sync::Arc;

use language_barrier_core::llm_service::{HTTPLlmService, LLMService};
use language_barrier_core::model::{Claude, Sonnet35Version};
use language_barrier_core::{Chat, Message};
use test_tools::WeatherTool;
use test_utils::{get_anthropic_provider, setup_tracing};
use tracing::{Level, info};
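
/// Sends two independent, tool-enabled requests to the live Anthropic API and
/// asserts that the model answers each weather question with a tool call
/// rather than plain text. The test is skipped when no API key is available.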
#[tokio::test]
async fn test_multi_turn_conversation_with_tools() {
    setup_tracing(Level::DEBUG);
    info!("Starting test_multi_turn_conversation_with_tools");
    let Some(provider) = get_anthropic_provider() else {
        info!("Skipping test_multi_turn_conversation_with_tools: No API key available");
        return;
    };
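
    // Pin the model under test: Claude 3.5 Sonnet, v2.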
    let model = Claude::Sonnet35 {
        version: Sonnet35Version::V2,
    };
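
    // First conversation: register the weather tool and ask about San Francisco.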
    let chat_sf = Chat::default()
        .with_system_prompt("You are a helpful AI assistant that can provide weather information.")
        .with_max_output_tokens(1000)
        .with_tool(WeatherTool)
        .expect("failed to register WeatherTool")
        .add_message(Message::user("What's the weather like in San Francisco?"));
    let service = HTTPLlmService::new(model, Arc::new(provider));
    match service.generate_next_message(&chat_sf).await {
        Ok(Message::Assistant { tool_calls, .. }) => {
            assert!(
                !tool_calls.is_empty(),
                "Expected tool calls for San Francisco"
            );
        }
        Err(e) => {
            // Live-API calls can fail transiently; tolerate the error and skip
            // the rest of the test rather than failing the suite.
            info!("San Francisco request failed; skipping rest of test: {}", e);
            return;
        }
        _ => {
            panic!("Expected assistant message for San Francisco");
        }
    }
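
    // Second, independent conversation: same configuration, different city.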
    let chat_ny = Chat::default()
        .with_system_prompt("You are a helpful AI assistant that can provide weather information.")
        .with_max_output_tokens(1000)
        .with_tool(WeatherTool)
        .expect("failed to register WeatherTool")
        .add_message(Message::user("What's the weather like in New York?"));
    match service.generate_next_message(&chat_ny).await {
        Ok(Message::Assistant { tool_calls, .. }) => {
            assert!(!tool_calls.is_empty(), "Expected tool calls for New York");
            // The raw argument payload should mention the requested city verbatim.
            let references_ny = tool_calls
                .iter()
                .any(|call| call.function.arguments.contains("New York"));
            assert!(references_ny, "Expected tool call to reference New York");
        }
        Err(e) => {
            // As above, tolerate transient live-API failures.
            info!("New York request failed; tolerating live-API error: {}", e);
        }
        _ => {
            panic!("Expected assistant message for New York");
        }
    }
}