#![cfg(all(feature = "openapi", feature = "test-support"))]
use radkit::agent::LlmWorker;
use radkit::macros::LLMOutput;
use radkit::models::{Content, ContentPart, LlmResponse, TokenUsage};
use radkit::test_support::FakeLlm;
use radkit::tools::{BaseToolset, DefaultExecutionState, OpenApiToolSet, ToolCall, ToolContext};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::sync::Arc;
// Structured-output type for the pet-search agent: the FakeLlm's final text
// response is JSON that the worker deserializes into this shape (via the
// `LLMOutput` derive; `JsonSchema` supplies the schema sent to the model).
#[derive(Debug, Serialize, Deserialize, LLMOutput, JsonSchema)]
struct PetSearchResult {
// Number of pets the (fake) model reports as found.
total_found: usize,
// Names of the pets returned by the search.
pet_names: Vec<String>,
// Free-text summary line produced by the model.
summary: String,
}
// Structured-output type for the single-pet lookup agent (`getPetById`).
#[derive(Debug, Serialize, Deserialize, LLMOutput, JsonSchema)]
struct PetDetails {
// Pet identifier, matching the Petstore API's i64 ids.
id: i64,
name: String,
status: String,
// Optional because the Petstore schema does not require a category.
category: Option<String>,
}
/// Fetches the live Swagger Petstore OpenAPI document and converts it into a
/// shared `OpenApiToolSet`.
///
/// # Panics
/// Panics when the spec cannot be fetched or parsed — acceptable here because
/// every caller is a network-gated (`#[ignore]`) test.
async fn create_petstore_toolset() -> Arc<OpenApiToolSet> {
    let spec_url = "https://petstore3.swagger.io/api/v3/openapi.json";
    let namespace = "petstore".to_string();
    let built = OpenApiToolSet::from_url(namespace, spec_url, None)
        .await
        .expect("Failed to create Petstore toolset");
    Arc::new(built)
}
/// Builds an `LlmResponse` whose sole content part is one tool call
/// (fixed id `"call-1"`) against `tool_name` with the given JSON `arguments`.
/// Used to script the FakeLlm's "please run this tool" turn.
fn tool_call_response(tool_name: &str, arguments: serde_json::Value) -> LlmResponse {
    let call_part = ContentPart::ToolCall(ToolCall::new("call-1", tool_name, arguments));
    let content = Content::from_parts(vec![call_part]);
    LlmResponse::new(content, TokenUsage::empty())
}
/// Serializes `value` to a JSON string and wraps it as a plain-text
/// `LlmResponse`, mimicking a model emitting its final structured output.
///
/// # Panics
/// Panics if serialization fails. The inputs in these tests are plain data
/// structs, so a failure here indicates a test-setup bug; the `expect`
/// message states that invariant instead of a bare `unwrap()`.
fn structured_response<T: Serialize>(value: &T) -> LlmResponse {
    let json_str = serde_json::to_string(value)
        .expect("structured_response: test value must serialize to JSON");
    LlmResponse::new(Content::from_text(json_str), TokenUsage::empty())
}
// End-to-end agent test for a query-parameter OpenAPI tool:
// FakeLlm turn 1 requests `findPetsByStatus(status=available)`, the toolset
// performs the real HTTP call against the live Petstore API, and turn 2
// returns the structured `PetSearchResult` JSON the worker deserializes.
// NOTE(review): the emoji in the log strings appear mojibake'd (e.g. "๐งช");
// likely an encoding corruption of this file — confirm before "fixing" them.
#[tokio::test]
#[ignore = "requires network access to Petstore API"]
async fn test_openapi_agent_query_parameter() {
println!("๐งช Testing OpenAPI Agent with Query Parameter");
// Generate tools from the live Petstore OpenAPI spec.
let petstore_toolset = create_petstore_toolset().await;
let tools = petstore_toolset.get_tools().await;
assert!(!tools.is_empty(), "No tools generated from OpenAPI spec");
println!("๐ Generated {} OpenAPI tools", tools.len());
// The spec must expose the operation under test by its operationId.
let find_by_status = tools
.iter()
.find(|t| t.name() == "findPetsByStatus")
.expect("findPetsByStatus tool not found");
println!("โ Found tool: {}", find_by_status.description());
// Scripted LLM turn 1: ask for the tool with status=available.
let tool_call_resp = tool_call_response(
"findPetsByStatus",
json!({
"status": "available"
}),
);
// Scripted LLM turn 2: the final structured answer.
let final_result = PetSearchResult {
total_found: 3,
pet_names: vec![
"Doggie".to_string(),
"Fluffy".to_string(),
"Buddy".to_string(),
],
summary: "Found 3 available pets".to_string(),
};
let structured_resp = structured_response(&final_result);
// FakeLlm replays the two responses in order across agent iterations.
let fake_llm = FakeLlm::with_responses(
"fake-openapi-llm",
vec![Ok(tool_call_resp), Ok(structured_resp)],
);
let pet_agent = LlmWorker::<PetSearchResult>::builder(fake_llm.clone())
.with_system_instructions(
"You are a pet store assistant. Use the findPetsByStatus tool to search for pets.",
)
.with_toolset(petstore_toolset.clone())
.with_max_iterations(5)
.build();
println!("๐ค Executing agent query: 'Find all available pets'");
let result = pet_agent.run("Find all available pets").await;
assert!(result.is_ok(), "Agent execution failed: {:?}", result.err());
let search_result = result.unwrap();
println!("๐ Pet Search Result:");
println!(" Total Found: {}", search_result.total_found);
println!(" Pet Names: {:?}", search_result.pet_names);
println!(" Summary: {}", search_result.summary);
// Inspect the threads recorded by FakeLlm to prove the HTTP tool really ran:
// the final thread must contain a ToolResponse content part.
let llm_calls = fake_llm.calls();
assert!(
!llm_calls.is_empty(),
"FakeLlm was not called during execution"
);
println!("โ LLM called {} times", llm_calls.len());
let last_thread = &llm_calls[llm_calls.len() - 1];
let has_tool_result = last_thread.events().iter().any(|event| {
event
.content()
.parts()
.iter()
.any(|part| matches!(part, ContentPart::ToolResponse(_)))
});
assert!(
has_tool_result,
"No tool result found - HTTP request was not executed"
);
println!("โ OpenAPI tool executed successfully");
println!("โ
OpenAPI query parameter test passed");
}
// End-to-end agent test for a path-parameter OpenAPI tool: the scripted
// tool call targets `getPetById` with `petId: 1` (substituted into the URL
// path by the toolset), followed by a structured `PetDetails` answer.
#[tokio::test]
#[ignore = "requires network access to Petstore API"]
async fn test_openapi_agent_path_parameter() {
println!("๐งช Testing OpenAPI Agent with Path Parameter");
let petstore_toolset = create_petstore_toolset().await;
let tools = petstore_toolset.get_tools().await;
// Locate the path-parameter operation by its operationId.
let get_pet_by_id = tools
.iter()
.find(|t| t.name() == "getPetById")
.expect("getPetById tool not found");
println!("โ Found tool: {}", get_pet_by_id.description());
// Scripted LLM turn 1: call getPetById for pet 1.
let tool_call_resp = tool_call_response(
"getPetById",
json!({
"petId": 1
}),
);
// Scripted LLM turn 2: the structured final answer.
let final_result = PetDetails {
id: 1,
name: "Doggie".to_string(),
status: "available".to_string(),
category: Some("Dogs".to_string()),
};
let structured_resp = structured_response(&final_result);
let fake_llm = FakeLlm::with_responses(
"fake-openapi-llm-path",
vec![Ok(tool_call_resp), Ok(structured_resp)],
);
let pet_details_agent = LlmWorker::<PetDetails>::builder(fake_llm.clone())
.with_system_instructions(
"You are a pet store assistant. Use the getPetById tool to get pet details.",
)
.with_toolset(petstore_toolset.clone())
.with_max_iterations(5)
.build();
println!("๐ค Executing agent query: 'Get details for pet ID 1'");
let result = pet_details_agent.run("Get details for pet ID 1").await;
assert!(result.is_ok(), "Agent execution failed: {:?}", result.err());
let pet_details = result.unwrap();
println!("๐ Pet Details:");
println!(" ID: {}", pet_details.id);
println!(" Name: {}", pet_details.name);
println!(" Status: {}", pet_details.status);
if let Some(category) = &pet_details.category {
println!(" Category: {}", category);
}
// Confirm a ToolResponse part was recorded, i.e. the HTTP call executed.
let llm_calls = fake_llm.calls();
assert!(!llm_calls.is_empty(), "FakeLlm was not called");
println!("โ LLM called {} times", llm_calls.len());
let last_thread = &llm_calls[llm_calls.len() - 1];
let has_tool_result = last_thread.events().iter().any(|event| {
event
.content()
.parts()
.iter()
.any(|part| matches!(part, ContentPart::ToolResponse(_)))
});
assert!(
has_tool_result,
"No tool result found - HTTP request was not executed"
);
println!("โ OpenAPI tool with path parameter executed successfully");
println!("โ
OpenAPI path parameter test passed");
}
// Orchestration test: the FakeLlm scripts two consecutive tool calls
// (findPetsByStatus, then getPetById) before the structured final answer,
// exercising the worker's multi-iteration tool loop.
#[tokio::test]
#[ignore = "requires network access to Petstore API"]
async fn test_openapi_agent_multi_tool_orchestration() {
println!("๐งช Testing OpenAPI Agent Multi-Tool Orchestration");
let petstore_toolset = create_petstore_toolset().await;
// Turn 1: list available pets.
let tool_call_1 = tool_call_response(
"findPetsByStatus",
json!({
"status": "available"
}),
);
// Turn 2: fetch details for pet 1.
let tool_call_2 = tool_call_response(
"getPetById",
json!({
"petId": 1
}),
);
// Output type local to this test: merges results from both tool calls.
#[derive(Debug, Serialize, Deserialize, LLMOutput, JsonSchema)]
struct CombinedResult {
total_available: usize,
first_pet_name: String,
first_pet_status: String,
}
// Turn 3: the structured final answer.
let combined = CombinedResult {
total_available: 5,
first_pet_name: "Doggie".to_string(),
first_pet_status: "available".to_string(),
};
let structured_resp = structured_response(&combined);
let fake_llm = FakeLlm::with_responses(
"fake-openapi-llm-multi",
vec![Ok(tool_call_1), Ok(tool_call_2), Ok(structured_resp)],
);
// Higher iteration cap than the single-tool tests to leave room for the
// two tool round-trips plus the final answer.
let multi_tool_agent = LlmWorker::<CombinedResult>::builder(fake_llm.clone())
.with_system_instructions(
"You are a pet store assistant. Use findPetsByStatus and getPetById to answer queries.",
)
.with_toolset(petstore_toolset.clone())
.with_max_iterations(10)
.build();
println!("๐ค Executing: 'Find available pets and tell me about the first one'");
let result = multi_tool_agent
.run("Find available pets and tell me about the first one")
.await;
assert!(result.is_ok(), "Agent execution failed: {:?}", result.err());
let combined_result = result.unwrap();
println!("๐ Combined Result:");
println!(" Total Available: {}", combined_result.total_available);
println!(" First Pet Name: {}", combined_result.first_pet_name);
println!(" First Pet Status: {}", combined_result.first_pet_status);
// Two scripted tool calls imply at least two LLM invocations.
let llm_calls = fake_llm.calls();
assert!(
llm_calls.len() >= 2,
"Expected at least 2 LLM calls, got {}",
llm_calls.len()
);
println!("โ LLM called {} times for multi-tool", llm_calls.len());
// Count events across all recorded threads that carry a ToolResponse part,
// proving at least one real HTTP tool execution happened.
let tool_result_count = llm_calls
.iter()
.flat_map(|thread| thread.events())
.filter(|event| {
event
.content()
.parts()
.iter()
.any(|part| matches!(part, ContentPart::ToolResponse(_)))
})
.count();
assert!(
tool_result_count >= 1,
"Expected at least 1 tool execution, got {}",
tool_result_count
);
println!("โ {} tool execution(s) completed", tool_result_count);
println!("โ
OpenAPI multi-tool orchestration test passed");
}
// Error-path test that bypasses the agent loop and invokes the generated
// tool directly with a pet id that should not exist, asserting only that
// the tool surfaces the HTTP failure gracefully (no panic) — it accepts
// either a success-shaped result carrying a status code or an error result.
#[tokio::test]
#[ignore = "requires network access to Petstore API"]
async fn test_openapi_http_error_handling() {
println!("๐งช Testing OpenAPI HTTP Error Handling");
let petstore_toolset = create_petstore_toolset().await;
let tools = petstore_toolset.get_tools().await;
let get_pet_tool = tools
.iter()
.find(|t| t.name() == "getPetById")
.expect("getPetById tool not found");
println!("โ Testing error handling with: {}", get_pet_tool.name());
// Hand-built arguments: an id chosen to be absent from the live API.
// NOTE(review): not guaranteed absent on a shared demo server — the test
// tolerates both outcomes below, so that's acceptable.
let mut args = std::collections::HashMap::new();
args.insert("petId".to_string(), json!(999999999));
// Minimal execution state/context required by the tool-invocation API.
let state = DefaultExecutionState::new();
let tool_context = ToolContext::builder()
.with_state(&state)
.build()
.expect("Failed to create ToolContext");
println!("๐ง Calling tool with invalid pet ID (999999999)...");
let result = get_pet_tool.run_async(args, &tool_context).await;
println!("๐ Tool result success: {}", result.is_success());
if result.is_success() {
// Some toolsets report HTTP errors as a successful result that embeds
// the status code; log it and note the expected 404.
let data = result.data();
if let Some(status) = data.get("status") {
let status_code = status.as_u64().unwrap_or(0);
println!("โ Tool returned status code: {}", status_code);
if status_code == 404 {
println!("โ Correctly received 404 for non-existent pet");
}
}
} else if let Some(error) = result.error_message() {
// Alternatively the failure may surface as an error result.
println!("โ Tool returned error: {}", error);
}
// Reaching this line at all means no panic occurred — the real assertion.
println!("โ Tool handled HTTP error without panic");
println!("โ
OpenAPI error handling test passed");
}