#![ cfg( all( feature = "builder_patterns", feature = "integration_tests" ) ) ]
use api_ollama::{
OllamaClient,
ChatRequestBuilder,
GenerateRequestBuilder,
EmbeddingsRequestBuilder,
MessageRole
};
use std::collections::HashMap;
mod server_helpers;
#[ tokio::test ]
async fn test_chat_request_builder_basic()
{
  with_test_server!(|mut client : OllamaClient, model : String| async move {
    // Smallest valid chat request: one user message with a tight token cap.
    let built = ChatRequestBuilder::new()
      .model(&model)
      .user_message("Hi")
      .max_tokens(10)
      .build()
      .expect("Failed to build chat request");
    // Round-trip through the live server and confirm a non-empty reply.
    let reply = client.chat(built).await
      .expect("Chat request should succeed - network/timeout failures must fail test loudly");
    assert!(!reply.message.content.is_empty(), "Response should have content");
    println!( "✓ Basic chat request builder successful" );
  });
}
#[ tokio::test ]
async fn test_chat_request_builder_conversation()
{
  with_test_server!(|mut client : OllamaClient, model : String| async move {
    // Assemble a multi-turn history: system, user, assistant, then user again.
    let builder = ChatRequestBuilder::new()
      .model(&model)
      .system_message("You are helpful")
      .user_message("Hi")
      .assistant_message("Hello")
      .user_message("Bye")
      .max_tokens(10);
    let conversation = builder.build()
      .expect("Failed to build conversation request");
    // The builder must preserve both message count and role ordering.
    assert_eq!(conversation.messages.len(), 4, "Should have 4 messages");
    assert_eq!(conversation.messages[0].role, MessageRole::System);
    assert_eq!(conversation.messages[1].role, MessageRole::User);
    assert_eq!(conversation.messages[2].role, MessageRole::Assistant);
    assert_eq!(conversation.messages[3].role, MessageRole::User);
    client.chat(conversation).await
      .expect("Conversation chat request should succeed - network/timeout failures must fail test loudly");
    println!( "✓ Conversation chat request builder successful" );
  });
}
#[ tokio::test ]
async fn test_chat_request_builder_with_options()
{
  with_test_server!(|mut client : OllamaClient, model : String| async move {
    // Raw options map passed alongside the dedicated setter methods.
    // NOTE(review): "temperature" appears both here (0.7) and via .temperature(0.8);
    // this exercises however the builder merges the two — confirm intended precedence.
    let extra_options = HashMap::from([
      ("temperature".to_string(), serde_json::Value::from(0.7)),
      ("top_p".to_string(), serde_json::Value::from(0.9)),
    ]);
    let built = ChatRequestBuilder::new()
      .model(&model)
      .user_message("Hi")
      .temperature(0.8)
      .top_p(0.9)
      .max_tokens(10)
      .options(extra_options)
      .build()
      .expect("Failed to build chat request with options");
    client.chat(built).await
      .expect("Chat request with options should succeed - network/timeout failures must fail test loudly");
    println!( "✓ Chat request builder with options successful" );
  });
}
#[ tokio::test ]
async fn test_chat_request_builder_streaming()
{
  // Streamed responses only exist behind the "streaming" feature; the test
  // degrades to a skip notice when the feature is compiled out.
  #[ cfg( feature = "streaming" ) ]
  {
    with_test_server!(|mut client : OllamaClient, model : String| async move {
      let streaming_request = ChatRequestBuilder::new()
        .model(&model)
        .user_message("Count from 1 to 3")
        .streaming(true)
        .build()
        .expect("Failed to build streaming chat request");
      assert_eq!(streaming_request.stream, Some(true), "Should enable streaming");
      // Only opening the stream is verified here; chunks are not consumed.
      let _stream = client.chat_stream(streaming_request).await
        .expect("Streaming chat request should succeed - network/timeout failures must fail test loudly");
      println!( "✓ Streaming chat request builder successful" );
    });
  }
  #[ cfg( not( feature = "streaming" ) ) ]
  {
    println!( "⚠ Skipping streaming test - streaming feature not enabled" );
  }
}
#[ tokio::test ]
async fn test_generate_request_builder_basic()
{
  with_test_server!(|mut client : OllamaClient, model : String| async move {
    // Plain prompt-completion request built with only the mandatory fields.
    let built = GenerateRequestBuilder::new()
      .model(&model)
      .prompt("Write a haiku about coding")
      .build()
      .expect("Failed to build generate request");
    let completion = client.generate(built).await
      .expect("Generate request should succeed - network/timeout failures must fail test loudly");
    assert!(!completion.response.is_empty(), "Response should have content");
    println!( "✓ Basic generate request builder successful" );
  });
}
#[ tokio::test ]
async fn test_generate_request_builder_with_options()
{
  with_test_server!(|mut client : OllamaClient, model : String| async move {
    // Exercise the optional knobs: temperature, token cap and stop sequences.
    let tuned = GenerateRequestBuilder::new()
      .model(&model)
      .prompt("Say hello in one word")
      .temperature(0.1)
      .max_tokens(10)
      .stop_sequences(&[".", "!"])
      .build()
      .expect("Failed to build generate request with options");
    client.generate(tuned).await
      .expect("Generate request with options should succeed - network/timeout failures must fail test loudly");
    println!( "✓ Generate request builder with options successful" );
  });
}
#[ tokio::test ]
async fn test_embeddings_request_builder_basic()
{
  // Embeddings support is feature-gated; skip loudly when compiled out.
  #[ cfg( feature = "embeddings" ) ]
  {
    with_test_server!(|mut client : OllamaClient, model : String| async move {
      let embed_request = EmbeddingsRequestBuilder::new()
        .model(&model)
        .prompt("Hello world")
        .build()
        .expect("Failed to build embeddings request");
      let embed_response = client.embeddings(embed_request).await
        .expect("Embeddings request should succeed - network/timeout failures must fail test loudly");
      assert!(!embed_response.embedding.is_empty(), "Should have embeddings");
      println!( "✓ Basic embeddings request builder successful" );
    });
  }
  #[ cfg( not( feature = "embeddings" ) ) ]
  {
    println!( "⚠ Skipping embeddings test - embeddings feature not enabled" );
  }
}
#[ tokio::test ]
async fn test_embeddings_request_builder_with_options()
{
  // Embeddings support is feature-gated; skip loudly when compiled out.
  #[ cfg( feature = "embeddings" ) ]
  {
    with_test_server!(|mut client : OllamaClient, model : String| async move {
      // Optional embedding parameters on top of the mandatory model/prompt.
      let embed_request = EmbeddingsRequestBuilder::new()
        .model(&model)
        .prompt("Machine learning is fascinating")
        .temperature(0.2)
        .dimension(2048)
        .build()
        .expect("Failed to build embeddings request with options");
      client.embeddings(embed_request).await
        .expect("Embeddings request with options should succeed - network/timeout failures must fail test loudly");
      println!( "✓ Embeddings request builder with options successful" );
    });
  }
  #[ cfg( not( feature = "embeddings" ) ) ]
  {
    println!( "⚠ Skipping embeddings test - embeddings feature not enabled" );
  }
}
#[ tokio::test ]
async fn test_builder_method_chaining()
{
  with_test_server!(|mut client : OllamaClient, model : String| async move {
    // A single fluent chain touching messages, sampling and limits.
    let chained = ChatRequestBuilder::new()
      .model(&model)
      .system_message("You are helpful")
      .user_message("Hi")
      .temperature(0.5)
      .max_tokens(10)
      .build()
      .expect("Method chaining should work");
    // Every chained setter must be reflected in the finished request.
    assert_eq!(chained.model, model);
    assert_eq!(chained.messages.len(), 2);
    assert!(chained.options.is_some());
    client.chat(chained).await
      .expect("Builder method chaining request should succeed - network/timeout failures must fail test loudly");
    println!( "✓ Builder method chaining successful" );
  });
}
#[ tokio::test ]
async fn test_builder_validation_errors()
{
  // Each incomplete or invalid builder configuration must be rejected by build().
  // No model at all:
  assert!(
    ChatRequestBuilder::new().user_message("Hello").build().is_err(),
    "Builder should fail without model"
  );
  // No messages at all:
  assert!(
    ChatRequestBuilder::new().model("test-model").build().is_err(),
    "Builder should fail without messages"
  );
  // Model present but empty:
  assert!(
    ChatRequestBuilder::new().model("").user_message("Hello").build().is_err(),
    "Builder should fail with empty model"
  );
  // Message present but empty:
  assert!(
    ChatRequestBuilder::new().model("test-model").user_message("").build().is_err(),
    "Builder should fail with empty message"
  );
  println!( "✓ Builder validation errors successful" );
}
#[ tokio::test ]
async fn test_builder_default_values()
{
  // Only the mandatory fields are set; everything else must take its default.
  let minimal = ChatRequestBuilder::new()
    .model("test-model")
    .user_message("Hello")
    .build()
    .expect("Basic builder should work");
  assert_eq!(minimal.stream, Some(false), "Stream should default to false for non-streaming");
  assert!(minimal.options.is_none(), "Options should default to None");
  println!( "✓ Builder default values successful" );
}
#[ tokio::test ]
async fn test_builder_immutability()
{
  // Cloning a builder must yield an independent copy: mutating the clone
  // may not leak back into the original.
  let base = ChatRequestBuilder::new()
    .model("model1")
    .user_message("Hello");
  let derived = base.clone()
    .model("model2");
  let base_request = base.build().expect("Builder1 should work");
  let derived_request = derived.build().expect("Builder2 should work");
  assert_eq!(base_request.model, "model1");
  assert_eq!(derived_request.model, "model2");
  println!( "✓ Builder immutability successful" );
}
#[ tokio::test ]
async fn test_builder_authentication_integration()
{
  // Secret-store backed authentication is feature-gated; skip loudly otherwise.
  #[ cfg( feature = "secret_management" ) ]
  {
    use api_ollama::SecretStore;
    with_test_server!(|client : OllamaClient, model : String| async move {
      // Attach a populated secret store to the client before issuing requests.
      let mut store = SecretStore::new();
      store.set("api_key", "test-key").expect("Failed to set API key");
      let mut authenticated = client.with_secret_store(store);
      let built = ChatRequestBuilder::new()
        .model(&model)
        .user_message("Hi")
        .max_tokens(10)
        .build()
        .expect("Builder with auth should work");
      authenticated.chat(built).await
        .expect("Builder authentication request should succeed - network/timeout failures must fail test loudly");
      println!( "✓ Builder authentication integration successful" );
    });
  }
  #[ cfg( not( feature = "secret_management" ) ) ]
  {
    println!( "⚠ Skipping authentication test - secret_management feature not enabled" );
  }
}
#[ tokio::test ]
async fn test_builder_complex_conversation()
{
  with_test_server!(|mut client : OllamaClient, model : String| async move {
    // Mixed-role history combined with sampling options in one builder chain.
    let conversation = ChatRequestBuilder::new()
      .model(&model)
      .system_message("You are helpful.")
      .user_message("Say yes")
      .assistant_message("Yes")
      .temperature(0.3)
      .max_tokens(50)
      .build()
      .expect("Complex conversation builder should work");
    // Count and role ordering must survive the build step.
    assert_eq!(conversation.messages.len(), 3);
    assert_eq!(conversation.messages[0].role, MessageRole::System);
    assert_eq!(conversation.messages[1].role, MessageRole::User);
    assert_eq!(conversation.messages[2].role, MessageRole::Assistant);
    client.chat(conversation).await
      .expect("Builder complex conversation request should succeed - network/timeout failures must fail test loudly");
    println!( "✓ Builder complex conversation successful" );
  });
}