use api_openai::
{
sync ::{ SyncClient, StreamConfig, SyncStreamIterator },
components ::chat_shared::{ ChatCompletionRequest, ChatCompletionStreamResponse },
environment ::{ OpenaiEnvironmentImpl, OpenAIRecommended },
secret ::Secret,
};
use std::sync::Arc;
use core::sync::atomic::{ AtomicBool, Ordering };
use core::time::Duration;
/// Building a `SyncClient` from a well-formed environment must succeed,
/// and the client must expose a chat sub-client for streaming use.
/// No network traffic occurs — only construction is exercised.
#[test]
fn test_sync_client_creation_for_streaming()
{
    let api_key = Secret::new_unchecked("sk-test_sync_streaming_1234567890abcdef".to_string());
    let env = OpenaiEnvironmentImpl::build(
        api_key,
        None,
        None,
        OpenAIRecommended::base_url().to_string(),
        OpenAIRecommended::realtime_base_url().to_string(),
    )
    .expect("Environment creation should work");
    let client = SyncClient::new(env).expect("Sync client creation should work");
    let _chat_client = client.chat();
}
/// `StreamConfig::default()` ships a 300-second timeout, a buffer size of
/// 100 chunks, and no cancellation token.
#[test]
fn test_stream_config_default_values()
{
    let defaults = StreamConfig::default();
    assert!(defaults.timeout.is_some());
    assert_eq!(defaults.timeout.unwrap(), Duration::from_secs(300));
    assert_eq!(defaults.buffer_size, 100);
    assert!(defaults.cancellation_token.is_none());
}
/// A `StreamConfig` built by hand keeps the exact field values it was given,
/// and the cancellation token remains a live, writable flag shared through
/// the `Arc` the caller kept.
#[test]
fn test_stream_config_custom_values()
{
    let flag = Arc::new(AtomicBool::new(false));
    let config = StreamConfig {
        timeout: Some(Duration::from_secs(60)),
        buffer_size: 50,
        cancellation_token: Some(flag.clone()),
    };
    assert_eq!(config.timeout, Some(Duration::from_secs(60)));
    assert_eq!(config.buffer_size, 50);
    assert!(config.cancellation_token.is_some());
    // The Arc we kept still controls the same flag the config holds.
    assert!(!flag.load(Ordering::Relaxed));
    flag.store(true, Ordering::Relaxed);
    assert!(flag.load(Ordering::Relaxed));
}
/// An atomic cancellation flag can be toggled back and forth and always
/// reads back the last value stored.
#[test]
fn test_cancellation_token_behavior()
{
    let flag = AtomicBool::new(false);
    assert!(!flag.load(Ordering::Relaxed));
    // Same store/load sequence as a real cancel + reset cycle.
    for &state in &[true, false] {
        flag.store(state, Ordering::Relaxed);
        assert_eq!(flag.load(Ordering::Relaxed), state);
    }
}
/// Cloning a `StreamConfig` copies the timeout and buffer size, and the
/// clone's cancellation token observes stores made through the original
/// `Arc` — i.e. the token is shared, not deep-copied.
#[test]
fn test_stream_config_clone()
{
    let shared_flag = Arc::new(AtomicBool::new(false));
    let source = StreamConfig {
        timeout: Some(Duration::from_secs(120)),
        buffer_size: 200,
        cancellation_token: Some(shared_flag.clone()),
    };
    let copy = source.clone();
    assert_eq!(source.timeout, copy.timeout);
    assert_eq!(source.buffer_size, copy.buffer_size);
    // Both configs hold the same Arc, so one store is visible through each.
    shared_flag.store(true, Ordering::Relaxed);
    if let (Some(a), Some(b)) = (&source.cancellation_token, &copy.cancellation_token) {
        assert_eq!(a.load(Ordering::Relaxed), b.load(Ordering::Relaxed));
    }
}
/// Constructing the sync client plus an (unsent) chat request must
/// type-check and not panic; nothing is sent over the network.
#[test]
fn test_sync_stream_iterator_structure()
{
    let key = Secret::new_unchecked("sk-test_iterator_structure_1234567890abcdef".to_string());
    let env = OpenaiEnvironmentImpl::build(
        key,
        None,
        None,
        OpenAIRecommended::base_url().to_string(),
        OpenAIRecommended::realtime_base_url().to_string(),
    )
    .expect("Environment creation should work");
    let client = SyncClient::new(env).expect("Sync client creation should work");
    let _chat_client = client.chat();
    // Every optional field left unset; only the model name is populated.
    let _request = ChatCompletionRequest {
        model: "gpt-5-nano".to_string(),
        messages: vec![],
        temperature: None,
        top_p: None,
        max_tokens: None,
        n: None,
        stop: None,
        stream: None,
        system_prompt: None,
        user: None,
        tools: None,
        tool_choice: None,
        response_format: None,
        seed: None,
        logit_bias: None,
        logprobs: None,
        top_logprobs: None,
    };
}
/// Simulates the cancellation path of a streaming iterator without any
/// network: once the shared flag is set, the mock "iterator" yields `None`.
#[test]
fn test_mock_stream_iterator_cancellation()
{
    use std::sync::mpsc;

    // Channel typed like a real stream pipe; only the type is exercised here.
    let (_sender, _receiver) =
        mpsc::channel::<api_openai::error::Result<ChatCompletionStreamResponse>>();
    let cancel = Arc::new(AtomicBool::new(false));
    assert!(!cancel.load(Ordering::Relaxed));
    cancel.store(true, Ordering::Relaxed);
    assert!(cancel.load(Ordering::Relaxed));
    // A cancelled iterator returns None instead of another chunk.
    if cancel.load(Ordering::Relaxed) {
        let next: Option<api_openai::error::Result<ChatCompletionStreamResponse>> = None;
        assert!(next.is_none());
    }
}
/// Despite the name, this checks the `Debug` representation of
/// `StreamConfig` (no serde round-trip happens here): the rendered output
/// must mention the struct name and its fields.
#[test]
fn test_stream_config_serialization()
{
    let config = StreamConfig {
        timeout: Some(Duration::from_secs(45)),
        buffer_size: 75,
        cancellation_token: None,
    };
    let rendered = format!("{config:?}");
    for needle in ["StreamConfig", "timeout", "buffer_size"] {
        assert!(rendered.contains(needle));
    }
}
/// Two independently allocated cancellation tokens do not interfere:
/// setting one leaves the other untouched.
#[test]
fn test_multiple_cancellation_tokens()
{
    let first = Arc::new(AtomicBool::new(false));
    let second = Arc::new(AtomicBool::new(false));
    // Both start cleared.
    assert!(!first.load(Ordering::Relaxed));
    assert!(!second.load(Ordering::Relaxed));
    // Setting the first must not leak into the second.
    first.store(true, Ordering::Relaxed);
    assert!(first.load(Ordering::Relaxed));
    assert!(!second.load(Ordering::Relaxed));
    // After setting the second, both read true.
    second.store(true, Ordering::Relaxed);
    assert!(first.load(Ordering::Relaxed));
    assert!(second.load(Ordering::Relaxed));
}
/// `StreamConfig` accepts the largest representable values for timeout and
/// buffer size without clamping or overflow, and a pre-set token reads true.
#[test]
fn test_stream_config_extreme_values()
{
    let config = StreamConfig {
        timeout: Some(Duration::from_secs(u64::MAX)),
        buffer_size: usize::MAX,
        cancellation_token: Some(Arc::new(AtomicBool::new(true))),
    };
    assert_eq!(config.timeout, Some(Duration::from_secs(u64::MAX)));
    assert_eq!(config.buffer_size, usize::MAX);
    let token = config.cancellation_token.as_ref();
    assert!(token.is_some());
    if let Some(flag) = token {
        assert!(flag.load(Ordering::Relaxed));
    }
}
/// Zero is a legal value for both the timeout and the buffer size.
#[test]
fn test_stream_config_zero_values()
{
    let config = StreamConfig {
        timeout: Some(Duration::from_secs(0)),
        buffer_size: 0,
        cancellation_token: None,
    };
    assert_eq!(config.timeout, Some(Duration::from_secs(0)));
    assert_eq!(config.buffer_size, 0);
    assert!(config.cancellation_token.is_none());
}
/// `timeout: None` (no deadline at all) is a valid configuration and
/// leaves the other fields untouched.
#[test]
fn test_stream_config_no_timeout()
{
    let config = StreamConfig {
        timeout: None,
        buffer_size: 100,
        cancellation_token: None,
    };
    assert!(config.timeout.is_none());
    assert_eq!(config.buffer_size, 100);
}
/// Sanity-checks that the client, a full chat request, and a default
/// `StreamConfig` can all be constructed together — the shapes a streaming
/// call would take — without panicking. No request is actually sent.
#[test]
fn test_sync_chat_streaming_method_signatures()
{
    let key = Secret::new_unchecked("sk-test_method_signatures_1234567890abcdef".to_string());
    let env = OpenaiEnvironmentImpl::build(
        key,
        None,
        None,
        OpenAIRecommended::base_url().to_string(),
        OpenAIRecommended::realtime_base_url().to_string(),
    )
    .expect("Environment creation should work");
    let client = SyncClient::new(env).expect("Sync client creation should work");
    let _chat_client = client.chat();
    // Every optional field left unset; only the model name is populated.
    let _request = ChatCompletionRequest {
        model: "gpt-5-nano".to_string(),
        messages: vec![],
        temperature: None,
        top_p: None,
        max_tokens: None,
        n: None,
        stop: None,
        stream: None,
        system_prompt: None,
        user: None,
        tools: None,
        tool_choice: None,
        response_format: None,
        seed: None,
        logit_bias: None,
        logprobs: None,
        top_logprobs: None,
    };
    // A default config must also be constructible alongside the request.
    let _config = StreamConfig::default();
}
/// Hammers one shared `Arc<AtomicBool>` from 10 threads to verify it is
/// `Send + Sync` and that concurrent relaxed stores/loads complete cleanly.
///
/// Fix over the previous version: `handle.join()` results are no longer
/// discarded with `let _ =` — a panic inside a worker thread previously
/// could not fail this test, defeating its purpose.
#[test]
fn test_cancellation_token_thread_safety()
{
    use std::thread;

    let token = Arc::new(AtomicBool::new(false));
    let handles: Vec<_> = (0..10)
        .map(|i| {
            let token_clone = token.clone();
            thread::spawn(move || {
                // Alternate the stored value per thread so both states are exercised.
                let value = i % 2 == 0;
                token_clone.store(value, Ordering::Relaxed);
                token_clone.load(Ordering::Relaxed)
            })
        })
        .collect();
    for handle in handles {
        // Propagate worker panics into the test instead of swallowing them.
        handle.join().expect("worker thread should not panic");
    }
    // The final value is whichever store won the race; just confirm it reads.
    let _final_value = token.load(Ordering::Relaxed);
}
/// Compile-time check that `SyncStreamIterator` is `Send` and implements
/// `Iterator`: the helper functions only monomorphize if the bounds hold,
/// so this test passing is equivalent to the bounds being satisfied.
#[test]
fn test_stream_iterator_trait_bounds()
{
    fn require_send<T: Send>() {}
    fn require_iterator<T: Iterator>() {}
    require_send::<SyncStreamIterator<ChatCompletionStreamResponse>>();
    require_iterator::<SyncStreamIterator<ChatCompletionStreamResponse>>();
}
/// `Duration` supports addition and total ordering — the operations the
/// streaming timeout logic relies on.
#[test]
fn test_duration_arithmetic()
{
    let minute = Duration::from_secs(60);
    let half_minute = Duration::from_secs(30);
    assert_eq!(minute + half_minute, Duration::from_secs(90));
    assert!(half_minute < minute);
    assert!(minute > half_minute);
}
/// An `AtomicBool` round-trips values under each ordering pairing the
/// streaming code uses: Relaxed/Relaxed, SeqCst/SeqCst, Release/Acquire.
#[test]
fn test_atomic_bool_memory_ordering()
{
    let flag = AtomicBool::new(false);
    flag.store(true, Ordering::Relaxed);
    assert!(flag.load(Ordering::Relaxed));
    flag.store(false, Ordering::SeqCst);
    assert!(!flag.load(Ordering::SeqCst));
    flag.store(true, Ordering::Release);
    assert!(flag.load(Ordering::Acquire));
}