#![ cfg( feature = "integration" ) ]
#![ allow( unused_imports, dead_code ) ]
use api_openai::ClientApiAccessors;
pub use api_openai as the_module;
mod test_isolation;
use test_isolation::{ TestIsolation, IsolatedClient, should_run_real_api_tests };
use api_openai::
{
Client,
error ::OpenAIError,
environment ::{ EnvironmentInterface, OpenaiEnvironment, OpenaiEnvironmentImpl },
secret ::Secret,
components ::
{
responses ::
{
CreateResponseRequest,
ResponseObject,
ResponseInput,
ResponseStreamEvent,
ResponseItemList,
},
input ::
{
InputItem,
InputMessage,
InputContentPart,
InputText,
},
common ::{ ModelIdsResponses, ListQuery },
tools ::{ Tool, ToolChoice, FunctionTool, FunctionParameters },
output ::{ OutputItem, OutputContentPart },
}
};
use serde_json::json;
use futures_util::stream::StreamExt;
use secrecy::ExposeSecret;
use tokio::sync::mpsc;
#[ deprecated( note = "Use TestIsolation framework instead" ) ]
fn load_secret_for_test() -> Secret
{
  // Legacy shim kept for the old-style tests below: warn loudly, then fall
  // back to the environment/fallback-based secret loader.
  eprintln!( "DEPRECATED: load_secret_for_test() - Use TestIsolation::create_test_secret() instead" );
  let loaded = Secret::load_with_fallbacks( "OPENAI_API_KEY" );
  loaded.expect("INTEGRATION TEST FAILURE: Real API credentials required but not found")
}
#[ deprecated( note = "Use should_run_real_api_tests() instead" ) ]
fn should_run_integration_tests() -> bool
{
  // Thin compatibility wrapper: gating moved to the TestIsolation helper.
  let run_enabled = should_run_real_api_tests();
  run_enabled
}
#[ deprecated( note = "Use should_run_real_api_tests() instead" ) ]
fn should_run_with_real_api() -> bool
{
  // Duplicate legacy alias of should_run_integration_tests(); both now defer
  // to the TestIsolation gate.
  let run_enabled = should_run_real_api_tests();
  run_enabled
}
/// Deprecated helper that builds a real-API client from environment credentials.
///
/// Uses the recommended production base URLs for both the REST and realtime
/// endpoints.
///
/// # Errors
/// Returns an error when the environment or the client cannot be constructed.
#[ deprecated( note = "Use IsolatedClient::new() instead" ) ]
#[ allow( deprecated ) ] // intentionally calls the deprecated load_secret_for_test() shim
fn create_test_client() -> Result< Client< OpenaiEnvironmentImpl >, Box< dyn core::error::Error > >
{
  // NOTE: the original carried #[allow(unused_macros)], which is inert on a
  // function (that lint only applies to macro definitions) — removed.
  eprintln!( "DEPRECATED: create_test_client() - Use IsolatedClient::new() instead" );
  let secret = load_secret_for_test();
  let env = OpenaiEnvironmentImpl::build
  (
    secret,
    None,
    None,
    api_openai::environment::OpenAIRecommended::base_url().to_string(),
    api_openai::environment::OpenAIRecommended::realtime_base_url().to_string(),
  )?;
  Ok( Client::build( env )? )
}
// Legacy convenience macro: expands to a client construction that panics on
// failure. Superseded by IsolatedClient::new(); kept only so older call sites
// still compile.
#[ deprecated( note = "Use IsolatedClient::new() instead" ) ]
#[ allow( unused_macros ) ]
macro_rules! setup_test_client
{
() =>
{
create_test_client().expect( "Failed to create test client" )
};
}
// Legacy gating macro that now intentionally expands to nothing: API-key
// gating is handled by should_run_real_api_tests() / the TestIsolation
// framework instead. Invocations remain as no-ops in the old-style tests.
#[ deprecated( note = "Not needed with TestIsolation framework" ) ]
macro_rules! require_api_key
{
() =>
{
};
}
/// Verifies that real OpenAI credentials are available and plausibly valid.
///
/// Loads `OPENAI_API_KEY` via the fallback chain and rejects keys that are
/// empty, contain the marker string "invalid", or are shorter than 20 bytes.
///
/// # Errors
/// Returns `OpenAIError::InvalidArgument` when credentials are missing or the
/// key fails the plausibility checks above.
fn ensure_secret_loaded() -> Result< (), OpenAIError >
{
  let secret = Secret::load_with_fallbacks("OPENAI_API_KEY")
  .map_err(|e| OpenAIError::InvalidArgument( format!("INTEGRATION TEST FAILURE: Real API credentials required but not found. {e}") ))?;
  let api_key = secret.expose_secret();
  if api_key.is_empty() || api_key.contains("invalid") || api_key.len() < 20
  {
    // Char-safe prefix for the diagnostic: the previous byte slice
    // `&api_key[..min(10, len)]` panics when byte 10 is not a UTF-8 character
    // boundary.
    let prefix : String = api_key.chars().take( 10 ).collect();
    return Err( OpenAIError::InvalidArgument( format!("INTEGRATION TEST FAILURE: Invalid API key detected. Real integration tests require valid OpenAI API credentials, got : '{prefix}'") ) );
  }
  let len = api_key.len();
  println!( "✅ Real API credentials loaded successfully (length : {len})" );
  Ok( () )
}
#[ tokio::test ]
async fn create_response()
{
  // Happy-path creation: one user message through the structured Items input,
  // asserting the basic shape of the returned response object.
  let isolated_client = IsolatedClient::new( "create_response", should_run_real_api_tests() )
  .expect( "Failed to create isolated client" );
  let client = isolated_client.client();
  let user_message = InputMessage
  {
    r#type : "message".to_string(),
    role : "user".to_string(),
    content : vec![ InputContentPart::Text( InputText { text : "Hello, how are you?".to_string() } ) ],
    status : None,
    id : None,
  };
  let request = CreateResponseRequest::former()
  .model( ModelIdsResponses::from( "gpt-5-nano".to_string() ) )
  .input( ResponseInput::Items( vec![ InputItem::Message( user_message ) ] ) )
  .max_output_tokens( 50 )
  .parallel_tool_calls( true )
  .form();
  // Any API-level error is a hard failure for this test.
  let response_object = match client.responses().create( request ).await
  {
    Ok( response ) => response,
    Err( e ) => panic!( "API request failed with an error : {e:?}" ),
  };
  assert!( !response_object.id.is_empty(), "Response should have an id field" );
  assert!( !response_object.output.is_empty(), "Response should have output" );
  assert_eq!( response_object.object, "response", "Object type should be 'response'" );
  assert!( response_object.created_at > 0, "Created timestamp should be valid" );
}
#[ tokio::test ]
async fn create_response_invalid_model()
{
  // A bogus model id must surface as an API error, never a success.
  let isolated_client = IsolatedClient::new( "create_response_invalid_model", should_run_real_api_tests() )
  .expect( "Failed to create isolated client" );
  let client = isolated_client.client();
  let request = CreateResponseRequest::former()
  .model( ModelIdsResponses::from( "invalid-model-xyz".to_string() ) )
  .input( ResponseInput::String( "Test".to_string() ) )
  .form();
  let outcome = client.responses().create( request ).await;
  assert!( outcome.is_err(), "Should return error for invalid model" );
}
#[ tokio::test ]
async fn create_response_stream()
{
// Streams a short completion and requires at least one event to arrive.
// Any stream-level error is treated as a mandatory failure.
let isolated_client = IsolatedClient::new( "create_response_stream", should_run_real_api_tests() )
.expect( "Failed to create isolated client" );
let client = isolated_client.client();
let request = CreateResponseRequest::former()
.model( ModelIdsResponses::from( "gpt-5-nano".to_string() ) )
.input( ResponseInput::String( "Count from 1 to 5".to_string() ) )
.max_output_tokens( 100 )
.stream( true )
.form();
let result = client.responses().create_stream( request ).await;
match result
{
Ok( mut receiver ) =>
{
let mut event_count = 0;
let mut _error_received = false;
// Wall-clock guard: the timeout is only checked after an event is received,
// so a stream that hangs with no events at all will not trip this — it
// relies on the channel closing instead. NOTE(review): confirm the test
// harness enforces an outer timeout for that case.
let timeout_duration = core::time::Duration::from_secs(10);
let start_time = std::time::Instant::now();
while let Some( event_result ) = receiver.recv().await
{
if start_time.elapsed() > timeout_duration
{
eprintln!( "⚠️ Stream timeout after 10 seconds with {event_count} events received" );
break;
}
match event_result
{
Ok( event ) =>
{
event_count += 1;
// Log lifecycle events; stop draining once the response completes.
match event
{
ResponseStreamEvent::ResponseCreated( _ ) => println!( "Response created" ),
ResponseStreamEvent::ResponseInProgress( _ ) => println!( "Response in progress" ),
ResponseStreamEvent::ResponseInAnalysis( _ ) => println!( "Response in analysis" ),
ResponseStreamEvent::ResponseTextDelta( _ ) => println!( "Text delta received" ),
ResponseStreamEvent::ResponseCompleted( event ) =>
{
let response_id = &event.response.id;
println!( "Response completed : {response_id}" );
break;
},
_ => println!( "Other event received : {event:?}" ),
}
},
Err( e ) =>
{
// A transport/protocol error mid-stream fails the test immediately.
_error_received = true;
eprintln!( "❌ Stream error occurred : {e:?}" );
panic!( "Stream encountered error - MANDATORY FAILURE: {e:?}" );
},
}
}
assert!( event_count > 0, "MANDATORY FAILURE: Should receive at least one stream event. Received {event_count} events." );
},
Err( e ) =>
{
panic!( "Stream creation failed - MANDATORY FAILURE: {e:?}" );
}
}
}
#[ tokio::test ]
async fn create_response_with_tools()
{
  // Attaches a single function tool and checks the response echoes tools back.
  let isolated_client = IsolatedClient::new( "create_response_with_tools", should_run_real_api_tests() )
  .expect( "Failed to create isolated client" );
  let client = isolated_client.client();
  // JSON schema for the tool's single required "location" argument.
  let schema = json!({
    "type": "object",
    "properties": {
      "location": {
        "type": "string",
        "description": "The city and state, e.g. San Francisco, CA"
      }
    },
    "required": ["location"]
  });
  let weather_tool = FunctionTool::former()
  .name( "get_weather".to_string() )
  .description( "Get the current weather for a location".to_string() )
  .parameters( FunctionParameters::new( schema ) )
  .form();
  let request = CreateResponseRequest::former()
  .model( ModelIdsResponses::from( "gpt-5-nano".to_string() ) )
  .input( ResponseInput::String( "What's the weather like in Boston?".to_string() ) )
  .tools( vec![ Tool::Function( weather_tool ) ] )
  .tool_choice( ToolChoice::String( "auto".to_string() ) )
  .form();
  let response = match client.responses().create( request ).await
  {
    Ok( response ) => response,
    Err( e ) => panic!( "Request failed : {e:?}" ),
  };
  assert!( !response.id.is_empty(), "Response should have ID" );
  assert!( response.tools.is_some(), "Tools should be present in response" );
}
#[ tokio::test ]
#[ allow( deprecated ) ]
async fn retrieve_response()
{
  // Legacy-style test: create a response, then fetch it back by id and
  // compare identity fields. API failures are logged, not fatal.
  require_api_key!();
  let secret = load_secret_for_test();
  let env = api_openai::exposed::environment::OpenaiEnvironmentImpl::build
  (
    secret,
    None,
    None,
    api_openai::environment::OpenAIRecommended::base_url().to_string(),
    api_openai::environment::OpenAIRecommended::realtime_base_url().to_string(),
  )
  .expect( "Failed to create environment" );
  let client = Client::build( env ).expect( "Failed to create client" );
  let create_request = CreateResponseRequest::former()
  .model( ModelIdsResponses::from( "gpt-5-nano".to_string() ) )
  .input( ResponseInput::String( "Test".to_string() ) )
  .max_output_tokens( 50 )
  .form();
  match client.responses().create( create_request ).await
  {
    Err( e ) => eprintln!( "Create failed : {e:?}" ),
    Ok( created_response ) =>
    {
      match client.responses().retrieve( &created_response.id ).await
      {
        Err( e ) => eprintln!( "Retrieval failed : {e:?}" ),
        Ok( retrieved_response ) =>
        {
          assert_eq!( created_response.id, retrieved_response.id, "Retrieved response ID should match" );
          assert_eq!( created_response.model, retrieved_response.model, "Model should match" );
        },
      }
    },
  }
}
#[ tokio::test ]
#[ allow( deprecated ) ]
async fn list_response_input_items()
{
  // Legacy-style test: create a response from one structured input item,
  // then list the input items attached to it.
  require_api_key!();
  let secret = load_secret_for_test();
  let env = api_openai::exposed::environment::OpenaiEnvironmentImpl::build
  (
    secret,
    None,
    None,
    api_openai::environment::OpenAIRecommended::base_url().to_string(),
    api_openai::environment::OpenAIRecommended::realtime_base_url().to_string(),
  )
  .expect( "Failed to create environment" );
  let client = Client::build( env ).expect( "Failed to create client" );
  let first_message = InputMessage
  {
    r#type : "message".to_string(),
    role : "user".to_string(),
    content : vec![ InputContentPart::Text( InputText { text : "First message".to_string() } ) ],
    status : None,
    id : None,
  };
  let create_request = CreateResponseRequest::former()
  .model( ModelIdsResponses::from( "gpt-5-nano".to_string() ) )
  .input( ResponseInput::Items( vec![ InputItem::Message( first_message ) ] ) )
  .form();
  match client.responses().create( create_request ).await
  {
    Err( e ) => eprintln!( "Create failed : {e:?}" ),
    Ok( response ) =>
    {
      let query = ListQuery { limit : Some( 10 ) };
      match client.responses().list_input_items( &response.id, Some( query ) ).await
      {
        Err( e ) => eprintln!( "List failed : {e:?}" ),
        Ok( item_list ) =>
        {
          assert_eq!( item_list.object, "list", "Should return a list object" );
          assert!( !item_list.data.is_empty(), "Should have at least one input item" );
        },
      }
    },
  }
}
#[ tokio::test ]
#[ allow( deprecated ) ]
async fn delete_response()
{
  // Legacy-style test: create an unstored response, delete it, and verify it
  // is no longer retrievable.
  require_api_key!();
  let secret = load_secret_for_test();
  let env = api_openai::exposed::environment::OpenaiEnvironmentImpl::build
  (
    secret,
    None,
    None,
    api_openai::environment::OpenAIRecommended::base_url().to_string(),
    api_openai::environment::OpenAIRecommended::realtime_base_url().to_string(),
  )
  .expect( "Failed to create environment" );
  let client = Client::build( env ).expect( "Failed to create client" );
  let create_request = CreateResponseRequest::former()
  .model( ModelIdsResponses::from( "gpt-5-nano".to_string() ) )
  .input( ResponseInput::String( "Test for deletion".to_string() ) )
  .max_output_tokens( 50 )
  .store( false )
  .form();
  match client.responses().create( create_request ).await
  {
    Err( e ) => eprintln!( "Create failed : {e:?}" ),
    Ok( response ) =>
    {
      match client.responses().delete( &response.id ).await
      {
        Err( e ) => eprintln!( "Delete failed : {e:?}" ),
        Ok( delete_result ) =>
        {
          println!( "Response deleted successfully : {delete_result:?}" );
          // A deleted response must not be retrievable afterwards.
          let retrieve_result = client.responses().retrieve( &response.id ).await;
          assert!( retrieve_result.is_err(), "Should not be able to retrieve deleted response" );
        },
      }
    },
  }
}
#[ tokio::test ]
#[ allow( deprecated ) ]
async fn update_response()
{
  // Legacy-style test: create a response, patch its metadata, and verify the
  // id is stable and metadata is present afterwards.
  require_api_key!();
  let secret = load_secret_for_test();
  let env = api_openai::exposed::environment::OpenaiEnvironmentImpl::build
  (
    secret,
    None,
    None,
    api_openai::environment::OpenAIRecommended::base_url().to_string(),
    api_openai::environment::OpenAIRecommended::realtime_base_url().to_string(),
  )
  .expect( "Failed to create environment" );
  let client = Client::build( env ).expect( "Failed to create client" );
  let create_request = CreateResponseRequest::former()
  .model( ModelIdsResponses::from( "gpt-5-nano".to_string() ) )
  .input( ResponseInput::String( "Test for update".to_string() ) )
  .form();
  match client.responses().create( create_request ).await
  {
    Err( e ) => eprintln!( "Create failed : {e:?}" ),
    Ok( response ) =>
    {
      let update_data = json!({
        "metadata": {
          "updated": "true",
          "timestamp": chrono::Utc::now().to_rfc3339()
        }
      });
      match client.responses().update( &response.id, update_data ).await
      {
        Err( e ) => eprintln!( "Update failed : {e:?}" ),
        Ok( updated_response ) =>
        {
          assert_eq!( response.id, updated_response.id, "ID should remain the same" );
          assert!( updated_response.metadata.is_some(), "Metadata should be present" );
        },
      }
    },
  }
}
#[ tokio::test ]
#[ allow( deprecated ) ]
async fn cancel_response()
{
// Legacy-style test: start a long streamed generation, grab the response id
// from the very first stream event, then cancel it mid-flight. The exact
// event ordering (ResponseCreated must arrive first) is what this relies on.
require_api_key!();
let secret = load_secret_for_test();
let env = api_openai::exposed::environment::OpenaiEnvironmentImpl::build(secret, None, None, api_openai::environment::OpenAIRecommended::base_url().to_string(), api_openai::environment::OpenAIRecommended::realtime_base_url().to_string()).expect("Failed to create environment");
let client = Client::build(env).expect("Failed to create client");
// Large max_output_tokens keeps the generation running long enough to cancel.
let create_request = CreateResponseRequest::former()
.model( ModelIdsResponses::from( "gpt-5-nano".to_string() ) )
.input( ResponseInput::String( "Write a very long story about..." .to_string() ) )
.max_output_tokens( 1000 )
.stream( true )
.form();
match client.responses().create_stream( create_request ).await
{
Ok( mut receiver ) =>
{
// Only the first event is inspected; if it is anything other than
// ResponseCreated the cancel path is silently skipped and the test passes
// without asserting anything. NOTE(review): consider failing in that case.
if let Some( Ok( ResponseStreamEvent::ResponseCreated( event ) ) ) = receiver.recv().await
{
let response_id = event.response.id.clone();
let cancel_result = client.responses().cancel( &response_id ).await;
match cancel_result
{
Ok( cancelled_response ) =>
{
assert_eq!( response_id, cancelled_response.id, "Cancelled response ID should match" );
// Presumably the API reports a cancelled run as "failed" or "incomplete";
// there is no dedicated "cancelled" status checked here — TODO confirm.
assert!(
cancelled_response.status == "failed" || cancelled_response.status == "incomplete",
"Status should indicate cancellation"
);
},
Err( e ) => eprintln!( "Cancel failed : {e:?}" ),
}
}
},
Err( e ) =>
{
eprintln!( "Stream creation failed : {e:?}" );
}
}
}
/// Legacy-style smoke test of the environment wiring: base URL, API key, and
/// request headers are readable from a built client and match expectations.
#[ tokio::test ]
#[ allow( deprecated ) ]
async fn test_environment_details()
{
  require_api_key!();
  let secret = load_secret_for_test();
  let env = api_openai::exposed::environment::OpenaiEnvironmentImpl::build
  (
    secret,
    None,
    None,
    api_openai::environment::OpenAIRecommended::base_url().to_string(),
    api_openai::environment::OpenAIRecommended::realtime_base_url().to_string(),
  )
  .expect( "Failed to create environment" );
  let client = Client::build( env ).expect( "Failed to create client" );
  let env = &client.environment;
  let base_url = env.base_url();
  let api_key_secret = OpenaiEnvironment::api_key( env );
  let api_key = api_key_secret.expose_secret();
  println!( "Environment Base URL: {base_url}" );
  // Char-safe masking: the previous `&api_key[..5]` byte slice panics on a
  // key shorter than 5 bytes (or a non-UTF-8-boundary index) *before* the
  // length assertions below would have reported the real problem.
  let key_start : String = api_key.chars().take( 5 ).collect();
  println!( "Environment API Key (masked): {key_start}..." );
  let headers = env.headers();
  println!( "Environment Headers : {headers:?}" );
  assert_eq!( base_url.as_str(), "https://api.openai.com/v1/", "Base URL should be api.openai.com/v1/" );
  assert!( !api_key.is_empty(), "API key should not be empty" );
  assert!( api_key.len() > 10, "API key should be longer than 10 characters" );
}