#![ cfg( feature = "integration" ) ]
#![ allow( unused_imports, dead_code, clippy::missing_panics_doc, clippy::missing_errors_doc ) ]
use api_openai::ClientApiAccessors;
pub use api_openai as the_module;
use crate::test_isolation::TestIsolation;
pub use crate::test_isolation::{ IsolatedClient, should_run_real_api_tests };
pub use api_openai::{
Client,
error ::OpenAIError,
environment ::{ EnvironmentInterface, OpenaiEnvironment, OpenaiEnvironmentImpl },
secret ::Secret,
components ::{
responses ::{
CreateResponseRequest,
ResponseObject,
ResponseInput,
ResponseStreamEvent,
ResponseItemList,
},
input ::{
InputItem,
InputMessage,
InputContentPart,
InputText,
},
common ::{ ModelIdsResponses, ListQuery },
tools ::{ Tool, ToolChoice, FunctionTool, FunctionParameters },
output ::{ OutputItem, OutputContentPart },
}
};
pub use serde_json::json;
pub use futures_util::stream::StreamExt;
pub use secrecy::ExposeSecret;
pub use tokio::sync::mpsc;
/// Checks the structural invariants of a completed response object.
///
/// # Panics
/// Panics with a descriptive message when any checked field is missing or malformed.
pub fn assert_valid_response(response : &ResponseObject)
{
  // Evaluate each invariant into a named flag, then assert it.
  let has_id = !response.id.is_empty();
  assert!(has_id, "Response should have an id field");
  let has_output = !response.output.is_empty();
  assert!(has_output, "Response should have output");
  assert_eq!(response.object, "response", "Object type should be 'response'");
  assert!(response.created_at > 0, "Created timestamp should be valid");
}
/// Builds a minimal single-turn request (`gpt-5-nano`, one user text message,
/// capped at 50 output tokens) for basic smoke tests.
#[ must_use ]
pub fn create_basic_test_request() -> CreateResponseRequest
{
  // One text content part for the user turn.
  let text_part = InputContentPart::Text(
    InputText
    {
      text : "Hello, how are you?".to_string(),
    }
  );
  // The single user message wrapping that part.
  let user_message = InputMessage
  {
    r#type : "message".to_string(),
    role : "user".to_string(),
    content : vec![ text_part ],
    status : None,
    id : None,
  };
  CreateResponseRequest::former()
  .model(ModelIdsResponses::from("gpt-5-nano".to_string()))
  .input(ResponseInput::Items(vec![ InputItem::Message(user_message) ]))
  .max_output_tokens(50)
  .parallel_tool_calls(true)
  .form()
}
/// Builds a tool-calling request: one `get_weather` function tool with
/// `tool_choice` forced to "required", capped at 150 output tokens.
#[ must_use ]
pub fn create_tools_test_request() -> CreateResponseRequest
{
  // JSON Schema for the tool's single required `location` argument.
  let weather_schema = json!({
    "type": "object",
    "properties": {
      "location": {
        "type": "string",
        "description": "The location to get weather for"
      }
    },
    "required": ["location"]
  });
  let weather_tool = Tool::Function(
    FunctionTool::former()
    .description("Get weather information for a location".to_string())
    .name("get_weather".to_string())
    .parameters(FunctionParameters::new(weather_schema))
    .form()
  );
  CreateResponseRequest::former()
  .model(ModelIdsResponses::from("gpt-5-nano".to_string()))
  .input(ResponseInput::String("What's the weather like in Paris?".to_string()))
  .tools(vec![ weather_tool ])
  .tool_choice(ToolChoice::String("required".to_string()))
  .max_output_tokens(150)
  .form()
}
/// Unwraps a test API result: on `Ok`, runs `success_validator` on the value;
/// on `Err`, fails the test with the test name and the debug-formatted error.
///
/// # Panics
/// Panics when `result` is `Err`, naming `test_name` in the message.
// Bound on `core::fmt::Debug` (same trait as `std::fmt::Debug`) so the
// `clippy::std_instead_of_core` suppression is no longer needed.
pub fn handle_test_result< T, E : core::fmt::Debug >(
  result : Result< T, E >,
  test_name : &str,
  success_validator : impl FnOnce(&T)
) {
  match result
  {
    Ok( ref response ) => success_validator( response ),
    Err( e ) => panic!( "API request failed in test '{test_name}': {e:?}" ),
  }
}