use async_openai::types::chat::{
ChatCompletionRequestSystemMessageArgs, ChatCompletionRequestUserMessageArgs,
ChatCompletionStreamOptions, CreateChatCompletionRequest, CreateChatCompletionRequestArgs,
};
/// Round-trips a `CreateChatCompletionRequest` through serde to verify that
/// serialization and deserialization are lossless and symmetric.
#[test]
fn chat_types_serde() {
    // Build a minimal two-message request via the builder API.
    let request: CreateChatCompletionRequest = CreateChatCompletionRequestArgs::default()
        .messages([
            ChatCompletionRequestSystemMessageArgs::default()
                .content("you are a calculator")
                .build()
                .unwrap()
                .into(),
            ChatCompletionRequestUserMessageArgs::default()
                .content("what is the result of 1+1")
                .build()
                .unwrap()
                .into(),
        ])
        .build()
        .unwrap();

    // Serialize and parse back; equality proves no fields were lost or mutated.
    let serialized = serde_json::to_string(&request).unwrap();
    let deserialized: CreateChatCompletionRequest = serde_json::from_str(&serialized).unwrap();
    assert_eq!(request, deserialized);
}
/// Verifies that `None` optional fields on `ChatCompletionStreamOptions` are
/// omitted from the serialized JSON, that an all-`None` value serializes to
/// `{}`, and that the partial JSON deserializes back to the original value.
#[test]
fn stream_options_none_fields_not_serialized() {
    // One field populated, one left unset.
    let opts = ChatCompletionStreamOptions {
        include_usage: Some(true),
        include_obfuscation: None,
    };
    let json = serde_json::to_string(&opts).unwrap();

    // The set field must appear; the None field must be skipped entirely.
    assert!(json.contains("include_usage"));
    assert!(
        !json.contains("include_obfuscation"),
        "include_obfuscation should not be serialized when None, but got: {}",
        json
    );

    // With every field None, the serialized form collapses to an empty object.
    let all_none = ChatCompletionStreamOptions {
        include_usage: None,
        include_obfuscation: None,
    };
    assert_eq!(serde_json::to_string(&all_none).unwrap(), "{}");

    // Round-trip: parsing the partial JSON reproduces the original value.
    let round_tripped: ChatCompletionStreamOptions = serde_json::from_str(&json).unwrap();
    assert_eq!(opts, round_tripped);
}