async_openai/responses.rs

use crate::{
    config::Config,
    error::OpenAIError,
    types::responses::{CreateResponse, Response, ResponseStream},
    Client,
};

/// Given text input or a list of context items, the model will generate a response.
///
/// Related guide: [Responses](https://platform.openai.com/docs/api-reference/responses)
pub struct Responses<'c, C: Config> {
    client: &'c Client<C>,
}

impl<'c, C: Config> Responses<'c, C> {
    /// Constructs a new Responses client.
    pub fn new(client: &'c Client<C>) -> Self {
        Self { client }
    }

    /// Creates a model response for the given input.
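    ///
    /// A minimal usage sketch (assuming the crate's usual `Args` builder pattern,
    /// i.e. a `CreateResponseArgs` builder, and a `Client::responses()` accessor):
    ///
    /// ```no_run
    /// use async_openai::{types::responses::CreateResponseArgs, Client};
    ///
    /// # async fn run() -> Result<(), Box<dyn std::error::Error>> {
    /// // Builder and accessor names below are assumptions, not verified API.
    /// let client = Client::new();
    /// let request = CreateResponseArgs::default()
    ///     .model("gpt-4.1")
    ///     .input("Write a one-sentence bedtime story about a unicorn.")
    ///     .build()?;
    /// let response = client.responses().create(request).await?;
    /// println!("{:?}", response);
    /// # Ok(())
    /// # }
    /// ```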
    #[crate::byot(
        T0 = serde::Serialize,
        R = serde::de::DeserializeOwned
    )]
    pub async fn create(&self, request: CreateResponse) -> Result<Response, OpenAIError> {
        self.client.post("/responses", request).await
    }

    /// Creates a model response for the given input with streaming.
    ///
    /// Response events will be sent as server-sent events as they become available.
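    ///
    /// A minimal streaming sketch (same assumptions as the `create` example above:
    /// a `CreateResponseArgs` builder and a `Client::responses()` accessor; the
    /// stream is consumed with `futures::StreamExt`):
    ///
    /// ```no_run
    /// use async_openai::{types::responses::CreateResponseArgs, Client};
    /// use futures::StreamExt;
    ///
    /// # async fn run() -> Result<(), Box<dyn std::error::Error>> {
    /// // Builder and accessor names below are assumptions, not verified API.
    /// let client = Client::new();
    /// let request = CreateResponseArgs::default()
    ///     .model("gpt-4.1")
    ///     .input("Stream a haiku about the sea.")
    ///     .build()?;
    /// let mut stream = client.responses().create_stream(request).await?;
    /// while let Some(event) = stream.next().await {
    ///     // Each item is a `Result` wrapping a server-sent response event.
    ///     println!("{:?}", event?);
    /// }
    /// # Ok(())
    /// # }
    /// ```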
    #[crate::byot(
        T0 = serde::Serialize,
        R = serde::de::DeserializeOwned,
        stream = "true",
        where_clause = "R: std::marker::Send + 'static"
    )]
    #[allow(unused_mut)]
    pub async fn create_stream(
        &self,
        mut request: CreateResponse,
    ) -> Result<ResponseStream, OpenAIError> {
        #[cfg(not(feature = "byot"))]
        {
            // Reject requests that explicitly disable streaming, then force
            // `stream: true` so the server emits server-sent events.
            if matches!(request.stream, Some(false)) {
                return Err(OpenAIError::InvalidArgument(
                    "When stream is false, use Responses::create".into(),
                ));
            }
            request.stream = Some(true);
        }
        Ok(self.client.post_stream("/responses", request).await)
    }
}
55}