async_openai/
responses.rs

1use serde::Serialize;
2
3use crate::{
4    config::Config,
5    error::OpenAIError,
6    types::responses::{
7        CreateResponse, DeleteResponse, Response, ResponseItemList, ResponseStream,
8        TokenCountsBody, TokenCountsResource,
9    },
10    Client,
11};
12
/// Client handle for the OpenAI Responses API (`/responses` endpoints).
///
/// Borrows the underlying [`Client`] for the lifetime `'c`; construct via
/// [`Responses::new`].
pub struct Responses<'c, C: Config> {
    // Shared HTTP client carrying configuration (base URL, auth headers, etc.).
    client: &'c Client<C>,
}
16
17impl<'c, C: Config> Responses<'c, C> {
18    /// Constructs a new Responses client.
19    pub fn new(client: &'c Client<C>) -> Self {
20        Self { client }
21    }
22
23    /// Creates a model response. Provide [text](https://platform.openai.com/docs/guides/text) or
24    /// [image](https://platform.openai.com/docs/guides/images) inputs to generate
25    /// [text](https://platform.openai.com/docs/guides/text) or
26    /// [JSON](https://platform.openai.com/docs/guides/structured-outputs) outputs. Have the model call
27    /// your own [custom code](https://platform.openai.com/docs/guides/function-calling) or use
28    /// built-in [tools](https://platform.openai.com/docs/guides/tools) like
29    /// [web search](https://platform.openai.com/docs/guides/tools-web-search)
30    /// or [file search](https://platform.openai.com/docs/guides/tools-file-search) to use your own data
31    /// as input for the model's response.
32    #[crate::byot(
33        T0 = serde::Serialize,
34        R = serde::de::DeserializeOwned
35    )]
36    pub async fn create(&self, request: CreateResponse) -> Result<Response, OpenAIError> {
37        self.client.post("/responses", request).await
38    }
39
40    /// Creates a model response for the given input with streaming.
41    ///
42    /// Response events will be sent as server-sent events as they become available,
43    #[crate::byot(
44        T0 = serde::Serialize,
45        R = serde::de::DeserializeOwned,
46        stream = "true",
47        where_clause = "R: std::marker::Send + 'static"
48    )]
49    #[allow(unused_mut)]
50    pub async fn create_stream(
51        &self,
52        mut request: CreateResponse,
53    ) -> Result<ResponseStream, OpenAIError> {
54        #[cfg(not(feature = "byot"))]
55        {
56            if matches!(request.stream, Some(false)) {
57                return Err(OpenAIError::InvalidArgument(
58                    "When stream is false, use Responses::create".into(),
59                ));
60            }
61            request.stream = Some(true);
62        }
63        Ok(self.client.post_stream("/responses", request).await)
64    }
65
66    /// Retrieves a model response with the given ID.
67    #[crate::byot(T0 = std::fmt::Display, T1 = serde::Serialize, R = serde::de::DeserializeOwned)]
68    pub async fn retrieve<Q>(&self, response_id: &str, query: &Q) -> Result<Response, OpenAIError>
69    where
70        Q: Serialize + ?Sized,
71    {
72        self.client
73            .get_with_query(&format!("/responses/{}", response_id), &query)
74            .await
75    }
76
77    /// Deletes a model response with the given ID.
78    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
79    pub async fn delete(&self, response_id: &str) -> Result<DeleteResponse, OpenAIError> {
80        self.client
81            .delete(&format!("/responses/{}", response_id))
82            .await
83    }
84
85    /// Cancels a model response with the given ID. Only responses created with the
86    /// `background` parameter set to `true` can be cancelled.
87    /// [Learn more](https://platform.openai.com/docs/guides/background).
88    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
89    pub async fn cancel(&self, response_id: &str) -> Result<Response, OpenAIError> {
90        self.client
91            .post(
92                &format!("/responses/{}/cancel", response_id),
93                serde_json::json!({}),
94            )
95            .await
96    }
97
98    /// Returns a list of input items for a given response.
99    #[crate::byot(T0 = std::fmt::Display, T1 = serde::Serialize, R = serde::de::DeserializeOwned)]
100    pub async fn list_input_items<Q>(
101        &self,
102        response_id: &str,
103        query: &Q,
104    ) -> Result<ResponseItemList, OpenAIError>
105    where
106        Q: Serialize + ?Sized,
107    {
108        self.client
109            .get_with_query(&format!("/responses/{}/input_items", response_id), &query)
110            .await
111    }
112
113    /// Get input token counts
114    #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)]
115    pub async fn get_input_token_counts(
116        &self,
117        request: TokenCountsBody,
118    ) -> Result<TokenCountsResource, OpenAIError> {
119        self.client.post("/responses/input_tokens", request).await
120    }
121}