// async_openai/responses/responses_.rs

use crate::{
    config::Config,
    error::OpenAIError,
    types::responses::{
        CompactResource, CompactResponseRequest, CreateResponse, DeleteResponse, Response,
        ResponseItemList, TokenCountsBody, TokenCountsResource,
    },
    Client, RequestOptions,
};

#[cfg(not(target_family = "wasm"))]
use crate::types::responses::ResponseStream;

pub struct Responses<'c, C: Config> {
    client: &'c Client<C>,
    pub(crate) request_options: RequestOptions,
}

impl<'c, C: Config> Responses<'c, C> {
    /// Constructs a new Responses client.
    pub fn new(client: &'c Client<C>) -> Self {
        Self {
            client,
            request_options: RequestOptions::new(),
        }
    }

    /// Creates a model response. Provide [text](https://platform.openai.com/docs/guides/text) or
    /// [image](https://platform.openai.com/docs/guides/images) inputs to generate
    /// [text](https://platform.openai.com/docs/guides/text) or
    /// [JSON](https://platform.openai.com/docs/guides/structured-outputs) outputs. Have the model call
    /// your own [custom code](https://platform.openai.com/docs/guides/function-calling) or use
    /// built-in [tools](https://platform.openai.com/docs/guides/tools) like
    /// [web search](https://platform.openai.com/docs/guides/tools-web-search)
    /// or [file search](https://platform.openai.com/docs/guides/tools-file-search) to use your own data
    /// as input for the model's response.
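    ///
    /// # Example
    ///
    /// A minimal sketch, not taken from this crate's docs: it assumes the
    /// `Client::responses()` accessor and a derive_builder-style
    /// `CreateResponseArgs`; adjust names to the actual exported types.
    ///
    /// ```no_run
    /// use async_openai::{types::responses::CreateResponseArgs, Client};
    ///
    /// #[tokio::main]
    /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
    ///     // Reads OPENAI_API_KEY from the environment by default.
    ///     let client = Client::new();
    ///
    ///     let request = CreateResponseArgs::default()
    ///         .model("gpt-4.1")
    ///         .input("Write a one-sentence bedtime story about a unicorn.")
    ///         .build()?;
    ///
    ///     let response = client.responses().create(request).await?;
    ///     println!("{:#?}", response);
    ///     Ok(())
    /// }
    /// ```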
    #[crate::byot(
        T0 = serde::Serialize,
        R = serde::de::DeserializeOwned
    )]
    pub async fn create(&self, request: CreateResponse) -> Result<Response, OpenAIError> {
        self.client
            .post("/responses", request, &self.request_options)
            .await
    }

    /// Creates a model response for the given input with streaming.
    ///
    /// Response events will be sent as server-sent events as they become available.
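    ///
    /// # Example
    ///
    /// A minimal sketch with the same assumptions as the [`Responses::create`]
    /// example, plus `futures::StreamExt` for iterating the event stream:
    ///
    /// ```no_run
    /// use async_openai::{types::responses::CreateResponseArgs, Client};
    /// use futures::StreamExt;
    ///
    /// #[tokio::main]
    /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
    ///     let client = Client::new();
    ///     let request = CreateResponseArgs::default()
    ///         .model("gpt-4.1")
    ///         .input("Count to five, one number per message.")
    ///         .build()?;
    ///
    ///     let mut stream = client.responses().create_stream(request).await?;
    ///     while let Some(event) = stream.next().await {
    ///         // Each item is one parsed server-sent event, or a stream error.
    ///         println!("{:?}", event?);
    ///     }
    ///     Ok(())
    /// }
    /// ```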
    #[cfg(not(target_family = "wasm"))]
    #[crate::byot(
        T0 = serde::Serialize,
        R = serde::de::DeserializeOwned,
        stream = "true",
        where_clause = "R: std::marker::Send + 'static"
    )]
    #[allow(unused_mut)]
    pub async fn create_stream(
        &self,
        mut request: CreateResponse,
    ) -> Result<ResponseStream, OpenAIError> {
        #[cfg(not(feature = "byot"))]
        {
            if matches!(request.stream, Some(false)) {
                return Err(OpenAIError::InvalidArgument(
                    "When stream is false, use Responses::create".into(),
                ));
            }
            request.stream = Some(true);
        }
        Ok(self
            .client
            .post_stream("/responses", request, &self.request_options)
            .await)
    }

    /// Retrieves a model response with the given ID.
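    ///
    /// # Example
    ///
    /// A sketch (same `Client::responses()` assumption as above); the
    /// response ID is a placeholder.
    ///
    /// ```no_run
    /// # use async_openai::Client;
    /// # async fn run() -> Result<(), Box<dyn std::error::Error>> {
    /// let client = Client::new();
    /// let response = client.responses().retrieve("resp_abc123").await?;
    /// println!("{:#?}", response);
    /// # Ok(())
    /// # }
    /// ```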
    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
    pub async fn retrieve(&self, response_id: &str) -> Result<Response, OpenAIError> {
        self.client
            .get(
                &format!("/responses/{}", response_id),
                &self.request_options,
            )
            .await
    }

    /// Retrieves a model response with the given ID, with streaming.
    ///
    /// Response events will be sent as server-sent events as they become available.
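    ///
    /// # Example
    ///
    /// A sketch mirroring the [`Responses::create_stream`] example; the
    /// response ID is a placeholder.
    ///
    /// ```no_run
    /// # use async_openai::Client;
    /// # use futures::StreamExt;
    /// # async fn run() -> Result<(), Box<dyn std::error::Error>> {
    /// let client = Client::new();
    /// let mut stream = client.responses().retrieve_stream("resp_abc123").await?;
    /// while let Some(event) = stream.next().await {
    ///     println!("{:?}", event?);
    /// }
    /// # Ok(())
    /// # }
    /// ```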
    #[cfg(not(target_family = "wasm"))]
    #[crate::byot(
        T0 = std::fmt::Display,
        R = serde::de::DeserializeOwned,
        stream = "true",
        where_clause = "R: std::marker::Send + 'static"
    )]
    pub async fn retrieve_stream(&self, response_id: &str) -> Result<ResponseStream, OpenAIError> {
        let mut request_options = self.request_options.clone();
        request_options.with_query(&[("stream", "true")])?;

        Ok(self
            .client
            .get_stream(&format!("/responses/{}", response_id), &request_options)
            .await)
    }

    /// Deletes a model response with the given ID.
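    ///
    /// # Example
    ///
    /// A sketch; the response ID is a placeholder.
    ///
    /// ```no_run
    /// # use async_openai::Client;
    /// # async fn run() -> Result<(), Box<dyn std::error::Error>> {
    /// let client = Client::new();
    /// let deleted = client.responses().delete("resp_abc123").await?;
    /// println!("{:#?}", deleted);
    /// # Ok(())
    /// # }
    /// ```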
    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
    pub async fn delete(&self, response_id: &str) -> Result<DeleteResponse, OpenAIError> {
        self.client
            .delete(
                &format!("/responses/{}", response_id),
                &self.request_options,
            )
            .await
    }

    /// Cancels a model response with the given ID. Only responses created with the
    /// `background` parameter set to `true` can be cancelled.
    /// [Learn more](https://platform.openai.com/docs/guides/background).
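    ///
    /// # Example
    ///
    /// A sketch; this only succeeds for a response created with
    /// `background: true`, and the ID is a placeholder.
    ///
    /// ```no_run
    /// # use async_openai::Client;
    /// # async fn run() -> Result<(), Box<dyn std::error::Error>> {
    /// let client = Client::new();
    /// let response = client.responses().cancel("resp_abc123").await?;
    /// println!("{:#?}", response);
    /// # Ok(())
    /// # }
    /// ```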
    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
    pub async fn cancel(&self, response_id: &str) -> Result<Response, OpenAIError> {
        self.client
            .post(
                &format!("/responses/{}/cancel", response_id),
                serde_json::json!({}),
                &self.request_options,
            )
            .await
    }

    /// Returns a list of input items for a given response.
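    ///
    /// # Example
    ///
    /// A sketch; the response ID is a placeholder.
    ///
    /// ```no_run
    /// # use async_openai::Client;
    /// # async fn run() -> Result<(), Box<dyn std::error::Error>> {
    /// let client = Client::new();
    /// let items = client.responses().list_input_items("resp_abc123").await?;
    /// println!("{:#?}", items);
    /// # Ok(())
    /// # }
    /// ```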
    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
    pub async fn list_input_items(
        &self,
        response_id: &str,
    ) -> Result<ResponseItemList, OpenAIError> {
        self.client
            .get(
                &format!("/responses/{}/input_items", response_id),
                &self.request_options,
            )
            .await
    }

    /// Gets input token counts for the given request.
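    ///
    /// # Example
    ///
    /// A sketch, assuming a derive_builder-style `TokenCountsBodyArgs` is
    /// generated for `TokenCountsBody`; construct the body directly if not.
    ///
    /// ```no_run
    /// # use async_openai::{types::responses::TokenCountsBodyArgs, Client};
    /// # async fn run() -> Result<(), Box<dyn std::error::Error>> {
    /// let client = Client::new();
    /// let body = TokenCountsBodyArgs::default()
    ///     .model("gpt-4.1")
    ///     .input("How many tokens is this?")
    ///     .build()?;
    /// let counts = client.responses().get_input_token_counts(body).await?;
    /// println!("{:#?}", counts);
    /// # Ok(())
    /// # }
    /// ```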
    #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)]
    pub async fn get_input_token_counts(
        &self,
        request: TokenCountsBody,
    ) -> Result<TokenCountsResource, OpenAIError> {
        self.client
            .post("/responses/input_tokens", request, &self.request_options)
            .await
    }

    /// Compacts a conversation.
    ///
    /// Learn when and how to compact long-running conversations in the
    /// [conversation state guide](https://platform.openai.com/docs/guides/conversation-state#managing-the-context-window).
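    ///
    /// # Example
    ///
    /// A sketch, assuming a derive_builder-style `CompactResponseRequestArgs`
    /// and that the request identifies the response to compact by ID (the ID
    /// below is a placeholder):
    ///
    /// ```no_run
    /// # use async_openai::{types::responses::CompactResponseRequestArgs, Client};
    /// # async fn run() -> Result<(), Box<dyn std::error::Error>> {
    /// let client = Client::new();
    /// let request = CompactResponseRequestArgs::default()
    ///     .response_id("resp_abc123")
    ///     .build()?;
    /// let compacted = client.responses().compact(request).await?;
    /// println!("{:#?}", compacted);
    /// # Ok(())
    /// # }
    /// ```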
    #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)]
    pub async fn compact(
        &self,
        request: CompactResponseRequest,
    ) -> Result<CompactResource, OpenAIError> {
        self.client
            .post("/responses/compact", request, &self.request_options)
            .await
    }
}