//! Client for the OpenAI Responses API (`/responses`): create, stream,
//! retrieve, cancel, and compact model responses, list their input items,
//! and count input tokens.
1use crate::{
2    config::Config,
3    error::OpenAIError,
4    types::responses::{
5        CompactResource, CompactResponseRequest, CreateResponse, DeleteResponse, Response,
6        ResponseItemList, TokenCountsBody, TokenCountsResource,
7    },
8    Client, RequestOptions,
9};
10
11#[cfg(not(target_family = "wasm"))]
12use crate::types::responses::ResponseStream;
13
14pub struct Responses<'c, C: Config> {
15    client: &'c Client<C>,
16    pub(crate) request_options: RequestOptions,
17}
18
19impl<'c, C: Config> Responses<'c, C> {
20    /// Constructs a new Responses client.
21    pub fn new(client: &'c Client<C>) -> Self {
22        Self {
23            client,
24            request_options: RequestOptions::new(),
25        }
26    }
27
28    /// Creates a model response. Provide [text](https://platform.openai.com/docs/guides/text) or
29    /// [image](https://platform.openai.com/docs/guides/images) inputs to generate
30    /// [text](https://platform.openai.com/docs/guides/text) or
31    /// [JSON](https://platform.openai.com/docs/guides/structured-outputs) outputs. Have the model call
32    /// your own [custom code](https://platform.openai.com/docs/guides/function-calling) or use
33    /// built-in [tools](https://platform.openai.com/docs/guides/tools) like
34    /// [web search](https://platform.openai.com/docs/guides/tools-web-search)
35    /// or [file search](https://platform.openai.com/docs/guides/tools-file-search) to use your own data
36    /// as input for the model's response.
37    #[crate::byot(
38        T0 = serde::Serialize,
39        R = serde::de::DeserializeOwned
40    )]
41    pub async fn create(&self, request: CreateResponse) -> Result<Response, OpenAIError> {
42        self.client
43            .post("/responses", request, &self.request_options)
44            .await
45    }
46
47    /// Creates a model response for the given input with streaming.
48    ///
49    /// Response events will be sent as server-sent events as they become available,
50    #[cfg(not(target_family = "wasm"))]
51    #[crate::byot(
52        T0 = serde::Serialize,
53        R = serde::de::DeserializeOwned,
54        stream = "true",
55        where_clause = "R: std::marker::Send + 'static"
56    )]
57    #[allow(unused_mut)]
58    pub async fn create_stream(
59        &self,
60        mut request: CreateResponse,
61    ) -> Result<ResponseStream, OpenAIError> {
62        #[cfg(not(feature = "byot"))]
63        {
64            if matches!(request.stream, Some(false)) {
65                return Err(OpenAIError::InvalidArgument(
66                    "When stream is false, use Responses::create".into(),
67                ));
68            }
69            request.stream = Some(true);
70        }
71        self.client
72            .post_stream("/responses", request, &self.request_options)
73            .await
74    }
75
76    /// Retrieves a model response with the given ID.
77    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
78    pub async fn retrieve(&self, response_id: &str) -> Result<Response, OpenAIError> {
79        self.client
80            .get(
81                &format!("/responses/{}", response_id),
82                &self.request_options,
83            )
84            .await
85    }
86
87    /// Retrieves a model response with the given ID with streaming.
88    ///
89    /// Response events will be sent as server-sent events as they become available.
90    #[cfg(not(target_family = "wasm"))]
91    #[crate::byot(
92        T0 = std::fmt::Display,
93        R = serde::de::DeserializeOwned,
94        stream = "true",
95        where_clause = "R: std::marker::Send + 'static"
96    )]
97    pub async fn retrieve_stream(&self, response_id: &str) -> Result<ResponseStream, OpenAIError> {
98        let mut request_options = self.request_options.clone();
99        request_options.with_query(&[("stream", "true")])?;
100
101        self.client
102            .get_stream(&format!("/responses/{}", response_id), &request_options)
103            .await
104    }
105
106    /// Deletes a model response with the given ID.
107    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
108    pub async fn delete(&self, response_id: &str) -> Result<DeleteResponse, OpenAIError> {
109        self.client
110            .delete(
111                &format!("/responses/{}", response_id),
112                &self.request_options,
113            )
114            .await
115    }
116
117    /// Cancels a model response with the given ID. Only responses created with the
118    /// `background` parameter set to `true` can be cancelled.
119    /// [Learn more](https://platform.openai.com/docs/guides/background).
120    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
121    pub async fn cancel(&self, response_id: &str) -> Result<Response, OpenAIError> {
122        self.client
123            .post(
124                &format!("/responses/{}/cancel", response_id),
125                serde_json::json!({}),
126                &self.request_options,
127            )
128            .await
129    }
130
131    /// Returns a list of input items for a given response.
132    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
133    pub async fn list_input_items(
134        &self,
135        response_id: &str,
136    ) -> Result<ResponseItemList, OpenAIError> {
137        self.client
138            .get(
139                &format!("/responses/{}/input_items", response_id),
140                &self.request_options,
141            )
142            .await
143    }
144
145    /// Get input token counts
146    #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)]
147    pub async fn get_input_token_counts(
148        &self,
149        request: TokenCountsBody,
150    ) -> Result<TokenCountsResource, OpenAIError> {
151        self.client
152            .post("/responses/input_tokens", request, &self.request_options)
153            .await
154    }
155
156    /// Compact a conversation.
157    ///
158    /// Learn when and how to compact long-running conversations in the
159    /// [conversation state guide](https://platform.openai.com/docs/guides/conversation-state#managing-the-context-window).
160    #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)]
161    pub async fn compact(
162        &self,
163        request: CompactResponseRequest,
164    ) -> Result<CompactResource, OpenAIError> {
165        self.client
166            .post("/responses/compact", request, &self.request_options)
167            .await
168    }
169}