//! Responses API surface for async-openai (`async_openai/responses/responses.rs`).
1use crate::{
2    config::Config,
3    error::OpenAIError,
4    types::responses::{
5        CompactResource, CompactResponseRequest, CreateResponse, DeleteResponse, Response,
6        ResponseItemList, TokenCountsBody, TokenCountsResource,
7    },
8    Client, RequestOptions,
9};
10
11use crate::types::responses::ResponseStream;
12
13pub struct Responses<'c, C: Config> {
14    client: &'c Client<C>,
15    pub(crate) request_options: RequestOptions,
16}
17
impl<'c, C: Config> Responses<'c, C> {
    /// Constructs a new Responses client borrowing the given [`Client`],
    /// with default (empty) per-request options.
    pub fn new(client: &'c Client<C>) -> Self {
        Self {
            client,
            request_options: RequestOptions::new(),
        }
    }

    /// Creates a model response. Provide [text](https://platform.openai.com/docs/guides/text) or
    /// [image](https://platform.openai.com/docs/guides/images) inputs to generate
    /// [text](https://platform.openai.com/docs/guides/text) or
    /// [JSON](https://platform.openai.com/docs/guides/structured-outputs) outputs. Have the model call
    /// your own [custom code](https://platform.openai.com/docs/guides/function-calling) or use
    /// built-in [tools](https://platform.openai.com/docs/guides/tools) like
    /// [web search](https://platform.openai.com/docs/guides/tools-web-search)
    /// or [file search](https://platform.openai.com/docs/guides/tools-file-search) to use your own data
    /// as input for the model's response.
    // byot ("bring your own types"): presumably generates a generic variant of this
    // method where the request (T0) and response (R) types are caller-supplied —
    // see the `byot` feature gate used in `create_stream`.
    #[crate::byot(
        T0 = serde::Serialize,
        R = serde::de::DeserializeOwned
    )]
    pub async fn create(&self, request: CreateResponse) -> Result<Response, OpenAIError> {
        self.client
            .post("/responses", request, &self.request_options)
            .await
    }

    /// Creates a model response for the given input with streaming.
    ///
    /// Response events will be sent as server-sent events as they become available.
    #[crate::byot(
        T0 = serde::Serialize,
        R = serde::de::DeserializeOwned,
        stream = "true",
        where_clause = "R: crate::traits::MaybeSend + 'static"
    )]
    // `request` is only mutated in the non-byot build below, hence the allow.
    #[allow(unused_mut)]
    pub async fn create_stream(
        &self,
        mut request: CreateResponse,
    ) -> Result<ResponseStream, OpenAIError> {
        #[cfg(not(feature = "byot"))]
        {
            // An explicit `stream: Some(false)` contradicts calling the streaming
            // method; reject it and point the caller at `create` instead.
            if matches!(request.stream, Some(false)) {
                return Err(OpenAIError::InvalidArgument(
                    "When stream is false, use Responses::create".into(),
                ));
            }
            // Force streaming on (covers the `None` case) before sending.
            request.stream = Some(true);
        }
        self.client
            .post_stream("/responses", request, &self.request_options)
            .await
    }

    /// Retrieves a model response with the given ID.
    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
    pub async fn retrieve(&self, response_id: &str) -> Result<Response, OpenAIError> {
        self.client
            .get(
                &format!("/responses/{}", response_id),
                &self.request_options,
            )
            .await
    }

    /// Retrieves a model response with the given ID with streaming.
    ///
    /// Response events will be sent as server-sent events as they become available.
    #[crate::byot(
        T0 = std::fmt::Display,
        R = serde::de::DeserializeOwned,
        stream = "true",
        where_clause = "R: crate::traits::MaybeSend + 'static"
    )]
    pub async fn retrieve_stream(&self, response_id: &str) -> Result<ResponseStream, OpenAIError> {
        // Unlike `create_stream` (request body flag), streamed retrieval is
        // selected via a `stream=true` query parameter; clone the options so
        // this handle's defaults are not permanently modified.
        let mut request_options = self.request_options.clone();
        request_options.with_query(&[("stream", "true")])?;

        self.client
            .get_stream(&format!("/responses/{}", response_id), &request_options)
            .await
    }

    /// Deletes a model response with the given ID.
    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
    pub async fn delete(&self, response_id: &str) -> Result<DeleteResponse, OpenAIError> {
        self.client
            .delete(
                &format!("/responses/{}", response_id),
                &self.request_options,
            )
            .await
    }

    /// Cancels a model response with the given ID. Only responses created with the
    /// `background` parameter set to `true` can be cancelled.
    /// [Learn more](https://platform.openai.com/docs/guides/background).
    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
    pub async fn cancel(&self, response_id: &str) -> Result<Response, OpenAIError> {
        self.client
            .post(
                &format!("/responses/{}/cancel", response_id),
                // Cancel takes no request body; send an empty JSON object.
                serde_json::json!({}),
                &self.request_options,
            )
            .await
    }

    /// Returns a list of input items for a given response.
    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
    pub async fn list_input_items(
        &self,
        response_id: &str,
    ) -> Result<ResponseItemList, OpenAIError> {
        self.client
            .get(
                &format!("/responses/{}/input_items", response_id),
                &self.request_options,
            )
            .await
    }

    /// Gets the input token counts that a request with the given body would consume.
    #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)]
    pub async fn get_input_token_counts(
        &self,
        request: TokenCountsBody,
    ) -> Result<TokenCountsResource, OpenAIError> {
        self.client
            .post("/responses/input_tokens", request, &self.request_options)
            .await
    }

    /// Compact a conversation.
    ///
    /// Learn when and how to compact long-running conversations in the
    /// [conversation state guide](https://platform.openai.com/docs/guides/conversation-state#managing-the-context-window).
    #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)]
    pub async fn compact(
        &self,
        request: CompactResponseRequest,
    ) -> Result<CompactResource, OpenAIError> {
        self.client
            .post("/responses/compact", request, &self.request_options)
            .await
    }
}