rusty_openai/openai_api/moderations.rs
use crate::{error_handling::OpenAIResult, openai::OpenAI};
use serde::Serialize;
use serde_json::Value;

/// [`ModerationApi`] struct to interact with the moderation endpoint of the API.
pub struct ModerationApi<'a>(pub(crate) &'a OpenAI<'a>);

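/// Request body for the moderations endpoint.
///
/// Serialized with serde_json this becomes, roughly,
/// `{"input": "...", "model": "..."}`; the `model` field is omitted
/// entirely when it is `None`.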
#[derive(Serialize)]
struct ModerationRequest<'a> {
    /// The text input to be moderated
    input: &'a str,

    /// Optional name of the moderation model
    #[serde(skip_serializing_if = "Option::is_none")]
    model: Option<&'a str>,
}
impl<'a> ModerationApi<'a> {
    /// Submit text input for moderation.
    ///
    /// # Arguments
    ///
    /// * `input` - The text input to be moderated.
    /// * `model` - Optional name of the moderation model to use.
    ///
    /// # Returns
    ///
    /// A Result containing the JSON response as [`serde_json::Value`] on success, or an [`OpenAIError`][crate::error_handling::OpenAIError] on failure.
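    ///
    /// # Example
    ///
    /// A minimal usage sketch; the `OpenAI::new` constructor and the
    /// `moderations()` accessor shown here are assumptions and may differ
    /// from the actual client API:
    ///
    /// ```ignore
    /// // Hypothetical client setup; adjust to the real constructor.
    /// let openai = OpenAI::new("api-key");
    /// let result = openai.moderations().moderate("text to check", None).await?;
    /// println!("{result}");
    /// ```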
    pub async fn moderate(&self, input: &str, model: Option<&str>) -> OpenAIResult<Value> {
        // Build the request body as a typed, serializable struct.
        let body = ModerationRequest { input, model };

        // Send a POST request to the moderation endpoint with the request body.
        self.0.post_json("/moderations", &body).await
    }
}