elikoga_textsynth/
tokenize.rs

//! Provides the tokenize API endpoint of the TextSynth service.
2
3use serde::{Deserialize, Serialize};
4use serde_with::skip_serializing_none;
5use thiserror::Error;
6
7use crate::{IsEngine, TextSynthClient};
8
/// Request payload for a tokenize call.
///
/// Build via the generated builder (`RequestBuilder`), e.g.
/// `Request::builder().text("hello").build()` — the `setter(into)`
/// option lets any `Into<String>` be passed for `text`.
#[skip_serializing_none]
#[derive(Serialize, Builder)]
#[builder(setter(into))]
pub struct Request {
    /// Input text to split into tokens.
    text: String,
}
17
/// Response payload of a tokenize call.
#[derive(Deserialize, Debug)]
pub struct Response {
    /// Token indexes corresponding to the input text, in order.
    pub tokens: Vec<u32>,
}
24
#[derive(Error, Debug)]
/// Error for a tokenization answer
// NOTE: the doc previously said "completion answer" — copy-paste leftover
// from the completion module; this enum belongs to `tokenize`.
pub enum Error {
    /// JSON serialization of the request (or deserialization of the
    /// response) failed.
    #[error("Serde error: {0}")]
    SerdeError(#[from] serde_json::Error),
    /// The underlying HTTP request failed.
    #[error("Reqwest error: {0}")]
    RequestError(#[from] reqwest::Error),
}
35
36impl TextSynthClient {
37    /// Perform a tokenization request
38    pub async fn tokenize(
39        &self,
40        engine: &impl IsEngine,
41        request: &Request,
42    ) -> Result<Response, Error> {
43        let request_json = serde_json::to_string(&request)?;
44        let url = format!("{}/engines/{}/tokenize", self.base_url, engine);
45        let response = self.client.post(&url).body(request_json).send().await?;
46        response.json().await.map_err(|e| e.into())
47    }
48}