gpt3_rs/api/
classifications.rs

1//! Classifies a query from provided context
2//! # Builder
3//! Use the [`classifications::Builder`][struct@Builder] to construct a [`classifications::Request`][Request] struct
4use std::collections::HashMap;
5
6use derive_builder::Builder;
7use serde::{Deserialize, Serialize};
8
9use crate::into_vec::IntoVec;
10use crate::model::Model;
11use crate::OPENAI_URL;
12
13use super::{LogProbs, RequestInfo};
14/// Classifies a query from provided context
15///
16/// # OpenAi documentation
17/// Classifies the specified query using provided examples.
18///
19/// The endpoint first searches over the labeled examples to select the ones most relevant for the particular query.
20/// Then, the relevant examples are combined with the query to construct a prompt to produce the final label via the completions endpoint.
21/// Labeled examples can be provided via an uploaded file, or explicitly listed in the request using the examples parameter for quick tests and small scale use cases.
22///
23/// # Example
24/// ```ignore
25/// let request = classifications::Builder::default()
26///     .model(Model::Curie)
27///     .search_model(Model::Ada)
28///     .query("It is a rainy day :(")
29///     .examples(&[
30///         &["A happy moment", "Positive"],
31///         &["I am sad.", "Negative"],
32///         &["I am feeling awesome", "Positive"]
33///      ])
34///     .labels(&["Positive", "Negative", "Neutral"])
35///     .build()
36///     .unwrap();
37/// ```
38/// # Required
39/// ```ignore
40/// model, query
41/// ```
42#[derive(Debug, Clone, PartialEq, Serialize, Builder)]
43#[builder_struct_attr(doc = "# Required")]
44#[builder_struct_attr(doc = "[`model`](Self::model())")]
45#[builder_struct_attr(doc = "[`query`](Self::query())")]
46#[builder_struct_attr(doc = "")]
47#[builder(name = "Builder")]
48pub struct Request {
49    /// ID of the engine to use for completion. You can select one of ada, babbage, curie, or davinci.
50    pub model: Model,
51    /// Query to be classified.
52    #[builder(setter(into))]
53    pub query: String,
54    /// A list of examples with labels, in the following format:
55    /// `[["The movie is so interesting.", "Positive"], ["It is quite boring.", "Negative"], ...]`
56    /// All the label strings will be normalized to be capitalized.
57    /// You should specify either examples or file, but not both.
58    #[builder(default, setter(strip_option, into))]
59    #[serde(skip_serializing_if = "Option::is_none")]
60    pub examples: Option<IntoVec<Vec<String>>>,
61    /// The ID of the uploaded file that contains training examples. See upload file for how to upload a file of the desired format and purpose.
62    /// You should specify either examples or file, but not both.
63    #[builder(default, setter(strip_option, into))]
64    #[serde(skip_serializing_if = "Option::is_none")]
65    pub file: Option<String>,
66    /// The set of categories being classified. If not specified, candidate labels will be automatically collected from the examples you provide.
67    /// All the label strings will be normalized to be capitalized.
68    #[builder(default, setter(strip_option, into))]
69    #[serde(skip_serializing_if = "Option::is_none")]
70    pub labels: Option<IntoVec<String>>,
71    /// ID of the engine to use for Search. You can select one of ada, babbage, curie, or davinci.
72    #[builder(default, setter(strip_option))]
73    #[serde(skip_serializing_if = "Option::is_none")]
74    pub search_model: Option<Model>,
75    /// What sampling temperature to use. Higher values mean the model will take more risks.
76    /// Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.
77    #[builder(default, setter(strip_option))]
78    #[serde(skip_serializing_if = "Option::is_none")]
79    pub temperature: Option<f64>,
80    /// Include the log probabilities on the logprobs most likely tokens, as well the chosen tokens. For example, if logprobs is 5, the API will return a list of the 5 most likely tokens.
81    ///  The API will always return the logprob of the sampled token, so there may be up to logprobs+1 elements in the response.
82    /// The maximum value for logprobs is 5. If you need more than this, please contact support@openai.com and describe your use case.
83    /// When logprobs is set, completion will be automatically added into expand to get the logprobs.
84    #[builder(default, setter(strip_option))]
85    #[serde(skip_serializing_if = "Option::is_none")]
86    pub logprobs: Option<u8>,
87    /// The maximum number of examples to be ranked by Search when using file.
88    /// Setting it to a higher value leads to improved accuracy but with increased latency and cost.
89    #[builder(default, setter(strip_option))]
90    #[serde(skip_serializing_if = "Option::is_none")]
91    pub max_examples: Option<u64>,
92    /// Modify the likelihood of specified tokens appearing in the completion.
93    /// Accepts a json object that maps tokens (specified by their token ID in the GPT tokenizer) to an associated bias value from -100 to 100.
94    /// You can use this tokenizer tool (which works for both GPT-2 and GPT-3) to convert text to token IDs.
95    /// Mathematically, the bias is added to the logits generated by the model prior to sampling.
96    /// The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection;
97    /// values like -100 or 100 should result in a ban or exclusive selection of the relevant token.
98    /// As an example, you can pass {"50256": -100} to prevent the <|endoftext|> token from being generated.
99    #[builder(default, setter(strip_option))]
100    #[serde(skip_serializing_if = "Option::is_none")]
101    pub logit_bias: Option<HashMap<String, i8>>,
102    /// If set to true, the returned JSON will include a "prompt" field containing the final prompt that was used to request a completion.
103    /// This is mainly useful for debugging purposes.
104    #[builder(default, setter(strip_option))]
105    #[serde(skip_serializing_if = "Option::is_none")]
106    pub return_prompt: Option<bool>,
107    /// A special boolean flag for showing metadata. If set to true, each document entry in the returned JSON will contain a "metadata" field.
108    /// This flag only takes effect when file is set.
109    #[builder(default, setter(strip_option))]
110    #[serde(skip_serializing_if = "Option::is_none")]
111    pub return_metadata: Option<bool>,
112    /// If an object name is in the list, we provide the full information of the object;
113    /// otherwise, we only provide the object ID. Currently we support completion and file objects for expansion.
114    #[builder(default, setter(strip_option, into))]
115    #[serde(skip_serializing_if = "Option::is_none")]
116    pub expand: Option<IntoVec<String>>,
117    /// A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.
118    #[builder(default, setter(strip_option, into))]
119    #[serde(skip_serializing_if = "Option::is_none")]
120    pub user: Option<String>,
121}
122
123/// A response corresponding to a [`Request`]
124#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
125pub struct Response {
126    /// completion id
127    pub completion: String,
128    /// The chosen label for the query
129    pub label: String,
130    /// The model used for the completion of the request
131    pub model: String,
132    /// The requested action
133    pub object: String,
134    /// The model used for the search
135    pub search_model: String,
136    /// The examples used to judge the query
137    pub selected_examples: Vec<SelectedExample>,
138}
139
140#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
141pub struct SelectedExample {
142    /// The document the example is in
143    pub document: usize,
144    /// The label of the example
145    pub label: String,
146    /// The text of the example
147    pub text: String,
148    /// A list of the n most likely tokens
149    pub logpropbs: Option<LogProbs>,
150}
151
152impl RequestInfo for Request {
153    fn url(&self) -> String {
154        format!("{OPENAI_URL}/classifications")
155    }
156}
157#[cfg_attr(not(feature = "blocking"), async_trait::async_trait)]
158impl crate::client::Request for Request {
159    type Response = Response;
160}