// openai_gpt_client/text_completion.rs

use std::fmt::{Display, Formatter};

use serde::{Deserialize, Serialize};
use serde_json::Value;

use crate::{client::Stop, model_variants::ModelId};
8#[serde_with::skip_serializing_none]
9#[derive(Debug, Default, Deserialize, Serialize)]
10pub struct TextCompletionRequest {
11 pub model: ModelId,
12 pub prompt: String,
13 pub suffix: Option<String>,
14 pub max_tokens: Option<i32>,
15 pub temperature: Option<f64>,
16 pub top_p: Option<f64>,
17 pub n: Option<i32>,
18 pub stream: Option<bool>,
19 pub logprobs: Option<i32>,
20 pub echo: Option<bool>,
21 pub stop: Option<Stop>,
22 pub presence_penalty: Option<f64>,
23 pub frequency_penalty: Option<f64>,
24 pub best_of: Option<i32>,
25 pub logit_bias: Option<Value>,
26 pub user: Option<String>,
27}
28
29#[derive(Debug, Deserialize, Serialize)]
30pub struct TextCompletionResponse {
31 pub id: Option<String>,
32 pub object: Option<String>,
33 pub created: Option<i32>,
34 pub model: Option<String>,
35 pub choices: Option<Vec<TextCompletionChoice>>,
36 pub usage: Option<TextCompletionUsage>,
37}
38
39impl Display for TextCompletionResponse {
40 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
41 if let Some(choices) = &self.choices {
42 if let Some(text) = choices.get(0) {
43 write!(f, "{}", text.text)
44 } else {
45 write!(f, "No response")
46 }
47 } else {
48 write!(f, "No response")
49 }
50 }
51}
52
53#[derive(Debug, Deserialize, Serialize)]
54pub struct TextCompletionChoice {
55 pub text: String,
56 pub index: i32,
57 pub logprobs: Option<TextCompletionLogprobs>,
58 pub finish_reason: Option<String>,
59}
60
61#[derive(Debug, Deserialize, Serialize)]
62pub struct TextCompletionLogprobs {
63 }
65
66#[derive(Debug, Deserialize, Serialize)]
67pub struct TextCompletionUsage {
68 pub prompt_tokens: i32,
69 pub completion_tokens: i32,
70 pub total_tokens: i32,
71}