rust_chatgpt/v1/completions.rs

use crate::error::ChatGptError;
use crate::v1::{convert_from_value, ChatGptRequest};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use std::collections::HashMap;

/// Response from the completions endpoint, stored as a raw JSON value.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ChatGptResponseCompletions {
    value: Value,
}

/// Request body for creating a completion.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ChatGptRequestCompletionsCreate {
    model: String,
    // The API accepts a string or an array; modeled here as an array of strings.
    prompt: Option<Vec<String>>,
    suffix: Option<String>,
    max_tokens: Option<isize>,
    temperature: Option<f32>,
    top_p: Option<f32>,
    n: Option<isize>,
    stream: Option<bool>,
    logprobs: Option<isize>,
    echo: Option<bool>,
    // The API accepts a string or an array; modeled here as an array of strings.
    stop: Option<Vec<String>>,
    presence_penalty: Option<f32>,
    frequency_penalty: Option<f32>,
    // Integer count of server-side candidates to generate.
    best_of: Option<isize>,
    logit_bias: Option<HashMap<String, isize>>,
}

impl ChatGptRequest for ChatGptRequestCompletionsCreate {
    /// Builds a request from a raw JSON value.
    fn from_value(value: Value) -> Result<Self, ChatGptError>
    where
        Self: Sized,
    {
        convert_from_value(value)
    }

    /// Serializes the request into a JSON value.
    fn to_value(&self) -> Value {
        json!(self)
    }
}

impl ChatGptRequestCompletionsCreate {
    /// Convenience constructor: sets the required `model` plus `max_tokens` and a
    /// single-element `prompt`; every other field starts as `None`.
    pub fn new(model: &str, max_tokens: isize, prompt: &str) -> Self {
        Self {
            model: model.to_string(),
            max_tokens: Some(max_tokens),
            prompt: Some(vec![prompt.to_string()]),
            ..Default::default()
        }
    }
}
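
// Usage sketch (works within this module, since the fields are private): optional
// sampling parameters can be layered on top of the constructor with struct update
// syntax. The model name and prompt below are illustrative only.
//
//     let request = ChatGptRequestCompletionsCreate {
//         temperature: Some(0.7),
//         stop: Some(vec!["\n".to_string()]),
//         ..ChatGptRequestCompletionsCreate::new("text-davinci-003", 64, "Write a haiku")
//     };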

impl Default for ChatGptRequestCompletionsCreate {
    fn default() -> Self {
        ChatGptRequestCompletionsCreate {
            model: "".to_string(),
            prompt: None,
            suffix: None,
            max_tokens: None,
            temperature: None,
            top_p: None,
            n: None,
            stream: None,
            logprobs: None,
            echo: None,
            stop: None,
            presence_penalty: None,
            frequency_penalty: None,
            best_of: None,
            logit_bias: None,
        }
    }
}

// Currently uninhabited; no variants are defined.
pub enum ChatGptPrompt {}
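
// A minimal round-trip sketch (not part of the public API), assuming
// `convert_from_value` deserializes the JSON value via serde; the model name
// and prompt strings are illustrative only.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn completions_request_round_trips_through_json() {
        // Build a request with the convenience constructor.
        let request = ChatGptRequestCompletionsCreate::new("text-davinci-003", 16, "Say hello");

        // Serialize it to a serde_json::Value and spot-check a few fields.
        let value = request.to_value();
        assert_eq!(value["model"], "text-davinci-003");
        assert_eq!(value["max_tokens"], 16);
        assert_eq!(value["prompt"][0], "Say hello");

        // Deserialize it back through the trait method.
        let parsed = match ChatGptRequestCompletionsCreate::from_value(value) {
            Ok(parsed) => parsed,
            Err(_) => panic!("round-trip deserialization should succeed"),
        };
        assert_eq!(parsed.model, "text-davinci-003");
        assert_eq!(parsed.max_tokens, Some(16));
    }
}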