1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
use http_req::{
    request::{Method, Request},
    uri::Uri,
};
use serde::Deserialize;
use std::fmt;
use urlencoding::encode;

use crate::Retry;

/// Response struct for the chat completion.
///
/// `restarted` is the flag showing whether a new conversation was created
/// for this request (see `ChatOptions::restart`).
///
/// `choice` is the completion text returned from ChatGPT.
///
#[derive(Debug, Deserialize)]
pub struct ChatResponse {
    // true when the backend started a fresh conversation for this call
    pub restarted: bool,
    // the model's reply text
    pub choice: String,
}

impl Default for ChatResponse {
    fn default() -> ChatResponse {
        ChatResponse {
            restarted: true,
            choice: String::new(),
        }
    }
}

/// Models for Chat
///
/// Selects which OpenAI chat model serves the completion request; the
/// wire identifier for each variant comes from this type's `Display` impl.
#[derive(Debug, Clone, Copy)]
pub enum ChatModel {
    /// "gpt-4-32k"
    GPT4_32K,
    /// "gpt-4"
    GPT4,
    /// "gpt-3.5-turbo" (the default)
    GPT35Turbo,
}

impl fmt::Display for ChatModel {
    /// Writes the OpenAI API identifier for the model, e.g. "gpt-3.5-turbo".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let id = match self {
            ChatModel::GPT4_32K => "gpt-4-32k",
            ChatModel::GPT4 => "gpt-4",
            ChatModel::GPT35Turbo => "gpt-3.5-turbo",
        };
        f.write_str(id)
    }
}

impl Default for ChatModel {
    fn default() -> ChatModel {
        ChatModel::GPT35Turbo
    }
}

/// Struct for setting the chat options.
///
/// `model` selects which [`ChatModel`] handles the request
/// (GPT-3.5 Turbo when defaulted).
///
/// When `restart` is true, a new conversation will be created.
///
/// `system_prompt` will be treated as the prompt of the system role.
///
#[derive(Debug, Default)]
pub struct ChatOptions<'a> {
    // which model handles the completion
    pub model: ChatModel,
    // true => ask the backend to start a fresh conversation
    pub restart: bool,
    // optional system-role prompt sent alongside the user sentence
    pub system_prompt: Option<&'a str>,
}

impl crate::OpenAIFlows {
    /// Create a chat completion with the provided sentence.
    /// It uses one of OpenAI's chat models (default
    /// [GPT-3.5](https://platform.openai.com/docs/models/gpt-3-5), see
    /// [`ChatOptions::model`]) to make a conversation.
    ///
    /// `conversation_id` is the identity of the conversation.
    /// The history will be fetched and attached to the `sentence` as a whole prompt for ChatGPT.
    ///
    /// `sentence` is a `&str` representing the next sentence of the conversation.
    ///
    /// `options` tunes the request: model choice, whether to restart the
    /// conversation, and an optional system prompt.
    ///
    /// If you have not connected your OpenAI account with [Flows.network platform](https://flows.network),
    /// you will receive an error in the flow's building log or running log.
    ///
    pub async fn chat_completion(
        &self,
        conversation_id: &str,
        sentence: &str,
        options: &ChatOptions<'_>,
    ) -> Result<ChatResponse, String> {
        // NOTE(review): `keey_trying` looks like a typo for `keep_trying`;
        // it is defined elsewhere in the crate, so any rename must happen
        // there first to keep the crate compiling.
        self.keey_trying(|account| {
            chat_completion_inner(account, conversation_id, sentence, options)
        })
    }
}

/// Performs one chat-completion round trip against the flows.network
/// OpenAI proxy, reporting via [`Retry`] whether the caller should retry.
///
/// `account` is the connected OpenAI account name; `conversation_id`
/// identifies the conversation whose history the backend attaches to
/// `sentence`; `options` supplies the model, restart flag and optional
/// system prompt.
///
/// Returns `Retry::Yes(body)` on transient HTTP failures (409/429/503)
/// and `Retry::No(Ok | Err)` otherwise.
///
/// # Panics
/// Panics if the host cannot supply a flow id, or if the host-provided
/// user/flow-id bytes are not valid UTF-8.
fn chat_completion_inner(
    account: &str,
    conversation_id: &str,
    sentence: &str,
    options: &ChatOptions,
) -> Retry<ChatResponse> {
    // Read the flows.network user name from the host.
    let mut flows_user = Vec::<u8>::with_capacity(100);
    // SAFETY: the host writes the user name into the provided buffer and
    // returns the number of bytes written, assumed to fit within the
    // 100-byte capacity reserved above — TODO confirm the host-side bound.
    unsafe {
        let c = crate::get_flows_user(flows_user.as_mut_ptr());
        flows_user.set_len(c as usize);
    }
    // NOTE(review): unlike `flow_id` below, a zero-length user is not
    // rejected here; preserved as-is, but verify this is intentional.
    let flows_user = String::from_utf8(flows_user).expect("flows user is not valid UTF-8");

    // Read the flow id from the host; zero bytes means it is unavailable.
    let mut flow_id = Vec::<u8>::with_capacity(100);
    // SAFETY: same contract as `get_flows_user` above — the host returns
    // how many bytes it wrote into the buffer.
    let c = unsafe { crate::get_flow_id(flow_id.as_mut_ptr()) };
    if c == 0 {
        panic!("Failed to get flow id");
    }
    // SAFETY: `c` bytes were written by the host and c != 0 was checked.
    unsafe { flow_id.set_len(c as usize) };
    let flow_id = String::from_utf8(flow_id).expect("flow id is not valid UTF-8");

    // Build the proxy endpoint; caller-supplied parts are percent-encoded.
    let uri = format!(
        "{}/{}/{}/chat_completion?account={}&conversation={}&model={}&restart={}",
        crate::OPENAI_API_PREFIX.as_str(),
        flows_user,
        flow_id,
        encode(account),
        encode(conversation_id),
        options.model,
        options.restart,
    );
    let uri = Uri::try_from(uri.as_str()).expect("constructed URI is invalid");
    let body = serde_json::to_vec(&serde_json::json!({
        "sentence": sentence,
        "system_prompt": options.system_prompt
    }))
    .unwrap_or_default();

    let mut writer = Vec::new();
    let result = Request::new(&uri)
        .method(Method::POST)
        .header("Content-Type", "application/json")
        .header("Content-Length", &body.len())
        .body(&body)
        .send(&mut writer);

    match result {
        Ok(res) => {
            if res.status_code().is_success() {
                Retry::No(
                    serde_json::from_slice::<ChatResponse>(&writer)
                        // lazy map_err instead of eager `.or(Err(...))`
                        .map_err(|_| String::from("Unexpected error")),
                )
            } else {
                let code: u16 = res.status_code().into();
                match code {
                    // 409 TryAgain, 429 RateLimitError, 503 ServiceUnavailable:
                    // transient — signal the caller to retry with the raw body.
                    409 | 429 | 503 => Retry::Yes(String::from_utf8_lossy(&writer).into_owned()),
                    _ => Retry::No(Err(String::from_utf8_lossy(&writer).into_owned())),
                }
            }
        }
        Err(e) => Retry::No(Err(e.to_string())),
    }
}