// chatgpt/chatgpt.rs — crate root (lib.rs) of the `chatgpt` library

#![warn(missing_docs)]
#![doc = include_str!("../README.md")]
#![cfg_attr(docsrs, feature(doc_cfg))]

/// This module contains the ChatGPT client
pub mod client;
/// This module contains additional configuration for ChatGPT
pub mod config;
/// Conversation related types
pub mod converse;
/// This module contains the errors related to the API
pub mod err;
#[cfg(feature = "functions")]
/// Contains API for function calling (only available with the `functions` feature)
pub mod functions;
/// The prelude module. Import everything from it to get the necessary elements from this library
pub mod prelude;
/// Types returned from the API and sent to it
pub mod types;

/// Result that is returned from most ChatGPT functions; the error variant is [`err::Error`]
pub type Result<T> = std::result::Result<T, err::Error>;
23
#[cfg(test)]
pub mod test {
    //! Integration tests against the live OpenAI API.
    //!
    //! All tests read the API key from the `TEST_API_KEY` environment
    //! variable and perform real network requests, so they require both
    //! network access and a valid key.

    use std::path::PathBuf;

    use futures::StreamExt;

    use crate::{client::ChatGPT, config::ModelConfiguration, types::ResponseChunk};

    /// Builds a default-configured client from the `TEST_API_KEY` env var.
    fn make_client() -> crate::Result<ChatGPT> {
        ChatGPT::new(std::env::var("TEST_API_KEY")?)
    }

    /// Smoke test: a single stateless message gets at least one choice back.
    #[tokio::test]
    async fn test_client() -> crate::Result<()> {
        let client = make_client()?;
        let resp = client
            .send_message("Write me a short pun about the Rust language.")
            .await?;
        assert!(!resp.message_choices.is_empty());
        Ok(())
    }

    /// Conversation without a system directive still yields a response.
    #[tokio::test]
    async fn test_undirected_conversation() -> crate::Result<()> {
        let client = make_client()?;
        let mut conv = client.new_conversation();
        let resp = conv
            .send_message("Could you tell me what day is it today?")
            .await?;
        assert!(!resp.message_choices.is_empty());
        Ok(())
    }

    /// Directed conversation keeps history across two messages.
    #[tokio::test]
    async fn test_conversation() -> crate::Result<()> {
        let client = make_client()?;
        let mut conv = client.new_conversation_directed(
            "You are TestGPT, an AI model developed in Rust in year 2023.",
        );
        let resp_a = conv.send_message("Could you tell me who you are?").await?;
        let resp_b = conv
            .send_message("What did I ask you about in my first question?")
            .await?;
        assert!(!resp_a.message_choices.is_empty() && !resp_b.message_choices.is_empty());
        Ok(())
    }

    /// Saving and restoring a conversation's history round-trips through JSON.
    ///
    /// Save and restore are exercised in a single test on purpose: cargo runs
    /// tests concurrently and in no guaranteed order, so a separate "restore"
    /// test must not depend on a separate "save" test having run first. The
    /// history file lives in the OS temp directory to avoid polluting the
    /// working directory.
    #[tokio::test]
    async fn test_conversation_saving_and_restoring() -> crate::Result<()> {
        let client = make_client()?;
        let history_file: PathBuf = std::env::temp_dir().join("chatgpt_test_history.json");
        let mut conv = client.new_conversation_directed(
            "You are TestGPT, an AI model developed in Rust in year 2023.",
        );
        let _resp_a = conv.send_message("Could you tell me who you are?").await?;
        let _resp_b = conv
            .send_message("What did I ask you about in my first question?")
            .await?;
        conv.save_history_json(&history_file).await?;
        assert!(history_file.exists());
        // Restore from the file we just wrote and continue the conversation.
        let mut restored = client.restore_conversation_json(&history_file).await?;
        let _resp = restored
            .send_message("Could you tell me what did I ask you about in my first question?")
            .await?;
        Ok(())
    }

    /// A `reply_count` of 3 in the configuration yields exactly 3 choices.
    #[tokio::test]
    async fn test_some_config() -> crate::Result<()> {
        let client = ChatGPT::new_with_config(
            std::env::var("TEST_API_KEY")?,
            ModelConfiguration {
                temperature: 0.9,
                reply_count: 3,
                ..Default::default()
            },
        )?;
        let response = client
            .send_message("Could you give me names of three popular Rust web frameworks?")
            .await?;
        assert_eq!(response.message_choices.len(), 3);
        Ok(())
    }

    /// Streaming a stateless message ends with a [`ResponseChunk::Done`] marker.
    #[tokio::test]
    async fn test_streaming() -> crate::Result<()> {
        let client = make_client()?;
        let response = client
            .send_message_streaming("Could you give me names of three popular Rust web frameworks?")
            .await?;
        let collected = response
            .map(|part| part.unwrap())
            .collect::<Vec<ResponseChunk>>()
            .await;
        assert_eq!(collected.last().unwrap().to_owned(), ResponseChunk::Done);
        Ok(())
    }

    /// Streaming within a conversation also terminates with `Done`.
    #[tokio::test]
    async fn test_streaming_conv() -> crate::Result<()> {
        let client = make_client()?;
        let mut conv = client.new_conversation();
        let _ = conv
            .send_message("Could you give me names of three popular Rust web frameworks?")
            .await?;
        let streamed = conv
            .send_message_streaming("Now could you do the same but for Kotlin?")
            .await?;
        let collected = streamed
            .map(|part| part.unwrap())
            .collect::<Vec<ResponseChunk>>()
            .await;
        assert_eq!(collected.last().unwrap().to_owned(), ResponseChunk::Done);
        Ok(())
    }

    /// A small `max_tokens` limit makes the model stop with reason "length".
    #[tokio::test]
    async fn test_max_token_config() -> crate::Result<()> {
        let client = ChatGPT::new_with_config(
            std::env::var("TEST_API_KEY")?,
            ModelConfiguration {
                max_tokens: Some(10),
                ..Default::default()
            },
        )?;
        let response = client
            .send_message("Could you give me names of three popular Rust web frameworks?")
            .await?;
        assert_eq!(
            response.message_choices.first().unwrap().finish_reason,
            "length".to_string()
        );
        Ok(())
    }

    /// NOTE(review): despite its name this does NOT exercise the default
    /// configuration — it sets `max_tokens: Some(1)` and duplicates the
    /// assertion of `test_max_token_config` at a tighter limit. Behavior is
    /// kept as-is; confirm whether a `max_tokens: None` default test was
    /// intended instead.
    #[tokio::test]
    async fn test_default_max_token_config() -> crate::Result<()> {
        let client = ChatGPT::new_with_config(
            std::env::var("TEST_API_KEY")?,
            ModelConfiguration {
                max_tokens: Some(1),
                ..Default::default()
            },
        )?;
        let response = client
            .send_message("Could you give me names of three popular Rust web frameworks?")
            .await?;
        assert_eq!(
            response.message_choices.first().unwrap().finish_reason,
            "length".to_string()
        );
        Ok(())
    }
}