// Source file: chatgpt/chatgpt.rs (crate root)

1#![warn(missing_docs)]
2#![doc = include_str!("../README.md")]
3#![cfg_attr(docsrs, feature(doc_cfg))]
4
5/// This module contains the ChatGPT client
6pub mod client;
7/// This module contains additional configuration for ChatGPT
8pub mod config;
9/// Conversation related types
10pub mod converse;
11/// This module contains the errors related to the API
12pub mod err;
13#[cfg(feature = "functions")]
14/// Contains API for function calling
15pub mod functions;
16/// The prelude module. Import everything from it to get the necessary elements from this library
17pub mod prelude;
18/// Types returned from the API and sent to it
19pub mod types;
20
/// Result that is returned from most ChatGPT functions
///
/// Shorthand for [`std::result::Result`] with this crate's [`err::Error`]
/// as the error type, so fallible APIs can be written as `crate::Result<T>`.
pub type Result<T> = std::result::Result<T, err::Error>;
23
#[cfg(test)]
pub mod test {
    //! Integration tests for the ChatGPT client.
    //!
    //! Every test here talks to the live OpenAI API and therefore requires
    //! the `TEST_API_KEY` environment variable to hold a valid API key.

    use std::path::Path;

    use futures::StreamExt;

    use crate::{client::ChatGPT, config::ModelConfiguration, types::ResponseChunk};

    /// Smoke test: a single one-off message yields at least one choice.
    #[tokio::test]
    async fn test_client() -> crate::Result<()> {
        let client = ChatGPT::new(std::env::var("TEST_API_KEY")?)?;
        let resp = client
            .send_message("Write me a short pun about the Rust language.")
            .await?;
        assert!(!resp.message_choices.is_empty());
        Ok(())
    }

    /// A conversation without a directing system prompt still produces a reply.
    #[tokio::test]
    async fn test_undirected_conversation() -> crate::Result<()> {
        let client = ChatGPT::new(std::env::var("TEST_API_KEY")?)?;
        let mut conv = client.new_conversation();
        let resp = conv
            .send_message("Could you tell me what day is it today?")
            .await?;
        assert!(!resp.message_choices.is_empty());
        Ok(())
    }

    /// A directed conversation retains context across two consecutive messages.
    #[tokio::test]
    async fn test_conversation() -> crate::Result<()> {
        let client = ChatGPT::new(std::env::var("TEST_API_KEY")?)?;
        let mut conv = client.new_conversation_directed(
            "You are TestGPT, an AI model developed in Rust in year 2023.",
        );
        let resp_a = conv.send_message("Could you tell me who you are?").await?;
        let resp_b = conv
            .send_message("What did I ask you about in my first question?")
            .await?;
        assert!(!resp_a.message_choices.is_empty() && !resp_b.message_choices.is_empty());
        Ok(())
    }

    /// Conversation history can be persisted to a JSON file on disk.
    ///
    /// NOTE(review): this test writes `history.json`, which
    /// `test_conversation_restoring` then reads — the two tests share on-disk
    /// state and implicitly depend on execution order; confirm this is
    /// intentional (e.g. run with `--test-threads=1`) or use distinct paths.
    #[tokio::test]
    async fn test_conversation_saving() -> crate::Result<()> {
        let client = ChatGPT::new(std::env::var("TEST_API_KEY")?)?;
        let mut conv = client.new_conversation_directed(
            "You are TestGPT, an AI model developed in Rust in year 2023.",
        );
        let _resp_a = conv.send_message("Could you tell me who you are?").await?;
        let _resp_b = conv
            .send_message("What did I ask you about in my first question?")
            .await?;
        conv.save_history_json("history.json").await?;
        let path: &Path = "history.json".as_ref();
        assert!(path.exists());
        Ok(())
    }

    /// A conversation restored from a JSON history file can keep going.
    ///
    /// NOTE(review): depends on `history.json` produced by
    /// `test_conversation_saving` — see the ordering note on that test.
    #[tokio::test]
    async fn test_conversation_restoring() -> crate::Result<()> {
        let client = ChatGPT::new(std::env::var("TEST_API_KEY")?)?;
        let mut conv = client.restore_conversation_json("history.json").await?;
        let _resp = conv
            .send_message("Could you tell me what did I ask you about in my first question?")
            .await?;
        conv.save_history_json("history.json").await?;
        Ok(())
    }

    /// A custom `reply_count` of 3 must produce exactly three message choices.
    #[tokio::test]
    async fn test_some_config() -> crate::Result<()> {
        let client = ChatGPT::new_with_config(
            std::env::var("TEST_API_KEY")?,
            ModelConfiguration {
                temperature: 0.9,
                reply_count: 3,
                ..Default::default()
            },
        )?;
        let response = client
            .send_message("Could you give me names of three popular Rust web frameworks?")
            .await?;
        assert_eq!(response.message_choices.len(), 3);
        Ok(())
    }

    /// A streamed one-off message ends with the `Done` chunk.
    #[tokio::test]
    async fn test_streaming() -> crate::Result<()> {
        let client = ChatGPT::new(std::env::var("TEST_API_KEY")?)?;
        let response = client
            .send_message_streaming("Could you give me names of three popular Rust web frameworks?")
            .await?;
        let collected = response.collect::<Vec<ResponseChunk>>().await;
        assert_eq!(collected.last().unwrap().to_owned(), ResponseChunk::Done);
        Ok(())
    }

    /// A streamed message inside an ongoing conversation ends with `Done`.
    #[tokio::test]
    async fn test_streaming_conv() -> crate::Result<()> {
        let client = ChatGPT::new(std::env::var("TEST_API_KEY")?)?;
        let mut conv = client.new_conversation();
        let _ = conv
            .send_message("Could you give me names of three popular Rust web frameworks?")
            .await?;
        let streamed = conv
            .send_message_streaming("Now could you do the same but for Kotlin?")
            .await?;
        let collected = streamed.collect::<Vec<ResponseChunk>>().await;
        assert_eq!(collected.last().unwrap().to_owned(), ResponseChunk::Done);
        Ok(())
    }

    /// With a tiny `max_tokens` limit, the completion is truncated and the
    /// API reports the finish reason "length".
    #[tokio::test]
    async fn test_max_token_config() -> crate::Result<()> {
        let client = ChatGPT::new_with_config(
            std::env::var("TEST_API_KEY")?,
            ModelConfiguration {
                max_tokens: Some(10),
                ..Default::default()
            },
        )?;
        let response = client
            .send_message("Could you give me names of three popular Rust web frameworks?")
            .await?;
        assert_eq!(
            response.message_choices.first().unwrap().finish_reason,
            "length".to_string()
        );
        Ok(())
    }

    /// With the default configuration (no `max_tokens` limit) the model
    /// finishes naturally, so the finish reason is "stop".
    ///
    /// BUG FIX: this previously asserted "length" — a copy-paste from
    /// `test_max_token_config` — which only occurs when a token limit
    /// truncates the output.
    #[tokio::test]
    async fn test_default_max_token_config() -> crate::Result<()> {
        let client = ChatGPT::new_with_config(
            std::env::var("TEST_API_KEY")?,
            ModelConfiguration {
                ..Default::default()
            },
        )?;
        let response = client
            .send_message("Could you give me names of three popular Rust web frameworks?")
            .await?;
        assert_eq!(
            response.message_choices.first().unwrap().finish_reason,
            "stop".to_string()
        );
        Ok(())
    }
}