bevy_openai/
ai_client.rs

use bevy::log::info;
use derive_builder::Builder;
use openai_api_rs::v1::api::Client;
use openai_api_rs::v1::chat_completion;
use openai_api_rs::v1::chat_completion::ChatCompletionRequest;
use openai_api_rs::v1::common::GPT3_5_TURBO;

/// Connection settings for the OpenAI-compatible endpoint.
///
/// `derive_builder` generates a `ClientConfigBuilder`; every field has a
/// default, so `ClientConfigBuilder::default().build()` yields a working
/// config as long as `OPENAI_API_KEY` is set.
#[derive(Builder, Clone)]
pub struct ClientConfig {
    /// Base URL of the API; defaults to the official OpenAI endpoint.
    #[builder(default = "\"https://api.openai.com/v1\".to_owned()")]
    api_endpoint: String,
    /// API key; defaults to the `OPENAI_API_KEY` environment variable and
    /// panics at build time if it is not set.
    #[builder(default = "std::env::var(\"OPENAI_API_KEY\").expect(\"OPENAI_API_KEY not set\")")]
    api_key: String,
    /// Optional HTTP proxy URL.
    #[builder(default = "None")]
    proxy: Option<String>,
}

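// A minimal builder sketch, assuming only the `derive_builder`-generated
// `ClientConfigBuilder`; this helper is illustrative and not part of the
// crate's original API.
#[allow(dead_code)]
fn default_config() -> ClientConfig {
    // Every field falls back to its `#[builder(default = ...)]` expression,
    // so this panics only if OPENAI_API_KEY is unset.
    ClientConfigBuilder::default()
        .build()
        .expect("ClientConfig defaults should satisfy every field")
}
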
/// Sends `prompt` as a single user message and returns the model's reply.
///
/// Blocking call; panics if the request fails or the response has no content.
pub(crate) fn send_to_ai_internal(prompt: String, client_config: ClientConfig) -> String {
    // Build the client from the config; endpoint and proxy are set directly
    // on the client's public fields.
    let mut client = Client::new(client_config.api_key);
    client.api_endpoint = client_config.api_endpoint;
    client.proxy = client_config.proxy;

    info!("prompt: {}", prompt);
    let req = ChatCompletionRequest::new(
        GPT3_5_TURBO.to_string(),
        vec![chat_completion::ChatCompletionMessage {
            role: chat_completion::MessageRole::user,
            content: chat_completion::Content::Text(prompt),
            name: None,
        }],
    );
    let response = client.chat_completion(req).expect("Failed to send request");
    // Take the first choice; panics if the response carries no content.
    let res_str = response.choices[0]
        .message
        .content
        .clone()
        .expect("Failed to get response");
    info!("response: {}", res_str);
    res_str
}
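
#[cfg(test)]
mod tests {
    use super::*;

    // End-to-end sketch: ignored by default because it performs a real
    // network request and assumes OPENAI_API_KEY is set. The test name and
    // prompt are illustrative additions, not part of the original crate.
    #[test]
    #[ignore]
    fn sends_prompt_and_gets_reply() {
        let config = ClientConfigBuilder::default()
            .build()
            .expect("ClientConfig defaults should satisfy every field");
        let reply = send_to_ai_internal("Say hello.".to_owned(), config);
        assert!(!reply.is_empty());
    }
}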