// Import necessary configurations and traits from the Ambi framework.
use ambi::llm::providers::openai_api::config::OpenAIEngineConfig;
use ambi::types::ChatTemplateType;
use ambi::{Agent, ChatRunner};
use ambi::{AgentState, LLMEngineConfig};
use anyhow::Result;
#[tokio::main]
async fn main() -> Result<()> {
    // Step 1: Define the system prompt to set the persona and behavior of the AI assistant.
    let system_prompt = "You are a helpful and harmless AI assistant.";

    // Step 2: Retrieve the API key from the environment.
    // Falls back to a placeholder string if the environment variable is not set,
    // so the example still compiles and runs (the request itself would then fail auth).
    let api_key =
        std::env::var("OPENAI_API_KEY").unwrap_or_else(|_| "your-default-api-key".to_string());

    // Step 3: Configure the cloud-based LLM engine.
    // Here we use the OpenAI API configuration format, which is compatible with many providers.
    let engine_config = LLMEngineConfig::OpenAI(OpenAIEngineConfig {
        api_key,
        base_url: "https://api.openai.com/v1".to_string(), // The base URL of the API endpoint.
        model_name: "gpt-4o-mini".to_string(),             // The specific model to use.
        temp: 0.7,  // Controls randomness (higher is more creative).
        top_p: 0.9, // Controls diversity via nucleus sampling.
    });

    // Step 4: Instantiate the ChatRunner with its default configuration;
    // it drives the request/response cycle between the agent and the model.
    let chat_runner = ChatRunner::default();

    // Step 5: Instantiate the Agent using the builder pattern.
    // We pass the engine configuration, set the chat template, and inject the system prompt.
    let agent = Agent::make(engine_config)
        .await?
        .template(ChatTemplateType::Chatml)
        .preamble(system_prompt);

    // Step 6: Initialize a thread-safe, shared agent state via the new_shared() convenience constructor.
    let agent_state = AgentState::new_shared("session-id");

    // Step 7: Send a chat request to the LLM and await the reply.
    // The agent will process the prompt, interact with the model, and return the final string.
    let res = chat_runner
        .chat(&agent, &agent_state, "Who are you and what can you do?")
        .await?;

    // Step 8: Print the final response received from the model.
    // `println!` (not `print!`) terminates the output with a newline, which also
    // ensures line-buffered stdout is flushed before the program exits.
    println!("{}", res);
    Ok(())
}