Expand description
§Chat Module
This module provides functionality for interacting with the OpenAI Chat Completions API. It includes tools for building chat completion requests, sending them to OpenAI’s endpoint, and parsing the responses.
§Key Features
- Chat completion request building and sending
- Structured output support with JSON schema
- Response parsing and processing
- Support for various OpenAI models and parameters
§Usage Examples
§Basic Chat Completion
use openai_tools::chat::request::ChatCompletion;
use openai_tools::common::message::Message;
use openai_tools::common::role::Role;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // A single-turn conversation: one user message.
    let conversation = vec![Message::from_string(Role::User, "Hello!")];

    // Configure the request on the client, then send it.
    let mut client = ChatCompletion::new();
    let response = client
        .model_id("gpt-4o-mini")
        .messages(conversation)
        .temperature(1.0)
        .chat()
        .await?;

    // Take the assistant's text from the first choice and print it.
    // NOTE(review): `unwrap()` is fine for a doc example; real code should
    // handle a missing `content`/`text` gracefully.
    let reply = response.choices[0].message.content.as_ref().unwrap().text.as_ref().unwrap();
    println!("{}", reply);
    Ok(())
}
§Using JSON Schema for Structured Output
use openai_tools::chat::request::ChatCompletion;
use openai_tools::common::message::Message;
use openai_tools::common::role::Role;
use openai_tools::common::structured_output::Schema;
use serde::{Deserialize, Serialize};
/// Deserialization target for the model's structured (JSON-schema) reply.
/// Each field name must match a property registered on the schema,
/// since `serde_json::from_str` maps JSON keys to these fields.
#[derive(Debug, Serialize, Deserialize)]
struct WeatherInfo {
// Location the forecast refers to (e.g. "Tokyo").
location: String,
// Date the forecast applies to.
date: String,
// Free-text weather condition description.
weather: String,
// Temperature information as free text (not a numeric type).
temperature: String,
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut client = ChatCompletion::new();
    let conversation = vec![Message::from_string(
        Role::User,
        "What's the weather like tomorrow in Tokyo?"
    )];

    // Build the JSON schema the model's answer must conform to.
    // All four properties are plain strings; drive the registration
    // from a table so name and description stay side by side.
    let mut json_schema = Schema::chat_json_schema("weather");
    let fields = [
        ("location", "The location for weather check"),
        ("date", "The date for weather forecast"),
        ("weather", "Weather condition description"),
        ("temperature", "Temperature information"),
    ];
    for (name, description) in fields {
        json_schema.add_property(name, "string", description);
    }

    let response = client
        .model_id("gpt-4o-mini")
        .messages(conversation)
        .temperature(0.7)
        .json_schema(json_schema)
        .chat()
        .await?;

    // The reply text is JSON matching the schema; deserialize it.
    let raw = response.choices[0].message.content.as_ref().unwrap().text.as_ref().unwrap();
    let weather: WeatherInfo = serde_json::from_str(raw)?;
    println!("Weather in {}: {} on {}, Temperature: {}",
        weather.location, weather.weather, weather.date, weather.temperature);
    Ok(())
}
§Using Function Calling with Tools
use openai_tools::chat::request::ChatCompletion;
use openai_tools::common::message::Message;
use openai_tools::common::role::Role;
use openai_tools::common::tool::Tool;
use openai_tools::common::parameters::ParameterProp;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut client = ChatCompletion::new();
    let conversation = vec![Message::from_string(
        Role::User,
        "Please calculate 15 * 23 using the calculator tool"
    )];

    // Describe the calculator function the model is allowed to call.
    let calculator_tool = Tool::function(
        "calculator",
        "A calculator that can perform basic arithmetic operations",
        vec![
            ("operation", ParameterProp::string("The operation to perform (add, subtract, multiply, divide)")),
            ("a", ParameterProp::number("The first number")),
            ("b", ParameterProp::number("The second number")),
        ],
        false, // strict mode disabled
    );

    let response = client
        .model_id("gpt-4o-mini")
        .messages(conversation)
        .temperature(0.1)
        .tools(vec![calculator_tool])
        .chat()
        .await?;

    // The model either requested one or more tool calls, or answered
    // directly in plain text — handle both outcomes.
    let message = &response.choices[0].message;
    if let Some(tool_calls) = &message.tool_calls {
        for tool_call in tool_calls {
            println!("Function called: {}", tool_call.function.name);
            if let Ok(args) = tool_call.function.arguments_as_map() {
                println!("Arguments: {:?}", args);
            }
            // In a real application, you would execute the function here
            // and send the result back to continue the conversation
        }
    } else if let Some(content) = &message.content {
        if let Some(text) = &content.text {
            println!("{}", text);
        }
    }
    Ok(())
}