pub struct ChatArguments {
pub model: String,
pub messages: Vec<Message>,
pub temperature: Option<f32>,
pub top_p: Option<f32>,
pub n: Option<u32>,
pub stream: Option<bool>,
pub stop: Option<String>,
pub max_tokens: Option<u32>,
pub presence_penalty: Option<f32>,
pub frequency_penalty: Option<f32>,
pub user: Option<String>,
pub response_format: Option<ResponseFormat>,
pub image_generation: Option<ImageGeneration>,
pub grok_tools: Option<Vec<GrokTool>>,
pub tools: Option<Vec<OpenAITool>>,
}

Fields

grok_tools: Option<Vec<GrokTool>>
xAI Agent Tools API - server-side tools for agentic capabilities. Includes: web_search, x_search, code_execution, collections_search, mcp. See: https://docs.x.ai/docs/guides/tools/overview

tools: Option<Vec<OpenAITool>>
OpenAI Agent Tools API - server-side tools for agentic capabilities (Responses API only). Includes: web_search, file_search, code_interpreter. Note: When tools are provided, use create_openai_responses() to use the Responses API endpoint. See: https://platform.openai.com/docs/guides/tools-web-search
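Every field other than model and messages is optional; one way to set them is struct update syntax over a ChatArguments::new base, since all fields are public. A minimal sketch (the model name and sampling values below are arbitrary):

use openai_rust::chat::{ChatArguments, Message};

// Build the required fields with new(), then override optional ones.
// All fields are public, so struct update syntax works here.
let args = ChatArguments {
    temperature: Some(0.7),
    max_tokens: Some(256),
    ..ChatArguments::new(
        "gpt-4o",
        vec![Message {
            role: "user".to_owned(),
            content: "Hello!".to_owned(),
        }],
    )
};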
Implementations

impl ChatArguments

pub fn new(model: impl AsRef<str>, messages: Vec<Message>) -> ChatArguments
Examples found in repository

async fn main() {
    let client = openai_rust::Client::new(&std::env::var("OPENAI_API_KEY").unwrap());
    let args = openai_rust::chat::ChatArguments::new(
        "gpt-3.5-turbo",
        vec![openai_rust::chat::Message {
            role: "user".to_owned(),
            content: "Hello GPT!".to_owned(),
        }],
    );
    let res = client.create_chat(args, None).await.unwrap();
    println!("{}", res);
}

More examples
async fn main() {
    let client = openai_rust::Client::new(&std::env::var("OPENAI_API_KEY").unwrap());
    let args = openai_rust::chat::ChatArguments::new(
        "gpt-3.5-turbo",
        vec![openai_rust::chat::Message {
            role: "user".to_owned(),
            content: "Hello GPT!".to_owned(),
        }],
    );
    let mut res = client.create_chat_stream(args, None).await.unwrap();
    while let Some(chunk) = res.next().await {
        print!("{}", chunk.unwrap());
        std::io::stdout().flush().unwrap();
    }
}
pub fn with_grok_tools(self, tools: Vec<GrokTool>) -> Self
Add xAI server-side tools for agentic capabilities.
Recommended model: grok-4-1-fast for best tool-calling performance.
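A minimal sketch of attaching xAI tools, assuming GrokTool is exported from the chat module; how individual GrokTool values are constructed depends on that type, so the vector is left empty here:

use openai_rust::chat::{ChatArguments, GrokTool, Message};

// Populate with concrete GrokTool values (web_search, x_search,
// code_execution, ...) per the xAI tools guide; left empty in this sketch.
let tools: Vec<GrokTool> = vec![];

let args = ChatArguments::new(
    "grok-4-1-fast",
    vec![Message {
        role: "user".to_owned(),
        content: "What changed in the latest Rust release?".to_owned(),
    }],
)
.with_grok_tools(tools);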
pub fn with_openai_tools(self, tools: Vec<OpenAITool>) -> Self
Add OpenAI server-side tools for agentic capabilities (Responses API).
Note: When tools are provided, use create_openai_responses() to use the Responses API endpoint.
Recommended models: gpt-5, gpt-4o.
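A similar sketch for OpenAI server-side tools, assuming OpenAITool is exported from the chat module; requests that carry these tools should go through the Responses API rather than create_chat():

use openai_rust::chat::{ChatArguments, Message, OpenAITool};

// Populate with concrete OpenAITool values (web_search, file_search,
// code_interpreter); left empty in this sketch.
let tools: Vec<OpenAITool> = vec![];

let args = ChatArguments::new(
    "gpt-4o",
    vec![Message {
        role: "user".to_owned(),
        content: "Find today's top Rust news.".to_owned(),
    }],
)
.with_openai_tools(tools);

// Send via the Responses API endpoint, e.g. client.create_openai_responses(...);
// check that method's actual signature in the client docs before relying on it.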
Trait Implementations

impl Clone for ChatArguments

fn clone(&self) -> ChatArguments

fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source.