Expand description
xAI (Grok) provider implementation for the LLM Kit.
This crate provides a provider implementation for xAI’s Grok models, supporting chat completions, image generation, reasoning modes, and integrated search.
§Examples
§Basic Usage with Client Builder (Recommended)
use llm_kit_xai::XaiClient;
// Create a provider using the client builder
let provider = XaiClient::new()
.api_key("your-api-key")
.build();
let model = provider.chat_model("grok-4");

§Alternative: Direct Instantiation
use llm_kit_xai::{XaiProvider, XaiProviderSettings};
// Create a provider using settings directly
let provider = XaiProvider::new(
XaiProviderSettings::new()
.with_api_key("your-api-key")
);
let model = provider.chat_model("grok-4");

§Chained Usage
use llm_kit_xai::XaiClient;
let model = XaiClient::new()
.api_key("your-api-key")
.build()
.chat_model("grok-3-fast");

§Environment Variable
use llm_kit_xai::XaiClient;
// API key will be read from XAI_API_KEY environment variable
let provider = XaiClient::new().build();
let model = provider.chat_model("grok-4-fast-reasoning");

§Tool Calling
use llm_kit_provider::LanguageModel;
use llm_kit_provider::language_model::call_options::LanguageModelCallOptions;
use llm_kit_provider::language_model::prompt::LanguageModelMessage;
use llm_kit_provider::language_model::tool::LanguageModelTool;
use llm_kit_provider::language_model::tool::function_tool::LanguageModelFunctionTool;
use llm_kit_xai::XaiClient;
use serde_json::json;
let provider = XaiClient::new()
.api_key("your-api-key")
.build();
let model = provider.chat_model("grok-beta");
// Define a tool using llm-kit-provider types
let weather_tool = LanguageModelFunctionTool::new(
"get_weather",
json!({
"type": "object",
"properties": {
"city": {"type": "string", "description": "The city name"}
},
"required": ["city"]
}),
)
.with_description("Get the current weather");
let tools = vec![LanguageModelTool::Function(weather_tool)];
let prompt = vec![LanguageModelMessage::user_text("What's the weather in Tokyo?")];
let options = LanguageModelCallOptions::new(prompt).with_tools(tools);
let result = model.do_generate(options).await?;

For full tool execution with llm-kit-core, see the examples directory.
§Image Generation
use llm_kit_provider::ImageModel;
use llm_kit_provider::image_model::call_options::ImageModelCallOptions;
use llm_kit_xai::XaiClient;
let provider = XaiClient::new()
.api_key("your-api-key")
.build();
let model = provider.image_model("grok-2-image-1212");
let options = ImageModelCallOptions::new(
"A futuristic cityscape at sunset".to_string(),
1
);
let result = model.do_generate(options).await?;
println!("Generated {} image(s)", result.images.len());

For more image generation examples with llm-kit-core, see the examples directory.
Re-exports§
pub use chat::SearchParameters;
pub use chat::SearchSource;
pub use chat::XaiChatLanguageModel;
pub use chat::XaiChatMessage;
pub use chat::XaiChatModelId;
pub use chat::XaiProviderOptions;
pub use chat::XaiUserContent;
pub use chat::convert_to_xai_chat_messages;
pub use client::XaiClient;
pub use error::XaiErrorData;
pub use error::XaiErrorDetails;
pub use provider::XaiProvider;
pub use settings::XaiProviderSettings;