pub struct Client {
pub api_key: String,
pub endpoint: String,
pub max_retries: u32,
pub timeout: u32,
/* private fields */
}
Fields§
§api_key: String
§endpoint: String
§max_retries: u32
§timeout: u32
Implementations§
Source§impl Client
impl Client
Sourcepub fn new(
api_key: Option<String>,
endpoint: Option<String>,
max_retries: Option<u32>,
timeout: Option<u32>,
) -> Result<Self, ClientError>
pub fn new( api_key: Option<String>, endpoint: Option<String>, max_retries: Option<u32>, timeout: Option<u32>, ) -> Result<Self, ClientError>
Constructs a new Client
.
§Arguments
api_key - An optional API key. If not provided, the method will try to use the MISTRAL_API_KEY environment variable.
endpoint - An optional custom API endpoint. Defaults to the official API endpoint if not provided.
max_retries - Optional maximum number of retries for failed requests. Defaults to 5.
timeout - Optional timeout in seconds for requests. Defaults to 120.
§Examples
use mistralai_client::v1::client::Client;
let client = Client::new(Some("your_api_key_here".to_string()), None, Some(3), Some(60));
assert!(client.is_ok());
§Errors
This method fails whenever neither the api_key is provided nor the MISTRAL_API_KEY environment variable is set.
Sourcepub fn chat(
&self,
model: Model,
messages: Vec<ChatMessage>,
options: Option<ChatParams>,
) -> Result<ChatResponse, ApiError>
pub fn chat( &self, model: Model, messages: Vec<ChatMessage>, options: Option<ChatParams>, ) -> Result<ChatResponse, ApiError>
Synchronously sends a chat completion request and returns the response.
§Arguments
model - The [Model] to use for the chat completion.
messages - A vector of [ChatMessage] to send as part of the chat.
options - Optional [ChatParams] to customize the request.
§Returns
Returns a Result containing the ChatResponse if the request is successful, or an [ApiError] if there is an error.
§Examples
use mistralai_client::v1::{
chat::{ChatMessage, ChatMessageRole},
client::Client,
constants::Model,
};
let client = Client::new(None, None, None, None).unwrap();
let messages = vec![ChatMessage {
role: ChatMessageRole::User,
content: "Hello, world!".to_string(),
tool_calls: None,
}];
let response = client.chat(Model::OpenMistral7b, messages, None).unwrap();
println!("{:?}: {}", response.choices[0].message.role, response.choices[0].message.content);
Sourcepub async fn chat_async(
&self,
model: Model,
messages: Vec<ChatMessage>,
options: Option<ChatParams>,
) -> Result<ChatResponse, ApiError>
pub async fn chat_async( &self, model: Model, messages: Vec<ChatMessage>, options: Option<ChatParams>, ) -> Result<ChatResponse, ApiError>
Asynchronously sends a chat completion request and returns the response.
§Arguments
model - The [Model] to use for the chat completion.
messages - A vector of [ChatMessage] to send as part of the chat.
options - Optional [ChatParams] to customize the request.
§Returns
Returns a Result containing the ChatResponse if the request is successful, or an [ApiError] if there is an error.
§Examples
use mistralai_client::v1::{
chat::{ChatMessage, ChatMessageRole},
client::Client,
constants::Model,
};
#[tokio::main]
async fn main() {
let client = Client::new(None, None, None, None).unwrap();
let messages = vec![ChatMessage {
role: ChatMessageRole::User,
content: "Hello, world!".to_string(),
tool_calls: None,
}];
let response = client.chat_async(Model::OpenMistral7b, messages, None).await.unwrap();
println!("{:?}: {}", response.choices[0].message.role, response.choices[0].message.content);
}
Sourcepub async fn chat_stream(
&self,
model: Model,
messages: Vec<ChatMessage>,
options: Option<ChatParams>,
) -> Result<impl Stream<Item = Result<Vec<ChatStreamChunk>, ApiError>>, ApiError>
pub async fn chat_stream( &self, model: Model, messages: Vec<ChatMessage>, options: Option<ChatParams>, ) -> Result<impl Stream<Item = Result<Vec<ChatStreamChunk>, ApiError>>, ApiError>
Asynchronously sends a chat completion request and returns a stream of message chunks.
§Arguments
model - The [Model] to use for the chat completion.
messages - A vector of [ChatMessage] to send as part of the chat.
options - Optional [ChatParams] to customize the request.
§Returns
Returns a Result containing a Stream of ChatStreamChunk if the request is successful, or an [ApiError] if there is an error.
§Examples
use futures::stream::StreamExt;
use mistralai_client::v1::{
chat::{ChatMessage, ChatMessageRole},
client::Client,
constants::Model,
};
use std::io::{self, Write};
#[tokio::main]
async fn main() {
let client = Client::new(None, None, None, None).unwrap();
let messages = vec![ChatMessage {
role: ChatMessageRole::User,
content: "Hello, world!".to_string(),
tool_calls: None,
}];
let stream_result = client
.chat_stream(Model::OpenMistral7b, messages, None)
.await
.unwrap();
stream_result
.for_each(|chunk_result| async {
match chunk_result {
Ok(chunks) => chunks.iter().for_each(|chunk| {
print!("{}", chunk.choices[0].delta.content);
io::stdout().flush().unwrap();
// => "Once upon a time, [...]"
}),
Err(error) => {
eprintln!("Error processing chunk: {:?}", error)
}
}
})
.await;
print!("\n") // To persist the last chunk output.
}
pub fn embeddings( &self, model: EmbedModel, input: Vec<String>, options: Option<EmbeddingRequestOptions>, ) -> Result<EmbeddingResponse, ApiError>
pub async fn embeddings_async( &self, model: EmbedModel, input: Vec<String>, options: Option<EmbeddingRequestOptions>, ) -> Result<EmbeddingResponse, ApiError>
pub fn get_last_function_call_result(&self) -> Option<Box<dyn Any + Send>>
pub fn list_models(&self) -> Result<ModelListResponse, ApiError>
pub async fn list_models_async(&self) -> Result<ModelListResponse, ApiError>
pub fn register_function(&mut self, name: String, function: Box<dyn Function>)
Trait Implementations§
Auto Trait Implementations§
impl Freeze for Client
impl RefUnwindSafe for Client
impl Send for Client
impl Sync for Client
impl Unpin for Client
impl UnwindSafe for Client
Blanket Implementations§
Source§impl<T> BorrowMut<T> for Twhere
T: ?Sized,
impl<T> BorrowMut<T> for Twhere
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more