Mistral AI Rust Client

Rust client for the Mistral AI API (unofficial).

Supported APIs

  • Chat without streaming
  • Chat without streaming (async)
  • Chat with streaming (async)
  • Embeddings
  • Embeddings (async)
  • List models
  • List models (async)
  • Function Calling
  • Function Calling (async)

Installation

You can install the library in your project using:

cargo add mistralai-client
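
Alternatively, you can declare the dependency in your Cargo.toml manually (using the crate version documented here, 0.6.0):

[dependencies]
mistralai-client = "0.6.0"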

Mistral API Key

You can get your Mistral API key here: https://docs.mistral.ai/#api-access.

As an environment variable

Just set the MISTRAL_API_KEY environment variable.
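
For example, in a Unix shell:

export MISTRAL_API_KEY=your_api_key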

As a client argument

use mistralai_client::v1::client::Client;

fn main() {
    let api_key = "your_api_key";

    let client = Client::new(Some(api_key.to_string()), None, None, None).unwrap();
}
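
Passing Some(api_key) takes precedence over the MISTRAL_API_KEY environment variable. The three trailing None arguments are optional overrides; per the crate docs they set the API endpoint, the maximum number of retries, and the request timeout.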

Usage

Chat without streaming

use mistralai_client::v1::{
    chat_completion::{ChatCompletionParams, ChatMessage, ChatMessageRole},
    client::Client,
    constants::Model,
};

fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::OpenMistral7b;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::user,
        content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
    }];
    let options = ChatCompletionParams {
        temperature: Some(0.0),
        random_seed: Some(42),
        ..Default::default()
    };

    let result = client.chat(model, messages, Some(options)).unwrap();
    println!("Assistant: {}", result.choices[0].message.content);
    // => "Assistant: Tower. [...]"
}

Chat without streaming (async)

use mistralai_client::v1::{
    chat_completion::{ChatCompletionParams, ChatMessage, ChatMessageRole},
    client::Client,
    constants::Model,
};

#[tokio::main]
async fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::OpenMistral7b;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::user,
        content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
    }];
    let options = ChatCompletionParams {
        temperature: Some(0.0),
        random_seed: Some(42),
        ..Default::default()
    };

    let result = client.chat_async(model, messages, Some(options)).await.unwrap();
    println!("Assistant: {}", result.choices[0].message.content);
    // => "Assistant: Tower. [...]"
}

Chat with streaming (async)

use futures::stream::StreamExt;
use mistralai_client::v1::{
    chat_completion::{ChatCompletionParams, ChatMessage, ChatMessageRole},
    client::Client,
    constants::Model,
};

#[tokio::main]
async fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::OpenMistral7b;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::user,
        content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
    }];
    let options = ChatCompletionParams {
        temperature: Some(0.0),
        random_seed: Some(42),
        ..Default::default()
    };

    let stream_result = client.chat_stream(model, messages, Some(options)).await;
    let mut stream = stream_result.expect("Failed to create stream.");
    while let Some(chunk_result) = stream.next().await {
        match chunk_result {
            Ok(chunk) => {
                println!("Assistant (message chunk): {}", chunk.choices[0].delta.content);
            }
            Err(e) => eprintln!("Error processing chunk: {:?}", e),
        }
    }
}
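
Each chunk carries only the part of the reply generated since the previous chunk, so you can rebuild the full message by appending each chunk's delta.content to a String as the loop runs.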

Embeddings

use mistralai_client::v1::{client::Client, constants::EmbedModel};

fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let model = EmbedModel::MistralEmbed;
    let input = vec!["Embed this sentence.", "As well as this one."]
        .iter()
        .map(|s| s.to_string())
        .collect();
    let options = None;

    let response = client.embeddings(model, input, options).unwrap();
    println!("Embeddings: {:?}", response.data);
    // => "Embeddings: [{...}, {...}]"
}
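
response.data contains one embedding entry per input string, in the same order as the inputs.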

Embeddings (async)

use mistralai_client::v1::{client::Client, constants::EmbedModel};

#[tokio::main]
async fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let model = EmbedModel::MistralEmbed;
    let input = vec!["Embed this sentence.", "As well as this one."]
        .iter()
        .map(|s| s.to_string())
        .collect();
    let options = None;

    let response = client.embeddings_async(model, input, options).await.unwrap();
    println!("Embeddings: {:?}", response.data);
    // => "Embeddings: [{...}, {...}]"
}

List models

use mistralai_client::v1::client::Client;

fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let result = client.list_models().unwrap();
    println!("First Model ID: {:?}", result.data[0].id);
    // => "First Model ID: open-mistral-7b"
}

List models (async)

use mistralai_client::v1::client::Client;

#[tokio::main]
async fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let result = client.list_models_async().await.unwrap();
    println!("First Model ID: {:?}", result.data[0].id);
    // => "First Model ID: open-mistral-7b"
}