// machi 0.8.1 — A Web3-native AI Agent Framework
// (cleaned: removed docs.rs page residue and leaked line-number gutter)
//! Ollama API client implementation.
//!
//! This module provides a client for the Ollama local LLM server, supporting:
//! - Chat completions (synchronous and streaming)
//! - Text embeddings

// Internal submodules; only `Ollama` and `OllamaConfig` are re-exported below.
mod chat; // chat completion support (see module docs above)
mod client; // defines the `Ollama` client type
mod config; // defines `OllamaConfig`
mod embedding; // text embedding support (see module docs above)
mod stream; // streaming support for chat completions

// Public API surface of this module: the client and its configuration.
pub use client::Ollama;
pub use config::OllamaConfig;