reasonkit/llm/
mod.rs

//! Provider-neutral LLM infrastructure.
//!
//! This module hosts low-level clients/adapters for LLM backends that may not fit
//! the OpenAI-compatible `/chat/completions` shape.
//!
//! Current submodules:
//! - `ollama` - Minimal client for Ollama `/api/chat` (supports Ollama Cloud via local server)
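//!
//! Ollama's `/api/chat` accepts a JSON body carrying a model name and a list of
//! chat messages, and with `"stream": false` it replies with a single JSON object
//! whose `message.content` holds the assistant's reply. The sketch below is
//! illustrative only: it assumes `reqwest` and `serde_json` as dependencies and
//! uses hypothetical names; the real client lives in the `ollama` submodule.
//!
//! ```ignore
//! // Hypothetical sketch of the raw request shape, not the `ollama` submodule's API.
//! async fn chat_once(base_url: &str, model: &str, prompt: &str) -> reqwest::Result<String> {
//!     let body = serde_json::json!({
//!         "model": model,
//!         "messages": [{ "role": "user", "content": prompt }],
//!         // Disable streaming so the reply arrives as one JSON object.
//!         "stream": false
//!     });
//!     let resp: serde_json::Value = reqwest::Client::new()
//!         .post(format!("{base_url}/api/chat"))
//!         .json(&body)
//!         .send()
//!         .await?
//!         .json()
//!         .await?;
//!     // Non-streaming responses put the reply text at `message.content`.
//!     Ok(resp["message"]["content"].as_str().unwrap_or_default().to_string())
//! }
//! ```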

pub mod ollama;