// ferrum_cli/lib.rs
1//! # Ferrum CLI Library
2//!
3//! Ollama-style command-line interface for the Ferrum LLM inference framework.
4//!
5//! ## Commands
6//!
7//! - `run`: Run a model and start interactive chat
8//! - `serve`: Start the HTTP inference server
9//! - `stop`: Stop the running server
10//! - `pull`: Download a model from HuggingFace
11//! - `list`: List downloaded models
12
13pub mod commands;
14pub mod config;
15pub mod utils;
16
17// Re-exports
18pub use config::CliConfig;