//! # AutoAgents llama.cpp Backend
//!
//! Local LLM inference backend for AutoAgents using llama-cpp-2 bindings.
//!
//! ## Features
//!
//! - **GGUF Model Support**: Load local GGUF models via llama.cpp
//! - **Sampling Controls**: Temperature, top-k, top-p, penalties
//! - **Structured Output**: JSON schema hints with optional grammar enforcement
//! - **Streaming**: Token streaming for chat responses
//! - **Production Ready**: Typed error handling (`LlamaCppProviderError`) and fine-grained configuration (`LlamaCppConfig`)
//!
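//! ## Example
//!
//! A minimal sketch of wiring the public types together. `LlamaCppProvider::builder()`,
//! `LlamaCppConfigBuilder::new()`, and `ModelSource::Gguf` are taken from this crate's
//! public API; the builder method names below (`model_source`, `config`, `build`) are
//! assumptions for illustration only, so check the builder docs for the real signatures.
//!
//! ```ignore
//! use autoagents_llamacpp::{LlamaCppConfigBuilder, LlamaCppProvider, ModelSource};
//!
//! // Point the provider at a local GGUF file on disk.
//! let source = ModelSource::Gguf {
//!     model_path: "model.gguf".to_string(),
//! };
//!
//! // Sampling and runtime options live in `LlamaCppConfig`; defaults shown here.
//! let config = LlamaCppConfigBuilder::new().build();
//!
//! // Assemble the provider (method names assumed, not verified).
//! let provider = LlamaCppProvider::builder()
//!     .model_source(source)
//!     .config(config)
//!     .build();
//! ```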

pub mod builder;
pub mod config;
pub mod conversion;
pub mod error;
pub mod huggingface;
pub mod models;
pub mod provider;

// Re-exports for convenience
pub use builder::LlamaCppProviderBuilder;
pub use config::{
    LlamaCppConfig, LlamaCppConfigBuilder, LlamaCppReasoningFormat, LlamaCppSplitMode,
};
pub use error::LlamaCppProviderError;
pub use models::ModelSource;
pub use provider::LlamaCppProvider;

// Re-export llama-cpp types that users might need
pub use llama_cpp_2::model::params::LlamaSplitMode;

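// A hedged usage note: `LlamaSplitMode` controls how model layers are split
// across devices. The variant name below comes from the upstream `llama-cpp-2`
// crate and is assumed here, so confirm it against that crate's docs:
//
//     let _split_mode = LlamaSplitMode::Layer; // e.g., split layers across GPUs
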
#[cfg(test)]
mod tests {
    use super::*;

    // Smoke test: `ModelSource` is constructible from the crate root.
    #[test]
    fn test_library_imports() {
        let _source = ModelSource::Gguf {
            model_path: "model.gguf".to_string(),
        };
    }

    // Smoke test: both builders are reachable through the public API.
    #[test]
    fn test_builder_accessible() {
        let _builder = LlamaCppProvider::builder();
        let _config_builder = LlamaCppConfigBuilder::new();
    }
}