rust_ai_core/ecosystem.rs
// SPDX-License-Identifier: MIT
// Copyright 2026 Tyler Zervas

//! Unified re-exports from the rust-ai ecosystem crates.
//!
//! This module provides convenient access to all rust-ai ecosystem crates
//! through a single import path. All crates are always available as they
//! are required dependencies of rust-ai-core.
//!
//! ## Available Modules
//!
//! | Module | Crate | Description |
//! |--------|-------|-------------|
//! | [`peft`] | peft-rs | LoRA, DoRA, AdaLoRA adapters |
//! | [`qlora`] | qlora-rs | 4-bit quantized fine-tuning |
//! | [`unsloth`] | unsloth-rs | Optimized transformer blocks |
//! | [`axolotl`] | axolotl-rs | Fine-tuning orchestration |
//! | [`bitnet`] | bitnet-quantize | BitNet 1.58-bit quantization |
//! | [`trit`] | trit-vsa | Ternary VSA operations |
//! | [`vsa_optim`] | vsa-optim-rs | VSA-based optimization |
//! | [`tritter`] | tritter-accel | Ternary GPU acceleration |
//!
//! ## Usage
//!
//! ```rust,ignore
//! use rust_ai_core::ecosystem::peft::{LoraConfig, LoraLinear};
//! use rust_ai_core::ecosystem::qlora::QLoraConfig;
//! use rust_ai_core::ecosystem::bitnet::TernaryLinear;
//! ```
//!
//! Or use the top-level facade for common operations:
//!
//! ```rust,ignore
//! use rust_ai_core::RustAI;
//!
//! let ai = RustAI::new(RustAIConfig::default())?;
//! let config = ai.finetune()
//!     .model("meta-llama/Llama-2-7b")
//!     .rank(64)
//!     .build()?;
//! ```

// =============================================================================
// PEFT (Parameter-Efficient Fine-Tuning)
// =============================================================================

/// LoRA, DoRA, and AdaLoRA adapter implementations.
///
/// Re-exports from the `peft-rs` crate.
///
/// ## Key Types
///
/// - `LoraConfig` - Configuration for LoRA adapters
/// - `LoraLinear` - LoRA-wrapped linear layer
/// - `DoraConfig` - Configuration for DoRA (Weight-Decomposed LoRA)
/// - `AdaLoraConfig` - Configuration for AdaLoRA (Adaptive Budget Allocation)
///
/// ## Example
///
/// ```rust,ignore
/// use rust_ai_core::ecosystem::peft::{LoraConfig, LoraLinear};
///
/// let config = LoraConfig::new(64, 16.0); // rank=64, alpha=16.0
/// let lora_layer = LoraLinear::new(base_linear, &config)?;
/// ```
pub mod peft {
    pub use peft_rs::*;
}

// =============================================================================
// QLoRA (Quantized LoRA)
// =============================================================================

/// 4-bit quantized LoRA for memory-efficient fine-tuning.
///
/// Re-exports from the `qlora-rs` crate.
///
/// ## Key Types
///
/// - `QLoraConfig` - Combined quantization and LoRA configuration
/// - `Nf4Quantizer` - NF4 (Normal Float 4-bit) quantizer
/// - `QuantizedLinear` - Quantized linear layer with LoRA
///
/// ## Example
///
/// ```rust,ignore
/// use rust_ai_core::ecosystem::qlora::{QLoraConfig, QuantizedLinear};
///
/// let config = QLoraConfig::default()
///     .with_lora_rank(32)
///     .with_bits(4);
/// let qlora_layer = QuantizedLinear::new(weights, &config)?;
/// ```
pub mod qlora {
    pub use qlora_rs::*;
}

// =============================================================================
// Unsloth (Optimized Transformers)
// =============================================================================

/// Optimized transformer building blocks.
///
/// Re-exports from the `unsloth-rs` crate.
///
/// ## Key Types
///
/// - `FlashAttention` - Memory-efficient attention implementation
/// - `SwiGLU` - SwiGLU activation (used in Llama models)
/// - `RMSNorm` - Root Mean Square layer normalization
///
/// ## Example
///
/// ```rust,ignore
/// use rust_ai_core::ecosystem::unsloth::{FlashAttention, AttentionConfig};
///
/// let attn = FlashAttention::new(&config, device)?;
/// let output = attn.forward(&q, &k, &v, mask)?;
/// ```
pub mod unsloth {
    pub use unsloth_rs::*;
}

// =============================================================================
// Axolotl (Fine-Tuning Orchestration)
// =============================================================================

/// YAML-driven fine-tuning configuration and orchestration.
///
/// Re-exports from the `axolotl-rs` crate.
///
/// ## Key Types
///
/// - `AxolotlConfig` - Main configuration struct (loadable from YAML)
/// - `TrainingPipeline` - Orchestrates the training workflow
/// - `DatasetConfig` - Dataset loading and preprocessing configuration
///
/// ## Example
///
/// ```rust,ignore
/// use rust_ai_core::ecosystem::axolotl::{AxolotlConfig, TrainingPipeline};
///
/// let config = AxolotlConfig::from_yaml("config.yaml")?;
/// let pipeline = TrainingPipeline::new(config)?;
/// pipeline.run()?;
/// ```
pub mod axolotl {
    pub use axolotl_rs::*;
}

// =============================================================================
// BitNet (1.58-bit Quantization)
// =============================================================================

/// Microsoft BitNet b1.58 quantization and inference.
///
/// Re-exports from the `bitnet-quantize` crate.
///
/// ## Key Types
///
/// - `BitNetConfig` - Configuration for BitNet quantization
/// - `TernaryLinear` - Linear layer with ternary weights (-1, 0, +1)
/// - `BitNetQuantizer` - Quantizes weights to a 1.58-bit representation
///
/// ## Example
///
/// ```rust,ignore
/// use rust_ai_core::ecosystem::bitnet::{BitNetConfig, TernaryLinear};
///
/// let config = BitNetConfig::default();
/// let ternary_layer = TernaryLinear::from_linear(linear, &config)?;
/// ```
pub mod bitnet {
    pub use bitnet_quantize::*;
}

// =============================================================================
// Trit-VSA (Ternary Vector Symbolic Architectures)
// =============================================================================

/// Balanced ternary arithmetic with bitsliced storage.
///
/// Re-exports from the `trit-vsa` crate.
///
/// ## Key Types
///
/// - `TritVector` - Balanced ternary vector (-1, 0, +1)
/// - `TritSlice` - Bitsliced storage for efficient operations
/// - `HdcEncoder` - Hyperdimensional computing encoder
///
/// ## Example
///
/// ```rust,ignore
/// use rust_ai_core::ecosystem::trit::{TritVector, TritOps};
///
/// let a = TritVector::random(10000);
/// let b = TritVector::random(10000);
/// let bound = a.bind(&b); // Binding (element-wise multiplication) in VSA
/// ```
pub mod trit {
    pub use trit_vsa::*;
}

// =============================================================================
// VSA-Optim (VSA-Based Optimization)
// =============================================================================

/// Deterministic training optimization using VSA compression.
///
/// Re-exports from the `vsa-optim-rs` crate.
///
/// ## Key Types
///
/// - `VsaOptimizer` - VSA-based optimizer with gradient prediction
/// - `CompressionConfig` - Configuration for gradient compression
/// - `GradientPredictor` - Closed-form gradient prediction
///
/// ## Example
///
/// ```rust,ignore
/// use rust_ai_core::ecosystem::vsa_optim::{VsaOptimizer, VsaConfig};
///
/// let config = VsaConfig::default()
///     .with_dimension(10000)
///     .with_compression_ratio(0.1);
/// let optimizer = VsaOptimizer::new(model.parameters(), config)?;
/// ```
pub mod vsa_optim {
    pub use vsa_optim_rs::*;
}

// =============================================================================
// Tritter-Accel (Ternary GPU Acceleration)
// =============================================================================

/// GPU-accelerated ternary operations for BitNet and VSA.
///
/// Re-exports from the `tritter-accel` crate.
///
/// ## Key Types
///
/// - `TritterRuntime` - GPU runtime for ternary operations
/// - `TernaryMatmul` - Optimized ternary matrix multiplication
/// - `PackedTernary` - Memory-efficient ternary storage
///
/// ## Example
///
/// ```rust,ignore
/// use rust_ai_core::ecosystem::tritter::{TritterRuntime, TernaryMatmul};
///
/// let runtime = TritterRuntime::new(device)?;
/// let matmul = TernaryMatmul::new(&runtime);
/// let output = matmul.forward(&weights, &input)?;
/// ```
pub mod tritter {
    #[allow(unused_imports)]
    pub use tritter_accel::*;
}

// =============================================================================
// ECOSYSTEM INFO
// =============================================================================

/// Information about the rust-ai ecosystem crates.
///
/// Provides version information and crate names for all ecosystem crates.
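///
/// ## Example
///
/// A minimal usage sketch (uses only items defined in this module):
///
/// ```rust,ignore
/// use rust_ai_core::ecosystem::EcosystemInfo;
///
/// let info = EcosystemInfo::new();
/// println!("peft-rs {}, bitnet-quantize {}", info.peft_version, info.bitnet_version);
/// ```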
#[derive(Debug, Clone)]
pub struct EcosystemInfo {
    /// peft-rs version
    pub peft_version: &'static str,
    /// qlora-rs version
    pub qlora_version: &'static str,
    /// unsloth-rs version
    pub unsloth_version: &'static str,
    /// axolotl-rs version
    pub axolotl_version: &'static str,
    /// bitnet-quantize version
    pub bitnet_version: &'static str,
    /// trit-vsa version
    pub trit_version: &'static str,
    /// vsa-optim-rs version
    pub vsa_optim_version: &'static str,
    /// tritter-accel version
    pub tritter_version: &'static str,
}

impl Default for EcosystemInfo {
    fn default() -> Self {
        Self::new()
    }
}

impl EcosystemInfo {
    /// Get ecosystem version information.
    #[must_use]
    pub const fn new() -> Self {
        Self {
            peft_version: "1.0.3",
            qlora_version: "1.0.5",
            unsloth_version: "1.0",
            axolotl_version: "1.1",
            bitnet_version: "0.2",
            trit_version: "0.2",
            vsa_optim_version: "0.1",
            tritter_version: "0.1",
        }
    }

    /// List all ecosystem crate names.
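    ///
    /// A minimal sketch:
    ///
    /// ```rust,ignore
    /// let names = EcosystemInfo::crate_names();
    /// assert!(names.contains(&"bitnet-quantize"));
    /// ```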
    #[must_use]
    pub const fn crate_names() -> &'static [&'static str] {
        &[
            "peft-rs",
            "qlora-rs",
            "unsloth-rs",
            "axolotl-rs",
            "bitnet-quantize",
            "trit-vsa",
            "vsa-optim-rs",
            "tritter-accel",
        ]
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_ecosystem_info() {
        let info = EcosystemInfo::new();
        assert!(!info.peft_version.is_empty());
        assert!(!info.qlora_version.is_empty());
        assert!(!info.tritter_version.is_empty());
    }

    #[test]
    fn test_crate_names() {
        let names = EcosystemInfo::crate_names();
        assert_eq!(names.len(), 8);
        assert!(names.contains(&"peft-rs"));
        assert!(names.contains(&"tritter-accel"));
    }
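
    // A small additional check (a minimal sketch): `Default::default()`
    // delegates to `new()`, so both constructors should report the same versions.
    #[test]
    fn test_default_matches_new() {
        let default_info = EcosystemInfo::default();
        let new_info = EcosystemInfo::new();
        assert_eq!(default_info.peft_version, new_info.peft_version);
        assert_eq!(default_info.tritter_version, new_info.tritter_version);
    }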
}