// ruvector_gnn/lib.rs
1//! # RuVector GNN
2//!
3//! Graph Neural Network capabilities for RuVector, providing tensor operations,
4//! GNN layers, compression, and differentiable search.
5//!
6//! ## Forgetting Mitigation (Issue #17)
7//!
8//! This crate includes comprehensive forgetting mitigation for continual learning:
9//!
10//! - **Adam Optimizer**: Full implementation with momentum and bias correction
11//! - **Replay Buffer**: Experience replay with reservoir sampling for uniform coverage
12//! - **EWC (Elastic Weight Consolidation)**: Prevents catastrophic forgetting
13//! - **Learning Rate Scheduling**: Multiple strategies including warmup and plateau detection
14//!
15//! ### Usage Example
16//!
//! ```rust,ignore
//! use ruvector_gnn::{
//!     training::{Optimizer, OptimizerType},
//!     replay::ReplayBuffer,
//!     ewc::ElasticWeightConsolidation,
//!     scheduler::{LearningRateScheduler, SchedulerType},
//! };
//!
//! // Create Adam optimizer
//! let mut optimizer = Optimizer::new(OptimizerType::Adam {
//!     learning_rate: 0.001,
//!     beta1: 0.9,
//!     beta2: 0.999,
//!     epsilon: 1e-8,
//! });
//!
//! // Create replay buffer for experience replay
//! let mut replay = ReplayBuffer::new(10000);
//!
//! // Create EWC for preventing forgetting
//! let mut ewc = ElasticWeightConsolidation::new(0.4);
//!
//! // Create learning rate scheduler
//! let mut scheduler = LearningRateScheduler::new(
//!     SchedulerType::CosineAnnealing { t_max: 100, eta_min: 1e-6 },
//!     0.001,
//! );
//! ```
45
// Lint policy: require docs on all public items, and forbid implicit
// unsafe-op-in-unsafe-fn (every unsafe operation needs its own block).
#![warn(missing_docs)]
#![deny(unsafe_op_in_unsafe_fn)]

// Core modules — one per capability area of the crate.
pub mod compress;
pub mod error;
pub mod ewc;
pub mod layer;
pub mod query;
pub mod replay;
pub mod scheduler;
pub mod search;
pub mod tensor;
pub mod training;

// Memory-mapped storage: gated on the `mmap` feature and unavailable on
// wasm32 targets (no mmap support in that environment).
#[cfg(all(not(target_arch = "wasm32"), feature = "mmap"))]
pub mod mmap;

// Cold-tier storage: gated on the `cold-tier` feature, also excluded on wasm32.
#[cfg(all(feature = "cold-tier", not(target_arch = "wasm32")))]
pub mod cold_tier;

// Re-export commonly used types
pub use compress::{CompressedTensor, CompressionLevel, TensorCompress};
pub use error::{GnnError, Result};
pub use ewc::ElasticWeightConsolidation;
pub use layer::RuvectorLayer;
pub use query::{QueryMode, QueryResult, RuvectorQuery, SubGraph};
pub use replay::{DistributionStats, ReplayBuffer, ReplayEntry};
pub use scheduler::{LearningRateScheduler, SchedulerType};
pub use search::{cosine_similarity, differentiable_search, hierarchical_forward};
pub use training::{
    info_nce_loss, local_contrastive_loss, sgd_step, Loss, LossType, OnlineConfig, Optimizer,
    OptimizerType, TrainConfig,
};

// Mmap re-exports mirror the module's own cfg gate above.
#[cfg(all(not(target_arch = "wasm32"), feature = "mmap"))]
pub use mmap::{AtomicBitmap, MmapGradientAccumulator, MmapManager};
82
#[cfg(test)]
mod tests {
    /// Compile smoke test: reaching (and passing) this empty test body
    /// proves the crate's modules and re-exports all resolve and build.
    ///
    /// The previous body was `assert!(true)`, which is a no-op and is
    /// flagged by clippy (`assert_on_constants`); an empty `#[test]`
    /// expresses the same intent without the dead assertion. The unused
    /// `use super::*;` was removed for the same reason (unused-import
    /// warning once nothing in the body references crate items).
    #[test]
    fn test_basic() {
        // Intentionally empty — success is a clean compile and run.
    }
}