// ruvector_gnn/lib.rs

//! # RuVector GNN
//!
//! Graph Neural Network capabilities for RuVector, providing tensor operations,
//! GNN layers, compression, and differentiable search.
//!
//! ## Forgetting Mitigation (Issue #17)
//!
//! This crate includes comprehensive forgetting mitigation for continual learning:
//!
//! - **Adam Optimizer**: Full implementation with momentum and bias correction
//! - **Replay Buffer**: Experience replay with reservoir sampling for uniform coverage
//! - **EWC (Elastic Weight Consolidation)**: Prevents catastrophic forgetting
//! - **Learning Rate Scheduling**: Multiple strategies including warmup and plateau detection
//!
//! ### Usage Example
//!
//! ```rust,ignore
//! use ruvector_gnn::{
//!     training::{Optimizer, OptimizerType},
//!     replay::ReplayBuffer,
//!     ewc::ElasticWeightConsolidation,
//!     scheduler::{LearningRateScheduler, SchedulerType},
//! };
//!
//! // Create Adam optimizer
//! let mut optimizer = Optimizer::new(OptimizerType::Adam {
//!     learning_rate: 0.001,
//!     beta1: 0.9,
//!     beta2: 0.999,
//!     epsilon: 1e-8,
//! });
//!
//! // Create replay buffer for experience replay
//! let mut replay = ReplayBuffer::new(10000);
//!
//! // Create EWC for preventing forgetting
//! let mut ewc = ElasticWeightConsolidation::new(0.4);
//!
//! // Create learning rate scheduler
//! let mut scheduler = LearningRateScheduler::new(
//!     SchedulerType::CosineAnnealing { t_max: 100, eta_min: 1e-6 },
//!     0.001
//! );
//! ```
46#![warn(missing_docs)]
47#![deny(unsafe_op_in_unsafe_fn)]
48
49pub mod compress;
50pub mod error;
51pub mod ewc;
52pub mod layer;
53pub mod query;
54pub mod replay;
55pub mod scheduler;
56pub mod search;
57pub mod tensor;
58pub mod training;
59
60#[cfg(all(not(target_arch = "wasm32"), feature = "mmap"))]
61pub mod mmap;
62
63// Re-export commonly used types
64pub use compress::{CompressedTensor, CompressionLevel, TensorCompress};
65pub use error::{GnnError, Result};
66pub use ewc::ElasticWeightConsolidation;
67pub use layer::RuvectorLayer;
68pub use query::{QueryMode, QueryResult, RuvectorQuery, SubGraph};
69pub use replay::{DistributionStats, ReplayBuffer, ReplayEntry};
70pub use scheduler::{LearningRateScheduler, SchedulerType};
71pub use search::{cosine_similarity, differentiable_search, hierarchical_forward};
72pub use training::{
73 info_nce_loss, local_contrastive_loss, sgd_step, Loss, LossType, OnlineConfig, Optimizer,
74 OptimizerType, TrainConfig,
75};
76
77#[cfg(all(not(target_arch = "wasm32"), feature = "mmap"))]
78pub use mmap::{AtomicBitmap, MmapGradientAccumulator, MmapManager};
79
#[cfg(test)]
mod tests {
    // Pull in the crate's public re-exports so this module fails to compile
    // if any `pub use` above stops resolving.
    use super::*;

    /// Compile-time smoke test.
    ///
    /// The original body was `assert!(true)`, which is a no-op and is flagged
    /// by `clippy::assertions_on_constants`. The test's actual value is that
    /// `use super::*` forces the crate root (modules and re-exports) to
    /// compile; an empty body expresses that intent without the dead assert.
    #[test]
    fn test_basic() {
        // Intentionally empty: reaching this point means the crate compiled.
    }
}
89}