ruvector_gnn/lib.rs
1//! # RuVector GNN
2//!
3//! Graph Neural Network capabilities for RuVector, providing tensor operations,
4//! GNN layers, compression, and differentiable search.
5//!
6//! ## Forgetting Mitigation (Issue #17)
7//!
8//! This crate includes comprehensive forgetting mitigation for continual learning:
9//!
10//! - **Adam Optimizer**: Full implementation with momentum and bias correction
11//! - **Replay Buffer**: Experience replay with reservoir sampling for uniform coverage
12//! - **EWC (Elastic Weight Consolidation)**: Prevents catastrophic forgetting
13//! - **Learning Rate Scheduling**: Multiple strategies including warmup and plateau detection
14//!
15//! ### Usage Example
16//!
17//! ```rust,ignore
18//! use ruvector_gnn::{
19//! training::{Optimizer, OptimizerType},
20//! replay::ReplayBuffer,
21//! ewc::ElasticWeightConsolidation,
22//! scheduler::{LearningRateScheduler, SchedulerType},
23//! };
24//!
25//! // Create Adam optimizer
26//! let mut optimizer = Optimizer::new(OptimizerType::Adam {
27//! learning_rate: 0.001,
28//! beta1: 0.9,
29//! beta2: 0.999,
30//! epsilon: 1e-8,
31//! });
32//!
33//! // Create replay buffer for experience replay
34//! let mut replay = ReplayBuffer::new(10000);
35//!
36//! // Create EWC for preventing forgetting
37//! let mut ewc = ElasticWeightConsolidation::new(0.4);
38//!
39//! // Create learning rate scheduler
40//! let mut scheduler = LearningRateScheduler::new(
41//! SchedulerType::CosineAnnealing { t_max: 100, eta_min: 1e-6 },
42//! 0.001
43//! );
44//! ```
45
46#![warn(missing_docs)]
47#![deny(unsafe_op_in_unsafe_fn)]
48
49pub mod compress;
50pub mod error;
51pub mod ewc;
52pub mod graphmae;
53pub mod layer;
54pub mod query;
55pub mod replay;
56pub mod scheduler;
57pub mod search;
58pub mod tensor;
59pub mod training;
60
61#[cfg(all(not(target_arch = "wasm32"), feature = "mmap"))]
62pub mod mmap;
63
64#[cfg(all(feature = "cold-tier", not(target_arch = "wasm32")))]
65pub mod cold_tier;
66
67// Re-export commonly used types
68pub use compress::{CompressedTensor, CompressionLevel, TensorCompress};
69pub use error::{GnnError, Result};
70pub use ewc::ElasticWeightConsolidation;
71pub use graphmae::{
72 sce_loss, mse_loss, FeatureMasking, GATEncoder, GraphData, GraphMAE, GraphMAEConfig,
73 GraphMAEDecoder, LossFn, MaskResult,
74};
75pub use layer::RuvectorLayer;
76pub use query::{QueryMode, QueryResult, RuvectorQuery, SubGraph};
77pub use replay::{DistributionStats, ReplayBuffer, ReplayEntry};
78pub use scheduler::{LearningRateScheduler, SchedulerType};
79pub use search::{cosine_similarity, differentiable_search, hierarchical_forward};
80pub use training::{
81 info_nce_loss, local_contrastive_loss, sgd_step, Loss, LossType, OnlineConfig, Optimizer,
82 OptimizerType, TrainConfig,
83};
84
85#[cfg(all(not(target_arch = "wasm32"), feature = "mmap"))]
86pub use mmap::{AtomicBitmap, MmapGradientAccumulator, MmapManager};
87
88#[cfg(test)]
89mod tests {
90 use super::*;
91
92 #[test]
93 fn test_basic() {
94 // Basic smoke test to ensure the crate compiles
95 assert!(true);
96 }
97}