// ruvector_attention/lib.rs
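
//! Attention mechanisms for ruvector: scaled dot-product and multi-head
//! attention, sparse/linear/flash variants, hyperbolic and mixed-curvature
//! geometry, mixture-of-experts routing, graph attention, and training
//! utilities (losses, optimizers, schedulers, negative miners).
//!
//! # Example
//!
//! Building an attention config through the re-exported builder; this
//! mirrors the unit test at the bottom of this file:
//!
//! ```
//! use ruvector_attention::AttentionConfig;
//!
//! let config = AttentionConfig::builder()
//!     .dim(64)
//!     .num_heads(4)
//!     .build()
//!     .unwrap();
//! assert_eq!(config.head_dim(), 16);
//! ```
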
pub mod attention;
pub mod config;
pub mod error;
pub mod traits;
pub mod utils;
pub mod hyperbolic;
pub mod sparse;
pub mod moe;
pub mod graph;
pub mod training;
pub mod sdk;

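// Core building blocks, configs, errors, and traits, re-exported flat so
// downstream users can write e.g. `ruvector_attention::MultiHeadAttention`.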
pub use attention::{MultiHeadAttention, ScaledDotProductAttention};
pub use config::{AttentionConfig, GraphAttentionConfig, SparseAttentionConfig};
pub use error::{AttentionError, AttentionResult};
pub use traits::{
    Attention, EdgeInfo, GeometricAttention, Gradients, GraphAttention, SparseAttention,
    SparseMask, TrainableAttention,
};
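
// Hyperbolic geometry: Poincaré-ball primitives (distance, Möbius addition,
// exp/log maps, ball projection) and the attention variants built on them.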
pub use hyperbolic::{
    poincare_distance, mobius_add, exp_map, log_map, project_to_ball,
    HyperbolicAttention, HyperbolicAttentionConfig,
    MixedCurvatureAttention, MixedCurvatureConfig,
};

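// Efficient attention: sparse-mask construction plus local/global, linear,
// and flash-style implementations.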
pub use sparse::{
    SparseMaskBuilder, AttentionMask,
    LocalGlobalAttention, LinearAttention, FlashAttention,
};

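// Mixture-of-experts attention: expert implementations and routing.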
pub use moe::{
    MoEAttention, MoEConfig,
    Expert, ExpertType, StandardExpert, HyperbolicExpert, LinearExpert,
    Router, LearnedRouter, TopKRouting,
};

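// Graph attention: edge-featured attention, rotary position encodings for
// graphs (RoPE), and dual-space attention.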
pub use graph::{
    EdgeFeaturedAttention, EdgeFeaturedConfig,
    GraphRoPE, RoPEConfig,
    DualSpaceAttention, DualSpaceConfig,
};

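// Training utilities: contrastive losses, optimizers, curriculum and
// temperature schedules, and negative-mining strategies.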
pub use training::{
    Loss, InfoNCELoss, LocalContrastiveLoss, SpectralRegularization, Reduction,
    Optimizer, SGD, Adam, AdamW,
    CurriculumScheduler, CurriculumStage, TemperatureAnnealing, DecayType,
    NegativeMiner, HardNegativeMiner, MiningStrategy,
};

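// High-level SDK: builder, pipeline composition, and preset configurations.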
pub use sdk::{AttentionBuilder, AttentionPipeline, presets};

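/// Crate version string, captured from `Cargo.toml` at compile time.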
pub const VERSION: &str = env!("CARGO_PKG_VERSION");

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_version() {
        assert!(!VERSION.is_empty());
    }

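    // Smoke test for the re-exported config builder: a 64-dim model split
    // across 4 heads should give 16-dim heads.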
    #[test]
    fn test_basic_attention_workflow() {
        let config = AttentionConfig::builder()
            .dim(64)
            .num_heads(4)
            .build()
            .unwrap();

        assert_eq!(config.dim, 64);
        assert_eq!(config.num_heads, 4);
        assert_eq!(config.head_dim(), 16);
    }
}