trustformers-models 0.1.1

Model implementations for TrustformeRS
Documentation
//! Performer: Fast Attention via Positive Orthogonal Random features (FAVOR+)
//!
//! Paper: "Rethinking Attention with Performers" (Choromanski et al., 2020)
//! Key innovation: Approximates softmax attention using random feature maps for O(n) complexity
//!
//! NOTE: these docs were previously written with `///` outer comments, which
//! attached them to `pub mod config` instead of this module; `//!` inner
//! comments make them the module's own rustdoc documentation.

/// Model configuration (hyperparameters, dimensions, feature-map settings).
pub mod config;
/// Model definition and task-specific heads.
pub mod model;

// Re-export the public API at the module root so callers can write
// `use trustformers_models::performer::PerformerModel` directly.
pub use config::PerformerConfig;
pub use model::{PerformerForMaskedLM, PerformerForSequenceClassification, PerformerModel};

#[cfg(test)]
mod tests;