trustformers-core 0.1.1 — Core traits and utilities for TrustformeRS (documentation excerpt; the rendered source-view line-number gutter from the original page has been removed).
//! Attention mechanism implementations and utilities.
//!
//! This module provides various attention mechanisms used in transformer architectures,
//! organized into submodules for better maintainability and code reuse.

// Submodules of the attention module. `common` holds shared configuration and
// helper types; `flash` and `multi_head` hold concrete attention implementations.
pub mod common;
pub mod flash;
pub mod multi_head;

// Flatten the public API: re-export the main types at the attention-module root
// so callers can write `attention::FlashAttention` etc. instead of reaching
// into the individual submodules.
pub use common::{
    AttentionConfig, AttentionOptimizationHints, AttentionProjections, AttentionUtils,
};
pub use flash::FlashAttention;
pub use multi_head::MultiHeadAttention;