//! Attention mechanism implementations and utilities.
//!
//! This module provides various attention mechanisms used in transformer architectures,
//! organized into submodules for better maintainability and code reuse.
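//!
//! # Example
//!
//! A minimal usage sketch; the `MultiHeadAttention::new(embed_dim, num_heads)`
//! constructor shown here is an assumed signature, not confirmed by this module.
//!
//! ```ignore
//! use crate::attention::MultiHeadAttention;
//!
//! // Assumed constructor: embedding dimension 512, 8 attention heads.
//! let attn = MultiHeadAttention::new(512, 8);
//! ```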

// Submodule names are assumed from the re-exported types; adjust to the actual layout.
pub mod flash_attention;
pub mod multi_head_attention;

pub use flash_attention::FlashAttention;
pub use multi_head_attention::MultiHeadAttention;