// concision_transformer/ — lib.rs

/*
    Appellation: concision-transformers <library>
    Contrib: FL03 <jo3mccain@icloud.com>
*/
//! # Transformers
//!
//! ### Resources
//!
//! - [Attention is All You Need](https://arxiv.org/abs/1706.03762)

11#![cfg_attr(not(feature = "std"), no_std)]
12
13#[cfg(feature = "alloc")]
14extern crate alloc;
15
16extern crate concision_core as concision;
17extern crate concision_linear as linear;
18extern crate ndarray as nd;
19
20pub use self::attention::{scaled_dot_product_attention, AttentionHead};
21pub use self::params::*;
22pub use self::primitives::*;
23pub use self::transformer::Transformer;
24
25#[macro_use]
26pub(crate) mod macros;
27pub(crate) mod primitives;
28pub(crate) mod transformer;
29
30pub mod attention;
31pub mod codec;
32pub mod model;
33pub mod ops;
34pub mod params;
35
36pub(crate) mod impls {
37    pub mod impl_head;
38    pub mod impl_linalg;
39    pub mod impl_params;
40}
41
42pub mod prelude {
43    pub use super::attention::prelude::*;
44    pub use super::Transformer;
45}