//! syntaxdot-transformers 0.1.0
//!
//! Transformer architectures, such as BERT.
use tch::Tensor;

/// Trait to get the attention of a layer.
/// Trait to get the attention of a layer.
///
/// Implemented by layer-output types that may carry the attention
/// weights computed by a transformer layer.
pub trait LayerAttention {
    /// Get the attention of a layer.
    ///
    /// NOTE(review): presumably returns `None` when the implementation
    /// did not store attention weights (e.g. they were not requested
    /// during the forward pass) — confirm against implementors.
    fn layer_attention(&self) -> Option<&Tensor>;
}

/// Trait to get the output of a layer.
/// Trait to get the output of a layer.
///
/// Implemented by types that wrap the hidden representation produced
/// by a transformer layer.
pub trait LayerOutput {
    /// Get the output of a layer.
    ///
    /// Returns a borrowed [`Tensor`]; unlike [`LayerAttention`], the
    /// output is always available, so no `Option` is needed.
    fn layer_output(&self) -> &Tensor;
}