use crate as burn;

use super::Initializer;
use crate::config::Config;
use crate::module::Module;
use crate::module::Param;
use crate::tensor::backend::Backend;
use crate::tensor::Tensor;
use burn_tensor::Int;

/// Configuration to create an [Embedding](Embedding) layer.
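///
/// # Example
///
/// A minimal sketch (not a doctest); `new` and `with_initializer` are
/// generated by `#[derive(Config)]`, and the sizes here are placeholders:
///
/// ```ignore
/// let config = EmbeddingConfig::new(1000, 64)
///     .with_initializer(Initializer::Normal(0.0, 0.02));
/// ```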
#[derive(Config)]
pub struct EmbeddingConfig {
    /// The number of embedding vectors.
    n_embedding: usize,
    /// The size of each vector.
    d_model: usize,
    /// The type of function used to initialize neural network parameters.
    #[config(default = "Initializer::Normal(0.0, 1.0)")]
    pub initializer: Initializer,
}

/// Lookup table to store a fixed number of vectors.
///
/// # Params
///
/// - weight: Matrix of shape `[n_embedding, d_model]` initialized from a normal distribution:
///     `N(0, 1)`
#[derive(Module, Debug)]
pub struct Embedding<B: Backend> {
    weight: Param<Tensor<B, 2>>,
}

impl EmbeddingConfig {
    /// Initialize a new [embedding](Embedding) module.
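    ///
    /// The weight matrix of shape `[n_embedding, d_model]` is created with the
    /// configured initializer and marked as requiring gradients.
    ///
    /// A minimal usage sketch (not a doctest), assuming a backend type `B` is
    /// in scope:
    ///
    /// ```ignore
    /// let embedding = EmbeddingConfig::new(1000, 64).init::<B>();
    /// ```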
    pub fn init<B: Backend>(&self) -> Embedding<B> {
        let weight = self
            .initializer
            .init([self.n_embedding, self.d_model])
            .require_grad();

        Embedding {
            weight: Param::from(weight),
        }
    }

    /// Initialize a new [embedding](Embedding) module with a [record](EmbeddingRecord).
    ///
    /// The weight is taken directly from the record; the configured
    /// `initializer` is not used.
    pub fn init_with<B: Backend>(&self, record: EmbeddingRecord<B>) -> Embedding<B> {
        Embedding {
            weight: record.weight,
        }
    }
}

impl<B: Backend> Embedding<B> {
    /// Applies the forward pass on the input tensor.
    ///
    /// # Shapes
    ///
    /// - input: `[batch_size, seq_length]`
    /// - output: `[batch_size, seq_length, d_model]`
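    ///
    /// # Example
    ///
    /// A minimal sketch (not a doctest): integer indices go in, and one
    /// `d_model`-sized vector comes out per index. The backend `B` and the
    /// sizes are placeholders.
    ///
    /// ```ignore
    /// let embedding = EmbeddingConfig::new(10, 4).init::<B>();
    /// // Six token indices laid out as a [2, 3] batch.
    /// let input = Tensor::<B, 1, Int>::arange(0..6).reshape([2, 3]);
    /// let output = embedding.forward(input); // shape [2, 3, 4]
    /// ```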
    pub fn forward(&self, input: Tensor<B, 2, Int>) -> Tensor<B, 3> {
        burn_tensor::module::embedding(self.weight.val(), input)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::TestBackend;
    use burn_tensor::Data;

    #[test]
    fn initializer_default() {
        TestBackend::seed(0);

        let config = EmbeddingConfig::new(100, 10);
        let embed = config.init::<TestBackend>();
        let weights = embed.weight.val().reshape([1000]);
        let (var_act, mean_act) = weights.var_mean(0);

        assert_eq!(config.initializer, Initializer::Normal(0.0, 1.0));
        var_act.to_data().assert_approx_eq(&Data::from([1.0f32]), 1);
        mean_act
            .to_data()
            .assert_approx_eq(&Data::from([0.0f32]), 1);
    }

    #[test]
    fn initializer_zeros() {
        TestBackend::seed(0);

        let config = EmbeddingConfig::new(5, 5).with_initializer(Initializer::Zeros);
        let embed = config.init::<TestBackend>();

        assert_eq!(config.initializer, Initializer::Zeros);
        embed
            .weight
            .to_data()
            .assert_approx_eq(&Data::zeros(embed.weight.shape()), 3);
    }
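
    // A minimal forward-pass sketch (not part of the original suite): checks
    // that indices of shape [2, 3] map to embeddings of shape
    // [2, 3, d_model]. The `arange` + `reshape` calls follow the style of the
    // tests above; adjust if the tensor API differs in your burn version.
    #[test]
    fn forward_shape() {
        let config = EmbeddingConfig::new(10, 4);
        let embed = config.init::<TestBackend>();

        let input = Tensor::<TestBackend, 1, Int>::arange(0..6).reshape([2, 3]);
        let output = embed.forward(input);

        assert_eq!(output.shape().dims, [2, 3, 4]);
    }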
}