// border_tch_agent/mlp/mlp2.rs

1use super::{mlp, MlpConfig};
2use crate::model::SubModel;
3use tch::{nn, nn::Module, Device, Tensor};
4
5#[allow(clippy::clippy::upper_case_acronyms)]
6/// Multilayer perceptron that outputs two tensors of the same size.
7pub struct Mlp2 {
8    in_dim: i64,
9    units: Vec<i64>,
10    out_dim: i64,
11    activation_out: bool,
12    device: Device,
13    head1: nn::Linear,
14    head2: nn::Linear,
15    seq: nn::Sequential,
16}
17
18impl SubModel for Mlp2 {
19    type Config = MlpConfig;
20    type Input = Tensor;
21    type Output = (Tensor, Tensor);
22
23    fn forward(&self, input: &Self::Input) -> Self::Output {
24        let x = self.seq.forward(&input.to(self.device));
25        let mean = x.apply(&self.head1);
26        let std = x.apply(&self.head2).exp();
27        (mean, std)
28    }
29
30    /// TODO: support activation_out
31    fn build(var_store: &nn::VarStore, config: Self::Config) -> Self {
32        let seq = mlp("al", var_store, &config);
33        let out_dim = config.out_dim;
34        let in_dim = *config.units.last().unwrap();
35        let p = &var_store.root();
36
37        let head1 = nn::linear(p / "ml", in_dim, out_dim as _, Default::default());
38        let head2 = nn::linear(p / "sl", in_dim, out_dim as _, Default::default());
39
40        Self {
41            in_dim: config.in_dim,
42            units: config.units,
43            out_dim: config.out_dim,
44            activation_out: false,
45            device: var_store.device(),
46            head1,
47            head2,
48            seq,
49        }
50    }
51
52    fn clone_with_var_store(&self, var_store: &nn::VarStore) -> Self {
53        let config = Self::Config {
54            in_dim: self.in_dim,
55            units: self.units.clone(),
56            out_dim: self.out_dim,
57            activation_out: self.activation_out,
58        };
59
60        Self::build(var_store, config)
61    }
62}