burn_core/nn/pool/
avg_pool1d.rs

use crate as burn;
use crate::nn::conv::checks::check_same_padding_support;

use crate::config::Config;
use crate::module::{Content, DisplaySettings, ModuleDisplay};
use crate::module::{Ignored, Module};
use crate::nn::PaddingConfig1d;
use crate::tensor::Tensor;
use crate::tensor::backend::Backend;

use crate::tensor::module::avg_pool1d;

/// Configuration to create a [1D avg pooling](AvgPool1d) layer using the [init function](AvgPool1dConfig::init).
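///
/// # Example
///
/// A construction sketch, assuming the `with_*` setters generated by the [`Config`] derive:
///
/// ```ignore
/// let pool = AvgPool1dConfig::new(3)              // kernel_size = 3
///     .with_stride(2)
///     .with_padding(PaddingConfig1d::Explicit(1)) // symmetric padding of 1
///     .with_count_include_pad(false)              // exclude padding from the denominator
///     .init();
/// ```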
#[derive(Config, Debug)]
pub struct AvgPool1dConfig {
    /// The size of the kernel.
    pub kernel_size: usize,
    /// The stride.
    #[config(default = "1")]
    pub stride: usize,
    /// The padding configuration.
    ///
    /// ### Warning
    /// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
    /// size is not supported as it will not produce the same output size.
    #[config(default = "PaddingConfig1d::Valid")]
    pub padding: PaddingConfig1d,
    /// If the padding is counted in the denominator when computing the average.
    #[config(default = "true")]
    pub count_include_pad: bool,
}

/// Applies a 1D avg pooling over input tensors.
///
/// Should be created with [AvgPool1dConfig](AvgPool1dConfig).
///
/// # Remarks
///
/// When `count_include_pad` is `true` (the default), the zero-padding values are
/// included in the calculation of the average: the zeros are counted as legitimate
/// values and contribute to the denominator. This is equivalent to
/// `torch.nn.AvgPool1d` with `count_include_pad=True`. Set `count_include_pad`
/// to `false` to exclude the padding from the denominator.
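///
/// For example, with `PaddingConfig1d::Explicit(1)` and a kernel of size 3, the
/// left-most window covers `[0, x0, x1]`: with `count_include_pad = true` the result
/// is `(0 + x0 + x1) / 3`, while with `count_include_pad = false` it is `(x0 + x1) / 2`.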
#[derive(Module, Clone, Debug)]
#[module(custom_display)]
pub struct AvgPool1d {
    /// The stride.
    pub stride: usize,
    /// The size of the kernel.
    pub kernel_size: usize,
    /// The padding configuration.
    pub padding: Ignored<PaddingConfig1d>,
    /// If the padding is counted in the denominator when computing the average.
    pub count_include_pad: bool,
}

impl ModuleDisplay for AvgPool1d {
    fn custom_settings(&self) -> Option<DisplaySettings> {
        DisplaySettings::new()
            .with_new_line_after_attribute(false)
            .optional()
    }

    fn custom_content(&self, content: Content) -> Option<Content> {
        content
            .add("kernel_size", &self.kernel_size)
            .add("stride", &self.stride)
            .add("padding", &self.padding)
            .add("count_include_pad", &self.count_include_pad)
            .optional()
    }
}

impl AvgPool1dConfig {
    /// Initialize a new [avg pool 1d](AvgPool1d) module.
    pub fn init(&self) -> AvgPool1d {
        if self.padding == PaddingConfig1d::Same {
            check_same_padding_support(&[self.kernel_size]);
        }
        AvgPool1d {
            stride: self.stride,
            kernel_size: self.kernel_size,
            padding: Ignored(self.padding.clone()),
            count_include_pad: self.count_include_pad,
        }
    }
}

impl AvgPool1d {
    /// Applies the forward pass on the input tensor.
    ///
    /// See [avg_pool1d](crate::tensor::module::avg_pool1d) for more information.
    ///
    /// # Shapes
    ///
    /// - input: `[batch_size, channels, length_in]`
    /// - output: `[batch_size, channels, length_out]`
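    ///
    /// where `length_out = (length_in + 2 * padding - kernel_size) / stride + 1`
    /// (integer division; `padding` is `0` for the default `Valid` padding).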
    pub fn forward<B: Backend>(&self, input: Tensor<B, 3>) -> Tensor<B, 3> {
        let [_batch_size, _channels, length] = input.dims();
        let padding = self
            .padding
            .calculate_padding_1d(length, self.kernel_size, self.stride);

        avg_pool1d(
            input,
            self.kernel_size,
            self.stride,
            padding,
            self.count_include_pad,
        )
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    #[should_panic = "Same padding with an even kernel size is not supported"]
    fn same_with_even_kernel_is_invalid() {
        let config = AvgPool1dConfig::new(2).with_padding(PaddingConfig1d::Same);
        let _ = config.init();
    }

    #[test]
    fn display() {
        let config = AvgPool1dConfig::new(3);
        let layer = config.init();

        assert_eq!(
            alloc::format!("{}", layer),
            "AvgPool1d {kernel_size: 3, stride: 1, padding: Valid, count_include_pad: true}"
        );
    }
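
    // A shape sanity-check sketch: with kernel_size = 3, stride = 1 and the default
    // `Valid` padding, length_out = (length_in - kernel_size) / stride + 1. Assumes
    // the crate-level `TestBackend` test alias and a default device are available.
    #[test]
    fn forward_output_shape() {
        let layer = AvgPool1dConfig::new(3).init();

        let input = Tensor::<crate::TestBackend, 3>::zeros([2, 4, 10], &Default::default());
        let output = layer.forward(input);

        // [batch_size, channels, length_out] = [2, 4, (10 - 3) / 1 + 1]
        assert_eq!(output.dims(), [2, 4, 8]);
    }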
}