use alloc::format;
use crate as burn;
use crate::config::Config;
use crate::module::{Content, DisplaySettings, Ignored, Module, ModuleDisplay, Param};
use crate::nn::Initializer;
use crate::nn::PaddingConfig2d;
use crate::tensor::backend::Backend;
use crate::tensor::module::conv2d;
use crate::tensor::ops::ConvOptions;
use crate::tensor::Tensor;
use crate::nn::conv::checks;
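/// Configuration to create a [2D convolution](Conv2d) layer, using the [init function](Conv2dConfig::init).
///
/// A minimal usage sketch; `MyBackend` is a placeholder for any type implementing [`Backend`]:
///
/// ```ignore
/// // 3 input channels, 16 output channels, 3x3 kernel, stride of 2 in both dimensions.
/// let config = Conv2dConfig::new([3, 16], [3, 3]).with_stride([2, 2]);
/// let conv = config.init::<MyBackend>(&device);
/// ```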
#[derive(Config, Debug)]
pub struct Conv2dConfig {
    /// The number of input and output channels, `[channels_in, channels_out]`.
    pub channels: [usize; 2],
    /// The size of the kernel, `[height, width]`.
    pub kernel_size: [usize; 2],
    /// The stride of the convolution.
    #[config(default = "[1, 1]")]
    pub stride: [usize; 2],
    /// Spacing between kernel elements.
    #[config(default = "[1, 1]")]
    pub dilation: [usize; 2],
    /// Controls the connections between input and output channels; both channel counts must be divisible by `groups`.
    #[config(default = "1")]
    pub groups: usize,
    /// The padding configuration.
    #[config(default = "PaddingConfig2d::Valid")]
    pub padding: PaddingConfig2d,
    /// If a learnable bias should be added to the output.
    #[config(default = true)]
    pub bias: bool,
    /// The initializer used for the weight and bias parameters.
    #[config(
        default = "Initializer::KaimingUniform{gain:1.0/num_traits::Float::sqrt(3.0),fan_out_only:false}"
    )]
    pub initializer: Initializer,
}
/// Applies a 2D convolution over input tensors.
///
/// Should be created with [Conv2dConfig].
#[derive(Module, Debug)]
#[module(custom_display)]
pub struct Conv2d<B: Backend> {
    /// The learnable weights, of shape `[channels_out, channels_in / groups, kernel_height, kernel_width]`.
    pub weight: Param<Tensor<B, 4>>,
    /// The optional learnable bias, of shape `[channels_out]`.
    pub bias: Option<Param<Tensor<B, 1>>>,
    /// The stride of the convolution.
    pub stride: [usize; 2],
    /// The size of the kernel.
    pub kernel_size: [usize; 2],
    /// Spacing between kernel elements.
    pub dilation: [usize; 2],
    /// Controls the connections between input and output channels.
    pub groups: usize,
    /// The padding configuration.
    pub padding: Ignored<PaddingConfig2d>,
}
impl Conv2dConfig {
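    /// Initialize a new [conv2d](Conv2d) module.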
pub fn init<B: Backend>(&self, device: &B::Device) -> Conv2d<B> {
checks::checks_channels_div_groups(self.channels[0], self.channels[1], self.groups);
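        // Weight tensor shape: `[channels_out, channels_in / groups, kernel_height, kernel_width]`.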
let shape = [
self.channels[1],
self.channels[0] / self.groups,
self.kernel_size[0],
self.kernel_size[1],
];
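        // Fan-in/fan-out per group, used by fan-aware initializers such as Kaiming.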
let k = self.kernel_size.iter().product::<usize>();
let fan_in = self.channels[0] / self.groups * k;
let fan_out = self.channels[1] / self.groups * k;
let weight = self
.initializer
.init_with(shape, Some(fan_in), Some(fan_out), device);
let mut bias = None;
if self.bias {
bias = Some(self.initializer.init_with(
[self.channels[1]],
Some(fan_in),
Some(fan_out),
device,
));
}
Conv2d {
weight,
bias,
stride: self.stride,
kernel_size: self.kernel_size,
dilation: self.dilation,
padding: Ignored(self.padding.clone()),
groups: self.groups,
}
}
}
impl<B: Backend> ModuleDisplay for Conv2d<B> {
fn custom_settings(&self) -> Option<DisplaySettings> {
DisplaySettings::new()
.with_new_line_after_attribute(false)
.optional()
}
fn custom_content(&self, content: Content) -> Option<Content> {
let padding_formatted = format!("{}", &self.padding);
let stride = format!("{:?}", self.stride);
let kernel_size = format!("{:?}", self.kernel_size);
let dilation = format!("{:?}", self.dilation);
content
.add("stride", &stride)
.add("kernel_size", &kernel_size)
.add("dilation", &dilation)
.add("groups", &self.groups)
.add("padding", &padding_formatted)
.optional()
}
}
impl<B: Backend> Conv2d<B> {
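    /// Applies the 2D convolution forward pass.
    ///
    /// See [conv2d](crate::tensor::module::conv2d) for more information.
    ///
    /// # Shapes
    ///
    /// - input: `[batch_size, channels_in, height_in, width_in]`
    /// - output: `[batch_size, channels_out, height_out, width_out]`
    ///
    /// A minimal sketch, assuming `conv` was built from `Conv2dConfig::new([3, 16], [3, 3])` with
    /// default options, and `MyBackend`/`device` are a valid backend type and device:
    ///
    /// ```ignore
    /// let input = Tensor::<MyBackend, 4>::zeros([8, 3, 32, 32], &device);
    /// let output = conv.forward(input); // shape [8, 16, 30, 30] with the default `Valid` padding
    /// ```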
pub fn forward(&self, input: Tensor<B, 4>) -> Tensor<B, 4> {
let [_batch_size, _channels_in, height_in, width_in] = input.dims();
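        // Resolve the concrete padding amounts from the padding configuration
        // (`Same` depends on the input spatial dimensions and the stride).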
let padding =
self.padding
.calculate_padding_2d(height_in, width_in, &self.kernel_size, &self.stride);
conv2d(
input,
self.weight.val(),
self.bias.as_ref().map(|bias| bias.val()),
ConvOptions::new(self.stride, padding, self.dilation, self.groups),
)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::tensor::TensorData;
use crate::TestBackend;
#[test]
fn initializer_default() {
TestBackend::seed(0);
let config = Conv2dConfig::new([5, 1], [5, 5]);
let k = (config.channels[0] * config.kernel_size[0] * config.kernel_size[1]) as f64;
let k = (config.groups as f64 / k).sqrt() as f32;
let device = Default::default();
let conv = config.init::<TestBackend>(&device);
conv.weight.to_data().assert_within_range(-k..k);
}
#[test]
fn initializer_zeros() {
TestBackend::seed(0);
let config = Conv2dConfig::new([5, 2], [5, 5]).with_initializer(Initializer::Zeros);
let device = Default::default();
let conv = config.init::<TestBackend>(&device);
assert_eq!(config.initializer, Initializer::Zeros);
conv.weight
.to_data()
.assert_approx_eq(&TensorData::zeros::<f32, _>(conv.weight.shape()), 3);
}
#[test]
fn initializer_fan_out() {
TestBackend::seed(0);
        let init = Initializer::KaimingUniform {
            gain: 1.0 / 3.0f64.sqrt(),
            fan_out_only: true,
        };
let device = Default::default();
let config = Conv2dConfig::new([5, 1], [5, 5]).with_initializer(init.clone());
let _ = config.init::<TestBackend>(&device);
assert_eq!(config.initializer, init);
}
#[test]
fn initializer_fan_with_groups_is_valid() {
TestBackend::seed(0);
let init = Initializer::KaimingUniform {
gain: 1.0 / 3.0f64.sqrt(),
fan_out_only: true,
};
let device = Default::default();
let config = Conv2dConfig::new([4, 4], [1, 1])
.with_initializer(init.clone())
.with_groups(4);
let _ = config.init::<TestBackend>(&device);
assert_eq!(config.initializer, init);
}
#[test]
#[should_panic = "Both channels must be divisible by the number of groups."]
fn channels_with_groups_is_invalid() {
let device = Default::default();
let config = Conv2dConfig::new([1, 4], [1, 1]).with_groups(4);
let _ = config.init::<TestBackend>(&device);
}
#[test]
fn display() {
let config = Conv2dConfig::new([5, 1], [5, 5]);
let conv = config.init::<TestBackend>(&Default::default());
assert_eq!(
alloc::format!("{}", conv),
"Conv2d {stride: [1, 1], kernel_size: [5, 5], dilation: [1, 1], groups: 1, padding: Valid, params: 126}"
);
}
}