1use alloc::format;
2
3use burn_core as burn;
4
5use crate::PaddingConfig3d;
6use burn::config::Config;
7use burn::module::Initializer;
8use burn::module::{Content, DisplaySettings, Ignored, Module, ModuleDisplay, Param};
9use burn::tensor::Tensor;
10use burn::tensor::backend::Backend;
11use burn::tensor::module::conv3d;
12use burn::tensor::ops::ConvOptions;
13
14use crate::conv::checks;
15
/// Configuration to create a 3D convolution layer ([Conv3d]), using the
/// [init function](Conv3dConfig::init).
#[derive(Config, Debug)]
pub struct Conv3dConfig {
    /// The number of input and output channels: `[channels_in, channels_out]`.
    pub channels: [usize; 2],
    /// The size of the kernel along each spatial dimension: `[depth, height, width]`.
    pub kernel_size: [usize; 3],
    /// The stride of the convolution. Default: `[1, 1, 1]`.
    #[config(default = "[1, 1, 1]")]
    pub stride: [usize; 3],
    /// Spacing between kernel elements. Default: `[1, 1, 1]`.
    #[config(default = "[1, 1, 1]")]
    pub dilation: [usize; 3],
    /// Number of groups the channels are divided into; both channel counts
    /// must be divisible by this value. Default: `1`.
    #[config(default = "1")]
    pub groups: usize,
    /// The padding configuration. Default: `Valid` (no padding).
    #[config(default = "PaddingConfig3d::Valid")]
    pub padding: PaddingConfig3d,
    /// Whether a learnable bias is added to the output. Default: `true`.
    #[config(default = true)]
    pub bias: bool,
    /// Initializer for the weight (and bias) parameters.
    /// Default: Kaiming uniform with `gain = 1 / sqrt(3)`, fan-in mode.
    #[config(
        default = "Initializer::KaimingUniform{gain:1.0/num_traits::Float::sqrt(3.0),fan_out_only:false}"
    )]
    pub initializer: Initializer,
}
44
/// Applies a 3D convolution over input tensors.
///
/// Should be created with [Conv3dConfig].
#[derive(Module, Debug)]
#[module(custom_display)]
pub struct Conv3d<B: Backend> {
    /// The learnable weights, of shape
    /// `[channels_out, channels_in / groups, kernel_d, kernel_h, kernel_w]`.
    pub weight: Param<Tensor<B, 5>>,
    /// Optional learnable bias, of shape `[channels_out]`.
    pub bias: Option<Param<Tensor<B, 1>>>,
    /// Stride of the convolution along each spatial dimension.
    pub stride: [usize; 3],
    /// Size of the kernel along each spatial dimension.
    pub kernel_size: [usize; 3],
    /// Spacing between kernel elements along each spatial dimension.
    pub dilation: [usize; 3],
    /// Number of groups the channels are divided into.
    pub groups: usize,
    /// Padding configuration (not a learnable parameter, hence `Ignored`).
    pub padding: Ignored<PaddingConfig3d>,
}
66
67impl Conv3dConfig {
68 pub fn init<B: Backend>(&self, device: &B::Device) -> Conv3d<B> {
70 checks::checks_channels_div_groups(self.channels[0], self.channels[1], self.groups);
71 if self.padding == PaddingConfig3d::Same {
72 checks::check_same_padding_support(&self.kernel_size);
73 }
74
75 let shape = [
76 self.channels[1],
77 self.channels[0] / self.groups,
78 self.kernel_size[0],
79 self.kernel_size[1],
80 self.kernel_size[2],
81 ];
82
83 let k = self.kernel_size.iter().product::<usize>();
84 let fan_in = self.channels[0] / self.groups * k;
85 let fan_out = self.channels[1] / self.groups * k;
86
87 let weight = self
88 .initializer
89 .init_with(shape, Some(fan_in), Some(fan_out), device);
90 let mut bias = None;
91
92 if self.bias {
93 bias = Some(self.initializer.init_with(
94 [self.channels[1]],
95 Some(fan_in),
96 Some(fan_out),
97 device,
98 ));
99 }
100
101 Conv3d {
102 weight,
103 bias,
104 stride: self.stride,
105 kernel_size: self.kernel_size,
106 dilation: self.dilation,
107 padding: Ignored(self.padding.clone()),
108 groups: self.groups,
109 }
110 }
111}
112
113impl<B: Backend> ModuleDisplay for Conv3d<B> {
114 fn custom_settings(&self) -> Option<DisplaySettings> {
115 DisplaySettings::new()
116 .with_new_line_after_attribute(false)
117 .optional()
118 }
119
120 fn custom_content(&self, content: Content) -> Option<Content> {
121 let padding_formatted = format!("{}", &self.padding);
123
124 let stride = format!("{:?}", self.stride);
126 let kernel_size = format!("{:?}", self.kernel_size);
127 let dilation = format!("{:?}", self.dilation);
128
129 let [channels_out, group_channels_in, _, _, _] = self.weight.dims();
131 let channels_in = group_channels_in * self.groups;
132 let ch_out = format!("{:?}", channels_out);
133 let ch_in = format!("{:?}", channels_in);
134
135 content
136 .add("ch_in", &ch_in)
137 .add("ch_out", &ch_out)
138 .add("stride", &stride)
139 .add("kernel_size", &kernel_size)
140 .add("dilation", &dilation)
141 .add("groups", &self.groups)
142 .add("padding", &padding_formatted)
143 .optional()
144 }
145}
146
147impl<B: Backend> Conv3d<B> {
148 pub fn forward(&self, input: Tensor<B, 5>) -> Tensor<B, 5> {
157 let [_batch_size, _channels_in, depth_in, height_in, width_in] = input.dims();
158 let padding = self.padding.calculate_padding_3d(
159 depth_in,
160 height_in,
161 width_in,
162 &self.kernel_size,
163 &self.stride,
164 );
165 conv3d(
166 input,
167 self.weight.val(),
168 self.bias.as_ref().map(|bias| bias.val()),
169 ConvOptions::new(self.stride, padding, self.dilation, self.groups),
170 )
171 }
172}
173
#[cfg(test)]
mod tests {
    use burn::tensor::{ElementConversion, Tolerance, ops::FloatElem};
    type FT = FloatElem<TestBackend>;

    use super::*;
    use crate::TestBackend;
    use burn::tensor::TensorData;

    #[test]
    fn initializer_default() {
        let device = Default::default();
        TestBackend::seed(&device, 0);

        let config = Conv3dConfig::new([5, 1], [5, 5, 5]);
        // Kaiming uniform bound: sqrt(groups / (channels_in * prod(kernel_size))).
        let k = (config.channels[0]
            * config.kernel_size[0]
            * config.kernel_size[1]
            * config.kernel_size[2]) as f64;
        let k = (config.groups as f64 / k).sqrt().elem::<FT>();
        let conv = config.init::<TestBackend>(&device);

        conv.weight.to_data().assert_within_range(-k..k);
    }

    #[test]
    fn initializer_zeros() {
        let device = Default::default();
        TestBackend::seed(&device, 0);

        let config = Conv3dConfig::new([5, 2], [5, 5, 5]).with_initializer(Initializer::Zeros);
        // Note: no second `device` binding — the one above is reused.
        let conv = config.init::<TestBackend>(&device);

        assert_eq!(config.initializer, Initializer::Zeros);
        conv.weight.to_data().assert_approx_eq::<FT>(
            &TensorData::zeros::<f32, _>(conv.weight.shape()),
            Tolerance::default(),
        );
    }

    #[test]
    fn initializer_fan_out() {
        let device = Default::default();
        TestBackend::seed(&device, 0);

        let init = Initializer::KaimingUniform {
            gain: 1.0 / 3.0f64.sqrt(),
            fan_out_only: true,
        };
        let config = Conv3dConfig::new([5, 1], [5, 5, 5]).with_initializer(init.clone());
        let _ = config.init::<TestBackend>(&device);

        assert_eq!(config.initializer, init);
    }

    #[test]
    fn initializer_fan_with_groups_is_valid() {
        let device = Default::default();
        TestBackend::seed(&device, 0);

        let init = Initializer::KaimingUniform {
            gain: 1.0 / 3.0f64.sqrt(),
            fan_out_only: true,
        };

        let config = Conv3dConfig::new([4, 4], [1, 1, 1])
            .with_initializer(init.clone())
            .with_groups(4);
        let _ = config.init::<TestBackend>(&device);

        assert_eq!(config.initializer, init);
    }

    #[test]
    #[should_panic = "Same padding with an even kernel size is not supported"]
    fn same_with_even_kernel_is_invalid() {
        let device = Default::default();
        let config = Conv3dConfig::new([4, 4], [2, 2, 2]).with_padding(PaddingConfig3d::Same);
        let _ = config.init::<TestBackend>(&device);
    }

    #[test]
    fn display() {
        let config = Conv3dConfig::new([5, 1], [5, 5, 5]);
        let conv = config.init::<TestBackend>(&Default::default());

        assert_eq!(
            alloc::format!("{conv}"),
            "Conv3d {ch_in: 5, ch_out: 1, stride: [1, 1, 1], kernel_size: [5, 5, 5], dilation: [1, 1, 1], groups: 1, padding: Valid, params: 626}"
        );
    }

    #[test]
    #[should_panic = "Number of channels in input tensor and input channels of convolution must be equal. got: 4, expected: 5"]
    fn input_channels_mismatch() {
        let config = Conv3dConfig::new([5, 3], [3, 3, 3]);
        let conv = config.init::<TestBackend>(&Default::default());

        let input = Tensor::<TestBackend, 5>::zeros([1, 4, 10, 10, 10], &Default::default());
        let _ = conv.forward(input);
    }
}