use pyo3::prelude::*;
use scirs2_neural::activations_minimal::{Activation, GELU, ReLU, Sigmoid, Softmax, Tanh};
use scirs2_numpy::{IntoPyArray, PyArray1, PyArray2, PyArrayMethods};

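/// Rectified Linear Unit activation: `f(x) = max(0, x)`.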
#[pyclass(name = "ReLU")]
pub struct PyReLU {
    inner: ReLU,
}

#[pymethods]
impl PyReLU {
    #[new]
    fn new() -> Self {
        Self { inner: ReLU::new() }
    }

    fn forward(&self, py: Python, input: &Bound<'_, PyAny>) -> PyResult<Py<PyAny>> {
        apply_activation(&self.inner, py, input)
    }

    fn backward(
        &self,
        py: Python,
        grad_output: &Bound<'_, PyAny>,
        input: &Bound<'_, PyAny>,
    ) -> PyResult<Py<PyAny>> {
        apply_activation_backward(&self.inner, py, grad_output, input)
    }
}

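/// Logistic sigmoid activation: `f(x) = 1 / (1 + exp(-x))`.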
#[pyclass(name = "Sigmoid")]
pub struct PySigmoid {
    inner: Sigmoid,
}

#[pymethods]
impl PySigmoid {
    #[new]
    fn new() -> Self {
        Self {
            inner: Sigmoid::new(),
        }
    }

    fn forward(&self, py: Python, input: &Bound<'_, PyAny>) -> PyResult<Py<PyAny>> {
        apply_activation(&self.inner, py, input)
    }

    fn backward(
        &self,
        py: Python,
        grad_output: &Bound<'_, PyAny>,
        input: &Bound<'_, PyAny>,
    ) -> PyResult<Py<PyAny>> {
        apply_activation_backward(&self.inner, py, grad_output, input)
    }
}

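/// Hyperbolic tangent activation: `f(x) = tanh(x)`.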
#[pyclass(name = "Tanh")]
pub struct PyTanh {
    inner: Tanh,
}

#[pymethods]
impl PyTanh {
    #[new]
    fn new() -> Self {
        Self { inner: Tanh::new() }
    }

    fn forward(&self, py: Python, input: &Bound<'_, PyAny>) -> PyResult<Py<PyAny>> {
        apply_activation(&self.inner, py, input)
    }

    fn backward(
        &self,
        py: Python,
        grad_output: &Bound<'_, PyAny>,
        input: &Bound<'_, PyAny>,
    ) -> PyResult<Py<PyAny>> {
        apply_activation_backward(&self.inner, py, grad_output, input)
    }
}

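/// Gaussian Error Linear Unit (GELU) activation. Pass `fast=True` from
/// Python to construct the library's `GELU::fast` variant instead of the
/// default `GELU::new`.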
#[pyclass(name = "GELU")]
pub struct PyGELU {
    inner: GELU,
}

#[pymethods]
impl PyGELU {
    #[new]
    #[pyo3(signature = (fast=false))]
    fn new(fast: bool) -> Self {
        Self {
            inner: if fast { GELU::fast() } else { GELU::new() },
        }
    }

    fn forward(&self, py: Python, input: &Bound<'_, PyAny>) -> PyResult<Py<PyAny>> {
        apply_activation(&self.inner, py, input)
    }

    fn backward(
        &self,
        py: Python,
        grad_output: &Bound<'_, PyAny>,
        input: &Bound<'_, PyAny>,
    ) -> PyResult<Py<PyAny>> {
        apply_activation_backward(&self.inner, py, grad_output, input)
    }
}

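/// Softmax normalization over the given axis (defaults to the last axis,
/// `axis=-1`).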
#[pyclass(name = "Softmax")]
pub struct PySoftmax {
    inner: Softmax,
}

#[pymethods]
impl PySoftmax {
    #[new]
    #[pyo3(signature = (axis=-1))]
    fn new(axis: isize) -> Self {
        Self {
            inner: Softmax::new(axis),
        }
    }

    fn forward(&self, py: Python, input: &Bound<'_, PyAny>) -> PyResult<Py<PyAny>> {
        apply_activation(&self.inner, py, input)
    }

    fn backward(
        &self,
        py: Python,
        grad_output: &Bound<'_, PyAny>,
        input: &Bound<'_, PyAny>,
    ) -> PyResult<Py<PyAny>> {
        apply_activation_backward(&self.inner, py, grad_output, input)
    }
}

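/// Shared forward-pass dispatcher. Downcasts `input` to a 1D or 2D float64
/// NumPy array, copies it into an owned dynamic-dimension `ndarray`, applies
/// the activation, and converts the result back to a NumPy array of the
/// original rank.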
#[allow(deprecated)]
fn apply_activation<A: Activation<f64>>(
    activation: &A,
    py: Python,
    input: &Bound<'_, PyAny>,
) -> PyResult<Py<PyAny>> {
    // 1D case: copy into an owned dynamic-dimension array, run the forward
    // pass, then restore the original rank for the returned NumPy array.
    if let Ok(arr1d) = input.downcast::<PyArray1<f64>>() {
        let binding = arr1d.readonly();
        let data = binding.as_array().to_owned();
        let dyn_input = data.into_dyn();

        let output = activation.forward(&dyn_input).map_err(|e| {
            PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!("Activation error: {}", e))
        })?;

        let out1d = output
            .into_dimensionality::<scirs2_core::ndarray::Ix1>()
            .map_err(|e| {
                PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!("Dimension error: {}", e))
            })?;

        return Ok(out1d.into_pyarray(py).unbind().into());
    }

    // 2D case: identical flow, restoring `Ix2` at the end.
    if let Ok(arr2d) = input.downcast::<PyArray2<f64>>() {
        let binding = arr2d.readonly();
        let data = binding.as_array().to_owned();
        let dyn_input = data.into_dyn();

        let output = activation.forward(&dyn_input).map_err(|e| {
            PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!("Activation error: {}", e))
        })?;

        let out2d = output
            .into_dimensionality::<scirs2_core::ndarray::Ix2>()
            .map_err(|e| {
                PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!("Dimension error: {}", e))
            })?;

        return Ok(out2d.into_pyarray(py).unbind().into());
    }

    Err(PyErr::new::<pyo3::exceptions::PyTypeError, _>(
        "Input must be a 1D or 2D float64 NumPy array",
    ))
}

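/// Shared backward-pass dispatcher. Mirrors `apply_activation`, but takes the
/// upstream gradient alongside the original input; both must downcast to
/// float64 NumPy arrays of the same rank (1D or 2D).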
#[allow(deprecated)]
fn apply_activation_backward<A: Activation<f64>>(
    activation: &A,
    py: Python,
    grad_output: &Bound<'_, PyAny>,
    input: &Bound<'_, PyAny>,
) -> PyResult<Py<PyAny>> {
    // 1D case: both the gradient and the input must downcast to 1D arrays.
    if let (Ok(grad1d), Ok(inp1d)) = (
        grad_output.downcast::<PyArray1<f64>>(),
        input.downcast::<PyArray1<f64>>(),
    ) {
        let grad_binding = grad1d.readonly();
        let grad_data = grad_binding.as_array().to_owned().into_dyn();

        let inp_binding = inp1d.readonly();
        let inp_data = inp_binding.as_array().to_owned().into_dyn();

        let grad_input = activation.backward(&grad_data, &inp_data).map_err(|e| {
            PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!(
                "Activation backward error: {}",
                e
            ))
        })?;

        let out1d = grad_input
            .into_dimensionality::<scirs2_core::ndarray::Ix1>()
            .map_err(|e| {
                PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!("Dimension error: {}", e))
            })?;

        return Ok(out1d.into_pyarray(py).unbind().into());
    }

    // 2D case: identical flow, restoring `Ix2` at the end.
    if let (Ok(grad2d), Ok(inp2d)) = (
        grad_output.downcast::<PyArray2<f64>>(),
        input.downcast::<PyArray2<f64>>(),
    ) {
        let grad_binding = grad2d.readonly();
        let grad_data = grad_binding.as_array().to_owned().into_dyn();

        let inp_binding = inp2d.readonly();
        let inp_data = inp_binding.as_array().to_owned().into_dyn();

        let grad_input = activation.backward(&grad_data, &inp_data).map_err(|e| {
            PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!(
                "Activation backward error: {}",
                e
            ))
        })?;

        let out2d = grad_input
            .into_dimensionality::<scirs2_core::ndarray::Ix2>()
            .map_err(|e| {
                PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!("Dimension error: {}", e))
            })?;

        return Ok(out2d.into_pyarray(py).unbind().into());
    }

    Err(PyErr::new::<pyo3::exceptions::PyTypeError, _>(
        "grad_output and input must be 1D or 2D float64 NumPy arrays of matching rank",
    ))
}

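/// Registers the activation classes and the module docstring on the parent
/// PyO3 module.
///
/// A minimal usage sketch from Python. The import path below is hypothetical
/// and depends on how the extension module is built and registered:
///
/// ```python
/// import numpy as np
/// from scirs2.activations import ReLU  # hypothetical module path
///
/// relu = ReLU()
/// x = np.array([-1.0, 0.0, 2.0])  # 1D float64 input
/// y = relu.forward(x)             # array([0., 0., 2.])
/// g = relu.backward(np.ones_like(x), x)
/// ```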
pub fn register_module(m: &Bound<'_, PyModule>) -> PyResult<()> {
    m.add_class::<PyReLU>()?;
    m.add_class::<PySigmoid>()?;
    m.add_class::<PyTanh>()?;
    m.add_class::<PyGELU>()?;
    m.add_class::<PySoftmax>()?;

    m.add(
        "__doc__",
        "Neural network activation functions and utilities\n\n\
         This module provides standalone activation functions that can be used\n\
         with NumPy arrays for neural network inference and custom training loops.\n\n\
         Available activations:\n\
         - ReLU: Rectified Linear Unit\n\
         - Sigmoid: Logistic sigmoid\n\
         - Tanh: Hyperbolic tangent\n\
         - GELU: Gaussian Error Linear Unit\n\
         - Softmax: Softmax normalization\n\n\
         Each activation provides:\n\
         - forward(input): Forward pass\n\
         - backward(grad_output, input): Backward pass for gradient computation\n\n\
         For comprehensive neural network training with automatic differentiation,\n\
         we recommend using PyTorch or TensorFlow, which integrate seamlessly\n\
         with scirs2 via NumPy array compatibility.",
    )?;

    Ok(())
}