// scirs2 Python bindings — neural.rs

1//! Python bindings for scirs2-neural
2//!
3//! This module provides Python bindings for neural network activation functions
4//! and utilities. Full layer-based training requires scirs2-autograd's computational
5//! graph system. For comprehensive neural network training, use PyTorch or TensorFlow.
6
7use pyo3::prelude::*;
8use scirs2_neural::activations_minimal::{Activation, ReLU, Sigmoid, Softmax, Tanh, GELU};
9use scirs2_numpy::{IntoPyArray, PyArray1, PyArray2, PyArrayMethods};
10
11// ============================================================================
12// Activation Function Classes
13// ============================================================================
14
15/// ReLU activation function
16///
17/// Applies: f(x) = max(0, x)
18///
19/// Example:
20///     relu = scirs2.ReLU()
21///     output = relu.forward(input_array)
22#[pyclass(name = "ReLU")]
23pub struct PyReLU {
24    inner: ReLU,
25}
26
27#[pymethods]
28impl PyReLU {
29    #[new]
30    fn new() -> Self {
31        Self { inner: ReLU::new() }
32    }
33
34    /// Forward pass
35    ///
36    /// Args:
37    ///     input (np.ndarray): Input array (any shape)
38    ///
39    /// Returns:
40    ///     np.ndarray: Activated output
41    fn forward(&self, py: Python, input: &Bound<'_, PyAny>) -> PyResult<Py<PyAny>> {
42        apply_activation(&self.inner, py, input)
43    }
44
45    /// Backward pass (gradient computation)
46    ///
47    /// Args:
48    ///     grad_output (np.ndarray): Gradient from next layer
49    ///     input (np.ndarray): Original input to forward pass
50    ///
51    /// Returns:
52    ///     np.ndarray: Gradient with respect to input
53    fn backward(
54        &self,
55        py: Python,
56        grad_output: &Bound<'_, PyAny>,
57        input: &Bound<'_, PyAny>,
58    ) -> PyResult<Py<PyAny>> {
59        apply_activation_backward(&self.inner, py, grad_output, input)
60    }
61}
62
63/// Sigmoid activation function
64///
65/// Applies: f(x) = 1 / (1 + exp(-x))
66///
67/// Example:
68///     sigmoid = scirs2.Sigmoid()
69///     output = sigmoid.forward(input_array)
70#[pyclass(name = "Sigmoid")]
71pub struct PySigmoid {
72    inner: Sigmoid,
73}
74
75#[pymethods]
76impl PySigmoid {
77    #[new]
78    fn new() -> Self {
79        Self {
80            inner: Sigmoid::new(),
81        }
82    }
83
84    /// Forward pass
85    fn forward(&self, py: Python, input: &Bound<'_, PyAny>) -> PyResult<Py<PyAny>> {
86        apply_activation(&self.inner, py, input)
87    }
88
89    /// Backward pass
90    fn backward(
91        &self,
92        py: Python,
93        grad_output: &Bound<'_, PyAny>,
94        input: &Bound<'_, PyAny>,
95    ) -> PyResult<Py<PyAny>> {
96        apply_activation_backward(&self.inner, py, grad_output, input)
97    }
98}
99
100/// Tanh activation function
101///
102/// Applies: f(x) = tanh(x)
103///
104/// Example:
105///     tanh = scirs2.Tanh()
106///     output = tanh.forward(input_array)
107#[pyclass(name = "Tanh")]
108pub struct PyTanh {
109    inner: Tanh,
110}
111
112#[pymethods]
113impl PyTanh {
114    #[new]
115    fn new() -> Self {
116        Self { inner: Tanh::new() }
117    }
118
119    /// Forward pass
120    fn forward(&self, py: Python, input: &Bound<'_, PyAny>) -> PyResult<Py<PyAny>> {
121        apply_activation(&self.inner, py, input)
122    }
123
124    /// Backward pass
125    fn backward(
126        &self,
127        py: Python,
128        grad_output: &Bound<'_, PyAny>,
129        input: &Bound<'_, PyAny>,
130    ) -> PyResult<Py<PyAny>> {
131        apply_activation_backward(&self.inner, py, grad_output, input)
132    }
133}
134
135/// GELU activation function
136///
137/// Gaussian Error Linear Unit activation.
138///
139/// Example:
140///     gelu = scirs2.GELU()
141///     output = gelu.forward(input_array)
142#[pyclass(name = "GELU")]
143pub struct PyGELU {
144    inner: GELU,
145}
146
147#[pymethods]
148impl PyGELU {
149    #[new]
150    #[pyo3(signature = (fast=false))]
151    fn new(fast: bool) -> Self {
152        Self {
153            inner: if fast { GELU::fast() } else { GELU::new() },
154        }
155    }
156
157    /// Forward pass
158    fn forward(&self, py: Python, input: &Bound<'_, PyAny>) -> PyResult<Py<PyAny>> {
159        apply_activation(&self.inner, py, input)
160    }
161
162    /// Backward pass
163    fn backward(
164        &self,
165        py: Python,
166        grad_output: &Bound<'_, PyAny>,
167        input: &Bound<'_, PyAny>,
168    ) -> PyResult<Py<PyAny>> {
169        apply_activation_backward(&self.inner, py, grad_output, input)
170    }
171}
172
173/// Softmax activation function
174///
175/// Applies: f(x)_i = exp(x_i) / sum(exp(x_j))
176///
177/// Example:
178///     softmax = scirs2.Softmax(axis=-1)
179///     output = softmax.forward(input_array)
180#[pyclass(name = "Softmax")]
181pub struct PySoftmax {
182    inner: Softmax,
183}
184
185#[pymethods]
186impl PySoftmax {
187    #[new]
188    #[pyo3(signature = (axis=-1))]
189    fn new(axis: isize) -> Self {
190        Self {
191            inner: Softmax::new(axis),
192        }
193    }
194
195    /// Forward pass
196    fn forward(&self, py: Python, input: &Bound<'_, PyAny>) -> PyResult<Py<PyAny>> {
197        apply_activation(&self.inner, py, input)
198    }
199
200    /// Backward pass
201    fn backward(
202        &self,
203        py: Python,
204        grad_output: &Bound<'_, PyAny>,
205        input: &Bound<'_, PyAny>,
206    ) -> PyResult<Py<PyAny>> {
207        apply_activation_backward(&self.inner, py, grad_output, input)
208    }
209}
210
211// ============================================================================
212// Helper Functions
213// ============================================================================
214
215/// Apply activation function to NumPy array
216#[allow(deprecated)]
217fn apply_activation<A: Activation<f64>>(
218    activation: &A,
219    py: Python,
220    input: &Bound<'_, PyAny>,
221) -> PyResult<Py<PyAny>> {
222    // Try 1D array
223    if let Ok(arr1d) = input.downcast::<PyArray1<f64>>() {
224        let binding = arr1d.readonly();
225        let data = binding.as_array().to_owned();
226        let dyn_input = data.into_dyn();
227
228        let output = activation.forward(&dyn_input).map_err(|e| {
229            PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!("Activation error: {}", e))
230        })?;
231
232        let out1d = output
233            .into_dimensionality::<scirs2_core::ndarray::Ix1>()
234            .map_err(|e| {
235                PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!("Dimension error: {}", e))
236            })?;
237
238        return Ok(out1d.into_pyarray(py).unbind().into());
239    }
240
241    // Try 2D array
242    if let Ok(arr2d) = input.downcast::<PyArray2<f64>>() {
243        let binding = arr2d.readonly();
244        let data = binding.as_array().to_owned();
245        let dyn_input = data.into_dyn();
246
247        let output = activation.forward(&dyn_input).map_err(|e| {
248            PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!("Activation error: {}", e))
249        })?;
250
251        let out2d = output
252            .into_dimensionality::<scirs2_core::ndarray::Ix2>()
253            .map_err(|e| {
254                PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!("Dimension error: {}", e))
255            })?;
256
257        return Ok(out2d.into_pyarray(py).unbind().into());
258    }
259
260    Err(PyErr::new::<pyo3::exceptions::PyTypeError, _>(
261        "Input must be 1D or 2D float64 numpy array",
262    ))
263}
264
265/// Apply activation backward pass
266#[allow(deprecated)]
267fn apply_activation_backward<A: Activation<f64>>(
268    activation: &A,
269    py: Python,
270    grad_output: &Bound<'_, PyAny>,
271    input: &Bound<'_, PyAny>,
272) -> PyResult<Py<PyAny>> {
273    // Try 1D arrays
274    if let (Ok(grad1d), Ok(inp1d)) = (
275        grad_output.downcast::<PyArray1<f64>>(),
276        input.downcast::<PyArray1<f64>>(),
277    ) {
278        let grad_binding = grad1d.readonly();
279        let grad_data = grad_binding.as_array().to_owned().into_dyn();
280
281        let inp_binding = inp1d.readonly();
282        let inp_data = inp_binding.as_array().to_owned().into_dyn();
283
284        let grad_input = activation.backward(&grad_data, &inp_data).map_err(|e| {
285            PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!(
286                "Activation backward error: {}",
287                e
288            ))
289        })?;
290
291        let out1d = grad_input
292            .into_dimensionality::<scirs2_core::ndarray::Ix1>()
293            .map_err(|e| {
294                PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!("Dimension error: {}", e))
295            })?;
296
297        return Ok(out1d.into_pyarray(py).unbind().into());
298    }
299
300    // Try 2D arrays
301    if let (Ok(grad2d), Ok(inp2d)) = (
302        grad_output.downcast::<PyArray2<f64>>(),
303        input.downcast::<PyArray2<f64>>(),
304    ) {
305        let grad_binding = grad2d.readonly();
306        let grad_data = grad_binding.as_array().to_owned().into_dyn();
307
308        let inp_binding = inp2d.readonly();
309        let inp_data = inp_binding.as_array().to_owned().into_dyn();
310
311        let grad_input = activation.backward(&grad_data, &inp_data).map_err(|e| {
312            PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!(
313                "Activation backward error: {}",
314                e
315            ))
316        })?;
317
318        let out2d = grad_input
319            .into_dimensionality::<scirs2_core::ndarray::Ix2>()
320            .map_err(|e| {
321                PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(format!("Dimension error: {}", e))
322            })?;
323
324        return Ok(out2d.into_pyarray(py).unbind().into());
325    }
326
327    Err(PyErr::new::<pyo3::exceptions::PyTypeError, _>(
328        "Inputs must be 1D or 2D float64 numpy arrays",
329    ))
330}
331
332// ============================================================================
333// Module Registration
334// ============================================================================
335
336pub fn register_module(m: &Bound<'_, PyModule>) -> PyResult<()> {
337    // Register activation function classes
338    m.add_class::<PyReLU>()?;
339    m.add_class::<PySigmoid>()?;
340    m.add_class::<PyTanh>()?;
341    m.add_class::<PyGELU>()?;
342    m.add_class::<PySoftmax>()?;
343
344    // Add module documentation
345    m.add(
346        "__doc__",
347        "Neural network activation functions and utilities\n\n\
348        This module provides standalone activation functions that can be used\n\
349        with NumPy arrays for neural network inference and custom training loops.\n\n\
350        Available activations:\n\
351        - ReLU: Rectified Linear Unit\n\
352        - Sigmoid: Logistic sigmoid\n\
353        - Tanh: Hyperbolic tangent\n\
354        - GELU: Gaussian Error Linear Unit\n\
355        - Softmax: Softmax normalization\n\n\
356        Each activation provides:\n\
357        - forward(input): Forward pass\n\
358        - backward(grad_output, input): Backward pass for gradient computation\n\n\
359        For comprehensive neural network training with automatic differentiation,\n\
360        we recommend using PyTorch or TensorFlow, which integrate seamlessly\n\
361        with scirs2 via NumPy array compatibility.",
362    )?;
363
364    Ok(())
365}