pub fn relu_simd<F>(a: &ArrayView1<'_, F>) -> Array1<F>
where
    F: Float + SimdUnifiedOps,
SIMD-accelerated ReLU (Rectified Linear Unit)
Computes max(0, x) element-wise.
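For intuition, the result matches a plain scalar map over the input. The following is a minimal reference sketch, not the library's SIMD path; it assumes Array1 and ArrayView1 are available via the scirs2_core::ndarray re-export, and relu_scalar is a hypothetical name used only for illustration:

use scirs2_core::ndarray::{Array1, ArrayView1};

// Hypothetical scalar reference: same semantics as relu_simd,
// without the SIMD dispatch provided by SimdUnifiedOps.
fn relu_scalar(a: &ArrayView1<'_, f64>) -> Array1<f64> {
    a.mapv(|x| x.max(0.0))
}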
§Arguments
a - Input array
§Returns
ReLU-activated array
§Examples
use scirs2_core::ndarray::array;
use scirs2_core::ndarray_ext::elementwise::relu_simd;
let x = array![-2.0_f64, -1.0, 0.0, 1.0, 2.0];
let result = relu_simd::<f64>(&x.view());
assert!((result[0] - 0.0).abs() < 1e-14);
assert!((result[1] - 0.0).abs() < 1e-14);
assert!((result[2] - 0.0).abs() < 1e-14);
assert!((result[3] - 1.0).abs() < 1e-14);
assert!((result[4] - 2.0).abs() < 1e-14);
§Use Cases
- Neural network activation
- Sparse representations
- Thresholding signals
- Feature rectification
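As one concrete instance of the thresholding and sparse-representation use cases above, ReLU can zero out sub-threshold samples. A minimal sketch reusing the imports from the example; the signal values and threshold here are illustrative, not from the library:

use scirs2_core::ndarray::array;
use scirs2_core::ndarray_ext::elementwise::relu_simd;

// Shift by a threshold, rectify, then count the surviving (nonzero) samples.
let signal = array![0.2_f64, -0.5, 1.3, 0.05, 2.1];
let threshold = 0.1;
let shifted = signal.mapv(|v| v - threshold);
let rectified = relu_simd::<f64>(&shifted.view());
let active = rectified.iter().filter(|&&v| v > 0.0).count();
assert_eq!(active, 3); // 0.2, 1.3, and 2.1 exceed the threshold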