rumus 0.1.0

A native-Rust deep learning framework with explicit memory safety and hardware acceleration
Documentation
//! Activation functions.

use crate::tensor::Tensor;

/// Element-wise ReLU activation: `max(0, x)`.
///
/// This is a free function, not a `Module`, because ReLU has no learnable
/// parameters and no state.
pub fn relu(input: &Tensor) -> Tensor {
    input.relu()
}