pub struct Dense<F: Float + Debug + Send + Sync> { /* private fields */ }
Dense (fully connected) layer for neural networks.
A dense layer performs the operation: y = activation(W * x + b), where W is the weight matrix, x is the input vector, b is the bias vector, and activation is the activation function.
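For example (a minimal sketch, not one of this crate's documented examples; it assumes Dense, Array, IxDyn, and SmallRng are imported as in the repository example further down), a layer with input_dim = 3 and output_dim = 2 maps a [batch, 3] array to a [batch, 2] array:

// Hypothetical sketch: build a small layer and run one forward pass.
let mut rng = SmallRng::from_seed([42; 32]);
let layer = Dense::<f32>::new(3, 2, Some("relu"), &mut rng)?;
// x has shape [batch, input_dim]; y = relu(W * x + b) has shape [batch, output_dim].
let x = Array::from_shape_vec(IxDyn(&[1, 3]), vec![0.5f32, -1.0, 2.0])?;
let y = layer.forward(&x)?;
assert_eq!(y.shape(), &[1, 2]);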
Implementations§
impl<F: Float + Debug + ScalarOperand + Send + Sync + 'static> Dense<F>
pub fn new<R: Rng + RngCore>(
    input_dim: usize,
    output_dim: usize,
    activation_name: Option<&str>,
    rng: &mut R,
) -> Result<Self>
Create a new dense layer.
§Arguments
input_dim - Number of input features
output_dim - Number of output features
activation_name - Optional activation function name
rng - Random number generator for weight initialization
Examples found in repository
examples/manual_xor.rs (line 22)
9 fn main() -> Result<()> {
10 println!("Manual XOR Neural Network Example");
11 // Create a simple dataset for XOR problem
12 let inputs = Array::from_shape_vec(
13 IxDyn(&[4, 2]),
14 vec![0.0f32, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0],
15 )?;
16 let targets = Array::from_shape_vec(IxDyn(&[4, 1]), vec![0.0f32, 1.0, 1.0, 0.0])?;
17 println!("XOR problem dataset:");
18 println!("Inputs:\n{inputs:?}");
19 println!("Targets:\n{targets:?}");
20 // Create neural network layers
21 let mut rng = SmallRng::from_seed([42; 32]);
22 let mut hidden_layer = Dense::new(2, 4, Some("relu"), &mut rng)?;
23 let mut output_layer = Dense::new(4, 1, None, &mut rng)?;
24 // Create loss function
25 let loss_fn = MeanSquaredError::new();
26 // Training parameters
27 let learning_rate = 0.5f32;
28 let num_epochs = 10000;
29 println!("\nTraining for {num_epochs} epochs");
30 for epoch in 0..num_epochs {
31 // Forward pass through the network
32 let hidden_output = hidden_layer.forward(&inputs)?;
33 let final_output = output_layer.forward(&hidden_output)?;
34 // Compute loss
35 let loss = loss_fn.forward(&final_output, &targets)?;
36 if epoch % 500 == 0 || epoch == num_epochs - 1 {
37 println!("Epoch {}/{num_epochs}: loss = {loss:.6}", epoch + 1);
38 }
39 // Backward pass
40 let output_grad = loss_fn.backward(&final_output, &targets)?;
41 let hidden_grad = output_layer.backward(&hidden_output, &output_grad)?;
42 let _input_grad = hidden_layer.backward(&inputs, &hidden_grad)?;
43 // Update parameters
44 hidden_layer.update(learning_rate)?;
45 output_layer.update(learning_rate)?;
46 }
47 // Evaluate the model
48 println!("\nEvaluation:");
49 let hidden_output = hidden_layer.forward(&inputs)?;
50 let final_output = output_layer.forward(&hidden_output)?;
51 println!("Predictions:\n{final_output:.3?}");
52 // Test with individual inputs
53 println!("\nTesting with specific inputs:");
54 let test_cases = vec![
55 (0.0f32, 0.0f32),
56 (0.0f32, 1.0f32),
57 (1.0f32, 0.0f32),
58 (1.0f32, 1.0f32),
59 ];
60 for (x1, x2) in test_cases {
61 let test_input = Array::from_shape_vec(IxDyn(&[1, 2]), vec![x1, x2])?;
62 let hidden_output = hidden_layer.forward(&test_input)?;
63 let prediction = output_layer.forward(&hidden_output)?;
64 let expected = if (x1 == 1.0 && x2 == 0.0) || (x1 == 0.0 && x2 == 1.0) {
65 1.0
66 } else {
67 0.0
68 };
69 println!(
70 "Input: [{x1:.1}, {x2:.1}], Predicted: {:.3}, Expected: {expected:.1}",
71 prediction[[0, 0]]
72 );
73 }
74 Ok(())
75 }
pub fn output_dim(&self) -> usize
Get the output dimension
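A hedged usage sketch: output_dim is handy when chaining layers so the next layer's input size always matches (names assumed to be in scope as above):

// Wire the next layer's input_dim to this layer's output_dim.
let hidden = Dense::<f32>::new(2, 4, Some("relu"), &mut rng)?;
let output = Dense::new(hidden.output_dim(), 1, None, &mut rng)?;
assert_eq!(hidden.output_dim(), 4);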
Trait Implementations§
impl<F: Float + Debug + ScalarOperand + Send + Sync + 'static> Layer<F> for Dense<F>
fn forward(&self, input: &Array<F, IxDyn>) -> Result<Array<F, IxDyn>>
Forward pass of the layer
fn backward(
    &self,
    _input: &Array<F, IxDyn>,
    grad_output: &Array<F, IxDyn>,
) -> Result<Array<F, IxDyn>>
Backward pass of the layer to compute gradients
fn update(&mut self, learningrate: F) -> Result<()>
Update the layer parameters with the given learning rate
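Taken together with forward and backward, update completes one gradient-descent step. A hedged sketch (loss_fn, inputs, and targets as in the repository example above):

// `layer` is a mutable Dense created as in the examples above.
// One training step: forward pass, loss gradient, backpropagation, parameter update.
let output = layer.forward(&inputs)?;
let grad_output = loss_fn.backward(&output, &targets)?;
let _grad_input = layer.backward(&inputs, &grad_output)?; // gradient w.r.t. the layer input
layer.update(0.1f32)?; // apply the parameter update scaled by the learning rate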
fn as_any_mut(&mut self) -> &mut dyn Any
Get the layer as a mutable dyn Any for downcasting
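A hedged sketch of what this enables: recovering the concrete Dense type from a boxed dyn Layer via Any::downcast_mut (names assumed to be in scope):

let mut boxed: Box<dyn Layer<f32>> = Box::new(Dense::new(2, 4, Some("relu"), &mut rng)?);
// Downcast the trait object back to the concrete layer type.
if let Some(dense) = boxed.as_any_mut().downcast_mut::<Dense<f32>>() {
    println!("recovered a Dense layer with output_dim = {}", dense.output_dim());
}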
fn layer_type(&self) -> &str
Get the type of the layer (e.g., “Dense”, “Conv2D”)
fn parameter_count(&self) -> usize
Get the number of trainable parameters in this layer
fn layer_description(&self) -> String
Get a detailed description of this layer
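A hedged sketch using the introspection methods together; the parameter count noted in the comment assumes Dense counts input_dim * output_dim weights plus output_dim biases:

let layer = Dense::<f32>::new(2, 4, Some("relu"), &mut rng)?;
println!("{}: {}", layer.layer_type(), layer.layer_description());
// Expected 2 * 4 weights + 4 biases = 12, if both weights and biases are counted.
println!("trainable parameters: {}", layer.parameter_count());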
fn set_gradients(&mut self, _gradients: &[Array<F, IxDyn>]) -> Result<()>
Set the gradients of the layer parameters
fn set_params(&mut self, _params: &[Array<F, IxDyn>]) -> Result<()>
Set the parameters of the layer
fn set_training(&mut self, _training: bool)
Set the layer to training mode (true) or evaluation mode (false)
fn is_training(&self) -> bool
Get the current training mode
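A hedged sketch of toggling the mode around evaluation; the underscore-prefixed _training parameter above hints that Dense itself has no mode-dependent behavior (unlike, say, dropout), so for this layer the flag is informational:

let mut layer = Dense::<f32>::new(2, 4, Some("relu"), &mut rng)?;
layer.set_training(false); // switch to evaluation mode before inference
println!("training mode: {}", layer.is_training());
layer.set_training(true); // back to training mode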
impl<F: Float + Debug + ScalarOperand + Send + Sync + 'static> ParamLayer<F> for Dense<F>
impl<F: Float + Debug + Send + Sync> Send for Dense<F>
impl<F: Float + Debug + Send + Sync> Sync for Dense<F>
Auto Trait Implementations§
impl<F> !Freeze for Dense<F>
impl<F> !RefUnwindSafe for Dense<F>
impl<F> Unpin for Dense<F>
impl<F> !UnwindSafe for Dense<F>
Blanket Implementations§
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.
impl<T> CloneToUninit for T
where
    T: Clone,
impl<T> IntoEither for T
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise.
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise.