ModelBuilder

Struct ModelBuilder 

Source
pub struct ModelBuilder { /* private fields */ }

Implementations§

Source§

impl ModelBuilder

Source

pub fn new() -> ModelBuilder

Examples found in repository?
examples/mnist.rs (line 44)
34pub fn training() {
35    println!("extracting mnist data...");
36    let labels: Matrix = extract_labels("train-labels.idx1-ubyte");
37    let mut images: Matrix = extract_images("train-images.idx3-ubyte");
38    println!("extraction done");
39
40    images.normalize();
41    println!("number of images {}", images.height);
42    println!("number of pixels in each image {}", images.width);
43
44    ModelBuilder::new()
45        .add_layer(Layer::init(28 * 28, 128, true))
46        .add_layer(Layer::init(128, 128, true))
47        .add_layer(Layer::init(128, 10, false))
48        .optimizer(Optimizer::Adam {
49            learning_step: 0.001,
50            beta1: 0.9,
51            beta2: 0.999,
52        })
53        .l2_reg(0.001)
54        .checkpoint(Checkpoint::ValAcc {
55            save_path: "mnist_128x128".to_string(),
56        })
57        .verbose(10, false)
58        .build_and_train(&images, &labels, 128, 10, 2000);
59}
More examples
Hide additional examples
examples/spiral.rs (line 21)
6pub fn main() {
7    // generating the spiral dataset points
8    // 3000 points, spread into three classes (here a class = one spiral)
9    let (data, labels) = generate_spiral_dataset(3000, 3);
10
11    // Layer::init(number_of_inputs: u32, number_of_neurons_for_the_layer: u32, reLu: bool)
12    // if the last arg is true, applies ReLu as the activation function
13    // by default softmax is applied to the last layer
14
15    // One point of the spiral dataset consists of a X and a Y
16    // So the first layer has 2 inputs
17    // The last layer has 3 neurons because we have 3 classes, and therefore we want 3 outputs
18
19    // build and train
20    // (data: &matrix, labels: &matrix, batch_size: u32, number_of_epochs: u32, size_of_the_validation_dataset: usize)
21    let _ = ModelBuilder::new()
22        .add_layer(Layer::init(2, 10, true))
23        .add_layer(Layer::init(10, 10, true))
24        .add_layer(Layer::init(10, 3, false))
25        .optimizer(Optimizer::SGD {
26            learning_step: 0.001,
27        })
28        .l2_reg(0.0001)
29        .build_and_train(&data, &labels, 128, 10, 500);
30}
Source

pub fn add_layer(self, layer: Layer) -> ModelBuilder

Examples found in repository?
examples/mnist.rs (line 45)
34pub fn training() {
35    println!("extracting mnist data...");
36    let labels: Matrix = extract_labels("train-labels.idx1-ubyte");
37    let mut images: Matrix = extract_images("train-images.idx3-ubyte");
38    println!("extraction done");
39
40    images.normalize();
41    println!("number of images {}", images.height);
42    println!("number of pixels in each image {}", images.width);
43
44    ModelBuilder::new()
45        .add_layer(Layer::init(28 * 28, 128, true))
46        .add_layer(Layer::init(128, 128, true))
47        .add_layer(Layer::init(128, 10, false))
48        .optimizer(Optimizer::Adam {
49            learning_step: 0.001,
50            beta1: 0.9,
51            beta2: 0.999,
52        })
53        .l2_reg(0.001)
54        .checkpoint(Checkpoint::ValAcc {
55            save_path: "mnist_128x128".to_string(),
56        })
57        .verbose(10, false)
58        .build_and_train(&images, &labels, 128, 10, 2000);
59}
More examples
Hide additional examples
examples/spiral.rs (line 22)
6pub fn main() {
7    // generating the spiral dataset points
8    // 3000 points, spread into three classes (here a class = one spiral)
9    let (data, labels) = generate_spiral_dataset(3000, 3);
10
11    // Layer::init(number_of_inputs: u32, number_of_neurons_for_the_layer: u32, reLu: bool)
12    // if the last arg is true, applies ReLu as the activation function
13    // by default softmax is applied to the last layer
14
15    // One point of the spiral dataset consists of a X and a Y
16    // So the first layer has 2 inputs
17    // The last layer has 3 neurons because we have 3 classes, and therefore we want 3 outputs
18
19    // build and train
20    // (data: &matrix, labels: &matrix, batch_size: u32, number_of_epochs: u32, size_of_the_validation_dataset: usize)
21    let _ = ModelBuilder::new()
22        .add_layer(Layer::init(2, 10, true))
23        .add_layer(Layer::init(10, 10, true))
24        .add_layer(Layer::init(10, 3, false))
25        .optimizer(Optimizer::SGD {
26            learning_step: 0.001,
27        })
28        .l2_reg(0.0001)
29        .build_and_train(&data, &labels, 128, 10, 500);
30}
Source

pub fn optimizer(self, optimizer: Optimizer) -> ModelBuilder

Examples found in repository?
examples/mnist.rs (lines 48-52)
34pub fn training() {
35    println!("extracting mnist data...");
36    let labels: Matrix = extract_labels("train-labels.idx1-ubyte");
37    let mut images: Matrix = extract_images("train-images.idx3-ubyte");
38    println!("extraction done");
39
40    images.normalize();
41    println!("number of images {}", images.height);
42    println!("number of pixels in each image {}", images.width);
43
44    ModelBuilder::new()
45        .add_layer(Layer::init(28 * 28, 128, true))
46        .add_layer(Layer::init(128, 128, true))
47        .add_layer(Layer::init(128, 10, false))
48        .optimizer(Optimizer::Adam {
49            learning_step: 0.001,
50            beta1: 0.9,
51            beta2: 0.999,
52        })
53        .l2_reg(0.001)
54        .checkpoint(Checkpoint::ValAcc {
55            save_path: "mnist_128x128".to_string(),
56        })
57        .verbose(10, false)
58        .build_and_train(&images, &labels, 128, 10, 2000);
59}
More examples
Hide additional examples
examples/spiral.rs (lines 25-27)
6pub fn main() {
7    // generating the spiral dataset points
8    // 3000 points, spread into three classes (here a class = one spiral)
9    let (data, labels) = generate_spiral_dataset(3000, 3);
10
11    // Layer::init(number_of_inputs: u32, number_of_neurons_for_the_layer: u32, reLu: bool)
12    // if the last arg is true, applies ReLu as the activation function
13    // by default softmax is applied to the last layer
14
15    // One point of the spiral dataset consists of a X and a Y
16    // So the first layer has 2 inputs
17    // The last layer has 3 neurons because we have 3 classes, and therefore we want 3 outputs
18
19    // build and train
20    // (data: &matrix, labels: &matrix, batch_size: u32, number_of_epochs: u32, size_of_the_validation_dataset: usize)
21    let _ = ModelBuilder::new()
22        .add_layer(Layer::init(2, 10, true))
23        .add_layer(Layer::init(10, 10, true))
24        .add_layer(Layer::init(10, 3, false))
25        .optimizer(Optimizer::SGD {
26            learning_step: 0.001,
27        })
28        .l2_reg(0.0001)
29        .build_and_train(&data, &labels, 128, 10, 500);
30}
Source

pub fn l2_reg(self, lambda: f64) -> ModelBuilder

Examples found in repository?
examples/mnist.rs (line 53)
34pub fn training() {
35    println!("extracting mnist data...");
36    let labels: Matrix = extract_labels("train-labels.idx1-ubyte");
37    let mut images: Matrix = extract_images("train-images.idx3-ubyte");
38    println!("extraction done");
39
40    images.normalize();
41    println!("number of images {}", images.height);
42    println!("number of pixels in each image {}", images.width);
43
44    ModelBuilder::new()
45        .add_layer(Layer::init(28 * 28, 128, true))
46        .add_layer(Layer::init(128, 128, true))
47        .add_layer(Layer::init(128, 10, false))
48        .optimizer(Optimizer::Adam {
49            learning_step: 0.001,
50            beta1: 0.9,
51            beta2: 0.999,
52        })
53        .l2_reg(0.001)
54        .checkpoint(Checkpoint::ValAcc {
55            save_path: "mnist_128x128".to_string(),
56        })
57        .verbose(10, false)
58        .build_and_train(&images, &labels, 128, 10, 2000);
59}
More examples
Hide additional examples
examples/spiral.rs (line 28)
6pub fn main() {
7    // generating the spiral dataset points
8    // 3000 points, spread into three classes (here a class = one spiral)
9    let (data, labels) = generate_spiral_dataset(3000, 3);
10
11    // Layer::init(number_of_inputs: u32, number_of_neurons_for_the_layer: u32, reLu: bool)
12    // if the last arg is true, applies ReLu as the activation function
13    // by default softmax is applied to the last layer
14
15    // One point of the spiral dataset consists of a X and a Y
16    // So the first layer has 2 inputs
17    // The last layer has 3 neurons because we have 3 classes, and therefore we want 3 outputs
18
19    // build and train
20    // (data: &matrix, labels: &matrix, batch_size: u32, number_of_epochs: u32, size_of_the_validation_dataset: usize)
21    let _ = ModelBuilder::new()
22        .add_layer(Layer::init(2, 10, true))
23        .add_layer(Layer::init(10, 10, true))
24        .add_layer(Layer::init(10, 3, false))
25        .optimizer(Optimizer::SGD {
26            learning_step: 0.001,
27        })
28        .l2_reg(0.0001)
29        .build_and_train(&data, &labels, 128, 10, 500);
30}
Source

pub fn checkpoint(self, checkpoint: Checkpoint) -> ModelBuilder

Examples found in repository?
examples/mnist.rs (lines 54-56)
34pub fn training() {
35    println!("extracting mnist data...");
36    let labels: Matrix = extract_labels("train-labels.idx1-ubyte");
37    let mut images: Matrix = extract_images("train-images.idx3-ubyte");
38    println!("extraction done");
39
40    images.normalize();
41    println!("number of images {}", images.height);
42    println!("number of pixels in each image {}", images.width);
43
44    ModelBuilder::new()
45        .add_layer(Layer::init(28 * 28, 128, true))
46        .add_layer(Layer::init(128, 128, true))
47        .add_layer(Layer::init(128, 10, false))
48        .optimizer(Optimizer::Adam {
49            learning_step: 0.001,
50            beta1: 0.9,
51            beta2: 0.999,
52        })
53        .l2_reg(0.001)
54        .checkpoint(Checkpoint::ValAcc {
55            save_path: "mnist_128x128".to_string(),
56        })
57        .verbose(10, false)
58        .build_and_train(&images, &labels, 128, 10, 2000);
59}
Source

pub fn verbose(self, print_frequency: usize, silent_mode: bool) -> ModelBuilder

Examples found in repository?
examples/mnist.rs (line 57)
34pub fn training() {
35    println!("extracting mnist data...");
36    let labels: Matrix = extract_labels("train-labels.idx1-ubyte");
37    let mut images: Matrix = extract_images("train-images.idx3-ubyte");
38    println!("extraction done");
39
40    images.normalize();
41    println!("number of images {}", images.height);
42    println!("number of pixels in each image {}", images.width);
43
44    ModelBuilder::new()
45        .add_layer(Layer::init(28 * 28, 128, true))
46        .add_layer(Layer::init(128, 128, true))
47        .add_layer(Layer::init(128, 10, false))
48        .optimizer(Optimizer::Adam {
49            learning_step: 0.001,
50            beta1: 0.9,
51            beta2: 0.999,
52        })
53        .l2_reg(0.001)
54        .checkpoint(Checkpoint::ValAcc {
55            save_path: "mnist_128x128".to_string(),
56        })
57        .verbose(10, false)
58        .build_and_train(&images, &labels, 128, 10, 2000);
59}
Source

pub fn debug(self, debug: bool) -> ModelBuilder

Source

pub fn build(self) -> Model

Source

pub fn build_and_train( self, data: &Matrix, labels: &Matrix, batch_size: u32, epochs: u32, validation_dataset_size: usize, )

Examples found in repository?
examples/mnist.rs (line 58)
34pub fn training() {
35    println!("extracting mnist data...");
36    let labels: Matrix = extract_labels("train-labels.idx1-ubyte");
37    let mut images: Matrix = extract_images("train-images.idx3-ubyte");
38    println!("extraction done");
39
40    images.normalize();
41    println!("number of images {}", images.height);
42    println!("number of pixels in each image {}", images.width);
43
44    ModelBuilder::new()
45        .add_layer(Layer::init(28 * 28, 128, true))
46        .add_layer(Layer::init(128, 128, true))
47        .add_layer(Layer::init(128, 10, false))
48        .optimizer(Optimizer::Adam {
49            learning_step: 0.001,
50            beta1: 0.9,
51            beta2: 0.999,
52        })
53        .l2_reg(0.001)
54        .checkpoint(Checkpoint::ValAcc {
55            save_path: "mnist_128x128".to_string(),
56        })
57        .verbose(10, false)
58        .build_and_train(&images, &labels, 128, 10, 2000);
59}
More examples
Hide additional examples
examples/spiral.rs (line 29)
6pub fn main() {
7    // generating the spiral dataset points
8    // 3000 points, spread into three classes (here a class = one spiral)
9    let (data, labels) = generate_spiral_dataset(3000, 3);
10
11    // Layer::init(number_of_inputs: u32, number_of_neurons_for_the_layer: u32, reLu: bool)
12    // if the last arg is true, applies ReLu as the activation function
13    // by default softmax is applied to the last layer
14
15    // One point of the spiral dataset consists of a X and a Y
16    // So the first layer has 2 inputs
17    // The last layer has 3 neurons because we have 3 classes, and therefore we want 3 outputs
18
19    // build and train
20    // (data: &matrix, labels: &matrix, batch_size: u32, number_of_epochs: u32, size_of_the_validation_dataset: usize)
21    let _ = ModelBuilder::new()
22        .add_layer(Layer::init(2, 10, true))
23        .add_layer(Layer::init(10, 10, true))
24        .add_layer(Layer::init(10, 3, false))
25        .optimizer(Optimizer::SGD {
26            learning_step: 0.001,
27        })
28        .l2_reg(0.0001)
29        .build_and_train(&data, &labels, 128, 10, 500);
30}

Trait Implementations§

Source§

impl Clone for ModelBuilder

Source§

fn clone(&self) -> ModelBuilder

Returns a duplicate of the value. Read more
1.0.0 · Source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more

Auto Trait Implementations§

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> CloneToUninit for T
where T: Clone,

Source§

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬This is a nightly-only experimental API. (clone_to_uninit)
Performs copy-assignment from self to dest. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T> ToOwned for T
where T: Clone,

Source§

type Owned = T

The resulting type after obtaining ownership.
Source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
Source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

Source§

fn vzip(self) -> V