pub struct ModelBuilder { /* private fields */ }
Implementations§
Source§impl ModelBuilder
impl ModelBuilder
Source
pub fn new() -> ModelBuilder
pub fn new() -> ModelBuilder
Examples found in repository?
examples/mnist.rs (line 44)
34pub fn training() {
35 println!("extracting mnist data...");
36 let labels: Matrix = extract_labels("train-labels.idx1-ubyte");
37 let mut images: Matrix = extract_images("train-images.idx3-ubyte");
38 println!("extraction done");
39
40 images.normalize();
41 println!("number of images {}", images.height);
42 println!("number of pixels in each image {}", images.width);
43
44 ModelBuilder::new()
45 .add_layer(Layer::init(28 * 28, 128, true))
46 .add_layer(Layer::init(128, 128, true))
47 .add_layer(Layer::init(128, 10, false))
48 .optimizer(Optimizer::Adam {
49 learning_step: 0.001,
50 beta1: 0.9,
51 beta2: 0.999,
52 })
53 .l2_reg(0.001)
54 .checkpoint(Checkpoint::ValAcc {
55 save_path: "mnist_128x128".to_string(),
56 })
57 .verbose(10, false)
58 .build_and_train(&images, &labels, 128, 10, 2000);
59}
More examples
examples/spiral.rs (line 21)
6pub fn main() {
7 // generating the spiral dataset points
8 // 3000 points, spread into three classes (here a class = one spiral)
9 let (data, labels) = generate_spiral_dataset(3000, 3);
10
11 // Layer::init(number_of_inputs: u32, number_of_neurons_for_the_layer: u32, reLu: bool)
12 // if the last arg is true, applies ReLu as the activation function
13 // by default softmax is applied to the last layer
14
15 // One point of the spiral dataset consists of a X and a Y
16 // So the first layer has 2 inputs
17 // The last layer has 3 neurons because we have 3 classes, and therefore we want 3 outputs
18
19 // build and train
20    // (data: &Matrix, labels: &Matrix, batch_size: u32, number_of_epochs: u32, size_of_the_validation_dataset: usize)
21 let _ = ModelBuilder::new()
22 .add_layer(Layer::init(2, 10, true))
23 .add_layer(Layer::init(10, 10, true))
24 .add_layer(Layer::init(10, 3, false))
25 .optimizer(Optimizer::SGD {
26 learning_step: 0.001,
27 })
28 .l2_reg(0.0001)
29 .build_and_train(&data, &labels, 128, 10, 500);
30}
Source
pub fn add_layer(self, layer: Layer) -> ModelBuilder
pub fn add_layer(self, layer: Layer) -> ModelBuilder
Examples found in repository?
examples/mnist.rs (line 45)
34pub fn training() {
35 println!("extracting mnist data...");
36 let labels: Matrix = extract_labels("train-labels.idx1-ubyte");
37 let mut images: Matrix = extract_images("train-images.idx3-ubyte");
38 println!("extraction done");
39
40 images.normalize();
41 println!("number of images {}", images.height);
42 println!("number of pixels in each image {}", images.width);
43
44 ModelBuilder::new()
45 .add_layer(Layer::init(28 * 28, 128, true))
46 .add_layer(Layer::init(128, 128, true))
47 .add_layer(Layer::init(128, 10, false))
48 .optimizer(Optimizer::Adam {
49 learning_step: 0.001,
50 beta1: 0.9,
51 beta2: 0.999,
52 })
53 .l2_reg(0.001)
54 .checkpoint(Checkpoint::ValAcc {
55 save_path: "mnist_128x128".to_string(),
56 })
57 .verbose(10, false)
58 .build_and_train(&images, &labels, 128, 10, 2000);
59}More examples
examples/spiral.rs (line 22)
6pub fn main() {
7 // generating the spiral dataset points
8 // 3000 points, spread into three classes (here a class = one spiral)
9 let (data, labels) = generate_spiral_dataset(3000, 3);
10
11 // Layer::init(number_of_inputs: u32, number_of_neurons_for_the_layer: u32, reLu: bool)
12 // if the last arg is true, applies ReLu as the activation function
13 // by default softmax is applied to the last layer
14
15 // One point of the spiral dataset consists of a X and a Y
16 // So the first layer has 2 inputs
17 // The last layer has 3 neurons because we have 3 classes, and therefore we want 3 outputs
18
19 // build and train
20    // (data: &Matrix, labels: &Matrix, batch_size: u32, number_of_epochs: u32, size_of_the_validation_dataset: usize)
21 let _ = ModelBuilder::new()
22 .add_layer(Layer::init(2, 10, true))
23 .add_layer(Layer::init(10, 10, true))
24 .add_layer(Layer::init(10, 3, false))
25 .optimizer(Optimizer::SGD {
26 learning_step: 0.001,
27 })
28 .l2_reg(0.0001)
29 .build_and_train(&data, &labels, 128, 10, 500);
30}
Source
pub fn optimizer(self, optimizer: Optimizer) -> ModelBuilder
pub fn optimizer(self, optimizer: Optimizer) -> ModelBuilder
Examples found in repository?
examples/mnist.rs (lines 48-52)
34pub fn training() {
35 println!("extracting mnist data...");
36 let labels: Matrix = extract_labels("train-labels.idx1-ubyte");
37 let mut images: Matrix = extract_images("train-images.idx3-ubyte");
38 println!("extraction done");
39
40 images.normalize();
41 println!("number of images {}", images.height);
42 println!("number of pixels in each image {}", images.width);
43
44 ModelBuilder::new()
45 .add_layer(Layer::init(28 * 28, 128, true))
46 .add_layer(Layer::init(128, 128, true))
47 .add_layer(Layer::init(128, 10, false))
48 .optimizer(Optimizer::Adam {
49 learning_step: 0.001,
50 beta1: 0.9,
51 beta2: 0.999,
52 })
53 .l2_reg(0.001)
54 .checkpoint(Checkpoint::ValAcc {
55 save_path: "mnist_128x128".to_string(),
56 })
57 .verbose(10, false)
58 .build_and_train(&images, &labels, 128, 10, 2000);
59}More examples
examples/spiral.rs (lines 25-27)
6pub fn main() {
7 // generating the spiral dataset points
8 // 3000 points, spread into three classes (here a class = one spiral)
9 let (data, labels) = generate_spiral_dataset(3000, 3);
10
11 // Layer::init(number_of_inputs: u32, number_of_neurons_for_the_layer: u32, reLu: bool)
12 // if the last arg is true, applies ReLu as the activation function
13 // by default softmax is applied to the last layer
14
15 // One point of the spiral dataset consists of a X and a Y
16 // So the first layer has 2 inputs
17 // The last layer has 3 neurons because we have 3 classes, and therefore we want 3 outputs
18
19 // build and train
20    // (data: &Matrix, labels: &Matrix, batch_size: u32, number_of_epochs: u32, size_of_the_validation_dataset: usize)
21 let _ = ModelBuilder::new()
22 .add_layer(Layer::init(2, 10, true))
23 .add_layer(Layer::init(10, 10, true))
24 .add_layer(Layer::init(10, 3, false))
25 .optimizer(Optimizer::SGD {
26 learning_step: 0.001,
27 })
28 .l2_reg(0.0001)
29 .build_and_train(&data, &labels, 128, 10, 500);
30}
Source
pub fn l2_reg(self, lambda: f64) -> ModelBuilder
pub fn l2_reg(self, lambda: f64) -> ModelBuilder
Examples found in repository?
examples/mnist.rs (line 53)
34pub fn training() {
35 println!("extracting mnist data...");
36 let labels: Matrix = extract_labels("train-labels.idx1-ubyte");
37 let mut images: Matrix = extract_images("train-images.idx3-ubyte");
38 println!("extraction done");
39
40 images.normalize();
41 println!("number of images {}", images.height);
42 println!("number of pixels in each image {}", images.width);
43
44 ModelBuilder::new()
45 .add_layer(Layer::init(28 * 28, 128, true))
46 .add_layer(Layer::init(128, 128, true))
47 .add_layer(Layer::init(128, 10, false))
48 .optimizer(Optimizer::Adam {
49 learning_step: 0.001,
50 beta1: 0.9,
51 beta2: 0.999,
52 })
53 .l2_reg(0.001)
54 .checkpoint(Checkpoint::ValAcc {
55 save_path: "mnist_128x128".to_string(),
56 })
57 .verbose(10, false)
58 .build_and_train(&images, &labels, 128, 10, 2000);
59}More examples
examples/spiral.rs (line 28)
6pub fn main() {
7 // generating the spiral dataset points
8 // 3000 points, spread into three classes (here a class = one spiral)
9 let (data, labels) = generate_spiral_dataset(3000, 3);
10
11 // Layer::init(number_of_inputs: u32, number_of_neurons_for_the_layer: u32, reLu: bool)
12 // if the last arg is true, applies ReLu as the activation function
13 // by default softmax is applied to the last layer
14
15 // One point of the spiral dataset consists of a X and a Y
16 // So the first layer has 2 inputs
17 // The last layer has 3 neurons because we have 3 classes, and therefore we want 3 outputs
18
19 // build and train
20    // (data: &Matrix, labels: &Matrix, batch_size: u32, number_of_epochs: u32, size_of_the_validation_dataset: usize)
21 let _ = ModelBuilder::new()
22 .add_layer(Layer::init(2, 10, true))
23 .add_layer(Layer::init(10, 10, true))
24 .add_layer(Layer::init(10, 3, false))
25 .optimizer(Optimizer::SGD {
26 learning_step: 0.001,
27 })
28 .l2_reg(0.0001)
29 .build_and_train(&data, &labels, 128, 10, 500);
30}
Source
pub fn checkpoint(self, checkpoint: Checkpoint) -> ModelBuilder
pub fn checkpoint(self, checkpoint: Checkpoint) -> ModelBuilder
Examples found in repository?
examples/mnist.rs (lines 54-56)
34pub fn training() {
35 println!("extracting mnist data...");
36 let labels: Matrix = extract_labels("train-labels.idx1-ubyte");
37 let mut images: Matrix = extract_images("train-images.idx3-ubyte");
38 println!("extraction done");
39
40 images.normalize();
41 println!("number of images {}", images.height);
42 println!("number of pixels in each image {}", images.width);
43
44 ModelBuilder::new()
45 .add_layer(Layer::init(28 * 28, 128, true))
46 .add_layer(Layer::init(128, 128, true))
47 .add_layer(Layer::init(128, 10, false))
48 .optimizer(Optimizer::Adam {
49 learning_step: 0.001,
50 beta1: 0.9,
51 beta2: 0.999,
52 })
53 .l2_reg(0.001)
54 .checkpoint(Checkpoint::ValAcc {
55 save_path: "mnist_128x128".to_string(),
56 })
57 .verbose(10, false)
58 .build_and_train(&images, &labels, 128, 10, 2000);
59}
Source
pub fn verbose(self, print_frequency: usize, silent_mode: bool) -> ModelBuilder
pub fn verbose(self, print_frequency: usize, silent_mode: bool) -> ModelBuilder
Examples found in repository?
examples/mnist.rs (line 57)
34pub fn training() {
35 println!("extracting mnist data...");
36 let labels: Matrix = extract_labels("train-labels.idx1-ubyte");
37 let mut images: Matrix = extract_images("train-images.idx3-ubyte");
38 println!("extraction done");
39
40 images.normalize();
41 println!("number of images {}", images.height);
42 println!("number of pixels in each image {}", images.width);
43
44 ModelBuilder::new()
45 .add_layer(Layer::init(28 * 28, 128, true))
46 .add_layer(Layer::init(128, 128, true))
47 .add_layer(Layer::init(128, 10, false))
48 .optimizer(Optimizer::Adam {
49 learning_step: 0.001,
50 beta1: 0.9,
51 beta2: 0.999,
52 })
53 .l2_reg(0.001)
54 .checkpoint(Checkpoint::ValAcc {
55 save_path: "mnist_128x128".to_string(),
56 })
57 .verbose(10, false)
58 .build_and_train(&images, &labels, 128, 10, 2000);
59}pub fn debug(self, debug: bool) -> ModelBuilder
pub fn build(self) -> Model
Sourcepub fn build_and_train(
self,
data: &Matrix,
labels: &Matrix,
batch_size: u32,
epochs: u32,
validation_dataset_size: usize,
)
pub fn build_and_train( self, data: &Matrix, labels: &Matrix, batch_size: u32, epochs: u32, validation_dataset_size: usize, )
Examples found in repository?
examples/mnist.rs (line 58)
34pub fn training() {
35 println!("extracting mnist data...");
36 let labels: Matrix = extract_labels("train-labels.idx1-ubyte");
37 let mut images: Matrix = extract_images("train-images.idx3-ubyte");
38 println!("extraction done");
39
40 images.normalize();
41 println!("number of images {}", images.height);
42 println!("number of pixels in each image {}", images.width);
43
44 ModelBuilder::new()
45 .add_layer(Layer::init(28 * 28, 128, true))
46 .add_layer(Layer::init(128, 128, true))
47 .add_layer(Layer::init(128, 10, false))
48 .optimizer(Optimizer::Adam {
49 learning_step: 0.001,
50 beta1: 0.9,
51 beta2: 0.999,
52 })
53 .l2_reg(0.001)
54 .checkpoint(Checkpoint::ValAcc {
55 save_path: "mnist_128x128".to_string(),
56 })
57 .verbose(10, false)
58 .build_and_train(&images, &labels, 128, 10, 2000);
59}
More examples
examples/spiral.rs (line 29)
6pub fn main() {
7 // generating the spiral dataset points
8 // 3000 points, spread into three classes (here a class = one spiral)
9 let (data, labels) = generate_spiral_dataset(3000, 3);
10
11 // Layer::init(number_of_inputs: u32, number_of_neurons_for_the_layer: u32, reLu: bool)
12 // if the last arg is true, applies ReLu as the activation function
13 // by default softmax is applied to the last layer
14
15 // One point of the spiral dataset consists of a X and a Y
16 // So the first layer has 2 inputs
17 // The last layer has 3 neurons because we have 3 classes, and therefore we want 3 outputs
18
19 // build and train
20    // (data: &Matrix, labels: &Matrix, batch_size: u32, number_of_epochs: u32, size_of_the_validation_dataset: usize)
21 let _ = ModelBuilder::new()
22 .add_layer(Layer::init(2, 10, true))
23 .add_layer(Layer::init(10, 10, true))
24 .add_layer(Layer::init(10, 3, false))
25 .optimizer(Optimizer::SGD {
26 learning_step: 0.001,
27 })
28 .l2_reg(0.0001)
29 .build_and_train(&data, &labels, 128, 10, 500);
30}
Trait Implementations§
Source§impl Clone for ModelBuilder
impl Clone for ModelBuilder
Source§fn clone(&self) -> ModelBuilder
fn clone(&self) -> ModelBuilder
Returns a duplicate of the value. Read more
1.0.0 · Source§fn clone_from(&mut self, source: &Self)
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from
source. Read more
Auto Trait Implementations§
impl Freeze for ModelBuilder
impl RefUnwindSafe for ModelBuilder
impl Send for ModelBuilder
impl Sync for ModelBuilder
impl Unpin for ModelBuilder
impl UnwindSafe for ModelBuilder
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T where
    T: ?Sized,
impl<T> BorrowMut<T> for T where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more