pub struct LineBuilder<'a> { /* private fields */ }
Builder for a line series.
Implementations§
impl<'a> LineBuilder<'a>
pub fn label(self, label: impl Into<String>) -> Self
Set the series label.
Examples found in repository?
examples/training_curves.rs (line 55)
10fn main() -> Result<()> {
11 // ── Simulate training curves ─────────────────────────────────────
12 let epochs: Vec<f64> = (1..=50).map(f64::from).collect();
13
14 // Training loss: exponential decay + noise
15 let mut rng = SimpleRng::new(42);
16 let train_loss: Vec<f64> = epochs
17 .iter()
18 .map(|&e| 2.0 * (-e / 15.0).exp() + 0.05 + rng.normal() * 0.02)
19 .collect();
20
21 // Validation loss: decays slower, starts overfitting around epoch 30
22 let val_loss: Vec<f64> = epochs
23 .iter()
24 .map(|&e| {
25 let base = 2.0 * (-e / 20.0).exp() + 0.1;
26 let overfit = if e > 30.0 { (e - 30.0) * 0.005 } else { 0.0 };
27 base + overfit + rng.normal() * 0.03
28 })
29 .collect();
30
31 // Training accuracy: rises from ~50% to ~98%
32 let train_acc: Vec<f64> = epochs
33 .iter()
34 .map(|&e| (0.98 - 0.48 * (-e / 12.0).exp()).min(1.0) + rng.normal() * 0.01)
35 .collect();
36
37 // Validation accuracy: rises but plateaus earlier
38 let val_acc: Vec<f64> = epochs
39 .iter()
40 .map(|&e| {
41 let base = 0.93 - 0.43 * (-e / 18.0).exp();
42 let overfit = if e > 30.0 { -(e - 30.0) * 0.002 } else { 0.0 };
43 (base + overfit + rng.normal() * 0.015).min(1.0)
44 })
45 .collect();
46
47 // ── Plot 1: Loss curves ──────────────────────────────────────────
48 let mut fig = Figure::new()
49 .size(750.0, 500.0)
50 .title("Training & Validation Loss");
51
52 let ax = fig.add_axes();
53 ax.x_label("Epoch").y_label("Loss");
54 ax.line(&epochs, &train_loss)
55 .label("Train Loss")
56 .color(Color::from_hex("#1f77b4").unwrap().into())
57 .width(2.0)
58 .done();
59 ax.line(&epochs, &val_loss)
60 .label("Val Loss")
61 .color(Color::from_hex("#ff7f0e").unwrap().into())
62 .width(2.0)
63 .dash(&[8.0, 4.0])
64 .done();
65
66 fig.save_svg("training_loss.svg")?;
67 println!("Saved training_loss.svg");
68
69 // ── Plot 2: Accuracy curves ──────────────────────────────────────
70 let mut fig2 = Figure::new()
71 .size(750.0, 500.0)
72 .title("Training & Validation Accuracy");
73
74 let ax2 = fig2.add_axes();
75 ax2.x_label("Epoch").y_label("Accuracy").y_range(0.4, 1.05);
76 ax2.line(&epochs, &train_acc)
77 .label("Train Accuracy")
78 .color(Color::from_hex("#2ca02c").unwrap().into())
79 .width(2.0)
80 .done();
81 ax2.line(&epochs, &val_acc)
82 .label("Val Accuracy")
83 .color(Color::from_hex("#d62728").unwrap().into())
84 .width(2.0)
85 .dash(&[8.0, 4.0])
86 .done();
87
88 fig2.save_svg("training_accuracy.svg")?;
89 println!("Saved training_accuracy.svg");
90
91 Ok(())
92}

pub fn color(self, color: Color) -> Self
Set the line color.
Examples found in repository?
examples/training_curves.rs (line 56)
10fn main() -> Result<()> {
11 // ── Simulate training curves ─────────────────────────────────────
12 let epochs: Vec<f64> = (1..=50).map(f64::from).collect();
13
14 // Training loss: exponential decay + noise
15 let mut rng = SimpleRng::new(42);
16 let train_loss: Vec<f64> = epochs
17 .iter()
18 .map(|&e| 2.0 * (-e / 15.0).exp() + 0.05 + rng.normal() * 0.02)
19 .collect();
20
21 // Validation loss: decays slower, starts overfitting around epoch 30
22 let val_loss: Vec<f64> = epochs
23 .iter()
24 .map(|&e| {
25 let base = 2.0 * (-e / 20.0).exp() + 0.1;
26 let overfit = if e > 30.0 { (e - 30.0) * 0.005 } else { 0.0 };
27 base + overfit + rng.normal() * 0.03
28 })
29 .collect();
30
31 // Training accuracy: rises from ~50% to ~98%
32 let train_acc: Vec<f64> = epochs
33 .iter()
34 .map(|&e| (0.98 - 0.48 * (-e / 12.0).exp()).min(1.0) + rng.normal() * 0.01)
35 .collect();
36
37 // Validation accuracy: rises but plateaus earlier
38 let val_acc: Vec<f64> = epochs
39 .iter()
40 .map(|&e| {
41 let base = 0.93 - 0.43 * (-e / 18.0).exp();
42 let overfit = if e > 30.0 { -(e - 30.0) * 0.002 } else { 0.0 };
43 (base + overfit + rng.normal() * 0.015).min(1.0)
44 })
45 .collect();
46
47 // ── Plot 1: Loss curves ──────────────────────────────────────────
48 let mut fig = Figure::new()
49 .size(750.0, 500.0)
50 .title("Training & Validation Loss");
51
52 let ax = fig.add_axes();
53 ax.x_label("Epoch").y_label("Loss");
54 ax.line(&epochs, &train_loss)
55 .label("Train Loss")
56 .color(Color::from_hex("#1f77b4").unwrap().into())
57 .width(2.0)
58 .done();
59 ax.line(&epochs, &val_loss)
60 .label("Val Loss")
61 .color(Color::from_hex("#ff7f0e").unwrap().into())
62 .width(2.0)
63 .dash(&[8.0, 4.0])
64 .done();
65
66 fig.save_svg("training_loss.svg")?;
67 println!("Saved training_loss.svg");
68
69 // ── Plot 2: Accuracy curves ──────────────────────────────────────
70 let mut fig2 = Figure::new()
71 .size(750.0, 500.0)
72 .title("Training & Validation Accuracy");
73
74 let ax2 = fig2.add_axes();
75 ax2.x_label("Epoch").y_label("Accuracy").y_range(0.4, 1.05);
76 ax2.line(&epochs, &train_acc)
77 .label("Train Accuracy")
78 .color(Color::from_hex("#2ca02c").unwrap().into())
79 .width(2.0)
80 .done();
81 ax2.line(&epochs, &val_acc)
82 .label("Val Accuracy")
83 .color(Color::from_hex("#d62728").unwrap().into())
84 .width(2.0)
85 .dash(&[8.0, 4.0])
86 .done();
87
88 fig2.save_svg("training_accuracy.svg")?;
89 println!("Saved training_accuracy.svg");
90
91 Ok(())
92}

pub fn width(self, width: f64) -> Self
Set the line width.
Examples found in repository?
examples/training_curves.rs (line 57)
10fn main() -> Result<()> {
11 // ── Simulate training curves ─────────────────────────────────────
12 let epochs: Vec<f64> = (1..=50).map(f64::from).collect();
13
14 // Training loss: exponential decay + noise
15 let mut rng = SimpleRng::new(42);
16 let train_loss: Vec<f64> = epochs
17 .iter()
18 .map(|&e| 2.0 * (-e / 15.0).exp() + 0.05 + rng.normal() * 0.02)
19 .collect();
20
21 // Validation loss: decays slower, starts overfitting around epoch 30
22 let val_loss: Vec<f64> = epochs
23 .iter()
24 .map(|&e| {
25 let base = 2.0 * (-e / 20.0).exp() + 0.1;
26 let overfit = if e > 30.0 { (e - 30.0) * 0.005 } else { 0.0 };
27 base + overfit + rng.normal() * 0.03
28 })
29 .collect();
30
31 // Training accuracy: rises from ~50% to ~98%
32 let train_acc: Vec<f64> = epochs
33 .iter()
34 .map(|&e| (0.98 - 0.48 * (-e / 12.0).exp()).min(1.0) + rng.normal() * 0.01)
35 .collect();
36
37 // Validation accuracy: rises but plateaus earlier
38 let val_acc: Vec<f64> = epochs
39 .iter()
40 .map(|&e| {
41 let base = 0.93 - 0.43 * (-e / 18.0).exp();
42 let overfit = if e > 30.0 { -(e - 30.0) * 0.002 } else { 0.0 };
43 (base + overfit + rng.normal() * 0.015).min(1.0)
44 })
45 .collect();
46
47 // ── Plot 1: Loss curves ──────────────────────────────────────────
48 let mut fig = Figure::new()
49 .size(750.0, 500.0)
50 .title("Training & Validation Loss");
51
52 let ax = fig.add_axes();
53 ax.x_label("Epoch").y_label("Loss");
54 ax.line(&epochs, &train_loss)
55 .label("Train Loss")
56 .color(Color::from_hex("#1f77b4").unwrap().into())
57 .width(2.0)
58 .done();
59 ax.line(&epochs, &val_loss)
60 .label("Val Loss")
61 .color(Color::from_hex("#ff7f0e").unwrap().into())
62 .width(2.0)
63 .dash(&[8.0, 4.0])
64 .done();
65
66 fig.save_svg("training_loss.svg")?;
67 println!("Saved training_loss.svg");
68
69 // ── Plot 2: Accuracy curves ──────────────────────────────────────
70 let mut fig2 = Figure::new()
71 .size(750.0, 500.0)
72 .title("Training & Validation Accuracy");
73
74 let ax2 = fig2.add_axes();
75 ax2.x_label("Epoch").y_label("Accuracy").y_range(0.4, 1.05);
76 ax2.line(&epochs, &train_acc)
77 .label("Train Accuracy")
78 .color(Color::from_hex("#2ca02c").unwrap().into())
79 .width(2.0)
80 .done();
81 ax2.line(&epochs, &val_acc)
82 .label("Val Accuracy")
83 .color(Color::from_hex("#d62728").unwrap().into())
84 .width(2.0)
85 .dash(&[8.0, 4.0])
86 .done();
87
88 fig2.save_svg("training_accuracy.svg")?;
89 println!("Saved training_accuracy.svg");
90
91 Ok(())
92}

pub fn dash(self, dashes: &[f64]) -> Self
Set a dash pattern.
Examples found in repository?
examples/training_curves.rs (line 63)
10fn main() -> Result<()> {
11 // ── Simulate training curves ─────────────────────────────────────
12 let epochs: Vec<f64> = (1..=50).map(f64::from).collect();
13
14 // Training loss: exponential decay + noise
15 let mut rng = SimpleRng::new(42);
16 let train_loss: Vec<f64> = epochs
17 .iter()
18 .map(|&e| 2.0 * (-e / 15.0).exp() + 0.05 + rng.normal() * 0.02)
19 .collect();
20
21 // Validation loss: decays slower, starts overfitting around epoch 30
22 let val_loss: Vec<f64> = epochs
23 .iter()
24 .map(|&e| {
25 let base = 2.0 * (-e / 20.0).exp() + 0.1;
26 let overfit = if e > 30.0 { (e - 30.0) * 0.005 } else { 0.0 };
27 base + overfit + rng.normal() * 0.03
28 })
29 .collect();
30
31 // Training accuracy: rises from ~50% to ~98%
32 let train_acc: Vec<f64> = epochs
33 .iter()
34 .map(|&e| (0.98 - 0.48 * (-e / 12.0).exp()).min(1.0) + rng.normal() * 0.01)
35 .collect();
36
37 // Validation accuracy: rises but plateaus earlier
38 let val_acc: Vec<f64> = epochs
39 .iter()
40 .map(|&e| {
41 let base = 0.93 - 0.43 * (-e / 18.0).exp();
42 let overfit = if e > 30.0 { -(e - 30.0) * 0.002 } else { 0.0 };
43 (base + overfit + rng.normal() * 0.015).min(1.0)
44 })
45 .collect();
46
47 // ── Plot 1: Loss curves ──────────────────────────────────────────
48 let mut fig = Figure::new()
49 .size(750.0, 500.0)
50 .title("Training & Validation Loss");
51
52 let ax = fig.add_axes();
53 ax.x_label("Epoch").y_label("Loss");
54 ax.line(&epochs, &train_loss)
55 .label("Train Loss")
56 .color(Color::from_hex("#1f77b4").unwrap().into())
57 .width(2.0)
58 .done();
59 ax.line(&epochs, &val_loss)
60 .label("Val Loss")
61 .color(Color::from_hex("#ff7f0e").unwrap().into())
62 .width(2.0)
63 .dash(&[8.0, 4.0])
64 .done();
65
66 fig.save_svg("training_loss.svg")?;
67 println!("Saved training_loss.svg");
68
69 // ── Plot 2: Accuracy curves ──────────────────────────────────────
70 let mut fig2 = Figure::new()
71 .size(750.0, 500.0)
72 .title("Training & Validation Accuracy");
73
74 let ax2 = fig2.add_axes();
75 ax2.x_label("Epoch").y_label("Accuracy").y_range(0.4, 1.05);
76 ax2.line(&epochs, &train_acc)
77 .label("Train Accuracy")
78 .color(Color::from_hex("#2ca02c").unwrap().into())
79 .width(2.0)
80 .done();
81 ax2.line(&epochs, &val_acc)
82 .label("Val Accuracy")
83 .color(Color::from_hex("#d62728").unwrap().into())
84 .width(2.0)
85 .dash(&[8.0, 4.0])
86 .done();
87
88 fig2.save_svg("training_accuracy.svg")?;
89 println!("Saved training_accuracy.svg");
90
91 Ok(())
92}

pub fn done(self) -> &'a mut Axes
Finish and add the series.
Examples found in repository?
examples/training_curves.rs (line 58)
10fn main() -> Result<()> {
11 // ── Simulate training curves ─────────────────────────────────────
12 let epochs: Vec<f64> = (1..=50).map(f64::from).collect();
13
14 // Training loss: exponential decay + noise
15 let mut rng = SimpleRng::new(42);
16 let train_loss: Vec<f64> = epochs
17 .iter()
18 .map(|&e| 2.0 * (-e / 15.0).exp() + 0.05 + rng.normal() * 0.02)
19 .collect();
20
21 // Validation loss: decays slower, starts overfitting around epoch 30
22 let val_loss: Vec<f64> = epochs
23 .iter()
24 .map(|&e| {
25 let base = 2.0 * (-e / 20.0).exp() + 0.1;
26 let overfit = if e > 30.0 { (e - 30.0) * 0.005 } else { 0.0 };
27 base + overfit + rng.normal() * 0.03
28 })
29 .collect();
30
31 // Training accuracy: rises from ~50% to ~98%
32 let train_acc: Vec<f64> = epochs
33 .iter()
34 .map(|&e| (0.98 - 0.48 * (-e / 12.0).exp()).min(1.0) + rng.normal() * 0.01)
35 .collect();
36
37 // Validation accuracy: rises but plateaus earlier
38 let val_acc: Vec<f64> = epochs
39 .iter()
40 .map(|&e| {
41 let base = 0.93 - 0.43 * (-e / 18.0).exp();
42 let overfit = if e > 30.0 { -(e - 30.0) * 0.002 } else { 0.0 };
43 (base + overfit + rng.normal() * 0.015).min(1.0)
44 })
45 .collect();
46
47 // ── Plot 1: Loss curves ──────────────────────────────────────────
48 let mut fig = Figure::new()
49 .size(750.0, 500.0)
50 .title("Training & Validation Loss");
51
52 let ax = fig.add_axes();
53 ax.x_label("Epoch").y_label("Loss");
54 ax.line(&epochs, &train_loss)
55 .label("Train Loss")
56 .color(Color::from_hex("#1f77b4").unwrap().into())
57 .width(2.0)
58 .done();
59 ax.line(&epochs, &val_loss)
60 .label("Val Loss")
61 .color(Color::from_hex("#ff7f0e").unwrap().into())
62 .width(2.0)
63 .dash(&[8.0, 4.0])
64 .done();
65
66 fig.save_svg("training_loss.svg")?;
67 println!("Saved training_loss.svg");
68
69 // ── Plot 2: Accuracy curves ──────────────────────────────────────
70 let mut fig2 = Figure::new()
71 .size(750.0, 500.0)
72 .title("Training & Validation Accuracy");
73
74 let ax2 = fig2.add_axes();
75 ax2.x_label("Epoch").y_label("Accuracy").y_range(0.4, 1.05);
76 ax2.line(&epochs, &train_acc)
77 .label("Train Accuracy")
78 .color(Color::from_hex("#2ca02c").unwrap().into())
79 .width(2.0)
80 .done();
81 ax2.line(&epochs, &val_acc)
82 .label("Val Accuracy")
83 .color(Color::from_hex("#d62728").unwrap().into())
84 .width(2.0)
85 .dash(&[8.0, 4.0])
86 .done();
87
88 fig2.save_svg("training_accuracy.svg")?;
89 println!("Saved training_accuracy.svg");
90
91 Ok(())
92}

Auto Trait Implementations§
impl<'a> Freeze for LineBuilder<'a>
impl<'a> !RefUnwindSafe for LineBuilder<'a>
impl<'a> !Send for LineBuilder<'a>
impl<'a> !Sync for LineBuilder<'a>
impl<'a> Unpin for LineBuilder<'a>
impl<'a> UnsafeUnpin for LineBuilder<'a>
impl<'a> !UnwindSafe for LineBuilder<'a>
Blanket Implementations§
impl<T> BorrowMut<T> for T
where
    T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more