Struct TransferLearningManager

Source
pub struct TransferLearningManager<F: Float + Debug> { /* private fields */ }

Transfer learning manager. Tracks per-layer frozen/trainable state, derives layer-wise learning rates from the base learning rate, and updates layer states each epoch according to the configured TransferStrategy (for example, feature extraction with a fixed number of unfrozen layers, or progressive unfreezing).

Implementations§

Source§

impl<F: Float + Debug + 'static> TransferLearningManager<F>

Source

pub fn new(strategy: TransferStrategy, base_learning_rate: f64) -> Result<Self>

Create a new transfer learning manager

Examples found in repository?
examples/neural_advanced_features.rs (lines 311-314)
306fn demonstrate_transfer_learning() -> Result<()> {
307    println!("🔄 Transfer Learning Demonstration");
308    println!("=================================\n");
309
310    // Create transfer learning manager
311    let mut transfer_manager = TransferLearningManager::<f64>::new(
312        TransferStrategy::FeatureExtraction { unfrozen_layers: 2 },
313        0.001,
314    )?;
315
316    let layer_names = vec![
317        "backbone.conv1".to_string(),
318        "backbone.conv2".to_string(),
319        "backbone.conv3".to_string(),
320        "head.fc1".to_string(),
321        "head.fc2".to_string(),
322    ];
323
324    println!("1. Initializing transfer learning strategy...");
325    transfer_manager.initialize_layer_states(&layer_names)?;
326
327    let summary = transfer_manager.get_summary();
328    println!("   Strategy: {:?}", summary.strategy);
329    println!("   Total layers: {}", summary.total_layers);
330    println!("   Frozen layers: {}", summary.frozen_layers);
331    println!("   Trainable layers: {}", summary.trainable_layers);
332
333    println!("\n2. Layer-wise learning rates:");
334    for layer_name in &layer_names {
335        let lr = transfer_manager.get_layer_learning_rate(layer_name);
336        let frozen = transfer_manager.is_layer_frozen(layer_name);
337        println!("   {}: lr={:.6}, frozen={}", layer_name, lr, frozen);
338    }
339
340    // Simulate progressive unfreezing
341    println!("\n3. Progressive unfreezing simulation:");
342    for epoch in [5, 10, 15] {
343        transfer_manager.update_epoch(epoch)?;
344        println!(
345            "   Epoch {}: {} frozen layers",
346            epoch,
347            transfer_manager.get_summary().frozen_layers
348        );
349    }
350
351    println!("✅ Transfer learning demonstration completed!\n");
352    Ok(())
353}
Source

pub fn initialize_layer_states(&mut self, layer_names: &[String]) -> Result<()>

Initialize layer states based on the transfer strategy

Examples found in repository?
examples/neural_advanced_features.rs (line 325)
Source

pub fn update_epoch(&mut self, epoch: usize) -> Result<()>

Update layer states at the beginning of each epoch
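A minimal sketch (not taken from the repository) of where update_epoch fits in a training loop, assuming a manager created and initialized as in the example under new above; num_epochs is an illustrative placeholder.

    // Call update_epoch at the start of every epoch so the strategy (e.g.
    // progressive unfreezing) can adjust which layers are trainable.
    let num_epochs: usize = 20; // illustrative placeholder
    for epoch in 0..num_epochs {
        manager.update_epoch(epoch)?;
        // ... run the training step for this epoch, skipping frozen layers ...
    }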

Examples found in repository?
examples/neural_advanced_features.rs (line 343)
Source

pub fn unfreeze_layers(&mut self, count: usize) -> Result<()>

Unfreeze the specified number of layers from the end
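A minimal sketch (not taken from the repository) of manual unfreezing; the layer names are illustrative, and the use path for TransferLearningManager / TransferStrategy is omitted because it depends on the crate layout.

    // Start with only the last layer unfrozen, then unfreeze two from the end.
    let mut manager = TransferLearningManager::<f64>::new(
        TransferStrategy::FeatureExtraction { unfrozen_layers: 1 },
        0.001,
    )?;

    let layers: Vec<String> = ["backbone.conv1", "backbone.conv2", "head.fc1"]
        .iter()
        .map(|s| s.to_string())
        .collect();
    manager.initialize_layer_states(&layers)?;

    // Unfreeze the last two layers so they receive gradient updates.
    manager.unfreeze_layers(2)?;

    for name in &layers {
        println!("   {}: frozen={}", name, manager.is_layer_frozen(name));
    }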

Source

pub fn freeze_layers(&mut self, layer_names: &[String]) -> Result<()>

Freeze the specified layers by name
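A short sketch (not taken from the repository) of re-freezing layers by name, continuing from the manager and layer names in the unfreeze_layers sketch above.

    // Freeze the backbone layers again; frozen layers are reported by
    // is_layer_frozen and keep whatever effective learning rate the strategy
    // assigns them.
    let to_freeze = vec![
        "backbone.conv1".to_string(),
        "backbone.conv2".to_string(),
    ];
    manager.freeze_layers(&to_freeze)?;

    for name in &to_freeze {
        println!(
            "   {}: frozen={}, lr={:.6}",
            name,
            manager.is_layer_frozen(name),
            manager.get_layer_learning_rate(name)
        );
    }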

Source

pub fn get_layer_learning_rate(&self, layer_name: &str) -> F

Get effective learning rate for a layer

Examples found in repository?
examples/neural_advanced_features.rs (line 335)
Source

pub fn is_layer_frozen(&self, layer_name: &str) -> bool

Check if a layer is frozen

Examples found in repository?
examples/neural_advanced_features.rs (line 336)
Source

pub fn update_layer_statistics(
    &self,
    layer_name: String,
    gradient_magnitude: F,
    param_update_magnitude: F,
    param_count: usize,
    activation_variance: F,
) -> Result<()>

Update the recorded statistics for a layer (gradient magnitude, parameter-update magnitude, parameter count, activation variance)
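A sketch (not taken from the repository) of recording per-layer statistics after a training step, assuming a TransferLearningManager<f64> named manager as in the sketches above; all magnitudes below are placeholder numbers, not values from a real backward pass.

    // Record placeholder statistics for one layer. In real training these
    // would come from the optimizer / backward pass.
    manager.update_layer_statistics(
        "head.fc1".to_string(), // layer name (illustrative)
        0.042,                  // gradient magnitude (placeholder)
        0.0007,                 // parameter-update magnitude (placeholder)
        4096,                   // parameter count (placeholder)
        0.31,                   // activation variance (placeholder)
    )?;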

Source

pub fn get_layer_statistics(&self) -> Result<HashMap<String, LayerStatistics<F>>>

Get the recorded per-layer statistics, keyed by layer name
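A sketch (not taken from the repository) of reading the collected statistics back, continuing from the manager above; the fields of LayerStatistics are not documented on this page, so only the map keys are used here.

    // Retrieve the statistics map (layer name -> LayerStatistics).
    let stats = manager.get_layer_statistics()?;
    for (layer_name, _layer_stats) in &stats {
        println!("   collected statistics for {}", layer_name);
    }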

Source

pub fn get_summary(&self) -> TransferLearningState

Get summary of current transfer learning state

Examples found in repository?
examples/neural_advanced_features.rs (line 327)

Auto Trait Implementations§

§

impl<F> Freeze for TransferLearningManager<F>
where F: Freeze,

§

impl<F> RefUnwindSafe for TransferLearningManager<F>
where F: RefUnwindSafe,

§

impl<F> Send for TransferLearningManager<F>
where F: Send + Sync,

§

impl<F> Sync for TransferLearningManager<F>
where F: Sync + Send,

§

impl<F> Unpin for TransferLearningManager<F>
where F: Unpin,

§

impl<F> UnwindSafe for TransferLearningManager<F>
where F: UnwindSafe,

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T> IntoEither for T

Source§

fn into_either(self, into_left: bool) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise. Read more
Source§

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
where F: FnOnce(&Self) -> bool,

Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise. Read more
Source§

impl<T> Pointable for T

Source§

const ALIGN: usize

The alignment of the pointer.
Source§

type Init = T

The type for initializers.
Source§

unsafe fn init(init: <T as Pointable>::Init) -> usize

Initializes a value with the given initializer. Read more
Source§

unsafe fn deref<'a>(ptr: usize) -> &'a T

Dereferences the given pointer. Read more
Source§

unsafe fn deref_mut<'a>(ptr: usize) -> &'a mut T

Mutably dereferences the given pointer. Read more
Source§

unsafe fn drop(ptr: usize)

Drops the object pointed to by the given pointer. Read more
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

Source§

fn vzip(self) -> V