Struct Study 

Source
pub struct Study<V = f64>
where V: PartialOrd,
{ /* private fields */ }

A study manages the optimization process, tracking trials and their results.

The study is parameterized by the objective value type V, which defaults to f64. The only constraint on V is PartialOrd, allowing comparison of objective values to determine which trial is best.

When V = f64, the study can pass trial history to the sampler for informed parameter suggestions (e.g., the TPE sampler uses history to guide sampling); see create_trial_with_sampler and optimize_with_sampler.

§Examples

use optimizer::{Direction, Study};

// Create a study to minimize an objective function
let study: Study<f64> = Study::new(Direction::Minimize);
assert_eq!(study.direction(), Direction::Minimize);
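
Because the only bound on V is PartialOrd, objective values need not be f64. A minimal sketch with an integer objective (note that the sampler history integration described below applies only to Study<f64>):

use optimizer::{Direction, Study};

// Track an integer cost; any PartialOrd + Clone type works with best_value().
let study: Study<u64> = Study::new(Direction::Minimize);
let trial = study.create_trial();
study.complete_trial(trial, 42u64);
assert_eq!(study.best_value().unwrap(), 42);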

Implementations§

Source§

impl<V> Study<V>
where V: PartialOrd,

Source

pub fn new(direction: Direction) -> Self

Creates a new study with the given optimization direction.

Uses the default RandomSampler for parameter sampling.

§Arguments
  • direction - Whether to minimize or maximize the objective function.
§Examples
use optimizer::{Direction, Study};

let study: Study<f64> = Study::new(Direction::Minimize);
assert_eq!(study.direction(), Direction::Minimize);
Source

pub fn with_sampler(direction: Direction, sampler: impl Sampler + 'static) -> Self

Creates a new study with a custom sampler.

§Arguments
  • direction - Whether to minimize or maximize the objective function.
  • sampler - The sampler to use for parameter sampling.
§Examples
use optimizer::sampler::random::RandomSampler;
use optimizer::{Direction, Study};

let sampler = RandomSampler::with_seed(42);
let study: Study<f64> = Study::with_sampler(Direction::Maximize, sampler);
assert_eq!(study.direction(), Direction::Maximize);
Source

pub fn direction(&self) -> Direction

Returns the optimization direction.

Source

pub fn set_sampler(&mut self, sampler: impl Sampler + 'static)

Sets a new sampler for the study.

§Arguments
  • sampler - The sampler to use for parameter sampling.
§Examples
use optimizer::sampler::tpe::TpeSampler;
use optimizer::{Direction, Study};

let mut study: Study<f64> = Study::new(Direction::Minimize);
study.set_sampler(TpeSampler::new());
Source

pub fn create_trial(&self) -> Trial

Creates a new trial with a unique ID.

The trial starts in the Running state and can be used to suggest parameter values. After the objective function is evaluated, call complete_trial or fail_trial to record the result.

Note: For Study<f64>, this method creates a trial without sampler integration. Use create_trial_with_sampler() to create trials that use the study’s sampler and have access to trial history.

§Examples
use optimizer::{Direction, Study};

let study: Study<f64> = Study::new(Direction::Minimize);
let trial = study.create_trial();
assert_eq!(trial.id(), 0);

let trial2 = study.create_trial();
assert_eq!(trial2.id(), 1);
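
A minimal sketch of the create/evaluate/complete loop described above (with this method, suggestions come from the default sampler and do not use history):

use optimizer::{Direction, Study};

let study: Study<f64> = Study::new(Direction::Minimize);

// Manual "ask and tell" loop: create a trial, evaluate it, record the result.
for _ in 0..5 {
    let mut trial = study.create_trial();
    let x = trial.suggest_float("x", -1.0, 1.0).unwrap();
    study.complete_trial(trial, x * x);
}

assert_eq!(study.n_trials(), 5);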
Source

pub fn complete_trial(&self, trial: Trial, value: V)

Records a completed trial with its objective value.

This method stores the trial’s parameters, distributions, and objective value in the study’s history. The stored data is used by samplers to inform future parameter suggestions.

§Arguments
  • trial - The trial that was evaluated.
  • value - The objective value returned by the objective function.
§Examples
use optimizer::{Direction, Study};

let study: Study<f64> = Study::new(Direction::Minimize);
let mut trial = study.create_trial();
let x = trial.suggest_float("x", 0.0, 1.0).unwrap();
let objective_value = x * x;
study.complete_trial(trial, objective_value);

assert_eq!(study.n_trials(), 1);
Source

pub fn fail_trial(&self, trial: Trial, _error: impl ToString)

Records a failed trial with an error message.

Failed trials are not stored in the study’s history and do not contribute to future sampling decisions. This method is useful when the objective function returns an error that should not stop the optimization process.

§Arguments
  • trial - The trial that failed.
  • _error - An error message describing why the trial failed.
§Examples
use optimizer::{Direction, Study};

let study: Study<f64> = Study::new(Direction::Minimize);
let trial = study.create_trial();
study.fail_trial(trial, "objective function raised an exception");

// Failed trials are not counted
assert_eq!(study.n_trials(), 0);
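
A sketch of the pattern this enables: evaluate a fallible objective yourself and record each outcome with complete_trial or fail_trial, so a single bad evaluation does not end the run (the error payload here is a plain String, which satisfies ToString):

use optimizer::{Direction, Study};

let study: Study<f64> = Study::new(Direction::Minimize);

for i in 0..4 {
    let mut trial = study.create_trial();
    let x = trial.suggest_float("x", 0.0, 1.0).unwrap();
    // Hypothetical evaluation that fails on every other trial.
    let result: Result<f64, String> = if i % 2 == 0 {
        Ok(x * x)
    } else {
        Err("simulation diverged".to_string())
    };
    match result {
        Ok(value) => study.complete_trial(trial, value),
        Err(err) => study.fail_trial(trial, err),
    }
}

// Only the two successful evaluations are counted.
assert_eq!(study.n_trials(), 2);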
Source

pub fn trials(&self) -> Vec<CompletedTrial<V>>
where V: Clone,

Returns all completed trials.

The returned CompletedTrial values contain each trial’s parameters, distributions, and objective value.

Note: This method acquires a read lock on the completed trials, so the returned vector is a clone of the internal storage.

§Examples
use optimizer::{Direction, Study};

let study: Study<f64> = Study::new(Direction::Minimize);
let mut trial = study.create_trial();
let _ = trial.suggest_float("x", 0.0, 1.0);
study.complete_trial(trial, 0.5);

for completed in study.trials() {
    println!("Trial {} has value {:?}", completed.id, completed.value);
}
Source

pub fn n_trials(&self) -> usize

Returns the number of completed trials.

Failed trials are not counted.

§Examples
use optimizer::{Direction, Study};

let study: Study<f64> = Study::new(Direction::Minimize);
assert_eq!(study.n_trials(), 0);

let mut trial = study.create_trial();
let _ = trial.suggest_float("x", 0.0, 1.0);
study.complete_trial(trial, 0.5);
assert_eq!(study.n_trials(), 1);
Source

pub fn best_trial(&self) -> Result<CompletedTrial<V>>
where V: Clone,

Returns the trial with the best objective value.

The “best” trial depends on the optimization direction:

  • Direction::Minimize: Returns the trial with the lowest objective value.
  • Direction::Maximize: Returns the trial with the highest objective value.
§Errors

Returns Error::NoCompletedTrials if no trials have been completed.

§Examples
use optimizer::{Direction, Study};

let study: Study<f64> = Study::new(Direction::Minimize);

// Error when no trials completed
assert!(study.best_trial().is_err());

let mut trial1 = study.create_trial();
let _ = trial1.suggest_float("x", 0.0, 1.0);
study.complete_trial(trial1, 0.8);

let mut trial2 = study.create_trial();
let _ = trial2.suggest_float("x", 0.0, 1.0);
study.complete_trial(trial2, 0.3);

let best = study.best_trial().unwrap();
assert_eq!(best.value, 0.3); // Minimize: lower is better
Source

pub fn best_value(&self) -> Result<V>
where V: Clone,

Returns the best objective value found so far.

The “best” value depends on the optimization direction:

  • Direction::Minimize: Returns the lowest objective value.
  • Direction::Maximize: Returns the highest objective value.
§Errors

Returns Error::NoCompletedTrials if no trials have been completed.

§Examples
use optimizer::{Direction, Study};

let study: Study<f64> = Study::new(Direction::Maximize);

// Error when no trials completed
assert!(study.best_value().is_err());

let mut trial1 = study.create_trial();
let _ = trial1.suggest_float("x", 0.0, 1.0);
study.complete_trial(trial1, 0.3);

let mut trial2 = study.create_trial();
let _ = trial2.suggest_float("x", 0.0, 1.0);
study.complete_trial(trial2, 0.8);

let best = study.best_value().unwrap();
assert_eq!(best, 0.8); // Maximize: higher is better
Source

pub fn optimize<F, E>(&self, n_trials: usize, objective: F) -> Result<()>
where F: FnMut(&mut Trial) -> Result<V, E>, E: ToString,

Runs optimization with the given objective function.

This method runs n_trials evaluations sequentially. For each trial:

  1. A new trial is created
  2. The objective function is called with the trial
  3. If successful, the trial is recorded as completed
  4. If the objective returns an error, the trial is recorded as failed

Failed trials do not stop the optimization; the process continues with the next trial.

§Arguments
  • n_trials - The number of trials to run.
  • objective - A closure that takes a mutable reference to a Trial and returns the objective value or an error.
§Errors

Returns Error::NoCompletedTrials if all trials failed (no successful trials).

§Examples
use optimizer::sampler::random::RandomSampler;
use optimizer::{Direction, Study};

// Minimize x^2
let sampler = RandomSampler::with_seed(42);
let study: Study<f64> = Study::with_sampler(Direction::Minimize, sampler);

study
    .optimize(10, |trial| {
        let x = trial.suggest_float("x", -10.0, 10.0)?;
        Ok::<_, optimizer::Error>(x * x)
    })
    .unwrap();

// At least one trial should have completed
assert!(study.n_trials() > 0);
let best = study.best_value().unwrap();
assert!(best >= 0.0);
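
Since failed trials do not stop the run, the objective can simply return an error for points it rejects. A sketch using a plain String as the error type (any E: ToString works); this assumes the error returned by suggest_float implements ToString, as the other examples suggest:

use optimizer::sampler::random::RandomSampler;
use optimizer::{Direction, Study};

let sampler = RandomSampler::with_seed(42);
let study: Study<f64> = Study::with_sampler(Direction::Minimize, sampler);

// Ignore the overall result; it is an error only if every single trial failed.
let _ = study.optimize(20, |trial| {
    let x = trial.suggest_float("x", -10.0, 10.0).map_err(|e| e.to_string())?;
    if x < 0.0 {
        // Treat negative samples as an evaluation failure, for illustration.
        return Err("x must be non-negative".to_string());
    }
    Ok(x * x)
});

// Failed trials are skipped, so at most 20 trials are recorded.
assert!(study.n_trials() <= 20);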
Source

pub fn optimize_with_callback<F, C, E>(&self, n_trials: usize, objective: F, callback: C) -> Result<()>
where V: Clone, F: FnMut(&mut Trial) -> Result<V, E>, C: FnMut(&Study<V>, &CompletedTrial<V>) -> ControlFlow<()>, E: ToString,

Runs optimization with a callback for monitoring progress.

This method is similar to optimize, but calls a callback function after each completed trial. The callback can inspect the study state and the completed trial, and can optionally stop optimization early by returning ControlFlow::Break(()).

§Arguments
  • n_trials - The maximum number of trials to run.
  • objective - A closure that takes a mutable reference to a Trial and returns the objective value or an error.
  • callback - A closure called after each successful trial. Returns ControlFlow::Continue(()) to proceed or ControlFlow::Break(()) to stop.
§Errors

Returns Error::NoCompletedTrials if no trials completed successfully before optimization stopped, whether it ran all n_trials or the callback stopped it early. Returns Error::Internal if a completed trial cannot be found after it was added (an internal invariant violation).

§Examples
use std::ops::ControlFlow;

use optimizer::sampler::random::RandomSampler;
use optimizer::{Direction, Study};

// Stop early when we find a good enough value
let sampler = RandomSampler::with_seed(42);
let study: Study<f64> = Study::with_sampler(Direction::Minimize, sampler);

study
    .optimize_with_callback(
        100,
        |trial| {
            let x = trial.suggest_float("x", -10.0, 10.0)?;
            Ok::<_, optimizer::Error>(x * x)
        },
        |_study, completed_trial| {
            // Stop early if we find a value less than 1.0
            if completed_trial.value < 1.0 {
                ControlFlow::Break(())
            } else {
                ControlFlow::Continue(())
            }
        },
    )
    .unwrap();

// May have stopped early, but should have at least one trial
assert!(study.n_trials() > 0);
Source§

impl Study<f64>

Source

pub fn create_trial_with_sampler(&self) -> Trial

Creates a new trial with sampler integration.

This method creates a trial that uses the study’s sampler and has access to the history of completed trials for informed parameter suggestions. This is the recommended way to create trials when using Study<f64>.

The trial’s suggest_* methods will delegate to the sampler (e.g., TPE) which can use historical trial data to make informed sampling decisions.

§Examples
use optimizer::sampler::random::RandomSampler;
use optimizer::{Direction, Study};

// With a seeded sampler for reproducibility
let sampler = RandomSampler::with_seed(42);
let study: Study<f64> = Study::with_sampler(Direction::Minimize, sampler);
let mut trial = study.create_trial_with_sampler();

// Parameter suggestions now use the study's sampler and history
let x = trial.suggest_float("x", 0.0, 1.0).unwrap();
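
For a history-aware manual loop, each trial can be created with this method and then completed, so later suggestions see earlier results. A sketch using the TpeSampler shown elsewhere in these docs:

use optimizer::sampler::tpe::TpeSampler;
use optimizer::{Direction, Study};

let study: Study<f64> = Study::with_sampler(Direction::Minimize, TpeSampler::new());

for _ in 0..10 {
    // Each new trial has access to the trials completed so far.
    let mut trial = study.create_trial_with_sampler();
    let x = trial.suggest_float("x", -5.0, 5.0).unwrap();
    study.complete_trial(trial, x * x);
}

assert_eq!(study.n_trials(), 10);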
Source

pub fn optimize_with_sampler<F, E>(&self, n_trials: usize, objective: F) -> Result<()>
where F: FnMut(&mut Trial) -> Result<f64, E>, E: ToString,

Runs optimization with full sampler integration.

This method is similar to the generic optimize method, but it creates trials using create_trial_with_sampler(), giving the sampler access to the history of completed trials for informed parameter suggestions.

This is the recommended way to run optimization when using Study<f64> with advanced samplers like TPE.

§Arguments
  • n_trials - The number of trials to run.
  • objective - A closure that takes a mutable reference to a Trial and returns the objective value or an error.
§Errors

Returns Error::NoCompletedTrials if all trials failed (no successful trials).

§Examples
use optimizer::sampler::random::RandomSampler;
use optimizer::{Direction, Study};

// Minimize x^2 with sampler integration
let sampler = RandomSampler::with_seed(42);
let study: Study<f64> = Study::with_sampler(Direction::Minimize, sampler);

study
    .optimize_with_sampler(10, |trial| {
        let x = trial.suggest_float("x", -10.0, 10.0)?;
        Ok::<_, optimizer::Error>(x * x)
    })
    .unwrap();

// At least one trial should have completed
assert!(study.n_trials() > 0);
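
A sketch of the same call with the TPE sampler, where the history-aware suggestions are the reason to prefer optimize_with_sampler over optimize:

use optimizer::sampler::tpe::TpeSampler;
use optimizer::{Direction, Study};

// TPE uses the completed-trial history to focus sampling on promising regions.
let study: Study<f64> = Study::with_sampler(Direction::Minimize, TpeSampler::new());

study
    .optimize_with_sampler(30, |trial| {
        let x = trial.suggest_float("x", -10.0, 10.0)?;
        Ok::<_, optimizer::Error>(x * x)
    })
    .unwrap();

assert!(study.n_trials() > 0);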
Source

pub fn optimize_with_callback_sampler<F, C, E>(&self, n_trials: usize, objective: F, callback: C) -> Result<()>
where F: FnMut(&mut Trial) -> Result<f64, E>, C: FnMut(&Study<f64>, &CompletedTrial<f64>) -> ControlFlow<()>, E: ToString,

Runs optimization with a callback and full sampler integration.

This method combines the benefits of optimize_with_sampler (sampler access to trial history) with optimize_with_callback (progress monitoring and early stopping).

§Arguments
  • n_trials - The maximum number of trials to run.
  • objective - A closure that takes a mutable reference to a Trial and returns the objective value or an error.
  • callback - A closure called after each successful trial. Returns ControlFlow::Continue(()) to proceed or ControlFlow::Break(()) to stop.
§Errors

Returns Error::NoCompletedTrials if no trials completed successfully. Returns Error::Internal if a completed trial cannot be found after it was added (an internal invariant violation).

§Examples
use std::ops::ControlFlow;

use optimizer::sampler::random::RandomSampler;
use optimizer::{Direction, Study};

// Optimize with sampler integration and early stopping
let sampler = RandomSampler::with_seed(42);
let study: Study<f64> = Study::with_sampler(Direction::Minimize, sampler);

study
    .optimize_with_callback_sampler(
        100,
        |trial| {
            let x = trial.suggest_float("x", -10.0, 10.0)?;
            Ok::<_, optimizer::Error>(x * x)
        },
        |study, _completed_trial| {
            // Stop after finding 5 good trials
            if study.n_trials() >= 5 {
                ControlFlow::Break(())
            } else {
                ControlFlow::Continue(())
            }
        },
    )
    .unwrap();

assert!(study.n_trials() >= 5);

Auto Trait Implementations§

§

impl<V = f64> !Freeze for Study<V>

§

impl<V = f64> !RefUnwindSafe for Study<V>

§

impl<V> Send for Study<V>
where V: Send + Sync,

§

impl<V> Sync for Study<V>
where V: Send + Sync,

§

impl<V> Unpin for Study<V>

§

impl<V = f64> !UnwindSafe for Study<V>

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self.
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value.
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

Source§

fn vzip(self) -> V