1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
use std::{cell::RefCell, rc::Rc};
use ordered_float::NotNan;
use super::{
GenericLocalSearchOptimizer, LocalSearchOptimizer,
metropolis::{metropolis_transition, tune_temperature},
};
use crate::{
Duration, OptModel,
callback::{OptCallbackFn, OptProgress},
};
/// Tune the cooling rate so that `initial_beta` grows to `final_beta`
/// after `n_iter` multiplicative updates (`beta_{k+1} = beta_k * rate`).
///
/// - `initial_beta` : initial inverse temperature (must be non-zero)
/// - `final_beta` : final inverse temperature
/// - `n_iter` : number of temperature updates
/// - returns : per-update cooling rate
///
/// # Panics
/// Panics if `n_iter` is 0, which would otherwise produce a meaningless
/// rate from a `1.0 / 0.0` exponent.
pub fn tune_cooling_rate(initial_beta: f64, final_beta: f64, n_iter: usize) -> f64 {
    assert!(n_iter > 0, "n_iter must be positive to derive a cooling rate");
    // Solve initial_beta * rate^n_iter == final_beta for rate.
    (final_beta / initial_beta).powf(1.0 / n_iter as f64)
}
/// Optimizer that implements the simulated annealing algorithm.
///
/// Acceptance of uphill moves is governed by an inverse temperature `beta`
/// that starts at `initial_beta` and is multiplied by `cooling_rate` every
/// `update_frequency` iterations (see the `optimize` implementation).
#[derive(Clone, Copy)]
pub struct SimulatedAnnealingOptimizer {
    /// The optimizer will give up if there is no improvement of the score after this number of iterations
    patience: usize,
    /// Number of trial solutions to generate and evaluate at each iteration
    n_trials: usize,
    /// Returns to the best solution if there is no improvement after this number of iterations
    return_iter: usize,
    /// Initial inverse temperature (larger beta = colder = stricter acceptance)
    initial_beta: f64,
    /// Cooling rate: multiplicative factor applied to beta at each temperature update
    cooling_rate: f64,
    /// Number of steps after which temperature is updated
    update_frequency: usize,
}
impl SimulatedAnnealingOptimizer {
    /// Constructor of SimulatedAnnealingOptimizer
    ///
    /// - `patience` : the optimizer will give up
    ///   if there is no improvement of the score after this number of iterations
    /// - `n_trials` : number of trial solutions to generate and evaluate at each iteration
    /// - `return_iter` : returns to the best solution if there is no improvement after this number of iterations.
    /// - `initial_beta` : initial inverse temperature
    /// - `cooling_rate` : cooling rate
    /// - `update_frequency` : number of steps after which inverse temperature (beta) is updated
    pub fn new(
        patience: usize,
        n_trials: usize,
        return_iter: usize,
        initial_beta: f64,
        cooling_rate: f64,
        update_frequency: usize,
    ) -> Self {
        Self {
            patience,
            n_trials,
            return_iter,
            initial_beta,
            cooling_rate,
            update_frequency,
        }
    }
    /// Tune inverse temperature parameter beta based on initial random trials
    /// - `model` : the model to optimize
    /// - `initial_solution` : the initial solution to start optimization. If None, a random solution will be generated.
    /// - `n_warmup` : number of warmup iterations to run
    /// - `target_initial_prob` : target acceptance probability for uphill moves at the beginning
    pub fn tune_initial_temperature<M: OptModel<ScoreType = NotNan<f64>>>(
        self,
        model: &M,
        initial_solution: Option<(M::SolutionType, M::ScoreType)>,
        n_warmup: usize,
        target_initial_prob: f64,
    ) -> Self {
        let tuned_beta = tune_temperature(model, initial_solution, n_warmup, target_initial_prob);
        Self {
            initial_beta: tuned_beta,
            ..self
        }
    }
    /// Tune cooling rate based on `self.initial_beta` and a final beta of 1e2,
    /// given that the run will perform `n_iter` iterations.
    ///
    /// # Panics
    /// Panics if `self.update_frequency` is 0 (integer division by zero).
    pub fn tune_cooling_rate(self, n_iter: usize) -> Self {
        // Integer division truncates: when n_iter < update_frequency the
        // number of temperature updates would be 0, which makes the derived
        // rate meaningless. Clamp to at least one update.
        let n_updates = (n_iter / self.update_frequency).max(1);
        let cooling_rate = tune_cooling_rate(self.initial_beta, 1e2, n_updates);
        Self {
            cooling_rate,
            ..self
        }
    }
}
impl<M: OptModel<ScoreType = NotNan<f64>>> LocalSearchOptimizer<M> for SimulatedAnnealingOptimizer {
    /// Start optimization
    ///
    /// - `model` : the model to optimize
    /// - `initial_solution` : the initial solution to start optimization
    /// - `initial_score` : the initial score of the initial solution
    /// - `n_iter`: maximum iterations
    /// - `time_limit`: maximum iteration time
    /// - `callback` : callback function that will be invoked at the end of each iteration
    fn optimize(
        &self,
        model: &M,
        initial_solution: M::SolutionType,
        initial_score: M::ScoreType,
        n_iter: usize,
        time_limit: Duration,
        callback: &mut dyn OptCallbackFn<M::SolutionType, M::ScoreType>,
    ) -> (M::SolutionType, M::ScoreType) {
        // The inverse temperature is shared between two closures: the
        // transition function reads it and the callback wrapper updates it,
        // hence the single-threaded Rc<RefCell<_>> pair.
        let current_beta = Rc::new(RefCell::new(self.initial_beta));
        let transition = {
            // NOTE: fixes a source-corruption bug — the original read
            // `Rc::clone(¤t_beta)` (mangled `&current_beta`), which does
            // not compile.
            let current_beta = Rc::clone(&current_beta);
            move |current: NotNan<f64>, trial: NotNan<f64>| {
                metropolis_transition(*current_beta.borrow())(current, trial)
            }
        };
        // Cool the temperature every `update_frequency` iterations (skipping
        // iteration 0), then forward progress to the user callback.
        let mut callback_with_update = |progress: OptProgress<M::SolutionType, M::ScoreType>| {
            if progress.iter % self.update_frequency == 0 && progress.iter > 0 {
                let new_beta = *current_beta.borrow() * self.cooling_rate;
                current_beta.replace(new_beta);
            }
            callback(progress);
        };
        // Simulated annealing is the generic local search driven by a
        // Metropolis acceptance criterion at the current temperature.
        let generic_optimizer = GenericLocalSearchOptimizer::new(
            self.patience,
            self.n_trials,
            self.return_iter,
            transition,
        );
        generic_optimizer.optimize(
            model,
            initial_solution,
            initial_score,
            n_iter,
            time_limit,
            &mut callback_with_update,
        )
    }
}