#![allow(missing_docs)]
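//! Provides configuration types for the solver and helpers which create a solver
//! configuration builder from their JSON representation.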

#[cfg(test)]
#[path = "../../../tests/unit/extensions/solve/config_test.rs"]
mod config_test;

extern crate serde_json;

use serde::Deserialize;
use std::io::{BufReader, Read};
use std::sync::Arc;
use vrp_core::construction::heuristics::InsertionContext;
use vrp_core::models::GoalContext;
use vrp_core::prelude::*;
use vrp_core::rosomaxa::evolution::{InitialOperator, TelemetryMode};
use vrp_core::rosomaxa::get_default_selection_size;
use vrp_core::rosomaxa::prelude::*;
use vrp_core::rosomaxa::utils::*;
use vrp_core::solver::search::*;
use vrp_core::solver::RecreateInitialOperator;
use vrp_core::solver::*;

/// An algorithm configuration (see the illustrative JSON example below the struct).
#[derive(Clone, Default, Deserialize, Debug)]
pub struct Config {
    /// Specifies evolution configuration.
    pub evolution: Option<EvolutionConfig>,
    /// Specifies hyper heuristic type.
    pub hyper: Option<HyperType>,
    /// Specifies termination configuration.
    pub termination: Option<TerminationConfig>,
    /// Specifies environment configuration.
    pub environment: Option<EnvironmentConfig>,
    /// Specifies telemetry configuration.
    pub telemetry: Option<TelemetryConfig>,
    /// Specifies output configuration.
    pub output: Option<OutputConfig>,
}
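
// An illustrative JSON document accepted by `Config` (a sketch: field names follow the serde
// attributes declared in this module, values are arbitrary):
//
// {
//   "evolution": { "population": { "type": "rosomaxa", "selectionSize": 8 } },
//   "termination": { "maxGenerations": 3000, "maxTime": 300 },
//   "telemetry": { "progress": { "enabled": true, "logBest": 100 } }
// }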

#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct EvolutionConfig {
    pub initial: Option<InitialConfig>,
    pub population: Option<PopulationType>,
}

#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
#[serde(rename_all = "camelCase")]
pub enum PopulationType {
    #[serde(rename(deserialize = "greedy"))]
    #[serde(rename_all = "camelCase")]
    Greedy {
        selection_size: Option<usize>,
    },

    #[serde(rename(deserialize = "elitism"))]
    #[serde(rename_all = "camelCase")]
    Elitism {
        max_size: Option<usize>,
        selection_size: Option<usize>,
    },

    #[serde(rename(deserialize = "rosomaxa"))]
    #[serde(rename_all = "camelCase")]
    Rosomaxa {
        selection_size: Option<usize>,
        max_elite_size: Option<usize>,
        max_node_size: Option<usize>,
        spread_factor: Option<Float>,
        distribution_factor: Option<Float>,
        rebalance_memory: Option<usize>,
        exploration_ratio: Option<Float>,
    },
}

#[derive(Clone, Deserialize, Debug)]
pub struct InitialConfig {
    pub method: RecreateMethod,
    pub alternatives: InitialAlternativesConfig,
}

#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct InitialAlternativesConfig {
    pub methods: Vec<RecreateMethod>,
    pub max_size: usize,
    pub quota: Float,
}

#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
#[serde(rename_all = "camelCase")]
pub enum SelectionType {
    #[serde(rename(deserialize = "naive"))]
    Naive {
        offspring_size: Option<usize>,
    },
}

#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
pub enum HyperType {
    #[serde(rename(deserialize = "static-selective"))]
    StaticSelective {
        operators: Option<Vec<SearchOperatorType>>,
    },

    #[serde(rename(deserialize = "dynamic-selective"))]
    DynamicSelective,
}

#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
pub enum SearchOperatorType {
    #[serde(rename(deserialize = "decomposition"))]
    #[serde(rename_all = "camelCase")]
    Decomposition {
        routes: MinMaxConfig,
        repeat: usize,
        probability: OperatorProbabilityType,
    },

    #[serde(rename(deserialize = "local-search"))]
    LocalSearch {
        probability: OperatorProbabilityType,
        times: MinMaxConfig,
        operators: Vec<LocalOperatorType>,
    },

    #[serde(rename(deserialize = "ruin-recreate"))]
    RuinRecreate {
        probability: OperatorProbabilityType,
        ruins: Vec<RuinGroupConfig>,
        recreates: Vec<RecreateMethod>,
    },
}
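
// An illustrative `SearchOperatorType::RuinRecreate` definition (a sketch with arbitrary values;
// tags and field names follow the serde attributes of the types in this module):
//
// {
//   "type": "ruin-recreate",
//   "probability": { "scalar": 1.0 },
//   "ruins": [{ "methods": [{ "type": "random-job", "probability": 1.0, "min": 8, "max": 16 }], "weight": 1 }],
//   "recreates": [{ "type": "cheapest", "weight": 1 }]
// }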

#[derive(Clone, Deserialize, Debug)]
#[serde(untagged)]
pub enum OperatorProbabilityType {
    Scalar {
        scalar: Float,
    },

    Context {
        threshold: ContextThreshold,
        phases: Vec<ContextPhase>,
    },
}
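
// Being untagged, `OperatorProbabilityType` accepts either the scalar form, e.g. { "scalar": 0.05 },
// or the context form (an illustrative sketch with arbitrary values):
//
// {
//   "threshold": { "jobs": 300, "routes": 10 },
//   "phases": [{ "type": "exploration", "chance": 0.05 }, { "type": "exploitation", "chance": 0.1 }]
// }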

#[derive(Clone, Deserialize, Debug)]
pub struct ContextThreshold {
    pub jobs: usize,
    pub routes: usize,
}

#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
pub enum ContextPhase {
    #[serde(rename(deserialize = "initial"))]
    Initial {
        chance: Float,
    },

    #[serde(rename(deserialize = "exploration"))]
    Exploration {
        chance: Float,
    },

    #[serde(rename(deserialize = "exploitation"))]
    Exploitation {
        chance: Float,
    },
}

#[derive(Clone, Deserialize, Debug)]
pub struct RuinGroupConfig {
    methods: Vec<RuinMethod>,
    weight: usize,
}

#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
pub enum RuinMethod {
    #[serde(rename(deserialize = "adjusted-string"))]
    AdjustedString { probability: Float, lmax: usize, cavg: usize, alpha: Float },
    #[serde(rename(deserialize = "neighbour"))]
    Neighbour { probability: Float, min: usize, max: usize },
    #[serde(rename(deserialize = "random-job"))]
    RandomJob { probability: Float, min: usize, max: usize },
    #[serde(rename(deserialize = "random-route"))]
    RandomRoute { probability: Float, min: usize, max: usize },
    #[serde(rename(deserialize = "close-route"))]
    CloseRoute { probability: Float },
    #[serde(rename(deserialize = "worst-route"))]
    WorstRoute { probability: Float },
    #[serde(rename(deserialize = "worst-job"))]
    WorstJob { probability: Float, min: usize, max: usize, skip: usize },
    #[serde(rename(deserialize = "cluster"))]
    #[serde(rename_all = "camelCase")]
    Cluster { probability: Float, min: usize, max: usize },
}

#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
pub enum RecreateMethod {
    #[serde(rename(deserialize = "cheapest"))]
    Cheapest { weight: usize },
    #[serde(rename(deserialize = "skip-best"))]
    SkipBest { weight: usize, start: usize, end: usize },
    #[serde(rename(deserialize = "blinks"))]
    Blinks { weight: usize },
    #[serde(rename(deserialize = "gaps"))]
    Gaps { weight: usize, min: usize, max: usize },
    #[serde(rename(deserialize = "nearest"))]
    Nearest { weight: usize },
    #[serde(rename(deserialize = "skip-random"))]
    SkipRandom { weight: usize },
    #[serde(rename(deserialize = "slice"))]
    Slice { weight: usize },
    #[serde(rename(deserialize = "farthest"))]
    Farthest { weight: usize },
    #[serde(rename(deserialize = "perturbation"))]
    Perturbation { weight: usize, probability: Float, min: Float, max: Float },
    #[serde(rename(deserialize = "regret"))]
    Regret { weight: usize, start: usize, end: usize },
}

#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
pub enum LocalOperatorType {
    #[serde(rename(deserialize = "swap-star"))]
    SwapStar { weight: usize },

    #[serde(rename(deserialize = "inter-route-best"))]
    InterRouteBest { weight: usize, noise: NoiseConfig },

    #[serde(rename(deserialize = "inter-route-random"))]
    InterRouteRandom { weight: usize, noise: NoiseConfig },

    #[serde(rename(deserialize = "intra-route-random"))]
    IntraRouteRandom { weight: usize, noise: NoiseConfig },

    #[serde(rename(deserialize = "sequence"))]
    Sequence { weight: usize },
}

#[derive(Clone, Deserialize, Debug)]
pub struct NoiseConfig {
    probability: Float,
    min: Float,
    max: Float,
}

#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct TerminationConfig {
    pub max_time: Option<usize>,
    pub max_generations: Option<usize>,
    pub variation: Option<VariationConfig>,
}

#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct VariationConfig {
    interval_type: String,
    value: usize,
    cv: Float,
    is_global: bool,
}

#[derive(Clone, Deserialize, Debug)]
pub struct TelemetryConfig {
    progress: Option<ProgressConfig>,
    metrics: Option<MetricsConfig>,
}

#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ProgressConfig {
    enabled: bool,
    log_best: Option<usize>,
    log_population: Option<usize>,
}

#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct MetricsConfig {
    enabled: bool,
    track_population: Option<usize>,
}

#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct EnvironmentConfig {
    pub parallelism: Option<ParallelismConfig>,

    pub logging: Option<LoggingConfig>,

    pub is_experimental: Option<bool>,
}

#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ParallelismConfig {
    pub num_thread_pools: usize,
    pub threads_per_pool: usize,
}

#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct LoggingConfig {
    enabled: bool,
    prefix: Option<String>,
}

#[derive(Clone, Deserialize, Debug, Eq, PartialEq)]
pub struct MinMaxConfig {
    pub min: usize,
    pub max: usize,
}

#[derive(Clone, Deserialize, Debug, Eq, PartialEq)]
pub struct NameWeight {
    pub name: String,
    pub weight: usize,
}

#[derive(Clone, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct OutputConfig {
    pub include_geojson: Option<bool>,
}

/// Applies evolution settings (initial solutions and population type) to the builder.
fn configure_from_evolution(
    mut builder: ProblemConfigBuilder,
    problem: Arc<Problem>,
    environment: Arc<Environment>,
    telemetry_mode: TelemetryMode,
    evolution_config: &Option<EvolutionConfig>,
) -> Result<ProblemConfigBuilder, GenericError> {
    if let Some(config) = evolution_config {
        if let Some(initial) = &config.initial {
            builder = builder.with_initial(
                initial.alternatives.max_size,
                initial.alternatives.quota,
                std::iter::once(create_recreate_method(&initial.method, environment.clone()))
                    .chain(
                        initial
                            .alternatives
                            .methods
                            .iter()
                            .map(|method| create_recreate_method(method, environment.clone())),
                    )
                    .map::<(
                        Box<
                            dyn InitialOperator<
                                    Context = RefinementContext,
                                    Objective = GoalContext,
                                    Solution = InsertionContext,
                                > + Send
                                + Sync,
                        >,
                        _,
                    ), _>(|(recreate, weight)| {
                        (Box::new(RecreateInitialOperator::new(recreate)), weight)
                    })
                    .collect(),
            );
        }

        if let Some(variation) = &config.population {
            let default_selection_size = get_default_selection_size(environment.as_ref());
            let population = match &variation {
                PopulationType::Greedy { selection_size } => Box::new(GreedyPopulation::new(
                    problem.goal.clone(),
                    selection_size.unwrap_or(default_selection_size),
                    None,
                )),
                PopulationType::Elitism { max_size, selection_size } => Box::new(ElitismPopulation::new(
                    problem.goal.clone(),
                    environment.random.clone(),
                    max_size.unwrap_or(4),
                    selection_size.unwrap_or(default_selection_size),
                )) as TargetPopulation,
                PopulationType::Rosomaxa {
                    max_elite_size,
                    max_node_size,
                    spread_factor,
                    distribution_factor,
                    selection_size,
                    rebalance_memory,
                    exploration_ratio,
                } => {
                    let mut config = RosomaxaConfig::new_with_defaults(default_selection_size);
                    if let Some(selection_size) = selection_size {
                        config.selection_size = *selection_size;
                    }
                    if let Some(max_elite_size) = max_elite_size {
                        config.elite_size = *max_elite_size;
                    }
                    if let Some(max_node_size) = max_node_size {
                        config.node_size = *max_node_size;
                    }
                    if let Some(spread_factor) = spread_factor {
                        config.spread_factor = *spread_factor;
                    }
                    if let Some(distribution_factor) = distribution_factor {
                        config.distribution_factor = *distribution_factor;
                    }
                    if let Some(rebalance_memory) = rebalance_memory {
                        config.rebalance_memory = *rebalance_memory;
                    }
                    if let Some(exploration_ratio) = exploration_ratio {
                        config.exploration_ratio = *exploration_ratio;
                    }

                    Box::new(RosomaxaPopulation::new(problem.goal.clone(), environment.clone(), config)?)
                }
            };

            builder = builder.with_context(RefinementContext::new(problem, population, telemetry_mode, environment));
        }
    }

    Ok(builder)
}

fn configure_from_hyper(
    mut builder: ProblemConfigBuilder,
    problem: Arc<Problem>,
    environment: Arc<Environment>,
    hyper_config: &Option<HyperType>,
) -> Result<ProblemConfigBuilder, GenericError> {
    if let Some(config) = hyper_config {
        match config {
            HyperType::StaticSelective { operators } => {
                let static_selective = if let Some(operators) = operators {
                    let heuristic_group = operators
                        .iter()
                        .map(|operator| create_operator(problem.clone(), environment.clone(), operator))
                        .collect::<Result<Vec<_>, _>>()?;
                    get_static_heuristic_from_heuristic_group(problem.clone(), environment.clone(), heuristic_group)
                } else {
                    get_static_heuristic(problem, environment)
                };

                builder = builder.with_heuristic(Box::new(static_selective));
            }
            HyperType::DynamicSelective => {
                let dynamic_selective = get_dynamic_heuristic(problem, environment);
                builder = builder.with_heuristic(Box::new(dynamic_selective));
            }
        }
    }

    Ok(builder)
}

fn configure_from_termination(
    mut builder: ProblemConfigBuilder,
    termination_config: &Option<TerminationConfig>,
) -> ProblemConfigBuilder {
    if let Some(config) = termination_config {
        builder = builder.with_max_time(config.max_time).with_max_generations(config.max_generations).with_min_cv(
            config.variation.as_ref().map(|v| (v.interval_type.clone(), v.value, v.cv, v.is_global)),
            "min_cv".to_string(),
        );
    }

    builder
}

fn create_recreate_method(method: &RecreateMethod, environment: Arc<Environment>) -> (Arc<dyn Recreate>, usize) {
    let random = environment.random.clone();
    match method {
        RecreateMethod::Cheapest { weight } => (Arc::new(RecreateWithCheapest::new(random)), *weight),
        RecreateMethod::Farthest { weight } => (Arc::new(RecreateWithFarthest::new(random)), *weight),
        RecreateMethod::SkipBest { weight, start, end } => {
            (Arc::new(RecreateWithSkipBest::new(*start, *end, random)), *weight)
        }
        RecreateMethod::Slice { weight } => (Arc::new(RecreateWithSlice::new(random)), *weight),
        RecreateMethod::Blinks { weight } => (Arc::new(RecreateWithBlinks::new_with_defaults(random.clone())), *weight),
        RecreateMethod::SkipRandom { weight } => (Arc::new(RecreateWithSkipRandom::new(random)), *weight),
        RecreateMethod::Gaps { weight, min, max } => (Arc::new(RecreateWithGaps::new(*min, *max, random)), *weight),
        RecreateMethod::Nearest { weight } => (Arc::new(RecreateWithNearestNeighbor::new(random)), *weight),
        RecreateMethod::Regret { weight, start, end } => {
            (Arc::new(RecreateWithRegret::new(*start, *end, random)), *weight)
        }
        RecreateMethod::Perturbation { weight, probability, min, max } => {
            let noise = Noise::new_with_addition(*probability, (*min, *max), random.clone());
            (Arc::new(RecreateWithPerturbation::new(noise, random.clone())), *weight)
        }
    }
}

fn create_operator(
    problem: Arc<Problem>,
    environment: Arc<Environment>,
    operator: &SearchOperatorType,
) -> Result<(TargetSearchOperator, TargetHeuristicProbability), GenericError> {
    Ok(match operator {
        SearchOperatorType::RuinRecreate { probability, ruins, recreates } => {
            let ruin = Arc::new(WeightedRuin::new(ruins.iter().map(|g| create_ruin_group(&problem, g)).collect()));
            let recreate = Arc::new(WeightedRecreate::new(
                recreates.iter().map(|r| create_recreate_method(r, environment.clone())).collect(),
            ));
            (
                Arc::new(RuinAndRecreate::new(ruin, recreate)),
                create_operator_probability(probability, environment.random.clone()),
            )
        }
        SearchOperatorType::LocalSearch { probability, times, operators: inners } => {
            let operator = create_local_search(times, inners, environment.random.clone());
            (Arc::new(LocalSearch::new(operator)), create_operator_probability(probability, environment.random.clone()))
        }
        SearchOperatorType::Decomposition { routes, repeat, probability } => {
            if *repeat < 1 {
                return Err(format!("repeat must be at least 1. Specified: {repeat}").into());
            }
            if routes.min < 2 {
                return Err(format!("min routes must be at least 2. Specified: {}", routes.min).into());
            }

            let operator = create_default_heuristic_operator(problem, environment.clone());
            (
                Arc::new(DecomposeSearch::new(operator, (routes.min, routes.max), *repeat, 200)),
                create_operator_probability(probability, environment.random.clone()),
            )
        }
    })
}

fn create_operator_probability(
    probability: &OperatorProbabilityType,
    random: Arc<dyn Random>,
) -> TargetHeuristicProbability {
    match probability {
        OperatorProbabilityType::Scalar { scalar } => create_scalar_operator_probability(*scalar, random),
        OperatorProbabilityType::Context { threshold, phases } => create_context_operator_probability(
            threshold.jobs,
            threshold.routes,
            phases
                .iter()
                .map(|phase| match phase {
                    ContextPhase::Initial { chance } => (SelectionPhase::Initial, *chance),
                    ContextPhase::Exploration { chance } => (SelectionPhase::Exploration, *chance),
                    ContextPhase::Exploitation { chance } => (SelectionPhase::Exploitation, *chance),
                })
                .collect(),
            random,
        ),
    }
}

fn create_ruin_group(problem: &Arc<Problem>, group: &RuinGroupConfig) -> RuinGroup {
    (group.methods.iter().map(|r| create_ruin_method(problem, r)).collect(), group.weight)
}

fn create_ruin_method(problem: &Arc<Problem>, method: &RuinMethod) -> (Arc<dyn Ruin>, Float) {
    let limits = RemovalLimits::new(problem.as_ref());
    let get_limits = |min: usize, max: usize| RemovalLimits {
        removed_activities_range: min..max,
        ..RemovalLimits::new(problem.as_ref())
    };

    match method {
        RuinMethod::AdjustedString { probability, lmax, cavg, alpha } => {
            (Arc::new(AdjustedStringRemoval::new(*lmax, *cavg, *alpha, limits)), *probability)
        }
        RuinMethod::Neighbour { probability, min, max } => {
            (Arc::new(NeighbourRemoval::new(get_limits(*min, *max))), *probability)
        }
        RuinMethod::RandomJob { probability, min, max } => {
            (Arc::new(RandomJobRemoval::new(get_limits(*min, *max))), *probability)
        }
        RuinMethod::RandomRoute { probability, min, max } => {
            (Arc::new(RandomRouteRemoval::new(get_limits(*min, *max))), *probability)
        }
        RuinMethod::WorstJob { probability, min, max, skip: worst_skip } => {
            (Arc::new(WorstJobRemoval::new(*worst_skip, get_limits(*min, *max))), *probability)
        }
        RuinMethod::Cluster { probability, min, max } => (
            Arc::new(ClusterRemoval::new(problem.clone(), get_limits(*min, *max)).unwrap()),
            *probability,
        ),
        RuinMethod::CloseRoute { probability } => (Arc::new(CloseRouteRemoval::new(limits)), *probability),
        RuinMethod::WorstRoute { probability } => (Arc::new(WorstRouteRemoval::new(limits)), *probability),
    }
}

fn create_local_search(
    times: &MinMaxConfig,
    inners: &[LocalOperatorType],
    random: Arc<dyn Random>,
) -> Arc<dyn LocalOperator> {
    let operators = inners
        .iter()
        .map::<(Arc<dyn LocalOperator>, usize), _>(|op| match op {
            LocalOperatorType::SwapStar { weight } => (Arc::new(ExchangeSwapStar::new(random.clone(), 200)), *weight),
            LocalOperatorType::InterRouteBest { weight, noise } => {
                (Arc::new(ExchangeInterRouteBest::new(noise.probability, noise.min, noise.max)), *weight)
            }
            LocalOperatorType::InterRouteRandom { weight, noise } => {
                (Arc::new(ExchangeInterRouteRandom::new(noise.probability, noise.min, noise.max)), *weight)
            }
            LocalOperatorType::IntraRouteRandom { weight, noise } => {
                (Arc::new(ExchangeIntraRouteRandom::new(noise.probability, noise.min, noise.max)), *weight)
            }
            LocalOperatorType::Sequence { weight } => (Arc::new(ExchangeSequence::default()), *weight),
        })
        .collect::<Vec<_>>();

    Arc::new(CompositeLocalOperator::new(operators, times.min, times.max))
}

fn get_telemetry_mode(environment: Arc<Environment>, telemetry_config: &Option<TelemetryConfig>) -> TelemetryMode {
    const LOG_BEST: usize = 100;
    const LOG_POPULATION: usize = 1000;
    const TRACK_POPULATION: usize = 1000;

    let create_metrics = |track_population: &Option<usize>| TelemetryMode::OnlyMetrics {
        track_population: track_population.unwrap_or(TRACK_POPULATION),
    };

    let create_progress = |log_best: &Option<usize>, log_population: &Option<usize>| TelemetryMode::OnlyLogging {
        logger: environment.logger.clone(),
        log_best: log_best.unwrap_or(LOG_BEST),
        log_population: log_population.unwrap_or(LOG_POPULATION),
    };

    match telemetry_config.as_ref().map(|t| (&t.progress, &t.metrics)) {
        Some((None, Some(MetricsConfig { enabled, track_population }))) if *enabled => create_metrics(track_population),
        Some((Some(ProgressConfig { enabled, log_best, log_population }), None)) if *enabled => {
            create_progress(log_best, log_population)
        }
        Some((
            Some(ProgressConfig { enabled: progress_enabled, log_best, log_population }),
            Some(MetricsConfig { enabled: metrics_enabled, track_population }),
        )) => match (progress_enabled, metrics_enabled) {
            (true, true) => TelemetryMode::All {
                logger: environment.logger.clone(),
                log_best: log_best.unwrap_or(LOG_BEST),
                log_population: log_population.unwrap_or(LOG_POPULATION),
                track_population: track_population.unwrap_or(TRACK_POPULATION),
            },
            (true, false) => create_progress(log_best, log_population),
            (false, true) => create_metrics(track_population),
            _ => TelemetryMode::None,
        },
        _ => TelemetryMode::None,
    }
}

fn configure_from_environment(
    environment_config: &Option<EnvironmentConfig>,
    max_time: Option<usize>,
) -> Arc<Environment> {
    let mut environment = Environment::new_with_time_quota(max_time);

    if let Some(parallelism) = environment_config.as_ref().and_then(|c| c.parallelism.as_ref()) {
        environment.parallelism = Parallelism::new(parallelism.num_thread_pools, parallelism.threads_per_pool);
    }

    if let Some(logging) = environment_config.as_ref().and_then(|c| c.logging.as_ref()) {
        environment.logger = match (logging.enabled, logging.prefix.clone()) {
            (true, Some(prefix)) => Arc::new(move |msg: &str| println!("{prefix}{msg}")),
            (true, None) => Arc::new(|msg: &str| println!("{msg}")),
            _ => Arc::new(|_: &str| {}),
        };
    }

    if let Some(is_experimental) = environment_config.as_ref().and_then(|c| c.is_experimental) {
        environment.is_experimental = is_experimental;
    }

    Arc::new(environment)
}

/// Reads the solver configuration from a JSON reader.
pub fn read_config<R: Read>(reader: BufReader<R>) -> GenericResult<Config> {
    serde_json::from_reader(reader).map_err(|err| format!("cannot deserialize config: '{err}'").into())
}

/// Creates a solver configuration builder from the given problem, initial solutions,
/// and a JSON configuration reader.
pub fn create_builder_from_config_file<R>(
    problem: Arc<Problem>,
    solutions: Vec<InsertionContext>,
    reader: BufReader<R>,
) -> GenericResult<ProblemConfigBuilder>
where
    R: Read,
{
    read_config(reader).and_then(|config| create_builder_from_config(problem, solutions, &config))
}

/// Creates a solver configuration builder from the given problem, initial solutions,
/// and an already deserialized `Config`.
pub fn create_builder_from_config(
    problem: Arc<Problem>,
    solutions: Vec<InsertionContext>,
    config: &Config,
) -> GenericResult<ProblemConfigBuilder> {
    let environment =
        configure_from_environment(&config.environment, config.termination.as_ref().and_then(|t| t.max_time));
    let telemetry_mode = get_telemetry_mode(environment.clone(), &config.telemetry);
    let mut builder = VrpConfigBuilder::new(problem.clone())
        .set_environment(environment.clone())
        .set_telemetry_mode(telemetry_mode.clone())
        .prebuild()?
        .with_init_solutions(solutions, None);

    builder =
        configure_from_evolution(builder, problem.clone(), environment.clone(), telemetry_mode, &config.evolution)?;
    builder = configure_from_hyper(builder, problem, environment, &config.hyper)?;
    builder = configure_from_termination(builder, &config.termination);

    Ok(builder)
}
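
// Illustrative usage (a sketch: it assumes a `problem: Arc<Problem>` has already been built
// elsewhere and that `config.json` follows the schema defined above):
//
//   let reader = BufReader::new(std::fs::File::open("config.json")?);
//   let builder = create_builder_from_config_file(problem, vec![], reader)?;
//
// The returned `ProblemConfigBuilder` is then used to finish the solver setup.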