mod atw;
mod best_first;
mod graph;
mod path;
mod prefix;
use std::{
convert::TryInto,
ops::RangeInclusive,
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
};
use bellframe::Stage;
use itertools::Itertools;
use crate::{
parameters::{MethodId, MusicTypeId, Parameters},
prove_length::{prove_lengths, RefinedRanges},
Composition,
};
use self::atw::AtwTable;
/// A fully-specified composition search: the optimised in-memory graph plus
/// everything else needed to actually run it ([`Search::run`]).
#[derive(Debug)]
pub struct Search {
    /// The [`Parameters`] which generated this search.  Shared in an [`Arc`]
    /// so results/threads can reference them cheaply.
    params: Arc<Parameters>,
    /// Resource/limit configuration for the search.
    // NOTE(review): presumably `config` only tunes resource usage (threads,
    // memory, graph size) and doesn't change which compositions exist — confirm
    config: Config,
    /// The search-optimised graph, built from the unoptimised
    /// `crate::graph::Graph` in [`Search::new`].
    graph: self::graph::Graph,
    /// Lookup table built from per-chunk lengths by [`AtwTable::new`].
    // assumes "atw" = "all the work(s)" tracking — TODO confirm against `atw` module
    atw_table: Arc<AtwTable>,
    /// Length/method-count ranges proven by `prove_lengths` in [`Search::new`].
    refined_ranges: RefinedRanges,
}
impl Search {
    /// Builds a new `Search` from a set of [`Parameters`].
    ///
    /// This constructs the unoptimised graph, proves the reachable
    /// length/method-count ranges, optimises the graph with those ranges, and
    /// finally converts it into the compact search-optimised graph.
    ///
    /// # Errors
    ///
    /// Returns an error if the graph can't be built or if the length proof
    /// fails.
    pub fn new(params: Parameters, config: Config) -> crate::Result<Self> {
        // NOTE: the original text contained `¶ms` — a mis-encoding of
        // `&params` (the `&para` prefix was rendered as `¶`); restored here.
        let mut source_graph = crate::graph::Graph::unoptimised(&params, &config)?;
        let refined_ranges = prove_lengths(&source_graph, &params)?;
        source_graph.optimise(&params, &refined_ranges);
        // Per-chunk lengths are needed to build the atw table
        let chunk_lengths = source_graph
            .chunks
            .iter()
            .map(|(id, chunk)| (id.clone(), chunk.per_part_length))
            .collect_vec();
        let atw_table = AtwTable::new(&params, &chunk_lengths);
        // Convert into the compact graph representation used by the search
        let graph = self::graph::Graph::new(&source_graph, &params, &atw_table);
        // The source graph is no longer needed; drop it eagerly to free memory
        // before the (memory-hungry) search starts
        drop(source_graph);
        Ok(Search {
            params: Arc::new(params),
            atw_table: Arc::new(atw_table),
            config,
            refined_ranges,
            graph,
        })
    }

    /// Runs this search, blocking until it completes or `abort_flag` is set.
    ///
    /// `update_fn` is called with [`Update`]s (compositions found, progress,
    /// completion) as the search runs.
    pub fn run(&self, update_fn: impl FnMut(Update), abort_flag: &AtomicBool) {
        // Make sure the flag starts unset, otherwise the search would abort
        // immediately if the caller left it set from a previous run
        abort_flag.store(false, Ordering::SeqCst);
        best_first::search(self, update_fn, abort_flag);
    }
}
impl Search {
    /// The inclusive range of row counts allowed for the method with the
    /// given `id`, as proven by the length proof.
    pub fn method_count_range(&self, id: MethodId) -> RangeInclusive<usize> {
        let method_idx = self.params.method_id_to_idx(id);
        let counts = &self.refined_ranges.method_counts[method_idx];
        RangeInclusive::new(counts.start().as_usize(), counts.end().as_usize())
    }

    /// Iterator over every method in the search, paired with its shorthand
    /// string.
    pub fn methods(&self) -> impl Iterator<Item = (&crate::parameters::Method, String)> {
        self.params.methods.iter().map(|method| {
            let shorthand = method.shorthand();
            (method, shorthand)
        })
    }

    /// Iterator over the [`MusicTypeId`]s of every music type in the search.
    pub fn music_type_ids(&self) -> impl Iterator<Item = MusicTypeId> + '_ {
        self.params
            .music_types
            .iter()
            .map(|music_type| music_type.id)
    }

    /// The [`Parameters`] which generated this search.
    pub fn parameters(&self) -> &Parameters {
        self.params.as_ref()
    }

    /// How many parts the searched compositions have.
    pub fn num_parts(&self) -> usize {
        self.params.num_parts()
    }

    /// `true` if the searched compositions have more than one part.
    pub fn is_multipart(&self) -> bool {
        self.params.is_multipart()
    }

    /// The effective [`Stage`] of the part head group.
    pub fn effective_part_head_stage(&self) -> Stage {
        self.params.part_head_group.effective_stage()
    }
}
/// A status update emitted by a running [`Search`] (passed to the `update_fn`
/// given to `Search::run`).
#[derive(Debug)]
pub enum Update {
    /// A new [`Composition`] has been found.
    Comp(Composition),
    /// A snapshot of the search's progress statistics.
    Progress(Progress),
    /// The search has finished.
    Complete,
}
/// A snapshot of the statistics of a running [`Search`].
#[derive(Debug, Clone, Copy)]
pub struct Progress {
    /// How many search iterations have been run so far.
    pub iter_count: usize,
    /// How many compositions have been generated so far.
    pub num_comps: usize,
    /// The current length of the search queue.
    // assumes this is the frontier of the best-first search — TODO confirm
    pub queue_len: usize,
    /// Average length of the compositions/prefixes in the queue.
    // NOTE(review): can't tell from here whether this averages comps or queue
    // entries — verify against `best_first`
    pub avg_length: f32,
    /// Maximum length seen so far.
    pub max_length: usize,
    /// `true` while the queue is being truncated.
    pub truncating_queue: bool,
    /// `true` once the search is in the process of aborting.
    pub aborting: bool,
}
impl Progress {
pub const START: Self = Self {
iter_count: 0,
num_comps: 0,
queue_len: 0,
avg_length: 0.0,
max_length: 0,
truncating_queue: false,
aborting: false,
};
}
/// Configuration options for a [`Search`] which don't change which
/// compositions are emitted, only how the search runs.
// NOTE(review): the "don't change results" claim is inferred from the
// field names (limits only) — confirm
#[derive(Debug, Clone)]
pub struct Config {
    /// Maximum number of threads to use; `None` lets the search decide.
    pub thread_limit: Option<usize>,
    /// Maximum number of chunks in the graph before the search refuses to run.
    pub graph_size_limit: usize,
    /// Memory limit in bytes; `None` means use a default.
    // presumably `None` falls back to `default_mem_limit()` — verify at call site
    pub mem_limit: Option<usize>,
    /// If `true`, leak search memory instead of freeing it (e.g. to skip a
    /// slow teardown on exit).
    // assumes this exists to speed up process exit — TODO confirm
    pub leak_search_memory: bool,
}
impl Default for Config {
fn default() -> Self {
Self {
thread_limit: None,
graph_size_limit: 100_000,
mem_limit: None,
leak_search_memory: false,
}
}
}
/// Computes the default memory limit for a search, in bytes.
///
/// This is 80% of the system's currently-available memory where `sysinfo` can
/// query it, and a flat 5 GB fallback otherwise.  The result is additionally
/// capped 500 MB below `usize::MAX` (only relevant on 32-bit targets) so the
/// final conversion to `usize` cannot fail.
fn default_mem_limit() -> usize {
    let ideal_mem_limit = if sysinfo::IS_SUPPORTED_SYSTEM {
        // Use `f64` rather than `f32` for the 80% scaling: `f32` has only a
        // 24-bit mantissa, so byte counts above ~16 MiB would be rounded
        // before scaling.  `f64` represents all realistic memory sizes exactly.
        (sysinfo::System::new_all().available_memory() as f64 * 0.8) as u64
    } else {
        5_000_000_000u64 // 5 GB fallback when available memory can't be queried
    };
    // Keep comfortably below `usize::MAX` so `try_into` below can't fail
    // (on 64-bit targets this cap is effectively u64::MAX and never binds)
    let pointer_size_limit = (usize::MAX as u64).saturating_sub(500_000_000);
    ideal_mem_limit
        .min(pointer_size_limit)
        .try_into()
        .expect("Memory limit should fit into `usize`")
}