mod falseness;
mod layout;
use std::{
collections::{BTreeMap, HashMap},
ops::Deref,
sync::Arc,
time::Instant,
};
use bellframe::{
method::RowAnnot, music::AtRowPositions, Block, Mask, PlaceNot, Row, RowBuf, Stroke, StrokeSet,
};
use itertools::Itertools;
use crate::{
group::{PartHeadGroup, PhRotation},
parameters::{Call, MethodIdx, MethodVec, Parameters},
search::Config,
utils::counts::Counts,
};
use super::{Chunk, ChunkId, Graph, LinkSet, LinkSide, PerPartLength, RowIdx, TotalLength};
impl Graph {
    /// Build the full, unoptimised [`Graph`] for the given `params`.
    ///
    /// The resulting graph contains every chunk produced by the layout pass,
    /// with successor/predecessor links, (optionally) falseness links and
    /// per-chunk music counts already populated.  Later optimisation passes
    /// prune this graph.
    pub(crate) fn unoptimised(params: &Parameters, config: &Config) -> crate::Result<Self> {
        log::debug!("Building unoptimised graph:");
        let graph_build_start = Instant::now();

        // Fail early on invalid parameters (duplicate shorthands, bad calls, etc.)
        check_params(params)?;

        // Generate the chunk layout: the set of chunk IDs/lengths and the
        // links between them
        let start = Instant::now();
        let (mut chunk_equiv_map, chunk_lengths, links) =
            self::layout::chunk_lengths(params, config)?;
        log::debug!(" Chunk layout generated in {:.2?}", start.elapsed());

        // Convert the layout into `Chunk`s (with no links attached yet)
        let mut chunks = chunk_lengths
            .into_iter()
            .map(|(id, per_part_length): (ChunkId, PerPartLength)| {
                let chunk = expand_chunk(&id, per_part_length, params);
                (id, chunk)
            })
            .collect::<HashMap<_, _>>();

        // Attach each link's ID to the successor/predecessor lists of the
        // chunks on either side of it
        let start = Instant::now();
        for (link_id, link) in links.iter() {
            if let LinkSide::Chunk(id) = &link.from {
                if let Some(chunk) = chunks.get_mut(id) {
                    chunk.successors.push(*link_id);
                }
            }
            if let LinkSide::Chunk(id) = &link.to {
                if let Some(chunk) = chunks.get_mut(id) {
                    chunk.predecessors.push(*link_id);
                }
            }
        }
        log::debug!(
            " Successor/predecessor links set in {:.2?}",
            start.elapsed()
        );

        // Falseness links are only needed when truth is required
        if params.require_truth {
            falseness::set_links(&mut chunks, &mut chunk_equiv_map, params);
        }

        // Count music for every chunk.  This needs the stroke of each chunk's
        // first row, which is only well-defined if every path to a chunk
        // arrives on the same stroke.
        let start = Instant::now();
        let relies_on_stroke = params
            .music_types
            .iter()
            .any(|ty| ty.strokes() != StrokeSet::Both);
        let start_strokes = get_start_strokes(&chunks, &links, params);
        if start_strokes.is_none() && relies_on_stroke {
            // Inconsistent strokes are only an error if some music type
            // actually depends on stroke
            return Err(crate::Error::InconsistentStroke);
        }
        // One doubled plain course per method, so `count_scores` can slice a
        // chunk's rows even when the range runs past the course end
        let double_plain_courses: MethodVec<_> = params
            .methods
            .iter()
            .map(|m| {
                let mut double_plain_course = m.plain_course();
                double_plain_course.extend_from_within(..);
                double_plain_course
            })
            .collect();
        for (id, chunk) in &mut chunks {
            count_scores(id, chunk, &double_plain_courses, &start_strokes, params);
        }
        log::debug!(" Music counted in {:.2?}", start.elapsed());
        log::debug!(
            "Graph build completed in {:.3?}",
            graph_build_start.elapsed()
        );

        // Extract the start/end links (those joining a chunk to `StartOrEnd`)
        use LinkSide::*;
        let mut starts = Vec::new();
        let mut ends = Vec::new();
        for (link_id, link) in links.iter() {
            match (&link.from, &link.to) {
                (StartOrEnd, Chunk(chunk_id)) => starts.push((*link_id, chunk_id.clone())),
                (Chunk(chunk_id), StartOrEnd) => ends.push((*link_id, chunk_id.clone())),
                (Chunk(_), Chunk(_)) => {}
                (StartOrEnd, StartOrEnd) => unreachable!("Can't have 0-length start->end links"),
            }
        }

        let graph = Graph {
            chunks,
            links,
            starts,
            ends,
        };
        Ok(graph)
    }
}
/// Create a [`Chunk`] for the given `id` and per-part length.
///
/// Links, falseness and music counts are left empty/zeroed here; they are
/// filled in later by `Graph::unoptimised` and `count_scores`.
fn expand_chunk(id: &ChunkId, per_part_length: PerPartLength, params: &Parameters) -> Chunk {
    // NOTE: the original source was mangled to `¶ms` (an HTML-entity
    // decoding of `&params`); restored here.
    let total_length = per_part_length.as_total(&params.part_head_group);
    Chunk {
        per_part_length,
        total_length,
        // All of this chunk's rows are rung to a single method
        method_counts: Counts::single_count(
            total_length.as_usize(),
            id.method.index(),
            params.methods.len(),
        ),
        // Link/falseness lists are populated by the graph builder
        predecessors: Vec::new(),
        successors: Vec::new(),
        false_chunks: Vec::new(),
        // Score and music counts are populated by `count_scores`
        score: 0.0,
        music_counts: index_vec::index_vec![AtRowPositions::ZERO; params.music_types.len()],
        required: false,
        // Distance lower bounds are computed during optimisation
        lb_distance_from_rounds: TotalLength::ZERO,
        lb_distance_to_rounds: TotalLength::ZERO,
    }
}
/// Attempt to assign a consistent starting [`Stroke`] to every chunk reachable
/// from a start link.
///
/// Returns `None` if two different paths reach the same chunk on opposite
/// strokes, i.e. the stroke assignment is contradictory.  Unreachable chunks
/// simply get no entry in the returned map.
fn get_start_strokes(
    chunks: &HashMap<ChunkId, Chunk>,
    links: &LinkSet,
    params: &Parameters,
) -> Option<HashMap<ChunkId, Stroke>> {
    let mut strokes_found = HashMap::<ChunkId, Stroke>::with_capacity(chunks.len());
    let first_row_stroke = !params.start_stroke;
    // Seed the search with every chunk directly reachable from a start link
    let mut to_explore: Vec<(ChunkId, Stroke)> = links
        .values()
        .filter_map(|link| match (&link.from, &link.to) {
            (LinkSide::StartOrEnd, LinkSide::Chunk(id)) => {
                Some((id.clone(), first_row_stroke))
            }
            _ => None,
        })
        .collect();
    // Depth-first propagation of strokes through the link graph
    while let Some((id, stroke)) = to_explore.pop() {
        match strokes_found.insert(id.clone(), stroke) {
            // A different stroke was already recorded for this chunk, so the
            // assignment is inconsistent
            Some(existing) if existing != stroke => return None,
            // Same stroke already recorded; no need to re-expand
            Some(_) => {}
            // First time visiting this chunk: propagate to its successors
            None => {
                if let Some(chunk) = chunks.get(&id) {
                    let next_stroke = stroke.offset(chunk.per_part_length.as_usize());
                    for link_id in &chunk.successors {
                        let link = &links[*link_id];
                        assert_eq!(link.from, LinkSide::Chunk(id.clone()));
                        if let LinkSide::Chunk(to_id) = &link.to {
                            to_explore.push((to_id.to_owned(), next_stroke));
                        }
                    }
                }
            }
        }
    }
    Some(strokes_found)
}
/// Set `chunk`'s `score` and `music_counts` by counting music (and applying
/// course weights) over its rows in every part.
///
/// `start_strokes` is `None` if the stroke assignment was inconsistent; the
/// caller guarantees that in that case no music type depends on stroke, so an
/// arbitrary stroke is used.
fn count_scores(
    id: &ChunkId,
    chunk: &mut Chunk,
    double_plain_courses: &MethodVec<Block<RowAnnot>>,
    start_strokes: &Option<HashMap<ChunkId, Stroke>>,
    params: &Parameters,
) {
    // Reset the counters before accumulating, in case this is ever re-run
    chunk.score = 0.0;
    chunk.music_counts = index_vec::index_vec![AtRowPositions::ZERO; params.music_types.len()];

    let start_stroke = match start_strokes {
        Some(map) => match map.get(id) {
            Some(stroke) => *stroke,
            // Chunks with no recorded stroke are unreachable; leave zero scores
            None => return,
        },
        // Strokes are inconsistent, but nothing depends on stroke (checked by
        // the caller), so any fixed value will do
        None => Stroke::Back,
    };
    let double_plain_course = &double_plain_courses[id.method];
    let lead_heads = params.methods[id.method].inner.lead_head().closure();

    for part_head in params.part_head_group.rows() {
        let lead_head_in_part = part_head * id.lead_head.as_ref();
        let start_row = &lead_head_in_part * double_plain_course.get_row(id.sub_lead_idx).unwrap();
        // Slice this chunk's rows out of the (doubled) plain course; the
        // doubling means the range can't overrun the course end
        let mut rows = Block::empty(params.stage);
        rows.extend_range(
            double_plain_course,
            id.sub_lead_idx..(id.sub_lead_idx + chunk.per_part_length.as_usize()),
        );
        rows.pre_multiply(&start_row);
        // Count every music type over these rows.
        // NOTE: the original source was mangled to `¶ms` (an HTML-entity
        // decoding of `&params`); restored here and below.
        for (count_so_far, music_type) in chunk.music_counts.iter_mut().zip_eq(&params.music_types)
        {
            let counts = music_type.count_block(&rows, start_stroke);
            chunk.score += music_type.as_overall_score(counts);
            *count_so_far += counts;
        }
        // Apply course weights: each matching mask contributes its weight
        // once per row in the chunk
        for (mask, weight) in &params.course_weights {
            for lead_head in &lead_heads {
                if (mask * lead_head).matches(&lead_head_in_part) {
                    chunk.score += *weight * chunk.per_part_length.as_usize() as f32;
                }
            }
        }
    }
}
/// Validate `params` before building the graph, returning the first error
/// found.  Also prints a note for any extra course-head masks implied by the
/// other parts of a multi-part.
fn check_params(params: &Parameters) -> crate::Result<()> {
    // Multi-parts must start and end with the same row
    if params.is_multipart() && params.start_row != params.end_row {
        return Err(crate::Error::DifferentStartEndRowInMultipart);
    }

    // No two methods may share a shorthand.
    // NOTE: the original source was mangled to `¶ms` (an HTML-entity
    // decoding of `&params`); restored here and below.
    for (i1, m1) in params.methods.iter_enumerated() {
        for m2 in &params.methods[..i1] {
            if m1.shorthand() == m2.shorthand() {
                return Err(crate::Error::DuplicateShorthand {
                    shorthand: m1.shorthand(),
                    title1: m1.title(),
                    title2: m2.title(),
                });
            }
        }
    }

    // Every call needs exactly one calling position per bell in the stage
    for call in &params.calls {
        if call.calling_positions.len() != params.stage.num_bells() {
            return Err(crate::Error::WrongCallingPositionsLength {
                call_name: call.symbol.clone(),
                calling_position_len: call.calling_positions.len(),
                stage: params.stage,
            });
        }
    }

    // Calls may only be applied at lead labels which are actually defined
    let defined_labels = params.lead_labels_used();
    for call in &params.calls {
        for lead_label in [&call.label_from, &call.label_to] {
            if !defined_labels.contains(lead_label) {
                return Err(crate::Error::UndefinedLabel {
                    call_name: call.symbol.clone(),
                    label: lead_label.clone(),
                });
            }
        }
    }

    check_for_duplicate_call_names(params)?;

    // Collect the extra lead-head masks implied by other parts, grouped as
    // `specified mask -> part head -> methods`, then report them to the user
    let mut extra_masks = BTreeMap::<Mask, BTreeMap<RowBuf, Vec<MethodIdx>>>::new();
    for (method_idx, method) in params.methods.iter_enumerated() {
        let (_lhms, extra_masks_for_method) = method.allowed_lead_head_masks_with_extras(params);
        for (specified_ch_mask, part_head) in extra_masks_for_method {
            extra_masks
                .entry(specified_ch_mask)
                .or_default()
                .entry(part_head)
                .or_default()
                .push(method_idx);
        }
    }
    for (specified_mask, methods_per_part) in extra_masks {
        println!("Note: For course mask {specified_mask}, adding extra masks for other parts:");
        for (part_head, methods) in methods_per_part {
            print!(" {} (in part {part_head}", &part_head * &specified_mask);
            if params.is_spliced() {
                print!(" for {}", params.method_list_string(&methods));
            }
            println!(")");
        }
    }
    Ok(())
}
/// Return an error if two calls share both a symbol and a `label_from`.
///
/// Works by sorting the `(symbol, label)` keys so that any duplicates become
/// adjacent, then scanning adjacent pairs for equality.
fn check_for_duplicate_call_names(params: &Parameters) -> crate::Result<()> {
    let call_keys: Vec<(&str, &str, &PlaceNot)> = params
        .calls
        .iter()
        .map(|call: &Call| -> (&str, &str, &PlaceNot) {
            (&call.symbol, &call.label_from, &call.place_notation)
        })
        .sorted_by_key(|&(symbol, label, _pn)| (symbol, label))
        .collect();
    for pair in call_keys.windows(2) {
        let (symbol_a, label_a, pn_a) = pair[0];
        let (symbol_b, label_b, pn_b) = pair[1];
        if (symbol_a, label_a) == (symbol_b, label_b) {
            return Err(crate::Error::DuplicateCall {
                symbol: symbol_a.to_owned(),
                label: label_a.to_owned(),
                pn1: pn_a.clone(),
                pn2: pn_b.clone(),
            });
        }
    }
    Ok(())
}
/// A chunk ID whose lead head is expressed relative to the first part, i.e.
/// before being normalised by a [`ChunkEquivalenceMap`] into a [`ChunkId`].
#[derive(Debug, Clone)]
struct ChunkIdInFirstPart {
    // Un-normalised lead head of the chunk
    lead_head: RowBuf,
    // Position of the chunk's first row (method + index within the lead)
    row_idx: RowIdx,
}

// `Deref` to the inner `RowIdx` so callers can use its fields directly.
// NOTE(review): `Deref` on a non-pointer type is unconventional; presumably
// kept for call-site convenience — confirm before removing.
impl Deref for ChunkIdInFirstPart {
    type Target = RowIdx;

    fn deref(&self) -> &Self::Target {
        &self.row_idx
    }
}
/// Lazily-built map which normalises chunk lead heads across the parts of a
/// composition: all copies of a lead head (one per part head) map to a single
/// canonical lead head plus the part-head rotation relating the two.
#[derive(Debug)]
struct ChunkEquivalenceMap<'params> {
    part_head_group: &'params PartHeadGroup,
    // Maps a lead head (in any part) to its canonical lead head and the
    // rotation between them.  The shared `Arc<Row>` keeps the canonical lead
    // head cheap to clone into many `ChunkId`s.
    normalisation: HashMap<RowBuf, (Arc<Row>, PhRotation)>,
}

impl<'params> ChunkEquivalenceMap<'params> {
    /// Create an empty map over the given [`PartHeadGroup`].
    fn new(part_head_group: &'params PartHeadGroup) -> Self {
        Self {
            part_head_group,
            normalisation: HashMap::new(),
        }
    }

    /// Convert an un-normalised [`ChunkIdInFirstPart`] into a canonical
    /// [`ChunkId`], also returning the part-head rotation that was applied.
    fn normalise(&mut self, id: &ChunkIdInFirstPart) -> (ChunkId, PhRotation) {
        // If this lead head hasn't been seen before, register every per-part
        // copy of it as equivalent.  The first-seen lead head thus becomes
        // the canonical representative of its whole equivalence class.
        if !self.normalisation.contains_key(&id.lead_head) {
            let arc_lead_head = id.lead_head.to_arc();
            for (part_head, element) in self.part_head_group.rotations() {
                self.normalisation
                    .insert(part_head * &id.lead_head, (arc_lead_head.clone(), element));
            }
        }
        // Indexing can't panic provided the loop above inserted an entry for
        // `id.lead_head` itself — i.e. `rotations()` includes the identity
        // (NOTE(review): assumed, not visible from here)
        let (normalised_lead_head, rotation) = self.normalisation[&id.lead_head].clone();
        (ChunkId::new(normalised_lead_head, id.row_idx), rotation)
    }
}