use crate::bam_io::{
RawBamWriter, create_bam_reader_for_pipeline_with_opts, create_bam_writer,
create_optional_bam_writer, create_raw_bam_reader_with_opts, create_raw_bam_writer,
};
use anyhow::{Context, Result, bail};
use clap::Parser;
use fgoxide::io::DelimFile;
use super::common::{
BamIoOptions, CompressionOptions, ConsensusCallingOptions, OverlappingConsensusOptions,
QueueMemoryOptions, ReadGroupOptions, RejectsOptions, SchedulerOptions, StatsOptions,
ThreadingOptions, build_pipeline_config,
};
use crate::commands::consensus_runner::{
ConsensusStatsOps, create_unmapped_consensus_header, log_overlapping_stats,
};
use crate::consensus_caller::{ConsensusCaller, ConsensusCallingStats, ConsensusOutput};
use crate::duplex_consensus_caller::DuplexConsensusCaller;
use crate::logging::{OperationTimer, log_consensus_summary};
use crate::mi_group::{MiGroup, MiGroupBatch, MiGroupIterator, MiGrouper};
use crate::overlapping_consensus::{
AgreementStrategy, CorrectionStats, DisagreementStrategy, OverlappingBasesConsensusCaller,
apply_overlapping_consensus,
};
use crate::per_thread_accumulator::PerThreadAccumulator;
use crate::progress::ProgressTracker;
use crate::read_info::LibraryIndex;
use crate::sam::{SamTag, header_as_unsorted};
use crate::sort::bam_fields;
use crate::umi::extract_mi_base;
use crate::unified_pipeline::{
GroupKeyConfig, Grouper, MemoryEstimate, run_bam_pipeline_from_reader,
};
use crate::validation::validate_file_exists;
use fgumi_raw_bam::{RawRecord, RawRecordView};
use log::info;
use noodles::sam::Header;
use noodles::sam::alignment::record::data::field::Tag;
use parking_lot::Mutex;
use std::io;
use std::io::Write as IoWrite;
use std::sync::Arc;
use super::command::Command;
use super::common::{MethylationRef, load_methylation_reference};
/// Output of consensus-calling one batch of MI groups on a worker thread,
/// handed to the serialize stage of the multi-threaded pipeline.
struct DuplexProcessedBatch {
    /// Serialized consensus records (raw BAM bytes) plus their record count.
    consensus_output: ConsensusOutput,
    /// Number of MI groups consumed to produce this batch.
    groups_count: u64,
    /// Consensus-calling statistics accumulated over this batch.
    stats: ConsensusCallingStats,
    /// Overlapping-bases correction stats; `Some` only when overlapping
    /// consensus calling is enabled.
    overlapping_stats: Option<CorrectionStats>,
}
impl MemoryEstimate for DuplexProcessedBatch {
    /// Approximate heap usage of this batch. The serialized record buffer
    /// dominates, so only its capacity is counted; the stats structs are
    /// ignored as comparatively small.
    fn estimate_heap_size(&self) -> usize {
        self.consensus_output.data.capacity()
    }
}
/// Per-thread accumulator slot for metrics collected during the
/// multi-threaded pipeline; slots are merged into totals after the run.
#[derive(Default)]
struct CollectedDuplexMetrics {
    /// Merged consensus-calling statistics for this thread's batches.
    stats: ConsensusCallingStats,
    /// Merged overlapping-bases stats; populated only when overlapping
    /// consensus calling is enabled.
    overlapping_stats: Option<CorrectionStats>,
    /// Total MI groups processed by this thread.
    groups_processed: u64,
}
// Command-line options for the `duplex` subcommand. Most options are shared
// option groups flattened in from `super::common`; only duplex-specific flags
// are declared directly on this struct. Plain `//` comments are used here so
// clap's derived help output is not altered.
#[derive(Parser, Debug)]
#[command(
    name = "duplex",
    about = "\x1b[38;5;180m[CONSENSUS]\x1b[0m \x1b[36mCall duplex consensus sequences from UMI-grouped reads\x1b[0m",
    long_about = r#"
Calls duplex consensus sequences from reads generated from the same double-stranded source molecule. Prior
to running this tool, reads must have been grouped with `group` using the `paired` strategy. Doing
so will apply (by default) MI tags to all reads of the form `*/A` and `*/B` where the /A and /B suffixes
with the same identifier denote reads that are derived from opposite strands of the same source duplex molecule.
Reads from the same unique molecule are first partitioned by source strand and assembled into single
strand consensus molecules as described by the simplex command. Subsequently, for molecules that
have at least one observation of each strand, duplex consensus reads are assembled by combining the evidence
from the two single strand consensus reads.
Because of the nature of duplex sequencing, this tool does not support fragment reads - if found in the
input they are ignored. Similarly, read pairs for which consensus reads cannot be generated for one or
other read (R1 or R2) are omitted from the output.
The consensus reads produced are unaligned, due to the difficulty and error-prone nature of inferring the consensus
alignment. Consensus reads should therefore be aligned after, which should not be too expensive as likely there
are far fewer consensus reads than input raw reads.
Consensus reads have a number of additional optional tags set in the resulting BAM file. The tag names follow
a pattern where the first letter (a, b or c) denotes that the tag applies to the first single strand consensus (a),
second single-strand consensus (b) or the final duplex consensus (c). The second letter is intended to capture
the meaning of the tag (e.g. d=depth, m=min depth, e=errors/error-rate) and is upper case for values that are
one per read and lower case for values that are one per base.
The tags break down into those that are single-valued per read:
consensus depth [aD,bD,cD] (int) : the maximum depth of raw reads at any point in the consensus reads
consensus min depth [aM,bM,cM] (int) : the minimum depth of raw reads at any point in the consensus reads
consensus error rate [aE,bE,cE] (float): the fraction of bases in raw reads disagreeing with the final consensus calls
And those that have a value per base (duplex values are not generated, but can be generated by summing):
consensus depth [ad,bd] (short[]): the count of bases contributing to each single-strand consensus read at each position
consensus errors [ae,be] (short[]): the count of bases from raw reads disagreeing with the final single-strand consensus base
consensus bases [ac,bc] (string) : the single-strand consensus bases
consensus quals [aq,bq] (string) : the single-strand consensus qualities
The per base depths and errors are both capped at 32,767. In all cases no-calls (Ns) and bases below the
min-input-base-quality are not counted in tag value calculations.
The --min-reads option can take 1-3 values similar to `filter`. For example:
fgumi duplex ... --min-reads 10,5,3
If fewer than three values are supplied, the last value is repeated (i.e. `5,4` -> `5 4 4` and `1` -> `1 1 1`). The
first value applies to the final consensus read, the second value to one single-strand consensus, and the last
value to the other single-strand consensus. It is required that if values two and three differ,
the more stringent value comes earlier.
"#
)]
pub struct Duplex {
    // Input/output BAM paths plus reader behavior.
    #[command(flatten)]
    pub io: BamIoOptions,
    // Where (and whether) to write reads rejected from consensus calling.
    #[command(flatten)]
    pub rejects_opts: RejectsOptions,
    // Optional TSV statistics output path.
    #[command(flatten)]
    pub stats_opts: StatsOptions,
    // Consensus read-group id and read-name prefix.
    #[command(flatten)]
    pub read_group: ReadGroupOptions,
    // Core consensus-calling parameters (base quality, error rates, trim, ...).
    #[command(flatten)]
    pub consensus: ConsensusCallingOptions,
    // Overlapping-bases consensus correction options.
    #[command(flatten)]
    pub overlapping: OverlappingConsensusOptions,
    // Thread-count configuration; `threads` set => multi-threaded pipeline.
    #[command(flatten)]
    pub threading: ThreadingOptions,
    // Output BGZF compression level.
    #[command(flatten)]
    pub compression: CompressionOptions,
    // 1-3 comma-separated minimums: final duplex consensus, then each
    // single-strand consensus (see long_about for the repetition rule).
    #[arg(short = 'M', long = "min-reads", value_delimiter = ',', default_value = "1")]
    pub min_reads: Vec<usize>,
    // Optional cap on raw reads used per strand.
    #[arg(long = "max-reads-per-strand")]
    pub max_reads_per_strand: Option<usize>,
    #[command(flatten)]
    pub scheduler_opts: SchedulerOptions,
    #[command(flatten)]
    pub queue_memory: QueueMemoryOptions,
    // Methylation-aware consensus mode.
    #[arg(long = "methylation-mode", value_enum)]
    pub methylation_mode: Option<crate::commands::common::MethylationModeArg>,
    // Reference FASTA; only valid together with --methylation-mode
    // (enforced in `execute`).
    #[arg(long = "ref")]
    pub reference: Option<std::path::PathBuf>,
}
impl Command for Duplex {
    /// Runs duplex consensus calling end to end.
    ///
    /// Validates the input, logs the effective configuration, and then either:
    /// - dispatches to `execute_threads_mode` when `--threads` was given, or
    /// - runs the single-threaded streaming path below: read raw records,
    ///   group them by base MI (strand suffix stripped), optionally apply
    ///   overlapping-bases correction, call duplex consensus per group, and
    ///   write the unmapped consensus records plus optional rejects/stats.
    fn execute(&self, command_line: &str) -> Result<()> {
        let timer = OperationTimer::new("Calling duplex consensus");
        validate_file_exists(&self.io.input, "Input BAM")?;
        let reader_threads = self.threading.num_threads();
        let worker_threads = self.threading.num_threads();
        let writer_threads = self.threading.num_threads();
        info!("Duplex");
        info!(" Input: {}", self.io.input.display());
        info!(" Output: {}", self.io.output.display());
        info!(" Min reads: {:?}", self.min_reads);
        info!(" Min base quality: {}", self.consensus.min_input_base_quality);
        info!(" Output per-base tags: {}", self.consensus.output_per_base_tags);
        info!(" Worker threads: {worker_threads}");
        info!(" Reader threads: {reader_threads}");
        info!(" Trim reads: {}", self.consensus.trim);
        info!(" Max reads per strand: {:?}", self.max_reads_per_strand);
        info!(
            " Consensus call overlapping bases: {}",
            self.overlapping.consensus_call_overlapping_bases
        );
        let cell_tag = Tag::from(SamTag::CB);
        let track_rejects = self.rejects_opts.is_enabled();
        // --ref is only meaningful for methylation-aware calling.
        if self.reference.is_some() && self.methylation_mode.is_none() {
            bail!("--ref requires --methylation-mode to be set");
        }
        let methylation_mode =
            crate::commands::common::resolve_methylation_mode(self.methylation_mode);
        let overlapping_enabled = self.overlapping.consensus_call_overlapping_bases;
        if overlapping_enabled {
            info!("Overlapping consensus calling enabled");
        }
        info!("Processing reads...");
        // Multi-threaded path: hand the reader off to the unified pipeline.
        if let Some(threads) = self.threading.threads {
            let (reader, header) = create_bam_reader_for_pipeline_with_opts(
                &self.io.input,
                self.io.pipeline_reader_opts(),
            )?;
            let header = crate::commands::common::add_pg_record(header, command_line)?;
            let read_name_prefix = self.read_group.prefix_or_from_header(&header);
            let methylation_ref: MethylationRef =
                load_methylation_reference(methylation_mode, &self.reference, &header)?;
            let result = self.execute_threads_mode(
                threads,
                reader,
                header,
                read_name_prefix,
                track_rejects,
                command_line,
                methylation_ref,
                methylation_mode,
            );
            // NOTE(review): completion is logged with a 0 record count here;
            // the threads-mode path logs its own totals — confirm intended.
            timer.log_completion(0);
            return result;
        }
        // Single-threaded path from here on.
        let (mut raw_reader, header) =
            create_raw_bam_reader_with_opts(&self.io.input, 1, self.io.pipeline_reader_opts())?;
        let header = crate::commands::common::add_pg_record(header, command_line)?;
        let read_name_prefix = self.read_group.prefix_or_from_header(&header);
        let methylation_ref: MethylationRef =
            load_methylation_reference(methylation_mode, &self.reference, &header)?;
        let mut writer = create_bam_writer(
            &self.io.output,
            &header,
            writer_threads,
            self.compression.compression_level,
        )?;
        let mut rejects_writer = create_optional_bam_writer(
            self.rejects_opts.rejects.as_ref(),
            &header,
            writer_threads,
            self.compression.compression_level,
        )?;
        let mut consensus_caller = DuplexConsensusCaller::new(
            read_name_prefix.clone(),
            self.read_group.read_group_id.clone(),
            self.min_reads.clone(),
            self.consensus.min_input_base_quality,
            self.consensus.output_per_base_tags,
            self.consensus.trim,
            self.max_reads_per_strand,
            Some(cell_tag),
            track_rejects,
            self.consensus.error_rate_pre_umi,
            self.consensus.error_rate_post_umi,
        )?;
        // Attach the reference only for methylation-aware calling.
        if let Some((ref reference, ref ref_names)) = methylation_ref {
            consensus_caller.set_reference(
                Arc::clone(reference),
                Arc::clone(ref_names),
                methylation_mode,
            );
        }
        let mut merged_overlapping_stats = CorrectionStats::new();
        let mut consensus_count = 0usize;
        let progress = ProgressTracker::new("Processed records").with_interval(1_000_000);
        // Stream raw records, skipping secondary/supplementary alignments and
        // templates where neither the read nor its mate is mapped.
        let raw_record_iter = std::iter::from_fn(move || {
            loop {
                let mut record = RawRecord::new();
                match raw_reader.read_record(&mut record) {
                    // 0 bytes read => end of file.
                    Ok(0) => return None,
                    Ok(_) => {
                        let flg = RawRecordView::new(&record).flags();
                        if flg & bam_fields::flags::SECONDARY != 0
                            || flg & bam_fields::flags::SUPPLEMENTARY != 0
                        {
                            continue;
                        }
                        let is_mapped = flg & bam_fields::flags::UNMAPPED == 0;
                        let has_mapped_mate = flg & bam_fields::flags::PAIRED != 0
                            && flg & bam_fields::flags::MATE_UNMAPPED == 0;
                        if is_mapped || has_mapped_mate {
                            return Some(Ok(record));
                        }
                    }
                    Err(e) => return Some(Err(e.into())),
                }
            }
        });
        // Group consecutive records by base MI (the /A or /B strand suffix
        // stripped) so both strands of a molecule land in the same group.
        let mi_group_iter =
            MiGroupIterator::with_transform(raw_record_iter, "MI", |mi_bytes: &[u8]| {
                let mi_str = String::from_utf8_lossy(mi_bytes);
                extract_mi_base(&mi_str).to_string()
            })
            .with_cell_tag(Some(*SamTag::CB));
        let mut overlapping_caller = if overlapping_enabled {
            Some(OverlappingBasesConsensusCaller::new(
                AgreementStrategy::Consensus,
                DisagreementStrategy::Consensus,
            ))
        } else {
            None
        };
        for group_result in mi_group_iter {
            let (_base_mi, mut records) = group_result.context("Failed to read MI group")?;
            // Only correct overlapping mates in groups that have both strands,
            // since only those can yield a duplex consensus.
            if let Some(ref mut oc) = overlapping_caller {
                if has_both_strands_raw(&records) {
                    apply_overlapping_consensus(&mut records, oc)?;
                }
            }
            let output = consensus_caller.consensus_reads(records)?;
            let batch_size = output.count;
            consensus_count += batch_size;
            // Consensus output is already serialized BAM bytes.
            writer.get_mut().write_all(&output.data).context("Failed to write consensus read")?;
            progress.log_if_needed(batch_size as u64);
        }
        let merged_stats = consensus_caller.statistics();
        if let Some(ref oc) = overlapping_caller {
            merged_overlapping_stats.merge(oc.stats());
        }
        // Rejected reads are raw BAM records; each is written with its
        // little-endian block-size prefix as the BAM format requires.
        if let Some(ref mut rw) = rejects_writer {
            let rejected_reads = consensus_caller.rejected_reads();
            for raw_record in rejected_reads {
                let block_size = raw_record.len() as u32;
                rw.get_mut()
                    .write_all(&block_size.to_le_bytes())
                    .context("Failed to write rejected read block size")?;
                rw.get_mut().write_all(raw_record).context("Failed to write rejected read")?;
            }
            info!("Wrote {} rejected reads", rejected_reads.len());
        }
        if let Some(rw) = rejects_writer {
            rw.into_inner().finish().context("Failed to finish rejects file")?;
        }
        progress.log_final();
        writer.into_inner().finish().context("Failed to finish output BAM")?;
        if overlapping_enabled {
            log_overlapping_stats(&merged_overlapping_stats);
        }
        let mut metrics = merged_stats.to_metrics();
        metrics.consensus_reads = consensus_count as u64;
        log_consensus_summary(&metrics);
        if let Some(stats_path) = &self.stats_opts.stats {
            DelimFile::default().write_tsv(stats_path, [metrics]).map_err(|e| {
                anyhow::anyhow!("Failed to write statistics to {}: {}", stats_path.display(), e)
            })?;
            info!("Statistics written to: {}", stats_path.display());
        }
        timer.log_completion(consensus_count as u64);
        info!("Done!");
        Ok(())
    }
}
impl Duplex {
    /// Multi-threaded duplex consensus calling via the unified BAM pipeline.
    ///
    /// Pipeline stages: records are grouped by base MI on the reader side
    /// (grouper factory closure), each batch of groups is consensus-called on
    /// a worker thread (process closure), and serialized output plus
    /// per-batch metrics are folded into per-thread slots (serialize
    /// closure), then merged after the run. Rejected reads, when tracked,
    /// are streamed to a shared mutex-guarded rejects writer as batches
    /// complete.
    #[expect(clippy::too_many_arguments, reason = "pipeline setup needs all configuration")]
    fn execute_threads_mode(
        &self,
        num_threads: usize,
        reader: Box<dyn std::io::Read + Send>,
        input_header: Header,
        read_name_prefix: String,
        track_rejects: bool,
        command_line: &str,
        methylation_ref: MethylationRef,
        methylation_mode: fgumi_consensus::MethylationMode,
    ) -> Result<()> {
        // Consensus reads are unaligned, so the output gets a fresh unmapped
        // header carrying the consensus read group.
        let output_header = create_unmapped_consensus_header(
            &input_header,
            &self.read_group.read_group_id,
            "Read group",
            command_line,
        )?;
        let mut pipeline_config = build_pipeline_config(
            &self.scheduler_opts,
            &self.compression,
            &self.queue_memory,
            num_threads,
        )?;
        // One metrics slot per thread; merged into totals after the pipeline.
        let collected_metrics = PerThreadAccumulator::<CollectedDuplexMetrics>::new(num_threads);
        let collected_metrics_for_serialize = Arc::clone(&collected_metrics);
        // Copy configuration into owned values the worker closures can move.
        let min_reads = self.min_reads.clone();
        let min_input_base_quality = self.consensus.min_input_base_quality;
        let output_per_base_tags = self.consensus.output_per_base_tags;
        let trim = self.consensus.trim;
        let max_reads_per_strand = self.max_reads_per_strand;
        let error_rate_pre_umi = self.consensus.error_rate_pre_umi;
        let error_rate_post_umi = self.consensus.error_rate_post_umi;
        let overlapping_enabled = self.overlapping.consensus_call_overlapping_bases;
        let read_group_id = self.read_group.read_group_id.clone();
        let cell_tag = Tag::from(SamTag::CB);
        let batch_size = 100;
        // Same filter as the single-threaded path: drop secondary and
        // supplementary records, and templates with no mapped end.
        let record_filter = |raw: &[u8]| -> bool {
            let flg = RawRecordView::new(raw).flags();
            if flg & bam_fields::flags::SECONDARY != 0
                || flg & bam_fields::flags::SUPPLEMENTARY != 0
            {
                return false;
            }
            let is_mapped = flg & bam_fields::flags::UNMAPPED == 0;
            let has_mapped_mate =
                flg & bam_fields::flags::PAIRED != 0 && flg & bam_fields::flags::MATE_UNMAPPED == 0;
            is_mapped || has_mapped_mate
        };
        // Strip the /A-/B strand suffix so both strands of a molecule group
        // together under the base MI.
        let mi_transform = |mi_bytes: &[u8]| -> String {
            let mi_str = String::from_utf8_lossy(mi_bytes);
            extract_mi_base(&mi_str).to_string()
        };
        // Shared rejects writer: worker threads lock and append rejected
        // reads as they are produced. Created only when a path was given.
        let rejects_writer: Option<Arc<Mutex<Option<RawBamWriter>>>> = if track_rejects {
            if let Some(path) = self.rejects_opts.rejects.as_ref() {
                let writer_threads = self.threading.num_threads();
                let rejects_header = header_as_unsorted(&input_header);
                let w = create_raw_bam_writer(
                    path,
                    &rejects_header,
                    writer_threads,
                    self.compression.compression_level,
                )?;
                Some(Arc::new(Mutex::new(Some(w))))
            } else {
                None
            }
        } else {
            None
        };
        let rejects_writer_for_process = rejects_writer.as_ref().map(Arc::clone);
        let library_index = LibraryIndex::from_header(&input_header);
        pipeline_config.group_key_config = Some(GroupKeyConfig::new(library_index, cell_tag));
        let pipeline_result = run_bam_pipeline_from_reader(
            pipeline_config,
            reader,
            input_header,
            &self.io.output,
            Some(output_header.clone()),
            // Grouper factory: builds the MI grouper used by the read stage.
            move |_header: &Header| {
                Box::new(
                    MiGrouper::with_filter_and_transform(
                        "MI",
                        batch_size,
                        record_filter,
                        mi_transform,
                    )
                    .with_cell_tag(Some(*SamTag::CB)),
                ) as Box<dyn Grouper<Group = MiGroupBatch> + Send>
            },
            // Process stage: consensus-call one batch of MI groups.
            move |batch: MiGroupBatch| -> io::Result<DuplexProcessedBatch> {
                // A fresh caller is built per batch and cleared per group.
                let mut caller = DuplexConsensusCaller::new(
                    read_name_prefix.clone(),
                    read_group_id.clone(),
                    min_reads.clone(),
                    min_input_base_quality,
                    output_per_base_tags,
                    trim,
                    max_reads_per_strand,
                    Some(cell_tag),
                    track_rejects,
                    error_rate_pre_umi,
                    error_rate_post_umi,
                )
                .map_err(|e| {
                    io::Error::other(format!("Failed to create DuplexConsensusCaller: {e}"))
                })?;
                if let Some((ref reference, ref ref_names)) = methylation_ref {
                    caller.set_reference(
                        Arc::clone(reference),
                        Arc::clone(ref_names),
                        methylation_mode,
                    );
                }
                let mut overlapping_caller = if overlapping_enabled {
                    Some(OverlappingBasesConsensusCaller::new(
                        AgreementStrategy::Consensus,
                        DisagreementStrategy::Consensus,
                    ))
                } else {
                    None
                };
                let mut all_output = ConsensusOutput::default();
                let mut batch_stats = ConsensusCallingStats::new();
                let mut batch_overlapping = CorrectionStats::new();
                let groups_count = batch.groups.len() as u64;
                // Appends raw rejected records to the shared rejects writer.
                let flush_byte_records = |recs: &[Vec<u8>]| -> io::Result<()> {
                    if let Some(ref rw_arc) = rejects_writer_for_process {
                        if !recs.is_empty() {
                            let mut guard = rw_arc.lock();
                            if let Some(w) = guard.as_mut() {
                                for raw in recs {
                                    w.write_raw_record(raw)?;
                                }
                            }
                        }
                    }
                    Ok(())
                };
                for MiGroup { mi, records: mut group_reads } in batch.groups {
                    caller.clear();
                    // Overlap correction only helps groups that can actually
                    // form a duplex (both /A and /B strands present).
                    if let Some(ref mut oc) = overlapping_caller {
                        if has_both_strands_raw(&group_reads) {
                            oc.reset_stats();
                            apply_overlapping_consensus(&mut group_reads, oc).map_err(|e| {
                                io::Error::other(format!(
                                    "Overlapping consensus error for MI {mi}: {e}"
                                ))
                            })?;
                            batch_overlapping.merge(oc.stats());
                        }
                    }
                    let batch_output = caller.consensus_reads(group_reads).map_err(|e| {
                        io::Error::other(format!("Duplex consensus error for MI {mi}: {e}"))
                    })?;
                    all_output.merge(batch_output);
                    batch_stats.merge(&caller.statistics());
                    if track_rejects {
                        flush_byte_records(&caller.take_rejected_reads())?;
                    }
                }
                Ok(DuplexProcessedBatch {
                    consensus_output: all_output,
                    groups_count,
                    stats: batch_stats,
                    overlapping_stats: if overlapping_enabled {
                        Some(batch_overlapping)
                    } else {
                        None
                    },
                })
            },
            // Serialize stage: emit the batch's bytes and fold its metrics
            // into this thread's accumulator slot.
            move |processed: DuplexProcessedBatch,
                  _header: &Header,
                  output: &mut Vec<u8>|
                  -> io::Result<u64> {
                let batch_stats = processed.stats;
                let batch_overlapping = processed.overlapping_stats;
                let groups_count = processed.groups_count;
                collected_metrics_for_serialize.with_slot(|m| {
                    m.stats.merge(&batch_stats);
                    if let Some(o) = batch_overlapping {
                        m.overlapping_stats.get_or_insert_with(CorrectionStats::new).merge(&o);
                    }
                    m.groups_processed += groups_count;
                });
                let count = processed.consensus_output.count as u64;
                output.extend_from_slice(&processed.consensus_output.data);
                Ok(count)
            },
        );
        // Always try to finish the rejects writer, even if the pipeline
        // failed, so both errors can be reported together below.
        let rejects_finish_result = rejects_writer
            .and_then(|rw_arc| rw_arc.lock().take())
            .map(|writer| writer.finish().context("Failed to finish rejects file"));
        let groups_processed = match (pipeline_result, rejects_finish_result) {
            (Ok(groups_processed), Some(Ok(()))) => {
                info!("Rejected reads streamed to rejects file during processing");
                groups_processed
            }
            (Ok(groups_processed), None) => groups_processed,
            (Ok(_), Some(Err(finish_err))) => return Err(finish_err),
            (Err(pipeline_err), Some(Err(finish_err))) => {
                return Err(anyhow::anyhow!(
                    "Pipeline error: {pipeline_err}; additionally failed to finish rejects file: {finish_err}"
                ));
            }
            (Err(pipeline_err), _) => {
                return Err(anyhow::anyhow!("Pipeline error: {pipeline_err}"));
            }
        };
        // Merge the per-thread metric slots into final totals.
        let mut total_groups = 0u64;
        let mut merged_stats = ConsensusCallingStats::new();
        let mut merged_overlapping_stats = CorrectionStats::new();
        for slot in collected_metrics.slots() {
            let m = slot.lock();
            total_groups += m.groups_processed;
            merged_stats.merge(&m.stats);
            if let Some(ref ocs) = m.overlapping_stats {
                merged_overlapping_stats.merge(ocs);
            }
        }
        if self.overlapping.consensus_call_overlapping_bases {
            log_overlapping_stats(&merged_overlapping_stats);
        }
        info!("Duplex consensus calling complete");
        info!("Total MI groups processed: {total_groups}");
        info!("Total groups processed by pipeline: {groups_processed}");
        let metrics = merged_stats.to_metrics();
        let consensus_count = metrics.consensus_reads;
        log_consensus_summary(&metrics);
        if let Some(stats_path) = &self.stats_opts.stats {
            // NOTE(review): this path writes key-value metrics while the
            // single-threaded path writes the metrics row directly — confirm
            // the difference in stats-file format is intentional.
            let kv_metrics = metrics.to_kv_metrics();
            DelimFile::default()
                .write_tsv(stats_path, kv_metrics)
                .with_context(|| format!("Failed to write statistics: {}", stats_path.display()))?;
            info!("Wrote statistics to: {}", stats_path.display());
        }
        info!("Wrote {consensus_count} duplex consensus reads");
        Ok(())
    }
}
/// Returns `true` if the group contains reads whose MI tags carry both the
/// `/A` and `/B` strand suffixes, i.e. both strands of the duplex molecule
/// are represented. Records without an MI tag, or whose MI lacks a strand
/// suffix, contribute to neither strand.
fn has_both_strands_raw(records: &[RawRecord]) -> bool {
    // A duplex needs at least one read from each strand.
    if records.len() < 2 {
        return false;
    }
    let mut seen_a = false;
    let mut seen_b = false;
    for record in records {
        let Some(mi) = bam_fields::find_string_tag_in_record(record, b"MI") else {
            continue;
        };
        // BAM string tag values are NUL-terminated; drop the terminator.
        let mi = mi.strip_suffix(&[0]).unwrap_or(mi);
        match mi {
            [.., b'/', b'A'] => seen_a = true,
            [.., b'/', b'B'] => seen_b = true,
            _ => {}
        }
        // Stop scanning as soon as both strands have been observed.
        if seen_a && seen_b {
            return true;
        }
    }
    false
}
#[cfg(test)]
mod tests {
use super::*;
use crate::bam_io::{create_bam_reader, create_bam_writer};
use anyhow::Result;
use fgumi_raw_bam::{
SamBuilder as RawSamBuilder, flags, raw_record_to_record_buf, testutil::encode_op,
};
use noodles::sam;
use noodles::sam::alignment::io::Write as AlignmentWrite;
use noodles::sam::alignment::record::data::field::Tag;
use noodles::sam::alignment::record_buf::data::field::Value;
use noodles::sam::header::record::value::Map;
use noodles::sam::header::record::value::map::ReferenceSequence;
use rstest::rstest;
use std::collections::HashSet;
use std::num::NonZeroUsize;
use std::path::PathBuf;
use tempfile::{NamedTempFile, TempDir};
/// Temporary working paths for an integration test.
struct TestPaths {
    // The TempDir is held so the directory (and everything written into it)
    // lives until the TestPaths value is dropped.
    #[allow(dead_code)]
    dir: TempDir,
    pub output: PathBuf,
}
impl TestPaths {
    /// Creates a fresh temp directory with `output.bam` as the default
    /// output path inside it.
    fn new() -> Result<Self> {
        let dir = TempDir::new()?;
        Ok(Self { output: dir.path().join("output.bam"), dir })
    }
    /// Returns an additional numbered output path (`output<n>.bam`) inside
    /// the same temp directory.
    fn output_n(&self, n: usize) -> PathBuf {
        self.dir.path().join(format!("output{n}.bam"))
    }
}
/// Builds a `Duplex` command with test-friendly settings: single-threaded,
/// fastest compression, min-reads of 1, and no rejects/stats/methylation
/// outputs configured.
fn create_duplex_with_paths(input: PathBuf, output: PathBuf) -> Duplex {
    Duplex {
        io: BamIoOptions { input, output, async_reader: false },
        rejects_opts: RejectsOptions::default(),
        stats_opts: StatsOptions::default(),
        read_group: ReadGroupOptions {
            read_name_prefix: Some("consensus".to_string()),
            read_group_id: "duplex".to_string(),
        },
        consensus: ConsensusCallingOptions {
            output_per_base_tags: false,
            // Keep all consensus bases regardless of quality.
            min_consensus_base_quality: 0,
            ..ConsensusCallingOptions::default()
        },
        overlapping: OverlappingConsensusOptions::default(),
        threading: ThreadingOptions::none(),
        // Fastest compression keeps the tests quick.
        compression: CompressionOptions { compression_level: 1 },
        min_reads: vec![1],
        max_reads_per_strand: None,
        scheduler_opts: SchedulerOptions::default(),
        queue_memory: QueueMemoryOptions::default(),
        methylation_mode: None,
        reference: None,
    }
}
/// Builds a minimal SAM header for tests: a header record, one program
/// record ("fgumi"), and a single reference sequence named "chr1" with the
/// GRCh38 chr1 length.
fn create_test_header() -> sam::Header {
    use noodles::sam::header::record::value::map::Program;
    let chr1_len =
        NonZeroUsize::new(248_956_422).expect("non-zero chromosome length");
    sam::Header::builder()
        .set_header(Map::default())
        .add_program("fgumi", Map::<Program>::default())
        .add_reference_sequence("chr1", Map::<ReferenceSequence>::new(chr1_len))
        .build()
}
/// Converts a raw BAM record into a noodles `RecordBuf`, panicking on
/// failure since this is test-only code.
// NOTE(review): conversion uses an empty default header; assumes the
// converter does not need reference-sequence names — confirm if refactoring.
fn to_record_buf(raw: fgumi_raw_bam::RawRecord) -> sam::alignment::RecordBuf {
    raw_record_to_record_buf(&raw, &sam::Header::default())
        .expect("raw_record_to_record_buf failed in test")
}
/// Builds a single mapped test read with the given 1-based position, raw
/// SAM flags, and bases; qualities are fixed at Q40 and the CIGAR is a
/// single full-length match.
#[allow(clippy::cast_sign_loss)]
fn build_test_read(
    name: &str,
    ref_id: usize,
    pos: i32,
    mapq: u8,
    raw_flags: u16,
    bases: &[u8],
) -> sam::alignment::RecordBuf {
    let match_op = encode_op(0, bases.len());
    let quals = vec![40u8; bases.len()];
    let mut builder = RawSamBuilder::new();
    builder
        .read_name(name.as_bytes())
        .flags(raw_flags)
        .ref_id(ref_id as i32)
        // Raw BAM positions are 0-based; callers pass 1-based.
        .pos(pos - 1)
        .mapq(mapq)
        .cigar_ops(&[match_op])
        .sequence(bases)
        .qualities(&quals);
    to_record_buf(builder.build())
}
/// Builds an FR read pair (R1 forward at `pos1`, R2 reverse at `pos2`) with
/// the given MI tag and optional RX (UMI) and CB (cell barcode) tags.
/// Both mates share the read name, bases, Q40 qualities, full-length match
/// CIGAR, and MAPQ 60. Positions are 1-based.
#[allow(clippy::cast_sign_loss, clippy::too_many_arguments)]
fn build_duplex_pair(
    name: &str,
    ref_id: usize,
    pos1: i32,
    pos2: i32,
    mi_tag: &str,
    bases: &[u8],
    rx_tag: Option<&str>,
    cell_tag: Option<&str>,
) -> (sam::alignment::RecordBuf, sam::alignment::RecordBuf) {
    // R1: first of pair, forward, with a reverse-strand mate.
    let r1 = build_duplex_mate(
        name,
        ref_id,
        pos1,
        pos2,
        flags::PAIRED | flags::FIRST_SEGMENT | flags::MATE_REVERSE,
        mi_tag,
        bases,
        rx_tag,
        cell_tag,
    );
    // R2: last of pair, reverse, mate positions mirrored.
    let r2 = build_duplex_mate(
        name,
        ref_id,
        pos2,
        pos1,
        flags::PAIRED | flags::LAST_SEGMENT | flags::REVERSE,
        mi_tag,
        bases,
        rx_tag,
        cell_tag,
    );
    (r1, r2)
}

/// Builds one mate of a duplex pair: shared CIGAR/quality/tag handling for
/// `build_duplex_pair`, parameterized by flags and (position, mate position).
#[allow(clippy::cast_sign_loss, clippy::too_many_arguments)]
fn build_duplex_mate(
    name: &str,
    ref_id: usize,
    pos: i32,
    mate_pos: i32,
    raw_flags: u16,
    mi_tag: &str,
    bases: &[u8],
    rx_tag: Option<&str>,
    cell_tag: Option<&str>,
) -> sam::alignment::RecordBuf {
    let cigar = encode_op(0, bases.len());
    let qual = vec![40u8; bases.len()];
    let mut b = RawSamBuilder::new();
    b.read_name(name.as_bytes())
        .flags(raw_flags)
        .ref_id(ref_id as i32)
        // Raw BAM positions are 0-based; callers pass 1-based.
        .pos(pos - 1)
        .mapq(60)
        .cigar_ops(&[cigar])
        .sequence(bases)
        .qualities(&qual)
        .mate_ref_id(ref_id as i32)
        .mate_pos(mate_pos - 1);
    b.add_string_tag(b"MI", mi_tag.as_bytes());
    if let Some(rx) = rx_tag {
        b.add_string_tag(b"RX", rx.as_bytes());
    }
    if let Some(cell) = cell_tag {
        b.add_string_tag(b"CB", cell.as_bytes());
    }
    to_record_buf(b.build())
}
/// Writes `records` to a fresh temp BAM, first sorting them by the tuple
/// (min of start/mate-start, max of start/mate-start, MI tag, read name)
/// so grouped mates appear in the order the MI grouper expects.
fn create_test_bam(mut records: Vec<sam::alignment::RecordBuf>) -> Result<NamedTempFile> {
    let temp_file = NamedTempFile::new()?;
    let header = create_test_header();
    records.sort_by_key(|r| {
        // Unmapped positions sort last via usize::MAX.
        let pos = r.alignment_start().map_or(usize::MAX, usize::from);
        let mate_pos = r.mate_alignment_start().map_or(usize::MAX, usize::from);
        let min_pos = pos.min(mate_pos);
        let max_pos = pos.max(mate_pos);
        let mi_tag = noodles::sam::alignment::record::data::field::Tag::from([b'M', b'I']);
        // Records without an MI tag sort before all tagged records ("").
        let mi =
            if let Some(noodles::sam::alignment::record_buf::data::field::Value::String(s)) =
                r.data().get(&mi_tag)
            {
                String::from_utf8_lossy(&s.iter().copied().collect::<Vec<u8>>()).to_string()
            } else {
                String::new()
            };
        let name = r.name().map(|n| n.to_vec()).unwrap_or_default();
        (min_pos, max_pos, mi, name)
    });
    let mut writer = create_bam_writer(temp_file.path(), &header, 1, 6)?;
    for record in records {
        writer.write_alignment_record(&header, &record)?;
    }
    // Drop the writer to flush and finalize the BAM before returning.
    drop(writer);
    Ok(temp_file)
}
/// Reads every record from the BAM at `path` into `RecordBuf`s, failing on
/// the first read or conversion error.
fn read_bam_records(path: &std::path::Path) -> Result<Vec<sam::alignment::RecordBuf>> {
    let (mut reader, header) = create_bam_reader(path, 1)?;
    reader
        .records()
        .map(|result| -> Result<sam::alignment::RecordBuf> {
            let record = result?;
            Ok(sam::alignment::RecordBuf::try_from_alignment_record(&header, &record)?)
        })
        .collect()
}
/// Looks up a two-character string tag on `record`, returning its value as
/// a `String`; `None` if the tag is absent or is not a string value.
fn get_string_tag(record: &sam::alignment::RecordBuf, tag_name: &str) -> Option<String> {
    let name = tag_name.as_bytes();
    let tag = Tag::from([name[0], name[1]]);
    match record.data().get(&tag) {
        Some(Value::String(s)) => Some(String::from_utf8_lossy(s).to_string()),
        _ => None,
    }
}
/// Counts distinct MI tag values across `records`; records without an MI
/// string tag are ignored.
fn count_unique_mi_tags(records: &[sam::alignment::RecordBuf]) -> usize {
    records
        .iter()
        .filter_map(|record| get_string_tag(record, "MI"))
        .collect::<HashSet<String>>()
        .len()
}
/// The test constructor should leave the duplex defaults untouched.
#[test]
fn test_default_parameters() {
    let cmd = create_duplex_with_paths(PathBuf::from("test.bam"), PathBuf::from("output.bam"));
    assert_eq!(cmd.min_reads, vec![1]);
    assert_eq!(cmd.consensus.min_input_base_quality, 10);
    assert!(cmd.threading.is_single_threaded());
    assert!(!cmd.consensus.trim);
}
/// A single --min-reads value is stored as-is.
#[test]
fn test_custom_min_reads_single_value() {
    let mut cmd = create_duplex_with_paths(PathBuf::from("test.bam"), PathBuf::from("output.bam"));
    cmd.min_reads = vec![5];
    assert_eq!(cmd.min_reads, vec![5]);
}
/// Three --min-reads values (duplex, strand 1, strand 2) are kept in order.
#[test]
fn test_custom_min_reads_three_values() {
    let mut cmd = create_duplex_with_paths(PathBuf::from("test.bam"), PathBuf::from("output.bam"));
    cmd.min_reads = vec![10, 5, 3];
    assert_eq!(cmd.min_reads, vec![10, 5, 3]);
}
/// The per-base tag flag can be toggled on after construction.
#[test]
fn test_output_per_base_tags_enabled() {
    let mut cmd = create_duplex_with_paths(PathBuf::from("test.bam"), PathBuf::from("output.bam"));
    cmd.consensus.output_per_base_tags = true;
    assert!(cmd.consensus.output_per_base_tags);
}
/// Setting the threading options records the requested thread count.
#[test]
fn test_multithreaded_configuration() {
    let mut cmd = create_duplex_with_paths(PathBuf::from("test.bam"), PathBuf::from("output.bam"));
    cmd.threading = ThreadingOptions::new(8);
    assert_eq!(cmd.threading.threads, Some(8));
}
/// Trim and per-strand downsampling options are stored independently.
#[test]
fn test_trim_and_downsample_options() {
    let mut cmd = create_duplex_with_paths(PathBuf::from("test.bam"), PathBuf::from("output.bam"));
    cmd.consensus.trim = true;
    cmd.max_reads_per_strand = Some(100);
    assert!(cmd.consensus.trim);
    assert_eq!(cmd.max_reads_per_strand, Some(100));
}
/// End-to-end: one /A pair and one /B pair sharing base MI "1" should yield
/// a single duplex consensus pair (2 records) whose MI tag has the strand
/// suffix stripped and whose RX tag is preserved.
#[test]
fn test_duplex_consensus_basic_ab_ba_pairing() -> Result<()> {
    let mut records = Vec::new();
    let (r1_a, r2_a) =
        build_duplex_pair("q1", 0, 100, 200, "1/A", b"AAAAAAAAAA", Some("AAT-CCG"), None);
    records.push(r1_a);
    records.push(r2_a);
    // B-strand pair built manually with mirrored orientation:
    // 0x53 = paired|proper|reverse|first-of-pair; 0x83 = paired|proper|last-of-pair.
    let flags1 = 0x53;
    let flags2 = 0x83;
    let mut r1_b = build_test_read("q2", 0, 200, 60, flags1, b"TTTTTTTTTT");
    let mut r2_b = build_test_read("q2", 0, 100, 60, flags2, b"AAAAAAAAAA");
    *r1_b.mate_reference_sequence_id_mut() = Some(0);
    *r1_b.mate_alignment_start_mut() = noodles::core::Position::try_from(100_usize).ok();
    *r2_b.mate_reference_sequence_id_mut() = Some(0);
    *r2_b.mate_alignment_start_mut() = noodles::core::Position::try_from(200_usize).ok();
    let mi = Tag::from([b'M', b'I']);
    r1_b.data_mut().insert(mi, Value::String(b"1/B".into()));
    r2_b.data_mut().insert(mi, Value::String(b"1/B".into()));
    let rx = Tag::from([b'R', b'X']);
    // The B-strand UMI is the A-strand UMI with its halves swapped.
    r1_b.data_mut().insert(rx, Value::String(b"CCG-AAT".into()));
    r2_b.data_mut().insert(rx, Value::String(b"CCG-AAT".into()));
    records.push(r1_b);
    records.push(r2_b);
    let input = create_test_bam(records)?;
    let paths = TestPaths::new()?;
    let cmd = create_duplex_with_paths(input.path().to_path_buf(), paths.output.clone());
    cmd.execute("test")?;
    let output_records = read_bam_records(&paths.output)?;
    assert_eq!(output_records.len(), 2, "Should have 2 duplex consensus reads");
    for record in &output_records {
        let mi = get_string_tag(record, "MI");
        assert!(mi.is_some(), "MI tag should be present");
        let mi = mi.expect("MI tag should have a value");
        assert_eq!(mi, "1", "MI tag should be '1' without /A or /B suffix");
    }
    for record in &output_records {
        let rx = get_string_tag(record, "RX");
        assert!(rx.is_some(), "RX tag should be preserved");
    }
    Ok(())
}
/// With the /A pair's R2 reverse-complement flag cleared, the strand
/// orientations no longer mirror each other, so no duplex consensus should
/// be produced.
#[test]
fn test_duplex_no_consensus_when_strands_mismatched_r1() -> Result<()> {
    let mut records = Vec::new();
    let (r1, mut r2) = build_duplex_pair("ab1", 0, 100, 200, "1/A", b"AAAAAAAAAA", None, None);
    // Clear REVERSE_COMPLEMENTED on R2 to break the FR orientation.
    let mut flags2 = r2.flags();
    flags2.set(noodles::sam::alignment::record::Flags::REVERSE_COMPLEMENTED, false);
    *r2.flags_mut() = flags2;
    records.push(r1);
    records.push(r2);
    // A well-formed /B pair:
    // 0x53 = paired|proper|reverse|first-of-pair; 0x83 = paired|proper|last-of-pair.
    let flags1 = 0x53;
    let flags2 = 0x83;
    let mut r1 = build_test_read("ba1", 0, 200, 60, flags1, b"AAAAAAAAAA");
    let mut r2 = build_test_read("ba1", 0, 100, 60, flags2, b"AAAAAAAAAA");
    *r1.mate_reference_sequence_id_mut() = Some(0);
    *r1.mate_alignment_start_mut() = noodles::core::Position::try_from(100_usize).ok();
    *r2.mate_reference_sequence_id_mut() = Some(0);
    *r2.mate_alignment_start_mut() = noodles::core::Position::try_from(200_usize).ok();
    let mi = Tag::from([b'M', b'I']);
    r1.data_mut().insert(mi, Value::String(b"1/B".into()));
    r2.data_mut().insert(mi, Value::String(b"1/B".into()));
    records.push(r1);
    records.push(r2);
    let input = create_test_bam(records)?;
    let paths = TestPaths::new()?;
    let cmd = create_duplex_with_paths(input.path().to_path_buf(), paths.output.clone());
    cmd.execute("test")?;
    let output_records = read_bam_records(&paths.output)?;
    assert_eq!(output_records.len(), 0, "Should have 0 consensus reads due to strand mismatch");
    Ok(())
}
/// With --min-reads 3,3,3 the /A strand has 5 pairs but the /B strand only
/// 2, so the per-strand minimum fails and no duplex consensus is produced.
#[test]
fn test_duplex_with_min_reads_filtering() -> Result<()> {
    let mut records = Vec::new();
    // Five /A pairs — enough on their own.
    for i in 1..=5 {
        let (r1, r2) = build_duplex_pair(
            &format!("ab{i}"),
            0,
            100,
            100,
            "1/A",
            b"AAAAAAAAAA",
            Some("AAT-CCG"),
            None,
        );
        records.push(r1);
        records.push(r2);
    }
    // Only two /B pairs — below the min of 3 for that strand.
    for i in 1..=2 {
        // 0x53 = paired|proper|reverse|first-of-pair; 0x83 = paired|proper|last-of-pair.
        let flags1 = 0x53;
        let flags2 = 0x83;
        let mut r1 = build_test_read(&format!("ba{i}"), 0, 100, 60, flags1, b"AAAAAAAAAA");
        let mut r2 = build_test_read(&format!("ba{i}"), 0, 100, 60, flags2, b"AAAAAAAAAA");
        *r1.mate_reference_sequence_id_mut() = Some(0);
        *r1.mate_alignment_start_mut() = noodles::core::Position::try_from(100_usize).ok();
        *r2.mate_reference_sequence_id_mut() = Some(0);
        *r2.mate_alignment_start_mut() = noodles::core::Position::try_from(100_usize).ok();
        let mi = Tag::from([b'M', b'I']);
        r1.data_mut().insert(mi, Value::String(b"1/B".into()));
        r2.data_mut().insert(mi, Value::String(b"1/B".into()));
        let rx = Tag::from([b'R', b'X']);
        r1.data_mut().insert(rx, Value::String(b"CCG-AAT".into()));
        r2.data_mut().insert(rx, Value::String(b"CCG-AAT".into()));
        records.push(r1);
        records.push(r2);
    }
    let input = create_test_bam(records)?;
    let paths = TestPaths::new()?;
    let mut cmd = create_duplex_with_paths(input.path().to_path_buf(), paths.output.clone());
    cmd.min_reads = vec![3, 3, 3];
    cmd.execute("test")?;
    let output_records = read_bam_records(&paths.output)?;
    assert_eq!(
        output_records.len(),
        0,
        "Should have 0 consensus reads because BA strand has insufficient reads"
    );
    Ok(())
}
#[test]
fn test_duplex_with_per_base_tags() -> Result<()> {
    // Three pairs on each strand form one complete duplex group; with
    // output_per_base_tags enabled, the consensus reads must carry the
    // per-base ad/bd tags.
    let mut records = Vec::new();
    for idx in 1..=3 {
        let (top, bottom) = build_duplex_pair(
            &format!("ab{idx}"),
            0,
            100,
            100,
            "1/A",
            b"AAAAAAAAAA",
            Some("AAT-CCG"),
            None,
        );
        records.extend([top, bottom]);
    }
    let mi_tag = Tag::from([b'M', b'I']);
    let rx_tag = Tag::from([b'R', b'X']);
    for idx in 1..=3 {
        // BA-strand pair: same positions, swapped UMI halves, MI "1/B".
        let mut pair = [
            build_test_read(&format!("ba{idx}"), 0, 100, 60, 0x53, b"AAAAAAAAAA"),
            build_test_read(&format!("ba{idx}"), 0, 100, 60, 0x83, b"AAAAAAAAAA"),
        ];
        for read in &mut pair {
            *read.mate_reference_sequence_id_mut() = Some(0);
            *read.mate_alignment_start_mut() =
                noodles::core::Position::try_from(100_usize).ok();
            read.data_mut().insert(mi_tag, Value::String(b"1/B".into()));
            read.data_mut().insert(rx_tag, Value::String(b"CCG-AAT".into()));
        }
        records.extend(pair);
    }
    let input = create_test_bam(records)?;
    let paths = TestPaths::new()?;
    let mut cmd = create_duplex_with_paths(input.path().to_path_buf(), paths.output.clone());
    cmd.consensus.output_per_base_tags = true;
    cmd.execute("test")?;
    let output_records = read_bam_records(&paths.output)?;
    assert_eq!(output_records.len(), 2, "Should have 2 consensus reads");
    for record in &output_records {
        let has_ad = record.data().get(&Tag::from([b'a', b'd'])).is_some();
        let has_bd = record.data().get(&Tag::from([b'b', b'd'])).is_some();
        assert!(has_ad || has_bd, "Per-base tags should be present when enabled");
    }
    Ok(())
}
#[test]
fn test_duplex_with_cell_barcode_preservation() -> Result<()> {
    // Every input read carries CB:CELLBC; the duplex consensus reads are
    // expected to retain that cell barcode.
    let mut records = Vec::new();
    for idx in 1..=3 {
        let (top, bottom) = build_duplex_pair(
            &format!("ab{idx}"),
            0,
            100,
            100,
            "1/A",
            b"AAAAAAAAAA",
            Some("AAT-CCG"),
            Some("CELLBC"),
        );
        records.extend([top, bottom]);
    }
    let mi_tag = Tag::from([b'M', b'I']);
    let rx_tag = Tag::from([b'R', b'X']);
    let cb_tag = Tag::from([b'C', b'B']);
    for idx in 1..=3 {
        // BA-strand pair: swapped UMI halves, MI "1/B", same cell barcode.
        let mut pair = [
            build_test_read(&format!("ba{idx}"), 0, 100, 60, 0x53, b"AAAAAAAAAA"),
            build_test_read(&format!("ba{idx}"), 0, 100, 60, 0x83, b"AAAAAAAAAA"),
        ];
        for read in &mut pair {
            *read.mate_reference_sequence_id_mut() = Some(0);
            *read.mate_alignment_start_mut() =
                noodles::core::Position::try_from(100_usize).ok();
            read.data_mut().insert(mi_tag, Value::String(b"1/B".into()));
            read.data_mut().insert(rx_tag, Value::String(b"CCG-AAT".into()));
            read.data_mut().insert(cb_tag, Value::String(b"CELLBC".into()));
        }
        records.extend(pair);
    }
    let input = create_test_bam(records)?;
    let paths = TestPaths::new()?;
    let cmd = create_duplex_with_paths(input.path().to_path_buf(), paths.output.clone());
    cmd.execute("test")?;
    let output_records = read_bam_records(&paths.output)?;
    assert_eq!(output_records.len(), 2, "Should have 2 consensus reads");
    for record in &output_records {
        let cell_bc = get_string_tag(record, "CB");
        assert_eq!(cell_bc, Some("CELLBC".to_string()), "Cell barcode should be preserved");
    }
    Ok(())
}
#[test]
fn test_duplex_multithreading_produces_same_results() -> Result<()> {
    // Three duplex groups, each with three AB- and three BA-strand pairs.
    // The same input run single-threaded and with a 4-thread pipeline must
    // produce the same number of consensus reads and the same MI tag count.
    let mut records = Vec::new();
    let mi_tag = Tag::from([b'M', b'I']);
    let rx_tag = Tag::from([b'R', b'X']);
    for group in 1..=3 {
        for idx in 1..=3 {
            let (top, bottom) = build_duplex_pair(
                &format!("g{group}_ab{idx}"),
                0,
                100 * group,
                100 * group,
                &format!("{group}/A"),
                b"AAAAAAAAAA",
                Some("AAT-CCG"),
                None,
            );
            records.extend([top, bottom]);
        }
        for idx in 1..=3 {
            // BA-strand pair for this group, anchored at the group's position.
            let mut pair = [
                build_test_read(
                    &format!("g{group}_ba{idx}"),
                    0,
                    100 * group,
                    60,
                    0x53,
                    b"AAAAAAAAAA",
                ),
                build_test_read(
                    &format!("g{group}_ba{idx}"),
                    0,
                    100 * group,
                    60,
                    0x83,
                    b"AAAAAAAAAA",
                ),
            ];
            for read in &mut pair {
                *read.mate_reference_sequence_id_mut() = Some(0);
                *read.mate_alignment_start_mut() =
                    noodles::core::Position::try_from((100 * group) as usize).ok();
                read.data_mut()
                    .insert(mi_tag, Value::String(format!("{group}/B").as_bytes().into()));
                read.data_mut().insert(rx_tag, Value::String(b"CCG-AAT".into()));
            }
            records.extend(pair);
        }
    }
    let input1 = create_test_bam(records.clone())?;
    let paths = TestPaths::new()?;
    let cmd1 = create_duplex_with_paths(input1.path().to_path_buf(), paths.output.clone());
    cmd1.execute("test")?;
    let output1_records = read_bam_records(&paths.output)?;
    let input2 = create_test_bam(records)?;
    let output2_path = paths.output_n(2);
    let mut cmd2 = create_duplex_with_paths(input2.path().to_path_buf(), output2_path.clone());
    cmd2.threading = ThreadingOptions::new(4);
    cmd2.execute("test")?;
    let output2_records = read_bam_records(&output2_path)?;
    assert_eq!(
        output1_records.len(),
        output2_records.len(),
        "Single-threaded and multi-threaded should produce same number of reads"
    );
    assert_eq!(output1_records.len(), 6, "Should have 6 consensus reads (3 duplex pairs)");
    assert_eq!(count_unique_mi_tags(&output1_records), 3, "Should have 3 unique MI tags");
    assert_eq!(count_unique_mi_tags(&output2_records), 3, "Should have 3 unique MI tags");
    Ok(())
}
#[test]
fn test_duplex_only_one_strand_no_consensus() -> Result<()> {
    // Only AB-strand pairs are supplied (MI "1/A", no "1/B" mates); a duplex
    // consensus needs both strands, so the output BAM must be empty.
    let mut records = Vec::new();
    for idx in 1..=5 {
        let (top, bottom) = build_duplex_pair(
            &format!("ab{idx}"),
            0,
            100,
            100,
            "1/A",
            b"AAAAAAAAAA",
            Some("AAT-CCG"),
            None,
        );
        records.extend([top, bottom]);
    }
    let input = create_test_bam(records)?;
    let paths = TestPaths::new()?;
    let cmd = create_duplex_with_paths(input.path().to_path_buf(), paths.output.clone());
    cmd.execute("test")?;
    let output_records = read_bam_records(&paths.output)?;
    assert_eq!(
        output_records.len(),
        0,
        "Should have 0 consensus reads when only one strand is present"
    );
    Ok(())
}
#[rstest]
#[case::fast_path(ThreadingOptions::none())]
#[case::pipeline_1(ThreadingOptions::new(1))]
#[case::pipeline_2(ThreadingOptions::new(2))]
fn test_threading_modes(#[case] threading: ThreadingOptions) -> Result<()> {
    // Smoke test: the command must run to completion and write an output BAM
    // under every threading configuration (fast path and 1/2-thread pipeline).
    let mut records = Vec::new();
    // Two strand groups ("a*" reads with MI 1/A, "b*" reads with MI 1/B),
    // five pairs each — same construction as the original two loops.
    for (prefix, mi) in [("a", "1/A"), ("b", "1/B")] {
        for i in 0..5 {
            let (r1, r2) = build_duplex_pair(
                &format!("{prefix}{i}"),
                0,
                100,
                200,
                mi,
                b"AAAAAAAAAA",
                Some("AAT-CCG"),
                None,
            );
            records.push(r1);
            records.push(r2);
        }
    }
    let input = create_test_bam(records)?;
    let paths = TestPaths::new()?;
    let mut cmd = create_duplex_with_paths(input.path().to_path_buf(), paths.output.clone());
    cmd.threading = threading;
    cmd.execute("test")?;
    // Was `assert!(&paths.output.exists())`: asserting on a `&bool` only
    // compiled via `Not for &bool` (clippy: needless_borrow). Assert on the
    // bool directly and say what went wrong on failure.
    assert!(paths.output.exists(), "output BAM should exist after execute");
    Ok(())
}
#[test]
fn test_duplex_processed_batch_memory_estimate() {
    // Allocate 1024 bytes of capacity but only 100 bytes of length: per the
    // assertion below, the heap estimate tracks the buffer's capacity.
    let mut buffer = Vec::with_capacity(1024);
    buffer.resize(100, 0u8); // length 100; capacity stays 1024
    let batch = DuplexProcessedBatch {
        consensus_output: ConsensusOutput { data: buffer, count: 1 },
        groups_count: 1,
        stats: ConsensusCallingStats::default(),
        overlapping_stats: None,
    };
    assert_eq!(
        batch.estimate_heap_size(),
        1024,
        "estimate should match consensus_output capacity"
    );
}
#[rstest]
#[case::single_threaded(ThreadingOptions::none())]
#[case::multi_threaded(ThreadingOptions::new(2))]
fn test_duplex_em_seq_command(#[case] threading: ThreadingOptions) -> Result<()> {
    use std::io::Write;
    // Write a 300 bp all-C reference FASTA; in EM-Seq mode the duplex
    // consensus reads are expected to carry the cu/ct tags.
    let ref_file = {
        let mut f = tempfile::NamedTempFile::new()?;
        writeln!(f, ">chr1")?;
        writeln!(f, "{}", "C".repeat(300))?;
        f.flush()?;
        f
    };
    let mut records = Vec::new();
    for idx in 0..3 {
        let (top, bottom) = build_duplex_pair(
            &format!("ab{idx}"),
            0,
            100,
            100,
            "1/A",
            b"CCCCCCCCCC",
            None,
            None,
        );
        records.extend([top, bottom]);
    }
    let mi_tag = Tag::from([b'M', b'I']);
    for idx in 0..3 {
        // BA-strand pair with MI "1/B" (no UMI tag, matching the AB reads).
        let mut pair = [
            build_test_read(&format!("ba{idx}"), 0, 100, 60, 0x53_u16, b"CCCCCCCCCC"),
            build_test_read(&format!("ba{idx}"), 0, 100, 60, 0x83_u16, b"CCCCCCCCCC"),
        ];
        for read in &mut pair {
            *read.mate_reference_sequence_id_mut() = Some(0);
            *read.mate_alignment_start_mut() =
                noodles::core::Position::try_from(100_usize).ok();
            read.data_mut().insert(mi_tag, Value::String(b"1/B".into()));
        }
        records.extend(pair);
    }
    let input = create_test_bam(records)?;
    let paths = TestPaths::new()?;
    let mut cmd = create_duplex_with_paths(input.path().to_path_buf(), paths.output.clone());
    cmd.methylation_mode = Some(crate::commands::common::MethylationModeArg::EmSeq);
    cmd.reference = Some(ref_file.path().to_path_buf());
    cmd.threading = threading;
    cmd.execute("test")?;
    let output_records = read_bam_records(&paths.output)?;
    assert_eq!(output_records.len(), 2, "Should have 2 duplex consensus reads");
    for record in &output_records {
        let cu_tag = Tag::from([b'c', b'u']);
        assert!(record.data().get(&cu_tag).is_some(), "cu tag should be present with EM-Seq");
        let ct_tag = Tag::from([b'c', b't']);
        assert!(record.data().get(&ct_tag).is_some(), "ct tag should be present with EM-Seq");
    }
    Ok(())
}
}