/// Interprets the last (up to) eight bytes of `data` as a big-endian
/// unsigned integer. Shorter slices are zero-extended; longer slices keep
/// only their trailing eight bytes. An empty slice yields 0.
pub fn to_int_be(data: &[u8]) -> u64 {
    let tail = if data.len() > 8 {
        &data[data.len() - 8..]
    } else {
        data
    };
    let mut value = 0u64;
    for &byte in tail {
        value = (value << 8) | u64::from(byte);
    }
    value
}
use crate::tags::PaddingInfo;
use crate::util::resize_bytes;
use crate::vorbis::VCommentDict;
use crate::{AudexError, FileType, Result, StreamInfo, VERSION_STRING};
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use std::fs::{File, OpenOptions};
use std::io::{BufReader, Cursor, ErrorKind, Read, Seek, SeekFrom, Write};
use std::path::Path;
#[cfg(feature = "async")]
use crate::util::resize_bytes_async;
#[cfg(feature = "async")]
use tokio::fs::{File as TokioFile, OpenOptions as TokioOpenOptions};
#[cfg(feature = "async")]
use tokio::io::BufReader as TokioBufReader;
/// One-shot callback deciding the padding size for a save, given the current
/// padding state. NOTE(review): the meaning of a negative i64 return is not
/// visible in this chunk — confirm against the padding/resize logic.
type PaddingFunction = Box<dyn FnOnce(&PaddingInfo) -> i64>;
use std::cmp::min;
use std::time::Duration;
/// Tuning knobs controlling how strictly FLAC metadata is parsed.
#[derive(Debug, Clone)]
pub struct FLACParseOptions {
    /// Re-derive block sizes from the data instead of trusting headers.
    pub distrust_size: bool,
    /// Upper bound in bytes accepted for a single metadata block.
    pub max_block_size: u32,
    /// Record recoverable errors in `parse_errors` and keep going.
    pub ignore_errors: bool,
    /// Use the streaming, error-tolerant parse path.
    pub streaming_io: bool,
    /// How invalid Vorbis comment data is handled.
    pub vorbis_error_mode: crate::vorbis::ErrorMode,
}
impl Default for FLACParseOptions {
fn default() -> Self {
Self {
distrust_size: true,
max_block_size: 16 * 1024 * 1024, ignore_errors: true,
streaming_io: true,
vorbis_error_mode: crate::vorbis::ErrorMode::Replace,
}
}
}
/// A problem encountered while parsing, with location context attached.
#[derive(Debug, Clone)]
pub struct FLACError {
    /// Category of the problem.
    pub kind: FLACErrorKind,
    /// Absolute byte offset where the problem was detected, when known.
    pub position: Option<u64>,
    /// Free-form human-readable detail.
    pub context: String,
}
/// Categories of FLAC parse problems recorded in [`FLACError`].
#[derive(Debug, Clone, PartialEq)]
#[non_exhaustive]
pub enum FLACErrorKind {
    /// The "fLaC" signature was missing or malformed.
    InvalidHeader,
    /// A block's declared size was out of accepted bounds.
    BlockSizeError,
    /// More than one VORBIS_COMMENT block was found.
    MultipleVorbisBlocks,
    /// More than one SeekTable block was found.
    MultipleSeekTableBlocks,
    /// More than one CueSheet block was found.
    MultipleCueSheetBlocks,
    /// STREAMINFO declared a sample rate of zero.
    InvalidSampleRate,
    /// A block's payload could not be decoded.
    CorruptedBlock,
    /// A computed size exceeded its field's representable range.
    SizeOverflow,
    /// Parsing finished, but some blocks produced the contained messages.
    PartialSuccess(Vec<String>),
}
impl std::fmt::Display for FLACError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
FLACErrorKind::InvalidHeader => write!(f, "Invalid FLAC header"),
FLACErrorKind::BlockSizeError => write!(f, "Block size error"),
FLACErrorKind::MultipleVorbisBlocks => {
write!(f, "Multiple Vorbis comment blocks found")
}
FLACErrorKind::MultipleSeekTableBlocks => {
write!(f, "More than one SeekTable block found")
}
FLACErrorKind::MultipleCueSheetBlocks => {
write!(f, "More than one CueSheet block found")
}
FLACErrorKind::InvalidSampleRate => {
write!(f, "A sample rate value of 0 is invalid")
}
FLACErrorKind::CorruptedBlock => write!(f, "Corrupted block data"),
FLACErrorKind::SizeOverflow => write!(f, "Size overflow"),
FLACErrorKind::PartialSuccess(errors) => {
write!(f, "Partial success with errors: {}", errors.join("; "))
}
}
}
}
/// In-memory representation of a FLAC file's metadata section.
#[derive(Debug)]
pub struct FLAC {
    /// Decoded STREAMINFO fields (sample rate, channels, length, ...).
    pub info: FLACStreamInfo,
    /// Vorbis comment dictionary, if a VORBIS_COMMENT block was present.
    pub tags: Option<VCommentDict>,
    /// All decoded PICTURE blocks.
    pub pictures: Vec<Picture>,
    /// SeekTable block, if present (at most one is accepted).
    pub seektable: Option<SeekTable>,
    /// CueSheet block, if present (at most one is accepted).
    pub cuesheet: Option<CueSheet>,
    /// APPLICATION blocks in file order.
    pub application_blocks: Vec<ApplicationBlock>,
    /// Raw copies of every parsed block (type byte + payload bytes).
    pub metadata_blocks: Vec<MetadataBlock>,
    /// PADDING blocks; only their sizes are retained, not their bytes.
    pub padding_blocks: Vec<Padding>,
    // Path the file was loaded from; empty when built from a reader.
    filename: String,
    // Options that were active during parsing.
    parse_options: FLACParseOptions,
    /// Non-fatal problems collected while parsing.
    pub parse_errors: Vec<FLACError>,
    // (block type, size) pairs exposed via get_overflow_blocks; never
    // populated in this chunk — TODO(review): confirm where it is filled.
    invalid_overflow_size: Vec<(u8, usize)>,
    // Declared sizes for blocks (types 4/6) whose real payload exceeded
    // the 24-bit block-size field.
    original_overflow_sizes: std::collections::HashMap<u8, u32>,
    // True when in-memory state diverges from what is on disk.
    dirty: bool,
    // Byte-for-byte snapshot of the metadata region as read from the
    // file; used to detect no-op saves.
    original_metadata: Vec<u8>,
    // Block-type sequence in the order encountered in the file.
    original_block_order: Vec<u8>,
}
impl FLAC {
pub fn new() -> Self {
Self {
info: FLACStreamInfo::default(),
tags: None,
pictures: Vec::new(),
seektable: None,
cuesheet: None,
application_blocks: Vec::new(),
metadata_blocks: Vec::new(),
padding_blocks: Vec::new(),
filename: String::new(),
parse_options: FLACParseOptions::default(),
parse_errors: Vec::new(),
invalid_overflow_size: Vec::new(),
original_overflow_sizes: std::collections::HashMap::new(),
dirty: false,
original_metadata: Vec::new(),
original_block_order: Vec::new(),
}
}
/// Creates an empty `FLAC` that will parse with the given options.
/// All metadata containers start empty; `dirty` starts false.
pub fn with_options(options: FLACParseOptions) -> Self {
    Self {
        info: FLACStreamInfo::default(),
        tags: None,
        pictures: Vec::new(),
        seektable: None,
        cuesheet: None,
        application_blocks: Vec::new(),
        metadata_blocks: Vec::new(),
        padding_blocks: Vec::new(),
        filename: String::new(),
        parse_options: options,
        parse_errors: Vec::new(),
        invalid_overflow_size: Vec::new(),
        original_overflow_sizes: std::collections::HashMap::new(),
        dirty: false,
        original_metadata: Vec::new(),
        original_block_order: Vec::new(),
    }
}
/// All non-fatal errors recorded during parsing, in order of occurrence.
pub fn parse_errors(&self) -> &[FLACError] {
    self.parse_errors.as_slice()
}
/// True when parsing never recorded an `InvalidHeader` error; all other
/// error kinds are considered recoverable for validity purposes.
pub fn is_valid(&self) -> bool {
    self.parse_errors
        .iter()
        .all(|e| !matches!(e.kind, FLACErrorKind::InvalidHeader))
}
/// Parses a FLAC file from disk using [`FLACParseOptions::default`].
pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Self> {
    let options = FLACParseOptions::default();
    Self::from_file_with_options(path, options)
}
/// Parses a FLAC file from disk with explicit options, picking the
/// streaming (error-tolerant) or strict path per `options.streaming_io`.
#[cfg_attr(feature = "tracing", tracing::instrument(skip_all, fields(path = %path.as_ref().display())))]
pub fn from_file_with_options<P: AsRef<Path>>(
    path: P,
    options: FLACParseOptions,
) -> Result<Self> {
    let path = path.as_ref();
    debug_event!("parsing FLAC file");
    let file = File::open(path)?;
    let mut flac = Self::with_options(options);
    // Remember where we came from so save_to_file(None, ...) can rewrite
    // the same file.
    flac.filename = path.to_string_lossy().to_string();
    if flac.parse_options.streaming_io {
        // Streaming path benefits from buffered reads.
        let mut reader = BufReader::new(file);
        flac.parse_flac_streaming(&mut reader)?;
    } else {
        let mut file = file;
        flac.parse_flac(&mut file)?;
    }
    Ok(flac)
}
/// Parses FLAC metadata from an arbitrary seekable reader, honouring
/// `options.streaming_io` exactly as the file-based constructor does.
pub fn from_reader_with_options<R: Read + Seek>(
    reader: &mut R,
    options: FLACParseOptions,
) -> Result<Self> {
    let mut flac = Self::with_options(options);
    if !flac.parse_options.streaming_io {
        flac.parse_flac(reader)?;
    } else {
        flac.parse_flac_streaming(reader)?;
    }
    Ok(flac)
}
/// Strict (non-streaming) parse: validates the "fLaC" signature (skipping a
/// leading ID3v2 tag if present), parses all metadata blocks, snapshots the
/// raw metadata region, and refines the bitrate from the audio payload size.
/// Any error aborts parsing.
fn parse_flac<R: Read + Seek>(&mut self, reader: &mut R) -> Result<()> {
    let mut signature = [0u8; 4];
    reader.read_exact(&mut signature)?;
    // An ID3v2 tag may precede the stream: 4 header bytes are already read,
    // read the remaining 6, decode the synchsafe size from the last 4 of
    // those, skip the tag body, then re-read the signature.
    if &signature[..3] == b"ID3" {
        let mut id3_size_bytes = [0u8; 6];
        reader.read_exact(&mut id3_size_bytes)?;
        let id3_size = self.decode_id3_size(&id3_size_bytes[2..])?;
        reader.seek(SeekFrom::Current(id3_size as i64))?;
        reader.read_exact(&mut signature)?;
    }
    if &signature != b"fLaC" {
        return Err(AudexError::FLACNoHeader);
    }
    let metadata_start = reader.stream_position()?;
    self.parse_metadata_blocks(reader)?;
    // Snapshot the raw metadata region so saves can detect no-op rewrites.
    let metadata_end = self.find_audio_offset_from_file(reader)?;
    let file_end = reader.seek(SeekFrom::End(0))?;
    // Clamp against the physical file end in case the audio offset lies
    // beyond it (truncated file).
    let capped_end = metadata_end.min(file_end);
    let metadata_size_u64 = capped_end.checked_sub(metadata_start).ok_or_else(|| {
        AudexError::InvalidData("metadata region extends beyond file boundaries".to_string())
    })?;
    let metadata_size = usize::try_from(metadata_size_u64).map_err(|_| {
        AudexError::InvalidData("metadata region too large for this platform".to_string())
    })?;
    reader.seek(SeekFrom::Start(metadata_start))?;
    self.original_metadata = vec![0u8; metadata_size];
    match reader.read_exact(&mut self.original_metadata) {
        Ok(()) => {}
        // EOF inside the metadata region: discard the partial snapshot
        // rather than keep a half-zeroed buffer.
        Err(e) if e.kind() == std::io::ErrorKind::UnexpectedEof => {
            self.original_metadata.clear();
        }
        Err(e) => return Err(e.into()),
    }
    // Replace the uncompressed STREAMINFO bitrate estimate with one based
    // on actual audio bytes / decoded duration, when both are available.
    if self.info.total_samples > 0 {
        if let Ok(end_pos) = reader.seek(SeekFrom::End(0)) {
            if let Some(duration) = self.info.length {
                if end_pos >= metadata_end {
                    let audio_size = end_pos - metadata_end;
                    let duration_secs = duration.as_secs_f64();
                    if duration_secs > 0.0 {
                        let bitrate = (audio_size * 8) as f64 / duration_secs;
                        self.info.bitrate = Some(bitrate as u32);
                    }
                }
            }
        }
    }
    Ok(())
}
/// Decodes an ID3v2 synchsafe size: four bytes, 7 significant bits each,
/// most significant byte first. Exactly four bytes are required.
fn decode_id3_size(&self, size_bytes: &[u8]) -> Result<u32> {
    if size_bytes.len() != 4 {
        return Err(AudexError::InvalidData("Invalid ID3 size".to_string()));
    }
    // Equivalent to (b0&7F)<<21 | (b1&7F)<<14 | (b2&7F)<<7 | (b3&7F).
    let size = size_bytes
        .iter()
        .fold(0u32, |acc, &b| (acc << 7) | (b & 0x7F) as u32);
    Ok(size)
}
/// Streaming parse: same overall flow as `parse_flac`, but a bad signature
/// is also recorded in `parse_errors`, and block parsing goes through the
/// error-tolerant `parse_metadata_blocks_streaming`.
fn parse_flac_streaming<R: Read + Seek>(&mut self, reader: &mut R) -> Result<()> {
    let mut signature = [0u8; 4];
    reader.read_exact(&mut signature)?;
    // Skip a leading ID3v2 tag, if any (synchsafe size in its header).
    if &signature[..3] == b"ID3" {
        let mut id3_size_bytes = [0u8; 6];
        reader.read_exact(&mut id3_size_bytes)?;
        let id3_size = self.decode_id3_size(&id3_size_bytes[2..])?;
        reader.seek(SeekFrom::Current(id3_size as i64))?;
        reader.read_exact(&mut signature)?;
    }
    if &signature != b"fLaC" {
        // Record the failure before bailing so callers inspecting
        // parse_errors see why the parse stopped.
        let error = FLACError {
            kind: FLACErrorKind::InvalidHeader,
            position: reader.stream_position().ok(),
            context: "Missing or invalid FLAC signature".to_string(),
        };
        self.parse_errors.push(error);
        return Err(AudexError::FLACNoHeader);
    }
    let metadata_start = reader.stream_position()?;
    self.parse_metadata_blocks_streaming(reader)?;
    // Snapshot the raw metadata region so saves can detect no-op rewrites.
    let metadata_end = self.find_audio_offset_from_file(reader)?;
    let file_end = reader.seek(SeekFrom::End(0))?;
    let capped_end = metadata_end.min(file_end);
    let metadata_size_u64 = capped_end.checked_sub(metadata_start).ok_or_else(|| {
        AudexError::InvalidData("metadata region extends beyond file boundaries".to_string())
    })?;
    let metadata_size = usize::try_from(metadata_size_u64).map_err(|_| {
        AudexError::InvalidData("metadata region too large for this platform".to_string())
    })?;
    reader.seek(SeekFrom::Start(metadata_start))?;
    self.original_metadata = vec![0u8; metadata_size];
    match reader.read_exact(&mut self.original_metadata) {
        Ok(()) => {}
        // EOF inside the metadata region: drop the partial snapshot.
        Err(e) if e.kind() == std::io::ErrorKind::UnexpectedEof => {
            self.original_metadata.clear();
        }
        Err(e) => return Err(e.into()),
    }
    // Refine the bitrate from actual audio bytes / decoded duration.
    if self.info.total_samples > 0 {
        if let Ok(end_pos) = reader.seek(SeekFrom::End(0)) {
            if let Some(duration) = self.info.length {
                if end_pos >= metadata_end {
                    let audio_size = end_pos - metadata_end;
                    let duration_secs = duration.as_secs_f64();
                    if duration_secs > 0.0 {
                        let bitrate = (audio_size * 8) as f64 / duration_secs;
                        self.info.bitrate = Some(bitrate as u32);
                    }
                }
            }
        }
    }
    Ok(())
}
/// Walks the metadata block chain on the error-tolerant path.
///
/// Recoverable problems are pushed into `parse_errors` and parsing resyncs
/// to the next block header (when `ignore_errors` is set); only STREAMINFO
/// failures in strict mode and fatal Vorbis errors abort. Each successfully
/// parsed block's raw bytes are also stored in `metadata_blocks`.
fn parse_metadata_blocks_streaming<R: Read + Seek>(&mut self, reader: &mut R) -> Result<()> {
    let mut is_last = false;
    let mut vorbis_comment_count = 0;
    // Guard against files with an endless (or cyclic-looking) block chain.
    const MAX_METADATA_BLOCKS: usize = 1024;
    let mut block_count: usize = 0;
    while !is_last {
        block_count += 1;
        if block_count > MAX_METADATA_BLOCKS {
            return Err(AudexError::InvalidData(format!(
                "Exceeded maximum metadata block count ({})",
                MAX_METADATA_BLOCKS
            )));
        }
        let block_start_pos = reader.stream_position()?;
        // 4-byte block header: 1 bit last-flag, 7 bits type, 24 bits size.
        let mut header = [0u8; 4];
        if let Err(e) = reader.read_exact(&mut header) {
            if self.parse_options.ignore_errors {
                // Treat an unreadable header as end of metadata.
                self.parse_errors.push(FLACError {
                    kind: FLACErrorKind::CorruptedBlock,
                    position: Some(block_start_pos),
                    context: format!("Failed to read block header: {}", e),
                });
                break;
            }
            return Err(e.into());
        }
        let block_type = header[0] & 0x7F;
        is_last = (header[0] & 0x80) != 0;
        let block_size = u32::from_be_bytes([0, header[1], header[2], header[3]]);
        self.original_block_order.push(block_type);
        let block_data_start = reader.stream_position()?;
        if block_size > self.parse_options.max_block_size {
            let error = FLACError {
                kind: FLACErrorKind::BlockSizeError,
                position: Some(block_start_pos),
                context: format!(
                    "Block size {} exceeds maximum {}",
                    block_size, self.parse_options.max_block_size
                ),
            };
            self.parse_errors.push(error);
            // An oversized VORBIS_COMMENT (type 4) is always fatal here;
            // other oversized blocks are skipped when tolerating errors.
            if block_type == 4 || !self.parse_options.ignore_errors {
                return Err(AudexError::InvalidData(format!(
                    "Block size {} exceeds maximum {}",
                    block_size, self.parse_options.max_block_size
                )));
            }
            reader.seek(SeekFrom::Current(block_size as i64))?;
            continue;
        }
        trace_event!(
            block_type = block_type,
            block_size = block_size,
            is_last = is_last,
            "parsing FLAC metadata block"
        );
        // Dispatch on the block type; duplicate-only-once blocks (3, 4, 5)
        // record an error and skip the extra copy.
        let parse_result = match block_type {
            0 => {
                self.parse_streaminfo_block_safe(reader, block_size, block_start_pos)
            }
            1 => {
                self.parse_padding_block_safe(reader, block_size, block_start_pos)
            }
            2 => {
                self.parse_application_block_safe(reader, block_size, block_start_pos)
            }
            3 => {
                if self.seektable.is_some() {
                    self.parse_errors.push(FLACError {
                        kind: FLACErrorKind::MultipleSeekTableBlocks,
                        position: Some(block_start_pos),
                        context: "> 1 SeekTable block found".to_string(),
                    });
                    if !self.parse_options.ignore_errors {
                        return Err(AudexError::InvalidData(
                            "> 1 SeekTable block found".to_string(),
                        ));
                    }
                    reader.seek(SeekFrom::Current(block_size as i64))?;
                    Ok(())
                } else {
                    self.parse_seektable_block_safe(reader, block_size, block_start_pos)
                }
            }
            4 => {
                vorbis_comment_count += 1;
                if vorbis_comment_count > 1 {
                    // Keep the first comment block, skip later ones.
                    self.parse_errors.push(FLACError {
                        kind: FLACErrorKind::MultipleVorbisBlocks,
                        position: Some(block_start_pos),
                        context: format!(
                            "Found {} VORBIS_COMMENT blocks, using first",
                            vorbis_comment_count
                        ),
                    });
                    if !self.parse_options.ignore_errors {
                        return Err(AudexError::FLACVorbis);
                    }
                    reader.seek(SeekFrom::Current(block_size as i64))?;
                    Ok(())
                } else {
                    self.parse_vorbis_comment_block_safe(reader, block_size, block_start_pos)
                }
            }
            5 => {
                if self.cuesheet.is_some() {
                    self.parse_errors.push(FLACError {
                        kind: FLACErrorKind::MultipleCueSheetBlocks,
                        position: Some(block_start_pos),
                        context: "> 1 CueSheet block found".to_string(),
                    });
                    if !self.parse_options.ignore_errors {
                        return Err(AudexError::InvalidData(
                            "> 1 CueSheet block found".to_string(),
                        ));
                    }
                    reader.seek(SeekFrom::Current(block_size as i64))?;
                    Ok(())
                } else {
                    self.parse_cuesheet_block_safe(reader, block_size, block_start_pos)
                }
            }
            6 => {
                self.parse_picture_block_safe(reader, block_size, block_start_pos)
            }
            _ => {
                // Unknown/reserved block types are skipped wholesale.
                reader.seek(SeekFrom::Current(block_size as i64))?;
                Ok(())
            }
        };
        if let Err(e) = parse_result {
            warn_event!(block_type = block_type, error = %e, "FLAC metadata block parse error");
            let error = FLACError {
                kind: FLACErrorKind::CorruptedBlock,
                position: Some(block_start_pos),
                context: format!("Block type {} parse error: {}", block_type, e),
            };
            self.parse_errors.push(error);
            // Fatal when tolerating errors: only unrecoverable Vorbis
            // failures. In strict mode, STREAMINFO failures are fatal too.
            let is_fatal_error = if self.parse_options.ignore_errors {
                block_type == 4 && self.is_fatal_vorbis_error(&e)
            } else {
                block_type == 0 || (block_type == 4 && self.is_fatal_vorbis_error(&e))
            };
            if is_fatal_error {
                return Err(e);
            }
            // Resync to where the header says this block ends.
            let current_pos = reader.stream_position()?;
            let expected_pos = block_start_pos + 4 + block_size as u64;
            if current_pos != expected_pos {
                reader.seek(SeekFrom::Start(expected_pos))?;
            }
        } else {
            // Re-read the bytes the parser actually consumed and keep a raw
            // copy of the block.
            let current_pos = reader.stream_position()?;
            let bytes_to_read = current_pos.checked_sub(block_data_start).ok_or_else(|| {
                AudexError::InvalidData(format!(
                    "FLAC block position underflow: current {} < start {}",
                    current_pos, block_data_start
                ))
            })? as usize;
            // Remember the declared size when a comment/picture block's
            // real payload exceeds the 24-bit block-size field.
            if (block_type == 4 || block_type == 6) && bytes_to_read > 0xFFFFFF {
                self.original_overflow_sizes.insert(block_type, block_size);
            }
            reader.seek(SeekFrom::Start(block_data_start))?;
            let mut block_data = vec![0u8; bytes_to_read];
            reader.read_exact(&mut block_data)?;
            self.metadata_blocks
                .push(MetadataBlock::new(block_type, block_data));
        }
    }
    debug_event!(
        sample_rate = self.info.sample_rate,
        channels = self.info.channels,
        bits_per_sample = self.info.bits_per_sample,
        total_samples = self.info.total_samples,
        "FLAC STREAMINFO parsed"
    );
    if let Some(ref _tags) = self.tags {
        debug_event!(
            tag_count = _tags.keys().len(),
            vendor = %_tags.vendor(),
            "FLAC Vorbis Comment parsed"
        );
    }
    let _picture_count = self.pictures.len();
    if _picture_count > 0 {
        debug_event!(picture_count = _picture_count, "FLAC pictures parsed");
    }
    Ok(())
}
/// Decides whether a Vorbis-comment parse error must abort the whole parse.
///
/// Short reads ("failed to fill whole buffer") are never fatal; structural
/// failures (missing vendor/count/length, truncation, size overruns) always
/// are; encoding and format errors are fatal only in Strict mode.
fn is_fatal_vorbis_error(&self, error: &AudexError) -> bool {
    let strict = matches!(
        self.parse_options.vorbis_error_mode,
        crate::vorbis::ErrorMode::Strict
    );
    match error {
        AudexError::InvalidData(msg) => {
            if msg.contains("failed to fill whole buffer") {
                return false;
            }
            msg.contains("exceeds maximum")
                || msg.contains("failed to read vendor")
                || msg.contains("failed to read comment count")
                || msg.contains("failed to read comment length")
                || msg.contains("Truncated Vorbis")
                || (strict && msg.contains("encoding error"))
        }
        AudexError::FormatError(_) => strict,
        _ => false,
    }
}
/// Walks the metadata block chain on the strict path: any parse error,
/// oversized block, or duplicated singleton block (SeekTable, VorbisComment,
/// CueSheet) aborts immediately. Raw bytes of every consumed block are also
/// stored in `metadata_blocks`.
fn parse_metadata_blocks<R: Read + Seek>(&mut self, reader: &mut R) -> Result<()> {
    let mut is_last = false;
    // Guard against files with an endless block chain.
    const MAX_METADATA_BLOCKS: usize = 1024;
    let mut block_count: usize = 0;
    while !is_last {
        block_count += 1;
        if block_count > MAX_METADATA_BLOCKS {
            return Err(AudexError::InvalidData(format!(
                "Exceeded maximum metadata block count ({})",
                MAX_METADATA_BLOCKS
            )));
        }
        // 4-byte block header: 1 bit last-flag, 7 bits type, 24 bits size.
        let mut header = [0u8; 4];
        reader.read_exact(&mut header)?;
        let block_type = header[0] & 0x7F;
        is_last = (header[0] & 0x80) != 0;
        let block_size = u32::from_be_bytes([0, header[1], header[2], header[3]]);
        if block_size > self.parse_options.max_block_size {
            return Err(AudexError::InvalidData(format!(
                "Block size {} exceeds maximum {}",
                block_size, self.parse_options.max_block_size
            )));
        }
        self.original_block_order.push(block_type);
        let block_data_start = reader.stream_position()?;
        match block_type {
            0 => {
                self.parse_streaminfo_block(reader, block_size)?;
            }
            1 => {
                self.parse_padding_block(reader, block_size)?
            }
            2 => {
                self.parse_application_block(reader, block_size)?;
            }
            3 => {
                // At most one SeekTable block is allowed.
                if self.seektable.is_some() {
                    return Err(AudexError::InvalidData(
                        "> 1 SeekTable block found".to_string(),
                    ));
                }
                self.parse_seektable_block(reader, block_size)?;
            }
            4 => {
                // At most one VorbisComment block is allowed.
                if self.tags.is_some() {
                    return Err(AudexError::InvalidData(
                        "> 1 VorbisComment block found".to_string(),
                    ));
                }
                self.parse_vorbis_comment_block(reader, block_size)?;
            }
            5 => {
                // At most one CueSheet block is allowed.
                if self.cuesheet.is_some() {
                    return Err(AudexError::InvalidData(
                        "> 1 CueSheet block found".to_string(),
                    ));
                }
                self.parse_cuesheet_block(reader, block_size)?;
            }
            6 => {
                self.parse_picture_block(reader, block_size)?;
            }
            _ => {
                // Unknown/reserved block types are skipped wholesale.
                reader.seek(SeekFrom::Current(block_size as i64))?;
            }
        }
        // Re-read the bytes the parser actually consumed and keep a raw
        // copy of the block.
        let current_pos = reader.stream_position()?;
        let bytes_to_read = current_pos.checked_sub(block_data_start).ok_or_else(|| {
            AudexError::InvalidData(format!(
                "FLAC block position underflow: current {} < start {}",
                current_pos, block_data_start
            ))
        })? as usize;
        // Remember the declared size when a comment/picture block's real
        // payload exceeds the 24-bit block-size field.
        if (block_type == 4 || block_type == 6) && bytes_to_read > 0xFFFFFF {
            self.original_overflow_sizes.insert(block_type, block_size);
        }
        reader.seek(SeekFrom::Start(block_data_start))?;
        let mut block_data = vec![0u8; bytes_to_read];
        reader.read_exact(&mut block_data)?;
        self.metadata_blocks
            .push(MetadataBlock::new(block_type, block_data));
    }
    Ok(())
}
/// Decodes the 34-byte STREAMINFO block into `self.info`.
///
/// Layout after the two block sizes and two 24-bit frame sizes is a packed
/// 64-bit field: 20-bit sample rate | 3-bit (channels - 1) |
/// 5-bit (bits-per-sample - 1) | 36-bit total sample count, followed by the
/// 16-byte MD5 signature.
///
/// Returns `InvalidData` when the declared sample rate is zero.
fn parse_streaminfo_block<R: Read>(&mut self, reader: &mut R, _block_size: u32) -> Result<()> {
    // STREAMINFO is always exactly 34 bytes.
    let mut data = [0u8; 34];
    reader.read_exact(&mut data)?;
    let mut cursor = Cursor::new(&data);
    self.info.min_blocksize = cursor.read_u16::<BigEndian>()?;
    self.info.max_blocksize = cursor.read_u16::<BigEndian>()?;
    // min/max frame sizes are 24-bit big-endian integers.
    let min_framesize_bytes = [cursor.read_u8()?, cursor.read_u8()?, cursor.read_u8()?];
    self.info.min_framesize = u32::from_be_bytes([
        0,
        min_framesize_bytes[0],
        min_framesize_bytes[1],
        min_framesize_bytes[2],
    ]);
    let max_framesize_bytes = [cursor.read_u8()?, cursor.read_u8()?, cursor.read_u8()?];
    self.info.max_framesize = u32::from_be_bytes([
        0,
        max_framesize_bytes[0],
        max_framesize_bytes[1],
        max_framesize_bytes[2],
    ]);
    let combined = cursor.read_u64::<BigEndian>()?;
    self.info.sample_rate = ((combined >> 44) & 0xFFFFF) as u32;
    self.info.channels = (((combined >> 41) & 0x07) as u16) + 1;
    self.info.bits_per_sample = (((combined >> 36) & 0x1F) as u16) + 1;
    self.info.total_samples = combined & 0xFFFFFFFFF;
    if self.info.sample_rate == 0 {
        return Err(AudexError::InvalidData(
            "A sample rate value of 0 is invalid".to_string(),
        ));
    }
    cursor.read_exact(&mut self.info.md5_signature)?;
    // sample_rate is guaranteed non-zero here (checked above), so the
    // former `if sample_rate > 0` wrapper was dead and has been removed.
    if self.info.total_samples > 0 {
        let duration_secs = self.info.total_samples as f64 / self.info.sample_rate as f64;
        self.info.length = Some(Duration::from_secs_f64(duration_secs));
        // Uncompressed PCM bitrate as a first estimate; the file-level
        // parse replaces it with one derived from the audio byte count.
        let bits_per_second = self.info.sample_rate as u64
            * self.info.channels as u64
            * self.info.bits_per_sample as u64;
        self.info.bitrate = Some(u32::try_from(bits_per_second).unwrap_or(u32::MAX));
    } else {
        // Unknown sample count: report zero length/bitrate.
        self.info.length = Some(Duration::from_secs(0));
        self.info.bitrate = Some(0);
    }
    Ok(())
}
/// STREAMINFO wrapper for the tolerant path: the spec fixes the block at
/// exactly 34 bytes, so any other declared size is rejected up front.
fn parse_streaminfo_block_safe<R: Read + Seek>(
    &mut self,
    reader: &mut R,
    block_size: u32,
    _block_start_pos: u64,
) -> Result<()> {
    match block_size {
        34 => self.parse_streaminfo_block(reader, block_size),
        other => Err(AudexError::InvalidData(format!(
            "Invalid STREAMINFO size: {} (expected 34)",
            other
        ))),
    }
}
/// Decodes a VORBIS_COMMENT block from exactly `block_size` bytes (strict
/// path). The trailing `false` presumably disables the Ogg framing bit —
/// TODO(review): confirm against VCommentDict's signature.
fn parse_vorbis_comment_block<R: Read>(
    &mut self,
    reader: &mut R,
    block_size: u32,
) -> Result<()> {
    let mut data = vec![0u8; block_size as usize];
    reader.read_exact(&mut data)?;
    let comment = VCommentDict::from_bytes_with_options(
        &data,
        self.parse_options.vorbis_error_mode,
        false,
    )?;
    self.tags = Some(comment);
    Ok(())
}
/// Decodes a VORBIS_COMMENT block on the tolerant path.
///
/// With `distrust_size`, the comment is parsed straight from the reader
/// (ignoring the declared size) and the *actual* consumed size is validated
/// afterwards; otherwise exactly `block_size` bytes are read and decoded.
fn parse_vorbis_comment_block_safe<R: Read + Seek>(
    &mut self,
    reader: &mut R,
    block_size: u32,
    block_start_pos: u64,
) -> Result<()> {
    if self.parse_options.distrust_size {
        let start_pos = reader.stream_position()?;
        let mut comment = VCommentDict::new();
        comment
            .load(reader, self.parse_options.vorbis_error_mode, false)
            .map_err(|e| {
                AudexError::InvalidData(format!(
                    "Vorbis comment parse error at position {}: {}",
                    block_start_pos, e
                ))
            })?;
        // Validate the size we actually consumed, not the declared one.
        let real_size = reader.stream_position()? - start_pos;
        if real_size > self.parse_options.max_block_size as u64 {
            return Err(AudexError::InvalidData(format!(
                "Vorbis comment actual size ({} bytes) exceeds max_block_size ({})",
                real_size, self.parse_options.max_block_size
            )));
        }
        // Payload larger than the 24-bit size field: remember the header's
        // declared size for later rewriting.
        if real_size > 0xFFFFFF_u64 {
            self.original_overflow_sizes.insert(4, block_size);
        }
        self.tags = Some(comment);
    } else {
        if block_size > self.parse_options.max_block_size {
            return Err(AudexError::InvalidData(format!(
                "Vorbis comment block size ({} bytes) exceeds max_block_size ({})",
                block_size, self.parse_options.max_block_size
            )));
        }
        let mut data = vec![0u8; block_size as usize];
        reader.read_exact(&mut data)?;
        let comment = VCommentDict::from_bytes_with_options(
            &data,
            self.parse_options.vorbis_error_mode,
            false,
        )?;
        self.tags = Some(comment);
    }
    Ok(())
}
/// Hard ceiling (64 MiB) for blocks read by the legacy (strict) parsers.
const MAX_LEGACY_BLOCK: u32 = 64 * 1024 * 1024;
/// Reads and decodes a SEEKTABLE block (legacy strict path). Two caps are
/// applied before allocation: the crate-wide parse limits and the hard
/// `MAX_LEGACY_BLOCK` ceiling.
fn parse_seektable_block<R: Read>(&mut self, reader: &mut R, block_size: u32) -> Result<()> {
    crate::limits::ParseLimits::default()
        .check_tag_size(block_size as u64, "FLAC legacy seektable block")?;
    if block_size > Self::MAX_LEGACY_BLOCK {
        return Err(AudexError::ParseError(format!(
            "FLAC block too large: {} bytes",
            block_size
        )));
    }
    let mut data = vec![0u8; block_size as usize];
    reader.read_exact(&mut data)?;
    let seektable = SeekTable::from_bytes(&data)?;
    self.seektable = Some(seektable);
    Ok(())
}
fn parse_seektable_block_safe<R: Read + Seek>(
&mut self,
reader: &mut R,
block_size: u32,
_block_start_pos: u64,
) -> Result<()> {
let safe_size = if self.parse_options.distrust_size {
min(block_size, self.parse_options.max_block_size)
} else {
block_size
};
let mut data = vec![0u8; safe_size as usize];
let bytes_read = reader.read(&mut data)?;
if bytes_read < safe_size as usize {
return Err(AudexError::InvalidData(
"Truncated SEEKTABLE block".to_string(),
));
}
data.truncate(bytes_read);
let max_seekpoints = Some(10000); let seektable = SeekTable::from_bytes_with_options(&data, max_seekpoints)?;
self.seektable = Some(seektable);
if block_size > safe_size {
reader.seek(SeekFrom::Current((block_size - safe_size) as i64))?;
}
Ok(())
}
/// Reads and decodes a CUESHEET block (legacy strict path), applying the
/// crate-wide parse limits and the hard `MAX_LEGACY_BLOCK` ceiling before
/// allocating.
fn parse_cuesheet_block<R: Read>(&mut self, reader: &mut R, block_size: u32) -> Result<()> {
    crate::limits::ParseLimits::default()
        .check_tag_size(block_size as u64, "FLAC legacy cuesheet block")?;
    if block_size > Self::MAX_LEGACY_BLOCK {
        return Err(AudexError::ParseError(format!(
            "FLAC block too large: {} bytes",
            block_size
        )));
    }
    let mut data = vec![0u8; block_size as usize];
    reader.read_exact(&mut data)?;
    let cuesheet = CueSheet::from_bytes(&data)?;
    self.cuesheet = Some(cuesheet);
    Ok(())
}
fn parse_cuesheet_block_safe<R: Read + Seek>(
&mut self,
reader: &mut R,
block_size: u32,
_block_start_pos: u64,
) -> Result<()> {
let safe_size = if self.parse_options.distrust_size {
min(block_size, self.parse_options.max_block_size)
} else {
block_size
};
let mut data = vec![0u8; safe_size as usize];
let bytes_read = reader.read(&mut data)?;
if bytes_read < safe_size as usize {
return Err(AudexError::InvalidData(
"Truncated CUESHEET block".to_string(),
));
}
data.truncate(bytes_read);
let cuesheet = CueSheet::from_bytes(&data)?;
self.cuesheet = Some(cuesheet);
if block_size > safe_size {
reader.seek(SeekFrom::Current((block_size - safe_size) as i64))?;
}
Ok(())
}
/// Reads and decodes a PICTURE block (legacy strict path), applying the
/// crate-wide parse limits and the hard `MAX_LEGACY_BLOCK` ceiling before
/// allocating.
fn parse_picture_block<R: Read>(&mut self, reader: &mut R, block_size: u32) -> Result<()> {
    crate::limits::ParseLimits::default()
        .check_tag_size(block_size as u64, "FLAC legacy picture block")?;
    if block_size > Self::MAX_LEGACY_BLOCK {
        return Err(AudexError::ParseError(format!(
            "FLAC block too large: {} bytes",
            block_size
        )));
    }
    let mut data = vec![0u8; block_size as usize];
    reader.read_exact(&mut data)?;
    let picture = Picture::from_bytes(&data)?;
    self.pictures.push(picture);
    Ok(())
}
/// Reads and decodes a PICTURE block on the tolerant path.
///
/// With `distrust_size`, the picture is parsed directly from the reader
/// (ignoring the declared size); otherwise exactly `block_size` bytes are
/// consumed. Undecodable pictures are skipped when `ignore_errors` is set.
fn parse_picture_block_safe<R: Read + Seek>(
    &mut self,
    reader: &mut R,
    block_size: u32,
    _block_start_pos: u64,
) -> Result<()> {
    if self.parse_options.distrust_size {
        let start_pos = reader.stream_position()?;
        let max_picture_size = Some(self.parse_options.max_block_size as usize);
        match Picture::from_reader(reader, max_picture_size) {
            Ok(picture) => {
                // Payload larger than the 24-bit size field: remember the
                // header's declared size for later rewriting.
                let real_size = reader.stream_position()? - start_pos;
                if real_size > 0xFFFFFF_u64 {
                    self.original_overflow_sizes.insert(6, block_size);
                }
                self.pictures.push(picture);
            }
            Err(_) if self.parse_options.ignore_errors => {
                // Best effort: jump to where the declared size says the
                // block ends and keep parsing; the seek result is ignored.
                let _ = reader.seek(SeekFrom::Start(start_pos + block_size as u64));
            }
            Err(e) => return Err(e),
        }
    } else {
        let mut data = vec![0u8; block_size as usize];
        reader.read_exact(&mut data)?;
        let max_picture_size = Some(self.parse_options.max_block_size as usize);
        match Picture::from_bytes_with_options(&data, max_picture_size) {
            Ok(picture) => self.pictures.push(picture),
            // Tolerated decode failure: drop the picture silently.
            Err(_) if self.parse_options.ignore_errors => {}
            Err(e) => return Err(e),
        }
    }
    Ok(())
}
/// Records a PADDING block's size and skips its bytes without reading them
/// (strict path).
fn parse_padding_block<R: Read + Seek>(
    &mut self,
    reader: &mut R,
    block_size: u32,
) -> Result<()> {
    self.padding_blocks.push(Padding::new(block_size as usize));
    reader.seek(SeekFrom::Current(block_size as i64))?;
    Ok(())
}
/// Reads and decodes an APPLICATION block (legacy strict path).
/// NOTE(review): unlike the other legacy parsers, no `ParseLimits` /
/// `MAX_LEGACY_BLOCK` cap is applied before allocating `block_size` bytes —
/// confirm whether the caller's max_block_size check is meant to suffice.
fn parse_application_block<R: Read>(&mut self, reader: &mut R, block_size: u32) -> Result<()> {
    let mut data = vec![0u8; block_size as usize];
    reader.read_exact(&mut data)?;
    let application_block = ApplicationBlock::from_bytes(&data)?;
    self.application_blocks.push(application_block);
    Ok(())
}
fn parse_application_block_safe<R: Read + Seek>(
&mut self,
reader: &mut R,
block_size: u32,
_block_start_pos: u64,
) -> Result<()> {
let safe_size = if self.parse_options.distrust_size {
min(block_size, self.parse_options.max_block_size)
} else {
block_size
};
let mut data = vec![0u8; safe_size as usize];
let bytes_read = reader.read(&mut data)?;
if bytes_read < safe_size as usize {
return Err(AudexError::InvalidData(
"Truncated APPLICATION block".to_string(),
));
}
data.truncate(bytes_read);
let application_block = ApplicationBlock::from_bytes(&data)?;
self.application_blocks.push(application_block);
if block_size > safe_size {
reader.seek(SeekFrom::Current((block_size - safe_size) as i64))?;
}
Ok(())
}
/// Records a PADDING block on the tolerant path.
///
/// The recorded padding size is capped at `max_block_size` when declared
/// sizes are untrusted, but the reader always advances past the full
/// declared block so the next header is read from the correct offset.
fn parse_padding_block_safe<R: Read + Seek>(
    &mut self,
    reader: &mut R,
    block_size: u32,
    _block_start_pos: u64,
) -> Result<()> {
    let safe_size = if self.parse_options.distrust_size {
        min(block_size, self.parse_options.max_block_size)
    } else {
        block_size
    };
    self.padding_blocks.push(Padding::new(safe_size as usize));
    // safe_size <= block_size always holds, so a single relative seek over
    // the whole declared size replaces the original two-step seek
    // (safe_size, then the remainder) with identical final position.
    reader.seek(SeekFrom::Current(block_size as i64))?;
    Ok(())
}
/// Appends a picture and marks the metadata as modified.
pub fn add_picture(&mut self, picture: Picture) {
    self.dirty = true;
    self.pictures.push(picture);
}
/// Removes all pictures, including their raw metadata-block copies, and
/// marks the metadata as modified.
pub fn clear_pictures(&mut self) {
    self.dirty = true;
    self.pictures.clear();
    // Block type 6 = PICTURE; drop the raw copies too.
    self.metadata_blocks.retain(|block| block.block_type != 6);
}
/// Appends an APPLICATION block and marks the metadata as modified.
pub fn add_application_block(&mut self, application_block: ApplicationBlock) {
    self.dirty = true;
    self.application_blocks.push(application_block);
}
/// Removes every APPLICATION block and marks the metadata as modified.
pub fn clear_application_blocks(&mut self) {
    self.dirty = true;
    self.application_blocks.clear();
}
/// Borrows every APPLICATION block whose 4-byte id matches, in file order.
pub fn get_application_blocks_by_id(&self, application_id: [u8; 4]) -> Vec<&ApplicationBlock> {
    let mut matches = Vec::new();
    for block in &self.application_blocks {
        if block.application_id == application_id {
            matches.push(block);
        }
    }
    matches
}
/// Removes every APPLICATION block with the given id, returning how many
/// were dropped. Marks the metadata modified only when something changed.
pub fn remove_application_blocks_by_id(&mut self, application_id: [u8; 4]) -> usize {
    let before = self.application_blocks.len();
    self.application_blocks
        .retain(|b| b.application_id != application_id);
    let removed = before - self.application_blocks.len();
    if removed > 0 {
        self.dirty = true;
    }
    removed
}
/// Appends a padding block of `size` bytes; zero-size requests are ignored.
pub fn add_padding(&mut self, size: usize) {
    if size == 0 {
        return;
    }
    self.padding_blocks.push(Padding::new(size));
    self.dirty = true;
}
/// Drops all padding blocks and marks the metadata as modified.
pub fn clear_padding(&mut self) {
    self.dirty = true;
    self.padding_blocks.clear();
}
/// Combined size in bytes across all padding blocks.
pub fn total_padding_size(&self) -> usize {
    self.padding_blocks.iter().fold(0, |acc, p| acc + p.size)
}
/// (block type, size) pairs recorded for blocks with invalid overflow sizes.
pub fn get_overflow_blocks(&self) -> &[(u8, usize)] {
    self.invalid_overflow_size.as_slice()
}
/// True when at least one invalid-overflow block was recorded.
pub fn has_overflow_blocks(&self) -> bool {
    !self.invalid_overflow_size.is_empty()
}
/// Drops zero-size padding blocks and coalesces fragmented padding:
/// when more than three sub-1KiB blocks exist, all padding is merged
/// into a single block of the combined size. Marks the metadata dirty
/// only when the number of blocks changed.
pub fn optimize_padding(&mut self) {
    let initial_len = self.padding_blocks.len();
    self.padding_blocks.retain(|p| p.size > 0);
    if self.padding_blocks.len() > 1 {
        let total_size: usize = self.padding_blocks.iter().map(|p| p.size).sum();
        let small_blocks = self.padding_blocks.iter().filter(|p| p.size < 1024).count();
        if small_blocks > 3 {
            // Merge: replace all blocks with one of the combined size.
            self.padding_blocks.clear();
            if total_size > 0 {
                self.padding_blocks.push(Padding::new(total_size));
            }
        }
    }
    if initial_len != self.padding_blocks.len() {
        self.dirty = true;
    }
}
/// Suggests a padding size: `growth_factor` times the serialized size of
/// the current metadata (STREAMINFO + tags + pictures + seektable +
/// cuesheet), clamped to the range [1 KiB, 64 KiB]. Sections that fail to
/// serialize are simply skipped in the size estimate.
pub fn calculate_optimal_padding(&self, growth_factor: f64) -> usize {
    // Start from the fixed 34-byte STREAMINFO block.
    let mut total_metadata_size: usize = 34;
    if let Some(ref tags) = self.tags {
        if let Ok(data) = tags.to_bytes() {
            total_metadata_size += data.len();
        }
    }
    for picture in &self.pictures {
        if let Ok(data) = picture.to_bytes() {
            total_metadata_size += data.len();
        }
    }
    if let Some(ref seektable) = self.seektable {
        if let Ok(data) = seektable.to_bytes() {
            total_metadata_size += data.len();
        }
    }
    if let Some(ref cuesheet) = self.cuesheet {
        if let Ok(data) = cuesheet.to_bytes() {
            total_metadata_size += data.len();
        }
    }
    let growth_padding = (total_metadata_size as f64 * growth_factor) as usize;
    growth_padding.clamp(1024, 64 * 1024)
}
/// Installs a fresh, empty Vorbis comment dictionary (no framing bit).
/// Refuses to replace an existing one when `ignore_errors` is off.
pub fn add_tags(&mut self) -> Result<()> {
    let already_tagged = self.tags.is_some();
    if already_tagged && !self.parse_options.ignore_errors {
        return Err(AudexError::FLACVorbis);
    }
    self.tags = Some(VCommentDict::with_framing(false));
    self.dirty = true;
    Ok(())
}
/// Writes the metadata back to disk: to `path` when given, otherwise to
/// the file this instance was originally loaded from. `_delete_id3` is
/// currently unused.
pub fn save_to_file<P: AsRef<Path>>(
    &mut self,
    path: Option<P>,
    _delete_id3: bool,
    padding_func: Option<PaddingFunction>,
) -> Result<()> {
    let file_path = path
        .map(|p| p.as_ref().to_path_buf())
        .unwrap_or_else(|| std::path::PathBuf::from(&self.filename));
    self.save_to_file_in_place(&file_path, padding_func)
}
/// Rewrites the metadata region of an existing file in place: regenerates
/// the metadata blocks, resizes the on-disk region if the size changed
/// (shifting the audio payload), then writes "fLaC" plus the new blocks.
fn save_to_file_in_place(
    &mut self,
    file_path: &Path,
    padding_func: Option<PaddingFunction>,
) -> Result<()> {
    if !file_path.exists() {
        return Err(AudexError::InvalidData("File does not exist".to_string()));
    }
    let mut file = OpenOptions::new().read(true).write(true).open(file_path)?;
    let audio_offset = self.find_audio_offset_from_file(&mut file)?;
    // Space currently available for metadata: everything between the
    // 4-byte "fLaC" signature and the first audio frame.
    let header_size = 4u64;
    let available = audio_offset.checked_sub(header_size).ok_or_else(|| {
        AudexError::InvalidData(
            "audio offset is smaller than FLAC header size, file may be corrupt".to_string(),
        )
    })?;
    let file_size = file.seek(SeekFrom::End(0))?;
    let content_size = file_size.checked_sub(audio_offset).ok_or_else(|| {
        AudexError::InvalidData(
            "file size is smaller than audio offset, file may be truncated".to_string(),
        )
    })? as usize;
    // prepare_metadata returns None when the on-disk bytes already match.
    let new_metadata = match self.prepare_metadata(padding_func, available, content_size)? {
        Some(m) => m,
        None => return Ok(()),
    };
    let data_size = new_metadata.len() as u64;
    trace_event!(
        metadata_bytes = data_size,
        available_bytes = available,
        audio_offset = audio_offset,
        "writing FLAC metadata blocks"
    );
    // Grow/shrink the byte range starting at header_size from `available`
    // to `data_size`, moving the trailing audio data accordingly.
    resize_bytes(&mut file, available, data_size, header_size)?;
    file.seek(SeekFrom::Start(0))?;
    file.write_all(b"fLaC")?;
    file.write_all(&new_metadata)?;
    // Refresh the snapshot so a subsequent save can short-circuit.
    self.original_metadata = new_metadata;
    self.dirty = false;
    Ok(())
}
/// Writer-based variant of the in-place save: same flow as
/// `save_to_file_in_place`, but the region resize is performed through
/// `resize_metadata_region` and the stream is left positioned at the
/// logical end of the data.
fn save_to_writer_impl(
    &mut self,
    file: &mut dyn crate::ReadWriteSeek,
    padding_func: Option<PaddingFunction>,
) -> Result<()> {
    let audio_offset = self.find_audio_offset_from_file(file)?;
    // Space currently available for metadata after the 4-byte signature.
    let header_size = 4u64;
    let available = audio_offset.checked_sub(header_size).ok_or_else(|| {
        AudexError::InvalidData(
            "audio offset is smaller than FLAC header size, file may be corrupt".to_string(),
        )
    })?;
    let file_size = file.seek(SeekFrom::End(0))?;
    let content_size = file_size.checked_sub(audio_offset).ok_or_else(|| {
        AudexError::InvalidData(
            "file size is smaller than audio offset, file may be truncated".to_string(),
        )
    })? as usize;
    // prepare_metadata returns None when the on-disk bytes already match.
    let new_metadata = match self.prepare_metadata(padding_func, available, content_size)? {
        Some(m) => m,
        None => return Ok(()),
    };
    let data_size = new_metadata.len() as u64;
    let logical_end = Self::resize_metadata_region(file, available, data_size, header_size)?;
    file.seek(SeekFrom::Start(0))?;
    file.write_all(b"fLaC")?;
    file.write_all(&new_metadata)?;
    // Leave the stream at the logical end reported by the resize.
    file.seek(SeekFrom::Start(logical_end))?;
    self.original_metadata = new_metadata;
    self.dirty = false;
    Ok(())
}
/// Regenerates the metadata block region and decides whether a write is
/// actually needed.
///
/// When no padding function is supplied and the existing metadata
/// already occupies exactly `available` bytes, an identical
/// regeneration means nothing changed: `dirty` is cleared and `None` is
/// returned so callers can skip the rewrite entirely. In every other
/// case the freshly generated bytes are returned.
fn prepare_metadata(
    &mut self,
    padding_func: Option<PaddingFunction>,
    available: u64,
    content_size: usize,
) -> Result<Option<Vec<u8>>> {
    let layout_unchanged = padding_func.is_none()
        && !self.original_metadata.is_empty()
        && self.original_metadata.len() as u64 == available;
    if !layout_unchanged {
        let bytes =
            self.generate_metadata_blocks(padding_func, available as usize, content_size)?;
        return Ok(Some(bytes));
    }
    // Same layout, no padding override: only write if content differs.
    let bytes = self.generate_metadata_blocks(None, available as usize, content_size)?;
    if bytes == self.original_metadata {
        self.dirty = false;
        Ok(None)
    } else {
        Ok(Some(bytes))
    }
}
/// Grows or shrinks the metadata region of `file` from `old_size` to
/// `new_size` bytes, shifting the trailing (audio) data accordingly.
/// `offset` is where the region starts (just after the `fLaC` marker).
///
/// Returns the logical end of the stream after the resize. Data is
/// moved through a bounded 64 KiB buffer, and the copy direction is
/// chosen so source bytes are never overwritten before being read.
fn resize_metadata_region(
file: &mut dyn crate::ReadWriteSeek,
old_size: u64,
new_size: u64,
offset: u64,
) -> Result<u64> {
if old_size == new_size {
// Nothing to move; just report the current end of stream.
return file.seek(SeekFrom::End(0)).map_err(Into::into);
}
let file_size = file.seek(SeekFrom::End(0))?;
let buffer_size: usize = 64 * 1024;
if new_size > old_size {
// Growing: first extend the file with zeros, then move the tail
// backward starting from the end so overlapping ranges are safe.
let grow = new_size - old_size;
let src_start = offset + old_size; let bytes_to_move = file_size - src_start;
file.seek(SeekFrom::End(0))?;
let mut remaining = grow;
let zero_buf = vec![0u8; buffer_size];
while remaining > 0 {
let chunk = std::cmp::min(remaining, buffer_size as u64) as usize;
file.write_all(&zero_buf[..chunk])?;
remaining -= chunk as u64;
}
if bytes_to_move > 0 {
// Copy chunks back-to-front; `pos` counts down from the tail.
let mut pos = bytes_to_move;
let mut buf = vec![0u8; buffer_size];
while pos > 0 {
let chunk = std::cmp::min(pos, buffer_size as u64) as usize;
let read_offset = src_start + pos - chunk as u64;
let write_offset = read_offset + grow;
file.seek(SeekFrom::Start(read_offset))?;
file.read_exact(&mut buf[..chunk])?;
file.seek(SeekFrom::Start(write_offset))?;
file.write_all(&buf[..chunk])?;
pos -= chunk as u64;
}
}
let new_total = file_size + (new_size - old_size);
file.flush()?;
Ok(new_total)
} else {
// Shrinking: move the tail forward front-to-back, then truncate
// the leftover bytes at the end of the stream.
let src_start = offset + old_size; let dst_start = offset + new_size; let bytes_to_move = file_size - src_start;
let mut moved = 0u64;
let mut buf = vec![0u8; buffer_size];
while moved < bytes_to_move {
let chunk = std::cmp::min(bytes_to_move - moved, buffer_size as u64) as usize;
file.seek(SeekFrom::Start(src_start + moved))?;
file.read_exact(&mut buf[..chunk])?;
file.seek(SeekFrom::Start(dst_start + moved))?;
file.write_all(&buf[..chunk])?;
moved += chunk as u64;
}
let new_total = file_size - (old_size - new_size);
crate::util::truncate_writer_dyn(file, new_total)?;
file.flush()?;
Ok(new_total)
}
}
/// Scans `file` from the start and returns the byte offset of the
/// first audio frame (one byte past the last metadata block).
///
/// A leading ID3v2 tag is skipped if present, the `fLaC` marker is
/// verified, then the metadata block chain is walked. Vorbis-comment
/// and picture blocks are walked field-by-field instead of trusting
/// the declared header size.
fn find_audio_offset_from_file<F: Read + Seek + ?Sized>(&self, file: &mut F) -> Result<u64> {
file.seek(SeekFrom::Start(0))?;
let mut signature = [0u8; 4];
file.read_exact(&mut signature)?;
if &signature[..3] == b"ID3" {
// 4 header bytes already consumed; read 6 more, the last 4 of
// which carry the tag size (decoded by `decode_id3_size`).
let mut id3_size_bytes = [0u8; 6];
file.read_exact(&mut id3_size_bytes)?;
let id3_size = self.decode_id3_size(&id3_size_bytes[2..])?;
file.seek(SeekFrom::Current(id3_size as i64))?;
file.read_exact(&mut signature)?;
}
if &signature != b"fLaC" {
return Err(AudexError::FLACNoHeader);
}
// Guard against block chains that never set the last-block flag.
const MAX_METADATA_BLOCKS: usize = 1024;
let mut block_count: usize = 0;
loop {
block_count += 1;
if block_count > MAX_METADATA_BLOCKS {
return Err(AudexError::InvalidData(format!(
"Exceeded maximum metadata block count ({}) in audio offset search",
MAX_METADATA_BLOCKS
)));
}
let mut header = [0u8; 4];
match file.read_exact(&mut header) {
Ok(()) => {}
Err(e) if e.kind() == std::io::ErrorKind::UnexpectedEof => {
// Metadata runs to end of file: no audio data follows.
break;
}
Err(e) => return Err(e.into()),
}
// Byte 0: last-block flag (bit 7) | block type (bits 0-6);
// bytes 1-3: 24-bit big-endian block length.
let is_last = (header[0] & 0x80) != 0;
let block_type = header[0] & 0x7F;
let block_size = u32::from_be_bytes([0, header[1], header[2], header[3]]) as u64;
if block_type == 4 {
Self::skip_vorbis_comment_content(file)?;
} else if block_type == 6 {
Self::skip_picture_content(file)?;
} else {
file.seek(SeekFrom::Current(block_size as i64))?;
}
if is_last {
break;
}
}
Ok(file.stream_position()?)
}
/// Advances `file` past a VORBIS_COMMENT payload, assuming the cursor
/// sits at the start of the block body. Every declared length is
/// validated against the end of the file so a corrupt block cannot
/// trigger a runaway seek.
fn skip_vorbis_comment_content<F: Read + Seek + ?Sized>(file: &mut F) -> Result<()> {
    let start = file.stream_position()?;
    let end = file.seek(SeekFrom::End(0))?;
    file.seek(SeekFrom::Start(start))?;
    let mut word = [0u8; 4];
    // Vendor string: little-endian u32 length prefix, then the bytes.
    file.read_exact(&mut word)?;
    let vendor_len = u64::from(u32::from_le_bytes(word));
    if file.stream_position()? + vendor_len > end {
        return Err(AudexError::InvalidData(
            "Vorbis vendor length exceeds file size".to_string(),
        ));
    }
    file.seek(SeekFrom::Current(vendor_len as i64))?;
    // Comment list: entry count, then one length-prefixed entry each.
    file.read_exact(&mut word)?;
    let entries = u32::from_le_bytes(word);
    if entries > 100_000 {
        return Err(AudexError::InvalidData(format!(
            "Vorbis comment count {} too large",
            entries
        )));
    }
    for _ in 0..entries {
        file.read_exact(&mut word)?;
        let entry_len = u64::from(u32::from_le_bytes(word));
        if file.stream_position()? + entry_len > end {
            return Err(AudexError::InvalidData(
                "Vorbis comment length exceeds file size".to_string(),
            ));
        }
        file.seek(SeekFrom::Current(entry_len as i64))?;
    }
    Ok(())
}
/// Advances `file` past a PICTURE payload (type, MIME, description,
/// four fixed u32 fields, then data), validating every declared length
/// against the end of the file before seeking.
///
/// Fix: bounds-check the 16-byte fixed-field region before seeking
/// over it, matching `skip_picture_content_async`; previously a
/// truncated block could seek past EOF here.
fn skip_picture_content<F: Read + Seek + ?Sized>(file: &mut F) -> Result<()> {
    let cur = file.stream_position()?;
    let file_end = file.seek(SeekFrom::End(0))?;
    file.seek(SeekFrom::Start(cur))?;
    let mut buf4 = [0u8; 4];
    // Picture type (ignored here), then the MIME-type length.
    file.read_exact(&mut buf4)?;
    file.read_exact(&mut buf4)?;
    let mime_len = u32::from_be_bytes(buf4) as u64;
    if file.stream_position()? + mime_len > file_end {
        return Err(AudexError::InvalidData(
            "Picture MIME length exceeds file size".to_string(),
        ));
    }
    file.seek(SeekFrom::Current(mime_len as i64))?;
    file.read_exact(&mut buf4)?;
    let desc_len = u32::from_be_bytes(buf4) as u64;
    if file.stream_position()? + desc_len > file_end {
        return Err(AudexError::InvalidData(
            "Picture description length exceeds file size".to_string(),
        ));
    }
    file.seek(SeekFrom::Current(desc_len as i64))?;
    // Four fixed u32 fields (16 bytes); check before skipping, same as
    // the async variant does.
    if file.stream_position()? + 16 > file_end {
        return Err(AudexError::InvalidData(
            "Picture dimensions exceed file size".to_string(),
        ));
    }
    file.seek(SeekFrom::Current(16))?;
    file.read_exact(&mut buf4)?;
    let data_len = u32::from_be_bytes(buf4) as u64;
    if file.stream_position()? + data_len > file_end {
        return Err(AudexError::InvalidData(
            "Picture data length exceeds file size".to_string(),
        ));
    }
    file.seek(SeekFrom::Current(data_len as i64))?;
    Ok(())
}
/// Serializes all metadata blocks into the raw byte layout written
/// after the `fLaC` marker: STREAMINFO first, then the remaining block
/// types in their original file order where known, then any block
/// kinds missing from that order, then padding.
///
/// `available` is the byte budget of the existing metadata region and
/// `content_size` the size of the trailing audio data; both feed the
/// padding calculation. Oversized blocks go through
/// `add_block_with_overflow_check` / `validate_picture_size`, which may
/// preserve the original (overflowed) header size for round-tripping.
fn generate_metadata_blocks(
&mut self,
padding_func: Option<PaddingFunction>,
available: usize,
content_size: usize,
) -> Result<Vec<u8>> {
self.invalid_overflow_size.clear();
let mut blocks = Vec::new();
// STREAMINFO (type 0) always leads the chain.
let streaminfo_data = self.info.to_bytes()?;
self.add_block_with_overflow_check(&mut blocks, 0, streaminfo_data)?;
// Track emitted block types so the original-order replay and the
// fallback passes below never duplicate a block kind.
let mut written_blocks = std::collections::HashSet::new();
written_blocks.insert(0u8);
// Clone out of self so we can call &mut self helpers while iterating.
let application_blocks = self.application_blocks.clone();
let pictures = self.pictures.clone();
let metadata_blocks = self.metadata_blocks.clone();
let original_block_order = self.original_block_order.clone();
// First pass: replay block types in the order they appeared in the
// source file. Padding (type 1) is skipped here and regenerated last.
if !original_block_order.is_empty() {
for &block_type in &original_block_order {
if block_type == 0 || written_blocks.contains(&block_type) {
continue;
}
match block_type {
1 => {
continue;
}
2 => {
if !written_blocks.contains(&2) {
for app_block in &application_blocks {
let app_data = app_block.to_bytes()?;
self.add_block_with_overflow_check(&mut blocks, 2, app_data)?;
}
written_blocks.insert(2);
}
}
3 => {
if let Some(ref seektable) = self.seektable {
let seek_data = seektable.to_bytes()?;
self.add_block_with_overflow_check(&mut blocks, 3, seek_data)?;
written_blocks.insert(3);
}
}
4 => {
if let Some(ref tags) = self.tags {
let mut comment_to_write = tags.clone();
// Stamp our vendor string only on non-empty comment sets.
if !comment_to_write.keys().is_empty() {
comment_to_write.set_vendor(format!("Audex {}", VERSION_STRING));
}
let vorbis_data = comment_to_write.to_bytes()?;
self.add_block_with_overflow_check(&mut blocks, 4, vorbis_data)?;
written_blocks.insert(4);
}
}
5 => {
if let Some(ref cuesheet) = self.cuesheet {
let cue_data = cuesheet.to_bytes()?;
self.add_block_with_overflow_check(&mut blocks, 5, cue_data)?;
written_blocks.insert(5);
}
}
6 => {
if !written_blocks.contains(&6) {
for picture in &pictures {
let pic_data = picture.to_bytes()?;
// Pictures get their own overflow handling since the
// block is pushed directly, not via the helper.
let override_size = self.validate_picture_size(&pic_data)?;
let mut block = MetadataBlock::new(6, pic_data);
block.override_header_size = override_size;
blocks.push(block);
}
written_blocks.insert(6);
}
}
_ => {
// Unknown/custom types: copy through all stored blocks of
// this type verbatim.
for metadata_block in &metadata_blocks {
if metadata_block.block_type == block_type {
let block_data = metadata_block.data.clone();
self.add_block_with_overflow_check(
&mut blocks,
block_type,
block_data,
)?;
}
}
written_blocks.insert(block_type);
}
}
}
}
// Fallback passes: emit any block kinds not present in the original
// order (e.g. tags or pictures added after loading).
if !written_blocks.contains(&4) {
if let Some(ref tags) = self.tags {
let mut comment_to_write = tags.clone();
if !comment_to_write.keys().is_empty() {
comment_to_write.set_vendor(format!("Audex {}", VERSION_STRING));
}
let vorbis_data = comment_to_write.to_bytes()?;
self.add_block_with_overflow_check(&mut blocks, 4, vorbis_data)?;
written_blocks.insert(4);
}
}
if !written_blocks.contains(&3) {
if let Some(ref seektable) = self.seektable {
let seek_data = seektable.to_bytes()?;
self.add_block_with_overflow_check(&mut blocks, 3, seek_data)?;
written_blocks.insert(3);
}
}
if !written_blocks.contains(&2) {
for app_block in &application_blocks {
let app_data = app_block.to_bytes()?;
self.add_block_with_overflow_check(&mut blocks, 2, app_data)?;
}
written_blocks.insert(2);
}
if !written_blocks.contains(&5) {
if let Some(ref cuesheet) = self.cuesheet {
let cue_data = cuesheet.to_bytes()?;
self.add_block_with_overflow_check(&mut blocks, 5, cue_data)?;
written_blocks.insert(5);
}
}
if !written_blocks.contains(&6) {
for picture in &pictures {
let pic_data = picture.to_bytes()?;
let override_size = self.validate_picture_size(&pic_data)?;
let mut block = MetadataBlock::new(6, pic_data);
block.override_header_size = override_size;
blocks.push(block);
}
written_blocks.insert(6);
}
// Custom block types (7+) not covered by the original order.
for metadata_block in &metadata_blocks {
if matches!(metadata_block.block_type, 0..=6) {
continue;
}
if !written_blocks.contains(&metadata_block.block_type) {
let block_data = metadata_block.data.clone();
self.add_block_with_overflow_check(
&mut blocks,
metadata_block.block_type,
block_data,
)?;
written_blocks.insert(metadata_block.block_type);
}
}
// Append padding sized from the remaining space budget.
{
let padding_size = self.calculate_padding_size_for_generation(
available,
content_size,
&blocks,
padding_func,
)?;
if padding_size > 0 {
self.add_padding_blocks(&mut blocks, padding_size)?;
}
}
// Serialize: 1 header byte (last-block flag | type) + 24-bit
// big-endian size + payload, per block.
let mut metadata_bytes = Vec::new();
for (i, block) in blocks.iter().enumerate() {
let is_last = i == blocks.len() - 1;
if block.block_type >= 127 {
return Err(AudexError::InvalidData(format!(
"FLAC block type {} is out of valid range (0-126)",
block.block_type
)));
}
let header_byte = block.block_type | if is_last { 0x80 } else { 0x00 };
metadata_bytes.push(header_byte);
let size = block
.override_header_size
.unwrap_or(block.data.len() as u32);
let size_bytes = size.to_be_bytes();
// Drop the top byte of the u32: the header field is 24 bits.
metadata_bytes.extend_from_slice(&size_bytes[1..]);
metadata_bytes.extend_from_slice(&block.data);
}
Ok(metadata_bytes)
}
/// Computes how many padding bytes to append after the given blocks.
///
/// The space already committed is the sum of each block's payload plus
/// its 4-byte header, plus 4 bytes for the padding block's own header.
/// The user-supplied (or default) padding policy is then clamped to the
/// maximum representable block size and floored at zero.
fn calculate_padding_size_for_generation(
    &self,
    available: usize,
    content_size: usize,
    blocks: &[MetadataBlock],
    padding_func: Option<PaddingFunction>,
) -> Result<usize> {
    // Start at 4 to account for the padding block header itself.
    let occupied: usize = blocks.iter().fold(4usize, |acc, b| acc + 4 + b.data.len());
    let free_space = available as i64 - occupied as i64;
    let info = PaddingInfo::new(free_space, content_size as i64);
    let requested = info.get_padding_with(padding_func);
    let clamped = std::cmp::min(requested, Picture::MAX_SIZE as i64).max(0);
    Ok(clamped as usize)
}
/// Checks a serialized picture against the 24-bit block size limit.
///
/// Returns `Ok(None)` when the picture fits. When it overflows, the
/// overflow is recorded; if the source file already carried this block
/// with an overflowed header, that original header size is returned so
/// the file round-trips, otherwise the overflow is an error.
fn validate_picture_size(&mut self, pic_data: &[u8]) -> Result<Option<u32>> {
    const MAX_BLOCK_SIZE: usize = 0xFFFFFF;
    if pic_data.len() <= MAX_BLOCK_SIZE {
        return Ok(None);
    }
    // Record the overflow regardless of whether we can tolerate it.
    self.invalid_overflow_size.push((6, pic_data.len()));
    match self.original_overflow_sizes.get(&6u8) {
        Some(&original_size) => Ok(Some(original_size)),
        None => Err(AudexError::InvalidData(format!(
            "Picture block too large: {} bytes (max: {} bytes)",
            pic_data.len(),
            MAX_BLOCK_SIZE
        ))),
    }
}
/// Appends a metadata block to `blocks`, enforcing the 24-bit block
/// size limit.
///
/// An oversized block is tolerated only when the source file already
/// carried this block type with an overflowed header; in that case the
/// original header size is kept so the file round-trips. Otherwise the
/// overflow is an error. Either way the overflow is recorded in
/// `invalid_overflow_size`.
fn add_block_with_overflow_check(
    &mut self,
    blocks: &mut Vec<MetadataBlock>,
    block_type: u8,
    data: Vec<u8>,
) -> Result<()> {
    const MAX_BLOCK_SIZE: usize = 0xFFFFFF;
    if data.len() <= MAX_BLOCK_SIZE {
        blocks.push(MetadataBlock::new(block_type, data));
        return Ok(());
    }
    self.invalid_overflow_size.push((block_type, data.len()));
    if let Some(&original_size) = self.original_overflow_sizes.get(&block_type) {
        // Pre-existing overflow: keep writing the original header size.
        let mut block = MetadataBlock::new(block_type, data);
        block.override_header_size = Some(original_size);
        blocks.push(block);
        Ok(())
    } else {
        Err(AudexError::InvalidData(format!(
            "Block type {} too large: {} bytes (max: {} bytes)",
            block_type,
            data.len(),
            MAX_BLOCK_SIZE
        )))
    }
}
/// Appends PADDING (type 1) blocks totalling `total_padding` bytes.
///
/// A single padding block can describe at most 0xFFFFFF bytes, so
/// larger requests are split into maximal chunks; zero padding appends
/// nothing.
fn add_padding_blocks(
    &mut self,
    blocks: &mut Vec<MetadataBlock>,
    total_padding: usize,
) -> Result<()> {
    const MAX_PADDING_SIZE: usize = 0xFFFFFF;
    let mut left = total_padding;
    while left > 0 {
        let chunk = left.min(MAX_PADDING_SIZE);
        blocks.push(MetadataBlock::new(1, vec![0u8; chunk]));
        left -= chunk;
    }
    Ok(())
}
/// Asynchronously opens and parses the FLAC file at `path` with the
/// default [`FLACParseOptions`].
#[cfg(feature = "async")]
pub async fn from_file_async<P: AsRef<Path>>(path: P) -> Result<Self> {
    let options = FLACParseOptions::default();
    Self::from_file_with_options_async(path, options).await
}
/// Asynchronously opens and parses the FLAC file at `path` with the
/// supplied parse options, recording the path on the returned instance
/// so later saves can target the same file.
#[cfg(feature = "async")]
pub async fn from_file_with_options_async<P: AsRef<Path>>(
    path: P,
    options: FLACParseOptions,
) -> Result<Self> {
    let path = path.as_ref();
    let mut flac = Self::with_options(options);
    flac.filename = path.to_string_lossy().to_string();
    let file = TokioFile::open(path).await?;
    let mut reader = TokioBufReader::new(file);
    flac.parse_flac_streaming_async(&mut reader).await?;
    Ok(flac)
}
/// Convenience alias for `from_file_async` with default options.
#[cfg(feature = "async")]
pub async fn load_async<P: AsRef<Path>>(path: P) -> Result<Self> {
Self::from_file_async(path).await
}
/// Asynchronously parses a FLAC stream: optional ID3v2 skip, `fLaC`
/// marker check, metadata block parsing, a raw snapshot of the
/// metadata region (used later for no-op save detection), and finally
/// a bitrate estimate derived from audio size and duration.
#[cfg(feature = "async")]
async fn parse_flac_streaming_async<R>(&mut self, reader: &mut TokioBufReader<R>) -> Result<()>
where
R: tokio::io::AsyncRead + tokio::io::AsyncSeek + Unpin,
{
use tokio::io::{AsyncReadExt, AsyncSeekExt};
let mut signature = [0u8; 4];
reader.read_exact(&mut signature).await?;
if &signature[..3] == b"ID3" {
// Skip a leading ID3v2 tag: 6 more header bytes, the last 4 of
// which carry the tag size (decoded by `decode_id3_size`).
let mut id3_size_bytes = [0u8; 6];
reader.read_exact(&mut id3_size_bytes).await?;
let id3_size = self.decode_id3_size(&id3_size_bytes[2..])?;
reader.seek(SeekFrom::Current(id3_size as i64)).await?;
reader.read_exact(&mut signature).await?;
}
if &signature != b"fLaC" {
let error = FLACError {
kind: FLACErrorKind::InvalidHeader,
position: reader.stream_position().await.ok(),
context: "Missing or invalid FLAC signature".to_string(),
};
self.parse_errors.push(error);
return Err(AudexError::FLACNoHeader);
}
let metadata_start = reader.stream_position().await?;
self.parse_metadata_blocks_streaming_async(reader).await?;
let metadata_end = reader.stream_position().await?;
let file_end = reader.seek(SeekFrom::End(0)).await?;
// The parser may have stopped past EOF on a corrupt file; clamp.
let capped_end = metadata_end.min(file_end);
let metadata_size_u64 = capped_end.checked_sub(metadata_start).ok_or_else(|| {
AudexError::InvalidData("metadata region extends beyond file boundaries".to_string())
})?;
let metadata_size = usize::try_from(metadata_size_u64).map_err(|_| {
AudexError::InvalidData("metadata region too large for this platform".to_string())
})?;
// Snapshot the raw region so saves can detect "nothing changed".
reader.seek(SeekFrom::Start(metadata_start)).await?;
self.original_metadata = vec![0u8; metadata_size];
match reader.read_exact(&mut self.original_metadata).await {
Ok(_) => {}
Err(e) if e.kind() == std::io::ErrorKind::UnexpectedEof => {
// Truncated region: drop the partial snapshot entirely.
self.original_metadata.clear();
}
Err(e) => return Err(e.into()),
}
// Estimate average bitrate from audio byte count and duration.
if self.info.total_samples > 0 {
if let Ok(end_pos) = reader.seek(SeekFrom::End(0)).await {
if let Some(duration) = self.info.length {
if end_pos >= metadata_end {
let audio_size = end_pos - metadata_end;
let duration_secs = duration.as_secs_f64();
if duration_secs > 0.0 {
let bitrate = (audio_size * 8) as f64 / duration_secs;
self.info.bitrate = Some(bitrate as u32);
}
}
}
}
}
Ok(())
}
/// Walks the metadata block chain, dispatching each block to its typed
/// parser and recording recoverable problems in `parse_errors`.
///
/// Behavior notes:
/// - duplicate SEEKTABLE / CUESHEET / VORBIS_COMMENT blocks are skipped
///   (fatal when `ignore_errors` is off);
/// - blocks larger than `max_block_size` are skipped with a bounded
///   seek (an oversized VORBIS_COMMENT is always fatal);
/// - after a successful parse the raw block bytes are re-read and kept
///   in `metadata_blocks` for byte-exact regeneration on save.
#[cfg(feature = "async")]
async fn parse_metadata_blocks_streaming_async<R>(
&mut self,
reader: &mut TokioBufReader<R>,
) -> Result<()>
where
R: tokio::io::AsyncRead + tokio::io::AsyncSeek + Unpin,
{
use tokio::io::{AsyncReadExt, AsyncSeekExt};
let mut is_last = false;
let mut vorbis_comment_count = 0;
// Guard against chains that never set the last-block flag.
const MAX_METADATA_BLOCKS: usize = 1024;
let mut block_count: usize = 0;
while !is_last {
block_count += 1;
if block_count > MAX_METADATA_BLOCKS {
return Err(AudexError::InvalidData(format!(
"Exceeded maximum metadata block count ({})",
MAX_METADATA_BLOCKS
)));
}
let block_start_pos = reader.stream_position().await?;
let mut header = [0u8; 4];
if let Err(e) = reader.read_exact(&mut header).await {
// A short header is tolerated in best-effort mode: stop here.
if self.parse_options.ignore_errors {
self.parse_errors.push(FLACError {
kind: FLACErrorKind::CorruptedBlock,
position: Some(block_start_pos),
context: format!("Failed to read block header: {}", e),
});
break;
}
return Err(e.into());
}
// Byte 0: last-block flag (bit 7) | block type (bits 0-6);
// bytes 1-3: 24-bit big-endian block length.
let block_type = header[0] & 0x7F;
is_last = (header[0] & 0x80) != 0;
let block_size = u32::from_be_bytes([0, header[1], header[2], header[3]]);
self.original_block_order.push(block_type);
let block_data_start = reader.stream_position().await?;
if block_size > self.parse_options.max_block_size {
let error = FLACError {
kind: FLACErrorKind::BlockSizeError,
position: Some(block_start_pos),
context: format!(
"Block size {} exceeds maximum {}",
block_size, self.parse_options.max_block_size
),
};
self.parse_errors.push(error);
// Oversized comment blocks are always fatal; other types are
// fatal only when errors are not ignored.
if block_type == 4 || !self.parse_options.ignore_errors {
return Err(AudexError::InvalidData(format!(
"Block size {} exceeds maximum {}",
block_size, self.parse_options.max_block_size
)));
}
let safe_skip_size = min(block_size, self.parse_options.max_block_size);
reader
.seek(SeekFrom::Current(safe_skip_size as i64))
.await?;
continue;
}
let parse_result = match block_type {
0 => self.parse_streaminfo_block_async(reader, block_size).await,
1 => self.parse_padding_block_async(reader, block_size).await,
2 => self.parse_application_block_async(reader, block_size).await,
3 => {
// Only one SEEKTABLE is allowed; later ones are skipped.
if self.seektable.is_some() {
self.parse_errors.push(FLACError {
kind: FLACErrorKind::MultipleSeekTableBlocks,
position: Some(block_start_pos),
context: "> 1 SeekTable block found".to_string(),
});
if !self.parse_options.ignore_errors {
return Err(AudexError::InvalidData(
"> 1 SeekTable block found".to_string(),
));
}
reader.seek(SeekFrom::Current(block_size as i64)).await?;
Ok(())
} else {
self.parse_seektable_block_async(reader, block_size).await
}
}
4 => {
// Only the first VORBIS_COMMENT block is used.
vorbis_comment_count += 1;
if vorbis_comment_count > 1 {
self.parse_errors.push(FLACError {
kind: FLACErrorKind::MultipleVorbisBlocks,
position: Some(block_start_pos),
context: format!(
"Found {} VORBIS_COMMENT blocks, using first",
vorbis_comment_count
),
});
if !self.parse_options.ignore_errors {
return Err(AudexError::FLACVorbis);
}
reader.seek(SeekFrom::Current(block_size as i64)).await?;
Ok(())
} else {
self.parse_vorbis_comment_block_async(reader, block_size)
.await
}
}
5 => {
// Only one CUESHEET is allowed; later ones are skipped.
if self.cuesheet.is_some() {
self.parse_errors.push(FLACError {
kind: FLACErrorKind::MultipleCueSheetBlocks,
position: Some(block_start_pos),
context: "> 1 CueSheet block found".to_string(),
});
if !self.parse_options.ignore_errors {
return Err(AudexError::InvalidData(
"> 1 CueSheet block found".to_string(),
));
}
reader.seek(SeekFrom::Current(block_size as i64)).await?;
Ok(())
} else {
self.parse_cuesheet_block_async(reader, block_size).await
}
}
6 => self.parse_picture_block_async(reader, block_size).await,
_ => {
// Unknown block type: skip the payload, keep the raw bytes
// (captured below) for round-tripping.
reader.seek(SeekFrom::Current(block_size as i64)).await?;
Ok(())
}
};
if let Err(e) = parse_result {
let error = FLACError {
kind: FLACErrorKind::CorruptedBlock,
position: Some(block_start_pos),
context: format!("Block type {} parse error: {}", block_type, e),
};
self.parse_errors.push(error);
// STREAMINFO failure is fatal in strict mode; fatal vorbis
// errors are fatal in both modes.
let is_fatal_error = if self.parse_options.ignore_errors {
block_type == 4 && self.is_fatal_vorbis_error(&e)
} else {
block_type == 0 || (block_type == 4 && self.is_fatal_vorbis_error(&e))
};
if is_fatal_error {
return Err(e);
}
// Re-align to the declared end of the block before continuing.
let current_pos = reader.stream_position().await?;
let expected_pos = block_start_pos + 4 + block_size as u64;
if current_pos != expected_pos {
reader.seek(SeekFrom::Start(expected_pos)).await?;
}
} else {
// Success: re-read the exact bytes the parser consumed and
// store them for byte-exact regeneration on save.
let current_pos = reader.stream_position().await?;
let bytes_to_read = current_pos.checked_sub(block_data_start).ok_or_else(|| {
AudexError::InvalidData(format!(
"FLAC block position underflow: current {} < start {}",
current_pos, block_data_start
))
})? as usize;
// Remember oversized comment/picture header sizes so saves can
// reproduce the overflowed header.
if (block_type == 4 || block_type == 6) && bytes_to_read > 0xFFFFFF {
self.original_overflow_sizes.insert(block_type, block_size);
}
reader.seek(SeekFrom::Start(block_data_start)).await?;
let mut block_data = vec![0u8; bytes_to_read];
reader.read_exact(&mut block_data).await?;
self.metadata_blocks
.push(MetadataBlock::new(block_type, block_data));
}
}
Ok(())
}
/// Reads and decodes the mandatory STREAMINFO block.
///
/// # Errors
/// Returns `InvalidData` when the declared size is not the fixed
/// 34 bytes the STREAMINFO layout requires, or when decoding fails.
#[cfg(feature = "async")]
async fn parse_streaminfo_block_async<R>(
    &mut self,
    reader: &mut TokioBufReader<R>,
    block_size: u32,
) -> Result<()>
where
    R: tokio::io::AsyncRead + tokio::io::AsyncSeek + Unpin,
{
    use tokio::io::AsyncReadExt;
    // STREAMINFO has a fixed 34-byte layout; anything else is malformed.
    if block_size != 34 {
        return Err(AudexError::InvalidData(format!(
            "Invalid STREAMINFO size: {} (expected 34)",
            block_size
        )));
    }
    let mut raw = [0u8; 34];
    reader.read_exact(&mut raw).await?;
    self.info = FLACStreamInfo::from_bytes(&raw)?;
    Ok(())
}
/// Records a PADDING block's size and seeks past its body. Padding
/// content is all zeros by definition, so the bytes are never read.
#[cfg(feature = "async")]
async fn parse_padding_block_async<R>(
    &mut self,
    reader: &mut TokioBufReader<R>,
    block_size: u32,
) -> Result<()>
where
    R: tokio::io::AsyncRead + tokio::io::AsyncSeek + Unpin,
{
    use tokio::io::AsyncSeekExt;
    self.padding_blocks.push(Padding::new(block_size as usize));
    reader.seek(SeekFrom::Current(block_size as i64)).await?;
    Ok(())
}
/// Reads and decodes an APPLICATION block.
///
/// When `distrust_size` is set the read is capped at `max_block_size`
/// and any remaining declared bytes are skipped with a seek.
#[cfg(feature = "async")]
async fn parse_application_block_async<R>(
    &mut self,
    reader: &mut TokioBufReader<R>,
    block_size: u32,
) -> Result<()>
where
    R: tokio::io::AsyncRead + tokio::io::AsyncSeek + Unpin,
{
    use tokio::io::{AsyncReadExt, AsyncSeekExt};
    let safe_size = match self.parse_options.distrust_size {
        true => min(block_size, self.parse_options.max_block_size),
        false => block_size,
    };
    let mut payload = vec![0u8; safe_size as usize];
    reader.read_exact(&mut payload).await?;
    let application_block = ApplicationBlock::from_bytes(&payload)?;
    self.application_blocks.push(application_block);
    if block_size > safe_size {
        // Skip the distrusted remainder of the declared block size.
        reader
            .seek(SeekFrom::Current((block_size - safe_size) as i64))
            .await?;
    }
    Ok(())
}
/// Reads and decodes a SEEKTABLE block.
///
/// When `distrust_size` is set the read is capped at `max_block_size`
/// and any remaining declared bytes are skipped.
///
/// # Errors
/// Returns `InvalidData` when the stream ends before the full block
/// could be read or when the seek table itself fails to parse.
#[cfg(feature = "async")]
async fn parse_seektable_block_async<R>(
    &mut self,
    reader: &mut TokioBufReader<R>,
    block_size: u32,
) -> Result<()>
where
    R: tokio::io::AsyncRead + tokio::io::AsyncSeek + Unpin,
{
    use tokio::io::{AsyncReadExt, AsyncSeekExt};
    let safe_size = if self.parse_options.distrust_size {
        min(block_size, self.parse_options.max_block_size)
    } else {
        block_size
    };
    let mut data = vec![0u8; safe_size as usize];
    // A single `read` may legally return fewer bytes than requested even
    // when more are pending, which mis-reported valid files as
    // truncated; `read_exact` retries until the buffer is full and only
    // fails on a genuine end-of-stream.
    match reader.read_exact(&mut data).await {
        Ok(_) => {}
        Err(e) if e.kind() == ErrorKind::UnexpectedEof => {
            return Err(AudexError::InvalidData(
                "Truncated SEEKTABLE block".to_string(),
            ));
        }
        Err(e) => return Err(e.into()),
    }
    // Cap the number of seek points to bound memory on hostile input.
    let max_seekpoints = Some(10000);
    let seektable = SeekTable::from_bytes_with_options(&data, max_seekpoints)?;
    self.seektable = Some(seektable);
    if block_size > safe_size {
        // Skip the distrusted remainder of the declared block size.
        reader
            .seek(SeekFrom::Current((block_size - safe_size) as i64))
            .await?;
    }
    Ok(())
}
/// Reads and decodes a VORBIS_COMMENT block into `self.tags`, using
/// the configured Vorbis error mode for malformed entries.
///
/// When `distrust_size` is set the read is capped at `max_block_size`
/// and any remaining declared bytes are skipped.
#[cfg(feature = "async")]
async fn parse_vorbis_comment_block_async<R>(
    &mut self,
    reader: &mut TokioBufReader<R>,
    block_size: u32,
) -> Result<()>
where
    R: tokio::io::AsyncRead + tokio::io::AsyncSeek + Unpin,
{
    use tokio::io::{AsyncReadExt, AsyncSeekExt};
    let safe_size = match self.parse_options.distrust_size {
        true => min(block_size, self.parse_options.max_block_size),
        false => block_size,
    };
    let mut payload = vec![0u8; safe_size as usize];
    reader.read_exact(&mut payload).await?;
    let comment = VCommentDict::from_bytes_with_options(
        &payload,
        self.parse_options.vorbis_error_mode,
        false,
    )?;
    self.tags = Some(comment);
    if block_size > safe_size {
        // Skip the distrusted remainder of the declared block size.
        reader
            .seek(SeekFrom::Current((block_size - safe_size) as i64))
            .await?;
    }
    Ok(())
}
/// Reads and decodes a CUESHEET block.
///
/// When `distrust_size` is set the read is capped at `max_block_size`
/// and any remaining declared bytes are skipped.
///
/// # Errors
/// Returns `InvalidData` when the stream ends before the full block
/// could be read or when the cue sheet itself fails to parse.
#[cfg(feature = "async")]
async fn parse_cuesheet_block_async<R>(
    &mut self,
    reader: &mut TokioBufReader<R>,
    block_size: u32,
) -> Result<()>
where
    R: tokio::io::AsyncRead + tokio::io::AsyncSeek + Unpin,
{
    use tokio::io::{AsyncReadExt, AsyncSeekExt};
    let safe_size = if self.parse_options.distrust_size {
        min(block_size, self.parse_options.max_block_size)
    } else {
        block_size
    };
    let mut data = vec![0u8; safe_size as usize];
    // A single `read` may legally return fewer bytes than requested even
    // when more are pending, which mis-reported valid files as
    // truncated; `read_exact` retries until the buffer is full and only
    // fails on a genuine end-of-stream.
    match reader.read_exact(&mut data).await {
        Ok(_) => {}
        Err(e) if e.kind() == ErrorKind::UnexpectedEof => {
            return Err(AudexError::InvalidData(
                "Truncated CUESHEET block".to_string(),
            ));
        }
        Err(e) => return Err(e.into()),
    }
    let cuesheet = CueSheet::from_bytes(&data)?;
    self.cuesheet = Some(cuesheet);
    if block_size > safe_size {
        // Skip the distrusted remainder of the declared block size.
        reader
            .seek(SeekFrom::Current((block_size - safe_size) as i64))
            .await?;
    }
    Ok(())
}
/// Reads and decodes a PICTURE block.
///
/// Truncated or unparseable pictures are tolerated in best-effort mode
/// (`ignore_errors`): an empty block is dropped silently and a partial
/// payload is still offered to the picture parser.
#[cfg(feature = "async")]
async fn parse_picture_block_async<R>(
    &mut self,
    reader: &mut TokioBufReader<R>,
    block_size: u32,
) -> Result<()>
where
    R: tokio::io::AsyncRead + tokio::io::AsyncSeek + Unpin,
{
    use tokio::io::{AsyncReadExt, AsyncSeekExt};
    let safe_size = if self.parse_options.distrust_size {
        min(block_size, self.parse_options.max_block_size)
    } else {
        block_size
    };
    let mut data = vec![0u8; safe_size as usize];
    // Fill the buffer with a read loop: a single `read` may return a
    // short count even when more bytes are pending, which would wrongly
    // classify a healthy file as a truncated picture. Only a zero-byte
    // read marks a genuine end-of-stream.
    let mut bytes_read = 0usize;
    while bytes_read < data.len() {
        let n = reader.read(&mut data[bytes_read..]).await?;
        if n == 0 {
            break;
        }
        bytes_read += n;
    }
    if bytes_read < safe_size as usize {
        if !self.parse_options.ignore_errors {
            return Err(AudexError::InvalidData(
                "Truncated PICTURE block".to_string(),
            ));
        }
        // Best-effort mode: an empty block is silently dropped; a
        // partial one is still handed to the parser below.
        if bytes_read == 0 {
            return Ok(());
        }
    }
    data.truncate(bytes_read);
    let max_picture_size = Some(self.parse_options.max_block_size as usize);
    match Picture::from_bytes_with_options(&data, max_picture_size) {
        Ok(picture) => self.pictures.push(picture),
        Err(_) if self.parse_options.ignore_errors => {
            // Unparseable picture in best-effort mode: skip it.
        }
        Err(e) => return Err(e),
    }
    if block_size > safe_size && bytes_read == safe_size as usize {
        // Skip the distrusted remainder of the declared block size.
        reader
            .seek(SeekFrom::Current((block_size - safe_size) as i64))
            .await?;
    }
    Ok(())
}
/// Asynchronously rewrites the metadata region of `path` (or the file
/// this instance was loaded from) in place.
///
/// Fix: the metadata regeneration / no-op detection logic was an inline
/// duplicate of `prepare_metadata`; it now calls that helper so the
/// synchronous and asynchronous save paths cannot drift apart.
#[cfg(feature = "async")]
pub async fn save_to_file_async<P: AsRef<Path>>(
    &mut self,
    path: Option<P>,
    _delete_id3: bool,
    padding_func: Option<PaddingFunction>,
) -> Result<()> {
    use tokio::io::{AsyncSeekExt, AsyncWriteExt};
    let file_path = match path {
        Some(p) => p.as_ref().to_path_buf(),
        None => std::path::PathBuf::from(&self.filename),
    };
    if !tokio::fs::try_exists(&file_path).await.unwrap_or(false) {
        return Err(AudexError::InvalidData("File does not exist".to_string()));
    }
    let mut file = TokioOpenOptions::new()
        .read(true)
        .write(true)
        .open(&file_path)
        .await?;
    let audio_offset = self.find_audio_offset_async(&mut file).await?;
    // 4 bytes for the "fLaC" stream marker.
    let header_size = 4u64;
    let available = audio_offset.checked_sub(header_size).ok_or_else(|| {
        AudexError::InvalidData(
            "audio offset is smaller than FLAC header size, file may be corrupt".to_string(),
        )
    })?;
    let file_size = file.seek(SeekFrom::End(0)).await?;
    let content_size = file_size.checked_sub(audio_offset).ok_or_else(|| {
        AudexError::InvalidData(
            "file size is smaller than audio offset, file may be truncated".to_string(),
        )
    })? as usize;
    // Shared with the sync path; `None` means the on-disk metadata is
    // already identical, so there is nothing to write.
    let new_metadata = match self.prepare_metadata(padding_func, available, content_size)? {
        Some(m) => m,
        None => return Ok(()),
    };
    let data_size = new_metadata.len() as u64;
    resize_bytes_async(&mut file, available, data_size, header_size).await?;
    file.seek(SeekFrom::Start(0)).await?;
    file.write_all(b"fLaC").await?;
    file.write_all(&new_metadata).await?;
    file.flush().await?;
    self.original_metadata = new_metadata;
    self.dirty = false;
    Ok(())
}
/// Saves back to the file this instance was loaded from, with no path
/// override, no ID3 deletion, and default padding behavior.
#[cfg(feature = "async")]
pub async fn save_async(&mut self) -> Result<()> {
self.save_to_file_async::<&str>(None, false, None).await
}
/// Removes the Vorbis comment tags and persists the change. Does
/// nothing (and touches no file) when no tags are present.
#[cfg(feature = "async")]
pub async fn clear_async(&mut self) -> Result<()> {
    if self.tags.is_none() {
        return Ok(());
    }
    self.tags = None;
    self.dirty = true;
    self.save_async().await
}
/// Deletes the backing file from disk. A no-op when this instance has
/// no recorded filename (e.g. it was parsed from a reader).
#[cfg(feature = "async")]
pub async fn delete_async(&mut self) -> Result<()> {
    if self.filename.is_empty() {
        return Ok(());
    }
    tokio::fs::remove_file(&self.filename).await?;
    Ok(())
}
/// Async twin of `find_audio_offset_from_file`: returns the byte
/// offset of the first audio frame after skipping any ID3v2 tag,
/// verifying the `fLaC` marker, and walking the metadata block chain.
#[cfg(feature = "async")]
async fn find_audio_offset_async(&self, file: &mut TokioFile) -> Result<u64> {
use tokio::io::{AsyncReadExt, AsyncSeekExt};
file.seek(SeekFrom::Start(0)).await?;
let mut signature = [0u8; 4];
file.read_exact(&mut signature).await?;
if &signature[..3] == b"ID3" {
// 4 header bytes already consumed; read 6 more, the last 4 of
// which carry the tag size (decoded by `decode_id3_size`).
let mut id3_size_bytes = [0u8; 6];
file.read_exact(&mut id3_size_bytes).await?;
let id3_size = self.decode_id3_size(&id3_size_bytes[2..])?;
file.seek(SeekFrom::Current(id3_size as i64)).await?;
file.read_exact(&mut signature).await?;
}
if &signature != b"fLaC" {
return Err(AudexError::FLACNoHeader);
}
// Guard against block chains that never set the last-block flag.
const MAX_METADATA_BLOCKS: usize = 1024;
let mut block_count = 0usize;
loop {
block_count += 1;
if block_count > MAX_METADATA_BLOCKS {
return Err(AudexError::InvalidData(format!(
"Exceeded maximum metadata block count ({}) in audio offset search",
MAX_METADATA_BLOCKS
)));
}
let mut header = [0u8; 4];
file.read_exact(&mut header).await?;
// Byte 0: last-block flag (bit 7) | block type (bits 0-6);
// bytes 1-3: 24-bit big-endian block length.
let is_last = (header[0] & 0x80) != 0;
let block_type = header[0] & 0x7F;
let block_size = u32::from_be_bytes([0, header[1], header[2], header[3]]) as u64;
if block_type == 4 {
Self::skip_vorbis_comment_content_async(file).await?;
} else if block_type == 6 {
Self::skip_picture_content_async(file).await?;
} else {
file.seek(SeekFrom::Current(block_size as i64)).await?;
}
if is_last {
break;
}
}
Ok(file.stream_position().await?)
}
/// Async twin of `skip_vorbis_comment_content`: advances past a
/// VORBIS_COMMENT payload field-by-field, validating every declared
/// length against the file end before seeking.
#[cfg(feature = "async")]
async fn skip_vorbis_comment_content_async(file: &mut TokioFile) -> Result<()> {
use tokio::io::{AsyncReadExt, AsyncSeekExt};
let cur = file.stream_position().await?;
let file_end = file.seek(SeekFrom::End(0)).await?;
file.seek(SeekFrom::Start(cur)).await?;
let mut len_buf = [0u8; 4];
// Vendor string: little-endian u32 length prefix, then the bytes.
file.read_exact(&mut len_buf).await?;
let vendor_len = u32::from_le_bytes(len_buf) as u64;
if file.stream_position().await? + vendor_len > file_end {
return Err(AudexError::InvalidData(
"Vorbis vendor length exceeds file size".to_string(),
));
}
file.seek(SeekFrom::Current(vendor_len as i64)).await?;
// Comment list: entry count, then one length-prefixed entry each.
file.read_exact(&mut len_buf).await?;
let count = u32::from_le_bytes(len_buf);
if count > 100_000 {
return Err(AudexError::InvalidData(format!(
"Vorbis comment count {} too large",
count
)));
}
for _ in 0..count {
file.read_exact(&mut len_buf).await?;
let comment_len = u32::from_le_bytes(len_buf) as u64;
if file.stream_position().await? + comment_len > file_end {
return Err(AudexError::InvalidData(
"Vorbis comment length exceeds file size".to_string(),
));
}
file.seek(SeekFrom::Current(comment_len as i64)).await?;
}
Ok(())
}
/// Async twin of `skip_picture_content`: advances past a PICTURE
/// payload (type, MIME, description, four fixed u32 fields, data),
/// validating every declared length against the file end first.
#[cfg(feature = "async")]
async fn skip_picture_content_async(file: &mut TokioFile) -> Result<()> {
use tokio::io::{AsyncReadExt, AsyncSeekExt};
let cur = file.stream_position().await?;
let file_end = file.seek(SeekFrom::End(0)).await?;
file.seek(SeekFrom::Start(cur)).await?;
let mut buf4 = [0u8; 4];
// Picture type (ignored here), then the MIME-type length.
file.read_exact(&mut buf4).await?;
file.read_exact(&mut buf4).await?;
let mime_len = u32::from_be_bytes(buf4) as u64;
if file.stream_position().await? + mime_len > file_end {
return Err(AudexError::InvalidData(
"Picture MIME length exceeds file size".to_string(),
));
}
file.seek(SeekFrom::Current(mime_len as i64)).await?;
file.read_exact(&mut buf4).await?;
let desc_len = u32::from_be_bytes(buf4) as u64;
if file.stream_position().await? + desc_len > file_end {
return Err(AudexError::InvalidData(
"Picture description length exceeds file size".to_string(),
));
}
file.seek(SeekFrom::Current(desc_len as i64)).await?;
// Four fixed u32 fields (16 bytes); bounds-check before skipping.
if file.stream_position().await? + 16 > file_end {
return Err(AudexError::InvalidData(
"Picture dimensions exceed file size".to_string(),
));
}
file.seek(SeekFrom::Current(16)).await?;
file.read_exact(&mut buf4).await?;
let data_len = u32::from_be_bytes(buf4) as u64;
if file.stream_position().await? + data_len > file_end {
return Err(AudexError::InvalidData(
"Picture data length exceeds file size".to_string(),
));
}
file.seek(SeekFrom::Current(data_len as i64)).await?;
Ok(())
}
}
/// Defaults to an empty FLAC instance (same as [`FLAC::new`]).
impl Default for FLAC {
    fn default() -> Self {
        Self::new()
    }
}
/// Defaults to an empty picture (same as [`Picture::new`]).
impl Default for Picture {
    fn default() -> Self {
        Self::new()
    }
}
/// Defaults to an empty seek table (same as [`SeekTable::new`]).
impl Default for SeekTable {
    fn default() -> Self {
        Self::new()
    }
}
/// Defaults to the CD-style cue sheet produced by [`CueSheet::new`].
impl Default for CueSheet {
    fn default() -> Self {
        Self::new()
    }
}
impl FileType for FLAC {
    type Tags = VCommentDict;
    type Info = FLACStreamInfo;

    /// Canonical identifier for this container format.
    fn format_id() -> &'static str {
        "FLAC"
    }

    /// Parses a FLAC file from disk.
    fn load<P: AsRef<Path>>(path: P) -> Result<Self> {
        Self::from_file(path)
    }

    /// Parses a FLAC stream from an arbitrary reader (streaming parse).
    fn load_from_reader(reader: &mut dyn crate::ReadSeek) -> Result<Self> {
        debug_event!("parsing FLAC file from reader");
        let mut flac = Self::new();
        let mut reader = reader;
        flac.parse_flac_streaming(&mut reader)?;
        Ok(flac)
    }

    /// Writes metadata back to the file this instance was loaded from.
    fn save(&mut self) -> Result<()> {
        debug_event!("saving FLAC metadata");
        self.save_to_file::<&str>(None, false, None)
    }

    /// Writes metadata to an arbitrary writer.
    fn save_to_writer(&mut self, writer: &mut dyn crate::ReadWriteSeek) -> Result<()> {
        self.save_to_writer_impl(writer, None)
    }

    /// Removes all tags and persists the change (no-op when untagged).
    fn clear(&mut self) -> Result<()> {
        if self.tags.is_some() {
            self.tags = None;
            self.dirty = true;
            self.save()?;
        }
        Ok(())
    }

    /// Removes all tags and writes the result to `writer`.
    /// Unlike [`Self::clear`], this always rewrites the stream, even when
    /// no tags were present.
    fn clear_writer(&mut self, writer: &mut dyn crate::ReadWriteSeek) -> Result<()> {
        if self.tags.is_some() {
            self.tags = None;
            self.dirty = true;
        }
        self.save_to_writer_impl(writer, None)
    }

    /// Writes metadata to a new path instead of the original file.
    fn save_to_path(&mut self, path: &Path) -> Result<()> {
        self.save_to_file(Some(path), false, None)
    }

    /// Installs an empty Vorbis comment block.
    ///
    /// # Errors
    /// `FLACVorbis` when tags already exist and `ignore_errors` is disabled;
    /// otherwise existing tags are silently replaced.
    fn add_tags(&mut self) -> Result<()> {
        if self.tags.is_some() && !self.parse_options.ignore_errors {
            return Err(AudexError::FLACVorbis);
        }
        self.tags = Some(VCommentDict::with_framing(false));
        self.dirty = true;
        Ok(())
    }

    fn tags(&self) -> Option<&Self::Tags> {
        self.tags.as_ref()
    }

    fn tags_mut(&mut self) -> Option<&mut Self::Tags> {
        self.tags.as_mut()
    }

    fn info(&self) -> &Self::Info {
        &self.info
    }

    /// Heuristic confidence that `filename`/`header` denote a FLAC file:
    /// +10 for a "fLaC" magic at offset 0 or anywhere after a leading
    /// ID3v2 tag, +3 for a `.flac` extension.
    fn score(filename: &str, header: &[u8]) -> i32 {
        let mut score = 0;
        if header.len() >= 4 {
            if &header[0..4] == b"fLaC" {
                score += 10;
            } else if header.len() >= 10 && &header[0..3] == b"ID3" {
                // Skip the 10-byte ID3v2 header and scan for the FLAC magic.
                // `windows(4)` visits every 4-byte window including the final
                // one; the previous manual loop (`10..len - 4`) stopped one
                // window early and found nothing at all for headers of
                // 14-17 bytes.
                if header[10..].windows(4).any(|w| w == b"fLaC") {
                    score += 10;
                }
            }
        }
        if filename.to_lowercase().ends_with(".flac") {
            score += 3;
        }
        score
    }

    fn mime_types() -> &'static [&'static str] {
        &["audio/flac", "audio/x-flac", "application/x-flac"]
    }
}
/// Decoded STREAMINFO metadata for a FLAC stream.
#[derive(Debug, Clone, Default)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct FLACStreamInfo {
    /// Total play time, derived from `total_samples / sample_rate`.
    #[cfg_attr(
        feature = "serde",
        serde(with = "crate::serde_helpers::duration_as_secs_f64")
    )]
    pub length: Option<Duration>,
    /// Uncompressed PCM bit rate in bits/s (rate * channels * bits per
    /// sample), not the on-disk compressed rate.
    pub bitrate: Option<u32>,
    /// Sample rate in Hz (stored as a 20-bit field; 0 is invalid).
    pub sample_rate: u32,
    /// Channel count (decoded from a 3-bit field plus one).
    pub channels: u16,
    /// Bits per sample (decoded from a 5-bit field plus one).
    pub bits_per_sample: u16,
    /// Total samples per channel (36-bit field).
    pub total_samples: u64,
    /// MD5 of the decoded audio, exactly as stored in STREAMINFO.
    pub md5_signature: [u8; 16],
    /// Minimum block size in samples.
    pub min_blocksize: u16,
    /// Maximum block size in samples.
    pub max_blocksize: u16,
    /// Minimum frame size in bytes (24-bit field).
    pub min_framesize: u32,
    /// Maximum frame size in bytes (24-bit field).
    pub max_framesize: u32,
}
/// Thin accessor layer exposing the parsed STREAMINFO fields through the
/// crate-wide [`StreamInfo`] trait.
impl StreamInfo for FLACStreamInfo {
    fn length(&self) -> Option<Duration> {
        self.length
    }
    fn bitrate(&self) -> Option<u32> {
        self.bitrate
    }
    /// Always `Some`; FLAC parsing rejects a zero sample rate up front.
    fn sample_rate(&self) -> Option<u32> {
        Some(self.sample_rate)
    }
    fn channels(&self) -> Option<u16> {
        Some(self.channels)
    }
    fn bits_per_sample(&self) -> Option<u16> {
        Some(self.bits_per_sample)
    }
}
impl FLACStreamInfo {
    /// Parses the fixed 34-byte STREAMINFO block body.
    ///
    /// # Errors
    /// `InvalidData` when the slice is not exactly 34 bytes or the encoded
    /// sample rate is zero.
    pub fn from_bytes(data: &[u8]) -> Result<Self> {
        if data.len() != 34 {
            return Err(AudexError::InvalidData(format!(
                "STREAMINFO must be exactly 34 bytes, got {}",
                data.len()
            )));
        }
        let mut cursor = Cursor::new(data);
        let min_blocksize = cursor.read_u16::<BigEndian>()?;
        let max_blocksize = cursor.read_u16::<BigEndian>()?;
        // Frame sizes are 24-bit big-endian; widen to u32 with a zero high byte.
        let min_framesize_bytes = [cursor.read_u8()?, cursor.read_u8()?, cursor.read_u8()?];
        let min_framesize = u32::from_be_bytes([
            0,
            min_framesize_bytes[0],
            min_framesize_bytes[1],
            min_framesize_bytes[2],
        ]);
        let max_framesize_bytes = [cursor.read_u8()?, cursor.read_u8()?, cursor.read_u8()?];
        let max_framesize = u32::from_be_bytes([
            0,
            max_framesize_bytes[0],
            max_framesize_bytes[1],
            max_framesize_bytes[2],
        ]);
        // The next 8 bytes pack, from the most significant bit down:
        // 20-bit sample rate, 3-bit (channels - 1), 5-bit (bits/sample - 1),
        // and a 36-bit total sample count.
        let combined = cursor.read_u64::<BigEndian>()?;
        let sample_rate = ((combined >> 44) & 0xFFFFF) as u32;
        let channels = (((combined >> 41) & 0x07) as u16) + 1;
        let bits_per_sample = (((combined >> 36) & 0x1F) as u16) + 1;
        let total_samples = combined & 0xFFFFFFFFF;
        let mut md5_signature = [0u8; 16];
        cursor.read_exact(&mut md5_signature)?;
        if sample_rate == 0 {
            return Err(AudexError::InvalidData(
                "A sample rate value of 0 is invalid".to_string(),
            ));
        }
        // Length is derived from the sample count; "bitrate" here is the
        // uncompressed PCM rate (rate * channels * bits), not the actual
        // compressed bit rate of the file.
        let (length, bitrate) = if sample_rate > 0 && total_samples > 0 {
            let duration_secs = total_samples as f64 / sample_rate as f64;
            let len = Some(Duration::from_secs_f64(duration_secs));
            let bits_per_second = sample_rate as u64 * channels as u64 * bits_per_sample as u64;
            let br = Some(u32::try_from(bits_per_second).unwrap_or(u32::MAX));
            (len, br)
        } else {
            // Unknown sample count: report zero length/bitrate rather than None.
            (Some(Duration::from_secs(0)), Some(0))
        };
        Ok(Self {
            length,
            bitrate,
            sample_rate,
            channels,
            bits_per_sample,
            total_samples,
            md5_signature,
            min_blocksize,
            max_blocksize,
            min_framesize,
            max_framesize,
        })
    }
    /// Serializes back to the 34-byte STREAMINFO layout (inverse of
    /// [`Self::from_bytes`]).
    pub fn to_bytes(&self) -> Result<Vec<u8>> {
        let mut buffer = Vec::with_capacity(34);
        let mut writer = Cursor::new(&mut buffer);
        writer.write_u16::<BigEndian>(self.min_blocksize)?;
        writer.write_u16::<BigEndian>(self.max_blocksize)?;
        // 24-bit frame sizes: the high byte of each u32 is dropped.
        // NOTE(review): values above 0xFFFFFF are silently truncated here.
        let min_frame_bytes = self.min_framesize.to_be_bytes();
        writer.write_all(&min_frame_bytes[1..])?;
        let max_frame_bytes = self.max_framesize.to_be_bytes();
        writer.write_all(&max_frame_bytes[1..])?;
        // Repack the 20/3/5/36-bit fields decoded in `from_bytes`.
        let combined = ((self.sample_rate as u64) << 44)
            | ((self.channels.saturating_sub(1) as u64) << 41)
            | ((self.bits_per_sample.saturating_sub(1) as u64) << 36)
            | (self.total_samples & 0xFFFFFFFFF);
        writer.write_u64::<BigEndian>(combined)?;
        writer.write_all(&self.md5_signature)?;
        Ok(buffer)
    }
    /// Alias for [`Self::to_bytes`].
    pub fn write(&self) -> Result<Vec<u8>> {
        self.to_bytes()
    }
}
/// A reader wrapper that tracks its own position and, when the underlying
/// stream's size can be determined, rejects reads/seeks past end-of-input
/// with descriptive errors instead of bare EOFs.
pub struct StrictReader<R: Read + Seek> {
    reader: R,
    // Mirror of the underlying stream position, kept in sync by read/seek.
    position: u64,
    // Total stream size probed at construction; None when unseekable.
    total_size: Option<u64>,
}
impl<R: Read + Seek> StrictReader<R> {
    /// Wraps `reader`, probing its total size (seek-to-end) so later reads
    /// and seeks can be bounds-checked, then rewinds to the start.
    /// If the size probe fails, bounds checking is disabled.
    pub fn new(mut reader: R) -> std::io::Result<Self> {
        let total_size = match reader.seek(SeekFrom::End(0)) {
            Ok(size) => {
                reader.seek(SeekFrom::Start(0))?;
                Some(size)
            }
            Err(_) => None,
        };
        Ok(Self {
            reader,
            position: 0,
            total_size,
        })
    }
    /// Like `Read::read_exact`, but fails up front — before touching the
    /// underlying reader — when the request would run past the known end.
    pub fn read_exact(&mut self, buf: &mut [u8]) -> std::io::Result<()> {
        let bytes_to_read = buf.len();
        if let Some(total_size) = self.total_size {
            if self.position + bytes_to_read as u64 > total_size {
                return Err(std::io::Error::new(
                    ErrorKind::UnexpectedEof,
                    format!(
                        "Attempted to read {} bytes at position {}, but only {} bytes available",
                        bytes_to_read,
                        self.position,
                        total_size - self.position
                    ),
                ));
            }
        }
        match self.reader.read_exact(buf) {
            Ok(()) => {
                self.position += bytes_to_read as u64;
                Ok(())
            }
            Err(e) => {
                // Enrich EOF errors with position context for diagnostics;
                // other I/O errors pass through untouched.
                if e.kind() == ErrorKind::UnexpectedEof {
                    Err(std::io::Error::new(
                        ErrorKind::UnexpectedEof,
                        format!(
                            "Unexpected EOF at position {} while reading {} bytes: {}",
                            self.position, bytes_to_read, e
                        ),
                    ))
                } else {
                    Err(e)
                }
            }
        }
    }
    /// Seeks the inner reader and mirrors the new position, rejecting
    /// positions beyond the known stream size.
    /// NOTE(review): when that rejection fires, `self.position` has already
    /// been advanced to the out-of-range offset — confirm callers treat
    /// this error as fatal.
    pub fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> {
        match self.reader.seek(pos) {
            Ok(new_pos) => {
                self.position = new_pos;
                if let Some(total_size) = self.total_size {
                    if new_pos > total_size {
                        return Err(std::io::Error::new(
                            ErrorKind::InvalidInput,
                            format!("Seek position {} exceeds file size {}", new_pos, total_size),
                        ));
                    }
                }
                Ok(new_pos)
            }
            Err(e) => Err(e),
        }
    }
    /// Returns the tracked position without querying the inner reader.
    pub fn stream_position(&self) -> std::io::Result<u64> {
        Ok(self.position)
    }
    /// Plain read that advances the tracked position (no bounds pre-check;
    /// short reads are allowed, matching `Read::read` semantics).
    pub fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        match self.reader.read(buf) {
            Ok(bytes_read) => {
                self.position += bytes_read as u64;
                Ok(bytes_read)
            }
            Err(e) => Err(e),
        }
    }
    /// True while the tracked position is before the known end; always true
    /// when the total size could not be determined.
    pub fn has_data_remaining(&self) -> bool {
        if let Some(total_size) = self.total_size {
            self.position < total_size
        } else {
            // Unknown size: optimistically assume more data remains.
            true
        }
    }
}
impl<R: Read + Seek> Read for StrictReader<R> {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        // Delegates to the inherent `read`; inherent methods win method
        // resolution over trait methods, so this is not self-recursive.
        self.read(buf)
    }
}
impl<R: Read + Seek> Seek for StrictReader<R> {
    fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> {
        // Delegates to the inherent `seek` (inherent methods take
        // precedence in method resolution, so no recursion here).
        self.seek(pos)
    }
}
/// An APPLICATION metadata block: a 4-byte application identifier followed
/// by application-specific payload bytes (opaque to this parser).
#[derive(Debug, Clone)]
pub struct ApplicationBlock {
    /// Four-byte application ID.
    pub application_id: [u8; 4],
    /// Raw application payload.
    pub data: Vec<u8>,
}
impl ApplicationBlock {
    /// Builds a block from a 4-byte application ID and its payload.
    pub fn new(application_id: [u8; 4], data: Vec<u8>) -> Self {
        Self {
            application_id,
            data,
        }
    }
    /// Parses an APPLICATION block body: 4-byte ID followed by opaque data.
    ///
    /// # Errors
    /// `InvalidData` when fewer than 4 bytes are supplied.
    pub fn from_bytes(data: &[u8]) -> Result<Self> {
        if data.len() < 4 {
            return Err(AudexError::InvalidData(
                "Application block too short".to_string(),
            ));
        }
        // Length was checked above, so these four indexes are in bounds.
        let application_id = [data[0], data[1], data[2], data[3]];
        Ok(Self {
            application_id,
            data: data[4..].to_vec(),
        })
    }
    /// Serializes back to the on-disk layout: ID bytes, then the payload.
    pub fn to_bytes(&self) -> Result<Vec<u8>> {
        Ok([&self.application_id[..], &self.data[..]].concat())
    }
    /// Returns the application ID as a string when it is valid UTF-8.
    pub fn application_id_str(&self) -> Option<String> {
        String::from_utf8(self.application_id.to_vec()).ok()
    }
    /// Serialized size in bytes: payload length plus the 4-byte ID.
    pub fn total_size(&self) -> usize {
        self.data.len() + 4
    }
}
/// A raw FLAC metadata block: type byte plus unparsed body bytes.
#[derive(Debug, Clone)]
pub struct MetadataBlock {
    /// Block type (0-126); the 0x80 "last block" flag is applied at write time.
    pub block_type: u8,
    /// Raw block body.
    pub data: Vec<u8>,
    /// When set, written into the header's 24-bit length field instead of
    /// `data.len()`, allowing a declared size that differs from the payload.
    pub override_header_size: Option<u32>,
}
impl MetadataBlock {
    /// Wraps raw block bytes with their FLAC block type.
    pub fn new(block_type: u8, data: Vec<u8>) -> Self {
        Self {
            block_type,
            data,
            override_header_size: None,
        }
    }
    /// Writes the block header (type byte + 24-bit big-endian length) and
    /// the body to `writer`. `is_last` sets the header's high bit, marking
    /// the final metadata block in the stream.
    ///
    /// # Errors
    /// `InvalidData` when the block type is out of range, or when the body
    /// exceeds the 24-bit length field and no override size is set.
    pub fn write_to<W: Write>(&self, writer: &mut W, is_last: bool) -> Result<()> {
        // Valid FLAC block types are 0-126; 127 is forbidden by the spec.
        if self.block_type >= 127 {
            return Err(AudexError::InvalidData(format!(
                "FLAC block type {} is out of valid range (0-126)",
                self.block_type
            )));
        }
        let header_byte = self.block_type | if is_last { 0x80 } else { 0x00 };
        writer.write_u8(header_byte)?;
        // Either the caller-declared size or the real payload length
        // (which must then fit the 24-bit field).
        let size: u32 = if let Some(overridden) = self.override_header_size {
            overridden
        } else {
            let sz = self.data.len() as u64;
            if sz > 0xFFFFFF {
                return Err(AudexError::InvalidData(format!(
                    "Block too large: {} bytes (max: {} bytes)",
                    sz, 0xFFFFFF
                )));
            }
            sz as u32
        };
        // 24-bit length: drop the high byte of the u32.
        let size_bytes = size.to_be_bytes();
        writer.write_all(&size_bytes[1..])?;
        // Large payloads are written in 64 KiB chunks — presumably to bound
        // individual write sizes; NOTE(review): a single `write_all` would
        // produce the same bytes.
        if self.data.len() > 1024 * 1024 {
            const CHUNK_SIZE: usize = 64 * 1024;
            for chunk in self.data.chunks(CHUNK_SIZE) {
                writer.write_all(chunk)?;
            }
        } else {
            writer.write_all(&self.data)?;
        }
        Ok(())
    }
}
/// One 18-byte SEEKTABLE entry.
#[derive(Debug, Clone, PartialEq)]
pub struct SeekPoint {
    /// Sample number of the first sample in the target frame.
    pub first_sample: u64,
    /// Byte offset of the target frame from the first frame.
    pub byte_offset: u64,
    /// Number of samples in the target frame.
    pub num_samples: u16,
}
impl SeekPoint {
    /// Sentinel `first_sample` value marking a placeholder seek point.
    const PLACEHOLDER_SAMPLE: u64 = u64::MAX;

    /// Builds a seek point from its three raw fields.
    pub fn new(first_sample: u64, byte_offset: u64, num_samples: u16) -> Self {
        SeekPoint {
            first_sample,
            byte_offset,
            num_samples,
        }
    }

    /// Builds a placeholder point (sentinel sample number, zero offset/count).
    pub fn placeholder() -> Self {
        SeekPoint {
            first_sample: Self::PLACEHOLDER_SAMPLE,
            byte_offset: 0,
            num_samples: 0,
        }
    }

    /// True when this entry is a placeholder rather than a real seek target.
    pub fn is_placeholder(&self) -> bool {
        self.first_sample == Self::PLACEHOLDER_SAMPLE
    }
}
/// SEEKTABLE metadata block: an ordered list of seek points.
#[derive(Debug, Clone, PartialEq)]
pub struct SeekTable {
    pub seekpoints: Vec<SeekPoint>,
}
impl SeekTable {
    /// Creates an empty seek table.
    pub fn new() -> Self {
        Self {
            seekpoints: Vec::new(),
        }
    }
    /// Parses a SEEKTABLE body with the default seek-point cap.
    pub fn from_bytes(data: &[u8]) -> Result<Self> {
        Self::from_bytes_with_options(data, None)
    }
    /// Parses a SEEKTABLE body: a sequence of 18-byte big-endian records
    /// (u64 first sample, u64 byte offset, u16 sample count). Trailing
    /// bytes that do not form a full record are ignored.
    ///
    /// # Errors
    /// `InvalidData` when the record count exceeds `max_seekpoints`
    /// (default 100000).
    pub fn from_bytes_with_options(data: &[u8], max_seekpoints: Option<usize>) -> Result<Self> {
        let mut cursor = Cursor::new(data);
        let mut seekpoints = Vec::new();
        // Cap the point count up front so a hostile block length cannot
        // force a huge allocation below.
        let max_points = max_seekpoints.unwrap_or(100000);
        let expected_points = data.len() / 18;
        if expected_points > max_points {
            return Err(AudexError::InvalidData(format!(
                "Too many seek points: {} (max: {})",
                expected_points, max_points
            )));
        }
        while cursor.position() + 18 <= data.len() as u64 {
            let first_sample = cursor.read_u64::<BigEndian>()?;
            let byte_offset = cursor.read_u64::<BigEndian>()?;
            let num_samples = cursor.read_u16::<BigEndian>()?;
            seekpoints.push(SeekPoint::new(first_sample, byte_offset, num_samples));
            // Defensive guard; unreachable in practice because the up-front
            // check already bounds the loop to at most `max_points` records.
            if seekpoints.len() > max_points {
                break;
            }
        }
        Ok(Self { seekpoints })
    }
    /// Serializes the seek points back to their 18-byte on-disk records.
    pub fn to_bytes(&self) -> Result<Vec<u8>> {
        let mut buffer = Vec::new();
        let mut writer = Cursor::new(&mut buffer);
        for seekpoint in &self.seekpoints {
            writer.write_u64::<BigEndian>(seekpoint.first_sample)?;
            writer.write_u64::<BigEndian>(seekpoint.byte_offset)?;
            writer.write_u16::<BigEndian>(seekpoint.num_samples)?;
        }
        Ok(buffer)
    }
    /// Alias for [`Self::to_bytes`].
    pub fn write(&self) -> Result<Vec<u8>> {
        self.to_bytes()
    }
}
/// One index point within a cue sheet track.
#[derive(Debug, Clone, PartialEq)]
pub struct CueSheetTrackIndex {
    /// Index point number within the track.
    pub index_number: u8,
    /// Index offset (in samples, per the FLAC spec — TODO confirm against callers).
    pub index_offset: u64,
}
/// One track entry in a CUESHEET block.
#[derive(Debug, Clone, PartialEq)]
pub struct CueSheetTrack {
    pub track_number: u8,
    /// Track start offset as stored on disk.
    pub start_offset: u64,
    /// ISRC code; empty when absent (NUL-padded to 12 bytes on disk).
    pub isrc: String,
    /// Single-bit track-type flag exactly as stored (bit 7 of the flags byte).
    pub track_type: u8,
    /// Pre-emphasis flag (bit 6 of the flags byte).
    pub pre_emphasis: bool,
    /// Index points belonging to this track.
    pub indexes: Vec<CueSheetTrackIndex>,
}
/// A CUESHEET metadata block.
#[derive(Debug, Clone, PartialEq)]
pub struct CueSheet {
    /// Media catalog number (NUL-padded to 128 bytes on disk).
    pub media_catalog_number: String,
    /// Number of lead-in samples.
    pub lead_in_samples: u64,
    /// True when the cue sheet describes a compact disc (high flag bit).
    pub is_compact_disc: bool,
    pub tracks: Vec<CueSheetTrack>,
}
impl CueSheet {
    /// Creates a cue sheet with CD-style defaults.
    pub fn new() -> Self {
        Self {
            media_catalog_number: String::new(),
            // 88200 samples = two seconds of 44.1 kHz audio.
            lead_in_samples: 88200,
            is_compact_disc: true,
            tracks: Vec::new(),
        }
    }
    /// Parses a CUESHEET block body.
    ///
    /// On-disk layout: 128-byte NUL-padded media catalog number, u64 lead-in
    /// sample count, a flags byte (high bit = compact disc), 258 reserved
    /// bytes, then a track count followed by that many track records.
    ///
    /// # Errors
    /// `InvalidData` when the track count exceeds 100; I/O errors when the
    /// slice is shorter than the declared structure.
    pub fn from_bytes(data: &[u8]) -> Result<Self> {
        let mut cursor = Cursor::new(data);
        let mut mcn_bytes = [0u8; 128];
        cursor.read_exact(&mut mcn_bytes)?;
        // Catalog number is NUL-padded; keep only the bytes before the
        // first NUL (lossy UTF-8 to tolerate odd bytes).
        let mcn_end = mcn_bytes.iter().position(|&x| x == 0).unwrap_or(128);
        let media_catalog_number = String::from_utf8_lossy(&mcn_bytes[..mcn_end]).into_owned();
        let lead_in_samples = cursor.read_u64::<BigEndian>()?;
        let flags = cursor.read_u8()?;
        let is_compact_disc = (flags & 0x80) != 0;
        // Skip the 258 reserved bytes between the flags and the track count.
        cursor.seek(SeekFrom::Current(258))?;
        let num_tracks = cursor.read_u8()?;
        const MAX_CUESHEET_TRACKS: u8 = 100;
        if num_tracks > MAX_CUESHEET_TRACKS {
            return Err(AudexError::InvalidData(format!(
                "CueSheet track count {} exceeds FLAC spec limit of {}",
                num_tracks, MAX_CUESHEET_TRACKS
            )));
        }
        let mut tracks = Vec::new();
        for _ in 0..num_tracks {
            // Track record: u64 start offset, u8 track number, 12-byte
            // NUL-padded ISRC, flags byte, 13 reserved bytes, index count.
            let start_offset = cursor.read_u64::<BigEndian>()?;
            let track_number = cursor.read_u8()?;
            let mut isrc_bytes = [0u8; 12];
            cursor.read_exact(&mut isrc_bytes)?;
            let isrc_end = isrc_bytes.iter().position(|&x| x == 0).unwrap_or(12);
            let isrc = String::from_utf8_lossy(&isrc_bytes[..isrc_end]).into_owned();
            let track_flags = cursor.read_u8()?;
            // Bit 7 carries the track type, bit 6 the pre-emphasis flag.
            let track_type = (track_flags >> 7) & 1;
            let pre_emphasis = (track_flags & 0x40) != 0;
            cursor.seek(SeekFrom::Current(13))?;
            let num_indexes = cursor.read_u8()?;
            let mut indexes = Vec::new();
            for _ in 0..num_indexes {
                // Index record: u64 offset, u8 index number, 3 reserved bytes.
                let index_offset = cursor.read_u64::<BigEndian>()?;
                let index_number = cursor.read_u8()?;
                cursor.seek(SeekFrom::Current(3))?;
                indexes.push(CueSheetTrackIndex {
                    index_number,
                    index_offset,
                });
            }
            tracks.push(CueSheetTrack {
                track_number,
                start_offset,
                isrc,
                track_type,
                pre_emphasis,
                indexes,
            });
        }
        Ok(Self {
            media_catalog_number,
            lead_in_samples,
            is_compact_disc,
            tracks,
        })
    }
    /// Serializes back to the CUESHEET on-disk layout (inverse of
    /// [`Self::from_bytes`]). Over-long catalog numbers and ISRCs are
    /// truncated to their fixed field widths.
    pub fn to_bytes(&self) -> Result<Vec<u8>> {
        let mut buffer = Vec::new();
        let mut writer = Cursor::new(&mut buffer);
        let mut mcn_bytes = [0u8; 128];
        let mcn_copy_len = self.media_catalog_number.len().min(128);
        mcn_bytes[..mcn_copy_len]
            .copy_from_slice(&self.media_catalog_number.as_bytes()[..mcn_copy_len]);
        writer.write_all(&mcn_bytes)?;
        writer.write_u64::<BigEndian>(self.lead_in_samples)?;
        let flags = if self.is_compact_disc { 0x80 } else { 0x00 };
        writer.write_u8(flags)?;
        // 258 reserved bytes.
        writer.write_all(&[0u8; 258])?;
        let track_count = u8::try_from(self.tracks.len()).map_err(|_| {
            AudexError::InvalidData(format!(
                "CueSheet track count {} exceeds maximum of 255",
                self.tracks.len()
            ))
        })?;
        writer.write_u8(track_count)?;
        for track in &self.tracks {
            writer.write_u64::<BigEndian>(track.start_offset)?;
            writer.write_u8(track.track_number)?;
            let mut isrc_bytes = [0u8; 12];
            let isrc_copy_len = track.isrc.len().min(12);
            isrc_bytes[..isrc_copy_len].copy_from_slice(&track.isrc.as_bytes()[..isrc_copy_len]);
            writer.write_all(&isrc_bytes)?;
            // Bit 7 = track type, bit 6 = pre-emphasis (mirrors `from_bytes`).
            let track_flags =
                (track.track_type << 7) | if track.pre_emphasis { 0x40 } else { 0x00 };
            writer.write_u8(track_flags)?;
            // 13 reserved bytes per track record.
            writer.write_all(&[0u8; 13])?;
            let index_count = u8::try_from(track.indexes.len()).map_err(|_| {
                AudexError::InvalidData(format!(
                    "CueSheet track index count {} exceeds maximum of 255",
                    track.indexes.len()
                ))
            })?;
            writer.write_u8(index_count)?;
            for index in &track.indexes {
                writer.write_u64::<BigEndian>(index.index_offset)?;
                writer.write_u8(index.index_number)?;
                // 3 reserved bytes per index record.
                writer.write_all(&[0u8; 3])?;
            }
        }
        Ok(buffer)
    }
    /// Alias for [`Self::to_bytes`].
    pub fn write(&self) -> Result<Vec<u8>> {
        self.to_bytes()
    }
}
/// A PADDING metadata block — `size` zero bytes reserved for future edits.
#[derive(Debug, Clone, PartialEq)]
pub struct Padding {
    /// Padding length in bytes.
    pub size: usize,
}
impl Padding {
pub fn new(size: usize) -> Self {
Self { size }
}
pub fn from_bytes(data: &[u8]) -> Result<Self> {
Ok(Self { size: data.len() })
}
pub fn to_bytes(&self) -> Result<Vec<u8>> {
Ok(vec![0u8; self.size])
}
pub fn write(&self) -> Result<Vec<u8>> {
self.to_bytes()
}
pub fn write_to<W: Write>(&self, writer: &mut W) -> Result<()> {
if self.size > 64 * 1024 {
const CHUNK_SIZE: usize = 64 * 1024;
let chunk = vec![0u8; CHUNK_SIZE];
let full_chunks = self.size / CHUNK_SIZE;
let remainder = self.size % CHUNK_SIZE;
for _ in 0..full_chunks {
writer.write_all(&chunk)?;
}
if remainder > 0 {
let remainder_chunk = vec![0u8; remainder];
writer.write_all(&remainder_chunk)?;
}
} else {
let padding_data = vec![0u8; self.size];
writer.write_all(&padding_data)?;
}
Ok(())
}
}
/// Defaults to zero-length padding.
impl Default for Padding {
    fn default() -> Self {
        Self::new(0)
    }
}
/// A PICTURE metadata block (embedded cover art and similar images).
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Picture {
    /// Picture type code exactly as stored in the block.
    pub picture_type: u32,
    /// MIME type of the image data.
    pub mime_type: String,
    /// Free-text description.
    pub description: String,
    /// Image width in pixels, as declared by the block.
    pub width: u32,
    /// Image height in pixels, as declared by the block.
    pub height: u32,
    /// Color depth in bits per pixel, as declared by the block.
    pub color_depth: u32,
    /// Number of colors used (indexed images), as declared by the block.
    pub colors_used: u32,
    /// Raw image bytes.
    #[cfg_attr(
        feature = "serde",
        serde(with = "crate::serde_helpers::bytes_as_base64")
    )]
    pub data: Vec<u8>,
}
impl Picture {
    /// Maximum serialized block size (24-bit metadata length field).
    pub const MAX_SIZE: usize = (1 << 24) - 1;

    /// Creates an empty picture (type 0, no MIME/description/data).
    pub fn new() -> Self {
        Self {
            picture_type: 0,
            mime_type: String::new(),
            description: String::new(),
            width: 0,
            height: 0,
            color_depth: 0,
            colors_used: 0,
            data: Vec::new(),
        }
    }

    /// Parses a PICTURE block body with the default size limits.
    pub fn from_bytes(data: &[u8]) -> Result<Self> {
        Self::from_bytes_with_options(data, None)
    }

    /// Parses a PICTURE block body from a slice.
    ///
    /// Every length field is validated before its payload is consumed, and
    /// the declared image dimensions are checked *before* the image payload
    /// is allocated and copied, so a corrupt header cannot cost a
    /// multi-megabyte allocation that is then thrown away.
    ///
    /// # Errors
    /// `InvalidData` on over-long MIME/description fields, invalid UTF-8,
    /// oversized dimensions or data, or a truncated payload.
    pub fn from_bytes_with_options(data: &[u8], max_picture_size: Option<usize>) -> Result<Self> {
        let mut cursor = Cursor::new(data);
        let picture_type = cursor.read_u32::<BigEndian>()?;
        let mime_len = cursor.read_u32::<BigEndian>()? as usize;
        if mime_len > 256 {
            return Err(AudexError::InvalidData(format!(
                "MIME type too long: {} bytes",
                mime_len
            )));
        }
        let mut mime_bytes = vec![0u8; mime_len];
        cursor.read_exact(&mut mime_bytes)?;
        let mime_type = String::from_utf8(mime_bytes)
            .map_err(|e| AudexError::InvalidData(format!("Invalid MIME type: {}", e)))?;
        let desc_len = cursor.read_u32::<BigEndian>()? as usize;
        if desc_len > 65536 {
            return Err(AudexError::InvalidData(format!(
                "Description too long: {} bytes",
                desc_len
            )));
        }
        let mut desc_bytes = vec![0u8; desc_len];
        cursor.read_exact(&mut desc_bytes)?;
        let description = String::from_utf8(desc_bytes)
            .map_err(|e| AudexError::InvalidData(format!("Invalid description: {}", e)))?;
        let width = cursor.read_u32::<BigEndian>()?;
        let height = cursor.read_u32::<BigEndian>()?;
        let color_depth = cursor.read_u32::<BigEndian>()?;
        let colors_used = cursor.read_u32::<BigEndian>()?;
        // Reject absurd dimensions before touching the image payload
        // (previously this check ran only after the data was copied).
        if width > 100000 || height > 100000 {
            return Err(AudexError::InvalidData(format!(
                "Image dimensions too large: {}x{}",
                width, height
            )));
        }
        let data_len = cursor.read_u32::<BigEndian>()? as usize;
        let global_limit = crate::limits::ParseLimits::default().max_image_size as usize;
        let max_size = max_picture_size.unwrap_or(global_limit);
        if data_len > max_size {
            return Err(AudexError::InvalidData(format!(
                "Picture data too large: {} bytes (max: {} bytes)",
                data_len, max_size
            )));
        }
        // An in-memory cursor's position never exceeds the slice length,
        // so this subtraction cannot underflow.
        let remaining = data.len() - cursor.position() as usize;
        if data_len > remaining {
            return Err(AudexError::InvalidData(format!(
                "Picture data truncated: expected {} bytes, have {} bytes",
                data_len, remaining
            )));
        }
        let mut picture_data = vec![0u8; data_len];
        cursor.read_exact(&mut picture_data)?;
        Ok(Self {
            picture_type,
            mime_type,
            description,
            width,
            height,
            color_depth,
            colors_used,
            data: picture_data,
        })
    }

    /// Parses a PICTURE block body from a streaming reader.
    ///
    /// NOTE(review): unlike the slice parser this cannot pre-validate the
    /// payload length against a known end-of-input (a short stream surfaces
    /// as an I/O error from `read_exact`), and it performs no dimension
    /// sanity check — confirm whether callers rely on that leniency.
    pub fn from_reader<R: Read>(reader: &mut R, max_picture_size: Option<usize>) -> Result<Self> {
        let picture_type = reader.read_u32::<BigEndian>()?;
        let mime_len = reader.read_u32::<BigEndian>()? as usize;
        if mime_len > 256 {
            return Err(AudexError::InvalidData(format!(
                "MIME type too long: {} bytes",
                mime_len
            )));
        }
        let mut mime_bytes = vec![0u8; mime_len];
        reader.read_exact(&mut mime_bytes)?;
        let mime_type = String::from_utf8(mime_bytes)
            .map_err(|e| AudexError::InvalidData(format!("Invalid MIME type: {}", e)))?;
        let desc_len = reader.read_u32::<BigEndian>()? as usize;
        if desc_len > 65536 {
            return Err(AudexError::InvalidData(format!(
                "Description too long: {} bytes",
                desc_len
            )));
        }
        let mut desc_bytes = vec![0u8; desc_len];
        reader.read_exact(&mut desc_bytes)?;
        let description = String::from_utf8(desc_bytes)
            .map_err(|e| AudexError::InvalidData(format!("Invalid description: {}", e)))?;
        let width = reader.read_u32::<BigEndian>()?;
        let height = reader.read_u32::<BigEndian>()?;
        let color_depth = reader.read_u32::<BigEndian>()?;
        let colors_used = reader.read_u32::<BigEndian>()?;
        let data_len = reader.read_u32::<BigEndian>()? as usize;
        let global_limit = crate::limits::ParseLimits::default().max_image_size as usize;
        let max_size = max_picture_size.unwrap_or(global_limit);
        if data_len > max_size {
            return Err(AudexError::InvalidData(format!(
                "Picture data too large: {} bytes (max: {} bytes)",
                data_len, max_size
            )));
        }
        let mut data = vec![0u8; data_len];
        reader.read_exact(&mut data)?;
        Ok(Self {
            picture_type,
            mime_type,
            description,
            width,
            height,
            color_depth,
            colors_used,
            data,
        })
    }

    /// Serializes to the PICTURE block body layout (inverse of
    /// [`Self::from_bytes`]): big-endian u32 fields with length-prefixed
    /// MIME type, description, and image data.
    pub fn to_bytes(&self) -> Result<Vec<u8>> {
        let mut buffer = Vec::new();
        let mut writer = Cursor::new(&mut buffer);
        writer.write_u32::<BigEndian>(self.picture_type)?;
        let mime_bytes = self.mime_type.as_bytes();
        writer.write_u32::<BigEndian>(mime_bytes.len() as u32)?;
        writer.write_all(mime_bytes)?;
        let desc_bytes = self.description.as_bytes();
        writer.write_u32::<BigEndian>(desc_bytes.len() as u32)?;
        writer.write_all(desc_bytes)?;
        writer.write_u32::<BigEndian>(self.width)?;
        writer.write_u32::<BigEndian>(self.height)?;
        writer.write_u32::<BigEndian>(self.color_depth)?;
        writer.write_u32::<BigEndian>(self.colors_used)?;
        writer.write_u32::<BigEndian>(self.data.len() as u32)?;
        writer.write_all(&self.data)?;
        Ok(buffer)
    }

    /// Alias for [`Self::to_bytes`].
    pub fn write(&self) -> Result<Vec<u8>> {
        self.to_bytes()
    }
}
/// Convenience helper: loads the FLAC file at `path`, strips its tags, and
/// saves the result back to disk.
pub fn clear<P: AsRef<Path>>(path: P) -> Result<()> {
    FLAC::load(path)?.clear()
}