use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::io::{self, Seek, SeekFrom, Write, Read};
use std::path::{Path, PathBuf};
use std::fs::File;
use std::io::Take;
use byteorder::{ReadBytesExt, WriteBytesExt};
use flate2::read::{GzDecoder, ZlibDecoder};
use flate2::write::ZlibEncoder;
use flate2::Compression;
use crate::io::{ReadJavaExt, WriteJavaExt};
/// Index of chunk `(cx, cz)` inside the 32x32 region header tables.
///
/// Only the low five bits of each coordinate matter; the result is
/// `cz * 32 + cx`, in `0..1024`.
#[inline]
fn calc_chunk_meta_index(cx: i32, cz: i32) -> usize {
    let x = (cx & 31) as usize;
    let z = (cz & 31) as usize;
    z * 32 + x
}
/// A zero-filled 4 KiB sector, used for blank headers and end-of-record
/// padding. (`'static` is implied on a `const` item and was redundant.)
const EMPTY_SECTOR: &[u8; 4096] = &[0; 4096];
/// A directory of region files, lazily opening and caching one [`Region`]
/// per region coordinate.
pub struct RegionDir {
    // Directory containing the `r.<rx>.<rz>.mcr` files.
    path: PathBuf,
    // Opened regions keyed by region coordinates (chunk coordinates >> 5).
    cache: HashMap<(i32, i32), Region<File>>,
}
impl RegionDir {
    /// Create a region directory rooted at `path`; no file is opened yet.
    pub fn new(path: impl Into<PathBuf>) -> Self {
        Self {
            path: path.into(),
            cache: HashMap::new(),
        }
    }
    /// Return the cached region containing chunk `(cx, cz)`, opening (and,
    /// when `create` is true, creating) the backing file on first access.
    pub fn ensure_region(&mut self, cx: i32, cz: i32, create: bool) -> Result<&mut Region<File>, RegionError> {
        let key = (cx >> 5, cz >> 5);
        if let Entry::Vacant(vacant) = self.cache.entry(key) {
            let file_name = format!("r.{}.{}.mcr", key.0, key.1);
            vacant.insert(Region::open(self.path.join(file_name), create)?);
        }
        Ok(self.cache.get_mut(&key).expect("region cached just above"))
    }
}
/// A single region file: a two-sector header (1024 sector ranges plus 1024
/// timestamps) followed by 4 KiB data sectors.
pub struct Region<I> {
    // Backing stream (a `File` in practice; anything Read + Write + Seek).
    inner: I,
    // Per-chunk metadata mirroring the on-disk header tables.
    chunks: Box<[ChunkMeta; 1024]>,
    // Sector usage bitmap: bit `offset % 64` of word `offset / 64` marks
    // sector `offset` as allocated (see the `/ 64` indexing in `new` and
    // `write_chunk_data`).
    sectors: Vec<u64>,
}
impl Region<File> {
    /// Open the region file at `path`, creating it (and any missing parent
    /// directories) when `create` is true.
    pub fn open<P: AsRef<Path>>(path: P, create: bool) -> Result<Self, RegionError> {
        let path = path.as_ref();
        if create {
            if let Some(parent) = path.parent() {
                std::fs::create_dir_all(parent)?;
            }
        }
        let mut options = File::options();
        options.read(true).write(true).create(create);
        Self::new(options.open(path)?, create)
    }
}
impl<I> Region<I>
where
    I: Read + Write + Seek,
{
    /// Parse an already-opened region stream.
    ///
    /// Validates the file size, then reads the two 4 KiB header sectors:
    /// 1024 big-endian sector ranges followed by 1024 timestamps. When
    /// `create` is true and the stream is empty, blank headers are written
    /// first.
    ///
    /// # Errors
    /// [`RegionError::FileTooSmall`] / [`RegionError::FileNotPadded`] for a
    /// malformed file size, and [`RegionError::IllegalRange`] when a chunk
    /// range runs past the end of the file, claims a sector twice, or claims
    /// one of the two reserved header sectors.
    pub fn new(mut inner: I, create: bool) -> Result<Self, RegionError> {
        let mut file_len = inner.seek(SeekFrom::End(0))?;
        if file_len == 0 && create {
            // Brand-new file: reserve the two header sectors.
            for _ in 0..2 {
                inner.write_all(EMPTY_SECTOR)?;
            }
            file_len = 8192;
        } else if file_len < 8192 {
            return Err(RegionError::FileTooSmall(file_len));
        } else if file_len % 4096 != 0 {
            return Err(RegionError::FileNotPadded(file_len));
        }
        let mut chunks: Box<[ChunkMeta; 1024]> = Box::new([ChunkMeta::INIT; 1024]);
        let sector_total = file_len / 4096;
        // Usage bitmap: one BIT per sector, so ceil(sector_total / 64) words.
        // (The previous code allocated one u64 PER SECTOR — 64x too large —
        // even though every access indexes with `offset / 64`.)
        let mut sectors = vec![0u64; (sector_total as usize + 63) / 64];
        // The two header sectors are always occupied.
        sectors[0] |= 0b11;
        inner.seek(SeekFrom::Start(0))?;
        for i in 0..1024 {
            let range_raw = inner.read_java_int()? as u32;
            let range = SectorRange {
                offset: range_raw >> 8,
                count: range_raw & 0xFF,
            };
            chunks[i].range = range;
            for offset in range.offset..range.offset + range.count {
                // Reject ranges past the end of the file, sectors claimed by
                // two chunks, and claims on the reserved header sectors
                // (pre-marked above) — all "illegal range" corruption.
                if u64::from(offset) >= sector_total {
                    return Err(RegionError::IllegalRange);
                }
                let bit = 1u64 << (offset % 64);
                let slot = &mut sectors[offset as usize / 64];
                if *slot & bit != 0 {
                    return Err(RegionError::IllegalRange);
                }
                *slot |= bit;
            }
        }
        // The timestamp table immediately follows the range table, so no
        // seek is needed between the two loops.
        for i in 0..1024 {
            chunks[i].timestamp = inner.read_java_int()? as u32;
        }
        Ok(Self {
            inner,
            chunks,
            sectors,
        })
    }
    /// Copy of the header metadata for chunk `(cx, cz)`.
    fn get_chunk_meta(&self, cx: i32, cz: i32) -> ChunkMeta {
        self.chunks[calc_chunk_meta_index(cx, cz)]
    }
    /// Store `chunk` in the in-memory table and write both header entries
    /// (sector range and timestamp) back to the file.
    fn set_chunk_meta_and_sync(&mut self, cx: i32, cz: i32, chunk: ChunkMeta) -> io::Result<()> {
        let index = calc_chunk_meta_index(cx, cz);
        // Header entry layout: 24-bit sector offset | 8-bit sector count.
        let range_raw = chunk.range.offset << 8 | chunk.range.count & 0xFF;
        let header_offset = index as u64 * 4;
        self.inner.seek(SeekFrom::Start(header_offset))?;
        self.inner.write_java_int(range_raw as i32)?;
        self.chunks[index] = chunk;
        // The timestamp table lives in the second header sector, 4 KiB on.
        self.inner.seek(SeekFrom::Start(header_offset + 4096))?;
        self.inner.write_java_int(chunk.timestamp as i32)?;
        Ok(())
    }
    /// Open a streaming reader over the decompressed payload of chunk
    /// `(cx, cz)`.
    ///
    /// # Errors
    /// [`RegionError::EmptyChunk`] when no sectors are allocated,
    /// [`RegionError::IllegalRange`] when the header or on-disk length is
    /// inconsistent, and [`RegionError::IllegalCompression`] for an unknown
    /// compression id.
    pub fn read_chunk(&mut self, cx: i32, cz: i32) -> Result<ChunkReader<'_, I>, RegionError> {
        let chunk = self.get_chunk_meta(cx, cz);
        if chunk.is_empty() {
            return Err(RegionError::EmptyChunk);
        }
        if chunk.range.offset < 2 {
            // Sectors 0 and 1 hold the header tables.
            return Err(RegionError::IllegalRange);
        }
        self.inner.seek(SeekFrom::Start(chunk.range.offset as u64 * 4096))?;
        // The length prefix counts the compression id byte plus the payload,
        // and the whole record (prefix included) must fit its sector range.
        let chunk_size = self.inner.read_java_int()?;
        if chunk_size <= 0 || chunk_size as u32 + 4 > chunk.range.count * 4096 {
            return Err(RegionError::IllegalRange);
        }
        let compression_id = self.inner.read_u8()?;
        let chunk_size = chunk_size as u64 - 1;
        let chunk_data = Read::take(&mut self.inner, chunk_size);
        let inner = match compression_id {
            1 => ChunkReaderInner::Gz(GzDecoder::new(chunk_data)),
            2 => ChunkReaderInner::Zlib(ZlibDecoder::new(chunk_data)),
            _ => return Err(RegionError::IllegalCompression),
        };
        Ok(ChunkReader { inner })
    }
    /// Begin writing chunk `(cx, cz)`: data is zlib-compressed in memory and
    /// only committed to the file by [`ChunkWriter::flush_chunk`].
    pub fn write_chunk(&mut self, cx: i32, cz: i32) -> ChunkWriter<'_, I> {
        ChunkWriter {
            cx,
            cz,
            encoder: ZlibEncoder::new(Vec::new(), Compression::best()),
            region: self,
        }
    }
    /// Allocate sectors for `data`, update the header, and write the chunk
    /// record: 4-byte big-endian length, compression id byte, payload, then
    /// zero padding up to the sector boundary.
    fn write_chunk_data(&mut self, cx: i32, cz: i32, compression_id: u8, data: &[u8]) -> Result<(), RegionError> {
        // Record size is data.len() + 5 (length prefix + id byte); this is
        // ceil((data.len() + 5) / 4096).
        let sector_count = (data.len() + 5 - 1) as u32 / 4096 + 1;
        if sector_count > 0xFF {
            // The header stores the sector count in a single byte.
            return Err(RegionError::OutOfSector);
        }
        let mut chunk = self.get_chunk_meta(cx, cz);
        assert!(chunk.range.count == 0 || chunk.range.offset >= 2, "previous chunk metadata uses reserved sectors");
        debug_assert!(sector_count != 0);
        if sector_count != chunk.range.count {
            let mut clear_range = chunk.range;
            if sector_count < chunk.range.count {
                // Shrinking in place: keep the leading sectors, free the tail.
                clear_range.offset += sector_count;
                clear_range.count -= sector_count;
                chunk.range.count = sector_count;
            }
            if clear_range.count != 0 {
                // Zero the freed sectors on disk and drop them from the bitmap.
                self.inner.seek(SeekFrom::Start(clear_range.offset as u64 * 4096))?;
                for offset in clear_range.offset..clear_range.offset + clear_range.count {
                    let slot = &mut self.sectors[offset as usize / 64];
                    *slot &= !(1u64 << (offset % 64));
                    self.inner.write_all(EMPTY_SECTOR)?;
                }
            }
            if sector_count > chunk.range.count {
                // First-fit scan of the bitmap for a free run; when no run is
                // found, the range ends up pointing past the last used sector
                // and the file simply grows.
                debug_assert_eq!(self.sectors[0] & 0b11, 0b11);
                let mut new_range = SectorRange::default();
                'out: for (slot_index, mut slot) in self.sectors.iter().copied().enumerate() {
                    if slot != u64::MAX {
                        for bit_index in 0usize..64 {
                            if slot & 1 == 0 {
                                new_range.count += 1;
                                if new_range.count == sector_count {
                                    break 'out;
                                }
                            } else {
                                // Used sector: restart the run just after it.
                                new_range.offset = slot_index as u32 * 64 + bit_index as u32 + 1;
                                new_range.count = 0;
                            }
                            slot >>= 1;
                        }
                    } else {
                        // Fully-occupied word: restart after it.
                        new_range.offset = slot_index as u32 * 64 + 64;
                        new_range.count = 0;
                    }
                }
                assert!(new_range.offset >= 2, "allocating reserved sectors");
                new_range.count = sector_count;
                // Mark the chosen run as used, growing the bitmap as needed.
                for offset in new_range.offset..new_range.offset + new_range.count {
                    let slot_index = offset as usize / 64;
                    if let Some(slot) = self.sectors.get_mut(slot_index) {
                        *slot |= 1u64 << (offset % 64);
                    } else {
                        debug_assert_eq!(slot_index, self.sectors.len());
                        self.sectors.push(1u64 << (offset % 64));
                    }
                }
                chunk.range = new_range;
            }
        }
        assert!(chunk.range.offset >= 2, "allocating reserved sectors");
        assert!(chunk.range.count != 0, "allocating zero sector");
        self.set_chunk_meta_and_sync(cx, cz, chunk)?;
        self.inner.seek(SeekFrom::Start(chunk.range.offset as u64 * 4096))?;
        // The length prefix counts the compression id byte plus the payload.
        self.inner.write_java_int(data.len() as i32 + 1)?;
        self.inner.write_u8(compression_id)?;
        self.inner.write_all(data)?;
        let total_len = data.len() + 4 + 1;
        // Pad the last sector with zeros. When the record already ends on a
        // sector boundary there is nothing to pad: the previous computation
        // (`4096 - total_len % 4096`) yielded a full 4096-byte pad in that
        // case, overrunning the allocated range into the next sector.
        let padding_len = (4096 - total_len % 4096) % 4096;
        self.inner.write_all(&EMPTY_SECTOR[..padding_len])?;
        self.inner.flush()?;
        Ok(())
    }
}
/// Streaming reader over one chunk's decompressed payload; borrows the
/// region mutably until dropped.
pub struct ChunkReader<'region, I> {
    inner: ChunkReaderInner<'region, I>,
}
// Decoder selected from the chunk record's compression id
// (1 = gzip, 2 = zlib); both read from a length-limited `Take` so the
// decoder cannot run past the chunk record.
enum ChunkReaderInner<'region, I> {
    Gz(GzDecoder<Take<&'region mut I>>),
    Zlib(ZlibDecoder<Take<&'region mut I>>),
}
impl<I> Read for ChunkReader<'_, I>
where
    I: Read + Write + Seek,
{
    /// Delegate to whichever decoder the chunk's compression id selected.
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        match &mut self.inner {
            ChunkReaderInner::Gz(decoder) => decoder.read(buf),
            ChunkReaderInner::Zlib(decoder) => decoder.read(buf),
        }
    }
    /// Also forwarded so the decoder's own `read_exact` is used directly.
    #[inline]
    fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
        match &mut self.inner {
            ChunkReaderInner::Gz(decoder) => decoder.read_exact(buf),
            ChunkReaderInner::Zlib(decoder) => decoder.read_exact(buf),
        }
    }
}
/// In-memory compressing writer for one chunk; nothing touches the file
/// until [`ChunkWriter::flush_chunk`] commits the compressed data.
pub struct ChunkWriter<'region, I> {
    cx: i32,
    cz: i32,
    // Compresses into a Vec so the final size is known before sectors are
    // allocated.
    encoder: ZlibEncoder<Vec<u8>>,
    region: &'region mut Region<I>,
}
impl<I> ChunkWriter<'_, I>
where
    I: Read + Write + Seek,
{
    /// Finish the zlib stream and write the compressed chunk into the region.
    ///
    /// Uses `finish()` rather than the previous `flush_finish()`: per the
    /// flate2 documentation, `flush_finish` only performs a sync flush and
    /// does NOT close the deflate stream (no final block, no Adler-32
    /// trailer), so a `ZlibDecoder` reading the chunk back would see a
    /// truncated stream.
    pub fn flush_chunk(self) -> Result<(), RegionError> {
        let compressed = self.encoder.finish()?;
        // 2 is the zlib compression id in the chunk record header.
        self.region.write_chunk_data(self.cx, self.cz, 2, &compressed)
    }
}
impl<I> Write for ChunkWriter<'_, I> {
    /// Feed uncompressed chunk payload into the zlib encoder.
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        Write::write(&mut self.encoder, buf)
    }
    /// Deliberate no-op: the compressed data is only committed to the region
    /// file by `flush_chunk`.
    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
/// Per-chunk bookkeeping mirrored from the region header: where the chunk's
/// record lives on disk and when it was last written.
#[derive(Debug, Clone, Copy)]
struct ChunkMeta {
    // Allocated sector span; an empty range means "chunk not present".
    range: SectorRange,
    // Last-modification timestamp from the second header sector.
    timestamp: u32,
}
impl ChunkMeta {
    /// Metadata for an absent chunk: no sectors, zero timestamp.
    const INIT: Self = Self {
        range: SectorRange { offset: 0, count: 0 },
        timestamp: 0,
    };
    /// True when no sectors are allocated for this chunk.
    fn is_empty(self) -> bool {
        self.range.is_empty()
    }
}
/// A contiguous run of 4 KiB sectors inside the region file.
#[derive(Debug, Clone, Copy, Default)]
struct SectorRange {
    // Index of the first sector (sectors 0 and 1 are the header).
    offset: u32,
    // Number of sectors in the run; 0 marks an empty range.
    count: u32,
}
impl SectorRange {
    /// An empty range allocates no sectors at all.
    fn is_empty(self) -> bool {
        self.count == 0
    }
}
/// Errors returned by [`Region`] and [`RegionDir`] operations.
#[derive(thiserror::Error, Debug)]
pub enum RegionError {
    /// Underlying I/O failure from the backing file or stream.
    #[error("io: {0}")]
    Io(#[from] io::Error),
    /// File shorter than the two mandatory 4 KiB header sectors.
    #[error("the region file size ({0}) is too short to store the two 4K header sectors")]
    FileTooSmall(u64),
    /// File length is not a whole number of 4 KiB sectors.
    #[error("the region file size ({0}) is not a multiple of 4K")]
    FileNotPadded(u64),
    /// A chunk's sector range is inconsistent with the file layout.
    #[error("the region file has an invalid chunk range, likely out of range or colliding with another one")]
    IllegalRange,
    /// The requested chunk has no data stored.
    #[error("the required chunk is empty, it has no sector allocated in the region file")]
    EmptyChunk,
    /// Compression id in the chunk record is neither gzip (1) nor zlib (2).
    #[error("the compression method in the chunk header is illegal")]
    IllegalCompression,
    /// The chunk would need more than 255 sectors (the header's 8-bit limit).
    #[error("no more sectors are available in the region file, really unlikely to happen")]
    OutOfSector,
}