1use crate::error::{BedrockWorldError, Result};
9use crate::nbt::{NbtTag, parse_consecutive_root_nbt, parse_root_nbt_with_consumed};
10use bytes::Bytes;
11use indexmap::IndexMap;
12use serde::{Deserialize, Serialize};
13use std::collections::BTreeMap;
14
// Upper bound on decoded palette entries per block storage; equals the block
// count of one subchunk (16 * 16 * 16), since more entries than blocks is bogus.
const MAX_SUBCHUNK_PALETTE_LEN: usize = 4096;
/// Blocks in a legacy (pre-subchunk) terrain column: 16 x 128 x 16.
pub const LEGACY_TERRAIN_BLOCK_COUNT: usize = 16 * 128 * 16;
/// Exact byte length of a `LegacyTerrain` record value: block ids (32768)
/// + three nibble arrays (3 * 16384) + heightmap (256) + biome samples (1024).
pub const LEGACY_TERRAIN_VALUE_LEN: usize = 83_200;
/// Blocks in one 16 x 16 x 16 subchunk.
pub const LEGACY_SUBCHUNK_BLOCK_COUNT: usize = 16 * 16 * 16;
/// Minimum legacy subchunk value length: version byte + block ids + data nibbles.
pub const LEGACY_SUBCHUNK_MIN_VALUE_LEN: usize =
    1 + LEGACY_SUBCHUNK_BLOCK_COUNT + LEGACY_SUBCHUNK_BLOCK_COUNT / 2;
/// Legacy subchunk length when sky-light and block-light nibble arrays follow.
pub const LEGACY_SUBCHUNK_WITH_LIGHT_VALUE_LEN: usize =
    LEGACY_SUBCHUNK_MIN_VALUE_LEN + LEGACY_SUBCHUNK_BLOCK_COUNT;

// Byte offsets of each section inside a LegacyTerrain value, in file order:
// block ids, block-data nibbles, sky-light nibbles, block-light nibbles,
// 16x16 heightmap, then 4-byte biome samples.
const LEGACY_TERRAIN_BLOCK_DATA_OFFSET: usize = LEGACY_TERRAIN_BLOCK_COUNT;
const LEGACY_TERRAIN_SKY_LIGHT_OFFSET: usize =
    LEGACY_TERRAIN_BLOCK_DATA_OFFSET + LEGACY_TERRAIN_BLOCK_COUNT / 2;
const LEGACY_TERRAIN_BLOCK_LIGHT_OFFSET: usize =
    LEGACY_TERRAIN_SKY_LIGHT_OFFSET + LEGACY_TERRAIN_BLOCK_COUNT / 2;
const LEGACY_TERRAIN_HEIGHTMAP_OFFSET: usize =
    LEGACY_TERRAIN_BLOCK_LIGHT_OFFSET + LEGACY_TERRAIN_BLOCK_COUNT / 2;
const LEGACY_TERRAIN_BIOME_OFFSET: usize = LEGACY_TERRAIN_HEIGHTMAP_OFFSET + 16 * 16;
37
/// World dimension, with a catch-all for unrecognized numeric ids.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub enum Dimension {
    /// Dimension id 0.
    Overworld,
    /// Dimension id 1.
    Nether,
    /// Dimension id 2.
    End,
    /// Any id other than 0, 1 or 2 (see [`Dimension::from_id`]).
    Unknown(i32),
}
50
/// Chunk format generation; selects the Overworld height range
/// (`Old` = 0..=255, `New` = -64..=319 — see [`ChunkPos::y_range`]).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum ChunkVersion {
    /// Pre-extended-height chunk layout.
    Old,
    /// Extended-height chunk layout.
    New,
}
59
/// Absolute world-space block position.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct BlockPos {
    pub x: i32,
    pub y: i32,
    pub z: i32,
}
70
71impl Dimension {
72 #[must_use]
73 pub const fn id(self) -> i32 {
75 match self {
76 Self::Overworld => 0,
77 Self::Nether => 1,
78 Self::End => 2,
79 Self::Unknown(value) => value,
80 }
81 }
82
83 #[must_use]
84 pub const fn from_id(id: i32) -> Self {
86 match id {
87 0 => Self::Overworld,
88 1 => Self::Nether,
89 2 => Self::End,
90 value => Self::Unknown(value),
91 }
92 }
93}
94
/// Chunk coordinates (block position divided by 16) plus the owning dimension.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub struct ChunkPos {
    pub x: i32,
    pub z: i32,
    pub dimension: Dimension,
}
105
106impl ChunkPos {
107 #[must_use]
108 pub const fn y_range(self, version: ChunkVersion) -> (i32, i32) {
110 match self.dimension {
111 Dimension::Nether => (0, 127),
112 Dimension::End => (0, 255),
113 Dimension::Overworld => match version {
114 ChunkVersion::Old => (0, 255),
115 ChunkVersion::New => (-64, 319),
116 },
117 Dimension::Unknown(_) => (0, -1),
118 }
119 }
120
121 #[must_use]
122 pub const fn subchunk_index_range(self, version: ChunkVersion) -> (i8, i8) {
124 match self.dimension {
125 Dimension::Nether => (0, 7),
126 Dimension::End => (0, 15),
127 Dimension::Overworld => match version {
128 ChunkVersion::Old => (0, 15),
129 ChunkVersion::New => (-4, 19),
130 },
131 Dimension::Unknown(_) => (0, -1),
132 }
133 }
134
135 #[must_use]
136 pub const fn min_block_pos(self, version: ChunkVersion) -> BlockPos {
138 let (min_y, _) = self.y_range(version);
139 BlockPos {
140 x: self.x * 16,
141 y: min_y,
142 z: self.z * 16,
143 }
144 }
145
146 #[must_use]
147 pub const fn max_block_pos(self, version: ChunkVersion) -> BlockPos {
149 let (_, max_y) = self.y_range(version);
150 BlockPos {
151 x: self.x * 16 + 15,
152 y: max_y,
153 z: self.z * 16 + 15,
154 }
155 }
156}
157
158impl BlockPos {
159 #[must_use]
160 pub const fn to_chunk_pos(self, dimension: Dimension) -> ChunkPos {
162 let x = if self.x < 0 { self.x - 15 } else { self.x } / 16;
163 let z = if self.z < 0 { self.z - 15 } else { self.z } / 16;
164 ChunkPos { x, z, dimension }
165 }
166
167 #[must_use]
168 pub const fn in_chunk_offset(self) -> (u8, i32, u8) {
170 let mut x = self.x % 16;
171 let mut z = self.z % 16;
172 if x < 0 {
173 x += 16;
174 }
175 if z < 0 {
176 z += 16;
177 }
178 (x as u8, self.y, z as u8)
179 }
180}
181
/// Flattens subchunk-local coordinates into the canonical storage index:
/// `x * 256 + z * 16 + y` (x-major, then z, then y).
///
/// Callers are expected to pass coordinates in `0..16`; out-of-range inputs
/// simply produce an index past 4095, which bounds-checked callers (e.g.
/// `BlockPalette::palette_index_at`) reject via `slice::get`.
///
/// Made `const` so it can be used in constant contexts, consistent with the
/// other `const fn`s in this module (`usize::from` is not const-callable, so
/// plain lossless `as` widening casts are used instead).
#[must_use]
pub const fn block_storage_index(local_x: u8, local_y: u8, local_z: u8) -> usize {
    (local_x as usize) * 256 + (local_z as usize) * 16 + (local_y as usize)
}
187
/// One-byte record tag that terminates a chunk database key.
/// Byte values are defined by [`ChunkRecordTag::byte`] /
/// [`ChunkRecordTag::from_byte`]; each variant's value is noted below.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum ChunkRecordTag {
    /// 0x2b.
    Data3D,
    /// 0x2d.
    Data2D,
    /// 0x2e.
    Data2DLegacy,
    /// 0x2f. Key additionally carries a trailing subchunk-Y byte.
    SubChunkPrefix,
    /// 0x30.
    LegacyTerrain,
    /// 0x31.
    BlockEntity,
    /// 0x32.
    Entity,
    /// 0x33.
    PendingTicks,
    /// 0x34.
    BlockExtraData,
    /// 0x35.
    BiomeState,
    /// 0x36.
    FinalizedState,
    /// 0x37.
    ConversionData,
    /// 0x38.
    BorderBlocks,
    /// 0x39.
    HardcodedSpawners,
    /// 0x3a.
    RandomTicks,
    /// 0x3b.
    Checksums,
    /// 0x3c.
    GenerationSeed,
    /// 0x3f.
    MetaDataHash,
    /// 0x3d.
    GeneratedPreCavesAndCliffsBlending,
    /// 0x3e.
    BlendingBiomeHeight,
    /// 0x40.
    BlendingData,
    /// 0x41.
    ActorDigestVersion,
    /// 0x2c.
    Version,
    /// 0x76.
    VersionOld,
    /// 0x77.
    LegacyVersion,
    /// Any unmapped tag byte, preserved for lossless round-tripping.
    Unknown(u8),
}
244
impl ChunkRecordTag {
    /// On-disk tag byte for this record kind.
    ///
    /// `Unknown` round-trips whatever byte it was decoded from, so
    /// `from_byte(t.byte()) == t` for every tag.
    #[must_use]
    pub const fn byte(self) -> u8 {
        match self {
            Self::Data3D => 0x2b,
            Self::Version => 0x2c,
            Self::Data2D => 0x2d,
            Self::Data2DLegacy => 0x2e,
            Self::SubChunkPrefix => 0x2f,
            Self::LegacyTerrain => 0x30,
            Self::BlockEntity => 0x31,
            Self::Entity => 0x32,
            Self::PendingTicks => 0x33,
            Self::BlockExtraData => 0x34,
            Self::BiomeState => 0x35,
            Self::FinalizedState => 0x36,
            Self::ConversionData => 0x37,
            Self::BorderBlocks => 0x38,
            Self::HardcodedSpawners => 0x39,
            Self::RandomTicks => 0x3a,
            Self::Checksums => 0x3b,
            Self::GenerationSeed => 0x3c,
            Self::GeneratedPreCavesAndCliffsBlending => 0x3d,
            Self::BlendingBiomeHeight => 0x3e,
            Self::MetaDataHash => 0x3f,
            Self::BlendingData => 0x40,
            Self::ActorDigestVersion => 0x41,
            Self::VersionOld => 0x76,
            Self::LegacyVersion => 0x77,
            Self::Unknown(value) => value,
        }
    }

    /// Inverse of [`Self::byte`]; unmapped bytes become `Unknown`.
    #[must_use]
    pub const fn from_byte(value: u8) -> Self {
        match value {
            0x2b => Self::Data3D,
            0x2c => Self::Version,
            0x2d => Self::Data2D,
            0x2e => Self::Data2DLegacy,
            0x2f => Self::SubChunkPrefix,
            0x30 => Self::LegacyTerrain,
            0x31 => Self::BlockEntity,
            0x32 => Self::Entity,
            0x33 => Self::PendingTicks,
            0x34 => Self::BlockExtraData,
            0x35 => Self::BiomeState,
            0x36 => Self::FinalizedState,
            0x37 => Self::ConversionData,
            0x38 => Self::BorderBlocks,
            0x39 => Self::HardcodedSpawners,
            0x3a => Self::RandomTicks,
            0x3b => Self::Checksums,
            0x3c => Self::GenerationSeed,
            0x3d => Self::GeneratedPreCavesAndCliffsBlending,
            0x3e => Self::BlendingBiomeHeight,
            0x3f => Self::MetaDataHash,
            0x40 => Self::BlendingData,
            0x41 => Self::ActorDigestVersion,
            0x76 => Self::VersionOld,
            0x77 => Self::LegacyVersion,
            other => Self::Unknown(other),
        }
    }

    /// `true` for the record kinds this module treats as carrying renderable
    /// chunk terrain/biome payloads.
    #[must_use]
    pub const fn is_render_chunk_record(self) -> bool {
        matches!(
            self,
            Self::Data3D
                | Self::Data2D
                | Self::Data2DLegacy
                | Self::LegacyTerrain
                | Self::SubChunkPrefix
        )
    }
}
325
/// Fully classified LevelDB key of a Bedrock world database.
/// Produced by [`BedrockDbKey::decode`]; re-serialized by [`BedrockDbKey::encode`].
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum BedrockDbKey {
    /// Per-chunk record (coordinates + record tag).
    Chunk(ChunkKey),
    /// The `~local_player` record.
    LocalPlayer,
    /// A `player_…` record; payload is the text after the prefix.
    RemotePlayer(String),
    /// An `actorprefix` record addressed by actor id.
    ActorPrefix {
        actor_id: i64,
    },
    /// A `digp` actor-digest record addressed by chunk position.
    ActorDigest {
        pos: ChunkPos,
    },
    /// A `map_…` record; payload is the map id suffix.
    Map(String),
    // Village record, split apart by `parse_village_key` (defined elsewhere in
    // this file); the raw key text is retained inside for lossless re-encoding.
    Village(ParsedVillageKey),
    /// A well-known global singleton record.
    Global(GlobalRecordKind),
    /// The `portals` record.
    Portals,
    /// The `schedulerWT` record.
    SchedulerWt,
    /// A `structuretemplate…` record; payload is the name suffix.
    StructureTemplate(String),
    /// A `tickingarea…` record; payload is the name suffix.
    TickingArea(String),
    /// The `game_flatworldlayers` record.
    GameFlatWorldLayers,
    /// Any other fully printable-ASCII key, kept as text.
    PlainString(String),
    /// Anything unclassifiable; raw bytes preserved verbatim.
    Unknown(Bytes),
}
370
/// Record-kind suffix of a village key.
// Variant-to-suffix mapping is done by `parse_village_key`, which is outside
// this view; only the variant names are asserted here.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum VillageRecordKind {
    Info,
    Dwellers,
    Players,
    Poi,
    Unknown,
}
385
/// Decomposed village record key.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ParsedVillageKey {
    /// Original key text, kept verbatim so encoding is lossless
    /// (see `BedrockDbKey::encode`).
    pub raw: String,
    /// Dimension parsed out of the key, when one was present.
    pub dimension: Option<Dimension>,
    /// UUID portion of the key.
    pub uuid: String,
    /// Record-kind suffix.
    pub kind: VillageRecordKind,
}
398
/// Validated id of a `map_…` record (non-empty printable ASCII — see `new`).
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct MapRecordId(String);
402
403impl MapRecordId {
404 pub fn new(id: impl Into<String>) -> Result<Self> {
411 let id = id.into();
412 if id.is_empty() || !id.as_bytes().iter().all(u8::is_ascii_graphic) {
413 return Err(BedrockWorldError::Validation(
414 "map id must be non-empty printable ASCII".to_string(),
415 ));
416 }
417 Ok(Self(id))
418 }
419
420 #[must_use]
421 pub fn unchecked(id: impl Into<String>) -> Self {
425 Self(id.into())
426 }
427
428 #[must_use]
429 pub fn as_str(&self) -> &str {
431 &self.0
432 }
433
434 #[must_use]
435 pub fn storage_key(&self) -> Bytes {
437 Bytes::from(format!("map_{}", self.0))
438 }
439
440 #[must_use]
441 pub fn from_storage_key(key: &[u8]) -> Option<Self> {
443 ascii_suffix(key, b"map_").map(Self)
444 }
445}
446
/// Displays the bare id (no `map_` prefix).
impl std::fmt::Display for MapRecordId {
    fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        formatter.write_str(&self.0)
    }
}
452
/// Borrow as the bare id string.
impl AsRef<str> for MapRecordId {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
458
/// Unique actor id, as embedded little-endian in `actorprefix` keys.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub struct ActorUid(pub i64);
462
463impl ActorUid {
464 #[must_use]
465 pub fn storage_key(self) -> Bytes {
467 let mut bytes = Vec::with_capacity(19);
468 bytes.extend_from_slice(b"actorprefix");
469 bytes.extend_from_slice(&self.0.to_le_bytes());
470 Bytes::from(bytes)
471 }
472
473 #[must_use]
474 pub fn from_actorprefix_key(key: &[u8]) -> Option<Self> {
476 parse_i64_suffix(key, b"actorprefix").map(Self)
477 }
478}
479
/// Typed wrapper for `digp` actor-digest keys.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct ActorDigestKey {
    /// Chunk the digest belongs to.
    pub pos: ChunkPos,
}
486
487impl ActorDigestKey {
488 #[must_use]
489 pub const fn new(pos: ChunkPos) -> Self {
491 Self { pos }
492 }
493
494 #[must_use]
495 pub fn storage_key(self) -> Bytes {
497 let mut bytes = Vec::with_capacity(if self.pos.dimension == Dimension::Overworld {
498 12
499 } else {
500 16
501 });
502 bytes.extend_from_slice(b"digp");
503 bytes.extend_from_slice(&self.pos.x.to_le_bytes());
504 bytes.extend_from_slice(&self.pos.z.to_le_bytes());
505 if self.pos.dimension != Dimension::Overworld {
506 bytes.extend_from_slice(&self.pos.dimension.id().to_le_bytes());
507 }
508 Bytes::from(bytes)
509 }
510
511 #[must_use]
512 pub fn from_storage_key(key: &[u8]) -> Option<Self> {
514 parse_chunk_pos_suffix(key, b"digp").map(Self::new)
515 }
516}
517
/// Well-known singleton records stored under fixed text keys
/// (see [`GlobalRecordKind::from_key`] / [`GlobalRecordKind::name`]).
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum GlobalRecordKind {
    /// `mobevents`.
    MobEvents,
    /// `Overworld` / `Nether` / `TheEnd` per-dimension record.
    Dimension(Dimension),
    /// `scoreboard`.
    Scoreboard,
    /// `LocalPlayer`.
    LocalPlayer,
    /// `AutonomousEntities` (decode also accepts `autonomousentities`).
    AutonomousEntities,
    /// `BiomeData`.
    BiomeData,
    /// `LevelChunkMetaDataDictionary`.
    LevelChunkMetaDataDictionary,
    /// `WorldClocks`.
    WorldClocks,
    /// Arbitrary key text; never produced by `from_key`.
    Other(String),
}
540
541impl GlobalRecordKind {
542 #[must_use]
543 pub fn from_key(key: &[u8]) -> Option<Self> {
545 let text = std::str::from_utf8(key).ok()?;
546 match text {
547 "mobevents" => Some(Self::MobEvents),
548 "Overworld" => Some(Self::Dimension(Dimension::Overworld)),
549 "Nether" => Some(Self::Dimension(Dimension::Nether)),
550 "TheEnd" => Some(Self::Dimension(Dimension::End)),
551 "scoreboard" => Some(Self::Scoreboard),
552 "LocalPlayer" => Some(Self::LocalPlayer),
553 "AutonomousEntities" | "autonomousentities" => Some(Self::AutonomousEntities),
554 "BiomeData" => Some(Self::BiomeData),
555 "LevelChunkMetaDataDictionary" => Some(Self::LevelChunkMetaDataDictionary),
556 "WorldClocks" => Some(Self::WorldClocks),
557 _ => None,
558 }
559 }
560
561 #[must_use]
562 pub fn name(&self) -> String {
564 match self {
565 Self::MobEvents => "mobevents".to_string(),
566 Self::Dimension(Dimension::Overworld) => "Overworld".to_string(),
567 Self::Dimension(Dimension::Nether) => "Nether".to_string(),
568 Self::Dimension(Dimension::End) => "TheEnd".to_string(),
569 Self::Dimension(Dimension::Unknown(id)) => format!("Dimension({id})"),
570 Self::Scoreboard => "scoreboard".to_string(),
571 Self::LocalPlayer => "LocalPlayer".to_string(),
572 Self::AutonomousEntities => "AutonomousEntities".to_string(),
573 Self::BiomeData => "BiomeData".to_string(),
574 Self::LevelChunkMetaDataDictionary => "LevelChunkMetaDataDictionary".to_string(),
575 Self::WorldClocks => "WorldClocks".to_string(),
576 Self::Other(name) => name.clone(),
577 }
578 }
579
580 #[must_use]
581 pub fn storage_key(&self) -> Bytes {
583 Bytes::from(self.name())
584 }
585}
586
impl BedrockDbKey {
    /// Classifies a raw database key. Never fails; unclassifiable keys become
    /// `Unknown`.
    ///
    /// The checks run most-specific first and the ORDER IS SIGNIFICANT: the
    /// printable-ASCII fallback runs before chunk-key decoding, so a fully
    /// graphic-ASCII key is reported as `PlainString` rather than as a
    /// coincidental chunk coordinate.
    #[must_use]
    pub fn decode(key: &[u8]) -> Self {
        if key == b"~local_player" {
            return Self::LocalPlayer;
        }
        if let Some(remote_player) = key.strip_prefix(b"player_") {
            return Self::RemotePlayer(String::from_utf8_lossy(remote_player).into_owned());
        }
        // parse_*_suffix / ascii_suffix / parse_village_key are file-local
        // helpers defined outside this section.
        if let Some(actor_id) = parse_i64_suffix(key, b"actorprefix") {
            return Self::ActorPrefix { actor_id };
        }
        if let Some(pos) = parse_chunk_pos_suffix(key, b"digp") {
            return Self::ActorDigest { pos };
        }
        if key == b"portals" {
            return Self::Portals;
        }
        if key == b"schedulerWT" {
            return Self::SchedulerWt;
        }
        if let Some(map_id) = ascii_suffix(key, b"map_") {
            return Self::Map(map_id);
        }
        if let Some(village) = parse_village_key(key) {
            return Self::Village(village);
        }
        if let Some(name) = ascii_suffix(key, b"structuretemplate") {
            return Self::StructureTemplate(name);
        }
        if let Some(name) = ascii_suffix(key, b"tickingarea") {
            return Self::TickingArea(name);
        }
        if key == b"game_flatworldlayers" {
            return Self::GameFlatWorldLayers;
        }
        if let Some(kind) = GlobalRecordKind::from_key(key) {
            return Self::Global(kind);
        }
        if key.iter().all(u8::is_ascii_graphic) {
            return Self::PlainString(String::from_utf8_lossy(key).into_owned());
        }
        if let Ok(chunk_key) = ChunkKey::decode(key) {
            // A structurally valid chunk key with an unrecognized tag is kept
            // as raw bytes instead of as a dubious Chunk classification.
            if matches!(chunk_key.tag, ChunkRecordTag::Unknown(_)) {
                return Self::Unknown(Bytes::copy_from_slice(key));
            }
            return Self::Chunk(chunk_key);
        }
        Self::Unknown(Bytes::copy_from_slice(key))
    }

    /// Short human-readable label for the key's category (for listings/logs).
    #[must_use]
    pub fn summary_kind(&self) -> String {
        match self {
            Self::Chunk(key) => format!("Chunk::{:?}", key.tag),
            Self::LocalPlayer => "LocalPlayer".to_string(),
            Self::RemotePlayer(_) => "RemotePlayer".to_string(),
            Self::ActorPrefix { .. } => "ActorPrefix".to_string(),
            Self::ActorDigest { .. } => "ActorDigest".to_string(),
            Self::Map(_) => "Map".to_string(),
            Self::Village(village) => format!("Village::{:?}", village.kind),
            Self::Global(kind) => format!("Global::{}", kind.name()),
            Self::Portals => "Portals".to_string(),
            Self::SchedulerWt => "SchedulerWt".to_string(),
            Self::StructureTemplate(_) => "StructureTemplate".to_string(),
            Self::TickingArea(_) => "TickingArea".to_string(),
            Self::GameFlatWorldLayers => "GameFlatWorldLayers".to_string(),
            Self::PlainString(value) => format!("PlainString::{value}"),
            Self::Unknown(_) => "Unknown".to_string(),
        }
    }

    /// Serializes the key back to database bytes.
    ///
    /// Currently every variant is encodable, so this always returns `Some`;
    /// the `Option` leaves room for future unencodable variants.
    #[must_use]
    pub fn encode(&self) -> Option<Bytes> {
        match self {
            Self::Chunk(key) => Some(key.encode()),
            Self::LocalPlayer => Some(Bytes::from_static(b"~local_player")),
            Self::RemotePlayer(xuid) => Some(Bytes::from(format!("player_{xuid}"))),
            Self::ActorPrefix { actor_id } => Some(ActorUid(*actor_id).storage_key()),
            Self::ActorDigest { pos } => Some(ActorDigestKey::new(*pos).storage_key()),
            Self::Map(id) => Some(MapRecordId::unchecked(id.clone()).storage_key()),
            // Villages re-emit the original key text verbatim.
            Self::Village(key) => Some(Bytes::copy_from_slice(key.raw.as_bytes())),
            Self::Global(kind) => Some(kind.storage_key()),
            Self::Portals => Some(Bytes::from_static(b"portals")),
            Self::SchedulerWt => Some(Bytes::from_static(b"schedulerWT")),
            Self::StructureTemplate(name) => Some(Bytes::from(format!("structuretemplate{name}"))),
            Self::TickingArea(name) => Some(Bytes::from(format!("tickingarea{name}"))),
            Self::GameFlatWorldLayers => Some(Bytes::from_static(b"game_flatworldlayers")),
            Self::PlainString(name) => Some(Bytes::copy_from_slice(name.as_bytes())),
            Self::Unknown(bytes) => Some(bytes.clone()),
        }
    }
}
683
/// Decoded chunk-record key: position, record tag, optional subchunk index.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct ChunkKey {
    /// Chunk position (including dimension).
    pub pos: ChunkPos,
    /// Record kind.
    pub tag: ChunkRecordTag,
    /// Trailing subchunk-Y byte, when the key carried one (10/14-byte keys).
    pub subchunk_y: Option<i8>,
}
694
impl ChunkKey {
    /// Key for a non-subchunk record of `pos`.
    #[must_use]
    pub const fn new(pos: ChunkPos, tag: ChunkRecordTag) -> Self {
        Self {
            pos,
            tag,
            subchunk_y: None,
        }
    }

    /// Key for the `SubChunkPrefix` record of subchunk `y`.
    #[must_use]
    pub const fn subchunk(pos: ChunkPos, y: i8) -> Self {
        Self {
            pos,
            tag: ChunkRecordTag::SubChunkPrefix,
            subchunk_y: Some(y),
        }
    }

    /// Serializes the key. Layout:
    /// `x:i32 LE, z:i32 LE, [dimension:i32 LE unless Overworld], tag:u8, [subchunk_y:i8]`.
    #[must_use]
    pub fn encode(&self) -> Bytes {
        let mut bytes = Vec::with_capacity(if self.pos.dimension == Dimension::Overworld {
            10
        } else {
            14
        });
        bytes.extend_from_slice(&self.pos.x.to_le_bytes());
        bytes.extend_from_slice(&self.pos.z.to_le_bytes());
        if self.pos.dimension != Dimension::Overworld {
            bytes.extend_from_slice(&self.pos.dimension.id().to_le_bytes());
        }
        bytes.push(self.tag.byte());
        if let Some(y) = self.subchunk_y {
            // Single byte, so native order is identical to little-endian.
            bytes.push(y.to_ne_bytes()[0]);
        }
        Bytes::from(bytes)
    }

    /// Parses a chunk key. Valid lengths: 9/10 bytes (Overworld, without/with
    /// trailing subchunk byte) and 13/14 bytes (explicit dimension id).
    ///
    /// # Errors
    /// Returns [`BedrockWorldError::InvalidKey`] for any other length or a
    /// missing record-tag byte.
    pub fn decode(key: &[u8]) -> Result<Self> {
        match key.len() {
            9 | 10 | 13 | 14 => {}
            len => {
                return Err(BedrockWorldError::InvalidKey(format!(
                    "unsupported chunk key length: {len}"
                )));
            }
        }

        // read_i32 is a file-local little-endian helper defined outside this section.
        let x = read_i32(key, 0)?;
        let z = read_i32(key, 4)?;
        let (dimension, tag_index) = if key.len() >= 13 {
            (Dimension::from_id(read_i32(key, 8)?), 12)
        } else {
            (Dimension::Overworld, 8)
        };
        let tag = ChunkRecordTag::from_byte(
            *key.get(tag_index)
                .ok_or_else(|| BedrockWorldError::InvalidKey("missing record tag".to_string()))?,
        );
        // Lengths 10 and 14 carry one extra byte: the subchunk Y index.
        let subchunk_y = if matches!(key.len(), 10 | 14) {
            Some(i8::from_ne_bytes([key[tag_index + 1]]))
        } else {
            None
        };
        Ok(Self {
            pos: ChunkPos { x, z, dimension },
            tag,
            subchunk_y,
        })
    }
}
770
/// One raw key/value record belonging to a chunk.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ChunkRecord {
    /// Decoded key.
    pub key: ChunkKey,
    /// Raw, still-undecoded record payload.
    pub value: Bytes,
}
779
/// A block palette entry: block name plus its state properties.
#[derive(Debug, Clone, PartialEq)]
pub struct BlockState {
    /// Block identifier; legacy blocks are synthesized as `legacy:<id>`
    /// (see `Chunk::get_block`).
    pub name: String,
    /// State properties as NBT values, keyed by property name.
    pub states: BTreeMap<String, NbtTag>,
    /// Block-state format version, when the source palette supplied one.
    pub version: Option<i32>,
}
790
/// One block storage decoded from a paletted subchunk.
#[derive(Debug, Clone, PartialEq)]
pub struct BlockPalette {
    /// Distinct block states referenced by `indices`.
    pub states: Vec<BlockState>,
    /// Per-block palette indices; `None` when only counts were decoded
    /// (see `SubChunkDecodeMode::CountsOnly`).
    pub indices: Option<Vec<u16>>,
    /// Occurrence count per palette entry.
    pub counts: Vec<u16>,
}
801
802impl BlockPalette {
803 #[must_use]
804 pub fn palette_index_at(&self, local_x: u8, local_y: u8, local_z: u8) -> Option<u16> {
806 if local_x >= 16 || local_y >= 16 || local_z >= 16 {
807 return None;
808 }
809 self.indices
810 .as_ref()?
811 .get(block_storage_index(local_x, local_y, local_z))
812 .copied()
813 }
814
815 #[must_use]
816 pub fn block_state_at(&self, local_x: u8, local_y: u8, local_z: u8) -> Option<&BlockState> {
818 let palette_index = usize::from(self.palette_index_at(local_x, local_y, local_z)?);
819 self.states.get(palette_index)
820 }
821}
822
/// Controls how much of a paletted subchunk is materialized during decoding.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize)]
pub enum SubChunkDecodeMode {
    /// Decode palette entries and per-entry counts only (no index array).
    CountsOnly,
    /// Decode the full per-block index array as well (the default).
    #[default]
    FullIndices,
}
832
/// Decoded representation of one subchunk record payload.
#[derive(Debug, Clone, PartialEq)]
pub enum SubChunkFormat {
    /// Version 0 / 2..=7 payload: raw block id + data (nibble) arrays.
    LegacySubChunk(LegacySubChunk),
    // Marker variant for chunk-wide legacy terrain; its constructor is not in
    // this section of the file.
    LegacyTerrain,
    // Marker variant for the version-1 fixed-array format; its constructor is
    // not in this section of the file.
    FixedArrayV1,
    /// Paletted payload (version 1 or 8+) with one or more block storages.
    Paletted {
        version: u8,
        storages: Vec<BlockPalette>,
    },
    /// Payload kept verbatim because decoding failed or was skipped.
    Raw {
        version: Option<u8>,
        bytes: Bytes,
    },
}
857
/// One decoded 16-block-tall vertical slice of a chunk.
#[derive(Debug, Clone, PartialEq)]
pub struct SubChunk {
    /// Subchunk index (world block Y divided by 16, floored).
    pub y: i8,
    /// Decoded payload.
    pub format: SubChunkFormat,
}
866
867impl SubChunk {
868 #[must_use]
869 pub fn block_state_at(&self, local_x: u8, local_y: u8, local_z: u8) -> Option<&BlockState> {
871 match &self.format {
872 SubChunkFormat::Paletted { storages, .. } => storages
873 .first()
874 .and_then(|storage| storage.block_state_at(local_x, local_y, local_z)),
875 _ => None,
876 }
877 }
878
879 #[must_use]
880 pub fn visible_block_state_at(
882 &self,
883 local_x: u8,
884 local_y: u8,
885 local_z: u8,
886 ) -> Option<&BlockState> {
887 self.visible_block_states_at(local_x, local_y, local_z)
888 .next()
889 }
890
891 #[must_use]
892 pub fn visible_block_states_at(
894 &self,
895 local_x: u8,
896 local_y: u8,
897 local_z: u8,
898 ) -> VisibleBlockStatesAt<'_> {
899 let storages = match &self.format {
900 SubChunkFormat::Paletted { storages, .. } => Some(storages.iter().rev()),
901 _ => None,
902 };
903 VisibleBlockStatesAt {
904 storages,
905 local_x,
906 local_y,
907 local_z,
908 }
909 }
910
911 #[must_use]
912 pub fn legacy_block_id_at(&self, local_x: u8, local_y: u8, local_z: u8) -> Option<u8> {
914 match &self.format {
915 SubChunkFormat::LegacySubChunk(subchunk) => {
916 subchunk.block_id_at(local_x, local_y, local_z)
917 }
918 _ => None,
919 }
920 }
921
922 #[must_use]
923 pub fn legacy_block_data_at(&self, local_x: u8, local_y: u8, local_z: u8) -> Option<u8> {
925 match &self.format {
926 SubChunkFormat::LegacySubChunk(subchunk) => {
927 subchunk.block_data_at(local_x, local_y, local_z)
928 }
929 _ => None,
930 }
931 }
932}
933
/// Iterator returned by [`SubChunk::visible_block_states_at`]; walks block
/// storages in reverse order, skipping air states.
pub struct VisibleBlockStatesAt<'chunk> {
    // `None` when the subchunk is not paletted — the iterator is then empty.
    storages: Option<std::iter::Rev<std::slice::Iter<'chunk, BlockPalette>>>,
    local_x: u8,
    local_y: u8,
    local_z: u8,
}
941
942impl<'chunk> Iterator for VisibleBlockStatesAt<'chunk> {
943 type Item = &'chunk BlockState;
944
945 fn next(&mut self) -> Option<Self::Item> {
946 let storages = self.storages.as_mut()?;
947 for storage in storages {
948 let Some(state) = storage.block_state_at(self.local_x, self.local_y, self.local_z)
949 else {
950 continue;
951 };
952 if !is_air_block_state_name(&state.name) {
953 return Some(state);
954 }
955 }
956 None
957 }
958}
959
/// Returns `true` when `name` is one of the block-state names treated as air
/// (invisible) when scanning palette storages.
///
/// Note the asymmetry: the bare names `structure_void`, `light_block` and
/// `light` are NOT in the list — only their `minecraft:`-prefixed forms are.
fn is_air_block_state_name(name: &str) -> bool {
    const AIR_NAMES: [&str; 9] = [
        "air",
        "cave_air",
        "void_air",
        "minecraft:air",
        "minecraft:cave_air",
        "minecraft:void_air",
        "minecraft:structure_void",
        "minecraft:light_block",
        "minecraft:light",
    ];
    AIR_NAMES.contains(&name)
}
974
/// One 4-byte biome entry from a legacy terrain column: id plus an RGB color.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct LegacyBiomeSample {
    /// Numeric biome id.
    pub biome_id: u8,
    pub red: u8,
    pub green: u8,
    pub blue: u8,
}
987
988impl LegacyBiomeSample {
989 #[must_use]
990 pub const fn rgb_u32(self) -> u32 {
992 ((self.red as u32) << 16) | ((self.green as u32) << 8) | self.blue as u32
993 }
994}
995
/// Zero-copy view over a legacy (16 x 128 x 16) terrain record payload.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct LegacyTerrain {
    // Exactly LEGACY_TERRAIN_VALUE_LEN bytes; validated by `parse`, which
    // makes the fixed-offset slicing in the accessors below panic-free.
    bytes: Bytes,
}
1001
impl LegacyTerrain {
    /// Wraps a `LegacyTerrain` record payload after checking its exact length.
    ///
    /// # Errors
    /// Returns [`BedrockWorldError::UnsupportedChunkFormat`] when the value is
    /// not exactly [`LEGACY_TERRAIN_VALUE_LEN`] bytes.
    pub fn parse(bytes: Bytes) -> Result<Self> {
        if bytes.len() != LEGACY_TERRAIN_VALUE_LEN {
            return Err(BedrockWorldError::UnsupportedChunkFormat(format!(
                "LegacyTerrain value must be {LEGACY_TERRAIN_VALUE_LEN} bytes, got {}",
                bytes.len()
            )));
        }
        Ok(Self { bytes })
    }

    /// Whole raw payload.
    #[must_use]
    pub fn raw(&self) -> &Bytes {
        &self.bytes
    }

    /// One byte per block: the block id array.
    #[must_use]
    pub fn block_ids(&self) -> &[u8] {
        &self.bytes[..LEGACY_TERRAIN_BLOCK_COUNT]
    }

    /// Block-data nibble array (half a byte per block).
    #[must_use]
    pub fn block_data(&self) -> &[u8] {
        &self.bytes[LEGACY_TERRAIN_BLOCK_DATA_OFFSET..LEGACY_TERRAIN_SKY_LIGHT_OFFSET]
    }

    /// Sky-light nibble array.
    #[must_use]
    pub fn sky_light(&self) -> &[u8] {
        &self.bytes[LEGACY_TERRAIN_SKY_LIGHT_OFFSET..LEGACY_TERRAIN_BLOCK_LIGHT_OFFSET]
    }

    /// Block-light nibble array.
    #[must_use]
    pub fn block_light(&self) -> &[u8] {
        &self.bytes[LEGACY_TERRAIN_BLOCK_LIGHT_OFFSET..LEGACY_TERRAIN_HEIGHTMAP_OFFSET]
    }

    /// 16x16 heightmap, one byte per column.
    #[must_use]
    pub fn heightmap(&self) -> &[u8] {
        &self.bytes[LEGACY_TERRAIN_HEIGHTMAP_OFFSET..LEGACY_TERRAIN_BIOME_OFFSET]
    }

    /// 16x16 biome section, four bytes per column (id + RGB; see
    /// [`Self::biome_sample_at`]).
    #[must_use]
    pub fn biomes(&self) -> &[u8] {
        &self.bytes[LEGACY_TERRAIN_BIOME_OFFSET..LEGACY_TERRAIN_VALUE_LEN]
    }

    /// Flat index of a block: `x * 2048 + z * 128 + y`, for coordinates in a
    /// 16 x 128 x 16 column. `None` when out of range.
    #[must_use]
    pub fn block_index(local_x: u8, local_y: u8, local_z: u8) -> Option<usize> {
        if local_x < 16 && local_y < 128 && local_z < 16 {
            Some((usize::from(local_x) << 11) | (usize::from(local_z) << 7) | usize::from(local_y))
        } else {
            None
        }
    }

    /// Flat index of a column: `z * 16 + x`. `None` when out of range.
    #[must_use]
    pub fn column_index(local_x: u8, local_z: u8) -> Option<usize> {
        if local_x < 16 && local_z < 16 {
            Some(usize::from(local_z) * 16 + usize::from(local_x))
        } else {
            None
        }
    }

    /// Block id at the given column-local coordinates.
    #[must_use]
    pub fn block_id_at(&self, local_x: u8, local_y: u8, local_z: u8) -> Option<u8> {
        Self::block_index(local_x, local_y, local_z)
            .and_then(|index| self.block_ids().get(index).copied())
    }

    // The three nibble accessors below use `nibble_at`, a file-local helper
    // (defined outside this section) that selects the 4-bit entry at `index`.

    /// Block-data nibble at the given coordinates.
    #[must_use]
    pub fn block_data_at(&self, local_x: u8, local_y: u8, local_z: u8) -> Option<u8> {
        Self::block_index(local_x, local_y, local_z)
            .and_then(|index| nibble_at(self.block_data(), index))
    }

    /// Sky-light nibble at the given coordinates.
    #[must_use]
    pub fn sky_light_at(&self, local_x: u8, local_y: u8, local_z: u8) -> Option<u8> {
        Self::block_index(local_x, local_y, local_z)
            .and_then(|index| nibble_at(self.sky_light(), index))
    }

    /// Block-light nibble at the given coordinates.
    #[must_use]
    pub fn block_light_at(&self, local_x: u8, local_y: u8, local_z: u8) -> Option<u8> {
        Self::block_index(local_x, local_y, local_z)
            .and_then(|index| nibble_at(self.block_light(), index))
    }

    /// Heightmap value for a column.
    #[must_use]
    pub fn height_at(&self, local_x: u8, local_z: u8) -> Option<u8> {
        Self::column_index(local_x, local_z).and_then(|index| self.heightmap().get(index).copied())
    }

    /// Biome id + RGB color sample for a column (4 bytes per column).
    #[must_use]
    pub fn biome_sample_at(&self, local_x: u8, local_z: u8) -> Option<LegacyBiomeSample> {
        let offset = Self::column_index(local_x, local_z)?.checked_mul(4)?;
        let bytes = self.biomes().get(offset..offset + 4)?;
        Some(LegacyBiomeSample {
            biome_id: bytes[0],
            red: bytes[1],
            green: bytes[2],
            blue: bytes[3],
        })
    }

    /// Column biome color packed as `0x00RRGGBB`.
    #[must_use]
    pub fn biome_color_at(&self, local_x: u8, local_z: u8) -> Option<u32> {
        self.biome_sample_at(local_x, local_z)
            .map(LegacyBiomeSample::rgb_u32)
    }
}
1130
/// Zero-copy view over a legacy subchunk payload (version 0 or 2..=7).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct LegacySubChunk {
    // Leading version byte of the payload; validated by `parse`.
    version: u8,
    // Either LEGACY_SUBCHUNK_MIN_VALUE_LEN or
    // LEGACY_SUBCHUNK_WITH_LIGHT_VALUE_LEN bytes; validated by `parse`.
    bytes: Bytes,
}
1137
impl LegacySubChunk {
    /// Validates and wraps a legacy subchunk payload.
    ///
    /// Accepted: version byte 0 or 2..=7, and exactly the minimum length
    /// (ids + data nibbles) or the extended length with both light arrays.
    ///
    /// # Errors
    /// Returns [`BedrockWorldError::UnsupportedChunkFormat`] for an empty
    /// value, a non-legacy version byte, or any other length.
    pub fn parse(bytes: Bytes) -> Result<Self> {
        let Some(version) = bytes.first().copied() else {
            return Err(BedrockWorldError::UnsupportedChunkFormat(
                "legacy subchunk value is empty".to_string(),
            ));
        };
        if !matches!(version, 0 | 2..=7) {
            return Err(BedrockWorldError::UnsupportedChunkFormat(format!(
                "version {version} is not a legacy subchunk payload"
            )));
        }
        if !matches!(
            bytes.len(),
            LEGACY_SUBCHUNK_MIN_VALUE_LEN | LEGACY_SUBCHUNK_WITH_LIGHT_VALUE_LEN
        ) {
            return Err(BedrockWorldError::UnsupportedChunkFormat(format!(
                "legacy subchunk value has invalid length {}",
                bytes.len()
            )));
        }
        Ok(Self { version, bytes })
    }

    /// Payload version byte (0 or 2..=7).
    #[must_use]
    pub const fn version(&self) -> u8 {
        self.version
    }

    /// Whole raw payload, including the version byte.
    #[must_use]
    pub fn raw(&self) -> &Bytes {
        &self.bytes
    }

    /// One byte per block: block id array (follows the version byte).
    #[must_use]
    pub fn block_ids(&self) -> &[u8] {
        let start = 1;
        let end = start + LEGACY_SUBCHUNK_BLOCK_COUNT;
        &self.bytes[start..end]
    }

    /// Block-data nibble array (half a byte per block).
    #[must_use]
    pub fn block_data(&self) -> &[u8] {
        let start = 1 + LEGACY_SUBCHUNK_BLOCK_COUNT;
        let end = start + LEGACY_SUBCHUNK_BLOCK_COUNT / 2;
        &self.bytes[start..end]
    }

    /// Sky-light nibble array; `None` when the payload has no light sections.
    #[must_use]
    pub fn sky_light(&self) -> Option<&[u8]> {
        if self.bytes.len() != LEGACY_SUBCHUNK_WITH_LIGHT_VALUE_LEN {
            return None;
        }
        let start = 1 + LEGACY_SUBCHUNK_BLOCK_COUNT + LEGACY_SUBCHUNK_BLOCK_COUNT / 2;
        let end = start + LEGACY_SUBCHUNK_BLOCK_COUNT / 2;
        Some(&self.bytes[start..end])
    }

    /// Block-light nibble array (the final section); `None` when the payload
    /// has no light sections.
    #[must_use]
    pub fn block_light(&self) -> Option<&[u8]> {
        if self.bytes.len() != LEGACY_SUBCHUNK_WITH_LIGHT_VALUE_LEN {
            return None;
        }
        // 1 (version) + 4096 (ids) + 2048 (data) + 2048 (sky light).
        let start = 1 + LEGACY_SUBCHUNK_BLOCK_COUNT + LEGACY_SUBCHUNK_BLOCK_COUNT;
        Some(&self.bytes[start..])
    }

    /// Flat index of a block: `x * 256 + z * 16 + y`, for coordinates in
    /// `0..16`. `None` when out of range.
    #[must_use]
    pub fn block_index(local_x: u8, local_y: u8, local_z: u8) -> Option<usize> {
        if local_x < 16 && local_y < 16 && local_z < 16 {
            Some(usize::from(local_x) * 256 + usize::from(local_z) * 16 + usize::from(local_y))
        } else {
            None
        }
    }

    /// Block id at the given subchunk-local coordinates.
    #[must_use]
    pub fn block_id_at(&self, local_x: u8, local_y: u8, local_z: u8) -> Option<u8> {
        Self::block_index(local_x, local_y, local_z)
            .and_then(|index| self.block_ids().get(index).copied())
    }

    // The nibble accessors below use `nibble_at`, a file-local helper defined
    // outside this section.

    /// Block-data nibble at the given coordinates.
    #[must_use]
    pub fn block_data_at(&self, local_x: u8, local_y: u8, local_z: u8) -> Option<u8> {
        Self::block_index(local_x, local_y, local_z)
            .and_then(|index| nibble_at(self.block_data(), index))
    }

    /// Sky-light nibble; `None` out of range or without light sections.
    #[must_use]
    pub fn sky_light_at(&self, local_x: u8, local_y: u8, local_z: u8) -> Option<u8> {
        Self::block_index(local_x, local_y, local_z)
            .and_then(|index| nibble_at(self.sky_light()?, index))
    }

    /// Block-light nibble; `None` out of range or without light sections.
    #[must_use]
    pub fn block_light_at(&self, local_x: u8, local_y: u8, local_z: u8) -> Option<u8> {
        Self::block_index(local_x, local_y, local_z)
            .and_then(|index| nibble_at(self.block_light()?, index))
    }
}
1250
/// One entity (or block-entity) record as a parsed NBT tree.
#[derive(Debug, Clone, PartialEq)]
pub struct EntityData {
    /// Root NBT tag of the entity.
    pub tag: NbtTag,
}
1257
/// All records loaded for a single chunk position.
#[derive(Debug, Clone, PartialEq)]
pub struct Chunk {
    /// Chunk position, including dimension.
    pub pos: ChunkPos,
    /// Chunk format version byte, when one was found for this chunk.
    pub version: Option<u8>,
    /// Raw per-chunk records; the accessor methods parse them on demand.
    pub records: Vec<ChunkRecord>,
}
1268
impl Chunk {
    /// Finds and parses the `SubChunkPrefix` record at subchunk index `y`.
    /// `Ok(None)` when no such record exists.
    ///
    /// # Errors
    /// Propagates parse failures from [`parse_subchunk`].
    pub fn get_subchunk(&self, y: i8) -> Result<Option<SubChunk>> {
        let Some(record) = self.records.iter().find(|record| {
            record.key.tag == ChunkRecordTag::SubChunkPrefix && record.key.subchunk_y == Some(y)
        }) else {
            return Ok(None);
        };
        parse_subchunk(y, record.value.clone()).map(Some)
    }

    /// Finds and parses the chunk's `LegacyTerrain` record, if present.
    ///
    /// # Errors
    /// Propagates parse failures from [`LegacyTerrain::parse`].
    pub fn legacy_terrain(&self) -> Result<Option<LegacyTerrain>> {
        let Some(record) = self
            .records
            .iter()
            .find(|record| record.key.tag == ChunkRecordTag::LegacyTerrain)
        else {
            return Ok(None);
        };
        LegacyTerrain::parse(record.value.clone()).map(Some)
    }

    /// Resolves the block state at chunk-local `(x, y, z)`.
    ///
    /// Lookup order: paletted subchunk state, then legacy subchunk id/data
    /// (synthesized as a `legacy:<id>` state with a `data` property), then —
    /// only for `y` in `0..=127` — the chunk-wide legacy terrain record.
    ///
    /// # Errors
    /// Validation error for x/z outside `0..16` or an unrepresentable `y`;
    /// `UnsupportedChunkFormat` when no record exposes a state at the position.
    pub fn get_block(&self, x: u8, y: i16, z: u8) -> Result<BlockState> {
        if x >= 16 || z >= 16 {
            return Err(BedrockWorldError::Validation(format!(
                "local block coordinates must use x/z in 0..15, got x={x}, z={z}"
            )));
        }

        // Floor-divide y into a subchunk index and a 0..16 local offset.
        let subchunk_y = i8::try_from(i32::from(y).div_euclid(16)).map_err(|_| {
            BedrockWorldError::Validation(format!(
                "block y={y} cannot be represented as a Bedrock subchunk index"
            ))
        })?;
        let local_y = u8::try_from(i32::from(y).rem_euclid(16)).map_err(|_| {
            BedrockWorldError::Validation(format!("block y={y} has invalid local subchunk offset"))
        })?;
        if let Some(subchunk) = self.get_subchunk(subchunk_y)? {
            if let Some(state) = subchunk.block_state_at(x, local_y, z) {
                return Ok(state.clone());
            }
            if let Some(id) = subchunk.legacy_block_id_at(x, local_y, z) {
                let mut states = BTreeMap::new();
                if let Some(data) = subchunk.legacy_block_data_at(x, local_y, z) {
                    states.insert("data".to_string(), NbtTag::Byte(data as i8));
                }
                return Ok(BlockState {
                    name: format!("legacy:{id}"),
                    states,
                    version: None,
                });
            }
        }
        // Legacy terrain only covers the 0..=127 height range.
        if (0..=127).contains(&y) {
            let Some(terrain) = self.legacy_terrain()? else {
                return Err(BedrockWorldError::UnsupportedChunkFormat(format!(
                    "chunk {:?} has no legacy terrain record",
                    self.pos
                )));
            };
            let local_y = u8::try_from(y).map_err(|_| {
                BedrockWorldError::Validation(format!("legacy block y={y} is outside 0..127"))
            })?;
            let id = terrain.block_id_at(x, local_y, z).ok_or_else(|| {
                BedrockWorldError::UnsupportedChunkFormat(format!(
                    "chunk {:?} has no legacy block id at local ({x}, {y}, {z})",
                    self.pos
                ))
            })?;
            let data = terrain.block_data_at(x, local_y, z).unwrap_or(0);
            let mut states = BTreeMap::new();
            states.insert("data".to_string(), NbtTag::Byte(data as i8));
            return Ok(BlockState {
                name: format!("legacy:{id}"),
                states,
                version: None,
            });
        }
        Err(BedrockWorldError::UnsupportedChunkFormat(format!(
            "chunk {:?} does not expose a block state at local ({x}, {y}, {z})",
            self.pos
        )))
    }

    /// Structured block editing is not implemented; always errors.
    ///
    /// # Errors
    /// Always returns `UnsupportedChunkFormat`.
    pub fn set_block(&mut self, _x: u8, _y: i16, _z: u8, _block: BlockState) -> Result<()> {
        Err(BedrockWorldError::UnsupportedChunkFormat(
            "structured block editing is not enabled for this chunk format".to_string(),
        ))
    }

    /// Parses every `Entity` record into NBT entity data.
    ///
    /// # Errors
    /// Propagates NBT parse failures.
    // NOTE(review): this calls `parse_consecutive_nbt`, while the top-of-file
    // import is `parse_consecutive_root_nbt` — confirm the former is a local
    // helper defined later in this file.
    pub fn get_entities(&self) -> Result<Vec<EntityData>> {
        let mut entities = Vec::new();
        for record in self
            .records
            .iter()
            .filter(|record| record.key.tag == ChunkRecordTag::Entity)
        {
            entities.extend(parse_consecutive_nbt(record.value.as_ref())?);
        }
        Ok(entities)
    }

    /// Parses every `BlockEntity` record into NBT entity data.
    ///
    /// # Errors
    /// Propagates NBT parse failures.
    pub fn get_block_entities(&self) -> Result<Vec<EntityData>> {
        let mut entities = Vec::new();
        for record in self
            .records
            .iter()
            .filter(|record| record.key.tag == ChunkRecordTag::BlockEntity)
        {
            entities.extend(parse_consecutive_nbt(record.value.as_ref())?);
        }
        Ok(entities)
    }
}
1388
/// Parses a serialized subchunk value, decoding the full per-block index
/// arrays.
///
/// Convenience wrapper around [`parse_subchunk_with_mode`] using
/// [`SubChunkDecodeMode::FullIndices`].
pub fn parse_subchunk(y: i8, bytes: Bytes) -> Result<SubChunk> {
    parse_subchunk_with_mode(y, bytes, SubChunkDecodeMode::FullIndices)
}
1393
/// Parses a serialized subchunk payload for subchunk index `y`, with `mode`
/// controlling how much block data is retained.
///
/// The first payload byte selects the storage format. Payloads that fail to
/// decode under their advertised version are preserved verbatim as
/// [`SubChunkFormat::Raw`] instead of erroring, so unknown or corrupt data
/// can still be round-tripped.
pub fn parse_subchunk_with_mode(y: i8, bytes: Bytes, mode: SubChunkDecodeMode) -> Result<SubChunk> {
    let version = bytes.first().copied();
    let format = match version {
        // Versions 0 and 2..=7 use the legacy block-id/data/light array layout.
        Some(0 | 2..=7) => LegacySubChunk::parse(bytes.clone()).map_or_else(
            |_| SubChunkFormat::Raw { version, bytes },
            SubChunkFormat::LegacySubChunk,
        ),
        // Version 1 is exactly one palette storage with no storage-count byte,
        // so the storage data begins directly at offset 1.
        Some(version @ 1) => parse_exact_palette_storages(&bytes, 1, 1, mode).map_or_else(
            |_| SubChunkFormat::Raw {
                version: Some(version),
                bytes,
            },
            |storages| SubChunkFormat::Paletted { version, storages },
        ),
        // Versions 8+ carry a storage count (and, for v9, an embedded Y byte).
        Some(version @ 8..=u8::MAX) => parse_paletted_subchunk(version, &bytes, mode)
            .unwrap_or_else(|_| SubChunkFormat::Raw {
                version: Some(version),
                bytes,
            }),
        // Empty payload: keep whatever we were given as raw bytes.
        _ => SubChunkFormat::Raw { version, bytes },
    };
    Ok(SubChunk { y, format })
}
1418
1419fn parse_consecutive_nbt(bytes: &[u8]) -> Result<Vec<EntityData>> {
1420 parse_consecutive_root_nbt(bytes)
1421 .map(|tags| tags.into_iter().map(|tag| EntityData { tag }).collect())
1422}
1423
1424fn parse_paletted_subchunk(
1425 version: u8,
1426 bytes: &[u8],
1427 mode: SubChunkDecodeMode,
1428) -> Result<SubChunkFormat> {
1429 let Some(storage_count) = bytes.get(1).copied() else {
1430 return Err(BedrockWorldError::UnsupportedChunkFormat(
1431 "paletted subchunk is missing storage count".to_string(),
1432 ));
1433 };
1434 let offsets: &[usize] = if version == 9 { &[3, 2] } else { &[2] };
1435 for offset in offsets {
1436 if let Ok(storages) = parse_exact_palette_storages(bytes, *offset, storage_count, mode) {
1437 return Ok(SubChunkFormat::Paletted { version, storages });
1438 }
1439 }
1440 Err(BedrockWorldError::UnsupportedChunkFormat(
1441 "unsupported paletted subchunk layout".to_string(),
1442 ))
1443}
1444
1445fn parse_exact_palette_storages(
1446 bytes: &[u8],
1447 offset: usize,
1448 storage_count: u8,
1449 mode: SubChunkDecodeMode,
1450) -> Result<Vec<BlockPalette>> {
1451 let (storages, consumed) = parse_palette_storages(bytes, offset, storage_count, mode)?;
1452 if consumed != bytes.len() {
1453 return Err(BedrockWorldError::UnsupportedChunkFormat(format!(
1454 "palette storage ended at byte {consumed} but payload has {} bytes",
1455 bytes.len()
1456 )));
1457 }
1458 Ok(storages)
1459}
1460
/// Parses `storage_count` consecutive palette storages starting at `offset`.
///
/// Returns the decoded storages together with the byte offset one past the
/// last storage so callers can verify the payload was fully consumed.
///
/// Each storage is laid out as:
/// * one header byte whose high seven bits are the bits-per-block width,
/// * the packed little-endian `u32` block-index words,
/// * a little-endian `i32` palette length (omitted when bits-per-block is 0,
///   which implies a single implicit palette entry),
/// * `palette_len` consecutive root NBT compounds describing block states.
fn parse_palette_storages(
    bytes: &[u8],
    mut offset: usize,
    storage_count: u8,
    mode: SubChunkDecodeMode,
) -> Result<(Vec<BlockPalette>, usize)> {
    let mut storages = Vec::with_capacity(usize::from(storage_count));
    for _ in 0..storage_count {
        let header = *bytes.get(offset).ok_or_else(|| {
            BedrockWorldError::UnsupportedChunkFormat(
                "palette storage header is missing".to_string(),
            )
        })?;
        offset += 1;

        // The low header bit flags runtime palettes; the rest is the width.
        let bits_per_block = header >> 1;
        if !matches!(bits_per_block, 0 | 1 | 2 | 3 | 4 | 5 | 6 | 8 | 16) {
            return Err(BedrockWorldError::UnsupportedChunkFormat(format!(
                "unsupported bits-per-block value: {bits_per_block}"
            )));
        }

        let word_count = packed_word_count(bits_per_block);
        let words_byte_len = word_count.checked_mul(4).ok_or_else(|| {
            BedrockWorldError::UnsupportedChunkFormat("palette word count overflowed".to_string())
        })?;
        let words_bytes = bytes.get(offset..offset + words_byte_len).ok_or_else(|| {
            BedrockWorldError::UnsupportedChunkFormat(
                "palette block indices are truncated".to_string(),
            )
        })?;
        offset += words_byte_len;

        // Zero bits per block means every cell refers to one implicit entry
        // and no palette-length field is serialized.
        let palette_len = if bits_per_block == 0 {
            1
        } else {
            let palette_len = read_i32_at(bytes, offset)?;
            offset += 4;
            if palette_len < 0 {
                return Err(BedrockWorldError::UnsupportedChunkFormat(
                    "palette length cannot be negative".to_string(),
                ));
            }
            let palette_len = usize::try_from(palette_len).map_err(|_| {
                BedrockWorldError::UnsupportedChunkFormat("palette length overflowed".to_string())
            })?;
            // Bound allocations: a palette never needs more entries than the
            // 4096 blocks of a subchunk.
            if palette_len > MAX_SUBCHUNK_PALETTE_LEN {
                return Err(BedrockWorldError::UnsupportedChunkFormat(format!(
                    "palette length {palette_len} exceeds maximum {MAX_SUBCHUNK_PALETTE_LEN}"
                )));
            }
            palette_len
        };
        // The palette itself: consecutive root NBT compounds, one per entry.
        let mut states = Vec::with_capacity(palette_len);
        for _ in 0..palette_len {
            let (tag, consumed) = parse_root_nbt_with_consumed(&bytes[offset..])?;
            offset += consumed;
            states.push(block_state_from_nbt(&tag));
        }

        let indices = unpack_palette_indices(words_bytes, bits_per_block, palette_len)?;
        // Tally how many of the 4096 cells reference each palette entry.
        let mut counts = vec![0_u16; palette_len];
        for index in &indices {
            if let Some(count) = counts.get_mut(usize::from(*index)) {
                *count = count.saturating_add(1);
            }
        }
        // CountsOnly callers only need the tallies, so drop the raw indices.
        let indices = match mode {
            SubChunkDecodeMode::CountsOnly => None,
            SubChunkDecodeMode::FullIndices => Some(indices),
        };
        storages.push(BlockPalette {
            states,
            indices,
            counts,
        });
    }
    Ok((storages, offset))
}
1540
/// Number of 32-bit words needed to pack 4096 block values at the given
/// bits-per-block width (0 means no index words are serialized at all).
fn packed_word_count(bits_per_block: u8) -> usize {
    match bits_per_block {
        0 => 0,
        bits => {
            // Values never straddle a word, so each word holds floor(32 / bits).
            let per_word = usize::from(32 / bits);
            4096_usize.div_ceil(per_word)
        }
    }
}
1548
/// Expands packed little-endian `u32` words into exactly 4096 palette indices.
///
/// Values are packed LSB-first and never straddle a word boundary, so the
/// final word may carry unused padding bits; those are skipped once 4096
/// indices have been collected. Each decoded value is range-checked against
/// `palette_len` before being kept.
///
/// # Errors
/// Returns `UnsupportedChunkFormat` when a value is out of palette range or
/// the words do not yield exactly 4096 indices.
fn unpack_palette_indices(
    words_bytes: &[u8],
    bits_per_block: u8,
    palette_len: usize,
) -> Result<Vec<u16>> {
    if bits_per_block == 0 {
        // Zero-width storage: every block references palette entry 0.
        return Ok(vec![0; 4096]);
    }
    let values_per_word = usize::from(32 / bits_per_block);
    let mask = (1_u32 << bits_per_block) - 1;
    let mut indices = Vec::with_capacity(4096);
    for word_bytes in words_bytes.chunks_exact(4) {
        let word = u32::from_le_bytes(
            word_bytes
                .try_into()
                .map_err(|_| BedrockWorldError::CorruptWorld("bad palette word".to_string()))?,
        );
        for item_index in 0..values_per_word {
            // Stop before validating: anything past 4096 is padding bits.
            if indices.len() == 4096 {
                break;
            }
            let value = ((word >> (item_index * usize::from(bits_per_block))) & mask) as u16;
            if palette_len > 0 && usize::from(value) >= palette_len {
                return Err(BedrockWorldError::UnsupportedChunkFormat(format!(
                    "palette index {value} exceeds palette length {palette_len}"
                )));
            }
            indices.push(value);
        }
    }
    if indices.len() != 4096 {
        return Err(BedrockWorldError::UnsupportedChunkFormat(format!(
            "palette produced {} block indices instead of 4096",
            indices.len()
        )));
    }
    Ok(indices)
}
1587
1588fn block_state_from_nbt(tag: &NbtTag) -> BlockState {
1589 let NbtTag::Compound(root) = tag else {
1590 return BlockState {
1591 name: "<invalid>".to_string(),
1592 states: BTreeMap::new(),
1593 version: None,
1594 };
1595 };
1596 let name = string_field(root, "name")
1597 .or_else(|| string_field(root, "Name"))
1598 .unwrap_or("<unknown>")
1599 .to_string();
1600 let states = match root.get("states").or_else(|| root.get("States")) {
1601 Some(NbtTag::Compound(values)) => values
1602 .iter()
1603 .map(|(key, value)| (key.clone(), value.clone()))
1604 .collect(),
1605 _ => BTreeMap::new(),
1606 };
1607 let version = int_field(root, "version").or_else(|| int_field(root, "Version"));
1608 BlockState {
1609 name,
1610 states,
1611 version,
1612 }
1613}
1614
1615fn string_field<'a>(root: &'a IndexMap<String, NbtTag>, key: &str) -> Option<&'a str> {
1616 match root.get(key) {
1617 Some(NbtTag::String(value)) => Some(value.as_str()),
1618 _ => None,
1619 }
1620}
1621
1622fn int_field(root: &IndexMap<String, NbtTag>, key: &str) -> Option<i32> {
1623 match root.get(key) {
1624 Some(NbtTag::Byte(value)) => Some(i32::from(*value)),
1625 Some(NbtTag::Short(value)) => Some(i32::from(*value)),
1626 Some(NbtTag::Int(value)) => Some(*value),
1627 _ => None,
1628 }
1629}
1630
1631fn read_i32_at(bytes: &[u8], offset: usize) -> Result<i32> {
1632 let slice: [u8; 4] = bytes
1633 .get(offset..offset + 4)
1634 .ok_or_else(|| {
1635 BedrockWorldError::UnsupportedChunkFormat("i32 field is truncated".to_string())
1636 })?
1637 .try_into()
1638 .map_err(|_| BedrockWorldError::UnsupportedChunkFormat("bad i32 field".to_string()))?;
1639 Ok(i32::from_le_bytes(slice))
1640}
1641
1642fn read_i32(bytes: &[u8], offset: usize) -> Result<i32> {
1643 let slice = bytes
1644 .get(offset..offset + 4)
1645 .ok_or_else(|| BedrockWorldError::InvalidKey("chunk key is truncated".to_string()))?;
1646 let slice: [u8; 4] = slice
1647 .try_into()
1648 .map_err(|_| BedrockWorldError::InvalidKey("invalid i32 field".to_string()))?;
1649 Ok(i32::from_le_bytes(slice))
1650}
1651
/// Strips `prefix` from `key` and decodes the remainder as a little-endian
/// `i64`; `None` when the prefix is absent or the suffix is not 8 bytes.
fn parse_i64_suffix(key: &[u8], prefix: &[u8]) -> Option<i64> {
    match key.strip_prefix(prefix) {
        Some(rest) => <[u8; 8]>::try_from(rest).ok().map(i64::from_le_bytes),
        None => None,
    }
}
1657
1658fn parse_chunk_pos_suffix(key: &[u8], prefix: &[u8]) -> Option<ChunkPos> {
1659 let suffix = key.strip_prefix(prefix)?;
1660 match suffix.len() {
1661 8 => Some(ChunkPos {
1662 x: read_i32_optional(suffix, 0)?,
1663 z: read_i32_optional(suffix, 4)?,
1664 dimension: Dimension::Overworld,
1665 }),
1666 12 => Some(ChunkPos {
1667 x: read_i32_optional(suffix, 0)?,
1668 z: read_i32_optional(suffix, 4)?,
1669 dimension: Dimension::from_id(read_i32_optional(suffix, 8)?),
1670 }),
1671 _ => None,
1672 }
1673}
1674
/// Reads a little-endian `i32` at `offset`, or `None` when out of bounds.
fn read_i32_optional(bytes: &[u8], offset: usize) -> Option<i32> {
    let end = offset.checked_add(4)?;
    let window = bytes.get(offset..end)?;
    Some(i32::from_le_bytes(window.try_into().ok()?))
}
1679
/// Reads the 4-bit value at `index` from a packed nibble array.
///
/// Even indices occupy a byte's low nibble, odd indices its high nibble.
fn nibble_at(bytes: &[u8], index: usize) -> Option<u8> {
    let packed = *bytes.get(index / 2)?;
    let nibble = if index % 2 == 0 {
        packed & 0x0f
    } else {
        packed >> 4
    };
    Some(nibble)
}
1688
/// Strips `prefix` from `key` and returns the remainder as a string when it
/// consists entirely of graphic ASCII characters (spaces excluded).
fn ascii_suffix(key: &[u8], prefix: &[u8]) -> Option<String> {
    let rest = key.strip_prefix(prefix)?;
    rest.iter()
        .all(u8::is_ascii_graphic)
        .then(|| String::from_utf8_lossy(rest).into_owned())
}
1696
/// Parses a `VILLAGE_…` LevelDB key into its dimension, UUID, and record
/// kind.
///
/// Two shapes are accepted:
/// * `VILLAGE_<uuid>_<KIND>` (3 underscore-separated parts, no dimension),
/// * `VILLAGE_<Dimension>_<uuid>_<KIND>` (4 parts; dimension must be one of
///   `Overworld`, `Nether`, or `TheEnd`).
///
/// Returns `None` for non-UTF-8 keys, other part counts, unknown dimension
/// names, or a UUID segment that is not exactly 36 characters. Unknown kind
/// suffixes are preserved as [`VillageRecordKind::Unknown`].
fn parse_village_key(key: &[u8]) -> Option<ParsedVillageKey> {
    let raw = std::str::from_utf8(key).ok()?;
    let parts = raw.split('_').collect::<Vec<_>>();
    if !matches!(parts.as_slice(), ["VILLAGE", ..]) || !matches!(parts.len(), 3 | 4) {
        return None;
    }
    // `tail` always ends up as the two-element [uuid, kind] slice.
    let (dimension, tail) = match parts.as_slice() {
        ["VILLAGE", dimension, _, _] => {
            let dimension = match *dimension {
                "Overworld" => Dimension::Overworld,
                "Nether" => Dimension::Nether,
                "TheEnd" => Dimension::End,
                _ => return None,
            };
            (Some(dimension), &parts[2..])
        }
        ["VILLAGE", _, _] => (None, &parts[1..]),
        _ => return None,
    };
    let uuid = tail[0];
    // Canonical hyphenated UUIDs are exactly 36 characters.
    if uuid.len() != 36 {
        return None;
    }
    let kind = match tail[1] {
        "INFO" => VillageRecordKind::Info,
        "DWELLERS" => VillageRecordKind::Dwellers,
        "PLAYERS" => VillageRecordKind::Players,
        "POI" => VillageRecordKind::Poi,
        _ => VillageRecordKind::Unknown,
    };
    Some(ParsedVillageKey {
        raw: raw.to_string(),
        dimension,
        uuid: uuid.to_string(),
        kind,
    })
}
1734
#[cfg(test)]
mod tests {
    // Key round-trips, record-tag tables, and subchunk/palette decoding are
    // exercised against byte layouts hand-built to match the Bedrock LevelDB
    // serialization.
    use super::*;
    use crate::nbt::serialize_root_nbt;

    #[test]
    fn chunk_key_roundtrips_overworld_and_subchunk() {
        let pos = ChunkPos {
            x: -3,
            z: 7,
            dimension: Dimension::Overworld,
        };
        let key = ChunkKey::subchunk(pos, -4);
        let encoded = key.encode();

        // Overworld subchunk key: 4 (x) + 4 (z) + 1 (tag) + 1 (subchunk y).
        assert_eq!(encoded.len(), 10);
        assert_eq!(ChunkKey::decode(&encoded).expect("decode"), key);
    }

    #[test]
    fn chunk_key_roundtrips_dimension_key() {
        let pos = ChunkPos {
            x: 1,
            z: 2,
            dimension: Dimension::Nether,
        };
        let key = ChunkKey::new(pos, ChunkRecordTag::Version);
        let encoded = key.encode();

        // Non-overworld key adds a 4-byte dimension id: 4 + 4 + 4 + 1.
        assert_eq!(encoded.len(), 13);
        assert_eq!(ChunkKey::decode(&encoded).expect("decode"), key);
    }

    #[test]
    fn bedrock_db_key_decodes_actor_and_digp_keys() {
        let mut actor_key = b"actorprefix".to_vec();
        actor_key.extend_from_slice(&42_i64.to_le_bytes());
        assert_eq!(
            BedrockDbKey::decode(&actor_key),
            BedrockDbKey::ActorPrefix { actor_id: 42 }
        );

        let mut digp_key = b"digp".to_vec();
        digp_key.extend_from_slice(&1_i32.to_le_bytes());
        digp_key.extend_from_slice(&(-2_i32).to_le_bytes());
        assert_eq!(
            BedrockDbKey::decode(&digp_key),
            BedrockDbKey::ActorDigest {
                pos: ChunkPos {
                    x: 1,
                    z: -2,
                    dimension: Dimension::Overworld
                }
            }
        );
    }

    #[test]
    fn bedrock_db_key_encodes_documented_global_shapes() {
        let map_id = MapRecordId::new("42").expect("map id");
        assert_eq!(map_id.storage_key().as_ref(), b"map_42");
        assert_eq!(
            MapRecordId::from_storage_key(b"map_42"),
            Some(map_id.clone())
        );
        assert_eq!(
            BedrockDbKey::Map("42".to_string()).encode().as_deref(),
            Some(&b"map_42"[..])
        );

        let pos = ChunkPos {
            x: 7,
            z: -8,
            dimension: Dimension::End,
        };
        let digest = ActorDigestKey::new(pos).storage_key();
        assert_eq!(
            ActorDigestKey::from_storage_key(&digest),
            Some(ActorDigestKey::new(pos))
        );
        assert_eq!(
            BedrockDbKey::Global(GlobalRecordKind::Scoreboard)
                .encode()
                .as_deref(),
            Some(&b"scoreboard"[..])
        );
        assert_eq!(
            BedrockDbKey::decode(b"TheEnd"),
            BedrockDbKey::Global(GlobalRecordKind::Dimension(Dimension::End))
        );
    }

    #[test]
    fn chunk_record_tags_align_with_bedrock_level_reference() {
        // Tag bytes mirror Mojang's documented per-chunk record identifiers.
        let expected = [
            (0x2b, ChunkRecordTag::Data3D),
            (0x2c, ChunkRecordTag::Version),
            (0x2d, ChunkRecordTag::Data2D),
            (0x2e, ChunkRecordTag::Data2DLegacy),
            (0x2f, ChunkRecordTag::SubChunkPrefix),
            (0x30, ChunkRecordTag::LegacyTerrain),
            (0x31, ChunkRecordTag::BlockEntity),
            (0x32, ChunkRecordTag::Entity),
            (0x33, ChunkRecordTag::PendingTicks),
            (0x34, ChunkRecordTag::BlockExtraData),
            (0x35, ChunkRecordTag::BiomeState),
            (0x36, ChunkRecordTag::FinalizedState),
            (0x37, ChunkRecordTag::ConversionData),
            (0x38, ChunkRecordTag::BorderBlocks),
            (0x39, ChunkRecordTag::HardcodedSpawners),
            (0x3a, ChunkRecordTag::RandomTicks),
            (0x3b, ChunkRecordTag::Checksums),
            (0x3c, ChunkRecordTag::GenerationSeed),
            (0x3d, ChunkRecordTag::GeneratedPreCavesAndCliffsBlending),
            (0x3e, ChunkRecordTag::BlendingBiomeHeight),
            (0x3f, ChunkRecordTag::MetaDataHash),
            (0x40, ChunkRecordTag::BlendingData),
            (0x41, ChunkRecordTag::ActorDigestVersion),
            (0x76, ChunkRecordTag::VersionOld),
        ];
        for (byte, tag) in expected {
            assert_eq!(ChunkRecordTag::from_byte(byte), tag);
            assert_eq!(tag.byte(), byte);
        }
    }

    #[test]
    fn bedrock_db_key_decodes_specific_ascii_keys_before_plain_keys() {
        assert_eq!(
            BedrockDbKey::decode(b"map_42"),
            BedrockDbKey::Map("42".to_string())
        );
        assert!(matches!(
            BedrockDbKey::decode(b"VILLAGE_12345678-1234-1234-1234-123456789abc_INFO"),
            BedrockDbKey::Village(_)
        ));
        assert!(matches!(
            BedrockDbKey::decode(b"LevelChunkMetaDataDictionary"),
            BedrockDbKey::Global(GlobalRecordKind::LevelChunkMetaDataDictionary)
        ));
    }

    #[test]
    fn chunk_pos_matches_bedrock_level_height_ranges() {
        let overworld = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        assert_eq!(overworld.y_range(ChunkVersion::Old), (0, 255));
        assert_eq!(overworld.y_range(ChunkVersion::New), (-64, 319));
        assert_eq!(overworld.subchunk_index_range(ChunkVersion::New), (-4, 19));
        assert_eq!(
            BlockPos {
                x: -1,
                y: 64,
                z: -1
            }
            .to_chunk_pos(Dimension::Overworld),
            ChunkPos {
                x: -1,
                z: -1,
                dimension: Dimension::Overworld
            }
        );
    }

    #[test]
    fn legacy_terrain_exposes_old_leveldb_arrays() {
        let mut bytes = vec![0; LEGACY_TERRAIN_VALUE_LEN];
        let block_index = LegacyTerrain::block_index(1, 2, 3).expect("block index");
        let column_index = 3 * 16 + 1;
        assert_eq!(block_index, 2_434);
        assert_eq!(LegacyTerrain::column_index(1, 3), Some(column_index));
        bytes[block_index] = 42;
        // Nibble arrays: low nibble is the even index within the pair.
        bytes[LEGACY_TERRAIN_BLOCK_DATA_OFFSET + block_index / 2] = 0xba;
        bytes[LEGACY_TERRAIN_SKY_LIGHT_OFFSET + block_index / 2] = 0xc7;
        bytes[LEGACY_TERRAIN_BLOCK_LIGHT_OFFSET + block_index / 2] = 0xd5;
        bytes[LEGACY_TERRAIN_HEIGHTMAP_OFFSET + column_index] = 99;
        bytes[LEGACY_TERRAIN_BIOME_OFFSET + column_index * 4
            ..LEGACY_TERRAIN_BIOME_OFFSET + column_index * 4 + 4]
            .copy_from_slice(&[12, 0xab, 0xcd, 0xef]);

        let terrain = LegacyTerrain::parse(Bytes::from(bytes)).expect("legacy terrain");

        assert_eq!(terrain.block_id_at(1, 2, 3), Some(42));
        assert_eq!(terrain.block_data_at(1, 2, 3), Some(0x0a));
        assert_eq!(terrain.sky_light_at(1, 2, 3), Some(0x07));
        assert_eq!(terrain.block_light_at(1, 2, 3), Some(0x05));
        assert_eq!(terrain.height_at(1, 3), Some(99));
        assert_eq!(terrain.biome_color_at(1, 3), Some(0x00ab_cdef));
        assert_eq!(
            terrain.biome_sample_at(1, 3),
            Some(LegacyBiomeSample {
                biome_id: 12,
                red: 0xab,
                green: 0xcd,
                blue: 0xef,
            })
        );
        assert!(LegacyTerrain::parse(Bytes::from_static(b"short")).is_err());
    }

    #[test]
    fn legacy_subchunk_decodes_block_ids_metadata_and_light() {
        let mut bytes = vec![0; LEGACY_SUBCHUNK_WITH_LIGHT_VALUE_LEN];
        bytes[0] = 2;
        let index = LegacySubChunk::block_index(4, 5, 6).expect("block index");
        assert_eq!(index, 1_125);
        bytes[1 + index] = 7;
        bytes[1 + LEGACY_SUBCHUNK_BLOCK_COUNT + index / 2] = 0xc0;
        bytes[1 + LEGACY_SUBCHUNK_BLOCK_COUNT + LEGACY_SUBCHUNK_BLOCK_COUNT / 2 + index / 2] = 0xe0;
        bytes[1 + LEGACY_SUBCHUNK_BLOCK_COUNT + LEGACY_SUBCHUNK_BLOCK_COUNT + index / 2] = 0xa0;

        let subchunk = parse_subchunk(0, Bytes::from(bytes)).expect("parse legacy subchunk");

        let SubChunkFormat::LegacySubChunk(legacy) = &subchunk.format else {
            panic!("expected legacy subchunk");
        };
        assert_eq!(legacy.version(), 2);
        assert_eq!(legacy.block_id_at(4, 5, 6), Some(7));
        assert_eq!(legacy.block_data_at(4, 5, 6), Some(0x0c));
        assert_eq!(legacy.sky_light_at(4, 5, 6), Some(0x0e));
        assert_eq!(legacy.block_light_at(4, 5, 6), Some(0x0a));
        assert_eq!(subchunk.legacy_block_id_at(4, 5, 6), Some(7));
    }

    #[test]
    fn paletted_subchunk_v1_uses_single_storage_without_count_byte() {
        // Rewrite a v8 payload as v1 by dropping the storage-count byte.
        let mut bytes = build_paletted_subchunk(8, None, 4, 4);
        bytes.remove(1);
        bytes[0] = 1;

        let subchunk = parse_subchunk(0, Bytes::from(bytes)).expect("parse v1 palette");

        let SubChunkFormat::Paletted { version, storages } = subchunk.format else {
            panic!("expected v1 paletted subchunk");
        };
        assert_eq!(version, 1);
        assert_eq!(storages.len(), 1);
        assert_eq!(storages[0].indices.as_ref().expect("indices").len(), 4096);
    }

    #[test]
    fn paletted_subchunk_decodes_supported_bits_per_block() {
        for bits_per_block in [0, 1, 2, 3, 4, 5, 6, 8, 16] {
            let bytes = build_paletted_subchunk(8, None, bits_per_block, 4);

            let subchunk = parse_subchunk(0, Bytes::from(bytes)).expect("parse");

            let SubChunkFormat::Paletted { storages, .. } = subchunk.format else {
                panic!("expected paletted subchunk for {bits_per_block} bits");
            };
            assert_eq!(storages.len(), 1);
            assert_eq!(storages[0].indices.as_ref().expect("indices").len(), 4096);
            assert_eq!(storages[0].counts.iter().sum::<u16>(), 4096);
        }
    }

    #[test]
    fn paletted_subchunk_counts_only_drops_indices_but_keeps_counts() {
        let bytes = build_paletted_subchunk(8, None, 4, 4);

        let subchunk =
            parse_subchunk_with_mode(0, Bytes::from(bytes), SubChunkDecodeMode::CountsOnly)
                .expect("parse");

        let SubChunkFormat::Paletted { storages, .. } = subchunk.format else {
            panic!("expected paletted subchunk");
        };
        assert!(storages[0].indices.is_none());
        assert_eq!(storages[0].counts.iter().sum::<u16>(), 4096);
    }

    #[test]
    fn paletted_subchunk_v9_accepts_embedded_y_byte() {
        let bytes = build_paletted_subchunk(9, Some(-4), 4, 4);

        let subchunk = parse_subchunk(-4, Bytes::from(bytes)).expect("parse");

        let SubChunkFormat::Paletted { storages, .. } = subchunk.format else {
            panic!("expected paletted v9 subchunk");
        };
        assert_eq!(storages[0].states.len(), 4);
    }

    #[test]
    fn paletted_subchunk_v9_accepts_positive_embedded_y_that_looks_like_storage_header() {
        // y=8 encodes to 0x08, which is also a plausible storage header byte.
        let bytes = build_paletted_subchunk(9, Some(8), 4, 4);

        let subchunk = parse_subchunk(8, Bytes::from(bytes)).expect("parse");

        let SubChunkFormat::Paletted { storages, .. } = &subchunk.format else {
            panic!("expected paletted v9 subchunk");
        };
        assert_eq!(storages[0].states.len(), 4);
        assert_eq!(
            subchunk
                .block_state_at(1, 2, 3)
                .expect("block state at x=1 y=2 z=3")
                .name,
            "minecraft:block_2"
        );
    }

    #[test]
    fn paletted_subchunk_v9_falls_back_to_legacy_layout_without_embedded_y() {
        let bytes = build_paletted_subchunk(9, None, 4, 4);

        let subchunk = parse_subchunk(8, Bytes::from(bytes)).expect("parse");

        let SubChunkFormat::Paletted { storages, .. } = &subchunk.format else {
            panic!("expected paletted v9 subchunk");
        };
        assert_eq!(storages[0].states.len(), 4);
        assert_eq!(
            subchunk
                .block_state_at(1, 2, 3)
                .expect("block state at x=1 y=2 z=3")
                .name,
            "minecraft:block_2"
        );
    }

    #[test]
    fn paletted_subchunk_rejects_trailing_bytes_after_storage_payload() {
        let mut bytes = build_paletted_subchunk(8, None, 4, 4);
        bytes.push(0);

        let subchunk = parse_subchunk(0, Bytes::from(bytes)).expect("parse");

        // Extra bytes must demote the payload to the raw fallback format.
        assert!(matches!(subchunk.format, SubChunkFormat::Raw { .. }));
    }

    #[test]
    fn block_state_lookup_uses_xz_plane_storage_order() {
        let bytes = build_paletted_subchunk(8, None, 4, 8);
        let subchunk = parse_subchunk(0, Bytes::from(bytes)).expect("parse");

        assert_eq!(block_storage_index(1, 2, 3), 306);
        let state = subchunk
            .block_state_at(1, 2, 3)
            .expect("block state at x=1 y=2 z=3");

        assert_eq!(
            state.name,
            format!("minecraft:block_{}", block_storage_index(1, 2, 3) % 8)
        );
    }

    #[test]
    fn visible_block_state_lookup_uses_top_non_air_storage() {
        let subchunk = parse_subchunk(
            0,
            Bytes::from(build_two_storage_paletted_subchunk(
                "minecraft:stone",
                "minecraft:copper_block",
            )),
        )
        .expect("parse layered subchunk");

        assert_eq!(
            subchunk
                .block_state_at(1, 2, 3)
                .expect("storage zero state")
                .name,
            "minecraft:stone"
        );
        let visible = subchunk
            .visible_block_states_at(1, 2, 3)
            .map(|state| state.name.as_str())
            .collect::<Vec<_>>();

        assert_eq!(visible, ["minecraft:copper_block", "minecraft:stone"]);
        assert_eq!(
            subchunk
                .visible_block_state_at(1, 2, 3)
                .expect("visible state")
                .name,
            "minecraft:copper_block"
        );
    }

    #[test]
    fn paletted_subchunk_v9_decodes_zero_bit_secondary_storage_without_palette_len() {
        let mut bytes = vec![9, 2, 4];
        append_test_palette_storage(
            &mut bytes,
            &["minecraft:air", "minecraft:stone"],
            |x, y, z| u16::from((x, y, z) == (4, 2, 4)),
        );
        append_zero_bit_palette_storage(&mut bytes, "minecraft:gold_block");

        let subchunk = parse_subchunk(4, Bytes::from(bytes)).expect("parse v9 layered subchunk");

        let SubChunkFormat::Paletted { storages, .. } = &subchunk.format else {
            panic!("expected paletted subchunk");
        };
        assert_eq!(storages.len(), 2);
        assert_eq!(storages[1].states.len(), 1);
        assert_eq!(storages[1].counts, [4096]);
        assert_eq!(
            subchunk
                .block_state_at(4, 2, 4)
                .expect("storage zero state")
                .name,
            "minecraft:stone"
        );
        assert_eq!(
            subchunk
                .visible_block_state_at(4, 2, 4)
                .expect("visible state")
                .name,
            "minecraft:gold_block"
        );
    }

    #[test]
    fn chunk_get_block_reads_decoded_paletted_subchunk() {
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        let key = ChunkKey::subchunk(pos, 0);
        let chunk = Chunk {
            pos,
            version: Some(8),
            records: vec![ChunkRecord {
                key,
                value: Bytes::from(build_paletted_subchunk(8, None, 4, 8)),
            }],
        };

        let state = chunk.get_block(1, 2, 3).expect("block state");

        assert_eq!(state.name, "minecraft:block_2");
    }

    /// Builds a serialized single-storage paletted subchunk whose block cells
    /// cycle through the palette (`block_index % palette_len`).
    fn build_paletted_subchunk(
        version: u8,
        embedded_y: Option<i8>,
        bits_per_block: u8,
        palette_len: usize,
    ) -> Vec<u8> {
        let palette_len = if bits_per_block == 0 { 1 } else { palette_len };
        let mut bytes = vec![version, 1];
        if let Some(y) = embedded_y {
            bytes.push(y as u8);
        }
        bytes.push(bits_per_block << 1);
        let values_per_word = if bits_per_block == 0 {
            4096
        } else {
            usize::from(32 / bits_per_block)
        };
        let mut words = vec![0_u32; packed_word_count(bits_per_block)];
        if bits_per_block != 0 {
            for block_index in 0..4096 {
                let value = u32::try_from(block_index % palette_len).expect("palette index");
                let word_index = block_index / values_per_word;
                let bit_offset = (block_index % values_per_word) * usize::from(bits_per_block);
                words[word_index] |= value << bit_offset;
            }
        }
        for word in words {
            bytes.extend_from_slice(&word.to_le_bytes());
        }
        if bits_per_block != 0 {
            bytes.extend_from_slice(
                &i32::try_from(palette_len)
                    .expect("palette length")
                    .to_le_bytes(),
            );
        }
        for index in 0..palette_len {
            let tag = NbtTag::Compound(IndexMap::from([
                (
                    "name".to_string(),
                    NbtTag::String(format!("minecraft:block_{index}")),
                ),
                ("states".to_string(), NbtTag::Compound(IndexMap::new())),
                ("version".to_string(), NbtTag::Int(1)),
            ]));
            bytes.extend_from_slice(&serialize_root_nbt(&tag).expect("serialize palette"));
        }
        bytes
    }

    /// Appends a bits-per-block=0 storage: a lone header byte plus a single
    /// palette entry and no index words or palette-length field.
    fn append_zero_bit_palette_storage(bytes: &mut Vec<u8>, name: &str) {
        bytes.push(0);
        let tag = NbtTag::Compound(IndexMap::from([
            ("name".to_string(), NbtTag::String(name.to_string())),
            ("states".to_string(), NbtTag::Compound(IndexMap::new())),
            ("version".to_string(), NbtTag::Int(1)),
        ]));
        bytes.extend_from_slice(&serialize_root_nbt(&tag).expect("serialize palette"));
    }

    /// Builds a v8 payload with two 1-bit storages that each place one
    /// non-air block at (1, 2, 3).
    fn build_two_storage_paletted_subchunk(lower_name: &str, upper_name: &str) -> Vec<u8> {
        let mut bytes = vec![8, 2];
        append_test_palette_storage(&mut bytes, &["minecraft:air", lower_name], |x, y, z| {
            u16::from((x, y, z) == (1, 2, 3))
        });
        append_test_palette_storage(&mut bytes, &["minecraft:air", upper_name], |x, y, z| {
            u16::from((x, y, z) == (1, 2, 3))
        });
        bytes
    }

    /// Appends a 1-bit storage whose per-cell values come from `value_at`.
    fn append_test_palette_storage(
        bytes: &mut Vec<u8>,
        palette: &[&str],
        value_at: impl Fn(u8, u8, u8) -> u16,
    ) {
        let bits_per_block = 1_u8;
        let values_per_word = usize::from(32 / bits_per_block);
        let mut words = vec![0_u32; packed_word_count(bits_per_block)];
        for local_z in 0..16_u8 {
            for local_x in 0..16_u8 {
                for local_y in 0..16_u8 {
                    let value = value_at(local_x, local_y, local_z);
                    if value == 0 {
                        continue;
                    }
                    let block_index = block_storage_index(local_x, local_y, local_z);
                    let word_index = block_index / values_per_word;
                    let bit_offset = (block_index % values_per_word) * usize::from(bits_per_block);
                    words[word_index] |= u32::from(value) << bit_offset;
                }
            }
        }
        bytes.push(bits_per_block << 1);
        for word in words {
            bytes.extend_from_slice(&word.to_le_bytes());
        }
        bytes.extend_from_slice(
            &i32::try_from(palette.len())
                .expect("test palette length")
                .to_le_bytes(),
        );
        for name in palette {
            let tag = NbtTag::Compound(IndexMap::from([
                ("name".to_string(), NbtTag::String((*name).to_string())),
                ("states".to_string(), NbtTag::Compound(IndexMap::new())),
                ("version".to_string(), NbtTag::Int(1)),
            ]));
            bytes.extend_from_slice(&serialize_root_nbt(&tag).expect("serialize palette"));
        }
    }
}