1use crate::chunk::{
10 ActorUid, BedrockDbKey, ChunkPos, ChunkRecord, ChunkRecordTag, ChunkVersion, GlobalRecordKind,
11 LegacyTerrain, MapRecordId, ParsedVillageKey, SubChunk, SubChunkDecodeMode, SubChunkFormat,
12 parse_subchunk_with_mode,
13};
14use crate::error::{BedrockWorldError, Result as WorldResult};
15use crate::level_dat::LevelDatDocument;
16use crate::nbt::{NbtTag, parse_consecutive_root_nbt, parse_root_nbt, serialize_root_nbt};
17use crate::storage::{StorageReadOptions, StorageVisitorControl, WorldStorage};
18use bytes::Bytes;
19use indexmap::IndexMap;
20use serde::{Deserialize, Serialize};
21use std::collections::{BTreeMap, BTreeSet, HashMap};
22
/// Upper bound on accepted biome-palette entries; guards decoding against
/// corrupt or hostile palette length fields.
/// NOTE(review): consumer is presumably biome-storage parsing beyond this
/// chunk (e.g. `parse_data3d`) — confirm.
const MAX_BIOME_PALETTE_LEN: usize = 4096;
24
/// A fully parsed Bedrock world: the `level.dat` document plus every DB
/// entry retained under the caller's [`WorldParseOptions`], and a report of
/// what was encountered during the scan.
#[derive(Debug, Clone, PartialEq)]
pub struct ParsedWorld {
    /// The `level.dat` document supplied by the caller.
    pub level_dat: LevelDatDocument,
    /// Parsed DB entries; empty unless the retention mode retains entries.
    pub entries: Vec<ParsedDbEntry>,
    /// Counters, warnings, and parse errors collected while scanning.
    pub report: WorldParseReport,
}
35
/// Controls how much of a world is parsed and how much of it is kept.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct WorldParseOptions {
    /// Which key categories (chunks, players, actors, ...) are parsed.
    pub categories: WorldParseCategories,
    /// How much parsed/raw data is retained in the result.
    pub retention: RetentionMode,
    /// How deeply sub-chunk block storages are decoded.
    pub subchunk_decode_mode: SubChunkDecodeMode,
    /// Whether actor records are resolved for actor digests.
    pub actor_resolution: ActorResolution,
}
48
49impl WorldParseOptions {
50 #[must_use]
51 pub const fn summary() -> Self {
53 Self {
54 categories: WorldParseCategories::all(),
55 retention: RetentionMode::Summary,
56 subchunk_decode_mode: SubChunkDecodeMode::CountsOnly,
57 actor_resolution: ActorResolution::ResolveReferenced,
58 }
59 }
60
61 #[must_use]
62 pub const fn structured() -> Self {
64 Self {
65 categories: WorldParseCategories::all(),
66 retention: RetentionMode::Structured,
67 subchunk_decode_mode: SubChunkDecodeMode::CountsOnly,
68 actor_resolution: ActorResolution::ResolveReferenced,
69 }
70 }
71
72 #[must_use]
73 pub const fn full_raw() -> Self {
75 Self {
76 categories: WorldParseCategories::all(),
77 retention: RetentionMode::FullRaw,
78 subchunk_decode_mode: SubChunkDecodeMode::FullIndices,
79 actor_resolution: ActorResolution::ResolveAll,
80 }
81 }
82
83 #[must_use]
84 pub const fn full() -> Self {
86 Self::full_raw()
87 }
88}
89
impl Default for WorldParseOptions {
    /// Defaults to the lightweight [`WorldParseOptions::summary`] preset.
    fn default() -> Self {
        Self::summary()
    }
}
95
/// Per-category toggles selecting which kinds of DB keys are parsed.
/// Keys in a disabled category are still counted, but never parsed.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct WorldParseCategories {
    /// Chunk-scoped records (sub-chunks, biome data, versions, ...).
    pub chunks: bool,
    /// Local and remote player records.
    pub players: bool,
    /// Actor-prefix records and actor digests.
    pub actors: bool,
    /// `map_*` records.
    pub maps: bool,
    /// Village records.
    pub villages: bool,
    /// Global records (plus NBT-bearing plain-string keys).
    pub globals: bool,
}
112
113impl WorldParseCategories {
114 #[must_use]
115 pub const fn all() -> Self {
117 Self {
118 chunks: true,
119 players: true,
120 actors: true,
121 maps: true,
122 villages: true,
123 globals: true,
124 }
125 }
126
127 #[must_use]
128 pub const fn keys_only() -> Self {
130 Self {
131 chunks: false,
132 players: false,
133 actors: false,
134 maps: false,
135 villages: false,
136 globals: false,
137 }
138 }
139}
140
/// How much of each parsed entry is kept in the output.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum RetentionMode {
    /// Keep only counters; no entries are retained.
    Summary,
    /// Keep parsed (structured) entries, drop raw payload bytes.
    Structured,
    /// Keep parsed entries and the raw payload bytes.
    FullRaw,
}
151
152impl RetentionMode {
153 #[must_use]
154 pub const fn retains_entries(self) -> bool {
156 matches!(self, Self::Structured | Self::FullRaw)
157 }
158
159 #[must_use]
160 pub const fn retains_raw(self) -> bool {
162 matches!(self, Self::FullRaw)
163 }
164}
165
/// How actor records are loaded when resolving actor digests.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum ActorResolution {
    /// Do not load actor records at all.
    None,
    /// Parse digests but do not resolve the referenced actors.
    DigestOnly,
    /// Preload only actors referenced by some actor digest.
    ResolveReferenced,
    /// Preload every `actorprefix` record in storage.
    ResolveAll,
}
178
/// Counters and diagnostics accumulated while parsing. Counts reflect only
/// what was visited/parsed under the caller's [`WorldParseOptions`].
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct WorldParseReport {
    /// Total DB entries visited.
    pub entry_count: usize,
    /// Distinct chunk columns seen (per x/z/dimension).
    pub chunk_count: usize,
    /// Sub-chunk records successfully parsed.
    pub subchunk_count: usize,
    /// Sub-chunks in the legacy (pre-paletted) sub-chunk format.
    pub legacy_subchunk_count: usize,
    /// LegacyTerrain records successfully parsed.
    pub legacy_terrain_count: usize,
    /// Block-storage layers across all parsed sub-chunks.
    pub subchunk_storage_count: usize,
    /// Palette states across all paletted storages.
    pub palette_state_count: usize,
    /// Entities parsed (incremented by entity parsers elsewhere).
    pub entity_count: usize,
    /// Block entities parsed (incremented elsewhere).
    pub block_entity_count: usize,
    /// Item stacks collected from players/entities/block entities.
    pub item_count: usize,
    /// Player records parsed.
    pub player_count: usize,
    /// Root NBT compounds parsed from global/other records.
    pub other_nbt_root_count: usize,
    /// Entries kept as raw bytes (unmodeled or failed to parse).
    pub raw_entry_count: usize,
    /// Actor digest records seen.
    pub actor_digest_count: usize,
    /// Digest actor ids that resolved to a stored actor record.
    pub actor_digest_hit_count: usize,
    /// Digest actor ids with no matching actor record.
    pub actor_digest_missing_count: usize,
    /// Biome (Data2D/Data3D) records parsed (incremented elsewhere).
    pub biome_record_count: usize,
    /// Biome storage layers across all biome records (incremented elsewhere).
    pub biome_layer_count: usize,
    /// Hardcoded spawn area entries parsed (incremented elsewhere).
    pub hardcoded_spawn_area_count: usize,
    /// Village records parsed.
    pub village_record_count: usize,
    /// Map records parsed.
    pub map_record_count: usize,
    /// Global records parsed.
    pub global_record_count: usize,
    /// Histogram of key kinds (summary string -> occurrences).
    pub key_kinds: BTreeMap<String, usize>,
    /// Non-fatal issues (records kept raw, odd lengths, ...).
    pub warnings: Vec<String>,
    /// Errors from failed NBT/record parses.
    pub parse_errors: Vec<String>,
}
233
/// One retained DB entry: its decoded key, the raw key bytes, the original
/// value length, and the parsed value.
#[derive(Debug, Clone, PartialEq)]
pub struct ParsedDbEntry {
    /// Decoded form of the LevelDB key.
    pub key: BedrockDbKey,
    /// The raw key bytes as stored in the DB.
    pub raw_key: Bytes,
    /// Length of the raw value (kept even when the bytes are dropped).
    pub raw_value_len: usize,
    /// Parsed value (or `Raw` fallback).
    pub value: ParsedDbValue,
}
246
/// Result of parsing all records belonging to a single chunk column.
#[derive(Debug, Clone, PartialEq)]
pub struct ParsedChunkData {
    /// The chunk column position.
    pub pos: ChunkPos,
    /// Parsed records of this chunk, in input order.
    pub records: Vec<ParsedChunkRecord>,
    /// Report scoped to just this chunk's records.
    pub report: WorldParseReport,
}
257
/// Parsed value of a DB entry, tagged by the kind of key it belongs to.
/// `Raw` is the fallback for unmodeled or unparseable payloads.
#[derive(Debug, Clone, PartialEq)]
pub enum ParsedDbValue {
    /// A chunk-scoped record.
    Chunk(ParsedChunkRecord),
    /// A player record.
    Player(ParsedPlayer),
    /// Entities stored under an `actorprefix` key.
    ActorEntities(Vec<ParsedEntity>),
    /// An actor digest (list of actor ids, optionally resolved).
    ActorDigest(ParsedActorDigest),
    /// A `map_*` record.
    MapData(ParsedMapData),
    /// A village record.
    VillageData(ParsedVillageData),
    /// A global record.
    GlobalData(ParsedGlobalData),
    /// Generic consecutive NBT roots.
    NbtRoots(Vec<NbtTag>),
    /// Raw bytes (may be empty unless `RetentionMode::FullRaw`).
    Raw(Bytes),
}
280
/// A single chunk-scoped record: its key plus the parsed value.
#[derive(Debug, Clone, PartialEq)]
pub struct ParsedChunkRecord {
    /// The chunk key (position, tag, optional sub-chunk Y).
    pub key: crate::ChunkKey,
    /// The parsed record value (or `Raw` fallback).
    pub value: ParsedChunkRecordValue,
}
289
/// Parsed value of one chunk-scoped record, keyed by record tag.
#[derive(Debug, Clone, PartialEq)]
pub enum ParsedChunkRecordValue {
    /// A decoded sub-chunk (SubChunkPrefix tag).
    SubChunk(SubChunk),
    /// A decoded legacy full-column terrain blob.
    LegacyTerrain(LegacyTerrain),
    /// Entities stored inline in the chunk.
    Entities(Vec<ParsedEntity>),
    /// Block entities stored inline in the chunk.
    BlockEntities(Vec<ParsedBlockEntity>),
    /// Pending tick NBT roots.
    PendingTicks(Vec<NbtTag>),
    /// Single-byte chunk version record.
    Version(u8),
    /// Little-endian i32 finalized-state record.
    FinalizedState(i32),
    /// Data2D/Data3D biome record.
    BiomeData(ParsedBiomeData),
    /// Hardcoded spawn area (HSA) entries.
    HardcodedSpawnAreas(Vec<ParsedHardcodedSpawnArea>),
    /// Raw bytes (may be empty unless `RetentionMode::FullRaw`).
    Raw(Bytes),
}
314
/// A parsed actor digest: the chunk it belongs to, the actor ids it lists,
/// and whichever referenced actors could be resolved to entities.
#[derive(Debug, Clone, PartialEq)]
pub struct ParsedActorDigest {
    /// Chunk position the digest belongs to.
    pub pos: crate::ChunkPos,
    /// Actor ids listed in the digest, in stored order.
    pub actor_ids: Vec<i64>,
    /// Entities resolved from the referenced actor records.
    pub entities: Vec<ParsedEntity>,
    /// Number of listed ids with no matching actor record.
    pub missing_actor_count: usize,
}
327
/// Where an actor record came from: inline in a chunk's entity record, or
/// from a standalone `actorprefix` DB entry.
#[derive(Debug, Clone, PartialEq)]
pub enum ActorSource {
    /// Stored inline under a chunk entity record.
    InlineChunk(crate::ChunkKey),
    /// Stored under its own `actorprefix` key.
    ActorPrefix(ActorUid),
}
336
/// One actor with its provenance, parsed entity form, and raw bytes.
#[derive(Debug, Clone, PartialEq)]
pub struct ActorRecord {
    /// The actor's unique id, when one is known.
    pub uid: Option<ActorUid>,
    /// Where the record was found.
    pub source: ActorSource,
    /// The parsed entity.
    pub entity: ParsedEntity,
    /// The raw serialized record.
    pub raw: Bytes,
}
349
/// Parsed biome record (Data2D or Data3D): a 16x16 height map plus one or
/// more biome storages.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParsedBiomeData {
    /// Chunk format generation the record was parsed as (Old = 2D, New = 3D).
    pub version: ChunkVersion,
    /// 256 height values (16x16 column grid).
    pub height_map: Vec<i16>,
    /// Biome storages; 3D records can carry one per sub-chunk section.
    pub storages: Vec<ParsedBiomeStorage>,
}
360
/// One paletted biome storage layer.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParsedBiomeStorage {
    /// Section Y for 3D storages; `None` marks a 2D (column) storage.
    pub y: Option<i32>,
    /// Biome ids referenced by the indices.
    pub palette: Vec<u32>,
    /// Per-cell palette indices; `None` when only counts were decoded.
    pub indices: Option<Vec<u16>>,
    /// Per-palette-entry occurrence counts.
    pub counts: Vec<u16>,
}
373
374impl ParsedBiomeStorage {
375 #[must_use]
376 pub fn palette_index_at(&self, local_x: u8, local_y: u8, local_z: u8) -> Option<u16> {
382 if local_x >= 16 || local_y >= 16 || local_z >= 16 {
383 return None;
384 }
385 let index = if self.y.is_some() {
386 crate::block_storage_index(local_x, local_y, local_z)
387 } else {
388 usize::from(local_z) * 16 + usize::from(local_x)
389 };
390 self.indices.as_ref()?.get(index).copied()
391 }
392
393 #[must_use]
394 pub fn biome_id_at(&self, local_x: u8, local_y: u8, local_z: u8) -> Option<u32> {
396 let palette_index = usize::from(self.palette_index_at(local_x, local_y, local_z)?);
397 self.palette.get(palette_index).copied()
398 }
399}
400
401impl HeightMap2d {
402 pub fn new(values: Vec<i16>) -> WorldResult<Self> {
409 if values.len() != 256 {
410 return Err(BedrockWorldError::Validation(format!(
411 "height map must contain 256 values, got {}",
412 values.len()
413 )));
414 }
415 Ok(Self { values })
416 }
417
418 pub fn from_bytes(bytes: &[u8]) -> WorldResult<Self> {
424 read_height_map(bytes)
425 .map(|values| Self { values })
426 .map_err(BedrockWorldError::Validation)
427 }
428
429 #[must_use]
430 pub fn to_bytes(&self) -> Vec<u8> {
432 let mut bytes = Vec::with_capacity(512);
433 for value in &self.values {
434 bytes.extend_from_slice(&value.to_le_bytes());
435 }
436 bytes
437 }
438
439 #[must_use]
440 pub fn get(&self, local_x: u8, local_z: u8) -> Option<i16> {
442 if local_x >= 16 || local_z >= 16 {
443 return None;
444 }
445 self.values
446 .get(usize::from(local_z) * 16 + usize::from(local_x))
447 .copied()
448 }
449}
450
451impl Biome2d {
452 pub fn new(height_map: Vec<i16>, biomes: Vec<u8>) -> WorldResult<Self> {
459 HeightMap2d::new(height_map.clone())?;
460 if biomes.len() != 256 {
461 return Err(BedrockWorldError::Validation(format!(
462 "2D biome map must contain 256 values, got {}",
463 biomes.len()
464 )));
465 }
466 Ok(Self { height_map, biomes })
467 }
468
469 pub fn parse(bytes: &[u8]) -> WorldResult<Self> {
475 if bytes.len() < 768 {
476 return Err(BedrockWorldError::Validation(format!(
477 "Data2D is too short: {}",
478 bytes.len()
479 )));
480 }
481 Self::new(
482 read_height_map(&bytes[..512]).map_err(BedrockWorldError::Validation)?,
483 bytes[512..768].to_vec(),
484 )
485 }
486
487 pub fn encode(&self) -> WorldResult<Vec<u8>> {
493 Self::new(self.height_map.clone(), self.biomes.clone())?;
494 let mut bytes = HeightMap2d {
495 values: self.height_map.clone(),
496 }
497 .to_bytes();
498 bytes.extend_from_slice(&self.biomes);
499 Ok(bytes)
500 }
501}
502
503impl Biome3d {
504 pub fn new(height_map: Vec<i16>, storages: Vec<ParsedBiomeStorage>) -> WorldResult<Self> {
510 HeightMap2d::new(height_map.clone())?;
511 Ok(Self {
512 height_map,
513 storages,
514 })
515 }
516
517 pub fn parse(bytes: &[u8]) -> WorldResult<Self> {
523 let parsed = parse_data3d(bytes).map_err(BedrockWorldError::Validation)?;
524 Self::new(parsed.height_map, parsed.storages)
525 }
526
527 pub fn encode(&self) -> WorldResult<Vec<u8>> {
533 Self::new(self.height_map.clone(), self.storages.clone())?;
534 let mut bytes = HeightMap2d {
535 values: self.height_map.clone(),
536 }
537 .to_bytes();
538 for storage in &self.storages {
539 bytes.extend_from_slice(&encode_biome_storage(storage)?);
540 }
541 Ok(bytes)
542 }
543}
544
impl HardcodedSpawnAreaKind {
    /// The on-disk byte for this HSA kind. `Unknown` round-trips the byte
    /// it was parsed from. NOTE(review): `Unknown(1..=3|5)` encodes to a
    /// byte that `from_byte` maps back to a named variant — by design,
    /// since both represent the same on-disk value.
    #[must_use]
    pub const fn byte(self) -> u8 {
        match self {
            Self::NetherFortress => 1,
            Self::SwampHut => 2,
            Self::OceanMonument => 3,
            Self::PillagerOutpost => 5,
            Self::Unknown(value) => value,
        }
    }

    /// Decodes an on-disk HSA kind byte; unrecognized values (including 4)
    /// are preserved as `Unknown`.
    #[must_use]
    pub const fn from_byte(value: u8) -> Self {
        match value {
            1 => Self::NetherFortress,
            2 => Self::SwampHut,
            3 => Self::OceanMonument,
            5 => Self::PillagerOutpost,
            other => Self::Unknown(other),
        }
    }
}
570
571impl ParsedHardcodedSpawnArea {
572 pub fn validate(&self) -> WorldResult<()> {
574 for axis in 0..3 {
575 if self.min[axis] > self.max[axis] {
576 return Err(BedrockWorldError::Validation(format!(
577 "HSA min axis {axis} exceeds max"
578 )));
579 }
580 }
581 Ok(())
582 }
583}
584
/// A hardcoded spawn area (HSA): an inclusive axis-aligned box plus kind.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParsedHardcodedSpawnArea {
    /// The spawn area kind.
    pub kind: HardcodedSpawnAreaKind,
    /// Minimum corner (x, y, z).
    pub min: [i32; 3],
    /// Maximum corner (x, y, z).
    pub max: [i32; 3],
}
595
/// Known hardcoded spawn area kinds; unrecognized bytes are preserved.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum HardcodedSpawnAreaKind {
    /// On-disk byte 1.
    NetherFortress,
    /// On-disk byte 2.
    SwampHut,
    /// On-disk byte 3.
    OceanMonument,
    /// On-disk byte 5.
    PillagerOutpost,
    /// Any other byte, kept verbatim for round-tripping.
    Unknown(u8),
}
610
/// A parsed `map_*` record: its NBT roots plus convenience extractions.
#[derive(Debug, Clone, PartialEq)]
pub struct ParsedMapData {
    /// The map id string (as it appears in the key).
    pub id: String,
    /// The typed map record id.
    pub record_id: MapRecordId,
    /// All root NBT compounds in the record.
    pub roots: Vec<NbtTag>,
    /// Well-known fields extracted from the first root.
    pub known_fields: MapKnownFields,
    /// Pixel buffer, when present and size-consistent.
    pub pixels: Option<MapPixels>,
    /// The raw record bytes.
    pub raw: Bytes,
}
627
/// Well-known map fields, each `None` when absent from the record. Field
/// names are matched case-tolerantly against several historical spellings.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct MapKnownFields {
    /// Dimension id the map belongs to.
    pub dimension: Option<i32>,
    /// Map center X (block coordinates).
    pub center_x: Option<i32>,
    /// Map center Z (block coordinates).
    pub center_z: Option<i32>,
    /// Zoom scale.
    pub scale: Option<i32>,
    /// Pixel width.
    pub width: Option<i32>,
    /// Pixel height.
    pub height: Option<i32>,
    /// Whether the map is locked.
    pub locked: Option<bool>,
}
646
/// A map's pixel buffer; `colors.len() == width * height` is guaranteed by
/// construction in `map_pixels`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MapPixels {
    /// Pixel width (defaults to 128 when the record omits it).
    pub width: u32,
    /// Pixel height (derived from the color count when omitted).
    pub height: u32,
    /// One color byte per pixel, row-major.
    pub colors: Vec<u8>,
}
660
/// A parsed village record: its key, NBT roots, and raw bytes.
#[derive(Debug, Clone, PartialEq)]
pub struct ParsedVillageData {
    /// The parsed village key.
    pub key: ParsedVillageKey,
    /// All root NBT compounds (empty when parsing failed).
    pub roots: Vec<NbtTag>,
    /// The raw record bytes.
    pub raw: Bytes,
}
671
/// A parsed global record (e.g. `portals`, scoreboard, ...).
#[derive(Debug, Clone, PartialEq)]
pub struct ParsedGlobalData {
    /// The record's key name.
    pub name: String,
    /// Typed record kind (`Other` when the name is unrecognized).
    pub kind: GlobalRecordKind,
    /// All root NBT compounds in the record.
    pub roots: Vec<NbtTag>,
    /// The raw record bytes.
    pub raw: Bytes,
}
684
/// A block entity together with its chunk and position in the chunk's
/// block-entity list.
#[derive(Debug, Clone, PartialEq)]
pub struct BlockEntityRecord {
    /// The chunk the block entity was stored in.
    pub chunk: ChunkPos,
    /// Index within the chunk's block-entity list.
    pub index: usize,
    /// The parsed block entity.
    pub entity: ParsedBlockEntity,
}
695
/// A 16x16 height map; always exactly 256 values when built via `new`.
#[derive(Debug, Clone, PartialEq)]
pub struct HeightMap2d {
    /// Row-major (z * 16 + x) height values.
    pub values: Vec<i16>,
}
702
/// A 2D (legacy Data2D) biome record: height map plus one biome byte per
/// column.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Biome2d {
    /// 256 height values (16x16).
    pub height_map: Vec<i16>,
    /// 256 biome ids (16x16), one per column.
    pub biomes: Vec<u8>,
}
711
/// A 3D (Data3D) biome record: height map plus per-section biome storages.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Biome3d {
    /// 256 height values (16x16).
    pub height_map: Vec<i16>,
    /// Paletted biome storages, typically one per sub-chunk section.
    pub storages: Vec<ParsedBiomeStorage>,
}
720
/// All raw records of one chunk, grouped by record tag.
#[derive(Debug, Clone, PartialEq)]
pub struct ChunkRecordSet {
    /// The chunk column position.
    pub pos: ChunkPos,
    /// Records keyed by tag; a tag can carry several records
    /// (e.g. one SubChunkPrefix record per Y section).
    pub records: BTreeMap<ChunkRecordTag, Vec<ChunkRecord>>,
}
729
/// A chunk with its parsed records plus whatever could not be modeled.
#[derive(Debug, Clone, PartialEq)]
pub struct ChunkModel {
    /// The chunk column position.
    pub pos: ChunkPos,
    /// Successfully parsed records.
    pub records: Vec<ParsedChunkRecord>,
    /// Records with unrecognized tags, kept untouched.
    pub unknown_records: Vec<ChunkRecord>,
}
740
/// A parsed player record with common fields lifted out of the NBT root.
#[derive(Debug, Clone, PartialEq)]
pub struct ParsedPlayer {
    /// The DB key the player was stored under (local or remote).
    pub key: BedrockDbKey,
    /// `UniqueID` field, when present.
    pub unique_id: Option<i64>,
    /// `Pos` field (x, y, z), when present.
    pub position: Option<[f64; 3]>,
    /// `DimensionId` field, when present.
    pub dimension_id: Option<i32>,
    /// Item stacks collected from the player's NBT.
    pub items: Vec<ItemStack>,
    /// The full root NBT.
    pub nbt: NbtTag,
}
757
/// A parsed entity with common fields lifted out of the NBT root.
#[derive(Debug, Clone, PartialEq)]
pub struct ParsedEntity {
    /// Entity identifier (e.g. `minecraft:zombie`), when present.
    pub identifier: Option<String>,
    /// Definition strings attached to the entity.
    pub definitions: Vec<String>,
    /// Unique id, when present.
    pub unique_id: Option<i64>,
    /// Position (x, y, z), when present.
    pub position: Option<[f64; 3]>,
    /// Rotation (yaw, pitch), when present.
    pub rotation: Option<[f32; 2]>,
    /// Motion vector, when present.
    pub motion: Option<[f32; 3]>,
    /// Item stacks collected from the entity's NBT.
    pub items: Vec<ItemStack>,
    /// The full root NBT.
    pub nbt: NbtTag,
}
778
/// A parsed block entity with common fields lifted out of the NBT root.
#[derive(Debug, Clone, PartialEq)]
pub struct ParsedBlockEntity {
    /// Block entity id (e.g. `Chest`), when present.
    pub id: Option<String>,
    /// Block position (x, y, z), when present.
    pub position: Option<[i32; 3]>,
    /// `isMovable` flag, when present.
    pub is_movable: Option<bool>,
    /// Custom display name, when present.
    pub custom_name: Option<String>,
    /// Item stacks contained in the block entity.
    pub items: Vec<ItemStack>,
    /// The full root NBT.
    pub nbt: NbtTag,
}
795
/// A parsed item stack with common fields lifted out of its NBT.
#[derive(Debug, Clone, PartialEq)]
pub struct ItemStack {
    /// Item name (e.g. `minecraft:stone`), when present.
    pub name: Option<String>,
    /// Stack count, when present.
    pub count: Option<i32>,
    /// Damage/metadata value, when present.
    pub damage: Option<i32>,
    /// `WasPickedUp` flag, when present.
    pub was_picked_up: Option<bool>,
    /// Whether the stack carries a `Block` compound.
    pub has_block: bool,
    /// Whether the stack carries a `tag` compound.
    pub has_tag: bool,
    /// The full item NBT.
    pub nbt: NbtTag,
}
814
815pub fn parse_world_storage(
817 level_dat: LevelDatDocument,
818 storage: &dyn WorldStorage,
819 options: WorldParseOptions,
820) -> WorldResult<ParsedWorld> {
821 let actor_records = load_actor_records(storage, options)?;
822
823 let mut report = WorldParseReport::default();
824 let mut chunk_positions = BTreeSet::new();
825 let mut parsed_entries = Vec::new();
826
827 storage.for_each_entry(StorageReadOptions::default(), &mut |raw_key, raw_value| {
828 report.entry_count += 1;
829 let key = BedrockDbKey::decode(raw_key);
830 *report.key_kinds.entry(key.summary_kind()).or_default() += 1;
831 if let BedrockDbKey::Chunk(chunk_key) = &key {
832 chunk_positions.insert(format!(
833 "{}:{}:{}",
834 chunk_key.pos.x,
835 chunk_key.pos.z,
836 chunk_key.pos.dimension.id()
837 ));
838 }
839 if options.retention.retains_entries() && should_parse_key(&key, options.categories) {
840 let value = parse_entry_value(&key, raw_value, &actor_records, &mut report, options);
841 parsed_entries.push(ParsedDbEntry {
842 key,
843 raw_key: Bytes::copy_from_slice(raw_key),
844 raw_value_len: raw_value.len(),
845 value,
846 });
847 }
848 Ok(StorageVisitorControl::Continue)
849 })?;
850
851 report.chunk_count = chunk_positions.len();
852 Ok(ParsedWorld {
853 level_dat,
854 entries: parsed_entries,
855 report,
856 })
857}
858
/// Parses a chunk's records at maximum fidelity
/// ([`WorldParseOptions::full`]). Convenience wrapper around
/// [`parse_chunk_records_with_options`].
#[must_use]
pub fn parse_chunk_records(pos: ChunkPos, records: Vec<ChunkRecord>) -> ParsedChunkData {
    parse_chunk_records_with_options(pos, records, WorldParseOptions::full())
}
864
865#[must_use]
866pub fn parse_chunk_records_with_options(
868 pos: ChunkPos,
869 records: Vec<ChunkRecord>,
870 options: WorldParseOptions,
871) -> ParsedChunkData {
872 let mut report = WorldParseReport::default();
873 let parsed_records = records
874 .into_iter()
875 .map(|record| {
876 *report
877 .key_kinds
878 .entry(format!("Chunk::{:?}", record.key.tag))
879 .or_default() += 1;
880 ParsedChunkRecord {
881 key: record.key.clone(),
882 value: parse_chunk_record_value(&record.key, &record.value, &mut report, options),
883 }
884 })
885 .collect::<Vec<_>>();
886 report.entry_count = parsed_records.len();
887 report.chunk_count = usize::from(!parsed_records.is_empty());
888 ParsedChunkData {
889 pos,
890 records: parsed_records,
891 report,
892 }
893}
894
895pub fn parse_global_storage_entries(
897 storage: &dyn WorldStorage,
898 options: WorldParseOptions,
899) -> WorldResult<Vec<ParsedDbEntry>> {
900 let actor_records = HashMap::new();
901 let mut report = WorldParseReport::default();
902 let mut entries = Vec::new();
903 storage.for_each_entry(StorageReadOptions::default(), &mut |raw_key, raw_value| {
904 let key = BedrockDbKey::decode(raw_key);
905 if matches!(
906 key,
907 BedrockDbKey::Chunk(_)
908 | BedrockDbKey::ActorPrefix { .. }
909 | BedrockDbKey::ActorDigest { .. }
910 ) {
911 return Ok(StorageVisitorControl::Continue);
912 }
913 let value = parse_entry_value(&key, raw_value, &actor_records, &mut report, options);
914 entries.push(ParsedDbEntry {
915 key,
916 raw_key: Bytes::copy_from_slice(raw_key),
917 raw_value_len: raw_value.len(),
918 value,
919 });
920 Ok(StorageVisitorControl::Continue)
921 })?;
922 Ok(entries)
923}
924
/// Preloads actor records according to the resolution policy:
/// - `None`/`DigestOnly`: nothing is loaded.
/// - `ResolveAll`: one full scan collecting every `actorprefix` value.
/// - `ResolveReferenced`: one scan collecting the actor ids referenced by
///   actor digests, then point lookups for just those actors.
fn load_actor_records(
    storage: &dyn WorldStorage,
    options: WorldParseOptions,
) -> WorldResult<HashMap<i64, Bytes>> {
    match options.actor_resolution {
        ActorResolution::None | ActorResolution::DigestOnly => Ok(HashMap::new()),
        ActorResolution::ResolveAll => {
            let mut actor_records = HashMap::new();
            storage.for_each_entry(StorageReadOptions::default(), &mut |key, value| {
                if let BedrockDbKey::ActorPrefix { actor_id } = BedrockDbKey::decode(key) {
                    actor_records.insert(actor_id, value.clone());
                }
                Ok(StorageVisitorControl::Continue)
            })?;
            Ok(actor_records)
        }
        ActorResolution::ResolveReferenced => {
            // First pass: gather the actor ids named by any digest.
            // Digest payloads are flat arrays of little-endian i64 ids;
            // a trailing partial chunk (< 8 bytes) is silently ignored here.
            let mut actor_ids = BTreeSet::new();
            storage.for_each_entry(StorageReadOptions::default(), &mut |key, value| {
                if matches!(BedrockDbKey::decode(key), BedrockDbKey::ActorDigest { .. }) {
                    for actor_id_bytes in value.chunks_exact(8) {
                        let mut actor_id_array = [0_u8; 8];
                        actor_id_array.copy_from_slice(actor_id_bytes);
                        actor_ids.insert(i64::from_le_bytes(actor_id_array));
                    }
                }
                Ok(StorageVisitorControl::Continue)
            })?;
            // Second pass: point lookups; ids with no record are skipped
            // (they will be counted as misses during digest parsing).
            let mut actor_records = HashMap::new();
            for actor_id in actor_ids {
                if let Some(value) = storage.get(&actor_prefix_key(actor_id))? {
                    actor_records.insert(actor_id, value);
                }
            }
            Ok(actor_records)
        }
    }
}
963
/// Builds the raw LevelDB key for an actor record: the literal
/// `actorprefix` followed by the actor id as little-endian i64.
fn actor_prefix_key(actor_id: i64) -> Vec<u8> {
    let id_bytes = actor_id.to_le_bytes();
    let mut key = Vec::with_capacity(b"actorprefix".len() + id_bytes.len());
    key.extend_from_slice(b"actorprefix");
    key.extend_from_slice(&id_bytes);
    key
}
970
/// Maps a decoded key to its category toggle. Plain-string keys are only
/// parsed (as globals) when `should_try_nbt_plain_key` recognizes them;
/// everything else falls through to `false` and is retained raw.
fn should_parse_key(key: &BedrockDbKey, categories: WorldParseCategories) -> bool {
    match key {
        BedrockDbKey::Chunk(_) => categories.chunks,
        BedrockDbKey::LocalPlayer | BedrockDbKey::RemotePlayer(_) => categories.players,
        BedrockDbKey::ActorPrefix { .. } | BedrockDbKey::ActorDigest { .. } => categories.actors,
        BedrockDbKey::Map(_) => categories.maps,
        BedrockDbKey::Village(_) => categories.villages,
        BedrockDbKey::Global(_) => categories.globals,
        BedrockDbKey::PlainString(name) if should_try_nbt_plain_key(name) => categories.globals,
        _ => false,
    }
}
983
/// Dispatches one DB entry to the matching parser based on its decoded
/// key, updating `report`. Keys without a dedicated parser are retained
/// raw (payload kept only under `RetentionMode::FullRaw`).
fn parse_entry_value(
    key: &BedrockDbKey,
    value: &Bytes,
    actor_records: &HashMap<i64, Bytes>,
    report: &mut WorldParseReport,
    options: WorldParseOptions,
) -> ParsedDbValue {
    match key {
        BedrockDbKey::Chunk(chunk_key) => ParsedDbValue::Chunk(ParsedChunkRecord {
            key: chunk_key.clone(),
            value: parse_chunk_record_value(chunk_key, value, report, options),
        }),
        BedrockDbKey::LocalPlayer | BedrockDbKey::RemotePlayer(_) => {
            parse_player_value(key.clone(), value, report)
        }
        BedrockDbKey::ActorPrefix { .. } => parse_actor_value(value, report),
        BedrockDbKey::ActorDigest { pos } => {
            parse_actor_digest_value(*pos, value, actor_records, report, options)
        }
        BedrockDbKey::Map(id) => parse_map_value(id, value, report),
        BedrockDbKey::Village(village) => parse_village_value(village, value, report),
        BedrockDbKey::Global(kind) => parse_global_value(&kind.name(), value, report),
        // Plain-string keys known to carry NBT are parsed as globals.
        BedrockDbKey::PlainString(name) if should_try_nbt_plain_key(name) => {
            parse_global_value(name, value, report)
        }
        // Known-but-unmodeled keys and everything else: keep raw.
        BedrockDbKey::GameFlatWorldLayers
        | BedrockDbKey::Portals
        | BedrockDbKey::SchedulerWt
        | BedrockDbKey::StructureTemplate(_)
        | BedrockDbKey::TickingArea(_)
        | BedrockDbKey::PlainString(_)
        | BedrockDbKey::Unknown(_) => {
            report.raw_entry_count += 1;
            raw_db_value(value, options)
        }
    }
}
1021
/// Dispatches one chunk-scoped record to the matching parser based on its
/// record tag, updating counters and warnings in `report`. Undecodable or
/// unmodeled payloads fall back to `Raw` (bytes retained only under
/// `RetentionMode::FullRaw` for the unmodeled-tag arm).
fn parse_chunk_record_value(
    chunk_key: &crate::ChunkKey,
    value: &Bytes,
    report: &mut WorldParseReport,
    options: WorldParseOptions,
) -> ParsedChunkRecordValue {
    match chunk_key.tag {
        ChunkRecordTag::SubChunkPrefix => {
            // A missing sub-chunk Y defaults to 0.
            // NOTE(review): confirm a SubChunkPrefix key without a Y byte
            // can actually occur; the fallback masks malformed keys.
            match parse_subchunk_with_mode(
                chunk_key.subchunk_y.unwrap_or_default(),
                value.clone(),
                options.subchunk_decode_mode,
            ) {
                Ok(subchunk) => {
                    report.subchunk_count += 1;
                    match &subchunk.format {
                        SubChunkFormat::Paletted { storages, .. } => {
                            report.subchunk_storage_count += storages.len();
                            report.palette_state_count += storages
                                .iter()
                                .map(|storage| storage.states.len())
                                .sum::<usize>();
                        }
                        SubChunkFormat::LegacySubChunk(_) => {
                            report.legacy_subchunk_count += 1;
                            report.subchunk_storage_count += 1;
                        }
                        // These formats contribute no storage statistics.
                        SubChunkFormat::LegacyTerrain
                        | SubChunkFormat::FixedArrayV1
                        | SubChunkFormat::Raw { .. } => {}
                    }
                    ParsedChunkRecordValue::SubChunk(subchunk)
                }
                Err(error) => {
                    // Keep the undecodable sub-chunk raw and record why.
                    report.warnings.push(format!(
                        "subchunk {:?} kept raw: {error}",
                        chunk_key.subchunk_y
                    ));
                    report.raw_entry_count += 1;
                    ParsedChunkRecordValue::Raw(value.clone())
                }
            }
        }
        ChunkRecordTag::BlockEntity => parse_block_entities(value, report),
        ChunkRecordTag::Entity => parse_entities_chunk_record(value, report),
        ChunkRecordTag::PendingTicks => parse_pending_ticks(value, report),
        // All version-flavored tags store a single leading byte.
        ChunkRecordTag::Version | ChunkRecordTag::VersionOld | ChunkRecordTag::LegacyVersion => {
            value.first().copied().map_or_else(
                || ParsedChunkRecordValue::Raw(value.clone()),
                ParsedChunkRecordValue::Version,
            )
        }
        // FinalizedState is a little-endian i32; kept raw if too short.
        ChunkRecordTag::FinalizedState => read_i32(value).map_or_else(
            || ParsedChunkRecordValue::Raw(value.clone()),
            ParsedChunkRecordValue::FinalizedState,
        ),
        ChunkRecordTag::Data3D => parse_biome_data(value, ChunkVersion::New, report),
        ChunkRecordTag::Data2D | ChunkRecordTag::Data2DLegacy => {
            parse_biome_data(value, ChunkVersion::Old, report)
        }
        ChunkRecordTag::HardcodedSpawners => parse_hardcoded_spawn_areas(value, report),
        ChunkRecordTag::LegacyTerrain => parse_legacy_terrain(value, report),
        // Known-but-unmodeled tags are retained raw (subject to retention).
        ChunkRecordTag::BlockExtraData
        | ChunkRecordTag::BiomeState
        | ChunkRecordTag::ConversionData
        | ChunkRecordTag::BorderBlocks
        | ChunkRecordTag::RandomTicks
        | ChunkRecordTag::Checksums
        | ChunkRecordTag::GenerationSeed
        | ChunkRecordTag::MetaDataHash
        | ChunkRecordTag::GeneratedPreCavesAndCliffsBlending
        | ChunkRecordTag::BlendingBiomeHeight
        | ChunkRecordTag::BlendingData
        | ChunkRecordTag::ActorDigestVersion
        | ChunkRecordTag::Unknown(_) => {
            report.raw_entry_count += 1;
            raw_chunk_value(value, options)
        }
    }
}
1102
1103fn parse_legacy_terrain(value: &Bytes, report: &mut WorldParseReport) -> ParsedChunkRecordValue {
1104 match LegacyTerrain::parse(value.clone()) {
1105 Ok(terrain) => {
1106 report.legacy_terrain_count += 1;
1107 ParsedChunkRecordValue::LegacyTerrain(terrain)
1108 }
1109 Err(error) => {
1110 report
1111 .warnings
1112 .push(format!("LegacyTerrain kept raw: {error}"));
1113 report.raw_entry_count += 1;
1114 ParsedChunkRecordValue::Raw(value.clone())
1115 }
1116 }
1117}
1118
/// Parses an actor digest (a flat array of little-endian i64 actor ids)
/// and resolves each id against the preloaded `actor_records` table,
/// collecting the resulting entities. Ids without a record are counted as
/// misses; a payload whose length is not a multiple of 8 is kept raw.
fn parse_actor_digest_value(
    pos: crate::ChunkPos,
    value: &Bytes,
    actor_records: &HashMap<i64, Bytes>,
    report: &mut WorldParseReport,
    options: WorldParseOptions,
) -> ParsedDbValue {
    report.actor_digest_count += 1;
    if !value.len().is_multiple_of(8) {
        report
            .warnings
            .push(format!("actor digest for {pos:?} kept raw: invalid length"));
        report.raw_entry_count += 1;
        return raw_db_value(value, options);
    }
    let mut actor_ids = Vec::with_capacity(value.len() / 8);
    let mut entities = Vec::new();
    let mut missing_actor_count = 0;
    for actor_id_bytes in value.chunks_exact(8) {
        let mut actor_id_array = [0_u8; 8];
        actor_id_array.copy_from_slice(actor_id_bytes);
        let actor_id = i64::from_le_bytes(actor_id_array);
        actor_ids.push(actor_id);
        // Unresolved ids (table empty under None/DigestOnly resolution, or
        // genuinely missing records) are tallied but not fatal.
        let Some(actor_value) = actor_records.get(&actor_id) else {
            missing_actor_count += 1;
            continue;
        };
        report.actor_digest_hit_count += 1;
        match parse_actor_value(actor_value, report) {
            ParsedDbValue::ActorEntities(mut parsed_entities) => {
                entities.append(&mut parsed_entities);
            }
            // parse_actor_value only returns ActorEntities or Raw; other
            // variants are listed to keep the match exhaustive and are
            // intentionally dropped here.
            ParsedDbValue::Raw(_)
            | ParsedDbValue::Chunk(_)
            | ParsedDbValue::Player(_)
            | ParsedDbValue::ActorDigest(_)
            | ParsedDbValue::MapData(_)
            | ParsedDbValue::VillageData(_)
            | ParsedDbValue::GlobalData(_)
            | ParsedDbValue::NbtRoots(_) => {}
        }
    }
    report.actor_digest_missing_count += missing_actor_count;
    ParsedDbValue::ActorDigest(ParsedActorDigest {
        pos,
        actor_ids,
        entities,
        missing_actor_count,
    })
}
1169
1170fn raw_db_value(value: &Bytes, options: WorldParseOptions) -> ParsedDbValue {
1171 if options.retention.retains_raw() {
1172 ParsedDbValue::Raw(value.clone())
1173 } else {
1174 ParsedDbValue::Raw(Bytes::new())
1175 }
1176}
1177
1178fn raw_chunk_value(value: &Bytes, options: WorldParseOptions) -> ParsedChunkRecordValue {
1179 if options.retention.retains_raw() {
1180 ParsedChunkRecordValue::Raw(value.clone())
1181 } else {
1182 ParsedChunkRecordValue::Raw(Bytes::new())
1183 }
1184}
1185
1186fn parse_map_value(id: &str, value: &Bytes, report: &mut WorldParseReport) -> ParsedDbValue {
1187 report.map_record_count += 1;
1188 let roots = parse_consecutive_root_nbt(value).unwrap_or_else(|error| {
1189 report.warnings.push(format!("map_{id} kept raw: {error}"));
1190 Vec::new()
1191 });
1192 let known_fields = map_known_fields(&roots);
1193 let pixels = map_pixels(&roots);
1194 ParsedDbValue::MapData(ParsedMapData {
1195 id: id.to_string(),
1196 record_id: MapRecordId::unchecked(id.to_string()),
1197 roots,
1198 known_fields,
1199 pixels,
1200 raw: value.clone(),
1201 })
1202}
1203
1204fn parse_village_value(
1205 key: &ParsedVillageKey,
1206 value: &Bytes,
1207 report: &mut WorldParseReport,
1208) -> ParsedDbValue {
1209 report.village_record_count += 1;
1210 let roots = parse_consecutive_root_nbt(value).unwrap_or_else(|error| {
1211 report
1212 .warnings
1213 .push(format!("{} kept raw: {error}", key.raw));
1214 Vec::new()
1215 });
1216 ParsedDbValue::VillageData(ParsedVillageData {
1217 key: key.clone(),
1218 roots,
1219 raw: value.clone(),
1220 })
1221}
1222
1223fn parse_global_value(name: &str, value: &Bytes, report: &mut WorldParseReport) -> ParsedDbValue {
1224 report.global_record_count += 1;
1225 match parse_consecutive_root_nbt(value) {
1226 Ok(tags) => {
1227 report.other_nbt_root_count += tags.len();
1228 ParsedDbValue::GlobalData(ParsedGlobalData {
1229 name: name.to_string(),
1230 kind: GlobalRecordKind::from_key(name.as_bytes())
1231 .unwrap_or_else(|| GlobalRecordKind::Other(name.to_string())),
1232 roots: tags,
1233 raw: value.clone(),
1234 })
1235 }
1236 Err(error) => {
1237 report.warnings.push(format!("{name} kept raw: {error}"));
1238 report.raw_entry_count += 1;
1239 ParsedDbValue::Raw(value.clone())
1240 }
1241 }
1242}
1243
1244pub fn parse_map_record(id: MapRecordId, value: Bytes) -> WorldResult<ParsedMapData> {
1246 let roots = parse_consecutive_root_nbt(&value)?;
1247 Ok(ParsedMapData {
1248 id: id.to_string(),
1249 record_id: id,
1250 known_fields: map_known_fields(&roots),
1251 pixels: map_pixels(&roots),
1252 roots,
1253 raw: value,
1254 })
1255}
1256
/// Re-serializes a map record from its NBT roots (the stored `raw` bytes
/// are ignored).
///
/// # Errors
/// Returns an error when `roots` is empty or serialization fails.
pub fn encode_map_record(record: &ParsedMapData) -> WorldResult<Bytes> {
    encode_consecutive_roots(&record.roots)
}
1261
/// Parses a standalone global record into [`ParsedGlobalData`], keeping
/// the caller-supplied kind/name and the raw bytes.
///
/// # Errors
/// Returns an error when the value is not valid consecutive root NBT.
pub fn parse_global_record(
    kind: GlobalRecordKind,
    name: String,
    value: Bytes,
) -> WorldResult<ParsedGlobalData> {
    let roots = parse_consecutive_root_nbt(&value)?;
    Ok(ParsedGlobalData {
        name,
        kind,
        roots,
        raw: value,
    })
}
1276
/// Re-serializes a global record from its NBT roots (the stored `raw`
/// bytes are ignored).
///
/// # Errors
/// Returns an error when `roots` is empty or serialization fails.
pub fn encode_global_record(record: &ParsedGlobalData) -> WorldResult<Bytes> {
    encode_consecutive_roots(&record.roots)
}
1281
1282pub fn parse_actor_digest_ids(value: &[u8]) -> WorldResult<Vec<ActorUid>> {
1284 if !value.len().is_multiple_of(8) {
1285 return Err(BedrockWorldError::CorruptWorld(format!(
1286 "actor digest value length {} is not a multiple of 8",
1287 value.len()
1288 )));
1289 }
1290 let mut actor_ids = Vec::with_capacity(value.len() / 8);
1291 for actor_id_bytes in value.chunks_exact(8) {
1292 let mut actor_id_array = [0_u8; 8];
1293 actor_id_array.copy_from_slice(actor_id_bytes);
1294 actor_ids.push(ActorUid(i64::from_le_bytes(actor_id_array)));
1295 }
1296 Ok(actor_ids)
1297}
1298
1299pub fn encode_actor_digest_ids(actor_ids: &[ActorUid]) -> Bytes {
1301 let mut bytes = Vec::with_capacity(actor_ids.len() * 8);
1302 for actor_id in actor_ids {
1303 bytes.extend_from_slice(&actor_id.0.to_le_bytes());
1304 }
1305 Bytes::from(bytes)
1306}
1307
/// Decodes a HardcodedSpawners payload into its spawn area entries.
///
/// # Errors
/// Returns a `Validation` error when the payload is malformed.
pub fn parse_hardcoded_spawn_area_records(
    value: &[u8],
) -> WorldResult<Vec<ParsedHardcodedSpawnArea>> {
    read_hardcoded_spawn_areas(value).map_err(BedrockWorldError::Validation)
}
1314
1315pub fn encode_hardcoded_spawn_area_records(
1317 areas: &[ParsedHardcodedSpawnArea],
1318) -> WorldResult<Bytes> {
1319 let count = i32::try_from(areas.len())
1320 .map_err(|_| BedrockWorldError::Validation("too many hardcoded spawn areas".to_string()))?;
1321 let mut bytes = Vec::with_capacity(4 + areas.len() * 25);
1322 bytes.extend_from_slice(&count.to_le_bytes());
1323 for area in areas {
1324 area.validate()?;
1325 for value in area.min {
1326 bytes.extend_from_slice(&value.to_le_bytes());
1327 }
1328 for value in area.max {
1329 bytes.extend_from_slice(&value.to_le_bytes());
1330 }
1331 bytes.push(area.kind.byte());
1332 }
1333 Ok(Bytes::from(bytes))
1334}
1335
1336pub fn encode_consecutive_roots(roots: &[NbtTag]) -> WorldResult<Bytes> {
1338 if roots.is_empty() {
1339 return Err(BedrockWorldError::Validation(
1340 "record must contain at least one root NBT compound".to_string(),
1341 ));
1342 }
1343 let mut bytes = Vec::new();
1344 for root in roots {
1345 bytes.extend_from_slice(&serialize_root_nbt(root)?);
1346 }
1347 Ok(Bytes::from(bytes))
1348}
1349
1350fn parse_player_value(
1351 key: BedrockDbKey,
1352 value: &Bytes,
1353 report: &mut WorldParseReport,
1354) -> ParsedDbValue {
1355 match parse_root_nbt(value) {
1356 Ok(nbt) => {
1357 let items = collect_item_stacks(&nbt);
1358 report.player_count += 1;
1359 report.item_count += items.len();
1360 let root = compound(&nbt);
1361 ParsedDbValue::Player(ParsedPlayer {
1362 key,
1363 unique_id: root.and_then(|root| long_field(root, "UniqueID")),
1364 position: root.and_then(|root| vec3_f64_field(root, "Pos")),
1365 dimension_id: root.and_then(|root| int_field(root, "DimensionId")),
1366 items,
1367 nbt,
1368 })
1369 }
1370 Err(error) => {
1371 report
1372 .parse_errors
1373 .push(format!("player NBT parse failed: {error}"));
1374 report.raw_entry_count += 1;
1375 ParsedDbValue::Raw(value.clone())
1376 }
1377 }
1378}
1379
1380fn map_known_fields(roots: &[NbtTag]) -> MapKnownFields {
1381 let Some(root) = roots.first().and_then(compound) else {
1382 return MapKnownFields::default();
1383 };
1384 MapKnownFields {
1385 dimension: int_field_any(
1386 root,
1387 &["dimension", "dimensionId", "Dimension", "DimensionId"],
1388 ),
1389 center_x: int_field_any(root, &["xCenter", "centerX", "CenterX"]),
1390 center_z: int_field_any(root, &["zCenter", "centerZ", "CenterZ"]),
1391 scale: int_field_any(root, &["scale", "Scale"]),
1392 width: int_field_any(root, &["width", "Width"]),
1393 height: int_field_any(root, &["height", "Height"]),
1394 locked: bool_field_any(root, &["locked", "Locked"]),
1395 }
1396}
1397
1398fn map_pixels(roots: &[NbtTag]) -> Option<MapPixels> {
1399 let root = roots.first().and_then(compound)?;
1400 let colors = byte_array_field_any(root, &["colors", "Colors", "pixels", "Pixels"])?;
1401 let width = int_field_any(root, &["width", "Width"])
1402 .and_then(|value| u32::try_from(value).ok())
1403 .unwrap_or(128);
1404 let height = int_field_any(root, &["height", "Height"])
1405 .and_then(|value| u32::try_from(value).ok())
1406 .unwrap_or_else(|| {
1407 u32::try_from(colors.len())
1408 .ok()
1409 .and_then(|len| len.checked_div(width))
1410 .unwrap_or(128)
1411 });
1412 let expected_len = usize::try_from(width)
1413 .ok()?
1414 .checked_mul(usize::try_from(height).ok()?)?;
1415 (colors.len() == expected_len).then_some(MapPixels {
1416 width,
1417 height,
1418 colors: colors.iter().map(|value| *value as u8).collect(),
1419 })
1420}
1421
1422fn int_field_any(root: &IndexMap<String, NbtTag>, names: &[&str]) -> Option<i32> {
1423 names.iter().find_map(|name| int_field(root, name))
1424}
1425
1426fn bool_field_any(root: &IndexMap<String, NbtTag>, names: &[&str]) -> Option<bool> {
1427 names.iter().find_map(|name| bool_field(root, name))
1428}
1429
1430fn byte_array_field_any<'a>(
1431 root: &'a IndexMap<String, NbtTag>,
1432 names: &[&str],
1433) -> Option<&'a [i8]> {
1434 for name in names {
1435 if let Some(NbtTag::ByteArray(values)) = root.get(*name) {
1436 return Some(values);
1437 }
1438 }
1439 None
1440}
1441
1442pub(crate) fn parse_actor_value(value: &Bytes, report: &mut WorldParseReport) -> ParsedDbValue {
1443 match parse_consecutive_root_nbt(value) {
1444 Ok(tags) => {
1445 let entities = tags
1446 .into_iter()
1447 .map(|tag| parse_entity_from_nbt(tag, report))
1448 .collect::<Vec<_>>();
1449 report.entity_count += entities.len();
1450 ParsedDbValue::ActorEntities(entities)
1451 }
1452 Err(error) => {
1453 report
1454 .warnings
1455 .push(format!("actorprefix kept raw: {error}"));
1456 report.raw_entry_count += 1;
1457 ParsedDbValue::Raw(value.clone())
1458 }
1459 }
1460}
1461
1462fn parse_biome_data(
1463 value: &Bytes,
1464 version: ChunkVersion,
1465 report: &mut WorldParseReport,
1466) -> ParsedChunkRecordValue {
1467 let result = match version {
1468 ChunkVersion::Old => parse_legacy_data2d(value),
1469 ChunkVersion::New => parse_data3d(value),
1470 };
1471 match result {
1472 Ok(data) => {
1473 report.biome_record_count += 1;
1474 report.biome_layer_count += data.storages.len();
1475 ParsedChunkRecordValue::BiomeData(data)
1476 }
1477 Err(error) => {
1478 report
1479 .warnings
1480 .push(format!("biome data kept raw: {error}"));
1481 report.raw_entry_count += 1;
1482 ParsedChunkRecordValue::Raw(value.clone())
1483 }
1484 }
1485}
1486
1487pub(crate) fn parse_legacy_data2d(value: &[u8]) -> Result<ParsedBiomeData, String> {
1488 if value.len() < 768 {
1489 return Err(format!("Data2D is too short: {}", value.len()));
1490 }
1491 let height_map = read_height_map(&value[..512])?;
1492 let indices = value[512..768]
1493 .iter()
1494 .map(|value| u16::from(*value))
1495 .collect::<Vec<_>>();
1496 let palette = (0..=255).collect::<Vec<_>>();
1497 let mut counts = vec![0_u16; palette.len()];
1498 for index in &indices {
1499 if let Some(count) = counts.get_mut(usize::from(*index)) {
1500 *count = count.saturating_add(1);
1501 }
1502 }
1503 Ok(ParsedBiomeData {
1504 version: ChunkVersion::Old,
1505 height_map,
1506 storages: vec![ParsedBiomeStorage {
1507 y: None,
1508 palette,
1509 indices: Some(indices),
1510 counts,
1511 }],
1512 })
1513}
1514
1515pub(crate) fn parse_data3d(value: &[u8]) -> Result<ParsedBiomeData, String> {
1516 if value.len() < 512 {
1517 return Err(format!("Data3D is too short: {}", value.len()));
1518 }
1519 let height_map = read_height_map(&value[..512])?;
1520 let mut offset = 512;
1521 let mut storages = Vec::new();
1522 let mut y = -64;
1523 while offset < value.len() {
1524 let (storage, consumed) = parse_subchunk_biomes(&value[offset..], y)?;
1525 if consumed == 0 {
1526 return Err("Data3D biome parser did not advance".to_string());
1527 }
1528 offset += consumed;
1529 y += 16;
1530 storages.push(storage);
1531 }
1532 Ok(ParsedBiomeData {
1533 version: ChunkVersion::New,
1534 height_map,
1535 storages,
1536 })
1537}
1538
1539fn parse_subchunk_biomes(
1540 value: &[u8],
1541 start_y: i32,
1542) -> Result<(ParsedBiomeStorage, usize), String> {
1543 let Some(header) = value.first().copied() else {
1544 return Err("missing biome storage header".to_string());
1545 };
1546 if header == 0xff {
1547 return Ok((
1548 ParsedBiomeStorage {
1549 y: Some(start_y),
1550 palette: vec![u32::MAX],
1551 indices: None,
1552 counts: vec![4096],
1553 },
1554 1,
1555 ));
1556 }
1557 let bits_per_biome = header >> 1;
1558 let mut offset = 1;
1559 let indices = if bits_per_biome == 0 {
1560 vec![0_u16; 4096]
1561 } else {
1562 let word_count = packed_word_count(bits_per_biome);
1563 let words_byte_len = word_count
1564 .checked_mul(4)
1565 .ok_or_else(|| "biome palette word count overflowed".to_string())?;
1566 let words = value
1567 .get(offset..offset + words_byte_len)
1568 .ok_or_else(|| "biome palette words are truncated".to_string())?;
1569 offset += words_byte_len;
1570 unpack_indices(words, bits_per_biome)?
1571 };
1572 let palette_len = if bits_per_biome == 0 {
1573 1
1574 } else {
1575 let len = read_i32_le(value, offset)?;
1576 offset += 4;
1577 usize::try_from(len).map_err(|_| format!("invalid biome palette length: {len}"))?
1578 };
1579 if palette_len > MAX_BIOME_PALETTE_LEN {
1580 return Err(format!(
1581 "biome palette length {palette_len} exceeds maximum {MAX_BIOME_PALETTE_LEN}"
1582 ));
1583 }
1584 let mut palette = Vec::with_capacity(palette_len);
1585 for _ in 0..palette_len {
1586 let id = read_i32_le(value, offset)?;
1587 offset += 4;
1588 palette.push(u32::try_from(id).unwrap_or(u32::MAX));
1589 }
1590 let mut counts = vec![0_u16; palette.len()];
1591 for index in &indices {
1592 if let Some(count) = counts.get_mut(usize::from(*index)) {
1593 *count = count.saturating_add(1);
1594 }
1595 }
1596 Ok((
1597 ParsedBiomeStorage {
1598 y: Some(start_y),
1599 palette,
1600 indices: Some(indices),
1601 counts,
1602 },
1603 offset,
1604 ))
1605}
1606
/// Decodes the 512-byte height-map prefix into 256 little-endian `i16` values.
///
/// # Errors
/// Fails unless the slice is exactly 512 bytes long.
fn read_height_map(value: &[u8]) -> Result<Vec<i16>, String> {
    if value.len() != 512 {
        return Err(format!("height map must be 512 bytes, got {}", value.len()));
    }
    let mut heights = Vec::with_capacity(256);
    for pair in value.chunks_exact(2) {
        heights.push(i16::from_le_bytes([pair[0], pair[1]]));
    }
    Ok(heights)
}
1616
1617fn encode_biome_storage(storage: &ParsedBiomeStorage) -> WorldResult<Vec<u8>> {
1618 if storage.palette.is_empty() {
1619 return Err(BedrockWorldError::Validation(
1620 "biome storage palette cannot be empty".to_string(),
1621 ));
1622 }
1623 if storage.palette.len() == 1
1624 && storage
1625 .indices
1626 .as_ref()
1627 .is_none_or(|indices| indices.len() == 4096 && indices.iter().all(|index| *index == 0))
1628 {
1629 let mut bytes = Vec::with_capacity(5);
1630 bytes.push(0);
1631 let id = i32::try_from(storage.palette[0])
1632 .map_err(|_| BedrockWorldError::Validation("biome id does not fit i32".to_string()))?;
1633 bytes.extend_from_slice(&id.to_le_bytes());
1634 return Ok(bytes);
1635 }
1636 let indices = storage.indices.as_ref().ok_or_else(|| {
1637 BedrockWorldError::Validation("non-uniform biome storage requires indices".to_string())
1638 })?;
1639 if indices.len() != 4096 {
1640 return Err(BedrockWorldError::Validation(format!(
1641 "biome storage requires 4096 indices, got {}",
1642 indices.len()
1643 )));
1644 }
1645 let bits = bits_per_palette_index(storage.palette.len())?;
1646 let mut bytes = Vec::new();
1647 bytes.push(bits << 1);
1648 bytes.extend_from_slice(&pack_indices(indices, bits)?);
1649 let palette_len = i32::try_from(storage.palette.len()).map_err(|_| {
1650 BedrockWorldError::Validation("biome palette length does not fit i32".to_string())
1651 })?;
1652 bytes.extend_from_slice(&palette_len.to_le_bytes());
1653 for id in &storage.palette {
1654 let id = i32::try_from(*id)
1655 .map_err(|_| BedrockWorldError::Validation("biome id does not fit i32".to_string()))?;
1656 bytes.extend_from_slice(&id.to_le_bytes());
1657 }
1658 Ok(bytes)
1659}
1660
/// Number of 32-bit words needed to pack 4096 values at `bits_per_value` bits
/// each, with values never straddling a word boundary.
///
/// Returns 0 for `bits_per_value == 0` (uniform storage: nothing is packed)
/// and for widths above 32, which cannot fit even one value per word. The
/// original code computed `32 / bits_per_value == 0` for such widths and then
/// panicked in `div_ceil` on a division by zero; callers now see the empty
/// word count and report the width as unsupported instead.
fn packed_word_count(bits_per_value: u8) -> usize {
    if bits_per_value == 0 || bits_per_value > 32 {
        return 0;
    }
    let values_per_word = usize::from(32 / bits_per_value);
    4096_usize.div_ceil(values_per_word)
}
1668
1669fn bits_per_palette_index(palette_len: usize) -> WorldResult<u8> {
1670 let max_index = palette_len.saturating_sub(1);
1671 for bits in [1_u8, 2, 3, 4, 5, 6, 8, 16] {
1672 if max_index < (1_usize << bits) {
1673 return Ok(bits);
1674 }
1675 }
1676 Err(BedrockWorldError::Validation(format!(
1677 "biome palette length {palette_len} exceeds encodable range"
1678 )))
1679}
1680
1681fn pack_indices(indices: &[u16], bits_per_value: u8) -> WorldResult<Vec<u8>> {
1682 if !matches!(bits_per_value, 1 | 2 | 3 | 4 | 5 | 6 | 8 | 16) {
1683 return Err(BedrockWorldError::Validation(format!(
1684 "unsupported biome bits-per-value: {bits_per_value}"
1685 )));
1686 }
1687 let values_per_word = usize::from(32 / bits_per_value);
1688 let mask = (1_u32 << bits_per_value) - 1;
1689 let mut bytes = Vec::with_capacity(packed_word_count(bits_per_value) * 4);
1690 for chunk in indices.chunks(values_per_word) {
1691 let mut word = 0_u32;
1692 for (offset, value) in chunk.iter().enumerate() {
1693 let value = u32::from(*value);
1694 if value > mask {
1695 return Err(BedrockWorldError::Validation(format!(
1696 "biome index {value} exceeds {bits_per_value}-bit palette"
1697 )));
1698 }
1699 word |= value << (offset * usize::from(bits_per_value));
1700 }
1701 bytes.extend_from_slice(&word.to_le_bytes());
1702 }
1703 Ok(bytes)
1704}
1705
/// Unpacks exactly 4096 palette indices from little-endian 32-bit words.
/// A zero bit width means a uniform storage (all indices zero).
///
/// # Errors
/// Fails for an unsupported bit width or when the words do not yield 4096
/// values.
fn unpack_indices(words_bytes: &[u8], bits_per_value: u8) -> Result<Vec<u16>, String> {
    if bits_per_value == 0 {
        return Ok(vec![0; 4096]);
    }
    if !matches!(bits_per_value, 1 | 2 | 3 | 4 | 5 | 6 | 8 | 16) {
        return Err(format!(
            "unsupported biome bits-per-value: {bits_per_value}"
        ));
    }
    let bits = usize::from(bits_per_value);
    let values_per_word = 32 / bits;
    let mask = (1_u32 << bits_per_value) - 1;
    let mut indices = Vec::with_capacity(4096);
    'words: for word_bytes in words_bytes.chunks_exact(4) {
        let word = u32::from_le_bytes([word_bytes[0], word_bytes[1], word_bytes[2], word_bytes[3]]);
        for slot in 0..values_per_word {
            if indices.len() == 4096 {
                break 'words;
            }
            indices.push(((word >> (slot * bits)) & mask) as u16);
        }
    }
    if indices.len() == 4096 {
        Ok(indices)
    } else {
        Err(format!("decoded {} biome indices", indices.len()))
    }
}
1732
/// Reads a little-endian `i32` at `offset`, erroring on truncated input.
fn read_i32_le(value: &[u8], offset: usize) -> Result<i32, String> {
    match value.get(offset..).and_then(|tail| tail.get(..4)) {
        Some(bytes) => Ok(i32::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]])),
        None => Err("i32 field is truncated".to_string()),
    }
}
1739
1740fn parse_hardcoded_spawn_areas(
1741 value: &Bytes,
1742 report: &mut WorldParseReport,
1743) -> ParsedChunkRecordValue {
1744 match read_hardcoded_spawn_areas(value) {
1745 Ok(areas) => {
1746 report.hardcoded_spawn_area_count += areas.len();
1747 ParsedChunkRecordValue::HardcodedSpawnAreas(areas)
1748 }
1749 Err(error) => {
1750 report
1751 .warnings
1752 .push(format!("hardcoded spawn areas kept raw: {error}"));
1753 report.raw_entry_count += 1;
1754 ParsedChunkRecordValue::Raw(value.clone())
1755 }
1756 }
1757}
1758
1759fn read_hardcoded_spawn_areas(value: &[u8]) -> Result<Vec<ParsedHardcodedSpawnArea>, String> {
1760 let count = usize::try_from(read_i32_le(value, 0)?)
1761 .map_err(|_| "hardcoded spawn area count cannot be negative".to_string())?;
1762 let expected_len = 4 + count * 25;
1763 if value.len() != expected_len {
1764 return Err(format!(
1765 "expected {expected_len} bytes, got {}",
1766 value.len()
1767 ));
1768 }
1769 let mut areas = Vec::with_capacity(count);
1770 for index in 0..count {
1771 let offset = 4 + index * 25;
1772 areas.push(ParsedHardcodedSpawnArea {
1773 kind: match value[offset + 24] {
1774 1 => HardcodedSpawnAreaKind::NetherFortress,
1775 2 => HardcodedSpawnAreaKind::SwampHut,
1776 3 => HardcodedSpawnAreaKind::OceanMonument,
1777 5 => HardcodedSpawnAreaKind::PillagerOutpost,
1778 value => HardcodedSpawnAreaKind::Unknown(value),
1779 },
1780 min: [
1781 read_i32_le(value, offset)?,
1782 read_i32_le(value, offset + 4)?,
1783 read_i32_le(value, offset + 8)?,
1784 ],
1785 max: [
1786 read_i32_le(value, offset + 12)?,
1787 read_i32_le(value, offset + 16)?,
1788 read_i32_le(value, offset + 20)?,
1789 ],
1790 });
1791 }
1792 Ok(areas)
1793}
1794
1795pub(crate) fn parse_block_entities(
1796 value: &Bytes,
1797 report: &mut WorldParseReport,
1798) -> ParsedChunkRecordValue {
1799 match parse_consecutive_root_nbt(value) {
1800 Ok(tags) => {
1801 let block_entities = tags
1802 .into_iter()
1803 .map(|tag| parse_block_entity_from_nbt(tag, report))
1804 .collect::<Vec<_>>();
1805 report.block_entity_count += block_entities.len();
1806 ParsedChunkRecordValue::BlockEntities(block_entities)
1807 }
1808 Err(error) => {
1809 report
1810 .warnings
1811 .push(format!("block entities kept raw: {error}"));
1812 report.raw_entry_count += 1;
1813 ParsedChunkRecordValue::Raw(value.clone())
1814 }
1815 }
1816}
1817
1818fn parse_entities_chunk_record(
1819 value: &Bytes,
1820 report: &mut WorldParseReport,
1821) -> ParsedChunkRecordValue {
1822 match parse_consecutive_root_nbt(value) {
1823 Ok(tags) => {
1824 let entities = tags
1825 .into_iter()
1826 .map(|tag| parse_entity_from_nbt(tag, report))
1827 .collect::<Vec<_>>();
1828 report.entity_count += entities.len();
1829 ParsedChunkRecordValue::Entities(entities)
1830 }
1831 Err(error) => {
1832 report.warnings.push(format!("entities kept raw: {error}"));
1833 report.raw_entry_count += 1;
1834 ParsedChunkRecordValue::Raw(value.clone())
1835 }
1836 }
1837}
1838
1839pub(crate) fn parse_entities_from_value(
1840 value: &Bytes,
1841 report: &mut WorldParseReport,
1842) -> Vec<ParsedEntity> {
1843 match parse_actor_value(value, report) {
1844 ParsedDbValue::ActorEntities(entities) => entities,
1845 _ => Vec::new(),
1846 }
1847}
1848
1849pub(crate) fn parse_block_entities_from_value(
1850 value: &Bytes,
1851 report: &mut WorldParseReport,
1852) -> Vec<ParsedBlockEntity> {
1853 match parse_block_entities(value, report) {
1854 ParsedChunkRecordValue::BlockEntities(block_entities) => block_entities,
1855 _ => Vec::new(),
1856 }
1857}
1858
1859fn parse_pending_ticks(value: &Bytes, report: &mut WorldParseReport) -> ParsedChunkRecordValue {
1860 match parse_consecutive_root_nbt(value) {
1861 Ok(tags) => ParsedChunkRecordValue::PendingTicks(tags),
1862 Err(error) => {
1863 report
1864 .warnings
1865 .push(format!("pending ticks kept raw: {error}"));
1866 report.raw_entry_count += 1;
1867 ParsedChunkRecordValue::Raw(value.clone())
1868 }
1869 }
1870}
1871
1872fn parse_entity_from_nbt(nbt: NbtTag, report: &mut WorldParseReport) -> ParsedEntity {
1873 let items = collect_item_stacks(&nbt);
1874 report.item_count += items.len();
1875 let root = compound(&nbt);
1876 ParsedEntity {
1877 identifier: root.and_then(entity_identifier),
1878 definitions: root.map_or_else(Vec::new, entity_definitions),
1879 unique_id: root.and_then(|root| long_field(root, "UniqueID")),
1880 position: root.and_then(|root| vec3_f64_field(root, "Pos")),
1881 rotation: root.and_then(|root| vec2_f32_field(root, "Rotation")),
1882 motion: root.and_then(|root| vec3_f32_field(root, "Motion")),
1883 items,
1884 nbt,
1885 }
1886}
1887
1888fn parse_block_entity_from_nbt(nbt: NbtTag, report: &mut WorldParseReport) -> ParsedBlockEntity {
1889 let items = collect_item_stacks(&nbt);
1890 report.item_count += items.len();
1891 let root = compound(&nbt);
1892 ParsedBlockEntity {
1893 id: root
1894 .and_then(|root| string_field(root, "id"))
1895 .map(ToString::to_string),
1896 position: root.and_then(|root| {
1897 Some([
1898 int_field(root, "x")?,
1899 int_field(root, "y")?,
1900 int_field(root, "z")?,
1901 ])
1902 }),
1903 is_movable: root.and_then(|root| bool_field(root, "isMovable")),
1904 custom_name: root
1905 .and_then(|root| string_field(root, "CustomName"))
1906 .map(ToString::to_string),
1907 items,
1908 nbt,
1909 }
1910}
1911
1912pub(crate) fn collect_item_stacks(tag: &NbtTag) -> Vec<ItemStack> {
1913 let mut items = Vec::new();
1914 collect_item_stacks_inner(tag, &mut items);
1915 items
1916}
1917
1918fn collect_item_stacks_inner(tag: &NbtTag, items: &mut Vec<ItemStack>) {
1919 match tag {
1920 NbtTag::Compound(root) => {
1921 if looks_like_item_stack(root) {
1922 items.push(ItemStack {
1923 name: string_field(root, "Name")
1924 .or_else(|| string_field(root, "name"))
1925 .map(ToString::to_string),
1926 count: int_field(root, "Count"),
1927 damage: int_field(root, "Damage").or_else(|| int_field(root, "Aux")),
1928 was_picked_up: bool_field(root, "WasPickedUp"),
1929 has_block: root.contains_key("Block"),
1930 has_tag: root.contains_key("tag"),
1931 nbt: tag.clone(),
1932 });
1933 }
1934 for value in root.values() {
1935 collect_item_stacks_inner(value, items);
1936 }
1937 }
1938 NbtTag::List(values) => {
1939 for value in values {
1940 collect_item_stacks_inner(value, items);
1941 }
1942 }
1943 _ => {}
1944 }
1945}
1946
1947fn looks_like_item_stack(root: &IndexMap<String, NbtTag>) -> bool {
1948 (root.contains_key("Name") || root.contains_key("name")) && root.contains_key("Count")
1949}
1950
/// Whether a plain (non-prefixed) DB key name is expected to hold root NBT.
fn should_try_nbt_plain_key(name: &str) -> bool {
    const PLAIN_NBT_KEYS: [&str; 11] = [
        "AutonomousEntities",
        "autonomousentities",
        "BiomeData",
        "LevelChunkMetaDataDictionary",
        "LocalPlayer",
        "Nether",
        "Overworld",
        "TheEnd",
        "WorldClocks",
        "mobevents",
        "scoreboard",
    ];
    PLAIN_NBT_KEYS.contains(&name)
}
1967
1968fn entity_identifier(root: &IndexMap<String, NbtTag>) -> Option<String> {
1969 string_field(root, "identifier")
1970 .or_else(|| string_field(root, "Identifier"))
1971 .or_else(|| string_field(root, "id"))
1972 .map(ToString::to_string)
1973}
1974
1975fn entity_definitions(root: &IndexMap<String, NbtTag>) -> Vec<String> {
1976 match root.get("definitions").or_else(|| root.get("Definitions")) {
1977 Some(NbtTag::List(values)) => values
1978 .iter()
1979 .filter_map(|value| match value {
1980 NbtTag::String(value) => Some(value.clone()),
1981 _ => None,
1982 })
1983 .collect(),
1984 _ => Vec::new(),
1985 }
1986}
1987
1988fn compound(tag: &NbtTag) -> Option<&IndexMap<String, NbtTag>> {
1989 match tag {
1990 NbtTag::Compound(root) => Some(root),
1991 _ => None,
1992 }
1993}
1994
1995fn string_field<'a>(root: &'a IndexMap<String, NbtTag>, key: &str) -> Option<&'a str> {
1996 match root.get(key) {
1997 Some(NbtTag::String(value)) => Some(value.as_str()),
1998 _ => None,
1999 }
2000}
2001
2002fn bool_field(root: &IndexMap<String, NbtTag>, key: &str) -> Option<bool> {
2003 match root.get(key) {
2004 Some(NbtTag::Byte(value)) => Some(*value != 0),
2005 Some(NbtTag::Short(value)) => Some(*value != 0),
2006 Some(NbtTag::Int(value)) => Some(*value != 0),
2007 _ => None,
2008 }
2009}
2010
2011fn int_field(root: &IndexMap<String, NbtTag>, key: &str) -> Option<i32> {
2012 match root.get(key) {
2013 Some(NbtTag::Byte(value)) => Some(i32::from(*value)),
2014 Some(NbtTag::Short(value)) => Some(i32::from(*value)),
2015 Some(NbtTag::Int(value)) => Some(*value),
2016 Some(NbtTag::Long(value)) => i32::try_from(*value).ok(),
2017 _ => None,
2018 }
2019}
2020
2021fn long_field(root: &IndexMap<String, NbtTag>, key: &str) -> Option<i64> {
2022 match root.get(key) {
2023 Some(NbtTag::Byte(value)) => Some(i64::from(*value)),
2024 Some(NbtTag::Short(value)) => Some(i64::from(*value)),
2025 Some(NbtTag::Int(value)) => Some(i64::from(*value)),
2026 Some(NbtTag::Long(value)) => Some(*value),
2027 _ => None,
2028 }
2029}
2030
2031fn f64_value(tag: &NbtTag) -> Option<f64> {
2032 match tag {
2033 NbtTag::Float(value) => Some(f64::from(*value)),
2034 NbtTag::Double(value) => Some(*value),
2035 NbtTag::Int(value) => Some(f64::from(*value)),
2036 NbtTag::Long(value) => Some(*value as f64),
2037 _ => None,
2038 }
2039}
2040
2041fn f32_value(tag: &NbtTag) -> Option<f32> {
2042 match tag {
2043 NbtTag::Float(value) => Some(*value),
2044 NbtTag::Double(value) => Some(*value as f32),
2045 NbtTag::Int(value) => Some(*value as f32),
2046 _ => None,
2047 }
2048}
2049
2050fn vec3_f64_field(root: &IndexMap<String, NbtTag>, key: &str) -> Option<[f64; 3]> {
2051 let Some(NbtTag::List(values)) = root.get(key) else {
2052 return None;
2053 };
2054 Some([
2055 f64_value(values.first()?)?,
2056 f64_value(values.get(1)?)?,
2057 f64_value(values.get(2)?)?,
2058 ])
2059}
2060
2061fn vec3_f32_field(root: &IndexMap<String, NbtTag>, key: &str) -> Option<[f32; 3]> {
2062 let Some(NbtTag::List(values)) = root.get(key) else {
2063 return None;
2064 };
2065 Some([
2066 f32_value(values.first()?)?,
2067 f32_value(values.get(1)?)?,
2068 f32_value(values.get(2)?)?,
2069 ])
2070}
2071
2072fn vec2_f32_field(root: &IndexMap<String, NbtTag>, key: &str) -> Option<[f32; 2]> {
2073 let Some(NbtTag::List(values)) = root.get(key) else {
2074 return None;
2075 };
2076 Some([f32_value(values.first()?)?, f32_value(values.get(1)?)?])
2077}
2078
/// Reads a little-endian `i32` from the start of the slice, if long enough.
fn read_i32(value: &[u8]) -> Option<i32> {
    match value {
        [a, b, c, d, ..] => Some(i32::from_le_bytes([*a, *b, *c, *d])),
        _ => None,
    }
}
2083
#[cfg(test)]
mod tests {
    use super::*;
    use crate::nbt::serialize_root_nbt;
    use crate::storage::{MemoryStorage, WorldStorage};

    // A compound carrying Name/Count/Damage/WasPickedUp should be recognized
    // as exactly one item stack with all common fields extracted.
    #[test]
    fn item_stack_extracts_common_fields() {
        let item = NbtTag::Compound(IndexMap::from([
            (
                "Name".to_string(),
                NbtTag::String("minecraft:stone".to_string()),
            ),
            ("Count".to_string(), NbtTag::Byte(5)),
            ("Damage".to_string(), NbtTag::Short(1)),
            ("WasPickedUp".to_string(), NbtTag::Byte(1)),
        ]));

        let items = collect_item_stacks(&item);

        assert_eq!(items.len(), 1);
        assert_eq!(items[0].name.as_deref(), Some("minecraft:stone"));
        assert_eq!(items[0].count, Some(5));
        assert_eq!(items[0].damage, Some(1));
        assert_eq!(items[0].was_picked_up, Some(true));
    }

    // Round-trips an actor compound through serialize_root_nbt and
    // parse_actor_value, checking identifier, position, and that the nested
    // Inventory item is collected.
    #[test]
    fn entity_extracts_identifier_position_and_items() {
        let entity = NbtTag::Compound(IndexMap::from([
            (
                "identifier".to_string(),
                NbtTag::String("minecraft:pig".to_string()),
            ),
            (
                "Pos".to_string(),
                NbtTag::List(vec![
                    NbtTag::Float(1.0),
                    NbtTag::Float(2.0),
                    NbtTag::Float(3.0),
                ]),
            ),
            (
                "Inventory".to_string(),
                NbtTag::List(vec![NbtTag::Compound(IndexMap::from([
                    (
                        "Name".to_string(),
                        NbtTag::String("minecraft:dirt".to_string()),
                    ),
                    ("Count".to_string(), NbtTag::Byte(1)),
                ]))]),
            ),
        ]));
        let bytes = Bytes::from(serialize_root_nbt(&entity).expect("serialize"));
        let mut report = WorldParseReport::default();

        let value = parse_actor_value(&bytes, &mut report);

        let ParsedDbValue::ActorEntities(entities) = value else {
            panic!("expected entity value");
        };
        assert_eq!(entities.len(), 1);
        assert_eq!(entities[0].identifier.as_deref(), Some("minecraft:pig"));
        assert_eq!(entities[0].position, Some([1.0, 2.0, 3.0]));
        assert_eq!(entities[0].items.len(), 1);
    }

    // Block-entity parsing should surface the id, the xyz position triple,
    // and the items held in the container list.
    #[test]
    fn block_entity_extracts_container_items() {
        let block_entity = NbtTag::Compound(IndexMap::from([
            ("id".to_string(), NbtTag::String("Chest".to_string())),
            ("x".to_string(), NbtTag::Int(1)),
            ("y".to_string(), NbtTag::Int(2)),
            ("z".to_string(), NbtTag::Int(3)),
            ("isMovable".to_string(), NbtTag::Byte(1)),
            (
                "Items".to_string(),
                NbtTag::List(vec![NbtTag::Compound(IndexMap::from([
                    (
                        "Name".to_string(),
                        NbtTag::String("minecraft:apple".to_string()),
                    ),
                    ("Count".to_string(), NbtTag::Byte(2)),
                ]))]),
            ),
        ]));
        let bytes = Bytes::from(serialize_root_nbt(&block_entity).expect("serialize"));
        let mut report = WorldParseReport::default();

        let value = parse_block_entities(&bytes, &mut report);

        let ParsedChunkRecordValue::BlockEntities(block_entities) = value else {
            panic!("expected block entities");
        };
        assert_eq!(block_entities.len(), 1);
        assert_eq!(block_entities[0].id.as_deref(), Some("Chest"));
        assert_eq!(block_entities[0].position, Some([1, 2, 3]));
        assert_eq!(block_entities[0].items.len(), 1);
    }

    // biome_id_at must index the 4096-cell storage via block_storage_index
    // (XZ-plane order) and map the stored index through the palette.
    #[test]
    fn biome_lookup_uses_xz_plane_storage_order() {
        let mut indices = vec![0_u16; 4096];
        indices[crate::block_storage_index(1, 2, 3)] = 2;
        let storage = ParsedBiomeStorage {
            y: Some(0),
            palette: vec![10, 20, 30],
            indices: Some(indices),
            counts: vec![4095, 0, 1],
        };

        assert_eq!(storage.biome_id_at(1, 2, 3), Some(30));
        assert_eq!(storage.biome_id_at(1, 3, 3), Some(10));
    }

    // A single HSA record must encode to exactly 4 (count) + 25 (record)
    // bytes and decode back to an identical value.
    #[test]
    fn hsa_records_roundtrip_reference_binary_layout() {
        let areas = vec![ParsedHardcodedSpawnArea {
            kind: HardcodedSpawnAreaKind::PillagerOutpost,
            min: [1, 2, 3],
            max: [4, 5, 6],
        }];

        let bytes = encode_hardcoded_spawn_area_records(&areas).expect("encode hsa");
        let decoded = parse_hardcoded_spawn_area_records(&bytes).expect("decode hsa");

        assert_eq!(bytes.len(), 29);
        assert_eq!(decoded, areas);
    }

    // encode → parse round-trips for both the legacy Data2D codec and the
    // modern Data3D codec.
    #[test]
    fn biome2d_and_biome3d_codecs_roundtrip() {
        let height_map = (0..256).map(|value| value as i16).collect::<Vec<_>>();
        let biomes = (0..256).map(|value| value as u8).collect::<Vec<_>>();
        let data2d = Biome2d::new(height_map.clone(), biomes.clone()).expect("2d");
        assert_eq!(
            Biome2d::parse(&data2d.encode().expect("encode")).expect("parse"),
            data2d
        );

        let storage = ParsedBiomeStorage {
            y: Some(-64),
            palette: vec![1, 2],
            indices: Some(vec![0; 4096]),
            counts: vec![4096, 0],
        };
        let data3d = Biome3d::new(height_map, vec![storage]).expect("3d");
        assert_eq!(
            Biome3d::parse(&data3d.encode().expect("encode")).expect("parse"),
            data3d
        );
    }

    // Map records should expose typed metadata plus decoded pixels, and
    // global records should keep their declared kind.
    #[test]
    fn map_and_global_records_extract_typed_fields() {
        let map_root = NbtTag::Compound(IndexMap::from([
            ("dimension".to_string(), NbtTag::Int(0)),
            ("xCenter".to_string(), NbtTag::Int(10)),
            ("zCenter".to_string(), NbtTag::Int(-20)),
            ("scale".to_string(), NbtTag::Byte(2)),
            ("width".to_string(), NbtTag::Int(2)),
            ("height".to_string(), NbtTag::Int(2)),
            ("colors".to_string(), NbtTag::ByteArray(vec![1, 2, 3, 4])),
        ]));
        let map_bytes = Bytes::from(serialize_root_nbt(&map_root).expect("serialize"));
        let map = parse_map_record(MapRecordId::unchecked("5"), map_bytes).expect("map");

        assert_eq!(map.known_fields.center_x, Some(10));
        assert_eq!(
            map.pixels.as_ref().map(|pixels| pixels.colors.as_slice()),
            Some(&[1, 2, 3, 4][..])
        );

        let global = parse_global_record(
            GlobalRecordKind::Scoreboard,
            "scoreboard".to_string(),
            encode_consecutive_roots(&[NbtTag::Compound(IndexMap::new())]).expect("encode"),
        )
        .expect("global");
        assert_eq!(global.kind, GlobalRecordKind::Scoreboard);
    }

    // A LegacyTerrain-tagged record of the reference length should parse into
    // the structured LegacyTerrain variant and be counted in the report.
    #[test]
    fn chunk_record_parser_preserves_legacy_terrain_structure() {
        let records = vec![ChunkRecord {
            key: crate::ChunkKey::new(
                ChunkPos {
                    x: 0,
                    z: 0,
                    dimension: crate::Dimension::Overworld,
                },
                ChunkRecordTag::LegacyTerrain,
            ),
            value: Bytes::from(vec![0; crate::LEGACY_TERRAIN_VALUE_LEN]),
        }];

        let parsed = parse_chunk_records(
            ChunkPos {
                x: 0,
                z: 0,
                dimension: crate::Dimension::Overworld,
            },
            records,
        );

        assert_eq!(parsed.report.legacy_terrain_count, 1);
        assert!(matches!(
            parsed.records[0].value,
            ParsedChunkRecordValue::LegacyTerrain(_)
        ));
    }

    // A subchunk payload whose first byte is 2 should be detected as the
    // legacy subchunk format and tallied in both subchunk counters.
    #[test]
    fn chunk_record_parser_counts_legacy_subchunks() {
        let mut value = vec![0; crate::LEGACY_SUBCHUNK_MIN_VALUE_LEN];
        value[0] = 2;
        let records = vec![ChunkRecord {
            key: crate::ChunkKey::subchunk(
                ChunkPos {
                    x: 0,
                    z: 0,
                    dimension: crate::Dimension::Overworld,
                },
                0,
            ),
            value: Bytes::from(value),
        }];

        let parsed = parse_chunk_records(
            ChunkPos {
                x: 0,
                z: 0,
                dimension: crate::Dimension::Overworld,
            },
            records,
        );

        assert_eq!(parsed.report.subchunk_count, 1);
        assert_eq!(parsed.report.legacy_subchunk_count, 1);
        assert!(matches!(
            parsed.records[0].value,
            ParsedChunkRecordValue::SubChunk(SubChunk {
                format: SubChunkFormat::LegacySubChunk(_),
                ..
            })
        ));
    }

    // Summary retention mode should still count entries and chunks but drop
    // all parsed entry bodies from the result.
    #[test]
    fn summary_parse_does_not_retain_raw_entries() {
        let storage = MemoryStorage::new();
        let chunk_key = crate::ChunkKey::new(
            ChunkPos {
                x: 0,
                z: 0,
                dimension: crate::Dimension::Overworld,
            },
            ChunkRecordTag::Version,
        );
        storage
            .put(&chunk_key.encode(), &[1])
            .expect("insert chunk version");
        storage
            .put(
                b"~local_player",
                &serialize_root_nbt(&NbtTag::Compound(IndexMap::new())).expect("serialize"),
            )
            .expect("insert player");

        let parsed = parse_world_storage(
            LevelDatDocument {
                header: crate::LevelDatHeader {
                    version: 10,
                    declared_len: 0,
                    actual_payload_len: 0,
                },
                root: NbtTag::Compound(IndexMap::new()),
                warnings: Vec::new(),
            },
            &storage,
            WorldParseOptions::summary(),
        )
        .expect("parse summary");

        assert_eq!(parsed.report.entry_count, 2);
        assert_eq!(parsed.report.chunk_count, 1);
        assert!(parsed.entries.is_empty());
    }
}