1use crate::chunk::{
9 ActorDigestKey, ActorUid, BedrockDbKey, BlockPos, BlockState, Chunk, ChunkKey, ChunkPos,
10 ChunkRecord, ChunkRecordTag, ChunkVersion, GlobalRecordKind, LegacyBiomeSample, LegacyTerrain,
11 MapRecordId, SubChunk, SubChunkDecodeMode, parse_subchunk_with_mode,
12};
13use crate::error::{BedrockWorldError, Result};
14use crate::level_dat::{LevelDatDocument, read_level_dat_document, write_level_dat_document};
15use crate::nbt::{NbtTag, parse_consecutive_root_nbt, parse_root_nbt, serialize_root_nbt};
16use crate::parsed::{
17 ActorRecord, ActorSource, Biome2d, Biome3d, BlockEntityRecord, HeightMap2d, ItemStack,
18 ParsedBiomeData, ParsedBiomeStorage, ParsedBlockEntity, ParsedChunkData, ParsedDbEntry,
19 ParsedDbValue, ParsedEntity, ParsedGlobalData, ParsedHardcodedSpawnArea, ParsedMapData,
20 ParsedVillageData, ParsedWorld, WorldParseOptions, WorldParseReport, collect_item_stacks,
21 encode_actor_digest_ids, encode_consecutive_roots, encode_global_record,
22 encode_hardcoded_spawn_area_records, encode_map_record, parse_actor_digest_ids,
23 parse_block_entities_from_value, parse_chunk_records, parse_chunk_records_with_options,
24 parse_data3d, parse_entities_from_value, parse_global_record, parse_global_storage_entries,
25 parse_hardcoded_spawn_area_records, parse_legacy_data2d, parse_map_record, parse_world_storage,
26};
27use crate::player::{PlayerData, PlayerId};
28use crate::storage::backend::BedrockLevelDbStorage;
29use crate::storage::{
30 PocketChunksDatStorage, StorageBatch, StorageCancelFlag, StorageOp, StorageProgressSink,
31 StorageReadOptions, StorageScanMode, StorageThreadingOptions, StorageVisitorControl,
32 WorldStorage,
33};
34use bytes::Bytes;
35use rayon::{ThreadPoolBuilder, prelude::*};
36use std::path::{Path, PathBuf};
37use std::sync::Arc;
38use std::time::Instant;
39use std::{
40 collections::{BTreeMap, BTreeSet},
41 sync::{
42 Mutex,
43 atomic::{AtomicBool, Ordering},
44 mpsc,
45 },
46};
47
/// Options controlling how a Bedrock world is opened.
#[derive(Debug, Clone)]
pub struct OpenOptions {
    /// Open the underlying storage read-only (defaults to `true`).
    pub read_only: bool,
    /// Requested on-disk format; `Auto` detects it from the world directory.
    pub format: WorldFormatHint,
}
57
58impl Default for OpenOptions {
59 fn default() -> Self {
60 Self {
61 read_only: true,
62 format: WorldFormatHint::Auto,
63 }
64 }
65}
66
/// Caller-supplied hint for which on-disk world format to expect.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub enum WorldFormatHint {
    /// Detect the format from the world directory contents.
    #[default]
    Auto,
    /// Force the LevelDB-backed format.
    LevelDb,
    /// Force the legacy Pocket Edition `chunks.dat` format.
    PocketChunksDat,
}
78
/// Concrete world format resolved at open time.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub enum WorldFormat {
    /// LevelDB-backed world with modern chunk records.
    #[default]
    LevelDb,
    /// LevelDB-backed world that stores legacy terrain records.
    LevelDbLegacyTerrain,
    /// Legacy Pocket Edition `chunks.dat` world.
    PocketChunksDat,
}
90
/// Handle to an opened Bedrock world: its on-disk location, the options it
/// was opened with, the storage backend, and the detected format.
///
/// `S` is the storage handle type; it defaults to a type-erased
/// `Arc<dyn WorldStorage>`.
pub struct BedrockWorld<S = Arc<dyn WorldStorage>> {
    // Root directory of the world (contains `level.dat` and `db/`).
    path: PathBuf,
    // Options supplied at open time.
    options: OpenOptions,
    // Backend used for all key/value reads and writes.
    storage: S,
    // Format detected (or assumed) when this handle was created.
    format: WorldFormat,
}
101
/// Abstraction over ways of holding a [`WorldStorage`] (by value or via
/// `Arc`), so [`BedrockWorld`] can be generic over the handle type.
pub trait WorldStorageHandle: Clone + Send + Sync + 'static {
    /// Borrows the underlying storage as a trait object.
    fn storage(&self) -> &dyn WorldStorage;
}
107
/// Any cloneable storage implementation is its own handle.
impl<T> WorldStorageHandle for T
where
    T: WorldStorage + Clone + Send + Sync + 'static,
{
    fn storage(&self) -> &dyn WorldStorage {
        self
    }
}
116
/// An `Arc` around a concrete storage type is a cheap, cloneable handle.
impl<T> WorldStorageHandle for Arc<T>
where
    T: WorldStorage + 'static,
{
    fn storage(&self) -> &dyn WorldStorage {
        self.as_ref()
    }
}
125
/// Type-erased storage handles are supported directly (trait objects do not
/// match the blanket impls above).
impl WorldStorageHandle for Arc<dyn WorldStorage> {
    fn storage(&self) -> &dyn WorldStorage {
        self.as_ref()
    }
}
131
/// Options controlling how the visible surface block of a column is chosen.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct SurfaceColumnOptions {
    /// Skip air blocks while scanning for the surface.
    pub skip_air: bool,
    /// Treat water as see-through and keep scanning below it.
    pub transparent_water: bool,
}

impl Default for SurfaceColumnOptions {
    /// Both behaviors default to enabled.
    fn default() -> Self {
        Self {
            transparent_water: true,
            skip_air: true,
        }
    }
}
149
/// Result of resolving the visible surface of one block column.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SurfaceColumn {
    /// World-space Y of the surface block.
    pub y: i32,
    /// Name of the surface block.
    pub block_name: String,
    /// Biome id at the column, when biome data was available.
    pub biome_id: Option<u32>,
    /// Depth of water above the surface, in blocks (0 when dry) —
    /// NOTE(review): inferred from the name; confirm against the scanner.
    pub water_depth: u8,
    /// Block below the water surface, when the column is submerged.
    pub under_water_block_name: Option<String>,
    /// True when the value came from a fallback path rather than decoded data.
    pub is_fallback: bool,
}
166
/// How subchunks are consulted when computing an exact surface.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExactSurfaceSubchunkPolicy {
    /// Decode every subchunk in the column.
    Full,
    /// Start from a stored hint and verify it instead of decoding everything.
    HintThenVerify,
}
175
/// Tuning knobs for the chunk-loading pipeline. A value of `0` means
/// "pick a sensible default".
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct WorldPipelineOptions {
    /// Bounded queue depth between producers and consumers (0 = auto).
    pub queue_depth: usize,
    /// Chunks fetched per storage batch (0 = auto).
    pub chunk_batch_size: usize,
    /// Worker threads used for subchunk decoding (0 = auto).
    pub subchunk_decode_workers: usize,
    /// Emit a progress callback every N items (0 = default of 256).
    pub progress_interval: usize,
}

impl WorldPipelineOptions {
    /// Resolves the queue depth: an explicit value is used as-is (minimum 1);
    /// `0` picks twice the worker count, but never less than the pending work
    /// clamped to `1..=256`.
    #[must_use]
    pub fn resolve_queue_depth(self, workers: usize, work_items: usize) -> usize {
        if self.queue_depth == 0 {
            let by_workers = workers.max(1).saturating_mul(2);
            by_workers.max(work_items.clamp(1, 256))
        } else {
            self.queue_depth.max(1)
        }
    }

    /// Resolves the progress interval: `0` becomes 256, anything else is
    /// used as-is.
    #[must_use]
    pub fn resolve_progress_interval(self) -> usize {
        match self.progress_interval {
            0 => 256,
            interval => interval,
        }
    }
}
212
/// Order in which chunks are processed during a render load.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub enum RenderChunkPriority {
    /// Process in plain row-major order.
    #[default]
    RowMajor,
    /// Process nearest-first relative to the given chunk coordinate.
    DistanceFrom {
        /// Reference chunk X.
        chunk_x: i32,
        /// Reference chunk Z.
        chunk_z: i32,
    },
}
227
/// How much biome data to load for an exact-surface render request.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub enum ExactSurfaceBiomeLoad {
    /// Skip biome data entirely.
    None,
    /// Load biome data only for the topmost columns (the default).
    #[default]
    TopColumns,
    /// Load all biome data.
    All,
}
239
/// What a render load should produce for each chunk.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RenderChunkRequest {
    /// Compute the exact visible surface per column.
    ExactSurface {
        /// Subchunk decoding policy.
        subchunks: ExactSurfaceSubchunkPolicy,
        /// How much biome data to load.
        biome: ExactSurfaceBiomeLoad,
        /// Also parse block entities.
        block_entities: bool,
    },
    /// Return only the stored height map.
    RawHeightMap,
    /// Return a single horizontal layer.
    Layer {
        /// World-space Y of the layer.
        y: i32,
    },
    /// Return biome data.
    Biome {
        /// World-space Y at which biomes are sampled.
        y: i32,
        /// Load every biome storage instead of just the one containing `y`.
        load_all: bool,
    },
}
267
268impl Default for RenderChunkRequest {
269 fn default() -> Self {
270 Self::ExactSurface {
271 subchunks: ExactSurfaceSubchunkPolicy::Full,
272 biome: ExactSurfaceBiomeLoad::TopColumns,
273 block_entities: false,
274 }
275 }
276}
277
/// Where a terrain sample's data originated.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TerrainSampleSource {
    /// Decoded from a modern subchunk record.
    Subchunk,
    /// Decoded from a legacy terrain record.
    LegacyTerrain,
    /// Produced by a legacy fallback path.
    LegacyFallback,
}
288
/// Biome information attached to a terrain column sample.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TerrainColumnBiome {
    /// Modern numeric biome id.
    Id(u32),
    /// Legacy biome sample.
    Legacy(LegacyBiomeSample),
}
297
/// Role a block plays when classifying a column's surface.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TerrainSurfaceRole {
    /// Air — not a surface candidate.
    Air,
    /// Water covering the surface.
    Water,
    /// Overlay block sitting above the primary surface.
    Overlay,
    /// The primary surface block.
    Primary,
}
310
/// An overlay block found above the primary surface block of a column.
#[derive(Debug, Clone, PartialEq)]
pub struct TerrainColumnOverlay {
    /// Y of the overlay block.
    pub y: i16,
    /// Block state of the overlay.
    pub block_state: BlockState,
    /// Which decode path produced this overlay.
    pub source: TerrainSampleSource,
}
321
/// Water information for a submerged column.
#[derive(Debug, Clone, PartialEq)]
pub struct TerrainColumnWater {
    /// Y of the topmost water block.
    pub surface_y: i16,
    /// Block state at the water surface.
    pub block_state: BlockState,
    /// Water depth in blocks.
    pub depth: u8,
    /// Y of the block under the water, when found.
    pub underwater_y: Option<i16>,
    /// Block state under the water, when found.
    pub underwater_block_state: Option<BlockState>,
    /// Which decode path produced this data.
    pub source: TerrainSampleSource,
}
338
/// Fully resolved terrain information for a single 1x1 column.
#[derive(Debug, Clone, PartialEq)]
pub struct TerrainColumnSample {
    /// Y of the visible surface block.
    pub surface_y: i16,
    /// Block state at the visible surface.
    pub surface_block_state: BlockState,
    /// Secondary Y used alongside the surface — NOTE(review): presumably for
    /// relief shading; confirm with the renderer.
    pub relief_y: i16,
    /// Block state at the relief position.
    pub relief_block_state: BlockState,
    /// Overlay block above the surface, if present.
    pub overlay: Option<TerrainColumnOverlay>,
    /// Water data, if the column is submerged.
    pub water: Option<TerrainColumnWater>,
    /// Biome at the column, when available.
    pub biome: Option<TerrainColumnBiome>,
    /// Which decode path produced this sample.
    pub source: TerrainSampleSource,
}
359
/// Per-chunk 16x16 grid of optional terrain column samples, addressed via
/// `column_index(local_x, local_z)`.
#[derive(Debug, Clone, PartialEq)]
pub struct TerrainColumnSamples {
    // Flat storage; always 16 * 16 entries (see `new`).
    columns: Vec<Option<TerrainColumnSample>>,
}
365
366impl TerrainColumnSamples {
367 #[must_use]
368 pub fn new() -> Self {
370 Self {
371 columns: vec![None; 16 * 16],
372 }
373 }
374
375 #[must_use]
376 pub fn get(&self, local_x: u8, local_z: u8) -> Option<&TerrainColumnSample> {
378 self.columns
379 .get(column_index(local_x, local_z)?)
380 .and_then(Option::as_ref)
381 }
382
383 pub fn set(&mut self, local_x: u8, local_z: u8, sample: TerrainColumnSample) {
385 if let Some(index) = column_index(local_x, local_z) {
386 if let Some(slot) = self.columns.get_mut(index) {
387 *slot = Some(sample);
388 }
389 }
390 }
391
392 #[must_use]
393 pub fn sampled_columns(&self) -> usize {
395 self.columns
396 .iter()
397 .filter(|sample| sample.is_some())
398 .count()
399 }
400
401 pub fn iter(&self) -> impl Iterator<Item = &TerrainColumnSample> {
403 self.columns.iter().filter_map(Option::as_ref)
404 }
405}
406
impl Default for TerrainColumnSamples {
    /// Same as [`TerrainColumnSamples::new`]: an empty grid.
    fn default() -> Self {
        Self::new()
    }
}
412
/// Counters and timings collected over one render load.
///
/// `*_ms` fields are elapsed milliseconds; the remaining fields are totals
/// for the whole load. Phase timings may overlap, so they need not sum to
/// `load_ms`.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct RenderLoadStats {
    /// Chunks the caller asked for.
    pub requested_chunks: usize,
    /// Chunks for which data was found.
    pub loaded_chunks: usize,
    pub subchunks_decoded: usize,
    pub worker_threads: usize,
    pub queue_wait_ms: u128,
    pub load_ms: u128,
    /// Exact-get key statistics.
    pub keys_requested: usize,
    pub keys_found: usize,
    pub exact_get_batches: usize,
    pub prefix_scans: usize,
    /// Per-phase timings.
    pub decode_ms: u128,
    pub db_read_ms: u128,
    pub biome_parse_ms: u128,
    pub subchunk_parse_ms: u128,
    pub surface_scan_ms: u128,
    pub block_entity_parse_ms: u128,
    pub full_reload_ms: u128,
    /// Legacy-format counters.
    pub legacy_terrain_records: usize,
    pub legacy_biome_samples: usize,
    pub legacy_biome_colors: usize,
    pub terrain_source_legacy: usize,
    pub terrain_source_subchunk: usize,
    pub legacy_pocket_chunks: usize,
    /// Format the world was detected as.
    pub detected_format: WorldFormat,
    /// Surface-computation counters.
    pub computed_surface_columns: usize,
    pub raw_height_mismatch_columns: usize,
    pub missing_subchunk_columns: usize,
    pub legacy_fallback_columns: usize,
    pub legacy_biome_preferred_columns: usize,
    pub modern_biome_fallback_columns: usize,
}
477
/// Options for loading individual chunks for rendering.
#[derive(Debug, Clone)]
pub struct RenderChunkLoadOptions {
    /// What to produce for each chunk.
    pub request: RenderChunkRequest,
    /// How subchunk payloads are decoded.
    pub subchunk_decode: SubChunkDecodeMode,
    /// Worker-thread policy.
    pub threading: WorldThreadingOptions,
    /// Pipeline tuning knobs.
    pub pipeline: WorldPipelineOptions,
    /// Optional cooperative cancellation token.
    pub cancel: Option<CancelFlag>,
    /// Optional progress callback.
    pub progress: Option<ProgressSink>,
    /// Chunk processing order.
    pub priority: RenderChunkPriority,
}
496
497impl Default for RenderChunkLoadOptions {
498 fn default() -> Self {
499 Self {
500 request: RenderChunkRequest::default(),
501 subchunk_decode: SubChunkDecodeMode::FullIndices,
502 threading: WorldThreadingOptions::Auto,
503 pipeline: WorldPipelineOptions::default(),
504 cancel: None,
505 progress: None,
506 priority: RenderChunkPriority::RowMajor,
507 }
508 }
509}
510
/// A block entity as surfaced to the renderer.
#[derive(Debug, Clone, PartialEq)]
pub struct RenderBlockEntity {
    /// Block entity identifier, when the NBT carried one.
    pub id: Option<String>,
    /// World-space position, when the NBT carried one.
    pub position: Option<[i32; 3]>,
    /// Full parsed NBT payload.
    pub nbt: NbtTag,
}
521
/// Decoded per-chunk payload produced by a render load. Which fields are
/// populated depends on the [`RenderChunkRequest`] used.
#[derive(Debug, Clone, PartialEq)]
pub struct RenderChunkData {
    /// Position of the chunk.
    pub pos: ChunkPos,
    /// Whether any data was found for the chunk.
    pub is_loaded: bool,
    /// 16x16 optional heights — NOTE(review): axis order ([x][z] vs [z][x])
    /// is not visible here; confirm against the loader.
    pub height_map: Option<[[Option<i16>; 16]; 16]>,
    /// Legacy per-column biome samples, when present.
    pub legacy_biomes: Option<[[Option<LegacyBiomeSample>; 16]; 16]>,
    /// Legacy per-column biome colors, when present.
    pub legacy_biome_colors: Option<[[Option<u32>; 16]; 16]>,
    /// Parsed biome storages keyed by subchunk Y.
    pub biome_data: BTreeMap<i32, ParsedBiomeStorage>,
    /// Decoded subchunks keyed by subchunk Y.
    pub subchunks: BTreeMap<i8, SubChunk>,
    /// Parsed block entities, when requested.
    pub block_entities: Vec<RenderBlockEntity>,
    /// Legacy terrain record, when the world stores one.
    pub legacy_terrain: Option<LegacyTerrain>,
    /// Per-column resolved samples, when computed.
    pub column_samples: Option<TerrainColumnSamples>,
    /// Chunk format version.
    pub version: crate::ChunkVersion,
}
548
549impl RenderChunkData {
550 #[must_use]
551 pub fn column_sample_at(&self, local_x: u8, local_z: u8) -> Option<&TerrainColumnSample> {
553 self.column_samples.as_ref()?.get(local_x, local_z)
554 }
555}
556
/// Undecoded per-chunk record payloads as fetched from storage, before the
/// decode phase turns them into [`RenderChunkData`].
#[derive(Debug, Clone)]
struct RawRenderChunkData {
    pos: ChunkPos,
    // Biome record bytes paired with the chunk version they were stored under.
    biome_record: Option<(crate::ChunkVersion, Bytes)>,
    // Raw subchunk payloads keyed by subchunk Y.
    subchunks: BTreeMap<i8, Bytes>,
    block_entities: Option<Bytes>,
    legacy_terrain: Option<Bytes>,
}
565
/// Accumulated per-phase decode timings, in milliseconds.
#[derive(Debug, Clone, Copy, Default)]
#[allow(clippy::struct_field_names)]
struct RenderChunkDecodeTiming {
    biome_parse_ms: u128,
    subchunk_parse_ms: u128,
    surface_scan_ms: u128,
    block_entity_parse_ms: u128,
}

impl RenderChunkDecodeTiming {
    /// Accumulates `other` into `self`, saturating on overflow.
    fn add(&mut self, other: Self) {
        let Self {
            biome_parse_ms,
            subchunk_parse_ms,
            surface_scan_ms,
            block_entity_parse_ms,
        } = other;
        self.biome_parse_ms = self.biome_parse_ms.saturating_add(biome_parse_ms);
        self.subchunk_parse_ms = self.subchunk_parse_ms.saturating_add(subchunk_parse_ms);
        self.surface_scan_ms = self.surface_scan_ms.saturating_add(surface_scan_ms);
        self.block_entity_parse_ms = self
            .block_entity_parse_ms
            .saturating_add(block_entity_parse_ms);
    }
}
587
/// Kinds of storage records a render load may request per chunk.
#[derive(Debug, Clone, Copy)]
enum RenderRecordKind {
    LegacyTerrain,
    /// Modern 3D biome/height record.
    Data3D,
    /// 2D height/biome record.
    Data2D,
    /// Pre-1.18 2D record variant.
    Data2DLegacy,
    /// A single subchunk at the given subchunk Y.
    Subchunk(i8),
    BlockEntity,
}
597
/// One record fetch, tied back to the chunk it belongs to by index into the
/// load's chunk list.
#[derive(Debug, Clone, Copy)]
struct RenderRecordRequest {
    chunk_index: usize,
    kind: RenderRecordKind,
}
603
/// Options for loading a rectangular region of chunks for rendering.
/// Mirrors [`RenderChunkLoadOptions`] field-for-field (see the `From` impl).
#[derive(Debug, Clone)]
pub struct RenderRegionLoadOptions {
    /// What to produce for each chunk.
    pub request: RenderChunkRequest,
    /// How subchunk payloads are decoded.
    pub subchunk_decode: SubChunkDecodeMode,
    /// Worker-thread policy.
    pub threading: WorldThreadingOptions,
    /// Pipeline tuning knobs.
    pub pipeline: WorldPipelineOptions,
    /// Optional cooperative cancellation token.
    pub cancel: Option<CancelFlag>,
    /// Optional progress callback.
    pub progress: Option<ProgressSink>,
    /// Chunk processing order.
    pub priority: RenderChunkPriority,
}
622
623impl Default for RenderRegionLoadOptions {
624 fn default() -> Self {
625 Self {
626 request: RenderChunkRequest::default(),
627 subchunk_decode: SubChunkDecodeMode::FullIndices,
628 threading: WorldThreadingOptions::Auto,
629 pipeline: WorldPipelineOptions::default(),
630 cancel: None,
631 progress: None,
632 priority: RenderChunkPriority::RowMajor,
633 }
634 }
635}
636
637impl From<RenderRegionLoadOptions> for RenderChunkLoadOptions {
638 fn from(options: RenderRegionLoadOptions) -> Self {
639 Self {
640 request: options.request,
641 subchunk_decode: options.subchunk_decode,
642 threading: options.threading,
643 pipeline: options.pipeline,
644 cancel: options.cancel,
645 progress: options.progress,
646 priority: options.priority,
647 }
648 }
649}
650
/// Inclusive rectangle of chunk coordinates within one dimension.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct RenderChunkRegion {
    /// Dimension the region belongs to.
    pub dimension: crate::Dimension,
    /// Inclusive minimum chunk X.
    pub min_chunk_x: i32,
    /// Inclusive minimum chunk Z.
    pub min_chunk_z: i32,
    /// Inclusive maximum chunk X.
    pub max_chunk_x: i32,
    /// Inclusive maximum chunk Z.
    pub max_chunk_z: i32,
}
665
/// Result of loading a region: the region requested, the chunks produced,
/// and the statistics collected along the way.
#[derive(Debug, Clone, PartialEq)]
pub struct RenderRegionData {
    /// The region that was requested.
    pub region: RenderChunkRegion,
    /// Loaded chunk payloads.
    pub chunks: Vec<RenderChunkData>,
    /// Counters and timings for the load.
    pub stats: RenderLoadStats,
}
676
/// Inclusive bounding box over observed chunk positions in one dimension,
/// plus a count of distinct chunks seen.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ChunkBounds {
    /// Dimension the bounds apply to.
    pub dimension: crate::Dimension,
    /// Inclusive minimum chunk X.
    pub min_chunk_x: i32,
    /// Inclusive minimum chunk Z.
    pub min_chunk_z: i32,
    /// Inclusive maximum chunk X.
    pub max_chunk_x: i32,
    /// Inclusive maximum chunk Z.
    pub max_chunk_z: i32,
    /// Number of distinct chunk positions included.
    pub chunk_count: usize,
}
693
694impl ChunkBounds {
695 fn from_first(pos: ChunkPos) -> Self {
696 Self {
697 dimension: pos.dimension,
698 min_chunk_x: pos.x,
699 min_chunk_z: pos.z,
700 max_chunk_x: pos.x,
701 max_chunk_z: pos.z,
702 chunk_count: 1,
703 }
704 }
705
706 fn include(&mut self, pos: ChunkPos) {
707 self.min_chunk_x = self.min_chunk_x.min(pos.x);
708 self.min_chunk_z = self.min_chunk_z.min(pos.z);
709 self.max_chunk_x = self.max_chunk_x.max(pos.x);
710 self.max_chunk_z = self.max_chunk_z.max(pos.z);
711 self.chunk_count = self.chunk_count.saturating_add(1);
712 }
713}
714
/// Options for whole-storage key scans.
#[derive(Debug, Clone)]
pub struct WorldScanOptions {
    /// Worker-thread policy.
    pub threading: WorldThreadingOptions,
    /// Pipeline tuning knobs.
    pub pipeline: WorldPipelineOptions,
    /// Optional cooperative cancellation token.
    pub cancel: Option<CancelFlag>,
    /// Optional progress callback.
    pub progress: Option<ProgressSink>,
}
727
728impl Default for WorldScanOptions {
729 fn default() -> Self {
730 Self {
731 threading: WorldThreadingOptions::Auto,
732 pipeline: WorldPipelineOptions::default(),
733 cancel: None,
734 progress: None,
735 }
736 }
737}
738
/// Worker-thread policy for parallel world operations.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub enum WorldThreadingOptions {
    /// Use available parallelism, capped by the amount of work.
    #[default]
    Auto,
    /// Use exactly this many threads (validated/clamped by the resolvers).
    Fixed(usize),
    /// Run single-threaded.
    Single,
}
750
751pub const MAX_WORLD_THREADS: usize = 512;
753
754impl WorldThreadingOptions {
755 #[must_use]
756 pub fn resolve(self, work_items: usize) -> usize {
758 self.resolve_unchecked(work_items)
759 }
760
761 #[must_use]
762 pub fn resolve_unchecked(self, work_items: usize) -> usize {
764 match self {
765 Self::Single => 1,
766 Self::Fixed(threads) => threads.clamp(1, MAX_WORLD_THREADS),
767 Self::Auto => std::thread::available_parallelism()
768 .map(usize::from)
769 .unwrap_or(1)
770 .min(work_items.max(1)),
771 }
772 }
773
774 pub fn resolve_checked(self, work_items: usize) -> Result<usize> {
776 match self {
777 Self::Fixed(0) => Err(BedrockWorldError::Validation(
778 "thread count must be in 1..=512".to_string(),
779 )),
780 Self::Fixed(threads) if threads > MAX_WORLD_THREADS => Err(
781 BedrockWorldError::Validation("thread count must be in 1..=512".to_string()),
782 ),
783 _ => Ok(self.resolve_unchecked(work_items)),
784 }
785 }
786}
787
788#[derive(Debug, Clone, Default)]
789pub struct CancelFlag(Arc<AtomicBool>);
791
792impl CancelFlag {
793 #[must_use]
794 pub fn new() -> Self {
796 Self::default()
797 }
798
799 pub fn cancel(&self) {
801 self.0.store(true, Ordering::Relaxed);
802 }
803
804 #[must_use]
805 pub fn from_shared(cancelled: Arc<AtomicBool>) -> Self {
807 Self(cancelled)
808 }
809
810 #[must_use]
811 pub fn to_storage_cancel(&self) -> StorageCancelFlag {
813 StorageCancelFlag::from_shared(Arc::clone(&self.0))
814 }
815
816 #[must_use]
817 pub fn is_cancelled(&self) -> bool {
819 self.0.load(Ordering::Relaxed)
820 }
821}
822
/// Cloneable, thread-safe wrapper around a progress callback.
#[derive(Clone)]
pub struct ProgressSink {
    // The callback is FnMut, so invocations are serialized behind a mutex.
    inner: Arc<Mutex<Box<dyn FnMut(WorldScanProgress) + Send>>>,
}
828
829impl std::fmt::Debug for ProgressSink {
830 fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
831 formatter
832 .debug_struct("ProgressSink")
833 .finish_non_exhaustive()
834 }
835}
836
837impl ProgressSink {
838 #[must_use]
839 pub fn new(callback: impl FnMut(WorldScanProgress) + Send + 'static) -> Self {
841 Self {
842 inner: Arc::new(Mutex::new(Box::new(callback))),
843 }
844 }
845
846 fn emit(&self, progress: WorldScanProgress) {
847 if let Ok(mut callback) = self.inner.lock() {
848 callback(progress);
849 }
850 }
851}
852
/// Progress snapshot emitted during storage scans.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct WorldScanProgress {
    /// Number of storage entries visited so far.
    pub entries_seen: usize,
}
859
impl BedrockWorld<Arc<dyn WorldStorage>> {
    /// Opens a world directory with a type-erased storage backend.
    ///
    /// Detects the on-disk format, then opens either the LevelDB under
    /// `<path>/db` (read-only or read-write per `options`) or the legacy
    /// `chunks.dat` storage, which is always read-only.
    ///
    /// # Errors
    /// Fails if format detection or opening the storage backend fails.
    pub fn open_blocking(path: impl AsRef<Path>, options: OpenOptions) -> Result<Self> {
        let path = path.as_ref().to_path_buf();
        let format = detect_world_format(&path, options.format)?;
        let storage: Arc<dyn WorldStorage> = match format {
            WorldFormat::LevelDb | WorldFormat::LevelDbLegacyTerrain => {
                let db_path = path.join("db");
                if options.read_only {
                    Arc::new(BedrockLevelDbStorage::open_read_only(db_path)?)
                } else {
                    Arc::new(BedrockLevelDbStorage::open(db_path)?)
                }
            }
            WorldFormat::PocketChunksDat => {
                // chunks.dat has no writable backend: honor the open, but
                // warn when the caller asked for write access.
                if !options.read_only {
                    log::warn!(
                        "opening legacy chunks.dat world as read-only despite read_only=false"
                    );
                }
                Arc::new(PocketChunksDatStorage::open(&path)?)
            }
        };
        log::debug!(
            "opened Bedrock world (path={}, format={:?}, read_only={})",
            path.display(),
            format,
            options.read_only
        );
        Ok(Self {
            path,
            options,
            storage,
            format,
        })
    }

    /// Async wrapper around [`Self::open_blocking`] via a blocking task.
    ///
    /// # Errors
    /// Propagates open errors, plus a join error if the task panics.
    #[cfg(feature = "async")]
    pub async fn open(path: impl AsRef<Path>, options: OpenOptions) -> Result<Self> {
        let path = path.as_ref().to_path_buf();
        tokio::task::spawn_blocking(move || Self::open_blocking(path, options))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }

    /// Builds a world handle around an existing storage, assuming the modern
    /// LevelDB format.
    #[must_use]
    pub fn from_storage(
        path: impl Into<PathBuf>,
        storage: Arc<dyn WorldStorage>,
        options: OpenOptions,
    ) -> Self {
        Self {
            path: path.into(),
            options,
            storage,
            format: WorldFormat::LevelDb,
        }
    }

    /// Like [`Self::from_storage`], but with an explicit format.
    #[must_use]
    pub fn from_storage_with_format(
        path: impl Into<PathBuf>,
        storage: Arc<dyn WorldStorage>,
        options: OpenOptions,
        format: WorldFormat,
    ) -> Self {
        Self {
            path: path.into(),
            options,
            storage,
            format,
        }
    }
}
937
938impl BedrockWorld<BedrockLevelDbStorage> {
939 pub fn open_typed_blocking(path: impl AsRef<Path>, options: OpenOptions) -> Result<Self> {
941 let path = path.as_ref().to_path_buf();
942 let format = detect_world_format(&path, options.format)?;
943 match format {
944 WorldFormat::LevelDb | WorldFormat::LevelDbLegacyTerrain => {
945 let db_path = path.join("db");
946 let storage = if options.read_only {
947 BedrockLevelDbStorage::open_read_only(db_path)?
948 } else {
949 BedrockLevelDbStorage::open(db_path)?
950 };
951 Ok(Self {
952 path,
953 options,
954 storage,
955 format,
956 })
957 }
958 WorldFormat::PocketChunksDat => Err(BedrockWorldError::UnsupportedChunkFormat(
959 "typed LevelDB open does not support legacy chunks.dat worlds".to_string(),
960 )),
961 }
962 }
963}
964
965impl<S> BedrockWorld<S>
966where
967 S: WorldStorageHandle,
968{
969 #[must_use]
970 pub fn from_typed_storage(path: impl Into<PathBuf>, storage: S, options: OpenOptions) -> Self {
972 Self {
973 path: path.into(),
974 options,
975 storage,
976 format: WorldFormat::LevelDb,
977 }
978 }
979
980 #[must_use]
981 pub fn from_typed_storage_with_format(
983 path: impl Into<PathBuf>,
984 storage: S,
985 options: OpenOptions,
986 format: WorldFormat,
987 ) -> Self {
988 Self {
989 path: path.into(),
990 options,
991 storage,
992 format,
993 }
994 }
995
    /// Borrows the storage backend as a trait object.
    #[must_use]
    pub fn storage(&self) -> &dyn WorldStorage {
        self.storage.storage()
    }
1001
    /// Root directory of the world on disk.
    #[must_use]
    pub fn path(&self) -> &Path {
        &self.path
    }
1007
    /// Format detected (or assumed) when this handle was created.
    #[must_use]
    pub const fn format(&self) -> WorldFormat {
        self.format
    }
1013
    /// Reads and parses `<world>/level.dat`.
    ///
    /// # Errors
    /// Fails if the file cannot be read or parsed.
    pub fn read_level_dat_blocking(&self) -> Result<LevelDatDocument> {
        read_level_dat_document(&self.path.join("level.dat"))
    }
1018
    /// Writes `<world>/level.dat`.
    ///
    /// # Errors
    /// Fails when the world was opened read-only or the file cannot be
    /// written.
    pub fn write_level_dat_blocking(&self, document: &LevelDatDocument) -> Result<()> {
        self.ensure_writable()?;
        write_level_dat_document(&self.path.join("level.dat"), document)
    }
1024
    /// Lists stored players: the local player (when present) plus every
    /// `player_*` key in storage.
    ///
    /// # Errors
    /// Fails on storage errors.
    pub fn list_players_blocking(&self) -> Result<Vec<PlayerId>> {
        let mut players = Vec::new();
        // The local player lives under a dedicated key.
        if self.storage().get(b"~local_player")?.is_some() {
            players.push(PlayerId::Local);
        }
        self.storage().for_each_prefix(
            b"player_",
            StorageReadOptions::default(),
            &mut |key, _value| {
                // Keys that do not decode as player ids are skipped silently.
                if let Some(player) = PlayerId::from_storage_key(key) {
                    players.push(player);
                }
                Ok(StorageVisitorControl::Continue)
            },
        )?;
        Ok(players)
    }
1043
    /// Counts storage keys grouped by their decoded summary kind.
    ///
    /// Progress is emitted every 8192 entries and once more at the end;
    /// cancellation is checked per key.
    ///
    /// # Errors
    /// Fails on storage errors or cancellation.
    pub fn classify_keys_blocking(
        &self,
        options: WorldScanOptions,
    ) -> Result<BTreeMap<String, usize>> {
        let mut counts = BTreeMap::new();
        let mut entries_seen = 0usize;
        self.storage()
            .for_each_key(to_storage_read_options(&options), &mut |key| {
                check_cancelled(&options)?;
                entries_seen = entries_seen.saturating_add(1);
                if entries_seen.is_multiple_of(8192) {
                    emit_progress(&options, entries_seen);
                }
                let key = BedrockDbKey::decode(key);
                *counts.entry(key.summary_kind()).or_default() += 1;
                Ok(StorageVisitorControl::Continue)
            })?;
        // Final emit so observers see the total.
        emit_progress(&options, entries_seen);
        Ok(counts)
    }
1065
1066 pub fn list_chunk_positions_blocking(
1068 &self,
1069 options: WorldScanOptions,
1070 ) -> Result<Vec<ChunkPos>> {
1071 let mut positions = BTreeSet::new();
1072 let mut entries_seen = 0usize;
1073 self.storage()
1074 .for_each_key(to_storage_read_options(&options), &mut |key| {
1075 check_cancelled(&options)?;
1076 entries_seen = entries_seen.saturating_add(1);
1077 if let BedrockDbKey::Chunk(chunk_key) = BedrockDbKey::decode(key) {
1078 positions.insert(chunk_key.pos);
1079 }
1080 if entries_seen.is_multiple_of(8192) {
1081 emit_progress(&options, entries_seen);
1082 }
1083 Ok(StorageVisitorControl::Continue)
1084 })?;
1085 Ok(positions.into_iter().collect())
1086 }
1087
    /// Lists every distinct chunk position that has at least one renderable
    /// record, in sorted order, logging scan statistics at debug level.
    ///
    /// # Errors
    /// Fails on storage errors or cancellation.
    pub fn list_render_chunk_positions_blocking(
        &self,
        options: WorldScanOptions,
    ) -> Result<Vec<ChunkPos>> {
        let started = Instant::now();
        log::debug!(
            "listing render chunk positions (threading={:?}, queue_depth={}, progress_interval={})",
            options.threading,
            options.pipeline.queue_depth,
            options.pipeline.progress_interval
        );
        // BTreeSet both dedupes and yields sorted output.
        let mut positions = BTreeSet::new();
        let mut entries_seen = 0usize;
        let outcome =
            self.storage()
                .for_each_key(to_storage_read_options(&options), &mut |key| {
                    check_cancelled(&options)?;
                    entries_seen = entries_seen.saturating_add(1);
                    if let BedrockDbKey::Chunk(chunk_key) = BedrockDbKey::decode(key) {
                        // Only record tags relevant to rendering count.
                        if chunk_key.tag.is_render_chunk_record() {
                            positions.insert(chunk_key.pos);
                        }
                    }
                    if entries_seen.is_multiple_of(8192) {
                        emit_progress(&options, entries_seen);
                    }
                    Ok(StorageVisitorControl::Continue)
                })?;
        let positions = positions.into_iter().collect::<Vec<_>>();
        log::debug!(
            "render chunk position listing complete (entries_seen={}, positions={}, visited={}, tables_scanned={}, worker_threads={}, queue_wait_ms={}, cancel_checks={}, elapsed_ms={})",
            entries_seen,
            positions.len(),
            outcome.visited,
            outcome.tables_scanned,
            outcome.worker_threads,
            outcome.queue_wait_ms,
            outcome.cancel_checks,
            started.elapsed().as_millis()
        );
        Ok(positions)
    }
1131
    /// Lists the chunk positions inside `region` that have renderable
    /// records, by probing each candidate position directly rather than
    /// scanning all keys.
    ///
    /// With one resolved worker the probes run inline; otherwise candidates
    /// are pulled from a shared atomic cursor by workers on a rayon pool and
    /// results are funneled back over a bounded channel. Output is sorted.
    ///
    /// # Errors
    /// Fails on an invalid or oversized region, an invalid fixed thread
    /// count, storage errors, or cancellation.
    #[allow(clippy::too_many_lines)]
    pub fn list_render_chunk_positions_in_region_blocking(
        &self,
        region: RenderChunkRegion,
        options: WorldScanOptions,
    ) -> Result<Vec<ChunkPos>> {
        let started = Instant::now();
        validate_render_region(region)?;
        // Widen to i64 so `max - min + 1` cannot overflow i32.
        let x_count = i64::from(region.max_chunk_x) - i64::from(region.min_chunk_x) + 1;
        let z_count = i64::from(region.max_chunk_z) - i64::from(region.min_chunk_z) + 1;
        let capacity = usize::try_from(x_count.saturating_mul(z_count))
            .map_err(|_| BedrockWorldError::Validation("render region is too large".to_string()))?;
        // Enumerate every candidate position in row-major order.
        let mut positions = Vec::with_capacity(capacity);
        for z in region.min_chunk_z..=region.max_chunk_z {
            for x in region.min_chunk_x..=region.max_chunk_x {
                positions.push(ChunkPos {
                    x,
                    z,
                    dimension: region.dimension,
                });
            }
        }
        if positions.is_empty() {
            return Ok(Vec::new());
        }

        let worker_count = options.threading.resolve_checked(positions.len())?;
        log::debug!(
            "indexing render chunk region (dimension={:?}, min=({}, {}), max=({}, {}), workers={})",
            region.dimension,
            region.min_chunk_x,
            region.min_chunk_z,
            region.max_chunk_x,
            region.max_chunk_z,
            worker_count
        );
        // Single-worker fast path: probe inline, preserving row-major order.
        if worker_count == 1 {
            let render_positions = positions
                .into_iter()
                .filter_map(
                    |pos| match self.has_render_chunk_records_blocking(pos, &options) {
                        Ok(true) => Some(Ok(pos)),
                        Ok(false) => None,
                        Err(error) => Some(Err(error)),
                    },
                )
                .collect::<Result<Vec<_>>>()?;
            log::debug!(
                "render chunk region index complete (dimension={:?}, candidates={}, positions={}, workers={}, queue_depth=0, elapsed_ms={})",
                region.dimension,
                capacity,
                render_positions.len(),
                worker_count,
                started.elapsed().as_millis()
            );
            return Ok(render_positions);
        }

        // Each worker probes single-threaded; parallelism comes from the pool.
        let scan_options = WorldScanOptions {
            threading: WorldThreadingOptions::Single,
            pipeline: options.pipeline,
            cancel: options.cancel.clone(),
            progress: options.progress.clone(),
        };
        // Shared cursor: workers claim candidate indices with fetch_add.
        let next_position = Arc::new(std::sync::atomic::AtomicUsize::new(0));
        let queue_depth = options
            .pipeline
            .resolve_queue_depth(worker_count, positions.len());
        let (sender, receiver) = mpsc::sync_channel::<Result<Option<ChunkPos>>>(queue_depth);
        let pool = world_pool(worker_count)?;
        pool.scope(|scope| {
            for worker_index in 0..worker_count {
                let next_position = Arc::clone(&next_position);
                let sender = sender.clone();
                let positions = &positions;
                let scan_options = scan_options.clone();
                scope.spawn(move |_| {
                    log::trace!("render region index worker {worker_index} started");
                    loop {
                        if scan_options
                            .cancel
                            .as_ref()
                            .is_some_and(CancelFlag::is_cancelled)
                        {
                            return;
                        }
                        let index = next_position.fetch_add(1, Ordering::Relaxed);
                        // Cursor past the end: no work left for this worker.
                        let Some(pos) = positions.get(index).copied() else {
                            log::trace!("render region index worker {worker_index} finished");
                            return;
                        };
                        let result = self
                            .has_render_chunk_records_blocking(pos, &scan_options)
                            .map(|is_renderable| is_renderable.then_some(pos));
                        // A closed receiver means the collector bailed early.
                        if sender.send(result).is_err() {
                            return;
                        }
                    }
                });
            }
            // Drop the original sender so the channel closes once all
            // workers finish.
            drop(sender);

            // Collect until the channel closes; the first error aborts.
            let mut render_positions = Vec::new();
            for result in receiver {
                if let Some(pos) = result? {
                    render_positions.push(pos);
                }
            }
            // Arrival order is nondeterministic; sort for a stable result.
            render_positions.sort();
            log::debug!(
                "render chunk region index complete (dimension={:?}, candidates={}, positions={}, workers={}, queue_depth={}, elapsed_ms={})",
                region.dimension,
                positions.len(),
                render_positions.len(),
                worker_count,
                queue_depth,
                started.elapsed().as_millis()
            );
            Ok(render_positions)
        })
    }
1254
    /// Scans all keys and computes the bounding box of chunk positions in
    /// `dimension`, or `None` when the dimension has no chunks.
    ///
    /// # Errors
    /// Fails on storage errors or cancellation.
    pub fn discover_chunk_bounds_blocking(
        &self,
        dimension: crate::Dimension,
        options: WorldScanOptions,
    ) -> Result<Option<ChunkBounds>> {
        let mut bounds: Option<ChunkBounds> = None;
        // Dedupe so chunk_count counts positions, not records.
        let mut seen_positions = BTreeSet::new();
        let mut entries_seen = 0usize;
        self.storage()
            .for_each_key(to_storage_read_options(&options), &mut |key| {
                check_cancelled(&options)?;
                entries_seen = entries_seen.saturating_add(1);
                if let BedrockDbKey::Chunk(chunk_key) = BedrockDbKey::decode(key) {
                    if chunk_key.pos.dimension == dimension && seen_positions.insert(chunk_key.pos)
                    {
                        match &mut bounds {
                            Some(bounds) => bounds.include(chunk_key.pos),
                            None => bounds = Some(ChunkBounds::from_first(chunk_key.pos)),
                        }
                    }
                }
                if entries_seen.is_multiple_of(8192) {
                    emit_progress(&options, entries_seen);
                }
                Ok(StorageVisitorControl::Continue)
            })?;
        Ok(bounds)
    }
1284
    /// Finds the stored chunk in `dimension` closest (squared Euclidean
    /// distance in chunk space) to the given spawn block position, or `None`
    /// when the dimension has no chunks.
    ///
    /// # Errors
    /// Fails on storage errors or cancellation.
    pub fn nearest_loaded_chunk_to_spawn_blocking(
        &self,
        dimension: crate::Dimension,
        spawn_block_x: i32,
        spawn_block_z: i32,
        options: WorldScanOptions,
    ) -> Result<Option<ChunkPos>> {
        // Convert the spawn block coordinates to a chunk coordinate; Y is
        // irrelevant for the horizontal distance.
        let spawn_chunk = BlockPos {
            x: spawn_block_x,
            y: 0,
            z: spawn_block_z,
        }
        .to_chunk_pos(dimension);
        let mut best = None::<(i64, ChunkPos)>;
        let mut seen_positions = BTreeSet::new();
        let mut entries_seen = 0usize;
        self.storage()
            .for_each_key(to_storage_read_options(&options), &mut |key| {
                check_cancelled(&options)?;
                entries_seen = entries_seen.saturating_add(1);
                if let BedrockDbKey::Chunk(chunk_key) = BedrockDbKey::decode(key) {
                    if chunk_key.pos.dimension == dimension && seen_positions.insert(chunk_key.pos)
                    {
                        // i64 math so the squared distance cannot overflow.
                        let dx = i64::from(chunk_key.pos.x) - i64::from(spawn_chunk.x);
                        let dz = i64::from(chunk_key.pos.z) - i64::from(spawn_chunk.z);
                        let distance = dx.saturating_mul(dx).saturating_add(dz.saturating_mul(dz));
                        if best.is_none_or(|(best_distance, _)| distance < best_distance) {
                            best = Some((distance, chunk_key.pos));
                        }
                    }
                }
                if entries_seen.is_multiple_of(8192) {
                    emit_progress(&options, entries_seen);
                }
                Ok(StorageVisitorControl::Continue)
            })?;
        Ok(best.map(|(_, pos)| pos))
    }
1324
    /// Loads a player's data.
    ///
    /// Players without a LevelDB key are handled specially: the legacy
    /// `level.dat` player is read from the level document; any other keyless
    /// id yields `None`.
    ///
    /// # Errors
    /// Fails on storage or parse errors.
    pub fn get_player_blocking(&self, id: &PlayerId) -> Result<Option<PlayerData>> {
        let Some(key) = id.storage_key() else {
            if *id == PlayerId::LegacyLevelDat {
                let document = self.read_level_dat_blocking()?;
                return Ok(Some(PlayerData::from_nbt(id.clone(), document.root)?));
            }
            return Ok(None);
        };
        self.storage()
            .get(key.as_ref())?
            .map(|bytes| PlayerData::from_raw(id.clone(), bytes))
            .transpose()
    }
1339
    /// Writes a player's raw data back to storage.
    ///
    /// # Errors
    /// Fails when the world is read-only, the player id has no LevelDB key,
    /// or the storage write fails.
    pub fn put_player_blocking(&self, player: &PlayerData) -> Result<()> {
        self.ensure_writable()?;
        let Some(key) = player.id.storage_key() else {
            return Err(BedrockWorldError::Validation(
                "player id has no LevelDB key".to_string(),
            ));
        };
        self.storage().put(key.as_ref(), &player.raw)
    }
1350
    /// Collects every stored record belonging to `pos` into a [`Chunk`].
    ///
    /// The chunk version is taken from the first byte of the `Version`
    /// record, when one is present.
    ///
    /// # Errors
    /// Fails on storage errors.
    pub fn get_chunk_blocking(&self, pos: ChunkPos) -> Result<Chunk> {
        let mut records = Vec::new();
        let prefix = chunk_record_prefix(pos);
        self.storage().for_each_prefix(
            &prefix,
            StorageReadOptions::default(),
            &mut |raw_key, value| {
                if let Ok(key) = ChunkKey::decode(raw_key) {
                    // Defensive: keep only records whose decoded position
                    // matches exactly.
                    if key.pos == pos {
                        records.push(ChunkRecord {
                            key,
                            value: value.clone(),
                        });
                    }
                }
                Ok(StorageVisitorControl::Continue)
            },
        )?;
        let version = records
            .iter()
            .find(|record| record.key.tag == ChunkRecordTag::Version)
            .and_then(|record| record.value.first().copied());
        Ok(Chunk {
            pos,
            version,
            records,
        })
    }
1380
1381 pub fn get_subchunk_blocking(&self, pos: ChunkPos, y: i8) -> Result<Option<crate::SubChunk>> {
1383 self.get_chunk_blocking(pos)?.get_subchunk(y)
1384 }
1385
1386 pub fn parse_world_blocking(&self, options: WorldParseOptions) -> Result<ParsedWorld> {
1388 let level_dat = self.read_level_dat_blocking()?;
1389 parse_world_storage(level_dat, self.storage(), options)
1390 }
1391
1392 pub fn parse_chunk_blocking(&self, pos: ChunkPos) -> Result<ParsedChunkData> {
1394 let chunk = self.get_chunk_blocking(pos)?;
1395 Ok(parse_chunk_records(pos, chunk.records))
1396 }
1397
1398 pub fn parse_chunk_with_options_blocking(
1400 &self,
1401 pos: ChunkPos,
1402 options: WorldParseOptions,
1403 ) -> Result<ParsedChunkData> {
1404 let chunk = self.get_chunk_blocking(pos)?;
1405 Ok(parse_chunk_records_with_options(
1406 pos,
1407 chunk.records,
1408 options,
1409 ))
1410 }
1411
1412 pub fn parse_subchunk_blocking(
1414 &self,
1415 pos: ChunkPos,
1416 y: i8,
1417 options: WorldParseOptions,
1418 ) -> Result<Option<crate::SubChunk>> {
1419 let key = ChunkKey::subchunk(pos, y);
1420 self.storage()
1421 .get(&key.encode())?
1422 .map(|value| parse_subchunk_with_mode(y, value, options.subchunk_decode_mode))
1423 .transpose()
1424 }
1425
1426 pub fn get_biome_storage_blocking(
1428 &self,
1429 pos: ChunkPos,
1430 y: i32,
1431 ) -> Result<Option<ParsedBiomeStorage>> {
1432 let Some(biome_data) = self.get_biome_data_blocking(pos)? else {
1433 return Ok(None);
1434 };
1435 for storage in biome_data.storages {
1436 if biome_storage_contains_y(&storage, y) {
1437 return Ok(Some(storage));
1438 }
1439 }
1440 Ok(None)
1441 }
1442
1443 pub fn get_biome_storages_blocking(
1445 &self,
1446 pos: ChunkPos,
1447 ) -> Result<Option<Vec<ParsedBiomeStorage>>> {
1448 Ok(self
1449 .get_biome_data_blocking(pos)?
1450 .map(|biome_data| biome_data.storages))
1451 }
1452
1453 fn get_biome_data_blocking(&self, pos: ChunkPos) -> Result<Option<ParsedBiomeData>> {
1454 for (tag, version) in [
1455 (ChunkRecordTag::Data3D, crate::ChunkVersion::New),
1456 (ChunkRecordTag::Data2D, crate::ChunkVersion::Old),
1457 (ChunkRecordTag::Data2DLegacy, crate::ChunkVersion::Old),
1458 ] {
1459 let key = ChunkKey::new(pos, tag).encode();
1460 let Some(value) = self.storage().get(&key)? else {
1461 continue;
1462 };
1463 let biome_data = match version {
1464 crate::ChunkVersion::New => parse_data3d(&value),
1465 crate::ChunkVersion::Old => parse_legacy_data2d(&value),
1466 }
1467 .map_err(|error| BedrockWorldError::CorruptWorld(format!("biome data: {error}")))?;
1468 return Ok(Some(biome_data));
1469 }
1470 Ok(None)
1471 }
1472
1473 fn has_render_chunk_records_blocking(
1474 &self,
1475 pos: ChunkPos,
1476 options: &WorldScanOptions,
1477 ) -> Result<bool> {
1478 let prefix = chunk_record_prefix(pos);
1479 let mut found = false;
1480 self.storage().for_each_prefix_key(
1481 &prefix,
1482 to_storage_read_options(options),
1483 &mut |key| {
1484 check_cancelled(options)?;
1485 if let BedrockDbKey::Chunk(chunk_key) = BedrockDbKey::decode(key) {
1486 if chunk_key.pos == pos && chunk_key.tag.is_render_chunk_record() {
1487 found = true;
1488 return Ok(StorageVisitorControl::Stop);
1489 }
1490 }
1491 Ok(StorageVisitorControl::Continue)
1492 },
1493 )?;
1494 Ok(found)
1495 }
1496
1497 pub fn get_height_at_blocking(
1499 &self,
1500 pos: ChunkPos,
1501 local_x: u8,
1502 local_z: u8,
1503 ) -> Result<Option<i16>> {
1504 validate_local_column(local_x, local_z)?;
1505 Ok(self
1506 .get_height_map_blocking(pos)?
1507 .and_then(|heights| heights[usize::from(local_z)][usize::from(local_x)]))
1508 }
1509
1510 pub fn get_height_map_blocking(
1512 &self,
1513 pos: ChunkPos,
1514 ) -> Result<Option<[[Option<i16>; 16]; 16]>> {
1515 if let Some(biome_data) = self
1516 .get_biome_data_blocking(pos)
1517 .map_err(|error| BedrockWorldError::CorruptWorld(format!("height data: {error}")))?
1518 {
1519 return Ok(Some(render_height_map_from_biome_data(pos, &biome_data)));
1520 }
1521 let key = ChunkKey::new(pos, ChunkRecordTag::LegacyTerrain).encode();
1522 if let Some(value) = self.storage().get(&key)? {
1523 let terrain = LegacyTerrain::parse(value)?;
1524 return Ok(Some(render_height_map_from_legacy_terrain(&terrain)));
1525 }
1526 Ok(None)
1527 }
1528
1529 pub fn get_legacy_biome_colors_blocking(
1531 &self,
1532 pos: ChunkPos,
1533 ) -> Result<Option<[[Option<u32>; 16]; 16]>> {
1534 let key = ChunkKey::new(pos, ChunkRecordTag::LegacyTerrain).encode();
1535 let Some(value) = self.storage().get(&key)? else {
1536 return Ok(None);
1537 };
1538 let terrain = LegacyTerrain::parse(value)?;
1539 Ok(Some(render_biome_colors_from_legacy_terrain(&terrain)))
1540 }
1541
1542 pub fn get_legacy_biome_samples_blocking(
1544 &self,
1545 pos: ChunkPos,
1546 ) -> Result<Option<[[Option<LegacyBiomeSample>; 16]; 16]>> {
1547 let key = ChunkKey::new(pos, ChunkRecordTag::LegacyTerrain).encode();
1548 let Some(value) = self.storage().get(&key)? else {
1549 return Ok(None);
1550 };
1551 let terrain = LegacyTerrain::parse(value)?;
1552 Ok(Some(render_biomes_from_legacy_terrain(&terrain)))
1553 }
1554
1555 pub fn get_legacy_biome_color_blocking(
1557 &self,
1558 pos: ChunkPos,
1559 local_x: u8,
1560 local_z: u8,
1561 ) -> Result<Option<u32>> {
1562 validate_local_column(local_x, local_z)?;
1563 Ok(self
1564 .get_legacy_biome_colors_blocking(pos)?
1565 .and_then(|colors| colors[usize::from(local_z)][usize::from(local_x)]))
1566 }
1567
1568 pub fn get_legacy_biome_sample_blocking(
1570 &self,
1571 pos: ChunkPos,
1572 local_x: u8,
1573 local_z: u8,
1574 ) -> Result<Option<LegacyBiomeSample>> {
1575 validate_local_column(local_x, local_z)?;
1576 Ok(self
1577 .get_legacy_biome_samples_blocking(pos)?
1578 .and_then(|samples| samples[usize::from(local_z)][usize::from(local_x)]))
1579 }
1580
1581 pub fn get_biome_id_blocking(
1583 &self,
1584 pos: ChunkPos,
1585 local_x: u8,
1586 local_z: u8,
1587 y: i32,
1588 ) -> Result<Option<u32>> {
1589 validate_local_column(local_x, local_z)?;
1590 let Some(storage) = self.get_biome_storage_blocking(pos, y)? else {
1591 return Ok(None);
1592 };
1593 Ok(biome_id_from_storage(&storage, local_x, local_z, y))
1594 }
1595
    /// Scans a block column downward from the stored surface height and
    /// returns the first surface block, with optional air-skipping and
    /// see-through water handling.
    ///
    /// Returns `Ok(None)` when the chunk has no height map or every block in
    /// the scanned range is skipped.
    pub fn get_surface_column_blocking(
        &self,
        pos: ChunkPos,
        local_x: u8,
        local_z: u8,
        options: SurfaceColumnOptions,
    ) -> Result<Option<SurfaceColumn>> {
        validate_local_column(local_x, local_z)?;
        // The modern version's vertical range bounds the scan window.
        let (min_y, max_y) = pos.y_range(crate::ChunkVersion::New);
        let start_y = match self.get_height_at_blocking(pos, local_x, local_z)? {
            Some(height) => i32::from(height).clamp(min_y, max_y),
            None => return Ok(None),
        };
        // Walk downward from the height-map surface to the world floor.
        for y in (min_y..=start_y).rev() {
            // Missing subchunks are skipped rather than treated as air.
            let Some(block) = self.block_state_in_chunk_column(pos, local_x, y, local_z)? else {
                continue;
            };
            if options.skip_air && is_air_block_name(&block.name) {
                continue;
            }
            let biome_id = self.get_biome_id_blocking(pos, local_x, local_z, y)?;
            // With transparent water, look through the water column for the
            // first solid block so callers can render depth-tinted water.
            let (water_depth, under_water_block_name) =
                if options.transparent_water && is_water_block_name(&block.name) {
                    self.find_solid_under_water(pos, local_x, local_z, y, min_y)?
                } else {
                    (0, None)
                };
            return Ok(Some(SurfaceColumn {
                y,
                block_name: block.name,
                biome_id,
                water_depth,
                under_water_block_name,
                is_fallback: false,
            }));
        }
        Ok(None)
    }
1635
1636 pub fn load_render_chunk_blocking(
1638 &self,
1639 pos: ChunkPos,
1640 options: RenderChunkLoadOptions,
1641 ) -> Result<RenderChunkData> {
1642 let (mut chunks, _) = self.load_render_chunks_with_stats_blocking([pos], options)?;
1643 chunks.pop().ok_or_else(|| {
1644 BedrockWorldError::CorruptWorld("exact render load returned no chunk".to_string())
1645 })
1646 }
1647
1648 pub fn load_render_chunks_blocking(
1650 &self,
1651 positions: impl IntoIterator<Item = ChunkPos>,
1652 options: RenderChunkLoadOptions,
1653 ) -> Result<Vec<RenderChunkData>> {
1654 Ok(self
1655 .load_render_chunks_with_stats_blocking(positions, options)?
1656 .0)
1657 }
1658
1659 pub fn load_render_chunks_with_stats_blocking(
1661 &self,
1662 positions: impl IntoIterator<Item = ChunkPos>,
1663 options: RenderChunkLoadOptions,
1664 ) -> Result<(Vec<RenderChunkData>, RenderLoadStats)> {
1665 let started = Instant::now();
1666 let positions = positions.into_iter().collect::<Vec<_>>();
1667 if positions.is_empty() {
1668 log::debug!("loading render chunks skipped (chunks=0)");
1669 return Ok((Vec::new(), RenderLoadStats::default()));
1670 }
1671 let mut positions = positions;
1672 sort_render_chunk_positions(&mut positions, options.priority);
1673 let worker_count = options.threading.resolve_checked(positions.len())?;
1674 log::debug!(
1675 "loading render chunks (chunks={}, workers={}, request={:?}, queue_depth={}, priority={:?})",
1676 positions.len(),
1677 worker_count,
1678 options.request,
1679 options
1680 .pipeline
1681 .resolve_queue_depth(worker_count, positions.len()),
1682 options.priority
1683 );
1684 self.load_render_chunks_exact_batch_blocking_sorted(
1685 positions,
1686 options,
1687 worker_count,
1688 started,
1689 )
1690 }
1691
    /// Batch-loads render data for pre-sorted chunk positions via exact
    /// LevelDB key lookups (no prefix scans).
    ///
    /// Phases: plan the key/request lists, fetch them in one `get_many`
    /// batch, optionally fetch extra surface subchunks guided by a derived
    /// height map, decode (serially or on a rayon pool), then fully reload
    /// any chunks the hint-surface path left incomplete. Timing and hit
    /// counts are accumulated into the returned [`RenderLoadStats`].
    #[allow(clippy::too_many_lines)]
    fn load_render_chunks_exact_batch_blocking_sorted(
        &self,
        positions: Vec<ChunkPos>,
        options: RenderChunkLoadOptions,
        worker_count: usize,
        started: Instant,
    ) -> Result<(Vec<RenderChunkData>, RenderLoadStats)> {
        check_render_load_cancelled(&options)?;
        // One raw accumulator per position; filled in by chunk index below.
        let mut raw_chunks = positions
            .iter()
            .copied()
            .map(|pos| RawRenderChunkData {
                pos,
                biome_record: None,
                subchunks: BTreeMap::new(),
                block_entities: None,
                legacy_terrain: None,
            })
            .collect::<Vec<_>>();

        // Plan the exact keys to fetch for every chunk in the batch.
        let mut keys = Vec::new();
        let mut requests = Vec::new();
        for (chunk_index, pos) in positions.iter().copied().enumerate() {
            // Legacy terrain is always requested; it serves as a fallback
            // source of heights/biomes on old worlds.
            push_render_record_request(
                &mut keys,
                &mut requests,
                chunk_index,
                pos,
                RenderRecordKind::LegacyTerrain,
            );
            if request_needs_biome_record(options.request) {
                // Request all three biome record generations; whichever
                // exists in storage will be applied.
                push_render_record_request(
                    &mut keys,
                    &mut requests,
                    chunk_index,
                    pos,
                    RenderRecordKind::Data3D,
                );
                push_render_record_request(
                    &mut keys,
                    &mut requests,
                    chunk_index,
                    pos,
                    RenderRecordKind::Data2D,
                );
                push_render_record_request(
                    &mut keys,
                    &mut requests,
                    chunk_index,
                    pos,
                    RenderRecordKind::Data2DLegacy,
                );
            }
            if !request_uses_hint_surface_subchunks(options.request) {
                // Without the surface hint we must plan subchunk keys up
                // front (no height map is available yet, hence `None`).
                for y in planned_render_subchunk_ys(pos, &options, None)? {
                    push_render_record_request(
                        &mut keys,
                        &mut requests,
                        chunk_index,
                        pos,
                        RenderRecordKind::Subchunk(y),
                    );
                }
            }
            if request_loads_block_entities(options.request) {
                push_render_record_request(
                    &mut keys,
                    &mut requests,
                    chunk_index,
                    pos,
                    RenderRecordKind::BlockEntity,
                );
            }
        }

        // First batched read; stats use saturating arithmetic throughout so
        // bookkeeping can never panic on overflow.
        let mut keys_requested = keys.len();
        let mut exact_get_batches = 0usize;
        let mut db_read_ms = 0u128;
        let db_started = Instant::now();
        let values = self.storage().get_many(&keys)?;
        db_read_ms = db_read_ms.saturating_add(db_started.elapsed().as_millis());
        exact_get_batches = exact_get_batches.saturating_add(usize::from(!keys.is_empty()));
        let mut keys_found = apply_render_record_values(&mut raw_chunks, &requests, values);

        if request_uses_hint_surface_subchunks(options.request) {
            // Second pass: now that biome/legacy records are loaded, derive
            // a height map per chunk and fetch only the subchunks the
            // surface hint needs that are not already present.
            let mut needed_keys = Vec::new();
            let mut needed_requests = Vec::new();
            for (chunk_index, raw) in raw_chunks.iter().enumerate() {
                let biome_data = parse_render_biome_record(raw.biome_record.as_ref())?;
                let height_map = if let Some(biome_data) = biome_data.as_ref() {
                    Some(render_height_map_from_biome_data(raw.pos, biome_data))
                } else {
                    legacy_height_map_from_raw(raw.legacy_terrain.as_ref())?
                };
                for y in planned_render_subchunk_ys(raw.pos, &options, height_map.as_ref())? {
                    if raw.subchunks.contains_key(&y) {
                        continue;
                    }
                    push_render_record_request(
                        &mut needed_keys,
                        &mut needed_requests,
                        chunk_index,
                        raw.pos,
                        RenderRecordKind::Subchunk(y),
                    );
                }
            }
            if !needed_keys.is_empty() {
                let db_started = Instant::now();
                let values = self.storage().get_many(&needed_keys)?;
                db_read_ms = db_read_ms.saturating_add(db_started.elapsed().as_millis());
                exact_get_batches = exact_get_batches.saturating_add(1);
                keys_requested = keys_requested.saturating_add(needed_keys.len());
                keys_found = keys_found.saturating_add(apply_render_record_values(
                    &mut raw_chunks,
                    &needed_requests,
                    values,
                ));
            }
        }

        check_render_load_cancelled(&options)?;
        let decode_started = Instant::now();
        // Single worker decodes serially (with a per-chunk progress
        // callback); multiple workers decode on a rayon pool.
        let (mut chunks, decode_timing) = if worker_count == 1 {
            let mut chunks = Vec::with_capacity(raw_chunks.len());
            let mut timing = RenderChunkDecodeTiming::default();
            for raw in raw_chunks {
                check_render_load_cancelled(&options)?;
                let (chunk, chunk_timing) = render_chunk_from_raw(raw, &options)?;
                timing.add(chunk_timing);
                chunks.push(chunk);
                emit_render_load_progress(&options, chunks.len());
            }
            (chunks, timing)
        } else {
            let pool = world_pool(worker_count)?;
            let decoded = pool.install(|| {
                raw_chunks
                    .into_par_iter()
                    .map(|raw| {
                        check_render_load_cancelled(&options)?;
                        render_chunk_from_raw(raw, &options)
                    })
                    .collect::<Result<Vec<_>>>()
            })?;
            // Merge per-chunk timings serially after the parallel decode.
            let mut chunks = Vec::with_capacity(decoded.len());
            let mut timing = RenderChunkDecodeTiming::default();
            for (chunk, chunk_timing) in decoded {
                timing.add(chunk_timing);
                chunks.push(chunk);
            }
            (chunks, timing)
        };
        // Hint-surface loads can leave chunks incomplete; reload those fully.
        let full_reload_ms =
            self.reload_incomplete_needed_exact_surface_chunks_blocking(&mut chunks, &options)?;
        let decode_ms = decode_started.elapsed().as_millis();
        let mut stats = render_load_stats(&chunks, worker_count, 0, started.elapsed().as_millis());
        stats.keys_requested = keys_requested;
        stats.keys_found = keys_found;
        stats.exact_get_batches = exact_get_batches;
        stats.prefix_scans = 0;
        stats.decode_ms = decode_ms;
        stats.db_read_ms = db_read_ms;
        stats.biome_parse_ms = decode_timing.biome_parse_ms;
        stats.subchunk_parse_ms = decode_timing.subchunk_parse_ms;
        stats.surface_scan_ms = decode_timing.surface_scan_ms;
        stats.block_entity_parse_ms = decode_timing.block_entity_parse_ms;
        stats.full_reload_ms = full_reload_ms;
        stats.detected_format = self.format;
        // For pocket chunks.dat worlds, surface the legacy-terrain count as
        // the legacy pocket chunk count; other formats report zero.
        stats.legacy_pocket_chunks = if self.format == WorldFormat::PocketChunksDat {
            stats.legacy_terrain_records
        } else {
            0
        };
        log_render_load_complete(&stats);
        Ok((chunks, stats))
    }
1870
    /// Re-loads, with a full (non-hint) request, any chunk whose hint-surface
    /// load turned out incomplete, splicing the reloaded chunks back in place.
    ///
    /// Returns the milliseconds spent on the reload (0 when the request does
    /// not use hint surfaces or no chunk needed reloading).
    fn reload_incomplete_needed_exact_surface_chunks_blocking(
        &self,
        chunks: &mut [RenderChunkData],
        options: &RenderChunkLoadOptions,
    ) -> Result<u128> {
        if !request_uses_hint_surface_subchunks(options.request) {
            return Ok(0);
        }

        // Same options, upgraded to the equivalent full request.
        let mut full_options = options.clone();
        full_options.request = exact_surface_full_request(options.request);
        let mut reload_indexes = Vec::new();
        let mut reload_positions = Vec::new();
        for (index, chunk) in chunks.iter().enumerate() {
            if needed_exact_surface_chunk_requires_full_reload(chunk)? {
                reload_indexes.push(index);
                reload_positions.push(chunk.pos);
            }
        }
        if reload_positions.is_empty() {
            return Ok(0);
        }
        check_render_load_cancelled(options)?;
        let started = Instant::now();
        // Re-resolve the worker count for the (usually smaller) reload batch.
        let worker_count = options.threading.resolve_checked(reload_positions.len())?;
        full_options.threading = if worker_count <= 1 {
            WorldThreadingOptions::Single
        } else {
            WorldThreadingOptions::Fixed(worker_count)
        };
        let (reloaded, stats) =
            self.load_render_chunks_with_stats_blocking(reload_positions, full_options)?;
        // `reload_indexes` and `reloaded` are parallel; overwrite in place.
        for (chunk_index, reloaded_chunk) in reload_indexes.into_iter().zip(reloaded) {
            if let Some(chunk) = chunks.get_mut(chunk_index) {
                *chunk = reloaded_chunk;
            }
        }
        let elapsed = started.elapsed().as_millis().max(stats.load_ms);
        log::debug!(
            "hint surface full reload complete (chunks={}, workers={}, load_ms={}, db_read_ms={}, decode_ms={})",
            stats.requested_chunks,
            stats.worker_threads,
            stats.load_ms,
            stats.db_read_ms,
            stats.decode_ms
        );
        Ok(elapsed)
    }
1919
1920 pub fn load_render_region_blocking(
1922 &self,
1923 region: RenderChunkRegion,
1924 options: RenderRegionLoadOptions,
1925 ) -> Result<RenderRegionData> {
1926 if region.min_chunk_x > region.max_chunk_x || region.min_chunk_z > region.max_chunk_z {
1927 return Err(BedrockWorldError::Validation(format!(
1928 "invalid render region: min=({}, {}) max=({}, {})",
1929 region.min_chunk_x, region.min_chunk_z, region.max_chunk_x, region.max_chunk_z
1930 )));
1931 }
1932 let chunk_count_x = i64::from(region.max_chunk_x) - i64::from(region.min_chunk_x) + 1;
1933 let chunk_count_z = i64::from(region.max_chunk_z) - i64::from(region.min_chunk_z) + 1;
1934 let capacity = usize::try_from(chunk_count_x.saturating_mul(chunk_count_z))
1935 .map_err(|_| BedrockWorldError::Validation("render region is too large".to_string()))?;
1936 let mut positions = Vec::with_capacity(capacity);
1937 for z in region.min_chunk_z..=region.max_chunk_z {
1938 for x in region.min_chunk_x..=region.max_chunk_x {
1939 positions.push(ChunkPos {
1940 x,
1941 z,
1942 dimension: region.dimension,
1943 });
1944 }
1945 }
1946 let (chunks, stats) =
1947 self.load_render_chunks_with_stats_blocking(positions, options.into())?;
1948 Ok(RenderRegionData {
1949 region,
1950 chunks,
1951 stats,
1952 })
1953 }
1954
1955 pub fn get_block_state_at_blocking(
1957 &self,
1958 dimension: crate::Dimension,
1959 block_pos: BlockPos,
1960 ) -> Result<Option<BlockState>> {
1961 let chunk_pos = block_pos.to_chunk_pos(dimension);
1962 let (_, block_y, _) = block_pos.in_chunk_offset();
1963 let subchunk_y = block_y_to_subchunk_y(block_y)?;
1964 let Some(subchunk) = self.parse_subchunk_blocking(
1965 chunk_pos,
1966 subchunk_y,
1967 WorldParseOptions {
1968 subchunk_decode_mode: SubChunkDecodeMode::FullIndices,
1969 ..WorldParseOptions::summary()
1970 },
1971 )?
1972 else {
1973 return Ok(None);
1974 };
1975 let (local_x, _, local_z) = block_pos.in_chunk_offset();
1976 let local_y = u8::try_from(block_y - i32::from(subchunk_y) * 16).map_err(|_| {
1977 BedrockWorldError::Validation(format!("block y={block_y} is outside subchunk bounds"))
1978 })?;
1979 Ok(subchunk.block_state_at(local_x, local_y, local_z).cloned())
1980 }
1981
1982 pub fn get_subchunk_layer_blocking(
1984 &self,
1985 pos: ChunkPos,
1986 y: i32,
1987 mode: SubChunkDecodeMode,
1988 ) -> Result<Option<SubChunk>> {
1989 let subchunk_y = block_y_to_subchunk_y(y)?;
1990 self.parse_subchunk_blocking(
1991 pos,
1992 subchunk_y,
1993 WorldParseOptions {
1994 subchunk_decode_mode: mode,
1995 ..WorldParseOptions::summary()
1996 },
1997 )
1998 }
1999
2000 fn block_state_in_chunk_column(
2001 &self,
2002 pos: ChunkPos,
2003 local_x: u8,
2004 y: i32,
2005 local_z: u8,
2006 ) -> Result<Option<BlockState>> {
2007 let subchunk_y = block_y_to_subchunk_y(y)?;
2008 let Some(subchunk) = self.parse_subchunk_blocking(
2009 pos,
2010 subchunk_y,
2011 WorldParseOptions {
2012 subchunk_decode_mode: SubChunkDecodeMode::FullIndices,
2013 ..WorldParseOptions::summary()
2014 },
2015 )?
2016 else {
2017 return Ok(None);
2018 };
2019 let local_y = u8::try_from(y - i32::from(subchunk_y) * 16).map_err(|_| {
2020 BedrockWorldError::Validation(format!("block y={y} is outside subchunk bounds"))
2021 })?;
2022 Ok(subchunk.block_state_at(local_x, local_y, local_z).cloned())
2023 }
2024
    /// Walks downward from just below `water_y` and returns the depth of the
    /// water column together with the first non-air, non-water block name.
    ///
    /// Missing subchunks are skipped without increasing the depth; air found
    /// inside the column still counts toward it. The returned depth includes
    /// the solid block itself and saturates at `u8::MAX`.
    fn find_solid_under_water(
        &self,
        pos: ChunkPos,
        local_x: u8,
        local_z: u8,
        water_y: i32,
        min_y: i32,
    ) -> Result<(u8, Option<String>)> {
        let mut depth = 0_u8;
        for y in (min_y..water_y).rev() {
            let Some(block) = self.block_state_in_chunk_column(pos, local_x, y, local_z)? else {
                continue;
            };
            if is_air_block_name(&block.name) || is_water_block_name(&block.name) {
                depth = depth.saturating_add(1);
                continue;
            }
            // Count the solid block as the final step of the column.
            depth = depth.saturating_add(1);
            return Ok((depth, Some(block.name)));
        }
        // No solid block found above the floor: report depth only.
        Ok((depth, None))
    }
2047
2048 pub fn parse_global_data_blocking(&self) -> Result<Vec<ParsedDbEntry>> {
2050 parse_global_storage_entries(self.storage(), WorldParseOptions::summary())
2051 }
2052
2053 pub fn scan_entities_blocking(
2055 &self,
2056 options: WorldScanOptions,
2057 ) -> Result<(Vec<ParsedEntity>, WorldParseReport)> {
2058 let mut report = WorldParseReport::default();
2059 let mut entities = Vec::new();
2060 let mut entries_seen = 0usize;
2061 self.storage()
2062 .for_each_entry(to_storage_read_options(&options), &mut |key, value| {
2063 check_cancelled(&options)?;
2064 entries_seen = entries_seen.saturating_add(1);
2065 match BedrockDbKey::decode(key) {
2066 BedrockDbKey::ActorPrefix { .. } => {
2067 entities.extend(parse_entities_from_value(value, &mut report));
2068 }
2069 BedrockDbKey::Chunk(chunk_key) if chunk_key.tag == ChunkRecordTag::Entity => {
2070 entities.extend(parse_entities_from_value(value, &mut report));
2071 }
2072 _ => {}
2073 }
2074 if entries_seen.is_multiple_of(8192) {
2075 emit_progress(&options, entries_seen);
2076 }
2077 Ok(StorageVisitorControl::Continue)
2078 })?;
2079 Ok((entities, report))
2080 }
2081
2082 pub fn scan_block_entities_blocking(
2084 &self,
2085 options: WorldScanOptions,
2086 ) -> Result<(Vec<ParsedBlockEntity>, WorldParseReport)> {
2087 let mut report = WorldParseReport::default();
2088 let mut block_entities = Vec::new();
2089 let mut entries_seen = 0usize;
2090 self.storage()
2091 .for_each_entry(to_storage_read_options(&options), &mut |key, value| {
2092 check_cancelled(&options)?;
2093 entries_seen = entries_seen.saturating_add(1);
2094 if let BedrockDbKey::Chunk(chunk_key) = BedrockDbKey::decode(key) {
2095 if chunk_key.tag == ChunkRecordTag::BlockEntity {
2096 block_entities.extend(parse_block_entities_from_value(value, &mut report));
2097 }
2098 }
2099 if entries_seen.is_multiple_of(8192) {
2100 emit_progress(&options, entries_seen);
2101 }
2102 Ok(StorageVisitorControl::Continue)
2103 })?;
2104 Ok((block_entities, report))
2105 }
2106
    /// Scans the whole database for item stacks, collecting them from player
    /// inventories, entities (actor and legacy chunk records), and block
    /// entities.
    pub fn scan_items_blocking(
        &self,
        options: WorldScanOptions,
    ) -> Result<(Vec<ItemStack>, WorldParseReport)> {
        let mut report = WorldParseReport::default();
        let mut items = Vec::new();
        let mut entries_seen = 0usize;
        self.storage()
            .for_each_entry(to_storage_read_options(&options), &mut |key, value| {
                check_cancelled(&options)?;
                entries_seen = entries_seen.saturating_add(1);
                match BedrockDbKey::decode(key) {
                    BedrockDbKey::LocalPlayer | BedrockDbKey::RemotePlayer(_) => {
                        // Player records are single NBT documents; a parse
                        // failure is recorded but does not abort the scan.
                        match parse_root_nbt(value) {
                            Ok(nbt) => {
                                let mut player_items = collect_item_stacks(&nbt);
                                report.item_count =
                                    report.item_count.saturating_add(player_items.len());
                                items.append(&mut player_items);
                            }
                            Err(error) => report
                                .parse_errors
                                .push(format!("player item scan failed: {error}")),
                        }
                    }
                    BedrockDbKey::ActorPrefix { .. } => {
                        for entity in parse_entities_from_value(value, &mut report) {
                            items.extend(entity.items);
                        }
                    }
                    BedrockDbKey::Chunk(chunk_key) if chunk_key.tag == ChunkRecordTag::Entity => {
                        for entity in parse_entities_from_value(value, &mut report) {
                            items.extend(entity.items);
                        }
                    }
                    BedrockDbKey::Chunk(chunk_key)
                        if chunk_key.tag == ChunkRecordTag::BlockEntity =>
                    {
                        for block_entity in parse_block_entities_from_value(value, &mut report) {
                            items.extend(block_entity.items);
                        }
                    }
                    _ => {}
                }
                // Report progress every 8192 entries to keep long scans responsive.
                if entries_seen.is_multiple_of(8192) {
                    emit_progress(&options, entries_seen);
                }
                Ok(StorageVisitorControl::Continue)
            })?;
        Ok((items, report))
    }
2159
2160 pub fn scan_maps_blocking(&self) -> Result<Vec<ParsedMapData>> {
2169 Ok(self
2170 .parse_global_data_blocking()?
2171 .into_iter()
2172 .filter_map(|entry| match entry.value {
2173 ParsedDbValue::MapData(value) => Some(value),
2174 _ => None,
2175 })
2176 .collect())
2177 }
2178
2179 pub fn read_map_record_blocking(&self, id: &MapRecordId) -> Result<Option<ParsedMapData>> {
2185 self.storage()
2186 .get(&id.storage_key())?
2187 .map(|value| parse_map_record(id.clone(), value))
2188 .transpose()
2189 }
2190
2191 pub fn scan_map_records_blocking(
2197 &self,
2198 options: WorldScanOptions,
2199 ) -> Result<Vec<ParsedMapData>> {
2200 let mut records = Vec::new();
2201 self.storage().for_each_prefix_ref(
2202 b"map_",
2203 to_storage_read_options(&options),
2204 &mut |entry| {
2205 check_cancelled(&options)?;
2206 let Some(id) = MapRecordId::from_storage_key(entry.key) else {
2207 return Ok(StorageVisitorControl::Continue);
2208 };
2209 records.push(parse_map_record(id, Bytes::copy_from_slice(entry.value))?);
2210 Ok(StorageVisitorControl::Continue)
2211 },
2212 )?;
2213 Ok(records)
2214 }
2215
    /// Encodes a map record, validates it by re-parsing the encoded bytes,
    /// then writes it in a single transaction.
    pub fn write_map_record_blocking(&self, record: &ParsedMapData) -> Result<()> {
        self.ensure_writable()?;
        let value = encode_map_record(record)?;
        // Decode round-trip guards against committing an unreadable record.
        parse_map_record(record.record_id.clone(), value.clone())?;
        let mut transaction = self.transaction();
        transaction.put_raw_key(record.record_id.storage_key(), value);
        transaction.commit()
    }
2230
    /// Deletes a map record by id in a single transaction.
    pub fn delete_map_record_blocking(&self, id: &MapRecordId) -> Result<()> {
        self.ensure_writable()?;
        let mut transaction = self.transaction();
        transaction.delete_raw_key(id.storage_key());
        transaction.commit()
    }
2243
2244 pub fn scan_villages_blocking(&self) -> Result<Vec<ParsedVillageData>> {
2250 Ok(self
2251 .parse_global_data_blocking()?
2252 .into_iter()
2253 .filter_map(|entry| match entry.value {
2254 ParsedDbValue::VillageData(value) => Some(value),
2255 _ => None,
2256 })
2257 .collect())
2258 }
2259
    /// Scans `VILLAGE_*` keys and parses each village record without keeping
    /// the raw payload (`raw` is left empty to save memory).
    ///
    /// NBT parse failures yield an empty root list rather than an error;
    /// cancellation is checked on every visited entry.
    pub fn scan_villages_lightweight_blocking(
        &self,
        cancel: &CancelFlag,
    ) -> Result<Vec<ParsedVillageData>> {
        let mut villages = Vec::new();
        let options = StorageReadOptions {
            cancel: Some(cancel.to_storage_cancel()),
            ..StorageReadOptions::default()
        };
        self.storage()
            .for_each_prefix_ref(b"VILLAGE_", options, &mut |entry| {
                if cancel.is_cancelled() {
                    return Err(BedrockWorldError::Cancelled {
                        operation: "village scan",
                    });
                }
                // Keys sharing the prefix that do not decode as villages are skipped.
                let BedrockDbKey::Village(key) = BedrockDbKey::decode(entry.key) else {
                    return Ok(StorageVisitorControl::Continue);
                };
                let roots = parse_consecutive_root_nbt(entry.value).unwrap_or_default();
                villages.push(ParsedVillageData {
                    key,
                    roots,
                    raw: Bytes::new(),
                });
                Ok(StorageVisitorControl::Continue)
            })?;
        Ok(villages)
    }
2290
2291 pub fn scan_globals_blocking(&self) -> Result<Vec<ParsedGlobalData>> {
2300 Ok(self
2301 .parse_global_data_blocking()?
2302 .into_iter()
2303 .filter_map(|entry| match entry.value {
2304 ParsedDbValue::GlobalData(value) => Some(value),
2305 _ => None,
2306 })
2307 .collect())
2308 }
2309
2310 pub fn read_global_record_blocking(
2316 &self,
2317 kind: GlobalRecordKind,
2318 ) -> Result<Option<ParsedGlobalData>> {
2319 let key = kind.storage_key();
2320 self.storage()
2321 .get(&key)?
2322 .map(|value| parse_global_record(kind.clone(), kind.name(), value))
2323 .transpose()
2324 }
2325
2326 pub fn scan_global_records_blocking(
2332 &self,
2333 options: WorldScanOptions,
2334 ) -> Result<Vec<ParsedGlobalData>> {
2335 let mut records = Vec::new();
2336 self.storage()
2337 .for_each_entry(to_storage_read_options(&options), &mut |key, value| {
2338 check_cancelled(&options)?;
2339 let BedrockDbKey::Global(kind) = BedrockDbKey::decode(key) else {
2340 return Ok(StorageVisitorControl::Continue);
2341 };
2342 records.push(parse_global_record(
2343 kind.clone(),
2344 kind.name(),
2345 value.clone(),
2346 )?);
2347 Ok(StorageVisitorControl::Continue)
2348 })?;
2349 Ok(records)
2350 }
2351
    /// Encodes a global record, validates it by re-parsing the encoded
    /// bytes, then writes it in a single transaction.
    pub fn write_global_record_blocking(&self, record: &ParsedGlobalData) -> Result<()> {
        self.ensure_writable()?;
        let value = encode_global_record(record)?;
        // Decode round-trip guards against committing an unreadable record.
        parse_global_record(record.kind.clone(), record.name.clone(), value.clone())?;
        let mut transaction = self.transaction();
        transaction.put_raw_key(record.kind.storage_key(), value);
        transaction.commit()
    }
2366
    /// Deletes one global record by kind in a single transaction.
    pub fn delete_global_record_blocking(&self, kind: GlobalRecordKind) -> Result<()> {
        self.ensure_writable()?;
        let mut transaction = self.transaction();
        transaction.delete_raw_key(kind.storage_key());
        transaction.commit()
    }
2379
2380 pub fn get_heightmap_blocking(&self, pos: ChunkPos) -> Result<Option<HeightMap2d>> {
2386 self.get_biome_data_blocking(pos)?
2387 .map(|data| HeightMap2d::new(data.height_map))
2388 .transpose()
2389 }
2390
    /// Writes the chunk's height map, encoded per the requested chunk version.
    ///
    /// Existing 3D biome storages are preserved for the `New` encoding.
    /// NOTE(review): the `Old` path writes a zeroed 256-byte biome grid,
    /// which would replace any existing 2D biome ids — confirm intended.
    pub fn put_heightmap_blocking(
        &self,
        pos: ChunkPos,
        version: ChunkVersion,
        height_map: HeightMap2d,
    ) -> Result<()> {
        self.ensure_writable()?;
        let existing = self.get_biome_data_blocking(pos)?;
        let storages = existing.map_or_else(Vec::new, |data| data.storages);
        let value = match version {
            ChunkVersion::Old => Biome2d::new(height_map.values, vec![0; 256])?.encode()?,
            ChunkVersion::New => Biome3d::new(height_map.values, storages)?.encode()?,
        };
        // Record tag must match the encoding generation chosen above.
        let tag = match version {
            ChunkVersion::Old => ChunkRecordTag::Data2D,
            ChunkVersion::New => ChunkRecordTag::Data3D,
        };
        self.put_raw_record_blocking(&ChunkKey::new(pos, tag), &value)
    }
2416
    /// Encodes and writes a 3D biome record for the chunk, validating the
    /// encoded bytes by re-parsing them before the write.
    pub fn put_biome_storage_blocking(&self, pos: ChunkPos, biome: Biome3d) -> Result<()> {
        self.ensure_writable()?;
        let value = biome.encode()?;
        // Decode round-trip guards against writing an unreadable record.
        Biome3d::parse(&value)?;
        self.put_raw_record_blocking(&ChunkKey::new(pos, ChunkRecordTag::Data3D), &value)
    }
2429
2430 pub fn scan_hsa_records_blocking(
2436 &self,
2437 options: WorldScanOptions,
2438 ) -> Result<Vec<(ChunkPos, Vec<ParsedHardcodedSpawnArea>)>> {
2439 let mut records = Vec::new();
2440 self.storage()
2441 .for_each_entry(to_storage_read_options(&options), &mut |key, value| {
2442 check_cancelled(&options)?;
2443 let BedrockDbKey::Chunk(chunk_key) = BedrockDbKey::decode(key) else {
2444 return Ok(StorageVisitorControl::Continue);
2445 };
2446 if chunk_key.tag == ChunkRecordTag::HardcodedSpawners {
2447 records.push((chunk_key.pos, parse_hardcoded_spawn_area_records(value)?));
2448 }
2449 Ok(StorageVisitorControl::Continue)
2450 })?;
2451 Ok(records)
2452 }
2453
    /// Replaces the chunk's hardcoded spawn area (HSA) record with `areas`,
    /// validating the encoded bytes by re-parsing before committing.
    pub fn put_hsa_for_chunk_blocking(
        &self,
        pos: ChunkPos,
        areas: &[ParsedHardcodedSpawnArea],
    ) -> Result<()> {
        self.ensure_writable()?;
        let value = encode_hardcoded_spawn_area_records(areas)?;
        // Decode round-trip guards against committing an unreadable record.
        parse_hardcoded_spawn_area_records(&value)?;
        let mut transaction = self.transaction();
        transaction.put_raw_record(
            &ChunkKey::new(pos, ChunkRecordTag::HardcodedSpawners),
            value,
        );
        transaction.commit()
    }
2475
    /// Deletes the chunk's hardcoded spawn area record.
    pub fn delete_hsa_for_chunk_blocking(&self, pos: ChunkPos) -> Result<()> {
        self.delete_raw_record_blocking(&ChunkKey::new(pos, ChunkRecordTag::HardcodedSpawners))
    }
2485
2486 pub fn block_entities_in_chunk_blocking(
2492 &self,
2493 pos: ChunkPos,
2494 ) -> Result<Vec<BlockEntityRecord>> {
2495 let key = ChunkKey::new(pos, ChunkRecordTag::BlockEntity).encode();
2496 let Some(value) = self.storage().get(&key)? else {
2497 return Ok(Vec::new());
2498 };
2499 let mut report = WorldParseReport::default();
2500 Ok(parse_block_entities_from_value(&value, &mut report)
2501 .into_iter()
2502 .enumerate()
2503 .map(|(index, entity)| BlockEntityRecord {
2504 chunk: pos,
2505 index,
2506 entity,
2507 })
2508 .collect())
2509 }
2510
2511 pub fn put_block_entities_blocking(
2518 &self,
2519 pos: ChunkPos,
2520 entities: &[ParsedBlockEntity],
2521 ) -> Result<()> {
2522 self.ensure_writable()?;
2523 validate_block_entities_in_chunk(pos, entities)?;
2524 let roots = entities
2525 .iter()
2526 .map(|entity| entity.nbt.clone())
2527 .collect::<Vec<_>>();
2528 let value = encode_consecutive_roots(&roots)?;
2529 let mut report = WorldParseReport::default();
2530 let parsed = parse_block_entities_from_value(&value, &mut report);
2531 validate_block_entities_in_chunk(pos, &parsed)?;
2532 let mut transaction = self.transaction();
2533 transaction.put_raw_record(&ChunkKey::new(pos, ChunkRecordTag::BlockEntity), value);
2534 transaction.commit()
2535 }
2536
    /// Applies `edit` to the NBT of the block entity located at `block` inside
    /// chunk `pos`, then re-parses and persists the whole record.
    ///
    /// # Errors
    /// Fails when the world is read-only, no block entity exists at `block`,
    /// the edited NBT no longer parses as a block entity, or the write fails.
    pub fn edit_block_entity_at_blocking<F>(
        &self,
        pos: ChunkPos,
        block: BlockPos,
        edit: F,
    ) -> Result<()>
    where
        F: FnOnce(&mut NbtTag) -> Result<()>,
    {
        self.ensure_writable()?;
        let mut entities = self
            .block_entities_in_chunk_blocking(pos)?
            .into_iter()
            .map(|record| record.entity)
            .collect::<Vec<_>>();
        let Some(index) = entities
            .iter()
            .position(|entity| entity.position == Some([block.x, block.y, block.z]))
        else {
            return Err(BedrockWorldError::Validation(format!(
                "no block entity exists at {},{},{}",
                block.x, block.y, block.z
            )));
        };
        edit(&mut entities[index].nbt)?;
        // Re-parse the edited NBT so derived fields (e.g. position) reflect
        // the edit before `put_block_entities_blocking` re-validates them.
        let mut report = WorldParseReport::default();
        entities[index] = parse_block_entities_from_value(
            &Bytes::from(serialize_root_nbt(&entities[index].nbt)?),
            &mut report,
        )
        .into_iter()
        .next()
        .ok_or_else(|| BedrockWorldError::Validation("edited block entity vanished".to_string()))?;
        self.put_block_entities_blocking(pos, &entities)
    }
2579
2580 pub fn delete_block_entity_at_blocking(&self, pos: ChunkPos, block: BlockPos) -> Result<()> {
2587 self.ensure_writable()?;
2588 let entities = self
2589 .block_entities_in_chunk_blocking(pos)?
2590 .into_iter()
2591 .map(|record| record.entity)
2592 .filter(|entity| entity.position != Some([block.x, block.y, block.z]))
2593 .collect::<Vec<_>>();
2594 if entities.is_empty() {
2595 return self
2596 .delete_raw_record_blocking(&ChunkKey::new(pos, ChunkRecordTag::BlockEntity));
2597 }
2598 self.put_block_entities_blocking(pos, &entities)
2599 }
2600
    /// Collects every actor for chunk `pos` from both storage layouts: the
    /// legacy inline `Entity` chunk record and the newer actor-digest plus
    /// per-actor prefix keys. Digest entries whose value is missing are
    /// skipped silently.
    pub fn actors_in_chunk_blocking(&self, pos: ChunkPos) -> Result<Vec<ActorRecord>> {
        let mut records = Vec::new();
        // Legacy layout: all entities serialized inline in one chunk record.
        let inline_key = ChunkKey::new(pos, ChunkRecordTag::Entity);
        if let Some(value) = self.storage().get(&inline_key.encode())? {
            let mut report = WorldParseReport::default();
            records.extend(
                parse_entities_from_value(&value, &mut report)
                    .into_iter()
                    .map(|entity| ActorRecord {
                        uid: entity.unique_id.map(ActorUid),
                        source: ActorSource::InlineChunk(inline_key.clone()),
                        entity,
                        raw: value.clone(),
                    }),
            );
        }
        // Modern layout: a per-chunk digest lists actor uids whose payloads
        // live under individual actor-prefix keys.
        let digest_key = ActorDigestKey::new(pos).storage_key();
        let Some(digest) = self.storage().get(&digest_key)? else {
            return Ok(records);
        };
        let ids = parse_actor_digest_ids(&digest)?;
        let actor_keys = ids.iter().map(|id| id.storage_key()).collect::<Vec<_>>();
        let values = self.storage().get_many(&actor_keys)?;
        for (id, value) in ids.into_iter().zip(values) {
            // A digest may reference keys that no longer exist; skip those.
            let Some(value) = value else {
                continue;
            };
            let mut report = WorldParseReport::default();
            records.extend(
                parse_entities_from_value(&value, &mut report)
                    .into_iter()
                    .map(|entity| ActorRecord {
                        uid: Some(id),
                        source: ActorSource::ActorPrefix(id),
                        entity,
                        raw: value.clone(),
                    }),
            );
        }
        Ok(records)
    }
2647
2648 pub fn put_actor_blocking(&self, pos: ChunkPos, actor: &ParsedEntity) -> Result<()> {
2655 self.ensure_writable()?;
2656 let uid = actor.unique_id.map(ActorUid).ok_or_else(|| {
2657 BedrockWorldError::Validation("actor UniqueID is required".to_string())
2658 })?;
2659 let value = Bytes::from(serialize_root_nbt(&actor.nbt)?);
2660 parse_entities_from_value(&value, &mut WorldParseReport::default());
2661 let mut transaction = self.transaction();
2662 transaction.put_actor(pos, uid, value)?;
2663 transaction.commit()
2664 }
2665
2666 pub fn delete_actor_blocking(&self, pos: ChunkPos, uid: ActorUid) -> Result<()> {
2673 self.ensure_writable()?;
2674 let mut transaction = self.transaction();
2675 transaction.delete_actor(pos, uid)?;
2676 transaction.commit()
2677 }
2678
2679 pub fn move_actor_blocking(
2686 &self,
2687 from: ChunkPos,
2688 to: ChunkPos,
2689 actor: &ParsedEntity,
2690 ) -> Result<()> {
2691 self.ensure_writable()?;
2692 let uid = actor.unique_id.map(ActorUid).ok_or_else(|| {
2693 BedrockWorldError::Validation("actor UniqueID is required".to_string())
2694 })?;
2695 let value = Bytes::from(serialize_root_nbt(&actor.nbt)?);
2696 let mut transaction = self.transaction();
2697 transaction.delete_actor(from, uid)?;
2698 transaction.put_actor(to, uid, value)?;
2699 transaction.commit()
2700 }
2701
    /// Async wrapper over [`Self::list_players_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn list_players(&self) -> Result<Vec<PlayerId>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.list_players_blocking())
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2710
    /// Async wrapper over [`Self::classify_keys_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn classify_keys(
        &self,
        options: WorldScanOptions,
    ) -> Result<BTreeMap<String, usize>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.classify_keys_blocking(options))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2722
    /// Async wrapper over [`Self::list_chunk_positions_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn list_chunk_positions(&self, options: WorldScanOptions) -> Result<Vec<ChunkPos>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.list_chunk_positions_blocking(options))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2731
    /// Async wrapper over [`Self::list_render_chunk_positions_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn list_render_chunk_positions(
        &self,
        options: WorldScanOptions,
    ) -> Result<Vec<ChunkPos>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.list_render_chunk_positions_blocking(options))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2743
    /// Async wrapper over [`Self::list_render_chunk_positions_in_region_blocking`];
    /// runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn list_render_chunk_positions_in_region(
        &self,
        region: RenderChunkRegion,
        options: WorldScanOptions,
    ) -> Result<Vec<ChunkPos>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || {
            world.list_render_chunk_positions_in_region_blocking(region, options)
        })
        .await
        .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2758
    /// Async wrapper over [`Self::discover_chunk_bounds_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn discover_chunk_bounds(
        &self,
        dimension: crate::Dimension,
        options: WorldScanOptions,
    ) -> Result<Option<ChunkBounds>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || {
            world.discover_chunk_bounds_blocking(dimension, options)
        })
        .await
        .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2773
    /// Async wrapper over [`Self::nearest_loaded_chunk_to_spawn_blocking`];
    /// runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn nearest_loaded_chunk_to_spawn(
        &self,
        dimension: crate::Dimension,
        spawn_block_x: i32,
        spawn_block_z: i32,
        options: WorldScanOptions,
    ) -> Result<Option<ChunkPos>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || {
            world.nearest_loaded_chunk_to_spawn_blocking(
                dimension,
                spawn_block_x,
                spawn_block_z,
                options,
            )
        })
        .await
        .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2795
    /// Async wrapper over [`Self::parse_chunk_with_options_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn parse_chunk(
        &self,
        pos: ChunkPos,
        options: WorldParseOptions,
    ) -> Result<ParsedChunkData> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.parse_chunk_with_options_blocking(pos, options))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2808
    /// Async wrapper over [`Self::load_render_chunk_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn load_render_chunk(
        &self,
        pos: ChunkPos,
        options: RenderChunkLoadOptions,
    ) -> Result<RenderChunkData> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.load_render_chunk_blocking(pos, options))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2821
    /// Async wrapper over [`Self::load_render_chunks_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn load_render_chunks(
        &self,
        positions: Vec<ChunkPos>,
        options: RenderChunkLoadOptions,
    ) -> Result<Vec<RenderChunkData>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.load_render_chunks_blocking(positions, options))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2834
    /// Async wrapper over [`Self::load_render_region_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn load_render_region(
        &self,
        region: RenderChunkRegion,
        options: RenderRegionLoadOptions,
    ) -> Result<RenderRegionData> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.load_render_region_blocking(region, options))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2847
    /// Async wrapper over [`Self::scan_entities_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn scan_entities(
        &self,
        options: WorldScanOptions,
    ) -> Result<(Vec<ParsedEntity>, WorldParseReport)> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.scan_entities_blocking(options))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2859
    /// Async wrapper over [`Self::scan_block_entities_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn scan_block_entities(
        &self,
        options: WorldScanOptions,
    ) -> Result<(Vec<ParsedBlockEntity>, WorldParseReport)> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.scan_block_entities_blocking(options))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2871
    /// Async wrapper over [`Self::scan_items_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn scan_items(
        &self,
        options: WorldScanOptions,
    ) -> Result<(Vec<ItemStack>, WorldParseReport)> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.scan_items_blocking(options))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2883
    /// Async wrapper over [`Self::scan_maps_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn scan_maps(&self) -> Result<Vec<ParsedMapData>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.scan_maps_blocking())
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2892
    /// Async wrapper over [`Self::scan_villages_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn scan_villages(&self) -> Result<Vec<ParsedVillageData>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.scan_villages_blocking())
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2901
    /// Async wrapper over [`Self::scan_globals_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn scan_globals(&self) -> Result<Vec<ParsedGlobalData>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.scan_globals_blocking())
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2910
    /// Async wrapper over [`Self::read_map_record_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn read_map_record(&self, id: MapRecordId) -> Result<Option<ParsedMapData>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.read_map_record_blocking(&id))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2923
    /// Async wrapper over [`Self::scan_map_records_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn scan_map_records(&self, options: WorldScanOptions) -> Result<Vec<ParsedMapData>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.scan_map_records_blocking(options))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2936
    /// Async wrapper over [`Self::write_map_record_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn write_map_record(&self, record: ParsedMapData) -> Result<()> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.write_map_record_blocking(&record))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2949
    /// Async wrapper over [`Self::delete_map_record_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn delete_map_record(&self, id: MapRecordId) -> Result<()> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.delete_map_record_blocking(&id))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2962
    /// Async wrapper over [`Self::read_global_record_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn read_global_record(
        &self,
        kind: GlobalRecordKind,
    ) -> Result<Option<ParsedGlobalData>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.read_global_record_blocking(kind))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2978
    /// Async wrapper over [`Self::scan_global_records_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn scan_global_records(
        &self,
        options: WorldScanOptions,
    ) -> Result<Vec<ParsedGlobalData>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.scan_global_records_blocking(options))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
2994
    /// Async wrapper over [`Self::write_global_record_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn write_global_record(&self, record: ParsedGlobalData) -> Result<()> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.write_global_record_blocking(&record))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3007
    /// Async wrapper over [`Self::delete_global_record_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn delete_global_record(&self, kind: GlobalRecordKind) -> Result<()> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.delete_global_record_blocking(kind))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3020
    /// Async wrapper over [`Self::get_heightmap_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn get_heightmap(&self, pos: ChunkPos) -> Result<Option<HeightMap2d>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.get_heightmap_blocking(pos))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3033
    /// Async wrapper over [`Self::put_heightmap_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn put_heightmap(
        &self,
        pos: ChunkPos,
        version: ChunkVersion,
        height_map: HeightMap2d,
    ) -> Result<()> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.put_heightmap_blocking(pos, version, height_map))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3051
    /// Async wrapper over [`Self::put_biome_storage_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn put_biome_storage(&self, pos: ChunkPos, biome: Biome3d) -> Result<()> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.put_biome_storage_blocking(pos, biome))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3064
    /// Async wrapper over [`Self::scan_hsa_records_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn scan_hsa_records(
        &self,
        options: WorldScanOptions,
    ) -> Result<Vec<(ChunkPos, Vec<ParsedHardcodedSpawnArea>)>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.scan_hsa_records_blocking(options))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3080
    /// Async wrapper over [`Self::put_hsa_for_chunk_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn put_hsa_for_chunk(
        &self,
        pos: ChunkPos,
        areas: Vec<ParsedHardcodedSpawnArea>,
    ) -> Result<()> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.put_hsa_for_chunk_blocking(pos, &areas))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3097
    /// Async wrapper over [`Self::delete_hsa_for_chunk_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn delete_hsa_for_chunk(&self, pos: ChunkPos) -> Result<()> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.delete_hsa_for_chunk_blocking(pos))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3110
    /// Async wrapper over [`Self::block_entities_in_chunk_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn block_entities_in_chunk(&self, pos: ChunkPos) -> Result<Vec<BlockEntityRecord>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.block_entities_in_chunk_blocking(pos))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3123
    /// Async wrapper over [`Self::put_block_entities_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn put_block_entities(
        &self,
        pos: ChunkPos,
        entities: Vec<ParsedBlockEntity>,
    ) -> Result<()> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.put_block_entities_blocking(pos, &entities))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3140
    /// Async wrapper over [`Self::edit_block_entity_at_blocking`]; runs it on a
    /// blocking task. `edit` must be `Send + 'static` so it can cross threads.
    #[cfg(feature = "async")]
    pub async fn edit_block_entity_at<F>(
        &self,
        pos: ChunkPos,
        block: BlockPos,
        edit: F,
    ) -> Result<()>
    where
        F: FnOnce(&mut NbtTag) -> Result<()> + Send + 'static,
    {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.edit_block_entity_at_blocking(pos, block, edit))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3161
    /// Async wrapper over [`Self::delete_block_entity_at_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn delete_block_entity_at(&self, pos: ChunkPos, block: BlockPos) -> Result<()> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.delete_block_entity_at_blocking(pos, block))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3174
    /// Async wrapper over [`Self::actors_in_chunk_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn actors_in_chunk(&self, pos: ChunkPos) -> Result<Vec<ActorRecord>> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.actors_in_chunk_blocking(pos))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3187
    /// Async wrapper over [`Self::put_actor_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn put_actor(&self, pos: ChunkPos, actor: ParsedEntity) -> Result<()> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.put_actor_blocking(pos, &actor))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3200
    /// Async wrapper over [`Self::delete_actor_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn delete_actor(&self, pos: ChunkPos, uid: ActorUid) -> Result<()> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.delete_actor_blocking(pos, uid))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3213
    /// Async wrapper over [`Self::move_actor_blocking`]; runs it on a blocking task.
    #[cfg(feature = "async")]
    pub async fn move_actor(
        &self,
        from: ChunkPos,
        to: ChunkPos,
        actor: ParsedEntity,
    ) -> Result<()> {
        let world = self.blocking_clone();
        tokio::task::spawn_blocking(move || world.move_actor_blocking(from, to, &actor))
            .await
            .map_err(|error| BedrockWorldError::Join(error.to_string()))?
    }
3231
    /// Produces a shallow copy of this world handle for use on a blocking
    /// thread; the underlying storage handle is shared, not reopened.
    #[cfg(feature = "async")]
    #[must_use]
    fn blocking_clone(&self) -> Self {
        Self {
            path: self.path.clone(),
            options: self.options.clone(),
            storage: self.storage.clone(),
            format: self.format,
        }
    }
3242
    /// Writes a raw chunk-record value directly to storage without validation.
    ///
    /// # Errors
    /// Fails when the world is read-only or the backend write fails.
    pub fn put_raw_record_blocking(&self, key: &ChunkKey, value: &[u8]) -> Result<()> {
        self.ensure_writable()?;
        self.storage().put(&key.encode(), value)
    }
3248
    /// Deletes a raw chunk-record key from storage.
    ///
    /// # Errors
    /// Fails when the world is read-only or the backend delete fails.
    pub fn delete_raw_record_blocking(&self, key: &ChunkKey) -> Result<()> {
        self.ensure_writable()?;
        self.storage().delete(&key.encode())
    }
3254
    /// Starts a new write transaction that batches operations until
    /// [`WorldTransaction::commit`] is called. The transaction inherits the
    /// world's read-only flag, so commit will fail on a read-only world.
    #[must_use]
    pub fn transaction(&self) -> WorldTransaction<'_, S> {
        WorldTransaction {
            storage: &self.storage,
            batch: StorageBatch::new(),
            read_only: self.options.read_only,
        }
    }
3264
3265 fn ensure_writable(&self) -> Result<()> {
3266 if self.options.read_only {
3267 return Err(BedrockWorldError::ReadOnly);
3268 }
3269 Ok(())
3270 }
3271}
3272
/// A buffered set of storage writes, applied atomically on
/// [`WorldTransaction::commit`].
pub struct WorldTransaction<'a, S = Arc<dyn WorldStorage>>
where
    S: WorldStorageHandle,
{
    // Handle used to read current state (e.g. actor digests) while building
    // the batch.
    storage: &'a S,
    // Pending put/delete operations, applied together on commit.
    batch: StorageBatch,
    // Mirrors the world's open options; commit fails when set.
    read_only: bool,
}
3282
3283impl<S> WorldTransaction<'_, S>
3284where
3285 S: WorldStorageHandle,
3286{
3287 pub fn put_raw_record(&mut self, key: &ChunkKey, value: impl Into<Bytes>) {
3289 self.batch.put(key.encode(), value.into());
3290 }
3291
3292 pub fn delete_raw_record(&mut self, key: &ChunkKey) {
3294 self.batch.delete(key.encode());
3295 }
3296
3297 pub fn put_raw_key(&mut self, key: impl Into<Bytes>, value: impl Into<Bytes>) {
3299 self.batch.put(key.into(), value.into());
3300 }
3301
3302 pub fn delete_raw_key(&mut self, key: impl Into<Bytes>) {
3304 self.batch.delete(key.into());
3305 }
3306
3307 pub fn put_player(&mut self, player: &PlayerData) -> Result<()> {
3314 let Some(key) = player.id.storage_key() else {
3315 return Err(BedrockWorldError::Validation(
3316 "player id has no LevelDB key".to_string(),
3317 ));
3318 };
3319 self.batch
3320 .put(Bytes::copy_from_slice(key.as_ref()), player.raw.clone());
3321 Ok(())
3322 }
3323
3324 pub fn put_map_record(&mut self, record: &ParsedMapData) -> Result<()> {
3330 let value = encode_map_record(record)?;
3331 parse_map_record(record.record_id.clone(), value.clone())?;
3332 self.batch.put(record.record_id.storage_key(), value);
3333 Ok(())
3334 }
3335
3336 pub fn delete_map_record(&mut self, id: &MapRecordId) {
3338 self.batch.delete(id.storage_key());
3339 }
3340
3341 pub fn put_global_record(&mut self, record: &ParsedGlobalData) -> Result<()> {
3347 let value = encode_global_record(record)?;
3348 parse_global_record(record.kind.clone(), record.name.clone(), value.clone())?;
3349 self.batch.put(record.kind.storage_key(), value);
3350 Ok(())
3351 }
3352
3353 pub fn delete_global_record(&mut self, kind: &GlobalRecordKind) {
3355 self.batch.delete(kind.storage_key());
3356 }
3357
3358 pub fn put_actor(&mut self, pos: ChunkPos, uid: ActorUid, value: Bytes) -> Result<()> {
3364 parse_entities_from_value(&value, &mut WorldParseReport::default());
3365 self.batch.put(uid.storage_key(), value);
3366 self.replace_actor_digest(pos, |ids| {
3367 if !ids.contains(&uid) {
3368 ids.push(uid);
3369 }
3370 })?;
3371 Ok(())
3372 }
3373
3374 pub fn delete_actor(&mut self, pos: ChunkPos, uid: ActorUid) -> Result<()> {
3380 self.batch.delete(uid.storage_key());
3381 self.replace_actor_digest(pos, |ids| ids.retain(|id| *id != uid))
3382 }
3383
3384 pub fn commit(self) -> Result<()> {
3391 if self.read_only {
3392 return Err(BedrockWorldError::ReadOnly);
3393 }
3394 validate_batch(&self.batch)?;
3395 self.storage.storage().write_batch(&self.batch)?;
3396 self.storage.storage().flush()
3397 }
3398
3399 fn replace_actor_digest<F>(&mut self, pos: ChunkPos, update: F) -> Result<()>
3400 where
3401 F: FnOnce(&mut Vec<ActorUid>),
3402 {
3403 let key = ActorDigestKey::new(pos).storage_key();
3404 let mut ids = self
3405 .storage
3406 .storage()
3407 .get(&key)?
3408 .map_or_else(|| Ok(Vec::new()), |value| parse_actor_digest_ids(&value))?;
3409 update(&mut ids);
3410 if ids.is_empty() {
3411 self.batch.delete(key);
3412 } else {
3413 self.batch.put(key, encode_actor_digest_ids(&ids));
3414 }
3415 Ok(())
3416 }
3417}
3418
3419fn validate_batch(batch: &StorageBatch) -> Result<()> {
3420 for op in batch.ops() {
3421 match op {
3422 StorageOp::Put { key, value } => {
3423 if key.is_empty() {
3424 return Err(BedrockWorldError::Validation(
3425 "batch contains empty key".to_string(),
3426 ));
3427 }
3428 if value.is_empty() {
3429 return Err(BedrockWorldError::Validation(format!(
3430 "batch put for key {key:?} contains empty value"
3431 )));
3432 }
3433 }
3434 StorageOp::Delete { key } => {
3435 if key.is_empty() {
3436 return Err(BedrockWorldError::Validation(
3437 "batch contains empty delete key".to_string(),
3438 ));
3439 }
3440 }
3441 }
3442 }
3443 Ok(())
3444}
3445
3446fn validate_block_entities_in_chunk(pos: ChunkPos, entities: &[ParsedBlockEntity]) -> Result<()> {
3447 for entity in entities {
3448 let Some([x, y, z]) = entity.position else {
3449 return Err(BedrockWorldError::Validation(
3450 "block entity is missing x/y/z position".to_string(),
3451 ));
3452 };
3453 let block_pos = BlockPos { x, y, z };
3454 if block_pos.to_chunk_pos(pos.dimension) != pos {
3455 return Err(BedrockWorldError::Validation(format!(
3456 "block entity at {x},{y},{z} is outside chunk {pos:?}"
3457 )));
3458 }
3459 }
3460 Ok(())
3461}
3462
3463fn check_cancelled(options: &WorldScanOptions) -> Result<()> {
3464 if options
3465 .cancel
3466 .as_ref()
3467 .is_some_and(CancelFlag::is_cancelled)
3468 {
3469 return Err(BedrockWorldError::Cancelled {
3470 operation: "world scan",
3471 });
3472 }
3473 Ok(())
3474}
3475
3476fn emit_progress(options: &WorldScanOptions, entries_seen: usize) {
3477 if let Some(progress) = &options.progress {
3478 progress.emit(WorldScanProgress { entries_seen });
3479 }
3480}
3481
3482fn check_render_load_cancelled(options: &RenderChunkLoadOptions) -> Result<()> {
3483 if options
3484 .cancel
3485 .as_ref()
3486 .is_some_and(CancelFlag::is_cancelled)
3487 {
3488 return Err(BedrockWorldError::Cancelled {
3489 operation: "render chunk load",
3490 });
3491 }
3492 Ok(())
3493}
3494
3495fn emit_render_load_progress(options: &RenderChunkLoadOptions, completed_chunks: usize) {
3496 if completed_chunks.is_multiple_of(options.pipeline.resolve_progress_interval()) {
3497 if let Some(progress) = &options.progress {
3498 progress.emit(WorldScanProgress {
3499 entries_seen: completed_chunks,
3500 });
3501 }
3502 }
3503}
3504
3505fn sort_render_chunk_positions(positions: &mut [ChunkPos], priority: RenderChunkPriority) {
3506 match priority {
3507 RenderChunkPriority::RowMajor => positions.sort(),
3508 RenderChunkPriority::DistanceFrom { chunk_x, chunk_z } => positions.sort_by_key(|pos| {
3509 let dx = i64::from(pos.x) - i64::from(chunk_x);
3510 let dz = i64::from(pos.z) - i64::from(chunk_z);
3511 (
3512 dx.saturating_mul(dx).saturating_add(dz.saturating_mul(dz)),
3513 pos.z,
3514 pos.x,
3515 pos.dimension,
3516 )
3517 }),
3518 }
3519}
3520
3521fn push_render_record_request(
3522 keys: &mut Vec<Bytes>,
3523 requests: &mut Vec<RenderRecordRequest>,
3524 chunk_index: usize,
3525 pos: ChunkPos,
3526 kind: RenderRecordKind,
3527) {
3528 let key = match kind {
3529 RenderRecordKind::LegacyTerrain => {
3530 ChunkKey::new(pos, ChunkRecordTag::LegacyTerrain).encode()
3531 }
3532 RenderRecordKind::Data3D => ChunkKey::new(pos, ChunkRecordTag::Data3D).encode(),
3533 RenderRecordKind::Data2D => ChunkKey::new(pos, ChunkRecordTag::Data2D).encode(),
3534 RenderRecordKind::Data2DLegacy => ChunkKey::new(pos, ChunkRecordTag::Data2DLegacy).encode(),
3535 RenderRecordKind::Subchunk(y) => ChunkKey::subchunk(pos, y).encode(),
3536 RenderRecordKind::BlockEntity => ChunkKey::new(pos, ChunkRecordTag::BlockEntity).encode(),
3537 };
3538 keys.push(key);
3539 requests.push(RenderRecordRequest { chunk_index, kind });
3540}
3541
3542fn apply_render_record_values(
3543 chunks: &mut [RawRenderChunkData],
3544 requests: &[RenderRecordRequest],
3545 values: Vec<Option<Bytes>>,
3546) -> usize {
3547 let mut found = 0usize;
3548 for (request, value) in requests.iter().copied().zip(values) {
3549 let Some(value) = value else {
3550 continue;
3551 };
3552 found = found.saturating_add(1);
3553 let Some(chunk) = chunks.get_mut(request.chunk_index) else {
3554 continue;
3555 };
3556 match request.kind {
3557 RenderRecordKind::LegacyTerrain => {
3558 chunk.legacy_terrain = Some(value);
3559 }
3560 RenderRecordKind::Data3D => {
3561 if chunk.biome_record.is_none() {
3562 chunk.biome_record = Some((crate::ChunkVersion::New, value));
3563 }
3564 }
3565 RenderRecordKind::Data2D | RenderRecordKind::Data2DLegacy => {
3566 if chunk.biome_record.is_none() {
3567 chunk.biome_record = Some((crate::ChunkVersion::Old, value));
3568 }
3569 }
3570 RenderRecordKind::Subchunk(y) => {
3571 chunk.subchunks.insert(y, value);
3572 }
3573 RenderRecordKind::BlockEntity => {
3574 chunk.block_entities = Some(value);
3575 }
3576 }
3577 }
3578 found
3579}
3580
3581fn planned_render_subchunk_ys(
3582 pos: ChunkPos,
3583 options: &RenderChunkLoadOptions,
3584 height_map: Option<&[[Option<i16>; 16]; 16]>,
3585) -> Result<BTreeSet<i8>> {
3586 let mut subchunk_ys = BTreeSet::new();
3587 match options.request {
3588 RenderChunkRequest::ExactSurface { subchunks, .. } => {
3589 let (min_y, max_y) = pos.subchunk_index_range(crate::ChunkVersion::New);
3590 match subchunks {
3591 ExactSurfaceSubchunkPolicy::Full => {
3592 for y in min_y..=max_y {
3593 subchunk_ys.insert(y);
3594 }
3595 }
3596 ExactSurfaceSubchunkPolicy::HintThenVerify => {
3597 if let Some(height_map) = height_map {
3598 insert_needed_surface_subchunks(
3599 &mut subchunk_ys,
3600 Some(height_map),
3601 min_y,
3602 max_y,
3603 );
3604 } else {
3605 for y in min_y..=max_y {
3606 subchunk_ys.insert(y);
3607 }
3608 }
3609 }
3610 }
3611 }
3612 RenderChunkRequest::Layer { y } => {
3613 subchunk_ys.insert(block_y_to_subchunk_y(y)?);
3614 }
3615 RenderChunkRequest::RawHeightMap | RenderChunkRequest::Biome { .. } => {}
3616 }
3617 Ok(subchunk_ys)
3618}
3619
3620const fn request_needs_biome_record(request: RenderChunkRequest) -> bool {
3621 match request {
3622 RenderChunkRequest::ExactSurface { biome, .. } => {
3623 !matches!(biome, ExactSurfaceBiomeLoad::None)
3624 }
3625 RenderChunkRequest::RawHeightMap | RenderChunkRequest::Biome { .. } => true,
3626 RenderChunkRequest::Layer { .. } => false,
3627 }
3628}
3629
3630const fn request_loads_block_entities(request: RenderChunkRequest) -> bool {
3631 matches!(
3632 request,
3633 RenderChunkRequest::ExactSurface {
3634 block_entities: true,
3635 ..
3636 }
3637 )
3638}
3639
3640const fn request_builds_column_samples(request: RenderChunkRequest) -> bool {
3641 matches!(request, RenderChunkRequest::ExactSurface { .. })
3642}
3643
3644const fn request_uses_hint_surface_subchunks(request: RenderChunkRequest) -> bool {
3645 matches!(
3646 request,
3647 RenderChunkRequest::ExactSurface {
3648 subchunks: ExactSurfaceSubchunkPolicy::HintThenVerify,
3649 ..
3650 }
3651 )
3652}
3653
3654const fn exact_surface_full_request(request: RenderChunkRequest) -> RenderChunkRequest {
3655 match request {
3656 RenderChunkRequest::ExactSurface {
3657 biome,
3658 block_entities,
3659 ..
3660 } => RenderChunkRequest::ExactSurface {
3661 subchunks: ExactSurfaceSubchunkPolicy::Full,
3662 biome,
3663 block_entities,
3664 },
3665 other => other,
3666 }
3667}
3668
/// Merges parsed biome storages into `render_biomes`, keyed by subchunk base Y
/// (or `i32::MIN` for storages carrying no Y), according to what the render
/// request actually needs. Does nothing when `biome_data` is `None` or the
/// request does not use biome data.
fn insert_render_biome_storages(
    render_biomes: &mut BTreeMap<i32, ParsedBiomeStorage>,
    biome_data: Option<ParsedBiomeData>,
    request: RenderChunkRequest,
) {
    let Some(biome_data) = biome_data else {
        return;
    };
    match request {
        RenderChunkRequest::ExactSurface {
            biome: ExactSurfaceBiomeLoad::TopColumns | ExactSurfaceBiomeLoad::All,
            ..
        }
        | RenderChunkRequest::Biome { load_all: true, .. } => {
            // Keep every storage; Y-less storages share the i32::MIN slot.
            for storage in biome_data.storages {
                let key = storage.y.unwrap_or(i32::MIN);
                render_biomes.insert(key, storage);
            }
        }
        RenderChunkRequest::Biome { y, load_all: false } => {
            // Prefer the storage that covers `y`; otherwise fall back to the
            // first storage seen so the caller still gets some biome data.
            let mut fallback = None;
            for storage in biome_data.storages {
                if biome_storage_contains_y(&storage, y) {
                    render_biomes.insert(biome_storage_bucket_y(y), storage);
                    return;
                }
                fallback.get_or_insert(storage);
            }
            if let Some(storage) = fallback {
                render_biomes.insert(biome_storage_bucket_y(y), storage);
            }
        }
        // These requests never consume biome storages.
        RenderChunkRequest::ExactSurface {
            biome: ExactSurfaceBiomeLoad::None,
            ..
        }
        | RenderChunkRequest::RawHeightMap
        | RenderChunkRequest::Layer { .. } => {}
    }
}
3709
3710fn parse_render_biome_record(
3711 record: Option<&(crate::ChunkVersion, Bytes)>,
3712) -> Result<Option<ParsedBiomeData>> {
3713 let Some((version, value)) = record else {
3714 return Ok(None);
3715 };
3716 let data = match version {
3717 crate::ChunkVersion::New => parse_data3d(value),
3718 crate::ChunkVersion::Old => parse_legacy_data2d(value),
3719 }
3720 .map_err(|error| BedrockWorldError::CorruptWorld(format!("biome data: {error}")))?;
3721 Ok(Some(data))
3722}
3723
3724fn render_height_map_from_biome_data(
3725 pos: ChunkPos,
3726 biome_data: &ParsedBiomeData,
3727) -> [[Option<i16>; 16]; 16] {
3728 let mut heights = [[None; 16]; 16];
3729 for local_z in 0..16_u8 {
3730 for local_x in 0..16_u8 {
3731 let index = height_map_index(local_x, local_z);
3732 heights[usize::from(local_z)][usize::from(local_x)] = biome_data
3733 .height_map
3734 .get(index)
3735 .and_then(|height| normalize_biome_height(pos, biome_data.version, *height));
3736 }
3737 }
3738 heights
3739}
3740
3741fn normalize_biome_height(
3742 pos: ChunkPos,
3743 version: crate::ChunkVersion,
3744 stored_height: i16,
3745) -> Option<i16> {
3746 let (min_y, _) = pos.y_range(version);
3747 i16::try_from(i32::from(stored_height) + min_y).ok()
3748}
3749
3750fn legacy_height_map_from_raw(
3751 raw_legacy_terrain: Option<&Bytes>,
3752) -> Result<Option<[[Option<i16>; 16]; 16]>> {
3753 let Some(raw_legacy_terrain) = raw_legacy_terrain else {
3754 return Ok(None);
3755 };
3756 let terrain = LegacyTerrain::parse(raw_legacy_terrain.clone())?;
3757 Ok(Some(render_height_map_from_legacy_terrain(&terrain)))
3758}
3759
3760fn render_height_map_from_legacy_terrain(terrain: &LegacyTerrain) -> [[Option<i16>; 16]; 16] {
3761 let mut heights = [[None; 16]; 16];
3762 for local_z in 0..16_u8 {
3763 for local_x in 0..16_u8 {
3764 heights[usize::from(local_z)][usize::from(local_x)] =
3765 terrain.height_at(local_x, local_z).map(i16::from);
3766 }
3767 }
3768 heights
3769}
3770
3771fn render_biomes_from_legacy_terrain(
3772 terrain: &LegacyTerrain,
3773) -> [[Option<LegacyBiomeSample>; 16]; 16] {
3774 let mut samples = [[None; 16]; 16];
3775 for local_z in 0..16_u8 {
3776 for local_x in 0..16_u8 {
3777 samples[usize::from(local_z)][usize::from(local_x)] =
3778 terrain.biome_sample_at(local_x, local_z);
3779 }
3780 }
3781 samples
3782}
3783
3784fn render_biome_colors_from_legacy_terrain(terrain: &LegacyTerrain) -> [[Option<u32>; 16]; 16] {
3785 let mut colors = [[None; 16]; 16];
3786 let samples = render_biomes_from_legacy_terrain(terrain);
3787 for local_z in 0..16 {
3788 for local_x in 0..16 {
3789 colors[local_z][local_x] = samples[local_z][local_x].map(LegacyBiomeSample::rgb_u32);
3790 }
3791 }
3792 colors
3793}
3794
3795fn build_terrain_column_samples(
3796 pos: ChunkPos,
3797 version: crate::ChunkVersion,
3798 subchunks: &BTreeMap<i8, SubChunk>,
3799 legacy_terrain: Option<&LegacyTerrain>,
3800 height_map: Option<&[[Option<i16>; 16]; 16]>,
3801 legacy_biomes: Option<&[[Option<LegacyBiomeSample>; 16]; 16]>,
3802 render_biomes: &BTreeMap<i32, ParsedBiomeStorage>,
3803) -> Result<TerrainColumnSamples> {
3804 let mut columns = TerrainColumnSamples::new();
3805 let (min_y, max_y) = if legacy_terrain.is_some() && subchunks.is_empty() {
3806 (0, 127)
3807 } else {
3808 pos.y_range(version)
3809 };
3810
3811 for local_z in 0..16_u8 {
3812 for local_x in 0..16_u8 {
3813 if let Some(sample) = sample_column_top_down(
3814 local_x,
3815 local_z,
3816 min_y,
3817 max_y,
3818 subchunks,
3819 legacy_terrain,
3820 height_map,
3821 legacy_biomes,
3822 render_biomes,
3823 )? {
3824 columns.set(local_x, local_z, sample);
3825 }
3826 }
3827 }
3828 Ok(columns)
3829}
3830
#[allow(clippy::too_many_arguments)]
#[allow(clippy::too_many_lines)]
/// Scans one column from `max_y` down to `min_y` and returns its surface
/// sample, or `Ok(None)` when the whole column is empty.
///
/// Block data is sourced per Y level in priority order: the subchunk's
/// paletted layers, then the subchunk's legacy block-id storage, then the
/// chunk-level legacy terrain record. Water/overlay state is accumulated
/// across levels by `scan_terrain_surface_state`; if the descent finishes
/// while a water surface is still open, a water-only sample is synthesized
/// from the tracked water top.
fn sample_column_top_down(
    local_x: u8,
    local_z: u8,
    min_y: i32,
    max_y: i32,
    subchunks: &BTreeMap<i8, SubChunk>,
    legacy_terrain: Option<&LegacyTerrain>,
    height_map: Option<&[[Option<i16>; 16]; 16]>,
    legacy_biomes: Option<&[[Option<LegacyBiomeSample>; 16]; 16]>,
    render_biomes: &BTreeMap<i32, ParsedBiomeStorage>,
) -> Result<Option<TerrainColumnSample>> {
    // Accumulator state threaded through the whole descent.
    let mut overlay: Option<TerrainColumnOverlay> = None;
    let mut top_water: Option<(i16, BlockState, TerrainSampleSource)> = None;
    let mut water_depth = 0_u8;
    for y in (min_y..=max_y).rev() {
        // Samples store i16 heights; clamp out-of-range Y instead of failing.
        let height = i16::try_from(y).unwrap_or(if y < 0 { i16::MIN } else { i16::MAX });

        let subchunk_y = block_y_to_subchunk_y(y)?;
        let local_y = u8::try_from(y - i32::from(subchunk_y) * 16).map_err(|_| {
            BedrockWorldError::Validation(format!("block y={y} has invalid local subchunk offset"))
        })?;
        let mut saw_subchunk_layer = false;
        if let Some(subchunk) = subchunks.get(&subchunk_y) {
            // Preferred source: the subchunk's paletted block layers.
            for state in subchunk.visible_block_states_at(local_x, local_y, local_z) {
                saw_subchunk_layer = true;
                if let Some(sample) = scan_terrain_surface_state(
                    local_x,
                    local_z,
                    y,
                    height,
                    state.clone(),
                    TerrainSampleSource::Subchunk,
                    &mut overlay,
                    &mut top_water,
                    &mut water_depth,
                    legacy_biomes,
                    render_biomes,
                ) {
                    return Ok(Some(sample));
                }
            }
            if saw_subchunk_layer {
                // Paletted data covered this level; skip the fallbacks.
                continue;
            }
            // Second source: the subchunk's legacy block-id storage.
            if let Some(id) = subchunk.legacy_block_id_at(local_x, local_y, local_z) {
                let data = subchunk
                    .legacy_block_data_at(local_x, local_y, local_z)
                    .unwrap_or(0);
                if let Some(sample) = scan_terrain_surface_state(
                    local_x,
                    local_z,
                    y,
                    height,
                    legacy_world_block_state(id, data),
                    TerrainSampleSource::Subchunk,
                    &mut overlay,
                    &mut top_water,
                    &mut water_depth,
                    legacy_biomes,
                    render_biomes,
                ) {
                    return Ok(Some(sample));
                }
                continue;
            }
        }

        // Last source: the chunk-level legacy terrain record.
        if let Some((state, source)) =
            legacy_terrain_block_state_at(local_x, y, local_z, subchunks, legacy_terrain)
        {
            if let Some(sample) = scan_terrain_surface_state(
                local_x,
                local_z,
                y,
                height,
                state,
                source,
                &mut overlay,
                &mut top_water,
                &mut water_depth,
                legacy_biomes,
                render_biomes,
            ) {
                return Ok(Some(sample));
            }
        }
    }

    // The column ended while still inside water: emit a water-only sample
    // whose floor is unknown (no underwater block was found).
    if let Some((water_height, water_state, water_source)) = top_water {
        let biome = terrain_biome_at(
            local_x,
            local_z,
            i32::from(water_height),
            legacy_biomes,
            render_biomes,
        );
        // Relief falls back to the stored height map when available.
        let relief_y = raw_height_at(height_map, local_x, local_z).unwrap_or(water_height);
        return Ok(Some(TerrainColumnSample {
            surface_y: water_height,
            surface_block_state: water_state.clone(),
            relief_y,
            relief_block_state: water_state.clone(),
            overlay,
            water: Some(TerrainColumnWater {
                surface_y: water_height,
                block_state: water_state,
                depth: water_depth,
                underwater_y: None,
                underwater_block_state: None,
                source: water_source,
            }),
            biome,
            source: water_source,
        }));
    }

    Ok(None)
}
3951
#[allow(clippy::too_many_arguments)]
/// Feeds one block state into the column scan's accumulators and decides
/// whether the column's surface sample is now complete.
///
/// Role handling:
/// * `Air` — deepens any open water column, otherwise ignored;
/// * `Overlay` — remembered as the topmost overlay; if water is already
///   open, the overlay block instead terminates the column as the
///   underwater floor;
/// * `Water` — opens the water column on first contact, deepens it after;
/// * `Primary` — terminates the column, either as a plain surface or as the
///   floor beneath the accumulated water.
///
/// Returns `Some` only when the column is finished.
fn scan_terrain_surface_state(
    local_x: u8,
    local_z: u8,
    y: i32,
    height: i16,
    state: BlockState,
    source: TerrainSampleSource,
    overlay: &mut Option<TerrainColumnOverlay>,
    top_water: &mut Option<(i16, BlockState, TerrainSampleSource)>,
    water_depth: &mut u8,
    legacy_biomes: Option<&[[Option<LegacyBiomeSample>; 16]; 16]>,
    render_biomes: &BTreeMap<i32, ParsedBiomeStorage>,
) -> Option<TerrainColumnSample> {
    match terrain_surface_role(&state.name) {
        TerrainSurfaceRole::Air => {
            // Air below the water surface counts toward water depth.
            if top_water.is_some() {
                *water_depth = (*water_depth).saturating_add(1);
            }
            None
        }
        TerrainSurfaceRole::Overlay => {
            if let Some((water_height, water_state, water_source)) = top_water.take() {
                // An overlay hit under open water closes the column: the
                // overlay block acts as the underwater floor.
                let biome = terrain_biome_at(local_x, local_z, y, legacy_biomes, render_biomes);
                return Some(TerrainColumnSample {
                    surface_y: water_height,
                    surface_block_state: water_state.clone(),
                    relief_y: height,
                    relief_block_state: state.clone(),
                    overlay: overlay.take(),
                    water: Some(TerrainColumnWater {
                        surface_y: water_height,
                        block_state: water_state,
                        depth: (*water_depth).saturating_add(1),
                        underwater_y: Some(height),
                        underwater_block_state: Some(state),
                        source: water_source,
                    }),
                    biome,
                    source: water_source,
                });
            }
            // Only the topmost overlay is kept.
            if overlay.is_none() {
                *overlay = Some(TerrainColumnOverlay {
                    y: height,
                    block_state: state,
                    source,
                });
            }
            None
        }
        TerrainSurfaceRole::Water => {
            // First water block opens the column; later ones deepen it.
            if top_water.is_none() {
                *top_water = Some((height, state, source));
            } else {
                *water_depth = (*water_depth).saturating_add(1);
            }
            None
        }
        TerrainSurfaceRole::Primary => {
            let biome = terrain_biome_at(local_x, local_z, y, legacy_biomes, render_biomes);
            if let Some((water_height, water_state, water_source)) = top_water.take() {
                // Primary block under water: surface is the water top, the
                // primary block becomes the underwater floor/relief.
                return Some(TerrainColumnSample {
                    surface_y: water_height,
                    surface_block_state: water_state.clone(),
                    relief_y: height,
                    relief_block_state: state.clone(),
                    overlay: overlay.take(),
                    water: Some(TerrainColumnWater {
                        surface_y: water_height,
                        block_state: water_state,
                        depth: (*water_depth).saturating_add(1),
                        underwater_y: Some(height),
                        underwater_block_state: Some(state),
                        source: water_source,
                    }),
                    biome,
                    source: water_source,
                });
            }
            // Dry column: the primary block is both surface and relief.
            Some(TerrainColumnSample {
                surface_y: height,
                surface_block_state: state.clone(),
                relief_y: height,
                relief_block_state: state,
                overlay: overlay.take(),
                water: None,
                biome,
                source,
            })
        }
    }
}
4045
4046fn legacy_terrain_block_state_at(
4047 local_x: u8,
4048 y: i32,
4049 local_z: u8,
4050 subchunks: &BTreeMap<i8, SubChunk>,
4051 legacy_terrain: Option<&LegacyTerrain>,
4052) -> Option<(BlockState, TerrainSampleSource)> {
4053 let terrain = legacy_terrain?;
4054 if !(0..=127).contains(&y) {
4055 return None;
4056 }
4057 let legacy_y = u8::try_from(y).ok()?;
4058 let id = terrain.block_id_at(local_x, legacy_y, local_z)?;
4059 let data = terrain
4060 .block_data_at(local_x, legacy_y, local_z)
4061 .unwrap_or(0);
4062 let source = if subchunks.is_empty() {
4063 TerrainSampleSource::LegacyTerrain
4064 } else {
4065 TerrainSampleSource::LegacyFallback
4066 };
4067 Some((legacy_world_block_state(id, data), source))
4068}
4069
4070fn terrain_biome_at(
4071 local_x: u8,
4072 local_z: u8,
4073 y: i32,
4074 legacy_biomes: Option<&[[Option<LegacyBiomeSample>; 16]; 16]>,
4075 render_biomes: &BTreeMap<i32, ParsedBiomeStorage>,
4076) -> Option<TerrainColumnBiome> {
4077 legacy_biomes
4078 .and_then(|samples| samples[usize::from(local_z)][usize::from(local_x)])
4079 .map(TerrainColumnBiome::Legacy)
4080 .or_else(|| {
4081 render_biome_id_at(local_x, local_z, y, render_biomes).map(TerrainColumnBiome::Id)
4082 })
4083}
4084
4085fn render_biome_id_at(
4086 local_x: u8,
4087 local_z: u8,
4088 y: i32,
4089 render_biomes: &BTreeMap<i32, ParsedBiomeStorage>,
4090) -> Option<u32> {
4091 let direct = render_biomes
4092 .get(&biome_storage_bucket_y(y))
4093 .or_else(|| render_biomes.values().next())
4094 .and_then(|storage| {
4095 biome_id_from_storage(storage, local_x, local_z, y).filter(|id| *id != 0)
4096 });
4097 if direct.is_some() {
4098 return direct;
4099 }
4100 for storage in render_biomes.values().rev() {
4101 if storage.y.is_none() {
4102 if let Some(id) = storage
4103 .biome_id_at(local_x, 0, local_z)
4104 .filter(|id| *id != 0)
4105 {
4106 return Some(id);
4107 }
4108 continue;
4109 }
4110 for local_y in (0..16_u8).rev() {
4111 if let Some(id) = storage
4112 .biome_id_at(local_x, local_y, local_z)
4113 .filter(|id| *id != 0)
4114 {
4115 return Some(id);
4116 }
4117 }
4118 }
4119 None
4120}
4121
/// Decodes a chunk's raw storage records into render-ready data, timing each
/// decode phase (biome, subchunk, block-entity, surface scan) separately.
///
/// The chunk version is taken from the biome record when present; otherwise
/// it is inferred as `Old` when a legacy terrain record exists and `New`
/// when not. Which pieces are decoded is driven by `options.request`.
fn render_chunk_from_raw(
    raw: RawRenderChunkData,
    options: &RenderChunkLoadOptions,
) -> Result<(RenderChunkData, RenderChunkDecodeTiming)> {
    let mut timing = RenderChunkDecodeTiming::default();
    let biome_started = Instant::now();
    let legacy_terrain = raw.legacy_terrain.map(LegacyTerrain::parse).transpose()?;
    // Version: biome record wins; otherwise infer from legacy terrain.
    let version = raw.biome_record.as_ref().map_or_else(
        || {
            if legacy_terrain.is_some() {
                crate::ChunkVersion::Old
            } else {
                crate::ChunkVersion::New
            }
        },
        |(version, _)| *version,
    );
    let biome_data = parse_render_biome_record(raw.biome_record.as_ref())?;
    // Prefer the modern biome record's height map; fall back to legacy terrain.
    let height_map = biome_data
        .as_ref()
        .map(|biome_data| render_height_map_from_biome_data(raw.pos, biome_data))
        .or_else(|| {
            legacy_terrain
                .as_ref()
                .map(render_height_map_from_legacy_terrain)
        });
    let legacy_biomes = legacy_terrain
        .as_ref()
        .map(render_biomes_from_legacy_terrain);
    let legacy_biome_colors = legacy_terrain
        .as_ref()
        .map(render_biome_colors_from_legacy_terrain);
    let mut render_biomes = BTreeMap::new();
    insert_render_biome_storages(&mut render_biomes, biome_data, options.request);
    timing.biome_parse_ms = biome_started.elapsed().as_millis();

    let mut subchunks = BTreeMap::new();
    let subchunk_started = Instant::now();
    for (y, value) in raw.subchunks {
        // Honor cancellation between subchunk decodes; they are the bulk
        // of the work.
        check_render_load_cancelled(options)?;
        subchunks.insert(
            y,
            parse_subchunk_with_mode(y, value, options.subchunk_decode)?,
        );
    }
    timing.subchunk_parse_ms = subchunk_started.elapsed().as_millis();

    let block_entity_started = Instant::now();
    let block_entities = if request_loads_block_entities(options.request) {
        if let Some(value) = raw.block_entities {
            // The parse report is discarded here; only the entities matter.
            let mut report = WorldParseReport::default();
            parse_block_entities_from_value(&value, &mut report)
                .into_iter()
                .map(|entity| render_block_entity_from_nbt(entity.nbt))
                .collect()
        } else {
            Vec::new()
        }
    } else {
        Vec::new()
    };
    timing.block_entity_parse_ms = block_entity_started.elapsed().as_millis();

    let surface_scan_started = Instant::now();
    let column_samples = if request_builds_column_samples(options.request) {
        Some(build_terrain_column_samples(
            raw.pos,
            version,
            &subchunks,
            legacy_terrain.as_ref(),
            height_map.as_ref(),
            legacy_biomes.as_ref(),
            &render_biomes,
        )?)
    } else {
        None
    };
    timing.surface_scan_ms = surface_scan_started.elapsed().as_millis();

    Ok((
        RenderChunkData {
            pos: raw.pos,
            // A chunk counts as loaded if any record produced data.
            is_loaded: height_map.is_some()
                || legacy_biome_colors.is_some()
                || legacy_biomes.is_some()
                || !render_biomes.is_empty()
                || !subchunks.is_empty()
                || !block_entities.is_empty()
                || legacy_terrain.is_some(),
            height_map,
            legacy_biomes,
            legacy_biome_colors,
            biome_data: render_biomes,
            subchunks,
            block_entities,
            legacy_terrain,
            column_samples,
            version,
        },
        timing,
    ))
}
4224
/// Aggregates per-chunk decode results into load statistics.
///
/// Only fields derivable from the decoded chunks are computed here; the
/// I/O- and timing-related counters (`keys_*`, `*_ms`, `prefix_scans`, …)
/// are zeroed — presumably accumulated afterwards by the caller that owns
/// the storage reads (NOTE(review): confirm against the call sites).
fn render_load_stats(
    chunks: &[RenderChunkData],
    worker_threads: usize,
    queue_wait_ms: u128,
    load_ms: u128,
) -> RenderLoadStats {
    RenderLoadStats {
        requested_chunks: chunks.len(),
        loaded_chunks: chunks.iter().filter(|chunk| chunk.is_loaded).count(),
        subchunks_decoded: chunks
            .iter()
            .map(|chunk| chunk.subchunks.len())
            .sum::<usize>(),
        worker_threads,
        queue_wait_ms,
        load_ms,
        // Filled in elsewhere; this function only sees decoded chunks.
        keys_requested: 0,
        keys_found: 0,
        exact_get_batches: 0,
        prefix_scans: 0,
        decode_ms: 0,
        db_read_ms: 0,
        biome_parse_ms: 0,
        subchunk_parse_ms: 0,
        surface_scan_ms: 0,
        block_entity_parse_ms: 0,
        full_reload_ms: 0,
        legacy_terrain_records: chunks
            .iter()
            .filter(|chunk| chunk.legacy_terrain.is_some())
            .count(),
        legacy_biome_samples: chunks
            .iter()
            .filter(|chunk| chunk.legacy_biomes.is_some())
            .count(),
        legacy_biome_colors: chunks
            .iter()
            .filter(|chunk| chunk.legacy_biome_colors.is_some())
            .count(),
        // A chunk is legacy-sourced only when it has legacy terrain and no
        // modern subchunks; any subchunk makes it subchunk-sourced.
        terrain_source_legacy: chunks
            .iter()
            .filter(|chunk| chunk.legacy_terrain.is_some() && chunk.subchunks.is_empty())
            .count(),
        terrain_source_subchunk: chunks
            .iter()
            .filter(|chunk| !chunk.subchunks.is_empty())
            .count(),
        legacy_pocket_chunks: 0,
        detected_format: WorldFormat::LevelDb,
        computed_surface_columns: chunks
            .iter()
            .filter_map(|chunk| chunk.column_samples.as_ref())
            .map(TerrainColumnSamples::sampled_columns)
            .sum(),
        raw_height_mismatch_columns: chunks.iter().map(raw_height_mismatch_columns).sum(),
        missing_subchunk_columns: chunks.iter().map(missing_surface_columns).sum(),
        legacy_fallback_columns: chunks
            .iter()
            .filter_map(|chunk| chunk.column_samples.as_ref())
            .flat_map(TerrainColumnSamples::iter)
            .filter(|sample| sample.source == TerrainSampleSource::LegacyFallback)
            .count(),
        legacy_biome_preferred_columns: chunks
            .iter()
            .filter_map(|chunk| chunk.column_samples.as_ref())
            .flat_map(TerrainColumnSamples::iter)
            .filter(|sample| matches!(sample.biome, Some(TerrainColumnBiome::Legacy(_))))
            .count(),
        // Columns that had legacy biome data available but ended up using a
        // modern biome id anyway.
        modern_biome_fallback_columns: chunks
            .iter()
            .filter(|chunk| chunk.legacy_biomes.is_some())
            .filter_map(|chunk| chunk.column_samples.as_ref())
            .flat_map(TerrainColumnSamples::iter)
            .filter(|sample| matches!(sample.biome, Some(TerrainColumnBiome::Id(_))))
            .count(),
    }
}
4302
/// Emits a single debug-level log line summarizing a finished render chunk
/// load, with every `RenderLoadStats` counter spelled out for grepping.
fn log_render_load_complete(stats: &RenderLoadStats) {
    log::debug!(
        "render chunk load complete (requested_chunks={}, loaded_chunks={}, missing_chunks={}, subchunks_decoded={}, legacy_terrain_records={}, legacy_biome_samples={}, legacy_biome_colors={}, terrain_source_legacy={}, terrain_source_subchunk={}, legacy_pocket_chunks={}, detected_format={:?}, computed_surface_columns={}, raw_height_mismatch_columns={}, missing_subchunk_columns={}, legacy_fallback_columns={}, legacy_biome_preferred_columns={}, modern_biome_fallback_columns={}, worker_threads={}, queue_wait_ms={}, load_ms={}, exact_get_batches={}, keys_requested={}, keys_found={}, prefix_scans={}, db_read_ms={}, decode_ms={}, biome_parse_ms={}, subchunk_parse_ms={}, surface_scan_ms={}, block_entity_parse_ms={}, full_reload_ms={})",
        stats.requested_chunks,
        stats.loaded_chunks,
        // Derived on the fly: chunks requested but not loaded.
        stats.requested_chunks.saturating_sub(stats.loaded_chunks),
        stats.subchunks_decoded,
        stats.legacy_terrain_records,
        stats.legacy_biome_samples,
        stats.legacy_biome_colors,
        stats.terrain_source_legacy,
        stats.terrain_source_subchunk,
        stats.legacy_pocket_chunks,
        stats.detected_format,
        stats.computed_surface_columns,
        stats.raw_height_mismatch_columns,
        stats.missing_subchunk_columns,
        stats.legacy_fallback_columns,
        stats.legacy_biome_preferred_columns,
        stats.modern_biome_fallback_columns,
        stats.worker_threads,
        stats.queue_wait_ms,
        stats.load_ms,
        stats.exact_get_batches,
        stats.keys_requested,
        stats.keys_found,
        stats.prefix_scans,
        stats.db_read_ms,
        stats.decode_ms,
        stats.biome_parse_ms,
        stats.subchunk_parse_ms,
        stats.surface_scan_ms,
        stats.block_entity_parse_ms,
        stats.full_reload_ms
    );
}
4339
4340fn world_pool(worker_count: usize) -> Result<rayon::ThreadPool> {
4341 ThreadPoolBuilder::new()
4342 .num_threads(worker_count.max(1).saturating_add(1))
4343 .thread_name(|index| format!("bedrock-world-worker-{index}"))
4344 .build()
4345 .map_err(|error| {
4346 BedrockWorldError::Validation(format!("failed to build world worker pool: {error}"))
4347 })
4348}
4349
/// Translates world-level scan options into the storage layer's read
/// options: threading mode, scan mode, pipeline tuning, and adapters for
/// the cancel flag and progress sink.
fn to_storage_read_options(options: &WorldScanOptions) -> StorageReadOptions {
    StorageReadOptions {
        threading: match options.threading {
            WorldThreadingOptions::Auto => StorageThreadingOptions::Auto,
            WorldThreadingOptions::Fixed(threads) => StorageThreadingOptions::Fixed(threads),
            WorldThreadingOptions::Single => StorageThreadingOptions::Single,
        },
        // Single-threaded scans go sequential; everything else scans tables
        // in parallel.
        scan_mode: match options.threading {
            WorldThreadingOptions::Single => StorageScanMode::Sequential,
            WorldThreadingOptions::Auto | WorldThreadingOptions::Fixed(_) => {
                StorageScanMode::ParallelTables
            }
        },
        pipeline: crate::storage::StoragePipelineOptions {
            queue_depth: options.pipeline.queue_depth,
            table_batch_size: options.pipeline.chunk_batch_size,
            progress_interval: options.pipeline.progress_interval,
        },
        // Share the caller's cancel flag with the storage layer.
        cancel: options
            .cancel
            .as_ref()
            .map(|cancel| StorageCancelFlag::from_shared(cancel.0.clone())),
        // Wrap the world-level progress callback so storage progress events
        // are re-emitted as WorldScanProgress.
        progress: options.progress.as_ref().map(|progress| {
            let progress = progress.clone();
            StorageProgressSink::new(move |storage_progress| {
                progress.emit(WorldScanProgress {
                    entries_seen: storage_progress.entries_seen,
                });
            })
        }),
    }
}
4382
4383fn chunk_record_prefix(pos: ChunkPos) -> Bytes {
4384 let mut bytes = Vec::with_capacity(if pos.dimension == crate::Dimension::Overworld {
4385 8
4386 } else {
4387 12
4388 });
4389 bytes.extend_from_slice(&pos.x.to_le_bytes());
4390 bytes.extend_from_slice(&pos.z.to_le_bytes());
4391 if pos.dimension != crate::Dimension::Overworld {
4392 bytes.extend_from_slice(&pos.dimension.id().to_le_bytes());
4393 }
4394 Bytes::from(bytes)
4395}
4396
4397fn validate_render_region(region: RenderChunkRegion) -> Result<()> {
4398 if region.min_chunk_x > region.max_chunk_x || region.min_chunk_z > region.max_chunk_z {
4399 return Err(BedrockWorldError::Validation(format!(
4400 "invalid render region: min=({}, {}) max=({}, {})",
4401 region.min_chunk_x, region.min_chunk_z, region.max_chunk_x, region.max_chunk_z
4402 )));
4403 }
4404 Ok(())
4405}
4406
4407fn render_block_entity_from_nbt(nbt: NbtTag) -> RenderBlockEntity {
4408 let root = match &nbt {
4409 NbtTag::Compound(root) => Some(root),
4410 _ => None,
4411 };
4412 RenderBlockEntity {
4413 id: root
4414 .and_then(|root| nbt_string_field(root, "id"))
4415 .map(ToString::to_string),
4416 position: root.and_then(|root| {
4417 Some([
4418 nbt_int_field(root, "x")?,
4419 nbt_int_field(root, "y")?,
4420 nbt_int_field(root, "z")?,
4421 ])
4422 }),
4423 nbt,
4424 }
4425}
4426
4427fn nbt_string_field<'a>(
4428 root: &'a indexmap::IndexMap<String, NbtTag>,
4429 key: &str,
4430) -> Option<&'a str> {
4431 match root.get(key) {
4432 Some(NbtTag::String(value)) => Some(value),
4433 _ => None,
4434 }
4435}
4436
4437fn nbt_int_field(root: &indexmap::IndexMap<String, NbtTag>, key: &str) -> Option<i32> {
4438 match root.get(key) {
4439 Some(NbtTag::Byte(value)) => Some(i32::from(*value)),
4440 Some(NbtTag::Short(value)) => Some(i32::from(*value)),
4441 Some(NbtTag::Int(value)) => Some(*value),
4442 Some(NbtTag::Long(value)) => i32::try_from(*value).ok(),
4443 _ => None,
4444 }
4445}
4446
4447fn detect_world_format(path: &Path, hint: WorldFormatHint) -> Result<WorldFormat> {
4448 match hint {
4449 WorldFormatHint::Auto => {
4450 if path.join("db").join("CURRENT").is_file() {
4451 return Ok(detect_leveldb_world_format(path));
4452 }
4453 if path.join("chunks.dat").is_file() {
4454 return Ok(WorldFormat::PocketChunksDat);
4455 }
4456 Err(BedrockWorldError::Validation(format!(
4457 "could not detect Bedrock world storage at {}; expected db/CURRENT or chunks.dat",
4458 path.display()
4459 )))
4460 }
4461 WorldFormatHint::LevelDb => {
4462 let current = path.join("db").join("CURRENT");
4463 if !current.is_file() {
4464 return Err(BedrockWorldError::Validation(format!(
4465 "LevelDB world missing {}",
4466 current.display()
4467 )));
4468 }
4469 Ok(detect_leveldb_world_format(path))
4470 }
4471 WorldFormatHint::PocketChunksDat => {
4472 let chunks = path.join("chunks.dat");
4473 if !chunks.is_file() {
4474 return Err(BedrockWorldError::Validation(format!(
4475 "Pocket chunks.dat world missing {}",
4476 chunks.display()
4477 )));
4478 }
4479 Ok(WorldFormat::PocketChunksDat)
4480 }
4481 }
4482}
4483
4484fn detect_leveldb_world_format(path: &Path) -> WorldFormat {
4485 let Ok(document) = read_level_dat_document(&path.join("level.dat")) else {
4486 return WorldFormat::LevelDb;
4487 };
4488 let NbtTag::Compound(root) = &document.root else {
4489 return WorldFormat::LevelDb;
4490 };
4491 let storage_version = nbt_int_field(root, "StorageVersion");
4492 let network_version = nbt_int_field(root, "NetworkVersion");
4493 if storage_version.is_some_and(|version| version <= 4)
4494 || network_version.is_some_and(|version| version <= 91)
4495 {
4496 WorldFormat::LevelDbLegacyTerrain
4497 } else {
4498 WorldFormat::LevelDb
4499 }
4500}
4501
4502#[cfg(test)]
4503mod tests {
4504 use super::*;
4505 use crate::{Dimension, HardcodedSpawnAreaKind, MemoryStorage, NbtTag, block_storage_index};
4506 use indexmap::IndexMap;
4507 use std::sync::Arc;
4508
4509 #[cfg(feature = "backend-bedrock-leveldb")]
4510 fn temp_world_dir(name: &str) -> PathBuf {
4511 use std::time::{SystemTime, UNIX_EPOCH};
4512
4513 std::env::temp_dir().join(format!(
4514 "bedrock-world-{name}-{}",
4515 SystemTime::now()
4516 .duration_since(UNIX_EPOCH)
4517 .expect("time")
4518 .as_nanos()
4519 ))
4520 }
4521
4522 const fn exact_surface_request(
4523 subchunks: ExactSurfaceSubchunkPolicy,
4524 biome: ExactSurfaceBiomeLoad,
4525 block_entities: bool,
4526 ) -> RenderChunkRequest {
4527 RenderChunkRequest::ExactSurface {
4528 subchunks,
4529 biome,
4530 block_entities,
4531 }
4532 }
4533
    /// `Auto` resolves to the machine's available parallelism (capped by
    /// the job count), `Fixed` is accepted up to `MAX_WORLD_THREADS`, and
    /// both zero and anything above that cap are rejected.
    #[test]
    fn world_threading_validates_fixed_range_and_auto_is_not_capped_to_eight() {
        let expected_auto = std::thread::available_parallelism()
            .map(usize::from)
            .unwrap_or(1)
            .min(10_000);
        assert_eq!(
            WorldThreadingOptions::Auto
                .resolve_checked(10_000)
                .expect("auto threads"),
            expected_auto
        );
        assert_eq!(
            WorldThreadingOptions::Fixed(MAX_WORLD_THREADS)
                .resolve_checked(10_000)
                .expect("max fixed threads"),
            MAX_WORLD_THREADS
        );
        // Out-of-range fixed values: zero and one past the cap.
        assert!(WorldThreadingOptions::Fixed(0).resolve_checked(10).is_err());
        assert!(
            WorldThreadingOptions::Fixed(MAX_WORLD_THREADS + 1)
                .resolve_checked(10)
                .is_err()
        );
    }
4559
    /// Round-trips a map record and a global (scoreboard) record through the
    /// world's blocking write/read APIs on in-memory storage, then verifies
    /// map deletion is observable.
    #[test]
    fn map_and_global_records_roundtrip_through_world_transactions() {
        let storage = Arc::new(MemoryStorage::new());
        let world = BedrockWorld::from_storage(
            "memory",
            storage.clone(),
            OpenOptions {
                read_only: false,
                ..OpenOptions::default()
            },
        );
        let map_id = MapRecordId::new("9").expect("map id");
        // Minimal map payload: a single root compound carrying `scale`.
        let map = ParsedMapData {
            id: map_id.to_string(),
            record_id: map_id.clone(),
            roots: vec![NbtTag::Compound(IndexMap::from([(
                "scale".to_string(),
                NbtTag::Byte(1),
            )]))],
            known_fields: crate::MapKnownFields::default(),
            pixels: None,
            raw: Bytes::new(),
        };

        world.write_map_record_blocking(&map).expect("write map");
        // Reading back should re-derive the known fields from the NBT.
        let read_map = world
            .read_map_record_blocking(&map_id)
            .expect("read map")
            .expect("map exists");
        assert_eq!(read_map.known_fields.scale, Some(1));

        let global = ParsedGlobalData {
            name: "scoreboard".to_string(),
            kind: GlobalRecordKind::Scoreboard,
            roots: vec![NbtTag::Compound(IndexMap::new())],
            raw: Bytes::new(),
        };
        world
            .write_global_record_blocking(&global)
            .expect("write global");
        assert!(
            world
                .read_global_record_blocking(GlobalRecordKind::Scoreboard)
                .expect("read global")
                .is_some()
        );

        // Deleting the map record must make subsequent reads return None.
        world
            .delete_map_record_blocking(&map_id)
            .expect("delete map");
        assert!(
            world
                .read_map_record_blocking(&map_id)
                .expect("read deleted")
                .is_none()
        );
    }
4617
    /// Round-trips a hardcoded spawn area and a block entity for one chunk
    /// through the world's blocking APIs on in-memory storage.
    #[test]
    fn hsa_and_block_entities_roundtrip_with_chunk_validation() {
        let storage = Arc::new(MemoryStorage::new());
        let world = BedrockWorld::from_storage(
            "memory",
            storage,
            OpenOptions {
                read_only: false,
                ..OpenOptions::default()
            },
        );
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        let area = ParsedHardcodedSpawnArea {
            kind: HardcodedSpawnAreaKind::NetherFortress,
            min: [0, 32, 0],
            max: [15, 80, 15],
        };
        world
            .put_hsa_for_chunk_blocking(pos, std::slice::from_ref(&area))
            .expect("write hsa");
        // A scan should surface exactly the area just written.
        assert_eq!(
            world
                .scan_hsa_records_blocking(WorldScanOptions::default())
                .expect("scan hsa")[0]
                .1,
            vec![area]
        );

        // Chest block entity whose NBT carries id and x/y/z coordinates.
        let block_entity = ParsedBlockEntity {
            id: Some("Chest".to_string()),
            position: Some([1, 64, 1]),
            is_movable: Some(true),
            custom_name: None,
            items: Vec::new(),
            nbt: NbtTag::Compound(IndexMap::from([
                ("id".to_string(), NbtTag::String("Chest".to_string())),
                ("x".to_string(), NbtTag::Int(1)),
                ("y".to_string(), NbtTag::Int(64)),
                ("z".to_string(), NbtTag::Int(1)),
            ])),
        };
        world
            .put_block_entities_blocking(pos, std::slice::from_ref(&block_entity))
            .expect("write block entity");
        assert_eq!(
            world
                .block_entities_in_chunk_blocking(pos)
                .expect("read block entities")[0]
                .entity
                .position,
            Some([1, 64, 1])
        );
    }
4675
    /// Writing an actor must create both the per-chunk digest entry and the
    /// actor's own record; deleting it must remove both again.
    #[test]
    fn actor_write_updates_digest_and_prefix_together() {
        let storage = Arc::new(MemoryStorage::new());
        let world = BedrockWorld::from_storage(
            "memory",
            storage.clone(),
            OpenOptions {
                read_only: false,
                ..OpenOptions::default()
            },
        );
        let pos = ChunkPos {
            x: 2,
            z: 3,
            dimension: Dimension::Overworld,
        };
        // NBT mirror of the ParsedEntity fields below.
        let actor_nbt = NbtTag::Compound(IndexMap::from([
            (
                "identifier".to_string(),
                NbtTag::String("minecraft:pig".to_string()),
            ),
            ("UniqueID".to_string(), NbtTag::Long(77)),
            (
                "Pos".to_string(),
                NbtTag::List(vec![
                    NbtTag::Float(32.0),
                    NbtTag::Float(64.0),
                    NbtTag::Float(48.0),
                ]),
            ),
        ]));
        let actor = ParsedEntity {
            identifier: Some("minecraft:pig".to_string()),
            definitions: Vec::new(),
            unique_id: Some(77),
            position: Some([32.0, 64.0, 48.0]),
            rotation: None,
            motion: None,
            items: Vec::new(),
            nbt: actor_nbt,
        };

        world.put_actor_blocking(pos, &actor).expect("put actor");
        // The chunk's digest must now list the actor's uid...
        let digest = storage
            .get(&ActorDigestKey::new(pos).storage_key())
            .expect("get digest")
            .expect("digest exists");
        assert_eq!(
            parse_actor_digest_ids(&digest).expect("parse digest"),
            vec![ActorUid(77)]
        );
        // ...and the actor record itself must exist under its uid key.
        assert!(
            storage
                .get(&ActorUid(77).storage_key())
                .expect("get actor")
                .is_some()
        );

        world
            .delete_actor_blocking(pos, ActorUid(77))
            .expect("delete actor");
        // Deletion removes both the digest and the actor record.
        assert!(
            storage
                .get(&ActorDigestKey::new(pos).storage_key())
                .expect("get deleted digest")
                .is_none()
        );
        assert!(
            storage
                .get(&ActorUid(77).storage_key())
                .expect("get deleted actor")
                .is_none()
        );
    }
4750
    /// Distance-from-center priority must sort chunk positions by their
    /// distance to the given center chunk, nearest first.
    #[test]
    fn render_chunk_priority_distance_orders_from_center() {
        // Deliberately out of order along the X axis.
        let mut positions = vec![
            ChunkPos {
                x: 12,
                z: 0,
                dimension: Dimension::Overworld,
            },
            ChunkPos {
                x: 1,
                z: 0,
                dimension: Dimension::Overworld,
            },
            ChunkPos {
                x: -3,
                z: 0,
                dimension: Dimension::Overworld,
            },
            ChunkPos {
                x: 0,
                z: 0,
                dimension: Dimension::Overworld,
            },
        ];

        sort_render_chunk_positions(
            &mut positions,
            RenderChunkPriority::DistanceFrom {
                chunk_x: 0,
                chunk_z: 0,
            },
        );

        let ordered = positions
            .iter()
            .map(|pos| (pos.x, pos.z))
            .collect::<Vec<_>>();
        // Distances from (0, 0): 0, 1, 3, 12.
        assert_eq!(ordered, vec![(0, 0), (1, 0), (-3, 0), (12, 0)]);
    }
4790
4791 #[test]
4792 fn world_pipeline_options_resolve_automatic_bounds() {
4793 let options = WorldPipelineOptions::default();
4794
4795 assert!(options.resolve_queue_depth(4, 64) >= 1);
4796 assert_eq!(options.resolve_progress_interval(), 256);
4797
4798 let explicit = WorldPipelineOptions {
4799 queue_depth: 7,
4800 progress_interval: 9,
4801 ..WorldPipelineOptions::default()
4802 };
4803 assert_eq!(explicit.resolve_queue_depth(4, 64), 7);
4804 assert_eq!(explicit.resolve_progress_interval(), 9);
4805 }
4806
4807 #[test]
4808 fn generic_memory_storage_matches_dynamic_storage_queries() {
4809 let storage = MemoryStorage::new();
4810 storage
4811 .put(b"~local_player", b"local")
4812 .expect("put local player");
4813 storage
4814 .put(b"player_remote", b"remote")
4815 .expect("put remote player");
4816
4817 let generic_world =
4818 BedrockWorld::from_typed_storage("memory", storage.clone(), OpenOptions::default());
4819 let dynamic_world = BedrockWorld::from_storage(
4820 "memory",
4821 Arc::new(storage) as Arc<dyn WorldStorage>,
4822 OpenOptions::default(),
4823 );
4824
4825 assert_eq!(
4826 generic_world.list_players_blocking().expect("generic"),
4827 dynamic_world.list_players_blocking().expect("dynamic")
4828 );
4829 assert_eq!(
4830 generic_world
4831 .classify_keys_blocking(WorldScanOptions::default())
4832 .expect("generic classify"),
4833 dynamic_world
4834 .classify_keys_blocking(WorldScanOptions::default())
4835 .expect("dynamic classify")
4836 );
4837 }
4838
    #[cfg(feature = "backend-bedrock-leveldb")]
    #[test]
    fn generic_leveldb_storage_matches_dynamic_storage_queries() {
        // Same typed-vs-dynamic equivalence check as the memory-storage test,
        // but backed by an on-disk bedrock-leveldb database.
        let temp = temp_world_dir("generic-leveldb");
        std::fs::create_dir_all(&temp).expect("temp dir");
        let db_path = temp.join("db");
        // Open once (and drop) to initialize the database files on disk, then
        // reopen through the storage wrapper under test.
        let db = bedrock_leveldb::Db::open(&db_path, bedrock_leveldb::OpenOptions::default())
            .expect("initialize db");
        drop(db);
        let storage = BedrockLevelDbStorage::open(&db_path).expect("open storage");
        storage
            .put(b"~local_player", b"local")
            .expect("put local player");
        storage
            .put(b"player_remote", b"remote")
            .expect("put remote player");
        storage.flush().expect("flush");

        // Typed and dyn-trait constructors over the same database.
        let generic_world =
            BedrockWorld::from_typed_storage(&temp, storage.clone(), OpenOptions::default());
        let dynamic_world = BedrockWorld::from_storage(
            &temp,
            Arc::new(storage) as Arc<dyn WorldStorage>,
            OpenOptions::default(),
        );

        assert_eq!(
            generic_world.list_players_blocking().expect("generic"),
            dynamic_world.list_players_blocking().expect("dynamic")
        );
        assert_eq!(
            generic_world
                .classify_keys_blocking(WorldScanOptions::default())
                .expect("generic classify"),
            dynamic_world
                .classify_keys_blocking(WorldScanOptions::default())
                .expect("dynamic classify")
        );
        // NOTE(review): cleanup is skipped if an earlier expect panics, which
        // leaks the temp dir; acceptable for a test.
        std::fs::remove_dir_all(temp).expect("cleanup");
    }
4879
4880 #[test]
4881 fn transaction_respects_read_only_option() {
4882 let pos = ChunkPos {
4883 x: 0,
4884 z: 0,
4885 dimension: Dimension::Overworld,
4886 };
4887 let key = ChunkKey::new(pos, ChunkRecordTag::Version);
4888 let encoded = key.encode();
4889 let storage = Arc::new(MemoryStorage::new());
4890 let read_only_world =
4891 BedrockWorld::from_storage("memory", storage.clone(), OpenOptions::default());
4892 let mut transaction = read_only_world.transaction();
4893 transaction.put_raw_record(&key, Bytes::from_static(b"\x01"));
4894
4895 let error = transaction.commit().expect_err("read-only commit");
4896
4897 assert_eq!(error.kind(), crate::BedrockWorldErrorKind::ReadOnly);
4898 assert_eq!(storage.get(&encoded).expect("get"), None);
4899
4900 let writable_world = BedrockWorld::from_storage(
4901 "memory",
4902 storage.clone(),
4903 OpenOptions {
4904 read_only: false,
4905 ..OpenOptions::default()
4906 },
4907 );
4908 let mut transaction = writable_world.transaction();
4909 transaction.put_raw_record(&key, Bytes::from_static(b"\x02"));
4910 transaction.commit().expect("writable commit");
4911
4912 assert_eq!(
4913 storage.get(&encoded).expect("get"),
4914 Some(Bytes::from_static(b"\x02"))
4915 );
4916 }
4917
4918 #[test]
4919 fn biome_and_height_queries_read_legacy_data2d_in_zx_column_order() {
4920 let pos = ChunkPos {
4921 x: 0,
4922 z: 0,
4923 dimension: Dimension::Overworld,
4924 };
4925 let storage = Arc::new(MemoryStorage::new());
4926 storage
4927 .put(
4928 &ChunkKey::new(pos, ChunkRecordTag::Data2D).encode(),
4929 &test_asymmetric_data2d_bytes(),
4930 )
4931 .expect("put Data2D");
4932 let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());
4933
4934 assert_eq!(
4935 world
4936 .get_biome_id_blocking(pos, 3, 2, 64)
4937 .expect("biome id"),
4938 Some(32)
4939 );
4940 assert_eq!(
4941 world
4942 .get_biome_id_blocking(pos, 2, 3, 64)
4943 .expect("biome id"),
4944 Some(23)
4945 );
4946 assert_eq!(
4947 world.get_height_at_blocking(pos, 3, 2).expect("height"),
4948 Some(132)
4949 );
4950 assert_eq!(
4951 world.get_height_at_blocking(pos, 2, 3).expect("height"),
4952 Some(123)
4953 );
4954 }
4955
    #[test]
    fn data3d_height_map_is_normalized_to_dimension_min_y() {
        // A raw Data3D height of 130 is reported as 66 after normalization —
        // a 64-block offset, consistent with the overworld's minimum y
        // (presumably -64; confirm against the dimension constants).
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        let storage = Arc::new(MemoryStorage::new());
        storage
            .put(
                &ChunkKey::new(pos, ChunkRecordTag::Data3D).encode(),
                &test_data3d_height_bytes(130),
            )
            .expect("put Data3D");
        let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());

        // Point query at (x = 4, z = 2) sees the normalized value.
        assert_eq!(
            world.get_height_at_blocking(pos, 4, 2).expect("height"),
            Some(66)
        );
        let chunk = world
            .load_render_chunk_blocking(
                pos,
                RenderChunkLoadOptions {
                    request: RenderChunkRequest::RawHeightMap,
                    ..RenderChunkLoadOptions::default()
                },
            )
            .expect("load render chunk");

        // The render-chunk height map is indexed [z][x] and carries the same
        // normalized value; raw-heightmap requests never build column samples.
        assert_eq!(
            chunk.height_map.expect("height map")[usize::from(2_u8)][usize::from(4_u8)],
            Some(66)
        );
        assert!(chunk.column_samples.is_none());
    }
4992
4993 #[test]
4994 fn render_chunk_exact_load_preserves_data2d_xz_height_and_biome_coordinates() {
4995 let pos = ChunkPos {
4996 x: 0,
4997 z: 0,
4998 dimension: Dimension::Overworld,
4999 };
5000 let storage = Arc::new(MemoryStorage::new());
5001 storage
5002 .put(
5003 &ChunkKey::new(pos, ChunkRecordTag::Data2D).encode(),
5004 &test_asymmetric_data2d_bytes(),
5005 )
5006 .expect("put Data2D");
5007 let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());
5008
5009 let chunk = world
5010 .load_render_chunk_blocking(pos, RenderChunkLoadOptions::default())
5011 .expect("load render chunk");
5012 let height_map = chunk.height_map.as_ref().expect("height map");
5013 let biome_storage = chunk
5014 .biome_data
5015 .values()
5016 .next()
5017 .expect("render biome storage");
5018
5019 assert_eq!(height_map[3][1], Some(113));
5020 assert_eq!(height_map[1][3], Some(131));
5021 assert_eq!(biome_storage.biome_id_at(1, 0, 3), Some(13));
5022 assert_eq!(biome_storage.biome_id_at(3, 0, 1), Some(31));
5023 }
5024
5025 #[test]
5026 fn subchunk_layer_query_uses_block_y() {
5027 let pos = ChunkPos {
5028 x: 0,
5029 z: 0,
5030 dimension: Dimension::Overworld,
5031 };
5032 let storage = Arc::new(MemoryStorage::new());
5033 storage
5034 .put(&ChunkKey::subchunk(pos, -1).encode(), &[8, 0])
5035 .expect("put subchunk");
5036 let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());
5037
5038 let subchunk = world
5039 .get_subchunk_layer_blocking(pos, -1, SubChunkDecodeMode::CountsOnly)
5040 .expect("query")
5041 .expect("subchunk");
5042 assert_eq!(subchunk.y, -1);
5043 }
5044
    #[test]
    fn render_chunk_needed_surface_subchunks_avoids_full_y_range() {
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        let storage = Arc::new(MemoryStorage::new());
        // Heightmap puts the surface at y == 65 (inside subchunk 4); that
        // subchunk record is the only terrain data stored.
        storage
            .put(
                &ChunkKey::new(pos, ChunkRecordTag::Data2D).encode(),
                &test_data2d_bytes(65, 7),
            )
            .expect("put Data2D");
        storage
            .put(
                &ChunkKey::subchunk(pos, 4).encode(),
                &test_surface_subchunk_bytes(),
            )
            .expect("put subchunk");
        let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());

        // Hint-then-verify: load only the subchunks the heightmap indicates.
        let needed = world
            .load_render_chunk_blocking(
                pos,
                RenderChunkLoadOptions {
                    request: exact_surface_request(
                        ExactSurfaceSubchunkPolicy::HintThenVerify,
                        ExactSurfaceBiomeLoad::TopColumns,
                        false,
                    ),
                    ..RenderChunkLoadOptions::default()
                },
            )
            .expect("needed render chunk");
        // Full policy: baseline loading the whole y range for comparison.
        let full = world
            .load_render_chunk_blocking(
                pos,
                RenderChunkLoadOptions {
                    request: exact_surface_request(
                        ExactSurfaceSubchunkPolicy::Full,
                        ExactSurfaceBiomeLoad::TopColumns,
                        false,
                    ),
                    ..RenderChunkLoadOptions::default()
                },
            )
            .expect("full render chunk");
        // The hinted load must include the surface subchunk, decode it
        // identically to the full load, and never load more subchunks.
        assert!(needed.subchunks.contains_key(&4));
        assert_eq!(needed.subchunks.get(&4), full.subchunks.get(&4));
        assert!(needed.subchunks.len() <= full.subchunks.len());
    }
5097
    #[test]
    fn render_chunk_needed_surface_subchunks_include_lookup_above_heightmap() {
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        let storage = Arc::new(MemoryStorage::new());
        // Heightmap claims the surface at y == 64 (subchunk 4), but blocks
        // actually continue into subchunk 5 directly above it.
        storage
            .put(
                &ChunkKey::new(pos, ChunkRecordTag::Data2D).encode(),
                &test_data2d_bytes(64, 7),
            )
            .expect("put Data2D");
        storage
            .put(
                &ChunkKey::subchunk(pos, 4).encode(),
                &test_uniform_named_subchunk_bytes("minecraft:stone"),
            )
            .expect("put heightmap subchunk");
        storage
            .put(
                &ChunkKey::subchunk(pos, 5).encode(),
                &test_uniform_named_subchunk_bytes("minecraft:oak_leaves"),
            )
            .expect("put upper subchunk");
        let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());

        let chunk = world
            .load_render_chunk_blocking(
                pos,
                RenderChunkLoadOptions {
                    request: exact_surface_request(
                        ExactSurfaceSubchunkPolicy::HintThenVerify,
                        ExactSurfaceBiomeLoad::TopColumns,
                        false,
                    ),
                    ..RenderChunkLoadOptions::default()
                },
            )
            .expect("needed render chunk");

        // Verification pulls in subchunk 5 above the hint, does not wander to
        // unrelated high subchunks, and reports the leaves at y == 95 as the
        // true surface.
        assert!(chunk.subchunks.contains_key(&4));
        assert!(chunk.subchunks.contains_key(&5));
        assert!(!chunk.subchunks.contains_key(&9));
        let sample = chunk
            .column_sample_at(0, 0)
            .expect("computed surface sample");
        assert_eq!(sample.surface_y, 95);
        assert_eq!(sample.surface_block_state.name, "minecraft:oak_leaves");
    }
5149
    #[test]
    fn render_chunk_needed_exact_surface_reloads_full_when_window_top_is_touched() {
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        let storage = Arc::new(MemoryStorage::new());
        // Heightmap points far below the real terrain: stone fills subchunk 8
        // and leaves sit in subchunk 9 above it.
        storage
            .put(
                &ChunkKey::new(pos, ChunkRecordTag::Data2D).encode(),
                &test_data2d_bytes(64, 7),
            )
            .expect("put Data2D");
        storage
            .put(
                &ChunkKey::subchunk(pos, 8).encode(),
                &test_uniform_named_subchunk_bytes("minecraft:stone"),
            )
            .expect("put window-top subchunk");
        storage
            .put(
                &ChunkKey::subchunk(pos, 9).encode(),
                &test_uniform_named_subchunk_bytes("minecraft:oak_leaves"),
            )
            .expect("put hidden upper subchunk");
        let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());

        let chunk = world
            .load_render_chunk_blocking(
                pos,
                RenderChunkLoadOptions {
                    request: exact_surface_request(
                        ExactSurfaceSubchunkPolicy::HintThenVerify,
                        ExactSurfaceBiomeLoad::TopColumns,
                        false,
                    ),
                    ..RenderChunkLoadOptions::default()
                },
            )
            .expect("needed render chunk");

        // Since terrain touches the top of the hint window, the loader must
        // widen to the full range, include subchunks 8 and 9, and find the
        // leaves at y == 159 as the surface.
        assert!(chunk.subchunks.contains_key(&8));
        assert!(chunk.subchunks.contains_key(&9));
        let sample = chunk
            .column_sample_at(0, 0)
            .expect("computed surface sample");
        assert_eq!(sample.surface_y, 159);
        assert_eq!(sample.surface_block_state.name, "minecraft:oak_leaves");
    }
5200
    #[test]
    fn render_chunk_needed_exact_surface_reloads_full_when_raw_height_is_stale() {
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        let storage = Arc::new(MemoryStorage::new());
        // Stale heightmap (surface claimed at y == 0) while the real roof is
        // much higher, in subchunk 10; the hint window only sees air.
        storage
            .put(
                &ChunkKey::new(pos, ChunkRecordTag::Data2D).encode(),
                &test_data2d_bytes(0, 7),
            )
            .expect("put stale Data2D");
        storage
            .put(
                &ChunkKey::subchunk(pos, 0).encode(),
                &test_uniform_named_subchunk_bytes("minecraft:stone"),
            )
            .expect("put stale-height subchunk");
        storage
            .put(
                &ChunkKey::subchunk(pos, 4).encode(),
                &test_uniform_named_subchunk_bytes("minecraft:air"),
            )
            .expect("put high empty hint-window subchunk");
        storage
            .put(
                &ChunkKey::subchunk(pos, 10).encode(),
                &test_uniform_named_subchunk_bytes("minecraft:oak_leaves"),
            )
            .expect("put true roof subchunk");
        let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());

        let chunk = world
            .load_render_chunk_blocking(
                pos,
                RenderChunkLoadOptions {
                    request: exact_surface_request(
                        ExactSurfaceSubchunkPolicy::HintThenVerify,
                        ExactSurfaceBiomeLoad::TopColumns,
                        false,
                    ),
                    ..RenderChunkLoadOptions::default()
                },
            )
            .expect("needed render chunk");

        // Detecting the stale hint must trigger a full reload that reaches
        // subchunk 10 and reports the leaves at y == 175 as the surface.
        assert!(chunk.subchunks.contains_key(&10));
        let sample = chunk
            .column_sample_at(0, 0)
            .expect("computed surface sample");
        assert_eq!(sample.surface_y, 175);
        assert_eq!(sample.surface_block_state.name, "minecraft:oak_leaves");
    }
5256
5257 #[test]
5258 fn render_chunk_raw_heightmap_request_does_not_build_surface_samples() {
5259 let pos = ChunkPos {
5260 x: 0,
5261 z: 0,
5262 dimension: Dimension::Overworld,
5263 };
5264 let storage = Arc::new(MemoryStorage::new());
5265 storage
5266 .put(
5267 &ChunkKey::new(pos, ChunkRecordTag::Data2D).encode(),
5268 &test_data2d_bytes(0, 7),
5269 )
5270 .expect("put raw height");
5271 storage
5272 .put(
5273 &ChunkKey::subchunk(pos, 10).encode(),
5274 &test_uniform_named_subchunk_bytes("minecraft:oak_leaves"),
5275 )
5276 .expect("put high surface subchunk");
5277 let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());
5278
5279 let chunk = world
5280 .load_render_chunk_blocking(
5281 pos,
5282 RenderChunkLoadOptions {
5283 request: RenderChunkRequest::RawHeightMap,
5284 ..RenderChunkLoadOptions::default()
5285 },
5286 )
5287 .expect("load raw heightmap chunk");
5288
5289 assert_eq!(chunk.height_map.as_ref().unwrap()[0][0], Some(0));
5290 assert!(chunk.column_samples.is_none());
5291 assert!(chunk.subchunks.is_empty());
5292 }
5293
5294 #[test]
5295 fn render_chunk_needed_surface_subchunks_fall_back_to_full_without_heightmap() {
5296 let pos = ChunkPos {
5297 x: 0,
5298 z: 0,
5299 dimension: Dimension::Overworld,
5300 };
5301 let storage = Arc::new(MemoryStorage::new());
5302 storage
5303 .put(
5304 &ChunkKey::subchunk(pos, 5).encode(),
5305 &test_uniform_named_subchunk_bytes("minecraft:oak_leaves"),
5306 )
5307 .expect("put upper subchunk");
5308 let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());
5309
5310 let chunk = world
5311 .load_render_chunk_blocking(
5312 pos,
5313 RenderChunkLoadOptions {
5314 request: exact_surface_request(
5315 ExactSurfaceSubchunkPolicy::HintThenVerify,
5316 ExactSurfaceBiomeLoad::TopColumns,
5317 false,
5318 ),
5319 ..RenderChunkLoadOptions::default()
5320 },
5321 )
5322 .expect("needed render chunk");
5323
5324 assert!(chunk.subchunks.contains_key(&5));
5325 let sample = chunk
5326 .column_sample_at(0, 0)
5327 .expect("computed surface sample");
5328 assert_eq!(sample.surface_y, 95);
5329 assert_eq!(sample.surface_block_state.name, "minecraft:oak_leaves");
5330 }
5331
5332 #[test]
5333 fn render_chunk_loads_block_entities_when_requested() {
5334 let pos = ChunkPos {
5335 x: 0,
5336 z: 0,
5337 dimension: Dimension::Overworld,
5338 };
5339 let storage = Arc::new(MemoryStorage::new());
5340 let block_entity = NbtTag::Compound(IndexMap::from([
5341 ("id".to_string(), NbtTag::String("Banner".to_string())),
5342 ("x".to_string(), NbtTag::Int(3)),
5343 ("y".to_string(), NbtTag::Int(65)),
5344 ("z".to_string(), NbtTag::Int(4)),
5345 ]));
5346 storage
5347 .put(
5348 &ChunkKey::new(pos, ChunkRecordTag::BlockEntity).encode(),
5349 &crate::nbt::serialize_root_nbt(&block_entity).expect("serialize block entity"),
5350 )
5351 .expect("put block entity");
5352 let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());
5353
5354 let without_entities = world
5355 .load_render_chunk_blocking(pos, RenderChunkLoadOptions::default())
5356 .expect("load render chunk without block entities");
5357 let with_entities = world
5358 .load_render_chunk_blocking(
5359 pos,
5360 RenderChunkLoadOptions {
5361 request: exact_surface_request(
5362 ExactSurfaceSubchunkPolicy::Full,
5363 ExactSurfaceBiomeLoad::TopColumns,
5364 true,
5365 ),
5366 ..RenderChunkLoadOptions::default()
5367 },
5368 )
5369 .expect("load render chunk with block entities");
5370
5371 assert!(without_entities.block_entities.is_empty());
5372 assert_eq!(with_entities.block_entities.len(), 1);
5373 assert_eq!(
5374 with_entities.block_entities[0].id.as_deref(),
5375 Some("Banner")
5376 );
5377 assert_eq!(with_entities.block_entities[0].position, Some([3, 65, 4]));
5378 }
5379
5380 #[test]
5381 fn surface_column_query_returns_top_block_and_water_context() {
5382 let pos = ChunkPos {
5383 x: 0,
5384 z: 0,
5385 dimension: Dimension::Overworld,
5386 };
5387 let storage = Arc::new(MemoryStorage::new());
5388 storage
5389 .put(
5390 &ChunkKey::new(pos, ChunkRecordTag::Data2D).encode(),
5391 &test_data2d_bytes(65, 7),
5392 )
5393 .expect("put Data2D");
5394 storage
5395 .put(
5396 &ChunkKey::subchunk(pos, 4).encode(),
5397 &test_surface_subchunk_bytes(),
5398 )
5399 .expect("put subchunk");
5400 let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());
5401
5402 let column = world
5403 .get_surface_column_blocking(pos, 0, 0, SurfaceColumnOptions::default())
5404 .expect("surface query")
5405 .expect("surface column");
5406
5407 assert_eq!(column.y, 65);
5408 assert_eq!(column.block_name, "minecraft:water");
5409 assert_eq!(column.biome_id, Some(7));
5410 assert_eq!(column.water_depth, 1);
5411 assert_eq!(
5412 column.under_water_block_name.as_deref(),
5413 Some("minecraft:sand")
5414 );
5415 }
5416
5417 #[test]
5418 fn chunk_bounds_and_nearest_loaded_chunk_use_key_only_scan() {
5419 let storage = Arc::new(MemoryStorage::new());
5420 let positions = [
5421 ChunkPos {
5422 x: -4,
5423 z: 3,
5424 dimension: Dimension::Overworld,
5425 },
5426 ChunkPos {
5427 x: 2,
5428 z: -1,
5429 dimension: Dimension::Overworld,
5430 },
5431 ChunkPos {
5432 x: 9,
5433 z: 9,
5434 dimension: Dimension::Nether,
5435 },
5436 ];
5437 for pos in positions {
5438 storage
5439 .put(&ChunkKey::new(pos, ChunkRecordTag::Version).encode(), &[1])
5440 .expect("put chunk version");
5441 }
5442 let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());
5443
5444 let bounds = world
5445 .discover_chunk_bounds_blocking(Dimension::Overworld, WorldScanOptions::default())
5446 .expect("bounds")
5447 .expect("overworld bounds");
5448 assert_eq!(bounds.min_chunk_x, -4);
5449 assert_eq!(bounds.max_chunk_z, 3);
5450 assert_eq!(bounds.chunk_count, 2);
5451
5452 let nearest = world
5453 .nearest_loaded_chunk_to_spawn_blocking(
5454 Dimension::Overworld,
5455 0,
5456 0,
5457 WorldScanOptions::default(),
5458 )
5459 .expect("nearest")
5460 .expect("nearest chunk");
5461 assert_eq!(nearest.x, 2);
5462 assert_eq!(nearest.z, -1);
5463 }
5464
    #[test]
    #[allow(clippy::similar_names)]
    fn render_region_index_uses_key_only_scan_and_parallel_load_keeps_order() {
        let storage = Arc::new(MemoryStorage::new());
        // Two renderable overworld chunks; the region index should return
        // exactly these two, in this order.
        let render_positions = [
            ChunkPos {
                x: 0,
                z: 0,
                dimension: Dimension::Overworld,
            },
            ChunkPos {
                x: 1,
                z: 0,
                dimension: Dimension::Overworld,
            },
        ];
        for pos in render_positions {
            storage
                .put(
                    &ChunkKey::new(pos, ChunkRecordTag::Data2D).encode(),
                    &test_data2d_bytes(64, 3),
                )
                .expect("put render chunk");
        }
        // A version-only chunk inside the region: present but not renderable.
        storage
            .put(
                &ChunkKey::new(
                    ChunkPos {
                        x: 2,
                        z: 0,
                        dimension: Dimension::Overworld,
                    },
                    ChunkRecordTag::Version,
                )
                .encode(),
                &[1],
            )
            .expect("put non-render chunk");
        // A renderable chunk in the wrong dimension: must be filtered out.
        storage
            .put(
                &ChunkKey::new(
                    ChunkPos {
                        x: 0,
                        z: 0,
                        dimension: Dimension::Nether,
                    },
                    ChunkRecordTag::Data2D,
                )
                .encode(),
                &test_data2d_bytes(64, 3),
            )
            .expect("put nether chunk");

        let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());
        let visible = world
            .list_render_chunk_positions_in_region_blocking(
                RenderChunkRegion {
                    dimension: Dimension::Overworld,
                    min_chunk_x: 0,
                    min_chunk_z: 0,
                    max_chunk_x: 2,
                    max_chunk_z: 0,
                },
                WorldScanOptions {
                    threading: WorldThreadingOptions::Fixed(2),
                    ..WorldScanOptions::default()
                },
            )
            .expect("render region index");

        assert_eq!(visible, render_positions.to_vec());

        // A two-thread parallel load must preserve the requested order.
        let chunks = world
            .load_render_chunks_blocking(
                visible,
                RenderChunkLoadOptions {
                    threading: WorldThreadingOptions::Fixed(2),
                    ..RenderChunkLoadOptions::default()
                },
            )
            .expect("parallel render chunk load");
        assert_eq!(
            chunks.iter().map(|chunk| chunk.pos).collect::<Vec<_>>(),
            render_positions.to_vec()
        );
    }
5551
    #[test]
    fn legacy_terrain_is_renderable_and_exact_batch_loaded() {
        let storage = Arc::new(MemoryStorage::new());
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        // Only a LegacyTerrain record exists; the world is opened with the
        // legacy-terrain format hint.
        storage
            .put(
                &ChunkKey::new(pos, ChunkRecordTag::LegacyTerrain).encode(),
                &test_legacy_terrain_bytes(2, 65),
            )
            .expect("put legacy terrain");
        let world = BedrockWorld::from_storage_with_format(
            "memory",
            storage,
            OpenOptions::default(),
            WorldFormat::LevelDbLegacyTerrain,
        );

        // The legacy record alone makes the chunk show up in the render index.
        let positions = world
            .list_render_chunk_positions_in_region_blocking(
                RenderChunkRegion {
                    dimension: Dimension::Overworld,
                    min_chunk_x: 0,
                    min_chunk_z: 0,
                    max_chunk_x: 0,
                    max_chunk_z: 0,
                },
                WorldScanOptions::default(),
            )
            .expect("legacy render index");
        assert_eq!(positions, vec![pos]);

        let (chunks, stats) = world
            .load_render_chunks_with_stats_blocking(
                positions,
                RenderChunkLoadOptions {
                    threading: WorldThreadingOptions::Single,
                    ..RenderChunkLoadOptions::default()
                },
            )
            .expect("legacy exact render load");
        // The batch load fills terrain, height, and both legacy biome views
        // from the single record, using exact key reads (no prefix scans).
        assert_eq!(chunks.len(), 1);
        assert!(chunks[0].is_loaded);
        assert!(chunks[0].legacy_terrain.is_some());
        assert_eq!(chunks[0].height_map.as_ref().unwrap()[0][0], Some(65));
        assert!(chunks[0].legacy_biomes.is_some());
        assert!(chunks[0].legacy_biome_colors.is_some());
        assert_eq!(stats.prefix_scans, 0);
        assert_eq!(stats.legacy_terrain_records, 1);
        assert_eq!(stats.legacy_biome_samples, 1);
        assert_eq!(stats.legacy_biome_colors, 1);
        assert_eq!(stats.terrain_source_legacy, 1);
        assert_eq!(stats.detected_format, WorldFormat::LevelDbLegacyTerrain);
    }
5609
    #[test]
    fn legacy_terrain_biome_rgb_takes_priority_over_data2d_biome_id() {
        let storage = Arc::new(MemoryStorage::new());
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        // Legacy terrain with an explicit biome sample (id 12, RGB 34a853) at
        // column (0, 0) …
        let mut terrain = test_legacy_terrain_bytes(2, 65);
        write_legacy_biome_sample(&mut terrain, 0, 0, 12, 0x0034_a853);
        storage
            .put(
                &ChunkKey::new(pos, ChunkRecordTag::LegacyTerrain).encode(),
                &terrain,
            )
            .expect("put legacy terrain");
        // … plus a Data2D record whose biome id (24) disagrees with it.
        storage
            .put(
                &ChunkKey::new(pos, ChunkRecordTag::Data2D).encode(),
                &test_data2d_bytes(2, 24),
            )
            .expect("put conflicting old data2d");
        let world = BedrockWorld::from_storage_with_format(
            "memory",
            storage,
            OpenOptions::default(),
            WorldFormat::LevelDbLegacyTerrain,
        );

        let (chunks, stats) = world
            .load_render_chunks_with_stats_blocking(
                [pos],
                RenderChunkLoadOptions {
                    request: exact_surface_request(
                        ExactSurfaceSubchunkPolicy::Full,
                        ExactSurfaceBiomeLoad::All,
                        false,
                    ),
                    threading: WorldThreadingOptions::Single,
                    ..RenderChunkLoadOptions::default()
                },
            )
            .expect("load conflicting legacy render chunk");

        // The legacy sample wins for every column; the Data2D id is never
        // used as a fallback.
        let sample = chunks[0]
            .column_sample_at(0, 0)
            .expect("computed column sample");
        assert_eq!(
            sample.biome,
            Some(TerrainColumnBiome::Legacy(LegacyBiomeSample {
                biome_id: 12,
                red: 0x34,
                green: 0xa8,
                blue: 0x53,
            }))
        );
        assert_eq!(stats.legacy_biome_preferred_columns, 256);
        assert_eq!(stats.modern_biome_fallback_columns, 0);
    }
5669
5670 #[test]
5671 fn modern_data2d_biome_remains_available_without_legacy_terrain() {
5672 let storage = Arc::new(MemoryStorage::new());
5673 let pos = ChunkPos {
5674 x: 0,
5675 z: 0,
5676 dimension: Dimension::Overworld,
5677 };
5678 storage
5679 .put(
5680 &ChunkKey::new(pos, ChunkRecordTag::Data2D).encode(),
5681 &test_data2d_bytes(2, 24),
5682 )
5683 .expect("put modern data2d");
5684 storage
5685 .put(
5686 &ChunkKey::subchunk(pos, 0).encode(),
5687 &test_uniform_named_subchunk_bytes("minecraft:grass_block"),
5688 )
5689 .expect("put surface subchunk");
5690 let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());
5691
5692 let (chunks, stats) = world
5693 .load_render_chunks_with_stats_blocking(
5694 [pos],
5695 RenderChunkLoadOptions {
5696 request: exact_surface_request(
5697 ExactSurfaceSubchunkPolicy::Full,
5698 ExactSurfaceBiomeLoad::All,
5699 false,
5700 ),
5701 threading: WorldThreadingOptions::Single,
5702 ..RenderChunkLoadOptions::default()
5703 },
5704 )
5705 .expect("load modern render chunk");
5706
5707 let sample = chunks[0]
5708 .column_sample_at(0, 0)
5709 .expect("computed column sample");
5710 assert_eq!(sample.biome, Some(TerrainColumnBiome::Id(24)));
5711 assert_eq!(stats.legacy_biome_preferred_columns, 0);
5712 assert_eq!(stats.modern_biome_fallback_columns, 0);
5713 }
5714
    #[test]
    fn legacy_terrain_exposes_biome_colors_without_transposing_columns() {
        let storage = Arc::new(MemoryStorage::new());
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        // Distinct id/color pairs at the four chunk corners; arguments are
        // (x, z, biome_id, rgb).
        let mut terrain = test_legacy_terrain_bytes(2, 65);
        write_legacy_biome_sample(&mut terrain, 0, 0, 1, 0x0011_2233);
        write_legacy_biome_sample(&mut terrain, 15, 0, 2, 0x0044_5566);
        write_legacy_biome_sample(&mut terrain, 0, 15, 3, 0x0077_8899);
        write_legacy_biome_sample(&mut terrain, 15, 15, 4, 0x00aa_bbcc);
        storage
            .put(
                &ChunkKey::new(pos, ChunkRecordTag::LegacyTerrain).encode(),
                &terrain,
            )
            .expect("put legacy terrain");
        let world = BedrockWorld::from_storage_with_format(
            "memory",
            storage,
            OpenOptions::default(),
            WorldFormat::LevelDbLegacyTerrain,
        );

        let chunk = world
            .load_render_chunk_blocking(pos, RenderChunkLoadOptions::default())
            .expect("load legacy render chunk");
        let colors = chunk.legacy_biome_colors.expect("legacy biome colors");
        let samples = chunk.legacy_biomes.expect("legacy biome samples");
        // Both grids are indexed [z][x]: the (x = 15, z = 0) write must land
        // at [0][15], not [15][0].
        assert_eq!(colors[0][0], Some(0x0011_2233));
        assert_eq!(colors[0][15], Some(0x0044_5566));
        assert_eq!(colors[15][0], Some(0x0077_8899));
        assert_eq!(colors[15][15], Some(0x00aa_bbcc));
        assert_eq!(samples[0][0].map(|sample| sample.biome_id), Some(1));
        assert_eq!(samples[0][15].map(|sample| sample.biome_id), Some(2));
        assert_eq!(samples[15][0].map(|sample| sample.biome_id), Some(3));
        assert_eq!(samples[15][15].map(|sample| sample.biome_id), Some(4));
        // Point queries take (x, z) and agree with the grids above.
        assert_eq!(
            world
                .get_legacy_biome_color_blocking(pos, 15, 0)
                .expect("legacy biome color"),
            Some(0x0044_5566)
        );
        assert_eq!(
            world
                .get_legacy_biome_sample_blocking(pos, 15, 0)
                .expect("legacy biome sample")
                .map(|sample| (sample.biome_id, sample.rgb_u32())),
            Some((2, 0x0044_5566))
        );
    }
5768
5769 #[test]
5770 fn render_load_keeps_subchunks_when_legacy_terrain_is_also_present() {
5771 let storage = Arc::new(MemoryStorage::new());
5772 let pos = ChunkPos {
5773 x: 0,
5774 z: 0,
5775 dimension: Dimension::Overworld,
5776 };
5777 storage
5778 .put(
5779 &ChunkKey::new(pos, ChunkRecordTag::LegacyTerrain).encode(),
5780 &test_legacy_terrain_bytes(1, 1),
5781 )
5782 .expect("put legacy terrain");
5783 storage
5784 .put(
5785 &ChunkKey::subchunk(pos, 0).encode(),
5786 &test_surface_subchunk_bytes(),
5787 )
5788 .expect("put subchunk");
5789 let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());
5790
5791 let (chunks, stats) = world
5792 .load_render_chunks_with_stats_blocking(
5793 [pos],
5794 RenderChunkLoadOptions {
5795 request: exact_surface_request(
5796 ExactSurfaceSubchunkPolicy::Full,
5797 ExactSurfaceBiomeLoad::TopColumns,
5798 false,
5799 ),
5800 ..RenderChunkLoadOptions::default()
5801 },
5802 )
5803 .expect("load mixed render chunk");
5804
5805 assert_eq!(chunks.len(), 1);
5806 assert!(chunks[0].legacy_terrain.is_some());
5807 assert!(chunks[0].subchunks.contains_key(&0));
5808 assert_eq!(stats.legacy_terrain_records, 1);
5809 assert_eq!(stats.terrain_source_subchunk, 1);
5810 assert_eq!(stats.terrain_source_legacy, 0);
5811 }
5812
    // The raw Data2D height map claims the surface is at y=1, but the stored
    // subchunk is solid grass up to local y=15. Exact-surface sampling must
    // recompute every column from block data and report the disagreement.
    #[test]
    fn exact_surface_column_samples_use_top_block_not_raw_heightmap() {
        let storage = Arc::new(MemoryStorage::new());
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        // Misleading raw height map: every column says height 1.
        storage
            .put(
                &ChunkKey::new(pos, ChunkRecordTag::Data2D).encode(),
                &test_data2d_bytes(1, 3),
            )
            .expect("put misleading raw height");
        // Actual terrain: a subchunk completely filled with grass blocks.
        storage
            .put(
                &ChunkKey::subchunk(pos, 0).encode(),
                &test_uniform_named_subchunk_bytes("minecraft:grass_block"),
            )
            .expect("put surface subchunk");
        let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());

        let (chunks, stats) = world
            .load_render_chunks_with_stats_blocking(
                [pos],
                RenderChunkLoadOptions {
                    request: exact_surface_request(
                        ExactSurfaceSubchunkPolicy::Full,
                        ExactSurfaceBiomeLoad::TopColumns,
                        false,
                    ),
                    ..RenderChunkLoadOptions::default()
                },
            )
            .expect("load exact surface chunk");

        // The computed sample must come from block data (y=15, grass), and all
        // 256 columns must be flagged as disagreeing with the raw height map.
        let sample = chunks[0]
            .column_sample_at(0, 0)
            .expect("computed column sample");
        assert_eq!(sample.surface_y, 15);
        assert_eq!(sample.surface_block_state.name, "minecraft:grass_block");
        assert_eq!(sample.source, TerrainSampleSource::Subchunk);
        assert_eq!(stats.computed_surface_columns, 256);
        assert_eq!(stats.raw_height_mismatch_columns, 256);
    }
5858
    // Thin decorations are classified per block: buttons and vines render as a
    // translucent overlay above the primary surface block, while carpets and
    // snow layers become the surface block themselves (no overlay).
    #[test]
    fn exact_surface_samples_keep_visual_overlay_and_primary_thin_blocks() {
        let storage = Arc::new(MemoryStorage::new());
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        // Grass floor at y=0 everywhere; at y=1 place one decoration per
        // column x=0..=3: button, carpet, snow layer, vine.
        storage
            .put(
                &ChunkKey::subchunk(pos, 0).encode(),
                &test_named_subchunk_bytes_with_values(
                    &[
                        "minecraft:air",
                        "minecraft:grass_block",
                        "minecraft:stone_button",
                        "minecraft:red_carpet",
                        "minecraft:snow_layer",
                        "minecraft:vine",
                    ],
                    |local_x, _, local_y| match (local_x, local_y) {
                        (_, 0) => 1,
                        (0, 1) => 2,
                        (1, 1) => 3,
                        (2, 1) => 4,
                        (3, 1) => 5,
                        _ => 0,
                    },
                ),
            )
            .expect("put overlay subchunk");
        let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());

        let chunk = world
            .load_render_chunk_blocking(pos, RenderChunkLoadOptions::default())
            .expect("load exact surface chunk");
        // Button: grass stays the surface, button becomes the overlay.
        let button = chunk.column_sample_at(0, 0).expect("button column");
        assert_eq!(button.surface_y, 0);
        assert_eq!(button.surface_block_state.name, "minecraft:grass_block");
        assert_eq!(
            button
                .overlay
                .as_ref()
                .map(|overlay| overlay.block_state.name.as_str()),
            Some("minecraft:stone_button")
        );
        // Carpet and snow layer replace the surface sample outright.
        let carpet = chunk.column_sample_at(1, 0).expect("carpet column");
        assert_eq!(carpet.surface_y, 1);
        assert_eq!(carpet.surface_block_state.name, "minecraft:red_carpet");
        assert!(carpet.overlay.is_none());
        let snow = chunk.column_sample_at(2, 0).expect("snow column");
        assert_eq!(snow.surface_y, 1);
        assert_eq!(snow.surface_block_state.name, "minecraft:snow_layer");
        assert!(snow.overlay.is_none());
        // Vine: overlay like the button.
        let vine = chunk.column_sample_at(3, 0).expect("vine column");
        assert_eq!(vine.surface_y, 0);
        assert_eq!(
            vine.overlay
                .as_ref()
                .map(|overlay| overlay.block_state.name.as_str()),
            Some("minecraft:vine")
        );
    }
5922
    // A roof stored only in a subchunk's secondary block storage (subchunk 10,
    // local y=15 -> world y=175) must win over both a low ground subchunk and
    // a raw height map that says the surface is at y=0.
    #[test]
    fn exact_surface_samples_high_roof_from_secondary_storage() {
        let storage = Arc::new(MemoryStorage::new());
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        // Raw height map: every column claims height 0.
        storage
            .put(&ChunkKey::new(pos, ChunkRecordTag::Data2D).encode(), &{
                let mut bytes = Vec::with_capacity(768);
                for _ in 0..256 {
                    bytes.extend_from_slice(&0_i16.to_le_bytes());
                }
                bytes.extend(std::iter::repeat_n(1_u8, 256));
                bytes
            })
            .expect("put low raw height map");
        // Ground: a stone layer at local y=0 of subchunk 0.
        storage
            .put(
                &ChunkKey::subchunk(pos, 0).encode(),
                &test_named_subchunk_bytes_with_values(
                    &["minecraft:air", "minecraft:stone"],
                    |_, _, local_y| u16::from(local_y == 0),
                ),
            )
            .expect("put low ground subchunk");
        // Roof: copper only in the SECOND (secondary) storage of subchunk 10.
        storage
            .put(
                &ChunkKey::subchunk(pos, 10).encode(),
                &test_named_layered_subchunk_bytes(
                    &["minecraft:air"],
                    &["minecraft:air", "minecraft:copper_block"],
                    |_, _, _| 0,
                    |_, _, local_y| u16::from(local_y == 15),
                ),
            )
            .expect("put high secondary-storage roof");
        let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());

        let chunk = world
            .load_render_chunk_blocking(pos, RenderChunkLoadOptions::default())
            .expect("load exact surface chunk");
        let sample = chunk.column_sample_at(0, 0).expect("roof column");

        // Sample reflects the high roof, while the raw map keeps its value.
        assert_eq!(sample.surface_y, 175);
        assert_eq!(sample.surface_block_state.name, "minecraft:copper_block");
        assert_eq!(sample.source, TerrainSampleSource::Subchunk);
        assert_eq!(
            chunk.height_map.as_ref().expect("raw height map")[0][0],
            Some(0)
        );
    }
5976
    // Water and overlay blocks stored in the secondary block storage must be
    // merged into the column samples: water on top of sand yields a water
    // surface with underwater context; a button above grass yields an overlay.
    #[test]
    fn exact_surface_samples_process_secondary_storage_water_and_overlay() {
        let storage = Arc::new(MemoryStorage::new());
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        // Column x=0: sand (layer 0) + water (layer 1) at y=0.
        // Column x=1: grass (layer 0) + button (layer 1) at y=1.
        storage
            .put(
                &ChunkKey::subchunk(pos, 0).encode(),
                &test_named_layered_subchunk_bytes(
                    &["minecraft:air", "minecraft:sand", "minecraft:grass_block"],
                    &["minecraft:air", "minecraft:water", "minecraft:stone_button"],
                    |local_x, _, local_y| match (local_x, local_y) {
                        (0, 0) => 1,
                        (1, 1) => 2,
                        _ => 0,
                    },
                    |local_x, _, local_y| match (local_x, local_y) {
                        (0, 0) => 1,
                        (1, 1) => 2,
                        _ => 0,
                    },
                ),
            )
            .expect("put layered water and overlay");
        let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());

        let chunk = world
            .load_render_chunk_blocking(pos, RenderChunkLoadOptions::default())
            .expect("load exact surface chunk");
        let water = chunk.column_sample_at(0, 0).expect("water column");
        assert_eq!(water.surface_y, 0);
        assert_eq!(water.surface_block_state.name, "minecraft:water");
        assert_eq!(water.relief_y, 0);
        assert_eq!(water.relief_block_state.name, "minecraft:sand");
        assert_eq!(
            water.water.as_ref().and_then(|water| water.underwater_y),
            Some(0)
        );
        let overlay = chunk.column_sample_at(1, 0).expect("overlay column");
        assert_eq!(overlay.surface_y, 1);
        assert_eq!(overlay.surface_block_state.name, "minecraft:grass_block");
        assert_eq!(
            overlay
                .overlay
                .as_ref()
                .map(|overlay| overlay.block_state.name.as_str()),
            Some("minecraft:stone_button")
        );
    }
6029
    // Two blocks of water over sand: the surface sample is water at y=2, but
    // the relief/underwater context must still point at the sand floor (y=0)
    // with the correct water depth.
    #[test]
    fn exact_surface_samples_keep_transparent_water_relief_context() {
        let storage = Arc::new(MemoryStorage::new());
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        // Sand at y=0, water at y=1 and y=2, air above.
        storage
            .put(
                &ChunkKey::subchunk(pos, 0).encode(),
                &test_named_subchunk_bytes_with_values(
                    &["minecraft:air", "minecraft:sand", "minecraft:water"],
                    |_, _, local_y| match local_y {
                        0 => 1,
                        1 | 2 => 2,
                        _ => 0,
                    },
                ),
            )
            .expect("put water subchunk");
        let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());

        let chunk = world
            .load_render_chunk_blocking(pos, RenderChunkLoadOptions::default())
            .expect("load exact surface chunk");
        let sample = chunk.column_sample_at(0, 0).expect("water column");
        let water = sample.water.as_ref().expect("water context");
        assert_eq!(sample.surface_y, 2);
        assert_eq!(sample.surface_block_state.name, "minecraft:water");
        assert_eq!(sample.relief_y, 0);
        assert_eq!(sample.relief_block_state.name, "minecraft:sand");
        assert_eq!(water.depth, 2);
        assert_eq!(water.underwater_y, Some(0));
        assert_eq!(
            water
                .underwater_block_state
                .as_ref()
                .map(|state| state.name.as_str()),
            Some("minecraft:sand")
        );
    }
6072
    // Legacy subchunks store blocks in XZY order; the fixture places a unique
    // block id in each XZ quadrant at y=10, so any axis transposition during
    // the exact render load would change the ids read back.
    #[test]
    fn render_chunk_exact_load_preserves_legacy_subchunk_xzy_coordinates() {
        let storage = Arc::new(MemoryStorage::new());
        let pos = ChunkPos {
            x: 0,
            z: 0,
            dimension: Dimension::Overworld,
        };
        storage
            .put(
                &ChunkKey::subchunk(pos, 0).encode(),
                &test_asymmetric_legacy_subchunk_bytes(),
            )
            .expect("put legacy subchunk");
        let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());

        let chunk = world
            .load_render_chunk_blocking(
                pos,
                RenderChunkLoadOptions {
                    request: RenderChunkRequest::Layer { y: 10 },
                    ..RenderChunkLoadOptions::default()
                },
            )
            .expect("load legacy subchunk render chunk");
        let subchunk = chunk.subchunks.get(&0).expect("loaded legacy subchunk");

        // One corner per quadrant; ids must match the fixture's quadrant map.
        assert_eq!(subchunk.legacy_block_id_at(0, 10, 0), Some(1));
        assert_eq!(subchunk.legacy_block_id_at(15, 10, 0), Some(12));
        assert_eq!(subchunk.legacy_block_id_at(0, 10, 15), Some(24));
        assert_eq!(subchunk.legacy_block_id_at(15, 10, 15), Some(45));
    }
6105
    // A shuffled (and partially duplicated) batch request, loaded with a
    // thread pool and a distance priority, must still return each chunk bound
    // to its own record: every chunk carries its position's signature block.
    #[test]
    #[allow(clippy::similar_names)]
    fn render_chunk_exact_batch_keeps_shuffled_positions_bound_to_records() {
        let storage = Arc::new(MemoryStorage::new());
        // Each position gets a uniquely named "signature" block so a swapped
        // record/position pairing is detectable.
        let fixtures = [
            (
                ChunkPos {
                    x: -3,
                    z: 1,
                    dimension: Dimension::Overworld,
                },
                "minecraft:signature_a",
            ),
            (
                ChunkPos {
                    x: 2,
                    z: -4,
                    dimension: Dimension::Overworld,
                },
                "minecraft:signature_b",
            ),
            (
                ChunkPos {
                    x: 0,
                    z: 0,
                    dimension: Dimension::Overworld,
                },
                "minecraft:signature_c",
            ),
        ];
        for (pos, block_name) in fixtures.iter().copied() {
            storage
                .put(
                    &ChunkKey::subchunk(pos, 4).encode(),
                    &test_uniform_named_subchunk_bytes(block_name),
                )
                .expect("put named subchunk");
        }
        let world = BedrockWorld::from_storage("memory", storage, OpenOptions::default());

        // Request out of order, with fixtures[1] appearing twice.
        let (chunks, stats) = world
            .load_render_chunks_with_stats_blocking(
                vec![fixtures[1].0, fixtures[0].0, fixtures[2].0, fixtures[1].0],
                RenderChunkLoadOptions {
                    request: RenderChunkRequest::Layer { y: 64 },
                    threading: WorldThreadingOptions::Fixed(4),
                    priority: RenderChunkPriority::DistanceFrom {
                        chunk_x: 0,
                        chunk_z: 0,
                    },
                    ..RenderChunkLoadOptions::default()
                },
            )
            .expect("load shuffled render chunks");

        // Layer requests should use exact key gets, never prefix scans.
        assert_eq!(chunks.len(), 4);
        assert_eq!(stats.prefix_scans, 0);
        assert!(stats.exact_get_batches > 0);
        for chunk in chunks {
            let expected = fixtures
                .iter()
                .find_map(|(pos, block_name)| (*pos == chunk.pos).then_some(*block_name))
                .expect("known chunk position");
            let subchunk = chunk.subchunks.get(&4).expect("loaded subchunk");
            let state = subchunk
                .block_state_at(0, 0, 0)
                .expect("decoded signature block");
            assert_eq!(state.name, expected, "chunk {:?}", chunk.pos);
        }
    }
6176
    // Builds a single-storage palettized subchunk: sand at local y=0 and water
    // at local y=1 in every column, air everywhere else.
    fn test_surface_subchunk_bytes() -> Vec<u8> {
        let palette = ["minecraft:air", "minecraft:sand", "minecraft:water"];
        // Header: subchunk format version 8, one block storage, then the
        // storage header byte carrying bits-per-value (2) shifted left by one.
        let mut bytes = vec![8, 1, 2 << 1];
        // 2 bits per value -> 16 values per u32 word -> 256 words for 4096 blocks.
        let values_per_word = 16_usize;
        let mut words = vec![0_u32; 256];
        for local_z in 0..16_u8 {
            for local_x in 0..16_u8 {
                // Palette index 1 (sand) at y=0, index 2 (water) at y=1.
                for (local_y, value) in [(0_u8, 1_u32), (1, 2)] {
                    let block_index = block_storage_index(local_x, local_y, local_z);
                    let word_index = block_index / values_per_word;
                    let bit_offset = (block_index % values_per_word) * 2;
                    words[word_index] |= value << bit_offset;
                }
            }
        }
        for word in words {
            bytes.extend_from_slice(&word.to_le_bytes());
        }
        // Palette: entry count (i32 LE) followed by one NBT compound per entry.
        bytes.extend_from_slice(&(palette.len() as i32).to_le_bytes());
        for name in palette {
            let tag = NbtTag::Compound(IndexMap::from([
                ("name".to_string(), NbtTag::String(name.to_string())),
                ("states".to_string(), NbtTag::Compound(IndexMap::new())),
                ("version".to_string(), NbtTag::Int(1)),
            ]));
            bytes.extend_from_slice(&crate::nbt::serialize_root_nbt(&tag).expect("nbt"));
        }
        bytes
    }
6206
6207 fn test_uniform_named_subchunk_bytes(block_name: &str) -> Vec<u8> {
6208 let palette = ["minecraft:air", block_name];
6209 let mut bytes = vec![8, 1, 1 << 1];
6210 let mut words = vec![0_u32; 128];
6211 for local_z in 0..16_u8 {
6212 for local_x in 0..16_u8 {
6213 for local_y in 0..16_u8 {
6214 let block_index = block_storage_index(local_x, local_y, local_z);
6215 let word_index = block_index / 32;
6216 let bit_offset = block_index % 32;
6217 words[word_index] |= 1_u32 << bit_offset;
6218 }
6219 }
6220 }
6221 for word in words {
6222 bytes.extend_from_slice(&word.to_le_bytes());
6223 }
6224 bytes.extend_from_slice(&(palette.len() as i32).to_le_bytes());
6225 for name in palette {
6226 let tag = NbtTag::Compound(IndexMap::from([
6227 ("name".to_string(), NbtTag::String(name.to_string())),
6228 ("states".to_string(), NbtTag::Compound(IndexMap::new())),
6229 ("version".to_string(), NbtTag::Int(1)),
6230 ]));
6231 bytes.extend_from_slice(&crate::nbt::serialize_root_nbt(&tag).expect("nbt"));
6232 }
6233 bytes
6234 }
6235
6236 fn test_named_subchunk_bytes_with_values(
6237 palette: &[&str],
6238 value_at: impl Fn(u8, u8, u8) -> u16,
6239 ) -> Vec<u8> {
6240 let bits_per_value = match palette.len() {
6241 0..=2 => 1_u8,
6242 3..=4 => 2_u8,
6243 5..=16 => 4_u8,
6244 _ => 8_u8,
6245 };
6246 let values_per_word = usize::from(32 / bits_per_value);
6247 let word_count = 4096_usize.div_ceil(values_per_word);
6248 let mut bytes = vec![8, 1, bits_per_value << 1];
6249 let mut words = vec![0_u32; word_count];
6250 for local_z in 0..16_u8 {
6251 for local_x in 0..16_u8 {
6252 for local_y in 0..16_u8 {
6253 let value = value_at(local_x, local_z, local_y);
6254 if value == 0 {
6255 continue;
6256 }
6257 let block_index = block_storage_index(local_x, local_y, local_z);
6258 let word_index = block_index / values_per_word;
6259 let bit_offset = (block_index % values_per_word) * usize::from(bits_per_value);
6260 words[word_index] |= u32::from(value) << bit_offset;
6261 }
6262 }
6263 }
6264 for word in words {
6265 bytes.extend_from_slice(&word.to_le_bytes());
6266 }
6267 bytes.extend_from_slice(&(palette.len() as i32).to_le_bytes());
6268 for name in palette {
6269 let tag = NbtTag::Compound(IndexMap::from([
6270 ("name".to_string(), NbtTag::String((*name).to_string())),
6271 ("states".to_string(), NbtTag::Compound(IndexMap::new())),
6272 ("version".to_string(), NbtTag::Int(1)),
6273 ]));
6274 bytes.extend_from_slice(&crate::nbt::serialize_root_nbt(&tag).expect("nbt"));
6275 }
6276 bytes
6277 }
6278
6279 fn test_named_layered_subchunk_bytes(
6280 lower_palette: &[&str],
6281 upper_palette: &[&str],
6282 lower_value_at: impl Fn(u8, u8, u8) -> u16,
6283 upper_value_at: impl Fn(u8, u8, u8) -> u16,
6284 ) -> Vec<u8> {
6285 let mut bytes = vec![8, 2];
6286 append_named_palette_storage(&mut bytes, lower_palette, lower_value_at);
6287 append_named_palette_storage(&mut bytes, upper_palette, upper_value_at);
6288 bytes
6289 }
6290
    // Appends one palette-indexed block storage to `bytes`: a header byte
    // (bits-per-value shifted left by one), the packed value words, the
    // palette entry count (i32 LE), and one serialized NBT compound per
    // palette entry.
    fn append_named_palette_storage(
        bytes: &mut Vec<u8>,
        palette: &[&str],
        value_at: impl Fn(u8, u8, u8) -> u16,
    ) {
        // Smallest packing from the set used by these fixtures that can index
        // every palette entry.
        let bits_per_value = match palette.len() {
            0..=2 => 1_u8,
            3..=4 => 2_u8,
            5..=16 => 4_u8,
            _ => 8_u8,
        };
        let values_per_word = usize::from(32 / bits_per_value);
        let word_count = 4096_usize.div_ceil(values_per_word);
        let mut words = vec![0_u32; word_count];
        for local_z in 0..16_u8 {
            for local_x in 0..16_u8 {
                for local_y in 0..16_u8 {
                    // Note: `value_at` takes (x, z, y); storage indexing takes
                    // (x, y, z).
                    let value = value_at(local_x, local_z, local_y);
                    // Words start zeroed, so palette index 0 needs no write.
                    if value == 0 {
                        continue;
                    }
                    let block_index = block_storage_index(local_x, local_y, local_z);
                    let word_index = block_index / values_per_word;
                    let bit_offset = (block_index % values_per_word) * usize::from(bits_per_value);
                    words[word_index] |= u32::from(value) << bit_offset;
                }
            }
        }
        bytes.push(bits_per_value << 1);
        for word in words {
            bytes.extend_from_slice(&word.to_le_bytes());
        }
        bytes.extend_from_slice(&(palette.len() as i32).to_le_bytes());
        for name in palette {
            let tag = NbtTag::Compound(IndexMap::from([
                ("name".to_string(), NbtTag::String((*name).to_string())),
                ("states".to_string(), NbtTag::Compound(IndexMap::new())),
                ("version".to_string(), NbtTag::Int(1)),
            ]));
            bytes.extend_from_slice(&crate::nbt::serialize_root_nbt(&tag).expect("nbt"));
        }
    }
6333
    // Builds a legacy subchunk whose block id depends on the XZ quadrant
    // (1 / 12 / 24 / 45) at local y=10 only, so coordinate-order mistakes in
    // the reader change the observed values.
    fn test_asymmetric_legacy_subchunk_bytes() -> Vec<u8> {
        let mut bytes = vec![0_u8; crate::LEGACY_SUBCHUNK_WITH_LIGHT_VALUE_LEN];
        // First byte is the record's leading tag (block ids start at offset 1);
        // presumably the legacy subchunk version — confirm against the parser.
        bytes[0] = 2;
        for local_z in 0..16_u8 {
            for local_x in 0..16_u8 {
                let block_id = match (local_x >= 8, local_z >= 8) {
                    (false, false) => 1,
                    (true, false) => 12,
                    (false, true) => 24,
                    (true, true) => 45,
                };
                let index = crate::LegacySubChunk::block_index(local_x, 10, local_z)
                    .expect("legacy subchunk index");
                bytes[1 + index] = block_id;
            }
        }
        bytes
    }
6352
6353 fn test_data2d_bytes(height: i16, biome: u8) -> Vec<u8> {
6354 let mut bytes = Vec::with_capacity(768);
6355 for _ in 0..256 {
6356 bytes.extend_from_slice(&height.to_le_bytes());
6357 }
6358 bytes.extend(std::iter::repeat_n(biome, 256));
6359 bytes
6360 }
6361
6362 fn test_data3d_height_bytes(height: i16) -> Vec<u8> {
6363 let mut bytes = Vec::with_capacity(512);
6364 for _ in 0..256 {
6365 bytes.extend_from_slice(&height.to_le_bytes());
6366 }
6367 bytes
6368 }
6369
6370 fn test_asymmetric_data2d_bytes() -> Vec<u8> {
6371 let mut bytes = Vec::with_capacity(768);
6372 for local_z in 0..16_i16 {
6373 for local_x in 0..16_i16 {
6374 let height = 100 + local_x * 10 + local_z;
6375 bytes.extend_from_slice(&height.to_le_bytes());
6376 }
6377 }
6378 for local_z in 0..16_u8 {
6379 for local_x in 0..16_u8 {
6380 bytes.push(local_x * 10 + local_z);
6381 }
6382 }
6383 bytes
6384 }
6385
    // Builds a legacy full-chunk terrain record: every column filled with
    // `block_id` from y=0 up to `height` (capped at the 128-block legacy
    // ceiling), plus a matching per-column height map.
    fn test_legacy_terrain_bytes(block_id: u8, height: u8) -> Vec<u8> {
        let mut bytes = vec![0_u8; crate::LEGACY_TERRAIN_VALUE_LEN];
        for local_z in 0..16_u8 {
            for local_x in 0..16_u8 {
                for local_y in 0..=height.min(127) {
                    let index = crate::LegacyTerrain::block_index(local_x, local_y, local_z)
                        .expect("legacy block index");
                    bytes[index] = block_id;
                }
                // Height map sits after the block ids plus three half-byte-per-
                // block arrays (BLOCK_COUNT / 2 * 3); presumably block data,
                // skylight, and blocklight — confirm against the parser.
                bytes[crate::LEGACY_TERRAIN_BLOCK_COUNT
                    + crate::LEGACY_TERRAIN_BLOCK_COUNT / 2 * 3
                    + raw_2d_column_index(local_x, local_z)] = height;
            }
        }
        bytes
    }
6402
    // Writes one legacy per-column biome sample: a biome id byte plus the low
    // three bytes of `color` (high-to-low), 4 bytes per column, located after
    // the block data, the three nibble arrays, and the 16x16 height map.
    fn write_legacy_biome_sample(
        bytes: &mut [u8],
        local_x: u8,
        local_z: u8,
        biome_id: u8,
        color: u32,
    ) {
        let offset = crate::LEGACY_TERRAIN_BLOCK_COUNT
            + crate::LEGACY_TERRAIN_BLOCK_COUNT / 2 * 3
            + 16 * 16
            + raw_2d_column_index(local_x, local_z) * 4;
        bytes[offset] = biome_id;
        // Color bytes are stored most-significant first (bits 23..16, 15..8, 7..0).
        bytes[offset + 1] = ((color >> 16) & 0xff) as u8;
        bytes[offset + 2] = ((color >> 8) & 0xff) as u8;
        bytes[offset + 3] = (color & 0xff) as u8;
    }
6419
6420 fn raw_2d_column_index(local_x: u8, local_z: u8) -> usize {
6421 usize::from(local_z) * 16 + usize::from(local_x)
6422 }
6423}
6424
6425fn validate_local_column(local_x: u8, local_z: u8) -> Result<()> {
6426 if local_x >= 16 || local_z >= 16 {
6427 return Err(BedrockWorldError::Validation(format!(
6428 "local biome coordinates must be 0..15, got x={local_x}, z={local_z}"
6429 )));
6430 }
6431 Ok(())
6432}
6433
/// Collects the subchunk indices needed to resolve exact surface columns: for
/// every raw height-map entry, the subchunk containing that height plus a
/// fixed window of subchunks below (6) and above (4), clamped to the world's
/// valid subchunk range. Without a height map the set is left untouched.
fn insert_needed_surface_subchunks(
    subchunk_ys: &mut BTreeSet<i8>,
    height_map: Option<&[[Option<i16>; 16]; 16]>,
    min_subchunk_y: i8,
    max_subchunk_y: i8,
) {
    // Window sizes are in subchunks (16 blocks each).
    const SURFACE_LOOKDOWN_SUBCHUNKS: i8 = 6;
    const SURFACE_LOOKUP_SUBCHUNKS: i8 = 4;
    let Some(height_map) = height_map else {
        return;
    };
    for row in height_map {
        // `flatten` skips columns with no recorded height.
        for height in row.iter().flatten() {
            if let Ok(surface_y) = block_y_to_subchunk_y(i32::from(*height)) {
                // Lower bound only gets a floor; if it ends up above the
                // clamped upper bound, the range below is simply empty.
                let lower_y = surface_y
                    .saturating_sub(SURFACE_LOOKDOWN_SUBCHUNKS)
                    .max(min_subchunk_y);
                let upper_y = surface_y
                    .saturating_add(SURFACE_LOOKUP_SUBCHUNKS)
                    .clamp(min_subchunk_y, max_subchunk_y);
                for subchunk_y in lower_y..=upper_y {
                    subchunk_ys.insert(subchunk_y);
                }
            }
        }
    }
}
6461
6462fn block_y_to_subchunk_y(y: i32) -> Result<i8> {
6463 let subchunk_y = y.div_euclid(16);
6464 i8::try_from(subchunk_y).map_err(|_| {
6465 BedrockWorldError::Validation(format!(
6466 "block y={y} cannot be represented as a Bedrock subchunk index"
6467 ))
6468 })
6469}
6470
6471fn biome_storage_contains_y(storage: &ParsedBiomeStorage, y: i32) -> bool {
6472 storage
6473 .y
6474 .is_none_or(|start_y| (start_y..start_y + 16).contains(&y))
6475}
6476
/// Floors `y` to the nearest multiple of 16 (correct for negative y as well).
fn biome_storage_bucket_y(y: i32) -> i32 {
    y - y.rem_euclid(16)
}
6480
6481fn biome_id_from_storage(
6482 storage: &ParsedBiomeStorage,
6483 local_x: u8,
6484 local_z: u8,
6485 y: i32,
6486) -> Option<u32> {
6487 let local_y = if let Some(start_y) = storage.y {
6488 u8::try_from(y - start_y).ok()?
6489 } else {
6490 0
6491 };
6492 storage.biome_id_at(local_x, local_y, local_z)
6493}
6494
/// Row-major index into the 16x16 height map: z selects the row, x the column.
fn height_map_index(local_x: u8, local_z: u8) -> usize {
    usize::from(local_x) + usize::from(local_z) * 16
}
6498
/// Row-major index of a chunk column, or `None` when either coordinate falls
/// outside the 16x16 grid.
fn column_index(local_x: u8, local_z: u8) -> Option<usize> {
    if local_x < 16 && local_z < 16 {
        Some(usize::from(local_z) * 16 + usize::from(local_x))
    } else {
        None
    }
}
6502
/// Looks up the raw height-map value for one column.
///
/// Returns `None` when the map is absent, the column has no recorded height,
/// or either coordinate falls outside the 16x16 grid. The previous direct
/// indexing (`[usize::from(local_z)][usize::from(local_x)]`) panicked for
/// coordinates >= 16; checked `get` keeps the same result for valid inputs
/// while removing that panic path.
fn raw_height_at(
    height_map: Option<&[[Option<i16>; 16]; 16]>,
    local_x: u8,
    local_z: u8,
) -> Option<i16> {
    *height_map?
        .get(usize::from(local_z))?
        .get(usize::from(local_x))?
}
6510
6511fn raw_height_mismatch_columns(chunk: &RenderChunkData) -> usize {
6512 let Some(samples) = chunk.column_samples.as_ref() else {
6513 return 0;
6514 };
6515 let Some(height_map) = chunk.height_map.as_ref() else {
6516 return 0;
6517 };
6518 let mut mismatches = 0usize;
6519 for local_z in 0..16_u8 {
6520 for local_x in 0..16_u8 {
6521 if let Some(sample) = samples.get(local_x, local_z) {
6522 if height_map[usize::from(local_z)][usize::from(local_x)]
6523 .is_some_and(|raw_height| raw_height != sample.surface_y)
6524 {
6525 mismatches = mismatches.saturating_add(1);
6526 }
6527 }
6528 }
6529 }
6530 mismatches
6531}
6532
6533fn missing_surface_columns(chunk: &RenderChunkData) -> usize {
6534 chunk.column_samples.as_ref().map_or(0, |samples| {
6535 256usize.saturating_sub(samples.sampled_columns())
6536 })
6537}
6538
/// Decides whether an exact-surface chunk loaded from a partial subchunk set
/// must be reloaded with all subchunks present.
///
/// A full reload is required when: not every one of the 256 columns produced
/// a sample, any column disagrees with the raw height map, no subchunk was
/// loaded at all, or a surface/overlay sample sits in the topmost loaded
/// subchunk while higher subchunks may still exist in the world (so the real
/// surface could lie above what was loaded).
///
/// # Errors
/// Propagates the validation error from [`block_y_to_subchunk_y`] when a
/// sample's y cannot be expressed as a subchunk index.
fn needed_exact_surface_chunk_requires_full_reload(chunk: &RenderChunkData) -> Result<bool> {
    let Some(samples) = chunk.column_samples.as_ref() else {
        // No computed samples: nothing to double-check.
        return Ok(false);
    };
    if samples.sampled_columns() < 16 * 16 {
        return Ok(true);
    }
    if raw_height_mismatch_columns(chunk) > 0 {
        return Ok(true);
    }
    // Highest loaded subchunk index (BTreeMap keys are sorted ascending).
    let Some(loaded_max_subchunk_y) = chunk.subchunks.keys().next_back().copied() else {
        return Ok(true);
    };
    let (_, world_max_subchunk_y) = chunk.pos.subchunk_index_range(chunk.version);
    if loaded_max_subchunk_y >= world_max_subchunk_y {
        // Everything up to the world ceiling is loaded; samples are final.
        return Ok(false);
    }
    for sample in samples.iter() {
        if block_y_to_subchunk_y(i32::from(sample.surface_y))? == loaded_max_subchunk_y {
            return Ok(true);
        }
        if let Some(overlay) = sample.overlay.as_ref() {
            if block_y_to_subchunk_y(i32::from(overlay.y))? == loaded_max_subchunk_y {
                return Ok(true);
            }
        }
    }
    Ok(false)
}
6568
6569fn legacy_world_block_state(id: u8, data: u8) -> BlockState {
6570 let mut states = BTreeMap::new();
6571 states.insert("data".to_string(), NbtTag::Byte(data as i8));
6572 BlockState {
6573 name: legacy_world_block_name(id, data),
6574 states,
6575 version: None,
6576 }
6577}
6578
/// Maps a legacy numeric block id (plus its 4-bit data value, used by a few
/// ids to pick a variant) to a modern namespaced block name. Ids without a
/// mapping fall back to `legacy:{id}`.
#[allow(clippy::too_many_lines)]
fn legacy_world_block_name(id: u8, data: u8) -> String {
    let name = match id {
        0 => "minecraft:air",
        // Stone variants are selected by the low three data bits.
        1 => match data & 0x7 {
            1 => "minecraft:granite",
            2 => "minecraft:polished_granite",
            3 => "minecraft:diorite",
            4 => "minecraft:polished_diorite",
            5 => "minecraft:andesite",
            6 => "minecraft:polished_andesite",
            _ => "minecraft:stone",
        },
        2 => "minecraft:grass_block",
        3 => match data & 0x3 {
            1 => "minecraft:coarse_dirt",
            2 => "minecraft:podzol",
            _ => "minecraft:dirt",
        },
        4 => "minecraft:cobblestone",
        5 => legacy_world_wood_name(data, "planks"),
        6 => "minecraft:oak_sapling",
        7 => "minecraft:bedrock",
        // Flowing and still fluids collapse to a single modern block.
        8 | 9 => "minecraft:water",
        10 | 11 => "minecraft:lava",
        12 => match data & 0x1 {
            1 => "minecraft:red_sand",
            _ => "minecraft:sand",
        },
        13 => "minecraft:gravel",
        14 => "minecraft:gold_ore",
        15 => "minecraft:iron_ore",
        16 => "minecraft:coal_ore",
        17 => legacy_world_wood_name(data, "log"),
        18 => legacy_world_wood_name(data, "leaves"),
        19 => "minecraft:sponge",
        20 => "minecraft:glass",
        21 => "minecraft:lapis_ore",
        22 => "minecraft:lapis_block",
        24 => "minecraft:sandstone",
        26 => "minecraft:bed",
        30 => "minecraft:cobweb",
        31 => match data {
            1 => "minecraft:short_grass",
            2 => "minecraft:fern",
            _ => "minecraft:dead_bush",
        },
        32 => "minecraft:dead_bush",
        35 => legacy_world_wool_name(data),
        37 => "minecraft:dandelion",
        38 => "minecraft:poppy",
        39 => "minecraft:brown_mushroom",
        40 => "minecraft:red_mushroom",
        41 => "minecraft:gold_block",
        42 => "minecraft:iron_block",
        43 | 44 => "minecraft:stone_slab",
        45 => "minecraft:bricks",
        46 => "minecraft:tnt",
        47 => "minecraft:bookshelf",
        48 => "minecraft:mossy_cobblestone",
        49 => "minecraft:obsidian",
        50 => "minecraft:torch",
        51 => "minecraft:fire",
        52 => "minecraft:spawner",
        53 => "minecraft:oak_stairs",
        54 => "minecraft:chest",
        56 => "minecraft:diamond_ore",
        57 => "minecraft:diamond_block",
        58 => "minecraft:crafting_table",
        59 => "minecraft:wheat",
        60 => "minecraft:farmland",
        61 | 62 => "minecraft:furnace",
        63 | 68 => "minecraft:oak_sign",
        64 => "minecraft:oak_door",
        65 => "minecraft:ladder",
        66 => "minecraft:rail",
        67 => "minecraft:cobblestone_stairs",
        71 => "minecraft:iron_door",
        73 | 74 => "minecraft:redstone_ore",
        78 => "minecraft:snow",
        79 => "minecraft:ice",
        80 => "minecraft:snow_block",
        81 => "minecraft:cactus",
        82 => "minecraft:clay",
        83 => "minecraft:sugar_cane",
        85 => "minecraft:oak_fence",
        86 => "minecraft:pumpkin",
        87 => "minecraft:netherrack",
        88 => "minecraft:soul_sand",
        89 => "minecraft:glowstone",
        91 => "minecraft:jack_o_lantern",
        95 => "minecraft:invisible_bedrock",
        98 => "minecraft:stone_bricks",
        99 | 100 => "minecraft:mushroom_stem",
        103 => "minecraft:melon",
        106 => "minecraft:vine",
        107 => "minecraft:oak_fence_gate",
        108 => "minecraft:brick_stairs",
        109 => "minecraft:stone_brick_stairs",
        110 => "minecraft:mycelium",
        111 => "minecraft:lily_pad",
        112 => "minecraft:nether_bricks",
        121 => "minecraft:end_stone",
        129 => "minecraft:emerald_ore",
        133 => "minecraft:emerald_block",
        155 => "minecraft:quartz_block",
        159 | 172 => "minecraft:terracotta",
        // Ids 161/162 continue the wood species table at offset 4
        // (acacia / dark oak), hence the +4 on the data value.
        161 => legacy_world_wood_name(data.saturating_add(4), "leaves"),
        162 => legacy_world_wood_name(data.saturating_add(4), "log"),
        169 => "minecraft:sea_lantern",
        170 => "minecraft:hay_block",
        171 => "minecraft:white_carpet",
        173 => "minecraft:coal_block",
        174 => "minecraft:packed_ice",
        175 => "minecraft:sunflower",
        _ => return format!("legacy:{id}"),
    };
    name.to_string()
}
6698
/// Resolves a legacy wood-family block name. The low three data bits select
/// the species; unknown species fall back to oak, and any suffix other than
/// `"planks"` or `"log"` is treated as leaves.
fn legacy_world_wood_name(data: u8, suffix: &'static str) -> &'static str {
    let species = data & 0x7;
    match suffix {
        "planks" => match species {
            1 => "minecraft:spruce_planks",
            2 => "minecraft:birch_planks",
            3 => "minecraft:jungle_planks",
            4 => "minecraft:acacia_planks",
            5 => "minecraft:dark_oak_planks",
            _ => "minecraft:oak_planks",
        },
        "log" => match species {
            1 => "minecraft:spruce_log",
            2 => "minecraft:birch_log",
            3 => "minecraft:jungle_log",
            4 => "minecraft:acacia_log",
            5 => "minecraft:dark_oak_log",
            _ => "minecraft:oak_log",
        },
        _ => match species {
            1 => "minecraft:spruce_leaves",
            2 => "minecraft:birch_leaves",
            3 => "minecraft:jungle_leaves",
            4 => "minecraft:acacia_leaves",
            5 => "minecraft:dark_oak_leaves",
            _ => "minecraft:oak_leaves",
        },
    }
}
6721
/// Resolves a legacy wool block name: the low nibble is the classic 16-color
/// wool index, with 0 (and only 0, after masking) mapping to white.
fn legacy_world_wool_name(data: u8) -> &'static str {
    const WOOL_NAMES: [&str; 16] = [
        "minecraft:white_wool",
        "minecraft:orange_wool",
        "minecraft:magenta_wool",
        "minecraft:light_blue_wool",
        "minecraft:yellow_wool",
        "minecraft:lime_wool",
        "minecraft:pink_wool",
        "minecraft:gray_wool",
        "minecraft:light_gray_wool",
        "minecraft:cyan_wool",
        "minecraft:purple_wool",
        "minecraft:blue_wool",
        "minecraft:brown_wool",
        "minecraft:green_wool",
        "minecraft:red_wool",
        "minecraft:black_wool",
    ];
    WOOL_NAMES[usize::from(data & 0x0f)]
}
6742
/// Whether a block name counts as air for surface scanning. The air variants
/// match with or without the `minecraft:` namespace; structure voids and the
/// light technical blocks count only in their namespaced form.
fn is_air_block_name(name: &str) -> bool {
    if matches!(name, "air" | "cave_air" | "void_air") {
        return true;
    }
    matches!(
        name.strip_prefix("minecraft:"),
        Some("air" | "cave_air" | "void_air" | "structure_void" | "light_block" | "light")
    )
}
6757
/// Whether a block name is still or flowing water, with or without the
/// `minecraft:` namespace.
fn is_water_block_name(name: &str) -> bool {
    let base = name.strip_prefix("minecraft:").unwrap_or(name);
    matches!(base, "water" | "flowing_water")
}
6764
6765pub fn terrain_surface_role(name: &str) -> TerrainSurfaceRole {
6767 if is_air_block_name(name) {
6768 return TerrainSurfaceRole::Air;
6769 }
6770 if is_water_block_name(name) {
6771 return TerrainSurfaceRole::Water;
6772 }
6773 if terrain_surface_overlay_alpha(name).is_some() {
6774 return TerrainSurfaceRole::Overlay;
6775 }
6776 TerrainSurfaceRole::Primary
6777}
6778
/// Alpha value used when a block is drawn as a translucent overlay on top of
/// the surface block beneath it; `None` means the block is not an overlay.
/// Names are matched with or without the `minecraft:` namespace, and the
/// categories are checked in a fixed order (carpets first, then grass-like,
/// flora, rails/wires, fixtures).
pub fn terrain_surface_overlay_alpha(name: &str) -> Option<u8> {
    let base = name.strip_prefix("minecraft:").unwrap_or(name);
    // Carpets replace the surface sample outright instead of overlaying it.
    if base.contains("carpet") {
        return None;
    }
    let grass_like = base.contains("vine")
        || matches!(
            base,
            "short_grass" | "tallgrass" | "tall_grass" | "fern" | "large_fern"
        );
    if grass_like {
        return Some(82);
    }
    let flora = matches!(
        base,
        "deadbush"
            | "dead_bush"
            | "brown_mushroom"
            | "red_mushroom"
            | "poppy"
            | "dandelion"
            | "blue_orchid"
            | "allium"
            | "azure_bluet"
            | "oxeye_daisy"
            | "cornflower"
            | "lily_of_the_valley"
            | "wither_rose"
            | "torchflower"
    ) || base.contains("flower")
        || base.contains("sapling")
        || base.contains("bush")
        || base.contains("petals")
        || base.contains("tulip");
    if flora {
        return Some(115);
    }
    let wire_or_rail = matches!(
        base,
        "tripWire"
            | "trip_wire"
            | "tripwire_hook"
            | "redstone_wire"
            | "rail"
            | "detector_rail"
            | "activator_rail"
            | "golden_rail"
    );
    if wire_or_rail {
        return Some(130);
    }
    let fixture = matches!(
        base,
        "torch"
            | "redstone_torch"
            | "unlit_redstone_torch"
            | "soul_torch"
            | "copper_torch"
            | "lever"
    ) || base.contains("button")
        || base.contains("pressure_plate");
    fixture.then_some(155)
}