use anyhow::{Context, Result};
use serde_json::Value;
use std::collections::HashMap;
use std::env;
use std::fs;
use std::io::Write;
use std::path::Path;
mod data_sources_build {
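//! Build-time data-source layer: each upstream dataset gets an adapter that
//! knows how to fetch, validate, and normalize its JSON into
//! `UnifiedBlockData`, while `DataSourceRegistry` handles primary/fallback
//! selection and Bedrock supplementation.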
use anyhow::{Context, Result};
use serde_json::Value;
use std::collections::HashMap;
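/// Block record normalized from any source. The `bedrock_*` fields are
/// populated either directly by the Bedrock adapter or later when Bedrock
/// data is merged onto Java blocks.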
#[derive(Debug, Clone)]
pub struct UnifiedBlockData {
pub id: String,
pub properties: HashMap<String, Vec<String>>,
pub default_state: HashMap<String, String>,
pub transparent: bool,
#[allow(dead_code)]
pub extra_properties: HashMap<String, Value>,
pub bedrock_id: Option<String>,
pub bedrock_properties: Option<HashMap<String, Vec<String>>>,
pub bedrock_default_state: Option<HashMap<String, String>>,
}
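/// Interface shared by all upstream block-data providers: where to download
/// from, how to sanity-check the raw JSON, and how to parse it into
/// `UnifiedBlockData` records.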
pub trait DataSourceAdapter {
fn name(&self) -> &'static str;
fn fetch_url(&self) -> &'static str;
fn parse_data(&self, json_data: &str) -> Result<Vec<UnifiedBlockData>>;
fn validate_structure(&self, json: &Value) -> Result<()>;
}
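/// Java Edition block list from the PrismarineJS minecraft-data repository.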
pub struct PrismarineAdapter;
impl DataSourceAdapter for PrismarineAdapter {
fn name(&self) -> &'static str {
"PrismarineJS"
}
fn fetch_url(&self) -> &'static str {
"https://raw.githubusercontent.com/PrismarineJS/minecraft-data/master/data/pc/1.20.4/blocks.json"
}
fn parse_data(&self, json_data: &str) -> Result<Vec<UnifiedBlockData>> {
let parsed: Value =
serde_json::from_str(json_data).context("Failed to parse PrismarineJS JSON")?;
let blocks_array = parsed
.as_array()
.context("PrismarineJS JSON is not an array")?;
let mut unified_blocks = Vec::new();
for block in blocks_array {
let block_obj = block.as_object().context("Block is not an object")?;
let name = block_obj
.get("name")
.and_then(|n| n.as_str())
.context("Block missing name field")?;
let id = format!("minecraft:{name}");
let mut properties = HashMap::new();
if let Some(states) = block_obj.get("states").and_then(|s| s.as_array()) {
for state in states {
if let Some(state_obj) = state.as_object() {
if let (Some(prop_name), Some(prop_type), Some(num_values)) = (
state_obj.get("name").and_then(|n| n.as_str()),
state_obj.get("type").and_then(|t| t.as_str()),
state_obj.get("num_values").and_then(|n| n.as_u64()),
) {
let values = match prop_type {
"bool" => vec!["false".to_string(), "true".to_string()],
"int" => {
if let Some(values_array) =
state_obj.get("values").and_then(|v| v.as_array())
{
values_array
.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect()
} else {
(0..num_values).map(|i| i.to_string()).collect()
}
}
"enum" => {
if let Some(values_array) =
state_obj.get("values").and_then(|v| v.as_array())
{
values_array
.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect()
} else {
(0..num_values).map(|i| format!("value_{i}")).collect()
}
}
_ => vec!["unknown".to_string()],
};
properties.insert(prop_name.to_string(), values);
}
}
}
}
let transparent = block_obj
.get("transparent")
.and_then(|t| t.as_bool())
.unwrap_or(false);
let mut extra_properties = HashMap::new();
if let Some(hardness) = block_obj.get("hardness") {
extra_properties.insert("hardness".to_string(), hardness.clone());
}
if let Some(resistance) = block_obj.get("resistance") {
extra_properties.insert("resistance".to_string(), resistance.clone());
}
unified_blocks.push(UnifiedBlockData {
id,
properties,
default_state: HashMap::new(),
transparent,
extra_properties,
bedrock_id: None,
bedrock_properties: None,
bedrock_default_state: None,
});
}
Ok(unified_blocks)
}
fn validate_structure(&self, json: &Value) -> Result<()> {
let blocks_array = json
.as_array()
.context("PrismarineJS JSON is not a valid array")?;
if blocks_array.is_empty() {
anyhow::bail!("No blocks found in PrismarineJS data");
}
for (i, block_data) in blocks_array.iter().take(3).enumerate() {
let block_obj = block_data
.as_object()
.with_context(|| format!("Block at index {i} is not an object"))?;
if !block_obj.contains_key("name") {
anyhow::bail!("Block at index {} missing 'name' field", i);
}
}
Ok(())
}
}
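/// Block attributes from MCPropertyEncyclopedia. IDs are derived by
/// slugifying the display names, so only `extra_properties` carries data;
/// state properties are left empty.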
pub struct MCPropertyEncyclopediaAdapter;
impl DataSourceAdapter for MCPropertyEncyclopediaAdapter {
fn name(&self) -> &'static str {
"MCPropertyEncyclopedia"
}
fn fetch_url(&self) -> &'static str {
"https://raw.githubusercontent.com/JoakimThorsen/MCPropertyEncyclopedia/main/data/block_data.json"
}
fn parse_data(&self, json_data: &str) -> Result<Vec<UnifiedBlockData>> {
let parsed: Value = serde_json::from_str(json_data)
.context("Failed to parse MCPropertyEncyclopedia JSON")?;
let key_list = parsed
.get("key_list")
.and_then(|k| k.as_array())
.context("Missing or invalid key_list")?;
let properties_obj = parsed
.get("properties")
.and_then(|p| p.as_object())
.context("Missing or invalid properties")?;
let mut unified_blocks = Vec::new();
for block_name in key_list {
let block_name_str = block_name.as_str().context("Block name is not a string")?;
let id = format!(
"minecraft:{}",
block_name_str
.to_lowercase()
.replace(" ", "_")
.replace("(", "")
.replace(")", "")
.replace("-", "_")
.replace("'", "")
.replace("!", "")
.replace(".", "_")
);
let mut extra_properties = HashMap::new();
for (prop_name, prop_data) in properties_obj {
if let Some(entries) = prop_data.get("entries").and_then(|e| e.as_object()) {
if let Some(value) = entries.get(block_name_str) {
extra_properties.insert(prop_name.clone(), value.clone());
}
}
}
unified_blocks.push(UnifiedBlockData {
id,
properties: HashMap::new(),
default_state: HashMap::new(),
transparent: false,
extra_properties,
bedrock_id: None,
bedrock_properties: None,
bedrock_default_state: None,
});
}
Ok(unified_blocks)
}
fn validate_structure(&self, json: &Value) -> Result<()> {
let _key_list = json
.get("key_list")
.and_then(|k| k.as_array())
.context("Missing or invalid key_list")?;
let _properties = json
.get("properties")
.and_then(|p| p.as_object())
.context("Missing or invalid properties")?;
Ok(())
}
}
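/// Bedrock Edition block states. All state permutations for a block are
/// folded into one property map; the first value seen for each property
/// becomes its default.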
pub struct BedrockDataAdapter;
impl DataSourceAdapter for BedrockDataAdapter {
fn name(&self) -> &'static str {
"BedrockBlockStates"
}
fn fetch_url(&self) -> &'static str {
"https://raw.githubusercontent.com/PrismarineJS/minecraft-data/master/data/bedrock/1.21.0/blockStates.json"
}
fn parse_data(&self, json_data: &str) -> Result<Vec<UnifiedBlockData>> {
let parsed: Value = serde_json::from_str(json_data)
.context("Failed to parse Bedrock blockStates.json")?;
let states_array = parsed
.as_array()
.context("Bedrock blockStates.json is not an array")?;
let mut block_info_map: HashMap<
String,
(HashMap<String, Vec<String>>, HashMap<String, String>),
> = HashMap::new();
for state_entry in states_array {
if let Some(state_obj) = state_entry.as_object() {
if let Some(name) = state_obj.get("name").and_then(|n| n.as_str()) {
let info = block_info_map
.entry(name.to_string())
.or_insert_with(|| (HashMap::new(), HashMap::new()));
if let Some(states) = state_obj.get("states").and_then(|s| s.as_object()) {
for (prop_name, prop_val_obj) in states {
if let Some(val) = prop_val_obj.get("value") {
let val_str = match val {
Value::Bool(b) => b.to_string(),
Value::Number(n) => n.to_string(),
Value::String(s) => s.clone(),
_ => continue,
};
let values =
info.0.entry(prop_name.clone()).or_insert_with(Vec::new);
if !values.contains(&val_str) {
values.push(val_str.clone());
}
info.1.entry(prop_name.clone()).or_insert(val_str.clone());
}
}
}
}
}
}
let mut unified_blocks = Vec::new();
for (name, (properties, default_state)) in block_info_map {
let id = format!("minecraft:{}", name);
unified_blocks.push(UnifiedBlockData {
id: id.clone(),
properties: properties.clone(),
default_state: default_state.clone(),
transparent: false,
extra_properties: HashMap::new(),
bedrock_id: Some(id),
bedrock_properties: Some(properties),
bedrock_default_state: Some(default_state),
});
}
println!(
"cargo:warning=DEBUG: BedrockDataAdapter parsed {} unique blocks",
unified_blocks.len()
);
Ok(unified_blocks)
}
fn validate_structure(&self, json: &Value) -> Result<()> {
let states_array = json
.as_array()
.context("Bedrock data JSON is not a valid array")?;
if states_array.is_empty() {
anyhow::bail!("No states found in Bedrock blockStates data");
}
Ok(())
}
}
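/// Registry of all adapters plus the currently selected primary source; the
/// other sources serve as fallbacks when the primary cannot be fetched.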
pub struct DataSourceRegistry {
sources: Vec<Box<dyn DataSourceAdapter>>,
primary_source: Option<usize>,
}
impl DataSourceRegistry {
pub fn new() -> Self {
Self {
sources: Vec::new(),
primary_source: None,
}
}
pub fn register_source(&mut self, source: Box<dyn DataSourceAdapter>) {
self.sources.push(source);
if self.primary_source.is_none() {
self.primary_source = Some(0);
}
}
pub fn set_primary_source(&mut self, name: &str) -> Result<()> {
for (i, source) in self.sources.iter().enumerate() {
if source.name() == name {
self.primary_source = Some(i);
return Ok(());
}
}
anyhow::bail!("Data source '{}' not found", name);
}
pub fn get_primary_source(&self) -> Result<&dyn DataSourceAdapter> {
let index = self.primary_source.context("No primary data source set")?;
Ok(self.sources[index].as_ref())
}
pub fn list_sources(&self) -> Vec<&str> {
self.sources.iter().map(|s| s.name()).collect()
}
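/// Fetches blocks from the primary source (falling back to the others on
/// failure) and, when the primary is a Java-side source, supplements the
/// result with Bedrock state data.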
pub fn fetch_unified_data(&self) -> Result<Vec<UnifiedBlockData>> {
let primary = self.get_primary_source()?;
let mut blocks = match self.fetch_with_fallback(primary) {
Ok(blocks) => blocks,
Err(e) => {
println!("cargo:warning=All data sources failed: {e}");
anyhow::bail!("Could not fetch data from any source: {}", e)
}
};
if primary.name() == "PrismarineJS" || primary.name() == "MCPropertyEncyclopedia" {
if let Some(bedrock_source) = self
.sources
.iter()
.find(|s| s.name() == "BedrockBlockStates")
{
if let Ok(bedrock_blocks) = self.try_fetch_source(bedrock_source.as_ref()) {
println!(
"cargo:warning=Supplementing with Bedrock data from {}",
bedrock_source.name()
);
self.merge_bedrock_data(&mut blocks, &bedrock_blocks);
}
}
}
Ok(blocks)
}
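/// Copies Bedrock IDs, properties, and default states onto matching Java
/// blocks, remapping the few blocks whose Bedrock identifiers differ from
/// their Java names (wall torches, grass block, repeater, comparator).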
fn merge_bedrock_data(
&self,
java_blocks: &mut [UnifiedBlockData],
bedrock_blocks: &[UnifiedBlockData],
) {
let bedrock_map: HashMap<String, &UnifiedBlockData> =
bedrock_blocks.iter().map(|b| (b.id.clone(), b)).collect();
for java_block in java_blocks {
let target_bedrock_id = match java_block.id.as_str() {
"minecraft:wall_torch" => "minecraft:torch",
"minecraft:redstone_wall_torch" => "minecraft:redstone_torch",
"minecraft:soul_wall_torch" => "minecraft:soul_torch",
"minecraft:grass_block" => "minecraft:grass",
"minecraft:repeater" => "minecraft:unpowered_repeater", "minecraft:comparator" => "minecraft:unpowered_comparator",
id if id.ends_with("_slab") => id, id => id,
};
if let Some(bedrock_block) = bedrock_map.get(target_bedrock_id) {
java_block.bedrock_id = Some(bedrock_block.id.clone());
java_block.bedrock_properties = Some(bedrock_block.properties.clone());
java_block.bedrock_default_state = Some(bedrock_block.default_state.clone());
}
}
}
fn fetch_with_fallback(
&self,
primary: &dyn DataSourceAdapter,
) -> Result<Vec<UnifiedBlockData>> {
match self.try_fetch_source(primary) {
Ok(blocks) => {
println!(
"cargo:warning=Successfully fetched {} blocks from {}",
blocks.len(),
primary.name()
);
return Ok(blocks);
}
Err(e) => {
println!(
"cargo:warning=Failed to fetch from {} ({})",
primary.name(),
e
);
}
}
for source in &self.sources {
if source.name() != primary.name() {
match self.try_fetch_source(source.as_ref()) {
Ok(blocks) => {
println!(
"cargo:warning=Successfully fell back to {} and fetched {} blocks",
source.name(),
blocks.len()
);
return Ok(blocks);
}
Err(e) => {
println!(
"cargo:warning=Fallback to {} also failed: {}",
source.name(),
e
);
}
}
}
}
anyhow::bail!("All data sources failed to provide data")
}
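/// Downloads one source, preferring a cached copy under OUT_DIR when it still
/// validates; on a fresh download the JSON is validated, parsed, and written
/// back to the cache for future builds.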
fn try_fetch_source(
&self,
source: &dyn DataSourceAdapter,
) -> Result<Vec<UnifiedBlockData>> {
let url = source.fetch_url();
println!(
"cargo:warning=Fetching data from {} using {}",
url,
source.name()
);
let cache_key = format!("{}_data.json", source.name().to_lowercase());
let cache_path = std::env::var("OUT_DIR")
.map(|out_dir| std::path::Path::new(&out_dir).join(&cache_key))
.unwrap_or_else(|_| std::path::PathBuf::from(&cache_key));
if cache_path.exists() {
println!("cargo:warning=DEBUG: Found cache at {:?}", cache_path);
if let Ok(cached_data) = std::fs::read_to_string(&cache_path) {
if let Ok(parsed) = serde_json::from_str::<Value>(&cached_data) {
if source.validate_structure(&parsed).is_ok() {
if let Ok(blocks) = source.parse_data(&cached_data) {
println!("cargo:warning=Using cached data for {}", source.name());
return Ok(blocks);
}
}
}
}
println!(
"cargo:warning=Cache invalid for {}, re-downloading",
source.name()
);
}
let json_data = download_from_url(url)
.with_context(|| format!("Failed to download from {}", source.name()))?;
let parsed: Value = serde_json::from_str(&json_data)
.with_context(|| format!("Failed to parse JSON from {}", source.name()))?;
source
.validate_structure(&parsed)
.with_context(|| format!("Data validation failed for {}", source.name()))?;
let blocks = source
.parse_data(&json_data)
.with_context(|| format!("Failed to parse data from {}", source.name()))?;
if let Err(e) = std::fs::write(&cache_path, &json_data) {
println!(
"cargo:warning=Failed to cache data for {}: {}",
source.name(),
e
);
} else {
println!(
"cargo:warning=Cached data for {} for future builds",
source.name()
);
}
Ok(blocks)
}
}
impl Default for DataSourceRegistry {
fn default() -> Self {
let mut registry = Self::new();
registry.register_source(Box::new(PrismarineAdapter));
registry.register_source(Box::new(MCPropertyEncyclopediaAdapter));
registry.register_source(Box::new(BedrockDataAdapter));
registry
}
}
#[cfg(feature = "build-data")]
fn download_from_url(url: &str) -> Result<String> {
let response = reqwest::blocking::get(url).context("Failed to make HTTP request")?;
if !response.status().is_success() {
anyhow::bail!("HTTP request failed with status: {}", response.status());
}
response
.text()
.context("Failed to read response body as text")
}
#[cfg(not(feature = "build-data"))]
fn download_from_url(_url: &str) -> Result<String> {
anyhow::bail!("Network downloads disabled - build-data feature not enabled")
}
}
use data_sources_build::*;
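/// Builds the block table from JSON files committed under ./data/ instead of
/// downloading, merging pre-built Bedrock block states when they exist.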
fn use_prebuilt_data(out_dir: &str) -> Result<()> {
let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
let data_dir = Path::new(&manifest_dir).join("data");
let prismarinejs_file = data_dir.join("prismarinejs_blocks.json");
let mcproperty_file = data_dir.join("mcproperty_blocks.json");
let _bedrock_file = data_dir.join("bedrock_blocks.json");
if !prismarinejs_file.exists() && !mcproperty_file.exists() {
anyhow::bail!("No pre-built data files found in ./data/ directory. Run 'cargo run --bin build-data --features build-data' to generate them.");
}
let data_file = if prismarinejs_file.exists() {
println!("cargo:warning=Using pre-built PrismarineJS data");
prismarinejs_file
} else {
println!("cargo:warning=Using pre-built MCPropertyEncyclopedia data");
mcproperty_file
};
let json_data = fs::read_to_string(&data_file)
.with_context(|| format!("Failed to read pre-built data from {:?}", data_file))?;
let parsed: Value =
serde_json::from_str(&json_data).context("Failed to parse pre-built JSON data")?;
let mut bedrock_blocks = None;
let bedrock_states_file = data_dir.join("bedrock_block_states.json");
if bedrock_states_file.exists() {
println!("cargo:warning=Using pre-built Bedrock block states data");
if let Ok(bedrock_json) = fs::read_to_string(&bedrock_states_file) {
let adapter = BedrockDataAdapter;
if let Ok(blocks) = adapter.parse_data(&bedrock_json) {
bedrock_blocks = Some(blocks);
}
}
}
if let Some(bedrock_blocks) = bedrock_blocks {
let adapter: Box<dyn DataSourceAdapter> =
if data_file.to_string_lossy().contains("prismarinejs") {
Box::new(PrismarineAdapter)
} else {
Box::new(MCPropertyEncyclopediaAdapter)
};
let mut java_blocks = adapter.parse_data(&json_data)?;
let bedrock_map: HashMap<String, UnifiedBlockData> = bedrock_blocks
.into_iter()
.map(|b| (b.id.clone(), b))
.collect();
for java_block in &mut java_blocks {
let target_bedrock_id = match java_block.id.as_str() {
"minecraft:wall_torch" => "minecraft:torch",
"minecraft:redstone_wall_torch" => "minecraft:redstone_torch",
"minecraft:soul_wall_torch" => "minecraft:soul_torch",
"minecraft:grass_block" => "minecraft:grass",
"minecraft:repeater" => "minecraft:unpowered_repeater",
"minecraft:comparator" => "minecraft:unpowered_comparator",
id if id.ends_with("_slab") => id,
id => id,
};
if let Some(bedrock_block) = bedrock_map.get(target_bedrock_id) {
java_block.bedrock_id = Some(bedrock_block.id.clone());
java_block.bedrock_properties = Some(bedrock_block.properties.clone());
java_block.bedrock_default_state = Some(bedrock_block.default_state.clone());
}
}
generate_unified_phf_table(out_dir, &java_blocks)?;
} else {
generate_legacy_phf_table(out_dir, &parsed)?;
}
println!("cargo:warning=Successfully built blockpedia using pre-built data");
Ok(())
}
#[cfg(feature = "build-data")]
const BLOCKS_DATA_URL: &str = "https://raw.githubusercontent.com/PrismarineJS/minecraft-data/master/data/pc/1.20.4/blocks.json";
#[derive(Debug, Clone)]
struct ExtraData {
mock_data: HashMap<String, i32>,
color_data: HashMap<String, (u8, u8, u8, f32, f32, f32)>,
}
struct FetcherRegistry {
extra_data: ExtraData,
}
impl FetcherRegistry {
fn new() -> Self {
Self {
extra_data: ExtraData {
mock_data: HashMap::new(),
color_data: HashMap::new(),
},
}
}
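/// Stores a block's average texture color together with the same simplified
/// Oklab-style triple that the generated `rgb_to_oklab` helper computes.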
fn add_color_data(&mut self, block_id: &str, rgb: (u8, u8, u8)) {
let r = rgb.0 as f32 / 255.0;
let g = rgb.1 as f32 / 255.0;
let b = rgb.2 as f32 / 255.0;
let l = 0.2126 * r + 0.7152 * g + 0.0722 * b;
let a = (r - g) * 0.5;
let b_val = (r + g - 2.0 * b) * 0.25;
self.extra_data
.color_data
.insert(block_id.to_string(), (rgb.0, rgb.1, rgb.2, l, a, b_val));
}
fn extract_colors_from_textures(&mut self, available_block_ids: &[String]) -> Result<()> {
let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
let textures_dir = Path::new(&manifest_dir).join("assets/textures");
let data_dir = Path::new(&manifest_dir).join("data");
let cache_path = data_dir.join("color_cache.json");
if !textures_dir.exists() {
if cache_path.exists() {
println!("cargo:warning=Textures not found, but color cache exists. Loading from {cache_path:?}");
let cache_data = fs::read_to_string(&cache_path)?;
let cache: HashMap<String, (u8, u8, u8, f32, f32, f32)> = serde_json::from_str(&cache_data)?;
self.extra_data.color_data.extend(cache);
println!("cargo:warning=Loaded {} colors from cache", self.extra_data.color_data.len());
return Ok(());
}
println!("cargo:warning=No textures directory found at {textures_dir:?} and no cache found - using mock color data only");
return Ok(());
}
println!("cargo:warning=Extracting colors from textures in {textures_dir:?}");
let texture_files: Vec<String> = std::fs::read_dir(&textures_dir)
.context("Failed to read textures directory")?
.filter_map(|entry| {
let entry = entry.ok()?;
let path = entry.path();
if path.extension()? == "png" {
path.file_stem()?.to_str().map(|s| s.to_string())
} else {
None
}
})
.collect();
println!("cargo:warning=Found {} texture files", texture_files.len());
let mut extracted_count = 0;
let mut failed_count = 0;
for texture_name in texture_files {
if let Some(block_ids) = self.texture_to_block_ids(&texture_name) {
let texture_path = textures_dir.join(format!("{}.png", texture_name));
match self.extract_color_from_texture(&texture_path) {
Ok(rgb) => {
for block_id in &block_ids {
if available_block_ids.contains(block_id) {
self.add_color_data(block_id, rgb);
extracted_count += 1;
}
}
}
Err(e) => {
failed_count += 1;
if failed_count <= 5 {
println!(
"cargo:warning=Failed to extract color from {}: {}",
texture_name, e
);
}
}
}
}
}
println!(
"cargo:warning=Color extraction complete: {} colors extracted, {} failures",
extracted_count, failed_count
);
if extracted_count > 0 {
let cache_data = serde_json::to_string_pretty(&self.extra_data.color_data)?;
if !data_dir.exists() {
fs::create_dir_all(&data_dir)?;
}
fs::write(&cache_path, cache_data)?;
println!("cargo:warning=Updated color cache at {cache_path:?}");
}
Ok(())
}
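/// Averages the RGB of all sufficiently opaque pixels (alpha > 128) to
/// produce a single representative color for a texture.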
fn extract_color_from_texture(&self, texture_path: &Path) -> Result<(u8, u8, u8)> {
let img = image::open(texture_path)
.with_context(|| format!("Failed to open texture: {:?}", texture_path))?;
let rgba_img = img.to_rgba8();
let (width, height) = rgba_img.dimensions();
let mut r_sum = 0u64;
let mut g_sum = 0u64;
let mut b_sum = 0u64;
let mut pixel_count = 0u64;
for y in 0..height {
for x in 0..width {
let pixel = rgba_img.get_pixel(x, y);
let [r, g, b, a] = pixel.0;
if a > 128 {
r_sum += r as u64;
g_sum += g as u64;
b_sum += b as u64;
pixel_count += 1;
}
}
}
if pixel_count == 0 {
anyhow::bail!("No opaque pixels found in texture");
}
let avg_r = (r_sum / pixel_count) as u8;
let avg_g = (g_sum / pixel_count) as u8;
let avg_b = (b_sum / pixel_count) as u8;
Ok((avg_r, avg_g, avg_b))
}
fn add_inherited_colors(&mut self, available_block_ids: &[String]) {
let mut inherited_count = 0;
let existing_colors = self.extra_data.color_data.clone();
for block_id in available_block_ids {
if existing_colors.contains_key(block_id) {
continue;
}
if let Some(base_material) = self.get_base_material_for_block(block_id) {
if let Some(color) = existing_colors.get(&base_material) {
self.extra_data.color_data.insert(block_id.clone(), *color);
inherited_count += 1;
}
}
}
println!(
"cargo:warning=Color inheritance complete: {} colors inherited from base materials",
inherited_count
);
}
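/// Maps derived blocks (stairs, slabs, walls, fences, doors, trapdoors) to
/// the base block whose color they should inherit.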
fn get_base_material_for_block(&self, block_id: &str) -> Option<String> {
let block_name = block_id.strip_prefix("minecraft:").unwrap_or(block_id);
if block_name.ends_with("_stairs") {
let base = block_name.replace("_stairs", "");
return Some(format!("minecraft:{}", base));
}
if block_name.ends_with("_slab") {
let base = block_name.replace("_slab", "");
match base.as_str() {
"petrified_oak" => return Some("minecraft:oak_planks".to_string()),
"smooth_stone" => return Some("minecraft:stone".to_string()),
"cut_copper" => return Some("minecraft:copper_block".to_string()),
"exposed_cut_copper" => return Some("minecraft:exposed_copper".to_string()),
"weathered_cut_copper" => return Some("minecraft:weathered_copper".to_string()),
"oxidized_cut_copper" => return Some("minecraft:oxidized_copper".to_string()),
"waxed_cut_copper" => return Some("minecraft:copper_block".to_string()),
"waxed_exposed_cut_copper" => return Some("minecraft:exposed_copper".to_string()),
"waxed_weathered_cut_copper" => {
return Some("minecraft:weathered_copper".to_string())
}
"waxed_oxidized_cut_copper" => {
return Some("minecraft:oxidized_copper".to_string())
}
"cut_red_sandstone" => return Some("minecraft:red_sandstone".to_string()),
"cut_sandstone" => return Some("minecraft:sandstone".to_string()),
"prismarine_brick" => return Some("minecraft:prismarine_bricks".to_string()),
"nether_brick" => return Some("minecraft:nether_bricks".to_string()),
"red_nether_brick" => return Some("minecraft:red_nether_bricks".to_string()),
"polished_blackstone_brick" => {
return Some("minecraft:polished_blackstone_bricks".to_string())
}
"end_stone_brick" => return Some("minecraft:end_stone_bricks".to_string()),
"stone_brick" => return Some("minecraft:stone_bricks".to_string()),
"mossy_stone_brick" => return Some("minecraft:mossy_stone_bricks".to_string()),
"mossy_cobblestone" => return Some("minecraft:mossy_cobblestone".to_string()),
"deepslate_brick" => return Some("minecraft:deepslate_bricks".to_string()),
"deepslate_tile" => return Some("minecraft:deepslate_tiles".to_string()),
"polished_deepslate" => return Some("minecraft:polished_deepslate".to_string()),
"cobbled_deepslate" => return Some("minecraft:cobbled_deepslate".to_string()),
"tuff_brick" => return Some("minecraft:tuff_bricks".to_string()),
"polished_tuff" => return Some("minecraft:polished_tuff".to_string()),
"bamboo_mosaic" => return Some("minecraft:bamboo_planks".to_string()),
_ => {
return Some(format!("minecraft:{}", base));
}
}
}
if block_name.ends_with("_wall") {
let base = block_name.replace("_wall", "");
match base.as_str() {
"cobblestone" => return Some("minecraft:cobblestone".to_string()),
"mossy_cobblestone" => return Some("minecraft:mossy_cobblestone".to_string()),
"stone_brick" => return Some("minecraft:stone_bricks".to_string()),
"mossy_stone_brick" => return Some("minecraft:mossy_stone_bricks".to_string()),
"granite" => return Some("minecraft:granite".to_string()),
"diorite" => return Some("minecraft:diorite".to_string()),
"andesite" => return Some("minecraft:andesite".to_string()),
"cobbled_deepslate" => return Some("minecraft:cobbled_deepslate".to_string()),
"polished_deepslate" => return Some("minecraft:polished_deepslate".to_string()),
"deepslate_brick" => return Some("minecraft:deepslate_bricks".to_string()),
"deepslate_tile" => return Some("minecraft:deepslate_tiles".to_string()),
"brick" => return Some("minecraft:bricks".to_string()),
"mud_brick" => return Some("minecraft:mud_bricks".to_string()),
"nether_brick" => return Some("minecraft:nether_bricks".to_string()),
"red_nether_brick" => return Some("minecraft:red_nether_bricks".to_string()),
"sandstone" => return Some("minecraft:sandstone".to_string()),
"red_sandstone" => return Some("minecraft:red_sandstone".to_string()),
"blackstone" => return Some("minecraft:blackstone".to_string()),
"polished_blackstone" => return Some("minecraft:polished_blackstone".to_string()),
"polished_blackstone_brick" => {
return Some("minecraft:polished_blackstone_bricks".to_string())
}
"end_stone_brick" => return Some("minecraft:end_stone_bricks".to_string()),
"prismarine" => return Some("minecraft:prismarine".to_string()),
"tuff" => return Some("minecraft:tuff".to_string()),
"polished_tuff" => return Some("minecraft:polished_tuff".to_string()),
"tuff_brick" => return Some("minecraft:tuff_bricks".to_string()),
_ => {
return Some(format!("minecraft:{}", base));
}
}
}
if block_name.ends_with("_fence") && !block_name.ends_with("_fence_gate") {
let base = block_name.replace("_fence", "");
match base.as_str() {
"nether_brick" => return Some("minecraft:nether_bricks".to_string()),
_ => {
return Some(format!("minecraft:{}_planks", base));
}
}
}
if block_name.ends_with("_fence_gate") {
let base = block_name.replace("_fence_gate", "");
return Some(format!("minecraft:{}_planks", base));
}
if block_name.ends_with("_door") {
let base = block_name.replace("_door", "");
match base.as_str() {
"iron" => return Some("minecraft:iron_block".to_string()),
"copper" => return Some("minecraft:copper_block".to_string()),
"exposed_copper" => return Some("minecraft:exposed_copper".to_string()),
"weathered_copper" => return Some("minecraft:weathered_copper".to_string()),
"oxidized_copper" => return Some("minecraft:oxidized_copper".to_string()),
"waxed_copper" => return Some("minecraft:copper_block".to_string()),
"waxed_exposed_copper" => return Some("minecraft:exposed_copper".to_string()),
"waxed_weathered_copper" => return Some("minecraft:weathered_copper".to_string()),
"waxed_oxidized_copper" => return Some("minecraft:oxidized_copper".to_string()),
_ => {
return Some(format!("minecraft:{}_planks", base));
}
}
}
if block_name.ends_with("_trapdoor") {
let base = block_name.replace("_trapdoor", "");
match base.as_str() {
"iron" => return Some("minecraft:iron_block".to_string()),
"copper" => return Some("minecraft:copper_block".to_string()),
"exposed_copper" => return Some("minecraft:exposed_copper".to_string()),
"weathered_copper" => return Some("minecraft:weathered_copper".to_string()),
"oxidized_copper" => return Some("minecraft:oxidized_copper".to_string()),
"waxed_copper" => return Some("minecraft:copper_block".to_string()),
"waxed_exposed_copper" => return Some("minecraft:exposed_copper".to_string()),
"waxed_weathered_copper" => return Some("minecraft:weathered_copper".to_string()),
"waxed_oxidized_copper" => return Some("minecraft:oxidized_copper".to_string()),
_ => {
return Some(format!("minecraft:{}_planks", base));
}
}
}
None
}
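/// Maps a texture file name to the block IDs it should color. Returns `None`
/// for side/top/front variants that are better represented by another
/// texture of the same block.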
fn texture_to_block_ids(&self, texture_name: &str) -> Option<Vec<String>> {
let mut block_ids = Vec::new();
let base_id = format!("minecraft:{}", texture_name);
block_ids.push(base_id);
match texture_name {
name if name.ends_with("_log") => {
let wood_base = name.replace("_log", "");
block_ids.push(format!("minecraft:{}_wood", wood_base));
block_ids.push(format!("minecraft:{}_leaves", wood_base));
if name.starts_with("stripped_") {
let base = name.replace("stripped_", "");
block_ids.push(format!("minecraft:{}", base));
}
}
name if name.ends_with("_log_top") => {
let base = name.replace("_log_top", "");
block_ids.push(format!("minecraft:{}_log", base));
block_ids.push(format!("minecraft:{}_wood", base));
}
"stone" => {
block_ids.push("minecraft:smooth_stone".to_string());
}
"grass_block_snow" => {
block_ids.push("minecraft:grass_block".to_string());
}
"sandstone_top" => {
block_ids.push("minecraft:sandstone".to_string());
block_ids.push("minecraft:smooth_sandstone".to_string());
}
"red_sandstone_top" => {
block_ids.push("minecraft:red_sandstone".to_string());
block_ids.push("minecraft:smooth_red_sandstone".to_string());
}
"furnace_side" => {
block_ids.push("minecraft:furnace".to_string());
}
"furnace_front" | "furnace_top" => {
return None;
}
"pumpkin_side" => {
block_ids.push("minecraft:pumpkin".to_string());
}
"pumpkin_top" => {
return None;
}
"melon_side" => {
block_ids.push("minecraft:melon".to_string());
}
"melon_top" => {
return None;
}
"hay_block_side" => {
block_ids.push("minecraft:hay_block".to_string());
}
"hay_block_top" => {
return None;
}
"tnt_side" => {
block_ids.push("minecraft:tnt".to_string());
}
"tnt_bottom" => {
return None;
}
"mycelium_side" => {
block_ids.push("minecraft:mycelium".to_string());
}
"mycelium_top" => {
return None;
}
"podzol_side" => {
block_ids.push("minecraft:podzol".to_string());
}
"podzol_top" => {
return None;
}
"farmland_moist" => {
block_ids.push("minecraft:farmland".to_string());
}
"farmland" => {
return None;
}
"dirt_path_top" => {
block_ids.push("minecraft:dirt_path".to_string());
}
name if name.contains("copper") && name.contains("_bulb") => {
let base = name.replace("_lit", "");
block_ids.push(format!("minecraft:{}", base));
}
"shulker_box" => {
block_ids.clear();
block_ids.push("minecraft:shulker_box".to_string());
}
_ => {
// No special handling needed; the base ID pushed above is used as-is.
}
}
if block_ids.is_empty() {
None
} else {
Some(block_ids)
}
}
fn fetch_all(&mut self, available_block_ids: &[String]) -> Result<&ExtraData> {
self.extra_data
.mock_data
.insert("minecraft:stone".to_string(), 42);
self.extra_data
.mock_data
.insert("minecraft:dirt".to_string(), 123);
self.extra_data
.mock_data
.insert("minecraft:grass_block".to_string(), 456);
self.extra_data
.mock_data
.insert("minecraft:oak_log".to_string(), 789);
self.extra_data
.mock_data
.insert("minecraft:oak_planks".to_string(), 321);
self.extra_data
.mock_data
.insert("minecraft:cobblestone".to_string(), 654);
self.add_color_data("minecraft:stone", (125, 125, 125));
self.add_color_data("minecraft:dirt", (134, 96, 67));
self.add_color_data("minecraft:grass_block", (95, 159, 53));
self.add_color_data("minecraft:oak_log", (102, 81, 51));
self.add_color_data("minecraft:oak_leaves", (65, 137, 50));
self.add_color_data("minecraft:oak_planks", (162, 130, 78));
self.add_color_data("minecraft:water", (64, 164, 223));
self.add_color_data("minecraft:lava", (207, 108, 32));
self.add_color_data("minecraft:cobblestone", (127, 127, 127));
self.add_color_data("minecraft:sand", (219, 203, 158));
self.add_color_data("minecraft:gravel", (136, 126, 126));
self.add_color_data("minecraft:gold_ore", (252, 238, 75));
self.add_color_data("minecraft:iron_ore", (135, 130, 126));
self.add_color_data("minecraft:diamond_ore", (92, 219, 213));
if let Err(e) = self.extract_colors_from_textures(available_block_ids) {
println!(
"cargo:warning=Failed to extract colors from textures: {}",
e
);
}
self.add_inherited_colors(available_block_ids);
Ok(&self.extra_data)
}
fn generate_query_helpers(&self, file: &mut std::fs::File) -> Result<()> {
writeln!(file, "// Generated query helper functions")?;
writeln!(file, "impl crate::BlockFacts {{")?;
writeln!(
file,
" pub fn closest_to_color(target_rgb: [u8; 3]) -> Option<&'static Self> {{"
)?;
writeln!(file, " let target_oklab = rgb_to_oklab(target_rgb);")?;
writeln!(file, " let mut best_block = None;")?;
writeln!(file, " let mut best_distance = f32::INFINITY;")?;
writeln!(file, " for block in crate::all_blocks() {{")?;
writeln!(
file,
" if let Some(ref color) = block.extras.color {{"
)?;
writeln!(
file,
" let distance = oklab_distance(target_oklab, color.oklab);"
)?;
writeln!(file, " if distance < best_distance {{")?;
writeln!(file, " best_distance = distance;")?;
writeln!(file, " best_block = Some(block);")?;
writeln!(file, " }}")?;
writeln!(file, " }}")?;
writeln!(file, " }}")?;
writeln!(file, " best_block")?;
writeln!(file, " }}")?;
writeln!(file)?;
writeln!(file, " pub fn blocks_in_color_range(center_rgb: [u8; 3], max_distance: f32) -> Vec<&'static Self> {{")? ;
writeln!(file, " let center_oklab = rgb_to_oklab(center_rgb);")?;
writeln!(file, " let mut result = Vec::new();")?;
writeln!(file, " for block in crate::all_blocks() {{")?;
writeln!(
file,
" if let Some(ref color) = block.extras.color {{"
)?;
writeln!(
file,
" let distance = oklab_distance(center_oklab, color.oklab);"
)?;
writeln!(file, " if distance <= max_distance {{")?;
writeln!(file, " result.push(block);")?;
writeln!(file, " }}")?;
writeln!(file, " }}")?;
writeln!(file, " }}")?;
writeln!(file, " result")?;
writeln!(file, " }}")?;
writeln!(file, "}}")?;
writeln!(file)?;
writeln!(file, "fn rgb_to_oklab(rgb: [u8; 3]) -> [f32; 3] {{")?;
writeln!(
file,
" // Simplified RGB to Oklab conversion for build-time"
)?;
writeln!(file, " let r = rgb[0] as f32 / 255.0;")?;
writeln!(file, " let g = rgb[1] as f32 / 255.0;")?;
writeln!(file, " let b = rgb[2] as f32 / 255.0;")?;
writeln!(file, " let l = 0.2126 * r + 0.7152 * g + 0.0722 * b;")?;
writeln!(file, " let a = (r - g) * 0.5;")?;
writeln!(file, " let b_val = (r + g - 2.0 * b) * 0.25;")?;
writeln!(file, " [l, a, b_val]")?;
writeln!(file, "}}")?;
writeln!(file)?;
writeln!(
file,
"fn oklab_distance(a: [f32; 3], b: [f32; 3]) -> f32 {{"
)?;
writeln!(file, " let dl = a[0] - b[0];")?;
writeln!(file, " let da = a[1] - b[1];")?;
writeln!(file, " let db = a[2] - b[2];")?;
writeln!(file, " (dl * dl + da * da + db * db).sqrt()")?;
writeln!(file, "}}")?;
writeln!(file)?;
Ok(())
}
}
fn setup_fetchers() -> FetcherRegistry {
FetcherRegistry::new()
}
fn get_block_ids_from_json(json: &Value) -> Result<Vec<String>> {
let mut block_ids = Vec::new();
if json.is_object() && json.get("blocks").is_some() {
let blocks_obj = json["blocks"]
.as_object()
.context("'blocks' field is not an object")?;
block_ids.extend(blocks_obj.keys().cloned());
} else if json.is_array() {
let blocks_array = json.as_array().context("JSON is not a valid array")?;
for block in blocks_array {
if let Some(block_obj) = block.as_object() {
if let Some(name) = block_obj.get("name").and_then(|n| n.as_str()) {
block_ids.push(format!("minecraft:{}", name));
}
}
}
} else {
anyhow::bail!("Unsupported JSON format for extracting block IDs");
}
Ok(block_ids)
}
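/// Build entry point: prefer pre-built data when requested; without the
/// build-data feature fall back to pre-built data, and with it fetch from the
/// registered sources and generate the PHF tables.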
fn main() -> Result<()> {
let out_dir = env::var("OUT_DIR").unwrap();
println!("cargo:rerun-if-changed=build.rs");
println!("cargo:rerun-if-env-changed=BLOCKPEDIA_DATA_SOURCE");
println!("cargo:rerun-if-env-changed=BLOCKPEDIA_USE_TEST_DATA");
println!("cargo:rerun-if-env-changed=BLOCKPEDIA_VERSION_JSON_SHA");
if cfg!(feature = "use-prebuilt") || env::var("BLOCKPEDIA_USE_PREBUILT").is_ok() {
println!("cargo:warning=Using pre-built data files");
return use_prebuilt_data(&out_dir);
}
#[cfg(not(feature = "build-data"))]
{
println!("cargo:warning=Network downloads disabled (build-data feature not enabled)");
println!("cargo:warning=Checking for pre-built data as fallback...");
use_prebuilt_data(&out_dir).with_context(|| "No pre-built data available and network downloads disabled. Run 'cargo run --bin build-data --features build-data' to generate data files.")
}
#[cfg(feature = "build-data")]
{
let mut data_registry = DataSourceRegistry::default();
if let Ok(source_name) = env::var("BLOCKPEDIA_DATA_SOURCE") {
println!(
"cargo:warning=Setting data source to {} from environment variable",
source_name
);
data_registry
.set_primary_source(&source_name)
.with_context(|| format!("Failed to set data source to {}", source_name))?;
}
println!(
"cargo:warning=Available data sources: {:?}",
data_registry.list_sources()
);
println!(
"cargo:warning=Using primary data source: {}",
data_registry.get_primary_source()?.name()
);
let cache_path = Path::new(&out_dir).join("blocks_data.json");
let unified_blocks = if env::var("BLOCKPEDIA_USE_TEST_DATA").is_ok() {
let json_data = fetch_or_load_cached(&cache_path)?;
let parsed: Value =
serde_json::from_str(&json_data).context("Failed to parse downloaded JSON")?;
validate_json_structure(&parsed)?;
vec![]
} else {
match data_registry.fetch_unified_data() {
Ok(blocks) => blocks,
Err(e) => {
println!(
"cargo:warning=Failed to fetch from primary source ({}): {}",
data_registry.get_primary_source()?.name(),
e
);
println!("cargo:warning=Falling back to cached/legacy method");
let json_data = fetch_or_load_cached(&cache_path)?;
let parsed: Value = serde_json::from_str(&json_data)
.context("Failed to parse downloaded JSON")?;
validate_json_structure(&parsed)?;
generate_legacy_phf_table(&out_dir, &parsed)?;
return Ok(());
}
}
};
if unified_blocks.is_empty() || env::var("BLOCKPEDIA_USE_TEST_DATA").is_ok() {
let json_data = fetch_or_load_cached(&cache_path)?;
let parsed: Value =
serde_json::from_str(&json_data).context("Failed to parse downloaded JSON")?;
validate_json_structure(&parsed)?;
generate_legacy_phf_table(&out_dir, &parsed)?;
} else {
generate_unified_phf_table(&out_dir, &unified_blocks)?;
}
Ok(())
}
}
#[cfg(feature = "build-data")]
fn fetch_or_load_cached(cache_path: &Path) -> Result<String> {
if std::env::var("BLOCKPEDIA_USE_TEST_DATA").is_ok() {
let test_file = Path::new("test_blocks_data.json");
if test_file.exists() {
println!("cargo:warning=Using local test file (BLOCKPEDIA_USE_TEST_DATA is set)");
return fs::read_to_string(test_file).context("Failed to read test JSON file");
}
}
if cache_path.exists() {
println!("cargo:warning=Using cached blocks_data.json");
return fs::read_to_string(cache_path).context("Failed to read cached JSON file");
}
println!("cargo:warning=Downloading blocks_data.json from GitHub...");
match download_json() {
Ok(data) => {
fs::write(cache_path, &data).context("Failed to cache downloaded JSON")?;
Ok(data)
}
Err(e) => {
anyhow::bail!(
"Failed to download blocks_data.json and no cache available: {}",
e
);
}
}
}
#[cfg(feature = "build-data")]
fn download_json() -> Result<String> {
let response =
reqwest::blocking::get(BLOCKS_DATA_URL).context("Failed to make HTTP request")?;
if !response.status().is_success() {
anyhow::bail!("HTTP request failed with status: {}", response.status());
}
response
.text()
.context("Failed to read response body as text")
}
#[cfg(feature = "build-data")]
fn validate_json_structure(json: &Value) -> Result<()> {
if json.is_object() && json.get("blocks").is_some() {
let blocks = json["blocks"]
.as_object()
.context("'blocks' field is not an object")?;
if blocks.is_empty() {
anyhow::bail!("No blocks found in JSON data");
}
println!(
"cargo:warning=JSON validation passed - found {} blocks (test format)",
blocks.len()
);
} else if json.is_array() {
let blocks_array = json.as_array().context("JSON is not a valid array")?;
if blocks_array.is_empty() {
anyhow::bail!("No blocks found in JSON data");
}
for (i, block_data) in blocks_array.iter().take(5).enumerate() {
let block_obj = block_data
.as_object()
.with_context(|| format!("Block at index {} is not an object", i))?;
if !block_obj.contains_key("name") {
anyhow::bail!("Block at index {} missing 'name' field", i);
}
}
println!(
"cargo:warning=JSON validation passed - found {} blocks (PrismarineJS format)",
blocks_array.len()
);
} else {
anyhow::bail!(
"JSON format not recognized - expected either {{\"blocks\": {{...}}}} or array format"
);
}
Ok(())
}
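/// Legacy generator: accepts either the {"blocks": {...}} test format or the
/// PrismarineJS array format and writes the PHF block table plus query
/// helpers.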
fn generate_phf_table(
out_dir: &str,
json: &Value,
extra_data: &ExtraData,
fetcher_registry: &FetcherRegistry,
) -> Result<()> {
let table_path = Path::new(out_dir).join("block_table.rs");
let mut file = std::fs::File::create(&table_path).context("Failed to create block_table.rs")?;
writeln!(file, "// Auto-generated PHF table from block data")?;
writeln!(file, "use phf::{{phf_map, Map}};")?;
writeln!(file)?;
let block_data: Vec<(String, serde_json::Value)> = if json.is_object()
&& json.get("blocks").is_some()
{
let blocks_obj = json["blocks"]
.as_object()
.context("'blocks' field is not an object")?;
blocks_obj
.iter()
.map(|(k, v)| (k.clone(), v.clone()))
.collect()
} else if json.is_array() {
let blocks_array = json.as_array().context("JSON is not a valid array")?;
blocks_array
.iter()
.filter_map(|block| {
let block_obj = block.as_object()?;
let name = block_obj.get("name")?.as_str()?;
let mut converted_block = serde_json::Map::new();
if let Some(states) = block_obj.get("states").and_then(|s| s.as_array()) {
let mut properties = serde_json::Map::new();
for state in states {
if let Some(state_obj) = state.as_object() {
if let (Some(prop_name), Some(prop_type), Some(num_values)) = (
state_obj.get("name").and_then(|n| n.as_str()),
state_obj.get("type").and_then(|t| t.as_str()),
state_obj.get("num_values").and_then(|n| n.as_u64()),
) {
let values = match prop_type {
"bool" => vec!["false".to_string(), "true".to_string()],
"int" => {
if let Some(values_array) =
state_obj.get("values").and_then(|v| v.as_array())
{
values_array
.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect()
} else {
(0..num_values).map(|i| i.to_string()).collect()
}
}
"enum" => {
if let Some(values_array) =
state_obj.get("values").and_then(|v| v.as_array())
{
values_array
.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect()
} else {
(0..num_values)
.map(|i| format!("value_{}", i))
.collect()
}
}
_ => vec!["unknown".to_string()],
};
properties.insert(
prop_name.to_string(),
serde_json::Value::Array(
values.into_iter().map(serde_json::Value::String).collect(),
),
);
}
}
}
if !properties.is_empty() {
converted_block.insert(
"properties".to_string(),
serde_json::Value::Object(properties),
);
} else {
converted_block.insert(
"properties".to_string(),
serde_json::Value::Object(serde_json::Map::new()),
);
}
} else {
converted_block.insert(
"properties".to_string(),
serde_json::Value::Object(serde_json::Map::new()),
);
}
converted_block.insert(
"default_state".to_string(),
serde_json::Value::Object(serde_json::Map::new()),
);
Some((
format!("minecraft:{}", name),
serde_json::Value::Object(converted_block),
))
})
.collect()
} else {
anyhow::bail!("Unsupported JSON format");
};
for (block_id, block_data) in &block_data {
let block_obj = block_data
.as_object()
.with_context(|| format!("Block '{}' is not an object", block_id))?;
let empty_props = serde_json::Map::new();
let properties = block_obj
.get("properties")
.and_then(|p| p.as_object())
.unwrap_or(&empty_props);
let empty_state = serde_json::Map::new();
let default_state = block_obj
.get("default_state")
.and_then(|d| d.as_object())
.unwrap_or(&empty_state);
let safe_name = block_id.replace(":", "_").replace("-", "_").to_uppercase();
let transparent = block_obj
.get("transparent")
.and_then(|t| t.as_bool())
.unwrap_or(false);
writeln!(
file,
"static {}: crate::BlockFacts = crate::BlockFacts {{",
safe_name
)?;
writeln!(file, " id: \"{}\",", block_id)?;
writeln!(file, " transparent: {},", transparent)?;
writeln!(file, " properties: &[")?;
for (prop_name, prop_values) in properties {
if let Some(values_array) = prop_values.as_array() {
write!(file, " (\"{}\", &[", prop_name)?;
for (i, value) in values_array.iter().enumerate() {
if i > 0 {
write!(file, ", ")?;
}
write!(file, "\"{}\"", value.as_str().unwrap_or(""))?;
}
writeln!(file, "]),")?;
}
}
writeln!(file, " ],")?;
writeln!(file, " default_state: &[")?;
for (state_name, state_value) in default_state {
writeln!(
file,
" (\"{}\", \"{}\"),",
state_name,
state_value.as_str().unwrap_or("")
)?;
}
writeln!(file, " ],")?;
write!(file, " extras: crate::Extras {{")?;
if let Some(mock_val) = extra_data.mock_data.get(block_id) {
write!(file, " mock_data: Some({}),", mock_val)?;
} else {
write!(file, " mock_data: None,")?;
}
if let Some((r, g, b, l, a, b_val)) = extra_data.color_data.get(block_id) {
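// If a component lands within 0.001 of 1/π, nudge it by 0.001 before
// formatting, presumably so the emitted 3-decimal literal is not flagged as
// an approximated constant by lints on the generated code.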
let adjusted_l = if (*l - std::f32::consts::FRAC_1_PI).abs() < 0.001 {
*l + 0.001
} else {
*l
};
let adjusted_a = if (*a - std::f32::consts::FRAC_1_PI).abs() < 0.001 {
*a + 0.001
} else {
*a
};
let adjusted_b = if (*b_val - std::f32::consts::FRAC_1_PI).abs() < 0.001 {
*b_val + 0.001
} else {
*b_val
};
write!(file, " color: Some(crate::ColorData {{ rgb: [{}, {}, {}], oklab: [{:.3}, {:.3}, {:.3}] }}),", r, g, b, adjusted_l, adjusted_a, adjusted_b)?;
} else {
write!(file, " color: None,")?;
}
writeln!(file, " bedrock: None,")?;
writeln!(file, " }},")?;
writeln!(file, "}};")?;
writeln!(file)?;
}
writeln!(
file,
"pub static BLOCKS: Map<&'static str, &'static crate::BlockFacts> = phf_map! {{"
)?;
for (block_id, _) in &block_data {
let safe_name = block_id.replace(":", "_").replace("-", "_").to_uppercase();
writeln!(file, " \"{}\" => &{},", block_id, safe_name)?;
}
writeln!(file, "}};")?;
writeln!(file)?;
fetcher_registry.generate_query_helpers(&mut file)?;
println!(
"cargo:warning=Generated PHF table with {} blocks",
block_data.len()
);
Ok(())
}
fn generate_legacy_phf_table(out_dir: &str, json: &Value) -> Result<()> {
let mut fetcher_registry = setup_fetchers();
let available_block_ids = get_block_ids_from_json(json)?;
let extra_data = fetcher_registry.fetch_all(&available_block_ids)?.clone();
generate_phf_table(out_dir, json, &extra_data, &fetcher_registry)
}
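/// Writes the PHF block table from `UnifiedBlockData`, including Bedrock
/// extras when present, then generates the Geyser-based state mappings.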
fn generate_unified_phf_table(out_dir: &str, unified_blocks: &[UnifiedBlockData]) -> Result<()> {
let table_path = Path::new(out_dir).join("block_table.rs");
let mut file = std::fs::File::create(&table_path).context("Failed to create block_table.rs")?;
let mut fetcher_registry = setup_fetchers();
let available_block_ids: Vec<String> = unified_blocks.iter().map(|b| b.id.clone()).collect();
let extra_data = fetcher_registry.fetch_all(&available_block_ids)?.clone();
writeln!(file, "// Auto-generated PHF table from unified block data")?;
writeln!(file, "use phf::{{phf_map, Map}};")?;
writeln!(file)?;
for block_data in unified_blocks {
let block_id = &block_data.id;
let safe_name = block_id
.replace(":", "_")
.replace("-", "_")
.replace("'", "")
.replace("!", "")
.replace(".", "_")
.to_uppercase();
writeln!(
file,
"static {}: crate::BlockFacts = crate::BlockFacts {{",
safe_name
)?;
writeln!(file, " id: \"{}\",", block_id)?;
writeln!(file, " transparent: {},", block_data.transparent)?;
writeln!(file, " properties: &[")?;
for (prop_name, prop_values) in &block_data.properties {
write!(file, " (\"{}\", &[", prop_name)?;
for (i, value) in prop_values.iter().enumerate() {
if i > 0 {
write!(file, ", ")?;
}
write!(file, "\"{}\"", value)?;
}
writeln!(file, "]),")?;
}
writeln!(file, " ],")?;
writeln!(file, " default_state: &[")?;
for (state_name, state_value) in &block_data.default_state {
writeln!(file, " (\"{}\", \"{}\"),", state_name, state_value)?;
}
writeln!(file, " ],")?;
write!(file, " extras: crate::Extras {{")?;
if let Some(mock_val) = extra_data.mock_data.get(block_id) {
write!(file, " mock_data: Some({}),", mock_val)?;
} else {
write!(file, " mock_data: None,")?;
}
if let Some((r, g, b, l, a, b_val)) = extra_data.color_data.get(block_id) {
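// Same nudge as in the legacy generator: shift components that sit within
// 0.001 of 1/π so the emitted literal does not read as an approximated
// constant.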
let adjusted_l = if (*l - std::f32::consts::FRAC_1_PI).abs() < 0.001 {
*l + 0.001
} else {
*l
};
let adjusted_a = if (*a - std::f32::consts::FRAC_1_PI).abs() < 0.001 {
*a + 0.001
} else {
*a
};
let adjusted_b = if (*b_val - std::f32::consts::FRAC_1_PI).abs() < 0.001 {
*b_val + 0.001
} else {
*b_val
};
write!(file, " color: Some(crate::ColorData {{ rgb: [{}, {}, {}], oklab: [{:.3}, {:.3}, {:.3}] }}),", r, g, b, adjusted_l, adjusted_a, adjusted_b)?;
} else {
write!(file, " color: None,")?;
}
if let Some(ref bedrock_id) = block_data.bedrock_id {
writeln!(file, " bedrock: Some(crate::BedrockData {{")?;
writeln!(file, " id: \"{}\",", bedrock_id)?;
writeln!(file, " properties: &[")?;
if let Some(ref props) = block_data.bedrock_properties {
for (prop_name, prop_values) in props {
write!(file, " (\"{}\", &[", prop_name)?;
for (i, value) in prop_values.iter().enumerate() {
if i > 0 {
write!(file, ", ")?;
}
write!(file, "\"{}\"", value)?;
}
writeln!(file, "]),")?;
}
}
writeln!(file, " ],")?;
writeln!(file, " default_state: &[")?;
if let Some(ref def_state) = block_data.bedrock_default_state {
for (prop_name, prop_value) in def_state {
writeln!(file, " (\"{}\", \"{}\"),", prop_name, prop_value)?;
}
}
writeln!(file, " ],")?;
write!(file, " }}),")?;
} else {
write!(file, " bedrock: None,")?;
}
writeln!(file, " }},")?;
writeln!(file, "}};")?;
writeln!(file)?;
}
writeln!(
file,
"pub static BLOCKS: Map<&'static str, &'static crate::BlockFacts> = phf_map! {{"
)?;
for block_data in unified_blocks {
let block_id = &block_data.id;
let safe_name = block_id
.replace(":", "_")
.replace("-", "_")
.replace("'", "")
.replace("!", "")
.replace(".", "_")
.to_uppercase();
writeln!(file, " \"{}\" => &{},", block_id, safe_name)?;
}
writeln!(file, "}};")?;
writeln!(file)?;
fetcher_registry.generate_query_helpers(&mut file)?;
println!(
"cargo:warning=Generated unified PHF table with {} blocks",
unified_blocks.len()
);
generate_bedrock_mappings(out_dir)?;
Ok(())
}
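/// Builds Java-to-Bedrock and Bedrock-to-Java blockstate string maps from a
/// local Geyser mappings file under ./data/, emitting empty maps when the
/// file is missing or malformed.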
fn generate_bedrock_mappings(out_dir: &str) -> Result<()> {
let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
let data_dir = Path::new(&manifest_dir).join("data");
let mappings_path = Path::new(out_dir).join("bedrock_mappings.rs");
let mut file =
std::fs::File::create(&mappings_path).context("Failed to create bedrock_mappings.rs")?;
writeln!(file, "// Auto-generated bedrock blockstate mappings")?;
writeln!(file)?;
let geyser_path = data_dir.join("geyser_mappings.json");
if geyser_path.exists() {
let geyser_data =
fs::read_to_string(&geyser_path).context("Failed to read geyser_mappings.json")?;
let parsed: Option<Value> = match serde_json::from_str(&geyser_data) {
Ok(p) => Some(p),
Err(e) => {
println!("cargo:warning=Failed to parse geyser_mappings.json: {}", e);
None
}
};
if let Some(parsed) = parsed {
let mappings = parsed
.get("mappings")
.and_then(|m| m.as_array());
if let Some(mappings) = mappings {
writeln!(
file,
"pub static BEDROCK_J2B_MAP: phf::Map<&'static str, &'static str> = phf_map! {{"
)?;
let mut b2j_map: HashMap<String, String> = HashMap::new();
for mapping in mappings {
let java_state_obj = mapping.get("java_state").and_then(|s| s.as_object());
let bedrock_state_obj = mapping.get("bedrock_state").and_then(|s| s.as_object());
if let (Some(java), Some(bedrock)) = (java_state_obj, bedrock_state_obj) {
let java_name = java.get("Name").and_then(|n| n.as_str()).unwrap_or("");
let java_props = java.get("Properties").and_then(|p| p.as_object());
let java_state_str = if let Some(props) = java_props {
let mut props_vec: Vec<String> = props.iter()
.map(|(k, v)| format!("{}={}", k, v.as_str().unwrap_or("")))
.collect();
props_vec.sort();
format!("{}[{}]", java_name, props_vec.join(","))
} else {
format!("{}[]", java_name)
};
let bedrock_id_raw = bedrock.get("bedrock_identifier").and_then(|n| n.as_str()).unwrap_or("");
let bedrock_id = if !bedrock_id_raw.contains(':') {
format!("minecraft:{}", bedrock_id_raw)
} else {
bedrock_id_raw.to_string()
};
let bedrock_props = bedrock.get("state").and_then(|s| s.as_object());
let bedrock_state_str = if let Some(props) = bedrock_props {
let mut props_vec: Vec<String> = props.iter()
.map(|(k, v)| {
let val_str = match v {
Value::Bool(b) => b.to_string(),
Value::Number(n) => n.to_string(),
Value::String(s) => s.clone(),
_ => v.to_string(),
};
format!("{}={}", k, val_str)
})
.collect();
props_vec.sort();
format!("{}[{}]", bedrock_id, props_vec.join(","))
} else {
format!("{}[]", bedrock_id)
};
writeln!(file, " r#\"{}\"# => r#\"{}\"#,", java_state_str, bedrock_state_str)?;
b2j_map.entry(bedrock_state_str).or_insert(java_state_str);
}
}
writeln!(file, "}};")?;
writeln!(file)?;
println!(
"cargo:warning=Generated {} Java->Bedrock mappings from Geyser",
mappings.len()
);
writeln!(
file,
"pub static BEDROCK_B2J_MAP: phf::Map<&'static str, &'static str> = phf_map! {{"
)?;
for (bedrock_state, java_state) in &b2j_map {
writeln!(file, " r#\"{}\"# => r#\"{}\"#,", bedrock_state, java_state)?;
}
writeln!(file, "}};")?;
println!(
"cargo:warning=Generated {} Bedrock->Java mappings from Geyser",
b2j_map.len()
);
} else {
println!("cargo:warning=Invalid Geyser mappings format (missing 'mappings' array)");
generate_empty_mappings(&mut file)?;
}
} else {
generate_empty_mappings(&mut file)?;
}
} else {
writeln!(
file,
"pub static BEDROCK_J2B_MAP: phf::Map<&'static str, &'static str> = phf_map! {{}};"
)?;
writeln!(
file,
"pub static BEDROCK_B2J_MAP: phf::Map<&'static str, &'static str> = phf_map! {{}};"
)?;
println!(
"cargo:warning=geyser_mappings.json not found at {:?}, using empty mapping",
geyser_path
);
}
Ok(())
}
fn generate_empty_mappings(file: &mut std::fs::File) -> Result<()> {
writeln!(
file,
"pub static BEDROCK_J2B_MAP: phf::Map<&'static str, &'static str> = phf_map! {{}};"
)?;
writeln!(
file,
"pub static BEDROCK_B2J_MAP: phf::Map<&'static str, &'static str> = phf_map! {{}};"
)?;
Ok(())
}