use proc_macro::TokenStream;
use std::collections::HashMap;
use std::io::Read;
use std::sync::OnceLock;
use quote::quote;
use serde_derive::{Deserialize, Serialize};
use syn::parse::{Parse, ParseStream};
use syn::{LitInt, LitStr, Token, parse_macro_input};
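/// A single resolved member: its byte offset, size, and (for bitfields)
/// the bit offset within the byte.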
#[derive(Deserialize, Serialize, Clone)]
struct OffsetEntry {
offset: i64,
size: i64,
is_bit: bool,
bit_offset: i32,
}
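/// Everything parsed from one game's dump, persisted to `.dsapi/<hash>.json`
/// so later builds can skip the download when the dump is unchanged.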
#[derive(Deserialize, Serialize)]
struct CachedData {
game_hash: String,
uploaded: u64,
class_member_map: HashMap<String, OffsetEntry>,
class_size_map: HashMap<String, i32>,
offset_map: HashMap<String, u64>,
enum_name_map: HashMap<String, String>,
}
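/// Mirror of the upstream `GameList.json` index: one entry per dumped game.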
#[derive(Deserialize)]
struct GameList {
games: Vec<Game>,
}
#[derive(Deserialize)]
struct Game {
hash: String,
engine: String,
location: String,
uploaded: u64,
}
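/// Wrapper shared by the `ClassesInfo`/`StructsInfo`/`EnumsInfo` blobs.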
#[derive(Deserialize)]
struct BlobInfo {
data: Vec<HashMap<String, serde_json::Value>>,
#[allow(dead_code)]
updated_at: String,
version: u64,
}
#[derive(Deserialize)]
struct OffsetBlob {
data: Vec<Vec<serde_json::Value>>,
}
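// Proc macros run inside the compiler process, so these statics live for one
// compilation: setup! stores the hash, and the first lookup lazily loads
// (or downloads) the data via get_data().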
static GAME_HASH: OnceLock<String> = OnceLock::new();
static DATA: OnceLock<CachedData> = OnceLock::new();
fn get_data() -> &'static CachedData {
DATA.get_or_init(|| {
let game_hash = GAME_HASH.get().expect(
"dumpspace: call setup!(\"game_hash\") at the top of your crate before using offset macros"
);
load_or_download(game_hash)
})
}
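/// Cache file location: `<CARGO_MANIFEST_DIR>/.dsapi/<game_hash>.json`.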
fn cache_path(game_hash: &str) -> std::path::PathBuf {
let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
std::path::Path::new(&manifest_dir)
.join(".dsapi")
.join(format!("{}.json", game_hash))
}
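/// Fetches the upstream game index; panics (failing the build) on any
/// network or parse error.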
fn fetch_game_list() -> GameList {
reqwest::blocking::get("https://raw.githubusercontent.com/Spuckwaffel/dumpspace/refs/heads/main/Games/GameList.json")
.expect("Failed to fetch dumpspace game list")
.json()
.expect("Failed to parse game list JSON")
}
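/// Returns cached data when it is at least as new as the upstream
/// `uploaded` timestamp; otherwise downloads and rewrites the cache.
/// Note the game list itself is always fetched for the staleness check.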
fn load_or_download(game_hash: &str) -> CachedData {
let path = cache_path(game_hash);
let game_list = fetch_game_list();
let game = game_list
.games
.iter()
.find(|g| g.hash == game_hash)
.unwrap_or_else(|| panic!("Game hash '{}' not found in dumpspace game list", game_hash));
if let Some(cached) = try_load_cache(&path, game_hash) {
if cached.uploaded >= game.uploaded {
return cached;
}
}
let data = download(game);
if let Some(parent) = path.parent() {
let _ = std::fs::create_dir_all(parent);
}
if let Ok(json) = serde_json::to_string(&data) {
let _ = std::fs::write(&path, json);
}
data
}
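/// Loads the cache file, rejecting unreadable or mismatched-hash contents.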
fn try_load_cache(path: &std::path::Path, game_hash: &str) -> Option<CachedData> {
let contents = std::fs::read_to_string(path).ok()?;
let data: CachedData = serde_json::from_str(&contents).ok()?;
if data.game_hash == game_hash {
Some(data)
} else {
None
}
}
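/// Downloads a gzip-compressed JSON blob and decompresses it to a string,
/// streaming straight from the response body.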
fn download_gz(url: &str) -> String {
let response =
reqwest::blocking::get(url).unwrap_or_else(|e| panic!("Failed to fetch {}: {}", url, e));
if !response.status().is_success() {
panic!(
"Request to {} failed with status {}",
url,
response.status()
);
}
let mut decoder = flate2::read::GzDecoder::new(response);
let mut s = String::new();
decoder
.read_to_string(&mut s)
.unwrap_or_else(|e| panic!("Failed to decompress {}: {}", url, e));
s
}
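/// Parses a `ClassesInfo`/`StructsInfo` blob into `class_member_map` and
/// `class_size_map`. Each member is a single-key map whose value starts
/// with `[type, offset, size, ...]`; bitfields carry the bit offset in the
/// last slot (index 3 in v10201, where the name is also suffixed with
/// `_BIT`, and index 4 in v10202). This layout is inferred from the parser
/// and its tests rather than from a published format spec.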
fn parse_class_info(blob: &BlobInfo, data: &mut CachedData) {
for class in &blob.data {
for (class_name, value) in class {
let members: Vec<HashMap<String, serde_json::Value>> =
                serde_json::from_value(value.clone())
                    .unwrap_or_else(|e| panic!("dumpspace: malformed member list for '{}': {}", class_name, e));
for member in members {
                assert_eq!(member.len(), 1, "each member entry should be a single-key map");
                let key = member.keys().next().unwrap().clone();
if key == "__MDKClassSize" {
data.class_size_map.insert(
class_name.clone(),
member.get("__MDKClassSize").unwrap().as_i64().unwrap() as i32,
);
continue;
}
if key == "__InheritInfo" {
continue;
}
let arr = member.get(&key).unwrap().as_array().unwrap();
let offset = arr[1].as_i64().unwrap();
let size = arr[2].as_i64().unwrap();
let is_bit = if blob.version == 10201 {
arr.len() == 4
} else if blob.version == 10202 {
arr.len() == 5
} else {
panic!("Unknown blob version: {}", blob.version);
};
                let (bit_offset, member_key) = if is_bit {
                    if blob.version == 10201 {
                        // v10201 suffixes bitfield names with "_BIT"; strip
                        // the suffix so lookups use the plain member name.
                        (
                            arr[3].as_i64().unwrap() as i32,
                            format!("{}{}", class_name, &key[..key.len() - 4]),
                        )
                    } else {
                        // v10202 keeps the plain name and stores the bit
                        // offset in the fifth slot.
                        (
                            arr[4].as_i64().unwrap() as i32,
                            format!("{}{}", class_name, key),
                        )
                    }
                } else {
                    (0, format!("{}{}", class_name, key))
                };
data.class_member_map.insert(
member_key,
OffsetEntry {
offset,
size,
is_bit,
bit_offset,
},
);
}
}
}
}
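/// Downloads and parses the four gzipped blobs (`ClassesInfo`,
/// `StructsInfo`, `EnumsInfo`, `OffsetsInfo`) for one game.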
fn download(game: &Game) -> CachedData {
let engine = &game.engine;
let location = &game.location;
let mut data = CachedData {
game_hash: game.hash.clone(),
uploaded: game.uploaded,
class_member_map: HashMap::new(),
class_size_map: HashMap::new(),
offset_map: HashMap::new(),
enum_name_map: HashMap::new(),
};
let format_url = |json_type: &str| -> String {
format!(
"https://raw.githubusercontent.com/Spuckwaffel/dumpspace/refs/heads/main/Games/{}/{}/{}.json.gz",
engine, location, json_type
)
};
let json = download_gz(&format_url("ClassesInfo"));
let blob: BlobInfo = serde_json::from_str(&json).expect("Failed to parse ClassesInfo");
parse_class_info(&blob, &mut data);
let json = download_gz(&format_url("StructsInfo"));
let blob: BlobInfo = serde_json::from_str(&json).expect("Failed to parse StructsInfo");
parse_class_info(&blob, &mut data);
let json = download_gz(&format_url("EnumsInfo"));
let blob: BlobInfo = serde_json::from_str(&json).expect("Failed to parse EnumsInfo");
for enum_info in &blob.data {
for (enum_name, value) in enum_info {
            // EnumsInfo nests the variants one level deep: each value is an
            // array whose first element is the list of { name: value } pairs.
            let entries = &value.as_array().unwrap()[0];
            for entry in entries.as_array().unwrap() {
                let obj = entry.as_object().unwrap();
                let name = obj.keys().next().unwrap().clone();
                let val = obj.get(&name).unwrap().as_i64().unwrap();
data.enum_name_map
.insert(format!("{}{}", enum_name, val), name);
}
}
}
let json = download_gz(&format_url("OffsetsInfo"));
let blob: OffsetBlob = serde_json::from_str(&json).expect("Failed to parse OffsetsInfo");
    // OffsetsInfo rows are [name, offset] pairs.
    for entry in &blob.data {
data.offset_map.insert(
entry[0].as_str().unwrap().to_string(),
entry[1].as_u64().unwrap(),
);
}
data
}
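// syn argument parsers for the macros below: two string literals, a single
// string literal, and a string literal followed by an integer literal.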
struct TwoStrings {
first: LitStr,
second: LitStr,
}
impl Parse for TwoStrings {
fn parse(input: ParseStream) -> syn::Result<Self> {
let first = input.parse()?;
input.parse::<Token![,]>()?;
let second = input.parse()?;
Ok(Self { first, second })
}
}
struct OneString {
value: LitStr,
}
impl Parse for OneString {
fn parse(input: ParseStream) -> syn::Result<Self> {
Ok(Self {
value: input.parse()?,
})
}
}
struct EnumArgs {
name: LitStr,
value: LitInt,
}
impl Parse for EnumArgs {
fn parse(input: ParseStream) -> syn::Result<Self> {
let name = input.parse()?;
input.parse::<Token![,]>()?;
let value = input.parse()?;
Ok(Self { name, value })
}
}
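/// Registers the game hash and eagerly loads the offset data, so a bad
/// hash fails the build at the setup! call site. Expands to nothing.
///
/// A hypothetical invocation (the hash is a placeholder, not a real game):
/// ```ignore
/// setup!("c0ffee00");
/// ```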
#[proc_macro]
pub fn setup(input: TokenStream) -> TokenStream {
let hash = parse_macro_input!(input as LitStr);
    let value = hash.value();
    // A repeated setup! with the same hash is a no-op; a conflicting hash
    // would silently keep the first game's data, so fail loudly instead.
    if GAME_HASH.set(value.clone()).is_err() && GAME_HASH.get() != Some(&value) {
        panic!("dumpspace: setup! called twice with different game hashes");
    }
    // Resolve the data eagerly so a bad hash fails at the setup! call site.
    let _ = get_data();
quote! {}.into()
}
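/// Expands to the `usize` byte offset of `member` within `class`, or
/// panics at expansion time if the pair is not in the dump.
///
/// Hypothetical usage (class and member names depend on the dumped game):
/// ```ignore
/// let health_offset = offset!("Player", "Health");
/// ```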
#[proc_macro]
pub fn offset(input: TokenStream) -> TokenStream {
let TwoStrings { first, second } = parse_macro_input!(input as TwoStrings);
let class = first.value();
let member = second.value();
let data = get_data();
let key = format!("{}{}", class, member);
let entry = data
.class_member_map
.get(&key)
.unwrap_or_else(|| panic!("dumpspace: offset \"{}::{}\" not found", class, member));
let val = entry.offset as usize;
quote! { #val }.into()
}
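/// Expands to the total size of `class` in bytes as a `usize`.
///
/// Hypothetical usage:
/// ```ignore
/// let player_size = class_size!("Player");
/// ```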
#[proc_macro]
pub fn class_size(input: TokenStream) -> TokenStream {
let OneString { value } = parse_macro_input!(input as OneString);
let class = value.value();
let data = get_data();
let size = data
.class_size_map
.get(&class)
.unwrap_or_else(|| panic!("dumpspace: class size for \"{}\" not found", class));
let val = *size as usize;
quote! { #val }.into()
}
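/// Expands to a named global offset from `OffsetsInfo` as a `usize`.
///
/// Hypothetical usage (the name depends on the dump; `GWorld` is only an
/// illustration):
/// ```ignore
/// let gworld = global_offset!("GWorld");
/// ```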
#[proc_macro]
pub fn global_offset(input: TokenStream) -> TokenStream {
let OneString { value } = parse_macro_input!(input as OneString);
let name = value.value();
let data = get_data();
let off = data
.offset_map
.get(&name)
.unwrap_or_else(|| panic!("dumpspace: global offset \"{}\" not found", name));
let val = *off as usize;
quote! { #val }.into()
}
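/// Expands to the variant name (a `&'static str` literal) for the given
/// enum and numeric value, resolved at expansion time.
///
/// Hypothetical usage (the enum name is an illustration):
/// ```ignore
/// let mode = enum_name!("EMovementMode", 1);
/// ```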
#[proc_macro]
pub fn enum_name(input: TokenStream) -> TokenStream {
let EnumArgs { name, value } = parse_macro_input!(input as EnumArgs);
let enum_name = name.value();
    let enum_val: i64 = value
        .base10_parse()
        .expect("dumpspace: enum_name! value must be a base-10 integer");
let data = get_data();
let key = format!("{}{}", enum_name, enum_val);
let result = data.enum_name_map.get(&key).unwrap_or_else(|| {
panic!(
"dumpspace: enum value \"{}::{}\" not found",
enum_name, enum_val
)
});
quote! { #result }.into()
}
#[cfg(test)]
mod tests {
use super::*;
use serde_json::json;
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};
fn empty_cached_data(game_hash: &str) -> CachedData {
CachedData {
game_hash: game_hash.to_string(),
uploaded: 1,
class_member_map: HashMap::new(),
class_size_map: HashMap::new(),
offset_map: HashMap::new(),
enum_name_map: HashMap::new(),
}
}
fn unique_temp_path(name: &str) -> PathBuf {
let nanos = SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("system clock drifted before UNIX_EPOCH")
.as_nanos();
std::env::temp_dir().join(format!(
"dumpspace-macros-test-{}-{}-{}.json",
std::process::id(),
name,
nanos
))
}
#[test]
fn parse_class_info_v10201_handles_class_size_and_bitfields() {
let blob: BlobInfo = serde_json::from_value(json!({
"data": [
{
"Player": [
{ "Health": ["float", 16, 4] },
{ "bIsAlive_BIT": ["bool", 20, 1, 3] },
{ "__MDKClassSize": 64 },
{ "__InheritInfo": {} }
]
}
],
"updated_at": "now",
"version": 10201
}))
.expect("valid blob JSON");
let mut data = empty_cached_data("test-hash");
parse_class_info(&blob, &mut data);
let normal = data
.class_member_map
.get("PlayerHealth")
.expect("normal member should be parsed");
assert_eq!(normal.offset, 16);
assert_eq!(normal.size, 4);
assert!(!normal.is_bit);
let bitfield = data
.class_member_map
.get("PlayerbIsAlive")
.expect("v10201 bitfield key should trim _BIT");
assert_eq!(bitfield.offset, 20);
assert_eq!(bitfield.size, 1);
assert!(bitfield.is_bit);
assert_eq!(bitfield.bit_offset, 3);
assert_eq!(
*data
.class_size_map
.get("Player")
.expect("class size should be captured"),
64
);
}
#[test]
fn parse_class_info_v10202_uses_full_bitfield_name() {
let blob: BlobInfo = serde_json::from_value(json!({
"data": [
{
"Actor": [
{ "bHidden": ["bool", 40, 1, "unused", 7] }
]
}
],
"updated_at": "now",
"version": 10202
}))
.expect("valid blob JSON");
let mut data = empty_cached_data("test-hash");
parse_class_info(&blob, &mut data);
let entry = data
.class_member_map
.get("ActorbHidden")
.expect("v10202 should keep full key");
assert_eq!(entry.offset, 40);
assert_eq!(entry.size, 1);
assert!(entry.is_bit);
assert_eq!(entry.bit_offset, 7);
}
#[test]
fn parse_class_info_panics_for_unknown_blob_version() {
let blob: BlobInfo = serde_json::from_value(json!({
"data": [
{
"Actor": [
{ "Value": ["int", 8, 4] }
]
}
],
"updated_at": "now",
"version": 99999
}))
.expect("valid blob JSON");
let mut data = empty_cached_data("test-hash");
let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
parse_class_info(&blob, &mut data);
}));
assert!(result.is_err(), "unknown blob versions must panic");
}
#[test]
fn try_load_cache_validates_game_hash_and_invalid_json() {
let path = unique_temp_path("cache");
let valid = serde_json::to_string(&empty_cached_data("abc123")).expect("serialize cache");
std::fs::write(&path, valid).expect("write cache file");
assert!(
try_load_cache(&path, "abc123").is_some(),
"matching cache hash should load"
);
assert!(
try_load_cache(&path, "different").is_none(),
"mismatched cache hash should be ignored"
);
std::fs::write(&path, "{ not json ").expect("write invalid JSON");
assert!(
try_load_cache(&path, "abc123").is_none(),
"invalid cache JSON should be ignored"
);
let _ = std::fs::remove_file(path);
}
#[test]
fn cache_path_appends_dsapi_directory_and_hash_file() {
let path = cache_path("deadbeef");
let suffix = std::path::Path::new(".dsapi").join("deadbeef.json");
assert!(
path.ends_with(&suffix),
"cache path should end with {} but was {}",
suffix.display(),
path.display()
);
}
}