1use proc_macro::TokenStream;
2use std::collections::HashMap;
3use std::io::Read;
4use std::sync::OnceLock;
5
6use quote::quote;
7use serde_derive::{Deserialize, Serialize};
8use syn::parse::{Parse, ParseStream};
9use syn::{LitInt, LitStr, Token, parse_macro_input};
10
/// One resolved class-member entry, persisted in the on-disk cache.
#[derive(Deserialize, Serialize, Clone)]
struct OffsetEntry {
    /// Member offset within its class (second element of the dump's member array).
    offset: i64,
    /// Member size (third element of the dump's member array).
    size: i64,
    /// True when the member is a bitfield bit (detected via array length per blob version).
    is_bit: bool,
    /// Bit position for bitfield members; 0 for ordinary members.
    bit_offset: i32,
}
20
/// Everything downloaded for one game, serialized to `.dsapi/<hash>.json`
/// so later builds can skip the network when the dump is unchanged.
#[derive(Deserialize, Serialize)]
struct CachedData {
    /// Hash of the game this cache belongs to; checked on load.
    game_hash: String,
    /// Upload timestamp from the game list; used to detect stale caches.
    uploaded: u64,
    /// Keyed by `{class_name}{member_name}` concatenation.
    class_member_map: HashMap<String, OffsetEntry>,
    /// Keyed by class name; value is the `__MDKClassSize` pseudo-member.
    class_size_map: HashMap<String, i32>,
    /// Global offsets from OffsetsInfo, keyed by offset name.
    offset_map: HashMap<String, u64>,
    /// Keyed by `{enum_name}{value}` concatenation; value is the variant name.
    enum_name_map: HashMap<String, String>,
}
30
/// Top-level shape of `Games/GameList.json` from the dumpspace site.
#[derive(Deserialize)]
struct GameList {
    games: Vec<Game>,
}
35
/// One entry of the dumpspace game list.
#[derive(Deserialize)]
struct Game {
    /// Identifier the user passes to `setup!`.
    hash: String,
    /// Path segment in the download URL.
    engine: String,
    /// Path segment in the download URL.
    location: String,
    /// Upload timestamp; compared against the cached copy for staleness.
    uploaded: u64,
}
43
/// Shared wire format of ClassesInfo/StructsInfo/EnumsInfo blobs: each
/// `data` element maps a single class/struct/enum name to its payload.
#[derive(Deserialize)]
struct BlobInfo {
    data: Vec<HashMap<String, serde_json::Value>>,
    #[allow(dead_code)]
    updated_at: String,
    /// Format version; 10201 and 10202 are the ones this crate understands.
    version: u64,
}
51
/// Wire format of OffsetsInfo: each inner vec is a `[name, value]` pair.
#[derive(Deserialize)]
struct OffsetBlob {
    data: Vec<Vec<serde_json::Value>>,
}
56
/// Game hash registered by the `setup!` macro; read lazily by `get_data`.
static GAME_HASH: OnceLock<String> = OnceLock::new();
/// Offset data for the registered game, loaded at most once per process.
static DATA: OnceLock<CachedData> = OnceLock::new();
61
62fn get_data() -> &'static CachedData {
63 DATA.get_or_init(|| {
64 let game_hash = GAME_HASH.get().expect(
65 "dumpspace: call setup!(\"game_hash\") at the top of your crate before using offset macros"
66 );
67 load_or_download(game_hash)
68 })
69}
70
/// Builds the cache file path: `<CARGO_MANIFEST_DIR>/.dsapi/<hash>.json`,
/// falling back to the current directory when the env var is unset.
fn cache_path(game_hash: &str) -> std::path::PathBuf {
    let root = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
    let mut path = std::path::PathBuf::from(root);
    path.push(".dsapi");
    path.push(format!("{}.json", game_hash));
    path
}
77
78fn fetch_game_list() -> GameList {
79 reqwest::blocking::get("https://dumpspace.spuckwaffel.com/Games/GameList.json")
80 .expect("Failed to fetch dumpspace game list")
81 .json()
82 .expect("Failed to parse game list JSON")
83}
84
85fn load_or_download(game_hash: &str) -> CachedData {
86 let path = cache_path(game_hash);
87
88 let game_list = fetch_game_list();
89 let game = game_list
90 .games
91 .iter()
92 .find(|g| g.hash == game_hash)
93 .unwrap_or_else(|| panic!("Game hash '{}' not found in dumpspace game list", game_hash));
94
95 if let Some(cached) = try_load_cache(&path, game_hash) {
97 if cached.uploaded >= game.uploaded {
98 return cached;
99 }
100 }
101
102 let data = download(game);
103
104 if let Some(parent) = path.parent() {
105 let _ = std::fs::create_dir_all(parent);
106 }
107 if let Ok(json) = serde_json::to_string(&data) {
108 let _ = std::fs::write(&path, json);
109 }
110
111 data
112}
113
114fn try_load_cache(path: &std::path::Path, game_hash: &str) -> Option<CachedData> {
115 let contents = std::fs::read_to_string(path).ok()?;
116 let data: CachedData = serde_json::from_str(&contents).ok()?;
117 if data.game_hash == game_hash {
118 Some(data)
119 } else {
120 None
121 }
122}
123
124fn download_gz(url: &str) -> String {
127 let response =
128 reqwest::blocking::get(url).unwrap_or_else(|e| panic!("Failed to fetch {}: {}", url, e));
129 if !response.status().is_success() {
130 panic!(
131 "Request to {} failed with status {}",
132 url,
133 response.status()
134 );
135 }
136 let mut decoder = flate2::read::GzDecoder::new(response);
137 let mut s = String::new();
138 decoder
139 .read_to_string(&mut s)
140 .unwrap_or_else(|e| panic!("Failed to decompress {}: {}", url, e));
141 s
142}
143
144fn parse_class_info(blob: &BlobInfo, data: &mut CachedData) {
145 for class in &blob.data {
146 for (class_name, value) in class {
147 let members: Vec<HashMap<String, serde_json::Value>> =
148 serde_json::from_value(value.clone()).unwrap();
149
150 for member in members {
151 let key = member.keys().next().unwrap().clone();
152 assert!(member.keys().len() == 1);
153
154 if key == "__MDKClassSize" {
155 data.class_size_map.insert(
156 class_name.clone(),
157 member.get("__MDKClassSize").unwrap().as_i64().unwrap() as i32,
158 );
159 continue;
160 }
161 if key == "__InheritInfo" {
162 continue;
163 }
164
165 let arr = member.get(&key).unwrap().as_array().unwrap();
166 let offset = arr[1].as_i64().unwrap();
167 let size = arr[2].as_i64().unwrap();
168
169 let is_bit = if blob.version == 10201 {
170 arr.len() == 4
171 } else if blob.version == 10202 {
172 arr.len() == 5
173 } else {
174 panic!("Unknown blob version: {}", blob.version);
175 };
176
177 let (bit_offset, member_key) = if is_bit {
178 if blob.version == 10201 {
179 (
180 arr[3].as_i64().unwrap() as i32,
181 format!("{}{}", class_name, &key[..key.len() - 4]),
182 )
183 } else {
184 (
185 arr[4].as_i64().unwrap() as i32,
186 format!("{}{}", class_name, key),
187 )
188 }
189 } else {
190 (0, format!("{}{}", class_name, key))
191 };
192
193 data.class_member_map.insert(
194 member_key,
195 OffsetEntry {
196 offset,
197 size,
198 is_bit,
199 bit_offset,
200 },
201 );
202 }
203 }
204 }
205}
206
207fn download(game: &Game) -> CachedData {
208 let engine = &game.engine;
209 let location = &game.location;
210
211 let mut data = CachedData {
212 game_hash: game.hash.clone(),
213 uploaded: game.uploaded,
214 class_member_map: HashMap::new(),
215 class_size_map: HashMap::new(),
216 offset_map: HashMap::new(),
217 enum_name_map: HashMap::new(),
218 };
219
220 let format_url = |json_type: &str| -> String {
221 format!(
222 "https://dumpspace.spuckwaffel.com/Games/{}/{}/{}.json.gz",
223 engine, location, json_type
224 )
225 };
226
227 let json = download_gz(&format_url("ClassesInfo"));
229 let blob: BlobInfo = serde_json::from_str(&json).expect("Failed to parse ClassesInfo");
230 parse_class_info(&blob, &mut data);
231
232 let json = download_gz(&format_url("StructsInfo"));
234 let blob: BlobInfo = serde_json::from_str(&json).expect("Failed to parse StructsInfo");
235 parse_class_info(&blob, &mut data);
236
237 let json = download_gz(&format_url("EnumsInfo"));
239 let blob: BlobInfo = serde_json::from_str(&json).expect("Failed to parse EnumsInfo");
240 for enum_info in &blob.data {
241 for (enum_name, value) in enum_info {
242 let entries = &value.as_array().unwrap()[0];
243 for entry in entries.as_array().unwrap() {
244 let obj = entry.as_object().unwrap();
245 let name = obj.keys().next().unwrap().clone();
246 let val = obj.get(&name).unwrap().as_i64().unwrap();
247 data.enum_name_map
248 .insert(format!("{}{}", enum_name, val), name);
249 }
250 }
251 }
252
253 let json = download_gz(&format_url("OffsetsInfo"));
255 let blob: OffsetBlob = serde_json::from_str(&json).expect("Failed to parse OffsetsInfo");
256 for entry in &blob.data {
257 data.offset_map.insert(
258 entry[0].as_str().unwrap().to_string(),
259 entry[1].as_u64().unwrap(),
260 );
261 }
262
263 data
264}
265
266struct TwoStrings {
269 first: LitStr,
270 second: LitStr,
271}
272
273impl Parse for TwoStrings {
274 fn parse(input: ParseStream) -> syn::Result<Self> {
275 let first = input.parse()?;
276 input.parse::<Token![,]>()?;
277 let second = input.parse()?;
278 Ok(Self { first, second })
279 }
280}
281
282struct OneString {
283 value: LitStr,
284}
285
286impl Parse for OneString {
287 fn parse(input: ParseStream) -> syn::Result<Self> {
288 Ok(Self {
289 value: input.parse()?,
290 })
291 }
292}
293
294struct EnumArgs {
295 name: LitStr,
296 value: LitInt,
297}
298
299impl Parse for EnumArgs {
300 fn parse(input: ParseStream) -> syn::Result<Self> {
301 let name = input.parse()?;
302 input.parse::<Token![,]>()?;
303 let value = input.parse()?;
304 Ok(Self { name, value })
305 }
306}
307
308#[proc_macro]
332pub fn setup(input: TokenStream) -> TokenStream {
333 let hash = parse_macro_input!(input as LitStr);
334 let _ = GAME_HASH.set(hash.value());
335 let _ = get_data();
337 quote! {}.into()
338}
339
340#[proc_macro]
350pub fn offset(input: TokenStream) -> TokenStream {
351 let TwoStrings { first, second } = parse_macro_input!(input as TwoStrings);
352 let class = first.value();
353 let member = second.value();
354
355 let data = get_data();
356 let key = format!("{}{}", class, member);
357 let entry = data
358 .class_member_map
359 .get(&key)
360 .unwrap_or_else(|| panic!("dumpspace: offset \"{}::{}\" not found", class, member));
361
362 let val = entry.offset as usize;
363 quote! { #val }.into()
364}
365
366#[proc_macro]
375pub fn class_size(input: TokenStream) -> TokenStream {
376 let OneString { value } = parse_macro_input!(input as OneString);
377 let class = value.value();
378
379 let data = get_data();
380 let size = data
381 .class_size_map
382 .get(&class)
383 .unwrap_or_else(|| panic!("dumpspace: class size for \"{}\" not found", class));
384
385 let val = *size as usize;
386 quote! { #val }.into()
387}
388
389#[proc_macro]
398pub fn global_offset(input: TokenStream) -> TokenStream {
399 let OneString { value } = parse_macro_input!(input as OneString);
400 let name = value.value();
401
402 let data = get_data();
403 let off = data
404 .offset_map
405 .get(&name)
406 .unwrap_or_else(|| panic!("dumpspace: global offset \"{}\" not found", name));
407
408 let val = *off as usize;
409 quote! { #val }.into()
410}
411
412#[proc_macro]
421pub fn enum_name(input: TokenStream) -> TokenStream {
422 let EnumArgs { name, value } = parse_macro_input!(input as EnumArgs);
423 let enum_name = name.value();
424 let enum_val: i64 = value.base10_parse().unwrap();
425
426 let data = get_data();
427 let key = format!("{}{}", enum_name, enum_val);
428 let result = data.enum_name_map.get(&key).unwrap_or_else(|| {
429 panic!(
430 "dumpspace: enum value \"{}::{}\" not found",
431 enum_name, enum_val
432 )
433 });
434
435 quote! { #result }.into()
436}
437
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;
    use std::path::PathBuf;
    use std::time::{SystemTime, UNIX_EPOCH};

    // Builds a CachedData with empty maps for the given hash (uploaded = 1).
    fn empty_cached_data(game_hash: &str) -> CachedData {
        CachedData {
            game_hash: game_hash.to_string(),
            uploaded: 1,
            class_member_map: HashMap::new(),
            class_size_map: HashMap::new(),
            offset_map: HashMap::new(),
            enum_name_map: HashMap::new(),
        }
    }

    // Temp-file path made unique per process, test name, and instant so
    // parallel test runs never collide on the same file.
    fn unique_temp_path(name: &str) -> PathBuf {
        let nanos = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("system clock drifted before UNIX_EPOCH")
            .as_nanos();
        std::env::temp_dir().join(format!(
            "dumpspace-macros-test-{}-{}-{}.json",
            std::process::id(),
            name,
            nanos
        ))
    }

    #[test]
    fn parse_class_info_v10201_handles_class_size_and_bitfields() {
        // v10201: bitfield members are 4-element arrays whose key carries a
        // trailing `_BIT` that must be trimmed from the lookup key.
        let blob: BlobInfo = serde_json::from_value(json!({
            "data": [
                {
                    "Player": [
                        { "Health": ["float", 16, 4] },
                        { "bIsAlive_BIT": ["bool", 20, 1, 3] },
                        { "__MDKClassSize": 64 },
                        { "__InheritInfo": {} }
                    ]
                }
            ],
            "updated_at": "now",
            "version": 10201
        }))
        .expect("valid blob JSON");

        let mut data = empty_cached_data("test-hash");
        parse_class_info(&blob, &mut data);

        let normal = data
            .class_member_map
            .get("PlayerHealth")
            .expect("normal member should be parsed");
        assert_eq!(normal.offset, 16);
        assert_eq!(normal.size, 4);
        assert!(!normal.is_bit);

        let bitfield = data
            .class_member_map
            .get("PlayerbIsAlive")
            .expect("v10201 bitfield key should trim _BIT");
        assert_eq!(bitfield.offset, 20);
        assert_eq!(bitfield.size, 1);
        assert!(bitfield.is_bit);
        assert_eq!(bitfield.bit_offset, 3);

        assert_eq!(
            *data
                .class_size_map
                .get("Player")
                .expect("class size should be captured"),
            64
        );
    }

    #[test]
    fn parse_class_info_v10202_uses_full_bitfield_name() {
        // v10202: bitfield members are 5-element arrays (bit offset last)
        // and the member key is kept as-is.
        let blob: BlobInfo = serde_json::from_value(json!({
            "data": [
                {
                    "Actor": [
                        { "bHidden": ["bool", 40, 1, "unused", 7] }
                    ]
                }
            ],
            "updated_at": "now",
            "version": 10202
        }))
        .expect("valid blob JSON");

        let mut data = empty_cached_data("test-hash");
        parse_class_info(&blob, &mut data);

        let entry = data
            .class_member_map
            .get("ActorbHidden")
            .expect("v10202 should keep full key");
        assert_eq!(entry.offset, 40);
        assert_eq!(entry.size, 1);
        assert!(entry.is_bit);
        assert_eq!(entry.bit_offset, 7);
    }

    #[test]
    fn parse_class_info_panics_for_unknown_blob_version() {
        let blob: BlobInfo = serde_json::from_value(json!({
            "data": [
                {
                    "Actor": [
                        { "Value": ["int", 8, 4] }
                    ]
                }
            ],
            "updated_at": "now",
            "version": 99999
        }))
        .expect("valid blob JSON");

        let mut data = empty_cached_data("test-hash");
        // AssertUnwindSafe: we only inspect whether the call panicked, never
        // the possibly-inconsistent `data` afterwards.
        let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
            parse_class_info(&blob, &mut data);
        }));
        assert!(result.is_err(), "unknown blob versions must panic");
    }

    #[test]
    fn try_load_cache_validates_game_hash_and_invalid_json() {
        let path = unique_temp_path("cache");
        let valid = serde_json::to_string(&empty_cached_data("abc123")).expect("serialize cache");
        std::fs::write(&path, valid).expect("write cache file");

        assert!(
            try_load_cache(&path, "abc123").is_some(),
            "matching cache hash should load"
        );
        assert!(
            try_load_cache(&path, "different").is_none(),
            "mismatched cache hash should be ignored"
        );

        // Corrupt the file in place; loading must fail soft (None), not panic.
        std::fs::write(&path, "{ not json ").expect("write invalid JSON");
        assert!(
            try_load_cache(&path, "abc123").is_none(),
            "invalid cache JSON should be ignored"
        );

        let _ = std::fs::remove_file(path);
    }

    #[test]
    fn cache_path_appends_dsapi_directory_and_hash_file() {
        let path = cache_path("deadbeef");
        let suffix = std::path::Path::new(".dsapi").join("deadbeef.json");
        assert!(
            path.ends_with(&suffix),
            "cache path should end with {} but was {}",
            suffix.display(),
            path.display()
        );
    }
}