ringhopper_proc/
lib.rs

extern crate proc_macro;
use proc_macro::TokenStream;

use std::fs::{read_dir, File};
use std::path::Path;
use std::io::Read;

extern crate serde_json;
use serde_json::{Value, Map};

extern crate syn;
use syn::{parse_macro_input, Expr, Lit};

use std::collections::HashMap;

// Convert a tag group extension (e.g. "unit_hud_interface" -> "UnitHUDInterface")
fn tag_group_extension_to_struct(group: &str) -> String {
    let mut new_group = group.to_owned().replace("gbxmodel", "GBXModel")
                                        .replace("bsp", "BSP")
                                        .replace("ui_", "UI_")
                                        .replace("hud_", "HUD_");
    while let Some(s) = new_group.find("_") {
        new_group.replace_range(s..s+2, &new_group[s+1..s+2].to_uppercase());
    }
    new_group.replace_range(0..1, &new_group[0..1].to_uppercase());
    new_group
}

// Format the string so it is 'friendly' for use as an identifier in Rust code.
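// e.g. "weapon hud interface" -> "weapon_hud_interface", "type" -> "_type"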
fn format_tag_field_name(string: &str) -> String {
    let field_name_written = string.replace(" ", "_").replace("-", "_").replace("'", "").replace(":", "").replace("(", "").replace(")", "");

    // Rust keywords get a leading underscore
    let field_name_written = match field_name_written.as_str() {
        "type" | "loop" => format!("_{field_name_written}"),
        _ => field_name_written
    };

    // Names starting with a digit (e.g. numeric bitfield flags) also need a leading underscore
    if field_name_written.chars().next().unwrap().is_numeric() {
        format!("_{field_name_written}")
    }
    else {
        field_name_written
    }
}

/// Load the definitions json files.
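///
/// For each struct, enum, and bitfield definition found, this generates the corresponding Rust
/// type along with its serialization implementations.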
#[proc_macro]
pub fn load_definition_json_def(_: TokenStream) -> TokenStream {
    let mut stream = TokenStream::new();
    let mut group_to_struct = Vec::<String>::new();

    for path in read_dir(Path::new(env!("CARGO_MANIFEST_DIR")).join("json-definitions")).unwrap() {
        let mut file = File::open(&path.as_ref().unwrap().path()).unwrap();
        let mut data = Vec::<u8>::new();
        file.read_to_end(&mut data).unwrap();
        std::mem::drop(file);

        let data_string = std::str::from_utf8(&data).unwrap();

        let array = match serde_json::from_str::<Value>(data_string) {
            Ok(json) => match json {
                Value::Array(array) => array,
                _ => panic!("{:?} is the wrong JSON type", path)
            },
            Err(e) => panic!("Can't parse {:?}! {}", path, e)
        };

        // Go through each element in the JSON
        for i in array {
            let object = i.as_object().unwrap();
            let object_name = object.get("name").unwrap().as_str().unwrap();
            let object_type = object.get("type").unwrap().as_str().unwrap();

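            // Enums become #[repr(u16)] Rust enums with conversion, FromStr, and TagSerialize impls.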
            if object_type == "enum" {
                let mut options = String::new();

                let all_options = object.get("options").unwrap().as_array().unwrap();
                let enum_count = all_options.len();

                let mut options_str = String::new();
                let mut options_pretty_str = String::new();

                for o in all_options {
                    let o = match o.as_object() {
                        Some(n) => n.to_owned(),
                        None => {
                            let mut m = Map::<String, Value>::new();
                            m.insert("name".to_owned(), Value::String(o.as_str().unwrap().to_owned()));
                            m
                        }
                    };

                    // If the option name starts with a number, prefix it with an underscore.
                    let enum_name = o.get("name").unwrap().as_str().unwrap();
                    let mut rust_enum_name = if enum_name.as_bytes()[0].is_ascii_digit() {
                        "_".to_owned()
                    }
                    else {
                        String::new()
                    };

                    // Now, add each character.
                    rust_enum_name.reserve(enum_name.len());
                    let mut next_character_uppercase = true;
                    for i in enum_name.chars() {
                        if i == ' ' {
                            next_character_uppercase = true;
                        }
                        else if i == '-' {
                            continue
                        }
                        else if next_character_uppercase {
                            rust_enum_name.push(i.to_ascii_uppercase());
                            next_character_uppercase = false;
                        }
                        else {
                            rust_enum_name.push(i);
                        }
                    }

                    match o.get("description") {
                        Some(doc) => options += &format!(r#"#[doc="{doc}"]"#, doc=doc.as_str().unwrap().replace("\"", "\\\"")),
                        None => ()
                    }

                    // Build the options
                    options += &format!("{rust_enum_name},");

                    let spaceless = enum_name.replace(" ", "-").replace("'", "").to_lowercase();
                    options_str += &format!(r#""{spaceless}","#);
                    options_pretty_str += &format!(r#""{enum_name}","#);
                }

                stream.extend(format!("
                    #[repr(u16)]
                    #[derive(Copy, Clone, PartialEq, Default)]
                    pub enum {object_name} {{
                        #[default] {options}
                    }}
                ").parse::<TokenStream>().unwrap());

                stream.extend(format!(r#"impl TagEnumFn for {object_name} {{
                    fn into_u16(self) -> u16 {{
                        self as u16
                    }}

                    fn from_u16(input_value: u16) -> ErrorMessageResult<{object_name}> {{
                        if input_value >= {enum_count} {{
                            Err(ErrorMessage::AllocatedString(format!(get_compiled_string!("engine.h1.types.serialize.error_enum_out_of_bounds"), input_value=input_value, enum_count={enum_count}, enum_name="{object_name}")))
                        }}
                        else {{
                            Ok(unsafe {{ std::mem::transmute(input_value) }})
                        }}
                    }}

                    fn options() -> &'static [&'static str] {{
                        &[{options_str}]
                    }}

                    fn options_pretty() -> &'static [&'static str] {{
                        &[{options_pretty_str}]
                    }}
                }}"#).parse::<TokenStream>().unwrap());

                stream.extend(format!(r#"impl FromStr for {object_name} {{
                    type Err = ErrorMessage;
                    fn from_str(s: &str) -> ErrorMessageResult<{object_name}> {{
                        let options = Self::options();
                        for i in 0..Self::options().len() {{
                            if options[i] == s {{
                                let return_value = Self::from_u16(i as u16);
                                debug_assert!(return_value.is_ok());
                                return return_value;
                            }}
                        }}
                        Err(ErrorMessage::AllocatedString(format!(get_compiled_string!("engine.h1.types.serialize.error_enum_invalid_str"), input=s, enum_name="{object_name}", options=options)))
                    }}
                }}"#).parse::<TokenStream>().unwrap());

                stream.extend(format!("
                impl TagSerialize for {object_name} {{
                    fn tag_size() -> usize {{
                        u16::tag_size()
                    }}
                    fn into_tag(&self, data: &mut Vec<u8>, at: usize, struct_end: usize) -> ErrorMessageResult<()> {{
                        self.into_u16().into_tag(data, at, struct_end)
                    }}
                    fn from_tag(data: &[u8], at: usize, struct_end: usize, cursor: &mut usize) -> ErrorMessageResult<{object_name}> {{
                        {object_name}::from_u16(u16::from_tag(data, at, struct_end, cursor)?)
                    }}
                    fn into_tag_cached(&self, data: &mut [u8], at: usize, struct_end: usize) -> ErrorMessageResult<()> where Self: Sized {{
                        self.into_u16().into_tag_cached(data, at, struct_end)
                    }}
                    fn from_tag_cached(data: &[u8], at: usize, struct_end: usize) -> ErrorMessageResult<Self> where Self: Sized {{
                        {object_name}::from_u16(u16::from_tag_cached(data, at, struct_end)?)
                    }}
                }}").parse::<TokenStream>().unwrap());
            }
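            // Bitfields become structs of bools that pack to and from an unsigned integer of the given width.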
            else if object_type == "bitfield" {
                let width = object.get("width").unwrap().as_u64().unwrap();
                let width_bytes = width / 8;
                let mut fields = String::new();
                let mut into_uint_code = String::new();
                let mut from_uint_code = String::new();

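                // current_value is the bit for the field currently being processed; tag_mask excludes
                // cache-only bits from tag files, and value_mask tracks every bit that is actually defined.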
                let mut current_value = 1u32;
                let mut tag_mask = 0xFFFFFFFFu32;
                let mut value_mask = 0u32;

                for f in object.get("fields").unwrap().as_array().unwrap() {
                    let o = match f.as_object() {
                        Some(n) => n.to_owned(),
                        None => {
                            let mut m = Map::<String, Value>::new();
                            m.insert("name".to_owned(), Value::String(f.as_str().unwrap().to_owned()));
                            m
                        }
                    };

                    let name = format_tag_field_name(o.get("name").unwrap().as_str().unwrap());

                    match o.get("description") {
                        Some(doc) => fields += &format!(r#"#[doc="{doc}"]"#, doc=doc.as_str().unwrap().replace("\"", "\\\"")),
                        None => ()
                    }

                    fields += &format!("pub {name}: bool,");

                    // Set these masks so we know what to read/write
                    if f.get("cache_only").unwrap_or(&Value::Bool(false)).as_bool().unwrap() {
                        tag_mask &= !current_value; // exclude cache only bits from being read/written from tag files
                    }
                    value_mask |= current_value;

                    into_uint_code += &format!("return_value |= {current_value}u{width} * (self.{name} as u{width});");
                    from_uint_code += &format!("{name}: (input_value & {current_value}u{width}) != 0,");

                    current_value <<= 1;
                }

                // Exclude all bits not actually used too.
                tag_mask &= value_mask;

                stream.extend(format!("
                #[derive(Default, Copy, Clone, PartialEq)]
                pub struct {object_name} {{
                    {fields}
                }}").parse::<TokenStream>().unwrap());

                stream.extend(format!("impl {object_name} {{
                    /// Get the numeric representation of the bitfield.
                    pub fn into_u{width}(&self) -> u{width} {{
                        let mut return_value = 0u{width};
                        {into_uint_code}
                        return_value
                    }}

                    /// Convert the number into a bitfield.
                    ///
                    /// Bits that do not exist on the bitfield are ignored.
                    pub fn from_u{width}(input_value: u{width}) -> {object_name} {{
                        {object_name} {{
                            {from_uint_code}
                        }}
                    }}
                }}").parse::<TokenStream>().unwrap());

                let parsing_code = format!("
                impl TagSerialize for {object_name} {{
                    fn tag_size() -> usize {{
                        {width_bytes}
                    }}
                    fn into_tag(&self, data: &mut Vec<u8>, at: usize, struct_end: usize) -> ErrorMessageResult<()> {{
                        (self.into_u{width}() & {tag_mask}).into_tag(data, at, struct_end)
                    }}
                    fn from_tag(data: &[u8], at: usize, struct_end: usize, cursor: &mut usize) -> ErrorMessageResult<{object_name}> {{
                        Ok({object_name}::from_u{width}(u{width}::from_tag(data, at, struct_end, cursor)? & {tag_mask}))
                    }}
                }}");
                stream.extend(parsing_code.parse::<TokenStream>().unwrap());
            }
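            // Structs: generate a Rust struct definition plus field-by-field tag (de)serialization code.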
            else if object_type == "struct" {
                // Check if we implement copy
                let mut implements_copy = true;

                // Generate Rust code to define our struct and implementation
                let tag_size = object.get("size").unwrap().as_u64().unwrap();
                let mut all_fields_defined = String::new();

                // If this is a group, note it here.
                match object.get("group") {
                    Some(n) => group_to_struct.push(tag_group_extension_to_struct(n.as_str().unwrap())),
                    None => ()
                }

                // If we inherit anything, handle that too
                let mut from_tag_code;
                let mut into_tag_code;
                match object.get("inherits") {
                    Some(n) => {
                        implements_copy = false; // can't determine this

                        let inherited_object = n.as_str().unwrap();
                        all_fields_defined += &format!("pub base_struct: {inherited_object},");
                        from_tag_code = format!("new_object.base_struct = {inherited_object}::from_tag(data, at, struct_end, cursor)?; let mut local_cursor = at + {inherited_object}::tag_size();");
                        into_tag_code = format!("self.base_struct.into_tag(data, at, struct_end)?; let mut local_cursor = at + {inherited_object}::tag_size();");
                    },
                    None => {
                        from_tag_code = format!("let mut local_cursor = at;");
                        into_tag_code = format!("let mut local_cursor = at;");
                    }
                }

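                // Each field contributes a struct member plus matching read/write statements that advance local_cursor.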
                for f in object.get("fields").unwrap().as_array().unwrap() {
                    let field_type = f.get("type").unwrap().as_str().unwrap();

                    // If it is padding, we may have some special behavior here.
                    if field_type == "pad" {
                        let cursor_increment = format!("local_cursor += {};", f.get("size").unwrap().as_u64().unwrap());
                        from_tag_code += &cursor_increment;
                        into_tag_code += &cursor_increment;
                        continue
                    }

                    // Otherwise, let's do this
                    let field_name = f.get("name").unwrap().as_str().unwrap();
                    let field_name_written = format_tag_field_name(&field_name);

                    if field_type == "Reflexive" || field_type == "Data" || field_type == "TagReference" {
                        implements_copy = false;
                    }

                    let field_type_data_type = match field_type {
                        "Reflexive" => format!("Reflexive<{}>", f.get("struct").unwrap().as_str().unwrap()),
                        "int8" => "i8".to_owned(),
                        "int16" => "i16".to_owned(),
                        "int32" => "i32".to_owned(),
                        "uint8" => "u8".to_owned(),
                        "uint16" => "u16".to_owned(),
                        "uint32" => "u32".to_owned(),
                        "float" | "Angle" | "Fraction" => "f32".to_owned(),
                        f => f.to_owned()
                    };

                    // Wrap the type in Bounds if the field is a bounds pair
                    let field_type_data_type = if f.get("bounds").unwrap_or(&Value::Bool(false)).as_bool().unwrap() {
                        format!("Bounds<{field_type_data_type}>")
                    }
                    else {
                        field_type_data_type
                    };

                    // Next, do we need to make it an array?
                    let count = match f.get("count") {
                        Some(n) => n.as_u64().unwrap(),
                        None => 1
                    };

                    // Put '::' before any '<' (turbofish) so the type can be used in expression position
                    let field_type_written_expression = field_type_data_type.replace("<", "::<");

                    // Array?
                    let field_type_struct = match count {
                        1 => field_type_data_type,
                        _ => format!("[{field_type_data_type}; {count}]")
                    };

                    // Is this cached?
                    let little_endian = f.get("little_endian").unwrap_or(&Value::Bool(false)).as_bool().unwrap();
                    let uses_cursor = field_type == "TagReference" || field_type == "Data" || field_type == "Reflexive";

                    // Fields that use a data pointer cannot be little endian.
                    if little_endian && uses_cursor {
                        panic!("{field_type} cannot be little endian!", field_type=field_type);
                    }

                    // Is this cache only?
                    let cache_only = f.get("cache_only").unwrap_or(&Value::Bool(false)).as_bool().unwrap();

                    let mut doc = f.get("comment").unwrap_or(&Value::String(String::new())).as_str().unwrap().to_owned();

                    // Write the serialization code
                    let mut write_serialization_code = |type_suffix: &str| {
                        //from_tag_code += &format!("println!(\"...reading {field_name_written} - {field_type_struct}, AT: 0x{{at:08X}} -> 0x{{local_cursor:08X}} / SE: 0x{{struct_end:08X}} / SZ: 0x{{size:08X}}\", at=at, local_cursor=local_cursor, struct_end=struct_end, size=data.len());");

                        // If it is cache only and does not advance the data cursor, we ignore it entirely; otherwise we read it from the tag but don't keep it.
                        // Either way, we do not generate any code to put it in a tag file.
                        if cache_only {
                            if uses_cursor {
                                from_tag_code += &format!("{field_type_written_expression}::from_tag(data, local_cursor, struct_end, cursor)?;");
                            }
                        }

                        // Otherwise we serialize it normally
                        else {
                            if little_endian {
                                into_tag_code += &format!("self.{field_name_written}{type_suffix}.into_tag_cached(data, local_cursor, struct_end)?;");
                                from_tag_code += &format!("new_object.{field_name_written}{type_suffix} = {field_type_written_expression}::from_tag_cached(data, local_cursor, struct_end)?;");
                            }
                            else {
                                into_tag_code += &format!("self.{field_name_written}{type_suffix}.into_tag(data, local_cursor, struct_end)?;");
                                from_tag_code += &format!("new_object.{field_name_written}{type_suffix} = {field_type_written_expression}::from_tag(data, local_cursor, struct_end, cursor)?;");
                            }
                        }

                        let cursor_increment = format!("local_cursor += {field_type_written_expression}::tag_size();");
                        from_tag_code += &cursor_increment;
                        into_tag_code += &cursor_increment;
                    };

                    // One object, not an array
                    if count == 1 {
                        write_serialization_code("");
                    }

                    // Array
                    else {
                        for i in 0..count {
                            write_serialization_code(&format!("[{i}]"));
                        }
                    }

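                    // TagReference fields additionally document their allowed groups and default their group when read.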
                    if field_type_struct == "TagReference" {
                        if !doc.is_empty() {
                            doc += "\n\n";
                        }

                        let groups_arr = f.get("groups").unwrap().as_array().unwrap();
                        let mut groups = Vec::<String>::new();

                        for g in groups_arr {
                            let appended_group: &'static [&'static str] = match g.as_str().unwrap() {
                                "unit"   => &["biped", "vehicle"],
                                "item"   => &["weapon", "equipment", "garbage"],
                                "device" => &["device_machine", "device_light_fixture", "device_control"],
                                "object" => &["biped", "vehicle",
                                              "weapon", "equipment", "garbage",
                                              "scenery",
                                              "device_machine", "device_light_fixture", "device_control",
                                              "placeholder",
                                              "sound_scenery"],

                                "model"  => &["gbxmodel", "model"],

                                "shader" => &[
                                    "shader_environment",
                                    "shader_model",
                                    "shader_transparent_chicago_extended",
                                    "shader_transparent_chicago",
                                    "shader_transparent_generic",
                                    "shader_transparent_glass",
                                    "shader_transparent_meter",
                                    "shader_transparent_plasma",
                                    "shader_transparent_water"
                                ],

                                n => {
                                    groups.push(n.to_owned());
                                    &[]
                                }
                            };

                            groups.reserve(appended_group.len());
                            for &i in appended_group {
                                groups.push(i.to_owned());
                            }
                        }

                        // Sort before deduplicating so non-adjacent duplicates are removed too.
                        groups.sort();
                        groups.dedup();

                        if groups == ["*"] {
                            doc += &format!("Allowed groups: All\n");
                        }
                        else {
                            doc += &format!("Allowed groups: ");
                            let mut add_comma = false;

                            // Format groups into matching their equivalent group name
                            for g in &groups {
                                let new_group = tag_group_extension_to_struct(&g);

                                match add_comma {
                                    true => doc += ", ",
                                    false => add_comma = true
                                };

                                doc += &format!("[{new_group}](TagGroup::{new_group})");
                            }
                            doc += "\n";
                        }

                        // Allow default_group to be specified
                        let default_group = if groups == ["*"] {
                            "TagCollection".to_owned()
                        }
                        else if groups_arr[0].as_str().unwrap() == "model" {
                            "Model".to_owned()
                        }
                        else {
                            tag_group_extension_to_struct(&groups[0])
                        };

                        // Default the group
                        if !cache_only {
                            from_tag_code += &format!("if new_object.{field_name_written}.get_group() == TagGroup::_None {{ new_object.{field_name_written}.set_group(TagGroup::{default_group}); }}");
                        }
                    }

                    // Put the doc in the struct
                    if doc != "" {
                        doc = doc.replace("\"", "\\\"");
                        all_fields_defined += &format!(r#"#[doc="{doc}"] "#)
                    }

                    // Put it in the struct
                    all_fields_defined += &format!("pub {field_name_written}: {field_type_struct},");
                }

                // Define the struct
                stream.extend(format!("#[derive(Default{}, Clone, PartialEq)] pub struct {object_name} {{ {all_fields_defined} }}", match implements_copy { true => ", Copy", false => "" } ).parse::<TokenStream>().unwrap());

                // Define parsing it too
                stream.extend(format!("
                impl TagBlockFn for {object_name} {{
                    fn field_count(&self) -> usize {{ todo!() }}
                    fn field_at_index(&self, _: usize) -> TagField {{ todo!() }}
                    fn field_at_index_mut(&mut self, _: usize) -> TagField {{ todo!() }}
                }}").parse::<TokenStream>().unwrap());

                // Next serializing code
                let parsing_code = format!("
                impl TagSerialize for {object_name} {{
                    fn tag_size() -> usize {{
                        {tag_size}
                    }}
                    fn into_tag(&self, data: &mut Vec<u8>, at: usize, struct_end: usize) -> ErrorMessageResult<()> {{
                        {into_tag_code}
                        debug_assert_eq!(at + {tag_size}, local_cursor, \"Size for {object_name} is wrong\");
                        Ok(())
                    }}
                    fn from_tag(data: &[u8], at: usize, struct_end: usize, cursor: &mut usize) -> ErrorMessageResult<{object_name}> {{
                        let mut new_object = {object_name}::default();
                        {from_tag_code}
                        debug_assert_eq!(at + {tag_size}, local_cursor, \"Size for {object_name} is wrong\");
                        Ok(new_object)
                    }}
                }}");
                stream.extend(parsing_code.parse::<TokenStream>().unwrap());
            }
        }
    }

    // Write functions for reading tags with TagFileSerializeFn
    let mut group_read_match_block = String::new();
    for group in group_to_struct {
        stream.extend(format!("impl TagFileSerializeFn for {group} {{
            fn from_tag_file(data: &[u8]) -> ErrorMessageResult<ParsedTagFile<Self>> {{
                let header = TagFileHeader::from_tag(data, 0, TAG_FILE_HEADER_LEN, &mut TAG_FILE_HEADER_LEN.clone())?;
                if header.tag_group == TagGroup::{group} {{
                    ParsedTagFile::from_tag(data)
                }}
                else {{
                    Err(ErrorMessage::AllocatedString(format!(get_compiled_string!(\"engine.h1.types.tag.header.error_reason_wrong_group\"), group_expected=\"{group}\", group_actual=header.tag_group.as_str())))
                }}
            }}
            fn into_tag_file(&self) -> ErrorMessageResult<Vec<u8>> {{
                ParsedTagFile::into_tag(self, TagGroup::{group})
            }}
        }}").parse::<TokenStream>().unwrap());

        group_read_match_block += &format!("TagGroup::{group} => {{
            let tag_file = {group}::from_tag_file(data)?;
            Ok(ParsedTagFile {{
                header: tag_file.header,
                data: tag_file.data
            }})
        }},");
    }

    stream.extend(format!("
        /// Generic function for parsing a tag file for when knowing the tag group is not required.
        ///
        /// Returns an error if the tag could not be parsed.
        pub fn parse_tag_file(data: &[u8]) -> ErrorMessageResult<ParsedTagFile<dyn TagFileSerializeFn>> {{
        let header = TagFileHeader::from_tag(data, 0, TAG_FILE_HEADER_LEN, &mut TAG_FILE_HEADER_LEN.clone())?;
        match header.tag_group {{
            {group_read_match_block}
            n => Err(ErrorMessage::AllocatedString(format!(get_compiled_string!(\"engine.h1.types.tag.header.error_reason_unparsable_group\"), group=n.as_str())))
        }}
    }}").parse::<TokenStream>().unwrap());

    stream
}

/// Load the target json files.
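///
/// Generates the `ALL_TARGETS` array of `EngineTarget` values from the json-targets directory.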
#[proc_macro]
pub fn load_target_json_def(_: TokenStream) -> TokenStream {
    let mut stream = TokenStream::new();

    // Parse each json one by one
    let mut jsons = HashMap::<String, Map<String, Value>>::new();
    for path in read_dir(Path::new(env!("CARGO_MANIFEST_DIR")).join("json-targets")).unwrap() {
        let path = path.as_ref().unwrap().path();
        let mut file = File::open(&path).unwrap();
        let mut data = Vec::<u8>::new();
        file.read_to_end(&mut data).unwrap();
        std::mem::drop(file);

        let data_string = std::str::from_utf8(&data).unwrap();
        let object = match serde_json::from_str::<Value>(data_string) {
            Ok(json) => match json {
                Value::Object(object) => object,
                _ => panic!("{:?} is the wrong JSON type", path)
            },
            Err(e) => panic!("Can't parse {:?}! {}", path, e)
        };
        jsons.insert(path.file_name().unwrap().to_str().unwrap().to_owned(), object);
    }

    let mut engine_array = String::new();

    for filename in jsons.keys() {
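        // Recursively resolve a key for a target, walking the "derives" chain; required_tags maps are merged across the chain.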
        fn get_value(key: &str, base_filename: &str, jsons: &HashMap::<String, Map<String, Value>>) -> Option<Value> {
            let json = jsons.get(base_filename).unwrap();
            let derives = json.get("derives");

            // Get this value and the derived value.
            let this_value = match json.get(key) {
                Some(Value::Null) => None,
                Some(n) => Some(n),
                None => None
            };

            let derived_value = if let Some(v) = derives {
                get_value(key, &v.as_str().unwrap(), jsons)
            }
            else {
                None
            };

            // If we don't have a value here, return the derived value and call it a day.
            if this_value.is_none() {
                // Default fallback to false
                if key == "fallback" {
                    return Some(Value::Bool(false));
                }

                // Don't pass through shorthands
                if key == "shorthand" {
                    return None;
                }

                return derived_value;
            }

            // If we aren't looking for required tags, return this value as-is and call it a day.
            if key != "required_tags" {
                return this_value.map(|f| f.to_owned());
            }

            // Merge required tags
            let mut this_value = this_value.unwrap().to_owned().as_object().unwrap().to_owned();

            macro_rules! insert_if_not_inserted {
                ($key:expr) => {{
                    if !this_value.contains_key($key) {
                        this_value.insert($key.to_owned(), Value::Array(Vec::new()));
                    }
                }}
            }

            insert_if_not_inserted!("all");
            insert_if_not_inserted!("singleplayer");
            insert_if_not_inserted!("multiplayer");
            insert_if_not_inserted!("user_interface");
            insert_if_not_inserted!("singleplayer_demo");
            insert_if_not_inserted!("multiplayer_demo");
            insert_if_not_inserted!("user_interface_demo");

            // If we have nothing derived, we're done!
            if derived_value.is_none() {
                return Some(Value::Object(this_value));
            }

            // Otherwise, merge them key-by-key.
            let mut derived_value = derived_value.unwrap().as_object().unwrap().to_owned();

            macro_rules! merge_it_all {
                ($key:expr) => {{
                    derived_value.get_mut($key).unwrap().as_array_mut().unwrap().append(this_value.get_mut($key).unwrap().as_array_mut().unwrap());
                }}
            }

            merge_it_all!("all");
            merge_it_all!("singleplayer");
            merge_it_all!("multiplayer");
            merge_it_all!("user_interface");
            merge_it_all!("singleplayer_demo");
            merge_it_all!("multiplayer_demo");
            merge_it_all!("user_interface_demo");

            Some(Value::Object(derived_value))
        }

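        // Accept either a JSON number or a "0x"-prefixed hex string.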
        fn parse_int_value(value: Value) -> u64 {
            match value {
                Value::String(string) => {
                    if string.starts_with("0x") {
                        u64::from_str_radix(&string[2..], 16).unwrap()
                    }
                    else {
                        panic!("expected unsigned int or hex")
                    }
                },
                Value::Number(number) => number.as_u64().unwrap(),
                _ => panic!("expected unsigned int or hex")
            }
        }

        fn parse_optional_string(value: Option<Value>) -> Option<String> {
            value.map(|v| v.as_str().unwrap().to_owned())
        }

        fn encapsulate_optional_string(string: Option<String>) -> String {
            match string {
                Some(n) => format!("Some({})", encapsulate_string(n)),
                None => "None".to_owned()
            }
        }

        fn encapsulate_string(string: String) -> String {
            format!("\"{string}\"")
        }

        let name = encapsulate_string(get_value("name", filename, &jsons).unwrap().as_str().unwrap().to_owned());

        let version = encapsulate_optional_string(parse_optional_string(get_value("version", filename, &jsons)));
        let shorthand = encapsulate_optional_string(parse_optional_string(get_value("shorthand", filename, &jsons)));
        let build = encapsulate_optional_string(parse_optional_string(get_value("build", filename, &jsons)));
        let script_compile_target = match get_value("script_compile_target", filename, &jsons).unwrap().as_str().unwrap() {
            "xbox" => "CompileTarget::HaloCEXboxNTSC",
            "mcc-cea" => "CompileTarget::HaloCEA",
            "gbx-custom" => "CompileTarget::HaloCustomEdition",
            "gbx-retail" => "CompileTarget::HaloCEGBX",
            "gbx-demo" => "CompileTarget::HaloCEGBXDemo",
            n => panic!("Unknown script compile target {}", n)
        };

        let fallback = get_value("fallback", filename, &jsons).unwrap().as_bool().unwrap();
        let required_tags = get_value("required_tags", filename, &jsons).unwrap().as_object().unwrap().to_owned();
        let max_script_nodes = parse_int_value(get_value("max_script_nodes", &filename, &jsons).unwrap());
        let cache_file_version = parse_int_value(get_value("cache_file_version", &filename, &jsons).unwrap());
        let bsps_occupy_tag_space = get_value("bsps_occupy_tag_space", filename, &jsons).unwrap_or(Value::Bool(false)).as_bool().unwrap();
        let max_tag_space = parse_int_value(get_value("max_tag_space", &filename, &jsons).unwrap());

        let max_cache_file_size = get_value("max_cache_file_size", filename, &jsons).unwrap().as_object().unwrap().to_owned();
        let max_cache_file_size_user_interface = parse_int_value(max_cache_file_size.get("user_interface").unwrap().to_owned());
        let max_cache_file_size_singleplayer = parse_int_value(max_cache_file_size.get("singleplayer").unwrap().to_owned());
        let max_cache_file_size_multiplayer = parse_int_value(max_cache_file_size.get("multiplayer").unwrap().to_owned());

        let base_memory_address = get_value("base_memory_address", filename, &jsons).unwrap().as_object().unwrap().to_owned();
        let base_memory_address_value = parse_int_value(base_memory_address.get("value").unwrap().to_owned());
        let base_memory_address_type = base_memory_address.get("type").unwrap().as_str().unwrap().to_owned();

        let base_memory_address_tokens = match base_memory_address_type.as_str() {
            "fixed" => format!("BaseMemoryAddressType::Fixed({base_memory_address_value})"),
            "inferred" => format!("BaseMemoryAddressType::Inferred({base_memory_address_value})"),
            n => panic!("unknown base memory address type {}", n)
        };

        let mut required_tags_tokens = String::new();
        for (target, tags) in required_tags {
            let mut new_tags = String::new();
            for i in tags.as_array().unwrap() {
                new_tags += &format!("\"{}\",", i.as_str().unwrap().replace("\\", "\\\\"));
            }
            required_tags_tokens += &format!("{target}: &[{new_tags}],");
        }
        let required_tags_tokens = format!("RequiredTags {{ {required_tags_tokens} }}");

        let tokens = format!("
            EngineTarget {{
                name: {name},
                version: {version},
                shorthand: {shorthand},
                build: {build},
                fallback: {fallback},
                max_script_nodes: {max_script_nodes},
                cache_file_version: {cache_file_version},
                bsps_occupy_tag_space: {bsps_occupy_tag_space},
                max_tag_space: {max_tag_space},
                max_cache_file_size_user_interface: {max_cache_file_size_user_interface},
                max_cache_file_size_singleplayer: {max_cache_file_size_singleplayer},
                max_cache_file_size_multiplayer: {max_cache_file_size_multiplayer},
                base_memory_address: {base_memory_address_tokens},
                required_tags: {required_tags_tokens},
                script_compile_target: {script_compile_target}
            }},
        ");

        engine_array += &tokens;
    }
    stream.extend(format!("pub const ALL_TARGETS: &'static [EngineTarget] = &[{engine_array}];").parse::<TokenStream>().unwrap());

    stream
}

const STRINGS_JSON_DATA: &'static str = include_str!("strings.json");

fn get_string(name: String) -> String {
    let map = match serde_json::from_str::<Value>(STRINGS_JSON_DATA) {
        Ok(json) => match json {
            Value::Object(map) => map,
            _ => panic!("strings.json is the wrong JSON type")
        },
        Err(e) => panic!("Can't parse strings.json! {}", e)
    };

    match map.get(&name) {
        Some(n) => match n {
            Value::String(s) => s.to_string(),
            _ => panic!("\"{}\" does not correspond to a string in strings.json!", name)
        },
        None => panic!("\"{}\" does not correspond to anything in strings.json", name)
    }
}

/// Get the string compiled into strings.json.
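///
/// Expands to a string literal, e.g.
/// `get_compiled_string!("engine.h1.types.serialize.error_enum_out_of_bounds")`.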
#[proc_macro]
pub fn get_compiled_string(input: TokenStream) -> TokenStream {
    let literal = match parse_macro_input!(input as Expr) {
        Expr::Lit(n) => n,
        _ => panic!("expected a string literal here")
    };

    let string = match literal.lit {
        Lit::Str(n) => n,
        _ => panic!("expected a string literal here")
    }.value();

    format!("\"{}\"", get_string(string).replace("\\", "\\\\").replace("\"", "\\\"")).parse().unwrap()
}