spark-protos 0.1.11

Protos for Spark (https://www.spark.money/). These protos are used to communicate with Spark Operators.
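
As a rough sketch of how a downstream crate might use the generated tonic client to talk to an operator (the module path, the SparkServiceClient name, and the query_nodes RPC below are illustrative assumptions, not the crate's confirmed API; check the generated modules for the real names):

// Hypothetical usage sketch; service, RPC, and endpoint names are placeholders.
use spark_protos::spark::spark_service_client::SparkServiceClient;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Open a gRPC channel to a Spark Operator endpoint (placeholder URL).
    let mut client = SparkServiceClient::connect("https://operator.example.com").await?;

    // Call an RPC; the request and response types are generated from the protos.
    let response = client
        .query_nodes(tonic::Request::new(Default::default()))
        .await?;
    println!("{:?}", response.into_inner());
    Ok(())
}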
Documentation
//! This build script generates the Rust code for the Spark Operator RPC protocol buffers.
//! It uses the `tonic-build` crate to generate the Rust code from the protocol buffer definitions.
//! The generated code is written to Cargo's `OUT_DIR` directory.

#[expect(clippy::expect_used)]
fn main() {
    use std::{env, fs::read, path::PathBuf};

    use prost_wkt_build::Message as _;
    use tonic_build::FileDescriptorSet;

    let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());

    let spark_descriptor_file = out_dir.join("spark_descriptor.bin");

    // Configure tonic-build: client-only code, serde derives on every generated type,
    // and prost-wkt replacements for the Google well-known types
    let mut config = tonic_build::configure()
        .build_server(false)
        .out_dir(&out_dir)
        .compile_well_known_types(true)
        .type_attribute(".", "#[derive(serde::Serialize, serde::Deserialize)]")
        .type_attribute(".", "#[serde(rename_all = \"camelCase\")]")
        // Map the Google well-known protobuf types to their prost-wkt equivalents
        .extern_path(".google.protobuf.Timestamp", "::prost_wkt_types::Timestamp")
        .extern_path(".google.protobuf.Any", "::prost_wkt_types::Any")
        .extern_path(".google.protobuf.Value", "::prost_wkt_types::Value")
        .extern_path(".google.protobuf.Empty", "::prost_wkt_types::Empty")
        // Silence clippy lints on generated fields
        .field_attribute(".", "#[allow(clippy::all)]");

    // Write the file descriptor set so prost-wkt-build can post-process the generated types
    config = config.file_descriptor_set_path(&spark_descriptor_file);

    // Also try to set specific attributes for Vec<u8> fields
    config = config.field_attribute("bytes = \"vec\"", "#[derive(Default)]");

    // Compile the protos
    config
        .compile_protos(
            &[
                "protos/spark/common.proto",
                "protos/spark/spark.proto",
                "protos/spark/spark_tree.proto",
                "protos/spark/frost.proto",
                "protos/spark/spark_authn.proto",
            ],
            &["protos"],
        )
        .expect("Failed to compile Spark Operator RPC protocol buffers");

    let spark_descriptor_bytes = read(&spark_descriptor_file)
        .expect("Failed to read Spark Operator RPC protocol buffer descriptor");
    let spark_descriptor = FileDescriptorSet::decode(&*spark_descriptor_bytes)
        .expect("Failed to decode Spark Operator RPC protocol buffer descriptor");

    // Post-process the generated code so the messages integrate with prost-wkt's serde
    // support (e.g. JSON (de)serialization of well-known types such as `Any`)
    prost_wkt_build::add_serde(out_dir, spark_descriptor);

    println!("cargo:rerun-if-changed=spark-protos");
    println!("cargo:rerun-if-changed=build.rs");
}
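
Because the build script writes the generated modules to Cargo's OUT_DIR, a downstream lib.rs typically pulls them in with tonic::include_proto!. A minimal sketch, assuming the .proto files declare packages named spark and spark_authn (the real package names may differ):

// lib.rs sketch — module names must match the `package` declarations in the protos.
pub mod spark {
    // Expands to include!(concat!(env!("OUT_DIR"), "/spark.rs"))
    tonic::include_proto!("spark");
}

pub mod spark_authn {
    tonic::include_proto!("spark_authn");
}

Since every generated type also derives serde::Serialize and serde::Deserialize with rename_all = "camelCase", these messages can be round-tripped through serde_json with camelCase field names.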