#[expect(clippy::expect_used)]
fn main() {
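// Compile the Spark Operator .proto definitions with tonic-build and
// post-process the generated code with prost-wkt-build so the generated
// types (de)serialize with serde.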
use std::{env, fs::read, path::PathBuf};
use prost_wkt_build::{FileDescriptorSet, Message as _};
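// Cargo sets OUT_DIR for build scripts; the protoc file descriptor set is
// written there so it can be read back for post-processing below.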
let out_dir = PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR is set by Cargo for build scripts"));
let spark_descriptor_file = out_dir.join("spark_descriptor.bin");
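// Client-only codegen with serde derives on every generated type; the
// well-known types listed below are swapped for their prost-wkt-types
// counterparts, which already implement Serialize/Deserialize.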
let config = tonic_build::configure()
.build_server(false)
.out_dir(&out_dir)
.compile_well_known_types(true)
.type_attribute(".", "#[derive(serde::Serialize, serde::Deserialize)]")
.type_attribute(".", "#[serde(rename_all = \"camelCase\")]")
.extern_path(".google.protobuf.Timestamp", "::prost_wkt_types::Timestamp")
.extern_path(".google.protobuf.Any", "::prost_wkt_types::Any")
.extern_path(".google.protobuf.Value", "::prost_wkt_types::Value")
.extern_path(".google.protobuf.Empty", "::prost_wkt_types::Empty")
.field_attribute(".", "#[allow(clippy::all)]")
.file_descriptor_set_path(&spark_descriptor_file)
.field_attribute("bytes = \"vec\"", "#[derive(Default)]");
config
.compile_protos(
&[
"protos/spark/common.proto",
"protos/spark/spark.proto",
"protos/spark/spark_tree.proto",
"protos/spark/frost.proto",
"protos/spark/spark_authn.proto",
],
&["protos"],
)
.expect("Failed to compile Spark Operator RPC protocol buffers");
let spark_descriptor_bytes = read(&spark_descriptor_file)
.expect("Failed to read Spark Operator RPC protocol buffer descriptor");
let spark_descriptor = FileDescriptorSet::decode(&*spark_descriptor_bytes)
.expect("Failed to decode Spark Operator RPC protocol buffer descriptor");
prost_wkt_build::add_serde(out_dir, spark_descriptor);
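// Re-run this build script only when the proto sources or the script itself change.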
println!("cargo:rerun-if-changed=spark-protos");
println!("cargo:rerun-if-changed=build.rs");
}