//! website_crawler 0.5.1
//!
//! A very fast gRPC web crawler.
use std::env;
use std::process::Command;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let out_dir = env::var("OUT_DIR").unwrap();
    let npm = Command::new("command")
        .args(["-v", "npm"])
        .output()
        .expect("npm command not found");

    let npm = String::from_utf8(npm.stdout).unwrap();

    if !npm.is_empty() {
        Command::new("npm")
            .args(["i", "--prefix", &out_dir, "@a11ywatch/protos"])
            .output()
            .expect("failed to execute process");
    
        tonic_build::compile_protos(format!(
            "{}/node_modules/@a11ywatch/protos/crawler.proto",
            out_dir
        ))?;
        tonic_build::compile_protos(format!(
            "{}/node_modules/@a11ywatch/protos/website.proto",
            out_dir
        ))?;
        tonic_build::compile_protos(format!(
            "{}/node_modules/@a11ywatch/protos/health.proto",
            out_dir
        ))?;
    } else {
        println!("npm is required for installing proto files!");
    }

    Ok(())
}