#![allow(clippy::all, clippy::pedantic, clippy::disallowed_methods)]
#![allow(
unreachable_code,
unused_variables,
unused_imports,
dead_code,
unused_assignments
)]
use clap::{Parser, Subcommand, ValueEnum};
use std::path::{Path, PathBuf};
#[macro_use]
#[allow(unused_macros, clippy::duplicated_attributes)]
mod generated_contracts;
mod commands;
pub mod error;
mod output;
pub mod pipe;
pub use error::CliError;
/// Public re-exports of the QA report types so downstream crates can
/// consume them without reaching into the private `commands` module.
pub mod qa_types {
    pub use crate::commands::qa::{GateResult, QaReport, SystemInfo};
}
/// Public re-exports of the model-pull command surface (`list`, `run`)
/// from the otherwise-private `commands::pull` module.
pub mod model_pull {
    pub use crate::commands::pull::{list, run};
}
#[cfg(feature = "inference")]
pub mod federation;
use commands::{
bench, canary, canary::CanaryCommands, cbtop, chat, compare_hf, compile, convert, data, debug,
diagnose, diff, distill, eval, explain, export, flow, hex, import, inspect, lint, mcp, merge,
oracle, pipeline, probar, profile, prune, publish, pull, qa, qualify, quantize, rosetta,
rosetta::RosettaCommands, run, serve, showcase, stamp, tensors, tokenize, trace, tree, tui,
validate, validate_manifest,
};
#[cfg(feature = "training")]
use commands::{finetune, gpu, train, tune};
#[cfg(feature = "training")]
pub use commands::pretrain::PretrainMode;
// Top-level clap parser for the `apr` binary.
//
// NOTE(review): field comments deliberately use `//` rather than `///` —
// clap's derive turns doc comments into `--help` text, so adding doc
// comments here would change the CLI's user-visible help output.
#[derive(Parser, Debug)]
#[command(name = "apr")]
// Version string renders as "<crate version> (<git sha>)"; APR_GIT_SHA is a
// compile-time env var — presumably injected by a build script; confirm.
#[command(author, version = concat!(env!("CARGO_PKG_VERSION"), " (", env!("APR_GIT_SHA"), ")"), about, long_about = None)]
// Subcommands inherit the same --version string.
#[command(propagate_version = true)]
pub struct Cli {
    // Selected subcommand; boxed — presumably to keep the parsed struct
    // small, since an enum is as large as its biggest variant (confirm).
    #[command(subcommand)]
    pub command: Box<Commands>,
    // Global flags below are accepted before or after any subcommand.
    // Machine-readable JSON output.
    #[arg(long, global = true)]
    pub json: bool,
    // Verbose diagnostics (-v).
    #[arg(short, long, global = true)]
    pub verbose: bool,
    // Suppress non-essential output (-q).
    #[arg(short, long, global = true)]
    pub quiet: bool,
    // Disallow network access.
    #[arg(long, global = true)]
    pub offline: bool,
    // Skip contract validation.
    #[arg(long, global = true)]
    pub skip_contract: bool,
}
include!("commands_enum.rs");
include!("model_ops_commands.rs");
include!("extended_commands.rs");
include!("tool_commands.rs");
include!("data_commands.rs");
#[cfg(feature = "training")]
include!("train_commands.rs");
include!("serve_commands.rs");
include!("tokenize_commands.rs");
include!("pipeline_commands.rs");
include!("validate.rs");
include!("dispatch_run.rs");
include!("dispatch.rs");
include!("dispatch_analysis.rs");
include!("lib_07.rs");
/// Entry point for the `apr` CLI: performs process-level setup (signal
/// disposition, FP control register, color handling), parses arguments,
/// dispatches the selected subcommand, and maps the result to an exit code.
pub fn cli_main() -> std::process::ExitCode {
    // Restore the default SIGPIPE disposition so that writing to a closed
    // pipe (e.g. `apr ... | head`) terminates the process quietly instead
    // of surfacing broken-pipe I/O errors (Rust ignores SIGPIPE by default).
    #[cfg(unix)]
    #[allow(unsafe_code)]
    unsafe {
        libc::signal(libc::SIGPIPE, libc::SIG_DFL);
    }
    // On aarch64: read FPCR and clear bit 19 if it is set.
    // NOTE(review): FPCR bit 19 is FZ16 (flush-to-zero for half-precision)
    // per the Arm Architecture Reference Manual — presumably this preserves
    // f16 denormals for numerical accuracy; confirm intent with the author.
    #[cfg(target_arch = "aarch64")]
    #[allow(unsafe_code)]
    unsafe {
        let fpcr: u64;
        core::arch::asm!("mrs {}, fpcr", out(reg) fpcr);
        if fpcr & (1 << 19) != 0 {
            let new_fpcr = fpcr & !(1 << 19);
            core::arch::asm!("msr fpcr, {}", in(reg) new_fpcr);
        }
    }
    // Disable colored output when NO_COLOR is set (any value, per the
    // NO_COLOR convention) or when stdout is not a terminal (piped output).
    let no_color = std::env::var("NO_COLOR").is_ok();
    let is_tty = std::io::IsTerminal::is_terminal(&std::io::stdout());
    if no_color || !is_tty {
        colored::control::set_override(false);
    }
    // Fast path: `--version` together with `--json` anywhere on the command
    // line emits machine-readable version info and exits, bypassing clap.
    let raw: Vec<String> = std::env::args().collect();
    if raw.iter().any(|a| a == "--version") && raw.iter().any(|a| a == "--json") {
        emit_version_json();
        return std::process::ExitCode::SUCCESS;
    }
    let cli = Cli::parse();
    // Dispatch; on failure print to stderr and use the error's own
    // exit-code mapping rather than a generic failure code.
    match execute_command(&cli) {
        Ok(()) => std::process::ExitCode::SUCCESS,
        Err(e) => {
            eprintln!("error: {e}");
            e.exit_code()
        }
    }
}
/// Print version metadata to stdout as pretty-printed JSON.
///
/// Reports the crate version, the embedded git SHA, whether the `cuda`
/// feature was compiled in, whether a CUDA runtime is reachable (probed
/// via `nvidia-smi -L`), and the GPU indices that runtime reports.
///
/// # Panics
/// Panics only if JSON serialization of the fixed-shape body fails, which
/// would be a bug (the body contains only strings, bools, and a string
/// array).
pub fn emit_version_json() {
    let cuda_feature = cfg!(feature = "cuda");
    // Probe `nvidia-smi -L` ONCE and reuse the captured output for both the
    // availability flag and the device list; the original code spawned the
    // same subprocess twice. `nvidia-smi -L` prints one line per device in
    // the form "GPU <idx>: <name> (UUID: ...)".
    let smi_output = std::process::Command::new("nvidia-smi")
        .arg("-L")
        .output()
        .ok();
    // Runtime is "available" only when the probe ran and exited successfully.
    let cuda_runtime_available = smi_output
        .as_ref()
        .map(|o| o.status.success())
        .unwrap_or(false);
    // Parse the leading index from each "GPU <idx>: ..." line; non-UTF-8
    // output or unexpected lines degrade to an empty list rather than error.
    let visible_devices: Vec<String> = if cuda_runtime_available {
        smi_output
            .and_then(|o| String::from_utf8(o.stdout).ok())
            .map(|s| {
                s.lines()
                    .filter_map(|line| {
                        line.strip_prefix("GPU ").and_then(|rest| {
                            rest.split_once(':').map(|(idx, _)| idx.trim().to_string())
                        })
                    })
                    .collect()
            })
            .unwrap_or_default()
    } else {
        Vec::new()
    };
    let body = serde_json::json!({
        "name": "apr",
        "version": env!("CARGO_PKG_VERSION"),
        "git_sha": env!("APR_GIT_SHA"),
        "cuda_feature": cuda_feature,
        "cuda_runtime_available": cuda_runtime_available,
        "visible_devices": visible_devices,
    });
    println!(
        "{}",
        serde_json::to_string_pretty(&body).expect("build version json")
    );
}