use clap::{Parser, Subcommand};
use std::path::PathBuf;
pub mod cli;
pub mod cli_actions;
pub mod commands;
pub mod config;
pub mod export;
pub mod profiling;
pub mod tools;
pub use cli_actions::*;
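/// Top-level argument parser for the `oxirs` binary.
///
/// The global flags (`--verbose`, `--quiet`, `--config`, `--no-color`,
/// `--interactive`, `--profile`) are available on every subcommand;
/// `--verbose` and `--quiet` are mutually exclusive.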
#[derive(Parser)]
#[command(name = "oxirs")]
#[command(about = "OxiRS command-line interface")]
#[command(version)]
#[command(
long_about = "OxiRS command-line interface for RDF processing, SPARQL operations, and semantic data management.\n\nComplete documentation at https://oxirs.io/docs/cli"
)]
pub struct Cli {
#[command(subcommand)]
pub command: Commands,
#[arg(short, long, global = true)]
pub verbose: bool,
#[arg(short, long, global = true)]
pub config: Option<PathBuf>,
#[arg(short, long, global = true, conflicts_with = "verbose")]
pub quiet: bool,
#[arg(long, global = true)]
pub no_color: bool,
#[arg(short, long, global = true)]
pub interactive: bool,
#[arg(short = 'P', long, global = true)]
pub profile: Option<String>,
#[arg(long, value_enum, hide = true)]
pub completion: Option<clap_complete::Shell>,
}
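/// Every `oxirs` subcommand, from dataset lifecycle (`init`, `serve`,
/// `import`, `export`, `query`, `update`) through Jena-style tooling
/// (`riot`, `arq`, `tdb-loader`, ...) to validation, inference, and
/// analytics.
///
/// Example invocations, derived from the argument definitions below
/// (file names are illustrative):
///
/// ```text
/// oxirs init mydata --format tdb2
/// oxirs import mydata data.ttl --format turtle
/// oxirs query mydata "SELECT * WHERE { ?s ?p ?o }" --output table
/// oxirs serve oxirs.toml --port 3030 --graphql
/// ```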
#[derive(Subcommand)]
pub enum Commands {
Init {
name: String,
#[arg(long, default_value = "tdb2")]
format: String,
#[arg(short, long)]
location: Option<PathBuf>,
},
Serve {
config: PathBuf,
#[arg(short, long, default_value = "3030")]
port: u16,
#[arg(long, default_value = "localhost")]
host: String,
#[arg(long)]
graphql: bool,
},
Import {
dataset: String,
file: PathBuf,
#[arg(short, long)]
format: Option<String>,
#[arg(short, long)]
graph: Option<String>,
#[arg(long)]
resume: bool,
},
Export {
dataset: String,
file: PathBuf,
#[arg(short, long, default_value = "turtle")]
format: String,
#[arg(short, long)]
graph: Option<String>,
#[arg(long)]
resume: bool,
},
Query {
dataset: String,
query: String,
#[arg(short, long)]
file: bool,
#[arg(short, long, default_value = "table")]
output: String,
},
Update {
dataset: String,
update: String,
#[arg(short, long)]
file: bool,
},
Benchmark {
#[command(subcommand)]
action: BenchmarkAction,
},
Migrate {
#[command(subcommand)]
action: MigrateAction,
},
Generate {
output: PathBuf,
#[arg(short, long, default_value = "small")]
size: String,
#[arg(short = 't', long, default_value = "rdf")]
r#type: String,
#[arg(short, long, default_value = "turtle")]
format: String,
#[arg(long)]
seed: Option<u64>,
#[arg(long)]
schema: Option<PathBuf>,
},
Index {
#[command(subcommand)]
action: IndexAction,
},
Visualize {
dataset: String,
output: PathBuf,
#[arg(short, long, default_value = "dot")]
format: String,
#[arg(short, long)]
graph: Option<String>,
#[arg(long, default_value = "1000")]
max_nodes: Option<usize>,
},
Config {
#[command(subcommand)]
action: ConfigAction,
},
Riot {
#[arg(required = true)]
input: Vec<PathBuf>,
#[arg(long, default_value = "turtle")]
output: String,
#[arg(long)]
out: Option<PathBuf>,
#[arg(long)]
syntax: Option<String>,
#[arg(long)]
base: Option<String>,
#[arg(long)]
validate: bool,
#[arg(long)]
count: bool,
},
RdfCat {
#[arg(required = true)]
files: Vec<PathBuf>,
#[arg(short, long, default_value = "turtle")]
format: String,
#[arg(short, long)]
output: Option<PathBuf>,
},
RdfCopy {
source: PathBuf,
target: PathBuf,
#[arg(long)]
source_format: Option<String>,
#[arg(long)]
target_format: Option<String>,
},
RdfDiff {
first: PathBuf,
second: PathBuf,
#[arg(short, long, default_value = "text")]
format: String,
},
RdfParse {
file: PathBuf,
#[arg(short, long)]
format: Option<String>,
#[arg(short, long)]
base: Option<String>,
},
Arq {
#[arg(long)]
query: Option<String>,
#[arg(long)]
query_file: Option<PathBuf>,
#[arg(long, action = clap::ArgAction::Append)]
data: Vec<PathBuf>,
#[arg(long, action = clap::ArgAction::Append)]
namedgraph: Vec<String>,
#[arg(long, default_value = "table")]
results: String,
#[arg(long)]
dataset: Option<PathBuf>,
#[arg(long)]
explain: bool,
#[arg(long)]
optimize: bool,
#[arg(long)]
time: bool,
},
RSparql {
#[arg(long)]
service: String,
#[arg(long)]
query: Option<String>,
#[arg(long)]
query_file: Option<PathBuf>,
#[arg(long, default_value = "table")]
results: String,
#[arg(long, default_value = "30")]
timeout: u64,
},
RUpdate {
#[arg(long)]
service: String,
#[arg(long)]
update: Option<String>,
#[arg(long)]
update_file: Option<PathBuf>,
#[arg(long, default_value = "30")]
timeout: u64,
},
QParse {
query: String,
#[arg(short, long)]
file: bool,
#[arg(long)]
print_ast: bool,
#[arg(long)]
print_algebra: bool,
},
UParse {
update: String,
#[arg(short, long)]
file: bool,
#[arg(long)]
print_ast: bool,
},
TdbLoader {
location: PathBuf,
files: Vec<PathBuf>,
#[arg(short, long)]
graph: Option<String>,
#[arg(long)]
progress: bool,
#[arg(long)]
stats: bool,
},
TdbDump {
location: PathBuf,
#[arg(short, long)]
output: Option<PathBuf>,
#[arg(short, long, default_value = "nquads")]
format: String,
#[arg(short, long)]
graph: Option<String>,
},
TdbQuery {
location: PathBuf,
query: String,
#[arg(short, long)]
file: bool,
#[arg(long, default_value = "table")]
results: String,
},
TdbUpdate {
location: PathBuf,
update: String,
#[arg(short, long)]
file: bool,
},
TdbStats {
location: PathBuf,
#[arg(long)]
detailed: bool,
#[arg(long, default_value = "text")]
format: String,
},
TdbBackup {
source: PathBuf,
target: PathBuf,
#[arg(long)]
compress: bool,
#[arg(long)]
incremental: bool,
#[arg(long)]
encrypt: bool,
#[arg(long, requires = "encrypt")]
password: Option<String>,
#[arg(long, requires = "encrypt", conflicts_with = "password")]
keyfile: Option<PathBuf>,
#[arg(long, conflicts_with_all = ["encrypt", "password"])]
generate_keyfile: Option<PathBuf>,
},
TdbCompact {
location: PathBuf,
#[arg(long)]
delete_old: bool,
},
Pitr {
#[command(subcommand)]
action: PitrAction,
},
Shacl {
#[arg(long)]
data: Option<PathBuf>,
#[arg(long)]
dataset: Option<PathBuf>,
#[arg(long)]
shapes: PathBuf,
#[arg(long, default_value = "text")]
format: String,
#[arg(short, long)]
output: Option<PathBuf>,
},
Shex {
#[arg(long)]
data: Option<PathBuf>,
#[arg(long)]
dataset: Option<PathBuf>,
#[arg(long)]
schema: PathBuf,
#[arg(long)]
shape_map: Option<PathBuf>,
#[arg(long, default_value = "text")]
format: String,
},
Infer {
data: PathBuf,
#[arg(long)]
ontology: Option<PathBuf>,
#[arg(long, default_value = "rdfs")]
profile: String,
#[arg(short, long)]
output: Option<PathBuf>,
#[arg(long, default_value = "turtle")]
format: String,
},
SchemaGen {
data: PathBuf,
#[arg(long, default_value = "shacl")]
schema_type: String,
#[arg(short, long)]
output: Option<PathBuf>,
#[arg(long)]
stats: bool,
},
Aspect {
#[command(subcommand)]
action: AspectAction,
},
Aas {
#[command(subcommand)]
action: AasAction,
},
Package {
#[command(subcommand)]
action: PackageAction,
},
Iri {
iri: String,
#[arg(long)]
resolve: Option<String>,
#[arg(long)]
validate: bool,
#[arg(long)]
normalize: bool,
},
LangTag {
tag: String,
#[arg(long)]
validate: bool,
#[arg(long)]
normalize: bool,
},
JUuid {
#[arg(short = 'n', long, default_value = "1")]
count: usize,
#[arg(short, long, default_value = "uuid")]
format: String,
},
Utf8 {
input: String,
#[arg(short, long)]
file: bool,
#[arg(long)]
validate: bool,
#[arg(long)]
fix: bool,
},
WwwEnc {
input: String,
#[arg(long, default_value = "url")]
encoding: String,
},
WwwDec {
input: String,
#[arg(long, default_value = "url")]
decoding: String,
},
RSet {
input: PathBuf,
#[arg(long)]
input_format: Option<String>,
#[arg(long, default_value = "table")]
output_format: String,
#[arg(short, long)]
output: Option<PathBuf>,
},
Interactive {
#[arg(short, long)]
dataset: Option<String>,
#[arg(long)]
history: Option<PathBuf>,
},
Performance {
#[command(subcommand)]
action: commands::performance::PerformanceCommand,
},
Explain {
dataset: String,
query: String,
#[arg(short, long)]
file: bool,
#[arg(short, long, default_value = "explain")]
mode: String,
#[arg(short, long)]
graphviz: Option<PathBuf>,
},
Optimize {
query: String,
#[arg(short, long)]
file: bool,
},
Template {
#[command(subcommand)]
action: TemplateAction,
},
History {
#[command(subcommand)]
action: HistoryAction,
},
Cicd {
#[command(subcommand)]
action: CicdAction,
},
Alias {
#[command(subcommand)]
action: AliasAction,
},
Cache {
#[command(subcommand)]
action: CacheAction,
},
Rebac(commands::rebac::RebacArgs),
Docs {
#[arg(short, long, default_value = "markdown")]
format: String,
#[arg(short, long)]
output: Option<PathBuf>,
#[arg(long)]
command: Option<String>,
},
Tutorial {
#[arg(short, long)]
lesson: Option<String>,
},
GraphAnalytics {
dataset: String,
#[arg(short, long, default_value = "pagerank")]
operation: String,
#[arg(long, default_value = "0.85")]
damping: f64,
#[arg(long, default_value = "100")]
max_iter: usize,
#[arg(long, default_value = "0.000001")]
tolerance: f64,
#[arg(long)]
source: Option<String>,
#[arg(long)]
target: Option<String>,
#[arg(short = 'k', long, default_value = "20")]
top: usize,
},
Tsdb {
#[command(subcommand)]
action: TsdbAction,
},
Modbus {
#[command(subcommand)]
action: ModbusAction,
},
Canbus {
#[command(subcommand)]
action: CanbusAction,
},
Profile {
#[command(subcommand)]
action: ProfilerAction,
},
ResultCache {
#[command(subcommand)]
action: ResultCacheAction,
},
Stream {
#[command(subcommand)]
action: StreamAction,
},
}
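/// Executes a parsed [`Cli`]: handles `--completion` early, initializes
/// logging from flags and environment, then dispatches to the matching
/// subcommand handler.
///
/// A minimal sketch of the expected entry point (assuming this crate is
/// named `oxirs` and a tokio runtime is available):
///
/// ```ignore
/// use clap::Parser;
///
/// #[tokio::main]
/// async fn main() {
///     let cli = oxirs::Cli::parse();
///     if let Err(e) = oxirs::run(cli).await {
///         eprintln!("error: {e}");
///         std::process::exit(1);
///     }
/// }
/// ```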
pub async fn run(cli: Cli) -> Result<(), Box<dyn std::error::Error>> {
use cli::{completion, CliContext};
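// `--completion <shell>` short-circuits everything else: print the
// completion script and exit.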
if let Some(shell) = cli.completion {
use clap::CommandFactory;
let mut app = Cli::command();
completion::print_completions(shell, &mut app);
return Ok(());
}
let ctx = CliContext::from_cli(cli.verbose, cli.quiet, cli.no_color);
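// Logging is derived from the CLI flags plus environment overrides:
// OXIRS_LOG_FORMAT=json forces JSON output, OXIRS_LOG_LEVEL sets the default
// level (overridden by --verbose/--quiet), and OXIRS_PERF_THRESHOLD /
// OXIRS_LOG_FILE are optional extras.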
let log_format = if std::env::var("OXIRS_LOG_FORMAT").as_deref() == Ok("json") {
cli::LogFormat::Json
} else if ctx.verbose {
cli::LogFormat::Pretty
} else {
cli::LogFormat::Text
};
let log_config = cli::LogConfig {
level: if ctx.verbose {
"debug".to_string()
} else if ctx.quiet {
"error".to_string()
} else {
std::env::var("OXIRS_LOG_LEVEL").unwrap_or_else(|_| "info".to_string())
},
format: log_format,
timestamps: !ctx.quiet,
source_location: ctx.verbose,
thread_ids: false,
perf_threshold_ms: std::env::var("OXIRS_PERF_THRESHOLD")
.ok()
.and_then(|s| s.parse().ok()),
file: std::env::var("OXIRS_LOG_FILE").ok(),
};
cli::init_logging(&log_config).expect("Failed to initialize logging");
if ctx.should_show_output() {
ctx.info(&format!("Oxirs CLI v{}", env!("CARGO_PKG_VERSION")));
}
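// Dispatch: each arm forwards to its handler and boxes the error as
// `Box<dyn std::error::Error>` so the match arms stay uniform.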
match cli.command {
Commands::Init {
name,
format,
location,
} => commands::init::run(name, format, location)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
Commands::Serve {
config,
port,
host,
graphql,
} => commands::serve::run(config, port, host, graphql)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
Commands::Import {
dataset,
file,
format,
graph,
resume,
} => commands::import::run(dataset, file, format, graph, resume)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
Commands::Export {
dataset,
file,
format,
graph,
resume,
} => commands::export::run(dataset, file, format, graph, resume)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
Commands::Query {
dataset,
query,
file,
output,
} => commands::query::run(dataset, query, file, output)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
Commands::Update {
dataset,
update,
file,
} => commands::update::run(dataset, update, file)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
Commands::Benchmark { action } => match action {
BenchmarkAction::Run {
dataset,
suite,
iterations,
output,
detailed,
warmup,
} => commands::benchmark::run(dataset, suite, iterations, output, detailed, warmup)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
BenchmarkAction::Generate {
output,
size,
dataset_type,
seed,
triples,
schema,
} => commands::benchmark::generate(output, size, dataset_type, seed, triples, schema)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
BenchmarkAction::Analyze {
input,
output,
format,
suggestions,
patterns,
} => commands::benchmark::analyze(input, output, format, suggestions, patterns)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
BenchmarkAction::Compare {
baseline,
current,
output,
threshold,
format,
} => commands::benchmark::compare(baseline, current, output, threshold, format)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
},
Commands::Migrate { action } => match action {
MigrateAction::Format {
source,
target,
from,
to,
} => commands::migrate::format(source, target, from, to)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
MigrateAction::FromTdb1 {
tdb_dir,
dataset,
skip_validation,
} => commands::migrate::from_tdb1(tdb_dir, dataset, skip_validation)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
MigrateAction::FromTdb2 {
tdb_dir,
dataset,
skip_validation,
} => commands::migrate::from_tdb2(tdb_dir, dataset, skip_validation)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
MigrateAction::FromVirtuoso {
connection,
dataset,
graphs,
} => commands::migrate::from_virtuoso(connection, dataset, graphs)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
MigrateAction::FromRdf4j { repo_dir, dataset } => {
commands::migrate::from_rdf4j(repo_dir, dataset)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>)
}
MigrateAction::FromBlazegraph {
endpoint,
dataset,
namespace,
} => commands::migrate::from_blazegraph(endpoint, dataset, namespace)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
MigrateAction::FromGraphdb {
endpoint,
dataset,
repository,
} => commands::migrate::from_graphdb(endpoint, dataset, repository)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
},
Commands::Generate {
output,
size,
r#type,
format,
seed,
schema,
} => commands::generate::run(output, size, r#type, format, seed, schema)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
Commands::Index { action } => match action {
IndexAction::List { dataset } => commands::index::list(dataset)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
IndexAction::Rebuild { dataset, index } => commands::index::rebuild(dataset, index)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
IndexAction::Stats { dataset, format } => commands::index::stats(dataset, format)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
IndexAction::Optimize { dataset } => commands::index::optimize(dataset)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
},
Commands::Visualize {
dataset,
output,
format,
graph,
max_nodes,
} => commands::visualize::export(dataset, output, format, graph, max_nodes)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
Commands::Config { action } => commands::config::run(action)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
Commands::Riot {
input,
output,
out,
syntax,
base,
validate,
count,
} => tools::riot::run(input, output, out, syntax, base, validate, count).await,
Commands::RdfCat {
files,
format,
output,
} => tools::rdfcat::run(files, format, output).await,
Commands::RdfCopy {
source,
target,
source_format,
target_format,
} => tools::rdfcopy::run(source, target, source_format, target_format).await,
Commands::RdfDiff {
first,
second,
format,
} => tools::rdfdiff::run(first, second, format).await,
Commands::RdfParse { file, format, base } => tools::rdfparse::run(file, format, base).await,
Commands::Arq {
query,
query_file,
data,
namedgraph,
results,
dataset,
explain,
optimize,
time,
} => {
tools::arq::run(tools::arq::ArqConfig {
query,
query_file,
data,
namedgraph,
results_format: results,
dataset,
explain,
optimize,
time,
})
.await
}
Commands::RSparql {
service,
query,
query_file,
results,
timeout,
} => tools::rsparql::run(service, query, query_file, results, timeout).await,
Commands::RUpdate {
service,
update,
update_file,
timeout,
} => tools::rupdate::run(service, update, update_file, timeout).await,
Commands::QParse {
query,
file,
print_ast,
print_algebra,
} => tools::qparse::run(query, file, print_ast, print_algebra).await,
Commands::UParse {
update,
file,
print_ast,
} => tools::uparse::run(update, file, print_ast).await,
Commands::TdbLoader {
location,
files,
graph,
progress,
stats,
} => tools::tdbloader::run(location, files, graph, progress, stats).await,
Commands::TdbDump {
location,
output,
format,
graph,
} => tools::tdbdump::run(location, output, format, graph).await,
Commands::TdbQuery {
location,
query,
file,
results,
} => tools::tdbquery::run(location, query, file, results).await,
Commands::TdbUpdate {
location,
update,
file,
} => tools::tdbupdate::run(location, update, file).await,
Commands::TdbStats {
location,
detailed,
format,
} => tools::tdbstats::run(location, detailed, format).await,
Commands::TdbBackup {
source,
target,
compress,
incremental,
encrypt,
password,
keyfile,
generate_keyfile,
} => {
use tools::backup_encryption;
if let Some(keyfile_path) = generate_keyfile {
println!("Generating encryption keyfile...");
backup_encryption::generate_keyfile(&keyfile_path)?;
println!(
"Keyfile generated successfully at: {}",
keyfile_path.display()
);
println!(
"⚠️ Keep this keyfile secure! Loss of the keyfile means loss of data access."
);
return Ok(());
}
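// Run the backup itself first, then (if requested) encrypt the produced
// file; `target` is cloned up front because `tdbbackup::run` consumes it.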
let target_for_encryption = target.clone();
tools::tdbbackup::run(source, target, compress, incremental).await?;
if encrypt {
use dialoguer::Password;
println!("\nEncrypting backup...");
let backup_file = &target_for_encryption;
let encrypted_file = backup_file.with_extension("oxirs.enc");
let encryption_config = if let Some(ref pwd) = password {
backup_encryption::EncryptionConfig {
password: Some(pwd.clone()),
keyfile: None,
verify: true,
}
} else if let Some(ref kf) = keyfile {
backup_encryption::EncryptionConfig {
password: None,
keyfile: Some(kf.clone()),
verify: true,
}
} else {
let pwd = Password::new()
.with_prompt("Enter encryption password")
.with_confirmation("Confirm password", "Passwords don't match")
.interact()?;
backup_encryption::EncryptionConfig {
password: Some(pwd),
keyfile: None,
verify: true,
}
};
backup_encryption::encrypt_backup(
backup_file,
&encrypted_file,
&encryption_config,
)?;
println!(
"✓ Backup encrypted successfully: {}",
encrypted_file.display()
);
}
Ok(())
}
Commands::TdbCompact {
location,
delete_old,
} => tools::tdbcompact::run(location, delete_old).await,
Commands::Pitr { action } => {
use chrono::{DateTime, Utc};
use tools::pitr::{PitrConfig, TransactionLog};
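// PITR state lives under `<dataset>/pitr/{logs,archive}`. Arms that do not
// take an explicit size fall back to a 100 MB log limit with auto-archive
// disabled.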
match action {
PitrAction::Init {
dataset,
max_log_size,
auto_archive,
} => {
println!("Initializing PITR for dataset: {}", dataset.display());
let config = PitrConfig {
log_dir: dataset.join("pitr/logs"),
archive_dir: dataset.join("pitr/archive"),
max_log_size: max_log_size * 1_048_576, // CLI value is in MiB; convert to bytes
auto_archive,
};
let _log = TransactionLog::new(config)?;
println!("✓ PITR initialized successfully");
}
PitrAction::Checkpoint { dataset, name } => {
let config = PitrConfig {
log_dir: dataset.join("pitr/logs"),
archive_dir: dataset.join("pitr/archive"),
max_log_size: 100_000_000,
auto_archive: false,
};
let log = TransactionLog::new(config)?;
let checkpoint_path = log.create_checkpoint(&name)?;
println!("✓ Checkpoint created: {}", checkpoint_path.display());
}
PitrAction::List { dataset, format } => {
let config = PitrConfig {
log_dir: dataset.join("pitr/logs"),
archive_dir: dataset.join("pitr/archive"),
max_log_size: 100_000_000,
auto_archive: false,
};
let log = TransactionLog::new(config)?;
let checkpoints = log.list_checkpoints()?;
if format == "json" {
println!("{}", serde_json::to_string_pretty(&checkpoints)?);
} else {
println!("Available Checkpoints:");
println!("{:-<80}", "");
for cp in checkpoints {
println!("Name: {}", cp.name);
println!(" Timestamp: {}", cp.timestamp.to_rfc3339());
println!(" Last Transaction ID: {}", cp.last_transaction_id);
println!(" Log Files: {}", cp.log_files.len());
println!();
}
}
}
PitrAction::RecoverTimestamp {
dataset,
timestamp,
output,
} => {
let target_time: DateTime<Utc> = timestamp.parse()?;
let config = PitrConfig {
log_dir: dataset.join("pitr/logs"),
archive_dir: dataset.join("pitr/archive"),
max_log_size: 100_000_000,
auto_archive: false,
};
let log = TransactionLog::new(config)?;
let count = log.recover_to_timestamp(target_time, &output)?;
println!("✓ Recovered {} transactions to {}", count, output.display());
}
PitrAction::RecoverTransaction {
dataset,
transaction_id,
output,
} => {
let config = PitrConfig {
log_dir: dataset.join("pitr/logs"),
archive_dir: dataset.join("pitr/archive"),
max_log_size: 100_000_000,
auto_archive: false,
};
let log = TransactionLog::new(config)?;
let count = log.recover_to_transaction(transaction_id, &output)?;
println!("✓ Recovered {} transactions to {}", count, output.display());
}
PitrAction::Archive { dataset } => {
let config = PitrConfig {
log_dir: dataset.join("pitr/logs"),
archive_dir: dataset.join("pitr/archive"),
max_log_size: 100_000_000,
auto_archive: false,
};
let mut log = TransactionLog::new(config)?;
let archived = log.archive_logs()?;
println!("✓ Archived {} log files", archived);
}
}
Ok(())
}
Commands::Shacl {
data,
dataset,
shapes,
format,
output,
} => tools::shacl::run(data, dataset, shapes, format, output).await,
Commands::Shex {
data,
dataset,
schema,
shape_map,
format,
} => tools::shex::run(data, dataset, schema, shape_map, format).await,
Commands::Infer {
data,
ontology,
profile,
output,
format,
} => tools::infer::run(data, ontology, profile, output, format).await,
Commands::SchemaGen {
data,
schema_type,
output,
stats,
} => tools::schemagen::run(data, schema_type, output, stats).await,
Commands::Aspect { action } => commands::aspect::run(action)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
Commands::Aas { action } => commands::aas::run(action)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
Commands::Package { action } => commands::package::run(action)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>),
Commands::Iri {
iri,
resolve,
validate,
normalize,
} => tools::iri::run(iri, resolve, validate, normalize).await,
Commands::LangTag {
tag,
validate,
normalize,
} => tools::langtag::run(tag, validate, normalize).await,
Commands::JUuid { count, format } => tools::juuid::run(count, format).await,
Commands::Utf8 {
input,
file,
validate,
fix,
} => tools::utf8::run(input, file, validate, fix).await,
Commands::WwwEnc { input, encoding } => tools::wwwenc::run(input, encoding).await,
Commands::WwwDec { input, decoding } => tools::wwwdec::run(input, decoding).await,
Commands::RSet {
input,
input_format,
output_format,
output,
} => tools::rset::run(input, input_format, output_format, output).await,
Commands::Interactive {
dataset,
history: _, // accepted but not yet used by the interactive shell
} => {
ctx.info("Starting interactive SPARQL shell...");
commands::interactive::execute(dataset, cli.config)
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>)
}
Commands::Performance { action } => {
let config = config::Config::default();
action
.execute(&config)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error>)
}
Commands::Explain {
dataset,
query,
file,
mode,
graphviz,
} => {
let analysis_mode = match mode.to_lowercase().as_str() {
"explain" => commands::explain::AnalysisMode::Explain,
"analyze" => commands::explain::AnalysisMode::Analyze,
"full" => commands::explain::AnalysisMode::Full,
_ => {
eprintln!(
"Invalid mode '{}'. Valid modes: explain, analyze, full",
mode
);
return Err("Invalid analysis mode".into());
}
};
commands::explain::explain_query_with_options(
dataset,
query,
file,
analysis_mode,
graphviz,
)
.await
.map_err(|e| e.into())
}
Commands::Optimize { query, file } => {
commands::query_optimizer::optimize_command(query, file)
.await
.map_err(|e| e.into())
}
Commands::Template { action } => {
use std::collections::HashMap;
match action {
TemplateAction::List { category } => commands::templates::list_command(category)
.await
.map_err(|e| e.into()),
TemplateAction::Show { name } => commands::templates::show_command(name)
.await
.map_err(|e| e.into()),
TemplateAction::Render { name, param } => {
let mut params = HashMap::new();
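// Parameters arrive as repeated `key=value` strings; split on the first
// '=' only, so values may themselves contain '='.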
for p in param {
let parts: Vec<&str> = p.splitn(2, '=').collect();
if parts.len() != 2 {
eprintln!("Invalid parameter format: '{}'. Expected key=value", p);
return Err("Invalid parameter format".into());
}
params.insert(parts[0].to_string(), parts[1].to_string());
}
commands::templates::render_command(name, params)
.await
.map_err(|e| e.into())
}
}
}
Commands::History { action } => match action {
HistoryAction::List { limit, dataset } => {
commands::history::commands::list_command(limit, dataset)
.await
.map_err(|e| e.into())
}
HistoryAction::Show { id } => commands::history::commands::show_command(id)
.await
.map_err(|e| e.into()),
HistoryAction::Replay { id, output } => {
commands::history::commands::replay_command(id, output)
.await
.map_err(|e| e.into())
}
HistoryAction::Search { query } => commands::history::commands::search_command(query)
.await
.map_err(|e| e.into()),
HistoryAction::Clear => commands::history::commands::clear_command()
.await
.map_err(|e| e.into()),
HistoryAction::Stats => commands::history::commands::stats_command()
.await
.map_err(|e| e.into()),
HistoryAction::Analytics { dataset } => {
commands::history::commands::analytics_command(dataset)
.await
.map_err(|e| e.into())
}
},
Commands::Cicd { action } => match action {
CicdAction::Report {
input,
output,
format,
} => commands::cicd::generate_test_report(input, output, format)
.await
.map_err(|e| e.into()),
CicdAction::Docker { output } => commands::cicd::generate_docker_files(output)
.await
.map_err(|e| e.into()),
CicdAction::Github { output } => commands::cicd::generate_github_workflow(output)
.await
.map_err(|e| e.into()),
CicdAction::Gitlab { output } => commands::cicd::generate_gitlab_ci(output)
.await
.map_err(|e| e.into()),
},
Commands::Alias { action } => match action {
AliasAction::List => commands::alias::list().await.map_err(|e| e.into()),
AliasAction::Show { name } => commands::alias::show(name.clone())
.await
.map_err(|e| e.into()),
AliasAction::Add { name, command } => {
commands::alias::add(name.clone(), command.clone())
.await
.map_err(|e| e.into())
}
AliasAction::Remove { name } => commands::alias::remove(name.clone())
.await
.map_err(|e| e.into()),
AliasAction::Reset => commands::alias::reset().await.map_err(|e| e.into()),
},
Commands::Cache { action } => match action {
CacheAction::Stats => commands::cache::commands::stats_command()
.await
.map_err(|e| e.into()),
CacheAction::Clear => commands::cache::commands::clear_command()
.await
.map_err(|e| e.into()),
CacheAction::Config { ttl, max_size } => {
commands::cache::commands::config_command(ttl, max_size)
.await
.map_err(|e| e.into())
}
},
Commands::Rebac(args) => commands::rebac::execute(args).await.map_err(|e| e.into()),
Commands::Docs {
format,
output,
command,
} => {
use cli::doc_generator::{DocFormat, DocGenerator};
use std::io::Write;
let doc_format: DocFormat = format
.parse()
.map_err(|e: String| std::io::Error::new(std::io::ErrorKind::InvalidInput, e))?;
let generator = DocGenerator::new();
if let Some(cmd_name) = command {
ctx.info(&format!(
"Generating documentation for command: {}",
cmd_name
));
ctx.warn(
"Single command documentation not yet implemented. Generating all commands.",
);
}
let content = generator
.generate(doc_format)
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?;
if let Some(output_path) = output {
let mut file = std::fs::File::create(&output_path)?;
file.write_all(content.as_bytes())?;
ctx.success(&format!(
"Documentation written to: {}",
output_path.display()
));
} else {
println!("{}", content);
}
Ok(())
}
Commands::Tutorial { lesson } => {
use cli::tutorial::TutorialManager;
let mut manager = TutorialManager::new();
if let Some(lesson_name) = lesson {
ctx.info(&format!("Starting tutorial with lesson: {}", lesson_name));
ctx.warn(
"Specific lesson selection not yet implemented. Starting interactive tutorial.",
);
}
manager.start().map_err(|e| {
std::io::Error::new(std::io::ErrorKind::Other, format!("Tutorial error: {}", e))
})?;
Ok(())
}
Commands::GraphAnalytics {
dataset,
operation,
damping,
max_iter,
tolerance,
source,
target,
top,
} => {
use commands::graph_analytics::{
execute_graph_analytics, AnalyticsConfig, AnalyticsOperation,
};
use std::path::Path;
let op: AnalyticsOperation = operation
.parse()
.map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidInput, e))?;
let config = AnalyticsConfig {
operation: op,
damping_factor: damping,
max_iterations: max_iter,
tolerance,
source_node: source.clone(),
target_node: target.clone(),
top_k: top,
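// The remaining knobs are fixed defaults; they are not exposed as CLI
// flags here.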
katz_alpha: 0.1,
katz_beta: 1.0,
k_core_value: None,
enable_simd: true,
enable_parallel: true,
enable_gpu: false,
enable_cache: true,
export_path: None,
enable_benchmarking: false,
};
let dataset_path = Path::new(dataset.as_str());
execute_graph_analytics(dataset_path, &config)
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?;
Ok(())
}
Commands::Tsdb { action } => commands::tsdb::execute(action, &ctx)
.await
.map_err(|e| e.into()),
Commands::Modbus { action } => commands::modbus::execute(action, &ctx)
.await
.map_err(|e| e.into()),
Commands::Canbus { action } => commands::canbus::execute(action, &ctx)
.await
.map_err(|e| e.into()),
Commands::Profile { action } => match action {
ProfilerAction::Run {
dataset,
query,
file,
iterations,
suggestions,
} => commands::query_profiler::run_profile_command(
dataset,
query,
file,
iterations,
suggestions,
)
.await
.map_err(|e| e.into()),
ProfilerAction::Suggest { query, file } => {
let q = if file {
std::fs::read_to_string(&query)
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?
} else {
query
};
let features = commands::query_profiler::QueryProfileFeatures::extract(&q);
let suggestions = commands::query_profiler::generate_suggestions(&features, &q);
for s in suggestions {
println!("[{}] {}: {}", s.severity.label(), s.title, s.description);
}
Ok(())
}
},
Commands::ResultCache { action } => match action {
ResultCacheAction::Stats => commands::result_cache::commands::stats_command()
.await
.map_err(|e| e.into()),
ResultCacheAction::Clear => commands::result_cache::commands::clear_command()
.await
.map_err(|e| e.into()),
ResultCacheAction::Invalidate { dataset } => {
commands::result_cache::commands::invalidate_dataset_command(&dataset)
.await
.map_err(|e| e.into())
}
ResultCacheAction::Evict => commands::result_cache::commands::evict_expired_command()
.await
.map_err(|e| e.into()),
ResultCacheAction::List { dataset } => {
commands::result_cache::commands::list_command(dataset.as_deref())
.await
.map_err(|e| e.into())
}
ResultCacheAction::Config { max_size, ttl } => {
// NOTE: runtime reconfiguration of the global cache is not wired up yet;
// the new values are reported but not applied.
let _cache = commands::result_cache::global_lru_cache();
if let Some(sz) = max_size {
println!("Max entries updated to {}", sz);
}
if let Some(t) = ttl {
println!("Default TTL updated to {}s", t);
}
Ok(())
}
},
Commands::Stream { action } => match action {
StreamAction::Query {
dataset,
query,
file,
chunk_size,
format,
max_rows,
no_progress,
output,
} => commands::stream::run_stream_command(
dataset,
query,
file,
chunk_size,
format,
max_rows,
no_progress,
output,
)
.await
.map_err(|e| e.into()),
},
}
}