use clap::{Parser, ValueEnum};
use std::path::PathBuf;
// Command-line arguments for the `flowscope` CLI, parsed with clap's derive API.
//
// NOTE: field comments deliberately use `//` rather than `///` — clap turns
// doc comments on derive fields into `--help` text, so `///` here would change
// the program's user-visible output.
//
// Fields behind `#[cfg(feature = ...)]` only exist when the corresponding
// cargo feature is compiled in.
#[derive(Parser, Debug)]
#[command(name = "flowscope")]
#[command(about = "Analyze SQL files for data lineage", long_about = None)]
#[command(version)]
pub struct Args {
    // SQL files to analyze. Not marked required — may be empty, e.g. when
    // running in server mode (`--serve`, see tests).
    #[arg(value_name = "FILES")]
    pub files: Vec<PathBuf>,

    // SQL dialect used for parsing (`-d`); see DialectArg for accepted values.
    #[arg(short, long, default_value = "generic", value_enum)]
    pub dialect: DialectArg,

    // Output format (`-f`); defaults to a human-readable table.
    #[arg(short, long, default_value = "table", value_enum)]
    pub format: OutputFormat,

    // Optional schema file (`-s`) providing table definitions for lineage.
    #[arg(short, long, value_name = "FILE")]
    pub schema: Option<PathBuf>,

    // URL of an external metadata provider (feature-gated).
    #[cfg(feature = "metadata-provider")]
    #[arg(long, value_name = "URL")]
    pub metadata_url: Option<String>,

    // Schema name to query on the metadata provider (feature-gated).
    #[cfg(feature = "metadata-provider")]
    #[arg(long, value_name = "SCHEMA")]
    pub metadata_schema: Option<String>,

    // Write output to this file (`-o`) instead of stdout.
    #[arg(short, long, value_name = "FILE")]
    pub output: Option<PathBuf>,

    // Project name embedded in exported artifacts; defaults to "lineage".
    #[arg(long, default_value = "lineage")]
    pub project_name: String,

    // Target schema name for exports.
    #[arg(long, value_name = "SCHEMA")]
    pub export_schema: Option<String>,

    // Lineage view granularity (`-v`); see ViewMode.
    #[arg(short, long, default_value = "table", value_enum)]
    pub view: ViewMode,

    // Enable lint mode. All flags below that declare `requires = "lint"`
    // (or `requires_all` including "lint") are rejected by clap unless this
    // flag is also passed.
    #[arg(long)]
    pub lint: bool,

    // Apply lint fixes; only valid together with --lint.
    #[arg(long, requires = "lint")]
    pub fix: bool,

    // Only apply fixes, skip reporting; needs both --lint and --fix.
    #[arg(long, requires_all = ["lint", "fix"])]
    pub fix_only: bool,

    // Also apply fixes marked unsafe; needs both --lint and --fix.
    #[arg(long, requires_all = ["lint", "fix"])]
    pub unsafe_fixes: bool,

    // Use the legacy AST-based fixer; needs both --lint and --fix.
    #[arg(long, requires_all = ["lint", "fix"])]
    pub legacy_ast_fixes: bool,

    // Show which fixes would be applied; only valid with --lint.
    #[arg(long, requires = "lint")]
    pub show_fixes: bool,

    // Rule IDs to skip; accepts comma-separated values and/or repeated flags.
    #[arg(long, requires = "lint", value_delimiter = ',')]
    pub exclude_rules: Vec<String>,

    // Per-rule configuration, passed as a raw JSON string (parsed downstream).
    #[arg(long, requires = "lint", value_name = "JSON")]
    pub rule_configs: Option<String>,

    // Lint worker count; must be a positive integer (see parse_positive_usize).
    #[arg(
        long,
        requires = "lint",
        value_name = "N",
        value_parser = parse_positive_usize
    )]
    pub jobs: Option<usize>,

    // Lint files even if they are matched by .gitignore.
    #[arg(long, requires = "lint")]
    pub no_respect_gitignore: bool,

    // Suppress non-essential output (`-q`).
    #[arg(short, long)]
    pub quiet: bool,

    // Compact output (`-c`).
    #[arg(short, long)]
    pub compact: bool,

    // SQL templating engine to apply before parsing (feature-gated).
    #[cfg(feature = "templating")]
    #[arg(long, value_enum)]
    pub template: Option<TemplateArg>,

    // Template variables as KEY=VALUE pairs; split downstream (feature-gated).
    #[cfg(feature = "templating")]
    #[arg(long = "template-var", value_name = "KEY=VALUE")]
    pub template_vars: Vec<String>,

    // Start the built-in web server (feature-gated).
    #[cfg(feature = "serve")]
    #[arg(long)]
    pub serve: bool,

    // Port for the web server; defaults to 3000.
    #[cfg(feature = "serve")]
    #[arg(long, default_value = "3000")]
    pub port: u16,

    // Directories to watch for changes in serve mode; repeatable.
    #[cfg(feature = "serve")]
    #[arg(long, value_name = "DIR")]
    pub watch: Vec<PathBuf>,

    // Open the browser after the server starts.
    #[cfg(feature = "serve")]
    #[arg(long)]
    pub open: bool,
}
// SQL dialects accepted by `--dialect`. clap derives the CLI value names
// (lowercased variant names) via ValueEnum.
//
// `//` comments are used instead of `///` on purpose: doc comments on
// ValueEnum variants become `--help` text and would change CLI output.
#[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)]
pub enum DialectArg {
    Generic,
    Ansi,
    Bigquery,
    Clickhouse,
    // "sparksql" is accepted as an alias and maps to Databricks.
    #[value(alias = "sparksql")]
    Databricks,
    Duckdb,
    Hive,
    Mssql,
    Mysql,
    Oracle,
    Postgres,
    Redshift,
    Snowflake,
    Sqlite,
}
// Bridges the CLI-facing dialect enum to the core library's dialect type.
// The match is deliberately exhaustive (no `_` arm) so adding a variant to
// DialectArg is a compile error here instead of a silent fallback.
impl From<DialectArg> for flowscope_core::Dialect {
    fn from(d: DialectArg) -> Self {
        match d {
            DialectArg::Generic => flowscope_core::Dialect::Generic,
            DialectArg::Ansi => flowscope_core::Dialect::Ansi,
            DialectArg::Bigquery => flowscope_core::Dialect::Bigquery,
            DialectArg::Clickhouse => flowscope_core::Dialect::Clickhouse,
            DialectArg::Databricks => flowscope_core::Dialect::Databricks,
            DialectArg::Duckdb => flowscope_core::Dialect::Duckdb,
            DialectArg::Hive => flowscope_core::Dialect::Hive,
            DialectArg::Mssql => flowscope_core::Dialect::Mssql,
            DialectArg::Mysql => flowscope_core::Dialect::Mysql,
            DialectArg::Oracle => flowscope_core::Dialect::Oracle,
            DialectArg::Postgres => flowscope_core::Dialect::Postgres,
            DialectArg::Redshift => flowscope_core::Dialect::Redshift,
            DialectArg::Snowflake => flowscope_core::Dialect::Snowflake,
            DialectArg::Sqlite => flowscope_core::Dialect::Sqlite,
        }
    }
}
// Output formats accepted by `--format`. CLI value names are the lowercased
// variant names (e.g. `-f json`). `//` comments only — `///` would alter the
// generated `--help` text.
#[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)]
pub enum OutputFormat {
    Table,
    Json,
    Mermaid,
    Html,
    Sql,
    Csv,
    Xlsx,
    Duckdb,
}
/// clap value parser for `--jobs`: accepts only a strictly positive integer.
///
/// Returns the parsed value, or a human-readable error string that clap
/// surfaces to the user ("invalid value ..." for non-numeric input, "must be
/// greater than zero" for `0`).
fn parse_positive_usize(value: &str) -> Result<usize, String> {
    match value.parse::<usize>() {
        Ok(0) => Err("must be greater than zero".to_string()),
        Ok(n) => Ok(n),
        Err(_) => Err(format!("invalid value '{value}', expected a positive integer")),
    }
}
// Templating engines accepted by `--template` (only with the "templating"
// feature). `//` comments only — `///` would alter `--help` output.
#[cfg(feature = "templating")]
#[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)]
pub enum TemplateArg {
    Jinja,
    Dbt,
}
// Bridges the CLI templating enum to the core library's TemplateMode.
// Exhaustive match: a new TemplateArg variant is a compile error here.
#[cfg(feature = "templating")]
impl From<TemplateArg> for flowscope_core::TemplateMode {
    fn from(t: TemplateArg) -> Self {
        match t {
            TemplateArg::Jinja => flowscope_core::TemplateMode::Jinja,
            TemplateArg::Dbt => flowscope_core::TemplateMode::Dbt,
        }
    }
}
// Lineage view granularity accepted by `--view`: per-script, per-table,
// per-column, or a hybrid. `//` comments only — `///` would alter `--help`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)]
pub enum ViewMode {
    Script,
    Table,
    Column,
    Hybrid,
}
// Unit tests for CLI parsing: enum conversion, default values, and the
// clap `requires`/`requires_all` constraints between lint-related flags.
#[cfg(test)]
mod tests {
    use super::*;

    // CLI dialect enum converts to the matching core dialect.
    #[test]
    fn test_dialect_conversion() {
        let dialect: flowscope_core::Dialect = DialectArg::Postgres.into();
        assert_eq!(dialect, flowscope_core::Dialect::Postgres);
    }

    // The "sparksql" value alias on DialectArg should select Databricks.
    #[test]
    fn test_sparksql_dialect_alias_maps_to_databricks() {
        let args = Args::parse_from(["flowscope", "-d", "sparksql", "test.sql"]);
        assert_eq!(args.dialect, DialectArg::Databricks);
        let core_dialect: flowscope_core::Dialect = args.dialect.into();
        assert_eq!(core_dialect, flowscope_core::Dialect::Databricks);
    }

    // Just a file argument: all defaults apply.
    #[test]
    fn test_parse_minimal_args() {
        let args = Args::parse_from(["flowscope", "test.sql"]);
        assert_eq!(args.files.len(), 1);
        assert_eq!(args.dialect, DialectArg::Generic);
        assert_eq!(args.format, OutputFormat::Table);
        assert_eq!(args.project_name, "lineage");
        assert!(args.export_schema.is_none());
    }

    // Every non-lint, non-feature-gated option together.
    #[test]
    fn test_parse_full_args() {
        let args = Args::parse_from([
            "flowscope",
            "-d",
            "postgres",
            "-f",
            "json",
            "-s",
            "schema.sql",
            "-o",
            "output.json",
            "-v",
            "column",
            "--quiet",
            "--compact",
            "--project-name",
            "demo",
            "--export-schema",
            "lineage",
            "file1.sql",
            "file2.sql",
        ]);
        assert_eq!(args.dialect, DialectArg::Postgres);
        assert_eq!(args.format, OutputFormat::Json);
        assert_eq!(args.schema.unwrap().to_str().unwrap(), "schema.sql");
        assert_eq!(args.output.unwrap().to_str().unwrap(), "output.json");
        assert_eq!(args.view, ViewMode::Column);
        assert_eq!(args.project_name, "demo");
        assert_eq!(args.export_schema.as_deref(), Some("lineage"));
        assert!(args.quiet);
        assert!(args.compact);
        assert_eq!(args.files.len(), 2);
    }

    // --lint alone: every dependent flag stays at its default.
    #[test]
    fn test_lint_flag() {
        let args = Args::parse_from(["flowscope", "--lint", "test.sql"]);
        assert!(args.lint);
        assert!(!args.fix);
        assert!(!args.fix_only);
        assert!(!args.unsafe_fixes);
        assert!(!args.legacy_ast_fixes);
        assert!(!args.show_fixes);
        assert!(args.exclude_rules.is_empty());
        assert!(args.rule_configs.is_none());
        assert!(args.jobs.is_none());
        assert!(!args.no_respect_gitignore);
    }

    #[test]
    fn test_lint_fix_flag() {
        let args = Args::parse_from(["flowscope", "--lint", "--fix", "test.sql"]);
        assert!(args.lint);
        assert!(args.fix);
        assert!(!args.fix_only);
        assert!(!args.unsafe_fixes);
        assert!(!args.legacy_ast_fixes);
        assert!(!args.show_fixes);
    }

    #[test]
    fn test_fix_only_flag() {
        let args = Args::parse_from(["flowscope", "--lint", "--fix", "--fix-only", "test.sql"]);
        assert!(args.lint);
        assert!(args.fix);
        assert!(args.fix_only);
    }

    // requires_all = ["lint", "fix"]: both prerequisites must be present.
    #[test]
    fn test_fix_only_requires_lint_and_fix() {
        let missing_both = Args::try_parse_from(["flowscope", "--fix-only", "test.sql"]);
        assert!(missing_both.is_err());
        let missing_fix = Args::try_parse_from(["flowscope", "--lint", "--fix-only", "test.sql"]);
        assert!(missing_fix.is_err());
    }

    #[test]
    fn test_fix_requires_lint() {
        let result = Args::try_parse_from(["flowscope", "--fix", "test.sql"]);
        assert!(result.is_err());
    }

    #[test]
    fn test_unsafe_fixes_flag() {
        let args = Args::parse_from(["flowscope", "--lint", "--fix", "--unsafe-fixes", "test.sql"]);
        assert!(args.lint);
        assert!(args.fix);
        assert!(args.unsafe_fixes);
    }

    #[test]
    fn test_unsafe_fixes_requires_lint_and_fix() {
        let missing_both = Args::try_parse_from(["flowscope", "--unsafe-fixes", "test.sql"]);
        assert!(missing_both.is_err());
        let missing_fix =
            Args::try_parse_from(["flowscope", "--lint", "--unsafe-fixes", "test.sql"]);
        assert!(missing_fix.is_err());
    }

    #[test]
    fn test_legacy_ast_fixes_flag() {
        let args = Args::parse_from([
            "flowscope",
            "--lint",
            "--fix",
            "--legacy-ast-fixes",
            "test.sql",
        ]);
        assert!(args.lint);
        assert!(args.fix);
        assert!(args.legacy_ast_fixes);
    }

    #[test]
    fn test_legacy_ast_fixes_requires_lint_and_fix() {
        let missing_both = Args::try_parse_from(["flowscope", "--legacy-ast-fixes", "test.sql"]);
        assert!(missing_both.is_err());
        let missing_fix =
            Args::try_parse_from(["flowscope", "--lint", "--legacy-ast-fixes", "test.sql"]);
        assert!(missing_fix.is_err());
    }

    #[test]
    fn test_show_fixes_flag() {
        let args = Args::parse_from(["flowscope", "--lint", "--show-fixes", "test.sql"]);
        assert!(args.lint);
        assert!(args.show_fixes);
    }

    #[test]
    fn test_show_fixes_requires_lint() {
        let result = Args::try_parse_from(["flowscope", "--show-fixes", "test.sql"]);
        assert!(result.is_err());
    }

    // value_delimiter = ',' splits a single argument into multiple rules.
    #[test]
    fn test_lint_exclude_rules() {
        let args = Args::parse_from([
            "flowscope",
            "--lint",
            "--exclude-rules",
            "LINT_AM_008,LINT_ST_006",
            "test.sql",
        ]);
        assert!(args.lint);
        assert_eq!(args.exclude_rules, vec!["LINT_AM_008", "LINT_ST_006"]);
    }

    // Repeating the flag accumulates values, same as the comma form.
    #[test]
    fn test_lint_exclude_rules_repeated() {
        let args = Args::parse_from([
            "flowscope",
            "--lint",
            "--exclude-rules",
            "LINT_AM_008",
            "--exclude-rules",
            "LINT_ST_006",
            "test.sql",
        ]);
        assert_eq!(args.exclude_rules, vec!["LINT_AM_008", "LINT_ST_006"]);
    }

    #[test]
    fn test_lint_exclude_rules_requires_lint() {
        let result = Args::try_parse_from([
            "flowscope",
            "--exclude-rules",
            "LINT_AM_008,LINT_ST_006",
            "test.sql",
        ]);
        assert!(result.is_err());
    }

    // rule_configs is kept as a raw JSON string; parsing happens downstream.
    #[test]
    fn test_lint_rule_configs_json() {
        let args = Args::parse_from([
            "flowscope",
            "--lint",
            "--rule-configs",
            r#"{"structure.subquery":{"forbid_subquery_in":"both"}}"#,
            "test.sql",
        ]);
        assert_eq!(
            args.rule_configs.as_deref(),
            Some(r#"{"structure.subquery":{"forbid_subquery_in":"both"}}"#)
        );
    }

    #[test]
    fn test_lint_jobs_flag() {
        let args = Args::parse_from(["flowscope", "--lint", "--jobs", "4", "test.sql"]);
        assert_eq!(args.jobs, Some(4));
    }

    #[test]
    fn test_lint_jobs_requires_lint() {
        let result = Args::try_parse_from(["flowscope", "--jobs", "4", "test.sql"]);
        assert!(result.is_err());
    }

    // Exercises the parse_positive_usize value parser through clap.
    #[test]
    fn test_lint_jobs_must_be_positive() {
        let result = Args::try_parse_from(["flowscope", "--lint", "--jobs", "0", "test.sql"]);
        assert!(result.is_err());
    }

    #[test]
    fn test_lint_no_respect_gitignore_flag() {
        let args = Args::parse_from(["flowscope", "--lint", "--no-respect-gitignore", "test.sql"]);
        assert!(args.no_respect_gitignore);
    }

    #[test]
    fn test_lint_no_respect_gitignore_requires_lint() {
        let result = Args::try_parse_from(["flowscope", "--no-respect-gitignore", "test.sql"]);
        assert!(result.is_err());
    }

    // Serve-mode tests (feature "serve"): note --serve parses with no files.
    #[cfg(feature = "serve")]
    #[test]
    fn test_serve_args_defaults() {
        let args = Args::parse_from(["flowscope", "--serve"]);
        assert!(args.serve);
        assert_eq!(args.port, 3000);
        assert!(args.watch.is_empty());
        assert!(!args.open);
    }

    #[cfg(feature = "serve")]
    #[test]
    fn test_serve_args_custom_port() {
        let args = Args::parse_from(["flowscope", "--serve", "--port", "8080"]);
        assert!(args.serve);
        assert_eq!(args.port, 8080);
    }

    #[cfg(feature = "serve")]
    #[test]
    fn test_serve_args_watch_dirs() {
        let args = Args::parse_from([
            "flowscope",
            "--serve",
            "--watch",
            "./sql",
            "--watch",
            "./queries",
        ]);
        assert!(args.serve);
        assert_eq!(args.watch.len(), 2);
        assert_eq!(args.watch[0].to_str().unwrap(), "./sql");
        assert_eq!(args.watch[1].to_str().unwrap(), "./queries");
    }

    #[cfg(feature = "serve")]
    #[test]
    fn test_serve_args_open_browser() {
        let args = Args::parse_from(["flowscope", "--serve", "--open"]);
        assert!(args.serve);
        assert!(args.open);
    }

    #[cfg(feature = "serve")]
    #[test]
    fn test_serve_args_full() {
        let args = Args::parse_from([
            "flowscope",
            "--serve",
            "--port",
            "9000",
            "--watch",
            "./examples",
            "--open",
            "-d",
            "postgres",
        ]);
        assert!(args.serve);
        assert_eq!(args.port, 9000);
        assert_eq!(args.watch.len(), 1);
        assert!(args.open);
        assert_eq!(args.dialect, DialectArg::Postgres);
    }
}