use std::path::{Path, PathBuf};
use std::process::ExitCode;
use anyhow::{anyhow, bail, Context, Result};
use clap::{Parser, Subcommand};
use console::style;
use include_dir::{include_dir, Dir, DirEntry};
use serde::Deserialize;
mod schema;
// The `template/` directory tree baked into the binary at compile time;
// `scaffold_new` / `walk_template` copy it out when creating a project.
static TEMPLATE: Dir<'static> = include_dir!("$CARGO_MANIFEST_DIR/template");
// Top-level parser for invocation as a cargo subcommand (`cargo reef …`).
// Cargo passes `reef` as the first argument when dispatching to the
// `cargo-reef` binary; a single-variant enum lets clap consume it.
// (`//` comments are used rather than `///` so clap help output is unchanged.)
#[derive(Parser)]
#[command(bin_name = "cargo")]
enum CargoCli {
    Reef(ReefArgs),
}
// Arguments for the `reef` subcommand itself; all work happens in `cmd`.
#[derive(Parser)]
#[command(
    name = "reef",
    about = "Scaffold and manage Reef apps",
    version,
    disable_help_subcommand = true
)]
struct ReefArgs {
    #[command(subcommand)]
    cmd: ReefCommand,
}
// All `cargo reef` subcommands. `//` comments (not `///`) are used so the
// clap-generated help text stays exactly as authored.
#[derive(Subcommand)]
enum ReefCommand {
    // Scaffold a new project from the embedded template.
    New {
        name: String,
    },
    // Run the dev loop by delegating to the Dioxus CLI (`dx serve --web`).
    Dev {
        // Everything after `dev` is forwarded verbatim to `dx`.
        #[arg(trailing_var_arg = true, allow_hyphen_values = true)]
        extra: Vec<String>,
    },
    // Migration management; see `MigrateCommand`.
    Migrate {
        #[command(subcommand)]
        cmd: MigrateCommand,
    },
    // Diff the declared schema against the database and reconcile.
    #[command(name = "db:push")]
    DbPush {
        // Rust source file declaring the schema.
        #[arg(long, default_value = "src/server/db/schema.rs")]
        schema: std::path::PathBuf,
        // Feature names handed to the schema parser's feature set.
        #[arg(long, value_delimiter = ',', value_name = "F1,F2,...")]
        features: Vec<String>,
        // Skip the interactive confirmation prompt.
        #[arg(short = 'y', long)]
        yes: bool,
        // Capture the diff as a migration file named NAME instead of applying.
        #[arg(long, value_name = "NAME")]
        write: Option<String>,
        // Print the SQL that would run without touching the database.
        #[arg(long)]
        dry_run: bool,
        // Permit destructive DROP TABLE / DROP COLUMN actions.
        #[arg(long)]
        allow_drop: bool,
    },
    // Hidden: dump the parsed schema model as JSON.
    #[command(name = "_debug-schema", hide = true)]
    DebugSchema {
        #[arg(default_value = "src/server/db/schema.rs")]
        path: std::path::PathBuf,
        #[arg(long, value_delimiter = ',', value_name = "F1,F2,...")]
        features: Vec<String>,
    },
    // Hidden: emit the generated SQL for the parsed schema.
    #[command(name = "_debug-sql", hide = true)]
    DebugSql {
        #[arg(default_value = "src/server/db/schema.rs")]
        path: std::path::PathBuf,
        #[arg(long, value_delimiter = ',', value_name = "F1,F2,...")]
        features: Vec<String>,
    },
    // Hidden: introspect an existing database file.
    #[command(name = "_debug-introspect", hide = true)]
    DebugIntrospect {
        #[arg(default_value = "./data/reef.db")]
        db: std::path::PathBuf,
    },
    // Hidden: print the schema-vs-database diff without applying.
    #[command(name = "_debug-diff", hide = true)]
    DebugDiff {
        #[arg(long, default_value = "src/server/db/schema.rs")]
        schema: std::path::PathBuf,
        #[arg(long, default_value = "./data/reef.db")]
        db: std::path::PathBuf,
        #[arg(long, value_delimiter = ',', value_name = "F1,F2,...")]
        features: Vec<String>,
    },
}
// Subcommands under `cargo reef migrate`. `//` comments keep clap help
// output unchanged.
#[derive(Subcommand)]
enum MigrateCommand {
    // Apply all pending forward migrations.
    Run,
    // Create a timestamped empty migration (plus a `.down.sql` skeleton
    // when --with-down is passed).
    New {
        name: String,
        #[arg(long)]
        with_down: bool,
    },
    // Show applied vs pending migrations.
    Status,
    // Roll back the most recently applied migration via its `.down.sql`.
    Revert,
}
fn main() -> ExitCode {
let CargoCli::Reef(args) = CargoCli::parse();
let result = match args.cmd {
ReefCommand::New { name } => scaffold_new(&name),
ReefCommand::Dev { extra } => run_dev(&extra),
ReefCommand::Migrate { cmd } => run_migrate(cmd),
ReefCommand::DebugSchema { path, features } => {
debug_schema(&path, &feature_set(features))
}
ReefCommand::DebugSql { path, features } => debug_sql(&path, &feature_set(features)),
ReefCommand::DebugIntrospect { db } => block_on(debug_introspect(&db)),
ReefCommand::DebugDiff {
schema,
db,
features,
} => block_on(debug_diff(&schema, &db, &feature_set(features))),
ReefCommand::DbPush {
schema,
features,
yes,
write,
dry_run,
allow_drop,
} => block_on(db_push(
&schema,
&feature_set(features),
yes,
write.as_deref(),
dry_run,
allow_drop,
)),
};
match result {
Ok(()) => ExitCode::SUCCESS,
Err(e) => {
eprintln!("{} {:#}", style("error:").red().bold(), e);
ExitCode::FAILURE
}
}
}
/// Create a new project directory from the embedded `template/` tree.
///
/// `input` may be a path (e.g. `apps/mysite`); its final component becomes
/// the package name after validation. Fails if the target already exists.
fn scaffold_new(input: &str) -> Result<()> {
    let target = PathBuf::from(input);
    let name = target
        .file_name()
        .and_then(|n| n.to_str())
        .ok_or_else(|| anyhow!("could not derive a project name from `{input}`"))?;
    validate_name(name)?;
    if target.exists() {
        bail!(
            "directory `{}` already exists — pick a different name or remove it",
            target.display()
        );
    }
    print_banner();
    std::fs::create_dir_all(&target)
        .with_context(|| format!("creating directory `{}`", target.display()))?;
    // Copy every template entry, substituting the placeholder package name
    // and counting files written for the summary below.
    let mut count = 0;
    walk_template(&TEMPLATE, &target, name, &mut count)?;
    // Best-effort `git init`; failures are ignored inside the helper.
    init_git(&target);
    println!();
    println!(
        "{} Generated {} files in {} ({})",
        style("✓").green().bold(),
        count,
        style(input).bold(),
        style(format!("package: {name}")).dim()
    );
    println!();
    println!("Next steps:");
    println!();
    println!(" {} {}", style("$").dim(), style(format!("cd {input}")).bold());
    println!(" {} {}", style("$").dim(), style("cargo reef migrate run").bold());
    println!(" {} {}", style("$").dim(), style("cargo reef dev").bold());
    println!();
    println!("Learn more at {}", style("https://reef.rs").underlined().cyan());
    println!();
    Ok(())
}
/// Recursively materialize an embedded template tree under `target`.
///
/// Directories are created eagerly; each file has a trailing `.in` suffix
/// stripped from its name, the placeholder package name substituted when
/// the contents are UTF-8 (binary files are copied verbatim), and `count`
/// incremented once per file written.
fn walk_template(dir: &Dir, target: &Path, project_name: &str, count: &mut usize) -> Result<()> {
    for entry in dir.entries() {
        match entry {
            DirEntry::Dir(sub) => {
                let sub_target = target.join(sub.path());
                std::fs::create_dir_all(&sub_target)
                    .with_context(|| format!("mkdir {}", sub_target.display()))?;
                walk_template(sub, target, project_name, count)?;
            }
            DirEntry::File(file) => {
                let out_path = target.join(strip_template_suffix(file.path()));
                if let Some(parent) = out_path.parent() {
                    std::fs::create_dir_all(parent)
                        .with_context(|| format!("mkdir {}", parent.display()))?;
                }
                // Text files get the placeholder substitution; anything
                // that is not valid UTF-8 is copied byte-for-byte.
                let contents = match file.contents_utf8() {
                    Some(text) => substitute(text, project_name).into_bytes(),
                    None => file.contents().to_vec(),
                };
                std::fs::write(&out_path, contents)
                    .with_context(|| format!("write {}", out_path.display()))?;
                *count += 1;
            }
        }
    }
    Ok(())
}
/// Remove a trailing `.in` extension from a template file name, leaving
/// other paths untouched (`config.toml.in` -> `config.toml`).
///
/// Fix: a file literally named `.in` used to strip to an empty file name,
/// collapsing the result to the parent directory path — the subsequent
/// `fs::write` would then target the directory itself. An empty stem now
/// leaves the path unchanged.
fn strip_template_suffix(p: &Path) -> PathBuf {
    if let Some(name) = p.file_name().and_then(|n| n.to_str()) {
        if let Some(stripped) = name.strip_suffix(".in") {
            // Only rewrite when something remains of the file name.
            if !stripped.is_empty() {
                return p.with_file_name(stripped);
            }
        }
    }
    p.to_path_buf()
}
/// Rewrite every occurrence of the template's placeholder package name to
/// the user's chosen project name.
fn substitute(text: &str, project_name: &str) -> String {
    const PLACEHOLDER: &str = "reef-template";
    text.replace(PLACEHOLDER, project_name)
}
/// Validate a project name: non-empty, starts with an ASCII letter, and
/// contains only ASCII letters, digits, `_`, or `-` thereafter.
fn validate_name(name: &str) -> Result<()> {
    let first = match name.chars().next() {
        Some(c) => c,
        None => bail!("project name cannot be empty"),
    };
    if !first.is_ascii_alphabetic() {
        bail!("project name must start with a letter, got `{name}`");
    }
    let offender = name
        .chars()
        .skip(1)
        .find(|&c| !(c.is_ascii_alphanumeric() || c == '_' || c == '-'));
    if let Some(c) = offender {
        bail!(
            "project name `{name}` contains invalid character `{c}` (use letters, digits, _, -)"
        );
    }
    Ok(())
}
/// Best-effort `git init --quiet` in the scaffolded project.
///
/// The result is intentionally discarded: a missing `git` binary or a
/// failed init must not abort project creation.
fn init_git(target: &Path) {
    let mut cmd = std::process::Command::new("git");
    cmd.args(["init", "--quiet"]).current_dir(target);
    let _ = cmd.status();
}
/// Launch the Dioxus dev server (`dx serve --web`), defaulting to
/// `--interactive false` unless the caller supplied their own flag.
/// Extra args are appended verbatim after the defaults.
fn run_dev(extra: &[String]) -> Result<()> {
    print_banner();
    println!("{}", style("Starting dev loop (dx serve --web --interactive false)…").dim());
    println!();
    // Probe for the dx binary up front so the user gets an install hint
    // instead of a bare spawn error. NOTE(review): only spawn failure is
    // checked — a dx that exits non-zero on --version still passes here.
    let dx_check = std::process::Command::new("dx").arg("--version").output();
    if dx_check.is_err() {
        bail!(
            "dx (Dioxus CLI) not found in PATH. Install with:\n\n \
             cargo install dioxus-cli\n\n\
             then retry `cargo reef dev`."
        );
    }
    // Respect a user-supplied interactive flag in any of its spellings
    // (`--interactive`, `-i`, `--interactive=<val>`).
    let user_set_interactive = extra
        .iter()
        .any(|a| a == "--interactive" || a == "-i" || a.starts_with("--interactive="));
    let mut args: Vec<String> = vec!["serve".into(), "--web".into()];
    if !user_set_interactive {
        args.push("--interactive".into());
        args.push("false".into());
    }
    args.extend(extra.iter().cloned());
    // Run dx in the foreground and wait for it to exit.
    let status = std::process::Command::new("dx")
        .args(&args)
        .status()
        .context("launching dx serve")?;
    if !status.success() {
        bail!(
            "dx serve exited with status {status}. \
             If this was unexpected, try `rm -rf target/dx` and retry; \
             stale dx state from an abnormal exit can cause this."
        );
    }
    Ok(())
}
/// Root of `.reef/config.toml`.
#[derive(Debug, Deserialize)]
struct ReefConfig {
    storage: StorageConfig,
}

/// `[storage]` section: where the database lives and where migration
/// files are kept. Every field has a serde default, so the section may be
/// partially (or entirely) omitted.
#[derive(Debug, Deserialize)]
struct StorageConfig {
    // Name of the environment variable consulted first for the database
    // location (see `resolve_db_path`).
    #[serde(default = "default_db_url_env")]
    db_url_env: String,
    // Fallback database file path when that env var is unset.
    #[serde(default = "default_db_path")]
    db_path_default: String,
    // Directory containing `*.sql` migration files.
    #[serde(default = "default_migrations_dir")]
    migrations_dir: String,
}
/// Serde default for `StorageConfig::db_url_env`.
fn default_db_url_env() -> String {
    String::from("DATABASE_URL")
}
/// Serde default for `StorageConfig::db_path_default`.
fn default_db_path() -> String {
    String::from("./data/reef.db")
}
/// Serde default for `StorageConfig::migrations_dir`.
fn default_migrations_dir() -> String {
    String::from("migrations")
}
/// Load `.reef/config.toml` from the current working directory, failing
/// with a pointer to `cargo reef new` when run outside a project root.
fn read_config() -> Result<ReefConfig> {
    let path = Path::new(".reef/config.toml");
    if !path.exists() {
        bail!(
            "no .reef/config.toml found in {}. \
             Run this from a Reef project root (or scaffold one with `cargo reef new`).",
            std::env::current_dir().unwrap_or_default().display()
        );
    }
    let text = std::fs::read_to_string(path).context("reading .reef/config.toml")?;
    let cfg: ReefConfig = toml::from_str(&text).context("parsing .reef/config.toml")?;
    Ok(cfg)
}
/// Resolve the database location: the configured environment variable
/// wins; otherwise fall back to the configured default path.
fn resolve_db_path(cfg: &StorageConfig) -> String {
    match std::env::var(&cfg.db_url_env) {
        Ok(value) => value,
        Err(_) => cfg.db_path_default.clone(),
    }
}
fn run_migrate(cmd: MigrateCommand) -> Result<()> {
let cfg = read_config()?.storage;
match cmd {
MigrateCommand::Run => block_on(migrate_run(&cfg)),
MigrateCommand::New { name, with_down } => migrate_new(&cfg, &name, with_down),
MigrateCommand::Status => block_on(migrate_status(&cfg)),
MigrateCommand::Revert => block_on(migrate_revert(&cfg)),
}
}
/// Drive a future to completion on a fresh single-threaded tokio runtime.
/// Each call builds its own runtime, which is fine for a short-lived CLI.
fn block_on<F>(fut: F) -> Result<()>
where
    F: std::future::Future<Output = Result<()>>,
{
    let runtime = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .context("building tokio runtime")?;
    runtime.block_on(fut)
}
/// Collect forward migration files (`<stem>.sql`, excluding `*.down.sql`)
/// from `migrations_dir`, sorted lexicographically by stem so timestamped
/// names apply in creation order.
///
/// Returns an empty list when the directory does not exist.
///
/// Fixes: the stem is now computed with `strip_suffix(".sql")` — the old
/// `trim_end_matches(".sql")` removed *repeated* suffixes, so a file named
/// `a.sql.sql` was recorded under the stem `a`, desynchronizing the
/// bookkeeping name from the file. Non-file entries (e.g. a directory
/// named `x.sql`) are now skipped as well.
fn discover_forward_migrations(migrations_dir: &str) -> Result<Vec<(String, PathBuf)>> {
    let dir = Path::new(migrations_dir);
    if !dir.exists() {
        return Ok(Vec::new());
    }
    let mut entries = Vec::new();
    for e in std::fs::read_dir(dir).with_context(|| format!("reading {migrations_dir}"))? {
        let e = e?;
        let path = e.path();
        // Only regular files can be migrations.
        if !path.is_file() {
            continue;
        }
        let name = match path.file_name().and_then(|s| s.to_str()) {
            Some(n) => n,
            None => continue,
        };
        // Rollback files are discovered on demand by `migrate_revert`.
        if name.ends_with(".down.sql") {
            continue;
        }
        // `strip_suffix` removes exactly one `.sql`; non-`.sql` files are
        // skipped entirely.
        let stem = match name.strip_suffix(".sql") {
            Some(s) => s,
            None => continue,
        };
        entries.push((stem.to_string(), path));
    }
    entries.sort_by(|a, b| a.0.cmp(&b.0));
    Ok(entries)
}
/// Open (creating if absent) the local libSQL database at `path` and make
/// sure the `schema_migrations` bookkeeping table exists.
async fn open_db(path: &str) -> Result<libsql::Connection> {
    // Best-effort parent-directory creation; a real problem surfaces as a
    // clearer error from the builder below.
    if let Some(parent) = Path::new(path).parent() {
        if !parent.as_os_str().is_empty() {
            tokio::fs::create_dir_all(parent).await.ok();
        }
    }
    let db = libsql::Builder::new_local(path)
        .build()
        .await
        .context("opening libSQL database")?;
    let conn = db.connect().context("connecting to libSQL database")?;
    // Bookkeeping table: one row per applied migration. `duration_ms` and
    // `checksum` are nullable, so rows may legitimately lack them.
    conn.execute(
        "CREATE TABLE IF NOT EXISTS schema_migrations (
            name TEXT PRIMARY KEY,
            applied_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
            duration_ms INTEGER,
            checksum TEXT
        )",
        (),
    )
    .await
    .context("creating schema_migrations table")?;
    Ok(conn)
}
/// Load the `schema_migrations` table as a map of `name -> checksum`.
/// The checksum column is nullable, hence `Option<String>` values.
async fn applied_migrations(conn: &libsql::Connection) -> Result<std::collections::HashMap<String, Option<String>>> {
    let mut rows = conn
        .query("SELECT name, checksum FROM schema_migrations", ())
        .await
        .context("querying schema_migrations")?;
    let mut map = std::collections::HashMap::new();
    while let Some(row) = rows.next().await? {
        let name: String = row.get(0)?;
        // `.ok()` maps a NULL checksum (or any decode failure) to None.
        let checksum: Option<String> = row.get(1).ok();
        map.insert(name, checksum);
    }
    Ok(map)
}
/// Stable 64-bit content checksum, rendered as 16 lowercase hex digits.
///
/// Uses FNV-1a instead of `std`'s `DefaultHasher`: the std documentation
/// explicitly leaves `DefaultHasher`'s algorithm unspecified and free to
/// change between releases, but these checksums are persisted in the
/// `schema_migrations` table and compared across runs — a toolchain
/// upgrade would have made every recorded checksum "mismatch".
///
/// NOTE: databases whose rows were recorded with the old
/// `DefaultHasher`-based value will report a checksum warning once per
/// migration after this change; the warning is informational only.
fn checksum(bytes: &[u8]) -> String {
    const FNV_OFFSET: u64 = 0xcbf2_9ce4_8422_2325;
    const FNV_PRIME: u64 = 0x0000_0100_0000_01b3;
    let mut h = FNV_OFFSET;
    for &b in bytes {
        h ^= u64::from(b);
        h = h.wrapping_mul(FNV_PRIME);
    }
    format!("{h:016x}")
}
/// Apply all pending forward migrations in stem order, recording each in
/// `schema_migrations` with its duration and content checksum. Already
/// applied migrations whose file content changed produce a warning but do
/// not fail the run.
async fn migrate_run(cfg: &StorageConfig) -> Result<()> {
    let db_path = resolve_db_path(cfg);
    println!("{} {}", style("Database: ").dim(), style(&db_path).bold());
    println!(
        "{} {}",
        style("Migrations dir: ").dim(),
        style(&cfg.migrations_dir).bold()
    );
    println!();
    let conn = open_db(&db_path).await?;
    let applied = applied_migrations(&conn).await?;
    let files = discover_forward_migrations(&cfg.migrations_dir)?;
    let mut applied_count = 0;
    let mut warned = false;
    for (name, path) in &files {
        let sql = tokio::fs::read_to_string(path)
            .await
            .with_context(|| format!("reading {}", path.display()))?;
        let sum = checksum(sql.as_bytes());
        match applied.get(name) {
            // Already applied: warn (don't fail) if the stored checksum
            // exists and no longer matches the file on disk.
            Some(prev) => {
                if let Some(p) = prev {
                    if p != &sum {
                        eprintln!(
                            " {} {} (checksum mismatch — file was edited after being applied)",
                            style("⚠").yellow().bold(),
                            style(name).bold()
                        );
                        warned = true;
                    }
                }
                continue;
            }
            // Pending: apply the whole file as one batch, then record it.
            // NOTE(review): the batch and the bookkeeping INSERT are two
            // separate calls — a crash in between leaves the migration
            // applied but unrecorded; confirm this is acceptable.
            None => {
                let started = std::time::Instant::now();
                conn.execute_batch(&sql)
                    .await
                    .with_context(|| format!("applying {name}"))?;
                let duration_ms = started.elapsed().as_millis() as i64;
                conn.execute(
                    "INSERT INTO schema_migrations (name, duration_ms, checksum) VALUES (?1, ?2, ?3)",
                    libsql::params![name.clone(), duration_ms, sum],
                )
                .await
                .with_context(|| format!("recording {name}"))?;
                println!(
                    " {} {} {}",
                    style("✓").green().bold(),
                    style(name).bold(),
                    style(format!("({}ms)", duration_ms)).dim()
                );
                applied_count += 1;
            }
        }
    }
    println!();
    if applied_count == 0 {
        println!("{} (already up to date)", style("Nothing to do").dim());
    } else {
        println!(
            "{} Applied {} migration{}",
            style("✓").green().bold(),
            applied_count,
            if applied_count == 1 { "" } else { "s" }
        );
    }
    if warned {
        println!();
        println!(
            "{} See checksum warnings above. Edited migrations after being applied is risky — \
             prefer rolling forward with a new migration.",
            style("Note:").yellow().bold()
        );
    }
    Ok(())
}
/// Create a timestamped forward migration skeleton (and, with `with_down`,
/// a matching `.down.sql` rollback skeleton) in the migrations directory.
fn migrate_new(cfg: &StorageConfig, name: &str, with_down: bool) -> Result<()> {
    validate_migration_name(name)?;
    let dir = Path::new(&cfg.migrations_dir);
    std::fs::create_dir_all(dir).with_context(|| format!("mkdir {}", dir.display()))?;
    // UTC timestamp prefix keeps lexicographic order == creation order,
    // which is how `discover_forward_migrations` sorts.
    let now = chrono::Utc::now();
    let timestamp = now.format("%Y%m%d_%H%M%S").to_string();
    let stem = format!("{timestamp}_{name}");
    let forward_path = dir.join(format!("{stem}.sql"));
    let forward_template = format!(
        "-- Migration: {name}\n\
         -- Generated: {generated}\n\
         -- Forward — applied by `cargo reef migrate run`\n\n\
         -- Your CREATE/ALTER/DROP statements here.\n",
        name = name,
        generated = now.format("%Y-%m-%dT%H:%M:%SZ")
    );
    std::fs::write(&forward_path, forward_template)
        .with_context(|| format!("writing {}", forward_path.display()))?;
    println!(
        "{} {}",
        style("✓").green().bold(),
        style(forward_path.display().to_string()).bold()
    );
    if with_down {
        let down_path = dir.join(format!("{stem}.down.sql"));
        let down_template = format!(
            "-- Rollback for: {name}\n\
             -- Generated: {generated}\n\
             -- Applied by `cargo reef migrate revert`\n\n\
             -- Statements that undo the forward migration.\n",
            name = name,
            generated = now.format("%Y-%m-%dT%H:%M:%SZ")
        );
        std::fs::write(&down_path, down_template)
            .with_context(|| format!("writing {}", down_path.display()))?;
        println!(
            "{} {}",
            style("✓").green().bold(),
            style(down_path.display().to_string()).bold()
        );
    }
    Ok(())
}
/// Validate a migration name: non-empty, ASCII alphanumerics and
/// underscores only (it becomes part of a file name).
fn validate_migration_name(name: &str) -> Result<()> {
    if name.is_empty() {
        bail!("migration name cannot be empty");
    }
    let offender = name
        .chars()
        .find(|&c| !c.is_ascii_alphanumeric() && c != '_');
    if let Some(c) = offender {
        bail!(
            "migration name `{name}` contains invalid character `{c}` \
             (use letters, digits, underscores)"
        );
    }
    Ok(())
}
/// Print applied vs pending migrations. Read-only apart from `open_db`
/// bootstrapping the bookkeeping table if it does not exist yet.
async fn migrate_status(cfg: &StorageConfig) -> Result<()> {
    let db_path = resolve_db_path(cfg);
    let conn = open_db(&db_path).await?;
    let applied = applied_migrations(&conn).await?;
    let files = discover_forward_migrations(&cfg.migrations_dir)?;
    println!();
    println!(
        "{} {} ({} migration{} known)",
        style("Database:").dim(),
        style(&db_path).bold(),
        files.len(),
        if files.len() == 1 { "" } else { "s" }
    );
    println!();
    if files.is_empty() {
        println!("{} no migrations in `{}`", style("ℹ").cyan(), &cfg.migrations_dir);
        return Ok(());
    }
    // Split the discovered files by whether a bookkeeping row exists.
    let pending: Vec<&(String, PathBuf)> =
        files.iter().filter(|(n, _)| !applied.contains_key(n)).collect();
    let applied_files: Vec<&(String, PathBuf)> =
        files.iter().filter(|(n, _)| applied.contains_key(n)).collect();
    if !applied_files.is_empty() {
        println!("{}", style("Applied:").bold());
        for (name, _) in applied_files {
            println!(" {} {}", style("✓").green(), name);
        }
        println!();
    }
    if !pending.is_empty() {
        println!("{}", style("Pending:").bold());
        for (name, _) in pending {
            println!(" {} {}", style("→").yellow(), name);
        }
        println!();
        println!(
            "Run {} to apply.",
            style("cargo reef migrate run").bold()
        );
    } else {
        println!("{} all caught up", style("✓").green().bold());
    }
    Ok(())
}
/// Roll back the most recently applied migration using its hand-written
/// `.down.sql` file, then delete its bookkeeping row. Refuses to run when
/// no rollback file exists.
async fn migrate_revert(cfg: &StorageConfig) -> Result<()> {
    let db_path = resolve_db_path(cfg);
    let conn = open_db(&db_path).await?;
    // Most recently applied first; name DESC breaks ties when several
    // rows share an applied_at timestamp.
    let last = {
        let mut rows = conn
            .query(
                "SELECT name FROM schema_migrations ORDER BY applied_at DESC, name DESC LIMIT 1",
                (),
            )
            .await?;
        match rows.next().await? {
            Some(row) => row.get::<String>(0)?,
            None => {
                println!("{} nothing to revert (no applied migrations)", style("ℹ").cyan());
                return Ok(());
            }
        }
    };
    let down_path = Path::new(&cfg.migrations_dir).join(format!("{last}.down.sql"));
    if !down_path.exists() {
        bail!(
            "no rollback file `{}` for `{}`. \
             Roll forward with a new corrective migration instead, \
             or write a `.down.sql` and re-run.",
            down_path.display(),
            last
        );
    }
    let sql = tokio::fs::read_to_string(&down_path)
        .await
        .with_context(|| format!("reading {}", down_path.display()))?;
    println!("{} {}", style("Reverting:").bold(), style(&last).bold());
    let started = std::time::Instant::now();
    // NOTE(review): like migrate_run, the rollback batch and the DELETE
    // below are not atomic with each other.
    conn.execute_batch(&sql)
        .await
        .with_context(|| format!("applying rollback for {last}"))?;
    conn.execute(
        "DELETE FROM schema_migrations WHERE name = ?1",
        libsql::params![last.clone()],
    )
    .await
    .context("removing migration record")?;
    println!(
        " {} done {}",
        style("✓").green().bold(),
        style(format!("({}ms)", started.elapsed().as_millis())).dim()
    );
    Ok(())
}
// Splash lines; `print_banner` picks one pseudo-randomly per invocation.
const BANNERS: &[&str] = &[
    "🦀 Scuttling…",
    "🦀 Skittering…",
    "🦀 Crabwalking…",
    "🦀 Sidestepping…",
    "🦀 Pinching…",
    "🦀 Molting…",
    "🪸 Branching out…",
    "🪸 Calcifying…",
    "🪸 Anchoring…",
    "🪸 Reefing in…",
    "🐚 Shelling…",
    "🐚 Spiraling…",
    "🐚 Pearling…",
    "🫧 Bubbling up…",
    "🫧 Frothing…",
    "🫧 Effervescing…",
    "🌊 Cresting…",
    "🌊 Surfing in…",
    "🐙 Tentacling…",
    "🪼 Drifting…",
    "🐠 Darting…",
    "🐟 Schooling…",
    "🐡 Puffing up…",
    "🦐 Shrimping…",
    "🤿 Diving in…",
    "🦀 Welcome to the Reef.",
];
/// Print one banner line, chosen pseudo-randomly by folding wall-clock
/// nanos and the process id through the default hasher.
fn print_banner() {
    use std::hash::{Hash, Hasher};
    let nanos = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .map(|d| d.as_nanos())
        .unwrap_or(0);
    let mut hasher = std::collections::hash_map::DefaultHasher::new();
    nanos.hash(&mut hasher);
    std::process::id().hash(&mut hasher);
    let pick = hasher.finish() as usize % BANNERS.len();
    println!();
    println!("{}", style(BANNERS[pick]).bold().cyan());
    println!();
}
/// Build the schema feature set, treating an empty `--features` list as
/// "no constraint" rather than "nothing enabled".
fn feature_set(features: Vec<String>) -> schema::FeatureSet {
    if features.is_empty() {
        return schema::FeatureSet::unconstrained();
    }
    schema::FeatureSet::from_features(features)
}
/// Hidden debug helper: parse the schema file and dump the model as
/// pretty-printed JSON.
fn debug_schema(path: &std::path::Path, features: &schema::FeatureSet) -> Result<()> {
    let parsed = schema::parse_file(path, features)
        .with_context(|| format!("parsing {}", path.display()))?;
    let rendered = serde_json::to_string_pretty(&parsed).context("rendering schema as JSON")?;
    println!("{rendered}");
    Ok(())
}
/// Hidden debug helper: parse the schema file and print the SQL emitted
/// for it, statements separated by blank lines.
fn debug_sql(path: &std::path::Path, features: &schema::FeatureSet) -> Result<()> {
    let parsed = schema::parse_file(path, features)
        .with_context(|| format!("parsing {}", path.display()))?;
    let statements = schema::emit_schema(&parsed);
    let joined = statements.join("\n\n");
    println!("{joined}");
    Ok(())
}
/// Hidden debug helper: introspect an existing database file and print
/// the recovered schema model as pretty-printed JSON.
async fn debug_introspect(db_path: &std::path::Path) -> Result<()> {
    if !db_path.exists() {
        bail!("database file not found: {}", db_path.display());
    }
    let db = libsql::Builder::new_local(db_path)
        .build()
        .await
        .context("opening libSQL database")?;
    let conn = db.connect().context("connecting to libSQL database")?;
    let schema = schema::introspect_db(&conn).await?;
    let json = serde_json::to_string_pretty(&schema).context("rendering schema as JSON")?;
    println!("{json}");
    Ok(())
}
/// Hidden debug helper: print the diff between the declared schema file
/// and the live database. A missing database file is treated as an empty
/// schema, so the whole declared schema shows as new.
async fn debug_diff(
    schema_path: &std::path::Path,
    db_path: &std::path::Path,
    features: &schema::FeatureSet,
) -> Result<()> {
    let desired = schema::parse_file(schema_path, features)
        .with_context(|| format!("parsing {}", schema_path.display()))?;
    let actual = if db_path.exists() {
        let db = libsql::Builder::new_local(db_path)
            .build()
            .await
            .context("opening libSQL database")?;
        let conn = db.connect().context("connecting to libSQL database")?;
        schema::introspect_db(&conn).await?
    } else {
        // No database yet — diff against nothing.
        schema::Schema { tables: Vec::new() }
    };
    let diff = schema::diff(&desired, &actual);
    println!("{}", schema::render_diff(&diff));
    Ok(())
}
/// Diff the declared schema against the live database and reconcile:
/// print the diff, then — depending on flags — dry-run, capture the SQL as
/// a migration file (`--write`), or apply the auto-appliable statements
/// (destructive statements gated behind `--allow-drop`, interactive
/// confirmation unless `--yes`).
async fn db_push(
    schema_path: &std::path::Path,
    features: &schema::FeatureSet,
    yes: bool,
    write: Option<&str>,
    dry_run: bool,
    allow_drop: bool,
) -> Result<()> {
    let cfg = read_config()?.storage;
    let db_path = resolve_db_path(&cfg);
    let desired = schema::parse_file(schema_path, features)
        .with_context(|| format!("parsing {}", schema_path.display()))?;
    // A missing database file means "empty schema" — everything is new.
    let actual = if std::path::Path::new(&db_path).exists() {
        let db = libsql::Builder::new_local(&db_path)
            .build()
            .await
            .context("opening libSQL database")?;
        let conn = db.connect().context("connecting to libSQL database")?;
        schema::introspect_db(&conn).await?
    } else {
        schema::Schema { tables: Vec::new() }
    };
    let diff = schema::diff(&desired, &actual);
    println!(
        "{} {} {} {}",
        style("schema:").dim(),
        style(schema_path.display()).bold(),
        style("→ db:").dim(),
        style(&db_path).bold()
    );
    println!();
    println!("{}", schema::render_diff(&diff));
    // Partition the diff: rebuild actions need hand-written migrations,
    // drops are destructive (gated behind --allow-drop), and the rest can
    // be emitted as SQL directly.
    let needs_rebuild = diff
        .actions
        .iter()
        .any(|a| matches!(a, schema::Action::NeedsRebuild { .. }));
    let drops: Vec<String> = diff
        .actions
        .iter()
        .filter_map(|a| match a {
            schema::Action::DropTable(t) => Some(format!("DROP TABLE {t}")),
            schema::Action::DropColumn { table, column } => {
                Some(format!("DROP COLUMN {table}.{column}"))
            }
            _ => None,
        })
        .collect();
    let appliable: Vec<String> = diff
        .actions
        .iter()
        .filter_map(schema::emit_action)
        .collect();
    // Nothing to do at all — the diff rendering above is the only output.
    if appliable.is_empty() && !needs_rebuild {
        return Ok(());
    }
    if dry_run {
        if !appliable.is_empty() {
            println!();
            println!("{}", style("SQL that would be applied:").bold());
            for sql in &appliable {
                println!(" {}", style(sql).dim());
            }
        }
        return Ok(());
    }
    if !drops.is_empty() && !allow_drop {
        bail!(
            "diff contains destructive changes ({}) — re-run with `--allow-drop` \
             to confirm, or use `--write <name>` to capture the migration without \
             applying. Most-likely cause: a schema rename that wasn't reflected in \
             FK references or other tables.",
            drops.join(", ")
        );
    }
    // --write captures the auto-appliable SQL as a migration file instead
    // of touching the database.
    if let Some(name) = write {
        return write_migration(&cfg.migrations_dir, name, &appliable, needs_rebuild);
    }
    if appliable.is_empty() {
        bail!("only manual migrations are required (see above) — re-run with `cargo reef migrate new <name>`");
    }
    // NOTE(review): with --yes this guard is skipped and only the auto
    // part is applied while rebuild items remain — confirm intended.
    if !yes && needs_rebuild {
        bail!(
            "the diff contains both auto-appliable changes AND manual migrations. \
             Re-run with `--write <name>` to capture the auto changes as a migration \
             file, then write the manual parts by hand."
        );
    }
    // Interactive confirmation unless -y/--yes was given.
    if !yes {
        let prompt = format!(
            "Apply {} change{} to {}?",
            appliable.len(),
            if appliable.len() == 1 { "" } else { "s" },
            db_path
        );
        let confirmed = dialoguer::Confirm::new()
            .with_prompt(prompt)
            .default(false)
            .interact()
            .context("reading confirmation")?;
        if !confirmed {
            println!("{} aborted", style("✗").red());
            return Ok(());
        }
    }
    apply_sql(&db_path, &appliable).await?;
    println!(
        "{} applied {} statement{}",
        style("✓").green().bold(),
        appliable.len(),
        if appliable.len() == 1 { "" } else { "s" }
    );
    Ok(())
}
/// Execute each statement batch in order against the database at
/// `db_path`, creating the parent directory (and DB file) if needed.
async fn apply_sql(db_path: &str, statements: &[String]) -> Result<()> {
    // Best-effort parent creation; the builder reports a clearer error.
    if let Some(parent) = std::path::Path::new(db_path).parent() {
        if !parent.as_os_str().is_empty() {
            tokio::fs::create_dir_all(parent).await.ok();
        }
    }
    let db = libsql::Builder::new_local(db_path)
        .build()
        .await
        .context("opening libSQL database")?;
    let conn = db.connect().context("connecting to libSQL database")?;
    // Statements run one by one; a failure stops the loop with the
    // offending SQL in the error context.
    for stmt in statements {
        conn.execute_batch(stmt)
            .await
            .with_context(|| format!("applying:\n {stmt}"))?;
    }
    Ok(())
}
/// Write the captured `statements` to a timestamped migration file,
/// flagging in the file header (and on the CLI) when the diff also
/// contained manual rebuild work the author must hand-write.
fn write_migration(
    migrations_dir: &str,
    name: &str,
    statements: &[String],
    needs_rebuild: bool,
) -> Result<()> {
    let dir = std::path::Path::new(migrations_dir);
    std::fs::create_dir_all(dir).with_context(|| format!("mkdir {}", dir.display()))?;
    // Same timestamped-stem convention as `migrate_new`.
    let now = chrono::Utc::now();
    let stem = format!("{}_{}", now.format("%Y%m%d_%H%M%S"), name);
    let path = dir.join(format!("{stem}.sql"));
    let mut body = String::new();
    body.push_str(&format!(
        "-- Migration: {name}\n-- Generated by `cargo reef db:push --write {name}` at {}\n\n",
        now.format("%Y-%m-%dT%H:%M:%SZ")
    ));
    if needs_rebuild {
        // Remind the author that the captured SQL is incomplete.
        body.push_str(
            "-- NOTE: The diff also flagged manual migration items (see CLI output).\n\
             -- Add the corresponding hand-written statements to this file.\n\n",
        );
    }
    for stmt in statements {
        body.push_str(stmt);
        body.push_str("\n\n");
    }
    std::fs::write(&path, body).with_context(|| format!("writing {}", path.display()))?;
    println!(
        "{} {}",
        style("✓").green().bold(),
        style(path.display().to_string()).bold()
    );
    if needs_rebuild {
        println!(
            "{} edit the file to add manual migration statements before running \
             `cargo reef migrate run`.",
            style("note:").yellow().bold()
        );
    }
    Ok(())
}