use crate::error::{DataError, DataResult};
use serde::{Deserialize, Serialize};
/// Supported database backends.
///
/// Serialized via serde as `snake_case` (e.g. `MySql` -> `"my_sql"`).
/// Each multi-word variant also carries a deserialize-only alias so the
/// canonical compact name returned by [`AdapterKind::as_str`] round-trips
/// through serde as well (previously `as_str()` output such as
/// `"singlestore"` or `"mysql"` was rejected on deserialization).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum AdapterKind {
    None,
    Postgres,
    // Canonical spelling "mysql"; serde wire form is "my_sql".
    #[serde(alias = "mysql")]
    MySql,
    Sqlite,
    #[serde(alias = "singlestore")]
    SingleStore,
    #[serde(alias = "clickhouse")]
    ClickHouse,
    #[serde(alias = "bigquery")]
    BigQuery,
    #[serde(alias = "opensearch")]
    OpenSearch,
}
impl AdapterKind {
    /// Canonical lowercase name for this adapter.
    pub fn as_str(self) -> &'static str {
        match self {
            Self::None => "none",
            Self::Postgres => "postgres",
            Self::MySql => "mysql",
            Self::Sqlite => "sqlite",
            Self::SingleStore => "singlestore",
            Self::ClickHouse => "clickhouse",
            Self::BigQuery => "bigquery",
            Self::OpenSearch => "opensearch",
        }
    }

    /// Parses an adapter name, ignoring surrounding whitespace and ASCII
    /// case, and accepting common aliases (e.g. `pg`, `sqlite3`, `memsql`).
    ///
    /// # Errors
    /// Returns [`DataError::Config`] when `raw` names no known adapter.
    pub fn parse(raw: &str) -> DataResult<Self> {
        match raw.trim().to_ascii_lowercase().as_str() {
            "none" => Ok(Self::None),
            "postgres" | "postgresql" | "pg" => Ok(Self::Postgres),
            // "my_sql" is the serde snake_case spelling; accepted here for
            // consistency with the other multi-word variants below, each of
            // which already accepts its snake_case form.
            "mysql" | "my_sql" => Ok(Self::MySql),
            "sqlite" | "sqlite3" => Ok(Self::Sqlite),
            "singlestore" | "single_store" | "memsql" => Ok(Self::SingleStore),
            "clickhouse" | "click_house" => Ok(Self::ClickHouse),
            "bigquery" | "big_query" | "bq" => Ok(Self::BigQuery),
            "opensearch" | "open_search" => Ok(Self::OpenSearch),
            value => Err(DataError::Config(format!(
                "unsupported database adapter `{value}`; expected one of: none, postgres, mysql, sqlite, singlestore, clickhouse, bigquery, opensearch"
            ))),
        }
    }

    /// True for backends queried with SQL (everything except `None` and
    /// `OpenSearch`).
    pub fn is_sql_backend(self) -> bool {
        matches!(
            self,
            Self::Postgres
                | Self::MySql
                | Self::Sqlite
                | Self::SingleStore
                | Self::ClickHouse
                | Self::BigQuery
        )
    }

    /// Whether schema migrations apply to this backend; currently identical
    /// to [`Self::is_sql_backend`].
    pub fn supports_migrations(self) -> bool {
        self.is_sql_backend()
    }
}
/// Database connection settings, typically read from the `[database]`
/// section of a TOML-like configuration file.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct DatabaseConfig {
// Which database backend to use.
pub adapter: AdapterKind,
// Inline connection URL; when set it takes precedence over `url_env`
// in `resolve_url`.
pub url: Option<String>,
// Name of an environment variable holding the connection URL
// (defaults to "DATABASE_URL").
pub url_env: Option<String>,
}
impl Default for DatabaseConfig {
fn default() -> Self {
Self {
adapter: AdapterKind::None,
url: None,
url_env: Some("DATABASE_URL".to_string()),
}
}
}
impl DatabaseConfig {
    /// Extracts database settings from a minimal TOML-like document.
    ///
    /// Only `adapter`, `url`, and `url_env` keys inside a `[database]`
    /// section are honored; blank lines, `#` comments, unknown keys, and
    /// keys outside that section are ignored. Values may be wrapped in
    /// double quotes.
    ///
    /// # Errors
    /// Propagates [`AdapterKind::parse`] failures for an invalid `adapter`.
    pub fn from_toml_like_str(content: &str) -> DataResult<Self> {
        let mut config = Self::default();
        let mut active = false;
        for line in content.lines().map(str::trim) {
            if line.is_empty() || line.starts_with('#') {
                continue;
            }
            if line.starts_with('[') && line.ends_with(']') {
                // Entering a new section; only `[database]` is of interest.
                active = line == "[database]";
            } else if active {
                if let Some((key, value)) = line.split_once('=') {
                    let value = strip_quotes(value.trim());
                    match key.trim() {
                        "adapter" => config.adapter = AdapterKind::parse(value)?,
                        "url" => config.url = Some(value.to_string()),
                        "url_env" => config.url_env = Some(value.to_string()),
                        _ => {}
                    }
                }
            }
        }
        Ok(config)
    }

    /// Resolves the connection URL: an inline `url` wins; otherwise the
    /// environment variable named by `url_env` is consulted. Returns `None`
    /// when neither source yields a value.
    pub fn resolve_url(&self) -> Option<String> {
        match &self.url {
            Some(url) => Some(url.clone()),
            None => self
                .url_env
                .as_deref()
                .and_then(|name| std::env::var(name).ok()),
        }
    }
}
/// Removes one matching pair of surrounding double quotes, if present.
/// Inputs with only a leading or only a trailing quote are returned
/// unchanged.
fn strip_quotes(value: &str) -> &str {
    match value.strip_prefix('"') {
        Some(inner) => inner.strip_suffix('"').unwrap_or(value),
        None => value,
    }
}
#[cfg(test)]
mod tests {
use super::{AdapterKind, DatabaseConfig};
use proptest::prelude::*;
use std::sync::atomic::{AtomicU64, Ordering};
// Monotonic counter so each test invocation gets a unique env-var name,
// avoiding collisions when tests run in parallel.
static TEST_ENV_COUNTER: AtomicU64 = AtomicU64::new(0);
// `[database]` section keys are picked up and the adapter string parses.
#[test]
fn parse_database_config() {
let config = DatabaseConfig::from_toml_like_str(
r#"
[database]
adapter = "postgres"
url_env = "APP_DB_URL"
"#,
)
.unwrap();
assert_eq!(config.adapter, AdapterKind::Postgres);
assert_eq!(config.url_env.as_deref(), Some("APP_DB_URL"));
}
// An inline `url` must win over `url_env`; absent an inline URL, the
// value is read from the named environment variable.
// NOTE(review): `set_var`/`remove_var` mutate process-global state, which
// is racy under the default multi-threaded test runner — the unique key
// from TEST_ENV_COUNTER mitigates collisions, but confirm no other test
// reads arbitrary env vars concurrently.
#[test]
fn resolve_url_prefers_inline_then_env() {
let inline = DatabaseConfig {
adapter: AdapterKind::Sqlite,
url: Some("sqlite://inline.db".to_string()),
url_env: Some("IGNORED_ENV".to_string()),
};
assert_eq!(inline.resolve_url().as_deref(), Some("sqlite://inline.db"));
let key = format!(
"SHELLY_DATA_TEST_DB_URL_{}",
TEST_ENV_COUNTER.fetch_add(1, Ordering::Relaxed)
);
std::env::set_var(&key, "sqlite://from-env.db");
let from_env = DatabaseConfig {
adapter: AdapterKind::Sqlite,
url: None,
url_env: Some(key.clone()),
};
assert_eq!(
from_env.resolve_url().as_deref(),
Some("sqlite://from-env.db")
);
// Clean up so the variable does not leak into other tests.
std::env::remove_var(key);
}
proptest! {
// Every documented alias must parse, regardless of ASCII case or
// surrounding whitespace, to the variant re-derived from the
// lowercased alias below.
#[test]
fn adapter_parse_accepts_aliases_case_and_whitespace(
alias in prop_oneof![
Just("none"),
Just("postgres"),
Just("postgresql"),
Just("pg"),
Just("mysql"),
Just("sqlite"),
Just("sqlite3"),
Just("singlestore"),
Just("single_store"),
Just("memsql"),
Just("clickhouse"),
Just("click_house"),
Just("bigquery"),
Just("big_query"),
Just("bq"),
Just("opensearch"),
Just("open_search"),
],
left_ws in 0usize..3,
right_ws in 0usize..3,
uppercase in any::<bool>(),
) {
let alias = if uppercase {
alias.to_ascii_uppercase()
} else {
alias.to_string()
};
let input = format!("{}{}{}", " ".repeat(left_ws), alias, " ".repeat(right_ws));
let kind = AdapterKind::parse(&input).unwrap();
// Re-derive the expected variant from the normalized alias; this
// mirrors the alias table in `AdapterKind::parse`.
let expected = match alias.to_ascii_lowercase().as_str() {
"none" => AdapterKind::None,
"postgres" | "postgresql" | "pg" => AdapterKind::Postgres,
"mysql" => AdapterKind::MySql,
"sqlite" | "sqlite3" => AdapterKind::Sqlite,
"singlestore" | "single_store" | "memsql" => AdapterKind::SingleStore,
"clickhouse" | "click_house" => AdapterKind::ClickHouse,
"bigquery" | "big_query" | "bq" => AdapterKind::BigQuery,
"opensearch" | "open_search" => AdapterKind::OpenSearch,
_ => unreachable!("input generated from known aliases"),
};
prop_assert_eq!(kind, expected);
}
// Random identifier-like strings that are not known aliases must be
// rejected; `prop_assume!` filters out the alias table.
#[test]
fn adapter_parse_rejects_unknown_values(raw in "[a-zA-Z0-9_\\-]{1,24}") {
let normalized = raw.trim().to_ascii_lowercase();
prop_assume!(
normalized != "none" &&
normalized != "postgres" &&
normalized != "postgresql" &&
normalized != "pg" &&
normalized != "mysql" &&
normalized != "sqlite" &&
normalized != "sqlite3" &&
normalized != "singlestore" &&
normalized != "single_store" &&
normalized != "memsql" &&
normalized != "clickhouse" &&
normalized != "click_house" &&
normalized != "bigquery" &&
normalized != "big_query" &&
normalized != "bq" &&
normalized != "opensearch" &&
normalized != "open_search"
);
prop_assert!(AdapterKind::parse(&raw).is_err());
}
}
}