use std::fmt::{Debug, Display, Formatter};
use std::fs;
use std::io::Write as _;
use std::marker::PhantomData;
use std::path::{Path, PathBuf};
use std::sync::{Arc, LazyLock, Mutex};
use serde::{Serialize, Serializer};
use sqlx_core::database::Database;
use sqlx_core::describe::Describe;
use sqlx_core::HashMap;
use crate::database::DatabaseExt;
use crate::query::cache::MtimeCache;
/// Typed offline query data for a single query, in the shape written to a
/// `query-{hash}.json` file (see [`QueryData::save_in`]).
#[derive(serde::Serialize)]
#[serde(bound(serialize = "Describe<DB>: serde::Serialize"))]
#[derive(Debug)]
pub struct QueryData<DB: Database> {
    // Zero-sized marker; serializes as the driver name string (`DB::NAME`).
    db_name: SerializeDbName<DB>,
    #[allow(dead_code)]
    pub(super) query: String,
    // Query metadata from the database driver; serialized as JSON per the
    // `serde(bound)` above.
    pub(super) describe: Describe<DB>,
    // Hex-encoded SHA-256 of the query text (see `hash_string`); also used
    // as the filename suffix in `save_in`.
    pub(super) hash: String,
}
impl<DB: Database> QueryData<DB> {
    /// Build query data from a query string and the driver's describe output,
    /// computing the content hash from the query text.
    pub fn from_describe(query: &str, describe: Describe<DB>) -> Self {
        let hash = hash_string(query);
        Self {
            db_name: SerializeDbName::default(),
            query: query.to_owned(),
            describe,
            hash,
        }
    }
}
/// Zero-sized marker that ties a `QueryData` to a database driver type and
/// serializes/displays as that driver's name.
struct SerializeDbName<DB>(PhantomData<DB>);

impl<DB> Default for SerializeDbName<DB> {
    fn default() -> Self {
        Self(PhantomData)
    }
}
impl<DB: Database> Debug for SerializeDbName<DB> {
    /// Formats as `SerializeDbName("<driver name>")`.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut tuple = f.debug_tuple("SerializeDbName");
        tuple.field(&DB::NAME);
        tuple.finish()
    }
}
impl<DB: Database> Display for SerializeDbName<DB> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // `pad` (rather than `write!`) honors any width/alignment/precision
        // flags in the caller's format spec.
        f.pad(DB::NAME)
    }
}
impl<DB: Database> Serialize for SerializeDbName<DB> {
    /// Serializes as the bare driver name string (the `db_name` field in the
    /// saved JSON).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // `&str`'s `Serialize` impl forwards to `serialize_str`.
        DB::NAME.serialize(serializer)
    }
}
// Process-wide cache of parsed query-data files, keyed by path. Each entry is
// presumably invalidated when the file's mtime changes — see `MtimeCache`.
static OFFLINE_DATA_CACHE: LazyLock<Mutex<HashMap<PathBuf, Arc<MtimeCache<DynQueryData>>>>> =
    LazyLock::new(Default::default);
/// Dynamically-typed query data as loaded from a saved query file, before the
/// concrete database driver is known.
#[derive(Clone, serde::Deserialize)]
pub struct DynQueryData {
    pub db_name: String,
    pub query: String,
    // Raw `describe` payload; converted to a typed `Describe<DB>` in
    // `QueryData::from_dyn_data` once the driver is selected.
    pub describe: serde_json::Value,
    pub hash: String,
}
impl DynQueryData {
    /// Reads and deserializes the query data file at `path`, memoizing the
    /// result in `OFFLINE_DATA_CACHE`.
    ///
    /// `query` must match the query text recorded in the file; since the
    /// filename is derived from a hash of the query, a mismatch indicates a
    /// hash collision between two different queries.
    pub fn from_data_file(path: &Path, query: &str) -> crate::Result<Self> {
        let cache = OFFLINE_DATA_CACHE
            .lock()
            // If a previous lock holder panicked, throw away the possibly
            // inconsistent map and start fresh instead of propagating poison.
            .unwrap_or_else(|poison_err| {
                let mut guard = poison_err.into_inner();
                *guard = Default::default();
                guard
            })
            .entry_ref(path)
            .or_insert_with(|| Arc::new(MtimeCache::new()))
            .clone();
        cache.get_or_try_init(|builder| {
            // Register the file with the cache so the entry can be tied to it
            // (presumably for mtime-based invalidation — see `MtimeCache`).
            builder.add_path(path.into());
            let offline_data_contents = fs::read_to_string(path).map_err(|e| {
                format!("failed to read saved query path {}: {}", path.display(), e)
            })?;
            let dyn_data: DynQueryData = serde_json::from_str(&offline_data_contents)?;
            // The file was looked up by query hash; if the stored query text
            // differs, two distinct queries collided on the same hash.
            if query != dyn_data.query {
                return Err("hash collision for saved query data".into());
            }
            Ok(dyn_data)
        })
    }
}
impl<DB: DatabaseExt> QueryData<DB>
where
    Describe<DB>: serde::Serialize + serde::de::DeserializeOwned,
{
    /// Converts dynamically-typed query data into typed form for driver `DB`.
    ///
    /// Errors if the data was saved for a different driver or if the raw
    /// `describe` JSON does not deserialize as `Describe<DB>`.
    pub fn from_dyn_data(dyn_data: DynQueryData) -> crate::Result<Self> {
        // These fields are always present in data loaded from a query file;
        // an empty value here is a bug in the caller, not a user error.
        assert!(!dyn_data.db_name.is_empty());
        assert!(!dyn_data.hash.is_empty());
        if DB::NAME == dyn_data.db_name {
            let describe: Describe<DB> = serde_json::from_value(dyn_data.describe)?;
            Ok(QueryData {
                db_name: SerializeDbName::default(),
                query: dyn_data.query,
                describe,
                hash: dyn_data.hash,
            })
        } else {
            Err(format!(
                "expected query data for {}, got data for {}",
                DB::NAME,
                dyn_data.db_name
            )
            .into())
        }
    }

    /// Serializes this query data to `dir/query-{hash}.json` as pretty JSON
    /// with a trailing newline.
    ///
    /// An existing file is deleted first, then the new file is created with
    /// `create_new` (exclusive); if another process wins the create race,
    /// that is treated as success since the contents are content-addressed
    /// by hash.
    pub(super) fn save_in(&self, dir: &Path) -> crate::Result<()> {
        use std::io::ErrorKind;

        let path = dir.join(format!("query-{}.json", self.hash));

        // Remove any stale file so the exclusive create below can succeed.
        if let Err(err) = fs::remove_file(&path) {
            match err.kind() {
                // NotFound: nothing to delete. PermissionDenied: tolerated —
                // presumably another process holds the file; the create below
                // will surface a real problem if there is one.
                ErrorKind::NotFound | ErrorKind::PermissionDenied => (),
                ErrorKind::NotADirectory => {
                    return Err(format!(
                        "sqlx offline path exists, but is not a directory: {dir:?}"
                    )
                    .into());
                }
                _ => return Err(format!("failed to delete {path:?}: {err:?}").into()),
            }
        }

        let mut file = match fs::OpenOptions::new()
            .write(true)
            .create_new(true)
            .open(&path)
        {
            Ok(file) => file,
            Err(err) => {
                return match err.kind() {
                    // Another process re-created the file between our delete
                    // and create; its contents are equivalent (same hash).
                    ErrorKind::AlreadyExists => Ok(()),
                    ErrorKind::NotFound => {
                        Err(format!("sqlx offline path does not exist: {dir:?}").into())
                    }
                    ErrorKind::NotADirectory => Err(format!(
                        "sqlx offline path exists, but is not a directory: {dir:?}"
                    )
                    .into()),
                    _ => Err(format!("failed to exclusively create {path:?}: {err:?}").into()),
                };
            }
        };

        // Serialize to an in-memory buffer first so the file is written in
        // one call; serialization of our own data failing is a bug.
        let mut data = Vec::with_capacity(4096);
        serde_json::to_writer_pretty(&mut data, self).expect("BUG: failed to serialize query data");
        data.push(b'\n');

        file.write_all(&data)
            .map_err(|err| format!("failed to write query data to file {path:?}: {err:?}"))?;

        Ok(())
    }
}
/// Returns the lowercase hex-encoded SHA-256 digest of `query`, used to name
/// saved query data files.
pub(super) fn hash_string(query: &str) -> String {
    use sha2::{Digest, Sha256};

    let mut hasher = Sha256::new();
    hasher.update(query.as_bytes());
    hex::encode(hasher.finalize())
}