#![forbid(unsafe_code)]
#![doc(html_root_url = "https://docs.rs/cyrs-db/0.0.1")]
pub mod inputs;
pub mod options;
pub mod queries;
pub mod workspace;
pub use inputs::{AnalysisOptions, FileOptions, WorkspaceInputs, options_digest};
pub use options::DatabaseOptions;
pub use queries::{
Analysis, AstOutput, DiagnosticsOutput, PlanOutput, ResolvedNamesOutput, all_diagnostics,
analyse_file, parse_ast, plan_of, resolved_names, sema_diagnostics,
};
pub use workspace::{Database, DatabaseSnapshot, FileId, UnknownFileId};
use std::sync::Arc;
use cyrs_syntax::{Parse, parse};
use salsa::Setter as _;
/// Query-language dialect a file should be interpreted under.
///
/// `#[non_exhaustive]` so additional dialects can be added without a
/// breaking change for downstream matches.
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum DialectMode {
    /// GQL-aligned Cypher; the default for newly created files.
    #[default]
    GqlAligned,
    /// openCypher version 9.
    OpenCypherV9,
}
/// Compile-time assertion that `$T: Send`.
///
/// Expands to an unnamed const whose body only type-checks when the bound
/// holds; nothing is ever executed at runtime.
macro_rules! assert_send {
    ($T:ty) => {
        const _: () = {
            fn _assert_send<T: Send>() {}
            // Instantiating the generic fn forces the `Send` check; the
            // underscore names suppress dead-code lints.
            fn _enforce() {
                _assert_send::<$T>();
            }
        };
    };
}
/// Compile-time assertion that `$T: Sync`.
///
/// Counterpart of `assert_send!`; fails to compile when the bound does not
/// hold and generates no runtime code.
macro_rules! assert_sync {
    ($T:ty) => {
        const _: () = {
            fn _assert_sync<T: Sync>() {}
            // Instantiating the generic fn forces the `Sync` check.
            fn _enforce() {
                _assert_sync::<$T>();
            }
        };
    };
}
/// Shared handle to a parse result; cloning bumps a refcount instead of
/// copying the tree, which keeps salsa memo values cheap to duplicate.
#[derive(Debug, Clone)]
pub struct ParseOutput(Arc<Parse>);

impl ParseOutput {
    /// Wraps a freshly produced parse in a shared handle.
    fn new(p: Parse) -> Self {
        ParseOutput(Arc::new(p))
    }

    /// Borrows the underlying parse result.
    #[must_use]
    pub fn parse(&self) -> &Parse {
        self.0.as_ref()
    }

    /// Number of live handles currently sharing this parse.
    #[must_use]
    pub fn strong_count(&self) -> usize {
        Arc::strong_count(&self.0)
    }
}
// Identity (pointer) equality: two `ParseOutput`s are equal only when they
// share the same `Arc` allocation. Presumably chosen so equality is a cheap
// pointer compare rather than a deep tree compare — note that a re-parse
// producing a structurally identical tree still counts as "different".
impl PartialEq for ParseOutput {
    fn eq(&self, other: &Self) -> bool {
        Arc::ptr_eq(&self.0, &other.0)
    }
}
impl Eq for ParseOutput {}
// Compile-time guarantees that `ParseOutput` may cross and be shared between
// threads (required for database snapshots used off-thread).
assert_send!(ParseOutput);
assert_sync!(ParseOutput);
// The input lives in its own module so `#![allow(missing_docs)]` can cover
// the salsa-generated accessor items without relaxing lints crate-wide.
mod source_file_input {
    #![allow(missing_docs)]
    use super::{DialectMode, ParseOutput};
    /// A single source file tracked as a salsa input.
    #[salsa::input]
    pub struct SourceFile {
        /// Full source text of the file.
        #[returns(ref)]
        pub source: String,
        /// Dialect this file should be interpreted under.
        pub dialect: DialectMode,
        /// Digest of the analysis options in effect for this file.
        pub options_digest: u64,
        /// Optional parse computed out-of-band; when present, `parse_cst`
        /// returns it instead of re-parsing `source`.
        #[returns(ref)]
        pub precomputed_parse: Option<ParseOutput>,
    }
}
pub use source_file_input::SourceFile;
/// Memoized parse of `file`, keeping at most 256 trees alive via LRU.
///
/// A precomputed parse supplied alongside the source (see
/// `CypherDatabase::set_source_with_parse`) is reused verbatim; otherwise the
/// current source text is parsed from scratch.
#[salsa::tracked(lru = 256)]
pub fn parse_cst(db: &dyn CypherDb, file: SourceFile) -> ParseOutput {
    match file.precomputed_parse(db) {
        Some(hint) => hint.clone(),
        None => ParseOutput::new(parse(file.source(db))),
    }
}
/// Reconfigures the LRU capacity of the `parse_cst` memo table
/// (the default capacity is 256).
pub fn set_parse_cst_lru(db: &mut impl CypherDb, cap: usize) {
    parse_cst::set_lru_capacity(db, cap);
}
/// Database trait the crate's tracked queries are written against; any
/// salsa database implementing it can run them.
#[salsa::db]
pub trait CypherDb: salsa::Database {}
/// Concrete salsa database for cyrs queries.
///
/// `Clone` produces a snapshot sharing the same storage (see the
/// `snapshot_is_send_and_readable` test below).
#[salsa::db]
#[derive(Clone, Default)]
pub struct CypherDatabase {
    // All query/input state lives in salsa's storage.
    storage: salsa::Storage<Self>,
}
impl std::fmt::Debug for CypherDatabase {
    // The salsa storage has no useful `Debug` form, so render the database
    // as an opaque, non-exhaustive struct.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut repr = f.debug_struct("CypherDatabase");
        repr.finish_non_exhaustive()
    }
}
impl CypherDatabase {
    /// Creates an empty database (identical to `Default::default()`).
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }
    /// Registers a new file with the default dialect and a zero options digest.
    pub fn new_source_file(&mut self, source: impl Into<String>) -> SourceFile {
        SourceFile::new(self, source.into(), DialectMode::default(), 0, None)
    }
    /// Registers a new file with an explicit dialect and options digest.
    pub fn new_source_file_with(
        &mut self,
        source: impl Into<String>,
        dialect: DialectMode,
        options_digest: u64,
    ) -> SourceFile {
        SourceFile::new(self, source.into(), dialect, options_digest, None)
    }
    /// Replaces the source text of `file`, invalidating dependent queries.
    pub fn set_source(&mut self, file: SourceFile, source: impl Into<String>) {
        file.set_source(self).to(source.into());
        // Any previously supplied parse describes the old text; drop it so
        // `parse_cst` re-parses instead of returning a stale tree.
        file.set_precomputed_parse(self).to(None);
    }
    /// Replaces the source text of `file` together with a parse computed
    /// elsewhere, which `parse_cst` will reuse instead of re-parsing.
    pub fn set_source_with_parse(
        &mut self,
        file: SourceFile,
        source: impl Into<String>,
        parse: ParseOutput,
    ) {
        file.set_source(self).to(source.into());
        file.set_precomputed_parse(self).to(Some(parse));
    }
    /// Changes the dialect of `file`.
    ///
    /// NOTE(review): unlike `set_source`, this leaves any precomputed parse
    /// hint in place — confirm parsing is dialect-independent before relying
    /// on that.
    pub fn set_dialect(&mut self, file: SourceFile, dialect: DialectMode) {
        file.set_dialect(self).to(dialect);
    }
    /// Creates a per-file analysis-options input.
    pub fn new_file_options(&mut self, options: AnalysisOptions) -> FileOptions {
        FileOptions::new(self, options)
    }
    /// Replaces the analysis options held by `file_opts`.
    pub fn set_options(&mut self, file_opts: FileOptions, options: AnalysisOptions) {
        file_opts.set_options(self).to(options);
    }
    /// Creates the workspace-wide inputs, optionally seeded with a schema.
    pub fn new_workspace_inputs(
        &mut self,
        schema: Option<Arc<dyn cyrs_schema::SchemaProvider>>,
    ) -> WorkspaceInputs {
        WorkspaceInputs::new(self, schema)
    }
    /// Replaces the workspace schema provider.
    pub fn set_schema(
        &mut self,
        ws: WorkspaceInputs,
        schema: Option<Arc<dyn cyrs_schema::SchemaProvider>>,
    ) {
        ws.set_schema(self).to(schema);
    }
}
// Wire the concrete database into salsa and into the crate-local trait.
#[salsa::db]
impl salsa::Database for CypherDatabase {}
#[salsa::db]
impl CypherDb for CypherDatabase {}
// Compile-time guarantee: the database can be moved to another thread.
assert_send!(CypherDatabase);
use std::sync::Mutex as StdMutex;
use cyrs_diag::{Diagnostic, DiagnosticsSink};
use cyrs_fmt::{FormatOptions, format_with as fmt_format_with};
use cyrs_schema::{EmptySchema, SchemaProvider};
use cyrs_sema::SemaOptions;
use smol_str::SmolStr;
/// Identifier for a file in the [`LegacyDatabase`]; a newtype over the dense
/// counter handed out by `allocate_file`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct LegacyFileId(pub u32);
/// Mutable state guarded by the [`LegacyDatabase`] mutex.
#[derive(Default)]
struct LegacyInner {
    // Source text per file, in insertion order.
    sources: indexmap::IndexMap<LegacyFileId, Arc<str>>,
    // Dialect per file; written by `set_dialect` but never read in this file.
    dialects: indexmap::IndexMap<LegacyFileId, DialectMode>,
    // Unused at present (hence the allow); presumably kept for semantic
    // analysis wiring — TODO confirm.
    #[allow(dead_code)]
    sema_opts: SemaOptions,
    // Next id handed out by `allocate_file`.
    next_file_id: u32,
}
/// Legacy (non-salsa) database: interior-mutable via mutexes, no memoization.
pub struct LegacyDatabase {
    // File table plus the id allocator.
    inner: StdMutex<LegacyInner>,
    // Active schema provider; starts as [`EmptySchema`].
    schema: StdMutex<Arc<dyn SchemaProvider>>,
}
impl Default for LegacyDatabase {
    /// Equivalent to [`LegacyDatabase::new`].
    fn default() -> Self {
        Self::new()
    }
}
#[allow(missing_docs)]
impl LegacyDatabase {
#[must_use]
pub fn new() -> Self {
Self {
inner: StdMutex::new(LegacyInner::default()),
schema: StdMutex::new(Arc::new(EmptySchema)),
}
}
pub fn set_schema(&self, schema: Arc<dyn SchemaProvider>) {
*self.schema.lock().expect("db mutex") = schema;
}
pub fn allocate_file(&self) -> LegacyFileId {
let mut i = self.inner.lock().expect("db mutex");
let id = LegacyFileId(i.next_file_id);
i.next_file_id += 1;
id
}
pub fn set_source(&self, file: LegacyFileId, src: impl Into<Arc<str>>) {
let mut i = self.inner.lock().expect("db mutex");
i.sources.insert(file, src.into());
}
pub fn set_dialect(&self, file: LegacyFileId, d: DialectMode) {
let mut i = self.inner.lock().expect("db mutex");
i.dialects.insert(file, d);
}
fn source_of_inner(&self, file: LegacyFileId) -> Arc<str> {
let i = self.inner.lock().expect("db mutex");
i.sources
.get(&file)
.cloned()
.unwrap_or_else(|| Arc::from(""))
}
#[must_use]
pub fn parse(&self, file: LegacyFileId) -> Parse {
let src = self.source_of_inner(file);
parse(&src)
}
#[must_use]
pub fn diagnostics(&self, file: LegacyFileId) -> Vec<Diagnostic> {
let _parse = self.parse(file);
let sink = DiagnosticsSink::new();
sink.into_sorted()
}
#[must_use]
pub fn formatted(&self, file: LegacyFileId, opts: &FormatOptions) -> SmolStr {
let src = self.source_of_inner(file);
fmt_format_with(&src, opts)
.expect("formatter is infallible")
.into()
}
}
impl std::fmt::Debug for LegacyDatabase {
    // The interior mutexes make field-by-field output unhelpful, so render
    // the database as an opaque, non-exhaustive struct.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut repr = f.debug_struct("LegacyDatabase");
        repr.finish_non_exhaustive()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parse_cst_basic() {
        let mut db = CypherDatabase::new();
        let file = db.new_source_file("MATCH (n) RETURN n");
        let parsed = parse_cst(&db, file);
        // Rendering the CST back to text must reproduce the input exactly.
        assert_eq!(
            parsed.parse().syntax().to_string(),
            "MATCH (n) RETURN n",
            "lossless CST round-trip"
        );
    }

    #[test]
    fn parse_cst_cached() {
        let mut db = CypherDatabase::new();
        let file = db.new_source_file("RETURN 1");
        let first = parse_cst(&db, file);
        let second = parse_cst(&db, file);
        // Pointer equality proves the memoized value was reused.
        assert!(
            Arc::ptr_eq(&first.0, &second.0),
            "second call should return cached ParseOutput"
        );
    }

    #[test]
    fn parse_cst_invalidates_on_source_change() {
        let mut db = CypherDatabase::new();
        let file = db.new_source_file("MATCH (n) RETURN n");
        let before = parse_cst(&db, file);
        assert_eq!(before.parse().syntax().to_string(), "MATCH (n) RETURN n");
        db.set_source(file, "RETURN 42");
        let after = parse_cst(&db, file);
        assert_eq!(after.parse().syntax().to_string(), "RETURN 42");
        assert!(
            !Arc::ptr_eq(&before.0, &after.0),
            "parse_cst should re-execute after source change"
        );
    }

    #[test]
    fn snapshot_is_send_and_readable() {
        let mut db = CypherDatabase::new();
        let file = db.new_source_file("RETURN 1");
        let original = parse_cst(&db, file);
        assert_eq!(original.parse().syntax().to_string(), "RETURN 1");
        let snap = db.clone();
        let via_snapshot = parse_cst(&snap, file);
        assert_eq!(
            via_snapshot.parse().syntax().to_string(),
            "RETURN 1",
            "snapshot sees the same state"
        );
        // The snapshot must also be usable from another thread.
        let handle =
            std::thread::spawn(move || parse_cst(&snap, file).parse().syntax().to_string());
        let via_thread = handle.join().expect("thread panicked");
        assert_eq!(via_thread, "RETURN 1");
    }

    #[test]
    fn send_sync_properties() {
        fn require_send<T: Send>(_: T) {}
        fn require_send_sync<T: Send + Sync>(_: T) {}
        let db = CypherDatabase::new();
        require_send(db);
        let mut db2 = CypherDatabase::new();
        let file = db2.new_source_file("RETURN 1");
        let parsed = parse_cst(&db2, file);
        require_send_sync(parsed);
    }

    #[test]
    fn empty_source_ok() {
        let mut db = CypherDatabase::new();
        let file = db.new_source_file("");
        let parsed = parse_cst(&db, file);
        assert_eq!(parsed.parse().syntax().to_string(), "");
        assert!(parsed.parse().errors().is_empty());
    }

    #[test]
    fn legacy_parse_through_db() {
        let db = LegacyDatabase::new();
        let file = db.allocate_file();
        db.set_source(file, "MATCH (n) RETURN n");
        let parsed = db.parse(file);
        assert_eq!(parsed.syntax().to_string(), "MATCH (n) RETURN n");
    }

    #[test]
    fn legacy_empty_source_is_ok() {
        let db = LegacyDatabase::new();
        let file = db.allocate_file();
        assert_eq!(db.parse(file).syntax().to_string(), "");
        assert!(db.diagnostics(file).is_empty());
    }
}