// cyrs_db/lib.rs
1//! `cyrs-db` — incremental analysis database (spec 0001 §11).
2//!
3//! This crate builds on the Salsa skeleton from `cy-zx6` and adds the
4//! complete input-query surface from spec §11.2 (`cy-nk7`), plus the
5//! `FileId` model, snapshot API, and workspace-scoped `SchemaProvider`
6//! wiring from spec §11.4–§11.5 (`cy-amr`).
7//!
8//! ## Primary API (spec §11.4–§11.5)
9//!
10//! See [`workspace`] for the high-level `Database` + `FileId` + snapshot API
11//! intended for consumers (`cyrs-lsp`, `cyrs-agent`, `cyrs-cli`,
12//! `cyrs-tck`).
13//!
14//! - [`workspace::Database`] — workspace-scoped database. Owns a
15//! [`CypherDatabase`] plus a `FileId → SourceFile` registry.
16//! - [`workspace::FileId`] — stable u32 handle, the unit of caching (§11.4).
17//! - [`workspace::DatabaseSnapshot`] — `Send` read-only snapshot for
18//! cross-thread queries (§11.5).
19//! - [`workspace::UnknownFileId`] — error returned for stale handles.
20//!
21//! ## Salsa internals (low-level, for crate authors)
22//!
23//! - [`SourceFile`] — Salsa `#[input]` for per-file `source` + `dialect`.
24//! - [`inputs::FileOptions`] — Salsa `#[input]` for per-file [`inputs::AnalysisOptions`].
25//! - [`inputs::WorkspaceInputs`] — Salsa `#[input]` for workspace-scoped schema (§11.4).
26//! - [`inputs::options_digest`] — `#[salsa::tracked]` derived query: stable u64 hash
27//! of `AnalysisOptions`, used to gate all analysis-dependent derived queries.
28//! - [`CypherDatabase`] — the concrete `salsa::Database` impl.
29//! - [`CypherDb`] — the database trait that all concrete DBs implement.
30//! - [`ParseOutput`] — memoised result of parsing a [`SourceFile`].
31//! - [`parse_cst`] — first derived query: lossless CST, re-evaluated only
32//! when `source` changes.
33//!
34//! ## Input query surface (spec §11.2)
35//!
36//! | Query | Salsa kind | Scope |
37//! |-----------------------------------|-------------------|-----------|
38//! | `source_text(file) -> &str` | `#[input]` field | per-file |
39//! | `dialect(file) -> DialectMode` | `#[input]` field | per-file |
40//! | `options(file) -> AnalysisOptions`| `#[input]` field | per-file |
41//! | `options_digest(file) -> u64` | `#[tracked]` | per-file |
42//! | `schema() -> Option<Arc<dyn …>>` | `#[input]` field | workspace |
43//!
44//! ## Legacy facade API (preserved for backward compat)
45//!
46//! The legacy [`LegacyDatabase`] / legacy `FileId` (u32 newtype) remain
47//! exported under their old names for backward compatibility while binary
48//! crates migrate to the new [`workspace::Database`] API.
49//!
50//! ## Send + Sync / snapshot semantics (spec §11.5)
51//!
52//! `CypherDatabase` is `Clone`. Cloning shares the `Arc<Zalsa>` backing
53//! store and creates a fresh `ZalsaLocal`, producing a snapshot. The clone
54//! can be sent to another thread for concurrent read queries (`Send`).
55//! Writes require `&mut self` and block until all clones are dropped.
56
57#![forbid(unsafe_code)]
58#![doc(html_root_url = "https://docs.rs/cyrs-db/0.0.1")]
59
60pub mod inputs;
61pub mod options;
62pub mod queries;
63pub mod workspace;
64
65pub use inputs::{AnalysisOptions, FileOptions, WorkspaceInputs, options_digest};
66pub use options::DatabaseOptions;
67pub use queries::{
68 Analysis, AstOutput, DiagnosticsOutput, PlanOutput, ResolvedNamesOutput, all_diagnostics,
69 analyse_file, parse_ast, plan_of, resolved_names, sema_diagnostics,
70};
71pub use workspace::{Database, DatabaseSnapshot, FileId, UnknownFileId};
72
73use std::sync::Arc;
74
75use cyrs_syntax::{Parse, parse};
76use salsa::Setter as _;
77
78// ---------------------------------------------------------------------------
79// Dialect mode (spec §9)
80// ---------------------------------------------------------------------------
81
/// Dialect mode selected at parse time. Spec §9.
///
/// `#[non_exhaustive]` (cy-2i9.1) leaves room for additional dialects to
/// land later without forcing a SemVer-major release.
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum DialectMode {
    /// GQL-aligned parsing (default).
    #[default]
    GqlAligned,
    /// openCypher 9 compatibility mode.
    OpenCypherV9,
}
95
96// ---------------------------------------------------------------------------
97// Compile-time Send / Sync assertion helpers (no unsafe)
98// ---------------------------------------------------------------------------
99
macro_rules! assert_send {
    ($T:ty) => {
        const _: () = {
            fn _requires<U: Send>() {}
            // Instantiating `_requires::<$T>` forces the `Send` bound to be
            // checked at compile time; nothing here runs or allocates.
            fn _check() {
                _requires::<$T>();
            }
        };
    };
}
111
macro_rules! assert_sync {
    ($T:ty) => {
        const _: () = {
            fn _requires<U: Sync>() {}
            // Instantiating `_requires::<$T>` forces the `Sync` bound to be
            // checked at compile time; nothing here runs or allocates.
            fn _check() {
                _requires::<$T>();
            }
        };
    };
}
123
124// ---------------------------------------------------------------------------
125// ParseOutput — newtype wrapping Arc<Parse> with pointer-equality Eq
126//
127// Salsa tracked functions require Output: Eq so that it can detect whether
128// re-execution produced the same value (§11.3). `Parse` itself does not
129// implement `Eq`, so we wrap it in an `Arc` and compare by pointer identity.
130// Two re-executions of an unchanged source return the cached Arc, so they
131// compare equal. A mutation produces a fresh `Arc`, so they compare unequal.
132// ---------------------------------------------------------------------------
133
/// Memoised result of parsing a [`SourceFile`].
///
/// Wraps an `Arc<Parse>`; equality is by pointer identity (see the
/// `PartialEq` impl below) so that Salsa can cheaply detect whether
/// re-parsing produced the same tree: an unchanged input yields the cached
/// `Arc` (equal), a mutation yields a fresh `Arc` (unequal). `Clone` is an
/// O(1) refcount bump.
#[derive(Debug, Clone)]
pub struct ParseOutput(Arc<Parse>);
140
141impl ParseOutput {
142 fn new(p: Parse) -> Self {
143 Self(Arc::new(p))
144 }
145
146 /// Access the underlying [`Parse`].
147 #[must_use]
148 pub fn parse(&self) -> &Parse {
149 &self.0
150 }
151
152 /// Strong-reference count of the wrapped `Arc<Parse>`.
153 ///
154 /// Exposed so tests and memory-diagnostics tooling can observe Salsa
155 /// memo retention (§11.6): after a `FileId` is evicted and the next
156 /// revision replaces the memo entry for its `SourceFile`, the old
157 /// `ParseOutput` held by the memo is dropped and any caller-held clone
158 /// of its `Arc<Parse>` sees its strong count drop to 1.
159 #[must_use]
160 pub fn strong_count(&self) -> usize {
161 Arc::strong_count(&self.0)
162 }
163}
164
165impl PartialEq for ParseOutput {
166 fn eq(&self, other: &Self) -> bool {
167 Arc::ptr_eq(&self.0, &other.0)
168 }
169}
170
171impl Eq for ParseOutput {}
172
// Salsa requires Output: Send + Sync. These checks fire at compile time with
// zero runtime cost: if `Parse` ever grows a non-Send/non-Sync field, the
// crate stops building here instead of inside an opaque Salsa bound error.
// Arc<Parse> is Send+Sync because GreenNode (Arc-backed) and Vec<SyntaxError>
// (all fields Send+Sync) are Send+Sync.
assert_send!(ParseOutput);
assert_sync!(ParseOutput);
178
179// ---------------------------------------------------------------------------
180// Salsa: input structs
181// ---------------------------------------------------------------------------
182
mod source_file_input {
    // Wrapping the `#[salsa::input]` in a private module lets us put
    // `#![allow(missing_docs)]` at inner-module scope so the macro's
    // generated impl block inherits the exemption. The struct
    // itself is re-exported below. Outer-item `#[allow]` attributes
    // do not propagate into Salsa's expansion.
    #![allow(missing_docs)]

    use super::{DialectMode, ParseOutput};

    /// A single source file tracked by the incremental database.
    ///
    /// Field order matters: it is the positional argument order of the
    /// generated `SourceFile::new(db, source, dialect, options_digest,
    /// precomputed_parse)` constructor used throughout this crate.
    ///
    /// Fields:
    /// - `source` — raw UTF-8 source text.
    /// - `dialect` — parsing dialect (spec §9).
    /// - `options_digest` — hash of analysis options. Full shape deferred to
    ///   bead cy-nk7; zero is a valid "no options" value.
    /// - `precomputed_parse` — optional pre-computed [`ParseOutput`] (cy-li6).
    ///   When `Some`, [`super::parse_cst`] returns this value directly instead
    ///   of re-parsing `source` from scratch. Set by
    ///   [`super::workspace::Database::edit_file`] after the smart-path
    ///   sub-tree splice produces a fresh [`super::Parse`]; cleared on every
    ///   non-incremental source mutation (`set_source`, `update_file`).
    #[salsa::input]
    pub struct SourceFile {
        /// Raw UTF-8 source text for this file.
        #[returns(ref)]
        pub source: String,

        /// Dialect used when parsing this file.
        pub dialect: DialectMode,

        /// Hash of options that affect derived queries.
        /// Shape is stabilised in cy-nk7; zero is a valid "no options" value.
        pub options_digest: u64,

        /// Optional pre-computed parse, supplied by the incremental edit
        /// path (cy-li6). `None` means "no hint, re-parse `source`".
        #[returns(ref)]
        pub precomputed_parse: Option<ParseOutput>,
    }
}
pub use source_file_input::SourceFile;
226
227// ---------------------------------------------------------------------------
228// Salsa: derived queries
229// ---------------------------------------------------------------------------
230
231/// Parse a [`SourceFile`] into a lossless CST.
232///
233/// The result is memoised; it is re-evaluated only when `source`, `dialect`,
234/// or the cy-li6 `precomputed_parse` hint changes. See [`ParseOutput`]
235/// for equality semantics.
236///
237/// ## cy-li6 fast path
238///
239/// If [`SourceFile::precomputed_parse`] is `Some`, this query returns the
240/// hint verbatim instead of calling [`parse`] on `source`. This wires the
241/// smart-path sub-tree splice produced by
242/// [`workspace::Database::edit_file`] through Salsa as the published
243/// [`ParseOutput`] for the next revision, so downstream tracked queries
244/// (`parse_ast`, `sema_diagnostics`, `plan_of`, `analyse_file`) consume the
245/// spliced tree without paying a whole-file reparse.
246///
247/// The hint is cleared on every non-incremental source mutation so a
248/// follow-up `set_source` always produces a fresh full parse.
249#[salsa::tracked(lru = 256)]
250pub fn parse_cst(db: &dyn CypherDb, file: SourceFile) -> ParseOutput {
251 if let Some(hint) = file.precomputed_parse(db) {
252 return hint.clone();
253 }
254 let src = file.source(db);
255 ParseOutput::new(parse(src))
256}
257
/// Adjust the LRU capacity of [`parse_cst`] at runtime.
///
/// Called by [`workspace::Database::with_options`] to apply
/// [`options::DatabaseOptions::parse_lru`]. Must be called before any
/// queries are issued.
pub fn set_parse_cst_lru(db: &mut impl CypherDb, cap: usize) {
    // Delegates to the accessor generated by `#[salsa::tracked(lru = …)]`.
    parse_cst::set_lru_capacity(db, cap);
}
266
267// ---------------------------------------------------------------------------
268// Salsa: database trait + concrete struct
269// ---------------------------------------------------------------------------
270
/// Trait that all concrete databases in this workspace implement.
///
/// Using a trait lets downstream crates write `&dyn CypherDb` functions
/// that are testable against mock databases. All tracked query entry
/// points in this crate (e.g. [`parse_cst`]) take `&dyn CypherDb`.
#[salsa::db]
pub trait CypherDb: salsa::Database {}
277
/// The concrete incremental database (Salsa 2022-style, spec §11.1).
///
/// ## Send + Sync via snapshots (spec §11.5)
///
/// `CypherDatabase` is `Send` but not `Sync` — `ZalsaLocal` contains
/// per-thread `UnsafeCell` state by design. Thread-safety is achieved via
/// snapshots: `clone()` shares the `Arc<Zalsa>` backing store and produces a
/// fresh `ZalsaLocal`, allowing the clone to be sent to another thread and
/// queried concurrently. The LSP server clones once per request; the CLI
/// never needs to.
#[salsa::db]
#[derive(Clone, Default)]
pub struct CypherDatabase {
    // Salsa-managed backing store: the inputs and memoised query results.
    storage: salsa::Storage<Self>,
}
293
294impl std::fmt::Debug for CypherDatabase {
295 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
296 f.debug_struct("CypherDatabase").finish_non_exhaustive()
297 }
298}
299
300impl CypherDatabase {
301 /// Construct a new, empty database.
302 #[must_use]
303 pub fn new() -> Self {
304 Self::default()
305 }
306
307 /// Create a new [`SourceFile`] input with the given source text,
308 /// using the default dialect and a zero options digest.
309 pub fn new_source_file(&mut self, source: impl Into<String>) -> SourceFile {
310 SourceFile::new(self, source.into(), DialectMode::default(), 0, None)
311 }
312
313 /// Create a new [`SourceFile`] input with explicit dialect and digest.
314 pub fn new_source_file_with(
315 &mut self,
316 source: impl Into<String>,
317 dialect: DialectMode,
318 options_digest: u64,
319 ) -> SourceFile {
320 SourceFile::new(self, source.into(), dialect, options_digest, None)
321 }
322
323 /// Update the source text of an existing [`SourceFile`], bumping the
324 /// Salsa revision so that derived queries are invalidated.
325 ///
326 /// Always clears any cy-li6 [`precomputed_parse`](SourceFile::precomputed_parse)
327 /// hint, because a fresh source string must produce a fresh parse —
328 /// reusing a stale hint would silently desync the published CST from
329 /// `source`.
330 pub fn set_source(&mut self, file: SourceFile, source: impl Into<String>) {
331 file.set_source(self).to(source.into());
332 // Clear any stale incremental hint — see doc comment above.
333 file.set_precomputed_parse(self).to(None);
334 }
335
336 /// Atomically replace `source` and publish a precomputed [`Parse`]
337 /// (cy-li6).
338 ///
339 /// Used by [`workspace::Database::edit_file`] after
340 /// [`cyrs_syntax::incremental_reparse`] has produced a spliced tree
341 /// for `new_source`. Bumps the Salsa revision once for both fields,
342 /// so the next [`parse_cst`] query returns `parse` directly without
343 /// re-parsing.
344 pub fn set_source_with_parse(
345 &mut self,
346 file: SourceFile,
347 source: impl Into<String>,
348 parse: ParseOutput,
349 ) {
350 file.set_source(self).to(source.into());
351 file.set_precomputed_parse(self).to(Some(parse));
352 }
353
354 /// Update the dialect of an existing [`SourceFile`].
355 pub fn set_dialect(&mut self, file: SourceFile, dialect: DialectMode) {
356 file.set_dialect(self).to(dialect);
357 }
358
359 /// Create a new [`FileOptions`] input with the given [`AnalysisOptions`].
360 pub fn new_file_options(&mut self, options: AnalysisOptions) -> FileOptions {
361 FileOptions::new(self, options)
362 }
363
364 /// Update the [`AnalysisOptions`] of an existing [`FileOptions`] input.
365 ///
366 /// Bumps the Salsa revision for `file_opts`, which cascades through
367 /// `options_digest` and all derived queries that read it.
368 pub fn set_options(&mut self, file_opts: FileOptions, options: AnalysisOptions) {
369 file_opts.set_options(self).to(options);
370 }
371
372 /// Create a new [`WorkspaceInputs`] input.
373 ///
374 /// There should be exactly one `WorkspaceInputs` per database.
375 /// Call this once at database initialisation; update it with
376 /// [`set_schema`](Self::set_schema).
377 pub fn new_workspace_inputs(
378 &mut self,
379 schema: Option<Arc<dyn cyrs_schema::SchemaProvider>>,
380 ) -> WorkspaceInputs {
381 WorkspaceInputs::new(self, schema)
382 }
383
384 /// Update the workspace-scoped schema.
385 ///
386 /// Invalidates all derived queries that depend on the schema.
387 pub fn set_schema(
388 &mut self,
389 ws: WorkspaceInputs,
390 schema: Option<Arc<dyn cyrs_schema::SchemaProvider>>,
391 ) {
392 ws.set_schema(self).to(schema);
393 }
394}
395
// Marker impls wiring `CypherDatabase` into the `#[salsa::db]` machinery:
// first as a Salsa database, then as this crate's `CypherDb` query trait.
#[salsa::db]
impl salsa::Database for CypherDatabase {}

#[salsa::db]
impl CypherDb for CypherDatabase {}

// CypherDatabase is Send (salsa adds `unsafe impl Send` via the #[salsa::db]
// macro) but not Sync — cross-thread use goes through clone() snapshots.
assert_send!(CypherDatabase);
405
406// ---------------------------------------------------------------------------
407// Legacy thin facade — preserved for backward compatibility.
408//
409// Binary crates (cyrs-cli, cyrs-agent) and cyrs-testkit depend on this
410// API. They will migrate to the new `workspace::Database` in subsequent beads.
411// ---------------------------------------------------------------------------
412
413use std::sync::Mutex as StdMutex;
414
415use cyrs_diag::{Diagnostic, DiagnosticsSink};
416use cyrs_fmt::{FormatOptions, format_with as fmt_format_with};
417use cyrs_schema::{EmptySchema, SchemaProvider};
418use cyrs_sema::SemaOptions;
419use smol_str::SmolStr;
420
/// File identity within the legacy [`LegacyDatabase`].
///
/// A plain `u32` newtype handed out sequentially by
/// [`LegacyDatabase::allocate_file`].
///
/// Deprecated: use [`workspace::FileId`] with [`workspace::Database`] instead.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct LegacyFileId(pub u32);
426
/// Mutex-guarded state behind [`LegacyDatabase`].
#[derive(Default)]
struct LegacyInner {
    /// Source text per file.
    sources: indexmap::IndexMap<LegacyFileId, Arc<str>>,
    /// Dialect per file; recorded by `set_dialect` but not yet read by
    /// `parse` in this façade.
    dialects: indexmap::IndexMap<LegacyFileId, DialectMode>,
    #[allow(dead_code)] // consumed when sema passes are wired in
    sema_opts: SemaOptions,
    /// Next id handed out by `allocate_file`; monotonically increasing.
    next_file_id: u32,
}
435
/// Legacy analysis database (non-incremental). Preserved for backward
/// compatibility while binary crates migrate to [`workspace::Database`].
///
/// `Send + Sync` via `Mutex`-guarded interior. The incremental replacement
/// is [`workspace::Database`] / [`CypherDatabase`].
pub struct LegacyDatabase {
    /// Per-file sources and dialects plus the file-id allocator.
    inner: StdMutex<LegacyInner>,
    /// Workspace schema; [`new`](Self::new) seeds it with [`EmptySchema`].
    schema: StdMutex<Arc<dyn SchemaProvider>>,
}
445
446impl Default for LegacyDatabase {
447 fn default() -> Self {
448 Self::new()
449 }
450}
451
452// `LegacyDatabase` is the pre-workspace u32-FileId façade kept for
453// backward compatibility while binary crates migrate to the new
454// `workspace::Database`. Its methods are intentionally thin
455// wrappers and document themselves; the module-level doc already
456// explains the migration status. `#[allow(missing_docs)]` on the
457// whole impl avoids churning through 10+ trivial one-liners that
458// will be deleted alongside the façade.
459#[allow(missing_docs)]
460impl LegacyDatabase {
461 #[must_use]
462 pub fn new() -> Self {
463 Self {
464 inner: StdMutex::new(LegacyInner::default()),
465 schema: StdMutex::new(Arc::new(EmptySchema)),
466 }
467 }
468
469 pub fn set_schema(&self, schema: Arc<dyn SchemaProvider>) {
470 *self.schema.lock().expect("db mutex") = schema;
471 }
472
473 pub fn allocate_file(&self) -> LegacyFileId {
474 let mut i = self.inner.lock().expect("db mutex");
475 let id = LegacyFileId(i.next_file_id);
476 i.next_file_id += 1;
477 id
478 }
479
480 pub fn set_source(&self, file: LegacyFileId, src: impl Into<Arc<str>>) {
481 let mut i = self.inner.lock().expect("db mutex");
482 i.sources.insert(file, src.into());
483 }
484
485 pub fn set_dialect(&self, file: LegacyFileId, d: DialectMode) {
486 let mut i = self.inner.lock().expect("db mutex");
487 i.dialects.insert(file, d);
488 }
489
490 fn source_of_inner(&self, file: LegacyFileId) -> Arc<str> {
491 let i = self.inner.lock().expect("db mutex");
492 i.sources
493 .get(&file)
494 .cloned()
495 .unwrap_or_else(|| Arc::from(""))
496 }
497
498 #[must_use]
499 pub fn parse(&self, file: LegacyFileId) -> Parse {
500 let src = self.source_of_inner(file);
501 parse(&src)
502 }
503
504 #[must_use]
505 pub fn diagnostics(&self, file: LegacyFileId) -> Vec<Diagnostic> {
506 let _parse = self.parse(file);
507 let sink = DiagnosticsSink::new();
508 sink.into_sorted()
509 }
510
511 #[must_use]
512 pub fn formatted(&self, file: LegacyFileId, opts: &FormatOptions) -> SmolStr {
513 let src = self.source_of_inner(file);
514 fmt_format_with(&src, opts)
515 .expect("formatter is infallible")
516 .into()
517 }
518}
519
520impl std::fmt::Debug for LegacyDatabase {
521 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
522 f.debug_struct("LegacyDatabase").finish_non_exhaustive()
523 }
524}
525
526// ---------------------------------------------------------------------------
527// Tests
528// ---------------------------------------------------------------------------
529
#[cfg(test)]
mod tests {
    use super::*;

    // --- Salsa / CypherDatabase tests ---

    /// Basic construction: create a DB, create a [`SourceFile`], run the
    /// derived query, check the CST round-trips the source.
    #[test]
    fn parse_cst_basic() {
        let mut db = CypherDatabase::new();
        let file = db.new_source_file("MATCH (n) RETURN n");
        let out = parse_cst(&db, file);
        // A lossless CST must reproduce the input text verbatim.
        assert_eq!(
            out.parse().syntax().to_string(),
            "MATCH (n) RETURN n",
            "lossless CST round-trip"
        );
    }

    /// Calling `parse_cst` twice returns the same `Arc` (pointer equality),
    /// proving Salsa returned the cached result.
    #[test]
    fn parse_cst_cached() {
        let mut db = CypherDatabase::new();
        let file = db.new_source_file("RETURN 1");
        let out1 = parse_cst(&db, file);
        let out2 = parse_cst(&db, file);
        // Same Arc pointer → cached, not re-executed. This is exactly the
        // pointer-identity semantics ParseOutput's PartialEq relies on.
        assert!(
            Arc::ptr_eq(&out1.0, &out2.0),
            "second call should return cached ParseOutput"
        );
    }

    /// Modifying the source bumps the revision and causes `parse_cst` to
    /// re-execute, producing a new `Arc`.
    #[test]
    fn parse_cst_invalidates_on_source_change() {
        let mut db = CypherDatabase::new();
        let file = db.new_source_file("MATCH (n) RETURN n");

        let out1 = parse_cst(&db, file);
        assert_eq!(out1.parse().syntax().to_string(), "MATCH (n) RETURN n");

        // Mutate the source → revision bump.
        db.set_source(file, "RETURN 42");

        let out2 = parse_cst(&db, file);
        assert_eq!(out2.parse().syntax().to_string(), "RETURN 42");

        // Different Arc pointer → query was re-executed.
        assert!(
            !Arc::ptr_eq(&out1.0, &out2.0),
            "parse_cst should re-execute after source change"
        );
    }

    /// Cloning the DB creates a snapshot that can be sent to another thread.
    /// Both clones share `Arc<Zalsa>` so they see the same revision; each has
    /// its own `ZalsaLocal` allowing concurrent reads without `&mut`.
    #[test]
    fn snapshot_is_send_and_readable() {
        let mut db = CypherDatabase::new();
        let file = db.new_source_file("RETURN 1");

        // Parse once to populate the memo cache.
        let out1 = parse_cst(&db, file);
        assert_eq!(out1.parse().syntax().to_string(), "RETURN 1");

        // Clone = snapshot that can be sent to another thread.
        let snapshot = db.clone();

        // The snapshot can run queries (no &mut needed).
        let out_snap = parse_cst(&snapshot, file);
        assert_eq!(
            out_snap.parse().syntax().to_string(),
            "RETURN 1",
            "snapshot sees the same state"
        );

        // The snapshot can be sent across thread boundaries (Send).
        let out_thread =
            std::thread::spawn(move || parse_cst(&snapshot, file).parse().syntax().to_string())
                .join()
                .expect("thread panicked");
        assert_eq!(out_thread, "RETURN 1");
    }

    /// `CypherDatabase` is `Send`; `ParseOutput` is `Send + Sync`.
    #[test]
    fn send_sync_properties() {
        fn require_send<T: Send>(_: T) {}
        fn require_send_sync<T: Send + Sync>(_: T) {}

        let db = CypherDatabase::new();
        require_send(db);

        let mut db2 = CypherDatabase::new();
        let file = db2.new_source_file("RETURN 1");
        let out = parse_cst(&db2, file);
        require_send_sync(out);
    }

    /// Empty source produces a valid (empty) CST and no parse errors.
    #[test]
    fn empty_source_ok() {
        let mut db = CypherDatabase::new();
        let file = db.new_source_file("");
        let out = parse_cst(&db, file);
        assert_eq!(out.parse().syntax().to_string(), "");
        assert!(out.parse().errors().is_empty());
    }

    // --- Legacy LegacyDatabase tests (backward compat) ---

    #[test]
    fn legacy_parse_through_db() {
        let db = LegacyDatabase::new();
        let f = db.allocate_file();
        db.set_source(f, "MATCH (n) RETURN n");
        let p = db.parse(f);
        assert_eq!(p.syntax().to_string(), "MATCH (n) RETURN n");
    }

    #[test]
    fn legacy_empty_source_is_ok() {
        let db = LegacyDatabase::new();
        let f = db.allocate_file();
        // Files with no source set read back as "" (see `source_of_inner`).
        assert_eq!(db.parse(f).syntax().to_string(), "");
        assert!(db.diagnostics(f).is_empty());
    }
}