ra_ap_base_db/lib.rs

//! base_db defines basic database traits. The concrete DB is defined by ide.

pub use salsa;
pub use salsa_macros;

// FIXME: Rename this crate, base db is non-descriptive
mod change;
mod input;
pub mod target;

use std::{
    cell::RefCell,
    hash::BuildHasherDefault,
    panic,
    sync::{Once, atomic::AtomicUsize},
};

pub use crate::{
    change::FileChange,
    input::{
        BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder,
        CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap,
        DependencyBuilder, Env, ExtraCrateData, LangCrateOrigin, ProcMacroLoadingError,
        ProcMacroPaths, ReleaseChannel, SourceRoot, SourceRootId, UniqueCrateData,
    },
};
use dashmap::{DashMap, mapref::entry::Entry};
pub use query_group::{self};
use rustc_hash::FxHasher;
use salsa::{Durability, Setter};
pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
use span::Edition;
use syntax::{Parse, SyntaxError, ast};
use triomphe::Arc;
pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet};

pub type FxIndexSet<T> = indexmap::IndexSet<T, rustc_hash::FxBuildHasher>;
pub type FxIndexMap<K, V> =
    indexmap::IndexMap<K, V, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;

#[macro_export]
macro_rules! impl_intern_key {
    ($id:ident, $loc:ident) => {
        #[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
        #[derive(PartialOrd, Ord)]
        pub struct $id {
            pub loc: $loc,
        }

        // If we derive `Debug`, salsa prints the values recursively, and this can blow the stack.
        impl ::std::fmt::Debug for $id {
            fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
                f.debug_tuple(stringify!($id))
                    .field(&format_args!("{:04x}", self.0.index()))
                    .finish()
            }
        }
    };
}
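
// Usage sketch (hypothetical identifiers, not defined in this crate): given some
// location type `FunctionLoc`, `impl_intern_key!(FunctionId, FunctionLoc);` defines
// an interned `FunctionId { loc: FunctionLoc }` whose `Debug` output is the compact
// hex index above rather than salsa's recursive dump.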

pub const DEFAULT_FILE_TEXT_LRU_CAP: u16 = 16;
pub const DEFAULT_PARSE_LRU_CAP: u16 = 128;
pub const DEFAULT_BORROWCK_LRU_CAP: u16 = 2024;

#[derive(Debug, Default)]
pub struct Files {
    files: Arc<DashMap<vfs::FileId, FileText, BuildHasherDefault<FxHasher>>>,
    source_roots: Arc<DashMap<SourceRootId, SourceRootInput, BuildHasherDefault<FxHasher>>>,
    file_source_roots: Arc<DashMap<vfs::FileId, FileSourceRootInput, BuildHasherDefault<FxHasher>>>,
}

impl Files {
    pub fn file_text(&self, file_id: vfs::FileId) -> FileText {
        match self.files.get(&file_id) {
            Some(text) => *text,
            None => {
                panic!("Unable to fetch file text for `vfs::FileId`: {file_id:?}; this is a bug")
            }
        }
    }

    pub fn set_file_text(&self, db: &mut dyn SourceDatabase, file_id: vfs::FileId, text: &str) {
        match self.files.entry(file_id) {
            Entry::Occupied(mut occupied) => {
                occupied.get_mut().set_text(db).to(Arc::from(text));
            }
            Entry::Vacant(vacant) => {
                let text = FileText::new(db, Arc::from(text), file_id);
                vacant.insert(text);
            }
        };
    }

    pub fn set_file_text_with_durability(
        &self,
        db: &mut dyn SourceDatabase,
        file_id: vfs::FileId,
        text: &str,
        durability: Durability,
    ) {
        match self.files.entry(file_id) {
            Entry::Occupied(mut occupied) => {
                occupied.get_mut().set_text(db).with_durability(durability).to(Arc::from(text));
            }
            Entry::Vacant(vacant) => {
                let text =
                    FileText::builder(Arc::from(text), file_id).durability(durability).new(db);
                vacant.insert(text);
            }
        };
    }

    /// Contents of the source root.
    pub fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput {
        let source_root = match self.source_roots.get(&source_root_id) {
            Some(source_root) => source_root,
            None => panic!(
                "Unable to fetch `SourceRootInput` with `SourceRootId` ({source_root_id:?}); this is a bug"
            ),
        };

        *source_root
    }

    pub fn set_source_root_with_durability(
        &self,
        db: &mut dyn SourceDatabase,
        source_root_id: SourceRootId,
        source_root: Arc<SourceRoot>,
        durability: Durability,
    ) {
        match self.source_roots.entry(source_root_id) {
            Entry::Occupied(mut occupied) => {
                occupied.get_mut().set_source_root(db).with_durability(durability).to(source_root);
            }
            Entry::Vacant(vacant) => {
                let source_root =
                    SourceRootInput::builder(source_root).durability(durability).new(db);
                vacant.insert(source_root);
            }
        };
    }

    pub fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput {
        let file_source_root = match self.file_source_roots.get(&id) {
            Some(file_source_root) => file_source_root,
            None => panic!(
                "Unable to get `FileSourceRootInput` with `vfs::FileId` ({id:?}); this is a bug",
            ),
        };
        *file_source_root
    }

    pub fn set_file_source_root_with_durability(
        &self,
        db: &mut dyn SourceDatabase,
        id: vfs::FileId,
        source_root_id: SourceRootId,
        durability: Durability,
    ) {
        match self.file_source_roots.entry(id) {
            Entry::Occupied(mut occupied) => {
                occupied
                    .get_mut()
                    .set_source_root_id(db)
                    .with_durability(durability)
                    .to(source_root_id);
            }
            Entry::Vacant(vacant) => {
                let file_source_root =
                    FileSourceRootInput::builder(source_root_id).durability(durability).new(db);
                vacant.insert(file_source_root);
            }
        };
    }
}
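
// A minimal delegation sketch (hypothetical `MyDatabase`, not part of this crate):
// a concrete database that owns a `Files` (say as `files: Arc<Files>`) can back the
// `SourceDatabase` file-text methods with it, roughly:
//
//     fn file_text(&self, file_id: vfs::FileId) -> FileText {
//         self.files.file_text(file_id)
//     }
//     fn set_file_text(&mut self, file_id: vfs::FileId, text: &str) {
//         let files = Arc::clone(&self.files);
//         files.set_file_text(self, file_id, text);
//     }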

#[salsa_macros::interned(no_lifetime, debug, constructor=from_span, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct EditionedFileId {
    pub editioned_file_id: span::EditionedFileId,
}

impl EditionedFileId {
    // Salsa's generated constructor is renamed to `from_span` above, freeing up `new` for this wrapper.
    #[inline]
    pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition) -> Self {
        EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition))
    }

    #[inline]
    pub fn current_edition(db: &dyn salsa::Database, file_id: FileId) -> Self {
        EditionedFileId::new(db, file_id, Edition::CURRENT)
    }

    #[inline]
    pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
        let id = self.editioned_file_id(db);
        id.file_id()
    }

    #[inline]
    pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
        let id = self.editioned_file_id(db);
        (id.file_id(), id.edition())
    }

    #[inline]
    pub fn edition(self, db: &dyn SourceDatabase) -> Edition {
        self.editioned_file_id(db).edition()
    }
}
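
// Usage sketch: interning pairs a file with the edition it should be parsed under,
// e.g. `EditionedFileId::new(db, file_id, Edition::Edition2021)`, or
// `EditionedFileId::current_edition(db, file_id)` when no edition is known.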

#[salsa_macros::input(debug)]
pub struct FileText {
    #[returns(ref)]
    pub text: Arc<str>,
    pub file_id: vfs::FileId,
}

#[salsa_macros::input(debug)]
pub struct FileSourceRootInput {
    pub source_root_id: SourceRootId,
}

#[salsa_macros::input(debug)]
pub struct SourceRootInput {
    pub source_root: Arc<SourceRoot>,
}

/// Database which stores all significant input facts: source code and project
/// model. Everything else in rust-analyzer is derived from these queries.
#[query_group::query_group]
pub trait RootQueryDb: SourceDatabase + salsa::Database {
    /// Parses the file into the syntax tree.
    #[salsa::invoke(parse)]
    #[salsa::lru(128)]
    fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;

    /// Returns the set of errors obtained from parsing the file, including validation errors.
    #[salsa::transparent]
    fn parse_errors(&self, file_id: EditionedFileId) -> Option<&[SyntaxError]>;

    #[salsa::transparent]
    fn toolchain_channel(&self, krate: Crate) -> Option<ReleaseChannel>;

    /// Crates whose root file is in `id`.
    #[salsa::invoke_interned(source_root_crates)]
    fn source_root_crates(&self, id: SourceRootId) -> Arc<[Crate]>;

    #[salsa::transparent]
    fn relevant_crates(&self, file_id: FileId) -> Arc<[Crate]>;

    /// Returns the crates in topological order.
    ///
    /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
    #[salsa::input]
    fn all_crates(&self) -> Arc<Box<[Crate]>>;
}
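
// Query usage sketch (assuming some `db: &dyn RootQueryDb` and a `file_id: FileId`):
//
//     let file = EditionedFileId::current_edition(db.as_dyn_database(), file_id);
//     let tree = db.parse(file).tree();     // memoized, LRU-capped by #[salsa::lru(128)]
//     let errors = db.parse_errors(file);   // `None` when the file parsed cleanly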

#[salsa_macros::db]
pub trait SourceDatabase: salsa::Database {
    /// Text of the file.
    fn file_text(&self, file_id: vfs::FileId) -> FileText;

    fn set_file_text(&mut self, file_id: vfs::FileId, text: &str);

    fn set_file_text_with_durability(
        &mut self,
        file_id: vfs::FileId,
        text: &str,
        durability: Durability,
    );

    /// Contents of the source root.
    fn source_root(&self, id: SourceRootId) -> SourceRootInput;

    fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput;

    fn set_file_source_root_with_durability(
        &mut self,
        id: vfs::FileId,
        source_root_id: SourceRootId,
        durability: Durability,
    );

    /// Sets the contents of the source root.
    fn set_source_root_with_durability(
        &mut self,
        source_root_id: SourceRootId,
        source_root: Arc<SourceRoot>,
        durability: Durability,
    );

    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
        // FIXME: this *somehow* should be platform agnostic...
        let source_root = self.file_source_root(path.anchor);
        let source_root = self.source_root(source_root.source_root_id(self));
        source_root.source_root(self).resolve_path(path)
    }

    #[doc(hidden)]
    fn crates_map(&self) -> Arc<CratesMap>;

    fn nonce_and_revision(&self) -> (Nonce, salsa::Revision);
}
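
// `resolve_path` sketch (hypothetical ids): the path is resolved relative to the
// anchor file, within that file's source root, e.g.
//
//     // Some(FileId) if `sibling.rs` sits next to `anchor_file` in the same source root
//     let sibling = db.resolve_path(AnchoredPath { anchor: anchor_file, path: "sibling.rs" });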

static NEXT_NONCE: AtomicUsize = AtomicUsize::new(0);

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Nonce(usize);

impl Default for Nonce {
    #[inline]
    fn default() -> Self {
        Nonce::new()
    }
}

impl Nonce {
    #[inline]
    pub fn new() -> Nonce {
        Nonce(NEXT_NONCE.fetch_add(1, std::sync::atomic::Ordering::SeqCst))
    }
}

/// Crate-related data shared by the whole workspace.
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct CrateWorkspaceData {
    pub target: Result<target::TargetData, target::TargetLoadError>,
    /// Toolchain version used to compile the crate.
    pub toolchain: Option<Version>,
}

impl CrateWorkspaceData {
    pub fn is_atleast_187(&self) -> bool {
        const VERSION_187: Version = Version {
            major: 1,
            minor: 87,
            patch: 0,
            pre: Prerelease::EMPTY,
            build: BuildMetadata::EMPTY,
        };
        self.toolchain.as_ref().map_or(false, |v| *v >= VERSION_187)
    }
}
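
// Sketch: `is_atleast_187` compares the recorded toolchain against `1.87.0`, so a
// `toolchain` of `Some(Version::new(1, 88, 0))` yields `true`, while `None`
// (toolchain unknown) is conservatively treated as `false`.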

fn toolchain_channel(db: &dyn RootQueryDb, krate: Crate) -> Option<ReleaseChannel> {
    krate.workspace_data(db).toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre))
}

fn parse(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Parse<ast::SourceFile> {
    let _p = tracing::info_span!("parse", ?file_id).entered();
    let (file_id, edition) = file_id.unpack(db.as_dyn_database());
    let text = db.file_text(file_id).text(db);
    ast::SourceFile::parse(text, edition)
}

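// The public `parse_errors` query is `#[salsa::transparent]`: the nested tracked
// function below owns (and memoizes) the boxed error slice, so a `&[SyntaxError]`
// borrowed from the database can be handed out to callers.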
fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<&[SyntaxError]> {
    #[salsa_macros::tracked(returns(ref))]
    fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<Box<[SyntaxError]>> {
        let errors = db.parse(file_id).errors();
        match &*errors {
            [] => None,
            [..] => Some(errors.into()),
        }
    }
    parse_errors(db, file_id).as_ref().map(|it| &**it)
}

fn source_root_crates(db: &dyn RootQueryDb, id: SourceRootId) -> Arc<[Crate]> {
    let crates = db.all_crates();
    crates
        .iter()
        .copied()
        .filter(|&krate| {
            let root_file = krate.data(db).root_file_id;
            db.file_source_root(root_file).source_root_id(db) == id
        })
        .collect()
}

fn relevant_crates(db: &dyn RootQueryDb, file_id: FileId) -> Arc<[Crate]> {
    let _p = tracing::info_span!("relevant_crates").entered();

    let source_root = db.file_source_root(file_id);
    db.source_root_crates(source_root.source_root_id(db))
}

#[must_use]
#[non_exhaustive]
pub struct DbPanicContext;

impl Drop for DbPanicContext {
    fn drop(&mut self) {
        Self::with_ctx(|ctx| assert!(ctx.pop().is_some()));
    }
}

impl DbPanicContext {
    pub fn enter(frame: String) -> DbPanicContext {
        #[expect(clippy::print_stderr, reason = "already panicking anyway")]
        fn set_hook() {
            let default_hook = panic::take_hook();
            panic::set_hook(Box::new(move |panic_info| {
                default_hook(panic_info);
                if let Some(backtrace) = salsa::Backtrace::capture() {
                    eprintln!("{backtrace:#}");
                }
                DbPanicContext::with_ctx(|ctx| {
                    if !ctx.is_empty() {
                        eprintln!("additional context:");
                        for (idx, frame) in ctx.iter().enumerate() {
                            eprintln!("{idx:>4}: {frame}\n");
                        }
                    }
                });
            }));
        }

        static SET_HOOK: Once = Once::new();
        SET_HOOK.call_once(set_hook);

        Self::with_ctx(|ctx| ctx.push(frame));
        DbPanicContext
    }

    fn with_ctx(f: impl FnOnce(&mut Vec<String>)) {
        thread_local! {
            static CTX: RefCell<Vec<String>> = const { RefCell::new(Vec::new()) };
        }
        CTX.with(|ctx| f(&mut ctx.borrow_mut()));
    }
}
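
// Usage sketch (hypothetical `file_path`): `enter` pushes a frame for the lifetime of
// the returned guard; if a panic unwinds while it is live, the hook installed above
// prints the salsa backtrace plus every frame still on the stack.
//
//     let _guard = DbPanicContext::enter(format!("processing {file_path}"));
//     // ... run queries; a panic here reports "processing <file_path>" as extra context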