// ra_ap_base_db/lib.rs

1//! base_db defines basic database traits. The concrete DB is defined by ide.
2// FIXME: Rename this crate, base db is non descriptive
3mod change;
4mod input;
5
6use std::panic;
7
8use ra_salsa::Durability;
9use rustc_hash::FxHashMap;
10use span::EditionedFileId;
11use syntax::{ast, Parse, SourceFile, SyntaxError};
12use triomphe::Arc;
13use vfs::{AbsPathBuf, FileId};
14
15pub use crate::{
16    change::FileChange,
17    input::{
18        CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env,
19        LangCrateOrigin, ProcMacroPaths, ReleaseChannel, SourceRoot, SourceRootId,
20        TargetLayoutLoadResult,
21    },
22};
23pub use ra_salsa::{self, Cancelled};
24pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, VfsPath};
25
26pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
27
/// Implements `ra_salsa::InternKey` for a tuple struct whose field `0` is a
/// `ra_salsa::InternId`, wiring up the two conversion methods the trait needs.
#[macro_export]
macro_rules! impl_intern_key {
    ($ty:ident) => {
        impl $crate::ra_salsa::InternKey for $ty {
            fn from_intern_id(v: $crate::ra_salsa::InternId) -> Self {
                $ty(v)
            }

            fn as_intern_id(&self) -> $crate::ra_salsa::InternId {
                self.0
            }
        }
    };
}
41
/// Borrow `self` as the (usually wider database) trait object `T`.
/// Lets code written against one database trait hand out a view of a supertrait.
pub trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}
45
// Default capacities for the `#[ra_salsa::lru]` queries (`file_text`, `parse`).
pub const DEFAULT_FILE_TEXT_LRU_CAP: u16 = 16;
pub const DEFAULT_PARSE_LRU_CAP: u16 = 128;
// NOTE(review): no borrowck query exists in this file; presumably consumed by a
// downstream crate that defines one — confirm at the use site.
pub const DEFAULT_BORROWCK_LRU_CAP: u16 = 2024;
49
/// File-system-ish queries the concrete database must answer; kept separate from
/// the salsa query groups (see `FileLoaderDelegate` at the bottom of this file).
pub trait FileLoader {
    /// Resolves `path` (interpreted relative to its anchor file) to a `FileId`,
    /// returning `None` when it does not resolve to a known file.
    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
    /// Crates whose root's source root is the same as the source root of `file_id`
    fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]>;
}
55
/// Database which stores all significant input facts: source code and project
/// model. Everything else in rust-analyzer is derived from these queries.
#[ra_salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: FileLoader + std::fmt::Debug {
    /// LZ4-compressed (size-prepended) bytes of the file. This is the raw input;
    /// it is written by `SourceDatabaseFileInputExt::set_file_text_with_durability`.
    #[ra_salsa::input]
    fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>;

    /// Text of the file, decompressed from `compressed_file_text`.
    #[ra_salsa::lru]
    fn file_text(&self, file_id: FileId) -> Arc<str>;

    /// Parses the file into the syntax tree.
    #[ra_salsa::lru]
    fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;

    /// Returns the set of errors obtained from parsing the file including validation errors.
    fn parse_errors(&self, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>>;

    /// The crate graph.
    #[ra_salsa::input]
    fn crate_graph(&self) -> Arc<CrateGraph>;

    /// Per-crate workspace data (proc-macro cwd, target data layout, toolchain
    /// version); see `CrateWorkspaceData` below.
    #[ra_salsa::input]
    fn crate_workspace_data(&self) -> Arc<FxHashMap<CrateId, Arc<CrateWorkspaceData>>>;

    /// Release channel parsed from the pre-release segment of the crate's
    /// toolchain version. `transparent`: computed on every call, never cached.
    #[ra_salsa::transparent]
    fn toolchain_channel(&self, krate: CrateId) -> Option<ReleaseChannel>;
}
84
/// Crate related data shared by the whole workspace.
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct CrateWorkspaceData {
    /// The working directory to run proc-macros in. This is usually the workspace root of cargo workspaces.
    pub proc_macro_cwd: Option<AbsPathBuf>,
    // FIXME: Consider removing this, making HirDatabase::target_data_layout an input query
    pub data_layout: TargetLayoutLoadResult,
    /// Toolchain version used to compile the crate (semver; its pre-release
    /// field is what `toolchain_channel` parses into a `ReleaseChannel`).
    pub toolchain: Option<Version>,
}
95
96fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseChannel> {
97    db.crate_workspace_data()
98        .get(&krate)?
99        .toolchain
100        .as_ref()
101        .and_then(|v| ReleaseChannel::from_str(&v.pre))
102}
103
104fn parse(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Parse<ast::SourceFile> {
105    let _p = tracing::info_span!("parse", ?file_id).entered();
106    let (file_id, edition) = file_id.unpack();
107    let text = db.file_text(file_id);
108    SourceFile::parse(&text, edition)
109}
110
111fn parse_errors(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>> {
112    let errors = db.parse(file_id).errors();
113    match &*errors {
114        [] => None,
115        [..] => Some(errors.into()),
116    }
117}
118
119fn file_text(db: &dyn SourceDatabase, file_id: FileId) -> Arc<str> {
120    let bytes = db.compressed_file_text(file_id);
121    let bytes =
122        lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail");
123    let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8");
124    Arc::from(text)
125}
126
/// We don't want to give HIR knowledge of source roots, hence we extract these
/// methods into a separate DB.
#[ra_salsa::query_group(SourceRootDatabaseStorage)]
pub trait SourceRootDatabase: SourceDatabase {
    /// Source root of the file.
    #[ra_salsa::input]
    fn file_source_root(&self, file_id: FileId) -> SourceRootId;
    /// Contents of the source root.
    #[ra_salsa::input]
    fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;

    /// Crates whose root file is in `id`.
    fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>;
}
142
/// Extension trait for writing file-text inputs. The blanket impl below
/// compresses the text before storing it into `compressed_file_text`.
pub trait SourceDatabaseFileInputExt {
    /// Sets the text of `file_id` with `Durability::LOW`.
    fn set_file_text(&mut self, file_id: FileId, text: &str) {
        self.set_file_text_with_durability(file_id, text, Durability::LOW);
    }

    /// Sets the text of `file_id`, tagging the salsa input with `durability`.
    fn set_file_text_with_durability(
        &mut self,
        file_id: FileId,
        text: &str,
        durability: Durability,
    );
}
155
156impl<Db: ?Sized + SourceRootDatabase> SourceDatabaseFileInputExt for Db {
157    fn set_file_text_with_durability(
158        &mut self,
159        file_id: FileId,
160        text: &str,
161        durability: Durability,
162    ) {
163        let bytes = text.as_bytes();
164        let compressed = lz4_flex::compress_prepend_size(bytes);
165        self.set_compressed_file_text_with_durability(
166            file_id,
167            Arc::from(compressed.as_slice()),
168            durability,
169        )
170    }
171}
172
173fn source_root_crates(db: &dyn SourceRootDatabase, id: SourceRootId) -> Arc<[CrateId]> {
174    let graph = db.crate_graph();
175    let mut crates = graph
176        .iter()
177        .filter(|&krate| {
178            let root_file = graph[krate].root_file_id;
179            db.file_source_root(root_file) == id
180        })
181        .collect::<Vec<_>>();
182    crates.sort();
183    crates.dedup();
184    crates.into_iter().collect()
185}
186
// FIXME: Would be nice to get rid of this somehow
/// Silly workaround for cyclic deps due to the SourceRootDatabase and SourceDatabase split
/// regarding FileLoader
///
/// Newtype over a database reference; `FileLoader` is implemented for
/// `FileLoaderDelegate<&T>` so the concrete DB can forward to it.
pub struct FileLoaderDelegate<T>(pub T);
191
192impl<T: SourceRootDatabase> FileLoader for FileLoaderDelegate<&'_ T> {
193    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
194        // FIXME: this *somehow* should be platform agnostic...
195        let source_root = self.0.file_source_root(path.anchor);
196        let source_root = self.0.source_root(source_root);
197        source_root.resolve_path(path)
198    }
199
200    fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
201        let _p = tracing::info_span!("relevant_crates").entered();
202        let source_root = self.0.file_source_root(file_id);
203        self.0.source_root_crates(source_root)
204    }
205}