wdl_analysis/
analyzer.rs

1//! Implementation of the analyzer.
2
3use std::ffi::OsStr;
4use std::fmt;
5use std::future::Future;
6use std::mem::ManuallyDrop;
7use std::ops::Range;
8use std::path::Path;
9use std::path::absolute;
10use std::sync::Arc;
11use std::thread::JoinHandle;
12
13use anyhow::Context;
14use anyhow::Error;
15use anyhow::Result;
16use anyhow::anyhow;
17use anyhow::bail;
18use ignore::WalkBuilder;
19use indexmap::IndexSet;
20use line_index::LineCol;
21use line_index::LineIndex;
22use line_index::WideEncoding;
23use line_index::WideLineCol;
24use lsp_types::CompletionResponse;
25use lsp_types::DocumentSymbolResponse;
26use lsp_types::GotoDefinitionResponse;
27use lsp_types::Hover;
28use lsp_types::InlayHint;
29use lsp_types::Location;
30use lsp_types::SemanticTokensResult;
31use lsp_types::SignatureHelp;
32use lsp_types::SymbolInformation;
33use lsp_types::WorkspaceEdit;
34use path_clean::PathClean;
35use tokio::runtime::Handle;
36use tokio::sync::mpsc;
37use tokio::sync::oneshot;
38use url::Url;
39
40use crate::config::Config;
41use crate::document::Document;
42use crate::graph::DocumentGraphNode;
43use crate::graph::ParseState;
44use crate::queue::AddRequest;
45use crate::queue::AnalysisQueue;
46use crate::queue::AnalyzeRequest;
47use crate::queue::CompletionRequest;
48use crate::queue::DocumentSymbolRequest;
49use crate::queue::FindAllReferencesRequest;
50use crate::queue::FormatRequest;
51use crate::queue::GotoDefinitionRequest;
52use crate::queue::HoverRequest;
53use crate::queue::InlayHintsRequest;
54use crate::queue::NotifyChangeRequest;
55use crate::queue::NotifyIncrementalChangeRequest;
56use crate::queue::RemoveRequest;
57use crate::queue::RenameRequest;
58use crate::queue::Request;
59use crate::queue::SemanticTokenRequest;
60use crate::queue::SignatureHelpRequest;
61use crate::queue::WorkspaceSymbolRequest;
62use crate::rayon::RayonHandle;
63
/// The kind of analysis progress being reported.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ProgressKind {
    /// Documents are being parsed.
    Parsing,
    /// Documents are being analyzed.
    Analyzing,
}

impl fmt::Display for ProgressKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Render the lowercase human-readable name of the phase.
        f.write_str(match self {
            Self::Parsing => "parsing",
            Self::Analyzing => "analyzing",
        })
    }
}
81
82/// Converts a local file path to a file schemed URI.
83pub fn path_to_uri(path: impl AsRef<Path>) -> Option<Url> {
84    Url::from_file_path(absolute(path).ok()?.clean()).ok()
85}
86
/// Represents the result of an analysis.
///
/// Analysis results are cheap to clone.
#[derive(Debug, Clone)]
pub struct AnalysisResult {
    /// The error that occurred when attempting to parse the file (e.g. the file
    /// could not be opened).
    ///
    /// `None` when the file was parsed successfully.
    error: Option<Arc<Error>>,
    /// The monotonic version of the document that was parsed.
    ///
    /// This value comes from incremental changes to the file.
    ///
    /// If `None`, the parsed version had no incremental changes.
    version: Option<i32>,
    /// The lines indexed for the parsed file.
    ///
    /// `None` when there was an error parsing the file.
    lines: Option<Arc<LineIndex>>,
    /// The analyzed document.
    document: Document,
}
106
107impl AnalysisResult {
108    /// Constructs a new analysis result for the given graph node.
109    pub(crate) fn new(node: &DocumentGraphNode) -> Self {
110        if let Some(error) = node.analysis_error() {
111            return Self {
112                error: Some(error.clone()),
113                version: node.parse_state().version(),
114                lines: node.parse_state().lines().cloned(),
115                document: Document::default_from_uri(node.uri().clone()),
116            };
117        }
118
119        let (error, version, lines) = match node.parse_state() {
120            ParseState::NotParsed => unreachable!("document should have been parsed"),
121            ParseState::Error(e) => (Some(e), None, None),
122            ParseState::Parsed { version, lines, .. } => (None, *version, Some(lines)),
123        };
124
125        Self {
126            error: error.cloned(),
127            version,
128            lines: lines.cloned(),
129            document: node
130                .document()
131                .expect("analysis should have completed")
132                .clone(),
133        }
134    }
135
136    /// Gets the error that occurred when attempting to parse the document.
137    ///
138    /// An example error would be if the file could not be opened.
139    ///
140    /// Returns `None` if the document was parsed successfully.
141    pub fn error(&self) -> Option<&Arc<Error>> {
142        self.error.as_ref()
143    }
144
145    /// Gets the incremental version of the parsed document.
146    ///
147    /// Returns `None` if there was an error parsing the document or if the
148    /// parsed document had no incremental changes.
149    pub fn version(&self) -> Option<i32> {
150        self.version
151    }
152
153    /// Gets the line index of the parsed document.
154    ///
155    /// Returns `None` if there was an error parsing the document.
156    pub fn lines(&self) -> Option<&Arc<LineIndex>> {
157        self.lines.as_ref()
158    }
159
160    /// Gets the analyzed document.
161    pub fn document(&self) -> &Document {
162        &self.document
163    }
164}
165
/// Represents a position in a document's source.
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Default)]
pub struct SourcePosition {
    /// Line position in a document (zero-based).
    // NOTE: this field must come before `character` to maintain a correct sort order.
    pub line: u32,
    /// Character offset on a line in a document (zero-based). The meaning of
    /// this offset is determined by the position encoding.
    pub character: u32,
}

impl SourcePosition {
    /// Constructs a new source position from a line and character offset.
    pub fn new(line: u32, character: u32) -> Self {
        SourcePosition { line, character }
    }
}
183
/// Represents the encoding of a source position.
///
/// Determines how a [`SourcePosition`]'s `character` offset is interpreted.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum SourcePositionEncoding {
    /// The position is UTF8 encoded.
    ///
    /// A position's character is the UTF-8 offset from the start of the line.
    UTF8,
    /// The position is UTF16 encoded.
    ///
    /// A position's character is the UTF-16 offset from the start of the line.
    UTF16,
}
196
/// Represents an edit to a document's source.
#[derive(Debug, Clone)]
pub struct SourceEdit {
    /// The range of the edit.
    ///
    /// Note that invalid ranges will cause the edit to be ignored.
    range: Range<SourcePosition>,
    /// The encoding of the edit positions.
    encoding: SourcePositionEncoding,
    /// The replacement text substituted for the range when the edit is
    /// applied.
    text: String,
}
209
210impl SourceEdit {
211    /// Creates a new source edit for the given range and replacement text.
212    pub fn new(
213        range: Range<SourcePosition>,
214        encoding: SourcePositionEncoding,
215        text: impl Into<String>,
216    ) -> Self {
217        Self {
218            range,
219            encoding,
220            text: text.into(),
221        }
222    }
223
224    /// Gets the range of the edit.
225    pub(crate) fn range(&self) -> Range<SourcePosition> {
226        self.range.start..self.range.end
227    }
228
229    /// Applies the edit to the given string if it's in range.
230    pub(crate) fn apply(&self, source: &mut String, lines: &LineIndex) -> Result<()> {
231        let (start, end) = match self.encoding {
232            SourcePositionEncoding::UTF8 => (
233                LineCol {
234                    line: self.range.start.line,
235                    col: self.range.start.character,
236                },
237                LineCol {
238                    line: self.range.end.line,
239                    col: self.range.end.character,
240                },
241            ),
242            SourcePositionEncoding::UTF16 => (
243                lines
244                    .to_utf8(
245                        WideEncoding::Utf16,
246                        WideLineCol {
247                            line: self.range.start.line,
248                            col: self.range.start.character,
249                        },
250                    )
251                    .context("invalid edit start position")?,
252                lines
253                    .to_utf8(
254                        WideEncoding::Utf16,
255                        WideLineCol {
256                            line: self.range.end.line,
257                            col: self.range.end.character,
258                        },
259                    )
260                    .context("invalid edit end position")?,
261            ),
262        };
263
264        let range: Range<usize> = lines
265            .offset(start)
266            .context("invalid edit start position")?
267            .into()
268            ..lines
269                .offset(end)
270                .context("invalid edit end position")?
271                .into();
272
273        if !source.is_char_boundary(range.start) {
274            bail!("edit start position is not at a character boundary");
275        }
276
277        if !source.is_char_boundary(range.end) {
278            bail!("edit end position is not at a character boundary");
279        }
280
281        source.replace_range(range, &self.text);
282        Ok(())
283    }
284}
285
/// Represents an incremental change to a document.
///
/// See [`Analyzer::notify_incremental_change`].
#[derive(Clone, Debug)]
pub struct IncrementalChange {
    /// The monotonic version of the document.
    ///
    /// This is expected to increase for each incremental change.
    pub version: i32,
    /// The source to start from for applying edits.
    ///
    /// If this is `Some`, a full reparse will occur after applying edits to
    /// this string.
    ///
    /// If this is `None`, edits will be applied to the existing CST and an
    /// attempt will be made to incrementally parse the file.
    pub start: Option<String>,
    /// The source edits to apply.
    pub edits: Vec<SourceEdit>,
}
304
/// Represents a Workflow Description Language (WDL) document analyzer.
///
/// By default, analysis parses documents, performs validation checks, resolves
/// imports, and performs type checking.
///
/// Each analysis operation is processed in order of request; however, the
/// individual parsing, resolution, and analysis of documents is performed
/// across a thread pool.
///
/// Note that dropping the analyzer is a blocking operation as it will wait for
/// the queue thread to join.
///
/// The type parameter is the context type passed to the progress callback.
#[derive(Debug)]
pub struct Analyzer<Context> {
    /// The sender for sending analysis requests to the queue.
    ///
    /// Wrapped in `ManuallyDrop` so `Drop` can explicitly drop the sender
    /// (closing the channel) before joining the queue thread.
    sender: ManuallyDrop<mpsc::UnboundedSender<Request<Context>>>,
    /// The join handle for the queue task.
    ///
    /// `None` only after `Drop` has taken the handle to join the thread.
    handle: Option<JoinHandle<()>>,
    /// The config to use during analysis.
    config: Config,
}
327
328impl<Context> Analyzer<Context>
329where
330    Context: Send + Clone + 'static,
331{
    /// Constructs a new analyzer with the given config.
    ///
    /// The provided progress callback will be invoked during analysis.
    ///
    /// The analyzer will use a default validator for validation.
    ///
    /// The analyzer must be constructed from the context of a Tokio runtime.
    pub fn new<Progress, Return>(config: Config, progress: Progress) -> Self
    where
        Progress: Fn(Context, ProgressKind, usize, usize) -> Return + Send + 'static,
        Return: Future<Output = ()>,
    {
        // Delegate to the validator-aware constructor with the default
        // validator factory.
        Self::new_with_validator(config, progress, crate::Validator::default)
    }
346
347    /// Constructs a new analyzer with the given config and validator function.
348    ///
349    /// The provided progress callback will be invoked during analysis.
350    ///
351    /// This validator function will be called once per worker thread to
352    /// initialize a thread-local validator.
353    ///
354    /// The analyzer must be constructed from the context of a Tokio runtime.
355    pub fn new_with_validator<Progress, Return, Validator>(
356        config: Config,
357        progress: Progress,
358        validator: Validator,
359    ) -> Self
360    where
361        Progress: Fn(Context, ProgressKind, usize, usize) -> Return + Send + 'static,
362        Return: Future<Output = ()>,
363        Validator: Fn() -> crate::Validator + Send + Sync + 'static,
364    {
365        let (tx, rx) = mpsc::unbounded_channel();
366        let tokio = Handle::current();
367        let inner_config = config.clone();
368        let handle = std::thread::spawn(move || {
369            let queue = AnalysisQueue::new(inner_config, tokio, progress, validator);
370            queue.run(rx);
371        });
372
373        Self {
374            sender: ManuallyDrop::new(tx),
375            handle: Some(handle),
376            config,
377        }
378    }
379
380    /// Adds a document to the analyzer. Document can be a local file or a URL.
381    ///
382    /// Returns an error if the document could not be added.
383    pub async fn add_document(&self, uri: Url) -> Result<()> {
384        let mut documents = IndexSet::new();
385        documents.insert(uri);
386
387        let (tx, rx) = oneshot::channel();
388        self.sender
389            .send(Request::Add(AddRequest {
390                documents,
391                completed: tx,
392            }))
393            .map_err(|_| {
394                anyhow!("failed to send request to analysis queue because the channel has closed")
395            })?;
396
397        rx.await.map_err(|_| {
398            anyhow!("failed to receive response from analysis queue because the channel has closed")
399        })?;
400
401        Ok(())
402    }
403
    /// Adds a directory to the analyzer. It will recursively search for WDL
    /// documents in the supplied directory.
    ///
    /// Returns an error if there was a problem discovering documents for the
    /// specified path.
    pub async fn add_directory(&self, path: impl AsRef<Path>) -> Result<()> {
        let path = path.as_ref().to_path_buf();
        let config = self.config.clone();
        // Start by searching for documents
        // The walk is performed on the rayon pool so the blocking directory
        // traversal does not stall the async caller.
        let documents = RayonHandle::spawn(move || -> Result<IndexSet<Url>> {
            let mut documents = IndexSet::new();

            let metadata = path.metadata().with_context(|| {
                format!(
                    "failed to read metadata for `{path}`",
                    path = path.display()
                )
            })?;

            if metadata.is_file() {
                bail!("`{path}` is a file, not a directory", path = path.display());
            }

            // Configure the walker: standard ignore filters are disabled, but
            // a custom ignore filename from the config (if any) is honored.
            let mut walker = WalkBuilder::new(&path);
            if let Some(ignore_filename) = config.ignore_filename() {
                walker.add_custom_ignore_filename(ignore_filename);
            }
            let walker = walker
                .standard_filters(false)
                .parents(true)
                .follow_links(true)
                .build();

            for result in walker {
                let entry = result.with_context(|| {
                    format!("failed to read directory `{path}`", path = path.display())
                })?;

                // Skip entries without a file type
                let Some(file_type) = entry.file_type() else {
                    continue;
                };
                // Skip non-files
                if !file_type.is_file() {
                    continue;
                }
                // Skip files without a `.wdl` extension
                // NOTE(review): comparison is case-sensitive, so `.WDL` files
                // are skipped — confirm this is intended.
                if entry.path().extension() != Some(OsStr::new("wdl")) {
                    continue;
                }

                documents.insert(path_to_uri(entry.path()).with_context(|| {
                    format!(
                        "failed to convert path `{path}` to a URI",
                        path = entry.path().display()
                    )
                })?);
            }

            Ok(documents)
        })
        .await?;

        // Nothing to do if the directory contained no WDL documents.
        if documents.is_empty() {
            return Ok(());
        }

        // Send the add request to the queue
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Add(AddRequest {
                documents,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!("failed to send request to analysis queue because the channel has closed")
            })?;

        rx.await.map_err(|_| {
            anyhow!("failed to receive response from analysis queue because the channel has closed")
        })?;

        Ok(())
    }
488
489    /// Removes the specified documents from the analyzer.
490    ///
491    /// If a specified URI is a prefix (i.e. directory) of documents known to
492    /// the analyzer, those documents will be removed.
493    ///
494    /// Documents are only removed when not referenced from importing documents.
495    pub async fn remove_documents(&self, documents: Vec<Url>) -> Result<()> {
496        // Send the remove request to the queue
497        let (tx, rx) = oneshot::channel();
498        self.sender
499            .send(Request::Remove(RemoveRequest {
500                documents,
501                completed: tx,
502            }))
503            .map_err(|_| {
504                anyhow!("failed to send request to analysis queue because the channel has closed")
505            })?;
506
507        rx.await.map_err(|_| {
508            anyhow!("failed to receive response from analysis queue because the channel has closed")
509        })?;
510
511        Ok(())
512    }
513
514    /// Notifies the analyzer that a document has an incremental change.
515    ///
516    /// Changes to documents that aren't known to the analyzer are ignored.
517    pub fn notify_incremental_change(
518        &self,
519        document: Url,
520        change: IncrementalChange,
521    ) -> Result<()> {
522        self.sender
523            .send(Request::NotifyIncrementalChange(
524                NotifyIncrementalChangeRequest { document, change },
525            ))
526            .map_err(|_| {
527                anyhow!("failed to send request to analysis queue because the channel has closed")
528            })
529    }
530
531    /// Notifies the analyzer that a document has fully changed and should be
532    /// fetched again.
533    ///
534    /// Changes to documents that aren't known to the analyzer are ignored.
535    ///
536    /// If `discard_pending` is true, then any pending incremental changes are
537    /// discarded; otherwise, the full change is ignored if there are pending
538    /// incremental changes.
539    pub fn notify_change(&self, document: Url, discard_pending: bool) -> Result<()> {
540        self.sender
541            .send(Request::NotifyChange(NotifyChangeRequest {
542                document,
543                discard_pending,
544            }))
545            .map_err(|_| {
546                anyhow!("failed to send request to analysis queue because the channel has closed")
547            })
548    }
549
550    /// Analyzes a specific document.
551    ///
552    /// The provided context is passed to the progress callback.
553    ///
554    /// If the document is up-to-date and was previously analyzed, the current
555    /// analysis result is returned.
556    ///
557    /// Returns an analysis result for each document that was analyzed.
558    pub async fn analyze_document(
559        &self,
560        context: Context,
561        document: Url,
562    ) -> Result<Vec<AnalysisResult>> {
563        // Send the analyze request to the queue
564        let (tx, rx) = oneshot::channel();
565        self.sender
566            .send(Request::Analyze(AnalyzeRequest {
567                document: Some(document),
568                context,
569                completed: tx,
570            }))
571            .map_err(|_| {
572                anyhow!("failed to send request to analysis queue because the channel has closed")
573            })?;
574
575        rx.await.map_err(|_| {
576            anyhow!("failed to receive response from analysis queue because the channel has closed")
577        })?
578    }
579
580    /// Performs analysis of all documents.
581    ///
582    /// The provided context is passed to the progress callback.
583    ///
584    /// If a document is up-to-date and was previously analyzed, the current
585    /// analysis result is returned.
586    ///
587    /// Returns an analysis result for each document that was analyzed.
588    pub async fn analyze(&self, context: Context) -> Result<Vec<AnalysisResult>> {
589        // Send the analyze request to the queue
590        let (tx, rx) = oneshot::channel();
591        self.sender
592            .send(Request::Analyze(AnalyzeRequest {
593                document: None, // analyze all documents
594                context,
595                completed: tx,
596            }))
597            .map_err(|_| {
598                anyhow!("failed to send request to analysis queue because the channel has closed")
599            })?;
600
601        rx.await.map_err(|_| {
602            anyhow!("failed to receive response from analysis queue because the channel has closed")
603        })?
604    }
605
606    /// Formats a document.
607    pub async fn format_document(&self, document: Url) -> Result<Option<(u32, u32, String)>> {
608        let (tx, rx) = oneshot::channel();
609        self.sender
610            .send(Request::Format(FormatRequest {
611                document,
612                completed: tx,
613            }))
614            .map_err(|_| {
615                anyhow!("failed to send format request to the queue because the channel has closed")
616            })?;
617
618        rx.await.map_err(|_| {
619            anyhow!("failed to send format request to the queue because the channel has closed")
620        })
621    }
622
623    /// Performs a "goto definition" for a symbol at the current position.
624    pub async fn goto_definition(
625        &self,
626        document: Url,
627        position: SourcePosition,
628        encoding: SourcePositionEncoding,
629    ) -> Result<Option<GotoDefinitionResponse>> {
630        let (tx, rx) = oneshot::channel();
631        self.sender
632            .send(Request::GotoDefinition(GotoDefinitionRequest {
633                document,
634                position,
635                encoding,
636                completed: tx,
637            }))
638            .map_err(|_| {
639                anyhow!(
640                    "failed to send goto definition request to analysis queue because the channel \
641                     has closed"
642                )
643            })?;
644
645        rx.await.map_err(|_| {
646            anyhow!(
647                "failed to receive goto definition response from analysis queue because the \
648                 channel has closed"
649            )
650        })
651    }
652
653    /// Performs a `find references` for a symbol across all the documents.
654    pub async fn find_all_references(
655        &self,
656        document: Url,
657        position: SourcePosition,
658        encoding: SourcePositionEncoding,
659        include_declaration: bool,
660    ) -> Result<Vec<Location>> {
661        let (tx, rx) = oneshot::channel();
662        self.sender
663            .send(Request::FindAllReferences(FindAllReferencesRequest {
664                document,
665                position,
666                encoding,
667                include_declaration,
668                completed: tx,
669            }))
670            .map_err(|_| {
671                anyhow!(
672                    "failed to send find all references request to analysis queue because the \
673                     channel has closed"
674                )
675            })?;
676
677        rx.await.map_err(|_| {
678            anyhow!(
679                "failed to receive find all references response from analysis queue because the \
680                 client channel has closed"
681            )
682        })
683    }
684
685    /// Performs a `auto-completion` for a symbol.
686    pub async fn completion(
687        &self,
688        context: Context,
689        document: Url,
690        position: SourcePosition,
691        encoding: SourcePositionEncoding,
692    ) -> Result<Option<CompletionResponse>> {
693        let (tx, rx) = oneshot::channel();
694        self.sender
695            .send(Request::Completion(CompletionRequest {
696                document,
697                position,
698                encoding,
699                context,
700                completed: tx,
701            }))
702            .map_err(|_| {
703                anyhow!(
704                    "failed to send completion request to analysis queue because the channel has \
705                     closed"
706                )
707            })?;
708
709        rx.await.map_err(|_| {
710            anyhow!(
711                "failed to send completion request to analysis queue because the channel has \
712                 closed"
713            )
714        })
715    }
716
717    /// Performs a `hover` for a symbol at a given position in a document.
718    pub async fn hover(
719        &self,
720        document: Url,
721        position: SourcePosition,
722        encoding: SourcePositionEncoding,
723    ) -> Result<Option<Hover>> {
724        let (tx, rx) = oneshot::channel();
725        self.sender
726            .send(Request::Hover(HoverRequest {
727                document,
728                position,
729                encoding,
730                completed: tx,
731            }))
732            .map_err(|_| {
733                anyhow!(
734                    "failed to send hover request to analysis queue because the channel has closed"
735                )
736            })?;
737
738        rx.await.map_err(|_| {
739            anyhow!("failed to send hover request to analysis queue because the channel has closed")
740        })
741    }
742
743    /// Renames a symbol at a given position across the workspace.
744    pub async fn rename(
745        &self,
746        document: Url,
747        position: SourcePosition,
748        encoding: SourcePositionEncoding,
749        new_name: String,
750    ) -> Result<Option<WorkspaceEdit>> {
751        let (tx, rx) = oneshot::channel();
752        self.sender
753            .send(Request::Rename(RenameRequest {
754                document,
755                position,
756                encoding,
757                new_name,
758                completed: tx,
759            }))
760            .map_err(|_| {
761                anyhow!(
762                    "failed to send rename request to analysis queue because the channel has \
763                     closed"
764                )
765            })?;
766
767        rx.await.map_err(|_| {
768            anyhow!(
769                "failed to receive rename response from analysis queue because the channel has \
770                 closed"
771            )
772        })
773    }
774
775    /// Gets semantic tokens for a document
776    pub async fn semantic_tokens(&self, document: Url) -> Result<Option<SemanticTokensResult>> {
777        let (tx, rx) = oneshot::channel();
778        self.sender
779            .send(Request::SemanticTokens(SemanticTokenRequest {
780                document,
781                completed: tx,
782            }))
783            .map_err(|_| {
784                anyhow!(
785                    "failed to send semantic tokens request to analysis queue because the channel \
786                     has closed"
787                )
788            })?;
789
790        rx.await.map_err(|_| {
791            anyhow!(
792                "failed to receive semantic tokens response from analysis queue because the \
793                 channel has closed"
794            )
795        })
796    }
797
798    /// Gets document symbols for a document.
799    pub async fn document_symbol(&self, document: Url) -> Result<Option<DocumentSymbolResponse>> {
800        let (tx, rx) = oneshot::channel();
801        self.sender
802            .send(Request::DocumentSymbol(DocumentSymbolRequest {
803                document,
804                completed: tx,
805            }))
806            .map_err(|_| {
807                anyhow!(
808                    "failed to send document symbol request to analysis queue because the channel \
809                     has closed"
810                )
811            })?;
812
813        rx.await.map_err(|_| {
814            anyhow!(
815                "failed to receive document symbol request to analysis queue because the channel \
816                 has closed"
817            )
818        })
819    }
820
821    /// Gets document symbols for the workspace.
822    pub async fn workspace_symbol(&self, query: String) -> Result<Option<Vec<SymbolInformation>>> {
823        let (tx, rx) = oneshot::channel();
824        self.sender
825            .send(Request::WorkspaceSymbol(WorkspaceSymbolRequest {
826                query,
827                completed: tx,
828            }))
829            .map_err(|_| {
830                anyhow!(
831                    "failed to send workspace symbol request to analysis queue because the \
832                     channel has closed"
833                )
834            })?;
835
836        rx.await.map_err(|_| {
837            anyhow!(
838                "failed to receive workspace symbol response from analysis queue because the \
839                 channel has closed"
840            )
841        })
842    }
843
844    /// Gets signature help for a function call at a given position.
845    pub async fn signature_help(
846        &self,
847        document: Url,
848        position: SourcePosition,
849        encoding: SourcePositionEncoding,
850    ) -> Result<Option<SignatureHelp>> {
851        let (tx, rx) = oneshot::channel();
852        self.sender
853            .send(Request::SignatureHelp(SignatureHelpRequest {
854                document,
855                position,
856                encoding,
857                completed: tx,
858            }))
859            .map_err(|_| {
860                anyhow!(
861                    "failed to send signature help request to analysis queue because the channel \
862                     has closed"
863                )
864            })?;
865
866        rx.await.map_err(|_| {
867            anyhow!(
868                "failed to receive signature help response from analysis queue because the \
869                 channel has closed"
870            )
871        })
872    }
873
874    /// Requests inlay hints for a document.
875    pub async fn inlay_hints(
876        &self,
877        document: Url,
878        range: lsp_types::Range,
879    ) -> Result<Option<Vec<InlayHint>>> {
880        let (tx, rx) = oneshot::channel();
881        self.sender
882            .send(Request::InlayHints(InlayHintsRequest {
883                document,
884                range,
885                completed: tx,
886            }))
887            .map_err(|_| {
888                anyhow!(
889                    "failed to send inlay hints request to analysis queue because the channel has \
890                     closed"
891                )
892            })?;
893
894        rx.await.map_err(|_| {
895            anyhow!(
896                "failed to receive inlay hints response from analysis queue because the channel \
897                 has closed"
898            )
899        })
900    }
901}
902
903impl Default for Analyzer<()> {
904    fn default() -> Self {
905        Self::new(Default::default(), |_, _, _, _| async {})
906    }
907}
908
impl<C> Drop for Analyzer<C> {
    /// Shuts down the analyzer by closing the request channel and waiting
    /// for the analysis queue thread to finish.
    fn drop(&mut self) {
        // Drop the sender first so the queue's receiving end observes a
        // closed channel before we join below; presumably the queue thread
        // exits when the channel closes, otherwise the join could block
        // forever (NOTE(review): confirm the queue loop terminates on
        // channel closure).
        // SAFETY: the sender is dropped exactly once here and is never
        // accessed again — `drop` is the final use of `self`.
        unsafe { ManuallyDrop::drop(&mut self.sender) };
        // Wait for the queue thread to terminate; `unwrap` propagates a
        // panic from that thread.
        if let Some(handle) = self.handle.take() {
            handle.join().unwrap();
        }
    }
}
917
918/// Constant that asserts `Analyzer` is `Send + Sync`; if not, it fails to
919/// compile.
920const _: () = {
921    /// Helper that will fail to compile if T is not `Send + Sync`.
922    const fn _assert<T: Send + Sync>() {}
923    _assert::<Analyzer<()>>();
924};
925
#[cfg(test)]
mod test {
    use std::fs;

    use tempfile::TempDir;
    use wdl_ast::Severity;

    use super::*;

    /// WDL source in which a task and a workflow both use the name `test`,
    /// producing a name-conflict diagnostic.
    const CONFLICTING_SOURCE: &str = r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#;

    /// WDL source where the task and workflow names do not collide.
    const FIXED_SOURCE: &str = r#"version 1.1

task test {
    command <<<>>>
}

workflow something_else {
}
"#;

    /// Asserts that a document carries exactly one diagnostic: the workflow
    /// name conflict error, with no associated lint rule.
    fn assert_conflict_diagnostic(document: &Document) {
        let mut diagnostics = document.diagnostics();
        let diagnostic = diagnostics
            .next()
            .expect("document should have a diagnostic");
        assert!(diagnostics.next().is_none(), "expected exactly one diagnostic");
        assert_eq!(diagnostic.rule(), None);
        assert_eq!(diagnostic.severity(), Severity::Error);
        assert_eq!(diagnostic.message(), "conflicting workflow name `test`");
    }

    #[tokio::test]
    async fn it_returns_empty_results() {
        // Analyzing with no documents added yields no results.
        let analyzer = Analyzer::default();
        let results = analyzer.analyze(()).await.unwrap();
        assert!(results.is_empty());
    }

    #[tokio::test]
    async fn it_analyzes_a_document() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let source_path = dir.path().join("foo.wdl");
        fs::write(&source_path, CONFLICTING_SOURCE).expect("failed to create test file");

        // Analyze the file and check the resulting diagnostic.
        let analyzer = Analyzer::default();
        analyzer
            .add_document(path_to_uri(&source_path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_conflict_diagnostic(&results[0].document);

        // Analyze again and ensure the analysis result id is unchanged.
        let first_id = results[0].document.id().clone();
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.id().as_ref(), first_id.as_ref());
        assert_conflict_diagnostic(&results[0].document);
    }

    #[tokio::test]
    async fn it_reanalyzes_a_document_on_change() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let source_path = dir.path().join("foo.wdl");
        fs::write(&source_path, CONFLICTING_SOURCE).expect("failed to create test file");

        // Analyze the file and check the resulting diagnostic.
        let analyzer = Analyzer::default();
        analyzer
            .add_document(path_to_uri(&source_path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_conflict_diagnostic(&results[0].document);

        // Rewrite the file so the workflow name no longer conflicts.
        fs::write(&source_path, FIXED_SOURCE).expect("failed to create test file");

        let uri = path_to_uri(&source_path).expect("should convert to URI");
        analyzer.notify_change(uri.clone(), false).unwrap();

        // Reanalysis should produce a new result id and no diagnostics.
        let previous_id = results[0].document.id().clone();
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() != previous_id.as_ref());
        assert_eq!(results[0].document.diagnostics().count(), 0);

        // Analyzing the same document again keeps the result id stable.
        let previous_id = results[0].document.id().clone();
        let results = analyzer.analyze_document((), uri).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() == previous_id.as_ref());
        assert_eq!(results[0].document.diagnostics().count(), 0);
    }

    #[tokio::test]
    async fn it_reanalyzes_a_document_on_incremental_change() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let source_path = dir.path().join("foo.wdl");
        fs::write(&source_path, CONFLICTING_SOURCE).expect("failed to create test file");

        // Analyze the file and check the resulting diagnostic.
        let analyzer = Analyzer::default();
        analyzer
            .add_document(path_to_uri(&source_path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_conflict_diagnostic(&results[0].document);

        // Apply an incremental edit that renames the workflow, resolving the
        // conflict without rewriting the file on disk.
        let uri = path_to_uri(&source_path).expect("should convert to URI");
        analyzer
            .notify_incremental_change(
                uri.clone(),
                IncrementalChange {
                    version: 2,
                    start: None,
                    edits: vec![SourceEdit {
                        range: SourcePosition::new(6, 9)..SourcePosition::new(6, 13),
                        encoding: SourcePositionEncoding::UTF8,
                        text: "something_else".to_string(),
                    }],
                },
            )
            .unwrap();

        // Reanalysis should produce a new result id and no diagnostics.
        let previous_id = results[0].document.id().clone();
        let results = analyzer.analyze_document((), uri).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() != previous_id.as_ref());
        assert_eq!(results[0].document.diagnostics().count(), 0);
    }

    #[tokio::test]
    async fn it_removes_documents() {
        // Create three conflict-free documents in the same directory.
        let dir = TempDir::new().expect("failed to create temporary directory");
        for name in ["foo.wdl", "bar.wdl", "baz.wdl"] {
            fs::write(
                dir.path().join(name),
                r#"version 1.1
workflow test {
}
"#,
            )
            .expect("failed to create test file");
        }

        // Add all three documents to the analyzer by directory.
        let analyzer = Analyzer::default();
        analyzer
            .add_directory(dir.path())
            .await
            .expect("should add documents");

        // All three documents should analyze cleanly.
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 3);
        for result in &results {
            assert!(result.document.diagnostics().next().is_none());
        }

        // Analyzing the documents again should still yield three results.
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 3);

        // Removing the directory removes every document under it.
        analyzer
            .remove_documents(vec![
                path_to_uri(dir.path()).expect("should convert to URI"),
            ])
            .await
            .unwrap();
        let results = analyzer.analyze(()).await.unwrap();
        assert!(results.is_empty());
    }
}