wdl_analysis/analyzer.rs

//! Implementation of the analyzer.

use std::ffi::OsStr;
use std::fmt;
use std::future::Future;
use std::mem::ManuallyDrop;
use std::ops::Range;
use std::path::Path;
use std::path::absolute;
use std::sync::Arc;
use std::thread::JoinHandle;

use anyhow::Context;
use anyhow::Error;
use anyhow::Result;
use anyhow::anyhow;
use anyhow::bail;
use ignore::WalkBuilder;
use indexmap::IndexSet;
use line_index::LineCol;
use line_index::LineIndex;
use line_index::WideEncoding;
use line_index::WideLineCol;
use lsp_types::CompletionResponse;
use lsp_types::DocumentSymbolResponse;
use lsp_types::GotoDefinitionResponse;
use lsp_types::Hover;
use lsp_types::Location;
use lsp_types::SemanticTokensResult;
use lsp_types::SignatureHelp;
use lsp_types::SymbolInformation;
use lsp_types::WorkspaceEdit;
use path_clean::PathClean;
use tokio::runtime::Handle;
use tokio::sync::mpsc;
use tokio::sync::oneshot;
use url::Url;

use crate::config::Config;
use crate::document::Document;
use crate::graph::DocumentGraphNode;
use crate::graph::ParseState;
use crate::queue::AddRequest;
use crate::queue::AnalysisQueue;
use crate::queue::AnalyzeRequest;
use crate::queue::CompletionRequest;
use crate::queue::DocumentSymbolRequest;
use crate::queue::FindAllReferencesRequest;
use crate::queue::FormatRequest;
use crate::queue::GotoDefinitionRequest;
use crate::queue::HoverRequest;
use crate::queue::NotifyChangeRequest;
use crate::queue::NotifyIncrementalChangeRequest;
use crate::queue::RemoveRequest;
use crate::queue::RenameRequest;
use crate::queue::Request;
use crate::queue::SemanticTokenRequest;
use crate::queue::SignatureHelpRequest;
use crate::queue::WorkspaceSymbolRequest;
use crate::rayon::RayonHandle;

/// Represents the kind of analysis progress being reported.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ProgressKind {
    /// The progress is for parsing documents.
    Parsing,
    /// The progress is for analyzing documents.
    Analyzing,
}

impl fmt::Display for ProgressKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Parsing => write!(f, "parsing"),
            Self::Analyzing => write!(f, "analyzing"),
        }
    }
}

/// Converts a local file path to a file-schemed URI.
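///
/// A minimal usage sketch (the path is illustrative, and the doctest import
/// path assumes this function is re-exported at the crate root):
///
/// ```no_run
/// # use wdl_analysis::path_to_uri;
/// // Relative paths are made absolute and cleaned before conversion.
/// let uri = path_to_uri("workflows/main.wdl").expect("should convert to URI");
/// assert_eq!(uri.scheme(), "file");
/// ```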
pub fn path_to_uri(path: impl AsRef<Path>) -> Option<Url> {
    Url::from_file_path(absolute(path).ok()?.clean()).ok()
}

/// Represents the result of an analysis.
///
/// Analysis results are cheap to clone.
#[derive(Debug, Clone)]
pub struct AnalysisResult {
    /// The error that occurred when attempting to parse the file (e.g. the file
    /// could not be opened).
    error: Option<Arc<Error>>,
    /// The monotonic version of the document that was parsed.
    ///
    /// This value comes from incremental changes to the file.
    ///
    /// If `None`, the parsed version had no incremental changes.
    version: Option<i32>,
    /// The lines indexed for the parsed file.
    lines: Option<Arc<LineIndex>>,
    /// The analyzed document.
    document: Document,
}

impl AnalysisResult {
    /// Constructs a new analysis result for the given graph node.
    pub(crate) fn new(node: &DocumentGraphNode) -> Self {
        if let Some(error) = node.analysis_error() {
            return Self {
                error: Some(error.clone()),
                version: node.parse_state().version(),
                lines: node.parse_state().lines().cloned(),
                document: Document::default_from_uri(node.uri().clone()),
            };
        }

        let (error, version, lines) = match node.parse_state() {
            ParseState::NotParsed => unreachable!("document should have been parsed"),
            ParseState::Error(e) => (Some(e), None, None),
            ParseState::Parsed { version, lines, .. } => (None, *version, Some(lines)),
        };

        Self {
            error: error.cloned(),
            version,
            lines: lines.cloned(),
            document: node
                .document()
                .expect("analysis should have completed")
                .clone(),
        }
    }

    /// Gets the error that occurred when attempting to parse the document.
    ///
    /// An example error would be if the file could not be opened.
    ///
    /// Returns `None` if the document was parsed successfully.
    pub fn error(&self) -> Option<&Arc<Error>> {
        self.error.as_ref()
    }

    /// Gets the incremental version of the parsed document.
    ///
    /// Returns `None` if there was an error parsing the document or if the
    /// parsed document had no incremental changes.
    pub fn version(&self) -> Option<i32> {
        self.version
    }

    /// Gets the line index of the parsed document.
    ///
    /// Returns `None` if there was an error parsing the document.
    pub fn lines(&self) -> Option<&Arc<LineIndex>> {
        self.lines.as_ref()
    }

    /// Gets the analyzed document.
    pub fn document(&self) -> &Document {
        &self.document
    }
}

/// Represents a position in a document's source.
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Default)]
pub struct SourcePosition {
    /// Line position in a document (zero-based).
    // NOTE: this field must come before `character` to maintain a correct sort order.
    pub line: u32,
    /// Character offset on a line in a document (zero-based). The meaning of
    /// this offset is determined by the position encoding.
    pub character: u32,
}

impl SourcePosition {
    /// Constructs a new source position from a line and character offset.
    pub fn new(line: u32, character: u32) -> Self {
        Self { line, character }
    }
}

/// Represents the encoding of a source position.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum SourcePositionEncoding {
    /// The position is UTF-8 encoded.
    ///
    /// A position's character is the UTF-8 offset from the start of the line.
    UTF8,
    /// The position is UTF-16 encoded.
    ///
    /// A position's character is the UTF-16 offset from the start of the line.
    UTF16,
}

/// Represents an edit to a document's source.
#[derive(Debug, Clone)]
pub struct SourceEdit {
    /// The range of the edit.
    ///
    /// Note that invalid ranges will cause the edit to be ignored.
    range: Range<SourcePosition>,
    /// The encoding of the edit positions.
    encoding: SourcePositionEncoding,
    /// The replacement text.
    text: String,
}

impl SourceEdit {
    /// Creates a new source edit for the given range and replacement text.
    pub fn new(
        range: Range<SourcePosition>,
        encoding: SourcePositionEncoding,
        text: impl Into<String>,
    ) -> Self {
        Self {
            range,
            encoding,
            text: text.into(),
        }
    }

    /// Gets the range of the edit.
    pub(crate) fn range(&self) -> Range<SourcePosition> {
        self.range.start..self.range.end
    }

    /// Applies the edit to the given string if it's in range.
    pub(crate) fn apply(&self, source: &mut String, lines: &LineIndex) -> Result<()> {
        let (start, end) = match self.encoding {
            SourcePositionEncoding::UTF8 => (
                LineCol {
                    line: self.range.start.line,
                    col: self.range.start.character,
                },
                LineCol {
                    line: self.range.end.line,
                    col: self.range.end.character,
                },
            ),
            SourcePositionEncoding::UTF16 => (
                lines
                    .to_utf8(
                        WideEncoding::Utf16,
                        WideLineCol {
                            line: self.range.start.line,
                            col: self.range.start.character,
                        },
                    )
                    .context("invalid edit start position")?,
                lines
                    .to_utf8(
                        WideEncoding::Utf16,
                        WideLineCol {
                            line: self.range.end.line,
                            col: self.range.end.character,
                        },
                    )
                    .context("invalid edit end position")?,
            ),
        };

        let range: Range<usize> = lines
            .offset(start)
            .context("invalid edit start position")?
            .into()
            ..lines
                .offset(end)
                .context("invalid edit end position")?
                .into();

        if !source.is_char_boundary(range.start) {
            bail!("edit start position is not at a character boundary");
        }

        if !source.is_char_boundary(range.end) {
            bail!("edit end position is not at a character boundary");
        }

        source.replace_range(range, &self.text);
        Ok(())
    }
}

/// Represents an incremental change to a document.
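///
/// A minimal construction sketch (positions, version, and import paths are
/// illustrative; the edit mirrors the one used in this module's tests):
///
/// ```no_run
/// # use wdl_analysis::{IncrementalChange, SourceEdit, SourcePosition, SourcePositionEncoding};
/// let change = IncrementalChange {
///     version: 2,
///     start: None,
///     edits: vec![SourceEdit::new(
///         // Replace columns 9..13 on line 6 (both zero-based).
///         SourcePosition::new(6, 9)..SourcePosition::new(6, 13),
///         SourcePositionEncoding::UTF8,
///         "something_else",
///     )],
/// };
/// ```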
#[derive(Clone, Debug)]
pub struct IncrementalChange {
    /// The monotonic version of the document.
    ///
    /// This is expected to increase for each incremental change.
    pub version: i32,
    /// The source to start from for applying edits.
    ///
    /// If this is `Some`, a full reparse will occur after applying edits to
    /// this string.
    ///
    /// If this is `None`, edits will be applied to the existing CST and an
    /// attempt will be made to incrementally parse the file.
    pub start: Option<String>,
    /// The source edits to apply.
    pub edits: Vec<SourceEdit>,
}

/// Represents a Workflow Description Language (WDL) document analyzer.
///
/// By default, analysis parses documents, performs validation checks, resolves
/// imports, and performs type checking.
///
/// Each analysis operation is processed in order of request; however, the
/// individual parsing, resolution, and analysis of documents is performed
/// across a thread pool.
///
/// Note that dropping the analyzer is a blocking operation as it will wait for
/// the queue thread to join.
///
/// The type parameter is the context type passed to the progress callback.
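///
/// # Examples
///
/// A minimal sketch of driving an analysis (assumes a Tokio runtime and
/// crate-root re-exports; the file name is illustrative):
///
/// ```no_run
/// # use wdl_analysis::{Analyzer, path_to_uri};
/// # async fn example() -> anyhow::Result<()> {
/// let analyzer = Analyzer::default();
/// analyzer
///     .add_document(path_to_uri("foo.wdl").expect("should convert to URI"))
///     .await?;
/// let results = analyzer.analyze(()).await?;
/// for result in &results {
///     println!("{} diagnostics", result.document().diagnostics().count());
/// }
/// # Ok(())
/// # }
/// ```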
#[derive(Debug)]
pub struct Analyzer<Context> {
    /// The sender for sending analysis requests to the queue.
    sender: ManuallyDrop<mpsc::UnboundedSender<Request<Context>>>,
    /// The join handle for the queue task.
    handle: Option<JoinHandle<()>>,
    /// The config to use during analysis.
    config: Config,
}

impl<Context> Analyzer<Context>
where
    Context: Send + Clone + 'static,
{
    /// Constructs a new analyzer with the given config.
    ///
    /// The provided progress callback will be invoked during analysis.
    ///
    /// The analyzer will use a default validator for validation.
    ///
    /// The analyzer must be constructed from the context of a Tokio runtime.
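    ///
    /// A minimal sketch of supplying a progress callback (import paths and
    /// argument names are illustrative; the callback receives the context,
    /// the progress kind, and two counters):
    ///
    /// ```no_run
    /// # use wdl_analysis::{Analyzer, Config, ProgressKind};
    /// # async fn example() {
    /// let analyzer: Analyzer<()> = Analyzer::new(
    ///     Config::default(),
    ///     |_context, kind, completed, total| async move {
    ///         eprintln!("{kind}: {completed}/{total}");
    ///     },
    /// );
    /// # }
    /// ```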
    pub fn new<Progress, Return>(config: Config, progress: Progress) -> Self
    where
        Progress: Fn(Context, ProgressKind, usize, usize) -> Return + Send + 'static,
        Return: Future<Output = ()>,
    {
        Self::new_with_validator(config, progress, crate::Validator::default)
    }

    /// Constructs a new analyzer with the given config and validator function.
    ///
    /// The provided progress callback will be invoked during analysis.
    ///
    /// This validator function will be called once per worker thread to
    /// initialize a thread-local validator.
    ///
    /// The analyzer must be constructed from the context of a Tokio runtime.
    pub fn new_with_validator<Progress, Return, Validator>(
        config: Config,
        progress: Progress,
        validator: Validator,
    ) -> Self
    where
        Progress: Fn(Context, ProgressKind, usize, usize) -> Return + Send + 'static,
        Return: Future<Output = ()>,
        Validator: Fn() -> crate::Validator + Send + Sync + 'static,
    {
        let (tx, rx) = mpsc::unbounded_channel();
        let tokio = Handle::current();
        let inner_config = config.clone();
        let handle = std::thread::spawn(move || {
            let queue = AnalysisQueue::new(inner_config, tokio, progress, validator);
            queue.run(rx);
        });

        Self {
            sender: ManuallyDrop::new(tx),
            handle: Some(handle),
            config,
        }
    }

    /// Adds a document to the analyzer. The document may be a local file or a
    /// URL.
    ///
    /// Returns an error if the document could not be added.
    pub async fn add_document(&self, uri: Url) -> Result<()> {
        let mut documents = IndexSet::new();
        documents.insert(uri);

        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Add(AddRequest {
                documents,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!("failed to send request to analysis queue because the channel has closed")
            })?;

        rx.await.map_err(|_| {
            anyhow!("failed to receive response from analysis queue because the channel has closed")
        })?;

        Ok(())
    }

    /// Adds a directory to the analyzer, recursively searching it for WDL
    /// documents.
    ///
    /// Returns an error if there was a problem discovering documents for the
    /// specified path.
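    ///
    /// A minimal sketch (the directory path is illustrative):
    ///
    /// ```no_run
    /// # use wdl_analysis::Analyzer;
    /// # async fn example(analyzer: &Analyzer<()>) -> anyhow::Result<()> {
    /// analyzer.add_directory("workflows/").await?;
    /// # Ok(())
    /// # }
    /// ```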
    pub async fn add_directory(&self, path: impl AsRef<Path>) -> Result<()> {
        let path = path.as_ref().to_path_buf();
        let config = self.config.clone();
        // Start by searching for documents
        let documents = RayonHandle::spawn(move || -> Result<IndexSet<Url>> {
            let mut documents = IndexSet::new();

            let metadata = path.metadata().with_context(|| {
                format!(
                    "failed to read metadata for `{path}`",
                    path = path.display()
                )
            })?;

            if metadata.is_file() {
                bail!("`{path}` is a file, not a directory", path = path.display());
            }

            let mut walker = WalkBuilder::new(&path);
            if let Some(ignore_filename) = config.ignore_filename() {
                walker.add_custom_ignore_filename(ignore_filename);
            }
            let walker = walker
                .standard_filters(false)
                .parents(true)
                .follow_links(true)
                .build();

            for result in walker {
                let entry = result.with_context(|| {
                    format!("failed to read directory `{path}`", path = path.display())
                })?;

                // Skip entries without a file type
                let Some(file_type) = entry.file_type() else {
                    continue;
                };
                // Skip non-files
                if !file_type.is_file() {
                    continue;
                }
                // Skip files without a `.wdl` extension
                if entry.path().extension() != Some(OsStr::new("wdl")) {
                    continue;
                }

                documents.insert(path_to_uri(entry.path()).with_context(|| {
                    format!(
                        "failed to convert path `{path}` to a URI",
                        path = entry.path().display()
                    )
                })?);
            }

            Ok(documents)
        })
        .await?;

        if documents.is_empty() {
            return Ok(());
        }

        // Send the add request to the queue
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Add(AddRequest {
                documents,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!("failed to send request to analysis queue because the channel has closed")
            })?;

        rx.await.map_err(|_| {
            anyhow!("failed to receive response from analysis queue because the channel has closed")
        })?;

        Ok(())
    }

    /// Removes the specified documents from the analyzer.
    ///
    /// If a specified URI is a prefix (i.e. a directory) of documents known to
    /// the analyzer, those documents will be removed.
    ///
    /// Documents are only removed when they are no longer referenced by
    /// importing documents.
    pub async fn remove_documents(&self, documents: Vec<Url>) -> Result<()> {
        // Send the remove request to the queue
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Remove(RemoveRequest {
                documents,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!("failed to send request to analysis queue because the channel has closed")
            })?;

        rx.await.map_err(|_| {
            anyhow!("failed to receive response from analysis queue because the channel has closed")
        })?;

        Ok(())
    }

    /// Notifies the analyzer that a document has an incremental change.
    ///
    /// Changes to documents that aren't known to the analyzer are ignored.
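    ///
    /// A minimal sketch (see [`IncrementalChange`] for how to construct the
    /// change; the URI is assumed to already be known to the analyzer):
    ///
    /// ```no_run
    /// # use wdl_analysis::{Analyzer, IncrementalChange};
    /// # fn example(analyzer: &Analyzer<()>, uri: url::Url, change: IncrementalChange) -> anyhow::Result<()> {
    /// analyzer.notify_incremental_change(uri, change)?;
    /// # Ok(())
    /// # }
    /// ```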
    pub fn notify_incremental_change(
        &self,
        document: Url,
        change: IncrementalChange,
    ) -> Result<()> {
        self.sender
            .send(Request::NotifyIncrementalChange(
                NotifyIncrementalChangeRequest { document, change },
            ))
            .map_err(|_| {
                anyhow!("failed to send request to analysis queue because the channel has closed")
            })
    }

    /// Notifies the analyzer that a document has fully changed and should be
    /// fetched again.
    ///
    /// Changes to documents that aren't known to the analyzer are ignored.
    ///
    /// If `discard_pending` is true, then any pending incremental changes are
    /// discarded; otherwise, the full change is ignored if there are pending
    /// incremental changes.
    pub fn notify_change(&self, document: Url, discard_pending: bool) -> Result<()> {
        self.sender
            .send(Request::NotifyChange(NotifyChangeRequest {
                document,
                discard_pending,
            }))
            .map_err(|_| {
                anyhow!("failed to send request to analysis queue because the channel has closed")
            })
    }

    /// Analyzes a specific document.
    ///
    /// The provided context is passed to the progress callback.
    ///
    /// If the document is up-to-date and was previously analyzed, the current
    /// analysis result is returned.
    ///
    /// Returns an analysis result for each document that was analyzed.
    pub async fn analyze_document(
        &self,
        context: Context,
        document: Url,
    ) -> Result<Vec<AnalysisResult>> {
        // Send the analyze request to the queue
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Analyze(AnalyzeRequest {
                document: Some(document),
                context,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!("failed to send request to analysis queue because the channel has closed")
            })?;

        rx.await.map_err(|_| {
            anyhow!("failed to receive response from analysis queue because the channel has closed")
        })?
    }

    /// Performs analysis of all documents.
    ///
    /// The provided context is passed to the progress callback.
    ///
    /// If a document is up-to-date and was previously analyzed, the current
    /// analysis result is returned.
    ///
    /// Returns an analysis result for each document that was analyzed.
    pub async fn analyze(&self, context: Context) -> Result<Vec<AnalysisResult>> {
        // Send the analyze request to the queue
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Analyze(AnalyzeRequest {
                document: None, // analyze all documents
                context,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!("failed to send request to analysis queue because the channel has closed")
            })?;

        rx.await.map_err(|_| {
            anyhow!("failed to receive response from analysis queue because the channel has closed")
        })?
    }

    /// Formats a document.
    pub async fn format_document(&self, document: Url) -> Result<Option<(u32, u32, String)>> {
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Format(FormatRequest {
                document,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!("failed to send format request to the queue because the channel has closed")
            })?;

        rx.await.map_err(|_| {
            anyhow!(
                "failed to receive format response from the queue because the channel has closed"
            )
        })
    }

    /// Performs a "goto definition" for a symbol at the given position.
    pub async fn goto_definition(
        &self,
        document: Url,
        position: SourcePosition,
        encoding: SourcePositionEncoding,
    ) -> Result<Option<GotoDefinitionResponse>> {
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::GotoDefinition(GotoDefinitionRequest {
                document,
                position,
                encoding,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!(
                    "failed to send goto definition request to analysis queue because the channel \
                     has closed"
                )
            })?;

        rx.await.map_err(|_| {
            anyhow!(
                "failed to receive goto definition response from analysis queue because the \
                 channel has closed"
            )
        })
    }

    /// Performs a "find all references" for a symbol across all documents.
    pub async fn find_all_references(
        &self,
        document: Url,
        position: SourcePosition,
        encoding: SourcePositionEncoding,
        include_declaration: bool,
    ) -> Result<Vec<Location>> {
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::FindAllReferences(FindAllReferencesRequest {
                document,
                position,
                encoding,
                include_declaration,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!(
                    "failed to send find all references request to analysis queue because the \
                     channel has closed"
                )
            })?;

        rx.await.map_err(|_| {
            anyhow!(
                "failed to receive find all references response from analysis queue because the \
                 channel has closed"
            )
        })
    }

    /// Performs auto-completion for a symbol at the given position.
    pub async fn completion(
        &self,
        context: Context,
        document: Url,
        position: SourcePosition,
        encoding: SourcePositionEncoding,
    ) -> Result<Option<CompletionResponse>> {
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Completion(CompletionRequest {
                document,
                position,
                encoding,
                context,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!(
                    "failed to send completion request to analysis queue because the channel has \
                     closed"
                )
            })?;

        rx.await.map_err(|_| {
            anyhow!(
                "failed to receive completion response from analysis queue because the channel \
                 has closed"
            )
        })
    }

    /// Performs a "hover" for a symbol at a given position in a document.
    pub async fn hover(
        &self,
        document: Url,
        position: SourcePosition,
        encoding: SourcePositionEncoding,
    ) -> Result<Option<Hover>> {
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Hover(HoverRequest {
                document,
                position,
                encoding,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!(
                    "failed to send hover request to analysis queue because the channel has closed"
                )
            })?;

        rx.await.map_err(|_| {
            anyhow!(
                "failed to receive hover response from analysis queue because the channel has \
                 closed"
            )
        })
    }

    /// Renames a symbol at a given position across the workspace.
    pub async fn rename(
        &self,
        document: Url,
        position: SourcePosition,
        encoding: SourcePositionEncoding,
        new_name: String,
    ) -> Result<Option<WorkspaceEdit>> {
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Rename(RenameRequest {
                document,
                position,
                encoding,
                new_name,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!(
                    "failed to send rename request to analysis queue because the channel has \
                     closed"
                )
            })?;

        rx.await.map_err(|_| {
            anyhow!(
                "failed to receive rename response from analysis queue because the channel has \
                 closed"
            )
        })
    }

    /// Gets semantic tokens for a document.
    pub async fn semantic_tokens(&self, document: Url) -> Result<Option<SemanticTokensResult>> {
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::SemanticTokens(SemanticTokenRequest {
                document,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!(
                    "failed to send semantic tokens request to analysis queue because the channel \
                     has closed"
                )
            })?;

        rx.await.map_err(|_| {
            anyhow!(
                "failed to receive semantic tokens response from analysis queue because the \
                 channel has closed"
            )
        })
    }

    /// Gets document symbols for a document.
    pub async fn document_symbol(&self, document: Url) -> Result<Option<DocumentSymbolResponse>> {
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::DocumentSymbol(DocumentSymbolRequest {
                document,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!(
                    "failed to send document symbol request to analysis queue because the channel \
                     has closed"
                )
            })?;

        rx.await.map_err(|_| {
            anyhow!(
                "failed to receive document symbol response from analysis queue because the \
                 channel has closed"
            )
        })
    }

    /// Gets symbols matching a query across the workspace.
    pub async fn workspace_symbol(&self, query: String) -> Result<Option<Vec<SymbolInformation>>> {
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::WorkspaceSymbol(WorkspaceSymbolRequest {
                query,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!(
                    "failed to send workspace symbol request to analysis queue because the \
                     channel has closed"
                )
            })?;

        rx.await.map_err(|_| {
            anyhow!(
                "failed to receive workspace symbol response from analysis queue because the \
                 channel has closed"
            )
        })
    }

    /// Gets signature help for a function call at a given position.
    pub async fn signature_help(
        &self,
        document: Url,
        position: SourcePosition,
        encoding: SourcePositionEncoding,
    ) -> Result<Option<SignatureHelp>> {
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::SignatureHelp(SignatureHelpRequest {
                document,
                position,
                encoding,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!(
                    "failed to send signature help request to analysis queue because the channel \
                     has closed"
                )
            })?;

        rx.await.map_err(|_| {
            anyhow!(
                "failed to receive signature help response from analysis queue because the \
                 channel has closed"
            )
        })
    }
}

impl Default for Analyzer<()> {
    fn default() -> Self {
        Self::new(Default::default(), |_, _, _, _| async {})
    }
}

impl<C> Drop for Analyzer<C> {
    fn drop(&mut self) {
        // Drop the sender first so the queue thread sees the channel close and
        // exits, allowing the join below to complete.
        unsafe { ManuallyDrop::drop(&mut self.sender) };
        if let Some(handle) = self.handle.take() {
            handle.join().unwrap();
        }
    }
}

/// Constant that asserts `Analyzer` is `Send + Sync`; if not, it fails to
/// compile.
const _: () = {
    /// Helper that will fail to compile if T is not `Send + Sync`.
    const fn _assert<T: Send + Sync>() {}
    _assert::<Analyzer<()>>();
};

#[cfg(test)]
mod test {
    use std::fs;

    use tempfile::TempDir;
    use wdl_ast::Severity;

    use super::*;

    #[tokio::test]
    async fn it_returns_empty_results() {
        let analyzer = Analyzer::default();
        let results = analyzer.analyze(()).await.unwrap();
        assert!(results.is_empty());
    }

    #[tokio::test]
    async fn it_analyzes_a_document() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let path = dir.path().join("foo.wdl");
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#,
        )
        .expect("failed to create test file");

        // Analyze the file and check the resulting diagnostic
        let analyzer = Analyzer::default();
        analyzer
            .add_document(path_to_uri(&path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.diagnostics().count(), 1);
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().rule(),
            None
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().message(),
            "conflicting workflow name `test`"
        );

        // Analyze again and ensure the analysis result id is unchanged
        let id = results[0].document.id().clone();
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.id().as_ref(), id.as_ref());
        assert_eq!(results[0].document.diagnostics().count(), 1);
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().rule(),
            None
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().message(),
            "conflicting workflow name `test`"
        );
    }

    #[tokio::test]
    async fn it_reanalyzes_a_document_on_change() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let path = dir.path().join("foo.wdl");
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#,
        )
        .expect("failed to create test file");

        // Analyze the file and check the resulting diagnostic
        let analyzer = Analyzer::default();
        analyzer
            .add_document(path_to_uri(&path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.diagnostics().count(), 1);
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().rule(),
            None
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().message(),
            "conflicting workflow name `test`"
        );

        // Rewrite the file to correct the issue
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow something_else {
}
"#,
        )
        .expect("failed to create test file");

        let uri = path_to_uri(&path).expect("should convert to URI");
        analyzer.notify_change(uri.clone(), false).unwrap();

        // Analyze again and ensure the analysis result id is changed and the issue
        // fixed
        let id = results[0].document.id().clone();
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() != id.as_ref());
        assert_eq!(results[0].document.diagnostics().count(), 0);

        // Analyze again and ensure the analysis result id is unchanged
        let id = results[0].document.id().clone();
        let results = analyzer.analyze_document((), uri).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() == id.as_ref());
        assert_eq!(results[0].document.diagnostics().count(), 0);
    }

    #[tokio::test]
    async fn it_reanalyzes_a_document_on_incremental_change() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let path = dir.path().join("foo.wdl");
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#,
        )
        .expect("failed to create test file");

        // Analyze the file and check the resulting diagnostic
        let analyzer = Analyzer::default();
        analyzer
            .add_document(path_to_uri(&path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.diagnostics().count(), 1);
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().rule(),
            None
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().message(),
            "conflicting workflow name `test`"
        );

        // Edit the file to correct the issue
        let uri = path_to_uri(&path).expect("should convert to URI");
        analyzer
            .notify_incremental_change(
                uri.clone(),
                IncrementalChange {
                    version: 2,
                    start: None,
                    edits: vec![SourceEdit {
                        range: SourcePosition::new(6, 9)..SourcePosition::new(6, 13),
                        encoding: SourcePositionEncoding::UTF8,
                        text: "something_else".to_string(),
                    }],
                },
            )
            .unwrap();

        // Analyze again and ensure the analysis result id is changed and the issue was
        // fixed
        let id = results[0].document.id().clone();
        let results = analyzer.analyze_document((), uri).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() != id.as_ref());
        assert_eq!(results[0].document.diagnostics().count(), 0);
    }

    #[tokio::test]
    async fn it_removes_documents() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let foo = dir.path().join("foo.wdl");
        fs::write(
            &foo,
            r#"version 1.1
workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let bar = dir.path().join("bar.wdl");
        fs::write(
            &bar,
            r#"version 1.1
workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let baz = dir.path().join("baz.wdl");
        fs::write(
            &baz,
            r#"version 1.1
workflow test {
}
"#,
        )
        .expect("failed to create test file");

        // Add all three documents to the analyzer
        let analyzer = Analyzer::default();
        analyzer
            .add_directory(dir.path())
            .await
            .expect("should add documents");

        // Analyze the documents
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 3);
        assert!(results[0].document.diagnostics().next().is_none());
        assert!(results[1].document.diagnostics().next().is_none());
        assert!(results[2].document.diagnostics().next().is_none());

        // Analyze the documents again
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 3);

        // Remove the documents by directory
        analyzer
            .remove_documents(vec![
                path_to_uri(dir.path()).expect("should convert to URI"),
            ])
            .await
            .unwrap();
        let results = analyzer.analyze(()).await.unwrap();
        assert!(results.is_empty());
    }
}