1use std::ffi::OsStr;
4use std::fmt;
5use std::future::Future;
6use std::mem::ManuallyDrop;
7use std::ops::Range;
8use std::path::Path;
9use std::path::absolute;
10use std::sync::Arc;
11use std::thread::JoinHandle;
12
13use anyhow::Context;
14use anyhow::Error;
15use anyhow::Result;
16use anyhow::anyhow;
17use anyhow::bail;
18use ignore::WalkBuilder;
19use indexmap::IndexSet;
20use line_index::LineCol;
21use line_index::LineIndex;
22use line_index::WideEncoding;
23use line_index::WideLineCol;
24use lsp_types::CompletionResponse;
25use lsp_types::DocumentSymbolResponse;
26use lsp_types::GotoDefinitionResponse;
27use lsp_types::Hover;
28use lsp_types::Location;
29use lsp_types::SemanticTokensResult;
30use lsp_types::SignatureHelp;
31use lsp_types::SymbolInformation;
32use lsp_types::WorkspaceEdit;
33use path_clean::PathClean;
34use tokio::runtime::Handle;
35use tokio::sync::mpsc;
36use tokio::sync::oneshot;
37use url::Url;
38
39use crate::config::Config;
40use crate::document::Document;
41use crate::graph::DocumentGraphNode;
42use crate::graph::ParseState;
43use crate::queue::AddRequest;
44use crate::queue::AnalysisQueue;
45use crate::queue::AnalyzeRequest;
46use crate::queue::CompletionRequest;
47use crate::queue::DocumentSymbolRequest;
48use crate::queue::FindAllReferencesRequest;
49use crate::queue::FormatRequest;
50use crate::queue::GotoDefinitionRequest;
51use crate::queue::HoverRequest;
52use crate::queue::NotifyChangeRequest;
53use crate::queue::NotifyIncrementalChangeRequest;
54use crate::queue::RemoveRequest;
55use crate::queue::RenameRequest;
56use crate::queue::Request;
57use crate::queue::SemanticTokenRequest;
58use crate::queue::SignatureHelpRequest;
59use crate::queue::WorkspaceSymbolRequest;
60use crate::rayon::RayonHandle;
61
/// Represents the kind of progress reported by the analyzer.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ProgressKind {
    /// Documents are being parsed.
    Parsing,
    /// Parsed documents are being analyzed.
    Analyzing,
}
70
71impl fmt::Display for ProgressKind {
72 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
73 match self {
74 Self::Parsing => write!(f, "parsing"),
75 Self::Analyzing => write!(f, "analyzing"),
76 }
77 }
78}
79
80pub fn path_to_uri(path: impl AsRef<Path>) -> Option<Url> {
82 Url::from_file_path(absolute(path).ok()?.clean()).ok()
83}
84
/// Represents the result of analyzing a single document.
#[derive(Debug, Clone)]
pub struct AnalysisResult {
    /// The error that occurred during parsing or analysis, if any.
    error: Option<Arc<Error>>,
    /// The version of the document that was analyzed, if known.
    version: Option<i32>,
    /// The line index of the document's source, if it was parsed.
    lines: Option<Arc<LineIndex>>,
    /// The analyzed document.
    document: Document,
}
104
impl AnalysisResult {
    /// Constructs a new analysis result from the given document graph node.
    pub(crate) fn new(node: &DocumentGraphNode) -> Self {
        // An analysis error takes precedence: report it along with whatever
        // parse information is available, substituting a default document
        // for the node's URI.
        if let Some(error) = node.analysis_error() {
            return Self {
                error: Some(error.clone()),
                version: node.parse_state().version(),
                lines: node.parse_state().lines().cloned(),
                document: Document::default_from_uri(node.uri().clone()),
            };
        }

        // Otherwise, derive the error/version/lines from the parse state.
        let (error, version, lines) = match node.parse_state() {
            ParseState::NotParsed => unreachable!("document should have been parsed"),
            ParseState::Error(e) => (Some(e), None, None),
            ParseState::Parsed { version, lines, .. } => (None, *version, Some(lines)),
        };

        Self {
            error: error.cloned(),
            version,
            lines: lines.cloned(),
            document: node
                .document()
                .expect("analysis should have completed")
                .clone(),
        }
    }

    /// Gets the error that occurred during parsing or analysis, if any.
    pub fn error(&self) -> Option<&Arc<Error>> {
        self.error.as_ref()
    }

    /// Gets the version of the document that was analyzed, if known.
    pub fn version(&self) -> Option<i32> {
        self.version
    }

    /// Gets the line index of the analyzed document, if it was parsed.
    pub fn lines(&self) -> Option<&Arc<LineIndex>> {
        self.lines.as_ref()
    }

    /// Gets the analyzed document.
    pub fn document(&self) -> &Document {
        &self.document
    }
}
163
/// Represents a position within a source document.
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Default)]
pub struct SourcePosition {
    /// The line number of the position (used directly as `LineCol::line`
    /// when applying edits, so zero-based per the `line-index` crate).
    pub line: u32,
    /// The character offset within the line; its unit depends on the
    /// accompanying `SourcePositionEncoding`.
    pub character: u32,
}
174
175impl SourcePosition {
176 pub fn new(line: u32, character: u32) -> Self {
178 Self { line, character }
179 }
180}
181
/// Represents the encoding of a source position's character offset.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum SourcePositionEncoding {
    /// Character offsets are UTF-8 code units (bytes).
    UTF8,
    /// Character offsets are UTF-16 code units.
    UTF16,
}
194
/// Represents an edit to a document's source text.
#[derive(Debug, Clone)]
pub struct SourceEdit {
    /// The position range of the text being replaced.
    range: Range<SourcePosition>,
    /// The encoding of the positions in `range`.
    encoding: SourcePositionEncoding,
    /// The replacement text.
    text: String,
}
207
impl SourceEdit {
    /// Creates a new source edit for the given position range, position
    /// encoding, and replacement text.
    pub fn new(
        range: Range<SourcePosition>,
        encoding: SourcePositionEncoding,
        text: impl Into<String>,
    ) -> Self {
        Self {
            range,
            encoding,
            text: text.into(),
        }
    }

    /// Gets the position range of the edit.
    pub(crate) fn range(&self) -> Range<SourcePosition> {
        self.range.start..self.range.end
    }

    /// Applies the edit to the given source string using the provided line
    /// index.
    ///
    /// Returns an error if either edit position is invalid for the source
    /// (out of range or not on a character boundary).
    pub(crate) fn apply(&self, source: &mut String, lines: &LineIndex) -> Result<()> {
        // Normalize both endpoints to UTF-8 line/column positions; UTF-16
        // positions are converted through the line index first.
        let (start, end) = match self.encoding {
            SourcePositionEncoding::UTF8 => (
                LineCol {
                    line: self.range.start.line,
                    col: self.range.start.character,
                },
                LineCol {
                    line: self.range.end.line,
                    col: self.range.end.character,
                },
            ),
            SourcePositionEncoding::UTF16 => (
                lines
                    .to_utf8(
                        WideEncoding::Utf16,
                        WideLineCol {
                            line: self.range.start.line,
                            col: self.range.start.character,
                        },
                    )
                    .context("invalid edit start position")?,
                lines
                    .to_utf8(
                        WideEncoding::Utf16,
                        WideLineCol {
                            line: self.range.end.line,
                            col: self.range.end.character,
                        },
                    )
                    .context("invalid edit end position")?,
            ),
        };

        // Map the line/column positions to byte offsets within the source.
        let range: Range<usize> = lines
            .offset(start)
            .context("invalid edit start position")?
            .into()
            ..lines
                .offset(end)
                .context("invalid edit end position")?
                .into();

        // `String::replace_range` panics on non-boundary offsets, so check
        // both ends up front and return an error instead.
        if !source.is_char_boundary(range.start) {
            bail!("edit start position is not at a character boundary");
        }

        if !source.is_char_boundary(range.end) {
            bail!("edit end position is not at a character boundary");
        }

        source.replace_range(range, &self.text);
        Ok(())
    }
}
283
/// Represents an incremental change to a document's source.
#[derive(Clone, Debug)]
pub struct IncrementalChange {
    /// The version of the document after the change is applied.
    pub version: i32,
    /// The full source text to start from, if any.
    ///
    /// NOTE(review): when `None`, the edits presumably apply to the
    /// analyzer's current copy of the document — confirm against the
    /// analysis queue implementation.
    pub start: Option<String>,
    /// The edits to apply, in order.
    pub edits: Vec<SourceEdit>,
}
302
/// A document analyzer.
///
/// The analyzer forwards requests over a channel to an analysis queue that
/// runs on its own thread; `Context` is caller-supplied data passed through
/// to progress callbacks and certain requests.
#[derive(Debug)]
pub struct Analyzer<Context> {
    /// The sender for requests to the analysis queue.
    ///
    /// Wrapped in `ManuallyDrop` so that `Drop` can drop the sender (closing
    /// the channel and stopping the queue) before joining the queue thread.
    sender: ManuallyDrop<mpsc::UnboundedSender<Request<Context>>>,
    /// The join handle of the analysis queue thread; taken in `Drop`.
    handle: Option<JoinHandle<()>>,
    /// The analyzer's configuration.
    config: Config,
}
325
326impl<Context> Analyzer<Context>
327where
328 Context: Send + Clone + 'static,
329{
330 pub fn new<Progress, Return>(config: Config, progress: Progress) -> Self
338 where
339 Progress: Fn(Context, ProgressKind, usize, usize) -> Return + Send + 'static,
340 Return: Future<Output = ()>,
341 {
342 Self::new_with_validator(config, progress, crate::Validator::default)
343 }
344
345 pub fn new_with_validator<Progress, Return, Validator>(
354 config: Config,
355 progress: Progress,
356 validator: Validator,
357 ) -> Self
358 where
359 Progress: Fn(Context, ProgressKind, usize, usize) -> Return + Send + 'static,
360 Return: Future<Output = ()>,
361 Validator: Fn() -> crate::Validator + Send + Sync + 'static,
362 {
363 let (tx, rx) = mpsc::unbounded_channel();
364 let tokio = Handle::current();
365 let inner_config = config.clone();
366 let handle = std::thread::spawn(move || {
367 let queue = AnalysisQueue::new(inner_config, tokio, progress, validator);
368 queue.run(rx);
369 });
370
371 Self {
372 sender: ManuallyDrop::new(tx),
373 handle: Some(handle),
374 config,
375 }
376 }
377
378 pub async fn add_document(&self, uri: Url) -> Result<()> {
382 let mut documents = IndexSet::new();
383 documents.insert(uri);
384
385 let (tx, rx) = oneshot::channel();
386 self.sender
387 .send(Request::Add(AddRequest {
388 documents,
389 completed: tx,
390 }))
391 .map_err(|_| {
392 anyhow!("failed to send request to analysis queue because the channel has closed")
393 })?;
394
395 rx.await.map_err(|_| {
396 anyhow!("failed to receive response from analysis queue because the channel has closed")
397 })?;
398
399 Ok(())
400 }
401
402 pub async fn add_directory(&self, path: impl AsRef<Path>) -> Result<()> {
408 let path = path.as_ref().to_path_buf();
409 let config = self.config.clone();
410 let documents = RayonHandle::spawn(move || -> Result<IndexSet<Url>> {
412 let mut documents = IndexSet::new();
413
414 let metadata = path.metadata().with_context(|| {
415 format!(
416 "failed to read metadata for `{path}`",
417 path = path.display()
418 )
419 })?;
420
421 if metadata.is_file() {
422 bail!("`{path}` is a file, not a directory", path = path.display());
423 }
424
425 let mut walker = WalkBuilder::new(&path);
426 if let Some(ignore_filename) = config.ignore_filename() {
427 walker.add_custom_ignore_filename(ignore_filename);
428 }
429 let walker = walker
430 .standard_filters(false)
431 .parents(true)
432 .follow_links(true)
433 .build();
434
435 for result in walker {
436 let entry = result.with_context(|| {
437 format!("failed to read directory `{path}`", path = path.display())
438 })?;
439
440 let Some(file_type) = entry.file_type() else {
442 continue;
443 };
444 if !file_type.is_file() {
446 continue;
447 }
448 if entry.path().extension() != Some(OsStr::new("wdl")) {
450 continue;
451 }
452
453 documents.insert(path_to_uri(entry.path()).with_context(|| {
454 format!(
455 "failed to convert path `{path}` to a URI",
456 path = entry.path().display()
457 )
458 })?);
459 }
460
461 Ok(documents)
462 })
463 .await?;
464
465 if documents.is_empty() {
466 return Ok(());
467 }
468
469 let (tx, rx) = oneshot::channel();
471 self.sender
472 .send(Request::Add(AddRequest {
473 documents,
474 completed: tx,
475 }))
476 .map_err(|_| {
477 anyhow!("failed to send request to analysis queue because the channel has closed")
478 })?;
479
480 rx.await.map_err(|_| {
481 anyhow!("failed to receive response from analysis queue because the channel has closed")
482 })?;
483
484 Ok(())
485 }
486
487 pub async fn remove_documents(&self, documents: Vec<Url>) -> Result<()> {
494 let (tx, rx) = oneshot::channel();
496 self.sender
497 .send(Request::Remove(RemoveRequest {
498 documents,
499 completed: tx,
500 }))
501 .map_err(|_| {
502 anyhow!("failed to send request to analysis queue because the channel has closed")
503 })?;
504
505 rx.await.map_err(|_| {
506 anyhow!("failed to receive response from analysis queue because the channel has closed")
507 })?;
508
509 Ok(())
510 }
511
512 pub fn notify_incremental_change(
516 &self,
517 document: Url,
518 change: IncrementalChange,
519 ) -> Result<()> {
520 self.sender
521 .send(Request::NotifyIncrementalChange(
522 NotifyIncrementalChangeRequest { document, change },
523 ))
524 .map_err(|_| {
525 anyhow!("failed to send request to analysis queue because the channel has closed")
526 })
527 }
528
529 pub fn notify_change(&self, document: Url, discard_pending: bool) -> Result<()> {
538 self.sender
539 .send(Request::NotifyChange(NotifyChangeRequest {
540 document,
541 discard_pending,
542 }))
543 .map_err(|_| {
544 anyhow!("failed to send request to analysis queue because the channel has closed")
545 })
546 }
547
548 pub async fn analyze_document(
557 &self,
558 context: Context,
559 document: Url,
560 ) -> Result<Vec<AnalysisResult>> {
561 let (tx, rx) = oneshot::channel();
563 self.sender
564 .send(Request::Analyze(AnalyzeRequest {
565 document: Some(document),
566 context,
567 completed: tx,
568 }))
569 .map_err(|_| {
570 anyhow!("failed to send request to analysis queue because the channel has closed")
571 })?;
572
573 rx.await.map_err(|_| {
574 anyhow!("failed to receive response from analysis queue because the channel has closed")
575 })?
576 }
577
578 pub async fn analyze(&self, context: Context) -> Result<Vec<AnalysisResult>> {
587 let (tx, rx) = oneshot::channel();
589 self.sender
590 .send(Request::Analyze(AnalyzeRequest {
591 document: None, context,
593 completed: tx,
594 }))
595 .map_err(|_| {
596 anyhow!("failed to send request to analysis queue because the channel has closed")
597 })?;
598
599 rx.await.map_err(|_| {
600 anyhow!("failed to receive response from analysis queue because the channel has closed")
601 })?
602 }
603
604 pub async fn format_document(&self, document: Url) -> Result<Option<(u32, u32, String)>> {
606 let (tx, rx) = oneshot::channel();
607 self.sender
608 .send(Request::Format(FormatRequest {
609 document,
610 completed: tx,
611 }))
612 .map_err(|_| {
613 anyhow!("failed to send format request to the queue because the channel has closed")
614 })?;
615
616 rx.await.map_err(|_| {
617 anyhow!("failed to send format request to the queue because the channel has closed")
618 })
619 }
620
621 pub async fn goto_definition(
623 &self,
624 document: Url,
625 position: SourcePosition,
626 encoding: SourcePositionEncoding,
627 ) -> Result<Option<GotoDefinitionResponse>> {
628 let (tx, rx) = oneshot::channel();
629 self.sender
630 .send(Request::GotoDefinition(GotoDefinitionRequest {
631 document,
632 position,
633 encoding,
634 completed: tx,
635 }))
636 .map_err(|_| {
637 anyhow!(
638 "failed to send goto definition request to analysis queue because the channel \
639 has closed"
640 )
641 })?;
642
643 rx.await.map_err(|_| {
644 anyhow!(
645 "failed to receive goto definition response from analysis queue because the \
646 channel has closed"
647 )
648 })
649 }
650
651 pub async fn find_all_references(
653 &self,
654 document: Url,
655 position: SourcePosition,
656 encoding: SourcePositionEncoding,
657 include_declaration: bool,
658 ) -> Result<Vec<Location>> {
659 let (tx, rx) = oneshot::channel();
660 self.sender
661 .send(Request::FindAllReferences(FindAllReferencesRequest {
662 document,
663 position,
664 encoding,
665 include_declaration,
666 completed: tx,
667 }))
668 .map_err(|_| {
669 anyhow!(
670 "failed to send find all references request to analysis queue because the \
671 channel has closed"
672 )
673 })?;
674
675 rx.await.map_err(|_| {
676 anyhow!(
677 "failed to receive find all references response from analysis queue because the \
678 client channel has closed"
679 )
680 })
681 }
682
683 pub async fn completion(
685 &self,
686 context: Context,
687 document: Url,
688 position: SourcePosition,
689 encoding: SourcePositionEncoding,
690 ) -> Result<Option<CompletionResponse>> {
691 let (tx, rx) = oneshot::channel();
692 self.sender
693 .send(Request::Completion(CompletionRequest {
694 document,
695 position,
696 encoding,
697 context,
698 completed: tx,
699 }))
700 .map_err(|_| {
701 anyhow!(
702 "failed to send completion request to analysis queue because the channel has \
703 closed"
704 )
705 })?;
706
707 rx.await.map_err(|_| {
708 anyhow!(
709 "failed to send completion request to analysis queue because the channel has \
710 closed"
711 )
712 })
713 }
714
715 pub async fn hover(
717 &self,
718 document: Url,
719 position: SourcePosition,
720 encoding: SourcePositionEncoding,
721 ) -> Result<Option<Hover>> {
722 let (tx, rx) = oneshot::channel();
723 self.sender
724 .send(Request::Hover(HoverRequest {
725 document,
726 position,
727 encoding,
728 completed: tx,
729 }))
730 .map_err(|_| {
731 anyhow!(
732 "failed to send hover request to analysis queue because the channel has closed"
733 )
734 })?;
735
736 rx.await.map_err(|_| {
737 anyhow!("failed to send hover request to analysis queue because the channel has closed")
738 })
739 }
740
741 pub async fn rename(
743 &self,
744 document: Url,
745 position: SourcePosition,
746 encoding: SourcePositionEncoding,
747 new_name: String,
748 ) -> Result<Option<WorkspaceEdit>> {
749 let (tx, rx) = oneshot::channel();
750 self.sender
751 .send(Request::Rename(RenameRequest {
752 document,
753 position,
754 encoding,
755 new_name,
756 completed: tx,
757 }))
758 .map_err(|_| {
759 anyhow!(
760 "failed to send rename request to analysis queue because the channel has \
761 closed"
762 )
763 })?;
764
765 rx.await.map_err(|_| {
766 anyhow!(
767 "failed to receive rename response from analysis queue because the channel has \
768 closed"
769 )
770 })
771 }
772
773 pub async fn semantic_tokens(&self, document: Url) -> Result<Option<SemanticTokensResult>> {
775 let (tx, rx) = oneshot::channel();
776 self.sender
777 .send(Request::SemanticTokens(SemanticTokenRequest {
778 document,
779 completed: tx,
780 }))
781 .map_err(|_| {
782 anyhow!(
783 "failed to send semantic tokens request to analysis queue because the channel \
784 has closed"
785 )
786 })?;
787
788 rx.await.map_err(|_| {
789 anyhow!(
790 "failed to receive semantic tokens response from analysis queue because the \
791 channel has closed"
792 )
793 })
794 }
795
796 pub async fn document_symbol(&self, document: Url) -> Result<Option<DocumentSymbolResponse>> {
798 let (tx, rx) = oneshot::channel();
799 self.sender
800 .send(Request::DocumentSymbol(DocumentSymbolRequest {
801 document,
802 completed: tx,
803 }))
804 .map_err(|_| {
805 anyhow!(
806 "failed to send document symbol request to analysis queue because the channel \
807 has closed"
808 )
809 })?;
810
811 rx.await.map_err(|_| {
812 anyhow!(
813 "failed to receive document symbol request to analysis queue because the channel \
814 has closed"
815 )
816 })
817 }
818
819 pub async fn workspace_symbol(&self, query: String) -> Result<Option<Vec<SymbolInformation>>> {
821 let (tx, rx) = oneshot::channel();
822 self.sender
823 .send(Request::WorkspaceSymbol(WorkspaceSymbolRequest {
824 query,
825 completed: tx,
826 }))
827 .map_err(|_| {
828 anyhow!(
829 "failed to send workspace symbol request to analysis queue because the \
830 channel has closed"
831 )
832 })?;
833
834 rx.await.map_err(|_| {
835 anyhow!(
836 "failed to receive workspace symbol response from analysis queue because the \
837 channel has closed"
838 )
839 })
840 }
841
842 pub async fn signature_help(
844 &self,
845 document: Url,
846 position: SourcePosition,
847 encoding: SourcePositionEncoding,
848 ) -> Result<Option<SignatureHelp>> {
849 let (tx, rx) = oneshot::channel();
850 self.sender
851 .send(Request::SignatureHelp(SignatureHelpRequest {
852 document,
853 position,
854 encoding,
855 completed: tx,
856 }))
857 .map_err(|_| {
858 anyhow!(
859 "failed to send signature help request to analysis queue because the channel \
860 has closed"
861 )
862 })?;
863
864 rx.await.map_err(|_| {
865 anyhow!(
866 "failed to receive signature help response from analysis queue because the \
867 channel has closed"
868 )
869 })
870 }
871}
872
873impl Default for Analyzer<()> {
874 fn default() -> Self {
875 Self::new(Default::default(), |_, _, _, _| async {})
876 }
877}
878
impl<C> Drop for Analyzer<C> {
    fn drop(&mut self) {
        // Drop the sender first so the request channel closes and the queue
        // thread's run loop can terminate.
        // SAFETY: `sender` is dropped exactly once (here) and is never
        // accessed again afterward.
        unsafe { ManuallyDrop::drop(&mut self.sender) };
        // Wait for the analysis queue thread to finish; the handle is only
        // `None` if it was already taken, so this join happens at most once.
        if let Some(handle) = self.handle.take() {
            handle.join().unwrap();
        }
    }
}
887
// Compile-time assertion that `Analyzer` is `Send + Sync`; fails to build
// if a field ever loses those bounds.
const _: () = {
    const fn _assert<T: Send + Sync>() {}
    _assert::<Analyzer<()>>();
};
895
#[cfg(test)]
mod test {
    use std::fs;

    use tempfile::TempDir;
    use wdl_ast::Severity;

    use super::*;

    // Analyzing with no documents added should yield no results.
    #[tokio::test]
    async fn it_returns_empty_results() {
        let analyzer = Analyzer::default();
        let results = analyzer.analyze(()).await.unwrap();
        assert!(results.is_empty());
    }

    #[tokio::test]
    async fn it_analyzes_a_document() {
        // Create a document whose task and workflow share the name `test`,
        // which should produce a single name-conflict diagnostic.
        let dir = TempDir::new().expect("failed to create temporary directory");
        let path = dir.path().join("foo.wdl");
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::default();
        analyzer
            .add_document(path_to_uri(&path).expect("should convert to URI"))
            .await
            .expect("should add document");

        // The first analysis should report the conflicting workflow name.
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.diagnostics().count(), 1);
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().rule(),
            None
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().message(),
            "conflicting workflow name `test`"
        );

        // Re-analyzing without changes should reuse the same document (same
        // id) and report the same diagnostic.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.id().as_ref(), id.as_ref());
        assert_eq!(results[0].document.diagnostics().count(), 1);
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().rule(),
            None
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().message(),
            "conflicting workflow name `test`"
        );
    }

    #[tokio::test]
    async fn it_reanalyzes_a_document_on_change() {
        // Start with a document containing a name conflict.
        let dir = TempDir::new().expect("failed to create temporary directory");
        let path = dir.path().join("foo.wdl");
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::default();
        analyzer
            .add_document(path_to_uri(&path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.diagnostics().count(), 1);
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().rule(),
            None
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().message(),
            "conflicting workflow name `test`"
        );

        // Rewrite the file on disk so the conflict is resolved.
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow something_else {
}
"#,
        )
        .expect("failed to create test file");

        let uri = path_to_uri(&path).expect("should convert to URI");
        analyzer.notify_change(uri.clone(), false).unwrap();

        // After the change notification, re-analysis should produce a new
        // document (different id) with no diagnostics.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() != id.as_ref());
        assert_eq!(results[0].document.diagnostics().count(), 0);

        // Analyzing the same document again without changes should keep the
        // same id and remain diagnostic-free.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze_document((), uri).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() == id.as_ref());
        assert_eq!(results[0].document.diagnostics().count(), 0);
    }

    #[tokio::test]
    async fn it_reanalyzes_a_document_on_incremental_change() {
        // Start with a document containing a name conflict.
        let dir = TempDir::new().expect("failed to create temporary directory");
        let path = dir.path().join("foo.wdl");
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::default();
        analyzer
            .add_document(path_to_uri(&path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.diagnostics().count(), 1);
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().rule(),
            None
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().message(),
            "conflicting workflow name `test`"
        );

        // Rename the workflow via an incremental edit (UTF-8 positions)
        // without touching the file on disk.
        let uri = path_to_uri(&path).expect("should convert to URI");
        analyzer
            .notify_incremental_change(
                uri.clone(),
                IncrementalChange {
                    version: 2,
                    start: None,
                    edits: vec![SourceEdit {
                        range: SourcePosition::new(6, 9)..SourcePosition::new(6, 13),
                        encoding: SourcePositionEncoding::UTF8,
                        text: "something_else".to_string(),
                    }],
                },
            )
            .unwrap();

        // The edit should produce a new document with no diagnostics.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze_document((), uri).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() != id.as_ref());
        assert_eq!(results[0].document.diagnostics().count(), 0);
    }

    #[tokio::test]
    async fn it_removes_documents() {
        // Create three valid documents in the same directory.
        let dir = TempDir::new().expect("failed to create temporary directory");
        let foo = dir.path().join("foo.wdl");
        fs::write(
            &foo,
            r#"version 1.1
workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let bar = dir.path().join("bar.wdl");
        fs::write(
            &bar,
            r#"version 1.1
workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let baz = dir.path().join("baz.wdl");
        fs::write(
            &baz,
            r#"version 1.1
workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::default();
        analyzer
            .add_directory(dir.path())
            .await
            .expect("should add documents");

        // All three documents should analyze cleanly.
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 3);
        assert!(results[0].document.diagnostics().next().is_none());
        assert!(results[1].document.diagnostics().next().is_none());
        assert!(results[2].document.diagnostics().next().is_none());

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 3);

        // Removing by the directory's URI should remove all documents.
        analyzer
            .remove_documents(vec![
                path_to_uri(dir.path()).expect("should convert to URI"),
            ])
            .await
            .unwrap();
        let results = analyzer.analyze(()).await.unwrap();
        assert!(results.is_empty());
    }
}