1use std::ffi::OsStr;
4use std::fmt;
5use std::future::Future;
6use std::mem::ManuallyDrop;
7use std::ops::Range;
8use std::path::Path;
9use std::path::absolute;
10use std::sync::Arc;
11use std::thread::JoinHandle;
12
13use anyhow::Context;
14use anyhow::Error;
15use anyhow::Result;
16use anyhow::anyhow;
17use anyhow::bail;
18use ignore::WalkBuilder;
19use indexmap::IndexSet;
20use line_index::LineCol;
21use line_index::LineIndex;
22use line_index::WideEncoding;
23use line_index::WideLineCol;
24use lsp_types::CompletionResponse;
25use lsp_types::DocumentSymbolResponse;
26use lsp_types::GotoDefinitionResponse;
27use lsp_types::Hover;
28use lsp_types::InlayHint;
29use lsp_types::Location;
30use lsp_types::SemanticTokensResult;
31use lsp_types::SignatureHelp;
32use lsp_types::SymbolInformation;
33use lsp_types::WorkspaceEdit;
34use path_clean::PathClean;
35use tokio::runtime::Handle;
36use tokio::sync::mpsc;
37use tokio::sync::oneshot;
38use url::Url;
39
40use crate::config::Config;
41use crate::document::Document;
42use crate::graph::DocumentGraphNode;
43use crate::graph::ParseState;
44use crate::queue::AddRequest;
45use crate::queue::AnalysisQueue;
46use crate::queue::AnalyzeRequest;
47use crate::queue::CompletionRequest;
48use crate::queue::DocumentSymbolRequest;
49use crate::queue::FindAllReferencesRequest;
50use crate::queue::FormatRequest;
51use crate::queue::GotoDefinitionRequest;
52use crate::queue::HoverRequest;
53use crate::queue::InlayHintsRequest;
54use crate::queue::NotifyChangeRequest;
55use crate::queue::NotifyIncrementalChangeRequest;
56use crate::queue::RemoveRequest;
57use crate::queue::RenameRequest;
58use crate::queue::Request;
59use crate::queue::SemanticTokenRequest;
60use crate::queue::SignatureHelpRequest;
61use crate::queue::WorkspaceSymbolRequest;
62use crate::rayon::RayonHandle;
63
/// The kind of progress being reported by the analyzer.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ProgressKind {
    /// Documents are being parsed.
    Parsing,
    /// Documents are being analyzed.
    Analyzing,
}

impl fmt::Display for ProgressKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Map each kind to its lowercase label and emit it directly.
        let label = match self {
            Self::Parsing => "parsing",
            Self::Analyzing => "analyzing",
        };
        f.write_str(label)
    }
}
81
82pub fn path_to_uri(path: impl AsRef<Path>) -> Option<Url> {
84 Url::from_file_path(absolute(path).ok()?.clean()).ok()
85}
86
/// The result of analyzing a single document.
#[derive(Debug, Clone)]
pub struct AnalysisResult {
    // The analysis or parse error for the document, if one occurred
    // (populated from the graph node's analysis error or `ParseState::Error`).
    error: Option<Arc<Error>>,
    // The version of the document recorded at parse time, if known.
    version: Option<i32>,
    // The line index computed when the document was parsed, if parsing
    // succeeded.
    lines: Option<Arc<LineIndex>>,
    // The analyzed document; a default document constructed from the URI
    // when analysis failed.
    document: Document,
}
106
107impl AnalysisResult {
108 pub(crate) fn new(node: &DocumentGraphNode) -> Self {
110 if let Some(error) = node.analysis_error() {
111 return Self {
112 error: Some(error.clone()),
113 version: node.parse_state().version(),
114 lines: node.parse_state().lines().cloned(),
115 document: Document::default_from_uri(node.uri().clone()),
116 };
117 }
118
119 let (error, version, lines) = match node.parse_state() {
120 ParseState::NotParsed => unreachable!("document should have been parsed"),
121 ParseState::Error(e) => (Some(e), None, None),
122 ParseState::Parsed { version, lines, .. } => (None, *version, Some(lines)),
123 };
124
125 Self {
126 error: error.cloned(),
127 version,
128 lines: lines.cloned(),
129 document: node
130 .document()
131 .expect("analysis should have completed")
132 .clone(),
133 }
134 }
135
136 pub fn error(&self) -> Option<&Arc<Error>> {
142 self.error.as_ref()
143 }
144
145 pub fn version(&self) -> Option<i32> {
150 self.version
151 }
152
153 pub fn lines(&self) -> Option<&Arc<LineIndex>> {
157 self.lines.as_ref()
158 }
159
160 pub fn document(&self) -> &Document {
162 &self.document
163 }
164}
165
/// A position within a source document.
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Default)]
pub struct SourcePosition {
    /// The line of the position.
    pub line: u32,
    /// The character offset within the line.
    ///
    /// NOTE(review): how this offset is measured (bytes vs. UTF-16 code
    /// units) is determined by the `SourcePositionEncoding` supplied
    /// alongside the position.
    pub character: u32,
}

impl SourcePosition {
    /// Creates a new source position from a line and character offset.
    pub fn new(line: u32, character: u32) -> Self {
        SourcePosition { line, character }
    }
}
183
/// The encoding used to interpret a `SourcePosition`'s character offsets.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum SourcePositionEncoding {
    /// Character offsets are UTF-8 offsets (used directly as line/column
    /// values when applying edits).
    UTF8,
    /// Character offsets are UTF-16 code unit offsets (converted to UTF-8
    /// offsets via the document's line index when applying edits).
    UTF16,
}
196
/// An edit to apply to a source document.
#[derive(Debug, Clone)]
pub struct SourceEdit {
    // The range of the source to replace.
    range: Range<SourcePosition>,
    // The encoding of the range's character offsets (UTF-8 or UTF-16).
    encoding: SourcePositionEncoding,
    // The replacement text.
    text: String,
}
209
impl SourceEdit {
    /// Creates a new source edit that replaces the given range with the
    /// given text.
    pub fn new(
        range: Range<SourcePosition>,
        encoding: SourcePositionEncoding,
        text: impl Into<String>,
    ) -> Self {
        Self {
            range,
            encoding,
            text: text.into(),
        }
    }

    /// Gets a copy of the edit's range.
    pub(crate) fn range(&self) -> Range<SourcePosition> {
        self.range.start..self.range.end
    }

    /// Applies the edit to the given source string.
    ///
    /// Positions are resolved against `lines`, the line index for `source`.
    ///
    /// # Errors
    ///
    /// Returns an error if either endpoint of the edit is not a valid
    /// position in `source`, or if the resolved byte offsets fall inside a
    /// multi-byte character.
    pub(crate) fn apply(&self, source: &mut String, lines: &LineIndex) -> Result<()> {
        // Normalize both endpoints to UTF-8 line/column positions. UTF-8
        // positions are used as-is; UTF-16 positions are converted through
        // the line index.
        let (start, end) = match self.encoding {
            SourcePositionEncoding::UTF8 => (
                LineCol {
                    line: self.range.start.line,
                    col: self.range.start.character,
                },
                LineCol {
                    line: self.range.end.line,
                    col: self.range.end.character,
                },
            ),
            SourcePositionEncoding::UTF16 => (
                lines
                    .to_utf8(
                        WideEncoding::Utf16,
                        WideLineCol {
                            line: self.range.start.line,
                            col: self.range.start.character,
                        },
                    )
                    .context("invalid edit start position")?,
                lines
                    .to_utf8(
                        WideEncoding::Utf16,
                        WideLineCol {
                            line: self.range.end.line,
                            col: self.range.end.character,
                        },
                    )
                    .context("invalid edit end position")?,
            ),
        };

        // Convert the line/column positions into byte offsets into `source`.
        let range: Range<usize> = lines
            .offset(start)
            .context("invalid edit start position")?
            .into()
            ..lines
                .offset(end)
                .context("invalid edit end position")?
                .into();

        // `String::replace_range` panics on non-boundary offsets, so check
        // both ends explicitly and fail with a proper error instead.
        if !source.is_char_boundary(range.start) {
            bail!("edit start position is not at a character boundary");
        }

        if !source.is_char_boundary(range.end) {
            bail!("edit end position is not at a character boundary");
        }

        source.replace_range(range, &self.text);
        Ok(())
    }
}
285
/// An incremental change to a document.
#[derive(Clone, Debug)]
pub struct IncrementalChange {
    /// The version of the document after the change is applied.
    pub version: i32,
    /// Optional starting source for the change.
    ///
    /// NOTE(review): presumably the full document source to start from
    /// before applying `edits`; `None` means edits apply to the current
    /// source — confirm against the analysis queue's change handling.
    pub start: Option<String>,
    /// The edits to apply to the document.
    pub edits: Vec<SourceEdit>,
}
304
/// A WDL document analyzer that dispatches requests to a dedicated analysis
/// queue thread.
///
/// `Context` is caller-supplied state that is passed through to the progress
/// callback.
#[derive(Debug)]
pub struct Analyzer<Context> {
    // Sender for delivering requests to the queue thread.
    //
    // Wrapped in `ManuallyDrop` so that `Drop` can drop the sender (closing
    // the channel and terminating the queue thread) before joining the
    // thread handle.
    sender: ManuallyDrop<mpsc::UnboundedSender<Request<Context>>>,
    // Join handle for the queue thread; taken (and joined) on drop.
    handle: Option<JoinHandle<()>>,
    // The configuration the analyzer was created with.
    config: Config,
}
327
328impl<Context> Analyzer<Context>
329where
330 Context: Send + Clone + 'static,
331{
332 pub fn new<Progress, Return>(config: Config, progress: Progress) -> Self
340 where
341 Progress: Fn(Context, ProgressKind, usize, usize) -> Return + Send + 'static,
342 Return: Future<Output = ()>,
343 {
344 Self::new_with_validator(config, progress, crate::Validator::default)
345 }
346
347 pub fn new_with_validator<Progress, Return, Validator>(
356 config: Config,
357 progress: Progress,
358 validator: Validator,
359 ) -> Self
360 where
361 Progress: Fn(Context, ProgressKind, usize, usize) -> Return + Send + 'static,
362 Return: Future<Output = ()>,
363 Validator: Fn() -> crate::Validator + Send + Sync + 'static,
364 {
365 let (tx, rx) = mpsc::unbounded_channel();
366 let tokio = Handle::current();
367 let inner_config = config.clone();
368 let handle = std::thread::spawn(move || {
369 let queue = AnalysisQueue::new(inner_config, tokio, progress, validator);
370 queue.run(rx);
371 });
372
373 Self {
374 sender: ManuallyDrop::new(tx),
375 handle: Some(handle),
376 config,
377 }
378 }
379
380 pub async fn add_document(&self, uri: Url) -> Result<()> {
384 let mut documents = IndexSet::new();
385 documents.insert(uri);
386
387 let (tx, rx) = oneshot::channel();
388 self.sender
389 .send(Request::Add(AddRequest {
390 documents,
391 completed: tx,
392 }))
393 .map_err(|_| {
394 anyhow!("failed to send request to analysis queue because the channel has closed")
395 })?;
396
397 rx.await.map_err(|_| {
398 anyhow!("failed to receive response from analysis queue because the channel has closed")
399 })?;
400
401 Ok(())
402 }
403
404 pub async fn add_directory(&self, path: impl AsRef<Path>) -> Result<()> {
410 let path = path.as_ref().to_path_buf();
411 let config = self.config.clone();
412 let documents = RayonHandle::spawn(move || -> Result<IndexSet<Url>> {
414 let mut documents = IndexSet::new();
415
416 let metadata = path.metadata().with_context(|| {
417 format!(
418 "failed to read metadata for `{path}`",
419 path = path.display()
420 )
421 })?;
422
423 if metadata.is_file() {
424 bail!("`{path}` is a file, not a directory", path = path.display());
425 }
426
427 let mut walker = WalkBuilder::new(&path);
428 if let Some(ignore_filename) = config.ignore_filename() {
429 walker.add_custom_ignore_filename(ignore_filename);
430 }
431 let walker = walker
432 .standard_filters(false)
433 .parents(true)
434 .follow_links(true)
435 .build();
436
437 for result in walker {
438 let entry = result.with_context(|| {
439 format!("failed to read directory `{path}`", path = path.display())
440 })?;
441
442 let Some(file_type) = entry.file_type() else {
444 continue;
445 };
446 if !file_type.is_file() {
448 continue;
449 }
450 if entry.path().extension() != Some(OsStr::new("wdl")) {
452 continue;
453 }
454
455 documents.insert(path_to_uri(entry.path()).with_context(|| {
456 format!(
457 "failed to convert path `{path}` to a URI",
458 path = entry.path().display()
459 )
460 })?);
461 }
462
463 Ok(documents)
464 })
465 .await?;
466
467 if documents.is_empty() {
468 return Ok(());
469 }
470
471 let (tx, rx) = oneshot::channel();
473 self.sender
474 .send(Request::Add(AddRequest {
475 documents,
476 completed: tx,
477 }))
478 .map_err(|_| {
479 anyhow!("failed to send request to analysis queue because the channel has closed")
480 })?;
481
482 rx.await.map_err(|_| {
483 anyhow!("failed to receive response from analysis queue because the channel has closed")
484 })?;
485
486 Ok(())
487 }
488
489 pub async fn remove_documents(&self, documents: Vec<Url>) -> Result<()> {
496 let (tx, rx) = oneshot::channel();
498 self.sender
499 .send(Request::Remove(RemoveRequest {
500 documents,
501 completed: tx,
502 }))
503 .map_err(|_| {
504 anyhow!("failed to send request to analysis queue because the channel has closed")
505 })?;
506
507 rx.await.map_err(|_| {
508 anyhow!("failed to receive response from analysis queue because the channel has closed")
509 })?;
510
511 Ok(())
512 }
513
514 pub fn notify_incremental_change(
518 &self,
519 document: Url,
520 change: IncrementalChange,
521 ) -> Result<()> {
522 self.sender
523 .send(Request::NotifyIncrementalChange(
524 NotifyIncrementalChangeRequest { document, change },
525 ))
526 .map_err(|_| {
527 anyhow!("failed to send request to analysis queue because the channel has closed")
528 })
529 }
530
531 pub fn notify_change(&self, document: Url, discard_pending: bool) -> Result<()> {
540 self.sender
541 .send(Request::NotifyChange(NotifyChangeRequest {
542 document,
543 discard_pending,
544 }))
545 .map_err(|_| {
546 anyhow!("failed to send request to analysis queue because the channel has closed")
547 })
548 }
549
550 pub async fn analyze_document(
559 &self,
560 context: Context,
561 document: Url,
562 ) -> Result<Vec<AnalysisResult>> {
563 let (tx, rx) = oneshot::channel();
565 self.sender
566 .send(Request::Analyze(AnalyzeRequest {
567 document: Some(document),
568 context,
569 completed: tx,
570 }))
571 .map_err(|_| {
572 anyhow!("failed to send request to analysis queue because the channel has closed")
573 })?;
574
575 rx.await.map_err(|_| {
576 anyhow!("failed to receive response from analysis queue because the channel has closed")
577 })?
578 }
579
580 pub async fn analyze(&self, context: Context) -> Result<Vec<AnalysisResult>> {
589 let (tx, rx) = oneshot::channel();
591 self.sender
592 .send(Request::Analyze(AnalyzeRequest {
593 document: None, context,
595 completed: tx,
596 }))
597 .map_err(|_| {
598 anyhow!("failed to send request to analysis queue because the channel has closed")
599 })?;
600
601 rx.await.map_err(|_| {
602 anyhow!("failed to receive response from analysis queue because the channel has closed")
603 })?
604 }
605
606 pub async fn format_document(&self, document: Url) -> Result<Option<(u32, u32, String)>> {
608 let (tx, rx) = oneshot::channel();
609 self.sender
610 .send(Request::Format(FormatRequest {
611 document,
612 completed: tx,
613 }))
614 .map_err(|_| {
615 anyhow!("failed to send format request to the queue because the channel has closed")
616 })?;
617
618 rx.await.map_err(|_| {
619 anyhow!("failed to send format request to the queue because the channel has closed")
620 })
621 }
622
623 pub async fn goto_definition(
625 &self,
626 document: Url,
627 position: SourcePosition,
628 encoding: SourcePositionEncoding,
629 ) -> Result<Option<GotoDefinitionResponse>> {
630 let (tx, rx) = oneshot::channel();
631 self.sender
632 .send(Request::GotoDefinition(GotoDefinitionRequest {
633 document,
634 position,
635 encoding,
636 completed: tx,
637 }))
638 .map_err(|_| {
639 anyhow!(
640 "failed to send goto definition request to analysis queue because the channel \
641 has closed"
642 )
643 })?;
644
645 rx.await.map_err(|_| {
646 anyhow!(
647 "failed to receive goto definition response from analysis queue because the \
648 channel has closed"
649 )
650 })
651 }
652
653 pub async fn find_all_references(
655 &self,
656 document: Url,
657 position: SourcePosition,
658 encoding: SourcePositionEncoding,
659 include_declaration: bool,
660 ) -> Result<Vec<Location>> {
661 let (tx, rx) = oneshot::channel();
662 self.sender
663 .send(Request::FindAllReferences(FindAllReferencesRequest {
664 document,
665 position,
666 encoding,
667 include_declaration,
668 completed: tx,
669 }))
670 .map_err(|_| {
671 anyhow!(
672 "failed to send find all references request to analysis queue because the \
673 channel has closed"
674 )
675 })?;
676
677 rx.await.map_err(|_| {
678 anyhow!(
679 "failed to receive find all references response from analysis queue because the \
680 client channel has closed"
681 )
682 })
683 }
684
685 pub async fn completion(
687 &self,
688 context: Context,
689 document: Url,
690 position: SourcePosition,
691 encoding: SourcePositionEncoding,
692 ) -> Result<Option<CompletionResponse>> {
693 let (tx, rx) = oneshot::channel();
694 self.sender
695 .send(Request::Completion(CompletionRequest {
696 document,
697 position,
698 encoding,
699 context,
700 completed: tx,
701 }))
702 .map_err(|_| {
703 anyhow!(
704 "failed to send completion request to analysis queue because the channel has \
705 closed"
706 )
707 })?;
708
709 rx.await.map_err(|_| {
710 anyhow!(
711 "failed to send completion request to analysis queue because the channel has \
712 closed"
713 )
714 })
715 }
716
717 pub async fn hover(
719 &self,
720 document: Url,
721 position: SourcePosition,
722 encoding: SourcePositionEncoding,
723 ) -> Result<Option<Hover>> {
724 let (tx, rx) = oneshot::channel();
725 self.sender
726 .send(Request::Hover(HoverRequest {
727 document,
728 position,
729 encoding,
730 completed: tx,
731 }))
732 .map_err(|_| {
733 anyhow!(
734 "failed to send hover request to analysis queue because the channel has closed"
735 )
736 })?;
737
738 rx.await.map_err(|_| {
739 anyhow!("failed to send hover request to analysis queue because the channel has closed")
740 })
741 }
742
743 pub async fn rename(
745 &self,
746 document: Url,
747 position: SourcePosition,
748 encoding: SourcePositionEncoding,
749 new_name: String,
750 ) -> Result<Option<WorkspaceEdit>> {
751 let (tx, rx) = oneshot::channel();
752 self.sender
753 .send(Request::Rename(RenameRequest {
754 document,
755 position,
756 encoding,
757 new_name,
758 completed: tx,
759 }))
760 .map_err(|_| {
761 anyhow!(
762 "failed to send rename request to analysis queue because the channel has \
763 closed"
764 )
765 })?;
766
767 rx.await.map_err(|_| {
768 anyhow!(
769 "failed to receive rename response from analysis queue because the channel has \
770 closed"
771 )
772 })
773 }
774
775 pub async fn semantic_tokens(&self, document: Url) -> Result<Option<SemanticTokensResult>> {
777 let (tx, rx) = oneshot::channel();
778 self.sender
779 .send(Request::SemanticTokens(SemanticTokenRequest {
780 document,
781 completed: tx,
782 }))
783 .map_err(|_| {
784 anyhow!(
785 "failed to send semantic tokens request to analysis queue because the channel \
786 has closed"
787 )
788 })?;
789
790 rx.await.map_err(|_| {
791 anyhow!(
792 "failed to receive semantic tokens response from analysis queue because the \
793 channel has closed"
794 )
795 })
796 }
797
798 pub async fn document_symbol(&self, document: Url) -> Result<Option<DocumentSymbolResponse>> {
800 let (tx, rx) = oneshot::channel();
801 self.sender
802 .send(Request::DocumentSymbol(DocumentSymbolRequest {
803 document,
804 completed: tx,
805 }))
806 .map_err(|_| {
807 anyhow!(
808 "failed to send document symbol request to analysis queue because the channel \
809 has closed"
810 )
811 })?;
812
813 rx.await.map_err(|_| {
814 anyhow!(
815 "failed to receive document symbol request to analysis queue because the channel \
816 has closed"
817 )
818 })
819 }
820
821 pub async fn workspace_symbol(&self, query: String) -> Result<Option<Vec<SymbolInformation>>> {
823 let (tx, rx) = oneshot::channel();
824 self.sender
825 .send(Request::WorkspaceSymbol(WorkspaceSymbolRequest {
826 query,
827 completed: tx,
828 }))
829 .map_err(|_| {
830 anyhow!(
831 "failed to send workspace symbol request to analysis queue because the \
832 channel has closed"
833 )
834 })?;
835
836 rx.await.map_err(|_| {
837 anyhow!(
838 "failed to receive workspace symbol response from analysis queue because the \
839 channel has closed"
840 )
841 })
842 }
843
844 pub async fn signature_help(
846 &self,
847 document: Url,
848 position: SourcePosition,
849 encoding: SourcePositionEncoding,
850 ) -> Result<Option<SignatureHelp>> {
851 let (tx, rx) = oneshot::channel();
852 self.sender
853 .send(Request::SignatureHelp(SignatureHelpRequest {
854 document,
855 position,
856 encoding,
857 completed: tx,
858 }))
859 .map_err(|_| {
860 anyhow!(
861 "failed to send signature help request to analysis queue because the channel \
862 has closed"
863 )
864 })?;
865
866 rx.await.map_err(|_| {
867 anyhow!(
868 "failed to receive signature help response from analysis queue because the \
869 channel has closed"
870 )
871 })
872 }
873
874 pub async fn inlay_hints(
876 &self,
877 document: Url,
878 range: lsp_types::Range,
879 ) -> Result<Option<Vec<InlayHint>>> {
880 let (tx, rx) = oneshot::channel();
881 self.sender
882 .send(Request::InlayHints(InlayHintsRequest {
883 document,
884 range,
885 completed: tx,
886 }))
887 .map_err(|_| {
888 anyhow!(
889 "failed to send inlay hints request to analysis queue because the channel has \
890 closed"
891 )
892 })?;
893
894 rx.await.map_err(|_| {
895 anyhow!(
896 "failed to receive inlay hints response from analysis queue because the channel \
897 has closed"
898 )
899 })
900 }
901}
902
impl Default for Analyzer<()> {
    /// Creates an analyzer with a default configuration, a unit context,
    /// and a no-op progress callback.
    fn default() -> Self {
        Self::new(Default::default(), |_, _, _, _| async {})
    }
}
908
impl<C> Drop for Analyzer<C> {
    fn drop(&mut self) {
        // SAFETY: `sender` is not used again after this point. Dropping it
        // here (before joining) closes the request channel, which causes the
        // queue thread's receive loop to terminate so the join below can
        // complete.
        unsafe { ManuallyDrop::drop(&mut self.sender) };
        if let Some(handle) = self.handle.take() {
            handle.join().unwrap();
        }
    }
}
917
// Compile-time assertion that `Analyzer` is `Send + Sync`; fails to compile
// if either bound is ever lost.
const _: () = {
    const fn _assert<T: Send + Sync>() {}
    _assert::<Analyzer<()>>();
};
925
#[cfg(test)]
mod test {
    use std::fs;

    use tempfile::TempDir;
    use wdl_ast::Severity;

    use super::*;

    // An analyzer with no documents produces no results.
    #[tokio::test]
    async fn it_returns_empty_results() {
        let analyzer = Analyzer::default();
        let results = analyzer.analyze(()).await.unwrap();
        assert!(results.is_empty());
    }

    // A document whose task and workflow share a name yields exactly one
    // error diagnostic; re-analysis reuses the same document id.
    #[tokio::test]
    async fn it_analyzes_a_document() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let path = dir.path().join("foo.wdl");
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::default();
        analyzer
            .add_document(path_to_uri(&path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.diagnostics().count(), 1);
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().rule(),
            None
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().message(),
            "conflicting workflow name `test`"
        );

        // Analyzing again without changes should return the same document
        // (same id) with the same diagnostic.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.id().as_ref(), id.as_ref());
        assert_eq!(results[0].document.diagnostics().count(), 1);
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().rule(),
            None
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().message(),
            "conflicting workflow name `test`"
        );
    }

    // A full-file change notification triggers re-analysis: the document
    // gets a new id and the stale diagnostic disappears.
    #[tokio::test]
    async fn it_reanalyzes_a_document_on_change() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let path = dir.path().join("foo.wdl");
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::default();
        analyzer
            .add_document(path_to_uri(&path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.diagnostics().count(), 1);
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().rule(),
            None
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().message(),
            "conflicting workflow name `test`"
        );

        // Rewrite the file so the workflow name no longer conflicts.
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow something_else {
}
"#,
        )
        .expect("failed to create test file");

        let uri = path_to_uri(&path).expect("should convert to URI");
        analyzer.notify_change(uri.clone(), false).unwrap();

        // The change should produce a new document id and clear the
        // diagnostic.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() != id.as_ref());
        assert_eq!(results[0].document.diagnostics().count(), 0);

        // Re-analyzing the specific document without further changes keeps
        // the same id.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze_document((), uri).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() == id.as_ref());
        assert_eq!(results[0].document.diagnostics().count(), 0);
    }

    // An incremental edit that renames the workflow clears the
    // conflicting-name diagnostic on re-analysis.
    #[tokio::test]
    async fn it_reanalyzes_a_document_on_incremental_change() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let path = dir.path().join("foo.wdl");
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::default();
        analyzer
            .add_document(path_to_uri(&path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.diagnostics().count(), 1);
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().rule(),
            None
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics().next().unwrap().message(),
            "conflicting workflow name `test`"
        );

        // Replace `test` in `workflow test {` (line 6, characters 9..13,
        // zero-based) with a non-conflicting name.
        let uri = path_to_uri(&path).expect("should convert to URI");
        analyzer
            .notify_incremental_change(
                uri.clone(),
                IncrementalChange {
                    version: 2,
                    start: None,
                    edits: vec![SourceEdit {
                        range: SourcePosition::new(6, 9)..SourcePosition::new(6, 13),
                        encoding: SourcePositionEncoding::UTF8,
                        text: "something_else".to_string(),
                    }],
                },
            )
            .unwrap();

        // The edit should produce a new document id and no diagnostics.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze_document((), uri).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() != id.as_ref());
        assert_eq!(results[0].document.diagnostics().count(), 0);
    }

    // Removing a directory URI removes every document beneath it.
    #[tokio::test]
    async fn it_removes_documents() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let foo = dir.path().join("foo.wdl");
        fs::write(
            &foo,
            r#"version 1.1
workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let bar = dir.path().join("bar.wdl");
        fs::write(
            &bar,
            r#"version 1.1
workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let baz = dir.path().join("baz.wdl");
        fs::write(
            &baz,
            r#"version 1.1
workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::default();
        analyzer
            .add_directory(dir.path())
            .await
            .expect("should add documents");

        // All three documents should analyze without diagnostics.
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 3);
        assert!(results[0].document.diagnostics().next().is_none());
        assert!(results[1].document.diagnostics().next().is_none());
        assert!(results[2].document.diagnostics().next().is_none());

        // Analyzing again still reports all three documents.
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 3);

        // Removing the containing directory's URI removes them all.
        analyzer
            .remove_documents(vec![
                path_to_uri(dir.path()).expect("should convert to URI"),
            ])
            .await
            .unwrap();
        let results = analyzer.analyze(()).await.unwrap();
        assert!(results.is_empty());
    }
}