1use std::ffi::OsStr;
4use std::fmt;
5use std::future::Future;
6use std::mem::ManuallyDrop;
7use std::ops::Range;
8use std::path::Path;
9use std::path::PathBuf;
10use std::path::absolute;
11use std::sync::Arc;
12use std::thread::JoinHandle;
13
14use anyhow::Context;
15use anyhow::Error;
16use anyhow::Result;
17use anyhow::anyhow;
18use anyhow::bail;
19use indexmap::IndexSet;
20use line_index::LineCol;
21use line_index::LineIndex;
22use line_index::WideEncoding;
23use line_index::WideLineCol;
24use path_clean::clean;
25use tokio::runtime::Handle;
26use tokio::sync::mpsc;
27use tokio::sync::oneshot;
28use url::Url;
29use walkdir::WalkDir;
30use wdl_ast::Severity;
31use wdl_ast::SyntaxNode;
32use wdl_ast::SyntaxNodeExt;
33use wdl_ast::Validator;
34
35use crate::Rule;
36use crate::UNNECESSARY_FUNCTION_CALL;
37use crate::UNUSED_CALL_RULE_ID;
38use crate::UNUSED_DECL_RULE_ID;
39use crate::UNUSED_IMPORT_RULE_ID;
40use crate::UNUSED_INPUT_RULE_ID;
41use crate::document::Document;
42use crate::graph::DocumentGraphNode;
43use crate::graph::ParseState;
44use crate::queue::AddRequest;
45use crate::queue::AnalysisQueue;
46use crate::queue::AnalyzeRequest;
47use crate::queue::FormatRequest;
48use crate::queue::NotifyChangeRequest;
49use crate::queue::NotifyIncrementalChangeRequest;
50use crate::queue::RemoveRequest;
51use crate::queue::Request;
52use crate::rayon::RayonHandle;
53
/// The kind of analysis progress being reported to the progress callback.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ProgressKind {
    /// Documents are being parsed.
    Parsing,
    /// Documents are being analyzed.
    Analyzing,
}

impl fmt::Display for ProgressKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Map each variant to its lowercase label and emit it directly.
        let label = match self {
            Self::Parsing => "parsing",
            Self::Analyzing => "analyzing",
        };
        f.write_str(label)
    }
}
71
72pub fn path_to_uri(path: impl AsRef<Path>) -> Option<Url> {
74 Url::from_file_path(clean(absolute(path).ok()?)).ok()
75}
76
/// Represents the result of analyzing a single document.
#[derive(Debug, Clone)]
pub struct AnalysisResult {
    /// The error encountered while attempting to parse the document, if any.
    error: Option<Arc<Error>>,
    /// The version of the document's source at the time it was parsed, if
    /// one was provided.
    version: Option<i32>,
    /// The line index of the parsed source, if parsing succeeded.
    lines: Option<Arc<LineIndex>>,
    /// The analyzed document.
    document: Document,
}
96
97impl AnalysisResult {
98 pub(crate) fn new(node: &DocumentGraphNode) -> Self {
100 let (error, version, lines) = match node.parse_state() {
101 ParseState::NotParsed => unreachable!("document should have been parsed"),
102 ParseState::Error(e) => (Some(e), None, None),
103 ParseState::Parsed { version, lines, .. } => (None, *version, Some(lines)),
104 };
105
106 Self {
107 error: error.cloned(),
108 version,
109 lines: lines.cloned(),
110 document: node
111 .document()
112 .expect("analysis should have completed")
113 .clone(),
114 }
115 }
116
117 pub fn error(&self) -> Option<&Arc<Error>> {
123 self.error.as_ref()
124 }
125
126 pub fn version(&self) -> Option<i32> {
131 self.version
132 }
133
134 pub fn lines(&self) -> Option<&Arc<LineIndex>> {
138 self.lines.as_ref()
139 }
140
141 pub fn document(&self) -> &Document {
143 &self.document
144 }
145}
146
/// A zero-based line/character position within a source file.
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Default)]
pub struct SourcePosition {
    /// The zero-based line number.
    pub line: u32,
    /// The zero-based character offset within the line; its unit (UTF-8 or
    /// UTF-16 code units) is determined by the accompanying
    /// [`SourcePositionEncoding`].
    pub character: u32,
}

impl SourcePosition {
    /// Creates a source position from a line and character offset.
    pub fn new(line: u32, character: u32) -> Self {
        SourcePosition { line, character }
    }
}
164
/// The encoding used to interpret a [`SourcePosition`]'s character offset.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum SourcePositionEncoding {
    /// Character offsets are counted in UTF-8 code units (bytes).
    UTF8,
    /// Character offsets are counted in UTF-16 code units.
    UTF16,
}
177
/// Represents an edit to a document's source text.
#[derive(Debug, Clone)]
pub struct SourceEdit {
    /// The half-open position range that the edit replaces.
    range: Range<SourcePosition>,
    /// The encoding used to interpret the range's character offsets.
    encoding: SourcePositionEncoding,
    /// The replacement text.
    text: String,
}
190
impl SourceEdit {
    /// Creates a new source edit that replaces the given range with the
    /// given text.
    pub fn new(
        range: Range<SourcePosition>,
        encoding: SourcePositionEncoding,
        text: impl Into<String>,
    ) -> Self {
        Self {
            range,
            encoding,
            text: text.into(),
        }
    }

    /// Gets a copy of the edit's position range (`SourcePosition` is `Copy`).
    pub(crate) fn range(&self) -> Range<SourcePosition> {
        self.range.start..self.range.end
    }

    /// Applies the edit to the given source string in place.
    ///
    /// The range's endpoints are first normalized to UTF-8 line/column
    /// coordinates (UTF-16 positions are converted via the line index) and
    /// then resolved to byte offsets into `source`.
    ///
    /// # Errors
    ///
    /// Returns an error if either endpoint is not a valid position for the
    /// line index or if a resolved byte offset does not fall on a `char`
    /// boundary of `source`.
    // NOTE(review): `lines` is assumed to describe the current contents of
    // `source`; a stale index would produce wrong offsets — confirm callers
    // uphold this.
    pub(crate) fn apply(&self, source: &mut String, lines: &LineIndex) -> Result<()> {
        // Normalize both endpoints to UTF-8 line/column coordinates.
        let (start, end) = match self.encoding {
            SourcePositionEncoding::UTF8 => (
                LineCol {
                    line: self.range.start.line,
                    col: self.range.start.character,
                },
                LineCol {
                    line: self.range.end.line,
                    col: self.range.end.character,
                },
            ),
            SourcePositionEncoding::UTF16 => (
                lines
                    .to_utf8(
                        WideEncoding::Utf16,
                        WideLineCol {
                            line: self.range.start.line,
                            col: self.range.start.character,
                        },
                    )
                    .context("invalid edit start position")?,
                lines
                    .to_utf8(
                        WideEncoding::Utf16,
                        WideLineCol {
                            line: self.range.end.line,
                            col: self.range.end.character,
                        },
                    )
                    .context("invalid edit end position")?,
            ),
        };

        // Resolve the line/column coordinates to byte offsets into `source`.
        let range: Range<usize> = lines
            .offset(start)
            .context("invalid edit start position")?
            .into()
            ..lines
                .offset(end)
                .context("invalid edit end position")?
                .into();

        // `String::replace_range` panics on non-boundary offsets, so check
        // both ends up front and fail gracefully instead.
        if !source.is_char_boundary(range.start) {
            bail!("edit start position is not at a character boundary");
        }

        if !source.is_char_boundary(range.end) {
            bail!("edit end position is not at a character boundary");
        }

        source.replace_range(range, &self.text);
        Ok(())
    }
}
266
/// Represents an incremental change to a document's source text.
#[derive(Clone, Debug)]
pub struct IncrementalChange {
    /// The version of the document after the change is applied.
    pub version: i32,
    /// Optional full source text to start from before applying `edits`.
    // NOTE(review): semantics inferred from usage (tests pass `None` with
    // edits only) — confirm against the queue's incremental-change handling.
    pub start: Option<String>,
    /// The source edits to apply.
    pub edits: Vec<SourceEdit>,
}
285
/// Configuration for the severities of analysis diagnostics.
///
/// Each field holds the severity to report for the corresponding diagnostic;
/// `None` disables that diagnostic entirely.
#[derive(Debug, Clone, Copy)]
pub struct DiagnosticsConfig {
    /// The severity of the "unused import" diagnostic, if enabled.
    pub unused_import: Option<Severity>,
    /// The severity of the "unused input" diagnostic, if enabled.
    pub unused_input: Option<Severity>,
    /// The severity of the "unused declaration" diagnostic, if enabled.
    pub unused_declaration: Option<Severity>,
    /// The severity of the "unused call" diagnostic, if enabled.
    pub unused_call: Option<Severity>,
    /// The severity of the "unnecessary function call" diagnostic, if
    /// enabled.
    pub unnecessary_function_call: Option<Severity>,
}
315
316impl DiagnosticsConfig {
317 pub fn new<T: AsRef<dyn Rule>>(rules: impl IntoIterator<Item = T>) -> Self {
319 let mut unused_import = None;
320 let mut unused_input = None;
321 let mut unused_declaration = None;
322 let mut unused_call = None;
323 let mut unnecessary_function_call = None;
324
325 for rule in rules {
326 let rule = rule.as_ref();
327 match rule.id() {
328 UNUSED_IMPORT_RULE_ID => unused_import = Some(rule.severity()),
329 UNUSED_INPUT_RULE_ID => unused_input = Some(rule.severity()),
330 UNUSED_DECL_RULE_ID => unused_declaration = Some(rule.severity()),
331 UNUSED_CALL_RULE_ID => unused_call = Some(rule.severity()),
332 UNNECESSARY_FUNCTION_CALL => unnecessary_function_call = Some(rule.severity()),
333 _ => {}
334 }
335 }
336
337 Self {
338 unused_import,
339 unused_input,
340 unused_declaration,
341 unused_call,
342 unnecessary_function_call,
343 }
344 }
345
346 pub fn excepted_for_node(mut self, node: &SyntaxNode) -> Self {
349 let exceptions = node.rule_exceptions();
350
351 if exceptions.contains(UNUSED_IMPORT_RULE_ID) {
352 self.unused_import = None;
353 }
354
355 if exceptions.contains(UNUSED_INPUT_RULE_ID) {
356 self.unused_input = None;
357 }
358
359 if exceptions.contains(UNUSED_DECL_RULE_ID) {
360 self.unused_declaration = None;
361 }
362
363 if exceptions.contains(UNUSED_CALL_RULE_ID) {
364 self.unused_call = None;
365 }
366
367 if exceptions.contains(UNNECESSARY_FUNCTION_CALL) {
368 self.unnecessary_function_call = None;
369 }
370
371 self
372 }
373
374 pub fn except_all() -> Self {
376 Self {
377 unused_import: None,
378 unused_input: None,
379 unused_declaration: None,
380 unused_call: None,
381 unnecessary_function_call: None,
382 }
383 }
384}
385
/// A WDL document analyzer.
///
/// Requests are processed by an analysis queue running on a dedicated
/// thread; the `Context` type parameter is caller-supplied state passed
/// through to the progress callback.
#[derive(Debug)]
pub struct Analyzer<Context> {
    /// Sender for submitting requests to the analysis queue.
    ///
    /// Wrapped in `ManuallyDrop` so `Drop` can explicitly close the channel
    /// (terminating the queue thread) before joining it.
    sender: ManuallyDrop<mpsc::UnboundedSender<Request<Context>>>,
    /// Join handle of the analysis queue thread; taken and joined on drop.
    handle: Option<JoinHandle<()>>,
}
406
407impl<Context> Analyzer<Context>
408where
409 Context: Send + Clone + 'static,
410{
411 pub fn new<Progress, Return>(config: DiagnosticsConfig, progress: Progress) -> Self
419 where
420 Progress: Fn(Context, ProgressKind, usize, usize) -> Return + Send + 'static,
421 Return: Future<Output = ()>,
422 {
423 Self::new_with_validator(config, progress, Validator::default)
424 }
425
426 pub fn new_with_validator<Progress, Return, Validator>(
436 config: DiagnosticsConfig,
437 progress: Progress,
438 validator: Validator,
439 ) -> Self
440 where
441 Progress: Fn(Context, ProgressKind, usize, usize) -> Return + Send + 'static,
442 Return: Future<Output = ()>,
443 Validator: Fn() -> wdl_ast::Validator + Send + Sync + 'static,
444 {
445 let (tx, rx) = mpsc::unbounded_channel();
446 let tokio = Handle::current();
447 let handle = std::thread::spawn(move || {
448 let queue = AnalysisQueue::new(config, tokio, progress, validator);
449 queue.run(rx);
450 });
451
452 Self {
453 sender: ManuallyDrop::new(tx),
454 handle: Some(handle),
455 }
456 }
457
458 pub async fn add_document(&self, uri: Url) -> Result<()> {
462 let mut documents = IndexSet::new();
463 documents.insert(uri);
464
465 let (tx, rx) = oneshot::channel();
466 self.sender
467 .send(Request::Add(AddRequest {
468 documents,
469 completed: tx,
470 }))
471 .map_err(|_| {
472 anyhow!("failed to send request to analysis queue because the channel has closed")
473 })?;
474
475 rx.await.map_err(|_| {
476 anyhow!("failed to receive response from analysis queue because the channel has closed")
477 })?;
478
479 Ok(())
480 }
481
482 pub async fn add_directory(&self, path: PathBuf) -> Result<()> {
488 let documents = RayonHandle::spawn(move || -> Result<IndexSet<Url>> {
490 let mut documents = IndexSet::new();
491
492 let metadata = path.metadata().with_context(|| {
493 format!(
494 "failed to read metadata for `{path}`",
495 path = path.display()
496 )
497 })?;
498
499 if metadata.is_file() {
500 bail!("`{path}` is a file, not a directory", path = path.display());
501 }
502
503 for result in WalkDir::new(&path).follow_links(true) {
504 let entry = result.with_context(|| {
505 format!("failed to read directory `{path}`", path = path.display())
506 })?;
507 if !entry.file_type().is_file()
508 || entry.path().extension().and_then(OsStr::to_str) != Some("wdl")
509 {
510 continue;
511 }
512
513 documents.insert(path_to_uri(entry.path()).with_context(|| {
514 format!(
515 "failed to convert path `{path}` to a URI",
516 path = entry.path().display()
517 )
518 })?);
519 }
520
521 Ok(documents)
522 })
523 .await?;
524
525 if documents.is_empty() {
526 return Ok(());
527 }
528
529 let (tx, rx) = oneshot::channel();
531 self.sender
532 .send(Request::Add(AddRequest {
533 documents,
534 completed: tx,
535 }))
536 .map_err(|_| {
537 anyhow!("failed to send request to analysis queue because the channel has closed")
538 })?;
539
540 rx.await.map_err(|_| {
541 anyhow!("failed to receive response from analysis queue because the channel has closed")
542 })?;
543
544 Ok(())
545 }
546
547 pub async fn remove_documents(&self, documents: Vec<Url>) -> Result<()> {
554 let (tx, rx) = oneshot::channel();
556 self.sender
557 .send(Request::Remove(RemoveRequest {
558 documents,
559 completed: tx,
560 }))
561 .map_err(|_| {
562 anyhow!("failed to send request to analysis queue because the channel has closed")
563 })?;
564
565 rx.await.map_err(|_| {
566 anyhow!("failed to receive response from analysis queue because the channel has closed")
567 })?;
568
569 Ok(())
570 }
571
572 pub fn notify_incremental_change(
576 &self,
577 document: Url,
578 change: IncrementalChange,
579 ) -> Result<()> {
580 self.sender
581 .send(Request::NotifyIncrementalChange(
582 NotifyIncrementalChangeRequest { document, change },
583 ))
584 .map_err(|_| {
585 anyhow!("failed to send request to analysis queue because the channel has closed")
586 })
587 }
588
589 pub fn notify_change(&self, document: Url, discard_pending: bool) -> Result<()> {
598 self.sender
599 .send(Request::NotifyChange(NotifyChangeRequest {
600 document,
601 discard_pending,
602 }))
603 .map_err(|_| {
604 anyhow!("failed to send request to analysis queue because the channel has closed")
605 })
606 }
607
608 pub async fn analyze_document(
617 &self,
618 context: Context,
619 document: Url,
620 ) -> Result<Vec<AnalysisResult>> {
621 let (tx, rx) = oneshot::channel();
623 self.sender
624 .send(Request::Analyze(AnalyzeRequest {
625 document: Some(document),
626 context,
627 completed: tx,
628 }))
629 .map_err(|_| {
630 anyhow!("failed to send request to analysis queue because the channel has closed")
631 })?;
632
633 rx.await.map_err(|_| {
634 anyhow!("failed to receive response from analysis queue because the channel has closed")
635 })?
636 }
637
638 pub async fn analyze(&self, context: Context) -> Result<Vec<AnalysisResult>> {
647 let (tx, rx) = oneshot::channel();
649 self.sender
650 .send(Request::Analyze(AnalyzeRequest {
651 document: None,
652 context,
653 completed: tx,
654 }))
655 .map_err(|_| {
656 anyhow!("failed to send request to analysis queue because the channel has closed")
657 })?;
658
659 rx.await.map_err(|_| {
660 anyhow!("failed to receive response from analysis queue because the channel has closed")
661 })?
662 }
663
664 pub async fn format_document(&self, document: Url) -> Result<Option<(u32, u32, String)>> {
666 let (tx, rx) = oneshot::channel();
667 self.sender
668 .send(Request::Format(FormatRequest {
669 document,
670 completed: tx,
671 }))
672 .map_err(|_| {
673 anyhow!("failed to send format request to the queue because the channel has closed")
674 })?;
675
676 rx.await.map_err(|_| {
677 anyhow!("failed to send format request to the queue because the channel has closed")
678 })
679 }
680}
681
impl<C> Drop for Analyzer<C> {
    fn drop(&mut self) {
        // Drop the request sender first: closing the channel causes the
        // queue thread's `run` loop to observe a closed receiver and exit.
        // SAFETY: the sender is dropped exactly once here and is never used
        // again, as `drop` runs at most once.
        unsafe { ManuallyDrop::drop(&mut self.sender) };
        // Wait for the queue thread to finish so no analysis work outlives
        // the analyzer.
        if let Some(handle) = self.handle.take() {
            handle.join().unwrap();
        }
    }
}
690
// Compile-time assertion that `Analyzer` is `Send + Sync` (with a
// `Send + Sync` context type) so it can be shared across threads.
const _: () = {
    const fn _assert<T: Send + Sync>() {}
    _assert::<Analyzer<()>>();
};
698
#[cfg(test)]
mod test {
    //! Integration-style tests that drive the analyzer end-to-end against
    //! temporary WDL files on disk.

    use std::fs;

    use tempfile::TempDir;
    use wdl_ast::Severity;

    use super::*;
    use crate::rules;

    #[tokio::test]
    async fn it_returns_empty_results() {
        // Analyzing with no documents added should yield no results.
        let analyzer = Analyzer::new(DiagnosticsConfig::new(rules()), |_: (), _, _, _| async {});
        let results = analyzer.analyze(()).await.unwrap();
        assert!(results.is_empty());
    }

    #[tokio::test]
    async fn it_analyzes_a_document() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let path = dir.path().join("foo.wdl");
        // The task and workflow share the name `test`, which is expected to
        // produce a single name-conflict error diagnostic.
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::new(DiagnosticsConfig::new(rules()), |_: (), _, _, _| async {});
        analyzer
            .add_document(path_to_uri(&path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.diagnostics().len(), 1);
        assert_eq!(results[0].document.diagnostics()[0].rule(), None);
        assert_eq!(
            results[0].document.diagnostics()[0].severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics()[0].message(),
            "conflicting workflow name `test`"
        );

        // Analyzing again without any change should produce the same
        // document (same id) with the same diagnostic.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.id().as_ref(), id.as_ref());
        assert_eq!(results[0].document.diagnostics().len(), 1);
        assert_eq!(results[0].document.diagnostics()[0].rule(), None);
        assert_eq!(
            results[0].document.diagnostics()[0].severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics()[0].message(),
            "conflicting workflow name `test`"
        );
    }

    #[tokio::test]
    async fn it_reanalyzes_a_document_on_change() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let path = dir.path().join("foo.wdl");
        // Initial source with a task/workflow name conflict.
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::new(DiagnosticsConfig::new(rules()), |_: (), _, _, _| async {});
        analyzer
            .add_document(path_to_uri(&path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.diagnostics().len(), 1);
        assert_eq!(results[0].document.diagnostics()[0].rule(), None);
        assert_eq!(
            results[0].document.diagnostics()[0].severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics()[0].message(),
            "conflicting workflow name `test`"
        );

        // Rewrite the file on disk so the conflict is resolved.
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow something_else {
}
"#,
        )
        .expect("failed to create test file");

        let uri = path_to_uri(&path).expect("should convert to URI");
        analyzer.notify_change(uri.clone(), false).unwrap();

        // After the change notification, reanalysis should yield a new
        // document (different id) with no diagnostics.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() != id.as_ref());
        assert_eq!(results[0].document.diagnostics().len(), 0);

        // Analyzing the same document again without changes should keep
        // the same id.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze_document((), uri).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() == id.as_ref());
        assert_eq!(results[0].document.diagnostics().len(), 0);
    }

    #[tokio::test]
    async fn it_reanalyzes_a_document_on_incremental_change() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let path = dir.path().join("foo.wdl");
        // Initial source with a task/workflow name conflict.
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::new(DiagnosticsConfig::new(rules()), |_: (), _, _, _| async {});
        analyzer
            .add_document(path_to_uri(&path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.diagnostics().len(), 1);
        assert_eq!(results[0].document.diagnostics()[0].rule(), None);
        assert_eq!(
            results[0].document.diagnostics()[0].severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics()[0].message(),
            "conflicting workflow name `test`"
        );

        // Rename the workflow via an incremental edit (UTF-8 positions:
        // line 6, characters 9..13 cover the identifier `test`).
        let uri = path_to_uri(&path).expect("should convert to URI");
        analyzer
            .notify_incremental_change(
                uri.clone(),
                IncrementalChange {
                    version: 2,
                    start: None,
                    edits: vec![SourceEdit {
                        range: SourcePosition::new(6, 9)..SourcePosition::new(6, 13),
                        encoding: SourcePositionEncoding::UTF8,
                        text: "something_else".to_string(),
                    }],
                },
            )
            .unwrap();

        // The edited document should reanalyze to a new id with no
        // diagnostics.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze_document((), uri).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() != id.as_ref());
        assert_eq!(results[0].document.diagnostics().len(), 0);
    }

    #[tokio::test]
    async fn it_removes_documents() {
        // Create three sibling WDL files in one directory.
        let dir = TempDir::new().expect("failed to create temporary directory");
        let foo = dir.path().join("foo.wdl");
        fs::write(
            &foo,
            r#"version 1.1
workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let bar = dir.path().join("bar.wdl");
        fs::write(
            &bar,
            r#"version 1.1
workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let baz = dir.path().join("baz.wdl");
        fs::write(
            &baz,
            r#"version 1.1
workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::new(DiagnosticsConfig::new(rules()), |_: (), _, _, _| async {});
        analyzer
            .add_directory(dir.path().to_path_buf())
            .await
            .expect("should add documents");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 3);
        assert!(results[0].document.diagnostics().is_empty());
        assert!(results[1].document.diagnostics().is_empty());
        assert!(results[2].document.diagnostics().is_empty());

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 3);

        // Removing by the directory's URI should remove all three
        // documents.
        analyzer
            .remove_documents(vec![
                path_to_uri(dir.path()).expect("should convert to URI"),
            ])
            .await
            .unwrap();
        let results = analyzer.analyze(()).await.unwrap();
        assert!(results.is_empty());
    }
}