use std::ffi::OsStr;
use std::fmt;
use std::future::Future;
use std::mem::ManuallyDrop;
use std::ops::Range;
use std::path::Path;
use std::path::PathBuf;
use std::path::absolute;
use std::sync::Arc;
use std::thread::JoinHandle;

use anyhow::Context;
use anyhow::Error;
use anyhow::Result;
use anyhow::anyhow;
use anyhow::bail;
use indexmap::IndexSet;
use line_index::LineCol;
use line_index::LineIndex;
use line_index::WideEncoding;
use line_index::WideLineCol;
use path_clean::clean;
use tokio::runtime::Handle;
use tokio::sync::mpsc;
use tokio::sync::oneshot;
use url::Url;
use walkdir::WalkDir;
use wdl_ast::Severity;
use wdl_ast::SyntaxNode;

use crate::Rule;
use crate::SyntaxNodeExt;
use crate::UNNECESSARY_FUNCTION_CALL;
use crate::UNUSED_CALL_RULE_ID;
use crate::UNUSED_DECL_RULE_ID;
use crate::UNUSED_IMPORT_RULE_ID;
use crate::UNUSED_INPUT_RULE_ID;
use crate::document::Document;
use crate::graph::DocumentGraphNode;
use crate::graph::ParseState;
use crate::queue::AddRequest;
use crate::queue::AnalysisQueue;
use crate::queue::AnalyzeRequest;
use crate::queue::FormatRequest;
use crate::queue::NotifyChangeRequest;
use crate::queue::NotifyIncrementalChangeRequest;
use crate::queue::RemoveRequest;
use crate::queue::Request;
use crate::rayon::RayonHandle;
use crate::rules;

/// Represents the kind of progress being reported during analysis.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ProgressKind {
    /// Documents are being parsed.
    Parsing,
    /// Documents are being analyzed.
    Analyzing,
}

impl fmt::Display for ProgressKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Parsing => write!(f, "parsing"),
            Self::Analyzing => write!(f, "analyzing"),
        }
    }
}

/// Converts a local file system path to a `file://` URI.
///
/// Returns `None` if the path cannot be made absolute or cannot be
/// represented as a URI.
pub fn path_to_uri(path: impl AsRef<Path>) -> Option<Url> {
    Url::from_file_path(clean(absolute(path).ok()?)).ok()
}
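
// A minimal usage sketch for `path_to_uri` (the relative path below is
// hypothetical); the path is made absolute and cleaned before conversion, so
// the resulting URI always uses the `file` scheme:
//
//     let uri = path_to_uri("workflows/main.wdl").expect("should convert to URI");
//     assert_eq!(uri.scheme(), "file");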

/// Represents the result of analyzing a WDL document.
#[derive(Debug, Clone)]
pub struct AnalysisResult {
    /// The error that occurred when attempting to parse or analyze the
    /// document, if any.
    error: Option<Arc<Error>>,
    /// The version of the analyzed document, if available.
    version: Option<i32>,
    /// The line index of the document's source, if the document was parsed.
    lines: Option<Arc<LineIndex>>,
    /// The analyzed document.
    document: Document,
}

impl AnalysisResult {
    /// Constructs a new analysis result for the given document graph node.
    pub(crate) fn new(node: &DocumentGraphNode) -> Self {
        if let Some(error) = node.analysis_error() {
            return Self {
                error: Some(error.clone()),
                version: node.parse_state().version(),
                lines: node.parse_state().lines().cloned(),
                document: Document::default_from_uri(node.uri().clone()),
            };
        }

        let (error, version, lines) = match node.parse_state() {
            ParseState::NotParsed => unreachable!("document should have been parsed"),
            ParseState::Error(e) => (Some(e), None, None),
            ParseState::Parsed { version, lines, .. } => (None, *version, Some(lines)),
        };

        Self {
            error: error.cloned(),
            version,
            lines: lines.cloned(),
            document: node
                .document()
                .expect("analysis should have completed")
                .clone(),
        }
    }

    /// Gets the error that occurred when parsing or analyzing the document,
    /// if any.
    pub fn error(&self) -> Option<&Arc<Error>> {
        self.error.as_ref()
    }

    /// Gets the version of the analyzed document, if available.
    pub fn version(&self) -> Option<i32> {
        self.version
    }

    /// Gets the line index of the analyzed document's source, if the
    /// document was parsed.
    pub fn lines(&self) -> Option<&Arc<LineIndex>> {
        self.lines.as_ref()
    }

    /// Gets the analyzed document.
    pub fn document(&self) -> &Document {
        &self.document
    }
}
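
// A minimal sketch of consuming an `AnalysisResult` (assumes `result` came
// from one of the `Analyzer` methods below):
//
//     if let Some(error) = result.error() {
//         eprintln!("analysis failed: {error:#}");
//     } else {
//         for diagnostic in result.document().diagnostics() {
//             eprintln!("{:?}: {}", diagnostic.severity(), diagnostic.message());
//         }
//     }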

/// Represents a position in a source document.
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Default)]
pub struct SourcePosition {
    /// The zero-based line number of the position.
    pub line: u32,
    /// The zero-based character offset of the position within the line; its
    /// interpretation depends on the position encoding.
    pub character: u32,
}

impl SourcePosition {
    /// Constructs a new source position from a line number and character
    /// offset.
    pub fn new(line: u32, character: u32) -> Self {
        Self { line, character }
    }
}

/// Represents the encoding of source positions.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum SourcePositionEncoding {
    /// Positions are UTF-8 byte offsets.
    UTF8,
    /// Positions are UTF-16 code unit offsets, as used by the Language
    /// Server Protocol.
    UTF16,
}

/// Represents an edit to the source of a document.
#[derive(Debug, Clone)]
pub struct SourceEdit {
    /// The range of the edit.
    range: Range<SourcePosition>,
    /// The encoding of the edit's positions.
    encoding: SourcePositionEncoding,
    /// The replacement text.
    text: String,
}

impl SourceEdit {
    /// Constructs a new source edit with the given range, position encoding,
    /// and replacement text.
    pub fn new(
        range: Range<SourcePosition>,
        encoding: SourcePositionEncoding,
        text: impl Into<String>,
    ) -> Self {
        Self {
            range,
            encoding,
            text: text.into(),
        }
    }

    /// Gets the range of the edit.
    pub(crate) fn range(&self) -> Range<SourcePosition> {
        self.range.start..self.range.end
    }

    /// Applies the edit to the given source, using the line index to map
    /// positions to byte offsets.
    pub(crate) fn apply(&self, source: &mut String, lines: &LineIndex) -> Result<()> {
        let (start, end) = match self.encoding {
            SourcePositionEncoding::UTF8 => (
                LineCol {
                    line: self.range.start.line,
                    col: self.range.start.character,
                },
                LineCol {
                    line: self.range.end.line,
                    col: self.range.end.character,
                },
            ),
            SourcePositionEncoding::UTF16 => (
                lines
                    .to_utf8(
                        WideEncoding::Utf16,
                        WideLineCol {
                            line: self.range.start.line,
                            col: self.range.start.character,
                        },
                    )
                    .context("invalid edit start position")?,
                lines
                    .to_utf8(
                        WideEncoding::Utf16,
                        WideLineCol {
                            line: self.range.end.line,
                            col: self.range.end.character,
                        },
                    )
                    .context("invalid edit end position")?,
            ),
        };

        let range: Range<usize> = lines
            .offset(start)
            .context("invalid edit start position")?
            .into()
            ..lines
                .offset(end)
                .context("invalid edit end position")?
                .into();

        if !source.is_char_boundary(range.start) {
            bail!("edit start position is not at a character boundary");
        }

        if !source.is_char_boundary(range.end) {
            bail!("edit end position is not at a character boundary");
        }

        source.replace_range(range, &self.text);
        Ok(())
    }
}
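
// A minimal sketch of constructing and applying an edit (the source text and
// positions are hypothetical; `apply` is crate-internal). The edit replaces
// bytes 5..8 of line 0 (`foo`) with `bar`:
//
//     let mut source = String::from("task foo {}");
//     let lines = LineIndex::new(&source);
//     let edit = SourceEdit::new(
//         SourcePosition::new(0, 5)..SourcePosition::new(0, 8),
//         SourcePositionEncoding::UTF8,
//         "bar",
//     );
//     edit.apply(&mut source, &lines).expect("edit should apply");
//     assert_eq!(source, "task bar {}");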

/// Represents an incremental change to a document.
#[derive(Clone, Debug)]
pub struct IncrementalChange {
    /// The new version of the document.
    pub version: i32,
    /// The full source to start from, if any; when `None`, the edits are
    /// applied to the analyzer's current source for the document.
    pub start: Option<String>,
    /// The edits to apply to the source, in order.
    pub edits: Vec<SourceEdit>,
}

/// Configuration for analysis diagnostics.
#[derive(Debug, Clone, Copy)]
pub struct DiagnosticsConfig {
    /// The severity of "unused import" diagnostics, or `None` to disable
    /// them.
    pub unused_import: Option<Severity>,
    /// The severity of "unused input" diagnostics, or `None` to disable
    /// them.
    pub unused_input: Option<Severity>,
    /// The severity of "unused declaration" diagnostics, or `None` to
    /// disable them.
    pub unused_declaration: Option<Severity>,
    /// The severity of "unused call" diagnostics, or `None` to disable them.
    pub unused_call: Option<Severity>,
    /// The severity of "unnecessary function call" diagnostics, or `None` to
    /// disable them.
    pub unnecessary_function_call: Option<Severity>,
}

impl Default for DiagnosticsConfig {
    fn default() -> Self {
        Self::new(rules())
    }
}

impl DiagnosticsConfig {
    /// Constructs a new diagnostics configuration from a set of rules;
    /// diagnostics for rules absent from the set are disabled.
    pub fn new<T: AsRef<dyn Rule>>(rules: impl IntoIterator<Item = T>) -> Self {
        let mut unused_import = None;
        let mut unused_input = None;
        let mut unused_declaration = None;
        let mut unused_call = None;
        let mut unnecessary_function_call = None;

        for rule in rules {
            let rule = rule.as_ref();
            match rule.id() {
                UNUSED_IMPORT_RULE_ID => unused_import = Some(rule.severity()),
                UNUSED_INPUT_RULE_ID => unused_input = Some(rule.severity()),
                UNUSED_DECL_RULE_ID => unused_declaration = Some(rule.severity()),
                UNUSED_CALL_RULE_ID => unused_call = Some(rule.severity()),
                UNNECESSARY_FUNCTION_CALL => unnecessary_function_call = Some(rule.severity()),
                _ => {}
            }
        }

        Self {
            unused_import,
            unused_input,
            unused_declaration,
            unused_call,
            unnecessary_function_call,
        }
    }

    /// Returns a copy of this configuration with diagnostics disabled for
    /// any rules excepted by the given syntax node.
    pub fn excepted_for_node(mut self, node: &SyntaxNode) -> Self {
        let exceptions = node.rule_exceptions();

        if exceptions.contains(UNUSED_IMPORT_RULE_ID) {
            self.unused_import = None;
        }

        if exceptions.contains(UNUSED_INPUT_RULE_ID) {
            self.unused_input = None;
        }

        if exceptions.contains(UNUSED_DECL_RULE_ID) {
            self.unused_declaration = None;
        }

        if exceptions.contains(UNUSED_CALL_RULE_ID) {
            self.unused_call = None;
        }

        if exceptions.contains(UNNECESSARY_FUNCTION_CALL) {
            self.unnecessary_function_call = None;
        }

        self
    }

    /// Returns a configuration with every diagnostic disabled.
    pub fn except_all() -> Self {
        Self {
            unused_import: None,
            unused_input: None,
            unused_declaration: None,
            unused_call: None,
            unnecessary_function_call: None,
        }
    }
}
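
// A minimal sketch of tailoring the configuration (field names are from the
// struct above; this starts from the defaults and silences unused-input
// diagnostics):
//
//     let config = DiagnosticsConfig {
//         unused_input: None,
//         ..DiagnosticsConfig::default()
//     };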

/// A Workflow Description Language (WDL) document analyzer.
///
/// Requests are processed by an analysis queue running on a dedicated
/// thread; the `Context` type is cloned and passed to the progress callback
/// with each progress notification.
#[derive(Debug)]
pub struct Analyzer<Context> {
    /// The sender for making requests to the analysis queue; wrapped in
    /// `ManuallyDrop` so it can be dropped before joining the queue thread.
    sender: ManuallyDrop<mpsc::UnboundedSender<Request<Context>>>,
    /// The join handle of the analysis queue thread.
    handle: Option<JoinHandle<()>>,
}

impl<Context> Analyzer<Context>
where
    Context: Send + Clone + 'static,
{
    /// Constructs a new analyzer with the given diagnostics configuration
    /// and progress callback, using the default validator.
    ///
    /// The analyzer must be constructed from the context of a Tokio runtime.
    pub fn new<Progress, Return>(config: DiagnosticsConfig, progress: Progress) -> Self
    where
        Progress: Fn(Context, ProgressKind, usize, usize) -> Return + Send + 'static,
        Return: Future<Output = ()>,
    {
        Self::new_with_validator(config, progress, crate::Validator::default)
    }

    /// Constructs a new analyzer with the given diagnostics configuration,
    /// progress callback, and a function that produces the validator to use
    /// for each document.
    ///
    /// The analyzer must be constructed from the context of a Tokio runtime.
    pub fn new_with_validator<Progress, Return, Validator>(
        config: DiagnosticsConfig,
        progress: Progress,
        validator: Validator,
    ) -> Self
    where
        Progress: Fn(Context, ProgressKind, usize, usize) -> Return + Send + 'static,
        Return: Future<Output = ()>,
        Validator: Fn() -> crate::Validator + Send + Sync + 'static,
    {
        let (tx, rx) = mpsc::unbounded_channel();
        let tokio = Handle::current();
        let handle = std::thread::spawn(move || {
            let queue = AnalysisQueue::new(config, tokio, progress, validator);
            queue.run(rx);
        });

        Self {
            sender: ManuallyDrop::new(tx),
            handle: Some(handle),
        }
    }

    /// Adds a document to the analyzer by URI.
    ///
    /// Returns once the document has been added to the analysis queue.
    pub async fn add_document(&self, uri: Url) -> Result<()> {
        let mut documents = IndexSet::new();
        documents.insert(uri);

        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Add(AddRequest {
                documents,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!("failed to send request to analysis queue because the channel has closed")
            })?;

        rx.await.map_err(|_| {
            anyhow!("failed to receive response from analysis queue because the channel has closed")
        })?;

        Ok(())
    }

    /// Recursively adds every WDL document (a file with a `.wdl` extension)
    /// found within the given directory to the analyzer.
    pub async fn add_directory(&self, path: PathBuf) -> Result<()> {
        let documents = RayonHandle::spawn(move || -> Result<IndexSet<Url>> {
            let mut documents = IndexSet::new();

            let metadata = path.metadata().with_context(|| {
                format!(
                    "failed to read metadata for `{path}`",
                    path = path.display()
                )
            })?;

            if metadata.is_file() {
                bail!("`{path}` is a file, not a directory", path = path.display());
            }

            for result in WalkDir::new(&path).follow_links(true) {
                let entry = result.with_context(|| {
                    format!("failed to read directory `{path}`", path = path.display())
                })?;
                if !entry.file_type().is_file()
                    || entry.path().extension().and_then(OsStr::to_str) != Some("wdl")
                {
                    continue;
                }

                documents.insert(path_to_uri(entry.path()).with_context(|| {
                    format!(
                        "failed to convert path `{path}` to a URI",
                        path = entry.path().display()
                    )
                })?);
            }

            Ok(documents)
        })
        .await?;

        if documents.is_empty() {
            return Ok(());
        }

        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Add(AddRequest {
                documents,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!("failed to send request to analysis queue because the channel has closed")
            })?;

        rx.await.map_err(|_| {
            anyhow!("failed to receive response from analysis queue because the channel has closed")
        })?;

        Ok(())
    }
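
    // A minimal usage sketch (the directory path is hypothetical; the walk
    // follows symlinks and skips non-`.wdl` files):
    //
    //     analyzer.add_directory(PathBuf::from("workflows")).await?;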

    /// Removes the given documents from the analyzer.
    pub async fn remove_documents(&self, documents: Vec<Url>) -> Result<()> {
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Remove(RemoveRequest {
                documents,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!("failed to send request to analysis queue because the channel has closed")
            })?;

        rx.await.map_err(|_| {
            anyhow!("failed to receive response from analysis queue because the channel has closed")
        })?;

        Ok(())
    }

    /// Notifies the analyzer that a document has incrementally changed; the
    /// change is applied when the document is next analyzed.
    pub fn notify_incremental_change(
        &self,
        document: Url,
        change: IncrementalChange,
    ) -> Result<()> {
        self.sender
            .send(Request::NotifyIncrementalChange(
                NotifyIncrementalChangeRequest { document, change },
            ))
            .map_err(|_| {
                anyhow!("failed to send request to analysis queue because the channel has closed")
            })
    }

    /// Notifies the analyzer that a document has changed and should be
    /// re-read from its source; if `discard_pending` is `true`, any pending
    /// incremental changes for the document are discarded.
    pub fn notify_change(&self, document: Url, discard_pending: bool) -> Result<()> {
        self.sender
            .send(Request::NotifyChange(NotifyChangeRequest {
                document,
                discard_pending,
            }))
            .map_err(|_| {
                anyhow!("failed to send request to analysis queue because the channel has closed")
            })
    }
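
    // A minimal sketch of the change-notification flow (assumes `analyzer`
    // and a previously added `uri`; passing `false` preserves any pending
    // incremental edits):
    //
    //     analyzer.notify_change(uri.clone(), false)?;
    //     let results = analyzer.analyze_document((), uri).await?;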

    /// Analyzes a specific document.
    ///
    /// The provided context is passed to the analyzer's progress callback.
    ///
    /// Returns an analysis result for each document that was analyzed.
    pub async fn analyze_document(
        &self,
        context: Context,
        document: Url,
    ) -> Result<Vec<AnalysisResult>> {
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Analyze(AnalyzeRequest {
                document: Some(document),
                context,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!("failed to send request to analysis queue because the channel has closed")
            })?;

        rx.await.map_err(|_| {
            anyhow!("failed to receive response from analysis queue because the channel has closed")
        })?
    }

    /// Analyzes all of the documents that have been added to the analyzer.
    ///
    /// The provided context is passed to the analyzer's progress callback.
    ///
    /// Returns an analysis result for every document in the analyzer.
    pub async fn analyze(&self, context: Context) -> Result<Vec<AnalysisResult>> {
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Analyze(AnalyzeRequest {
                document: None,
                context,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!("failed to send request to analysis queue because the channel has closed")
            })?;

        rx.await.map_err(|_| {
            anyhow!("failed to receive response from analysis queue because the channel has closed")
        })?
    }

    /// Requests that the given document be formatted.
    ///
    /// Returns `None` if the document could not be formatted.
    pub async fn format_document(&self, document: Url) -> Result<Option<(u32, u32, String)>> {
        let (tx, rx) = oneshot::channel();
        self.sender
            .send(Request::Format(FormatRequest {
                document,
                completed: tx,
            }))
            .map_err(|_| {
                anyhow!("failed to send format request to the queue because the channel has closed")
            })?;

        rx.await.map_err(|_| {
            anyhow!("failed to receive a format response from the queue because the channel has closed")
        })
    }
}

impl Default for Analyzer<()> {
    fn default() -> Self {
        Self::new(DiagnosticsConfig::default(), |_, _, _, _| async {})
    }
}
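
// A minimal end-to-end sketch (mirrors the tests below; `main.wdl` is a
// hypothetical path, and `?` assumes an `anyhow::Result` context inside a
// Tokio runtime):
//
//     let analyzer = Analyzer::new(DiagnosticsConfig::default(), |_: (), _, _, _| async {});
//     let uri = path_to_uri("main.wdl").context("path should convert to a URI")?;
//     analyzer.add_document(uri).await?;
//     for result in analyzer.analyze(()).await? {
//         println!("{} diagnostic(s)", result.document().diagnostics().len());
//     }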

impl<C> Drop for Analyzer<C> {
    fn drop(&mut self) {
        // Drop the sender first so that the queue thread's request loop
        // terminates, then join the thread to ensure a clean shutdown.
        unsafe { ManuallyDrop::drop(&mut self.sender) };
        if let Some(handle) = self.handle.take() {
            handle.join().unwrap();
        }
    }
}

// Statically assert that the analyzer is `Send + Sync`.
const _: () = {
    const fn _assert<T: Send + Sync>() {}
    _assert::<Analyzer<()>>();
};

#[cfg(test)]
mod test {
    use std::fs;

    use tempfile::TempDir;
    use wdl_ast::Severity;

    use super::*;
    use crate::rules;

    #[tokio::test]
    async fn it_returns_empty_results() {
        let analyzer = Analyzer::new(DiagnosticsConfig::new(rules()), |_: (), _, _, _| async {});
        let results = analyzer.analyze(()).await.unwrap();
        assert!(results.is_empty());
    }

    #[tokio::test]
    async fn it_analyzes_a_document() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let path = dir.path().join("foo.wdl");
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::new(DiagnosticsConfig::new(rules()), |_: (), _, _, _| async {});
        analyzer
            .add_document(path_to_uri(&path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.diagnostics().len(), 1);
        assert_eq!(results[0].document.diagnostics()[0].rule(), None);
        assert_eq!(
            results[0].document.diagnostics()[0].severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics()[0].message(),
            "conflicting workflow name `test`"
        );

        // Analyzing again should return the same (unchanged) document.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.id().as_ref(), id.as_ref());
        assert_eq!(results[0].document.diagnostics().len(), 1);
        assert_eq!(results[0].document.diagnostics()[0].rule(), None);
        assert_eq!(
            results[0].document.diagnostics()[0].severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics()[0].message(),
            "conflicting workflow name `test`"
        );
    }

    #[tokio::test]
    async fn it_reanalyzes_a_document_on_change() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let path = dir.path().join("foo.wdl");
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::new(DiagnosticsConfig::new(rules()), |_: (), _, _, _| async {});
        analyzer
            .add_document(path_to_uri(&path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.diagnostics().len(), 1);
        assert_eq!(results[0].document.diagnostics()[0].rule(), None);
        assert_eq!(
            results[0].document.diagnostics()[0].severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics()[0].message(),
            "conflicting workflow name `test`"
        );

        // Rename the conflicting workflow and write the new source to disk.
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow something_else {
}
"#,
        )
        .expect("failed to create test file");

        let uri = path_to_uri(&path).expect("should convert to URI");
        analyzer.notify_change(uri.clone(), false).unwrap();

        // The document should have been reanalyzed with a new identifier
        // and no diagnostics.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() != id.as_ref());
        assert_eq!(results[0].document.diagnostics().len(), 0);

        // Analyzing the document directly should not change it again.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze_document((), uri).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() == id.as_ref());
        assert_eq!(results[0].document.diagnostics().len(), 0);
    }

    #[tokio::test]
    async fn it_reanalyzes_a_document_on_incremental_change() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let path = dir.path().join("foo.wdl");
        fs::write(
            &path,
            r#"version 1.1

task test {
    command <<<>>>
}

workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::new(DiagnosticsConfig::new(rules()), |_: (), _, _, _| async {});
        analyzer
            .add_document(path_to_uri(&path).expect("should convert to URI"))
            .await
            .expect("should add document");

        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].document.diagnostics().len(), 1);
        assert_eq!(results[0].document.diagnostics()[0].rule(), None);
        assert_eq!(
            results[0].document.diagnostics()[0].severity(),
            Severity::Error
        );
        assert_eq!(
            results[0].document.diagnostics()[0].message(),
            "conflicting workflow name `test`"
        );

        // Rename the conflicting workflow via an incremental edit.
        let uri = path_to_uri(&path).expect("should convert to URI");
        analyzer
            .notify_incremental_change(
                uri.clone(),
                IncrementalChange {
                    version: 2,
                    start: None,
                    edits: vec![SourceEdit {
                        range: SourcePosition::new(6, 9)..SourcePosition::new(6, 13),
                        encoding: SourcePositionEncoding::UTF8,
                        text: "something_else".to_string(),
                    }],
                },
            )
            .unwrap();

        // The document should have been reanalyzed with a new identifier
        // and no diagnostics.
        let id = results[0].document.id().clone();
        let results = analyzer.analyze_document((), uri).await.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].document.id().as_ref() != id.as_ref());
        assert_eq!(results[0].document.diagnostics().len(), 0);
    }

    #[tokio::test]
    async fn it_removes_documents() {
        let dir = TempDir::new().expect("failed to create temporary directory");
        let foo = dir.path().join("foo.wdl");
        fs::write(
            &foo,
            r#"version 1.1
workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let bar = dir.path().join("bar.wdl");
        fs::write(
            &bar,
            r#"version 1.1
workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let baz = dir.path().join("baz.wdl");
        fs::write(
            &baz,
            r#"version 1.1
workflow test {
}
"#,
        )
        .expect("failed to create test file");

        let analyzer = Analyzer::new(DiagnosticsConfig::new(rules()), |_: (), _, _, _| async {});
        analyzer
            .add_directory(dir.path().to_path_buf())
            .await
            .expect("should add documents");

        // Analyze the added documents.
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 3);
        assert!(results[0].document.diagnostics().is_empty());
        assert!(results[1].document.diagnostics().is_empty());
        assert!(results[2].document.diagnostics().is_empty());

        // Analyzing again should return the same number of results.
        let results = analyzer.analyze(()).await.unwrap();
        assert_eq!(results.len(), 3);

        // Remove the documents by their parent directory's URI.
        analyzer
            .remove_documents(vec![
                path_to_uri(dir.path()).expect("should convert to URI"),
            ])
            .await
            .unwrap();
        let results = analyzer.analyze(()).await.unwrap();
        assert!(results.is_empty());
    }
}