// doc_chunks/cluster.rs

//! Cluster `proc_macro2::Literal`s into `LiteralSets`
2
3use syn::spanned::Spanned;
4use syn::LitStr;
5use syn::Macro;
6use syn::Token;
7
8use super::{LiteralSet, TokenTree, TrimmedLiteral};
9use crate::developer::extract_developer_comments;
10
11use crate::errors::*;
12use crate::Span;
13
/// Custom keyword so the parser can match the literal identifier `doc`
/// inside a `#[doc = ...]` attribute's token stream.
mod kw {
    syn::custom_keyword!(doc);
}
17
/// Right-hand side of a `doc = ...` attribute: either a plain string
/// literal (`#[doc = "..."]`) or a macro invocation (`#[doc = concat!(..)]`).
enum DocContent {
    LitStr(LitStr),
    Macro(Macro),
}
22impl DocContent {
23    fn span(&self) -> proc_macro2::Span {
24        match self {
25            Self::LitStr(inner) => inner.span(),
26            Self::Macro(inner) => inner.span(),
27        }
28    }
29}
30
/// A parsed `doc = <content>` attribute body, i.e. what a `///` comment
/// desugars to inside `#[...]`. Only `content` is consumed downstream;
/// the keyword and `=` tokens are kept for span completeness.
struct DocComment {
    #[allow(dead_code)]
    doc: kw::doc,
    #[allow(dead_code)]
    eq_token: Token![=],
    content: DocContent,
}
38
39impl syn::parse::Parse for DocComment {
40    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
41        let doc = input.parse::<kw::doc>()?;
42        let eq_token: Token![=] = input.parse()?;
43
44        let lookahead = input.lookahead1();
45        let content = if lookahead.peek(LitStr) {
46            input.parse().map(DocContent::LitStr)?
47        } else {
48            input.parse().map(DocContent::Macro)?
49        };
50        Ok(Self {
51            doc,
52            eq_token,
53            content,
54        })
55    }
56}
57
/// Cluster comments together, such they appear as continuous text blocks.
#[derive(Debug)]
pub struct Clusters {
    // One `LiteralSet` per continuous comment block found in the source.
    pub(crate) set: Vec<LiteralSet>,
}
63
64impl Clusters {
65    /// Only works if the file is processed line by line, otherwise requires a
66    /// adjacency list.
67    fn process_literal(&mut self, source: &str, comment: DocComment) -> Result<()> {
68        let span = Span::from(comment.content.span());
69        let trimmed_literal = match comment.content {
70            DocContent::LitStr(_s) => TrimmedLiteral::load_from(source, span)?,
71            DocContent::Macro(_) => {
72                TrimmedLiteral::new_empty(source, span, crate::CommentVariant::MacroDocEqMacro)
73            }
74        };
75        if let Some(cls) = self.set.last_mut() {
76            if let Err(trimmed_literal) = cls.add_adjacent(trimmed_literal) {
77                log::trace!(target: "documentation",
78                    "appending, but failed to append: {trimmed_literal:?} to set {cls:?}",
79                );
80                self.set.push(LiteralSet::from(trimmed_literal))
81            } else {
82                log::trace!("successfully appended to existing: {cls:?} to set");
83            }
84        } else {
85            self.set.push(LiteralSet::from(trimmed_literal));
86        }
87        Ok(())
88    }
89
90    /// Helper function to parse a stream and associate the found literals.
91    pub fn parse_token_tree(
92        &mut self,
93        source: &str,
94        stream: proc_macro2::TokenStream,
95    ) -> Result<()> {
96        let iter = stream.into_iter();
97        for tree in iter {
98            if let TokenTree::Group(group) = tree {
99                if let Ok(comment) = syn::parse2::<DocComment>(group.stream()) {
100                    if let Err(e) = self.process_literal(source, comment) {
101                        log::error!("BUG: Failed to guarantee literal content/span integrity: {e}");
102                        continue;
103                    }
104                } else {
105                    self.parse_token_tree(source, group.stream())?;
106                }
107            };
108        }
109        Ok(())
110    }
111
112    /// From the given source text, extracts developer comments to `LiteralSet`s
113    /// and adds them to this `Clusters`
114    fn parse_developer_comments(&mut self, source: &str) {
115        let developer_comments = extract_developer_comments(source);
116        self.set.extend(developer_comments);
117    }
118
119    /// Sort the `LiteralSet`s in this `Cluster` by start line descending, to
120    /// ensure that the comments higher up in the source file appear first to
121    /// the user
122    fn ensure_sorted(&mut self) {
123        self.set.sort_by(|ls1, ls2| ls1.coverage.cmp(&ls2.coverage));
124    }
125
126    /// Load clusters from a `&str`. Optionally loads developer comments as
127    /// well.
128    pub fn load_from_str(source: &str, doc_comments: bool, dev_comments: bool) -> Result<Self> {
129        let mut chunk = Self {
130            set: Vec::with_capacity(64),
131        };
132        if doc_comments {
133            let stream =
134                syn::parse_str::<proc_macro2::TokenStream>(source).map_err(Error::ParserFailure)?;
135            chunk.parse_token_tree(source, stream)?;
136        }
137        if dev_comments {
138            chunk.parse_developer_comments(source);
139        }
140        chunk.ensure_sorted();
141        Ok(chunk)
142    }
143}
144
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn doc_comment_parse() {
        // Both literal and macro right-hand sides must parse, at any
        // raw-string hash depth.
        let cases = [
            r########"doc=foo!(bar!(xxx))"########,
            r########"doc="s""########,
            r########"doc=r#"s"#"########,
            r########"doc=r##"s"##"########,
            r########"doc=r###"s"###"########,
            r########"doc=r####"s"####"########,
        ];
        for case in cases {
            let _ = syn::parse_str::<DocComment>(case).unwrap();
        }
    }

    #[test]
    fn create_cluster() {
        // Adjacent doc lines — including `doc = macro!` forms — must
        // collapse into a single cluster.
        static SOURCE: &str = r#####"
mod mm_mm {

/// A
#[doc=foo!(B)]
/// C
#[doc=r##"D"##]
struct X;

}
"#####;
        let clusters = Clusters::load_from_str(SOURCE, true, true).unwrap();
        assert_eq!(clusters.set.len(), 1);
        dbg!(&clusters.set[0]);
    }

    #[test]
    fn space_in_code_block_does_not_break_cluster() {
        // An empty `//` line inside a fenced code block must not split
        // the surrounding developer-comment cluster.
        static SOURCE: &str = r#####"
// ```c
// hugloboi
//
// fucksteufelswuid
// ```
struct DefinitelyNotZ;
"#####;
        let clusters = Clusters::load_from_str(SOURCE, true, true).unwrap();
        assert_eq!(clusters.set.len(), 1);
        dbg!(&clusters.set[0]);
    }

    #[test]
    fn polite() {
        // Blank `//` separators within a comment run stay in one cluster.
        static SOURCE: &str = r#####"
// Hello Sir
//
// How are you doing today?
struct VeryWellThanks;
"#####;
        let clusters = Clusters::load_from_str(SOURCE, true, true).unwrap();
        assert_eq!(clusters.set.len(), 1);
        dbg!(&clusters.set[0]);
    }
}