document_features/
lib.rs

// Copyright © SixtyFPS GmbH <info@sixtyfps.io>
// SPDX-License-Identifier: MIT OR Apache-2.0

/*!
Document your crate's feature flags.

This crate provides a macro that extracts "documentation" comments from Cargo.toml.

To use this crate, add `#![doc = document_features::document_features!()]` in your crate documentation.
The `document_features!()` macro reads your `Cargo.toml` file, extracts feature comments and generates
a markdown string for your documentation.

Basic example:

```rust
//! Normal crate documentation goes here.
//!
//! ## Feature flags
#![doc = document_features::document_features!()]

// rest of the crate goes here.
```

## Documentation format:

The documentation of your crate features goes into `Cargo.toml`, where they are defined.

The `document_features!()` macro analyzes the contents of `Cargo.toml`.
Similar to Rust's documentation comments `///` and `//!`, the macro understands
comments that start with `## ` and `#! `. Note the required trailing space.
Lines starting with `###` will not be understood as doc comments.

`## ` comments are meant to be *above* the feature they document.
There can be several `## ` comments, but they must always be followed by a
feature name or an optional dependency.
There should not be `#! ` comments between the comment and the feature they document.

`#! ` comments are not associated with a particular feature, and will be printed
where they occur. Use them to group features, for example.

## Examples:

*/
#![doc = self_test!(/**
[package]
name = "..."
# ...

[features]
default = ["foo"]
#! This comment goes on top

## The foo feature enables the `foo` functions
foo = []

## The bar feature enables the bar module
bar = []

#! ### Experimental features
#! The following features are experimental

## Enable the fusion reactor
##
## ⚠️ Can lead to explosions
fusion = []

[dependencies]
document-features = "0.2"

#! ### Optional dependencies

## Enable this feature to implement the trait for the types from the genial crate
genial = { version = "0.2", optional = true }

## This awesome dependency is specified in its own table
[dependencies.awesome]
version = "1.3.5"
optional = true
*/
=>
    /**
This comment goes on top
* **`foo`** *(enabled by default)* —  The foo feature enables the `foo` functions
* **`bar`** —  The bar feature enables the bar module

#### Experimental features
The following features are experimental
* **`fusion`** —  Enable the fusion reactor

  ⚠️ Can lead to explosions

#### Optional dependencies
* **`genial`** —  Enable this feature to implement the trait for the types from the genial crate
* **`awesome`** —  This awesome dependency is specified in its own table
*/
)]
/*!

## Customization

You can customize the formatting of the features in the generated documentation by setting
the key **`feature_label=`** to a given format string. This format string must be either
a [string literal](https://doc.rust-lang.org/reference/tokens.html#string-literals) or
a [raw string literal](https://doc.rust-lang.org/reference/tokens.html#raw-string-literals).
Every occurrence of `{feature}` inside the format string will be substituted with the name of the feature.

For instance, to emulate the HTML formatting used by `rustdoc` one can use the following:

```rust
#![doc = document_features::document_features!(feature_label = r#"<span class="stab portability"><code>{feature}</code></span>"#)]
```

The default formatting is equivalent to:

```rust
#![doc = document_features::document_features!(feature_label = "**`{feature}`**")]
```

## Compatibility

The minimum Rust version required to use this crate is Rust 1.56.
You can make this crate optional and use `#[cfg_attr()]` statements to enable it only when building the documentation.
You need two levels of `cfg_attr` because Rust < 1.54 doesn't parse the attribute otherwise.

```rust,ignore
#![cfg_attr(
    feature = "document-features",
    cfg_attr(doc, doc = ::document_features::document_features!())
)]
```

In your Cargo.toml, enable this feature while generating the documentation on docs.rs:

```toml
[dependencies]
document-features = { version = "0.2", optional = true }

[package.metadata.docs.rs]
features = ["document-features"]
## Alternative: enable all features so they are all documented
## all-features = true
```
 */

#[cfg(not(feature = "default"))]
compile_error!(
    "The feature `default` must be enabled to ensure \
    forward compatibility with future versions of this crate"
);

extern crate proc_macro;

use proc_macro::{TokenStream, TokenTree};
use std::borrow::Cow;
use std::collections::{HashMap, HashSet};
use std::convert::TryFrom;
use std::fmt::Write;
use std::path::Path;
use std::str::FromStr;

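/// Build a token stream that expands to `::core::compile_error!` with the given message.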
fn error(e: &str) -> TokenStream {
    TokenStream::from_str(&format!("::core::compile_error!{{\"{}\"}}", e.escape_default())).unwrap()
}

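/// Like [`error`], but the diagnostic is spanned to `tt` (or to the call site when `tt` is `None`)
/// so that the error points at the offending token.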
fn compile_error(msg: &str, tt: Option<TokenTree>) -> TokenStream {
    let span = tt.as_ref().map_or_else(proc_macro::Span::call_site, TokenTree::span);
    use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing};
    use std::iter::FromIterator;
    TokenStream::from_iter(vec![
        TokenTree::Ident(Ident::new("compile_error", span)),
        TokenTree::Punct({
            let mut punct = Punct::new('!', Spacing::Alone);
            punct.set_span(span);
            punct
        }),
        TokenTree::Group({
            let mut group = Group::new(Delimiter::Brace, {
                TokenStream::from_iter([TokenTree::Literal({
                    let mut string = Literal::string(msg);
                    string.set_span(span);
                    string
                })])
            });
            group.set_span(span);
            group
        }),
    ])
}

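/// Parsed arguments of the `document_features!()` macro invocation.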
#[derive(Default)]
struct Args {
    feature_label: Option<String>,
}

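/// Parse the optional `feature_label = "..."` argument of `document_features!()`.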
fn parse_args(input: TokenStream) -> Result<Args, TokenStream> {
    let mut token_trees = input.into_iter().fuse();

    // parse the key, ensuring that it is the identifier `feature_label`
    match token_trees.next() {
        None => return Ok(Args::default()),
        Some(TokenTree::Ident(ident)) if ident.to_string() == "feature_label" => (),
        tt => return Err(compile_error("expected `feature_label`", tt)),
    }

    // parse a single equal sign `=`
    match token_trees.next() {
        Some(TokenTree::Punct(p)) if p.as_char() == '=' => (),
        tt => return Err(compile_error("expected `=`", tt)),
    }

    // parse the value, ensuring that it is a string literal containing the substring `"{feature}"`
    let feature_label;
    if let Some(tt) = token_trees.next() {
        match litrs::StringLit::<String>::try_from(&tt) {
            Ok(string_lit) if string_lit.value().contains("{feature}") => {
                feature_label = string_lit.into_value()
            }
            _ => {
                return Err(compile_error(
                    "expected a string literal containing the substring \"{feature}\"",
                    Some(tt),
                ))
            }
        }
    } else {
        return Err(compile_error(
            "expected a string literal containing the substring \"{feature}\"",
            None,
        ));
    }

    // ensure there is nothing left after the format string
    if let tt @ Some(_) = token_trees.next() {
        return Err(compile_error("unexpected token after the format string", tt));
    }

    Ok(Args { feature_label: Some(feature_label) })
}

/// Produce a literal string containing documentation extracted from Cargo.toml
///
/// See the [crate] documentation for details
#[proc_macro]
pub fn document_features(tokens: TokenStream) -> TokenStream {
    parse_args(tokens)
        .and_then(|args| document_features_impl(&args))
        .unwrap_or_else(std::convert::identity)
}

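/// Read Cargo.toml from `CARGO_MANIFEST_DIR` and convert its feature documentation into a string literal.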
fn document_features_impl(args: &Args) -> Result<TokenStream, TokenStream> {
    let path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
    let mut cargo_toml = std::fs::read_to_string(Path::new(&path).join("Cargo.toml"))
        .map_err(|e| error(&format!("Can't open Cargo.toml: {:?}", e)))?;

    if !has_doc_comments(&cargo_toml) {
        // On crates.io, Cargo.toml is usually "normalized" and stripped of all comments.
        // The original Cargo.toml has been renamed Cargo.toml.orig
        if let Ok(orig) = std::fs::read_to_string(Path::new(&path).join("Cargo.toml.orig")) {
            if has_doc_comments(&orig) {
                cargo_toml = orig;
            }
        }
    }

    let result = process_toml(&cargo_toml, args).map_err(|e| error(&e))?;
    Ok(std::iter::once(proc_macro::TokenTree::from(proc_macro::Literal::string(&result))).collect())
}

/// Check if the Cargo.toml has comments that look like doc comments.
fn has_doc_comments(cargo_toml: &str) -> bool {
    let mut lines = cargo_toml.lines().map(str::trim);
    while let Some(line) = lines.next() {
        if line.starts_with("## ") || line.starts_with("#! ") {
            return true;
        }
        let before_comment = line.split_once('#').map_or(line, |(before, _)| before);
        if line.starts_with("#") {
            continue;
        }
        if let Some((_, mut quote)) = before_comment.split_once("\"\"\"") {
            loop {
                // skip escaped characters.
                if let Some((_, s)) = quote.split_once('\\') {
                    quote = s.strip_prefix('\\').or_else(|| s.strip_prefix('"')).unwrap_or(s);
                    continue;
                }
                // skip quotes.
                if let Some((_, out_quote)) = quote.split_once("\"\"\"") {
                    let out_quote = out_quote.trim_start_matches('"');
                    let out_quote =
                        out_quote.split_once('#').map_or(out_quote, |(before, _)| before);
                    if let Some((_, q)) = out_quote.split_once("\"\"\"") {
                        quote = q;
                        continue;
                    }
                    break;
                };
                match lines.next() {
                    Some(l) => quote = l,
                    None => return false,
                }
            }
        }
    }
    false
}

#[test]
fn test_has_doc_comments() {
    assert!(has_doc_comments("foo\nbar\n## comment\nddd"));
    assert!(!has_doc_comments("foo\nbar\n#comment\nddd"));
    assert!(!has_doc_comments(
        r#"
[[package.metadata.release.pre-release-replacements]]
exactly = 1 # not a doc comment
file = "CHANGELOG.md"
replace = """
<!-- next-header -->
## [Unreleased] - ReleaseDate
"""
search = "<!-- next-header -->"
array = ["""foo""", """
bar""", """eee
## not a comment
"""]
    "#
    ));
    assert!(has_doc_comments(
        r#"
[[package.metadata.release.pre-release-replacements]]
exactly = 1 # """
file = "CHANGELOG.md"
replace = """
<!-- next-header -->
## [Unreleased] - ReleaseDate
"""
search = "<!-- next-header -->"
array = ["""foo""", """
bar""", """eee
## not a comment
"""]
## This is a comment
feature = "45"
        "#
    ));

    assert!(!has_doc_comments(
        r#"
[[package.metadata.release.pre-release-replacements]]
value = """" string \"""
## within the string
\""""
another_string = """"" # """
## also within"""
"#
    ));

    assert!(has_doc_comments(
        r#"
[[package.metadata.release.pre-release-replacements]]
value = """" string \"""
## within the string
\""""
another_string = """"" # """
## also within"""
## out of the string
foo = bar
        "#
    ));
}

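/// Recursively collect into `collected` the features enabled, directly or transitively, by `feature`.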
fn dependents(
    feature_dependencies: &HashMap<String, Vec<String>>,
    feature: &str,
    collected: &mut HashSet<String>,
) {
    if collected.contains(feature) {
        return;
    }
    collected.insert(feature.to_string());
    if let Some(dependencies) = feature_dependencies.get(feature) {
        for dependency in dependencies {
            dependents(feature_dependencies, dependency, collected);
        }
    }
}

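/// Parse a feature list such as `["foo", "bar"]` into an iterator over the listed names.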
fn parse_feature_deps<'a>(
    s: &'a str,
    dep: &str,
) -> Result<impl Iterator<Item = String> + 'a, String> {
    Ok(s.trim()
        .strip_prefix('[')
        .and_then(|r| r.strip_suffix(']'))
        .ok_or_else(|| format!("Parse error while parsing dependency {}", dep))?
        .split(',')
        .map(|d| d.trim().trim_matches(|c| c == '"' || c == '\'').trim().to_string())
        .filter(|d: &String| !d.is_empty()))
}

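/// Extract the documented features from the contents of Cargo.toml and render them as markdown.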
fn process_toml(cargo_toml: &str, args: &Args) -> Result<String, String> {
    // Get all the lines of Cargo.toml
    let mut lines = cargo_toml
        .lines()
        .map(str::trim)
        // and skip empty lines and comments that are not doc comments
        .filter(|l| {
            !l.is_empty() && (!l.starts_with('#') || l.starts_with("##") || l.starts_with("#!"))
        });
    let mut top_comment = String::new();
    let mut current_comment = String::new();
    let mut features = vec![];
    let mut default_features = HashSet::new();
    let mut current_table = "";
    let mut dependencies = HashMap::new();
    while let Some(line) = lines.next() {
        if let Some(x) = line.strip_prefix("#!") {
            if !x.is_empty() && !x.starts_with(' ') {
                continue; // it's not a doc comment
            }
            if !current_comment.is_empty() {
                return Err("Cannot mix ## and #! comments between features.".into());
            }
            if top_comment.is_empty() && !features.is_empty() {
                top_comment = "\n".into();
            }
            writeln!(top_comment, "{}", x).unwrap();
        } else if let Some(x) = line.strip_prefix("##") {
            if !x.is_empty() && !x.starts_with(' ') {
                continue; // it's not a doc comment
            }
            writeln!(current_comment, " {}", x).unwrap();
        } else if let Some(table) = line.strip_prefix('[') {
            current_table = table
                .split_once(']')
                .map(|(t, _)| t.trim())
                .ok_or_else(|| format!("Parse error while parsing line: {}", line))?;
            if !current_comment.is_empty() {
                #[allow(clippy::unnecessary_lazy_evaluations)]
                let dep = current_table
                    .rsplit_once('.')
                    .and_then(|(table, dep)| table.trim().ends_with("dependencies").then(|| dep))
                    .ok_or_else(|| format!("Not a feature: `{}`", line))?;
                features.push((
                    dep.trim(),
                    std::mem::take(&mut top_comment),
                    std::mem::take(&mut current_comment),
                ));
            }
        } else if let Some((dep, rest)) = line.split_once('=') {
            let dep = dep.trim().trim_matches('"');
            let rest = get_balanced(rest, &mut lines)
                .map_err(|e| format!("Parse error while parsing value {}: {}", dep, e))?;
            if current_table == "features" {
                if dep == "default" {
                    default_features.extend(parse_feature_deps(&rest, dep)?);
                } else {
                    for d in parse_feature_deps(&rest, dep)? {
                        dependencies
                            .entry(dep.to_string())
                            .or_insert_with(Vec::new)
                            .push(d.clone());
                    }
                }
            }
            if !current_comment.is_empty() {
                if current_table.ends_with("dependencies") {
                    if !rest
                        .split_once("optional")
                        .and_then(|(_, r)| r.trim().strip_prefix('='))
                        .map_or(false, |r| r.trim().starts_with("true"))
                    {
                        return Err(format!("Dependency {} is not an optional dependency", dep));
                    }
                } else if current_table != "features" {
                    return Err(format!(
                        r#"Comment cannot be associated with a feature: "{}""#,
                        current_comment.trim()
                    ));
                }
                features.push((
                    dep,
                    std::mem::take(&mut top_comment),
                    std::mem::take(&mut current_comment),
                ));
            }
        }
    }
    let df = default_features.iter().cloned().collect::<Vec<_>>();
    for feature in df {
        let mut resolved = HashSet::new();
        dependents(&dependencies, &feature, &mut resolved);
        default_features.extend(resolved.into_iter());
    }
    if !current_comment.is_empty() {
        return Err("Found comment not associated with a feature".into());
    }
    if features.is_empty() {
        return Ok("*No documented features in Cargo.toml*".into());
    }
    let mut result = String::new();
    for (f, top, comment) in features {
        let default = if default_features.contains(f) { " *(enabled by default)*" } else { "" };
        let feature_label = args.feature_label.as_deref().unwrap_or("**`{feature}`**");
        let comment = if comment.trim().is_empty() {
            String::new()
        } else {
            format!(" —{}", comment.trim_end())
        };

        writeln!(
            result,
            "{}* {}{}{}",
            top,
            feature_label.replace("{feature}", f),
            default,
            comment,
        )
        .unwrap();
    }
    result += &top_comment;
    Ok(result)
}

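/// Collect a (possibly multi-line) TOML value, pulling extra lines from `lines` until all brackets
/// and braces are balanced. Brackets inside strings are not counted and `#` comments are stripped.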
fn get_balanced<'a>(
    first_line: &'a str,
    lines: &mut impl Iterator<Item = &'a str>,
) -> Result<Cow<'a, str>, String> {
    let mut line = first_line;
    let mut result = Cow::from("");

    let mut in_quote = false;
    let mut level = 0;
    loop {
        let mut last_slash = false;
        for (idx, b) in line.as_bytes().iter().enumerate() {
            if last_slash {
                last_slash = false
            } else if in_quote {
                match b {
                    b'\\' => last_slash = true,
                    b'"' | b'\'' => in_quote = false,
                    _ => (),
                }
            } else {
                match b {
                    b'\\' => last_slash = true,
                    b'"' => in_quote = true,
                    b'{' | b'[' => level += 1,
                    b'}' | b']' if level == 0 => return Err("unbalanced source".into()),
                    b'}' | b']' => level -= 1,
                    b'#' => {
                        line = &line[..idx];
                        break;
                    }
                    _ => (),
                }
            }
        }
        if result.len() == 0 {
            result = Cow::from(line);
        } else {
            *result.to_mut() += line;
        }
        if level == 0 {
            return Ok(result);
        }
        line = if let Some(l) = lines.next() {
            l
        } else {
            return Err("unbalanced source".into());
        };
    }
}

#[test]
fn test_get_balanced() {
    assert_eq!(
        get_balanced(
            "{",
            &mut IntoIterator::into_iter(["a", "{ abc[], #ignore", " def }", "}", "xxx"])
        ),
        Ok("{a{ abc[],  def }}".into())
    );
    assert_eq!(
        get_balanced("{ foo = \"{#\" } #ignore", &mut IntoIterator::into_iter(["xxx"])),
        Ok("{ foo = \"{#\" } ".into())
    );
    assert_eq!(
        get_balanced("]", &mut IntoIterator::into_iter(["["])),
        Err("unbalanced source".into())
    );
}

#[cfg(feature = "self-test")]
#[proc_macro]
#[doc(hidden)]
/// Helper macro for the tests. Do not use.
pub fn self_test_helper(input: TokenStream) -> TokenStream {
    let mut code = String::new();
    for line in (&input).to_string().trim_matches(|c| c == '"' || c == '#').lines() {
        // Rustdoc removes lines that start with `# ` and strips one `#` from lines that start
        // with `##` followed by a space. We need to re-add the `#` that rustdoc removed to get the original.
        if line.strip_prefix('#').map_or(false, |x| x.is_empty() || x.starts_with(' ')) {
            code += "#";
        }
        code += line;
        code += "\n";
    }
    process_toml(&code, &Args::default()).map_or_else(
        |e| error(&e),
        |r| std::iter::once(proc_macro::TokenTree::from(proc_macro::Literal::string(&r))).collect(),
    )
}

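// With the `self-test` feature enabled, the example in the crate documentation becomes a doctest
// that runs the Cargo.toml snippet through `self_test_helper!` and compares the normalized result
// with the expected markdown.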
#[cfg(feature = "self-test")]
macro_rules! self_test {
    (#[doc = $toml:literal] => #[doc = $md:literal]) => {
        concat!(
            "\n`````rust\n\
            fn normalize_md(md : &str) -> String {
               md.lines().skip_while(|l| l.is_empty()).map(|l| l.trim())
                .collect::<Vec<_>>().join(\"\\n\")
            }
            assert_eq!(normalize_md(document_features::self_test_helper!(",
            stringify!($toml),
            ")), normalize_md(",
            stringify!($md),
            "));\n`````\n\n"
        )
    };
}

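// Without the `self-test` feature, the same example is only rendered in the documentation:
// the Cargo.toml snippet followed by a preview of the generated markdown.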
#[cfg(not(feature = "self-test"))]
macro_rules! self_test {
    (#[doc = $toml:literal] => #[doc = $md:literal]) => {
        concat!(
            "This content in Cargo.toml:\n`````toml",
            $toml,
            "\n`````\n Generates the following:\n\
            <table><tr><th>Preview</th></tr><tr><td>\n\n",
            $md,
            "\n</td></tr></table>\n\n&nbsp;\n",
        )
    };
}

#[allow(unused)] // Workaround until https://github.com/rust-lang/rust/pull/147914 is merged
use self_test;

// The following struct is inserted only during generation of the documentation in order to exploit doc-tests.
// These doc-tests are used to check that invalid arguments to the `document_features!` macro cause a compile time error.
// For a more principled way of testing compilation errors, maybe investigate <https://docs.rs/trybuild>.
//
/// ```rust
/// #![doc = document_features::document_features!()]
/// #![doc = document_features::document_features!(feature_label = "**`{feature}`**")]
/// #![doc = document_features::document_features!(feature_label = r"**`{feature}`**")]
/// #![doc = document_features::document_features!(feature_label = r#"**`{feature}`**"#)]
/// #![doc = document_features::document_features!(feature_label = "<span class=\"stab portability\"><code>{feature}</code></span>")]
/// #![doc = document_features::document_features!(feature_label = r#"<span class="stab portability"><code>{feature}</code></span>"#)]
/// ```
/// ```compile_fail
/// #![doc = document_features::document_features!(feature_label > "<span>{feature}</span>")]
/// ```
/// ```compile_fail
/// #![doc = document_features::document_features!(label = "<span>{feature}</span>")]
/// ```
/// ```compile_fail
/// #![doc = document_features::document_features!(feature_label = "{feat}")]
/// ```
/// ```compile_fail
/// #![doc = document_features::document_features!(feature_label = 3.14)]
/// ```
/// ```compile_fail
/// #![doc = document_features::document_features!(feature_label = )]
/// ```
/// ```compile_fail
/// #![doc = document_features::document_features!(feature_label = "**`{feature}`**" extra)]
/// ```
#[cfg(doc)]
struct FeatureLabelCompilationTest;

#[cfg(test)]
mod tests {
    use super::{process_toml, Args};

    #[track_caller]
    fn test_error(toml: &str, expected: &str) {
        let err = process_toml(toml, &Args::default()).unwrap_err();
        assert!(err.contains(expected), "{:?} does not contain {:?}", err, expected)
    }

    #[test]
    fn only_get_balanced_in_correct_table() {
        process_toml(
            r#"

[package.metadata.release]
pre-release-replacements = [
  {test=\"\#\# \"},
]
[abcd]
[features]#xyz
#! abc
#
###
#! def
#!
## 123
## 456
feat1 = ["plop"]
#! ghi
no_doc = []
##
feat2 = ["momo"]
#! klm
default = ["feat1", "something_else"]
#! end
            "#,
            &Args::default(),
        )
        .unwrap();
    }

    #[test]
    fn no_features() {
        let r = process_toml(
            r#"
[features]
[dependencies]
foo = 4;
"#,
            &Args::default(),
        )
        .unwrap();
        assert_eq!(r, "*No documented features in Cargo.toml*");
    }

    #[test]
    fn no_features2() {
        let r = process_toml(
            r#"
[packages]
[dependencies]
"#,
            &Args::default(),
        )
        .unwrap();
        assert_eq!(r, "*No documented features in Cargo.toml*");
    }

    #[test]
    fn parse_error3() {
        test_error(
            r#"
[features]
ff = []
[abcd
efgh
[dependencies]
"#,
            "Parse error while parsing line: [abcd",
        );
    }

    #[test]
    fn parse_error4() {
        test_error(
            r#"
[features]
## dd
## ff
#! ee
## ff
"#,
            "Cannot mix",
        );
    }

    #[test]
    fn parse_error5() {
        test_error(
            r#"
[features]
## dd
"#,
            "not associated with a feature",
        );
    }

    #[test]
    fn parse_error6() {
        test_error(
            r#"
[features]
# ff
foo = []
default = [
#ffff
# ff
"#,
            "Parse error while parsing value default",
        );
    }

    #[test]
    fn parse_error7() {
        test_error(
            r#"
[features]
# f
foo = [ x = { ]
bar = []
"#,
            "Parse error while parsing value foo",
        );
    }

    #[test]
    fn not_a_feature1() {
        test_error(
            r#"
## hallo
[features]
"#,
            "Not a feature: `[features]`",
        );
    }

    #[test]
    fn not_a_feature2() {
        test_error(
            r#"
[package]
## hallo
foo = []
"#,
            "Comment cannot be associated with a feature: \"hallo\"",
        );
    }

    #[test]
    fn non_optional_dep1() {
        test_error(
            r#"
[dev-dependencies]
## Not optional
foo = { version = "1.2", optional = false }
"#,
            "Dependency foo is not an optional dependency",
        );
    }

    #[test]
    fn non_optional_dep2() {
        test_error(
            r#"
[dev-dependencies]
## Not optional
foo = { version = "1.2" }
"#,
            "Dependency foo is not an optional dependency",
        );
    }

    #[test]
    fn basic() {
        let toml = r#"
[abcd]
[features]#xyz
#! abc
#
###
#! def
#!
## 123
## 456
feat1 = ["plop"]
#! ghi
no_doc = []
##
feat2 = ["momo"]
#! klm
default = ["feat1", "something_else"]
#! end
        "#;
        let parsed = process_toml(toml, &Args::default()).unwrap();
        assert_eq!(
            parsed,
            " abc\n def\n\n* **`feat1`** *(enabled by default)* —  123\n  456\n\n ghi\n* **`feat2`**\n\n klm\n end\n"
        );
        let parsed = process_toml(
            toml,
            &Args {
                feature_label: Some(
                    "<span class=\"stab portability\"><code>{feature}</code></span>".into(),
                ),
            },
        )
        .unwrap();
        assert_eq!(
            parsed,
            " abc\n def\n\n* <span class=\"stab portability\"><code>feat1</code></span> *(enabled by default)* —  123\n  456\n\n ghi\n* <span class=\"stab portability\"><code>feat2</code></span>\n\n klm\n end\n"
        );
    }

    #[test]
    fn dependencies() {
        let toml = r#"
#! top
[dev-dependencies] #yo
## dep1
dep1 = { version="1.2", optional=true}
#! yo
dep2 = "1.3"
## dep3
[target.'cfg(unix)'.build-dependencies.dep3]
version = "42"
optional = true
        "#;
        let parsed = process_toml(toml, &Args::default()).unwrap();
        assert_eq!(parsed, " top\n* **`dep1`** —  dep1\n\n yo\n* **`dep3`** —  dep3\n");
        let parsed = process_toml(
            toml,
            &Args {
                feature_label: Some(
                    "<span class=\"stab portability\"><code>{feature}</code></span>".into(),
                ),
            },
        )
        .unwrap();
        assert_eq!(parsed, " top\n* <span class=\"stab portability\"><code>dep1</code></span> —  dep1\n\n yo\n* <span class=\"stab portability\"><code>dep3</code></span> —  dep3\n");
    }

    #[test]
    fn multi_lines() {
        let toml = r#"
[package.metadata.foo]
ixyz = [
    ["array"],
    [
        "of",
        "arrays"
    ]
]
[dev-dependencies]
## dep1
dep1 = {
    version="1.2-}",
    optional=true
}
[features]
default = [
    "goo",
    "\"]",
    "bar",
]
## foo
foo = [
   "bar"
]
## bar
bar = [

]
        "#;
        let parsed = process_toml(toml, &Args::default()).unwrap();
        assert_eq!(
            parsed,
            "* **`dep1`** —  dep1\n* **`foo`** —  foo\n* **`bar`** *(enabled by default)* —  bar\n"
        );
        let parsed = process_toml(
            toml,
            &Args {
                feature_label: Some(
                    "<span class=\"stab portability\"><code>{feature}</code></span>".into(),
                ),
            },
        )
        .unwrap();
        assert_eq!(
            parsed,
            "* <span class=\"stab portability\"><code>dep1</code></span> —  dep1\n* <span class=\"stab portability\"><code>foo</code></span> —  foo\n* <span class=\"stab portability\"><code>bar</code></span> *(enabled by default)* —  bar\n"
        );
    }

    #[test]
    fn dots_in_feature() {
        let toml = r#"
[features]
## This is a test
"teßt." = []
default = ["teßt."]
[dependencies]
## A dep
"dep" = { version = "123", optional = true }
        "#;
        let parsed = process_toml(toml, &Args::default()).unwrap();
        assert_eq!(
            parsed,
            "* **`teßt.`** *(enabled by default)* —  This is a test\n* **`dep`** —  A dep\n"
        );
        let parsed = process_toml(
            toml,
            &Args {
                feature_label: Some(
                    "<span class=\"stab portability\"><code>{feature}</code></span>".into(),
                ),
            },
        )
        .unwrap();
        assert_eq!(
            parsed,
            "* <span class=\"stab portability\"><code>teßt.</code></span> *(enabled by default)* —  This is a test\n* <span class=\"stab portability\"><code>dep</code></span> —  A dep\n"
        );
    }

    #[test]
    fn recursive_default() {
        let toml = r#"
[features]
default=["qqq"]

## Qqq
qqq=["www"]

## Www
www=[]
        "#;
        let parsed = process_toml(toml, &Args::default()).unwrap();
        assert_eq!(parsed, "* **`qqq`** *(enabled by default)* —  Qqq\n* **`www`** *(enabled by default)* —  Www\n");
    }
}