1#![doc = self_test!(=>
81 )]
97#[cfg(not(feature = "default"))]
147compile_error!(
148 "The feature `default` must be enabled to ensure \
149 forward compatibility with future version of this crate"
150);
151
152extern crate proc_macro;
153
154use proc_macro::{TokenStream, TokenTree};
155use std::borrow::Cow;
156use std::collections::{HashMap, HashSet};
157use std::convert::TryFrom;
158use std::fmt::Write;
159use std::path::Path;
160use std::str::FromStr;
161
/// Build a `::core::compile_error!("…")` invocation carrying `e` as the
/// message, by parsing source text into a token stream at the call site.
fn error(e: &str) -> TokenStream {
    let escaped = e.escape_default();
    let source = format!("::core::compile_error!{{\"{}\"}}", escaped);
    TokenStream::from_str(&source).unwrap()
}
165
/// Build a `compile_error! { "msg" }` token stream by hand so every token can
/// be given the span of the offending token (or the call site when `tt` is
/// `None`), producing a precisely located diagnostic.
fn compile_error(msg: &str, tt: Option<TokenTree>) -> TokenStream {
    use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing};
    use std::iter::FromIterator;
    // Point the error at the offending token when there is one.
    let span = match tt.as_ref() {
        Some(token) => token.span(),
        None => proc_macro::Span::call_site(),
    };
    let bang = {
        let mut punct = Punct::new('!', Spacing::Alone);
        punct.set_span(span);
        punct
    };
    let message = {
        let mut literal = Literal::string(msg);
        literal.set_span(span);
        literal
    };
    let braces = {
        let inner = TokenStream::from_iter([TokenTree::Literal(message)]);
        let mut group = Group::new(Delimiter::Brace, inner);
        group.set_span(span);
        group
    };
    TokenStream::from_iter(vec![
        TokenTree::Ident(Ident::new("compile_error", span)),
        TokenTree::Punct(bang),
        TokenTree::Group(braces),
    ])
}
190
/// Arguments accepted by the `document_features!` macro invocation.
#[derive(Default)]
struct Args {
    /// Format string used to render each feature name; must contain the
    /// `{feature}` placeholder. `None` selects the built-in label
    /// (see `process_toml`, which falls back to "**`{feature}`**").
    feature_label: Option<String>,
}
195
196fn parse_args(input: TokenStream) -> Result<Args, TokenStream> {
197 let mut token_trees = input.into_iter().fuse();
198
199 match token_trees.next() {
201 None => return Ok(Args::default()),
202 Some(TokenTree::Ident(ident)) if ident.to_string() == "feature_label" => (),
203 tt => return Err(compile_error("expected `feature_label`", tt)),
204 }
205
206 match token_trees.next() {
208 Some(TokenTree::Punct(p)) if p.as_char() == '=' => (),
209 tt => return Err(compile_error("expected `=`", tt)),
210 }
211
212 let feature_label;
214 if let Some(tt) = token_trees.next() {
215 match litrs::StringLit::<String>::try_from(&tt) {
216 Ok(string_lit) if string_lit.value().contains("{feature}") => {
217 feature_label = string_lit.into_value()
218 }
219 _ => {
220 return Err(compile_error(
221 "expected a string literal containing the substring \"{feature}\"",
222 Some(tt),
223 ))
224 }
225 }
226 } else {
227 return Err(compile_error(
228 "expected a string literal containing the substring \"{feature}\"",
229 None,
230 ));
231 }
232
233 if let tt @ Some(_) = token_trees.next() {
235 return Err(compile_error("unexpected token after the format string", tt));
236 }
237
238 Ok(Args { feature_label: Some(feature_label) })
239}
240
241#[proc_macro]
245pub fn document_features(tokens: TokenStream) -> TokenStream {
246 parse_args(tokens)
247 .and_then(|args| document_features_impl(&args))
248 .unwrap_or_else(std::convert::identity)
249}
250
251fn document_features_impl(args: &Args) -> Result<TokenStream, TokenStream> {
252 let path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
253 let mut cargo_toml = std::fs::read_to_string(Path::new(&path).join("Cargo.toml"))
254 .map_err(|e| error(&format!("Can't open Cargo.toml: {:?}", e)))?;
255
256 if !has_doc_comments(&cargo_toml) {
257 if let Ok(orig) = std::fs::read_to_string(Path::new(&path).join("Cargo.toml.orig")) {
260 if has_doc_comments(&orig) {
261 cargo_toml = orig;
262 }
263 }
264 }
265
266 let result = process_toml(&cargo_toml, args).map_err(|e| error(&e))?;
267 Ok(std::iter::once(proc_macro::TokenTree::from(proc_macro::Literal::string(&result))).collect())
268}
269
/// Heuristically check whether `cargo_toml` contains `## ` or `#! ` doc
/// comments, while skipping occurrences that live inside TOML multi-line
/// (`"""`) strings so they are not mistaken for doc comments.
fn has_doc_comments(cargo_toml: &str) -> bool {
    let mut it = cargo_toml.lines().map(str::trim);
    while let Some(line) = it.next() {
        if line.starts_with("## ") || line.starts_with("#! ") {
            return true;
        }
        // Whole-line `#` comments cannot open a string.
        if line.starts_with('#') {
            continue;
        }
        // Only look for `"""` before any trailing `#` comment.
        let code_part = line.split_once('#').map_or(line, |(before, _)| before);
        let Some((_, mut rest)) = code_part.split_once("\"\"\"") else { continue };
        // We are inside a multi-line string: consume input until it closes.
        loop {
            if let Some((_, after_backslash)) = rest.split_once('\\') {
                // Skip the escaped character (handles `\\` and `\"`).
                rest = after_backslash
                    .strip_prefix('\\')
                    .or_else(|| after_backslash.strip_prefix('"'))
                    .unwrap_or(after_backslash);
                continue;
            }
            if let Some((_, tail)) = rest.split_once("\"\"\"") {
                let tail = tail.trim_start_matches('"');
                let tail = tail.split_once('#').map_or(tail, |(before, _)| before);
                if let Some((_, reopened)) = tail.split_once("\"\"\"") {
                    // The same line re-opens another multi-line string.
                    rest = reopened;
                    continue;
                }
                break;
            }
            // String still open: pull the next line (EOF → no doc comments).
            match it.next() {
                Some(next_line) => rest = next_line,
                None => return false,
            }
        }
    }
    false
}
308
#[test]
fn test_has_doc_comments() {
    // Plain `## ` is a doc comment; `#comment` (no space) is not.
    assert!(has_doc_comments("foo\nbar\n## comment\nddd"));
    assert!(!has_doc_comments("foo\nbar\n#comment\nddd"));
    // `##` lines inside multi-line `"""` strings must not count.
    assert!(!has_doc_comments(
        r#"
[[package.metadata.release.pre-release-replacements]]
exactly = 1 # not a doc comment
file = "CHANGELOG.md"
replace = """
<!-- next-header -->
## [Unreleased] - ReleaseDate
"""
search = "<!-- next-header -->"
array = ["""foo""", """
bar""", """eee
## not a comment
"""]
    "#
    ));
    // A `"""` inside a `#` comment does not open a string; the later
    // `## This is a comment` is a real doc comment.
    assert!(has_doc_comments(
        r#"
[[package.metadata.release.pre-release-replacements]]
exactly = 1 # """
file = "CHANGELOG.md"
replace = """
<!-- next-header -->
## [Unreleased] - ReleaseDate
"""
search = "<!-- next-header -->"
array = ["""foo""", """
bar""", """eee
## not a comment
"""]
## This is a comment
feature = "45"
    "#
    ));

    // Escaped quotes (`\"`) inside multi-line strings keep the string open.
    assert!(!has_doc_comments(
        r#"
[[package.metadata.release.pre-release-replacements]]
value = """" string \"""
## within the string
\""""
another_string = """"" # """
## also within"""
"#
    ));

    // Same fixture, but with a doc comment after the strings close.
    assert!(has_doc_comments(
        r#"
[[package.metadata.release.pre-release-replacements]]
value = """" string \"""
## within the string
\""""
another_string = """"" # """
## also within"""
## out of the string
foo = bar
    "#
    ));
}
372
/// Recursively collect `feature` and every feature it transitively enables
/// (per `feature_dependencies`) into `collected`.
///
/// Already-collected features are skipped, which also guarantees termination
/// on cyclic feature graphs.
fn dependents(
    feature_dependencies: &HashMap<String, Vec<String>>,
    feature: &str,
    collected: &mut HashSet<String>,
) {
    // `insert` returns false when the value was already present, so one
    // lookup replaces the previous `contains` + `insert` pair.
    if !collected.insert(feature.to_string()) {
        return;
    }
    if let Some(dependencies) = feature_dependencies.get(feature) {
        for dependency in dependencies {
            dependents(feature_dependencies, dependency, collected);
        }
    }
}
388
/// Parse the right-hand side of a feature entry (`["dep1", "dep2"]`) into an
/// iterator over the dependency names, with quotes stripped and empty items
/// dropped. `dep` is only used in the error message when the brackets are
/// missing.
fn parse_feature_deps<'a>(
    s: &'a str,
    dep: &str,
) -> Result<impl Iterator<Item = String> + 'a, String> {
    let trimmed = s.trim();
    let inner = trimmed
        .strip_prefix('[')
        .and_then(|rest| rest.strip_suffix(']'))
        .ok_or_else(|| format!("Parse error while parsing dependency {}", dep))?;
    let names = inner
        .split(',')
        .map(|item| item.trim().trim_matches(|c| c == '"' || c == '\'').trim().to_string())
        .filter(|name: &String| !name.is_empty());
    Ok(names)
}
401
/// Turn the contents of a `Cargo.toml` into a markdown list of documented
/// features.
///
/// `##` comments document the feature or optional dependency that follows
/// them; `#!` comments are free-standing paragraphs inserted between list
/// items. Features in `default` (transitively) are marked
/// "*(enabled by default)*". Returns the markdown, or an error message on
/// malformed or inconsistent input.
fn process_toml(cargo_toml: &str, args: &Args) -> Result<String, String> {
    // Trimmed, non-empty lines; ordinary `#` comments are dropped but doc
    // comments (`##`, `#!`) are kept.
    let mut lines = cargo_toml
        .lines()
        .map(str::trim)
        .filter(|l| {
            !l.is_empty() && (!l.starts_with('#') || l.starts_with("##") || l.starts_with("#!"))
        });
    let mut top_comment = String::new();       // pending `#!` paragraph
    let mut current_comment = String::new();   // pending `##` comment
    let mut features = vec![];                 // (name, top_comment, comment)
    let mut default_features = HashSet::new();
    let mut current_table = "";
    let mut dependencies = HashMap::new();     // feature -> enabled features
    while let Some(line) = lines.next() {
        if let Some(x) = line.strip_prefix("#!") {
            // `#!` must be followed by a space (or nothing) to be a doc comment.
            if !x.is_empty() && !x.starts_with(' ') {
                continue;
            }
            if !current_comment.is_empty() {
                return Err("Cannot mix ## and #! comments between features.".into());
            }
            // Blank line between a previous feature and this paragraph.
            if top_comment.is_empty() && !features.is_empty() {
                top_comment = "\n".into();
            }
            writeln!(top_comment, "{}", x).unwrap();
        } else if let Some(x) = line.strip_prefix("##") {
            if !x.is_empty() && !x.starts_with(' ') {
                continue;
            }
            writeln!(current_comment, " {}", x).unwrap();
        } else if let Some(table) = line.strip_prefix('[') {
            current_table = table
                .split_once(']')
                .map(|(t, _)| t.trim())
                .ok_or_else(|| format!("Parse error while parsing line: {}", line))?;
            if !current_comment.is_empty() {
                // A `##` comment right before a `[...dependencies.<dep>]`
                // table header documents that dependency as a feature.
                #[allow(clippy::unnecessary_lazy_evaluations)]
                let dep = current_table
                    .rsplit_once('.')
                    .and_then(|(table, dep)| table.trim().ends_with("dependencies").then(|| dep))
                    .ok_or_else(|| format!("Not a feature: `{}`", line))?;
                features.push((
                    dep.trim(),
                    std::mem::take(&mut top_comment),
                    std::mem::take(&mut current_comment),
                ));
            }
        } else if let Some((dep, rest)) = line.split_once('=') {
            let dep = dep.trim().trim_matches('"');
            // The value may span several lines (arrays, inline tables).
            let rest = get_balanced(rest, &mut lines)
                .map_err(|e| format!("Parse error while parsing value {}: {}", dep, e))?;
            if current_table == "features" {
                if dep == "default" {
                    default_features.extend(parse_feature_deps(&rest, dep)?);
                } else {
                    for d in parse_feature_deps(&rest, dep)? {
                        dependencies
                            .entry(dep.to_string())
                            .or_insert_with(Vec::new)
                            .push(d.clone());
                    }
                }
            }
            if !current_comment.is_empty() {
                if current_table.ends_with("dependencies") {
                    // Only optional dependencies act as features, so a doc
                    // comment requires `optional = true` in the value.
                    if !rest
                        .split_once("optional")
                        .and_then(|(_, r)| r.trim().strip_prefix('='))
                        .map_or(false, |r| r.trim().starts_with("true"))
                    {
                        return Err(format!("Dependency {} is not an optional dependency", dep));
                    }
                } else if current_table != "features" {
                    return Err(format!(
                        r#"Comment cannot be associated with a feature: "{}""#,
                        current_comment.trim()
                    ));
                }
                features.push((
                    dep,
                    std::mem::take(&mut top_comment),
                    std::mem::take(&mut current_comment),
                ));
            }
        }
    }
    // Close over `default`: anything a default feature enables is also
    // enabled by default.
    let df = default_features.iter().cloned().collect::<Vec<_>>();
    for feature in df {
        let mut resolved = HashSet::new();
        dependents(&dependencies, &feature, &mut resolved);
        default_features.extend(resolved.into_iter());
    }
    if !current_comment.is_empty() {
        return Err("Found comment not associated with a feature".into());
    }
    if features.is_empty() {
        return Ok("*No documented features in Cargo.toml*".into());
    }
    let mut result = String::new();
    for (f, top, comment) in features {
        let default = if default_features.contains(f) { " *(enabled by default)*" } else { "" };
        let feature_label = args.feature_label.as_deref().unwrap_or("**`{feature}`**");
        let comment = if comment.trim().is_empty() {
            String::new()
        } else {
            // Em-dash between the feature label and its documentation.
            format!(" —{}", comment.trim_end())
        };

        writeln!(
            result,
            "{}* {}{}{}",
            top,
            feature_label.replace("{feature}", f),
            default,
            comment,
        )
        .unwrap();
    }
    // Trailing `#!` paragraphs go after the list.
    result += &top_comment;
    Ok(result)
}
525
526fn get_balanced<'a>(
527 first_line: &'a str,
528 lines: &mut impl Iterator<Item = &'a str>,
529) -> Result<Cow<'a, str>, String> {
530 let mut line = first_line;
531 let mut result = Cow::from("");
532
533 let mut in_quote = false;
534 let mut level = 0;
535 loop {
536 let mut last_slash = false;
537 for (idx, b) in line.as_bytes().iter().enumerate() {
538 if last_slash {
539 last_slash = false
540 } else if in_quote {
541 match b {
542 b'\\' => last_slash = true,
543 b'"' | b'\'' => in_quote = false,
544 _ => (),
545 }
546 } else {
547 match b {
548 b'\\' => last_slash = true,
549 b'"' => in_quote = true,
550 b'{' | b'[' => level += 1,
551 b'}' | b']' if level == 0 => return Err("unbalanced source".into()),
552 b'}' | b']' => level -= 1,
553 b'#' => {
554 line = &line[..idx];
555 break;
556 }
557 _ => (),
558 }
559 }
560 }
561 if result.len() == 0 {
562 result = Cow::from(line);
563 } else {
564 *result.to_mut() += line;
565 }
566 if level == 0 {
567 return Ok(result);
568 }
569 line = if let Some(l) = lines.next() {
570 l
571 } else {
572 return Err("unbalanced source".into());
573 };
574 }
575}
576
#[test]
fn test_get_balanced() {
    // Continuation lines are concatenated (without separators) and trailing
    // `#` comments on each line are dropped; remaining iterator items stay.
    assert_eq!(
        get_balanced(
            "{",
            &mut IntoIterator::into_iter(["a", "{ abc[], #ignore", " def }", "}", "xxx"])
        ),
        Ok("{a{ abc[], def }}".into())
    );
    // `#` and `{` inside a quoted string are ignored for balancing.
    assert_eq!(
        get_balanced("{ foo = \"{#\" } #ignore", &mut IntoIterator::into_iter(["xxx"])),
        Ok("{ foo = \"{#\" } ".into())
    );
    // A closing bracket without an opener is an error.
    assert_eq!(
        get_balanced("]", &mut IntoIterator::into_iter(["["])),
        Err("unbalanced source".into())
    );
}
595
/// Self-test helper: takes a stringified TOML snippet, re-doubles the
/// leading `#` that the doc-comment round-trip stripped, runs `process_toml`
/// on it, and expands to the resulting markdown as a string literal (or a
/// `compile_error!` on failure). Used by the `self_test!` macro.
#[cfg(feature = "self-test")]
#[proc_macro]
#[doc(hidden)]
pub fn self_test_helper(input: TokenStream) -> TokenStream {
    let mut code = String::new();
    // `(&input)` was a needless borrow (clippy::needless_borrow); `input` is
    // owned and `to_string` takes `&self`. Strip the raw-string quotes/hashes
    // around the literal before processing.
    for line in input.to_string().trim_matches(|c| c == '"' || c == '#').lines() {
        // Each `#` line presumably lost one leading `#` on the way in —
        // restore it so `##`/`#!` doc comments survive (TODO confirm against
        // the `self_test!` macro input shape).
        if line.strip_prefix('#').map_or(false, |x| x.is_empty() || x.starts_with(' ')) {
            code += "#";
        }
        code += line;
        code += "\n";
    }
    process_toml(&code, &Args::default()).map_or_else(
        |e| error(&e),
        |r| std::iter::once(proc_macro::TokenTree::from(proc_macro::Literal::string(&r))).collect(),
    )
}
616
// With the `self-test` feature, `self_test!` expands to a rust doc-test that
// asserts `self_test_helper!` output matches the expected markdown (after
// normalizing blank lines and indentation).
#[cfg(feature = "self-test")]
macro_rules! self_test {
    (#[doc = $toml:literal] => #[doc = $md:literal]) => {
        concat!(
            "\n`````rust\n\
            fn normalize_md(md : &str) -> String {
               md.lines().skip_while(|l| l.is_empty()).map(|l| l.trim())
                .collect::<Vec<_>>().join(\"\\n\")
            }
            assert_eq!(normalize_md(document_features::self_test_helper!(",
            stringify!($toml),
            ")), normalize_md(",
            stringify!($md),
            "));\n`````\n\n"
        )
    };
}
634
// Without the `self-test` feature, `self_test!` merely renders the TOML
// snippet and its expected markdown side by side in the documentation.
#[cfg(not(feature = "self-test"))]
macro_rules! self_test {
    (#[doc = $toml:literal] => #[doc = $md:literal]) => {
        concat!(
            "This contents in Cargo.toml:\n`````toml",
            $toml,
            "\n`````\n Generates the following:\n\
            <table><tr><th>Preview</th></tr><tr><td>\n\n",
            $md,
            "\n</td></tr></table>\n\n \n",
        )
    };
}
648
649#[allow(unused)] use self_test;
651
// Only exists when building documentation; presumably serves as an anchor
// for a doc-test exercising the `feature_label` argument (the attaching doc
// attribute is not visible in this excerpt — TODO confirm).
#[cfg(doc)]
struct FeatureLabelCompilationTest;
684
#[cfg(test)]
mod tests {
    use super::{process_toml, Args};

    /// Assert that `process_toml` fails and that the error message contains
    /// the `expected` substring.
    #[track_caller]
    fn test_error(toml: &str, expected: &str) {
        let err = process_toml(toml, &Args::default()).unwrap_err();
        assert!(err.contains(expected), "{:?} does not contain {:?}", err, expected)
    }

    // Multi-line values in unrelated tables must not confuse the parser.
    #[test]
    fn only_get_balanced_in_correct_table() {
        process_toml(
            r#"

[package.metadata.release]
pre-release-replacements = [
  {test=\"\#\# \"},
]
[abcd]
[features]#xyz
#! abc
#
###
#! def
#!
## 123
## 456
feat1 = ["plop"]
#! ghi
no_doc = []
##
feat2 = ["momo"]
#! klm
default = ["feat1", "something_else"]
#! end
            "#,
            &Args::default(),
        )
        .unwrap();
    }

    // An empty `[features]` table yields the placeholder text.
    #[test]
    fn no_features() {
        let r = process_toml(
            r#"
[features]
[dependencies]
foo = 4;
"#,
            &Args::default(),
        )
        .unwrap();
        assert_eq!(r, "*No documented features in Cargo.toml*");
    }

    // No `[features]` table at all: same placeholder.
    #[test]
    fn no_features2() {
        let r = process_toml(
            r#"
[packages]
[dependencies]
"#,
            &Args::default(),
        )
        .unwrap();
        assert_eq!(r, "*No documented features in Cargo.toml*");
    }

    // Unterminated table header.
    #[test]
    fn parse_error3() {
        test_error(
            r#"
[features]
ff = []
[abcd
efgh
[dependencies]
"#,
            "Parse error while parsing line: [abcd",
        );
    }

    // `##` and `#!` comments cannot be interleaved before one feature.
    #[test]
    fn parse_error4() {
        test_error(
            r#"
[features]
## dd
## ff
#! ee
## ff
"#,
            "Cannot mix",
        );
    }

    // A trailing `##` comment must document something.
    #[test]
    fn parse_error5() {
        test_error(
            r#"
[features]
## dd
"#,
            "not associated with a feature",
        );
    }

    // Unbalanced array value for `default`.
    #[test]
    fn parse_error6() {
        test_error(
            r#"
[features]
# ff
foo = []
default = [
#ffff
# ff
"#,
            "Parse error while parsing value default",
        );
    }

    // Mismatched brackets inside a value.
    #[test]
    fn parse_error7() {
        test_error(
            r#"
[features]
# f
foo = [ x = { ]
bar = []
"#,
            "Parse error while parsing value foo",
        );
    }

    // A `##` comment cannot document a table header that is not a dependency.
    #[test]
    fn not_a_feature1() {
        test_error(
            r#"
## hallo
[features]
"#,
            "Not a feature: `[features]`",
        );
    }

    // A `##` comment in a non-feature, non-dependency table is rejected.
    #[test]
    fn not_a_feature2() {
        test_error(
            r#"
[package]
## hallo
foo = []
"#,
            "Comment cannot be associated with a feature: \"hallo\"",
        );
    }

    // Documented dependencies must be `optional = true`.
    #[test]
    fn non_optional_dep1() {
        test_error(
            r#"
[dev-dependencies]
## Not optional
foo = { version = "1.2", optional = false }
"#,
            "Dependency foo is not an optional dependency",
        );
    }

    // Missing `optional` entirely is also rejected.
    #[test]
    fn non_optional_dep2() {
        test_error(
            r#"
[dev-dependencies]
## Not optional
foo = { version = "1.2" }
"#,
            "Dependency foo is not an optional dependency",
        );
    }

    // End-to-end rendering with the default and a custom feature label.
    #[test]
    fn basic() {
        let toml = r#"
[abcd]
[features]#xyz
#! abc
#
###
#! def
#!
## 123
## 456
feat1 = ["plop"]
#! ghi
no_doc = []
##
feat2 = ["momo"]
#! klm
default = ["feat1", "something_else"]
#! end
        "#;
        let parsed = process_toml(toml, &Args::default()).unwrap();
        assert_eq!(
            parsed,
            " abc\n def\n\n* **`feat1`** *(enabled by default)* — 123\n 456\n\n ghi\n* **`feat2`**\n\n klm\n end\n"
        );
        let parsed = process_toml(
            toml,
            &Args {
                feature_label: Some(
                    "<span class=\"stab portability\"><code>{feature}</code></span>".into(),
                ),
            },
        )
        .unwrap();
        assert_eq!(
            parsed,
            " abc\n def\n\n* <span class=\"stab portability\"><code>feat1</code></span> *(enabled by default)* — 123\n 456\n\n ghi\n* <span class=\"stab portability\"><code>feat2</code></span>\n\n klm\n end\n"
        );
    }

    // Optional dependencies (inline and in their own table) become features.
    #[test]
    fn dependencies() {
        let toml = r#"
#! top
[dev-dependencies] #yo
## dep1
dep1 = { version="1.2", optional=true}
#! yo
dep2 = "1.3"
## dep3
[target.'cfg(unix)'.build-dependencies.dep3]
version = "42"
optional = true
        "#;
        let parsed = process_toml(toml, &Args::default()).unwrap();
        assert_eq!(parsed, " top\n* **`dep1`** — dep1\n\n yo\n* **`dep3`** — dep3\n");
        let parsed = process_toml(
            toml,
            &Args {
                feature_label: Some(
                    "<span class=\"stab portability\"><code>{feature}</code></span>".into(),
                ),
            },
        )
        .unwrap();
        assert_eq!(parsed, " top\n* <span class=\"stab portability\"><code>dep1</code></span> — dep1\n\n yo\n* <span class=\"stab portability\"><code>dep3</code></span> — dep3\n");
    }

    // Values spanning several lines (arrays, inline tables, tricky strings).
    #[test]
    fn multi_lines() {
        let toml = r#"
[package.metadata.foo]
ixyz = [
    ["array"],
    [
        "of",
        "arrays"
    ]
]
[dev-dependencies]
## dep1
dep1 = {
    version="1.2-}",
    optional=true
}
[features]
default = [
    "goo",
    "\"]",
    "bar",
]
## foo
foo = [
    "bar"
]
## bar
bar = [

]
        "#;
        let parsed = process_toml(toml, &Args::default()).unwrap();
        assert_eq!(
            parsed,
            "* **`dep1`** — dep1\n* **`foo`** — foo\n* **`bar`** *(enabled by default)* — bar\n"
        );
        let parsed = process_toml(
            toml,
            &Args {
                feature_label: Some(
                    "<span class=\"stab portability\"><code>{feature}</code></span>".into(),
                ),
            },
        )
        .unwrap();
        assert_eq!(
            parsed,
            "* <span class=\"stab portability\"><code>dep1</code></span> — dep1\n* <span class=\"stab portability\"><code>foo</code></span> — foo\n* <span class=\"stab portability\"><code>bar</code></span> *(enabled by default)* — bar\n"
        );
    }

    // Quoted feature names with dots and non-ASCII characters.
    #[test]
    fn dots_in_feature() {
        let toml = r#"
[features]
## This is a test
"teßt." = []
default = ["teßt."]
[dependencies]
## A dep
"dep" = { version = "123", optional = true }
        "#;
        let parsed = process_toml(toml, &Args::default()).unwrap();
        assert_eq!(
            parsed,
            "* **`teßt.`** *(enabled by default)* — This is a test\n* **`dep`** — A dep\n"
        );
        let parsed = process_toml(
            toml,
            &Args {
                feature_label: Some(
                    "<span class=\"stab portability\"><code>{feature}</code></span>".into(),
                ),
            },
        )
        .unwrap();
        assert_eq!(
            parsed,
            "* <span class=\"stab portability\"><code>teßt.</code></span> *(enabled by default)* — This is a test\n* <span class=\"stab portability\"><code>dep</code></span> — A dep\n"
        );
    }

    // Default features are resolved transitively (default -> qqq -> www).
    #[test]
    fn recursive_default() {
        let toml = r#"
[features]
default=["qqq"]

## Qqq
qqq=["www"]

## Www
www=[]
        "#;
        let parsed = process_toml(toml, &Args::default()).unwrap();
        assert_eq!(parsed, "* **`qqq`** *(enabled by default)* — Qqq\n* **`www`** *(enabled by default)* — Www\n");
    }
}