#![doc = self_test!(=>
)]
#[cfg(not(feature = "default"))]
compile_error!(
    "The feature `default` must be enabled to ensure \
    forward compatibility with future versions of this crate"
);

extern crate proc_macro;

use proc_macro::{TokenStream, TokenTree};
use std::borrow::Cow;
use std::collections::HashSet;
use std::convert::TryFrom;
use std::fmt::Write;
use std::path::Path;
use std::str::FromStr;

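/// Turn an error message into a `::core::compile_error!` invocation, parsed
/// from a formatted string (the message is escaped with `escape_default`).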
fn error(e: &str) -> TokenStream {
    TokenStream::from_str(&format!("::core::compile_error!{{\"{}\"}}", e.escape_default())).unwrap()
}

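/// Build a `compile_error! { "msg" }` token stream by hand so that its span
/// can point at the offending token `tt` (or at the call site if `tt` is `None`).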
fn compile_error(msg: &str, tt: Option<TokenTree>) -> TokenStream {
    let span = tt.as_ref().map_or_else(proc_macro::Span::call_site, TokenTree::span);
    use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing};
    use std::iter::FromIterator;
    TokenStream::from_iter(vec![
        TokenTree::Ident(Ident::new("compile_error", span)),
        TokenTree::Punct({
            let mut punct = Punct::new('!', Spacing::Alone);
            punct.set_span(span);
            punct
        }),
        TokenTree::Group({
            let mut group = Group::new(Delimiter::Brace, {
                TokenStream::from_iter([TokenTree::Literal({
                    let mut string = Literal::string(msg);
                    string.set_span(span);
                    string
                })])
            });
            group.set_span(span);
            group
        }),
    ])
}

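/// Arguments accepted by the `document_features!` macro.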
#[derive(Default)]
struct Args {
    feature_label: Option<String>,
}

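/// Parse the macro arguments: either nothing at all, or
/// `feature_label = "…"` where the string literal must contain the
/// `{feature}` placeholder.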
fn parse_args(input: TokenStream) -> Result<Args, TokenStream> {
    let mut token_trees = input.into_iter().fuse();

    match token_trees.next() {
        None => return Ok(Args::default()),
        Some(TokenTree::Ident(ident)) if ident.to_string() == "feature_label" => (),
        tt => return Err(compile_error("expected `feature_label`", tt)),
    }

    match token_trees.next() {
        Some(TokenTree::Punct(p)) if p.as_char() == '=' => (),
        tt => return Err(compile_error("expected `=`", tt)),
    }

    let feature_label;
    if let Some(tt) = token_trees.next() {
        match litrs::StringLit::<String>::try_from(&tt) {
            Ok(string_lit) if string_lit.value().contains("{feature}") => {
                feature_label = string_lit.value().to_string()
            }
            _ => {
                return Err(compile_error(
                    "expected a string literal containing the substring \"{feature}\"",
                    Some(tt),
                ))
            }
        }
    } else {
        return Err(compile_error(
            "expected a string literal containing the substring \"{feature}\"",
            None,
        ));
    }

    if let tt @ Some(_) = token_trees.next() {
        return Err(compile_error("unexpected token after the format string", tt));
    }

    Ok(Args { feature_label: Some(feature_label) })
}

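/// Expands to a string literal holding a markdown list of the crate's
/// documented features, built from the `##`/`#!` doc comments in `Cargo.toml`.
/// A minimal usage sketch (the `feature_label` argument is optional and must
/// contain the `{feature}` placeholder):
///
/// ```ignore
/// #![doc = document_features::document_features!()]
/// // or, with a custom label format:
/// #![doc = document_features::document_features!(
///     feature_label = "<span class=\"stab portability\"><code>{feature}</code></span>"
/// )]
/// ```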
#[proc_macro]
pub fn document_features(tokens: TokenStream) -> TokenStream {
    parse_args(tokens)
        .and_then(|args| document_features_impl(&args))
        .unwrap_or_else(std::convert::identity)
}

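/// Read `Cargo.toml` from `CARGO_MANIFEST_DIR`, falling back to
/// `Cargo.toml.orig` when the normalized manifest contains no doc comments
/// (as happens for published crates), and expand to a string literal token.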
fn document_features_impl(args: &Args) -> Result<TokenStream, TokenStream> {
    let path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
    let mut cargo_toml = std::fs::read_to_string(Path::new(&path).join("Cargo.toml"))
        .map_err(|e| error(&format!("Can't open Cargo.toml: {:?}", e)))?;

    if !has_doc_comments(&cargo_toml) {
        if let Ok(orig) = std::fs::read_to_string(Path::new(&path).join("Cargo.toml.orig")) {
            if has_doc_comments(&orig) {
                cargo_toml = orig;
            }
        }
    }

    let result = process_toml(&cargo_toml, args).map_err(|e| error(&e))?;
    Ok(std::iter::once(proc_macro::TokenTree::from(proc_macro::Literal::string(&result))).collect())
}

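/// Return `true` if the TOML source contains `##` or `#!` doc comments,
/// taking care not to match them inside multi-line (`"""`) strings.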
fn has_doc_comments(cargo_toml: &str) -> bool {
    let mut lines = cargo_toml.lines().map(str::trim);
    while let Some(line) = lines.next() {
        if line.starts_with("## ") || line.starts_with("#! ") {
            return true;
        }
        let before_comment = line.split_once('#').map_or(line, |(before, _)| before);
        if line.starts_with('#') {
            continue;
        }
        if let Some((_, mut quote)) = before_comment.split_once("\"\"\"") {
            loop {
                if let Some((_, s)) = quote.split_once('\\') {
                    quote = s.strip_prefix('\\').or_else(|| s.strip_prefix('"')).unwrap_or(s);
                    continue;
                }
                if let Some((_, out_quote)) = quote.split_once("\"\"\"") {
                    let out_quote = out_quote.trim_start_matches('"');
                    let out_quote =
                        out_quote.split_once('#').map_or(out_quote, |(before, _)| before);
                    if let Some((_, q)) = out_quote.split_once("\"\"\"") {
                        quote = q;
                        continue;
                    }
                    break;
                }
                match lines.next() {
                    Some(l) => quote = l,
                    None => return false,
                }
            }
        }
    }
    false
}

#[test]
fn test_has_doc_comment() {
    assert!(has_doc_comments("foo\nbar\n## comment\nddd"));
    assert!(!has_doc_comments("foo\nbar\n#comment\nddd"));
    assert!(!has_doc_comments(
        r#"
[[package.metadata.release.pre-release-replacements]]
exactly = 1 # not a doc comment
file = "CHANGELOG.md"
replace = """
<!-- next-header -->
## [Unreleased] - ReleaseDate
"""
search = "<!-- next-header -->"
array = ["""foo""", """
bar""", """eee
## not a comment
"""]
        "#
    ));
    assert!(has_doc_comments(
        r#"
[[package.metadata.release.pre-release-replacements]]
exactly = 1 # """
file = "CHANGELOG.md"
replace = """
<!-- next-header -->
## [Unreleased] - ReleaseDate
"""
search = "<!-- next-header -->"
array = ["""foo""", """
bar""", """eee
## not a comment
"""]
## This is a comment
feature = "45"
        "#
    ));

    assert!(!has_doc_comments(
        r#"
[[package.metadata.release.pre-release-replacements]]
value = """" string \"""
## within the string
\""""
another_string = """"" # """
## also within"""
"#
    ));

    assert!(has_doc_comments(
        r#"
[[package.metadata.release.pre-release-replacements]]
value = """" string \"""
## within the string
\""""
another_string = """"" # """
## also within"""
## out of the string
foo = bar
        "#
    ));
}

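/// Core of the macro: walk the Cargo.toml source line by line and convert the
/// `##` (per-feature) and `#!` (free-standing) doc comments attached to
/// `[features]` entries and optional dependencies into a markdown bullet list.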
fn process_toml(cargo_toml: &str, args: &Args) -> Result<String, String> {
    let mut lines = cargo_toml
        .lines()
        .map(str::trim)
        .filter(|l| {
            !l.is_empty() && (!l.starts_with('#') || l.starts_with("##") || l.starts_with("#!"))
        });
    let mut top_comment = String::new();
    let mut current_comment = String::new();
    let mut features = vec![];
    let mut default_features = HashSet::new();
    let mut current_table = "";
    while let Some(line) = lines.next() {
        if let Some(x) = line.strip_prefix("#!") {
            if !x.is_empty() && !x.starts_with(' ') {
                continue;
            }
            if !current_comment.is_empty() {
                return Err("Cannot mix ## and #! comments between features.".into());
            }
            if top_comment.is_empty() && !features.is_empty() {
                top_comment = "\n".into();
            }
            writeln!(top_comment, "{}", x).unwrap();
        } else if let Some(x) = line.strip_prefix("##") {
            if !x.is_empty() && !x.starts_with(' ') {
                continue;
            }
            writeln!(current_comment, " {}", x).unwrap();
        } else if let Some(table) = line.strip_prefix('[') {
            current_table = table
                .split_once(']')
                .map(|(t, _)| t.trim())
                .ok_or_else(|| format!("Parse error while parsing line: {}", line))?;
            if !current_comment.is_empty() {
                let dep = current_table
                    .rsplit_once('.')
                    .and_then(|(table, dep)| table.trim().ends_with("dependencies").then(|| dep))
                    .ok_or_else(|| format!("Not a feature: `{}`", line))?;
                features.push((
                    dep.trim(),
                    std::mem::take(&mut top_comment),
                    std::mem::take(&mut current_comment),
                ));
            }
        } else if let Some((dep, rest)) = line.split_once('=') {
            let dep = dep.trim().trim_matches('"');
            let rest = get_balanced(rest, &mut lines)
                .map_err(|e| format!("Parse error while parsing value {}: {}", dep, e))?;
            if current_table == "features" && dep == "default" {
                let defaults = rest
                    .trim()
                    .strip_prefix('[')
                    .and_then(|r| r.strip_suffix(']'))
                    .ok_or_else(|| format!("Parse error while parsing dependency {}", dep))?
                    .split(',')
                    .map(|d| d.trim().trim_matches(|c| c == '"' || c == '\'').trim().to_string())
                    .filter(|d| !d.is_empty());
                default_features.extend(defaults);
            }
            if !current_comment.is_empty() {
                if current_table.ends_with("dependencies") {
                    if !rest
                        .split_once("optional")
                        .and_then(|(_, r)| r.trim().strip_prefix('='))
                        .map_or(false, |r| r.trim().starts_with("true"))
                    {
                        return Err(format!("Dependency {} is not an optional dependency", dep));
                    }
                } else if current_table != "features" {
                    return Err(format!(
                        r#"Comment cannot be associated with a feature: "{}""#,
                        current_comment.trim()
                    ));
                }
                features.push((
                    dep,
                    std::mem::take(&mut top_comment),
                    std::mem::take(&mut current_comment),
                ));
            }
        }
    }
    if !current_comment.is_empty() {
        return Err("Found comment not associated with a feature".into());
    }
    if features.is_empty() {
        return Ok("*No documented features in Cargo.toml*".into());
    }
    let mut result = String::new();
    for (f, top, comment) in features {
        let default = if default_features.contains(f) { " *(enabled by default)*" } else { "" };
        if !comment.trim().is_empty() {
            if let Some(feature_label) = &args.feature_label {
                writeln!(
                    result,
                    "{}* {}{} —{}",
                    top,
                    feature_label.replace("{feature}", f),
                    default,
                    comment.trim_end(),
                )
                .unwrap();
            } else {
                writeln!(result, "{}* **`{}`**{} —{}", top, f, default, comment.trim_end())
                    .unwrap();
            }
        } else if let Some(feature_label) = &args.feature_label {
            writeln!(result, "{}* {}{}", top, feature_label.replace("{feature}", f), default,)
                .unwrap();
        } else {
            writeln!(result, "{}* **`{}`**{}", top, f, default).unwrap();
        }
    }
    result += &top_comment;
    Ok(result)
}

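/// Starting from `first_line`, keep consuming lines from `lines` until all
/// brackets and braces are balanced, skipping quoted strings and stripping
/// `#` comments; return the concatenated value.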
fn get_balanced<'a>(
    first_line: &'a str,
    lines: &mut impl Iterator<Item = &'a str>,
) -> Result<Cow<'a, str>, String> {
    let mut line = first_line;
    let mut result = Cow::from("");

    let mut in_quote = false;
    let mut level = 0;
    loop {
        let mut last_slash = false;
        for (idx, b) in line.as_bytes().iter().enumerate() {
            if last_slash {
                last_slash = false
            } else if in_quote {
                match b {
                    b'\\' => last_slash = true,
                    b'"' | b'\'' => in_quote = false,
                    _ => (),
                }
            } else {
                match b {
                    b'\\' => last_slash = true,
                    b'"' => in_quote = true,
                    b'{' | b'[' => level += 1,
                    b'}' | b']' if level == 0 => return Err("unbalanced source".into()),
                    b'}' | b']' => level -= 1,
                    b'#' => {
                        line = &line[..idx];
                        break;
                    }
                    _ => (),
                }
            }
        }
        if result.is_empty() {
            result = Cow::from(line);
        } else {
            *result.to_mut() += line;
        }
        if level == 0 {
            return Ok(result);
        }
        line = if let Some(l) = lines.next() {
            l
        } else {
            return Err("unbalanced source".into());
        };
    }
}

#[test]
fn test_get_balanced() {
    assert_eq!(
        get_balanced(
            "{",
            &mut IntoIterator::into_iter(["a", "{ abc[], #ignore", " def }", "}", "xxx"])
        ),
        Ok("{a{ abc[],  def }}".into())
    );
    assert_eq!(
        get_balanced("{ foo = \"{#\" } #ignore", &mut IntoIterator::into_iter(["xxx"])),
        Ok("{ foo = \"{#\" } ".into())
    );
    assert_eq!(
        get_balanced("]", &mut IntoIterator::into_iter(["["])),
        Err("unbalanced source".into())
    );
}

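/// Proc macro backing the crate's self-test: it takes the stringified TOML
/// from the documentation, prepends an extra `#` to lines that start with a
/// lone `#` (turning them back into `##` doc comments), and runs the result
/// through `process_toml`.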
#[cfg(feature = "self-test")]
#[proc_macro]
#[doc(hidden)]
pub fn self_test_helper(input: TokenStream) -> TokenStream {
    let mut code = String::new();
    for line in input.to_string().trim_matches(|c| c == '"' || c == '#').lines() {
        if line.strip_prefix('#').map_or(false, |x| x.is_empty() || x.starts_with(' ')) {
            code += "#";
        }
        code += line;
        code += "\n";
    }
    process_toml(&code, &Args::default()).map_or_else(
        |e| error(&e),
        |r| std::iter::once(proc_macro::TokenTree::from(proc_macro::Literal::string(&r))).collect(),
    )
}

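// With the `self-test` feature enabled, the example in the crate documentation
// becomes a doctest that checks `self_test_helper!` against the expected
// markdown; without it, the same input is rendered as a plain before/after
// preview table.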
#[cfg(feature = "self-test")]
macro_rules! self_test {
    (#[doc = $toml:literal] => #[doc = $md:literal]) => {
        concat!(
            "\n`````rust\n\
            fn normalize_md(md : &str) -> String {
                md.lines().skip_while(|l| l.is_empty()).map(|l| l.trim())
                    .collect::<Vec<_>>().join(\"\\n\")
            }
            assert_eq!(normalize_md(document_features::self_test_helper!(",
            stringify!($toml),
            ")), normalize_md(",
            stringify!($md),
            "));\n`````\n\n"
        )
    };
}

#[cfg(not(feature = "self-test"))]
macro_rules! self_test {
    (#[doc = $toml:literal] => #[doc = $md:literal]) => {
        concat!(
            "This content in Cargo.toml:\n`````toml",
            $toml,
            "\n`````\n Generates the following:\n\
            <table><tr><th>Preview</th></tr><tr><td>\n\n",
            $md,
            "\n</td></tr></table>\n\n \n",
        )
    };
}

use self_test;

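// Compiled only when building documentation; the name suggests its doc comment
// serves as a compile-time test of the `feature_label` argument.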
#[cfg(doc)]
struct FeatureLabelCompilationTest;

#[cfg(test)]
mod tests {
    use super::{process_toml, Args};

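    // Assert that `process_toml` fails and that its error message contains `expected`.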
    #[track_caller]
    fn test_error(toml: &str, expected: &str) {
        let err = process_toml(toml, &Args::default()).unwrap_err();
        assert!(err.contains(expected), "{:?} does not contain {:?}", err, expected)
    }

    #[test]
    fn only_get_balanced_in_correct_table() {
        process_toml(
            r#"

[package.metadata.release]
pre-release-replacements = [
  {test=\"\#\# \"},
]
[abcd]
[features]#xyz
#! abc
#
###
#! def
#!
## 123
## 456
feat1 = ["plop"]
#! ghi
no_doc = []
##
feat2 = ["momo"]
#! klm
default = ["feat1", "something_else"]
#! end
            "#,
            &Args::default(),
        )
        .unwrap();
    }

    #[test]
    fn no_features() {
        let r = process_toml(
            r#"
[features]
[dependencies]
foo = 4;
"#,
            &Args::default(),
        )
        .unwrap();
        assert_eq!(r, "*No documented features in Cargo.toml*");
    }

    #[test]
    fn no_features2() {
        let r = process_toml(
            r#"
[packages]
[dependencies]
"#,
            &Args::default(),
        )
        .unwrap();
        assert_eq!(r, "*No documented features in Cargo.toml*");
    }

    #[test]
    fn parse_error3() {
        test_error(
            r#"
[features]
ff = []
[abcd
efgh
[dependencies]
"#,
            "Parse error while parsing line: [abcd",
        );
    }

    #[test]
    fn parse_error4() {
        test_error(
            r#"
[features]
## dd
## ff
#! ee
## ff
"#,
            "Cannot mix",
        );
    }

    #[test]
    fn parse_error5() {
        test_error(
            r#"
[features]
## dd
"#,
            "not associated with a feature",
        );
    }

    #[test]
    fn parse_error6() {
        test_error(
            r#"
[features]
# ff
foo = []
default = [
#ffff
# ff
"#,
            "Parse error while parsing value default",
        );
    }

    #[test]
    fn parse_error7() {
        test_error(
            r#"
[features]
# f
foo = [ x = { ]
bar = []
"#,
            "Parse error while parsing value foo",
        );
    }

    #[test]
    fn not_a_feature1() {
        test_error(
            r#"
## hallo
[features]
"#,
            "Not a feature: `[features]`",
        );
    }

    #[test]
    fn not_a_feature2() {
        test_error(
            r#"
[package]
## hallo
foo = []
"#,
            "Comment cannot be associated with a feature: \"hallo\"",
        );
    }

    #[test]
    fn non_optional_dep1() {
        test_error(
            r#"
[dev-dependencies]
## Not optional
foo = { version = "1.2", optional = false }
"#,
            "Dependency foo is not an optional dependency",
        );
    }

    #[test]
    fn non_optional_dep2() {
        test_error(
            r#"
[dev-dependencies]
## Not optional
foo = { version = "1.2" }
"#,
            "Dependency foo is not an optional dependency",
        );
    }

    #[test]
    fn basic() {
        let toml = r#"
[abcd]
[features]#xyz
#! abc
#
###
#! def
#!
## 123
## 456
feat1 = ["plop"]
#! ghi
no_doc = []
##
feat2 = ["momo"]
#! klm
default = ["feat1", "something_else"]
#! end
        "#;
        let parsed = process_toml(toml, &Args::default()).unwrap();
        assert_eq!(
            parsed,
            " abc\n def\n\n* **`feat1`** *(enabled by default)* —  123\n  456\n\n ghi\n* **`feat2`**\n\n klm\n end\n"
        );
        let parsed = process_toml(
            toml,
            &Args {
                feature_label: Some(
                    "<span class=\"stab portability\"><code>{feature}</code></span>".into(),
                ),
            },
        )
        .unwrap();
        assert_eq!(
            parsed,
            " abc\n def\n\n* <span class=\"stab portability\"><code>feat1</code></span> *(enabled by default)* —  123\n  456\n\n ghi\n* <span class=\"stab portability\"><code>feat2</code></span>\n\n klm\n end\n"
        );
    }

    #[test]
    fn dependencies() {
        let toml = r#"
#! top
[dev-dependencies] #yo
## dep1
dep1 = { version="1.2", optional=true}
#! yo
dep2 = "1.3"
## dep3
[target.'cfg(unix)'.build-dependencies.dep3]
version = "42"
optional = true
        "#;
        let parsed = process_toml(toml, &Args::default()).unwrap();
        assert_eq!(parsed, " top\n* **`dep1`** —  dep1\n\n yo\n* **`dep3`** —  dep3\n");
        let parsed = process_toml(
            toml,
            &Args {
                feature_label: Some(
                    "<span class=\"stab portability\"><code>{feature}</code></span>".into(),
                ),
            },
        )
        .unwrap();
        assert_eq!(parsed, " top\n* <span class=\"stab portability\"><code>dep1</code></span> —  dep1\n\n yo\n* <span class=\"stab portability\"><code>dep3</code></span> —  dep3\n");
    }

    #[test]
    fn multi_lines() {
        let toml = r#"
[package.metadata.foo]
ixyz = [
    ["array"],
    [
        "of",
        "arrays"
    ]
]
[dev-dependencies]
## dep1
dep1 = {
    version="1.2-}",
    optional=true
}
[features]
default = [
    "goo",
    "\"]",
    "bar",
]
## foo
foo = [
    "bar"
]
## bar
bar = [

]
        "#;
        let parsed = process_toml(toml, &Args::default()).unwrap();
        assert_eq!(
            parsed,
            "* **`dep1`** —  dep1\n* **`foo`** —  foo\n* **`bar`** *(enabled by default)* —  bar\n"
        );
        let parsed = process_toml(
            toml,
            &Args {
                feature_label: Some(
                    "<span class=\"stab portability\"><code>{feature}</code></span>".into(),
                ),
            },
        )
        .unwrap();
        assert_eq!(
            parsed,
            "* <span class=\"stab portability\"><code>dep1</code></span> —  dep1\n* <span class=\"stab portability\"><code>foo</code></span> —  foo\n* <span class=\"stab portability\"><code>bar</code></span> *(enabled by default)* —  bar\n"
        );
    }

    #[test]
    fn dots_in_feature() {
        let toml = r#"
[features]
## This is a test
"teßt." = []
default = ["teßt."]
[dependencies]
## A dep
"dep" = { version = "123", optional = true }
        "#;
        let parsed = process_toml(toml, &Args::default()).unwrap();
        assert_eq!(
            parsed,
            "* **`teßt.`** *(enabled by default)* —  This is a test\n* **`dep`** —  A dep\n"
        );
        let parsed = process_toml(
            toml,
            &Args {
                feature_label: Some(
                    "<span class=\"stab portability\"><code>{feature}</code></span>".into(),
                ),
            },
        )
        .unwrap();
        assert_eq!(
            parsed,
            "* <span class=\"stab portability\"><code>teßt.</code></span> *(enabled by default)* —  This is a test\n* <span class=\"stab portability\"><code>dep</code></span> —  A dep\n"
        );
    }
}