// moverox_codegen/attributes.rs
use std::collections::HashSet;
2
3use move_syn::Attributes;
4use quote::quote;
5use unsynn::{IParse as _, Ident, ToTokens as _, TokenStream};
6
7use crate::Result;
8
9mod grammar {
10 use unsynn::*;
11
12 mod kw {
13 use unsynn::unsynn;
14
15 unsynn! {
16 pub(super) keyword Moverox = "moverox";
17 pub(super) keyword Otw = "OTW";
18 pub(super) keyword Type = "type_";
21 }
22 }
23
24 unsynn! {
25 pub(crate) struct Annotation {
34 kw: kw::Moverox,
35 contents: ParenthesisGroupContaining<CommaDelimitedVec<Setting>>
36 }
37
38 pub(super) enum Setting {
40 Type(Type)
42 }
43
44 pub(super) struct Type {
46 kw: kw::Type,
47 contents: ParenthesisGroupContaining<CommaDelimitedVec<TypeDefault>>,
48 }
49
50 struct TypeDefault {
52 ident: Ident,
54 assign: Assign,
55 default: kw::Otw,
57 }
58 }
59
60 impl Annotation {
61 pub(super) fn settings(&self) -> impl Iterator<Item = &Setting> + '_ {
62 self.contents
63 .content
64 .iter()
65 .map(|delimited| &delimited.value)
66 }
67 }
68
69 impl Setting {
70 pub(super) fn otw_types(&self) -> impl Iterator<Item = &Ident> + '_ {
71 let Self::Type(ty) = self;
72 ty.contents
73 .content
74 .iter()
75 .map(|delimited| &delimited.value.ident)
76 }
77 }
78}
79
80pub(super) fn extract(attrs: &[Attributes]) -> Result<(TokenStream, HashSet<Ident>)> {
82 let (move_docs, other): (Vec<_>, Vec<_>) = attrs.iter().partition(|attr| attr.is_doc());
83
84 let rust_docs = move_docs.into_iter().map(process_doc).collect();
85
86 let custom: Vec<_> = other.into_iter().flat_map(as_moverox).collect();
87 let mut otw_types = HashSet::new();
88 for ident in custom
89 .iter()
90 .flat_map(|custom| custom.settings())
91 .flat_map(|setting| setting.otw_types())
92 {
93 if otw_types.contains(ident) {
94 return Err(format!("Type {ident} declared twice").into());
95 }
96 otw_types.insert(ident.to_owned());
97 }
98
99 Ok((rust_docs, otw_types))
100}
101
102pub(super) fn as_moverox(attr: &Attributes) -> impl Iterator<Item = self::grammar::Annotation> {
103 attr.external_attributes()
104 .filter_map(|ext| ext.to_token_iter().parse_all().ok())
105}
106
107fn process_doc(attr: &Attributes) -> TokenStream {
108 let inner = attr.contents().to_token_stream();
109 quote!(#[cfg_attr(not(doctest), #inner)])
112}