1use convert_case::{Case, Casing};
2use proc_macro2::{Span, TokenStream};
3use proc_macro_error2::{abort, abort_call_site, proc_macro_error, OptionExt};
4use quote::{quote, ToTokens};
5use syn::{
6 parse::{Parse, ParseStream, Parser},
7 punctuated::Punctuated,
8 token::Comma,
9 ExprClosure, Field, Fields, GenericParam, Generics, Ident, Index, Meta,
10 Result, Token, Type, TypeParam, Variant, Visibility, WhereClause,
11};
12
13#[proc_macro_error]
14#[proc_macro_derive(Store, attributes(store))]
15pub fn derive_store(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
16 syn::parse_macro_input!(input as Model)
17 .into_token_stream()
18 .into()
19}
20
21#[proc_macro_error]
22#[proc_macro_derive(Patch, attributes(store, patch))]
23pub fn derive_patch(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
24 syn::parse_macro_input!(input as PatchModel)
25 .into_token_stream()
26 .into()
27}
28
29fn remove_constraint_from_generics(generics: &Generics) -> Generics {
90 let mut new_generics = generics.clone();
91
92 for param in new_generics.params.iter_mut() {
96 match param {
97 GenericParam::Lifetime(lifetime) => {
98 lifetime.bounds.clear(); lifetime.colon_token = None;
100 }
101 GenericParam::Type(type_param) => {
102 type_param.bounds.clear(); type_param.colon_token = None;
104 type_param.eq_token = None;
105 type_param.default = None;
106 }
107 GenericParam::Const(const_param) => {
108 *param = GenericParam::Type(TypeParam {
110 attrs: const_param.attrs.clone(),
111 ident: const_param.ident.clone(),
112 colon_token: None,
113 bounds: Punctuated::new(),
114 eq_token: None,
115 default: None,
116 });
117 }
118 }
119 }
120
121 new_generics.where_clause = None; new_generics
124}
125
/// Parsed representation of a type deriving `Store`.
struct Model {
    /// Visibility of the input type; reused for the generated trait.
    vis: Visibility,
    /// Name of the input type.
    name: Ident,
    /// Generics of the input type, including bounds and `where` clause.
    generics: Generics,
    /// Whether the input is a struct or an enum, with its fields/variants.
    ty: ModelTy,
}
132
/// The shape of a type deriving `Store`.
enum ModelTy {
    /// A struct with named or unnamed (tuple) fields.
    Struct { fields: Vec<Field> },
    /// An enum and its variants.
    Enum { variants: Vec<Variant> },
}
137
138impl Parse for Model {
139 fn parse(input: ParseStream) -> Result<Self> {
140 let input = syn::DeriveInput::parse(input)?;
141
142 let ty = match input.data {
143 syn::Data::Struct(s) => {
144 let fields = match s.fields {
145 syn::Fields::Unit => {
146 abort!(s.semi_token, "unit structs are not supported");
147 }
148 syn::Fields::Named(fields) => {
149 fields.named.into_iter().collect::<Vec<_>>()
150 }
151 syn::Fields::Unnamed(fields) => {
152 fields.unnamed.into_iter().collect::<Vec<_>>()
153 }
154 };
155
156 ModelTy::Struct { fields }
157 }
158 syn::Data::Enum(e) => ModelTy::Enum {
159 variants: e.variants.into_iter().collect(),
160 },
161 _ => {
162 abort_call_site!(
163 "only structs and enums can be used with `Store`"
164 );
165 }
166 };
167
168 Ok(Self {
169 vis: input.vis,
170 generics: input.generics,
171 name: input.ident,
172 ty,
173 })
174 }
175}
176
/// One mode parsed from a `#[store(...)]` field attribute.
#[derive(Clone)]
enum SubfieldMode {
    /// `key: <Type> = <closure>` — generate a keyed accessor, using the
    /// closure to derive a key of the given type from each item.
    Keyed(Box<ExprClosure>, Box<Type>),
    /// `skip` — generate no accessor for this field.
    Skip,
}
182
183impl Parse for SubfieldMode {
184 fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
185 let mode: Ident = input.parse()?;
186 if mode == "key" {
187 let _col: Token![:] = input.parse()?;
188 let ty: Type = input.parse()?;
189 let _eq: Token![=] = input.parse()?;
190 let closure: ExprClosure = input.parse()?;
191 Ok(SubfieldMode::Keyed(Box::new(closure), Box::new(ty)))
192 } else if mode == "skip" {
193 Ok(SubfieldMode::Skip)
194 } else {
195 Err(input.error("expected `key: <Type> = <closure>`"))
196 }
197 }
198}
199
impl ToTokens for Model {
    /// Expands the derive into a `{Name}StoreFields` extension trait plus a
    /// blanket impl of that trait for any `StoreField` whose value is `{Name}`.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let library_path = quote! { reactive_stores };
        let Model {
            vis,
            name,
            generics,
            ty,
        } = &self;
        // Extra generic parameter standing for "any store field type".
        let any_store_field = Ident::new("AnyStoreField", Span::call_site());
        let trait_name = Ident::new(&format!("{name}StoreFields"), name.span());
        // Generics with bounds/defaults stripped, usable as type arguments.
        let clear_generics = remove_constraint_from_generics(generics);
        let params = &generics.params;
        let clear_params = &clear_generics.params;
        let generics_with_orig = quote! { <#any_store_field, #params> };
        // `where` clause shared by trait and impl: the original predicates
        // (if any), plus the constraint tying `AnyStoreField` to a
        // `StoreField` whose `Value` is the deriving type.
        let where_with_orig = {
            generics
                .where_clause
                .as_ref()
                .map(|w| {
                    let WhereClause {
                        where_token,
                        predicates,
                    } = &w;
                    quote! {
                        #where_token
                        #any_store_field: #library_path::StoreField<Value = #name < #clear_params > >,
                        #predicates
                    }
                })
                .unwrap_or_else(|| quote! { where #any_store_field: #library_path::StoreField<Value = #name < #clear_params > > })
        };

        // Trait method signatures and the matching implementations.
        let (trait_fields, read_fields): (Vec<_>, Vec<_>) = ty.to_field_data(
            &library_path,
            generics,
            &clear_generics,
            &any_store_field,
            name,
        );

        tokens.extend(quote! {
            #vis trait #trait_name <AnyStoreField, #params>
            #where_with_orig
            {
                #(#trait_fields)*
            }

            impl #generics_with_orig #trait_name <AnyStoreField, #clear_params> for AnyStoreField
            #where_with_orig
            {
                #(#read_fields)*
            }
        });
    }
}
259
impl ModelTy {
    /// For every struct field or enum variant, generates a pair of token
    /// streams: the trait-method *signature* and the matching method
    /// *implementation*. Returned as `(signatures, implementations)`.
    fn to_field_data(
        &self,
        library_path: &TokenStream,
        generics: &Generics,
        clear_generics: &Generics,
        any_store_field: &Ident,
        name: &Ident,
    ) -> (Vec<TokenStream>, Vec<TokenStream>) {
        match self {
            ModelTy::Struct { fields } => fields
                .iter()
                .enumerate()
                .map(|(idx, field)| {
                    let Field {
                        ident, ty, attrs, ..
                    } = &field;
                    // Collect any `#[store(...)]` modes on this field
                    // (e.g. `key: <Type> = <closure>` or `skip`); a parse
                    // failure inside the attribute aborts with the syn error.
                    let modes = attrs
                        .iter()
                        .find_map(|attr| {
                            attr.meta.path().is_ident("store").then(|| {
                                match &attr.meta {
                                    Meta::List(list) => {
                                        match Punctuated::<
                                            SubfieldMode,
                                            Comma,
                                        >::parse_terminated
                                        .parse2(list.tokens.clone())
                                        {
                                            Ok(modes) => Some(
                                                modes
                                                    .iter()
                                                    .cloned()
                                                    .collect::<Vec<_>>(),
                                            ),
                                            Err(e) => abort!(list, e),
                                        }
                                    }
                                    _ => None,
                                }
                            })
                        })
                        .flatten();

                    // First call: signature only; second call: with body.
                    (
                        field_to_tokens(
                            idx,
                            false,
                            modes.as_deref(),
                            library_path,
                            ident.as_ref(),
                            generics,
                            clear_generics,
                            any_store_field,
                            name,
                            ty,
                        ),
                        field_to_tokens(
                            idx,
                            true,
                            modes.as_deref(),
                            library_path,
                            ident.as_ref(),
                            generics,
                            clear_generics,
                            any_store_field,
                            name,
                            ty,
                        ),
                    )
                })
                .unzip(),
            ModelTy::Enum { variants } => variants
                .iter()
                .map(|variant| {
                    let Variant { ident, fields, .. } = variant;

                    // Same signature/body pairing as for struct fields.
                    (
                        variant_to_tokens(
                            false,
                            library_path,
                            ident,
                            generics,
                            clear_generics,
                            any_store_field,
                            name,
                            fields,
                        ),
                        variant_to_tokens(
                            true,
                            library_path,
                            ident,
                            generics,
                            clear_generics,
                            any_store_field,
                            name,
                            fields,
                        ),
                    )
                })
                .unzip(),
        }
    }
}
364
/// Generates a single accessor method for one struct field.
///
/// With `include_body == false` only the signature is produced (for the trait
/// definition); with `true`, the full method (for the blanket impl).
/// `orig_ident` is `None` for tuple-struct fields, in which case the method is
/// named `field{idx}` and the field is addressed by numeric index.
#[allow(clippy::too_many_arguments)]
fn field_to_tokens(
    idx: usize,
    include_body: bool,
    modes: Option<&[SubfieldMode]>,
    library_path: &proc_macro2::TokenStream,
    orig_ident: Option<&Ident>,
    _generics: &Generics,
    clear_generics: &Generics,
    any_store_field: &Ident,
    name: &Ident,
    ty: &Type,
) -> proc_macro2::TokenStream {
    // Method name: the field name, or `field{idx}` for tuple fields.
    let ident = if orig_ident.is_none() {
        let idx = Ident::new(&format!("field{idx}"), Span::call_site());
        quote! { #idx }
    } else {
        quote! { #orig_ident }
    };
    // Expression addressing the field on the value (`.name` or `.0`).
    let locator = if orig_ident.is_none() {
        let idx = Index::from(idx);
        quote! { #idx }
    } else {
        quote! { #ident }
    };

    if let Some(modes) = modes {
        if modes.len() == 1 {
            let mode = &modes[0];
            match mode {
                // Keyed accessor: the field index is the path segment, and the
                // user closure derives the key for each item.
                SubfieldMode::Keyed(keyed_by, key_ty) => {
                    let signature = quote! {
                        #[track_caller]
                        fn #ident(self) -> #library_path::KeyedSubfield<#any_store_field, #name #clear_generics, #key_ty, #ty>
                    };
                    return if include_body {
                        quote! {
                            #signature {
                                #library_path::KeyedSubfield::new(
                                    self,
                                    #idx.into(),
                                    #keyed_by,
                                    |prev| &prev.#locator,
                                    |prev| &mut prev.#locator,
                                )
                            }
                        }
                    } else {
                        quote! { #signature; }
                    };
                }
                // `skip`: emit no accessor at all for this field.
                SubfieldMode::Skip => return quote! {},
            }
        } else {
            abort!(
                orig_ident
                    .map(|ident| ident.span())
                    .unwrap_or_else(Span::call_site),
                "multiple modes not currently supported"
            );
        }
    }

    // Default (un-keyed) accessor: a plain `Subfield` at path segment `idx`.
    if include_body {
        quote! {
            fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #clear_generics, #ty> {
                #library_path::Subfield::new(
                    self,
                    #idx.into(),
                    |prev| &prev.#locator,
                    |prev| &mut prev.#locator,
                )
            }
        }
    } else {
        quote! {
            fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #clear_generics, #ty>;
        }
    }
}
446
/// Generates the accessor methods for one enum variant.
///
/// Every variant gets a `fn variant_name(self) -> bool` predicate that tracks
/// the field and reports whether the store currently holds that variant. Each
/// field of the variant additionally gets a `fn variant_name_field(self) ->
/// Option<Subfield<...>>` accessor that returns `Some` only while the variant
/// matches. With `include_body == false`, only signatures are generated.
#[allow(clippy::too_many_arguments)]
fn variant_to_tokens(
    include_body: bool,
    library_path: &proc_macro2::TokenStream,
    ident: &Ident,
    _generics: &Generics,
    clear_generics: &Generics,
    any_store_field: &Ident,
    name: &Ident,
    fields: &Fields,
) -> proc_macro2::TokenStream {
    // Method names are the snake_case form of the variant name.
    let orig_ident = &ident;
    let ident =
        Ident::new(&ident.to_string().to_case(Case::Snake), ident.span());

    match fields {
        // Unit variant: only the boolean predicate is generated.
        Fields::Unit => {
            if include_body {
                quote! {
                    fn #ident(self) -> bool {
                        match #library_path::StoreField::reader(&self) {
                            Some(reader) => {
                                #library_path::StoreField::track_field(&self);
                                matches!(&*reader, #name::#orig_ident)
                            },
                            None => false
                        }
                    }
                }
            } else {
                quote! {
                    fn #ident(self) -> bool;
                }
            }
        }
        // Struct-like variant: predicate plus one accessor per named field.
        Fields::Named(fields) => {
            let mut tokens = if include_body {
                quote! {
                    fn #ident(self) -> bool {
                        match #library_path::StoreField::reader(&self) {
                            Some(reader) => {
                                #library_path::StoreField::track_field(&self);
                                matches!(&*reader, #name::#orig_ident { .. })
                            },
                            None => false
                        }
                    }
                }
            } else {
                quote! {
                    fn #ident(self) -> bool;
                }
            };

            tokens.extend(fields
                .named
                .iter()
                .map(|field| {
                    let field_ident = field.ident.as_ref().unwrap();
                    let field_ty = &field.ty;
                    // Accessor is named `{variant}_{field}`.
                    let combined_ident = Ident::new(
                        &format!("{ident}_{field_ident}"),
                        field_ident.span(),
                    );

                    if include_body {
                        // The subfield is only valid while the enum still holds
                        // this variant, hence the `Option` and the `.expect(...)`
                        // inside the reader/writer closures.
                        quote! {
                            fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #clear_generics, #field_ty>> {
                                #library_path::StoreField::track_field(&self);
                                let reader = #library_path::StoreField::reader(&self);
                                let matches = reader
                                    .map(|reader| matches!(&*reader, #name::#orig_ident { .. }))
                                    .unwrap_or(false);
                                if matches {
                                    Some(#library_path::Subfield::new(
                                        self,
                                        0.into(),
                                        |prev| {
                                            match prev {
                                                #name::#orig_ident { #field_ident, .. } => Some(#field_ident),
                                                _ => None,
                                            }
                                            .expect("accessed an enum field that is no longer matched")
                                        },
                                        |prev| {
                                            match prev {
                                                #name::#orig_ident { #field_ident, .. } => Some(#field_ident),
                                                _ => None,
                                            }
                                            .expect("accessed an enum field that is no longer matched")
                                        },
                                    ))
                                } else {
                                    None
                                }
                            }
                        }
                    } else {
                        quote! {
                            fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #clear_generics, #field_ty>>;
                        }
                    }
                }));

            tokens
        }
        // Tuple-like variant: predicate plus one accessor per positional field.
        Fields::Unnamed(fields) => {
            let mut tokens = if include_body {
                quote! {
                    fn #ident(self) -> bool {
                        match #library_path::StoreField::reader(&self) {
                            Some(reader) => {
                                #library_path::StoreField::track_field(&self);
                                matches!(&*reader, #name::#orig_ident { .. })
                            },
                            None => false
                        }
                    }
                }
            } else {
                quote! {
                    fn #ident(self) -> bool;
                }
            };

            let number_of_fields = fields.unnamed.len();

            tokens.extend(fields
                .unnamed
                .iter()
                .enumerate()
                .map(|(idx, field)| {
                    let field_ident = idx;
                    let field_ty = &field.ty;
                    // Accessor is named `{variant}_{index}`.
                    let combined_ident = Ident::new(
                        &format!("{ident}_{field_ident}"),
                        ident.span(),
                    );

                    // `_,` placeholders so the pattern binds only field `idx`;
                    // duplicated because each quote! interpolation consumes
                    // its iterator.
                    let ignore_before = (0..idx).map(|_| quote! { _, });
                    let ignore_before2 = ignore_before.clone();
                    let ignore_after = (idx..number_of_fields.saturating_sub(1)).map(|_| quote !{_, });
                    let ignore_after2 = ignore_after.clone();

                    if include_body {
                        quote! {
                            fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #clear_generics, #field_ty>> {
                                #library_path::StoreField::track_field(&self);
                                let reader = #library_path::StoreField::reader(&self);
                                let matches = reader
                                    .map(|reader| matches!(&*reader, #name::#orig_ident(..)))
                                    .unwrap_or(false);
                                if matches {
                                    Some(#library_path::Subfield::new(
                                        self,
                                        0.into(),
                                        |prev| {
                                            match prev {
                                                #name::#orig_ident(#(#ignore_before)* this, #(#ignore_after)*) => Some(this),
                                                _ => None,
                                            }
                                            .expect("accessed an enum field that is no longer matched")
                                        },
                                        |prev| {
                                            match prev {
                                                #name::#orig_ident(#(#ignore_before2)* this, #(#ignore_after2)*) => Some(this),
                                                _ => None,
                                            }
                                            .expect("accessed an enum field that is no longer matched")
                                        },
                                    ))
                                } else {
                                    None
                                }
                            }
                        }
                    } else {
                        quote! {
                            fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #clear_generics, #field_ty>>;
                        }
                    }
                }));

            tokens
        }
    }
}
646
/// Parsed representation of a type deriving `Patch`.
struct PatchModel {
    /// Name of the input type.
    pub name: Ident,
    /// Generics of the input type, including bounds and `where` clause.
    pub generics: Generics,
    /// The input's shape; only structs are accepted by the parser.
    pub ty: PatchModelTy,
}
652
/// The shape of a type deriving `Patch`.
enum PatchModelTy {
    /// A struct with named or unnamed (tuple) fields.
    Struct {
        fields: Vec<Field>,
    },
    /// Enums are rejected during parsing; this variant is never constructed.
    #[allow(dead_code)]
    Enum {
        variants: Vec<Variant>,
    },
}
662
663impl Parse for PatchModel {
664 fn parse(input: ParseStream) -> Result<Self> {
665 let input = syn::DeriveInput::parse(input)?;
666
667 let ty = match input.data {
668 syn::Data::Struct(s) => {
669 let fields = match s.fields {
670 syn::Fields::Unit => {
671 abort!(s.semi_token, "unit structs are not supported");
672 }
673 syn::Fields::Named(fields) => {
674 fields.named.into_iter().collect::<Vec<_>>()
675 }
676 syn::Fields::Unnamed(fields) => {
677 fields.unnamed.into_iter().collect::<Vec<_>>()
678 }
679 };
680
681 PatchModelTy::Struct { fields }
682 }
683 syn::Data::Enum(_e) => {
684 abort_call_site!("only structs can be used with `Patch`");
685
686 }
691 _ => {
692 abort_call_site!(
693 "only structs and enums can be used with `Store`"
694 );
695 }
696 };
697
698 Ok(Self {
699 name: input.ident,
700 generics: input.generics,
701 ty,
702 })
703 }
704}
705
impl ToTokens for PatchModel {
    /// Expands the derive into an `impl reactive_stores::PatchField` for the
    /// struct: each field is patched in declaration order, and `notify` is
    /// invoked with the path of any field that changed.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let library_path = quote! { reactive_stores };
        let PatchModel { name, generics, ty } = &self;

        let fields = match ty {
            PatchModelTy::Struct { fields } => {
                fields.iter().enumerate().map(|(idx, field)| {
                    let Field {
                        attrs, ident, ..
                    } = &field;
                    // Named fields are addressed by ident, tuple fields by index.
                    let locator = match &ident {
                        Some(ident) => Either::Left(ident),
                        None => Either::Right(Index::from(idx)),
                    };
                    // `#[patch(|this, new| ...)]`: a custom merge closure for
                    // this field; must be exactly one closure with two params.
                    let closure = attrs
                        .iter()
                        .find_map(|attr| {
                            attr.meta.path().is_ident("patch").then(
                                || match &attr.meta {
                                    Meta::List(list) => {
                                        match Punctuated::<
                                            ExprClosure,
                                            Comma,
                                        >::parse_terminated
                                        .parse2(list.tokens.clone())
                                        {
                                            Ok(closures) => {
                                                let closure = closures.iter().next().cloned().expect_or_abort("should have ONE closure");
                                                if closure.inputs.len() != 2 {
                                                    abort!(closure.inputs, "patch closure should have TWO params as in #[patch(|this, new| ...)]");
                                                }
                                                closure
                                            },
                                            Err(e) => abort!(list, e),
                                        }
                                    }
                                    _ => abort!(attr.meta, "needs to be as `#[patch(|this, new| ...)]`"),
                                },
                            )
                        });
                    // `#[store(key: ...)]`: reuse the keyed-subfield closure to
                    // patch keyed collections item-by-item.
                    let keyed = attrs
                        .iter()
                        .find_map(|attr| {
                            attr.meta.path().is_ident("store").then(|| {
                                match &attr.meta {
                                    Meta::List(list) => {
                                        let subfields = match Punctuated::<
                                            SubfieldMode,
                                            Comma,
                                        >::parse_terminated
                                        .parse2(list.tokens.clone())
                                        {
                                            Ok(modes) => Some(
                                                modes
                                                    .iter()
                                                    .cloned()
                                                    .collect::<Vec<_>>(),
                                            ),
                                            Err(e) => abort!(list, e),
                                        }.unwrap_or_default();
                                        subfields.into_iter().find_map(|subfield| match subfield {
                                            SubfieldMode::Keyed(closure, _ty) => Some(closure),
                                            SubfieldMode::Skip => None,
                                        })
                                    }
                                    _ => None,
                                }
                            })
                        }).flatten();

                    if let Some(closure) = closure {
                        // Custom patch: run the user closure only when the
                        // field value actually changed, then notify. The
                        // trailing `replace_last` advances the path segment
                        // for the next field.
                        let params = closure.inputs;
                        let body = closure.body;
                        quote! {
                            if new.#locator != self.#locator {
                                _ = {
                                    let (#params) = (&mut self.#locator, new.#locator);
                                    #body
                                };
                                notify(&new_path);
                            }
                            new_path.replace_last(#idx + 1);
                        }
                    } else if let Some(closure) = keyed {
                        // Keyed patch: delegate to `PatchFieldKeyed`, resolving
                        // each key to a store-path segment via the key map.
                        quote! {
                            #library_path::PatchFieldKeyed::patch_field_keyed(
                                &mut self.#locator,
                                new.#locator,
                                notify,
                                keys,
                                #closure,
                                |key| {
                                    let keys = keys.as_ref()?;
                                    let segment = keys
                                        .with_field_keys(
                                            path.clone(),
                                            |keys| (keys.get(key), vec![]),
                                            || vec![],
                                        )
                                        .flatten()
                                        .map(|(_, idx)| idx)?;
                                    let mut path = path.clone();
                                    path.push(segment);
                                    Some(path)
                                }
                            );
                            new_path.replace_last(#idx + 1);
                        }
                    } else {
                        // Default: recurse into the field's own `PatchField` impl.
                        quote! {
                            #library_path::PatchField::patch_field(
                                &mut self.#locator,
                                new.#locator,
                                &new_path,
                                notify,
                                keys
                            );
                            new_path.replace_last(#idx + 1);
                        }
                    }
                }).collect::<Vec<_>>()
            }
            // The parser never constructs `Enum`, so this arm is unreachable.
            PatchModelTy::Enum { variants: _ } => {
                unreachable!("not implemented currently")
            }
        };

        let clear_generics = remove_constraint_from_generics(generics);
        let params = clear_generics.params;
        let where_clause = &generics.where_clause;

        tokens.extend(quote! {
            impl #generics #library_path::PatchField for #name <#params>
            #where_clause
            {
                fn patch_field(
                    &mut self,
                    new: Self,
                    path: &#library_path::StorePath,
                    notify: &mut dyn FnMut(&#library_path::StorePath),
                    keys: Option<&#library_path::KeyMap>,
                ) {
                    let mut new_path = path.clone();
                    new_path.push(0);
                    #(#fields)*
                }
            }
        });
    }
}
858
/// Minimal two-way sum type, used to treat named-field idents and
/// tuple-field indices uniformly when interpolating into `quote!`.
enum Either<A, B> {
    Left(A),
    Right(B),
}
863
864impl<A: ToTokens, B: ToTokens> ToTokens for Either<A, B> {
865 fn to_tokens(&self, tokens: &mut TokenStream) {
866 match self {
867 Either::Left(a) => a.to_tokens(tokens),
868 Either::Right(b) => b.to_tokens(tokens),
869 }
870 }
871}