use proc_macro::TokenStream;
use proc_macro2::TokenStream as TokenStream2;
use proc_macro2::TokenTree as TokenTree2;
use quote::{quote, ToTokens, TokenStreamExt};
use std::iter;
use syn::token::Comma;
use syn::{
    parenthesized,
    parse::{Parse, ParseStream},
    parse_macro_input,
    punctuated::Punctuated,
    spanned::Spanned,
    token, Data, DeriveInput, Error, Expr, ExprRange, Ident, Index, Lit, LitInt, Path, RangeLimits,
    Result, Token, Type, TypePath, Visibility,
};

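/// Derives `my_ecs::prelude::Component` for a type by forwarding the default/clone
/// metadata constants to `my_ecs::ds::TypeHelper`.
///
/// A rough usage sketch; the component type is illustrative, and the example assumes
/// the derive is re-exported by `my_ecs`:
///
/// ```ignore
/// use my_ecs::prelude::*;
///
/// #[derive(Component)]
/// struct Position {
///     x: f32,
///     y: f32,
/// }
/// ```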
#[proc_macro_derive(Component)]
pub fn derive_component(input: TokenStream) -> TokenStream {
    let ast = parse_macro_input!(input as DeriveInput);
    let ident = ast.ident.clone();

    TokenStream::from(quote! {
        impl my_ecs::prelude::Component for #ident {
            const IS_DEFAULT: bool
                = my_ecs::ds::TypeHelper::<#ident>::IS_DEFAULT;
            const FN_DEFAULT: my_ecs::ds::FnDefaultRaw
                = my_ecs::ds::TypeHelper::<#ident>::FN_DEFAULT;
            const IS_CLONE: bool
                = my_ecs::ds::TypeHelper::<#ident>::IS_CLONE;
            const FN_CLONE: my_ecs::ds::FnCloneRaw
                = my_ecs::ds::TypeHelper::<#ident>::FN_CLONE;
        }
    })
}

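/// Derives the entity-related traits (`AsEntityReg`, `Components`, `Entity`) for a
/// struct with named fields, along with generated `<Name>__Ref` / `<Name>__Mut` view
/// structs. Every field type must implement `Component`. The optional `container` and
/// `random_state` attributes pick the backing container and hasher; they default to
/// `SparseSet` and `std::hash::RandomState`.
///
/// A rough usage sketch; the component and entity names are illustrative, and the
/// example assumes the derives are re-exported by `my_ecs`:
///
/// ```ignore
/// use my_ecs::prelude::*;
///
/// #[derive(Component)]
/// struct Pos { x: f32, y: f32 }
///
/// #[derive(Component)]
/// struct Vel { x: f32, y: f32 }
///
/// // Uses the default `SparseSet` container and `std::hash::RandomState`.
/// #[derive(Entity)]
/// struct Monster {
///     pos: Pos,
///     vel: Vel,
/// }
///
/// // Or select them explicitly.
/// #[derive(Entity)]
/// #[container(SparseSet)]
/// #[random_state(std::hash::RandomState)]
/// struct Player {
///     pos: Pos,
///     vel: Vel,
/// }
/// ```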
#[proc_macro_derive(Entity, attributes(container, random_state))]
pub fn derive_entity(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let vis = input.vis.clone();
    let ident = input.ident.clone();
    let ident_str = ident.to_string();

    let (field_idents, field_types): (Vec<_>, Vec<_>) = match input.data {
        Data::Struct(data_struct) => data_struct
            .fields
            .iter()
            .map(|field| {
                let ident = field
                    .ident
                    .clone()
                    .expect("`#[derive(Entity)]` requires named fields");
                (ident, field.ty.clone())
            })
            .unzip(),
        _ => panic!("`#[derive(Entity)]` only supports structs with named fields"),
    };
    let field_ident_strs = field_idents
        .iter()
        .map(|ident| ident.to_string())
        .collect::<Vec<_>>();

    // Emits `const _: () = { ... }` that fails to compile unless every field type
    // implements `Component`.
    let validate_impl_component = quote! {
        const _: () = {
            const fn validate<T: my_ecs::prelude::Component>() {}
            #(
                validate::<#field_types>();
            )*
        };
    };

    // `#[container(...)]` selects the component container type; defaults to `SparseSet`.
    let container = input
        .attrs
        .iter()
        .filter_map(|attr| {
            if attr.path().is_ident("container") {
                let ty: Path = attr
                    .parse_args()
                    .expect("`#[container(..)]` expects a type path, e.g. `#[container(SparseSet)]`");
                Some(quote! { #ty })
            } else {
                None
            }
        })
        .next()
        .unwrap_or(quote! { SparseSet });
    // `#[random_state(...)]` selects the hasher state; defaults to `std::hash::RandomState`.
    let random_state = input
        .attrs
        .iter()
        .filter_map(|attr| {
            if attr.path().is_ident("random_state") {
                let ty: Path = attr
                    .parse_args()
                    .expect("`#[random_state(..)]` expects a type path");
                Some(quote! { #ty })
            } else {
                None
            }
        })
        .next()
        .unwrap_or(quote! { std::hash::RandomState });

    let impl_as_entity_ref = quote! {
        impl my_ecs::prelude::AsEntityReg for #ident {
            fn entity_descriptor() -> my_ecs::prelude::EntityReg {
                let name = my_ecs::prelude::EntityName::new(
                    #ident_str.into()
                );
                let cont = Box::new(
                    my_ecs::prelude::#container::<#random_state>::new()
                );
                let mut desc = my_ecs::prelude::EntityReg::new(
                    Some(name), cont
                );
                #(
                    desc.add_component(my_ecs::tinfo!(#field_types));
                )*
                desc
            }
        }
    };

    let num_fields = field_types.len();
    let impl_components = quote! {
        impl my_ecs::prelude::Components for #ident {
            type Keys = [my_ecs::prelude::ComponentKey; #num_fields];
            type Infos = [my_ecs::ds::TypeInfo; #num_fields];

            const LEN: usize = #num_fields;

            fn keys() -> Self::Keys {
                [#(
                    <#field_types as my_ecs::prelude::Component>::key()
                ),*]
            }

            fn infos() -> Self::Infos {
                [#(
                    <#field_types as my_ecs::prelude::Component>::type_info()
                ),*]
            }

            fn sorted_keys() -> Self::Keys {
                let mut keys = [#(
                    <#field_types as my_ecs::prelude::Component>::key()
                ),*];
                keys.sort_unstable();
                keys
            }
        }
    };

    let ref_ident = Ident::new(&format!("{}__Ref", ident_str), ident.span());
    let decl_entity_ref = quote! {
        #[allow(non_camel_case_types)]
        #vis struct #ref_ident<'cont> {
            #(
                #vis #field_idents: &'cont #field_types
            ),*
        }
    };

    let mut_ident = Ident::new(&format!("{}__Mut", ident_str), ident.span());
    let decl_entity_mut = quote! {
        #[allow(non_camel_case_types)]
        #vis struct #mut_ident<'cont> {
            #(
                #vis #field_idents: &'cont mut #field_types
            ),*
        }
    };

    // Generates a `Debug` impl that prints only the fields whose types implement
    // `Debug`; if any field is skipped, the output ends with `..`.
    fn create_entity_ref_or_mut_impl_debug<'a>(
        ident_str: &str,
        ref_ident: &Ident,
        field_idents: &'a [Ident],
        field_types: &'a [Type],
        field_ident_strs: &'a [String],
    ) -> TokenStream2 {
        quote! {
            impl<'cont> std::fmt::Debug for #ref_ident<'cont> {
                fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
                    let mut s = f.debug_struct(#ident_str);
                    let mut is_full = true;

                    #(
                        if my_ecs::ds::TypeHelper::<#field_types>::IS_DEBUG {
                            let helper = my_ecs::ds::DebugHelper {
                                f: my_ecs::ds::TypeHelper::<#field_types>::FN_FMT,
                                ptr: self.#field_idents as *const #field_types as *const u8,
                            };
                            s.field(#field_ident_strs, &helper);
                        } else {
                            is_full = false;
                        }
                    )*

                    if is_full {
                        s.finish()
                    } else {
                        s.finish_non_exhaustive()
                    }
                }
            }
        }
    }
    let impl_debug_for_entity_ref = create_entity_ref_or_mut_impl_debug(
        &ident_str,
        &ref_ident,
        &field_idents,
        &field_types,
        &field_ident_strs,
    );
    let impl_debug_for_entity_mut = create_entity_ref_or_mut_impl_debug(
        &ident_str,
        &mut_ident,
        &field_idents,
        &field_types,
        &field_ident_strs,
    );

    let col_idxs = (0..field_idents.len()).collect::<Vec<_>>();
    let impl_entity = quote! {
        impl my_ecs::prelude::Entity for #ident {
            type Ref<'cont> = #ref_ident<'cont>;
            type Mut<'cont> = #mut_ident<'cont>;

            const OFFSETS_BY_FIELD_INDEX: &'static [usize] = &[
                #(
                    std::mem::offset_of!(#ident, #field_idents)
                ),*
            ];

            fn field_to_column_index(fi: usize) -> usize {
                use std::{sync::OnceLock, any::TypeId};

                static MAP: OnceLock<[usize; #num_fields]> = OnceLock::new();

                // Lazily builds a map from each field's declaration index to its
                // position in `TypeId`-sorted order, which is what this function
                // returns as the column index (cf. `sorted_keys` above).
                let map = MAP.get_or_init(|| {
                    let mut map = [0; #num_fields];

                    let decl = [ #( TypeId::of::<#field_types>() ),* ];
                    let mut sorted = decl.clone();
                    sorted.sort_unstable();

                    for i in 0..#num_fields {
                        for j in 0..#num_fields {
                            if decl[i] == sorted[j] {
                                map[i] = j;
                                break;
                            }
                        }
                    }

                    map
                });

                map[fi]
            }

            fn get_ref_from<Cont: my_ecs::prelude::ContainEntity + ?Sized>(
                cont: &Cont, vi: usize
            ) -> Self::Ref<'_> {
                // Relies on the container handing back valid pointers to values of
                // each field's component type for this value index.
                unsafe { #ref_ident {
                    #(
                        #field_idents:
                            cont.value_ptr_by_value_index(
                                Self::field_to_column_index(#col_idxs),
                                vi
                            ).unwrap()
                            .cast::<#field_types>()
                            .as_ref()
                    ),*
                } }
            }

            fn get_mut_from<Cont: my_ecs::prelude::ContainEntity + ?Sized>(
                cont: &mut Cont, vi: usize
            ) -> Self::Mut<'_> {
                // Same as `get_ref_from`, with exclusive access through `&mut Cont`.
                unsafe { #mut_ident {
                    #(
                        #field_idents:
                            cont.value_ptr_by_value_index(
                                Self::field_to_column_index(#col_idxs),
                                vi
                            ).unwrap()
                            .cast::<#field_types>()
                            .as_mut()
                    ),*
                } }
            }
        }

        #decl_entity_ref
        #decl_entity_mut
        #impl_debug_for_entity_ref
        #impl_debug_for_entity_mut
    };

    TokenStream::from(quote! {
        #validate_impl_component
        #impl_as_entity_ref
        #impl_components
        #impl_entity
    })
}

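/// Derives `my_ecs::prelude::Resource` for a type; the generated impl is empty, so the
/// derive only marks the type.
///
/// A rough usage sketch; the type is illustrative and the example assumes the derive is
/// re-exported by `my_ecs`:
///
/// ```ignore
/// use my_ecs::prelude::*;
///
/// #[derive(Resource)]
/// struct FrameCount(u64);
/// ```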
#[proc_macro_derive(Resource)]
pub fn derive_resource(input: TokenStream) -> TokenStream {
    let ast = parse_macro_input!(input as DeriveInput);
    let ident = ast.ident.clone();

    TokenStream::from(quote! {
        impl my_ecs::prelude::Resource for #ident {}
    })
}

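/// Declares a unit struct and implements `my_ecs::prelude::Request` for it. The input is
/// a visibility and a name, followed by optional `Read`, `Write`, `ResRead`, `ResWrite`,
/// and `EntWrite` clauses (matched case-insensitively); each clause takes a single type
/// or a parenthesized list, and unspecified clauses default to `()`.
///
/// A rough usage sketch; all type names here are illustrative assumptions:
///
/// ```ignore
/// request!(
///     pub UpdateRequest,
///     Read = (Pos, Vel),
///     Write = Hp,
///     ResRead = Time,
///     EntWrite = Monster
/// );
/// ```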
#[proc_macro]
pub fn request(input: TokenStream) -> TokenStream {
    let req = parse_macro_input!(input as Request);

    TokenStream::from(quote! { #req })
}

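/// Declares a unit struct and implements `my_ecs::prelude::Filter` for it, plus
/// `my_ecs::prelude::Select` when a `Target` clause is given. After the visibility and
/// name, the optional clauses are `Target`, `All`, `Any`, `None`, and `Exact`
/// (case-insensitive); `Exact` cannot be combined with `All`, `Any`, or `None`, and any
/// type listed in `None` must not also appear in `Target`, `All`, or `Any`.
///
/// A rough usage sketch; all component names here are illustrative assumptions:
///
/// ```ignore
/// // Select `Hp` from entities that have `Pos` and `Vel` but not `Dead`.
/// filter!(pub Alive, Target = Hp, All = (Pos, Vel), None = Dead);
///
/// // A pure filter without a target.
/// filter!(pub ExactlyPos, Exact = Pos);
/// ```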
#[proc_macro]
pub fn filter(input: TokenStream) -> TokenStream {
    let sel = parse_macro_input!(input as Select);

    let empty = Punctuated::<TypePath, Token![,]>::new();
    let all = get_iter(&sel.filter.all, &empty);
    let any = get_iter(&sel.filter.any, &empty);
    let none = get_iter(&sel.filter.none, &empty);

    // Emits const assertions that no type listed in `None` is also listed in `Target`,
    // `All`, or `Any`.
    let validate_non_overlap = if let Some(target) = &sel.target {
        let pos = iter::once(&target.ty).chain(all.clone()).chain(any.clone());
        validate_non_overlap_tokens(pos, none.clone())
    } else {
        let pos = all.clone().chain(any.clone());
        validate_non_overlap_tokens(pos, none.clone())
    };

    // Additionally report overlaps here with an error spanned to the conflicting type.
    // Note: `pos` must be cloned inside the closure so that every `None` entry is
    // checked against the full positive list.
    if let Some(target) = &sel.target {
        let pos = iter::once(&target.ty).chain(all).chain(any);
        if let Some(conflict) = none.clone().find(|&n| pos.clone().any(|p| p == n)) {
            let err = Error::new(
                conflict.span(),
                "this type is also listed in `Target`, `All`, or `Any`",
            )
            .into_compile_error();
            return TokenStream::from(err);
        }
    } else {
        let pos = all.chain(any);
        if let Some(conflict) = none.clone().find(|&n| pos.clone().any(|p| p == n)) {
            let err = Error::new(
                conflict.span(),
                "this type is also listed in `All` or `Any`",
            )
            .into_compile_error();
            return TokenStream::from(err);
        }
    }

    return TokenStream::from(quote! {
        #validate_non_overlap
        #sel
    });

    fn get_iter<'a>(
        x: &'a Option<(Token![,], Ident, Token![=], Types)>,
        empty: &'a Punctuated<TypePath, Token![,]>,
    ) -> syn::punctuated::Iter<'a, TypePath> {
        if let Some((_, _, _, list)) = x {
            list.types.iter()
        } else {
            empty.iter()
        }
    }

    fn validate_non_overlap_tokens<'a, 'b, Ia, Ib>(ia: Ia, ib: Ib) -> TokenStream2
    where
        Ia: Iterator<Item = &'a TypePath> + Clone,
        Ib: Iterator<Item = &'b TypePath> + Clone,
    {
        let pairs = ia.flat_map(|a| ib.clone().map(move |b| (a, b)));
        let pair_as = pairs.clone().map(|(a, _)| a);
        let pair_bs = pairs.map(|(_, b)| b);

        quote! {
            const _: () = {#(
                assert!(
                    !my_ecs::ds::TypeHelper::<(#pair_as, #pair_bs)>::IS_EQUAL_TYPE,
                    "Types in `Target`, `All`, and `Any` must not be included in `None`",
                );
            )*};
        }
    }
}

#[derive(Debug)]
struct Request {
    vis: Visibility,
    ident: Ident,
    read: Option<(Token![,], Ident, Token![=], Types)>,
    write: Option<(Token![,], Ident, Token![=], Types)>,
    res_read: Option<(Token![,], Ident, Token![=], Types)>,
    res_write: Option<(Token![,], Ident, Token![=], Types)>,
    ent_write: Option<(Token![,], Ident, Token![=], Types)>,
}

impl Parse for Request {
    fn parse(input: ParseStream) -> Result<Self> {
        let vis = input.parse()?;
        let ident = input.parse()?;

        let mut read = None;
        let mut write = None;
        let mut res_read = None;
        let mut res_write = None;
        let mut ent_write = None;
        // Clause keywords are matched case-insensitively, and the comma between
        // clauses may be omitted.
        while input.peek(Token![,]) || input.peek(Ident) {
            let comma: Token![,] = if input.peek(Token![,]) {
                input.parse()?
            } else {
                Comma::default()
            };
            let ident: Ident = input.parse()?;
            let ident_str = ident.to_string();
            match ident_str.to_ascii_lowercase().as_str() {
                "read" => {
                    if read.is_some() {
                        return Err(Error::new(ident.span(), "duplicate `Read`"));
                    }
                    let eq: Token![=] = input.parse()?;
                    let types: Types = input.parse()?;
                    read = Some((comma, ident, eq, types));
                }
                "write" => {
                    if write.is_some() {
                        return Err(Error::new(ident.span(), "duplicate `Write`"));
                    }
                    let eq: Token![=] = input.parse()?;
                    let types: Types = input.parse()?;
                    write = Some((comma, ident, eq, types));
                }
                "resread" => {
                    if res_read.is_some() {
                        return Err(Error::new(ident.span(), "duplicate `ResRead`"));
                    }
                    let eq: Token![=] = input.parse()?;
                    let types: Types = input.parse()?;
                    res_read = Some((comma, ident, eq, types));
                }
                "reswrite" => {
                    if res_write.is_some() {
                        return Err(Error::new(ident.span(), "duplicate `ResWrite`"));
                    }
                    let eq: Token![=] = input.parse()?;
                    let types: Types = input.parse()?;
                    res_write = Some((comma, ident, eq, types));
                }
                "entwrite" => {
                    if ent_write.is_some() {
                        return Err(Error::new(ident.span(), "duplicate `EntWrite`"));
                    }
                    let eq: Token![=] = input.parse()?;
                    let types: Types = input.parse()?;
                    ent_write = Some((comma, ident, eq, types));
                }
                _ => {
                    return Err(Error::new(
                        ident.span(),
                        "expected `Read`, `Write`, `ResRead`, `ResWrite`, or `EntWrite`",
                    ));
                }
            }
        }

        Ok(Self {
            vis,
            ident,
            read,
            write,
            res_read,
            res_write,
            ent_write,
        })
    }
}

impl ToTokens for Request {
    fn to_tokens(&self, tokens: &mut TokenStream2) {
        let vis = &self.vis;
        let ident = &self.ident;
        let read = helper(&self.read);
        let write = helper(&self.write);
        let res_read = helper(&self.res_read);
        let res_write = helper(&self.res_write);
        let ent_write = helper(&self.ent_write);

        tokens.append_all(quote! {
            #vis struct #ident;
        });

        tokens.append_all(quote! {
            impl my_ecs::prelude::Request for #ident {
                type Read = #read;
                type Write = #write;
                type ResRead = #res_read;
                type ResWrite = #res_write;
                type EntWrite = #ent_write;
            }
        });

        fn helper(x: &Option<(Token![,], Ident, Token![=], Types)>) -> TokenStream2 {
            if let Some((_, _, _, types)) = x {
                let types = &types.types;
                if types.len() == 1 {
                    quote! { #types }
                } else {
                    quote! {( #types )}
                }
            } else {
                quote! {()}
            }
        }
    }
}

#[derive(Debug)]
struct Select {
    vis: Visibility,
    ident: Ident,
    _comma: Token![,],
    target: Option<SelectTarget>,
    filter: Filter,
}

impl Parse for Select {
    fn parse(input: ParseStream) -> Result<Self> {
        let vis = input.parse()?;
        let ident = input.parse()?;
        let _comma = input.parse()?;

        // Speculatively consume a leading `Target` identifier; on a match the step
        // commits, so only `= Type` remains to be parsed for the target.
        let contains_target = input
            .step(|cursor| {
                if let Some((tt, next)) = cursor.token_tree() {
                    match &tt {
                        TokenTree2::Ident(ident) if ident == "Target" => Ok(((), next)),
                        _ => Err(cursor.error("")),
                    }
                } else {
                    Err(cursor.error(""))
                }
            })
            .is_ok();

        let (target, filter) = if contains_target {
            let target = input.parse()?;
            let filter = input.parse()?;
            (Some(target), filter)
        } else {
            let filter = input.parse()?;
            (None, filter)
        };

        Ok(Self {
            vis,
            ident,
            _comma,
            target,
            filter,
        })
    }
}

impl ToTokens for Select {
    fn to_tokens(&self, tokens: &mut TokenStream2) {
        let vis = &self.vis;
        let ident = &self.ident;
        let all = helper(&self.filter.all);
        let any = helper(&self.filter.any);
        let none = helper(&self.filter.none);
        let exact = helper(&self.filter.exact);

        tokens.append_all(quote! {
            #vis struct #ident;
        });

        tokens.append_all(quote! {
            impl my_ecs::prelude::Filter for #ident {
                type All = #all;
                type Any = #any;
                type None = #none;
                type Exact = #exact;
            }
        });

        if let Some(target) = &self.target {
            tokens.append_all(quote! {
                impl my_ecs::prelude::Select for #ident {
                    type Target = #target;
                    type Filter = #ident;
                }
            });
        }

        fn helper(x: &Option<(Token![,], Ident, Token![=], Types)>) -> TokenStream2 {
            if let Some((_, _, _, types)) = x {
                let types = &types.types;
                if types.len() == 1 {
                    quote! { #types }
                } else {
                    quote! {( #types )}
                }
            } else {
                quote! {()}
            }
        }
    }
}

#[derive(Debug)]
struct SelectTarget {
    _eq: Token![=],
    ty: TypePath,
}

impl Parse for SelectTarget {
    fn parse(input: ParseStream) -> Result<Self> {
        let eq: Token![=] = input.parse()?;
        let ty: TypePath = input.parse()?;

        Ok(Self { _eq: eq, ty })
    }
}

impl ToTokens for SelectTarget {
    fn to_tokens(&self, tokens: &mut TokenStream2) {
        let ty = &self.ty;
        let ty = quote! { #ty };
        tokens.append_all(ty);
    }
}

#[derive(Debug)]
struct Filter {
    all: Option<(Token![,], Ident, Token![=], Types)>,
    any: Option<(Token![,], Ident, Token![=], Types)>,
    none: Option<(Token![,], Ident, Token![=], Types)>,
    exact: Option<(Token![,], Ident, Token![=], Types)>,
}

impl Parse for Filter {
    fn parse(input: ParseStream) -> Result<Self> {
        let mut all = None;
        let mut any = None;
        let mut none = None;
        let mut exact = None;
        while input.peek(Token![,]) || input.peek(Ident) {
            let comma: Token![,] = if input.peek(Token![,]) {
                input.parse()?
            } else {
                Comma::default()
            };
            let ident: Ident = input.parse()?;
            let ident_str = ident.to_string();
            match ident_str.to_ascii_lowercase().as_str() {
                "all" => {
                    if all.is_some() {
                        return Err(Error::new(ident.span(), "duplicate `All`"));
                    }
                    let eq: Token![=] = input.parse()?;
                    let types: Types = input.parse()?;
                    all = Some((comma, ident, eq, types));
                }
                "any" => {
                    if any.is_some() {
                        return Err(Error::new(ident.span(), "duplicate `Any`"));
                    }
                    let eq: Token![=] = input.parse()?;
                    let types: Types = input.parse()?;
                    any = Some((comma, ident, eq, types));
                }
                "none" => {
                    if none.is_some() {
                        return Err(Error::new(ident.span(), "duplicate `None`"));
                    }
                    let eq: Token![=] = input.parse()?;
                    let types: Types = input.parse()?;
                    none = Some((comma, ident, eq, types));
                }
                "exact" => {
                    if exact.is_some() {
                        return Err(Error::new(ident.span(), "duplicate `Exact`"));
                    }
                    let eq: Token![=] = input.parse()?;
                    let types: Types = input.parse()?;
                    exact = Some((comma, ident, eq, types));
                }
                _ => {
                    return Err(Error::new(
                        ident.span(),
                        "expected `All`, `Any`, `None`, or `Exact`",
                    ));
                }
            }
        }

        if exact.is_some() && (all.is_some() || any.is_some() || none.is_some()) {
            return Err(Error::new(
                input.span(),
                "`Exact` cannot be combined with `All`, `Any`, or `None`",
            ));
        }

        Ok(Self {
            all,
            any,
            none,
            exact,
        })
    }
}

#[derive(Debug)]
struct Types {
    _paren: Option<token::Paren>,
    types: Punctuated<TypePath, Token![,]>,
}

impl Parse for Types {
    fn parse(input: ParseStream) -> Result<Self> {
        let (paren, types) = if input.peek(token::Paren) {
            let content;
            let paren = Some(parenthesized!(content in input));
            let types = content.parse_terminated(TypePath::parse, Token![,])?;
            (paren, types)
        } else {
            let paren = None;
            let ty: TypePath = input.parse()?;
            let types: Punctuated<TypePath, Token![,]> = iter::once(ty).collect();
            (paren, types)
        };

        Ok(Self {
            _paren: paren,
            types,
        })
    }
}

impl ToTokens for Types {
    fn to_tokens(&self, tokens: &mut TokenStream2) {
        let types = &self.types;
        if types.len() == 1 {
            tokens.append_all(quote! { #types });
        } else {
            tokens.append_all(quote! {( #types )});
        }
    }
}

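/// Picks the identifier at a zero-based index from a comma-separated list, e.g.
/// `nth!(1, foo, bar, baz)` expands to `bar`. Panics at macro-expansion time if the
/// index is out of bounds.
///
/// A minimal sketch, with illustrative local bindings:
///
/// ```ignore
/// let foo = 1;
/// let bar = 2;
/// let baz = 3;
/// // Expands to `bar`, so `picked == 2`.
/// let picked = nth!(1, foo, bar, baz);
/// ```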
#[proc_macro]
pub fn nth(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as Nth);
    let identifiers = input.identifiers.into_iter().collect::<Vec<_>>();
    if input.n < identifiers.len() {
        let ident = &identifiers[input.n];
        TokenStream::from(quote! { #ident })
    } else {
        panic!(
            "index {} is out of bounds for {} identifiers",
            input.n,
            identifiers.len()
        );
    }
}

struct Nth {
    n: usize,
    _comma: Token![,],
    identifiers: Punctuated<Ident, Token![,]>,
}

impl Parse for Nth {
    fn parse(input: ParseStream) -> Result<Self> {
        let n: LitInt = input.parse()?;
        Ok(Nth {
            n: n.base10_parse()?,
            _comma: input.parse()?,
            identifiers: input.parse_terminated(Ident::parse, Token![,])?,
        })
    }
}

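/// Invokes the given macro once per number in the given range, passing the arity
/// followed by the indices `0..arity`. For example, `repeat_macro!(impl_tuple, 0..3)`
/// expands to `impl_tuple!(0,); impl_tuple!(1, 0); impl_tuple!(2, 0, 1);`.
///
/// A rough usage sketch; `impl_tuple` is an illustrative assumption:
///
/// ```ignore
/// macro_rules! impl_tuple {
///     ($n:tt, $($i:tt),*) => {
///         // ... implement something for tuple arity `$n` using the indices `$i` ...
///     };
/// }
///
/// repeat_macro!(impl_tuple, 0..3);
/// ```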
#[proc_macro]
pub fn repeat_macro(input: TokenStream) -> TokenStream {
    let RepeatMacro { id, start, end, .. } = parse_macro_input!(input as RepeatMacro);

    let repeats = (start..end).map(|n| {
        match n {
            0 => quote! { #id!(0,); },
            n => {
                let mut list = Punctuated::<Index, Token![,]>::new();
                for i in 0..n {
                    let i = Index::from(i);
                    list.push(i);
                }
                let n = Index::from(n);
                quote! { #id!(#n, #list); }
            }
        }
    });

    TokenStream::from(quote! {
        #( #repeats )*
    })
}

struct RepeatMacro {
    id: Ident,
    _comma: Token![,],
    start: usize,
    end: usize,
}

impl RepeatMacro {
    fn parse_range(expr_range: ExprRange) -> Result<(usize, usize)> {
        const RNG_ERR: &str = "expected integer literal";

        let start = if let Some(start) = &expr_range.start {
            match start.as_ref() {
                Expr::Lit(expr_lit) => {
                    if let Lit::Int(lit_int) = &expr_lit.lit {
                        lit_int.base10_parse()?
                    } else {
                        return Err(Error::new(expr_lit.span(), RNG_ERR));
                    }
                }
                _ => return Err(Error::new(start.span(), RNG_ERR)),
            }
        } else {
            0
        };

        let end = if let Some(end) = &expr_range.end {
            match end.as_ref() {
                Expr::Lit(expr_lit) => {
                    if let Lit::Int(lit_int) = &expr_lit.lit {
                        let parsed = lit_int.base10_parse()?;
                        match expr_range.limits {
                            RangeLimits::HalfOpen(_) => parsed,
                            RangeLimits::Closed(_) => parsed + 1,
                        }
                    } else {
                        return Err(Error::new(expr_lit.span(), RNG_ERR));
                    }
                }
                _ => return Err(Error::new(end.span(), RNG_ERR)),
            }
        } else {
            // An open-ended range (`start..`) is treated as effectively unbounded.
            usize::MAX
        };

        if start > end {
            return Err(Error::new(
                expr_range.span(),
                "`end` must be greater than or equal to `start`",
            ));
        }

        Ok((start, end))
    }
}

impl Parse for RepeatMacro {
    fn parse(input: ParseStream) -> Result<Self> {
        let id: Ident = input.parse()?;
        let _comma = input.parse()?;
        let expr_range: ExprRange = input.parse()?;
        let (start, end) = Self::parse_range(expr_range)?;

        Ok(Self {
            id,
            _comma,
            start,
            end,
        })
    }
}