1use crate::ast::error::ParseError;
2use crate::{ParseResult, UnresolvedPackage, UnresolvedPackageGroup};
3use alloc::borrow::Cow;
4use alloc::boxed::Box;
5use alloc::format;
6use alloc::string::{String, ToString};
7use alloc::vec::Vec;
8#[cfg(feature = "std")]
9use anyhow::Context as _;
10use core::fmt;
11use core::mem;
12use core::result::Result;
13use lex::{Span, Token, Tokenizer};
14use semver::Version;
15#[cfg(feature = "std")]
16use std::path::Path;
17
18pub mod error;
19pub mod lex;
20
21pub use resolve::Resolver;
22mod resolve;
23pub mod toposort;
24
25pub use lex::validate_id;
26
/// A single parsed WIT source: an optional leading `package` declaration
/// plus the top-level items that follow it.
struct PackageFile<'a> {
    // The `package ns:name[@version];` header, if one was present.
    package_id: Option<PackageName<'a>>,
    // Top-level declarations (worlds, interfaces, uses, nested packages).
    decl_list: DeclList<'a>,
}
34
35impl<'a> PackageFile<'a> {
36 fn parse(tokens: &mut Tokenizer<'a>) -> ParseResult<Self> {
41 let mut package_name_tokens_peek = tokens.clone();
42 let docs = parse_docs(&mut package_name_tokens_peek)?;
43
44 let package_id = if package_name_tokens_peek.eat(Token::Package)? {
47 let name = PackageName::parse(&mut package_name_tokens_peek, docs)?;
48 if package_name_tokens_peek.eat(Token::Semicolon)? {
49 *tokens = package_name_tokens_peek;
50 Some(name)
51 } else {
52 None
53 }
54 } else {
55 None
56 };
57 let decl_list = DeclList::parse_until(tokens, None)?;
58 Ok(PackageFile {
59 package_id,
60 decl_list,
61 })
62 }
63
64 fn parse_nested(
66 tokens: &mut Tokenizer<'a>,
67 docs: Docs<'a>,
68 attributes: Vec<Attribute<'a>>,
69 ) -> ParseResult<Self> {
70 let span = tokens.expect(Token::Package)?;
71 if !attributes.is_empty() {
72 return Err(ParseError::new_syntax(
73 span,
74 format!("cannot place attributes on nested packages"),
75 ));
76 }
77 let package_id = PackageName::parse(tokens, docs)?;
78 tokens.expect(Token::LeftBrace)?;
79 let decl_list = DeclList::parse_until(tokens, Some(Token::RightBrace))?;
80 Ok(PackageFile {
81 package_id: Some(package_id),
82 decl_list,
83 })
84 }
85}
86
/// An ordered list of top-level AST items inside a file or nested package.
#[derive(Default)]
pub struct DeclList<'a> {
    items: Vec<AstItem<'a>>,
}
126
impl<'a> DeclList<'a> {
    /// Parses top-level items until `end` is consumed, or until end-of-input
    /// when `end` is `None`.
    ///
    /// Doc comments are parsed ahead of each item so the terminator check
    /// does not consume them.
    fn parse_until(tokens: &mut Tokenizer<'a>, end: Option<Token>) -> ParseResult<DeclList<'a>> {
        let mut items = Vec::new();
        let mut docs = parse_docs(tokens)?;
        loop {
            match end {
                Some(end) => {
                    if tokens.eat(end)? {
                        break;
                    }
                }
                None => {
                    // No explicit terminator: peek via a clone and stop at EOF.
                    if tokens.clone().next()?.is_none() {
                        break;
                    }
                }
            }
            items.push(AstItem::parse(tokens, docs)?);
            docs = parse_docs(tokens)?;
        }
        Ok(DeclList { items })
    }

    /// Invokes `f` for every `use`/`include` path referenced anywhere in this
    /// declaration list, recursing into nested packages.
    ///
    /// `f` receives: the name of the enclosing interface/world (when known),
    /// the attributes of the referencing item, the path itself, the imported
    /// names (only for `use path.{..}` forms), and whether the path must
    /// resolve to a world or an interface.
    fn for_each_path<'b>(
        &'b self,
        f: &mut dyn FnMut(
            Option<&'b Id<'a>>,
            &'b [Attribute<'a>],
            &'b UsePath<'a>,
            Option<&'b [UseName<'a>]>,
            WorldOrInterface,
        ) -> ParseResult<()>,
    ) -> ParseResult<()> {
        for item in self.items.iter() {
            match item {
                AstItem::World(world) => {
                    // Extern kinds are collected first and visited after the
                    // loop, since `visit_kind` below needs to borrow `f` too.
                    let mut imports = Vec::new();
                    let mut exports = Vec::new();
                    for item in world.items.iter() {
                        match item {
                            WorldItem::Use(u) => f(
                                None,
                                &u.attributes,
                                &u.from,
                                Some(&u.names),
                                WorldOrInterface::Interface,
                            )?,
                            WorldItem::Include(i) => f(
                                Some(&world.name),
                                &i.attributes,
                                &i.from,
                                None,
                                WorldOrInterface::World,
                            )?,
                            WorldItem::Type(_) => {}
                            WorldItem::Import(Import {
                                kind, attributes, ..
                            }) => imports.push((kind, attributes)),
                            WorldItem::Export(Export {
                                kind, attributes, ..
                            }) => exports.push((kind, attributes)),
                        }
                    }

                    // Visit paths referenced by one import/export kind:
                    // inline interfaces may contain `use`s of their own.
                    let mut visit_kind =
                        |kind: &'b ExternKind<'a>, attrs: &'b [Attribute<'a>]| match kind {
                            ExternKind::Interface(_, items) => {
                                for item in items {
                                    match item {
                                        InterfaceItem::Use(u) => f(
                                            None,
                                            &u.attributes,
                                            &u.from,
                                            Some(&u.names),
                                            WorldOrInterface::Interface,
                                        )?,
                                        _ => {}
                                    }
                                }
                                Ok(())
                            }
                            ExternKind::Path(path) | ExternKind::NamedPath(_, path) => {
                                f(None, attrs, path, None, WorldOrInterface::Interface)
                            }
                            ExternKind::Func(..) => Ok(()),
                        };

                    for (kind, attrs) in imports {
                        visit_kind(kind, attrs)?;
                    }
                    for (kind, attrs) in exports {
                        visit_kind(kind, attrs)?;
                    }
                }
                AstItem::Interface(i) => {
                    for item in i.items.iter() {
                        match item {
                            InterfaceItem::Use(u) => f(
                                Some(&i.name),
                                &u.attributes,
                                &u.from,
                                Some(&u.names),
                                WorldOrInterface::Interface,
                            )?,
                            _ => {}
                        }
                    }
                }
                AstItem::Use(u) => {
                    // Top-level `use`: whether it names a world or an
                    // interface is not yet known at parse time.
                    f(
                        None,
                        &u.attributes,
                        &u.item,
                        None,
                        WorldOrInterface::Unknown,
                    )?;
                }

                AstItem::Package(pkg) => pkg.decl_list.for_each_path(f)?,
            }
        }
        Ok(())
    }
}
257
/// A single top-level item in a WIT file or nested package body.
enum AstItem<'a> {
    Interface(Interface<'a>),
    World(World<'a>),
    Use(ToplevelUse<'a>),
    Package(PackageFile<'a>),
}
264
265impl<'a> AstItem<'a> {
266 fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> ParseResult<Self> {
267 let attributes = Attribute::parse_list(tokens)?;
268 match tokens.clone().next()? {
269 Some((_span, Token::Interface)) => {
270 Interface::parse(tokens, docs, attributes).map(Self::Interface)
271 }
272 Some((_span, Token::World)) => World::parse(tokens, docs, attributes).map(Self::World),
273 Some((_span, Token::Use)) => ToplevelUse::parse(tokens, attributes).map(Self::Use),
274 Some((_span, Token::Package)) => {
275 PackageFile::parse_nested(tokens, docs, attributes).map(Self::Package)
276 }
277 other => Err(err_expected(tokens, "`world`, `interface` or `use`", other).into()),
278 }
279 }
280}
281
/// A parsed `ns:name[@version]` package identifier.
#[derive(Debug, Clone)]
struct PackageName<'a> {
    docs: Docs<'a>,
    // Span covering `ns:name` and the version suffix, when present.
    span: Span,
    namespace: Id<'a>,
    name: Id<'a>,
    // Optional `@x.y.z` semver suffix with its own span.
    version: Option<(Span, Version)>,
}
290
impl<'a> PackageName<'a> {
    /// Parses `ns:name[@version]` after the `package` keyword (or within a
    /// use path).
    fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> ParseResult<Self> {
        let namespace = parse_id(tokens)?;
        tokens.expect(Token::Colon)?;
        let name = parse_id(tokens)?;
        let version = parse_opt_version(tokens)?;
        Ok(PackageName {
            docs,
            // Span runs from the namespace to the end of the version when
            // present, otherwise to the end of the name.
            span: Span::new(
                namespace.span.start(),
                version
                    .as_ref()
                    .map(|(s, _)| s.end())
                    .unwrap_or(name.span.end()),
            ),
            namespace,
            name,
            version,
        })
    }

    /// Converts this borrowed AST name into the crate's owned
    /// `crate::PackageName` representation.
    fn package_name(&self) -> crate::PackageName {
        crate::PackageName {
            namespace: self.namespace.name.to_string(),
            name: self.name.name.to_string(),
            version: self.version.as_ref().map(|(_, v)| v.clone()),
        }
    }
}
320
/// A top-level `use path [as name];` statement.
struct ToplevelUse<'a> {
    span: Span,
    attributes: Vec<Attribute<'a>>,
    item: UsePath<'a>,
    // Optional rename introduced by `as`.
    as_: Option<Id<'a>>,
}
327
328impl<'a> ToplevelUse<'a> {
329 fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> ParseResult<Self> {
330 let span = tokens.expect(Token::Use)?;
331 let item = UsePath::parse(tokens)?;
332 let as_ = if tokens.eat(Token::As)? {
333 Some(parse_id(tokens)?)
334 } else {
335 None
336 };
337 tokens.expect_semicolon()?;
338 Ok(ToplevelUse {
339 span,
340 attributes,
341 item,
342 as_,
343 })
344 }
345}
346
/// A `world name { ... }` definition.
struct World<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    items: Vec<WorldItem<'a>>,
}
353
impl<'a> World<'a> {
    /// Parses `world <name> { <items>* }`.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<Self> {
        tokens.expect(Token::World)?;
        let name = parse_id(tokens)?;
        let items = Self::parse_items(tokens)?;
        Ok(World {
            docs,
            attributes,
            name,
            items,
        })
    }

    /// Parses the brace-delimited body of a world, collecting doc comments
    /// and attributes for each item.
    fn parse_items(tokens: &mut Tokenizer<'a>) -> ParseResult<Vec<WorldItem<'a>>> {
        tokens.expect(Token::LeftBrace)?;
        let mut items = Vec::new();
        loop {
            // Docs must be parsed before testing for `}` so trailing comments
            // don't block the close brace.
            let docs = parse_docs(tokens)?;
            if tokens.eat(Token::RightBrace)? {
                break;
            }
            let attributes = Attribute::parse_list(tokens)?;
            items.push(WorldItem::parse(tokens, docs, attributes)?);
        }
        Ok(items)
    }
}
385
/// A single item inside a `world { ... }` body.
enum WorldItem<'a> {
    Import(Import<'a>),
    Export(Export<'a>),
    Use(Use<'a>),
    Type(TypeDef<'a>),
    Include(Include<'a>),
}
393
impl<'a> WorldItem<'a> {
    /// Parses one world item by peeking at the next token; type-definition
    /// keywords (`type`, `flags`, `resource`, ...) are all routed to the
    /// corresponding `TypeDef` parser.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<WorldItem<'a>> {
        match tokens.clone().next()? {
            Some((_span, Token::Import)) => {
                Import::parse(tokens, docs, attributes).map(WorldItem::Import)
            }
            Some((_span, Token::Export)) => {
                Export::parse(tokens, docs, attributes).map(WorldItem::Export)
            }
            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(WorldItem::Use),
            Some((_span, Token::Type)) => {
                TypeDef::parse(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Flags)) => {
                TypeDef::parse_flags(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Resource)) => {
                TypeDef::parse_resource(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Record)) => {
                TypeDef::parse_record(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Variant)) => {
                TypeDef::parse_variant(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Enum)) => {
                TypeDef::parse_enum(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Include)) => {
                Include::parse(tokens, attributes).map(WorldItem::Include)
            }
            other => Err(err_expected(
                tokens,
                "`import`, `export`, `include`, `use`, or type definition",
                other,
            )
            .into()),
        }
    }
}
438
/// An `import <extern-kind>` item inside a world.
struct Import<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    kind: ExternKind<'a>,
}
444
445impl<'a> Import<'a> {
446 fn parse(
447 tokens: &mut Tokenizer<'a>,
448 docs: Docs<'a>,
449 attributes: Vec<Attribute<'a>>,
450 ) -> ParseResult<Import<'a>> {
451 tokens.expect(Token::Import)?;
452 let kind = ExternKind::parse(tokens)?;
453 Ok(Import {
454 docs,
455 attributes,
456 kind,
457 })
458 }
459}
460
/// An `export <extern-kind>` item inside a world.
struct Export<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    kind: ExternKind<'a>,
}
466
467impl<'a> Export<'a> {
468 fn parse(
469 tokens: &mut Tokenizer<'a>,
470 docs: Docs<'a>,
471 attributes: Vec<Attribute<'a>>,
472 ) -> ParseResult<Export<'a>> {
473 tokens.expect(Token::Export)?;
474 let kind = ExternKind::parse(tokens)?;
475 Ok(Export {
476 docs,
477 attributes,
478 kind,
479 })
480 }
481}
482
/// The target of an `import`/`export`: an inline interface, a bare path,
/// an inline function, or a named path.
enum ExternKind<'a> {
    // `import name: interface { ... }`
    Interface(Id<'a>, Vec<InterfaceItem<'a>>),
    // `import some:path/here;`
    Path(UsePath<'a>),
    // `import name: func(...);`
    Func(Id<'a>, Func<'a>),
    // `import name: some-path;`
    NamedPath(Id<'a>, UsePath<'a>),
}
490
impl<'a> ExternKind<'a> {
    /// Parses the target of an `import`/`export`.
    ///
    /// Disambiguation requires speculative parsing: an identifier followed by
    /// `:` may introduce a function, an inline interface, or a named path,
    /// while anything else is a bare path. Tokenizer clones are only
    /// committed (`*tokens = clone`) once a form is recognized.
    fn parse(tokens: &mut Tokenizer<'a>) -> ParseResult<ExternKind<'a>> {
        let mut clone = tokens.clone();
        let id = parse_id(&mut clone)?;
        if clone.eat(Token::Colon)? {
            // `name: [async] func(...)` — peek for the function keywords
            // without consuming them; `Func::parse` re-reads them.
            if clone.clone().eat(Token::Func)? || clone.clone().eat(Token::Async)? {
                *tokens = clone;
                let ret = ExternKind::Func(id, Func::parse(tokens)?);
                tokens.expect_semicolon()?;
                return Ok(ret);
            }

            // `name: interface { ... }`
            if clone.eat(Token::Interface)? {
                *tokens = clone;
                return Ok(ExternKind::Interface(id, Interface::parse_items(tokens)?));
            }

            // Distinguish `name: bare-id;` (a named path) from a fully
            // qualified `ns:pkg/iface` path, which instead parses as a bare
            // `ExternKind::Path` below.
            let mut peek = clone.clone();
            let is_qualified_path =
                parse_id(&mut peek).is_ok() && peek.clone().eat(Token::Slash).unwrap_or(false);
            if !is_qualified_path {
                *tokens = clone;
                let path = UsePath::parse(tokens)?;
                tokens.expect_semicolon()?;
                return Ok(ExternKind::NamedPath(id, path));
            }
        }

        // Fallback: a bare use-path (note: re-parses from the original,
        // uncommitted `tokens`).
        let ret = ExternKind::Path(UsePath::parse(tokens)?);
        tokens.expect_semicolon()?;
        Ok(ret)
    }

    /// Returns the span of the name identifying this extern item.
    fn span(&self) -> Span {
        match self {
            ExternKind::Interface(id, _) => id.span,
            ExternKind::Path(UsePath::Id(id)) => id.span,
            ExternKind::Path(UsePath::Package { name, .. }) => name.span,
            ExternKind::Func(id, _) => id.span,
            ExternKind::NamedPath(id, _) => id.span,
        }
    }
}
557
/// An `interface name { ... }` definition.
struct Interface<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    items: Vec<InterfaceItem<'a>>,
}
564
impl<'a> Interface<'a> {
    /// Parses `interface <name> { <items>* }`.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<Self> {
        tokens.expect(Token::Interface)?;
        let name = parse_id(tokens)?;
        let items = Self::parse_items(tokens)?;
        Ok(Interface {
            docs,
            attributes,
            name,
            items,
        })
    }

    /// Parses a brace-delimited interface body; also used for inline
    /// interfaces in `import name: interface { ... }`.
    pub(super) fn parse_items(tokens: &mut Tokenizer<'a>) -> ParseResult<Vec<InterfaceItem<'a>>> {
        tokens.expect(Token::LeftBrace)?;
        let mut items = Vec::new();
        loop {
            // Docs must be parsed before testing for `}` so trailing comments
            // don't block the close brace.
            let docs = parse_docs(tokens)?;
            if tokens.eat(Token::RightBrace)? {
                break;
            }
            let attributes = Attribute::parse_list(tokens)?;
            items.push(InterfaceItem::parse(tokens, docs, attributes)?);
        }
        Ok(items)
    }
}
596
/// What kind of item a use/include path is expected to resolve to.
#[derive(Debug)]
pub enum WorldOrInterface {
    World,
    Interface,
    // Top-level `use`: the target kind is determined during resolution.
    Unknown,
}
603
/// A single item inside an `interface { ... }` body.
enum InterfaceItem<'a> {
    TypeDef(TypeDef<'a>),
    Func(NamedFunc<'a>),
    Use(Use<'a>),
}

/// A `use path.{name [as alias], ...};` statement inside a world or
/// interface.
struct Use<'a> {
    attributes: Vec<Attribute<'a>>,
    from: UsePath<'a>,
    names: Vec<UseName<'a>>,
}
615
/// The path portion of a `use`: either a local identifier or a fully
/// qualified `ns:pkg/name[@version]` reference.
#[derive(Debug)]
enum UsePath<'a> {
    Id(Id<'a>),
    Package { id: PackageName<'a>, name: Id<'a> },
}
621
impl<'a> UsePath<'a> {
    /// Parses either `id` or `ns:pkg/name[@version]`; a `:` after the first
    /// identifier selects the fully qualified form.
    fn parse(tokens: &mut Tokenizer<'a>) -> ParseResult<Self> {
        let id = parse_id(tokens)?;
        if tokens.eat(Token::Colon)? {
            // The first identifier was actually the package namespace.
            let namespace = id;
            let pkg_name = parse_id(tokens)?;
            tokens.expect(Token::Slash)?;
            let name = parse_id(tokens)?;
            let version = parse_opt_version(tokens)?;
            Ok(UsePath::Package {
                id: PackageName {
                    docs: Default::default(),
                    span: Span::new(namespace.span.start(), pkg_name.span.end()),
                    namespace,
                    name: pkg_name,
                    version,
                },
                name,
            })
        } else {
            Ok(UsePath::Id(id))
        }
    }

    /// Returns the final component of the path (the item being referenced).
    fn name(&self) -> &Id<'a> {
        match self {
            UsePath::Id(id) => id,
            UsePath::Package { name, .. } => name,
        }
    }
}
655
/// One imported name in a `use path.{...}` list, with an optional rename.
struct UseName<'a> {
    name: Id<'a>,
    as_: Option<Id<'a>>,
}
660
impl<'a> Use<'a> {
    /// Parses `use path.{name [as alias], ...};`.
    ///
    /// A trailing comma before `}` is accepted; names may also be terminated
    /// directly by `}` without a comma.
    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> ParseResult<Self> {
        tokens.expect(Token::Use)?;
        let from = UsePath::parse(tokens)?;
        tokens.expect(Token::Period)?;
        tokens.expect(Token::LeftBrace)?;

        let mut names = Vec::new();
        while !tokens.eat(Token::RightBrace)? {
            let mut name = UseName {
                name: parse_id(tokens)?,
                as_: None,
            };
            if tokens.eat(Token::As)? {
                name.as_ = Some(parse_id(tokens)?);
            }
            names.push(name);
            // No comma: the list must end here with a closing brace.
            if !tokens.eat(Token::Comma)? {
                tokens.expect(Token::RightBrace)?;
                break;
            }
        }
        tokens.expect_semicolon()?;
        Ok(Use {
            attributes,
            from,
            names,
        })
    }
}
691
/// An `include path [with { name as alias, ... }];` item inside a world.
struct Include<'a> {
    from: UsePath<'a>,
    attributes: Vec<Attribute<'a>>,
    names: Vec<IncludeName<'a>>,
}

/// One rename in the `with { ... }` clause of an `include`; the rename is
/// mandatory there (unlike `use`).
struct IncludeName<'a> {
    name: Id<'a>,
    as_: Id<'a>,
}
702
impl<'a> Include<'a> {
    /// Parses `include path;` or `include path with { name as alias, ... }`.
    ///
    /// Note: the `with { ... }` form takes no trailing semicolon, while the
    /// bare form requires one.
    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> ParseResult<Self> {
        tokens.expect(Token::Include)?;
        let from = UsePath::parse(tokens)?;

        let names = if tokens.eat(Token::With)? {
            parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |_docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::As)?;
                    let as_ = parse_id(tokens)?;
                    Ok(IncludeName { name, as_ })
                },
            )?
        } else {
            tokens.expect_semicolon()?;
            Vec::new()
        };

        Ok(Include {
            attributes,
            from,
            names,
        })
    }
}
732
/// An identifier borrowed from the source text, together with its span.
#[derive(Debug, Clone)]
pub struct Id<'a> {
    name: &'a str,
    span: Span,
}
738
739impl<'a> From<&'a str> for Id<'a> {
740 fn from(s: &'a str) -> Id<'a> {
741 Id {
742 name: s.into(),
743 span: Default::default(),
744 }
745 }
746}
747
/// Accumulated doc comments preceding an item, plus the span they cover.
#[derive(Debug, Clone)]
pub struct Docs<'a> {
    docs: Vec<Cow<'a, str>>,
    span: Span,
}
753
754impl<'a> Default for Docs<'a> {
755 fn default() -> Self {
756 Self {
757 docs: Default::default(),
758 span: Default::default(),
759 }
760 }
761}
762
/// A named type definition (`type`, `record`, `flags`, `variant`, `enum`,
/// or `resource`).
struct TypeDef<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    ty: Type<'a>,
}

/// A parsed WIT type expression; primitive variants carry the span of their
/// keyword, compound variants carry their own payload structs.
enum Type<'a> {
    Bool(Span),
    U8(Span),
    U16(Span),
    U32(Span),
    U64(Span),
    S8(Span),
    S16(Span),
    S32(Span),
    S64(Span),
    F32(Span),
    F64(Span),
    Char(Span),
    String(Span),
    // A reference to a named (user-defined) type.
    Name(Id<'a>),
    List(List<'a>),
    Map(Map<'a>),
    // `list<T, N>` with a compile-time length.
    FixedLengthList(FixedLengthList<'a>),
    Handle(Handle<'a>),
    Resource(Resource<'a>),
    Record(Record<'a>),
    Flags(Flags<'a>),
    Variant(Variant<'a>),
    Tuple(Tuple<'a>),
    Enum(Enum<'a>),
    Option(Option_<'a>),
    Result(Result_<'a>),
    Future(Future<'a>),
    Stream(Stream<'a>),
    ErrorContext(Span),
}
801
/// A resource handle: `own<r>` or `borrow<r>`.
enum Handle<'a> {
    Own { resource: Id<'a> },
    Borrow { resource: Id<'a> },
}
806
807impl Handle<'_> {
808 fn span(&self) -> Span {
809 match self {
810 Handle::Own { resource } | Handle::Borrow { resource } => resource.span,
811 }
812 }
813}
814
/// A `resource` type: its name span and the functions declared in its body.
struct Resource<'a> {
    span: Span,
    funcs: Vec<ResourceFunc<'a>>,
}

/// A function declared inside a `resource` body, classified by how it is
/// invoked.
enum ResourceFunc<'a> {
    Method(NamedFunc<'a>),
    Static(NamedFunc<'a>),
    Constructor(NamedFunc<'a>),
}
825
impl<'a> ResourceFunc<'a> {
    /// Parses one function inside a resource body: either
    /// `constructor(params) [-> ty];` or `name: [static] func...;`.
    fn parse(
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
        tokens: &mut Tokenizer<'a>,
    ) -> ParseResult<Self> {
        match tokens.clone().next()? {
            Some((span, Token::Constructor)) => {
                tokens.expect(Token::Constructor)?;
                tokens.expect(Token::LeftParen)?;
                let params = parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::Colon)?;
                    let ty = Type::parse(tokens)?;
                    Ok((name, ty))
                })?;
                let result = if tokens.eat(Token::RArrow)? {
                    let ty = Type::parse(tokens)?;
                    Some(ty)
                } else {
                    None
                };
                tokens.expect_semicolon()?;
                // Constructors have no written name; a synthetic
                // "constructor" identifier reuses the keyword's span.
                Ok(ResourceFunc::Constructor(NamedFunc {
                    docs,
                    attributes,
                    name: Id {
                        span,
                        name: "constructor",
                    },
                    func: Func {
                        span,
                        async_: false,
                        params,
                        result,
                    },
                }))
            }
            Some((_span, Token::Id | Token::ExplicitId)) => {
                let name = parse_id(tokens)?;
                tokens.expect(Token::Colon)?;
                // An optional `static` keyword selects the variant built below.
                let ctor = if tokens.eat(Token::Static)? {
                    ResourceFunc::Static
                } else {
                    ResourceFunc::Method
                };
                let func = Func::parse(tokens)?;
                tokens.expect_semicolon()?;
                Ok(ctor(NamedFunc {
                    docs,
                    attributes,
                    name,
                    func,
                }))
            }
            other => Err(err_expected(tokens, "`constructor` or identifier", other).into()),
        }
    }

    /// Returns the underlying function regardless of how it is invoked.
    fn named_func(&self) -> &NamedFunc<'a> {
        use ResourceFunc::*;
        match self {
            Method(f) | Static(f) | Constructor(f) => f,
        }
    }
}
892
/// `record` type: a list of named fields.
struct Record<'a> {
    span: Span,
    fields: Vec<Field<'a>>,
}

/// One field of a `record`.
struct Field<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
    ty: Type<'a>,
}

/// `flags` type: a set of named bit flags.
struct Flags<'a> {
    span: Span,
    flags: Vec<Flag<'a>>,
}

/// One flag of a `flags` type.
struct Flag<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
}

/// `variant` type: a list of cases, each with an optional payload.
struct Variant<'a> {
    span: Span,
    cases: Vec<Case<'a>>,
}

/// One case of a `variant`.
struct Case<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
    ty: Option<Type<'a>>,
}

/// `enum` type: a list of payload-less cases.
struct Enum<'a> {
    span: Span,
    cases: Vec<EnumCase<'a>>,
}

/// One case of an `enum`.
struct EnumCase<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
}

/// `option<T>`.
struct Option_<'a> {
    span: Span,
    ty: Box<Type<'a>>,
}

/// `list<T>`.
struct List<'a> {
    span: Span,
    ty: Box<Type<'a>>,
}

/// `map<K, V>`.
struct Map<'a> {
    span: Span,
    key: Box<Type<'a>>,
    value: Box<Type<'a>>,
}

/// `list<T, N>` with a compile-time length `N`.
struct FixedLengthList<'a> {
    span: Span,
    ty: Box<Type<'a>>,
    size: u32,
}

/// `future` or `future<T>`.
struct Future<'a> {
    span: Span,
    ty: Option<Box<Type<'a>>>,
}

/// `tuple<T1, T2, ...>`.
struct Tuple<'a> {
    span: Span,
    types: Vec<Type<'a>>,
}

/// `result`, `result<T>`, `result<T, E>`, or `result<_, E>`.
struct Result_<'a> {
    span: Span,
    ok: Option<Box<Type<'a>>>,
    err: Option<Box<Type<'a>>>,
}

/// `stream` or `stream<T>`.
struct Stream<'a> {
    span: Span,
    ty: Option<Box<Type<'a>>>,
}

/// A function together with the name it is declared under.
struct NamedFunc<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    func: Func<'a>,
}

// Parameter list: (name, type) pairs in declaration order.
type ParamList<'a> = Vec<(Id<'a>, Type<'a>)>;

/// A function signature: `[async] func(params) [-> result]`.
struct Func<'a> {
    span: Span,
    async_: bool,
    params: ParamList<'a>,
    result: Option<Type<'a>>,
}
993
impl<'a> Func<'a> {
    /// Parses `[async] func(params) [-> result]`; the trailing semicolon is
    /// the caller's responsibility.
    fn parse(tokens: &mut Tokenizer<'a>) -> ParseResult<Func<'a>> {
        // Parses the `(name: type, ...)` parameter list; `left_paren`
        // controls whether the opening paren is consumed here.
        fn parse_params<'a>(
            tokens: &mut Tokenizer<'a>,
            left_paren: bool,
        ) -> ParseResult<ParamList<'a>> {
            if left_paren {
                tokens.expect(Token::LeftParen)?;
            };
            parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
                let name = parse_id(tokens)?;
                tokens.expect(Token::Colon)?;
                let ty = Type::parse(tokens)?;
                Ok((name, ty))
            })
        }

        let async_ = tokens.eat(Token::Async)?;
        let span = tokens.expect(Token::Func)?;
        let params = parse_params(tokens, true)?;
        let result = if tokens.eat(Token::RArrow)? {
            let ty = Type::parse(tokens)?;
            Some(ty)
        } else {
            None
        };
        Ok(Func {
            span,
            async_,
            params,
            result,
        })
    }
}
1028
impl<'a> InterfaceItem<'a> {
    /// Parses one interface item by peeking at the next token: a type
    /// definition keyword, an identifier (a named function), or `use`.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<InterfaceItem<'a>> {
        match tokens.clone().next()? {
            Some((_span, Token::Type)) => {
                TypeDef::parse(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Flags)) => {
                TypeDef::parse_flags(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Enum)) => {
                TypeDef::parse_enum(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Variant)) => {
                TypeDef::parse_variant(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Resource)) => {
                TypeDef::parse_resource(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Record)) => {
                TypeDef::parse_record(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Id)) | Some((_span, Token::ExplicitId)) => {
                NamedFunc::parse(tokens, docs, attributes).map(InterfaceItem::Func)
            }
            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(InterfaceItem::Use),
            other => Err(err_expected(tokens, "`type`, `resource` or `func`", other).into()),
        }
    }
}
1062
impl<'a> TypeDef<'a> {
    /// Parses a `type name = ty;` alias.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<Self> {
        tokens.expect(Token::Type)?;
        let name = parse_id(tokens)?;
        tokens.expect(Token::Equals)?;
        let ty = Type::parse(tokens)?;
        tokens.expect_semicolon()?;
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses `flags name { flag, ... }`.
    fn parse_flags(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<Self> {
        tokens.expect(Token::Flags)?;
        let name = parse_id(tokens)?;
        let ty = Type::Flags(Flags {
            // The flags type reuses the span of its name.
            span: name.span,
            flags: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    Ok(Flag { docs, name })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses `resource name;` or `resource name { funcs... }`.
    fn parse_resource(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<Self> {
        tokens.expect(Token::Resource)?;
        let name = parse_id(tokens)?;
        let mut funcs = Vec::new();
        // A body is optional; without one a semicolon is required.
        if tokens.eat(Token::LeftBrace)? {
            while !tokens.eat(Token::RightBrace)? {
                let docs = parse_docs(tokens)?;
                let attributes = Attribute::parse_list(tokens)?;
                funcs.push(ResourceFunc::parse(docs, attributes, tokens)?);
            }
        } else {
            tokens.expect_semicolon()?;
        }
        let ty = Type::Resource(Resource {
            span: name.span,
            funcs,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses `record name { field: ty, ... }`.
    fn parse_record(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<Self> {
        tokens.expect(Token::Record)?;
        let name = parse_id(tokens)?;
        let ty = Type::Record(Record {
            span: name.span,
            fields: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::Colon)?;
                    let ty = Type::parse(tokens)?;
                    Ok(Field { docs, name, ty })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses `variant name { case, case(ty), ... }`.
    fn parse_variant(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<Self> {
        tokens.expect(Token::Variant)?;
        let name = parse_id(tokens)?;
        let ty = Type::Variant(Variant {
            span: name.span,
            cases: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    // A parenthesized payload type is optional per case.
                    let ty = if tokens.eat(Token::LeftParen)? {
                        let ty = Type::parse(tokens)?;
                        tokens.expect(Token::RightParen)?;
                        Some(ty)
                    } else {
                        None
                    };
                    Ok(Case { docs, name, ty })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses `enum name { case, ... }`.
    fn parse_enum(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<Self> {
        tokens.expect(Token::Enum)?;
        let name = parse_id(tokens)?;
        let ty = Type::Enum(Enum {
            span: name.span,
            cases: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    Ok(EnumCase { docs, name })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }
}
1228
impl<'a> NamedFunc<'a> {
    /// Parses `name: [async] func(...) [-> ty];` inside an interface.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> ParseResult<Self> {
        let name = parse_id(tokens)?;
        tokens.expect(Token::Colon)?;
        let func = Func::parse(tokens)?;
        tokens.expect_semicolon()?;
        Ok(NamedFunc {
            docs,
            attributes,
            name,
            func,
        })
    }
}
1247
1248fn parse_id<'a>(tokens: &mut Tokenizer<'a>) -> ParseResult<Id<'a>> {
1249 match tokens.next()? {
1250 Some((span, Token::Id)) => Ok(Id {
1251 name: tokens.parse_id(span)?,
1252 span,
1253 }),
1254 Some((span, Token::ExplicitId)) => Ok(Id {
1255 name: tokens.parse_explicit_id(span)?,
1256 span,
1257 }),
1258 other => Err(err_expected(tokens, "an identifier or string", other)),
1259 }
1260}
1261
1262fn parse_opt_version(tokens: &mut Tokenizer<'_>) -> ParseResult<Option<(Span, Version)>> {
1263 if tokens.eat(Token::At)? {
1264 parse_version(tokens).map(Some)
1265 } else {
1266 Ok(None)
1267 }
1268}
1269
/// Parses a semver `major.minor.patch[-pre][+build]` version after `@`.
///
/// The token span is grown over the pre-release/build suffixes and the raw
/// source slice is then handed to `semver::Version::parse` for validation.
fn parse_version(tokens: &mut Tokenizer<'_>) -> ParseResult<(Span, Version)> {
    let start = tokens.expect(Token::Integer)?.start();
    tokens.expect(Token::Period)?;
    tokens.expect(Token::Integer)?;
    tokens.expect(Token::Period)?;
    let end = tokens.expect(Token::Integer)?.end();
    let mut span = Span::new(start, end);
    // `-pre` and `+build` suffixes extend the span but are validated by
    // semver, not tokenized structurally.
    eat_ids(tokens, Token::Minus, &mut span)?;
    eat_ids(tokens, Token::Plus, &mut span)?;
    let string = tokens.get_span(span);
    let version =
        Version::parse(string).map_err(|e| ParseError::new_syntax(span, e.to_string()))?;
    return Ok((span, version));

    /// Consumes a dot-separated run of ids/integers/minus tokens after
    /// `prefix`, extending `end` over everything consumed. Stops (without
    /// consuming) at the first token that doesn't continue the suffix.
    fn eat_ids(
        tokens: &mut Tokenizer<'_>,
        prefix: Token,
        end: &mut Span,
    ) -> Result<(), lex::Error> {
        if !tokens.eat(prefix)? {
            return Ok(());
        }
        loop {
            // Lookahead on a clone; only commit when the token continues
            // the version suffix.
            let mut clone = tokens.clone();
            match clone.next()? {
                Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                    end.set_end(span.end());
                    *tokens = clone;
                }
                Some((_span, Token::Period)) => match clone.next()? {
                    Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                        end.set_end(span.end());
                        *tokens = clone;
                    }
                    _ => break Ok(()),
                },
                _ => break Ok(()),
            }
        }
    }
}
1364
/// Collects the run of comment tokens (skipping whitespace) preceding the
/// next significant token, returning them as a `Docs` block.
///
/// Only comment/whitespace tokens are consumed; the first non-trivia token
/// is left in the stream.
fn parse_docs<'a>(tokens: &mut Tokenizer<'a>) -> Result<Docs<'a>, lex::Error> {
    let mut docs = Docs::default();
    let mut clone = tokens.clone();
    let mut started = false;
    while let Some((span, token)) = clone.next_raw()? {
        match token {
            Token::Whitespace => {}
            Token::Comment => {
                let comment = tokens.get_span(span);
                if !started {
                    docs.span.set_start(span.start());
                    started = true;
                }
                // Exclude trailing whitespace (e.g. the newline ending a
                // `//` comment) from the recorded span.
                let trailing_ws = comment
                    .bytes()
                    .rev()
                    .take_while(|ch| ch.is_ascii_whitespace())
                    .count();
                docs.span.set_end(span.end() - (trailing_ws as u32));
                docs.docs.push(comment.into());
            }
            _ => break,
        };
        // Commit the clone only after consuming trivia, so the first real
        // token stays unconsumed in `tokens`.
        *tokens = clone.clone();
    }
    Ok(docs)
}
1392
1393impl<'a> Type<'a> {
1394 fn parse(tokens: &mut Tokenizer<'a>) -> ParseResult<Self> {
1395 match tokens.next()? {
1396 Some((span, Token::U8)) => Ok(Type::U8(span)),
1397 Some((span, Token::U16)) => Ok(Type::U16(span)),
1398 Some((span, Token::U32)) => Ok(Type::U32(span)),
1399 Some((span, Token::U64)) => Ok(Type::U64(span)),
1400 Some((span, Token::S8)) => Ok(Type::S8(span)),
1401 Some((span, Token::S16)) => Ok(Type::S16(span)),
1402 Some((span, Token::S32)) => Ok(Type::S32(span)),
1403 Some((span, Token::S64)) => Ok(Type::S64(span)),
1404 Some((span, Token::F32)) => Ok(Type::F32(span)),
1405 Some((span, Token::F64)) => Ok(Type::F64(span)),
1406 Some((span, Token::Char)) => Ok(Type::Char(span)),
1407
1408 Some((span, Token::Tuple)) => {
1410 let types = parse_list(
1411 tokens,
1412 Token::LessThan,
1413 Token::GreaterThan,
1414 |_docs, tokens| Type::parse(tokens),
1415 )?;
1416 Ok(Type::Tuple(Tuple { span, types }))
1417 }
1418
1419 Some((span, Token::Bool)) => Ok(Type::Bool(span)),
1420 Some((span, Token::String_)) => Ok(Type::String(span)),
1421
1422 Some((span, Token::List)) => {
1425 tokens.expect(Token::LessThan)?;
1426 let ty = Type::parse(tokens)?;
1427 let size = if tokens.eat(Token::Comma)? {
1428 let number = tokens.next()?;
1429 if let Some((span, Token::Integer)) = number {
1430 let size: u32 = tokens.get_span(span).parse().map_err(|e| {
1431 ParseError::new_syntax(span, format!("invalid list size: {e}"))
1432 })?;
1433 Some(size)
1434 } else {
1435 return Err(err_expected(tokens, "fixed-length", number).into());
1436 }
1437 } else {
1438 None
1439 };
1440 tokens.expect(Token::GreaterThan)?;
1441 if let Some(size) = size {
1442 Ok(Type::FixedLengthList(FixedLengthList {
1443 span,
1444 ty: Box::new(ty),
1445 size,
1446 }))
1447 } else {
1448 Ok(Type::List(List {
1449 span,
1450 ty: Box::new(ty),
1451 }))
1452 }
1453 }
1454
1455 Some((span, Token::Map)) => {
1457 tokens.expect(Token::LessThan)?;
1458 let key = Type::parse(tokens)?;
1459 tokens.expect(Token::Comma)?;
1460 let value = Type::parse(tokens)?;
1461 tokens.expect(Token::GreaterThan)?;
1462 Ok(Type::Map(Map {
1463 span,
1464 key: Box::new(key),
1465 value: Box::new(value),
1466 }))
1467 }
1468
1469 Some((span, Token::Option_)) => {
1471 tokens.expect(Token::LessThan)?;
1472 let ty = Type::parse(tokens)?;
1473 tokens.expect(Token::GreaterThan)?;
1474 Ok(Type::Option(Option_ {
1475 span,
1476 ty: Box::new(ty),
1477 }))
1478 }
1479
1480 Some((span, Token::Result_)) => {
1485 let mut ok = None;
1486 let mut err = None;
1487
1488 if tokens.eat(Token::LessThan)? {
1489 if tokens.eat(Token::Underscore)? {
1490 tokens.expect(Token::Comma)?;
1491 err = Some(Box::new(Type::parse(tokens)?));
1492 } else {
1493 ok = Some(Box::new(Type::parse(tokens)?));
1494 if tokens.eat(Token::Comma)? {
1495 err = Some(Box::new(Type::parse(tokens)?));
1496 }
1497 };
1498 tokens.expect(Token::GreaterThan)?;
1499 };
1500 Ok(Type::Result(Result_ { span, ok, err }))
1501 }
1502
1503 Some((span, Token::Future)) => {
1506 let mut ty = None;
1507
1508 if tokens.eat(Token::LessThan)? {
1509 ty = Some(Box::new(Type::parse(tokens)?));
1510 tokens.expect(Token::GreaterThan)?;
1511 };
1512 Ok(Type::Future(Future { span, ty }))
1513 }
1514
1515 Some((span, Token::Stream)) => {
1518 let mut ty = None;
1519
1520 if tokens.eat(Token::LessThan)? {
1521 ty = Some(Box::new(Type::parse(tokens)?));
1522 tokens.expect(Token::GreaterThan)?;
1523 };
1524 Ok(Type::Stream(Stream { span, ty }))
1525 }
1526
1527 Some((span, Token::ErrorContext)) => Ok(Type::ErrorContext(span)),
1529
1530 Some((_span, Token::Own)) => {
1532 tokens.expect(Token::LessThan)?;
1533 let resource = parse_id(tokens)?;
1534 tokens.expect(Token::GreaterThan)?;
1535 Ok(Type::Handle(Handle::Own { resource }))
1536 }
1537
1538 Some((_span, Token::Borrow)) => {
1540 tokens.expect(Token::LessThan)?;
1541 let resource = parse_id(tokens)?;
1542 tokens.expect(Token::GreaterThan)?;
1543 Ok(Type::Handle(Handle::Borrow { resource }))
1544 }
1545
1546 Some((span, Token::Id)) => Ok(Type::Name(Id {
1548 name: tokens.parse_id(span)?.into(),
1549 span,
1550 })),
1551 Some((span, Token::ExplicitId)) => Ok(Type::Name(Id {
1553 name: tokens.parse_explicit_id(span)?.into(),
1554 span,
1555 })),
1556
1557 other => Err(err_expected(tokens, "a type", other).into()),
1558 }
1559 }
1560
1561 fn span(&self) -> Span {
1562 match self {
1563 Type::Bool(span)
1564 | Type::U8(span)
1565 | Type::U16(span)
1566 | Type::U32(span)
1567 | Type::U64(span)
1568 | Type::S8(span)
1569 | Type::S16(span)
1570 | Type::S32(span)
1571 | Type::S64(span)
1572 | Type::F32(span)
1573 | Type::F64(span)
1574 | Type::Char(span)
1575 | Type::String(span)
1576 | Type::ErrorContext(span) => *span,
1577 Type::Name(id) => id.span,
1578 Type::List(l) => l.span,
1579 Type::Map(m) => m.span,
1580 Type::FixedLengthList(l) => l.span,
1581 Type::Handle(h) => h.span(),
1582 Type::Resource(r) => r.span,
1583 Type::Record(r) => r.span,
1584 Type::Flags(f) => f.span,
1585 Type::Variant(v) => v.span,
1586 Type::Tuple(t) => t.span,
1587 Type::Enum(e) => e.span,
1588 Type::Option(o) => o.span,
1589 Type::Result(r) => r.span,
1590 Type::Future(f) => f.span,
1591 Type::Stream(s) => s.span,
1592 }
1593 }
1594}
1595
1596fn parse_list<'a, T>(
1597 tokens: &mut Tokenizer<'a>,
1598 start: Token,
1599 end: Token,
1600 parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> ParseResult<T>,
1601) -> ParseResult<Vec<T>> {
1602 tokens.expect(start)?;
1603 parse_list_trailer(tokens, end, parse)
1604}
1605
1606fn parse_list_trailer<'a, T>(
1607 tokens: &mut Tokenizer<'a>,
1608 end: Token,
1609 mut parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> ParseResult<T>,
1610) -> ParseResult<Vec<T>> {
1611 let mut items = Vec::new();
1612 loop {
1613 let docs = parse_docs(tokens)?;
1615
1616 if tokens.eat(end)? {
1618 break;
1619 }
1620
1621 let item = parse(docs, tokens)?;
1622 items.push(item);
1623
1624 if !tokens.eat(Token::Comma)? {
1627 tokens.expect(end)?;
1628 break;
1629 }
1630 }
1631 Ok(items)
1632}
1633
1634fn err_expected(
1635 tokens: &Tokenizer<'_>,
1636 expected: &'static str,
1637 found: Option<(Span, Token)>,
1638) -> ParseError {
1639 match found {
1640 Some((span, token)) => ParseError::new_syntax(
1641 span,
1642 format!("expected {}, found {}", expected, token.describe()),
1643 ),
1644 None => {
1645 ParseError::new_syntax(tokens.eof_span(), format!("expected {expected}, found eof"))
1646 }
1647 }
1648}
1649
/// An `@`-prefixed attribute attached to an item, e.g. `@since(version = 1.0.0)`.
enum Attribute<'a> {
    /// `@since(version = <semver>)` — carries a semver version payload.
    Since { span: Span, version: Version },
    /// `@unstable(feature = <id>)` — carries the named feature identifier.
    Unstable { span: Span, feature: Id<'a> },
    /// `@deprecated(version = <semver>)` — carries a semver version payload.
    Deprecated { span: Span, version: Version },
}
1655
1656impl<'a> Attribute<'a> {
1657 fn parse_list(tokens: &mut Tokenizer<'a>) -> ParseResult<Vec<Attribute<'a>>> {
1658 let mut ret = Vec::new();
1659 while tokens.eat(Token::At)? {
1660 let id = parse_id(tokens)?;
1661 let attr = match id.name {
1662 "since" => {
1663 tokens.expect(Token::LeftParen)?;
1664 eat_id(tokens, "version")?;
1665 tokens.expect(Token::Equals)?;
1666 let (_span, version) = parse_version(tokens)?;
1667 tokens.expect(Token::RightParen)?;
1668 Attribute::Since {
1669 span: id.span,
1670 version,
1671 }
1672 }
1673 "unstable" => {
1674 tokens.expect(Token::LeftParen)?;
1675 eat_id(tokens, "feature")?;
1676 tokens.expect(Token::Equals)?;
1677 let feature = parse_id(tokens)?;
1678 tokens.expect(Token::RightParen)?;
1679 Attribute::Unstable {
1680 span: id.span,
1681 feature,
1682 }
1683 }
1684 "deprecated" => {
1685 tokens.expect(Token::LeftParen)?;
1686 eat_id(tokens, "version")?;
1687 tokens.expect(Token::Equals)?;
1688 let (_span, version) = parse_version(tokens)?;
1689 tokens.expect(Token::RightParen)?;
1690 Attribute::Deprecated {
1691 span: id.span,
1692 version,
1693 }
1694 }
1695 other => {
1696 return Err(ParseError::new_syntax(
1697 id.span,
1698 format!("unknown attribute `{other}`"),
1699 ));
1700 }
1701 };
1702 ret.push(attr);
1703 }
1704 Ok(ret)
1705 }
1706
1707 fn span(&self) -> Span {
1708 match self {
1709 Attribute::Since { span, .. }
1710 | Attribute::Unstable { span, .. }
1711 | Attribute::Deprecated { span, .. } => *span,
1712 }
1713 }
1714}
1715
1716fn eat_id(tokens: &mut Tokenizer<'_>, expected: &str) -> ParseResult<Span> {
1717 let id = parse_id(tokens)?;
1718 if id.name != expected {
1719 return Err(ParseError::new_syntax(
1720 id.span,
1721 format!("expected `{expected}`, found `{}`", id.name),
1722 ));
1723 }
1724 Ok(id.span)
1725}
1726
/// A collection of source texts, each assigned a distinct offset range so
/// that a [`Span`] can be mapped back to the source (and line/column) it
/// points into.
#[derive(Clone, Default, Debug, PartialEq, Eq)]
pub struct SourceMap {
    /// Registered sources in insertion order; each records its base offset.
    sources: Vec<Source>,
    /// Running total length of all registered contents; also the base offset
    /// the next pushed source will receive.
    offset: u32,
}
1736
/// A single named source registered in a [`SourceMap`].
#[derive(Clone, Debug, PartialEq, Eq)]
struct Source {
    /// Absolute offset within the owning `SourceMap` where this source begins.
    offset: u32,
    /// Name used when rendering diagnostics that point into this source.
    path: String,
    /// The source text (with a trailing newline appended by `push_str`).
    contents: String,
}
1743
impl SourceMap {
    /// Creates a new empty `SourceMap`.
    pub fn new() -> SourceMap {
        SourceMap::default()
    }

    /// Reads the file at `path` from the filesystem and appends its contents
    /// to this map.
    ///
    /// # Errors
    ///
    /// Returns an error if the file cannot be read as UTF-8 text.
    #[cfg(feature = "std")]
    pub fn push_file(&mut self, path: &Path) -> anyhow::Result<()> {
        let contents = std::fs::read_to_string(path)
            .with_context(|| format!("failed to read file {path:?}"))?;
        self.push(path, contents);
        Ok(())
    }

    /// Appends `contents` to this map, naming it after `path`'s display form.
    #[cfg(feature = "std")]
    pub fn push(&mut self, path: &Path, contents: impl Into<String>) {
        self.push_str(&path.display().to_string(), contents);
    }

    /// Appends `contents`, identified by `path`, to this map.
    pub fn push_str(&mut self, path: &str, contents: impl Into<String>) {
        let mut contents = contents.into();
        // A trailing newline is unconditionally appended here; `parse_inner`
        // slices it back off before handing the text to the tokenizer.
        contents.push('\n');
        let new_offset = self.offset + u32::try_from(contents.len()).unwrap();
        self.sources.push(Source {
            offset: self.offset,
            path: path.to_string(),
            contents,
        });
        self.offset = new_offset;
    }

    /// Merges `other` into `self`, rebasing all of `other`'s sources onto the
    /// end of this map.
    ///
    /// Returns the base offset at which `other`'s content now starts, i.e.
    /// the amount to add to spans that were produced against `other`.
    pub fn append(&mut self, other: SourceMap) -> u32 {
        let base = self.offset;
        for mut source in other.sources {
            source.offset += base;
            self.sources.push(source);
        }
        self.offset += other.offset;
        base
    }

    /// Parses all registered sources into an [`UnresolvedPackageGroup`].
    ///
    /// On failure the map is handed back alongside the error so the caller
    /// can still use it to render diagnostics for the failure.
    pub fn parse(self) -> Result<UnresolvedPackageGroup, (Self, ParseError)> {
        match self.parse_inner() {
            Ok((main, nested)) => Ok(UnresolvedPackageGroup {
                main,
                nested,
                source_map: self,
            }),
            Err(e) => Err((self, e)),
        }
    }

    /// Parses every source, splitting nested `package ... { ... }` blocks out
    /// from the main package's contents.
    fn parse_inner(&self) -> ParseResult<(UnresolvedPackage, Vec<UnresolvedPackage>)> {
        let mut nested = Vec::new();
        let mut resolver = Resolver::default();
        // Sort by path so parse/resolve order is deterministic regardless of
        // the order sources were pushed in.
        let mut srcs = self.sources.iter().collect::<Vec<_>>();
        srcs.sort_by_key(|src| &src.path);

        for src in srcs {
            let mut tokens = Tokenizer::new(
                // Trim off the artificial trailing newline added by `push_str`.
                &src.contents[..src.contents.len() - 1],
                src.offset,
            )?;
            let mut file = PackageFile::parse(&mut tokens)?;

            // Nested packages are each resolved in isolation with their own
            // `Resolver`; every other item is retained in `file` and flows
            // into the main package below.
            for item in mem::take(&mut file.decl_list.items) {
                match item {
                    AstItem::Package(nested_pkg) => {
                        let mut resolve = Resolver::default();
                        resolve.push(nested_pkg)?;
                        nested.push(resolve.resolve()?);
                    }
                    other => file.decl_list.items.push(other),
                }
            }

            resolver.push(file)?;
        }

        Ok((resolver.resolve()?, nested))
    }

    /// Renders a rustc-style diagnostic for `span`, or `None` when the span
    /// is unknown (does not point into any registered source).
    pub(crate) fn highlight_span(&self, span: Span, err: impl fmt::Display) -> Option<String> {
        if !span.is_known() {
            return None;
        }
        Some(self.highlight_err(span.start(), Some(span.end()), err))
    }

    /// Formats `err` with a snippet of the offending line and a `^---` marker
    /// underneath the `start..end` range.
    fn highlight_err(&self, start: u32, end: Option<u32>, err: impl fmt::Display) -> String {
        let src = self.source_for_offset(start);
        let start = src.to_relative_offset(start);
        let end = end.map(|end| src.to_relative_offset(end));
        // `line`/`col` are zero-based here; bumped to 1-based for display.
        let (line, col) = src.linecol(start);
        let snippet = src.contents.lines().nth(line).unwrap_or("");
        let line = line + 1;
        let col = col + 1;

        // Degrade to a one-line message for very long lines to avoid
        // emitting an unwieldy snippet.
        if snippet.len() > 500 {
            return format!("{}:{line}:{col}: {err}", src.path);
        }
        // `{marker:>0$}` right-aligns the `^` to width `col`, placing it
        // under the start of the span within the snippet.
        let mut msg = format!(
            "\
{err}
     --> {file}:{line}:{col}
      |
 {line:4} | {snippet}
      | {marker:>0$}",
            col,
            file = src.path,
            marker = "^",
        );
        // Extend the marker with one `-` per remaining character of the
        // highlighted span.
        if let Some(end) = end {
            if let Some(s) = src.contents.get(start..end) {
                for _ in s.chars().skip(1) {
                    msg.push('-');
                }
            }
        }
        return msg;
    }

    /// Renders `span` as `file:line:col` (1-based), or `<unknown>` when the
    /// span does not point into any registered source.
    pub fn render_location(&self, span: Span) -> String {
        if !span.is_known() {
            return "<unknown>".to_string();
        }
        let start = span.start();
        let src = self.source_for_offset(start);
        let rel_start = src.to_relative_offset(start);
        let (line, col) = src.linecol(rel_start);
        format!(
            "{file}:{line}:{col}",
            file = src.path,
            line = line + 1,
            col = col + 1,
        )
    }

    /// Finds the source whose offset range contains `start`.
    ///
    /// NOTE(review): assumes `start` falls at or after the first source's
    /// offset — an offset before that (or an empty map) would panic on the
    /// `i - 1` below; confirm callers only pass known spans.
    fn source_for_offset(&self, start: u32) -> &Source {
        let i = match self.sources.binary_search_by_key(&start, |src| src.offset) {
            Ok(i) => i,
            Err(i) => i - 1,
        };
        &self.sources[i]
    }

    /// Returns an iterator over the paths of all registered sources.
    #[cfg(feature = "std")]
    pub fn source_files(&self) -> impl Iterator<Item = &Path> {
        self.sources.iter().map(|src| Path::new(&src.path))
    }

    /// Returns an iterator over the names of all registered sources.
    pub fn source_names(&self) -> impl Iterator<Item = &str> {
        self.sources.iter().map(|src| src.path.as_str())
    }
}
1950
1951impl Source {
1952 fn to_relative_offset(&self, offset: u32) -> usize {
1953 usize::try_from(offset - self.offset).unwrap()
1954 }
1955
1956 fn linecol(&self, relative_offset: usize) -> (usize, usize) {
1957 let mut cur = 0;
1958 for (i, line) in self.contents.split_terminator('\n').enumerate() {
1962 if cur + line.len() + 1 > relative_offset {
1963 return (i, relative_offset - cur);
1964 }
1965 cur += line.len() + 1;
1966 }
1967 (self.contents.lines().count(), 0)
1968 }
1969}
1970
/// The result of parsing a textual `use` path via [`parse_use_path`].
pub enum ParsedUsePath {
    /// A bare name with no package qualifier.
    Name(String),
    /// A package-qualified path: the package name plus the item name within it.
    Package(crate::PackageName, String),
}
1975
1976pub fn parse_use_path(s: &str) -> anyhow::Result<ParsedUsePath> {
1977 let mut tokens = Tokenizer::new(s, 0)?;
1978 let path = UsePath::parse(&mut tokens)?;
1979 if tokens.next()?.is_some() {
1980 anyhow::bail!("trailing tokens in path specifier");
1981 }
1982 Ok(match path {
1983 UsePath::Id(id) => ParsedUsePath::Name(id.name.to_string()),
1984 UsePath::Package { id, name } => {
1985 ParsedUsePath::Package(id.package_name(), name.name.to_string())
1986 }
1987 })
1988}