wit_parser/
ast.rs

1use crate::{Error, PackageNotFoundError, UnresolvedPackageGroup};
2use anyhow::{Context, Result, bail};
3use lex::{Span, Token, Tokenizer};
4use semver::Version;
5use std::borrow::Cow;
6use std::fmt;
7use std::mem;
8use std::path::{Path, PathBuf};
9
10pub mod lex;
11
12pub use resolve::Resolver;
13mod resolve;
14pub mod toposort;
15
16pub use lex::validate_id;
17
18/// Representation of a single WIT `*.wit` file and nested packages.
19struct PackageFile<'a> {
20    /// Optional `package foo:bar;` header
21    package_id: Option<PackageName<'a>>,
22    /// Other AST items.
23    decl_list: DeclList<'a>,
24}
25
26impl<'a> PackageFile<'a> {
27    /// Parse a standalone file represented by `tokens`.
28    ///
29    /// This will optionally start with `package foo:bar;` and is then
30    /// followed by a list of AST items.
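    ///
    /// For example (a rough sketch, not the authoritative grammar, with
    /// hypothetical names), a file of this shape is accepted, where the
    /// leading `package` header is optional:
    ///
    /// ```wit
    /// package foo:bar;
    ///
    /// interface my-interface {}
    /// world my-world {}
    /// ```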
31    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
32        let mut package_name_tokens_peek = tokens.clone();
33        let docs = parse_docs(&mut package_name_tokens_peek)?;
34
35        // Parse `package foo:bar;` but throw it out if it's actually
36        // `package foo:bar { ... }` since that's an AST item instead.
37        let package_id = if package_name_tokens_peek.eat(Token::Package)? {
38            let name = PackageName::parse(&mut package_name_tokens_peek, docs)?;
39            if package_name_tokens_peek.eat(Token::Semicolon)? {
40                *tokens = package_name_tokens_peek;
41                Some(name)
42            } else {
43                None
44            }
45        } else {
46            None
47        };
48        let decl_list = DeclList::parse_until(tokens, None)?;
49        Ok(PackageFile {
50            package_id,
51            decl_list,
52        })
53    }
54
55    /// Parse a nested package of the form `package foo:bar { ... }`
56    fn parse_nested(
57        tokens: &mut Tokenizer<'a>,
58        docs: Docs<'a>,
59        attributes: Vec<Attribute<'a>>,
60    ) -> Result<Self> {
61        let span = tokens.expect(Token::Package)?;
62        if !attributes.is_empty() {
63            bail!(Error::new(
64                span,
65                format!("cannot place attributes on nested packages"),
66            ));
67        }
68        let package_id = PackageName::parse(tokens, docs)?;
69        tokens.expect(Token::LeftBrace)?;
70        let decl_list = DeclList::parse_until(tokens, Some(Token::RightBrace))?;
71        Ok(PackageFile {
72            package_id: Some(package_id),
73            decl_list,
74        })
75    }
76}
77
78/// Stores all of the declarations in a package's scope. In AST terms, this
79/// means everything except the `package` declaration that demarcates a package
80/// scope. In the traditional implicit format, these are all of the
81/// non-`package` declarations in the file:
82///
83/// ```wit
84/// package foo:name;
85///
86/// /* START DECL LIST */
87/// // Some comment...
88/// interface i {}
89/// world w {}
90/// /* END DECL LIST */
91/// ```
92///
93/// In the nested package style, a [`DeclList`] is everything inside of each
94/// `package` element's brackets:
95///
96/// ```wit
97/// package foo:name {
98///   /* START FIRST DECL LIST */
99///   // Some comment...
100///   interface i {}
101///   world w {}
102///   /* END FIRST DECL LIST */
103/// }
104///
105/// package bar:name {
106///   /* START SECOND DECL LIST */
107///   // Some comment...
108///   interface i {}
109///   world w {}
110///   /* END SECOND DECL LIST */
111/// }
112/// ```
113#[derive(Default)]
114pub struct DeclList<'a> {
115    items: Vec<AstItem<'a>>,
116}
117
118impl<'a> DeclList<'a> {
119    fn parse_until(tokens: &mut Tokenizer<'a>, end: Option<Token>) -> Result<DeclList<'a>> {
120        let mut items = Vec::new();
121        let mut docs = parse_docs(tokens)?;
122        loop {
123            match end {
124                Some(end) => {
125                    if tokens.eat(end)? {
126                        break;
127                    }
128                }
129                None => {
130                    if tokens.clone().next()?.is_none() {
131                        break;
132                    }
133                }
134            }
135            items.push(AstItem::parse(tokens, docs)?);
136            docs = parse_docs(tokens)?;
137        }
138        Ok(DeclList { items })
139    }
140
141    fn for_each_path<'b>(
142        &'b self,
143        f: &mut dyn FnMut(
144            Option<&'b Id<'a>>,
145            &'b [Attribute<'a>],
146            &'b UsePath<'a>,
147            Option<&'b [UseName<'a>]>,
148            WorldOrInterface,
149        ) -> Result<()>,
150    ) -> Result<()> {
151        for item in self.items.iter() {
152            match item {
153                AstItem::World(world) => {
154                    // Visit imports here before exports to help preserve
155                    // round-tripping of documents: printing a world puts
156                    // imports first, but textually exports may be listed
157                    // before imports.
158                    let mut imports = Vec::new();
159                    let mut exports = Vec::new();
160                    for item in world.items.iter() {
161                        match item {
162                            WorldItem::Use(u) => f(
163                                None,
164                                &u.attributes,
165                                &u.from,
166                                Some(&u.names),
167                                WorldOrInterface::Interface,
168                            )?,
169                            WorldItem::Include(i) => f(
170                                Some(&world.name),
171                                &i.attributes,
172                                &i.from,
173                                None,
174                                WorldOrInterface::World,
175                            )?,
176                            WorldItem::Type(_) => {}
177                            WorldItem::Import(Import {
178                                kind, attributes, ..
179                            }) => imports.push((kind, attributes)),
180                            WorldItem::Export(Export {
181                                kind, attributes, ..
182                            }) => exports.push((kind, attributes)),
183                        }
184                    }
185
186                    let mut visit_kind =
187                        |kind: &'b ExternKind<'a>, attrs: &'b [Attribute<'a>]| match kind {
188                            ExternKind::Interface(_, items) => {
189                                for item in items {
190                                    match item {
191                                        InterfaceItem::Use(u) => f(
192                                            None,
193                                            &u.attributes,
194                                            &u.from,
195                                            Some(&u.names),
196                                            WorldOrInterface::Interface,
197                                        )?,
198                                        _ => {}
199                                    }
200                                }
201                                Ok(())
202                            }
203                            ExternKind::Path(path) => {
204                                f(None, attrs, path, None, WorldOrInterface::Interface)
205                            }
206                            ExternKind::Func(..) => Ok(()),
207                        };
208
209                    for (kind, attrs) in imports {
210                        visit_kind(kind, attrs)?;
211                    }
212                    for (kind, attrs) in exports {
213                        visit_kind(kind, attrs)?;
214                    }
215                }
216                AstItem::Interface(i) => {
217                    for item in i.items.iter() {
218                        match item {
219                            InterfaceItem::Use(u) => f(
220                                Some(&i.name),
221                                &u.attributes,
222                                &u.from,
223                                Some(&u.names),
224                                WorldOrInterface::Interface,
225                            )?,
226                            _ => {}
227                        }
228                    }
229                }
230                AstItem::Use(u) => {
231                    // At the top level we don't know if this is a world or an interface.
232                    // It is up to the resolver to decide how to handle this ambiguity.
233                    f(
234                        None,
235                        &u.attributes,
236                        &u.item,
237                        None,
238                        WorldOrInterface::Unknown,
239                    )?;
240                }
241
242                AstItem::Package(pkg) => pkg.decl_list.for_each_path(f)?,
243            }
244        }
245        Ok(())
246    }
247}
248
249enum AstItem<'a> {
250    Interface(Interface<'a>),
251    World(World<'a>),
252    Use(ToplevelUse<'a>),
253    Package(PackageFile<'a>),
254}
255
256impl<'a> AstItem<'a> {
257    fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
258        let attributes = Attribute::parse_list(tokens)?;
259        match tokens.clone().next()? {
260            Some((_span, Token::Interface)) => {
261                Interface::parse(tokens, docs, attributes).map(Self::Interface)
262            }
263            Some((_span, Token::World)) => World::parse(tokens, docs, attributes).map(Self::World),
264            Some((_span, Token::Use)) => ToplevelUse::parse(tokens, attributes).map(Self::Use),
265            Some((_span, Token::Package)) => {
266                PackageFile::parse_nested(tokens, docs, attributes).map(Self::Package)
267            }
268            other => Err(err_expected(tokens, "`world`, `interface` or `use`", other).into()),
269        }
270    }
271}
272
273#[derive(Debug, Clone)]
274struct PackageName<'a> {
275    docs: Docs<'a>,
276    span: Span,
277    namespace: Id<'a>,
278    name: Id<'a>,
279    version: Option<(Span, Version)>,
280}
281
282impl<'a> PackageName<'a> {
283    fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
284        let namespace = parse_id(tokens)?;
285        tokens.expect(Token::Colon)?;
286        let name = parse_id(tokens)?;
287        let version = parse_opt_version(tokens)?;
288        Ok(PackageName {
289            docs,
290            span: Span {
291                start: namespace.span.start,
292                end: version
293                    .as_ref()
294                    .map(|(s, _)| s.end)
295                    .unwrap_or(name.span.end),
296            },
297            namespace,
298            name,
299            version,
300        })
301    }
302
303    fn package_name(&self) -> crate::PackageName {
304        crate::PackageName {
305            namespace: self.namespace.name.to_string(),
306            name: self.name.name.to_string(),
307            version: self.version.as_ref().map(|(_, v)| v.clone()),
308        }
309    }
310}
311
312struct ToplevelUse<'a> {
313    span: Span,
314    attributes: Vec<Attribute<'a>>,
315    item: UsePath<'a>,
316    as_: Option<Id<'a>>,
317}
318
319impl<'a> ToplevelUse<'a> {
320    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
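        // A rough sketch of the top-level `use` forms accepted here (example
        // names are hypothetical):
        //
        //     use my-interface;
        //     use my:pkg/my-interface@1.0.0 as other;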
321        let span = tokens.expect(Token::Use)?;
322        let item = UsePath::parse(tokens)?;
323        let as_ = if tokens.eat(Token::As)? {
324            Some(parse_id(tokens)?)
325        } else {
326            None
327        };
328        tokens.expect_semicolon()?;
329        Ok(ToplevelUse {
330            span,
331            attributes,
332            item,
333            as_,
334        })
335    }
336}
337
338struct World<'a> {
339    docs: Docs<'a>,
340    attributes: Vec<Attribute<'a>>,
341    name: Id<'a>,
342    items: Vec<WorldItem<'a>>,
343}
344
345impl<'a> World<'a> {
346    fn parse(
347        tokens: &mut Tokenizer<'a>,
348        docs: Docs<'a>,
349        attributes: Vec<Attribute<'a>>,
350    ) -> Result<Self> {
351        tokens.expect(Token::World)?;
352        let name = parse_id(tokens)?;
353        let items = Self::parse_items(tokens)?;
354        Ok(World {
355            docs,
356            attributes,
357            name,
358            items,
359        })
360    }
361
362    fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<WorldItem<'a>>> {
363        tokens.expect(Token::LeftBrace)?;
364        let mut items = Vec::new();
365        loop {
366            let docs = parse_docs(tokens)?;
367            if tokens.eat(Token::RightBrace)? {
368                break;
369            }
370            let attributes = Attribute::parse_list(tokens)?;
371            items.push(WorldItem::parse(tokens, docs, attributes)?);
372        }
373        Ok(items)
374    }
375}
376
377enum WorldItem<'a> {
378    Import(Import<'a>),
379    Export(Export<'a>),
380    Use(Use<'a>),
381    Type(TypeDef<'a>),
382    Include(Include<'a>),
383}
384
385impl<'a> WorldItem<'a> {
386    fn parse(
387        tokens: &mut Tokenizer<'a>,
388        docs: Docs<'a>,
389        attributes: Vec<Attribute<'a>>,
390    ) -> Result<WorldItem<'a>> {
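        // A rough sketch of the world items dispatched on below (example names
        // are hypothetical and the list of type definitions is not
        // exhaustive):
        //
        //     import log: func(msg: string);
        //     export run: interface { ... }
        //     use host.{handle};
        //     type id = u64;
        //     include other-world;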
391        match tokens.clone().next()? {
392            Some((_span, Token::Import)) => {
393                Import::parse(tokens, docs, attributes).map(WorldItem::Import)
394            }
395            Some((_span, Token::Export)) => {
396                Export::parse(tokens, docs, attributes).map(WorldItem::Export)
397            }
398            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(WorldItem::Use),
399            Some((_span, Token::Type)) => {
400                TypeDef::parse(tokens, docs, attributes).map(WorldItem::Type)
401            }
402            Some((_span, Token::Flags)) => {
403                TypeDef::parse_flags(tokens, docs, attributes).map(WorldItem::Type)
404            }
405            Some((_span, Token::Resource)) => {
406                TypeDef::parse_resource(tokens, docs, attributes).map(WorldItem::Type)
407            }
408            Some((_span, Token::Record)) => {
409                TypeDef::parse_record(tokens, docs, attributes).map(WorldItem::Type)
410            }
411            Some((_span, Token::Variant)) => {
412                TypeDef::parse_variant(tokens, docs, attributes).map(WorldItem::Type)
413            }
414            Some((_span, Token::Enum)) => {
415                TypeDef::parse_enum(tokens, docs, attributes).map(WorldItem::Type)
416            }
417            Some((_span, Token::Include)) => {
418                Include::parse(tokens, attributes).map(WorldItem::Include)
419            }
420            other => Err(err_expected(
421                tokens,
422                "`import`, `export`, `include`, `use`, or type definition",
423                other,
424            )
425            .into()),
426        }
427    }
428}
429
430struct Import<'a> {
431    docs: Docs<'a>,
432    attributes: Vec<Attribute<'a>>,
433    kind: ExternKind<'a>,
434}
435
436impl<'a> Import<'a> {
437    fn parse(
438        tokens: &mut Tokenizer<'a>,
439        docs: Docs<'a>,
440        attributes: Vec<Attribute<'a>>,
441    ) -> Result<Import<'a>> {
442        tokens.expect(Token::Import)?;
443        let kind = ExternKind::parse(tokens)?;
444        Ok(Import {
445            docs,
446            attributes,
447            kind,
448        })
449    }
450}
451
452struct Export<'a> {
453    docs: Docs<'a>,
454    attributes: Vec<Attribute<'a>>,
455    kind: ExternKind<'a>,
456}
457
458impl<'a> Export<'a> {
459    fn parse(
460        tokens: &mut Tokenizer<'a>,
461        docs: Docs<'a>,
462        attributes: Vec<Attribute<'a>>,
463    ) -> Result<Export<'a>> {
464        tokens.expect(Token::Export)?;
465        let kind = ExternKind::parse(tokens)?;
466        Ok(Export {
467            docs,
468            attributes,
469            kind,
470        })
471    }
472}
473
474enum ExternKind<'a> {
475    Interface(Id<'a>, Vec<InterfaceItem<'a>>),
476    Path(UsePath<'a>),
477    Func(Id<'a>, Func<'a>),
478}
479
480impl<'a> ExternKind<'a> {
481    fn parse(tokens: &mut Tokenizer<'a>) -> Result<ExternKind<'a>> {
482        // Create a copy of the token stream to test out if this is a function
483        // or an interface import. In those situations the token stream gets
484        // reset to the state of the clone and we continue down those paths.
485        //
486        // If neither a function nor an interface appears here though then the
487        // clone is thrown away and the original token stream is parsed for an
488        // interface. This will redo the original ID parse and the original
489        // colon parse, but that shouldn't be too bad perf-wise.
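        //
        // Roughly speaking, with the leading `import`/`export` keyword already
        // consumed by the caller, the three shapes distinguished below are
        // (example names hypothetical):
        //
        //     foo: func();               // ExternKind::Func
        //     foo: interface { ... }     // ExternKind::Interface
        //     foo:bar/baz;               // ExternKind::Path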
490        let mut clone = tokens.clone();
491        let id = parse_id(&mut clone)?;
492        if clone.eat(Token::Colon)? {
493            // import foo: async? func(...)
494            if clone.clone().eat(Token::Func)? || clone.clone().eat(Token::Async)? {
495                *tokens = clone;
496                let ret = ExternKind::Func(id, Func::parse(tokens)?);
497                tokens.expect_semicolon()?;
498                return Ok(ret);
499            }
500
501            // import foo: interface { ... }
502            if clone.eat(Token::Interface)? {
503                *tokens = clone;
504                return Ok(ExternKind::Interface(id, Interface::parse_items(tokens)?));
505            }
506        }
507
508        // import foo
509        // import foo/bar
510        // import foo:bar/baz
511        let ret = ExternKind::Path(UsePath::parse(tokens)?);
512        tokens.expect_semicolon()?;
513        Ok(ret)
514    }
515
516    fn span(&self) -> Span {
517        match self {
518            ExternKind::Interface(id, _) => id.span,
519            ExternKind::Path(UsePath::Id(id)) => id.span,
520            ExternKind::Path(UsePath::Package { name, .. }) => name.span,
521            ExternKind::Func(id, _) => id.span,
522        }
523    }
524}
525
526struct Interface<'a> {
527    docs: Docs<'a>,
528    attributes: Vec<Attribute<'a>>,
529    name: Id<'a>,
530    items: Vec<InterfaceItem<'a>>,
531}
532
533impl<'a> Interface<'a> {
534    fn parse(
535        tokens: &mut Tokenizer<'a>,
536        docs: Docs<'a>,
537        attributes: Vec<Attribute<'a>>,
538    ) -> Result<Self> {
539        tokens.expect(Token::Interface)?;
540        let name = parse_id(tokens)?;
541        let items = Self::parse_items(tokens)?;
542        Ok(Interface {
543            docs,
544            attributes,
545            name,
546            items,
547        })
548    }
549
550    pub(super) fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<InterfaceItem<'a>>> {
551        tokens.expect(Token::LeftBrace)?;
552        let mut items = Vec::new();
553        loop {
554            let docs = parse_docs(tokens)?;
555            if tokens.eat(Token::RightBrace)? {
556                break;
557            }
558            let attributes = Attribute::parse_list(tokens)?;
559            items.push(InterfaceItem::parse(tokens, docs, attributes)?);
560        }
561        Ok(items)
562    }
563}
564
565#[derive(Debug)]
566pub enum WorldOrInterface {
567    World,
568    Interface,
569    Unknown,
570}
571
572enum InterfaceItem<'a> {
573    TypeDef(TypeDef<'a>),
574    Func(NamedFunc<'a>),
575    Use(Use<'a>),
576}
577
578struct Use<'a> {
579    attributes: Vec<Attribute<'a>>,
580    from: UsePath<'a>,
581    names: Vec<UseName<'a>>,
582}
583
584#[derive(Debug)]
585enum UsePath<'a> {
586    Id(Id<'a>),
587    Package { id: PackageName<'a>, name: Id<'a> },
588}
589
590impl<'a> UsePath<'a> {
591    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
592        let id = parse_id(tokens)?;
593        if tokens.eat(Token::Colon)? {
594            // `foo:bar/baz@1.0`
595            let namespace = id;
596            let pkg_name = parse_id(tokens)?;
597            tokens.expect(Token::Slash)?;
598            let name = parse_id(tokens)?;
599            let version = parse_opt_version(tokens)?;
600            Ok(UsePath::Package {
601                id: PackageName {
602                    docs: Default::default(),
603                    span: Span {
604                        start: namespace.span.start,
605                        end: pkg_name.span.end,
606                    },
607                    namespace,
608                    name: pkg_name,
609                    version,
610                },
611                name,
612            })
613        } else {
614            // `foo`
615            Ok(UsePath::Id(id))
616        }
617    }
618
619    fn name(&self) -> &Id<'a> {
620        match self {
621            UsePath::Id(id) => id,
622            UsePath::Package { name, .. } => name,
623        }
624    }
625}
626
627struct UseName<'a> {
628    name: Id<'a>,
629    as_: Option<Id<'a>>,
630}
631
632impl<'a> Use<'a> {
633    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
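        // A rough sketch of the `use` shapes parsed here (example names are
        // hypothetical):
        //
        //     use my-interface.{a, b as c};
        //     use my:pkg/my-interface@1.0.0.{a};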
634        tokens.expect(Token::Use)?;
635        let from = UsePath::parse(tokens)?;
636        tokens.expect(Token::Period)?;
637        tokens.expect(Token::LeftBrace)?;
638
639        let mut names = Vec::new();
640        while !tokens.eat(Token::RightBrace)? {
641            let mut name = UseName {
642                name: parse_id(tokens)?,
643                as_: None,
644            };
645            if tokens.eat(Token::As)? {
646                name.as_ = Some(parse_id(tokens)?);
647            }
648            names.push(name);
649            if !tokens.eat(Token::Comma)? {
650                tokens.expect(Token::RightBrace)?;
651                break;
652            }
653        }
654        tokens.expect_semicolon()?;
655        Ok(Use {
656            attributes,
657            from,
658            names,
659        })
660    }
661}
662
663struct Include<'a> {
664    from: UsePath<'a>,
665    attributes: Vec<Attribute<'a>>,
666    names: Vec<IncludeName<'a>>,
667}
668
669struct IncludeName<'a> {
670    name: Id<'a>,
671    as_: Id<'a>,
672}
673
674impl<'a> Include<'a> {
675    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
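        // A rough sketch of the two shapes parsed here (example names are
        // hypothetical); note that no trailing semicolon is expected after a
        // `with` block:
        //
        //     include my:pkg/other-world;
        //     include other-world with { a as b, c as d }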
676        tokens.expect(Token::Include)?;
677        let from = UsePath::parse(tokens)?;
678
679        let names = if tokens.eat(Token::With)? {
680            parse_list(
681                tokens,
682                Token::LeftBrace,
683                Token::RightBrace,
684                |_docs, tokens| {
685                    let name = parse_id(tokens)?;
686                    tokens.expect(Token::As)?;
687                    let as_ = parse_id(tokens)?;
688                    Ok(IncludeName { name, as_ })
689                },
690            )?
691        } else {
692            tokens.expect_semicolon()?;
693            Vec::new()
694        };
695
696        Ok(Include {
697            attributes,
698            from,
699            names,
700        })
701    }
702}
703
704#[derive(Debug, Clone)]
705pub struct Id<'a> {
706    name: &'a str,
707    span: Span,
708}
709
710impl<'a> From<&'a str> for Id<'a> {
711    fn from(s: &'a str) -> Id<'a> {
712        Id {
713            name: s.into(),
714            span: Span { start: 0, end: 0 },
715        }
716    }
717}
718
719#[derive(Debug, Clone)]
720pub struct Docs<'a> {
721    docs: Vec<Cow<'a, str>>,
722    span: Span,
723}
724
725impl<'a> Default for Docs<'a> {
726    fn default() -> Self {
727        Self {
728            docs: Default::default(),
729            span: Span { start: 0, end: 0 },
730        }
731    }
732}
733
734struct TypeDef<'a> {
735    docs: Docs<'a>,
736    attributes: Vec<Attribute<'a>>,
737    name: Id<'a>,
738    ty: Type<'a>,
739}
740
741enum Type<'a> {
742    Bool(Span),
743    U8(Span),
744    U16(Span),
745    U32(Span),
746    U64(Span),
747    S8(Span),
748    S16(Span),
749    S32(Span),
750    S64(Span),
751    F32(Span),
752    F64(Span),
753    Char(Span),
754    String(Span),
755    Name(Id<'a>),
756    List(List<'a>),
757    FixedSizeList(FixedSizeList<'a>),
758    Handle(Handle<'a>),
759    Resource(Resource<'a>),
760    Record(Record<'a>),
761    Flags(Flags<'a>),
762    Variant(Variant<'a>),
763    Tuple(Tuple<'a>),
764    Enum(Enum<'a>),
765    Option(Option_<'a>),
766    Result(Result_<'a>),
767    Future(Future<'a>),
768    Stream(Stream<'a>),
769    ErrorContext(Span),
770}
771
772enum Handle<'a> {
773    Own { resource: Id<'a> },
774    Borrow { resource: Id<'a> },
775}
776
777impl Handle<'_> {
778    fn span(&self) -> Span {
779        match self {
780            Handle::Own { resource } | Handle::Borrow { resource } => resource.span,
781        }
782    }
783}
784
785struct Resource<'a> {
786    span: Span,
787    funcs: Vec<ResourceFunc<'a>>,
788}
789
790enum ResourceFunc<'a> {
791    Method(NamedFunc<'a>),
792    Static(NamedFunc<'a>),
793    Constructor(NamedFunc<'a>),
794}
795
796impl<'a> ResourceFunc<'a> {
797    fn parse(
798        docs: Docs<'a>,
799        attributes: Vec<Attribute<'a>>,
800        tokens: &mut Tokenizer<'a>,
801    ) -> Result<Self> {
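        // A rough sketch of the resource function shapes distinguished below
        // (example names are hypothetical):
        //
        //     constructor(init: u32);
        //     frob: func();
        //     create: static func() -> u32;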
802        match tokens.clone().next()? {
803            Some((span, Token::Constructor)) => {
804                tokens.expect(Token::Constructor)?;
805                tokens.expect(Token::LeftParen)?;
806                let params = parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
807                    let name = parse_id(tokens)?;
808                    tokens.expect(Token::Colon)?;
809                    let ty = Type::parse(tokens)?;
810                    Ok((name, ty))
811                })?;
812                tokens.expect_semicolon()?;
813                Ok(ResourceFunc::Constructor(NamedFunc {
814                    docs,
815                    attributes,
816                    name: Id {
817                        span,
818                        name: "constructor",
819                    },
820                    func: Func {
821                        span,
822                        async_: false,
823                        params,
824                        result: None,
825                    },
826                }))
827            }
828            Some((_span, Token::Id | Token::ExplicitId)) => {
829                let name = parse_id(tokens)?;
830                tokens.expect(Token::Colon)?;
831                let ctor = if tokens.eat(Token::Static)? {
832                    ResourceFunc::Static
833                } else {
834                    ResourceFunc::Method
835                };
836                let func = Func::parse(tokens)?;
837                tokens.expect_semicolon()?;
838                Ok(ctor(NamedFunc {
839                    docs,
840                    attributes,
841                    name,
842                    func,
843                }))
844            }
845            other => Err(err_expected(tokens, "`constructor` or identifier", other).into()),
846        }
847    }
848
849    fn named_func(&self) -> &NamedFunc<'a> {
850        use ResourceFunc::*;
851        match self {
852            Method(f) | Static(f) | Constructor(f) => f,
853        }
854    }
855}
856
857struct Record<'a> {
858    span: Span,
859    fields: Vec<Field<'a>>,
860}
861
862struct Field<'a> {
863    docs: Docs<'a>,
864    name: Id<'a>,
865    ty: Type<'a>,
866}
867
868struct Flags<'a> {
869    span: Span,
870    flags: Vec<Flag<'a>>,
871}
872
873struct Flag<'a> {
874    docs: Docs<'a>,
875    name: Id<'a>,
876}
877
878struct Variant<'a> {
879    span: Span,
880    cases: Vec<Case<'a>>,
881}
882
883struct Case<'a> {
884    docs: Docs<'a>,
885    name: Id<'a>,
886    ty: Option<Type<'a>>,
887}
888
889struct Enum<'a> {
890    span: Span,
891    cases: Vec<EnumCase<'a>>,
892}
893
894struct EnumCase<'a> {
895    docs: Docs<'a>,
896    name: Id<'a>,
897}
898
899struct Option_<'a> {
900    span: Span,
901    ty: Box<Type<'a>>,
902}
903
904struct List<'a> {
905    span: Span,
906    ty: Box<Type<'a>>,
907}
908
909struct FixedSizeList<'a> {
910    span: Span,
911    ty: Box<Type<'a>>,
912    size: u32,
913}
914
915struct Future<'a> {
916    span: Span,
917    ty: Option<Box<Type<'a>>>,
918}
919
920struct Tuple<'a> {
921    span: Span,
922    types: Vec<Type<'a>>,
923}
924
925struct Result_<'a> {
926    span: Span,
927    ok: Option<Box<Type<'a>>>,
928    err: Option<Box<Type<'a>>>,
929}
930
931struct Stream<'a> {
932    span: Span,
933    ty: Option<Box<Type<'a>>>,
934}
935
936struct NamedFunc<'a> {
937    docs: Docs<'a>,
938    attributes: Vec<Attribute<'a>>,
939    name: Id<'a>,
940    func: Func<'a>,
941}
942
943type ParamList<'a> = Vec<(Id<'a>, Type<'a>)>;
944
945struct Func<'a> {
946    span: Span,
947    async_: bool,
948    params: ParamList<'a>,
949    result: Option<Type<'a>>,
950}
951
952impl<'a> Func<'a> {
953    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Func<'a>> {
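        // A rough sketch of the shapes parsed here; the leading name and colon
        // are consumed by callers such as `NamedFunc::parse`:
        //
        //     func(a: u32, b: string) -> bool
        //     async func()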
954        fn parse_params<'a>(tokens: &mut Tokenizer<'a>, left_paren: bool) -> Result<ParamList<'a>> {
955            if left_paren {
956                tokens.expect(Token::LeftParen)?;
957            };
958            parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
959                let name = parse_id(tokens)?;
960                tokens.expect(Token::Colon)?;
961                let ty = Type::parse(tokens)?;
962                Ok((name, ty))
963            })
964        }
965
966        let async_ = tokens.eat(Token::Async)?;
967        let span = tokens.expect(Token::Func)?;
968        let params = parse_params(tokens, true)?;
969        let result = if tokens.eat(Token::RArrow)? {
970            let ty = Type::parse(tokens)?;
971            Some(ty)
972        } else {
973            None
974        };
975        Ok(Func {
976            span,
977            async_,
978            params,
979            result,
980        })
981    }
982}
983
984impl<'a> InterfaceItem<'a> {
985    fn parse(
986        tokens: &mut Tokenizer<'a>,
987        docs: Docs<'a>,
988        attributes: Vec<Attribute<'a>>,
989    ) -> Result<InterfaceItem<'a>> {
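        // A rough sketch of the interface items dispatched on below (example
        // names are hypothetical and the list of type definitions is not
        // exhaustive):
        //
        //     type t = list<u32>;
        //     record point { x: u32, y: u32 }
        //     resource blob;
        //     frob: func(p: point) -> t;
        //     use other.{t as u};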
990        match tokens.clone().next()? {
991            Some((_span, Token::Type)) => {
992                TypeDef::parse(tokens, docs, attributes).map(InterfaceItem::TypeDef)
993            }
994            Some((_span, Token::Flags)) => {
995                TypeDef::parse_flags(tokens, docs, attributes).map(InterfaceItem::TypeDef)
996            }
997            Some((_span, Token::Enum)) => {
998                TypeDef::parse_enum(tokens, docs, attributes).map(InterfaceItem::TypeDef)
999            }
1000            Some((_span, Token::Variant)) => {
1001                TypeDef::parse_variant(tokens, docs, attributes).map(InterfaceItem::TypeDef)
1002            }
1003            Some((_span, Token::Resource)) => {
1004                TypeDef::parse_resource(tokens, docs, attributes).map(InterfaceItem::TypeDef)
1005            }
1006            Some((_span, Token::Record)) => {
1007                TypeDef::parse_record(tokens, docs, attributes).map(InterfaceItem::TypeDef)
1008            }
1009            Some((_span, Token::Id)) | Some((_span, Token::ExplicitId)) => {
1010                NamedFunc::parse(tokens, docs, attributes).map(InterfaceItem::Func)
1011            }
1012            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(InterfaceItem::Use),
1013            other => Err(err_expected(tokens, "`type`, `resource` or `func`", other).into()),
1014        }
1015    }
1016}
1017
1018impl<'a> TypeDef<'a> {
1019    fn parse(
1020        tokens: &mut Tokenizer<'a>,
1021        docs: Docs<'a>,
1022        attributes: Vec<Attribute<'a>>,
1023    ) -> Result<Self> {
1024        tokens.expect(Token::Type)?;
1025        let name = parse_id(tokens)?;
1026        tokens.expect(Token::Equals)?;
1027        let ty = Type::parse(tokens)?;
1028        tokens.expect_semicolon()?;
1029        Ok(TypeDef {
1030            docs,
1031            attributes,
1032            name,
1033            ty,
1034        })
1035    }
1036
1037    fn parse_flags(
1038        tokens: &mut Tokenizer<'a>,
1039        docs: Docs<'a>,
1040        attributes: Vec<Attribute<'a>>,
1041    ) -> Result<Self> {
1042        tokens.expect(Token::Flags)?;
1043        let name = parse_id(tokens)?;
1044        let ty = Type::Flags(Flags {
1045            span: name.span,
1046            flags: parse_list(
1047                tokens,
1048                Token::LeftBrace,
1049                Token::RightBrace,
1050                |docs, tokens| {
1051                    let name = parse_id(tokens)?;
1052                    Ok(Flag { docs, name })
1053                },
1054            )?,
1055        });
1056        Ok(TypeDef {
1057            docs,
1058            attributes,
1059            name,
1060            ty,
1061        })
1062    }
1063
1064    fn parse_resource(
1065        tokens: &mut Tokenizer<'a>,
1066        docs: Docs<'a>,
1067        attributes: Vec<Attribute<'a>>,
1068    ) -> Result<Self> {
1069        tokens.expect(Token::Resource)?;
1070        let name = parse_id(tokens)?;
1071        let mut funcs = Vec::new();
1072        if tokens.eat(Token::LeftBrace)? {
1073            while !tokens.eat(Token::RightBrace)? {
1074                let docs = parse_docs(tokens)?;
1075                let attributes = Attribute::parse_list(tokens)?;
1076                funcs.push(ResourceFunc::parse(docs, attributes, tokens)?);
1077            }
1078        } else {
1079            tokens.expect_semicolon()?;
1080        }
1081        let ty = Type::Resource(Resource {
1082            span: name.span,
1083            funcs,
1084        });
1085        Ok(TypeDef {
1086            docs,
1087            attributes,
1088            name,
1089            ty,
1090        })
1091    }
1092
1093    fn parse_record(
1094        tokens: &mut Tokenizer<'a>,
1095        docs: Docs<'a>,
1096        attributes: Vec<Attribute<'a>>,
1097    ) -> Result<Self> {
1098        tokens.expect(Token::Record)?;
1099        let name = parse_id(tokens)?;
1100        let ty = Type::Record(Record {
1101            span: name.span,
1102            fields: parse_list(
1103                tokens,
1104                Token::LeftBrace,
1105                Token::RightBrace,
1106                |docs, tokens| {
1107                    let name = parse_id(tokens)?;
1108                    tokens.expect(Token::Colon)?;
1109                    let ty = Type::parse(tokens)?;
1110                    Ok(Field { docs, name, ty })
1111                },
1112            )?,
1113        });
1114        Ok(TypeDef {
1115            docs,
1116            attributes,
1117            name,
1118            ty,
1119        })
1120    }
1121
1122    fn parse_variant(
1123        tokens: &mut Tokenizer<'a>,
1124        docs: Docs<'a>,
1125        attributes: Vec<Attribute<'a>>,
1126    ) -> Result<Self> {
1127        tokens.expect(Token::Variant)?;
1128        let name = parse_id(tokens)?;
1129        let ty = Type::Variant(Variant {
1130            span: name.span,
1131            cases: parse_list(
1132                tokens,
1133                Token::LeftBrace,
1134                Token::RightBrace,
1135                |docs, tokens| {
1136                    let name = parse_id(tokens)?;
1137                    let ty = if tokens.eat(Token::LeftParen)? {
1138                        let ty = Type::parse(tokens)?;
1139                        tokens.expect(Token::RightParen)?;
1140                        Some(ty)
1141                    } else {
1142                        None
1143                    };
1144                    Ok(Case { docs, name, ty })
1145                },
1146            )?,
1147        });
1148        Ok(TypeDef {
1149            docs,
1150            attributes,
1151            name,
1152            ty,
1153        })
1154    }
1155
1156    fn parse_enum(
1157        tokens: &mut Tokenizer<'a>,
1158        docs: Docs<'a>,
1159        attributes: Vec<Attribute<'a>>,
1160    ) -> Result<Self> {
1161        tokens.expect(Token::Enum)?;
1162        let name = parse_id(tokens)?;
1163        let ty = Type::Enum(Enum {
1164            span: name.span,
1165            cases: parse_list(
1166                tokens,
1167                Token::LeftBrace,
1168                Token::RightBrace,
1169                |docs, tokens| {
1170                    let name = parse_id(tokens)?;
1171                    Ok(EnumCase { docs, name })
1172                },
1173            )?,
1174        });
1175        Ok(TypeDef {
1176            docs,
1177            attributes,
1178            name,
1179            ty,
1180        })
1181    }
1182}
1183
1184impl<'a> NamedFunc<'a> {
1185    fn parse(
1186        tokens: &mut Tokenizer<'a>,
1187        docs: Docs<'a>,
1188        attributes: Vec<Attribute<'a>>,
1189    ) -> Result<Self> {
1190        let name = parse_id(tokens)?;
1191        tokens.expect(Token::Colon)?;
1192        let func = Func::parse(tokens)?;
1193        tokens.expect_semicolon()?;
1194        Ok(NamedFunc {
1195            docs,
1196            attributes,
1197            name,
1198            func,
1199        })
1200    }
1201}
1202
1203fn parse_id<'a>(tokens: &mut Tokenizer<'a>) -> Result<Id<'a>> {
1204    match tokens.next()? {
1205        Some((span, Token::Id)) => Ok(Id {
1206            name: tokens.parse_id(span)?,
1207            span,
1208        }),
1209        Some((span, Token::ExplicitId)) => Ok(Id {
1210            name: tokens.parse_explicit_id(span)?,
1211            span,
1212        }),
1213        other => Err(err_expected(tokens, "an identifier or string", other).into()),
1214    }
1215}
1216
1217fn parse_opt_version(tokens: &mut Tokenizer<'_>) -> Result<Option<(Span, Version)>> {
1218    if tokens.eat(Token::At)? {
1219        parse_version(tokens).map(Some)
1220    } else {
1221        Ok(None)
1222    }
1223}
1224
1225fn parse_version(tokens: &mut Tokenizer<'_>) -> Result<(Span, Version)> {
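    // For example (a non-exhaustive sketch with hypothetical version strings),
    // all of the following parse here:
    //
    //     1.2.3
    //     0.2.0-rc-2023-10-18
    //     1.0.0-alpha.1+build.5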
1226    let start = tokens.expect(Token::Integer)?.start;
1227    tokens.expect(Token::Period)?;
1228    tokens.expect(Token::Integer)?;
1229    tokens.expect(Token::Period)?;
1230    let end = tokens.expect(Token::Integer)?.end;
1231    let mut span = Span { start, end };
1232    eat_ids(tokens, Token::Minus, &mut span)?;
1233    eat_ids(tokens, Token::Plus, &mut span)?;
1234    let string = tokens.get_span(span);
1235    let version = Version::parse(string).map_err(|e| Error::new(span, e.to_string()))?;
1236    return Ok((span, version));
1237
1238    // According to `semver.org` this is what we're parsing:
1239    //
1240    // ```ebnf
1241    // <pre-release> ::= <dot-separated pre-release identifiers>
1242    //
1243    // <dot-separated pre-release identifiers> ::= <pre-release identifier>
1244    //                                           | <pre-release identifier> "." <dot-separated pre-release identifiers>
1245    //
1246    // <build> ::= <dot-separated build identifiers>
1247    //
1248    // <dot-separated build identifiers> ::= <build identifier>
1249    //                                     | <build identifier> "." <dot-separated build identifiers>
1250    //
1251    // <pre-release identifier> ::= <alphanumeric identifier>
1252    //                            | <numeric identifier>
1253    //
1254    // <build identifier> ::= <alphanumeric identifier>
1255    //                      | <digits>
1256    //
1257    // <alphanumeric identifier> ::= <non-digit>
1258    //                             | <non-digit> <identifier characters>
1259    //                             | <identifier characters> <non-digit>
1260    //                             | <identifier characters> <non-digit> <identifier characters>
1261    //
1262    // <numeric identifier> ::= "0"
1263    //                        | <positive digit>
1264    //                        | <positive digit> <digits>
1265    //
1266    // <identifier characters> ::= <identifier character>
1267    //                           | <identifier character> <identifier characters>
1268    //
1269    // <identifier character> ::= <digit>
1270    //                          | <non-digit>
1271    //
1272    // <non-digit> ::= <letter>
1273    //               | "-"
1274    //
1275    // <digits> ::= <digit>
1276    //            | <digit> <digits>
1277    // ```
1278    //
1279    // Only a loose approximation of this grammar is parsed here using WIT tokens:
1280    //
1281    // * This function starts by parsing the optional leading `-` and `+`,
1282    //   which indicate pre-release and build metadata respectively.
1283    // * Afterwards all of $id, $integer, `-`, and `.` are chomped. The only
1284    //   exception here is that if `.` isn't followed by $id, $integer, or `-`
1285    //   then it's assumed that it's something like `use a:b@1.0.0-a.{...}`
1286    //   where the `.` is part of WIT syntax, not semver.
1287    //
1288    // Note that this additionally doesn't try to return any first-class errors.
1289    // Instead this stops at anything unrecognized and leaves it to something
1290    // else in the system to return an error.
1291    fn eat_ids(tokens: &mut Tokenizer<'_>, prefix: Token, end: &mut Span) -> Result<()> {
1292        if !tokens.eat(prefix)? {
1293            return Ok(());
1294        }
1295        loop {
1296            let mut clone = tokens.clone();
1297            match clone.next()? {
1298                Some((span, Token::Id | Token::Integer | Token::Minus)) => {
1299                    end.end = span.end;
1300                    *tokens = clone;
1301                }
1302                Some((_span, Token::Period)) => match clone.next()? {
1303                    Some((span, Token::Id | Token::Integer | Token::Minus)) => {
1304                        end.end = span.end;
1305                        *tokens = clone;
1306                    }
1307                    _ => break Ok(()),
1308                },
1309                _ => break Ok(()),
1310            }
1311        }
1312    }
1313}
1314
1315fn parse_docs<'a>(tokens: &mut Tokenizer<'a>) -> Result<Docs<'a>> {
1316    let mut docs = Docs::default();
1317    let mut clone = tokens.clone();
1318    let mut started = false;
1319    while let Some((span, token)) = clone.next_raw()? {
1320        match token {
1321            Token::Whitespace => {}
1322            Token::Comment => {
1323                let comment = tokens.get_span(span);
1324                if !started {
1325                    docs.span.start = span.start;
1326                    started = true;
1327                }
1328                let trailing_ws = comment
1329                    .bytes()
1330                    .rev()
1331                    .take_while(|ch| ch.is_ascii_whitespace())
1332                    .count();
1333                docs.span.end = span.end - (trailing_ws as u32);
1334                docs.docs.push(comment.into());
1335            }
1336            _ => break,
1337        };
1338        *tokens = clone.clone();
1339    }
1340    Ok(docs)
1341}
1342
1343impl<'a> Type<'a> {
1344    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
1345        match tokens.next()? {
1346            Some((span, Token::U8)) => Ok(Type::U8(span)),
1347            Some((span, Token::U16)) => Ok(Type::U16(span)),
1348            Some((span, Token::U32)) => Ok(Type::U32(span)),
1349            Some((span, Token::U64)) => Ok(Type::U64(span)),
1350            Some((span, Token::S8)) => Ok(Type::S8(span)),
1351            Some((span, Token::S16)) => Ok(Type::S16(span)),
1352            Some((span, Token::S32)) => Ok(Type::S32(span)),
1353            Some((span, Token::S64)) => Ok(Type::S64(span)),
1354            Some((span, Token::F32)) => Ok(Type::F32(span)),
1355            Some((span, Token::F64)) => Ok(Type::F64(span)),
1356            Some((span, Token::Char)) => Ok(Type::Char(span)),
1357
1358            // tuple<T, U, ...>
1359            Some((span, Token::Tuple)) => {
1360                let types = parse_list(
1361                    tokens,
1362                    Token::LessThan,
1363                    Token::GreaterThan,
1364                    |_docs, tokens| Type::parse(tokens),
1365                )?;
1366                Ok(Type::Tuple(Tuple { span, types }))
1367            }
1368
1369            Some((span, Token::Bool)) => Ok(Type::Bool(span)),
1370            Some((span, Token::String_)) => Ok(Type::String(span)),
1371
1372            // list<T>
1373            // list<T, N>
1374            Some((span, Token::List)) => {
1375                tokens.expect(Token::LessThan)?;
1376                let ty = Type::parse(tokens)?;
1377                let size = if tokens.eat(Token::Comma)? {
1378                    let number = tokens.next()?;
1379                    if let Some((span, Token::Integer)) = number {
1380                        let size: u32 = tokens.get_span(span).parse()?;
1381                        Some(size)
1382                    } else {
1383                        return Err(err_expected(tokens, "fixed size", number).into());
1384                    }
1385                } else {
1386                    None
1387                };
1388                tokens.expect(Token::GreaterThan)?;
1389                if let Some(size) = size {
1390                    Ok(Type::FixedSizeList(FixedSizeList {
1391                        span,
1392                        ty: Box::new(ty),
1393                        size,
1394                    }))
1395                } else {
1396                    Ok(Type::List(List {
1397                        span,
1398                        ty: Box::new(ty),
1399                    }))
1400                }
1401            }
1402
1403            // option<T>
1404            Some((span, Token::Option_)) => {
1405                tokens.expect(Token::LessThan)?;
1406                let ty = Type::parse(tokens)?;
1407                tokens.expect(Token::GreaterThan)?;
1408                Ok(Type::Option(Option_ {
1409                    span,
1410                    ty: Box::new(ty),
1411                }))
1412            }
1413
1414            // result<T, E>
1415            // result<_, E>
1416            // result<T>
1417            // result
1418            Some((span, Token::Result_)) => {
1419                let mut ok = None;
1420                let mut err = None;
1421
1422                if tokens.eat(Token::LessThan)? {
1423                    if tokens.eat(Token::Underscore)? {
1424                        tokens.expect(Token::Comma)?;
1425                        err = Some(Box::new(Type::parse(tokens)?));
1426                    } else {
1427                        ok = Some(Box::new(Type::parse(tokens)?));
1428                        if tokens.eat(Token::Comma)? {
1429                            err = Some(Box::new(Type::parse(tokens)?));
1430                        }
1431                    };
1432                    tokens.expect(Token::GreaterThan)?;
1433                };
1434                Ok(Type::Result(Result_ { span, ok, err }))
1435            }
1436
1437            // future<T>
1438            // future
1439            Some((span, Token::Future)) => {
1440                let mut ty = None;
1441
1442                if tokens.eat(Token::LessThan)? {
1443                    ty = Some(Box::new(Type::parse(tokens)?));
1444                    tokens.expect(Token::GreaterThan)?;
1445                };
1446                Ok(Type::Future(Future { span, ty }))
1447            }
1448
1449            // stream<T>
1450            // stream
1451            Some((span, Token::Stream)) => {
1452                let mut ty = None;
1453
1454                if tokens.eat(Token::LessThan)? {
1455                    ty = Some(Box::new(Type::parse(tokens)?));
1456                    tokens.expect(Token::GreaterThan)?;
1457                };
1458                Ok(Type::Stream(Stream { span, ty }))
1459            }
1460
1461            // error-context
1462            Some((span, Token::ErrorContext)) => Ok(Type::ErrorContext(span)),
1463
1464            // own<T>
1465            Some((_span, Token::Own)) => {
1466                tokens.expect(Token::LessThan)?;
1467                let resource = parse_id(tokens)?;
1468                tokens.expect(Token::GreaterThan)?;
1469                Ok(Type::Handle(Handle::Own { resource }))
1470            }
1471
1472            // borrow<T>
1473            Some((_span, Token::Borrow)) => {
1474                tokens.expect(Token::LessThan)?;
1475                let resource = parse_id(tokens)?;
1476                tokens.expect(Token::GreaterThan)?;
1477                Ok(Type::Handle(Handle::Borrow { resource }))
1478            }
1479
1480            // `foo`
1481            Some((span, Token::Id)) => Ok(Type::Name(Id {
1482                name: tokens.parse_id(span)?.into(),
1483                span,
1484            })),
1485            // `%foo`
1486            Some((span, Token::ExplicitId)) => Ok(Type::Name(Id {
1487                name: tokens.parse_explicit_id(span)?.into(),
1488                span,
1489            })),
1490
1491            other => Err(err_expected(tokens, "a type", other).into()),
1492        }
1493    }
1494
1495    fn span(&self) -> Span {
1496        match self {
1497            Type::Bool(span)
1498            | Type::U8(span)
1499            | Type::U16(span)
1500            | Type::U32(span)
1501            | Type::U64(span)
1502            | Type::S8(span)
1503            | Type::S16(span)
1504            | Type::S32(span)
1505            | Type::S64(span)
1506            | Type::F32(span)
1507            | Type::F64(span)
1508            | Type::Char(span)
1509            | Type::String(span)
1510            | Type::ErrorContext(span) => *span,
1511            Type::Name(id) => id.span,
1512            Type::List(l) => l.span,
1513            Type::FixedSizeList(l) => l.span,
1514            Type::Handle(h) => h.span(),
1515            Type::Resource(r) => r.span,
1516            Type::Record(r) => r.span,
1517            Type::Flags(f) => f.span,
1518            Type::Variant(v) => v.span,
1519            Type::Tuple(t) => t.span,
1520            Type::Enum(e) => e.span,
1521            Type::Option(o) => o.span,
1522            Type::Result(r) => r.span,
1523            Type::Future(f) => f.span,
1524            Type::Stream(s) => s.span,
1525        }
1526    }
1527}
1528
1529fn parse_list<'a, T>(
1530    tokens: &mut Tokenizer<'a>,
1531    start: Token,
1532    end: Token,
1533    parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
1534) -> Result<Vec<T>> {
1535    tokens.expect(start)?;
1536    parse_list_trailer(tokens, end, parse)
1537}
1538
1539fn parse_list_trailer<'a, T>(
1540    tokens: &mut Tokenizer<'a>,
1541    end: Token,
1542    mut parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
1543) -> Result<Vec<T>> {
1544    let mut items = Vec::new();
1545    loop {
1546        // parse docs first so they aren't skipped when eating the end token
1547        let docs = parse_docs(tokens)?;
1548
1549        // if we found an end token then we're done
1550        if tokens.eat(end)? {
1551            break;
1552        }
1553
1554        let item = parse(docs, tokens)?;
1555        items.push(item);
1556
1557        // if there's no trailing comma then this is required to be the end,
1558        // otherwise we go through the loop to try to get another item
1559        if !tokens.eat(Token::Comma)? {
1560            tokens.expect(end)?;
1561            break;
1562        }
1563    }
1564    Ok(items)
1565}
1566
1567fn err_expected(
1568    tokens: &Tokenizer<'_>,
1569    expected: &'static str,
1570    found: Option<(Span, Token)>,
1571) -> Error {
1572    match found {
1573        Some((span, token)) => Error::new(
1574            span,
1575            format!("expected {}, found {}", expected, token.describe()),
1576        ),
1577        None => Error::new(tokens.eof_span(), format!("expected {expected}, found eof")),
1578    }
1579}
1580
1581enum Attribute<'a> {
1582    Since { span: Span, version: Version },
1583    Unstable { span: Span, feature: Id<'a> },
1584    Deprecated { span: Span, version: Version },
1585}
1586
1587impl<'a> Attribute<'a> {
1588    fn parse_list(tokens: &mut Tokenizer<'a>) -> Result<Vec<Attribute<'a>>> {
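        // A rough sketch of the attribute syntax handled below (example
        // version and feature values are hypothetical):
        //
        //     @since(version = 1.0.0)
        //     @unstable(feature = my-feature)
        //     @deprecated(version = 2.0.0)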
1589        let mut ret = Vec::new();
1590        while tokens.eat(Token::At)? {
1591            let id = parse_id(tokens)?;
1592            let attr = match id.name {
1593                "since" => {
1594                    tokens.expect(Token::LeftParen)?;
1595                    eat_id(tokens, "version")?;
1596                    tokens.expect(Token::Equals)?;
1597                    let (_span, version) = parse_version(tokens)?;
1598                    tokens.expect(Token::RightParen)?;
1599                    Attribute::Since {
1600                        span: id.span,
1601                        version,
1602                    }
1603                }
1604                "unstable" => {
1605                    tokens.expect(Token::LeftParen)?;
1606                    eat_id(tokens, "feature")?;
1607                    tokens.expect(Token::Equals)?;
1608                    let feature = parse_id(tokens)?;
1609                    tokens.expect(Token::RightParen)?;
1610                    Attribute::Unstable {
1611                        span: id.span,
1612                        feature,
1613                    }
1614                }
1615                "deprecated" => {
1616                    tokens.expect(Token::LeftParen)?;
1617                    eat_id(tokens, "version")?;
1618                    tokens.expect(Token::Equals)?;
1619                    let (_span, version) = parse_version(tokens)?;
1620                    tokens.expect(Token::RightParen)?;
1621                    Attribute::Deprecated {
1622                        span: id.span,
1623                        version,
1624                    }
1625                }
1626                other => {
1627                    bail!(Error::new(id.span, format!("unknown attribute `{other}`"),))
1628                }
1629            };
1630            ret.push(attr);
1631        }
1632        Ok(ret)
1633    }
1634
1635    fn span(&self) -> Span {
1636        match self {
1637            Attribute::Since { span, .. }
1638            | Attribute::Unstable { span, .. }
1639            | Attribute::Deprecated { span, .. } => *span,
1640        }
1641    }
1642}
1643
1644fn eat_id(tokens: &mut Tokenizer<'_>, expected: &str) -> Result<Span> {
1645    let id = parse_id(tokens)?;
1646    if id.name != expected {
1647        bail!(Error::new(
1648            id.span,
1649            format!("expected `{expected}`, found `{}`", id.name),
1650        ));
1651    }
1652    Ok(id.span)
1653}
1654
1655/// A listing of source files which are parsed into an
1656/// [`UnresolvedPackageGroup`].
1657///
1658/// [`UnresolvedPackageGroup`]: crate::UnresolvedPackageGroup
1659#[derive(Clone, Default)]
1660pub struct SourceMap {
1661    sources: Vec<Source>,
1662    offset: u32,
1663    require_f32_f64: Option<bool>,
1664}
1665
1666#[derive(Clone)]
1667struct Source {
1668    offset: u32,
1669    path: PathBuf,
1670    contents: String,
1671}
1672
1673impl SourceMap {
1674    /// Creates a new empty source map.
1675    pub fn new() -> SourceMap {
1676        SourceMap::default()
1677    }
1678
1679    #[doc(hidden)] // NB: only here for a transitional period
1680    pub fn set_require_f32_f64(&mut self, enable: bool) {
1681        self.require_f32_f64 = Some(enable);
1682    }
1683
1684    /// Reads the file `path` on the filesystem and appends its contents to this
1685    /// [`SourceMap`].
1686    pub fn push_file(&mut self, path: &Path) -> Result<()> {
1687        let contents = std::fs::read_to_string(path)
1688            .with_context(|| format!("failed to read file {path:?}"))?;
1689        self.push(path, contents);
1690        Ok(())
1691    }
1692
    /// Appends the given contents, associated with the given path, to this
    /// source map.
    ///
    /// The `path` provided is not read from the filesystem and is only used
    /// in error messages. Each file added to a [`SourceMap`] contributes to
    /// the final parsed package, namely by unioning together all of the
    /// interfaces and worlds defined across files. Note, however, that each
    /// file has its own namespace for top-level `use` statements and the like.
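    ///
    /// # Example
    ///
    /// A sketch of how two in-memory "files" union into one package; the
    /// paths and package name below are purely illustrative:
    ///
    /// ```ignore
    /// use std::path::Path;
    ///
    /// let mut map = SourceMap::new();
    /// map.push(Path::new("a.wit"), "package demo:example;\n\ninterface a {}\n");
    /// map.push(Path::new("b.wit"), "package demo:example;\n\ninterface b {}\n");
    /// // Both interfaces end up in the same `demo:example` package.
    /// let group = map.parse().expect("valid WIT");
    /// ```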
    pub fn push(&mut self, path: &Path, contents: impl Into<String>) {
        let mut contents = contents.into();
        // Guarantee that there's at least one character in these contents by
        // appending a single newline to the end. This newline is excluded from
        // tokenization below; it's only here so that spans pointing one byte
        // beyond the end of a file (eof) still map back to this source.
        contents.push('\n');
        let new_offset = self.offset + u32::try_from(contents.len()).unwrap();
        self.sources.push(Source {
            offset: self.offset,
            path: path.to_path_buf(),
            contents,
        });
        self.offset = new_offset;
    }

    /// Parses the files added to this source map into a
    /// [`UnresolvedPackageGroup`].
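    ///
    /// Nested `package foo:bar { ... }` blocks are split out of each file and
    /// returned separately from the main package. A sketch, with illustrative
    /// names, of how a nested package shows up in the result:
    ///
    /// ```ignore
    /// use std::path::Path;
    ///
    /// let mut map = SourceMap::new();
    /// map.push(
    ///     Path::new("nested.wit"),
    ///     "package demo:outer;\n\npackage demo:inner { interface i {} }\n",
    /// );
    /// let group = map.parse().expect("valid WIT");
    /// assert_eq!(group.nested.len(), 1);
    /// ```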
    pub fn parse(self) -> Result<UnresolvedPackageGroup> {
        let mut nested = Vec::new();
        let main = self.rewrite_error(|| {
            let mut resolver = Resolver::default();
            let mut srcs = self.sources.iter().collect::<Vec<_>>();
            srcs.sort_by_key(|src| &src.path);

            // Parse each source file individually. A tokenizer is created here
            // from the source and its settings, and then `PackageFile` is used
            // to parse the whole stream of tokens.
            for src in srcs {
                let mut tokens = Tokenizer::new(
                    // chop off the forcibly appended `\n` character when
                    // passing through the source to get tokenized.
                    &src.contents[..src.contents.len() - 1],
                    src.offset,
                    self.require_f32_f64,
                )
                .with_context(|| format!("failed to tokenize path: {}", src.path.display()))?;
                let mut file = PackageFile::parse(&mut tokens)?;

                // Filter out any nested packages and resolve them separately.
                // Nested packages have only a single "file" so only one item
                // is pushed into a `Resolver`. Note that a nested `Resolver`
                // is used here, not the outer one.
                //
                // Note that filtering out `Package` items is required because
                // of how the prohibition on nested packages within nested
                // packages is currently implemented.
                for item in mem::take(&mut file.decl_list.items) {
                    match item {
                        AstItem::Package(nested_pkg) => {
                            let mut resolve = Resolver::default();
                            resolve.push(nested_pkg).with_context(|| {
                                format!(
                                    "failed to handle nested package in: {}",
                                    src.path.display()
                                )
                            })?;

                            nested.push(resolve.resolve()?);
                        }
                        other => file.decl_list.items.push(other),
                    }
                }

                // With nested packages handled, push this file into the
                // resolver.
                resolver.push(file).with_context(|| {
                    format!("failed to start resolving path: {}", src.path.display())
                })?;
            }
            Ok(resolver.resolve()?)
        })?;
        Ok(UnresolvedPackageGroup {
            main,
            nested,
            source_map: self,
        })
    }

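    /// Runs `f` and, if it fails, attaches source highlighting to the error.
    ///
    /// Known error types (`Error`, `PackageNotFoundError`, lexer errors, and
    /// topological-sort errors) are rendered against the sources in this map
    /// so the failing span is reported with a `file:line:col` snippet.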
    pub(crate) fn rewrite_error<F, T>(&self, f: F) -> Result<T>
    where
        F: FnOnce() -> Result<T>,
    {
        let mut err = match f() {
            Ok(t) => return Ok(t),
            Err(e) => e,
        };
        if let Some(parse) = err.downcast_mut::<Error>() {
            if parse.highlighted.is_none() {
                let msg = self.highlight_err(parse.span.start, Some(parse.span.end), &parse.msg);
                parse.highlighted = Some(msg);
            }
        }
        if err.is::<Error>() {
            return Err(err);
        }
        if let Some(notfound) = err.downcast_mut::<PackageNotFoundError>() {
            if notfound.highlighted.is_none() {
                let msg = self.highlight_err(
                    notfound.span.start,
                    Some(notfound.span.end),
                    &format!("{notfound}"),
                );
                notfound.highlighted = Some(msg);
            }
        }
        if err.is::<PackageNotFoundError>() {
            return Err(err);
        }

        if let Some(lex) = err.downcast_ref::<lex::Error>() {
            let pos = match lex {
                lex::Error::Unexpected(at, _)
                | lex::Error::UnterminatedComment(at)
                | lex::Error::Wanted { at, .. }
                | lex::Error::InvalidCharInId(at, _)
                | lex::Error::IdPartEmpty(at)
                | lex::Error::InvalidEscape(at, _) => *at,
            };
            let msg = self.highlight_err(pos, None, lex);
            bail!("{msg}")
        }

        if let Some(sort) = err.downcast_mut::<toposort::Error>() {
            if sort.highlighted().is_none() {
                let span = match sort {
                    toposort::Error::NonexistentDep { span, .. }
                    | toposort::Error::Cycle { span, .. } => *span,
                };
                let highlighted = self.highlight_err(span.start, Some(span.end), &sort);
                sort.set_highlighted(highlighted);
            }
        }

        Err(err)
    }

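    /// Renders a rustc-style diagnostic for the span starting at `start`: the
    /// error message, a `--> file:line:col` arrow, the offending source line,
    /// and a `^---` marker underlining the highlighted range when `end` is
    /// provided.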
    fn highlight_err(&self, start: u32, end: Option<u32>, err: impl fmt::Display) -> String {
        let src = self.source_for_offset(start);
        let start = src.to_relative_offset(start);
        let end = end.map(|end| src.to_relative_offset(end));
        let (line, col) = src.linecol(start);
        let snippet = src.contents.lines().nth(line).unwrap_or("");
        let mut msg = format!(
            "\
{err}
     --> {file}:{line}:{col}
      |
 {line:4} | {snippet}
      | {marker:>0$}",
            col + 1,
            file = src.path.display(),
            line = line + 1,
            col = col + 1,
            marker = "^",
        );
        if let Some(end) = end {
            if let Some(s) = src.contents.get(start..end) {
                for _ in s.chars().skip(1) {
                    msg.push('-');
                }
            }
        }
        return msg;
    }

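    /// Renders the location of `span` as a `file:line:col` string with
    /// 1-based line and column numbers.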
    pub(crate) fn render_location(&self, span: Span) -> String {
        let src = self.source_for_offset(span.start);
        let start = src.to_relative_offset(span.start);
        let (line, col) = src.linecol(start);
        format!(
            "{file}:{line}:{col}",
            file = src.path.display(),
            line = line + 1,
            col = col + 1,
        )
    }

    fn source_for_offset(&self, start: u32) -> &Source {
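        // Binary search on each source's starting offset: `Ok(i)` means
        // `start` is the first byte of source `i`, while `Err(i)` means it
        // falls inside the source just before the insertion point (`i - 1`).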
        let i = match self.sources.binary_search_by_key(&start, |src| src.offset) {
            Ok(i) => i,
            Err(i) => i - 1,
        };
        &self.sources[i]
    }

    /// Returns an iterator over all filenames added to this source map.
    pub fn source_files(&self) -> impl Iterator<Item = &Path> {
        self.sources.iter().map(|src| src.path.as_path())
    }
}

impl Source {
    fn to_relative_offset(&self, offset: u32) -> usize {
        usize::try_from(offset - self.offset).unwrap()
    }

    fn linecol(&self, relative_offset: usize) -> (usize, usize) {
        let mut cur = 0;
        // Use split_terminator instead of lines so that if there is a `\r`,
        // it is included in the offset calculation. The `+1` values below
        // account for the `\n`.
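        // For example, with contents "ab\ncd" a relative offset of 3 lands on
        // line index 1 (`cd`) at column 0.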
        for (i, line) in self.contents.split_terminator('\n').enumerate() {
            if cur + line.len() + 1 > relative_offset {
                return (i, relative_offset - cur);
            }
            cur += line.len() + 1;
        }
        (self.contents.lines().count(), 0)
    }
}

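/// A `use` path parsed by [`parse_use_path`], either a bare name or a path
/// qualified with a package such as `namespace:package/item`.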
pub enum ParsedUsePath {
    Name(String),
    Package(crate::PackageName, String),
}

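/// Parses a `use` path string, e.g. `foo` or `wasi:clocks/wall-clock`, into
/// its [`ParsedUsePath`] form, rejecting any trailing tokens.
///
/// A bare identifier becomes [`ParsedUsePath::Name`], while a
/// package-qualified path becomes [`ParsedUsePath::Package`].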
pub fn parse_use_path(s: &str) -> Result<ParsedUsePath> {
    let mut tokens = Tokenizer::new(s, 0, None)?;
    let path = UsePath::parse(&mut tokens)?;
    if tokens.next()?.is_some() {
        bail!("trailing tokens in path specifier");
    }
    Ok(match path {
        UsePath::Id(id) => ParsedUsePath::Name(id.name.to_string()),
        UsePath::Package { id, name } => {
            ParsedUsePath::Package(id.package_name(), name.name.to_string())
        }
    })
}