// wit_parser — ast.rs

1use crate::{Error, PackageNotFoundError, UnresolvedPackageGroup};
2use anyhow::{Context, Result, bail};
3use lex::{Span, Token, Tokenizer};
4use semver::Version;
5use std::borrow::Cow;
6use std::fmt;
7use std::mem;
8use std::path::{Path, PathBuf};
9
10pub mod lex;
11
12pub use resolve::Resolver;
13mod resolve;
14pub mod toposort;
15
16pub use lex::validate_id;
17
/// Representation of a single WIT `*.wit` file and nested packages.
struct PackageFile<'a> {
    /// Optional `package foo:bar;` header naming this file's package.
    package_id: Option<PackageName<'a>>,
    /// All other AST items (interfaces, worlds, uses, nested packages).
    decl_list: DeclList<'a>,
}
25
26impl<'a> PackageFile<'a> {
27    /// Parse a standalone file represented by `tokens`.
28    ///
29    /// This will optionally start with `package foo:bar;` and then will have a
30    /// list of ast items after it.
31    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
32        let mut package_name_tokens_peek = tokens.clone();
33        let docs = parse_docs(&mut package_name_tokens_peek)?;
34
35        // Parse `package foo:bar;` but throw it out if it's actually
36        // `package foo:bar { ... }` since that's an ast item instead.
37        let package_id = if package_name_tokens_peek.eat(Token::Package)? {
38            let name = PackageName::parse(&mut package_name_tokens_peek, docs)?;
39            if package_name_tokens_peek.eat(Token::Semicolon)? {
40                *tokens = package_name_tokens_peek;
41                Some(name)
42            } else {
43                None
44            }
45        } else {
46            None
47        };
48        let decl_list = DeclList::parse_until(tokens, None)?;
49        Ok(PackageFile {
50            package_id,
51            decl_list,
52        })
53    }
54
55    /// Parse a nested package of the form `package foo:bar { ... }`
56    fn parse_nested(
57        tokens: &mut Tokenizer<'a>,
58        docs: Docs<'a>,
59        attributes: Vec<Attribute<'a>>,
60    ) -> Result<Self> {
61        let span = tokens.expect(Token::Package)?;
62        if !attributes.is_empty() {
63            bail!(Error::new(
64                span,
65                format!("cannot place attributes on nested packages"),
66            ));
67        }
68        let package_id = PackageName::parse(tokens, docs)?;
69        tokens.expect(Token::LeftBrace)?;
70        let decl_list = DeclList::parse_until(tokens, Some(Token::RightBrace))?;
71        Ok(PackageFile {
72            package_id: Some(package_id),
73            decl_list,
74        })
75    }
76}
77
/// Stores all of the declarations in a package's scope. In AST terms, this
/// means everything except the `package` declaration that demarcates a package
/// scope. In the traditional implicit format, these are all of the declarations
/// non-`package` declarations in the file:
///
/// ```wit
/// package foo:name;
///
/// /* START DECL LIST */
/// // Some comment...
/// interface i {}
/// world w {}
/// /* END DECL LIST */
/// ```
///
/// In the nested package style, a [`DeclList`] is everything inside of each
/// `package` element's brackets:
///
/// ```wit
/// package foo:name {
///   /* START FIRST DECL LIST */
///   // Some comment...
///   interface i {}
///   world w {}
///   /* END FIRST DECL LIST */
/// }
///
/// package bar:name {
///   /* START SECOND DECL LIST */
///   // Some comment...
///   interface i {}
///   world w {}
///   /* END SECOND DECL LIST */
/// }
/// ```
#[derive(Default)]
pub struct DeclList<'a> {
    /// The declarations, in source order.
    items: Vec<AstItem<'a>>,
}
117
impl<'a> DeclList<'a> {
    /// Parses declarations from `tokens` until `end` is eaten, or until
    /// end-of-input when `end` is `None`. Doc comments preceding each item
    /// are collected and attached to it.
    fn parse_until(tokens: &mut Tokenizer<'a>, end: Option<Token>) -> Result<DeclList<'a>> {
        let mut items = Vec::new();
        let mut docs = parse_docs(tokens)?;
        loop {
            match end {
                Some(end) => {
                    if tokens.eat(end)? {
                        break;
                    }
                }
                None => {
                    // No explicit terminator: stop at end-of-input. The clone
                    // peeks at the next token without consuming it.
                    if tokens.clone().next()?.is_none() {
                        break;
                    }
                }
            }
            items.push(AstItem::parse(tokens, docs)?);
            // Docs for the *next* item must be consumed before the
            // end-of-list check at the top of the loop.
            docs = parse_docs(tokens)?;
        }
        Ok(DeclList { items })
    }

    /// Invokes `f` for every `use`/`include`/extern path referenced by this
    /// declaration list, recursing into nested packages.
    ///
    /// The callback receives: the name of the enclosing item (if any), the
    /// path's attributes, the path itself, the imported names (for `use`),
    /// and whether the path must resolve to a world or an interface.
    fn for_each_path<'b>(
        &'b self,
        f: &mut dyn FnMut(
            Option<&'b Id<'a>>,
            &'b [Attribute<'a>],
            &'b UsePath<'a>,
            Option<&'b [UseName<'a>]>,
            WorldOrInterface,
        ) -> Result<()>,
    ) -> Result<()> {
        for item in self.items.iter() {
            match item {
                AstItem::World(world) => {
                    // Visit imports here first before exports to help preserve
                    // round-tripping of documents because printing a world puts
                    // imports first but textually they can be listed with
                    // exports first.
                    let mut imports = Vec::new();
                    let mut exports = Vec::new();
                    for item in world.items.iter() {
                        match item {
                            WorldItem::Use(u) => f(
                                None,
                                &u.attributes,
                                &u.from,
                                Some(&u.names),
                                WorldOrInterface::Interface,
                            )?,
                            WorldItem::Include(i) => f(
                                Some(&world.name),
                                &i.attributes,
                                &i.from,
                                None,
                                WorldOrInterface::World,
                            )?,
                            WorldItem::Type(_) => {}
                            WorldItem::Import(Import {
                                kind, attributes, ..
                            }) => imports.push((kind, attributes)),
                            WorldItem::Export(Export {
                                kind, attributes, ..
                            }) => exports.push((kind, attributes)),
                        }
                    }

                    // Shared visitor for both import and export extern kinds:
                    // inline interfaces get their `use`s visited, plain paths
                    // are visited directly, and functions reference no paths.
                    let mut visit_kind =
                        |kind: &'b ExternKind<'a>, attrs: &'b [Attribute<'a>]| match kind {
                            ExternKind::Interface(_, items) => {
                                for item in items {
                                    match item {
                                        InterfaceItem::Use(u) => f(
                                            None,
                                            &u.attributes,
                                            &u.from,
                                            Some(&u.names),
                                            WorldOrInterface::Interface,
                                        )?,
                                        _ => {}
                                    }
                                }
                                Ok(())
                            }
                            ExternKind::Path(path) => {
                                f(None, attrs, path, None, WorldOrInterface::Interface)
                            }
                            ExternKind::Func(..) => Ok(()),
                        };

                    for (kind, attrs) in imports {
                        visit_kind(kind, attrs)?;
                    }
                    for (kind, attrs) in exports {
                        visit_kind(kind, attrs)?;
                    }
                }
                AstItem::Interface(i) => {
                    for item in i.items.iter() {
                        match item {
                            InterfaceItem::Use(u) => f(
                                Some(&i.name),
                                &u.attributes,
                                &u.from,
                                Some(&u.names),
                                WorldOrInterface::Interface,
                            )?,
                            _ => {}
                        }
                    }
                }
                AstItem::Use(u) => {
                    // At the top-level, we don't know if this is a world or an interface
                    // It is up to the resolver to decide how to handle this ambiguity.
                    f(
                        None,
                        &u.attributes,
                        &u.item,
                        None,
                        WorldOrInterface::Unknown,
                    )?;
                }

                // Nested packages: recurse into their own decl lists.
                AstItem::Package(pkg) => pkg.decl_list.for_each_path(f)?,
            }
        }
        Ok(())
    }
}
248
/// A single top-level declaration within a package scope.
enum AstItem<'a> {
    Interface(Interface<'a>),
    World(World<'a>),
    /// A top-level `use path [as name];` statement.
    Use(ToplevelUse<'a>),
    /// A nested `package ns:name { ... }` block.
    Package(PackageFile<'a>),
}
255
256impl<'a> AstItem<'a> {
257    fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
258        let attributes = Attribute::parse_list(tokens)?;
259        match tokens.clone().next()? {
260            Some((_span, Token::Interface)) => {
261                Interface::parse(tokens, docs, attributes).map(Self::Interface)
262            }
263            Some((_span, Token::World)) => World::parse(tokens, docs, attributes).map(Self::World),
264            Some((_span, Token::Use)) => ToplevelUse::parse(tokens, attributes).map(Self::Use),
265            Some((_span, Token::Package)) => {
266                PackageFile::parse_nested(tokens, docs, attributes).map(Self::Package)
267            }
268            other => Err(err_expected(tokens, "`world`, `interface` or `use`", other).into()),
269        }
270    }
271}
272
/// An AST-level package name: `namespace:name` with an optional `@version`.
#[derive(Debug, Clone)]
struct PackageName<'a> {
    docs: Docs<'a>,
    /// Span covering the whole `ns:name[@version]` text.
    span: Span,
    namespace: Id<'a>,
    name: Id<'a>,
    /// Optional semver suffix, with the span of the version text.
    version: Option<(Span, Version)>,
}
281
282impl<'a> PackageName<'a> {
283    fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
284        let namespace = parse_id(tokens)?;
285        tokens.expect(Token::Colon)?;
286        let name = parse_id(tokens)?;
287        let version = parse_opt_version(tokens)?;
288        Ok(PackageName {
289            docs,
290            span: Span {
291                start: namespace.span.start,
292                end: version
293                    .as_ref()
294                    .map(|(s, _)| s.end)
295                    .unwrap_or(name.span.end),
296            },
297            namespace,
298            name,
299            version,
300        })
301    }
302
303    fn package_name(&self) -> crate::PackageName {
304        crate::PackageName {
305            namespace: self.namespace.name.to_string(),
306            name: self.name.name.to_string(),
307            version: self.version.as_ref().map(|(_, v)| v.clone()),
308        }
309    }
310}
311
/// A package-scope `use path [as name];` statement.
struct ToplevelUse<'a> {
    /// Span of the `use` keyword.
    span: Span,
    attributes: Vec<Attribute<'a>>,
    /// The path being brought into scope.
    item: UsePath<'a>,
    /// Optional rename introduced by `as`.
    as_: Option<Id<'a>>,
}
318
319impl<'a> ToplevelUse<'a> {
320    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
321        let span = tokens.expect(Token::Use)?;
322        let item = UsePath::parse(tokens)?;
323        let as_ = if tokens.eat(Token::As)? {
324            Some(parse_id(tokens)?)
325        } else {
326            None
327        };
328        tokens.expect_semicolon()?;
329        Ok(ToplevelUse {
330            span,
331            attributes,
332            item,
333            as_,
334        })
335    }
336}
337
/// A `world <name> { ... }` declaration.
struct World<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    /// The world's imports, exports, uses, types, and includes.
    items: Vec<WorldItem<'a>>,
}
344
345impl<'a> World<'a> {
346    fn parse(
347        tokens: &mut Tokenizer<'a>,
348        docs: Docs<'a>,
349        attributes: Vec<Attribute<'a>>,
350    ) -> Result<Self> {
351        tokens.expect(Token::World)?;
352        let name = parse_id(tokens)?;
353        let items = Self::parse_items(tokens)?;
354        Ok(World {
355            docs,
356            attributes,
357            name,
358            items,
359        })
360    }
361
362    fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<WorldItem<'a>>> {
363        tokens.expect(Token::LeftBrace)?;
364        let mut items = Vec::new();
365        loop {
366            let docs = parse_docs(tokens)?;
367            if tokens.eat(Token::RightBrace)? {
368                break;
369            }
370            let attributes = Attribute::parse_list(tokens)?;
371            items.push(WorldItem::parse(tokens, docs, attributes)?);
372        }
373        Ok(items)
374    }
375}
376
/// A single item within a `world` body.
enum WorldItem<'a> {
    Import(Import<'a>),
    Export(Export<'a>),
    Use(Use<'a>),
    /// An inline type definition.
    Type(TypeDef<'a>),
    /// An `include path [with { ... }];` statement.
    Include(Include<'a>),
}
384
385impl<'a> WorldItem<'a> {
386    fn parse(
387        tokens: &mut Tokenizer<'a>,
388        docs: Docs<'a>,
389        attributes: Vec<Attribute<'a>>,
390    ) -> Result<WorldItem<'a>> {
391        match tokens.clone().next()? {
392            Some((_span, Token::Import)) => {
393                Import::parse(tokens, docs, attributes).map(WorldItem::Import)
394            }
395            Some((_span, Token::Export)) => {
396                Export::parse(tokens, docs, attributes).map(WorldItem::Export)
397            }
398            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(WorldItem::Use),
399            Some((_span, Token::Type)) => {
400                TypeDef::parse(tokens, docs, attributes).map(WorldItem::Type)
401            }
402            Some((_span, Token::Flags)) => {
403                TypeDef::parse_flags(tokens, docs, attributes).map(WorldItem::Type)
404            }
405            Some((_span, Token::Resource)) => {
406                TypeDef::parse_resource(tokens, docs, attributes).map(WorldItem::Type)
407            }
408            Some((_span, Token::Record)) => {
409                TypeDef::parse_record(tokens, docs, attributes).map(WorldItem::Type)
410            }
411            Some((_span, Token::Variant)) => {
412                TypeDef::parse_variant(tokens, docs, attributes).map(WorldItem::Type)
413            }
414            Some((_span, Token::Enum)) => {
415                TypeDef::parse_enum(tokens, docs, attributes).map(WorldItem::Type)
416            }
417            Some((_span, Token::Include)) => {
418                Include::parse(tokens, attributes).map(WorldItem::Include)
419            }
420            other => Err(err_expected(
421                tokens,
422                "`import`, `export`, `include`, `use`, or type definition",
423                other,
424            )
425            .into()),
426        }
427    }
428}
429
/// An `import <kind>` declaration within a world.
struct Import<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    /// What is imported: an inline interface, a path, or a function.
    kind: ExternKind<'a>,
}
435
436impl<'a> Import<'a> {
437    fn parse(
438        tokens: &mut Tokenizer<'a>,
439        docs: Docs<'a>,
440        attributes: Vec<Attribute<'a>>,
441    ) -> Result<Import<'a>> {
442        tokens.expect(Token::Import)?;
443        let kind = ExternKind::parse(tokens)?;
444        Ok(Import {
445            docs,
446            attributes,
447            kind,
448        })
449    }
450}
451
/// An `export <kind>` declaration within a world.
struct Export<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    /// What is exported: an inline interface, a path, or a function.
    kind: ExternKind<'a>,
}
457
458impl<'a> Export<'a> {
459    fn parse(
460        tokens: &mut Tokenizer<'a>,
461        docs: Docs<'a>,
462        attributes: Vec<Attribute<'a>>,
463    ) -> Result<Export<'a>> {
464        tokens.expect(Token::Export)?;
465        let kind = ExternKind::parse(tokens)?;
466        Ok(Export {
467            docs,
468            attributes,
469            kind,
470        })
471    }
472}
473
/// The right-hand side of an `import`/`export` declaration.
enum ExternKind<'a> {
    /// `name: interface { ... }` — an inline, anonymous interface.
    Interface(Id<'a>, Vec<InterfaceItem<'a>>),
    /// A bare or fully-qualified path to a named interface.
    Path(UsePath<'a>),
    /// `name: [async] func(...)` — a single function.
    Func(Id<'a>, Func<'a>),
}
479
impl<'a> ExternKind<'a> {
    /// Parses what follows `import`/`export`: an inline function, an inline
    /// interface, or a (possibly qualified) path.
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<ExternKind<'a>> {
        // Create a copy of the token stream to test out if this is a function
        // or an interface import. In those situations the token stream gets
        // reset to the state of the clone and we continue down those paths.
        //
        // If neither a function nor an interface appears here though then the
        // clone is thrown away and the original token stream is parsed for an
        // interface. This will redo the original ID parse and the original
        // colon parse, but that shouldn't be too bad perf-wise.
        let mut clone = tokens.clone();
        let id = parse_id(&mut clone)?;
        if clone.eat(Token::Colon)? {
            // import foo: async? func(...)
            if clone.clone().eat(Token::Func)? || clone.clone().eat(Token::Async)? {
                // Commit the speculative parse before handing off to
                // `Func::parse`, which expects to see `[async] func` next.
                *tokens = clone;
                let ret = ExternKind::Func(id, Func::parse(tokens)?);
                tokens.expect_semicolon()?;
                return Ok(ret);
            }

            // import foo: interface { ... }
            if clone.eat(Token::Interface)? {
                *tokens = clone;
                return Ok(ExternKind::Interface(id, Interface::parse_items(tokens)?));
            }
        }

        // import foo
        // import foo/bar
        // import foo:bar/baz
        let ret = ExternKind::Path(UsePath::parse(tokens)?);
        tokens.expect_semicolon()?;
        Ok(ret)
    }

    /// Returns the span of the name or path identifying this extern item.
    fn span(&self) -> Span {
        match self {
            ExternKind::Interface(id, _) => id.span,
            ExternKind::Path(UsePath::Id(id)) => id.span,
            ExternKind::Path(UsePath::Package { name, .. }) => name.span,
            ExternKind::Func(id, _) => id.span,
        }
    }
}
525
/// An `interface <name> { ... }` declaration.
struct Interface<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    /// The interface's type definitions, functions, and uses.
    items: Vec<InterfaceItem<'a>>,
}
532
533impl<'a> Interface<'a> {
534    fn parse(
535        tokens: &mut Tokenizer<'a>,
536        docs: Docs<'a>,
537        attributes: Vec<Attribute<'a>>,
538    ) -> Result<Self> {
539        tokens.expect(Token::Interface)?;
540        let name = parse_id(tokens)?;
541        let items = Self::parse_items(tokens)?;
542        Ok(Interface {
543            docs,
544            attributes,
545            name,
546            items,
547        })
548    }
549
550    pub(super) fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<InterfaceItem<'a>>> {
551        tokens.expect(Token::LeftBrace)?;
552        let mut items = Vec::new();
553        loop {
554            let docs = parse_docs(tokens)?;
555            if tokens.eat(Token::RightBrace)? {
556                break;
557            }
558            let attributes = Attribute::parse_list(tokens)?;
559            items.push(InterfaceItem::parse(tokens, docs, attributes)?);
560        }
561        Ok(items)
562    }
563}
564
/// Whether a referenced path must resolve to a world, an interface, or is
/// not yet known at parse time.
#[derive(Debug)]
pub enum WorldOrInterface {
    World,
    Interface,
    Unknown,
}

/// A single item within an `interface` body.
enum InterfaceItem<'a> {
    /// A type definition (`type`, `record`, `flags`, `enum`, ...).
    TypeDef(TypeDef<'a>),
    /// A named function declaration.
    Func(NamedFunc<'a>),
    /// A `use path.{...};` statement.
    Use(Use<'a>),
}

/// A `use from.{names};` statement inside a world or interface.
struct Use<'a> {
    attributes: Vec<Attribute<'a>>,
    /// The path the names are imported from.
    from: UsePath<'a>,
    /// The imported names, each optionally renamed via `as`.
    names: Vec<UseName<'a>>,
}

/// The target of a `use`/`import`/`export`/`include` path.
#[derive(Debug)]
enum UsePath<'a> {
    /// A bare `foo`, referring to an item in the current package.
    Id(Id<'a>),
    /// A fully-qualified `ns:pkg/name[@version]` path.
    Package { id: PackageName<'a>, name: Id<'a> },
}
589
590impl<'a> UsePath<'a> {
591    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
592        let id = parse_id(tokens)?;
593        if tokens.eat(Token::Colon)? {
594            // `foo:bar/baz@1.0`
595            let namespace = id;
596            let pkg_name = parse_id(tokens)?;
597            tokens.expect(Token::Slash)?;
598            let name = parse_id(tokens)?;
599            let version = parse_opt_version(tokens)?;
600            Ok(UsePath::Package {
601                id: PackageName {
602                    docs: Default::default(),
603                    span: Span {
604                        start: namespace.span.start,
605                        end: pkg_name.span.end,
606                    },
607                    namespace,
608                    name: pkg_name,
609                    version,
610                },
611                name,
612            })
613        } else {
614            // `foo`
615            Ok(UsePath::Id(id))
616        }
617    }
618
619    fn name(&self) -> &Id<'a> {
620        match self {
621            UsePath::Id(id) => id,
622            UsePath::Package { name, .. } => name,
623        }
624    }
625}
626
/// One imported name in a `use` statement, optionally renamed via `as`.
struct UseName<'a> {
    name: Id<'a>,
    /// Local alias introduced by `as`, if present.
    as_: Option<Id<'a>>,
}
631
632impl<'a> Use<'a> {
633    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
634        tokens.expect(Token::Use)?;
635        let from = UsePath::parse(tokens)?;
636        tokens.expect(Token::Period)?;
637        tokens.expect(Token::LeftBrace)?;
638
639        let mut names = Vec::new();
640        while !tokens.eat(Token::RightBrace)? {
641            let mut name = UseName {
642                name: parse_id(tokens)?,
643                as_: None,
644            };
645            if tokens.eat(Token::As)? {
646                name.as_ = Some(parse_id(tokens)?);
647            }
648            names.push(name);
649            if !tokens.eat(Token::Comma)? {
650                tokens.expect(Token::RightBrace)?;
651                break;
652            }
653        }
654        tokens.expect_semicolon()?;
655        Ok(Use {
656            attributes,
657            from,
658            names,
659        })
660    }
661}
662
/// An `include path [with { a as b, ... }];` statement within a world.
struct Include<'a> {
    /// The world being included.
    from: UsePath<'a>,
    attributes: Vec<Attribute<'a>>,
    /// Renames from the `with { ... }` clause; empty when absent.
    names: Vec<IncludeName<'a>>,
}

/// A single `name as alias` rename within an `include ... with` clause.
struct IncludeName<'a> {
    name: Id<'a>,
    as_: Id<'a>,
}
673
674impl<'a> Include<'a> {
675    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
676        tokens.expect(Token::Include)?;
677        let from = UsePath::parse(tokens)?;
678
679        let names = if tokens.eat(Token::With)? {
680            parse_list(
681                tokens,
682                Token::LeftBrace,
683                Token::RightBrace,
684                |_docs, tokens| {
685                    let name = parse_id(tokens)?;
686                    tokens.expect(Token::As)?;
687                    let as_ = parse_id(tokens)?;
688                    Ok(IncludeName { name, as_ })
689                },
690            )?
691        } else {
692            tokens.expect_semicolon()?;
693            Vec::new()
694        };
695
696        Ok(Include {
697            attributes,
698            from,
699            names,
700        })
701    }
702}
703
/// An identifier in the source text, together with where it appeared.
#[derive(Debug, Clone)]
pub struct Id<'a> {
    name: &'a str,
    span: Span,
}
709
710impl<'a> From<&'a str> for Id<'a> {
711    fn from(s: &'a str) -> Id<'a> {
712        Id {
713            name: s.into(),
714            span: Span { start: 0, end: 0 },
715        }
716    }
717}
718
/// Documentation comments collected ahead of an item.
#[derive(Debug, Clone)]
pub struct Docs<'a> {
    /// One entry per doc-comment line; `Cow` allows normalized text.
    docs: Vec<Cow<'a, str>>,
    span: Span,
}
724
725impl<'a> Default for Docs<'a> {
726    fn default() -> Self {
727        Self {
728            docs: Default::default(),
729            span: Span { start: 0, end: 0 },
730        }
731    }
732}
733
/// A named type definition with its documentation and attributes.
struct TypeDef<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    ty: Type<'a>,
}

/// Every type form expressible in WIT. Primitive variants carry only the
/// span where they appeared; composite variants carry their payload.
enum Type<'a> {
    Bool(Span),
    U8(Span),
    U16(Span),
    U32(Span),
    U64(Span),
    S8(Span),
    S16(Span),
    S32(Span),
    S64(Span),
    F32(Span),
    F64(Span),
    Char(Span),
    String(Span),
    /// A reference to a named type.
    Name(Id<'a>),
    List(List<'a>),
    Map(Map<'a>),
    FixedSizeList(FixedSizeList<'a>),
    Handle(Handle<'a>),
    Resource(Resource<'a>),
    Record(Record<'a>),
    Flags(Flags<'a>),
    Variant(Variant<'a>),
    Tuple(Tuple<'a>),
    Enum(Enum<'a>),
    Option(Option_<'a>),
    Result(Result_<'a>),
    Future(Future<'a>),
    Stream(Stream<'a>),
    ErrorContext(Span),
}

/// A handle to a resource: either owning or borrowing.
enum Handle<'a> {
    Own { resource: Id<'a> },
    Borrow { resource: Id<'a> },
}
777
778impl Handle<'_> {
779    fn span(&self) -> Span {
780        match self {
781            Handle::Own { resource } | Handle::Borrow { resource } => resource.span,
782        }
783    }
784}
785
/// A `resource { ... }` type and its associated functions.
struct Resource<'a> {
    span: Span,
    funcs: Vec<ResourceFunc<'a>>,
}

/// A function declared within a resource body, classified by flavor.
enum ResourceFunc<'a> {
    Method(NamedFunc<'a>),
    Static(NamedFunc<'a>),
    Constructor(NamedFunc<'a>),
}
796
797impl<'a> ResourceFunc<'a> {
798    fn parse(
799        docs: Docs<'a>,
800        attributes: Vec<Attribute<'a>>,
801        tokens: &mut Tokenizer<'a>,
802    ) -> Result<Self> {
803        match tokens.clone().next()? {
804            Some((span, Token::Constructor)) => {
805                tokens.expect(Token::Constructor)?;
806                tokens.expect(Token::LeftParen)?;
807                let params = parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
808                    let name = parse_id(tokens)?;
809                    tokens.expect(Token::Colon)?;
810                    let ty = Type::parse(tokens)?;
811                    Ok((name, ty))
812                })?;
813                let result = if tokens.eat(Token::RArrow)? {
814                    let ty = Type::parse(tokens)?;
815                    Some(ty)
816                } else {
817                    None
818                };
819                tokens.expect_semicolon()?;
820                Ok(ResourceFunc::Constructor(NamedFunc {
821                    docs,
822                    attributes,
823                    name: Id {
824                        span,
825                        name: "constructor",
826                    },
827                    func: Func {
828                        span,
829                        async_: false,
830                        params,
831                        result,
832                    },
833                }))
834            }
835            Some((_span, Token::Id | Token::ExplicitId)) => {
836                let name = parse_id(tokens)?;
837                tokens.expect(Token::Colon)?;
838                let ctor = if tokens.eat(Token::Static)? {
839                    ResourceFunc::Static
840                } else {
841                    ResourceFunc::Method
842                };
843                let func = Func::parse(tokens)?;
844                tokens.expect_semicolon()?;
845                Ok(ctor(NamedFunc {
846                    docs,
847                    attributes,
848                    name,
849                    func,
850                }))
851            }
852            other => Err(err_expected(tokens, "`constructor` or identifier", other).into()),
853        }
854    }
855
856    fn named_func(&self) -> &NamedFunc<'a> {
857        use ResourceFunc::*;
858        match self {
859            Method(f) | Static(f) | Constructor(f) => f,
860        }
861    }
862}
863
/// A `record { ... }` type: a collection of named fields.
struct Record<'a> {
    span: Span,
    fields: Vec<Field<'a>>,
}

/// One named, typed field within a record.
struct Field<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
    ty: Type<'a>,
}

/// A `flags { ... }` type: a set of named bit-flags.
struct Flags<'a> {
    span: Span,
    flags: Vec<Flag<'a>>,
}

/// One named flag within a `flags` type.
struct Flag<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
}

/// A `variant { ... }` type: named cases with optional payloads.
struct Variant<'a> {
    span: Span,
    cases: Vec<Case<'a>>,
}

/// One variant case, with an optional payload type.
struct Case<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
    ty: Option<Type<'a>>,
}

/// An `enum { ... }` type: named cases without payloads.
struct Enum<'a> {
    span: Span,
    cases: Vec<EnumCase<'a>>,
}

/// One payload-less enum case.
struct EnumCase<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
}

/// An `option<T>` type.
struct Option_<'a> {
    span: Span,
    ty: Box<Type<'a>>,
}

/// A `list<T>` type.
struct List<'a> {
    span: Span,
    ty: Box<Type<'a>>,
}

/// A `map<K, V>` type.
struct Map<'a> {
    span: Span,
    key: Box<Type<'a>>,
    value: Box<Type<'a>>,
}

/// A `list<T, N>` type with a statically-known element count.
struct FixedSizeList<'a> {
    span: Span,
    ty: Box<Type<'a>>,
    size: u32,
}

/// A `future` or `future<T>` type; `None` means no payload.
struct Future<'a> {
    span: Span,
    ty: Option<Box<Type<'a>>>,
}

/// A `tuple<...>` type.
struct Tuple<'a> {
    span: Span,
    types: Vec<Type<'a>>,
}

/// A `result`, `result<T>`, `result<_, E>`, or `result<T, E>` type.
struct Result_<'a> {
    span: Span,
    ok: Option<Box<Type<'a>>>,
    err: Option<Box<Type<'a>>>,
}

/// A `stream` or `stream<T>` type; `None` means no payload.
struct Stream<'a> {
    span: Span,
    ty: Option<Box<Type<'a>>>,
}

/// A function together with its name, docs, and attributes.
struct NamedFunc<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    func: Func<'a>,
}

/// Parameter list: `(name, type)` pairs in declaration order.
type ParamList<'a> = Vec<(Id<'a>, Type<'a>)>;

/// The signature of a function: parameters and an optional result.
struct Func<'a> {
    /// Span of the `func` keyword.
    span: Span,
    /// Whether the function was declared `async`.
    async_: bool,
    params: ParamList<'a>,
    result: Option<Type<'a>>,
}
964
965impl<'a> Func<'a> {
966    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Func<'a>> {
967        fn parse_params<'a>(tokens: &mut Tokenizer<'a>, left_paren: bool) -> Result<ParamList<'a>> {
968            if left_paren {
969                tokens.expect(Token::LeftParen)?;
970            };
971            parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
972                let name = parse_id(tokens)?;
973                tokens.expect(Token::Colon)?;
974                let ty = Type::parse(tokens)?;
975                Ok((name, ty))
976            })
977        }
978
979        let async_ = tokens.eat(Token::Async)?;
980        let span = tokens.expect(Token::Func)?;
981        let params = parse_params(tokens, true)?;
982        let result = if tokens.eat(Token::RArrow)? {
983            let ty = Type::parse(tokens)?;
984            Some(ty)
985        } else {
986            None
987        };
988        Ok(Func {
989            span,
990            async_,
991            params,
992            result,
993        })
994    }
995}
996
997impl<'a> InterfaceItem<'a> {
998    fn parse(
999        tokens: &mut Tokenizer<'a>,
1000        docs: Docs<'a>,
1001        attributes: Vec<Attribute<'a>>,
1002    ) -> Result<InterfaceItem<'a>> {
1003        match tokens.clone().next()? {
1004            Some((_span, Token::Type)) => {
1005                TypeDef::parse(tokens, docs, attributes).map(InterfaceItem::TypeDef)
1006            }
1007            Some((_span, Token::Flags)) => {
1008                TypeDef::parse_flags(tokens, docs, attributes).map(InterfaceItem::TypeDef)
1009            }
1010            Some((_span, Token::Enum)) => {
1011                TypeDef::parse_enum(tokens, docs, attributes).map(InterfaceItem::TypeDef)
1012            }
1013            Some((_span, Token::Variant)) => {
1014                TypeDef::parse_variant(tokens, docs, attributes).map(InterfaceItem::TypeDef)
1015            }
1016            Some((_span, Token::Resource)) => {
1017                TypeDef::parse_resource(tokens, docs, attributes).map(InterfaceItem::TypeDef)
1018            }
1019            Some((_span, Token::Record)) => {
1020                TypeDef::parse_record(tokens, docs, attributes).map(InterfaceItem::TypeDef)
1021            }
1022            Some((_span, Token::Id)) | Some((_span, Token::ExplicitId)) => {
1023                NamedFunc::parse(tokens, docs, attributes).map(InterfaceItem::Func)
1024            }
1025            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(InterfaceItem::Use),
1026            other => Err(err_expected(tokens, "`type`, `resource` or `func`", other).into()),
1027        }
1028    }
1029}
1030
impl<'a> TypeDef<'a> {
    /// Parses a type alias: `type <id> = <ty>;`.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Type)?;
        let name = parse_id(tokens)?;
        tokens.expect(Token::Equals)?;
        let ty = Type::parse(tokens)?;
        tokens.expect_semicolon()?;
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses `flags <id> { <flag-name>, ... }`.
    fn parse_flags(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Flags)?;
        let name = parse_id(tokens)?;
        let ty = Type::Flags(Flags {
            // The flags type's span points at its name.
            span: name.span,
            flags: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    Ok(Flag { docs, name })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses a resource declaration, either the bare form `resource <id>;`
    /// or the bodied form `resource <id> { <func>... }` where each item in
    /// the braces is a resource function with its own docs/attributes.
    fn parse_resource(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Resource)?;
        let name = parse_id(tokens)?;
        let mut funcs = Vec::new();
        if tokens.eat(Token::LeftBrace)? {
            while !tokens.eat(Token::RightBrace)? {
                let docs = parse_docs(tokens)?;
                let attributes = Attribute::parse_list(tokens)?;
                funcs.push(ResourceFunc::parse(docs, attributes, tokens)?);
            }
        } else {
            // No body, so a terminating semicolon is required.
            tokens.expect_semicolon()?;
        }
        let ty = Type::Resource(Resource {
            span: name.span,
            funcs,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses `record <id> { <field>: <ty>, ... }`.
    fn parse_record(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Record)?;
        let name = parse_id(tokens)?;
        let ty = Type::Record(Record {
            span: name.span,
            fields: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::Colon)?;
                    let ty = Type::parse(tokens)?;
                    Ok(Field { docs, name, ty })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses `variant <id> { <case>, <case>(<ty>), ... }` where each case
    /// has an optional parenthesized payload type.
    fn parse_variant(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Variant)?;
        let name = parse_id(tokens)?;
        let ty = Type::Variant(Variant {
            span: name.span,
            cases: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    // A `(` after the case name introduces a payload type.
                    let ty = if tokens.eat(Token::LeftParen)? {
                        let ty = Type::parse(tokens)?;
                        tokens.expect(Token::RightParen)?;
                        Some(ty)
                    } else {
                        None
                    };
                    Ok(Case { docs, name, ty })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses `enum <id> { <case-name>, ... }` (payload-less cases only).
    fn parse_enum(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Enum)?;
        let name = parse_id(tokens)?;
        let ty = Type::Enum(Enum {
            span: name.span,
            cases: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    Ok(EnumCase { docs, name })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }
}
1196
1197impl<'a> NamedFunc<'a> {
1198    fn parse(
1199        tokens: &mut Tokenizer<'a>,
1200        docs: Docs<'a>,
1201        attributes: Vec<Attribute<'a>>,
1202    ) -> Result<Self> {
1203        let name = parse_id(tokens)?;
1204        tokens.expect(Token::Colon)?;
1205        let func = Func::parse(tokens)?;
1206        tokens.expect_semicolon()?;
1207        Ok(NamedFunc {
1208            docs,
1209            attributes,
1210            name,
1211            func,
1212        })
1213    }
1214}
1215
1216fn parse_id<'a>(tokens: &mut Tokenizer<'a>) -> Result<Id<'a>> {
1217    match tokens.next()? {
1218        Some((span, Token::Id)) => Ok(Id {
1219            name: tokens.parse_id(span)?,
1220            span,
1221        }),
1222        Some((span, Token::ExplicitId)) => Ok(Id {
1223            name: tokens.parse_explicit_id(span)?,
1224            span,
1225        }),
1226        other => Err(err_expected(tokens, "an identifier or string", other).into()),
1227    }
1228}
1229
1230fn parse_opt_version(tokens: &mut Tokenizer<'_>) -> Result<Option<(Span, Version)>> {
1231    if tokens.eat(Token::At)? {
1232        parse_version(tokens).map(Some)
1233    } else {
1234        Ok(None)
1235    }
1236}
1237
/// Parses a semver version: `<major>.<minor>.<patch>` plus optional
/// pre-release (`-...`) and build-metadata (`+...`) suffixes.
///
/// The token stream is only loosely chomped here; the collected span's text
/// is handed to the `semver` crate for actual validation, and its error is
/// surfaced with the span attached.
fn parse_version(tokens: &mut Tokenizer<'_>) -> Result<(Span, Version)> {
    let start = tokens.expect(Token::Integer)?.start;
    tokens.expect(Token::Period)?;
    tokens.expect(Token::Integer)?;
    tokens.expect(Token::Period)?;
    let end = tokens.expect(Token::Integer)?.end;
    let mut span = Span { start, end };
    // Optionally extend the span over `-<pre-release>` and `+<build>`.
    eat_ids(tokens, Token::Minus, &mut span)?;
    eat_ids(tokens, Token::Plus, &mut span)?;
    let string = tokens.get_span(span);
    let version = Version::parse(string).map_err(|e| Error::new(span, e.to_string()))?;
    return Ok((span, version));

    // According to `semver.org` this is what we're parsing:
    //
    // ```ebnf
    // <pre-release> ::= <dot-separated pre-release identifiers>
    //
    // <dot-separated pre-release identifiers> ::= <pre-release identifier>
    //                                           | <pre-release identifier> "." <dot-separated pre-release identifiers>
    //
    // <build> ::= <dot-separated build identifiers>
    //
    // <dot-separated build identifiers> ::= <build identifier>
    //                                     | <build identifier> "." <dot-separated build identifiers>
    //
    // <pre-release identifier> ::= <alphanumeric identifier>
    //                            | <numeric identifier>
    //
    // <build identifier> ::= <alphanumeric identifier>
    //                      | <digits>
    //
    // <alphanumeric identifier> ::= <non-digit>
    //                             | <non-digit> <identifier characters>
    //                             | <identifier characters> <non-digit>
    //                             | <identifier characters> <non-digit> <identifier characters>
    //
    // <numeric identifier> ::= "0"
    //                        | <positive digit>
    //                        | <positive digit> <digits>
    //
    // <identifier characters> ::= <identifier character>
    //                           | <identifier character> <identifier characters>
    //
    // <identifier character> ::= <digit>
    //                          | <non-digit>
    //
    // <non-digit> ::= <letter>
    //               | "-"
    //
    // <digits> ::= <digit>
    //            | <digit> <digits>
    // ```
    //
    // This is loosely based on WIT syntax and an approximation is parsed here:
    //
    // * This function starts by parsing the optional leading `-` and `+` which
    //   indicates pre-release and build metadata.
    // * Afterwards all of $id, $integer, `-`, and `.` are chomped. The only
    //   exception here is that if `.` isn't followed by $id, $integer, or `-`
    //   then it's assumed that it's something like `use a:b@1.0.0-a.{...}`
    //   where the `.` is part of WIT syntax, not semver.
    //
    // Note that this additionally doesn't try to return any first-class errors.
    // Instead this bails out on something unrecognized for something else in
    // the system to return an error.
    fn eat_ids(tokens: &mut Tokenizer<'_>, prefix: Token, end: &mut Span) -> Result<()> {
        if !tokens.eat(prefix)? {
            return Ok(());
        }
        loop {
            // Look ahead on a clone; `tokens` is only advanced once we're
            // sure the peeked token(s) belong to the version string.
            let mut clone = tokens.clone();
            match clone.next()? {
                Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                    end.end = span.end;
                    *tokens = clone;
                }
                Some((_span, Token::Period)) => match clone.next()? {
                    Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                        end.end = span.end;
                        *tokens = clone;
                    }
                    // `.` not followed by a version component: leave it for
                    // the surrounding WIT syntax.
                    _ => break Ok(()),
                },
                _ => break Ok(()),
            }
        }
    }
}
1327
/// Collects the run of comment tokens immediately preceding the next
/// significant token into a `Docs`, leaving `tokens` positioned at that
/// significant token.
fn parse_docs<'a>(tokens: &mut Tokenizer<'a>) -> Result<Docs<'a>> {
    let mut docs = Docs::default();
    // Walk raw tokens (including whitespace and comments) on a clone;
    // `tokens` is only advanced after each token we decide to absorb.
    let mut clone = tokens.clone();
    let mut started = false;
    while let Some((span, token)) = clone.next_raw()? {
        match token {
            Token::Whitespace => {}
            Token::Comment => {
                let comment = tokens.get_span(span);
                if !started {
                    // The docs span starts at the first comment seen.
                    docs.span.start = span.start;
                    started = true;
                }
                // Exclude trailing whitespace (e.g. the comment's newline)
                // from the recorded end of the docs span.
                let trailing_ws = comment
                    .bytes()
                    .rev()
                    .take_while(|ch| ch.is_ascii_whitespace())
                    .count();
                docs.span.end = span.end - (trailing_ws as u32);
                docs.docs.push(comment.into());
            }
            // First non-comment, non-whitespace token ends the docs block.
            _ => break,
        };
        *tokens = clone.clone();
    }
    Ok(docs)
}
1355
impl<'a> Type<'a> {
    /// Parses a single type expression via recursive descent.
    ///
    /// Consumes one leading token to decide the type's shape, then parses any
    /// generic arguments (recursing for nested types such as `list<list<u8>>`).
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
        match tokens.next()? {
            Some((span, Token::U8)) => Ok(Type::U8(span)),
            Some((span, Token::U16)) => Ok(Type::U16(span)),
            Some((span, Token::U32)) => Ok(Type::U32(span)),
            Some((span, Token::U64)) => Ok(Type::U64(span)),
            Some((span, Token::S8)) => Ok(Type::S8(span)),
            Some((span, Token::S16)) => Ok(Type::S16(span)),
            Some((span, Token::S32)) => Ok(Type::S32(span)),
            Some((span, Token::S64)) => Ok(Type::S64(span)),
            Some((span, Token::F32)) => Ok(Type::F32(span)),
            Some((span, Token::F64)) => Ok(Type::F64(span)),
            Some((span, Token::Char)) => Ok(Type::Char(span)),

            // tuple<T, U, ...>
            Some((span, Token::Tuple)) => {
                let types = parse_list(
                    tokens,
                    Token::LessThan,
                    Token::GreaterThan,
                    |_docs, tokens| Type::parse(tokens),
                )?;
                Ok(Type::Tuple(Tuple { span, types }))
            }

            Some((span, Token::Bool)) => Ok(Type::Bool(span)),
            Some((span, Token::String_)) => Ok(Type::String(span)),

            // list<T>
            // list<T, N>
            Some((span, Token::List)) => {
                tokens.expect(Token::LessThan)?;
                let ty = Type::parse(tokens)?;
                // An optional `, N` turns this into a fixed-size list.
                let size = if tokens.eat(Token::Comma)? {
                    let number = tokens.next()?;
                    if let Some((span, Token::Integer)) = number {
                        let size: u32 = tokens.get_span(span).parse()?;
                        Some(size)
                    } else {
                        return Err(err_expected(tokens, "fixed size", number).into());
                    }
                } else {
                    None
                };
                tokens.expect(Token::GreaterThan)?;
                if let Some(size) = size {
                    Ok(Type::FixedSizeList(FixedSizeList {
                        span,
                        ty: Box::new(ty),
                        size,
                    }))
                } else {
                    Ok(Type::List(List {
                        span,
                        ty: Box::new(ty),
                    }))
                }
            }

            // map<K, V>
            Some((span, Token::Map)) => {
                tokens.expect(Token::LessThan)?;
                let key = Type::parse(tokens)?;
                tokens.expect(Token::Comma)?;
                let value = Type::parse(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Map(Map {
                    span,
                    key: Box::new(key),
                    value: Box::new(value),
                }))
            }

            // option<T>
            Some((span, Token::Option_)) => {
                tokens.expect(Token::LessThan)?;
                let ty = Type::parse(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Option(Option_ {
                    span,
                    ty: Box::new(ty),
                }))
            }

            // result<T, E>
            // result<_, E>
            // result<T>
            // result
            Some((span, Token::Result_)) => {
                let mut ok = None;
                let mut err = None;

                if tokens.eat(Token::LessThan)? {
                    // `_` in the ok position means "no ok type, only an err".
                    if tokens.eat(Token::Underscore)? {
                        tokens.expect(Token::Comma)?;
                        err = Some(Box::new(Type::parse(tokens)?));
                    } else {
                        ok = Some(Box::new(Type::parse(tokens)?));
                        if tokens.eat(Token::Comma)? {
                            err = Some(Box::new(Type::parse(tokens)?));
                        }
                    };
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Result(Result_ { span, ok, err }))
            }

            // future<T>
            // future
            Some((span, Token::Future)) => {
                let mut ty = None;

                if tokens.eat(Token::LessThan)? {
                    ty = Some(Box::new(Type::parse(tokens)?));
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Future(Future { span, ty }))
            }

            // stream<T>
            // stream
            Some((span, Token::Stream)) => {
                let mut ty = None;

                if tokens.eat(Token::LessThan)? {
                    ty = Some(Box::new(Type::parse(tokens)?));
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Stream(Stream { span, ty }))
            }

            // error-context
            Some((span, Token::ErrorContext)) => Ok(Type::ErrorContext(span)),

            // own<T>
            Some((_span, Token::Own)) => {
                tokens.expect(Token::LessThan)?;
                let resource = parse_id(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Handle(Handle::Own { resource }))
            }

            // borrow<T>
            Some((_span, Token::Borrow)) => {
                tokens.expect(Token::LessThan)?;
                let resource = parse_id(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Handle(Handle::Borrow { resource }))
            }

            // `foo`
            Some((span, Token::Id)) => Ok(Type::Name(Id {
                name: tokens.parse_id(span)?.into(),
                span,
            })),
            // `%foo`
            Some((span, Token::ExplicitId)) => Ok(Type::Name(Id {
                name: tokens.parse_explicit_id(span)?.into(),
                span,
            })),

            other => Err(err_expected(tokens, "a type", other).into()),
        }
    }

    /// Returns the source span of this type, used for diagnostics.
    fn span(&self) -> Span {
        match self {
            Type::Bool(span)
            | Type::U8(span)
            | Type::U16(span)
            | Type::U32(span)
            | Type::U64(span)
            | Type::S8(span)
            | Type::S16(span)
            | Type::S32(span)
            | Type::S64(span)
            | Type::F32(span)
            | Type::F64(span)
            | Type::Char(span)
            | Type::String(span)
            | Type::ErrorContext(span) => *span,
            Type::Name(id) => id.span,
            Type::List(l) => l.span,
            Type::Map(m) => m.span,
            Type::FixedSizeList(l) => l.span,
            Type::Handle(h) => h.span(),
            Type::Resource(r) => r.span,
            Type::Record(r) => r.span,
            Type::Flags(f) => f.span,
            Type::Variant(v) => v.span,
            Type::Tuple(t) => t.span,
            Type::Enum(e) => e.span,
            Type::Option(o) => o.span,
            Type::Result(r) => r.span,
            Type::Future(f) => f.span,
            Type::Stream(s) => s.span,
        }
    }
}
1556
/// Parses a delimited, comma-separated list: consumes the opening `start`
/// token, then defers to [`parse_list_trailer`] for items and the closing
/// `end` token.
fn parse_list<'a, T>(
    tokens: &mut Tokenizer<'a>,
    start: Token,
    end: Token,
    parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
) -> Result<Vec<T>> {
    tokens.expect(start)?;
    parse_list_trailer(tokens, end, parse)
}
1566
1567fn parse_list_trailer<'a, T>(
1568    tokens: &mut Tokenizer<'a>,
1569    end: Token,
1570    mut parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
1571) -> Result<Vec<T>> {
1572    let mut items = Vec::new();
1573    loop {
1574        // get docs before we skip them to try to eat the end token
1575        let docs = parse_docs(tokens)?;
1576
1577        // if we found an end token then we're done
1578        if tokens.eat(end)? {
1579            break;
1580        }
1581
1582        let item = parse(docs, tokens)?;
1583        items.push(item);
1584
1585        // if there's no trailing comma then this is required to be the end,
1586        // otherwise we go through the loop to try to get another item
1587        if !tokens.eat(Token::Comma)? {
1588            tokens.expect(end)?;
1589            break;
1590        }
1591    }
1592    Ok(items)
1593}
1594
1595fn err_expected(
1596    tokens: &Tokenizer<'_>,
1597    expected: &'static str,
1598    found: Option<(Span, Token)>,
1599) -> Error {
1600    match found {
1601        Some((span, token)) => Error::new(
1602            span,
1603            format!("expected {}, found {}", expected, token.describe()),
1604        ),
1605        None => Error::new(tokens.eof_span(), format!("expected {expected}, found eof")),
1606    }
1607}
1608
/// A parsed `@...` gating attribute attached to an item.
enum Attribute<'a> {
    /// `@since(version = x.y.z)`
    Since { span: Span, version: Version },
    /// `@unstable(feature = foo)`
    Unstable { span: Span, feature: Id<'a> },
    /// `@deprecated(version = x.y.z)`
    Deprecated { span: Span, version: Version },
}
1614
1615impl<'a> Attribute<'a> {
1616    fn parse_list(tokens: &mut Tokenizer<'a>) -> Result<Vec<Attribute<'a>>> {
1617        let mut ret = Vec::new();
1618        while tokens.eat(Token::At)? {
1619            let id = parse_id(tokens)?;
1620            let attr = match id.name {
1621                "since" => {
1622                    tokens.expect(Token::LeftParen)?;
1623                    eat_id(tokens, "version")?;
1624                    tokens.expect(Token::Equals)?;
1625                    let (_span, version) = parse_version(tokens)?;
1626                    tokens.expect(Token::RightParen)?;
1627                    Attribute::Since {
1628                        span: id.span,
1629                        version,
1630                    }
1631                }
1632                "unstable" => {
1633                    tokens.expect(Token::LeftParen)?;
1634                    eat_id(tokens, "feature")?;
1635                    tokens.expect(Token::Equals)?;
1636                    let feature = parse_id(tokens)?;
1637                    tokens.expect(Token::RightParen)?;
1638                    Attribute::Unstable {
1639                        span: id.span,
1640                        feature,
1641                    }
1642                }
1643                "deprecated" => {
1644                    tokens.expect(Token::LeftParen)?;
1645                    eat_id(tokens, "version")?;
1646                    tokens.expect(Token::Equals)?;
1647                    let (_span, version) = parse_version(tokens)?;
1648                    tokens.expect(Token::RightParen)?;
1649                    Attribute::Deprecated {
1650                        span: id.span,
1651                        version,
1652                    }
1653                }
1654                other => {
1655                    bail!(Error::new(id.span, format!("unknown attribute `{other}`"),))
1656                }
1657            };
1658            ret.push(attr);
1659        }
1660        Ok(ret)
1661    }
1662
1663    fn span(&self) -> Span {
1664        match self {
1665            Attribute::Since { span, .. }
1666            | Attribute::Unstable { span, .. }
1667            | Attribute::Deprecated { span, .. } => *span,
1668        }
1669    }
1670}
1671
1672fn eat_id(tokens: &mut Tokenizer<'_>, expected: &str) -> Result<Span> {
1673    let id = parse_id(tokens)?;
1674    if id.name != expected {
1675        bail!(Error::new(
1676            id.span,
1677            format!("expected `{expected}`, found `{}`", id.name),
1678        ));
1679    }
1680    Ok(id.span)
1681}
1682
/// A listing of source files which are used to get parsed into an
/// [`UnresolvedPackage`].
///
/// [`UnresolvedPackage`]: crate::UnresolvedPackage
#[derive(Clone, Default)]
pub struct SourceMap {
    /// All files pushed so far, in insertion order, with increasing offsets.
    sources: Vec<Source>,
    /// Byte offset one past the end of the last source; becomes the `offset`
    /// of the next file pushed.
    offset: u32,
    /// Transitionary flag threaded into each `Tokenizer`; see
    /// [`SourceMap::set_require_f32_f64`].
    require_f32_f64: Option<bool>,
}
1693
/// A single file's contents plus its position in the [`SourceMap`]'s global
/// byte-offset space.
#[derive(Clone)]
struct Source {
    /// Global byte offset at which this file's contents begin.
    offset: u32,
    /// Path used for error messages only; not re-read from disk here.
    path: PathBuf,
    /// File contents, with a `\n` forcibly appended by [`SourceMap::push`].
    contents: String,
}
1700
1701impl SourceMap {
1702    /// Creates a new empty source map.
1703    pub fn new() -> SourceMap {
1704        SourceMap::default()
1705    }
1706
    #[doc(hidden)] // NB: only here for a transitionary period
    pub fn set_require_f32_f64(&mut self, enable: bool) {
        // Forwarded into each `Tokenizer` constructed in `parse` below.
        self.require_f32_f64 = Some(enable);
    }
1711
1712    /// Reads the file `path` on the filesystem and appends its contents to this
1713    /// [`SourceMap`].
1714    pub fn push_file(&mut self, path: &Path) -> Result<()> {
1715        let contents = std::fs::read_to_string(path)
1716            .with_context(|| format!("failed to read file {path:?}"))?;
1717        self.push(path, contents);
1718        Ok(())
1719    }
1720
1721    /// Appends the given contents with the given path into this source map.
1722    ///
1723    /// The `path` provided is not read from the filesystem and is instead only
1724    /// used during error messages. Each file added to a [`SourceMap`] is
1725    /// used to create the final parsed package namely by unioning all the
1726    /// interfaces and worlds defined together. Note that each file has its own
1727    /// personal namespace, however, for top-level `use` and such.
1728    pub fn push(&mut self, path: &Path, contents: impl Into<String>) {
1729        let mut contents = contents.into();
1730        // Guarantee that there's at least one character in these contents by
1731        // appending a single newline to the end. This is excluded from
1732        // tokenization below so it's only here to ensure that spans which point
1733        // one byte beyond the end of a file (eof) point to the same original
1734        // file.
1735        contents.push('\n');
1736        let new_offset = self.offset + u32::try_from(contents.len()).unwrap();
1737        self.sources.push(Source {
1738            offset: self.offset,
1739            path: path.to_path_buf(),
1740            contents,
1741        });
1742        self.offset = new_offset;
1743    }
1744
    /// Parses the files added to this source map into a
    /// [`UnresolvedPackageGroup`].
    pub fn parse(self) -> Result<UnresolvedPackageGroup> {
        let mut nested = Vec::new();
        let main = self.rewrite_error(|| {
            let mut resolver = Resolver::default();
            // Sort by path so resolution order is deterministic regardless of
            // the order files were pushed.
            let mut srcs = self.sources.iter().collect::<Vec<_>>();
            srcs.sort_by_key(|src| &src.path);

            // Parse each source file individually. A tokenizer is created here
            // from settings and then `PackageFile` is used to parse the whole
            // stream of tokens.
            for src in srcs {
                let mut tokens = Tokenizer::new(
                    // chop off the forcibly appended `\n` character when
                    // passing through the source to get tokenized.
                    &src.contents[..src.contents.len() - 1],
                    src.offset,
                    self.require_f32_f64,
                )
                .with_context(|| format!("failed to tokenize path: {}", src.path.display()))?;
                let mut file = PackageFile::parse(&mut tokens)?;

                // Filter out any nested packages and resolve them separately.
                // Nested packages have only a single "file" so only one item
                // is pushed into a `Resolver`. Note that a nested `Resolver`
                // is used here, not the outer one.
                //
                // Note that filtering out `Package` items is required due to
                // how the implementation of disallowing nested packages in
                // nested packages currently works.
                for item in mem::take(&mut file.decl_list.items) {
                    match item {
                        AstItem::Package(nested_pkg) => {
                            let mut resolve = Resolver::default();
                            resolve.push(nested_pkg).with_context(|| {
                                format!(
                                    "failed to handle nested package in: {}",
                                    src.path.display()
                                )
                            })?;

                            nested.push(resolve.resolve()?);
                        }
                        // Non-package items are retained for the outer
                        // resolver below.
                        other => file.decl_list.items.push(other),
                    }
                }

                // With nested packages handled push this file into the
                // resolver.
                resolver.push(file).with_context(|| {
                    format!("failed to start resolving path: {}", src.path.display())
                })?;
            }
            Ok(resolver.resolve()?)
        })?;
        Ok(UnresolvedPackageGroup {
            main,
            nested,
            source_map: self,
        })
    }
1807
1808    pub(crate) fn rewrite_error<F, T>(&self, f: F) -> Result<T>
1809    where
1810        F: FnOnce() -> Result<T>,
1811    {
1812        let mut err = match f() {
1813            Ok(t) => return Ok(t),
1814            Err(e) => e,
1815        };
1816        if let Some(parse) = err.downcast_mut::<Error>() {
1817            if parse.highlighted.is_none() {
1818                let msg = self.highlight_err(parse.span.start, Some(parse.span.end), &parse.msg);
1819                parse.highlighted = Some(msg);
1820            }
1821        }
1822        if let Some(_) = err.downcast_mut::<Error>() {
1823            return Err(err);
1824        }
1825        if let Some(notfound) = err.downcast_mut::<PackageNotFoundError>() {
1826            if notfound.highlighted.is_none() {
1827                let msg = self.highlight_err(
1828                    notfound.span.start,
1829                    Some(notfound.span.end),
1830                    &format!("{notfound}"),
1831                );
1832                notfound.highlighted = Some(msg);
1833            }
1834        }
1835        if let Some(_) = err.downcast_mut::<PackageNotFoundError>() {
1836            return Err(err);
1837        }
1838
1839        if let Some(lex) = err.downcast_ref::<lex::Error>() {
1840            let pos = match lex {
1841                lex::Error::Unexpected(at, _)
1842                | lex::Error::UnterminatedComment(at)
1843                | lex::Error::Wanted { at, .. }
1844                | lex::Error::InvalidCharInId(at, _)
1845                | lex::Error::IdPartEmpty(at)
1846                | lex::Error::InvalidEscape(at, _) => *at,
1847            };
1848            let msg = self.highlight_err(pos, None, lex);
1849            bail!("{msg}")
1850        }
1851
1852        if let Some(sort) = err.downcast_mut::<toposort::Error>() {
1853            if sort.highlighted().is_none() {
1854                let span = match sort {
1855                    toposort::Error::NonexistentDep { span, .. }
1856                    | toposort::Error::Cycle { span, .. } => *span,
1857                };
1858                let highlighted = self.highlight_err(span.start, Some(span.end), &sort);
1859                sort.set_highlighted(highlighted);
1860            }
1861        }
1862
1863        Err(err)
1864    }
1865
    /// Renders a rustc-style diagnostic string for the absolute byte range
    /// `start..end` of this source map, with `err` as the headline message.
    ///
    /// Output shape: the message, a `--> file:line:col` location line, the
    /// offending source line, and a `^---` marker underneath it.
    fn highlight_err(&self, start: u32, end: Option<u32>, err: impl fmt::Display) -> String {
        // Resolve which file the absolute offset falls in, then convert both
        // offsets to positions relative to that file's contents.
        let src = self.source_for_offset(start);
        let start = src.to_relative_offset(start);
        let end = end.map(|end| src.to_relative_offset(end));
        let (line, col) = src.linecol(start);
        // Line being highlighted; empty if the offset points past the last line.
        let snippet = src.contents.lines().nth(line).unwrap_or("");
        let mut msg = format!(
            "\
{err}
     --> {file}:{line}:{col}
      |
 {line:4} | {snippet}
      | {marker:>0$}",
            // Positional argument 0 is used as the *field width* for `marker`,
            // right-aligning the `^` so it lands under 1-based column `col`.
            col + 1,
            file = src.path.display(),
            line = line + 1,
            col = col + 1,
            marker = "^",
        );
        // Extend the marker with one `-` per extra character in the span (the
        // `^` already covers the first). `get` returns `None` for out-of-bounds
        // or non-char-boundary ranges, in which case no dashes are appended.
        // NOTE(review): a span crossing a newline still counts characters on
        // later lines — assumes spans are single-line; confirm with callers.
        if let Some(end) = end {
            if let Some(s) = src.contents.get(start..end) {
                for _ in s.chars().skip(1) {
                    msg.push('-');
                }
            }
        }
        return msg;
    }
1894
1895    pub(crate) fn render_location(&self, span: Span) -> String {
1896        let src = self.source_for_offset(span.start);
1897        let start = src.to_relative_offset(span.start);
1898        let (line, col) = src.linecol(start);
1899        format!(
1900            "{file}:{line}:{col}",
1901            file = src.path.display(),
1902            line = line + 1,
1903            col = col + 1,
1904        )
1905    }
1906
1907    fn source_for_offset(&self, start: u32) -> &Source {
1908        let i = match self.sources.binary_search_by_key(&start, |src| src.offset) {
1909            Ok(i) => i,
1910            Err(i) => i - 1,
1911        };
1912        &self.sources[i]
1913    }
1914
1915    /// Returns an iterator over all filenames added to this source map.
1916    pub fn source_files(&self) -> impl Iterator<Item = &Path> {
1917        self.sources.iter().map(|src| src.path.as_path())
1918    }
1919}
1920
1921impl Source {
1922    fn to_relative_offset(&self, offset: u32) -> usize {
1923        usize::try_from(offset - self.offset).unwrap()
1924    }
1925
1926    fn linecol(&self, relative_offset: usize) -> (usize, usize) {
1927        let mut cur = 0;
1928        // Use split_terminator instead of lines so that if there is a `\r`,
1929        // it is included in the offset calculation. The `+1` values below
1930        // account for the `\n`.
1931        for (i, line) in self.contents.split_terminator('\n').enumerate() {
1932            if cur + line.len() + 1 > relative_offset {
1933                return (i, relative_offset - cur);
1934            }
1935            cur += line.len() + 1;
1936        }
1937        (self.contents.lines().count(), 0)
1938    }
1939}
1940
/// Result of parsing a WIT use-path string with [`parse_use_path`].
pub enum ParsedUsePath {
    /// A bare identifier, e.g. `foo`.
    Name(String),
    /// A package-qualified path: the package name plus the item name within
    /// that package.
    Package(crate::PackageName, String),
}
1945
1946pub fn parse_use_path(s: &str) -> Result<ParsedUsePath> {
1947    let mut tokens = Tokenizer::new(s, 0, None)?;
1948    let path = UsePath::parse(&mut tokens)?;
1949    if tokens.next()?.is_some() {
1950        bail!("trailing tokens in path specifier");
1951    }
1952    Ok(match path {
1953        UsePath::Id(id) => ParsedUsePath::Name(id.name.to_string()),
1954        UsePath::Package { id, name } => {
1955            ParsedUsePath::Package(id.package_name(), name.name.to_string())
1956        }
1957    })
1958}