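//! Declarations for `enable` extension directives and
//! `import <name> from "<path>";` module imports.
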
use gramatika::{DebugLisp, Parse, ParseStreamer, Span, Spanned, Substr, Token as _};

use crate::{
	token::{keyword, punct, Token, TokenKind},
	ParseStream,
};

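/// An extension declaration, e.g. `enable f16;`.
///
/// The terminating `;` is consumed during parsing but not stored, so the
/// reported span ends at the extension name.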
#[derive(Clone, DebugLisp)]
pub struct ExtensionDecl {
	pub keyword: Token,
	pub name: Token,
}

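/// A module import of the form `import <name> from "<path>";`.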
#[derive(Clone, DebugLisp)]
pub struct ModuleDecl {
	pub import_kw: Token,
	pub name: Token,
	pub from_kw: Token,
	pub path: Token,
	pub semicolon: Token,
}

impl Parse for ExtensionDecl {
	type Stream = ParseStream;

	fn parse(input: &mut Self::Stream) -> gramatika::Result<Self> {
		let keyword = input.consume(keyword![enable])?;
		let name = input.consume_as(TokenKind::Ident, Token::Module)?;
		input.consume(punct![;])?;

		Ok(Self { keyword, name })
	}
}

impl Spanned for ExtensionDecl {
	fn span(&self) -> Span {
		self.keyword.span().through(self.name.span())
	}
}

impl ModuleDecl {
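	/// Returns the import path, stripping the first and last characters of the
	/// path token's lexeme (its surrounding quotes).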
	pub fn path(&self) -> Substr {
		let path = self.path.lexeme();

		path.substr(1..path.len() - 1)
	}
}

impl Parse for ModuleDecl {
	type Stream = ParseStream;

	fn parse(input: &mut Self::Stream) -> gramatika::Result<Self> {
		let import_kw = input.consume(keyword![import])?;
		let name = input.consume_as(TokenKind::Ident, Token::Module)?;
		let from_kw = input.consume(keyword![from])?;
		let path = input.consume_kind(TokenKind::Path)?;
		let semicolon = input.consume(punct![;])?;

		Ok(Self {
			import_kw,
			name,
			from_kw,
			path,
			semicolon,
		})
	}
}

impl Spanned for ModuleDecl {
	fn span(&self) -> Span {
		self.import_kw.span().through(self.semicolon.span())
	}
}
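
// NOTE: The test below is an illustrative sketch, not part of the original
// module. It assumes `ParseStream` can be constructed from a source string via
// `From<&str>` and driven with `gramatika::ParseStreamer::parse`, as in
// gramatika's examples, and that the lexer tags the quoted path literal as
// `TokenKind::Path`. Adjust the stream constructor if the crate exposes a
// different entry point.
#[cfg(test)]
mod tests {
	use gramatika::Token as _;

	use super::*;

	#[test]
	fn module_decl_path_strips_quotes() {
		// Hypothetical source exercising the `import <name> from "<path>";` form.
		let mut input = ParseStream::from(r#"import lights from "./lights.wgsl";"#);
		let decl = input.parse::<ModuleDecl>().unwrap();

		assert_eq!(&*decl.name.lexeme(), "lights");
		// `path()` trims the surrounding quotes from the path token's lexeme.
		assert_eq!(&*decl.path(), "./lights.wgsl");
	}
}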