// sage_parser/token.rs
1//! Token definitions for the Sage lexer.
2
3use logos::Logos;
4
/// All tokens in the Sage language.
///
/// Whitespace and `//` line comments are skipped by the lexer via the
/// `#[logos(skip ...)]` attributes below, so neither ever appears in the
/// token stream. Keywords take precedence over `Ident` only on an exact
/// match; a longer word such as `agents` or `agent_name` still lexes as
/// a single `Ident`.
#[derive(Logos, Debug, Clone, PartialEq, Eq, Hash)]
#[logos(skip r"[ \t\r\n]+")]
#[logos(skip r"//[^\n]*")]
pub enum Token {
    // =========================================================================
    // Keywords
    // =========================================================================
    #[token("agent")]
    KwAgent,

    #[token("belief")]
    KwBelief,

    #[token("on")]
    KwOn,

    #[token("start")]
    KwStart,

    #[token("stop")]
    KwStop,

    /// v2 lifecycle: runs before start, after persistent state loaded.
    #[token("waking")]
    KwWaking,

    /// v2 lifecycle: runs when supervisor signals graceful pause.
    #[token("pause")]
    KwPause,

    /// v2 lifecycle: runs when agent is unpaused.
    #[token("resume")]
    KwResume,

    /// v2 lifecycle: alias for stop (v2 terminology).
    #[token("resting")]
    KwResting,

    #[token("message")]
    KwMessage,

    #[token("divine")]
    KwDivine,

    #[token("infer")]
    KwInfer,

    #[token("summon")]
    KwSummon,

    #[token("await")]
    KwAwait,

    #[token("send")]
    KwSend,

    #[token("yield")]
    KwYield,

    #[token("run")]
    KwRun,

    #[token("fn")]
    KwFn,

    #[token("let")]
    KwLet,

    #[token("return")]
    KwReturn,

    #[token("if")]
    KwIf,

    #[token("else")]
    KwElse,

    #[token("for")]
    KwFor,

    #[token("while")]
    KwWhile,

    #[token("loop")]
    KwLoop,

    #[token("break")]
    KwBreak,

    #[token("in")]
    KwIn,

    #[token("self")]
    KwSelf,

    // Boolean literals are lexed as keywords; `is_literal` treats them as
    // literals for the parser's purposes.
    #[token("true")]
    KwTrue,

    #[token("false")]
    KwFalse,

    #[token("mod")]
    KwMod,

    #[token("use")]
    KwUse,

    #[token("pub")]
    KwPub,

    #[token("as")]
    KwAs,

    #[token("super")]
    KwSuper,

    #[token("record")]
    KwRecord,

    #[token("enum")]
    KwEnum,

    #[token("match")]
    KwMatch,

    #[token("const")]
    KwConst,

    #[token("receives")]
    KwReceives,

    #[token("receive")]
    KwReceive,

    #[token("fail")]
    KwFail,

    #[token("fails")]
    KwFails,

    #[token("timeout")]
    KwTimeout,

    #[token("retry")]
    KwRetry,

    #[token("delay")]
    KwDelay,

    #[token("try")]
    KwTry,

    #[token("catch")]
    KwCatch,

    #[token("error")]
    KwError,

    #[token("tool")]
    KwTool,

    /// RFC-0012: Test declaration keyword.
    #[token("test")]
    KwTest,

    /// RFC-0012: Mock keyword for LLM mocking.
    #[token("mock")]
    KwMock,

    /// Trace keyword for emitting trace events.
    #[token("trace")]
    KwTrace,

    /// Span keyword for timed observability blocks.
    #[token("span")]
    KwSpan,

    /// Checkpoint keyword for explicit persistence checkpoint.
    #[token("checkpoint")]
    KwCheckpoint,

    // =========================================================================
    // Supervision tree keywords (v2)
    // =========================================================================
    /// Supervisor declaration keyword.
    #[token("supervisor")]
    KwSupervisor,

    /// Children block in supervisor.
    #[token("children")]
    KwChildren,

    /// Supervision strategy keyword.
    #[token("strategy")]
    KwStrategy,

    /// Restart policy keyword.
    #[token("restart")]
    KwRestart,

    // =========================================================================
    // Session types keywords (Phase 3)
    // =========================================================================
    /// Protocol declaration keyword.
    #[token("protocol")]
    KwProtocol,

    /// Follows clause in agent declaration.
    #[token("follows")]
    KwFollows,

    /// Reply expression in message handlers.
    #[token("reply")]
    KwReply,

    // =========================================================================
    // Algebraic effects keywords (Phase 3)
    // =========================================================================
    /// Effect handler declaration keyword.
    #[token("handler")]
    KwHandler,

    /// Handles clause in effect handler declaration.
    #[token("handles")]
    KwHandles,

    // =========================================================================
    // Type keywords
    // =========================================================================
    #[token("Int")]
    TyInt,

    #[token("Float")]
    TyFloat,

    #[token("Bool")]
    TyBool,

    #[token("String")]
    TyString,

    #[token("Unit")]
    TyUnit,

    #[token("List")]
    TyList,

    #[token("Option")]
    TyOption,

    #[token("Oracle")]
    TyOracle,

    #[token("Agent")]
    TyAgent,

    #[token("Error")]
    TyError,

    #[token("ErrorKind")]
    TyErrorKind,

    /// Function type keyword: `Fn`
    #[token("Fn")]
    TyFn,

    /// Map type keyword: `Map`
    #[token("Map")]
    TyMap,

    /// Result type keyword: `Result`
    #[token("Result")]
    TyResult,

    // =========================================================================
    // Literals
    // =========================================================================
    /// Integer literal (e.g., `42`, `-7`).
    ///
    /// The leading `-` is part of the literal itself, so standalone `-7`
    /// lexes as one token rather than `Minus` + `IntLit`. NOTE(review):
    /// this means `a-7` also lexes as `Ident IntLit(-7)` — confirm the
    /// parser handles that case.
    #[regex(r"-?[0-9]+", priority = 2)]
    IntLit,

    /// Float literal (e.g., `3.14`, `-0.5`).
    #[regex(r"-?[0-9]+\.[0-9]+")]
    FloatLit,

    /// String literal (e.g., `"hello"` or `'hello'`).
    /// Supports escape sequences: \n, \t, \r, \\, \", \'
    /// Both double and single quotes are allowed. Use single quotes inside
    /// interpolations when needing string literals: `"Result: {len('hello')}"`
    #[regex(r#""([^"\\]|\\.)*""#)]
    #[regex(r#"'([^'\\]|\\.)*'"#)]
    StringLit,

    // =========================================================================
    // Identifiers
    // =========================================================================
    /// Identifier (e.g., `foo`, `myAgent`, `_private`).
    #[regex(r"[a-zA-Z_][a-zA-Z0-9_]*")]
    Ident,

    // =========================================================================
    // Punctuation
    // =========================================================================
    #[token("{")]
    LBrace,

    #[token("}")]
    RBrace,

    #[token("(")]
    LParen,

    #[token(")")]
    RParen,

    #[token("[")]
    LBracket,

    #[token("]")]
    RBracket,

    #[token(",")]
    Comma,

    /// Path separator, e.g. `agents::Researcher`.
    #[token("::")]
    ColonColon,

    #[token(":")]
    Colon,

    #[token(".")]
    Dot,

    /// Return-type arrow, e.g. `fn f() -> Int`.
    #[token("->")]
    Arrow,

    /// Match-arm arrow, e.g. `Active => 1`.
    #[token("=>")]
    FatArrow,

    /// Annotation marker for test attributes.
    #[token("@")]
    At,

    // =========================================================================
    // Operators
    // =========================================================================
    #[token("=")]
    Eq,

    #[token("==")]
    EqEq,

    #[token("!=")]
    Ne,

    #[token("<")]
    Lt,

    #[token(">")]
    Gt,

    #[token("<=")]
    Le,

    #[token(">=")]
    Ge,

    #[token("+")]
    Plus,

    #[token("-")]
    Minus,

    #[token("*")]
    Star,

    #[token("/")]
    Slash,

    #[token("!")]
    Bang,

    #[token("&&")]
    And,

    #[token("||")]
    Or,

    /// Single pipe for closure parameters: `|`
    #[token("|")]
    Pipe,

    /// String concatenation operator.
    #[token("++")]
    PlusPlus,

    /// Modulo/remainder operator.
    #[token("%")]
    Percent,

    /// Statement terminator.
    #[token(";")]
    Semicolon,
}
410
411impl Token {
412    /// Returns true if this token is a keyword.
413    #[must_use]
414    pub fn is_keyword(&self) -> bool {
415        matches!(
416            self,
417            Token::KwAgent
418                | Token::KwBelief
419                | Token::KwOn
420                | Token::KwStart
421                | Token::KwStop
422                | Token::KwWaking
423                | Token::KwPause
424                | Token::KwResume
425                | Token::KwResting
426                | Token::KwMessage
427                | Token::KwDivine
428                | Token::KwInfer
429                | Token::KwSummon
430                | Token::KwAwait
431                | Token::KwSend
432                | Token::KwYield
433                | Token::KwRun
434                | Token::KwFn
435                | Token::KwLet
436                | Token::KwReturn
437                | Token::KwIf
438                | Token::KwElse
439                | Token::KwFor
440                | Token::KwWhile
441                | Token::KwLoop
442                | Token::KwBreak
443                | Token::KwIn
444                | Token::KwSelf
445                | Token::KwTrue
446                | Token::KwFalse
447                | Token::KwMod
448                | Token::KwUse
449                | Token::KwPub
450                | Token::KwAs
451                | Token::KwSuper
452                | Token::KwRecord
453                | Token::KwEnum
454                | Token::KwMatch
455                | Token::KwConst
456                | Token::KwReceives
457                | Token::KwReceive
458                | Token::KwFail
459                | Token::KwFails
460                | Token::KwTimeout
461                | Token::KwRetry
462                | Token::KwDelay
463                | Token::KwTry
464                | Token::KwCatch
465                | Token::KwError
466                | Token::KwTool
467                | Token::KwTrace
468                | Token::KwSpan
469                | Token::KwCheckpoint
470                | Token::KwSupervisor
471                | Token::KwChildren
472                | Token::KwStrategy
473                | Token::KwRestart
474                | Token::KwProtocol
475                | Token::KwFollows
476                | Token::KwReply
477                | Token::KwHandler
478                | Token::KwHandles
479        )
480    }
481
482    /// Returns true if this token is a type keyword.
483    #[must_use]
484    pub fn is_type_keyword(&self) -> bool {
485        matches!(
486            self,
487            Token::TyInt
488                | Token::TyFloat
489                | Token::TyBool
490                | Token::TyString
491                | Token::TyUnit
492                | Token::TyList
493                | Token::TyOption
494                | Token::TyOracle
495                | Token::TyAgent
496                | Token::TyError
497                | Token::TyErrorKind
498                | Token::TyFn
499                | Token::TyMap
500                | Token::TyResult
501        )
502    }
503
504    /// Returns true if this token is a literal.
505    #[must_use]
506    pub fn is_literal(&self) -> bool {
507        matches!(
508            self,
509            Token::IntLit | Token::FloatLit | Token::StringLit | Token::KwTrue | Token::KwFalse
510        )
511    }
512
513    /// Returns true if this token is an operator.
514    #[must_use]
515    pub fn is_operator(&self) -> bool {
516        matches!(
517            self,
518            Token::Eq
519                | Token::EqEq
520                | Token::Ne
521                | Token::Lt
522                | Token::Gt
523                | Token::Le
524                | Token::Ge
525                | Token::Plus
526                | Token::Minus
527                | Token::Star
528                | Token::Slash
529                | Token::Percent
530                | Token::Bang
531                | Token::And
532                | Token::Or
533                | Token::PlusPlus
534        )
535    }
536}
537
538impl std::fmt::Display for Token {
539    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
540        match self {
541            // Keywords
542            Token::KwAgent => write!(f, "agent"),
543            Token::KwBelief => write!(f, "belief"),
544            Token::KwOn => write!(f, "on"),
545            Token::KwStart => write!(f, "start"),
546            Token::KwStop => write!(f, "stop"),
547            Token::KwWaking => write!(f, "waking"),
548            Token::KwPause => write!(f, "pause"),
549            Token::KwResume => write!(f, "resume"),
550            Token::KwResting => write!(f, "resting"),
551            Token::KwMessage => write!(f, "message"),
552            Token::KwDivine => write!(f, "divine"),
553            Token::KwInfer => write!(f, "infer"),
554            Token::KwSummon => write!(f, "summon"),
555            Token::KwAwait => write!(f, "await"),
556            Token::KwSend => write!(f, "send"),
557            Token::KwYield => write!(f, "yield"),
558            Token::KwRun => write!(f, "run"),
559            Token::KwFn => write!(f, "fn"),
560            Token::KwLet => write!(f, "let"),
561            Token::KwReturn => write!(f, "return"),
562            Token::KwIf => write!(f, "if"),
563            Token::KwElse => write!(f, "else"),
564            Token::KwFor => write!(f, "for"),
565            Token::KwWhile => write!(f, "while"),
566            Token::KwLoop => write!(f, "loop"),
567            Token::KwBreak => write!(f, "break"),
568            Token::KwIn => write!(f, "in"),
569            Token::KwSelf => write!(f, "self"),
570            Token::KwTrue => write!(f, "true"),
571            Token::KwFalse => write!(f, "false"),
572            Token::KwMod => write!(f, "mod"),
573            Token::KwUse => write!(f, "use"),
574            Token::KwPub => write!(f, "pub"),
575            Token::KwAs => write!(f, "as"),
576            Token::KwSuper => write!(f, "super"),
577            Token::KwRecord => write!(f, "record"),
578            Token::KwEnum => write!(f, "enum"),
579            Token::KwMatch => write!(f, "match"),
580            Token::KwConst => write!(f, "const"),
581            Token::KwReceives => write!(f, "receives"),
582            Token::KwReceive => write!(f, "receive"),
583            Token::KwFail => write!(f, "fail"),
584            Token::KwFails => write!(f, "fails"),
585            Token::KwTimeout => write!(f, "timeout"),
586            Token::KwRetry => write!(f, "retry"),
587            Token::KwDelay => write!(f, "delay"),
588            Token::KwTry => write!(f, "try"),
589            Token::KwCatch => write!(f, "catch"),
590            Token::KwError => write!(f, "error"),
591            Token::KwTool => write!(f, "tool"),
592            Token::KwTest => write!(f, "test"),
593            Token::KwMock => write!(f, "mock"),
594            Token::KwTrace => write!(f, "trace"),
595            Token::KwSpan => write!(f, "span"),
596            Token::KwCheckpoint => write!(f, "checkpoint"),
597            Token::KwSupervisor => write!(f, "supervisor"),
598            Token::KwChildren => write!(f, "children"),
599            Token::KwStrategy => write!(f, "strategy"),
600            Token::KwRestart => write!(f, "restart"),
601            Token::KwProtocol => write!(f, "protocol"),
602            Token::KwFollows => write!(f, "follows"),
603            Token::KwReply => write!(f, "reply"),
604            Token::KwHandler => write!(f, "handler"),
605            Token::KwHandles => write!(f, "handles"),
606
607            // Type keywords
608            Token::TyInt => write!(f, "Int"),
609            Token::TyFloat => write!(f, "Float"),
610            Token::TyBool => write!(f, "Bool"),
611            Token::TyString => write!(f, "String"),
612            Token::TyUnit => write!(f, "Unit"),
613            Token::TyList => write!(f, "List"),
614            Token::TyOption => write!(f, "Option"),
615            Token::TyOracle => write!(f, "Oracle"),
616            Token::TyAgent => write!(f, "Agent"),
617            Token::TyError => write!(f, "Error"),
618            Token::TyErrorKind => write!(f, "ErrorKind"),
619            Token::TyFn => write!(f, "Fn"),
620            Token::TyMap => write!(f, "Map"),
621            Token::TyResult => write!(f, "Result"),
622
623            // Literals
624            Token::IntLit => write!(f, "<int>"),
625            Token::FloatLit => write!(f, "<float>"),
626            Token::StringLit => write!(f, "<string>"),
627
628            // Identifier
629            Token::Ident => write!(f, "<ident>"),
630
631            // Punctuation
632            Token::LBrace => write!(f, "{{"),
633            Token::RBrace => write!(f, "}}"),
634            Token::LParen => write!(f, "("),
635            Token::RParen => write!(f, ")"),
636            Token::LBracket => write!(f, "["),
637            Token::RBracket => write!(f, "]"),
638            Token::Comma => write!(f, ","),
639            Token::ColonColon => write!(f, "::"),
640            Token::Colon => write!(f, ":"),
641            Token::Dot => write!(f, "."),
642            Token::Arrow => write!(f, "->"),
643            Token::FatArrow => write!(f, "=>"),
644            Token::At => write!(f, "@"),
645
646            // Operators
647            Token::Eq => write!(f, "="),
648            Token::EqEq => write!(f, "=="),
649            Token::Ne => write!(f, "!="),
650            Token::Lt => write!(f, "<"),
651            Token::Gt => write!(f, ">"),
652            Token::Le => write!(f, "<="),
653            Token::Ge => write!(f, ">="),
654            Token::Plus => write!(f, "+"),
655            Token::Minus => write!(f, "-"),
656            Token::Star => write!(f, "*"),
657            Token::Slash => write!(f, "/"),
658            Token::Bang => write!(f, "!"),
659            Token::And => write!(f, "&&"),
660            Token::Or => write!(f, "||"),
661            Token::Pipe => write!(f, "|"),
662            Token::PlusPlus => write!(f, "++"),
663            Token::Percent => write!(f, "%"),
664            Token::Semicolon => write!(f, ";"),
665        }
666    }
667}
668
669#[cfg(test)]
670mod tests {
671    use super::*;
672
673    #[test]
674    fn lex_keywords() {
675        let mut lexer = Token::lexer("agent belief on start stop message");
676        assert_eq!(lexer.next(), Some(Ok(Token::KwAgent)));
677        assert_eq!(lexer.next(), Some(Ok(Token::KwBelief)));
678        assert_eq!(lexer.next(), Some(Ok(Token::KwOn)));
679        assert_eq!(lexer.next(), Some(Ok(Token::KwStart)));
680        assert_eq!(lexer.next(), Some(Ok(Token::KwStop)));
681        assert_eq!(lexer.next(), Some(Ok(Token::KwMessage)));
682        assert_eq!(lexer.next(), None);
683    }
684
685    #[test]
686    fn lex_more_keywords() {
687        let mut lexer = Token::lexer(
688            "divine summon await send yield run fn let return if else for in self true false",
689        );
690        assert_eq!(lexer.next(), Some(Ok(Token::KwDivine)));
691        assert_eq!(lexer.next(), Some(Ok(Token::KwSummon)));
692        assert_eq!(lexer.next(), Some(Ok(Token::KwAwait)));
693        assert_eq!(lexer.next(), Some(Ok(Token::KwSend)));
694        assert_eq!(lexer.next(), Some(Ok(Token::KwYield)));
695        assert_eq!(lexer.next(), Some(Ok(Token::KwRun)));
696        assert_eq!(lexer.next(), Some(Ok(Token::KwFn)));
697        assert_eq!(lexer.next(), Some(Ok(Token::KwLet)));
698        assert_eq!(lexer.next(), Some(Ok(Token::KwReturn)));
699        assert_eq!(lexer.next(), Some(Ok(Token::KwIf)));
700        assert_eq!(lexer.next(), Some(Ok(Token::KwElse)));
701        assert_eq!(lexer.next(), Some(Ok(Token::KwFor)));
702        assert_eq!(lexer.next(), Some(Ok(Token::KwIn)));
703        assert_eq!(lexer.next(), Some(Ok(Token::KwSelf)));
704        assert_eq!(lexer.next(), Some(Ok(Token::KwTrue)));
705        assert_eq!(lexer.next(), Some(Ok(Token::KwFalse)));
706        assert_eq!(lexer.next(), None);
707    }
708
709    #[test]
710    fn lex_type_keywords() {
711        let mut lexer = Token::lexer("Int Float Bool String Unit List Option Oracle Agent");
712        assert_eq!(lexer.next(), Some(Ok(Token::TyInt)));
713        assert_eq!(lexer.next(), Some(Ok(Token::TyFloat)));
714        assert_eq!(lexer.next(), Some(Ok(Token::TyBool)));
715        assert_eq!(lexer.next(), Some(Ok(Token::TyString)));
716        assert_eq!(lexer.next(), Some(Ok(Token::TyUnit)));
717        assert_eq!(lexer.next(), Some(Ok(Token::TyList)));
718        assert_eq!(lexer.next(), Some(Ok(Token::TyOption)));
719        assert_eq!(lexer.next(), Some(Ok(Token::TyOracle)));
720        assert_eq!(lexer.next(), Some(Ok(Token::TyAgent)));
721        assert_eq!(lexer.next(), None);
722    }
723
724    #[test]
725    fn lex_integer_literals() {
726        let mut lexer = Token::lexer("42 -7 0 123456");
727        assert_eq!(lexer.next(), Some(Ok(Token::IntLit)));
728        assert_eq!(lexer.slice(), "42");
729        assert_eq!(lexer.next(), Some(Ok(Token::IntLit)));
730        assert_eq!(lexer.slice(), "-7");
731        assert_eq!(lexer.next(), Some(Ok(Token::IntLit)));
732        assert_eq!(lexer.slice(), "0");
733        assert_eq!(lexer.next(), Some(Ok(Token::IntLit)));
734        assert_eq!(lexer.slice(), "123456");
735        assert_eq!(lexer.next(), None);
736    }
737
738    #[test]
739    fn lex_float_literals() {
740        let mut lexer = Token::lexer("3.14 -0.5 0.0 123.456");
741        assert_eq!(lexer.next(), Some(Ok(Token::FloatLit)));
742        assert_eq!(lexer.slice(), "3.14");
743        assert_eq!(lexer.next(), Some(Ok(Token::FloatLit)));
744        assert_eq!(lexer.slice(), "-0.5");
745        assert_eq!(lexer.next(), Some(Ok(Token::FloatLit)));
746        assert_eq!(lexer.slice(), "0.0");
747        assert_eq!(lexer.next(), Some(Ok(Token::FloatLit)));
748        assert_eq!(lexer.slice(), "123.456");
749        assert_eq!(lexer.next(), None);
750    }
751
752    #[test]
753    fn lex_string_literals() {
754        let mut lexer = Token::lexer(r#""hello" "world" "with spaces""#);
755        assert_eq!(lexer.next(), Some(Ok(Token::StringLit)));
756        assert_eq!(lexer.slice(), r#""hello""#);
757        assert_eq!(lexer.next(), Some(Ok(Token::StringLit)));
758        assert_eq!(lexer.slice(), r#""world""#);
759        assert_eq!(lexer.next(), Some(Ok(Token::StringLit)));
760        assert_eq!(lexer.slice(), r#""with spaces""#);
761        assert_eq!(lexer.next(), None);
762    }
763
764    #[test]
765    fn lex_string_with_escapes() {
766        let mut lexer = Token::lexer(r#""hello\nworld" "tab\there" "quote\"here""#);
767        assert_eq!(lexer.next(), Some(Ok(Token::StringLit)));
768        assert_eq!(lexer.slice(), r#""hello\nworld""#);
769        assert_eq!(lexer.next(), Some(Ok(Token::StringLit)));
770        assert_eq!(lexer.slice(), r#""tab\there""#);
771        assert_eq!(lexer.next(), Some(Ok(Token::StringLit)));
772        assert_eq!(lexer.slice(), r#""quote\"here""#);
773        assert_eq!(lexer.next(), None);
774    }
775
776    #[test]
777    fn lex_identifiers() {
778        let mut lexer = Token::lexer("foo bar _private myAgent agent2");
779        assert_eq!(lexer.next(), Some(Ok(Token::Ident)));
780        assert_eq!(lexer.slice(), "foo");
781        assert_eq!(lexer.next(), Some(Ok(Token::Ident)));
782        assert_eq!(lexer.slice(), "bar");
783        assert_eq!(lexer.next(), Some(Ok(Token::Ident)));
784        assert_eq!(lexer.slice(), "_private");
785        assert_eq!(lexer.next(), Some(Ok(Token::Ident)));
786        assert_eq!(lexer.slice(), "myAgent");
787        assert_eq!(lexer.next(), Some(Ok(Token::Ident)));
788        assert_eq!(lexer.slice(), "agent2");
789        assert_eq!(lexer.next(), None);
790    }
791
792    #[test]
793    fn keyword_vs_identifier() {
794        // "agent" is a keyword, "agent_name" is an identifier
795        let mut lexer = Token::lexer("agent agent_name agents");
796        assert_eq!(lexer.next(), Some(Ok(Token::KwAgent)));
797        assert_eq!(lexer.next(), Some(Ok(Token::Ident)));
798        assert_eq!(lexer.slice(), "agent_name");
799        assert_eq!(lexer.next(), Some(Ok(Token::Ident)));
800        assert_eq!(lexer.slice(), "agents");
801        assert_eq!(lexer.next(), None);
802    }
803
804    #[test]
805    fn lex_punctuation() {
806        let mut lexer = Token::lexer("{ } ( ) [ ] , : . ->");
807        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
808        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
809        assert_eq!(lexer.next(), Some(Ok(Token::LParen)));
810        assert_eq!(lexer.next(), Some(Ok(Token::RParen)));
811        assert_eq!(lexer.next(), Some(Ok(Token::LBracket)));
812        assert_eq!(lexer.next(), Some(Ok(Token::RBracket)));
813        assert_eq!(lexer.next(), Some(Ok(Token::Comma)));
814        assert_eq!(lexer.next(), Some(Ok(Token::Colon)));
815        assert_eq!(lexer.next(), Some(Ok(Token::Dot)));
816        assert_eq!(lexer.next(), Some(Ok(Token::Arrow)));
817        assert_eq!(lexer.next(), None);
818    }
819
820    #[test]
821    fn lex_operators() {
822        let mut lexer = Token::lexer("= == != < > <= >= + - * / % ! && || ++");
823        assert_eq!(lexer.next(), Some(Ok(Token::Eq)));
824        assert_eq!(lexer.next(), Some(Ok(Token::EqEq)));
825        assert_eq!(lexer.next(), Some(Ok(Token::Ne)));
826        assert_eq!(lexer.next(), Some(Ok(Token::Lt)));
827        assert_eq!(lexer.next(), Some(Ok(Token::Gt)));
828        assert_eq!(lexer.next(), Some(Ok(Token::Le)));
829        assert_eq!(lexer.next(), Some(Ok(Token::Ge)));
830        assert_eq!(lexer.next(), Some(Ok(Token::Plus)));
831        assert_eq!(lexer.next(), Some(Ok(Token::Minus)));
832        assert_eq!(lexer.next(), Some(Ok(Token::Star)));
833        assert_eq!(lexer.next(), Some(Ok(Token::Slash)));
834        assert_eq!(lexer.next(), Some(Ok(Token::Percent)));
835        assert_eq!(lexer.next(), Some(Ok(Token::Bang)));
836        assert_eq!(lexer.next(), Some(Ok(Token::And)));
837        assert_eq!(lexer.next(), Some(Ok(Token::Or)));
838        assert_eq!(lexer.next(), Some(Ok(Token::PlusPlus)));
839        assert_eq!(lexer.next(), None);
840    }
841
842    #[test]
843    fn skip_whitespace() {
844        let mut lexer = Token::lexer("  agent   belief\n\ttrue  ");
845        assert_eq!(lexer.next(), Some(Ok(Token::KwAgent)));
846        assert_eq!(lexer.next(), Some(Ok(Token::KwBelief)));
847        assert_eq!(lexer.next(), Some(Ok(Token::KwTrue)));
848        assert_eq!(lexer.next(), None);
849    }
850
851    #[test]
852    fn skip_comments() {
853        let mut lexer = Token::lexer("agent // this is a comment\nbelief");
854        assert_eq!(lexer.next(), Some(Ok(Token::KwAgent)));
855        assert_eq!(lexer.next(), Some(Ok(Token::KwBelief)));
856        assert_eq!(lexer.next(), None);
857    }
858
859    #[test]
860    fn comment_at_end() {
861        let mut lexer = Token::lexer("agent // comment at end");
862        assert_eq!(lexer.next(), Some(Ok(Token::KwAgent)));
863        assert_eq!(lexer.next(), None);
864    }
865
866    #[test]
867    fn lex_agent_declaration() {
868        let source = r#"
869            agent Researcher {
870                belief topic: String
871
872                on start {
873                    let result: Oracle<String> = divine("test")
874                    yield(result)
875                }
876            }
877        "#;
878        let tokens: Vec<_> = Token::lexer(source)
879            .map(|r| r.expect("valid token"))
880            .collect();
881
882        assert_eq!(tokens[0], Token::KwAgent);
883        assert_eq!(tokens[1], Token::Ident); // Researcher
884        assert_eq!(tokens[2], Token::LBrace);
885        assert_eq!(tokens[3], Token::KwBelief);
886        assert_eq!(tokens[4], Token::Ident); // topic
887        assert_eq!(tokens[5], Token::Colon);
888        assert_eq!(tokens[6], Token::TyString);
889        assert_eq!(tokens[7], Token::KwOn);
890        assert_eq!(tokens[8], Token::KwStart);
891        assert_eq!(tokens[9], Token::LBrace);
892        assert_eq!(tokens[10], Token::KwLet);
893    }
894
895    #[test]
896    fn is_keyword_helper() {
897        assert!(Token::KwAgent.is_keyword());
898        assert!(Token::KwLet.is_keyword());
899        assert!(!Token::TyInt.is_keyword());
900        assert!(!Token::Ident.is_keyword());
901    }
902
903    #[test]
904    fn is_type_keyword_helper() {
905        assert!(Token::TyInt.is_type_keyword());
906        assert!(Token::TyAgent.is_type_keyword());
907        assert!(!Token::KwAgent.is_type_keyword());
908        assert!(!Token::Ident.is_type_keyword());
909    }
910
911    #[test]
912    fn is_literal_helper() {
913        assert!(Token::IntLit.is_literal());
914        assert!(Token::FloatLit.is_literal());
915        assert!(Token::StringLit.is_literal());
916        assert!(Token::KwTrue.is_literal());
917        assert!(Token::KwFalse.is_literal());
918        assert!(!Token::Ident.is_literal());
919    }
920
921    #[test]
922    fn is_operator_helper() {
923        assert!(Token::Plus.is_operator());
924        assert!(Token::EqEq.is_operator());
925        assert!(Token::PlusPlus.is_operator());
926        assert!(!Token::LBrace.is_operator());
927        assert!(!Token::Ident.is_operator());
928    }
929
930    #[test]
931    fn lex_module_keywords() {
932        let mut lexer = Token::lexer("mod use pub as super");
933        assert_eq!(lexer.next(), Some(Ok(Token::KwMod)));
934        assert_eq!(lexer.next(), Some(Ok(Token::KwUse)));
935        assert_eq!(lexer.next(), Some(Ok(Token::KwPub)));
936        assert_eq!(lexer.next(), Some(Ok(Token::KwAs)));
937        assert_eq!(lexer.next(), Some(Ok(Token::KwSuper)));
938        assert_eq!(lexer.next(), None);
939    }
940
941    #[test]
942    fn lex_path_separator() {
943        let mut lexer = Token::lexer("agents::Researcher");
944        assert_eq!(lexer.next(), Some(Ok(Token::Ident)));
945        assert_eq!(lexer.slice(), "agents");
946        assert_eq!(lexer.next(), Some(Ok(Token::ColonColon)));
947        assert_eq!(lexer.next(), Some(Ok(Token::Ident)));
948        assert_eq!(lexer.slice(), "Researcher");
949        assert_eq!(lexer.next(), None);
950    }
951
952    #[test]
953    fn lex_use_statement() {
954        let mut lexer = Token::lexer("use agents::{Researcher, Coordinator as Coord}");
955        assert_eq!(lexer.next(), Some(Ok(Token::KwUse)));
956        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // agents
957        assert_eq!(lexer.next(), Some(Ok(Token::ColonColon)));
958        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
959        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Researcher
960        assert_eq!(lexer.next(), Some(Ok(Token::Comma)));
961        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Coordinator
962        assert_eq!(lexer.next(), Some(Ok(Token::KwAs)));
963        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Coord
964        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
965        assert_eq!(lexer.next(), None);
966    }
967
968    #[test]
969    fn lex_pub_agent() {
970        let mut lexer = Token::lexer("pub agent Researcher");
971        assert_eq!(lexer.next(), Some(Ok(Token::KwPub)));
972        assert_eq!(lexer.next(), Some(Ok(Token::KwAgent)));
973        assert_eq!(lexer.next(), Some(Ok(Token::Ident)));
974        assert_eq!(lexer.next(), None);
975    }
976
977    #[test]
978    fn token_display() {
979        assert_eq!(format!("{}", Token::KwAgent), "agent");
980        assert_eq!(format!("{}", Token::TyInt), "Int");
981        assert_eq!(format!("{}", Token::IntLit), "<int>");
982        assert_eq!(format!("{}", Token::Ident), "<ident>");
983        assert_eq!(format!("{}", Token::LBrace), "{");
984        assert_eq!(format!("{}", Token::PlusPlus), "++");
985    }
986
987    #[test]
988    fn lex_type_keywords_record_enum_match_const() {
989        let mut lexer = Token::lexer("record enum match const");
990        assert_eq!(lexer.next(), Some(Ok(Token::KwRecord)));
991        assert_eq!(lexer.next(), Some(Ok(Token::KwEnum)));
992        assert_eq!(lexer.next(), Some(Ok(Token::KwMatch)));
993        assert_eq!(lexer.next(), Some(Ok(Token::KwConst)));
994        assert_eq!(lexer.next(), None);
995    }
996
997    #[test]
998    fn lex_fat_arrow() {
999        let mut lexer = Token::lexer("=> -> =");
1000        assert_eq!(lexer.next(), Some(Ok(Token::FatArrow)));
1001        assert_eq!(lexer.next(), Some(Ok(Token::Arrow)));
1002        assert_eq!(lexer.next(), Some(Ok(Token::Eq)));
1003        assert_eq!(lexer.next(), None);
1004    }
1005
1006    #[test]
1007    fn lex_match_expression() {
1008        let mut lexer = Token::lexer("match status { Active => 1, Inactive => 0 }");
1009        assert_eq!(lexer.next(), Some(Ok(Token::KwMatch)));
1010        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // status
1011        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
1012        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Active
1013        assert_eq!(lexer.next(), Some(Ok(Token::FatArrow)));
1014        assert_eq!(lexer.next(), Some(Ok(Token::IntLit))); // 1
1015        assert_eq!(lexer.next(), Some(Ok(Token::Comma)));
1016        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Inactive
1017        assert_eq!(lexer.next(), Some(Ok(Token::FatArrow)));
1018        assert_eq!(lexer.next(), Some(Ok(Token::IntLit))); // 0
1019        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
1020        assert_eq!(lexer.next(), None);
1021    }
1022
1023    #[test]
1024    fn lex_record_declaration() {
1025        let mut lexer = Token::lexer("record Point { x: Int, y: Int }");
1026        assert_eq!(lexer.next(), Some(Ok(Token::KwRecord)));
1027        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Point
1028        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
1029        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // x
1030        assert_eq!(lexer.next(), Some(Ok(Token::Colon)));
1031        assert_eq!(lexer.next(), Some(Ok(Token::TyInt)));
1032        assert_eq!(lexer.next(), Some(Ok(Token::Comma)));
1033        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // y
1034        assert_eq!(lexer.next(), Some(Ok(Token::Colon)));
1035        assert_eq!(lexer.next(), Some(Ok(Token::TyInt)));
1036        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
1037        assert_eq!(lexer.next(), None);
1038    }
1039
1040    #[test]
1041    fn lex_enum_declaration() {
1042        let mut lexer = Token::lexer("enum Status { Active, Pending, Done }");
1043        assert_eq!(lexer.next(), Some(Ok(Token::KwEnum)));
1044        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Status
1045        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
1046        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Active
1047        assert_eq!(lexer.next(), Some(Ok(Token::Comma)));
1048        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Pending
1049        assert_eq!(lexer.next(), Some(Ok(Token::Comma)));
1050        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Done
1051        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
1052        assert_eq!(lexer.next(), None);
1053    }
1054
1055    #[test]
1056    fn lex_const_declaration() {
1057        let mut lexer = Token::lexer("const MAX_RETRIES: Int = 3");
1058        assert_eq!(lexer.next(), Some(Ok(Token::KwConst)));
1059        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // MAX_RETRIES
1060        assert_eq!(lexer.next(), Some(Ok(Token::Colon)));
1061        assert_eq!(lexer.next(), Some(Ok(Token::TyInt)));
1062        assert_eq!(lexer.next(), Some(Ok(Token::Eq)));
1063        assert_eq!(lexer.next(), Some(Ok(Token::IntLit))); // 3
1064        assert_eq!(lexer.next(), None);
1065    }
1066
1067    #[test]
1068    fn new_keywords_are_keywords() {
1069        assert!(Token::KwRecord.is_keyword());
1070        assert!(Token::KwEnum.is_keyword());
1071        assert!(Token::KwMatch.is_keyword());
1072        assert!(Token::KwConst.is_keyword());
1073    }
1074
1075    #[test]
1076    fn lex_loop_break() {
1077        let mut lexer = Token::lexer("loop { break }");
1078        assert_eq!(lexer.next(), Some(Ok(Token::KwLoop)));
1079        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
1080        assert_eq!(lexer.next(), Some(Ok(Token::KwBreak)));
1081        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
1082        assert_eq!(lexer.next(), None);
1083    }
1084
1085    #[test]
1086    fn lex_receives_receive() {
1087        let mut lexer = Token::lexer("agent Worker receives WorkerMsg { receive }");
1088        assert_eq!(lexer.next(), Some(Ok(Token::KwAgent)));
1089        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Worker
1090        assert_eq!(lexer.next(), Some(Ok(Token::KwReceives)));
1091        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // WorkerMsg
1092        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
1093        assert_eq!(lexer.next(), Some(Ok(Token::KwReceive)));
1094        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
1095        assert_eq!(lexer.next(), None);
1096    }
1097
1098    #[test]
1099    fn rfc6_keywords_are_keywords() {
1100        assert!(Token::KwLoop.is_keyword());
1101        assert!(Token::KwBreak.is_keyword());
1102        assert!(Token::KwReceives.is_keyword());
1103        assert!(Token::KwReceive.is_keyword());
1104    }
1105
1106    #[test]
1107    fn lex_error_handling_keywords() {
1108        let mut lexer = Token::lexer("fails try catch error");
1109        assert_eq!(lexer.next(), Some(Ok(Token::KwFails)));
1110        assert_eq!(lexer.next(), Some(Ok(Token::KwTry)));
1111        assert_eq!(lexer.next(), Some(Ok(Token::KwCatch)));
1112        assert_eq!(lexer.next(), Some(Ok(Token::KwError)));
1113        assert_eq!(lexer.next(), None);
1114    }
1115
1116    #[test]
1117    fn lex_try_catch_expression() {
1118        let mut lexer = Token::lexer("let x = try divine(prompt) catch { fallback }");
1119        assert_eq!(lexer.next(), Some(Ok(Token::KwLet)));
1120        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // x
1121        assert_eq!(lexer.next(), Some(Ok(Token::Eq)));
1122        assert_eq!(lexer.next(), Some(Ok(Token::KwTry)));
1123        assert_eq!(lexer.next(), Some(Ok(Token::KwDivine)));
1124        assert_eq!(lexer.next(), Some(Ok(Token::LParen)));
1125        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // prompt
1126        assert_eq!(lexer.next(), Some(Ok(Token::RParen)));
1127        assert_eq!(lexer.next(), Some(Ok(Token::KwCatch)));
1128        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
1129        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // fallback
1130        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
1131        assert_eq!(lexer.next(), None);
1132    }
1133
1134    #[test]
1135    fn lex_fails_function() {
1136        let mut lexer = Token::lexer("fn fetch(url: String) -> String fails { }");
1137        assert_eq!(lexer.next(), Some(Ok(Token::KwFn)));
1138        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // fetch
1139        assert_eq!(lexer.next(), Some(Ok(Token::LParen)));
1140        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // url
1141        assert_eq!(lexer.next(), Some(Ok(Token::Colon)));
1142        assert_eq!(lexer.next(), Some(Ok(Token::TyString)));
1143        assert_eq!(lexer.next(), Some(Ok(Token::RParen)));
1144        assert_eq!(lexer.next(), Some(Ok(Token::Arrow)));
1145        assert_eq!(lexer.next(), Some(Ok(Token::TyString)));
1146        assert_eq!(lexer.next(), Some(Ok(Token::KwFails)));
1147        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
1148        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
1149        assert_eq!(lexer.next(), None);
1150    }
1151
1152    #[test]
1153    fn lex_on_error_handler() {
1154        let mut lexer = Token::lexer("on error(e) { yield(fallback) }");
1155        assert_eq!(lexer.next(), Some(Ok(Token::KwOn)));
1156        assert_eq!(lexer.next(), Some(Ok(Token::KwError)));
1157        assert_eq!(lexer.next(), Some(Ok(Token::LParen)));
1158        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // e
1159        assert_eq!(lexer.next(), Some(Ok(Token::RParen)));
1160        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
1161        assert_eq!(lexer.next(), Some(Ok(Token::KwYield)));
1162        assert_eq!(lexer.next(), Some(Ok(Token::LParen)));
1163        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // fallback
1164        assert_eq!(lexer.next(), Some(Ok(Token::RParen)));
1165        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
1166        assert_eq!(lexer.next(), None);
1167    }
1168
1169    #[test]
1170    fn rfc7_keywords_are_keywords() {
1171        assert!(Token::KwFail.is_keyword());
1172        assert!(Token::KwFails.is_keyword());
1173        assert!(Token::KwTry.is_keyword());
1174        assert!(Token::KwCatch.is_keyword());
1175        assert!(Token::KwError.is_keyword());
1176    }
1177
1178    #[test]
1179    fn lex_fail_expression() {
1180        let mut lexer = Token::lexer("fail \"error message\"");
1181        assert_eq!(lexer.next(), Some(Ok(Token::KwFail)));
1182        assert_eq!(lexer.next(), Some(Ok(Token::StringLit)));
1183        assert_eq!(lexer.next(), None);
1184    }
1185
    // =========================================================================
    // RFC-0009: Closures
    // =========================================================================
1189
1190    #[test]
1191    fn lex_closure_syntax() {
1192        // |x: Int| x + 1
1193        let mut lexer = Token::lexer("|x: Int| x + 1");
1194        assert_eq!(lexer.next(), Some(Ok(Token::Pipe)));
1195        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // x
1196        assert_eq!(lexer.next(), Some(Ok(Token::Colon)));
1197        assert_eq!(lexer.next(), Some(Ok(Token::TyInt)));
1198        assert_eq!(lexer.next(), Some(Ok(Token::Pipe)));
1199        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // x
1200        assert_eq!(lexer.next(), Some(Ok(Token::Plus)));
1201        assert_eq!(lexer.next(), Some(Ok(Token::IntLit)));
1202        assert_eq!(lexer.next(), None);
1203    }
1204
1205    #[test]
1206    fn lex_empty_closure() {
1207        // || 42
1208        let mut lexer = Token::lexer("|| 42");
1209        assert_eq!(lexer.next(), Some(Ok(Token::Or))); // || lexes as Or
1210        assert_eq!(lexer.next(), Some(Ok(Token::IntLit)));
1211        assert_eq!(lexer.next(), None);
1212    }
1213
1214    #[test]
1215    fn lex_fn_type() {
1216        // Fn(Int, String) -> Bool
1217        let mut lexer = Token::lexer("Fn(Int, String) -> Bool");
1218        assert_eq!(lexer.next(), Some(Ok(Token::TyFn)));
1219        assert_eq!(lexer.next(), Some(Ok(Token::LParen)));
1220        assert_eq!(lexer.next(), Some(Ok(Token::TyInt)));
1221        assert_eq!(lexer.next(), Some(Ok(Token::Comma)));
1222        assert_eq!(lexer.next(), Some(Ok(Token::TyString)));
1223        assert_eq!(lexer.next(), Some(Ok(Token::RParen)));
1224        assert_eq!(lexer.next(), Some(Ok(Token::Arrow)));
1225        assert_eq!(lexer.next(), Some(Ok(Token::TyBool)));
1226        assert_eq!(lexer.next(), None);
1227    }
1228
1229    #[test]
1230    fn fn_is_type_keyword() {
1231        assert!(Token::TyFn.is_type_keyword());
1232    }
1233
1234    #[test]
1235    fn pipe_display() {
1236        assert_eq!(format!("{}", Token::Pipe), "|");
1237        assert_eq!(format!("{}", Token::TyFn), "Fn");
1238    }
1239
    // =========================================================================
    // RFC-0011: Tool Support
    // =========================================================================
1243
1244    #[test]
1245    fn lex_tool_keyword() {
1246        let mut lexer = Token::lexer("tool Http { fn get(url: String) -> String }");
1247        assert_eq!(lexer.next(), Some(Ok(Token::KwTool)));
1248        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Http
1249        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
1250        assert_eq!(lexer.next(), Some(Ok(Token::KwFn)));
1251        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // get
1252        assert_eq!(lexer.next(), Some(Ok(Token::LParen)));
1253        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // url
1254        assert_eq!(lexer.next(), Some(Ok(Token::Colon)));
1255        assert_eq!(lexer.next(), Some(Ok(Token::TyString)));
1256        assert_eq!(lexer.next(), Some(Ok(Token::RParen)));
1257        assert_eq!(lexer.next(), Some(Ok(Token::Arrow)));
1258        assert_eq!(lexer.next(), Some(Ok(Token::TyString)));
1259        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
1260        assert_eq!(lexer.next(), None);
1261    }
1262
1263    #[test]
1264    fn tool_is_keyword() {
1265        assert!(Token::KwTool.is_keyword());
1266    }
1267
1268    #[test]
1269    fn lex_agent_use_tool() {
1270        let mut lexer = Token::lexer("agent Fetcher { use Http }");
1271        assert_eq!(lexer.next(), Some(Ok(Token::KwAgent)));
1272        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Fetcher
1273        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
1274        assert_eq!(lexer.next(), Some(Ok(Token::KwUse)));
1275        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Http
1276        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
1277        assert_eq!(lexer.next(), None);
1278    }
1279
    // =========================================================================
    // Observability: trace and span
    // =========================================================================
1283
1284    #[test]
1285    fn lex_trace_span_keywords() {
1286        let mut lexer = Token::lexer("trace span");
1287        assert_eq!(lexer.next(), Some(Ok(Token::KwTrace)));
1288        assert_eq!(lexer.next(), Some(Ok(Token::KwSpan)));
1289        assert_eq!(lexer.next(), None);
1290    }
1291
1292    #[test]
1293    fn trace_span_are_keywords() {
1294        assert!(Token::KwTrace.is_keyword());
1295        assert!(Token::KwSpan.is_keyword());
1296    }
1297
1298    #[test]
1299    fn lex_span_block() {
1300        let mut lexer = Token::lexer("span \"fetch_data\" { let x = 1 }");
1301        assert_eq!(lexer.next(), Some(Ok(Token::KwSpan)));
1302        assert_eq!(lexer.next(), Some(Ok(Token::StringLit)));
1303        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
1304        assert_eq!(lexer.next(), Some(Ok(Token::KwLet)));
1305        assert_eq!(lexer.next(), Some(Ok(Token::Ident)));
1306        assert_eq!(lexer.next(), Some(Ok(Token::Eq)));
1307        assert_eq!(lexer.next(), Some(Ok(Token::IntLit)));
1308        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
1309        assert_eq!(lexer.next(), None);
1310    }
1311
    // =========================================================================
    // Phase 3: Session types and algebraic effects
    // =========================================================================
1315
1316    #[test]
1317    fn lex_protocol_keywords() {
1318        let mut lexer = Token::lexer("protocol follows reply");
1319        assert_eq!(lexer.next(), Some(Ok(Token::KwProtocol)));
1320        assert_eq!(lexer.next(), Some(Ok(Token::KwFollows)));
1321        assert_eq!(lexer.next(), Some(Ok(Token::KwReply)));
1322        assert_eq!(lexer.next(), None);
1323    }
1324
1325    #[test]
1326    fn lex_effect_handler_keywords() {
1327        let mut lexer = Token::lexer("handler handles");
1328        assert_eq!(lexer.next(), Some(Ok(Token::KwHandler)));
1329        assert_eq!(lexer.next(), Some(Ok(Token::KwHandles)));
1330        assert_eq!(lexer.next(), None);
1331    }
1332
1333    #[test]
1334    fn protocol_keywords_are_keywords() {
1335        assert!(Token::KwProtocol.is_keyword());
1336        assert!(Token::KwFollows.is_keyword());
1337        assert!(Token::KwReply.is_keyword());
1338        assert!(Token::KwHandler.is_keyword());
1339        assert!(Token::KwHandles.is_keyword());
1340    }
1341
1342    #[test]
1343    fn lex_protocol_declaration() {
1344        let mut lexer = Token::lexer("protocol SchemaSync { Steward -> API: Changed }");
1345        assert_eq!(lexer.next(), Some(Ok(Token::KwProtocol)));
1346        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // SchemaSync
1347        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
1348        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Steward
1349        assert_eq!(lexer.next(), Some(Ok(Token::Arrow)));
1350        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // API
1351        assert_eq!(lexer.next(), Some(Ok(Token::Colon)));
1352        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Changed
1353        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
1354        assert_eq!(lexer.next(), None);
1355    }
1356
1357    #[test]
1358    fn lex_agent_follows() {
1359        let mut lexer = Token::lexer("agent API follows SchemaSync as APISteward");
1360        assert_eq!(lexer.next(), Some(Ok(Token::KwAgent)));
1361        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // API
1362        assert_eq!(lexer.next(), Some(Ok(Token::KwFollows)));
1363        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // SchemaSync
1364        assert_eq!(lexer.next(), Some(Ok(Token::KwAs)));
1365        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // APISteward
1366        assert_eq!(lexer.next(), None);
1367    }
1368
1369    #[test]
1370    fn lex_handler_declaration() {
1371        let mut lexer = Token::lexer("handler DefaultLLM handles Infer { model: \"gpt-4\" }");
1372        assert_eq!(lexer.next(), Some(Ok(Token::KwHandler)));
1373        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // DefaultLLM
1374        assert_eq!(lexer.next(), Some(Ok(Token::KwHandles)));
1375        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Infer
1376        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
1377        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // model
1378        assert_eq!(lexer.next(), Some(Ok(Token::Colon)));
1379        assert_eq!(lexer.next(), Some(Ok(Token::StringLit)));
1380        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
1381        assert_eq!(lexer.next(), None);
1382    }
1383
1384    #[test]
1385    fn lex_reply_expression() {
1386        let mut lexer = Token::lexer("reply(Ack {})");
1387        assert_eq!(lexer.next(), Some(Ok(Token::KwReply)));
1388        assert_eq!(lexer.next(), Some(Ok(Token::LParen)));
1389        assert_eq!(lexer.next(), Some(Ok(Token::Ident))); // Ack
1390        assert_eq!(lexer.next(), Some(Ok(Token::LBrace)));
1391        assert_eq!(lexer.next(), Some(Ok(Token::RBrace)));
1392        assert_eq!(lexer.next(), Some(Ok(Token::RParen)));
1393        assert_eq!(lexer.next(), None);
1394    }
1395
1396    #[test]
1397    fn token_display_phase3() {
1398        assert_eq!(format!("{}", Token::KwProtocol), "protocol");
1399        assert_eq!(format!("{}", Token::KwFollows), "follows");
1400        assert_eq!(format!("{}", Token::KwReply), "reply");
1401        assert_eq!(format!("{}", Token::KwHandler), "handler");
1402        assert_eq!(format!("{}", Token::KwHandles), "handles");
1403    }
1404}