/*
* Copyright (C) 2023 - 2024:
* The Trinitrix Project <soispha@vhack.eu, antifallobst@systemausfall.org>
* SPDX-License-Identifier: GPL-3.0-or-later
*
* This file is part of the Trixy crate for Trinitrix.
*
* Trixy is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
 * along with this program.
* If not, see <https://www.gnu.org/licenses/>.
*/

use crate::parser::lexing::{Keyword, Token, TokenKind, TokenSpan};

use super::TokenStream;

use pretty_assertions::assert_eq;

#[test]
fn test_lexing_trixy() {
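    // The spans below index into the raw input string: `start` is the first
    // byte of the lexeme and `end` is one past its last byte (e.g. "mod"
    // occupies bytes 1..4, since the input begins with a newline).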
    let input = "
mod commands {
    fn expect(event: String) -> String;
}
";
    let token_stream = TokenStream::lex(input).unwrap();
    let expected_token_stream = {
        let tokens = vec![
            Token {
                span: TokenSpan { start: 1, end: 4 },
                kind: TokenKind::Keyword(Keyword::r#mod),
            },
            Token {
                span: TokenSpan { start: 5, end: 13 },
                kind: TokenKind::Identifier("commands".to_owned()),
            },
            Token {
                span: TokenSpan { start: 14, end: 15 },
                kind: TokenKind::CurlyBracketOpen,
            },
            Token {
                span: TokenSpan { start: 20, end: 22 },
                kind: TokenKind::Keyword(Keyword::r#fn),
            },
            Token {
                span: TokenSpan { start: 23, end: 29 },
                kind: TokenKind::Identifier("expect".to_owned()),
            },
            Token {
                span: TokenSpan { start: 29, end: 30 },
                kind: TokenKind::CurvedBracketOpen,
            },
            Token {
                span: TokenSpan { start: 30, end: 35 },
                kind: TokenKind::Identifier("event".to_owned()),
            },
            Token {
                span: TokenSpan { start: 35, end: 36 },
                kind: TokenKind::Colon,
            },
            Token {
                span: TokenSpan { start: 37, end: 43 },
                kind: TokenKind::Identifier("String".to_owned()),
            },
            Token {
                span: TokenSpan { start: 43, end: 44 },
                kind: TokenKind::CurvedBracketClose,
            },
            Token {
                span: TokenSpan { start: 45, end: 47 },
                kind: TokenKind::Arrow,
            },
            Token {
                span: TokenSpan { start: 48, end: 54 },
                kind: TokenKind::Identifier("String".to_owned()),
            },
            Token {
                span: TokenSpan { start: 54, end: 55 },
                kind: TokenKind::Semicolon,
            },
            Token {
                span: TokenSpan { start: 56, end: 57 },
                kind: TokenKind::CurlyBracketClose,
            },
        ];
        TokenStream {
            tokens,
            original_file: input.to_owned(),
        }
    };
    assert_eq!(token_stream, expected_token_stream);
}

#[test]
fn test_failing_lexing() {
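    // The stray `-` after the inner `mod` is not a valid token on its own
    // (only `->` is), so lexing this input is expected to fail.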
    let input = "
mod trinitrix {
    mod - commands {
        fn hi(strings: String) -> String;
    }
}
";
    let token_stream = TokenStream::lex(input);
    eprintln!("{}", token_stream.as_ref().unwrap_err());

    // Uncomment the next line to make this test fail, so that cargo prints
    // the captured error message (alternatively, run `cargo test -- --nocapture`).
    // assert!(!token_stream.is_err());
    assert!(token_stream.is_err());
}

#[test]
fn test_multiple_tokens() {
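    // Adjacent punctuation without separating whitespace ("{{" and "}}")
    // must still be lexed as two separate bracket tokens, and `mod` is
    // recognized as a keyword even where an identifier would be expected.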
    let input = "
mod mod {{
}}
";
    let token_stream = TokenStream::lex(input).unwrap();
    let expected_token_stream = {
        let tokens = vec![
            Token {
                span: TokenSpan { start: 1, end: 4 },
                kind: TokenKind::Keyword(Keyword::r#mod),
            },
            Token {
                span: TokenSpan { start: 5, end: 8 },
                kind: TokenKind::Keyword(Keyword::r#mod),
            },
            Token {
                span: TokenSpan { start: 9, end: 10 },
                kind: TokenKind::CurlyBracketOpen,
            },
            Token {
                span: TokenSpan { start: 10, end: 11 },
                kind: TokenKind::CurlyBracketOpen,
            },
            Token {
                span: TokenSpan { start: 12, end: 13 },
                kind: TokenKind::CurlyBracketClose,
            },
            Token {
                span: TokenSpan { start: 13, end: 14 },
                kind: TokenKind::CurlyBracketClose,
            },
        ];
        TokenStream {
            tokens,
            original_file: input.to_owned(),
        }
    };
    assert_eq!(token_stream, expected_token_stream);
}

#[test]
fn test_comments() {
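    // Both `//` line comments and (non-nested) `/* ... */` block comments
    // are skipped entirely; the remaining tokens keep their byte offsets
    // relative to the original input.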
    let input = "
        // Some comment
        mod mod {{

        }}
        // NOTE(@soispha): We do not support nested multi line comments <2023-12-16>
        /* Some
        * multi
        * line
        * comment
        */
";
    let token_stream = TokenStream::lex(input)
        .map_err(|e| {
            eprintln!("{}", e);
            panic!();
        })
        .unwrap();
    let expected_token_stream = {
        let tokens = vec![
            Token {
                span: TokenSpan { start: 33, end: 36 },
                kind: TokenKind::Keyword(Keyword::r#mod),
            },
            Token {
                span: TokenSpan { start: 37, end: 40 },
                kind: TokenKind::Keyword(Keyword::r#mod),
            },
            Token {
                span: TokenSpan { start: 41, end: 42 },
                kind: TokenKind::CurlyBracketOpen,
            },
            Token {
                span: TokenSpan { start: 42, end: 43 },
                kind: TokenKind::CurlyBracketOpen,
            },
            Token {
                span: TokenSpan { start: 53, end: 54 },
                kind: TokenKind::CurlyBracketClose,
            },
            Token {
                span: TokenSpan { start: 54, end: 55 },
                kind: TokenKind::CurlyBracketClose,
            },
        ];
        TokenStream {
            tokens,
            original_file: input.to_owned(),
        }
    };
    assert_eq!(token_stream, expected_token_stream);
}