pub fn lexer_with_user_keywords(
    path: &str,
    user_keywords: Vec<&str>
) -> Vec<Token>
Expand description

Converts a string to tokens
Input -> path: &str, user_keywords: Vec<&str>
Return -> Vec<Token>
This allows the user to have lexical_scanner create custom tokens, which makes the parsing and/or AST stage more manageable.

Example

pub use lexical_scanner::*;
pub use enums::*;

let path = "./test/test.txt";
let user_keywords = vec!["up", "down", "left", "right"];
let token_list = lexical_scanner::lexer_with_user_keywords(path, user_keywords);

//Display tokens
for (i, token) in token_list.iter().enumerate(){
    println!("{}. {:?}", i, token);
}
 
output ->
0. KW_Use
1. WhiteSpace
2. KW_Super
3. PathSep
4. Star
5. Semi
6. Newline
7. KW_UserDefined("left")
8. Newline
9. KW_UserDefined("down")
10. Newline
11. KW_UserDefined("right")
12. Newline
13. KW_UserDefined("up")
14. Newline