// llmaker 0.0.1
//
// Make LL(1) token parser code for Rust.
//
// This file was generated by llmaker.
//

use super::lexer;
use super::types;
use std::cmp::Ordering;

/// Errors produced by the generated LL(1) parser.
#[derive(Debug, Clone)]
pub enum ParseError {
  /// A token that does not fit any expected alternative at the current position.
  UnexpectedToken(lexer::Token),
  /// Parsing succeeded but tokens remain; holds the first leftover token.
  RedundantExpression(lexer::Token),
  /// Input ended while more tokens were required.
  Eof,
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
/// Entry point: parses the whole token stream into a `types::Term`.
///
/// Succeeds only when every token is consumed; leftover tokens yield
/// `RedundantExpression`, and over-consumption yields `Eof`.
pub fn parse(tokens: Vec<lexer::Token>) -> Result<types::Term, ParseError> {
  let (term, consumed) = _parse_fn_main(&tokens, 0)?;
  match consumed.cmp(&tokens.len()) {
    Ordering::Less => Err(ParseError::RedundantExpression(tokens[consumed].clone())),
    Ordering::Equal => Ok(term),
    // consumed > tokens.len(): the parser ran past the end of input.
    Ordering::Greater => Err(ParseError::Eof),
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses the start symbol: `head gr setting body EOF`.
///
/// Returns the parsed term and the position just past the consumed tokens.
/// Errors with `Eof` when no token exists at `pos`, or `UnexpectedToken`
/// when the lookahead fits no alternative.
fn _parse_fn_main(tokens: &[lexer::Token], pos: usize) -> Result<(types::Term, usize), ParseError> {
  let mut _token_pos = pos;
  let token1 = tokens.get(pos);
  enum CodeType {
    Code0,
    Other,
  }
  // LL(1) dispatch on the single lookahead token.
  let code_type = token1.ok_or(ParseError::Eof).map(|tok| match tok {
    (lexer::TokenKind::STR(_), _) => CodeType::Code0,

    _ => CodeType::Other,
  });
  let main = match code_type? {
    CodeType::Code0 => {
      let (head, pos) = _parse_fn_head(tokens, pos)?;
      let (_gr, pos) = _parse_fn_gr(tokens, pos)?;
      let (setting, pos) = _parse_fn_setting(tokens, pos)?;
      let (body, pos) = _parse_fn_body(tokens, pos)?;
      let (_v, pos) = _parse_token_Tok_EOF(tokens, pos)?;

      _token_pos = pos;
      // `head` is accumulated in reverse by the recursive rules; restore order.
      let mut v = head;
      v.reverse();
      (v, setting, body)
    }
    _ => {
      // BUGFIX: report the offending token at `pos`, not the first token of
      // the whole stream. `code_type` was `Ok`, so `tokens[pos]` exists.
      return Err(ParseError::UnexpectedToken(tokens[pos].clone()));
    }
  };
  Ok((main, _token_pos))
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses `head`: a possibly-empty run of string tokens, accumulated in
/// reverse order (the caller reverses the final list).
fn _parse_fn_head(tokens: &[lexer::Token], pos: usize) -> Result<(types::Head, usize), ParseError> {
  // Running out of input is an error even though this rule can derive empty.
  let lookahead = tokens.get(pos).ok_or(ParseError::Eof)?;
  match lookahead {
    (lexer::TokenKind::STR(_), _) => {
      let (tok, pos) = _parse_token_Tok_STR(tokens, pos)?;
      let (mut acc, pos) = _parse_fn_head_tail(tokens, pos)?;
      let (stok, rng) = tok;
      let entry = lexer::get_string(stok).unwrap();
      acc.push((rng, entry));
      Ok((acc, pos))
    }
    // Lookahead outside the FIRST set: take the empty alternative.
    _ => Ok((Vec::new(), pos)),
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses the continuation of `head`: zero or more further string tokens,
/// mutually recursive with `_parse_fn_head`.
fn _parse_fn_head_tail(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(types::Head, usize), ParseError> {
  // Running out of input is an error even though this rule can derive empty.
  let lookahead = tokens.get(pos).ok_or(ParseError::Eof)?;
  match lookahead {
    (lexer::TokenKind::STR(_), _) => {
      let (tok, pos) = _parse_token_Tok_STR(tokens, pos)?;
      let (mut acc, pos) = _parse_fn_head(tokens, pos)?;
      let (stok, rng) = tok;
      let entry = lexer::get_string(stok).unwrap();
      acc.push((rng, entry));
      Ok((acc, pos))
    }
    // Lookahead outside the FIRST set: take the empty alternative.
    _ => Ok((Vec::new(), pos)),
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses the mandatory `grammar ;` header. Produces `()` — only the
/// advanced position matters to the caller.
fn _parse_fn_gr(tokens: &[lexer::Token], pos: usize) -> Result<((), usize), ParseError> {
  let mut _token_pos = pos;
  let token1 = tokens.get(pos);
  enum CodeType {
    Code0,
    Other,
  }
  // LL(1) dispatch on the single lookahead token.
  let code_type = token1.ok_or(ParseError::Eof).map(|tok| match tok {
    (lexer::TokenKind::GRAMMAR, _) => CodeType::Code0,

    _ => CodeType::Other,
  });
  let main = match code_type? {
    CodeType::Code0 => {
      let (_v1, pos) = _parse_token_Tok_GRAMMAR(tokens, pos)?;
      let (_v2, pos) = _parse_token_Tok_SEMICOLON(tokens, pos)?;

      _token_pos = pos;
    }
    _ => {
      // BUGFIX: report the offending token at `pos`, not the first token of
      // the whole stream. `code_type` was `Ok`, so `tokens[pos]` exists.
      return Err(ParseError::UnexpectedToken(tokens[pos].clone()));
    }
  };
  Ok((main, _token_pos))
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses the `extern { ... }` setting block and returns the enclosed
/// type declarations as a `types::Setting`.
fn _parse_fn_setting(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(types::Setting, usize), ParseError> {
  let mut _token_pos = pos;
  let token1 = tokens.get(pos);
  enum CodeType {
    Code0,
    Other,
  }
  // LL(1) dispatch on the single lookahead token.
  let code_type = token1.ok_or(ParseError::Eof).map(|tok| match tok {
    (lexer::TokenKind::EXTERN, _) => CodeType::Code0,

    _ => CodeType::Other,
  });
  let main = match code_type? {
    CodeType::Code0 => {
      let (_v1, pos) = _parse_token_Tok_EXTERN(tokens, pos)?;
      let (_v2, pos) = _parse_token_Tok_LCURLYBRACES(tokens, pos)?;
      let (types, pos) = _parse_fn_types(tokens, pos)?;
      let (_v3, pos) = _parse_token_Tok_RCURLYBRACES(tokens, pos)?;

      _token_pos = pos;
      types
    }
    _ => {
      // BUGFIX: report the offending token at `pos`, not the first token of
      // the whole stream. `code_type` was `Ok`, so `tokens[pos]` exists.
      return Err(ParseError::UnexpectedToken(tokens[pos].clone()));
    }
  };
  Ok((main, _token_pos))
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses `enum "Name" { settokens }` and returns the enum name together
/// with its token declarations in source order.
fn _parse_fn_types(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(types::Setting, usize), ParseError> {
  let mut _token_pos = pos;
  let token1 = tokens.get(pos);
  enum CodeType {
    Code0,
    Other,
  }
  // LL(1) dispatch on the single lookahead token.
  let code_type = token1.ok_or(ParseError::Eof).map(|tok| match tok {
    (lexer::TokenKind::ENUM, _) => CodeType::Code0,

    _ => CodeType::Other,
  });
  let main = match code_type? {
    CodeType::Code0 => {
      let (_v1, pos) = _parse_token_Tok_ENUM(tokens, pos)?;
      let (nametok, pos) = _parse_token_Tok_STR(tokens, pos)?;
      let (_v2, pos) = _parse_token_Tok_LCURLYBRACES(tokens, pos)?;
      let (settokens_rev, pos) = _parse_fn_settokens(tokens, pos)?;
      let (_v3, pos) = _parse_token_Tok_RCURLYBRACES(tokens, pos)?;

      _token_pos = pos;
      let (stok, _) = nametok;
      let s = lexer::get_string(stok).unwrap();
      // The list rules accumulate in reverse; restore source order here.
      let mut settokens = settokens_rev;
      settokens.reverse();
      (s, settokens)
    }
    _ => {
      // BUGFIX: report the offending token at `pos`, not the first token of
      // the whole stream. `code_type` was `Ok`, so `tokens[pos]` exists.
      return Err(ParseError::UnexpectedToken(tokens[pos].clone()));
    }
  };
  Ok((main, _token_pos))
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses a possibly-empty list of token declarations, accumulated in
/// reverse order (the caller reverses the final list).
fn _parse_fn_settokens(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(Vec<(types::Range, String, types::TypeStr)>, usize), ParseError> {
  // Running out of input is an error even though this rule can derive empty.
  let lookahead = tokens.get(pos).ok_or(ParseError::Eof)?;
  match lookahead {
    (lexer::TokenKind::CONSTRUCTOR(_), _) => {
      let (item, pos) = _parse_fn_settoken(tokens, pos)?;
      let (mut acc, pos) = _parse_fn_settokens_sub(tokens, pos)?;
      acc.push(item);
      Ok((acc, pos))
    }
    // Lookahead outside the FIRST set: take the empty alternative.
    _ => Ok((Vec::new(), pos)),
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses an optional `, settokens...` continuation of the declaration list.
fn _parse_fn_settokens_sub(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(Vec<(types::Range, String, types::TypeStr)>, usize), ParseError> {
  // Running out of input is an error even though this rule can derive empty.
  let lookahead = tokens.get(pos).ok_or(ParseError::Eof)?;
  match lookahead {
    (lexer::TokenKind::COMMA, _) => {
      let (_comma, pos) = _parse_token_Tok_COMMA(tokens, pos)?;
      let (rest, pos) = _parse_fn_settokens_sub_sub(tokens, pos)?;
      Ok((rest, pos))
    }
    // No comma: the list ends here.
    _ => Ok((Vec::new(), pos)),
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses the declaration that follows a comma, allowing a trailing comma
/// (an empty continuation is accepted).
fn _parse_fn_settokens_sub_sub(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(Vec<(types::Range, String, types::TypeStr)>, usize), ParseError> {
  // Running out of input is an error even though this rule can derive empty.
  let lookahead = tokens.get(pos).ok_or(ParseError::Eof)?;
  match lookahead {
    (lexer::TokenKind::CONSTRUCTOR(_), _) => {
      let (item, pos) = _parse_fn_settoken(tokens, pos)?;
      let (mut acc, pos) = _parse_fn_settokens_sub(tokens, pos)?;
      acc.push(item);
      Ok((acc, pos))
    }
    // Lookahead outside the FIRST set: take the empty alternative.
    _ => Ok((Vec::new(), pos)),
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses a single declaration `Constructor -> "typestr"` and returns its
/// combined source range, constructor name, and type string.
fn _parse_fn_settoken(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<((types::Range, String, types::TypeStr), usize), ParseError> {
  let mut _token_pos = pos;
  let token1 = tokens.get(pos);
  enum CodeType {
    Code0,
    Other,
  }
  // LL(1) dispatch on the single lookahead token.
  let code_type = token1.ok_or(ParseError::Eof).map(|tok| match tok {
    (lexer::TokenKind::CONSTRUCTOR(_), _) => CodeType::Code0,

    _ => CodeType::Other,
  });
  let main = match code_type? {
    CodeType::Code0 => {
      let (name, pos) = _parse_token_Tok_CONSTRUCTOR(tokens, pos)?;
      let (_v, pos) = _parse_token_Tok_ARROW(tokens, pos)?;
      let (typestr, pos) = _parse_token_Tok_STR(tokens, pos)?;

      _token_pos = pos;
      let (v1tok, rng1) = name;
      let v1 = lexer::get_string(v1tok).unwrap();
      let (v2tok, rng2) = typestr;
      let v2 = lexer::get_string(v2tok).unwrap();
      // Range spans from the constructor to the type string.
      (types::Range::unite(rng1, rng2), v1, v2)
    }
    _ => {
      // BUGFIX: report the offending token at `pos`, not the first token of
      // the whole stream. `code_type` was `Ok`, so `tokens[pos]` exists.
      return Err(ParseError::UnexpectedToken(tokens[pos].clone()));
    }
  };
  Ok((main, _token_pos))
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses the BNF body (one or more rules starting with `pub` or a
/// variable name) and returns the rules in source order.
fn _parse_fn_body(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(Vec<types::Bnf>, usize), ParseError> {
  let mut _token_pos = pos;
  let token1 = tokens.get(pos);
  enum CodeType {
    Code0,
    Other,
  }
  // LL(1) dispatch on the single lookahead token.
  let code_type = token1.ok_or(ParseError::Eof).map(|tok| match tok {
    (lexer::TokenKind::PUB, _) => CodeType::Code0,
    (lexer::TokenKind::VAR(_), _) => CodeType::Code0,

    _ => CodeType::Other,
  });
  let main = match code_type? {
    CodeType::Code0 => {
      let (bnflst, pos) = _parse_fn_bnflst(tokens, pos)?;

      _token_pos = pos;
      // The list rules accumulate in reverse; restore source order here.
      let mut v = bnflst;
      v.reverse();
      v
    }
    _ => {
      // BUGFIX: report the offending token at `pos`, not the first token of
      // the whole stream. `code_type` was `Ok`, so `tokens[pos]` exists.
      return Err(ParseError::UnexpectedToken(tokens[pos].clone()));
    }
  };
  Ok((main, _token_pos))
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses a possibly-empty list of BNF rules, accumulated in reverse order
/// (the caller reverses the final list).
fn _parse_fn_bnflst(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(Vec<types::Bnf>, usize), ParseError> {
  // Running out of input is an error even though this rule can derive empty.
  let lookahead = tokens.get(pos).ok_or(ParseError::Eof)?;
  match lookahead {
    (lexer::TokenKind::PUB, _) | (lexer::TokenKind::VAR(_), _) => {
      let (rule, pos) = _parse_fn_bnf(tokens, pos)?;
      let (mut acc, pos) = _parse_fn_bnflst_sub(tokens, pos)?;
      acc.push(rule);
      Ok((acc, pos))
    }
    // Lookahead outside the FIRST set: take the empty alternative.
    _ => Ok((Vec::new(), pos)),
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses an optional `; bnflst...` continuation of the rule list.
fn _parse_fn_bnflst_sub(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(Vec<types::Bnf>, usize), ParseError> {
  // Running out of input is an error even though this rule can derive empty.
  let lookahead = tokens.get(pos).ok_or(ParseError::Eof)?;
  match lookahead {
    (lexer::TokenKind::SEMICOLON, _) => {
      let (_semi, pos) = _parse_token_Tok_SEMICOLON(tokens, pos)?;
      let (rest, pos) = _parse_fn_bnflst_sub_sub(tokens, pos)?;
      Ok((rest, pos))
    }
    // No semicolon: the list ends here.
    _ => Ok((Vec::new(), pos)),
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses the rule following a semicolon, allowing a trailing semicolon
/// (an empty continuation is accepted).
fn _parse_fn_bnflst_sub_sub(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(Vec<types::Bnf>, usize), ParseError> {
  // Running out of input is an error even though this rule can derive empty.
  let lookahead = tokens.get(pos).ok_or(ParseError::Eof)?;
  match lookahead {
    (lexer::TokenKind::PUB, _) | (lexer::TokenKind::VAR(_), _) => {
      let (rule, pos) = _parse_fn_bnf(tokens, pos)?;
      let (mut acc, pos) = _parse_fn_bnflst_sub(tokens, pos)?;
      acc.push(rule);
      Ok((acc, pos))
    }
    // Lookahead outside the FIRST set: take the empty alternative.
    _ => Ok((Vec::new(), pos)),
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses one BNF rule, either `pub name : "type" = { ... }` (Code0) or
/// `name : "type" = { ... }` (Code1), and builds the matching
/// `types::Bnf` variant.
fn _parse_fn_bnf(tokens: &[lexer::Token], pos: usize) -> Result<(types::Bnf, usize), ParseError> {
  let mut _token_pos = pos;
  let token1 = tokens.get(pos);
  enum CodeType {
    Code0,
    Code1,
    Other,
  }
  // LL(1) dispatch on the single lookahead token.
  let code_type = token1.ok_or(ParseError::Eof).map(|tok| match tok {
    (lexer::TokenKind::PUB, _) => CodeType::Code0,
    (lexer::TokenKind::VAR(_), _) => CodeType::Code1,

    _ => CodeType::Other,
  });
  let main = match code_type? {
    CodeType::Code0 => {
      let (_v1, pos) = _parse_token_Tok_PUB(tokens, pos)?;
      let (fnname, pos) = _parse_token_Tok_VAR(tokens, pos)?;
      let (_v2, pos) = _parse_token_Tok_COLON(tokens, pos)?;
      let (typestr, pos) = _parse_token_Tok_STR(tokens, pos)?;
      let (_v3, pos) = _parse_token_Tok_EQ(tokens, pos)?;
      let (_v4, pos) = _parse_token_Tok_LCURLYBRACES(tokens, pos)?;
      let (bnf_code_lst, pos) = _parse_fn_bnf_code_lst(tokens, pos)?;
      let (v5, pos) = _parse_token_Tok_RCURLYBRACES(tokens, pos)?;

      _token_pos = pos;
      let (nametok, rng1) = fnname;
      let name = lexer::get_string(nametok).unwrap();
      let (stok, _) = typestr;
      let s = lexer::get_string(stok).unwrap();
      let (_, rng2) = v5;
      let rng = types::Range::unite(rng1, rng2);
      // NOTE(review): unlike the NonPub branch below, `bnf_code_lst` is NOT
      // reversed here, so Pub rules keep the reversed accumulation order —
      // confirm against the consumer whether this asymmetry is intentional.
      types::Bnf::Pub(rng, name, s, bnf_code_lst)
    }
    CodeType::Code1 => {
      let (fnname, pos) = _parse_token_Tok_VAR(tokens, pos)?;
      let (_v2, pos) = _parse_token_Tok_COLON(tokens, pos)?;
      let (typestr, pos) = _parse_token_Tok_STR(tokens, pos)?;
      let (_v3, pos) = _parse_token_Tok_EQ(tokens, pos)?;
      let (_v4, pos) = _parse_token_Tok_LCURLYBRACES(tokens, pos)?;
      let (bnf_code_lst_rev, pos) = _parse_fn_bnf_code_lst(tokens, pos)?;
      let (v5, pos) = _parse_token_Tok_RCURLYBRACES(tokens, pos)?;

      _token_pos = pos;
      let (nametok, rng1) = fnname;
      let name = lexer::get_string(nametok).unwrap();
      let (stok, _) = typestr;
      let s = lexer::get_string(stok).unwrap();
      let (_, rng2) = v5;
      let rng = types::Range::unite(rng1, rng2);
      // Restore source order of the accumulated alternatives.
      let mut bnf_code_lst = bnf_code_lst_rev;
      bnf_code_lst.reverse();
      types::Bnf::NonPub(rng, name, s, bnf_code_lst)
    }
    _ => {
      // BUGFIX: report the offending token at `pos`, not the first token of
      // the whole stream. `code_type` was `Ok`, so `tokens[pos]` exists.
      return Err(ParseError::UnexpectedToken(tokens[pos].clone()));
    }
  };
  Ok((main, _token_pos))
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses a possibly-empty list of rule alternatives, accumulated in
/// reverse order (the caller reverses the final list).
fn _parse_fn_bnf_code_lst(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(Vec<types::Code>, usize), ParseError> {
  // Running out of input is an error even though this rule can derive empty.
  let lookahead = tokens.get(pos).ok_or(ParseError::Eof)?;
  match lookahead {
    (lexer::TokenKind::ARROW, _) | (lexer::TokenKind::LBRACES, _) => {
      let (alt, pos) = _parse_fn_bnf_code(tokens, pos)?;
      let (mut acc, pos) = _parse_fn_bnf_code_lst_sub(tokens, pos)?;
      acc.push(alt);
      Ok((acc, pos))
    }
    // Lookahead outside the FIRST set: take the empty alternative.
    _ => Ok((Vec::new(), pos)),
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses an optional `, bnf_code...` continuation of the alternative list.
fn _parse_fn_bnf_code_lst_sub(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(Vec<types::Code>, usize), ParseError> {
  // Running out of input is an error even though this rule can derive empty.
  let lookahead = tokens.get(pos).ok_or(ParseError::Eof)?;
  match lookahead {
    (lexer::TokenKind::COMMA, _) => {
      let (_comma, pos) = _parse_token_Tok_COMMA(tokens, pos)?;
      let (rest, pos) = _parse_fn_bnf_code_lst_sub_sub(tokens, pos)?;
      Ok((rest, pos))
    }
    // No comma: the list ends here.
    _ => Ok((Vec::new(), pos)),
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses the alternative that follows a comma, allowing a trailing comma
/// (an empty continuation is accepted).
fn _parse_fn_bnf_code_lst_sub_sub(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(Vec<types::Code>, usize), ParseError> {
  // Running out of input is an error even though this rule can derive empty.
  let lookahead = tokens.get(pos).ok_or(ParseError::Eof)?;
  match lookahead {
    (lexer::TokenKind::ARROW, _) | (lexer::TokenKind::LBRACES, _) => {
      let (alt, pos) = _parse_fn_bnf_code(tokens, pos)?;
      let (mut acc, pos) = _parse_fn_bnf_code_lst_sub(tokens, pos)?;
      acc.push(alt);
      Ok((acc, pos))
    }
    // Lookahead outside the FIRST set: take the empty alternative.
    _ => Ok((Vec::new(), pos)),
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses one rule alternative: an optional `{...}` symbol list followed by
/// `-> { "code" }`. Code0 handles a non-empty symbol list (starts with
/// `{`); Code1 handles the empty list (starts directly with `->`).
fn _parse_fn_bnf_code(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(types::Code, usize), ParseError> {
  let mut _token_pos = pos;
  let token1 = tokens.get(pos);
  enum CodeType {
    Code0,
    Code1,
    Other,
  }
  // LL(1) dispatch on the single lookahead token.
  let code_type = token1.ok_or(ParseError::Eof).map(|tok| match tok {
    (lexer::TokenKind::ARROW, _) => CodeType::Code1,
    (lexer::TokenKind::LBRACES, _) => CodeType::Code0,

    _ => CodeType::Other,
  });
  let main = match code_type? {
    CodeType::Code0 => {
      let (fn_or_token, pos) = _parse_fn_fn_or_token(tokens, pos)?;
      let (fn_or_tokens, pos) = _parse_fn_fn_or_token_lst(tokens, pos)?;
      let (_v1, pos) = _parse_token_Tok_ARROW(tokens, pos)?;
      let (_v2, pos) = _parse_token_Tok_LCURLYBRACES(tokens, pos)?;
      let (code, pos) = _parse_token_Tok_STR(tokens, pos)?;
      let (_v3, pos) = _parse_token_Tok_RCURLYBRACES(tokens, pos)?;

      _token_pos = pos;
      let (codetok, _) = code;
      let codestr = lexer::get_string(codetok).unwrap();
      // Symbols are accumulated in reverse; restore source order.
      let mut v = fn_or_tokens;
      v.push(fn_or_token);
      v.reverse();
      (v, codestr)
    }
    CodeType::Code1 => {
      let (_v1, pos) = _parse_token_Tok_ARROW(tokens, pos)?;
      let (_v2, pos) = _parse_token_Tok_LCURLYBRACES(tokens, pos)?;
      let (code, pos) = _parse_token_Tok_STR(tokens, pos)?;
      let (_v3, pos) = _parse_token_Tok_RCURLYBRACES(tokens, pos)?;

      _token_pos = pos;
      let (codetok, _) = code;
      let codestr = lexer::get_string(codetok).unwrap();
      // Empty symbol list; the original's reverse of a fresh Vec was a no-op.
      (Vec::new(), codestr)
    }
    _ => {
      // BUGFIX: report the offending token at `pos`, not the first token of
      // the whole stream. `code_type` was `Ok`, so `tokens[pos]` exists.
      return Err(ParseError::UnexpectedToken(tokens[pos].clone()));
    }
  };
  Ok((main, _token_pos))
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses a possibly-empty list of `{name: ...}` symbols, accumulated in
/// reverse order (the caller reverses the final list).
fn _parse_fn_fn_or_token_lst(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(Vec<(String, types::FnOrToken)>, usize), ParseError> {
  // Running out of input is an error even though this rule can derive empty.
  let lookahead = tokens.get(pos).ok_or(ParseError::Eof)?;
  match lookahead {
    (lexer::TokenKind::LBRACES, _) => {
      let (item, pos) = _parse_fn_fn_or_token(tokens, pos)?;
      let (mut acc, pos) = _parse_fn_fn_or_token_lst_sub(tokens, pos)?;
      acc.push(item);
      Ok((acc, pos))
    }
    // Lookahead outside the FIRST set: take the empty alternative.
    _ => Ok((Vec::new(), pos)),
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses the continuation of the symbol list, mutually recursive with
/// `_parse_fn_fn_or_token_lst`.
fn _parse_fn_fn_or_token_lst_sub(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(Vec<(String, types::FnOrToken)>, usize), ParseError> {
  // Running out of input is an error even though this rule can derive empty.
  let lookahead = tokens.get(pos).ok_or(ParseError::Eof)?;
  match lookahead {
    (lexer::TokenKind::LBRACES, _) => {
      let (item, pos) = _parse_fn_fn_or_token(tokens, pos)?;
      let (mut acc, pos) = _parse_fn_fn_or_token_lst(tokens, pos)?;
      acc.push(item);
      Ok((acc, pos))
    }
    // Lookahead outside the FIRST set: take the empty alternative.
    _ => Ok((Vec::new(), pos)),
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses one symbol binding `{ name : <fn-or-token> }` and returns the
/// binding name paired with the referenced function or token.
fn _parse_fn_fn_or_token(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<((String, types::FnOrToken), usize), ParseError> {
  let mut _token_pos = pos;
  let token1 = tokens.get(pos);
  enum CodeType {
    Code0,
    Other,
  }
  // LL(1) dispatch on the single lookahead token.
  let code_type = token1.ok_or(ParseError::Eof).map(|tok| match tok {
    (lexer::TokenKind::LBRACES, _) => CodeType::Code0,

    _ => CodeType::Other,
  });
  let main = match code_type? {
    CodeType::Code0 => {
      let (_v1, pos) = _parse_token_Tok_LBRACES(tokens, pos)?;
      let (name, pos) = _parse_token_Tok_VAR(tokens, pos)?;
      let (_v2, pos) = _parse_token_Tok_COLON(tokens, pos)?;
      let (tail, pos) = _parse_fn_fn_or_token_sub(tokens, pos)?;

      _token_pos = pos;
      let (nametok, _) = name;
      let namestr = lexer::get_string(nametok).unwrap();
      (namestr, tail)
    }
    _ => {
      // BUGFIX: report the offending token at `pos`, not the first token of
      // the whole stream. `code_type` was `Ok`, so `tokens[pos]` exists.
      return Err(ParseError::UnexpectedToken(tokens[pos].clone()));
    }
  };
  Ok((main, _token_pos))
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Parses the tail of a symbol binding: a lowercase variable (nonterminal
/// function, Code0) or an uppercase constructor (token, Code1), each
/// followed by the closing `}`.
fn _parse_fn_fn_or_token_sub(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(types::FnOrToken, usize), ParseError> {
  let mut _token_pos = pos;
  let token1 = tokens.get(pos);
  enum CodeType {
    Code0,
    Code1,
    Other,
  }
  // LL(1) dispatch on the single lookahead token.
  let code_type = token1.ok_or(ParseError::Eof).map(|tok| match tok {
    (lexer::TokenKind::CONSTRUCTOR(_), _) => CodeType::Code1,
    (lexer::TokenKind::VAR(_), _) => CodeType::Code0,

    _ => CodeType::Other,
  });
  let main = match code_type? {
    CodeType::Code0 => {
      let (fnname, pos) = _parse_token_Tok_VAR(tokens, pos)?;
      let (_v3, pos) = _parse_token_Tok_RBRACES(tokens, pos)?;

      _token_pos = pos;
      let (fnnametok, _) = fnname;
      let fnnamestr = lexer::get_string(fnnametok).unwrap();
      types::FnOrToken::Function(fnnamestr)
    }
    CodeType::Code1 => {
      let (tokname, pos) = _parse_token_Tok_CONSTRUCTOR(tokens, pos)?;
      let (_v3, pos) = _parse_token_Tok_RBRACES(tokens, pos)?;

      _token_pos = pos;
      let (toknametok, _) = tokname;
      let toknamestr = lexer::get_string(toknametok).unwrap();
      types::FnOrToken::Token(toknamestr)
    }
    _ => {
      // BUGFIX: report the offending token at `pos`, not the first token of
      // the whole stream. `code_type` was `Ok`, so `tokens[pos]` exists.
      return Err(ParseError::UnexpectedToken(tokens[pos].clone()));
    }
  };
  Ok((main, _token_pos))
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `EOF` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_EOF(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::EOF, _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `GRAMMAR` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_GRAMMAR(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::GRAMMAR, _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `EXTERN` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_EXTERN(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::EXTERN, _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `ENUM` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_ENUM(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::ENUM, _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `PUB` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_PUB(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::PUB, _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `VAR` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_VAR(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::VAR(_), _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `CONSTRUCTOR` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_CONSTRUCTOR(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::CONSTRUCTOR(_), _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `LCURLYBRACES` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_LCURLYBRACES(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::LCURLYBRACES, _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `RCURLYBRACES` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_RCURLYBRACES(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::RCURLYBRACES, _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `EQ` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_EQ(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::EQ, _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `COMMA` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_COMMA(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::COMMA, _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `SEMICOLON` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_SEMICOLON(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::SEMICOLON, _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `COLON` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_COLON(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::COLON, _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `LBRACES` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_LBRACES(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::LBRACES, _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `RBRACES` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_RBRACES(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::RBRACES, _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `ARROW` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_ARROW(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::ARROW, _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}

#[allow(non_camel_case_types)]
#[allow(non_snake_case)]
#[allow(unused_parens)]
#[allow(clippy::type_complexity)]
/// Consumes one `STR` token at `pos`; returns the token and `pos + 1`.
fn _parse_token_Tok_STR(
  tokens: &[lexer::Token],
  pos: usize,
) -> Result<(lexer::Token, usize), ParseError> {
  let tok = tokens.get(pos).ok_or(ParseError::Eof)?;
  if matches!(tok, (lexer::TokenKind::STR(_), _)) {
    Ok((tok.clone(), pos + 1))
  } else {
    Err(ParseError::UnexpectedToken(tok.clone()))
  }
}