1#![recursion_limit = "256"]
11#![warn(clippy::pedantic,clippy::nursery,clippy::complexity,clippy::perf,clippy::correctness,clippy::all)]
12#![warn(clippy::cognitive_complexity,clippy::large_const_arrays)]
13#![warn(clippy::style,clippy::suspicious,large_assignments,rustdoc::all)]
14#![warn(missing_docs)]
15#![allow(clippy::must_use_candidate,clippy::wildcard_imports,clippy::enum_glob_use,clippy::missing_panics_doc,clippy::missing_errors_doc)]
16
17pub(crate) mod ccarp;
18pub(crate) mod ccarp_c;
19pub(crate) mod ccarp_rust;
20
21pub use ccarp::error::{CCErr,Result,ProjectResult};
22pub use ccarp::translator::*;
23
#[cfg(test)]
mod unit_tests {
    use pest::Parser;

    use crate::{ccarp::error::{parse_err, rule_err}, ccarp_c::{decl::*, defs::*, expr::*, stmt::Statement, trans_unit::{FunctionDef, TranslationUnit}, tt::*}, ccarp_rust::{defs::{Context, RFrom}, rustdecl::RDecl, rustexpr::RExpr}, CCErr, Result};

    /// Tokenises a C source string for the tests below: splices
    /// backslash-newline line continuations, parses the whole input as a
    /// token tree, and splits `#include` directives out of the token stream.
    ///
    /// Returns the re-serialised token stream together with the extracted
    /// includes, or a parse/rule error if the input is not valid.
    fn tokenise(string: &str) -> Result<(String, Vec<Include>)> {
        // Splice line continuations before handing the text to the parser.
        let string = string.replace("\\\n", "");
        let parsed = ParserTy::parse(Rule::tokentree, &string)
            .map_err(|e| parse_err!(e))?
            .next()
            .ok_or_else(|| rule_err!("Parsed code is empty!"))?;
        let tt = TokenTree::take(parsed)?;
        // Separate include directives from all other tokens.
        let (includes, tt): (Vec<Token>, Vec<Token>) =
            tt.0.into_iter().partition(|x| matches!(x, Token::Include(_)));
        let includes = includes
            .into_iter()
            .map(|x| match x {
                Token::Include(incl) => incl,
                // `partition` above kept only `Token::Include` values here;
                // fail loudly instead of silently fabricating an empty include.
                _ => unreachable!("partition only retained Token::Include values"),
            })
            .collect();
        Ok((TokenTree(tt).to_string(), includes))
    }

    /// The grammar accepts a minimal translation unit.
    #[test]
    fn parsing_ok() {
        assert!(ParserTy::parse(Rule::tokentree, "int main() { return 0; }").is_ok());
    }

    /// A lone identifier parses into the expected `Identifier` value.
    #[test]
    fn parsing_ident() {
        let parsed = ParserTy::parse(Rule::ident, "ident");
        assert!(parsed.is_ok());
        let ident = Identifier::take(parsed.unwrap().next().unwrap());
        // `unwrap_err` in the message arm is only evaluated (and only moves
        // `ident`) on the diverging failure path, so the use below is fine.
        assert!(ident.is_ok(), "Error: {}", ident.unwrap_err());
        assert_eq!(ident.unwrap(), Identifier(String::from("ident")));
    }

    /// A struct definition lexes into the exact expected token sequence.
    #[test]
    fn parsing_tt() {
        let parsed = ParserTy::parse(Rule::tokentree, "struct ABStruct { int a,b; char* c; }");
        assert!(parsed.is_ok());
        let tt = TokenTree::take(parsed.unwrap().next().unwrap());
        assert!(tt.is_ok(), "Error: {}", tt.unwrap_err());

        assert_eq!(tt.unwrap(), TokenTree(vec![
            Token::Keyword(Keyword(String::from("struct"))),
            Token::Identifier(Identifier(String::from("ABStruct"))),
            Token::Punctuator(Punctuator(String::from("{"))),
            Token::Keyword(Keyword(String::from("int"))),
            Token::Identifier(Identifier(String::from("a"))),
            Token::Punctuator(Punctuator(String::from(","))),
            Token::Identifier(Identifier(String::from("b"))),
            Token::Punctuator(Punctuator(String::from(";"))),
            Token::Keyword(Keyword(String::from("char"))),
            Token::Punctuator(Punctuator(String::from("*"))),
            Token::Identifier(Identifier(String::from("c"))),
            Token::Punctuator(Punctuator(String::from(";"))),
            Token::Punctuator(Punctuator(String::from("}")))
        ]));
    }

    /// `tokenise` re-serialises a function definition with the expected
    /// identifier/punctuator markers.
    #[test]
    fn tokenisation() {
        let res = tokenise("int func(long long a, char *b) { return (int)a; }");
        assert!(res.is_ok());
        assert_eq!(res.unwrap().0.as_str(), "int #ifunc#i #p(#p long long #ia#i #p,#p char #p*#p #ib#i #p)#p #p{#p return #p(#p int #p)#p #ia#i #p;#p #p}#p");
    }

    /// A simple arithmetic expression parses into an `Expression`.
    #[test]
    fn parsing_expr() {
        let tt = tokenise("2+2");
        assert!(tt.is_ok());
        let tt = tt.unwrap();
        let pair = ParserTy::parse(Rule::expr, tt.0.as_str());
        let expr = Expression::take(pair.unwrap().next().unwrap());
        assert!(expr.is_ok());
    }

    /// An initialised variable declaration parses into a `Declaration`.
    #[test]
    fn parsing_decl() {
        let tt = tokenise("int a=2;");
        assert!(tt.is_ok());
        let tt = tt.unwrap();
        let pair = ParserTy::parse(Rule::decl, tt.0.as_str());
        let decl = Declaration::take(pair.unwrap().next().unwrap());
        assert!(decl.is_ok());
    }

    /// An if/else with a nested for-loop parses into a `Statement`.
    #[test]
    fn parsing_stmt() {
        let tt = tokenise("if (i<10) { for(int j=0;j<100;j++) i+=1; } else { return 0; }");
        assert!(tt.is_ok());
        let tt = tt.unwrap();
        let pair = ParserTy::parse(Rule::stmt, tt.0.as_str());
        let stmt = Statement::take(pair.unwrap().next().unwrap());
        assert!(stmt.is_ok());
    }

    /// A full function definition parses into a `FunctionDef`.
    #[test]
    fn parsing_fn() {
        let tt = tokenise("int main() { int a=1,b=0,*c,d=2; a++; b+=a+d; return 0; }");
        assert!(tt.is_ok());
        let tt = tt.unwrap();
        let pair = ParserTy::parse(Rule::fn_def, tt.0.as_str());
        let fun = FunctionDef::take(pair.unwrap().next().unwrap());
        assert!(fun.is_ok());
    }

    /// Globals, a struct definition, and a function together parse into a
    /// `TranslationUnit`.
    #[test]
    fn parsing_translation_unit() {
        let tt = tokenise("int a; char *b; struct CDStruct { int c,d; }; int main() { int a=1,b=0,*c,d=2; a++; b+=a+d; return 0; }");
        assert!(tt.is_ok());
        let tt = tt.unwrap();
        let pair = ParserTy::parse(Rule::trans_unit, tt.0.as_str());
        let unit = TranslationUnit::take(pair.unwrap().next().unwrap());
        assert!(unit.is_ok());
    }

    /// A C expression translates to the equivalent Rust expression text.
    #[test]
    fn translate_expr() {
        let tt = tokenise("2+2");
        assert!(tt.is_ok());
        let tt = tt.unwrap();
        let pair = ParserTy::parse(Rule::expr, tt.0.as_str());
        let expr = Expression::take(pair.unwrap().next().unwrap());
        assert!(expr.is_ok());
        let expr = RExpr::rfrom(expr.unwrap(), &mut Context::default());
        assert!(expr.is_ok());
        // assert_eq! (rather than assert!(a == b)) shows both strings on failure.
        assert_eq!(expr.unwrap().to_string(), "2 + 2");
    }

    /// A C declaration list translates to the equivalent Rust `let` bindings.
    #[test]
    fn translate_decl() {
        let tt = tokenise("int a=0,b=2;");
        assert!(tt.is_ok());
        let tt = tt.unwrap();
        let pair = ParserTy::parse(Rule::decl, tt.0.as_str());
        let decl = Declaration::take(pair.unwrap().next().unwrap());
        assert!(decl.is_ok());
        let decl = RDecl::rfrom(decl.unwrap(), &mut Context::default());
        assert!(decl.is_ok());
        // assert_eq! (rather than assert!(a == b)) shows both strings on failure.
        assert_eq!(decl.unwrap().to_string(), "let mut a: i32=(0);\nlet mut b: i32=(2);");
    }
}