use anyhow::Result;
use regex::Regex;
use std::collections::HashMap;

use crate::declarations::Declaration;

#[derive(Debug, Clone)]
pub struct Token {
  pub token_type: String,
  pub value: String,
}

#[derive(Debug)]
struct Hook {
  index: usize,
  value: String,
}

#[derive(Default)]
pub struct CompilerOptions {
  pub keep_imports: bool,
  pub jsx: bool,
  pub civet_options: Vec<String>,
  pub civet_global: Vec<String>,
  pub cls: bool,
  pub included: bool,

  pub local_declarations: HashMap<String, Declaration>,
  pub global_declarations: HashMap<String, Declaration>,
}

pub struct CompilerResults {
  #[allow(unused)]
  pub options: CompilerOptions,
  pub code: String,
}

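/// Splits source text into coarse lexical tokens. The `token_type` strings are
/// `COMMENT`, `TRIPLE_STRING`, `STRING`, `WHITESPACE` (consecutive whitespace
/// is collapsed into one token), `IDENTIFIER`, `NUMBER`, and `OTHER` (any
/// single remaining character). For well-formed input, concatenating the
/// `value` fields of the returned tokens reproduces the original source, which
/// is what lets the pass below rewrite individual spans in place.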
pub fn tokenize_coffee_script(code: &str) -> Vec<Token> {
  let mut tokens = Vec::new();
  let mut i = 0;
  let chars: Vec<char> = code.chars().collect();

  while i < chars.len() {
    let char = chars[i];
    let next_char = chars.get(i + 1).copied();
    let next_next_char = chars.get(i + 2).copied();

    if char == '#' {
      // Consume the comment up to and including the newline (or to the end of
      // input). Indexing `chars` instead of byte-slicing `code` keeps this
      // correct for multi-byte characters and for a comment on the last line.
      let end = chars[i..]
        .iter()
        .position(|&c| c == '\n')
        .map(|off| i + off)
        .unwrap_or(chars.len() - 1);
      let comment: String = chars[i..=end].iter().collect();
      tokens.push(Token {
        token_type: "COMMENT".to_string(),
        value: comment,
      });
      i = end;
    } else if char == '"' && next_char == Some('"') && next_next_char == Some('"') {
      let mut string = "\"\"\"".to_string();
      i += 3;
      while i < chars.len()
        && !(chars[i] == '"' && chars.get(i + 1) == Some(&'"') && chars.get(i + 2) == Some(&'"'))
      {
        string.push(chars[i]);
        i += 1;
      }
      string.push_str("\"\"\"");
      tokens.push(Token {
        token_type: "TRIPLE_STRING".to_string(),
        value: string,
      });
      i += 2;
    } else if char == '"' || char == '\'' {
      let mut string = char.to_string();
      let mut escaped = false;
      i += 1;
      while i < chars.len() && (chars[i] != char || escaped) {
        string.push(chars[i]);
        escaped = chars[i] == '\\' && !escaped;
        i += 1;
      }
      string.push(char);
      tokens.push(Token {
        token_type: "STRING".to_string(),
        value: string,
      });
    } else if char.is_whitespace() {
      // Collapse consecutive whitespace into a single token.
      if let Some(last_token) = tokens.last_mut() {
        if last_token.token_type == "WHITESPACE" {
          last_token.value.push(char);
        } else {
          tokens.push(Token {
            token_type: "WHITESPACE".to_string(),
            value: char.to_string(),
          });
        }
      } else {
        tokens.push(Token {
          token_type: "WHITESPACE".to_string(),
          value: char.to_string(),
        });
      }
    } else if char.is_alphabetic() || char == '_' || char == '$' || char == '@' {
      let mut identifier = char.to_string();
      i += 1;
      while i < chars.len() && (chars[i].is_alphanumeric() || chars[i] == '_' || chars[i] == '$') {
        identifier.push(chars[i]);
        i += 1;
      }
      tokens.push(Token {
        token_type: "IDENTIFIER".to_string(),
        value: identifier,
      });
      i -= 1;
    } else if char.is_numeric() {
      let mut number = char.to_string();
      i += 1;
      while i < chars.len() && (chars[i].is_numeric() || chars[i] == '.') {
        number.push(chars[i]);
        i += 1;
      }
      tokens.push(Token {
        token_type: "NUMBER".to_string(),
        value: number,
      });
      i -= 1;
    } else {
      tokens.push(Token {
        token_type: "OTHER".to_string(),
        value: char.to_string(),
      });
    }
    i += 1;
  }

  tokens
}

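// Token-navigation helpers: `get_next_token` returns the first non-whitespace
// token at offset `n` or later from `i` (token, matched offset, absolute
// index); `get_next_token_whitespace` returns whatever token sits at the
// offset; `get_prev_token` walks backwards with the same whitespace skipping.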
fn get_next_token(i: usize, n: i32, tokens: &[Token]) -> Option<(Token, i32, usize)> {
  let index = ((i as i32) + n) as usize;
  if index >= tokens.len() {
    return None;
  }

  if tokens[index].token_type == "WHITESPACE" {
    return get_next_token(i, n + 1, tokens);
  }

  Some((tokens[index].clone(), n, index))
}

fn get_next_token_whitespace(i: usize, n: i32, tokens: &[Token]) -> Option<(Token, i32, usize)> {
  let index = ((i as i32) + n) as usize;
  if index >= tokens.len() {
    return None;
  }

  Some((tokens[index].clone(), n, index))
}

fn get_prev_token(i: usize, n: i32, tokens: &[Token]) -> Option<(Token, i32, usize)> {
  // A negative `i - n` wraps to a huge value when cast to usize and is then
  // rejected by the bounds check, so walking past the start yields `None`.
  let index = ((i as i32) - n) as usize;
  if index >= tokens.len() {
    return None;
  }

  if tokens[index].token_type == "WHITESPACE" {
    return get_prev_token(i, n + 1, tokens);
  }

  Some((tokens[index].clone(), n, index))
}

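/// Scans forward from `start` for a token of `expected_type`, optionally with
/// an exact `expected_value`, and returns it with its index. If a
/// `break_on_find` token (type plus optional value) is seen first, the scan
/// aborts with `None`.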
fn find_next_token(
  start: usize,
  tokens: &[Token],
  expected_type: &str,
  expected_value: Option<&str>,
  break_on_find: Option<(&str, Option<&str>)>,
) -> Option<(Token, usize)> {
  let mut idx = start;
  while idx < tokens.len() {
    let token = &tokens[idx];

    if let Some((break_type, break_value)) = break_on_find {
      if token.token_type == break_type {
        match break_value {
          Some(val) if token.value != val => {}
          _ => return None,
        }
      }
    }

    if token.token_type == expected_type {
      match expected_value {
        Some(val) => {
          if token.value == val {
            return Some((token.clone(), idx));
          }
        }
        None => return Some((token.clone(), idx)),
      }
    }
    idx += 1;
  }
  None
}

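/// Expands a user-defined declaration when `token` matches its trigger and the
/// optional previous/next-token conditions hold. Definition-style declarations
/// rewrite `trigger[(args)] name [= ...]` into `name = replacement ...`
/// (prefixed with `new` for constructors); macro declarations require the
/// trigger to be followed by `!`. Returns the token index to resume scanning
/// from together with the replacement text.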
fn apply_declarations(
  token: &Token,
  index: usize,
  tokens: &[Token],
  local_declarations: &HashMap<String, Declaration>,
  global_declarations: &HashMap<String, Declaration>,
) -> Option<(usize, String)> {
  let mut additional_idx = 0;
  if token.token_type == "IDENTIFIER" {
    let values = global_declarations
      .values()
      .chain(local_declarations.values());
    for decl in values {
      let is_declaration = decl.is_definition;
      let is_macro = decl.is_macro;
      let trigger = decl.trigger.clone();

      if token.value == trigger {
        let mut conditions_met = true;

        // Optional condition on the previous non-whitespace token.
        if let Some(prev_condition) = &decl.condition_prev {
          let mut prev_idx = index;
          while prev_idx > 0 {
            prev_idx -= 1;
            if tokens[prev_idx].token_type != "WHITESPACE" {
              break;
            }
          }

          if prev_idx < index {
            if tokens[prev_idx].value != *prev_condition {
              conditions_met = false;
            }
          } else {
            conditions_met = false;
          }
        }

        // Optional condition on the next non-whitespace token.
        if let Some(next_condition) = &decl.condition_next {
          if let Some((next_token, _, _)) = get_next_token(index, 1, tokens) {
            if next_token.value != *next_condition {
              conditions_met = false;
            }
          } else {
            conditions_met = false;
          }
        }

        if conditions_met {
          if is_declaration {
            let mut rewritten = String::new();
            let mut args = String::new();
            let mut cidx = index;
            let mut next_token = if let Some((token, _, idx)) = get_next_token(index, 1, tokens) {
              cidx = idx;
              token
            } else {
              Token {
                token_type: "OTHER".to_string(),
                value: "".to_string(),
              }
            };
            // An optional argument list in parentheses directly after the trigger.
            if next_token.token_type == "OTHER" && next_token.value == "(" {
              if let Some((_, bc_idx)) = find_next_token(index, tokens, "OTHER", Some(")"), None) {
                let mut arg_tokens = Vec::new();
                let mut arg_idx = cidx + 1;

                while arg_idx < bc_idx {
                  arg_tokens.push(&tokens[arg_idx]);
                  arg_idx += 1;
                }

                args = arg_tokens
                  .iter()
                  .map(|t| t.value.clone())
                  .collect::<Vec<String>>()
                  .join("");

                next_token = if let Some((token, _, new_idx)) = get_next_token(bc_idx, 1, tokens) {
                  cidx = new_idx;
                  token
                } else {
                  Token {
                    token_type: "OTHER".to_string(),
                    value: "".to_string(),
                  }
                };
              }
            }
            if next_token.token_type == "IDENTIFIER" {
              if let Some((eq_token, _, _)) = get_next_token(cidx, 1, tokens) {
                if eq_token.value == "=" {
                  rewritten.push_str(
                    format!(
                      "{} = {} ",
                      next_token.value,
                      if decl.is_constructor {
                        format!("new {}", decl.replacement.clone())
                      } else {
                        decl.replacement.clone()
                      }
                    )
                    .as_str(),
                  );
                  if let Some((_, eq_idx)) = find_next_token(
                    index,
                    tokens,
                    "OTHER",
                    Some("="),
                    Some(("WHITESPACE", Some("\n"))),
                  ) {
                    if !args.is_empty() {
                      rewritten.push_str(args.as_str());
                      rewritten.push(',');
                    }
                    additional_idx = eq_idx - index;
                  } else {
                    if !args.is_empty() {
                      rewritten.push_str(args.as_str());
                    } else {
                      rewritten = String::from(rewritten.trim());
                      rewritten.push_str("()");
                    }
                    additional_idx = cidx - index;
                  }
                } else {
                  return None;
                }
              } else {
                return None;
              }
            } else {
              return None;
            }
            return Some((index + 1 + additional_idx, rewritten));
          } else if is_macro {
            let next_token = if let Some((token, _, idx)) = get_next_token(index, 1, tokens) {
              additional_idx = idx;
              token
            } else {
              Token {
                token_type: "OTHER".to_string(),
                value: "".to_string(),
              }
            };
            // Macros are only expanded when written as `trigger!`.
            if next_token.token_type == "OTHER" && next_token.value == "!" {
              return Some((additional_idx, decl.replacement.clone()));
            } else {
              return None;
            }
          } else {
            return Some((index + 1 + additional_idx, decl.replacement.clone()));
          }
        }
      }
    }
  }
  None
}

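/// Concatenates token values starting at `start` until a value listed in
/// `end_chars` or a token type listed in `end_types` is reached, returning the
/// collected string and the index of the stopping token.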
fn get_string_until(
  tokens: &[Token],
  start: usize,
  end_chars: &[&str],
  end_types: &[&str],
) -> (String, usize) {
  let mut result = String::new();
  let mut i = start;
  while i < tokens.len() {
    if end_chars.contains(&tokens[i].value.as_str()) {
      break;
    }
    if end_types.contains(&tokens[i].token_type.as_str()) {
      break;
    }
    result.push_str(&tokens[i].value);
    i += 1;
  }
  (result, i)
}

fn finalize_handle_import(tokens: &[Token], current_idx: usize) -> Result<(bool, Option<Token>)> {
  match get_next_token(current_idx, 1, tokens) {
    Some((current_token, _, _)) => {
      if current_token.token_type == "STRING" {
        Ok((true, Some(current_token)))
      } else {
        Ok((false, None))
      }
    }
    _ => Ok((false, None)),
  }
}

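/// Rewrites an ES-style `import` statement into a `rew::mod::find` call. For a
/// bare `import "..."` the module string is emitted directly; otherwise the
/// produced prefix ends at `module, ` and the caller emits the module string
/// from the returned index. Roughly (illustrative examples, module paths
/// invented):
///
///   import "./x"                  ->  rew::mod::find module, "./x"
///   import { a as b } from "./x"  ->  { a: b } := rew::mod::find module, "./x"
///   import d from "./x"           ->  d := rew::mod::find module, "./x"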
fn handle_import(tokens: &[Token], i: usize) -> (String, usize) {
  let mut result = String::new();
  let mut current_idx = i + 1;

  while current_idx < tokens.len() && tokens[current_idx].token_type == "WHITESPACE" {
    current_idx += 1;
  }

  if current_idx >= tokens.len() {
    return (String::new(), current_idx);
  }

  let token = &tokens[current_idx];

  match token.token_type.as_str() {
    // `import "module"` -- a bare side-effect import.
    "STRING" => {
      result.push_str(&format!("rew::mod::find module, {}", token.value));
      current_idx += 1;
    }
    "IDENTIFIER" | "OTHER" => {
      if token.value == "{" {
        // `import { a, b as c } from "module"` -- named imports only.
        let (imports, new_idx) = get_string_until(tokens, current_idx + 1, &["}"], &[]);
        current_idx = new_idx + 1;

        while current_idx < tokens.len() && tokens[current_idx].value != "from" {
          current_idx += 1;
        }

        current_idx += 1;

        if let Ok((should_handle, _)) = finalize_handle_import(tokens, current_idx) {
          if should_handle {
            let re = Regex::new(r"(\w+)\s+as\s+(\w+)").unwrap();
            let replaced_imports = re.replace_all(&imports, "$1: $2").to_string();
            result.push_str(&format!(
              "{{ {} }} := rew::mod::find module, ",
              replaced_imports
            ));
          }
        }
      } else {
        // `import name [as alias][, { ... }] from "module"` -- a default
        // import, optionally combined with named imports.
        let mut default_name = token.value.clone();
        let mut used_multiple = false;

        let re = Regex::new(r"(\w+)\s+as\s+(\w+)").unwrap();
        while current_idx < tokens.len() && tokens[current_idx].value != "from" {
          if tokens[current_idx].value == "as" {
            if let Some((token, _, _)) = get_next_token(current_idx + 1, 1, tokens) {
              if token.token_type == "IDENTIFIER" {
                default_name = token.value;
              }
            }
          } else if tokens[current_idx].value == "{" {
            let (imports, new_idx) = get_string_until(tokens, current_idx + 1, &["}"], &[]);
            let replaced_imports = re.replace_all(&imports, "$1: $2").to_string();
            default_name.insert_str(0, &format!("{{ {} }} = ", replaced_imports));
            used_multiple = true;
            current_idx = new_idx + 1;
          }
          current_idx += 1;
        }

        current_idx += 1;

        if let Ok((should_handle, _)) = finalize_handle_import(tokens, current_idx) {
          let slug = if used_multiple { "=" } else { ":=" };
          if should_handle {
            result.push_str(&format!(
              "{} {} rew::mod::find module, ",
              default_name, slug
            ));
          }
        }
      }
    }
    _ => {}
  }

  // When an `assert` clause follows, append the module string here (with a
  // trailing comma) and resume after `assert` so the assertion value is
  // emitted next by the main loop.
  if let Some((_, assert_idx)) =
    find_next_token(current_idx, tokens, "IDENTIFIER", Some("assert"), None)
  {
    if let Some((from_token, _)) = find_next_token(current_idx - 1, tokens, "STRING", None, None) {
      result.push_str(&format!("{}, ", from_token.value.trim()));
    }
    current_idx = assert_idx + 1;
  }

  (result, current_idx)
}

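/// Handles the tail of a `using [pub] compiler <name>[.<flag>]` directive:
/// records the option name on `options.civet_options` (and on `civet_global`
/// when `is_pub` is set) and returns the index to resume scanning from.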
fn handle_compiler_options(
  tokens: &[Token],
  options: &mut CompilerOptions,
  i: usize,
  is_pub: bool,
) -> usize {
  let mut current_idx = i + 1;

  if let Some((name_token, idx)) = find_next_token(current_idx, tokens, "IDENTIFIER", None, None) {
    let mut name = name_token.value.clone();
    current_idx = idx + 1;
    // Allow a dotted option name such as `foo.bar`.
    if let Some((_dot, _, idx)) = get_next_token(idx, 1, tokens) {
      if _dot.value == "." {
        if let Some((_state, _, idx)) = get_next_token(idx, 1, tokens) {
          current_idx = idx + 1;
          if _state.token_type == "IDENTIFIER" {
            name.push_str(format!(".{}", _state.value).as_str());
          }
        }
      }
    }

    options.civet_options.push(name.clone());
    if is_pub {
      options.civet_global.push(name.clone());
    }
  }

  current_idx
}

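/// Main preprocessing pass: walks the token stream once and rewrites
/// rew-specific constructs (`&`/`*` pointer syntax, `^` byte strings, `@{...}`
/// decorators, `import`/`export`, `using ...` directives, `private`, `public`
/// and `native` keywords, and user-defined declarations), returning the
/// rewritten source in `CompilerResults::code`.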
pub fn compile_rew_stuff(content: &str, options: &mut CompilerOptions) -> Result<CompilerResults> {
  let tokens = tokenize_coffee_script(content);
  let mut result = String::new();
  let mut i = 0;
  let mut next_function_ignore_name = false;
  let mut hooks: Vec<Hook> = Vec::new();
  let local_declarations = options.local_declarations.clone();
  let global_declarations = options.global_declarations.clone();

  while i < tokens.len() {
    let token = &tokens[i];
    let next_token = get_next_token(i, 1, &tokens);
    let prev_token = if i > 1 {
      get_prev_token(i, 1, &tokens)
    } else {
      None
    };

    // Drop a leading shebang line.
    if token.token_type == "COMMENT" && i < 2 && token.value.starts_with("#!") {
      i += 1;
      continue;
    }

    // `&expr` / `&expr as type` -> rew::ptr::of(expr[, "type"]).
    if token.value == "&" {
      if let Some((next, _, _)) = get_next_token_whitespace(i, 1, &tokens) {
        if next.token_type == "IDENTIFIER"
          || next.token_type == "STRING"
          || next.value == "("
          || next.token_type == "NUMBER"
        {
          result.push_str("rew::ptr::of(");
          if next.value != "(" {
            result.push_str(next.value.clone().as_str());
            if let Some((token, _, idx)) = get_next_token(i + 2, 1, &tokens) {
              if token.value == "as" {
                if let Some((token, _, idx)) = get_next_token(idx, 1, &tokens) {
                  result.push_str(format!(", \"{}\"", token.value).as_str());
                  result.push_str(")");
                  i = idx + 1;
                } else {
                  result.push_str(")");
                  i += 2;
                }
              } else {
                result.push_str(")");
                i += 2;
              }
            } else {
              result.push_str(")");
              i += 2;
            }
          } else {
            i += 2;
          }
          continue;
        }
      }
    }

    // `*expr`, `*expr!`, `*expr as type` and `*expr =` -> rew::ptr::deref(...)
    // (a trailing `=` becomes `.set`, a `!` passes 'any' as the type).
    if token.value == "*" {
      if let Some((next, _, _)) = get_next_token_whitespace(i, 1, &tokens) {
        if next.token_type == "IDENTIFIER" || next.value == "(" {
          result.push_str("rew::ptr::deref(");
          if next.value != "(" {
            result.push_str(next.value.clone().as_str());
            if let Some((token, _, idx)) = get_next_token(i + 1, 1, &tokens) {
              if token.value == "!" {
                result.push_str(", 'any')");
                i = idx + 1;
              } else if let Some((token, _, idx)) = get_next_token(i + 2, 1, &tokens) {
                if token.value == "as" {
                  if let Some((token, _, idx)) = get_next_token(idx, 1, &tokens) {
                    if token.token_type == "IDENTIFIER" {
                      result.push_str(format!(", \"{}\"", token.value).as_str());
                      result.push_str(")");
                      i = idx + 1;
                    } else {
                      result.push_str(")");
                      i += 2;
                    }
                  } else {
                    result.push_str(")");
                    i += 2;
                  }
                } else if token.value == "=" {
                  result.push_str(").set ");
                  i = idx + 1;
                } else if token.value == "!" {
                  result.push_str(", 'any')");
                  i = idx + 1;
                } else {
                  result.push_str(")");
                  i += 2;
                }
              } else {
                result.push_str(")");
                i += 2;
              }
            } else {
              result.push_str(")");
              i += 2;
            }
          } else {
            i += 2;
          }
          continue;
        }
      }
    }

    // `\^` emits a literal `^`: drop the backslash, and the rewrite below is
    // suppressed because the backslash still precedes `^` in the token stream.
    if token.value == "\\" {
      if let Some((next, _, _)) = get_next_token_whitespace(i, 1, &tokens) {
        if next.value == "^" {
          i += 1;
          continue;
        }
      }
    }

    // `^expr` -> rew::encoding::stringToBytes(expr), unless preceded by `/` or `\`.
    if token.value == "^" {
      if let Some((next, _, _)) = get_next_token_whitespace(i, 1, &tokens) {
        if (next.token_type == "STRING" || next.token_type == "IDENTIFIER" || next.value == "(")
          && prev_token
            .clone()
            .is_none_or(|(t, _, _)| t.value != "/" && t.value != "\\")
        {
          result.push_str("rew::encoding::stringToBytes(");
          if next.value != "(" {
            result.push_str(next.value.clone().as_str());
            result.push_str(")");
          }
          i += 2;
          continue;
        }
      }
    }

    // A leading `fn name` becomes `function name`.
    if token.token_type == "IDENTIFIER" && token.value == "fn" && i < 2 {
      if let Some((next, _, _)) = next_token.clone() {
        if prev_token.clone().is_none_or(|(t, _, _)| t.value != ".")
          && next.token_type == "IDENTIFIER"
        {
          result.push_str("function");
          i += 1;
          continue;
        }
      }
    }

    // `@{decorator[, args]}` before a `function` definition: emit
    // `name = decorator "name"[, args],` ahead of the function that follows.
    if prev_token.clone().is_none_or(|(t, _, _)| t.value != ".")
      && prev_token.clone().is_none_or(|(t, _, _)| t.value != ":")
      && token.value == "@"
      && next_token.clone().is_some_and(|(t, _, _)| t.value == "{")
    {
      let main_idx = next_token.clone().unwrap().2;
      if let Some((_, brace_idx)) = find_next_token(main_idx, &tokens, "OTHER", Some("}"), None) {
        if let Some((should_be_func, _, idx)) = get_next_token(brace_idx, 1, &tokens) {
          if should_be_func.value == "function" {
            if let Some((func_name, _, idx)) = get_next_token(idx, 1, &tokens) {
              if let Some((next_token, _, _)) = get_next_token(idx, 1, &tokens) {
                let (decorator_string, _) = get_string_until(&tokens, main_idx, &["}"], &[]);
                let fixed_string = &decorator_string[1..];
                let mut func_parts: Vec<&str> = fixed_string.split(',').collect();
                let decorator_name = func_parts[0];
                func_parts.remove(0);
                let func_args = func_parts.join(",");
                if next_token.value != "." && next_token.value != ":" {
                  result.push_str(
                    format!(
                      "{} = {} {},",
                      func_name.value.clone(),
                      decorator_name,
                      if func_args.is_empty() {
                        format!("\"{}\"", func_name.value.clone())
                      } else {
                        format!("\"{}\", {}", func_name.value.clone(), func_args)
                      }
                    )
                    .as_str(),
                  );
                } else {
                  let (item_name, _) = get_string_until(&tokens, idx, &["("], &[]);
                  result.push_str(
                    format!(
                      "{} = {} {},",
                      item_name.clone(),
                      decorator_name,
                      if func_args.is_empty() {
                        format!("\"{}\"", item_name)
                      } else {
                        format!("\"{}\", {}", item_name, func_args)
                      }
                    )
                    .as_str(),
                  );
                  next_function_ignore_name = true;
                }
                i = brace_idx + 1;
                continue;
              }
            }
          }
        }
      }
    }
    // Rewrite `function name.method(args)` into `name.method = (args) ->` by
    // queuing hooks: ` = ` after the token before `(`, and ` ->` after `)` (or
    // after the return-type identifier when a `:` follows the parentheses).
    if prev_token.clone().is_none_or(|(t, _, _)| t.value != ".")
      && prev_token.clone().is_none_or(|(t, _, _)| t.value != ":")
      && token.value == "function"
      && next_token
        .clone()
        .is_some_and(|(t, _, _)| t.token_type == "IDENTIFIER")
    {
      if let Some((_, _, idx)) = next_token.clone() {
        if let Some((next_token, _, idx)) = get_next_token(idx, 1, &tokens) {
          if next_token.value == "." || next_token.value == ":" {
            let (_, start_idx) = find_next_token(idx, &tokens, "OTHER", Some("("), None).unwrap();
            let (_, end_idx) = find_next_token(idx, &tokens, "OTHER", Some(")"), None).unwrap();
            if !next_function_ignore_name {
              hooks.push(Hook {
                index: start_idx - 1,
                value: " = ".to_string(),
              });
            }
            if let Some((after_end, _, idx)) = get_next_token(end_idx, 1, &tokens) {
              if after_end.value == ":" {
                let (_, identifier_idx) =
                  find_next_token(idx, &tokens, "IDENTIFIER", None, None).unwrap();
                hooks.push(Hook {
                  index: identifier_idx,
                  value: " ->".to_string(),
                });
              } else {
                hooks.push(Hook {
                  index: end_idx,
                  value: " ->".to_string(),
                });
              }
            } else {
              hooks.push(Hook {
                index: end_idx,
                value: " ->".to_string(),
              });
            }
            if next_function_ignore_name {
              next_function_ignore_name = false;
              i = start_idx;
            } else {
              i += 2;
            }
            continue;
          }
        }
      }
    }

    // `using JSX` enables JSX support.
    if prev_token.clone().is_none_or(|(t, _, _)| t.value != ".")
      && prev_token.clone().is_none_or(|(t, _, _)| t.value != ":")
      && token.value == "using"
      && next_token.clone().is_some_and(|(t, _, _)| t.value == "JSX")
    {
      options.jsx = true;
    }

    // `using compiler <name>` / `using pub compiler <name>` forwards compiler
    // options (globally when `pub`/`public` is used).
    if prev_token.clone().is_none_or(|(t, _, _)| t.value != ".")
      && prev_token.clone().is_none_or(|(t, _, _)| t.value != ":")
      && token.value == "using"
      && next_token
        .clone()
        .is_some_and(|(t, _, _)| t.value == "compiler" || t.value == "pub" || t.value == "public")
    {
      if let Some((next, _, idx)) = next_token.clone() {
        if next.value == "pub" || next.value == "public" {
          if let Some((next_token, _, idx)) = get_next_token(idx, 1, &tokens) {
            if next_token.value == "compiler" {
              i = handle_compiler_options(&tokens, options, idx, true);
              continue;
            } else if next_token.value == "JSX" {
              options.jsx = true;
              options.civet_global.push("JSX".to_string());
            }
          }
        } else {
          i = handle_compiler_options(&tokens, options, idx, false);
          continue;
        }
      }
    }

    // `private!` or `private _name` keeps the `private` keyword; a bare
    // `private` becomes `pvt`.
    if prev_token.clone().is_none_or(|(t, _, _)| t.value != ".")
      && prev_token.clone().is_none_or(|(t, _, _)| t.value != ":")
      && token.value == "private"
      && next_token
        .clone()
        .is_some_and(|(t, _, _)| t.value == "!" || t.value.starts_with("_"))
    {
      result.push_str("private ");
      i += 2;
      continue;
    }

    if prev_token.clone().is_none_or(|(t, _, _)| t.value != ".")
      && prev_token.clone().is_none_or(|(t, _, _)| t.value != ":")
      && token.value == "private"
    {
      result.push_str("pvt");
      i += 1;
      continue;
    }

    // `native` -> `declare`.
    if prev_token.clone().is_none_or(|(t, _, _)| t.value != ".")
      && prev_token.clone().is_none_or(|(t, _, _)| t.value != ":")
      && token.value == "native"
    {
      result.push_str("declare");
      i += 1;
      continue;
    }

    // `public` -> `pub`.
    if prev_token.clone().is_none_or(|(t, _, _)| t.value != ".")
      && prev_token.clone().is_none_or(|(t, _, _)| t.value != ":")
      && token.value == "public"
    {
      result.push_str("pub");
      i += 1;
      continue;
    }

    // A `# @cls` comment enables class handling.
    if token.token_type == "COMMENT" && token.value[1..].trim() == "@cls" {
      options.cls = true;
    }

    // Drop the `default` keyword of `export default` (the export itself is
    // handled below).
    if prev_token
      .clone()
      .is_none_or(|(t, _, _)| t.value == "export")
      && token.token_type == "IDENTIFIER"
      && token.value == "default"
      && !options.keep_imports
    {
      i += 1;
      continue;
    }

    // `package name` -> `rew::mod::package "name"`.
    if prev_token.clone().is_none_or(|(t, _, _)| t.value != ".")
      && prev_token.clone().is_none_or(|(t, _, _)| t.value != ":")
      && token.token_type == "IDENTIFIER"
      && token.value == "package"
    {
      if let Some((next_token, _, idx)) = get_next_token(i, 1, &tokens) {
        if next_token.token_type == "IDENTIFIER" {
          let (item, new_idx) = get_string_until(&tokens, idx, &[";"], &["WHITESPACE"]);
          result.push_str(format!("rew::mod::package \"{}\"", item).as_str());
          i = new_idx;
        } else {
          i += 1;
        }
        continue;
      }
    }

    // `export ...` -> `module.exports` assignments (unless imports are kept).
    if prev_token.clone().is_none_or(|(t, _, _)| t.value != ".")
      && prev_token.clone().is_none_or(|(t, _, _)| t.value != ":")
      && token.token_type == "IDENTIFIER"
      && token.value == "export"
      && !options.keep_imports
    {
      if let Some((next_token, _, idx)) = get_next_token(i, 1, &tokens) {
        if next_token.value == "{" {
          result.push_str("module.exports = ");
        } else {
          let mut title = next_token.value.clone();
          if next_token.value == "default" {
            i += 1;
          }
          if next_token.value == "class" {
            if let Some((next_token, _, _)) = get_next_token(idx, 1, &tokens) {
              title = next_token.value.clone();
            }
          }
          if next_token.value == "function" {
            if let Some((next_token, _, _)) = get_next_token(idx, 1, &tokens) {
              title = next_token.value.clone();
            }
          }

          result.push_str(format!("module.exports.{} = ", title).as_str());
        }
      }
      i += 1;
      continue;
    }

    // `import ...` -> rew::mod::find (see handle_import above).
    if prev_token.is_none_or(|(t, _, _)| t.value != ".")
      && token.token_type == "IDENTIFIER"
      && token.value == "import"
      && !options.keep_imports
    {
      let (import_str, new_idx) = handle_import(&tokens, i);
      result.push_str(&import_str);
      i = new_idx;
      continue;
    }

    // User-defined declarations and macros.
    if let Some((new_idx, replacement)) =
      apply_declarations(token, i, &tokens, &local_declarations, &global_declarations)
    {
      result.push_str(&replacement);
      i = new_idx;
      continue;
    }

    // Default: emit the token unchanged, then flush any hook queued for this index.
    result.push_str(&token.value);
    hooks.retain(|hook| {
      if hook.index == i {
        result.push_str(&hook.value);
        false
      } else {
        true
      }
    });

    i += 1;
  }

  if options.included {
    options.local_declarations = local_declarations;
  }

  // Hand the options back to the caller alongside the rewritten code, leaving
  // a default-initialized `CompilerOptions` behind.
  let compiler_results = CompilerResults {
    options: std::mem::take(options),
    code: result,
  };

  Ok(compiler_results)
}
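
// A minimal smoke-test sketch (not part of the original source): it relies
// only on `tokenize_coffee_script` as defined above and checks that token
// values concatenate back to the input and that the obvious token kinds are
// assigned. The sample snippet is invented for illustration.
#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn tokenizer_roundtrips_and_classifies_a_small_snippet() {
    let src = "x = \"hi\" # note\n";
    let tokens = tokenize_coffee_script(src);

    // Concatenating token values reproduces well-formed input.
    let rebuilt: String = tokens.iter().map(|t| t.value.as_str()).collect();
    assert_eq!(rebuilt, src);

    let kinds: Vec<&str> = tokens.iter().map(|t| t.token_type.as_str()).collect();
    assert_eq!(
      kinds,
      vec![
        "IDENTIFIER",
        "WHITESPACE",
        "OTHER",
        "WHITESPACE",
        "STRING",
        "WHITESPACE",
        "COMMENT"
      ]
    );
  }
}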