1#![warn(missing_docs)]
12#![warn(clippy::all)]
13#![deny(unsafe_code)]
14
15use anyhow::{Context, Result};
16use decy_analyzer::patterns::PatternDetector;
17use decy_codegen::CodeGenerator;
18use decy_hir::HirFunction;
19use decy_ownership::{
20 array_slice::ArrayParameterTransformer, borrow_gen::BorrowGenerator,
21 dataflow::DataflowAnalyzer, inference::OwnershipInferencer, lifetime::LifetimeAnalyzer,
22 lifetime_gen::LifetimeAnnotator,
23};
24use decy_parser::parser::CParser;
25use decy_stdlib::StdlibPrototypes;
26use petgraph::graph::{DiGraph, NodeIndex};
27use petgraph::visit::Topo;
28use std::collections::HashMap;
29use std::path::{Path, PathBuf};
30
/// Result of transpiling a single C source file to Rust.
///
/// Serializable so it can be stored in the on-disk [`TranspilationCache`].
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct TranspiledFile {
    /// Path of the original C source file.
    pub source_path: PathBuf,

    /// Generated Rust code for this file.
    pub rust_code: String,

    /// Local header files this source `#include`s (quoted form) that exist
    /// on disk; used for cache invalidation.
    pub dependencies: Vec<PathBuf>,

    /// Names of `fn`/`pub fn` definitions found in the generated Rust code.
    pub functions_exported: Vec<String>,

    /// Commented FFI skeleton text generated for the exported functions.
    pub ffi_declarations: String,
}
52
53impl TranspiledFile {
54 pub fn new(
56 source_path: PathBuf,
57 rust_code: String,
58 dependencies: Vec<PathBuf>,
59 functions_exported: Vec<String>,
60 ffi_declarations: String,
61 ) -> Self {
62 Self {
63 source_path,
64 rust_code,
65 dependencies,
66 functions_exported,
67 ffi_declarations,
68 }
69 }
70}
71
/// Knowledge accumulated while transpiling a multi-file project.
#[derive(Debug, Clone, Default)]
pub struct ProjectContext {
    /// Type name -> the generated Rust line that declared it.
    types: HashMap<String, String>,

    /// Function name -> path (lossy string) of the source file exporting it.
    functions: HashMap<String, String>,

    /// Completed transpilations keyed by their C source path.
    transpiled_files: HashMap<PathBuf, TranspiledFile>,
}
87
88impl ProjectContext {
89 pub fn new() -> Self {
91 Self::default()
92 }
93
94 pub fn add_transpiled_file(&mut self, file: &TranspiledFile) {
99 self.transpiled_files
101 .insert(file.source_path.clone(), file.clone());
102
103 if file.rust_code.contains("struct") {
106 for line in file.rust_code.lines() {
108 if line.contains("struct") {
109 if let Some(name) = self.extract_type_name(line) {
110 self.types.insert(name.clone(), line.to_string());
111 }
112 }
113 }
114 }
115
116 for func_name in &file.functions_exported {
118 self.functions.insert(
119 func_name.clone(),
120 file.source_path.to_string_lossy().to_string(),
121 );
122 }
123 }
124
125 pub fn has_type(&self, type_name: &str) -> bool {
127 self.types.contains_key(type_name)
128 }
129
130 pub fn has_function(&self, func_name: &str) -> bool {
132 self.functions.contains_key(func_name)
133 }
134
135 pub fn get_function_source(&self, func_name: &str) -> Option<&str> {
137 self.functions.get(func_name).map(|s| s.as_str())
138 }
139
140 fn extract_type_name(&self, line: &str) -> Option<String> {
142 let words: Vec<&str> = line.split_whitespace().collect();
144 if let Some(idx) = words.iter().position(|&w| w == "struct" || w == "enum") {
145 if idx + 1 < words.len() {
146 let name = words[idx + 1].trim_end_matches('{').trim_end_matches('<');
147 return Some(name.to_string());
148 }
149 }
150 None
151 }
152}
153
/// Directed graph of `#include` relationships between project files.
///
/// Edges point from the including file to the included file; see
/// [`DependencyGraph::topological_sort`] for the derived build order.
#[derive(Debug, Clone)]
pub struct DependencyGraph {
    /// Node weights are file paths; edges carry no data.
    graph: DiGraph<PathBuf, ()>,

    /// Reverse lookup from a path to its node index in `graph`.
    path_to_node: HashMap<PathBuf, NodeIndex>,
}
166
167impl DependencyGraph {
168 pub fn new() -> Self {
170 Self {
171 graph: DiGraph::new(),
172 path_to_node: HashMap::new(),
173 }
174 }
175
176 pub fn is_empty(&self) -> bool {
178 self.graph.node_count() == 0
179 }
180
181 pub fn file_count(&self) -> usize {
183 self.graph.node_count()
184 }
185
186 pub fn contains_file(&self, path: &Path) -> bool {
188 self.path_to_node.contains_key(path)
189 }
190
191 pub fn add_file(&mut self, path: &Path) {
195 if !self.contains_file(path) {
196 let node = self.graph.add_node(path.to_path_buf());
197 self.path_to_node.insert(path.to_path_buf(), node);
198 }
199 }
200
201 pub fn add_dependency(&mut self, from: &Path, to: &Path) {
205 let from_node = *self
206 .path_to_node
207 .get(from)
208 .expect("from file must be added to graph first");
209 let to_node = *self
210 .path_to_node
211 .get(to)
212 .expect("to file must be added to graph first");
213
214 self.graph.add_edge(from_node, to_node, ());
215 }
216
217 pub fn has_dependency(&self, from: &Path, to: &Path) -> bool {
219 if let (Some(&from_node), Some(&to_node)) =
220 (self.path_to_node.get(from), self.path_to_node.get(to))
221 {
222 self.graph.contains_edge(from_node, to_node)
223 } else {
224 false
225 }
226 }
227
228 pub fn topological_sort(&self) -> Result<Vec<PathBuf>> {
233 if petgraph::algo::is_cyclic_directed(&self.graph) {
235 return Err(anyhow::anyhow!(
236 "Circular dependency detected in file dependencies"
237 ));
238 }
239
240 let mut topo = Topo::new(&self.graph);
241 let mut build_order = Vec::new();
242
243 while let Some(node) = topo.next(&self.graph) {
244 if let Some(path) = self.graph.node_weight(node) {
245 build_order.push(path.clone());
246 }
247 }
248
249 build_order.reverse();
251
252 Ok(build_order)
253 }
254
255 pub fn from_files(files: &[PathBuf]) -> Result<Self> {
259 let mut graph = Self::new();
260
261 for file in files {
263 graph.add_file(file);
264 }
265
266 for file in files {
268 let content = std::fs::read_to_string(file)
269 .with_context(|| format!("Failed to read file: {}", file.display()))?;
270
271 let includes = Self::parse_include_directives(&content);
272
273 let file_dir = file.parent().unwrap_or_else(|| Path::new("."));
275
276 for include in includes {
277 let include_path = file_dir.join(&include);
278
279 if graph.contains_file(&include_path) {
281 graph.add_dependency(file, &include_path);
282 }
283 }
284 }
285
286 Ok(graph)
287 }
288
289 pub fn parse_include_directives(code: &str) -> Vec<String> {
293 let mut includes = Vec::new();
294
295 for line in code.lines() {
296 let trimmed = line.trim();
297 if trimmed.starts_with("#include") {
298 if let Some(start) = trimmed.find('"').or_else(|| trimmed.find('<')) {
300 let end_char = if trimmed.chars().nth(start) == Some('"') {
301 '"'
302 } else {
303 '>'
304 };
305 if let Some(end) = trimmed[start + 1..].find(end_char) {
306 let filename = &trimmed[start + 1..start + 1 + end];
307 includes.push(filename.to_string());
308 }
309 }
310 }
311 }
312
313 includes
314 }
315
316 pub fn has_header_guard(path: &Path) -> Result<bool> {
318 let content = std::fs::read_to_string(path)
319 .with_context(|| format!("Failed to read file: {}", path.display()))?;
320
321 let has_ifndef = content.lines().any(|line| {
322 let trimmed = line.trim();
323 trimmed.starts_with("#ifndef") || trimmed.starts_with("#if !defined")
324 });
325
326 let has_define = content
327 .lines()
328 .any(|line| line.trim().starts_with("#define"));
329 let has_endif = content
330 .lines()
331 .any(|line| line.trim().starts_with("#endif"));
332
333 Ok(has_ifndef && has_define && has_endif)
334 }
335}
336
337impl Default for DependencyGraph {
338 fn default() -> Self {
339 Self::new()
340 }
341}
342
/// Counters reported by [`TranspilationCache::statistics`].
#[derive(Debug, Clone)]
pub struct CacheStatistics {
    /// Lookups served from the cache.
    pub hits: usize,
    /// Lookups that found no valid (fresh) entry.
    pub misses: usize,
    /// Number of entries currently stored in the cache.
    pub total_files: usize,
}
353
/// One cached transpilation result plus the hashes used to validate it.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
struct CacheEntry {
    /// Hex-encoded SHA-256 of the source file at insert time.
    hash: String,
    /// The cached transpilation output.
    transpiled: TranspiledFile,
    /// Hex-encoded SHA-256 of each dependency at insert time.
    dependency_hashes: HashMap<PathBuf, String>,
}
364
/// Content-hash-validated cache of per-file transpilation results.
///
/// Optionally persisted as JSON (`cache.json`) in `cache_dir` via
/// [`TranspilationCache::save`] / [`TranspilationCache::load`].
#[derive(Debug, Clone)]
pub struct TranspilationCache {
    /// Cached results keyed by source path.
    entries: HashMap<PathBuf, CacheEntry>,
    /// Where `save` writes `cache.json`; `None` disables persistence.
    cache_dir: Option<PathBuf>,
    /// Successful lookups this session (not persisted).
    hits: usize,
    /// Failed or stale lookups this session (not persisted).
    misses: usize,
}
400
401impl TranspilationCache {
402 pub fn new() -> Self {
404 Self {
405 entries: HashMap::new(),
406 cache_dir: None,
407 hits: 0,
408 misses: 0,
409 }
410 }
411
412 pub fn with_directory(cache_dir: &Path) -> Self {
414 Self {
415 entries: HashMap::new(),
416 cache_dir: Some(cache_dir.to_path_buf()),
417 hits: 0,
418 misses: 0,
419 }
420 }
421
422 pub fn compute_hash(&self, path: &Path) -> Result<String> {
426 use sha2::{Digest, Sha256};
427
428 let content = std::fs::read(path)
429 .with_context(|| format!("Failed to read file for hashing: {}", path.display()))?;
430
431 let mut hasher = Sha256::new();
432 hasher.update(&content);
433 let result = hasher.finalize();
434
435 Ok(format!("{:x}", result))
436 }
437
438 pub fn insert(&mut self, path: &Path, transpiled: &TranspiledFile) {
440 let hash = match self.compute_hash(path) {
441 Ok(h) => h,
442 Err(_) => return, };
444
445 let mut dependency_hashes = HashMap::new();
447 for dep_path in &transpiled.dependencies {
448 if let Ok(dep_hash) = self.compute_hash(dep_path) {
449 dependency_hashes.insert(dep_path.clone(), dep_hash);
450 }
451 }
452
453 let entry = CacheEntry {
454 hash,
455 transpiled: transpiled.clone(),
456 dependency_hashes,
457 };
458
459 self.entries.insert(path.to_path_buf(), entry);
460 }
461
462 pub fn get(&mut self, path: &Path) -> Option<&TranspiledFile> {
469 let entry = self.entries.get(&path.to_path_buf())?;
470
471 let current_hash = self.compute_hash(path).ok()?;
473 if current_hash != entry.hash {
474 self.misses += 1;
475 return None;
476 }
477
478 for (dep_path, cached_hash) in &entry.dependency_hashes {
480 if let Ok(current_dep_hash) = self.compute_hash(dep_path) {
481 if ¤t_dep_hash != cached_hash {
482 self.misses += 1;
483 return None;
484 }
485 }
486 }
487
488 self.hits += 1;
489 Some(&entry.transpiled)
490 }
491
492 pub fn save(&self) -> Result<()> {
494 let cache_dir = self
495 .cache_dir
496 .as_ref()
497 .ok_or_else(|| anyhow::anyhow!("Cache directory not set"))?;
498
499 std::fs::create_dir_all(cache_dir).with_context(|| {
500 format!("Failed to create cache directory: {}", cache_dir.display())
501 })?;
502
503 let cache_file = cache_dir.join("cache.json");
504 let json =
505 serde_json::to_string_pretty(&self.entries).context("Failed to serialize cache")?;
506
507 std::fs::write(&cache_file, json)
508 .with_context(|| format!("Failed to write cache file: {}", cache_file.display()))?;
509
510 Ok(())
511 }
512
513 pub fn load(cache_dir: &Path) -> Result<Self> {
515 let cache_file = cache_dir.join("cache.json");
516
517 if !cache_file.exists() {
518 return Ok(Self::with_directory(cache_dir));
520 }
521
522 let json = std::fs::read_to_string(&cache_file)
523 .with_context(|| format!("Failed to read cache file: {}", cache_file.display()))?;
524
525 let entries: HashMap<PathBuf, CacheEntry> =
526 serde_json::from_str(&json).context("Failed to deserialize cache")?;
527
528 Ok(Self {
529 entries,
530 cache_dir: Some(cache_dir.to_path_buf()),
531 hits: 0,
532 misses: 0,
533 })
534 }
535
536 pub fn clear(&mut self) {
538 self.entries.clear();
539 self.hits = 0;
540 self.misses = 0;
541 }
542
543 pub fn statistics(&self) -> CacheStatistics {
545 CacheStatistics {
546 hits: self.hits,
547 misses: self.misses,
548 total_files: self.entries.len(),
549 }
550 }
551}
552
553impl Default for TranspilationCache {
554 fn default() -> Self {
555 Self::new()
556 }
557}
558
/// Recursively flattens `#include` directives in `source` into one
/// translation unit.
///
/// - Quoted includes (`#include "file.h"`) are resolved relative to
///   `base_dir` (default `"."`) and spliced in, wrapped in
///   `// BEGIN INCLUDE` / `// END INCLUDE` markers. `processed` records
///   already-spliced paths so each file is included at most once (this also
///   breaks include cycles).
/// - System includes (`#include <hdr.h>`) are commented out; for headers
///   known to `decy_stdlib`, built-in prototypes are injected at most once
///   per header (tracked via `injected_headers`).
/// - Malformed `#include` lines (no closing quote/bracket) pass through
///   unchanged.
///
/// # Errors
/// Fails if a quoted include file cannot be read from disk.
fn preprocess_includes(
    source: &str,
    base_dir: Option<&Path>,
    processed: &mut std::collections::HashSet<PathBuf>,
    stdlib_prototypes: &StdlibPrototypes,
    injected_headers: &mut std::collections::HashSet<String>,
) -> Result<String> {
    let mut result = String::new();
    let base_dir = base_dir.unwrap_or_else(|| Path::new("."));

    for line in source.lines() {
        let trimmed = line.trim();

        if trimmed.starts_with("#include") {
            // Classify the directive: quoted (local) vs angle (system).
            // Any malformed directive is emitted unchanged and skipped.
            let (filename, is_system) = if let Some(start) = trimmed.find('"') {
                if let Some(end) = trimmed[start + 1..].find('"') {
                    let filename = &trimmed[start + 1..start + 1 + end];
                    (filename, false)
                } else {
                    result.push_str(line);
                    result.push('\n');
                    continue;
                }
            } else if let Some(start) = trimmed.find('<') {
                if let Some(end) = trimmed[start + 1..].find('>') {
                    let filename = &trimmed[start + 1..start + 1 + end];
                    (filename, true)
                } else {
                    result.push_str(line);
                    result.push('\n');
                    continue;
                }
            } else {
                result.push_str(line);
                result.push('\n');
                continue;
            };

            if is_system {
                // Keep the original directive visible as a comment.
                result.push_str(&format!("// {}\n", line));

                // Inject built-in prototypes only once per header name.
                if !injected_headers.contains(filename) {
                    injected_headers.insert(filename.to_string());

                    if let Some(header) = decy_stdlib::StdHeader::from_filename(filename) {
                        result
                            .push_str(&format!("// BEGIN: Built-in prototypes for {}\n", filename));
                        result.push_str(&stdlib_prototypes.inject_prototypes_for_header(header));
                        result.push_str(&format!("// END: Built-in prototypes for {}\n", filename));
                    } else {
                        result.push_str(&format!("// Unknown system header: {}\n", filename));
                    }
                }

                continue;
            }

            let include_path = base_dir.join(filename);

            // Include-once semantics (behaves like an implicit #pragma once).
            if processed.contains(&include_path) {
                result.push_str(&format!("// Already included: {}\n", filename));
                continue;
            }

            if let Ok(included_content) = std::fs::read_to_string(&include_path) {
                processed.insert(include_path.clone());

                // Nested includes resolve relative to the included file's
                // own directory, mirroring C preprocessor behavior.
                let included_dir = include_path.parent().unwrap_or(base_dir);

                let preprocessed = preprocess_includes(
                    &included_content,
                    Some(included_dir),
                    processed,
                    stdlib_prototypes,
                    injected_headers,
                )?;

                result.push_str(&format!("// BEGIN INCLUDE: {}\n", filename));
                result.push_str(&preprocessed);
                result.push_str(&format!("// END INCLUDE: {}\n", filename));
            } else {
                anyhow::bail!("Failed to find include file: {}", include_path.display());
            }
        } else {
            // Non-include lines are copied through verbatim.
            result.push_str(line);
            result.push('\n');
        }
    }

    Ok(result)
}
692
/// Transpiles a string of C source code to Rust.
///
/// Convenience wrapper over [`transpile_with_includes`] with no base
/// directory, so quoted includes resolve relative to the current directory.
///
/// # Errors
/// Returns an error if include preprocessing or C parsing fails.
pub fn transpile(c_code: &str) -> Result<String> {
    transpile_with_includes(c_code, None)
}
721
/// Transpiles C code to Rust, resolving quoted `#include`s relative to
/// `base_dir` (or `"."` when `None`).
///
/// Pipeline stages, in order:
/// 1. flatten includes into one translation unit ([`preprocess_includes`]);
/// 2. parse the preprocessed C into an AST;
/// 3. lower functions, structs, globals, and typedefs to HIR;
/// 4. per function: dataflow analysis, ownership inference, borrow
///    generation, array-parameter-to-slice transformation, lifetime
///    analysis and signature annotation;
/// 5. emit Rust source: structs, then typedefs, then globals, then functions.
///
/// # Errors
/// Returns an error if an include cannot be found or the C fails to parse.
pub fn transpile_with_includes(c_code: &str, base_dir: Option<&Path>) -> Result<String> {
    // Stage 1: include preprocessing.
    let stdlib_prototypes = StdlibPrototypes::new();
    let mut processed_files = std::collections::HashSet::new();
    let mut injected_headers = std::collections::HashSet::new();
    let preprocessed = preprocess_includes(
        c_code,
        base_dir,
        &mut processed_files,
        &stdlib_prototypes,
        &mut injected_headers,
    )?;

    // Stage 2: parse the flattened source.
    let parser = CParser::new().context("Failed to create C parser")?;
    let ast = parser
        .parse(&preprocessed)
        .context("Failed to parse C code")?;

    // Stage 3: lower AST items to HIR.
    let hir_functions: Vec<HirFunction> = ast
        .functions()
        .iter()
        .map(HirFunction::from_ast_function)
        .collect();

    let hir_structs: Vec<decy_hir::HirStruct> = ast
        .structs()
        .iter()
        .map(|s| {
            let fields = s
                .fields
                .iter()
                .map(|f| {
                    decy_hir::HirStructField::new(
                        f.name.clone(),
                        decy_hir::HirType::from_ast_type(&f.field_type),
                    )
                })
                .collect();
            decy_hir::HirStruct::new(s.name.clone(), fields)
        })
        .collect();

    // File-scope C variables are lowered as variable-declaration statements.
    let hir_variables: Vec<decy_hir::HirStatement> = ast
        .variables()
        .iter()
        .map(|v| decy_hir::HirStatement::VariableDeclaration {
            name: v.name().to_string(),
            var_type: decy_hir::HirType::from_ast_type(v.var_type()),
            initializer: v
                .initializer()
                .map(decy_hir::HirExpression::from_ast_expression),
        })
        .collect();

    let hir_typedefs: Vec<decy_hir::HirTypedef> = ast
        .typedefs()
        .iter()
        .map(|t| {
            decy_hir::HirTypedef::new(
                t.name().to_string(),
                decy_hir::HirType::from_ast_type(&t.underlying_type),
            )
        })
        .collect();

    // Stage 4: per-function ownership/lifetime transformation chain.
    let mut transformed_functions = Vec::new();

    for func in hir_functions {
        let dataflow_analyzer = DataflowAnalyzer::new();
        let dataflow_graph = dataflow_analyzer.analyze(&func);

        let ownership_inferencer = OwnershipInferencer::new();
        let ownership_inferences = ownership_inferencer.infer(&dataflow_graph);

        let borrow_generator = BorrowGenerator::new();
        let func_with_borrows = borrow_generator.transform_function(&func, &ownership_inferences);

        let array_transformer = ArrayParameterTransformer::new();
        let func_with_slices = array_transformer.transform(&func_with_borrows, &dataflow_graph);

        // Lifetime tracking runs for its side effects on analysis; the
        // tracked lifetimes themselves are currently unused here.
        let lifetime_analyzer = LifetimeAnalyzer::new();
        let scope_tree = lifetime_analyzer.build_scope_tree(&func_with_slices);
        let _lifetimes = lifetime_analyzer.track_lifetimes(&func_with_slices, &scope_tree);

        let lifetime_annotator = LifetimeAnnotator::new();
        let annotated_signature = lifetime_annotator.annotate_function(&func_with_slices);

        transformed_functions.push((func_with_slices, annotated_signature));
    }

    // Stage 5: code generation.
    let code_generator = CodeGenerator::new();
    let mut rust_code = String::new();

    for hir_struct in &hir_structs {
        let struct_code = code_generator.generate_struct(hir_struct);
        rust_code.push_str(&struct_code);
        rust_code.push('\n');
    }

    for typedef in &hir_typedefs {
        // Typedefs that fail to generate are skipped silently.
        if let Ok(typedef_code) = code_generator.generate_typedef(typedef) {
            rust_code.push_str(&typedef_code);
            rust_code.push('\n');
        }
    }

    // C globals become `static mut` items; uninitialized ones are wrapped
    // in Option so they start as None.
    for var_stmt in &hir_variables {
        if let decy_hir::HirStatement::VariableDeclaration {
            name,
            var_type,
            initializer,
        } = var_stmt
        {
            let type_str = CodeGenerator::map_type(var_type);

            if let Some(init_expr) = initializer {
                let init_code = code_generator.generate_expression(init_expr);
                rust_code.push_str(&format!(
                    "static mut {}: {} = {};\n",
                    name, type_str, init_code
                ));
            } else {
                rust_code.push_str(&format!(
                    "static mut {}: Option<{}> = None;\n",
                    name, type_str
                ));
            }
        }
    }
    if !hir_variables.is_empty() {
        rust_code.push('\n');
    }

    for (func, annotated_sig) in &transformed_functions {
        let generated = code_generator.generate_function_with_lifetimes_and_structs(
            func,
            annotated_sig,
            &hir_structs,
        );
        rust_code.push_str(&generated);
        rust_code.push('\n');
    }

    Ok(rust_code)
}
908
909pub fn transpile_with_box_transform(c_code: &str) -> Result<String> {
931 let parser = CParser::new().context("Failed to create C parser")?;
933 let ast = parser.parse(c_code).context("Failed to parse C code")?;
934
935 let hir_functions: Vec<HirFunction> = ast
937 .functions()
938 .iter()
939 .map(HirFunction::from_ast_function)
940 .collect();
941
942 let code_generator = CodeGenerator::new();
944 let pattern_detector = PatternDetector::new();
945 let mut rust_code = String::new();
946
947 for func in &hir_functions {
948 let candidates = pattern_detector.find_box_candidates(func);
950
951 let generated = code_generator.generate_function_with_box_transform(func, &candidates);
952 rust_code.push_str(&generated);
953 rust_code.push('\n');
954 }
955
956 Ok(rust_code)
957}
958
959pub fn transpile_file(path: &Path, _context: &ProjectContext) -> Result<TranspiledFile> {
986 let c_code = std::fs::read_to_string(path)
988 .with_context(|| format!("Failed to read file: {}", path.display()))?;
989
990 let dependencies = extract_dependencies(path, &c_code)?;
992
993 let rust_code = transpile(&c_code)?;
995
996 let functions_exported = extract_function_names(&rust_code);
998
999 let ffi_declarations = generate_ffi_declarations(&functions_exported);
1001
1002 Ok(TranspiledFile::new(
1003 path.to_path_buf(),
1004 rust_code,
1005 dependencies,
1006 functions_exported,
1007 ffi_declarations,
1008 ))
1009}
1010
1011fn extract_dependencies(source_path: &Path, c_code: &str) -> Result<Vec<PathBuf>> {
1016 let mut dependencies = Vec::new();
1017 let source_dir = source_path
1018 .parent()
1019 .ok_or_else(|| anyhow::anyhow!("Source file has no parent directory"))?;
1020
1021 for line in c_code.lines() {
1022 let trimmed = line.trim();
1023 if trimmed.starts_with("#include") {
1024 if let Some(start) = trimmed.find('"') {
1026 if let Some(end) = trimmed[start + 1..].find('"') {
1027 let header_name = &trimmed[start + 1..start + 1 + end];
1028 let header_path = source_dir.join(header_name);
1029 if header_path.exists() {
1030 dependencies.push(header_path);
1031 }
1032 }
1033 }
1034 }
1035 }
1036
1037 Ok(dependencies)
1038}
1039
/// Scans generated Rust source for `fn`/`pub fn` definitions and returns
/// their names with any generic parameter list stripped.
///
/// Line-based heuristic: a definition is any line whose trimmed form starts
/// with `fn ` or `pub fn ` and contains `(`. Other qualifiers
/// (`pub(crate)`, `const fn`, `unsafe fn`, ...) are not recognized — the
/// generator in this crate only emits the two handled forms.
fn extract_function_names(rust_code: &str) -> Vec<String> {
    let mut functions = Vec::new();

    for line in rust_code.lines() {
        let trimmed = line.trim();
        // `strip_prefix` replaces the old hand-maintained byte offsets
        // (7 for "pub fn ", 3 for "fn ") — same behavior, no magic numbers.
        let rest = trimmed
            .strip_prefix("pub fn ")
            .or_else(|| trimmed.strip_prefix("fn "));

        if let Some(rest) = rest {
            if let Some(paren_idx) = rest.find('(') {
                let name = &rest[..paren_idx];
                // Drop a generic parameter list: `mul<T>` -> `mul`.
                let name = name.split('<').next().unwrap_or(name);
                functions.push(name.trim().to_string());
            }
        }
    }

    functions
}
1071
/// Renders a commented FFI skeleton listing the exported function names.
///
/// Returns an empty string when there is nothing to export; otherwise the
/// body of the emitted `extern "C"` block contains one comment line per
/// function name.
fn generate_ffi_declarations(functions: &[String]) -> String {
    if functions.is_empty() {
        return String::new();
    }

    let mut ffi = [
        "// FFI declarations for C interoperability\n",
        "#[no_mangle]\n",
        "extern \"C\" {\n",
    ]
    .concat();

    for name in functions {
        ffi.push_str("    // ");
        ffi.push_str(name);
        ffi.push('\n');
    }

    ffi.push_str("}\n");
    ffi
}
1091
#[cfg(test)]
mod tests {
    //! Smoke tests that drive the public transpilation entry points
    //! end-to-end and assert on substrings of the generated Rust rather
    //! than on exact output.

    use super::*;

    #[test]
    fn test_transpile_simple_function() {
        let c_code = "int add(int a, int b) { return a + b; }";
        let result = transpile(c_code);
        assert!(result.is_ok(), "Transpilation should succeed");

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn add"), "Should contain function name");
        assert!(rust_code.contains("i32"), "Should contain Rust int type");
    }

    #[test]
    fn test_transpile_with_parameters() {
        let c_code = "int multiply(int x, int y) { return x * y; }";
        let result = transpile(c_code);
        assert!(result.is_ok());

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn multiply"));
        assert!(rust_code.contains("x"));
        assert!(rust_code.contains("y"));
    }

    #[test]
    fn test_transpile_void_function() {
        let c_code = "void do_nothing() { }";
        let result = transpile(c_code);
        assert!(result.is_ok());

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn do_nothing"));
    }

    #[test]
    fn test_transpile_with_box_transform_simple() {
        // Exercises the alternate Box-transform pipeline.
        let c_code = "int get_value() { return 42; }";
        let result = transpile_with_box_transform(c_code);
        assert!(result.is_ok());

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn get_value"));
    }

    #[test]
    fn test_transpile_empty_input() {
        // Empty input should produce empty (but successful) output.
        let c_code = "";
        let result = transpile(c_code);
        assert!(result.is_ok());
    }

    #[test]
    fn test_transpile_integration_pipeline() {
        let c_code = r#"
            int calculate(int a, int b) {
                int result;
                result = a + b;
                return result;
            }
        "#;
        let result = transpile(c_code);
        assert!(result.is_ok(), "Full pipeline should execute");

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn calculate"));
        assert!(rust_code.contains("let mut result"));
    }

    #[test]
    fn test_transpile_with_lifetime_annotations() {
        // Exercises the lifetime analysis/annotation stages of the pipeline.
        let c_code = "int add(int a, int b) { return a + b; }";
        let result = transpile(c_code);
        assert!(
            result.is_ok(),
            "Transpilation with lifetime analysis should succeed"
        );

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn add"));

    }
}