1#![warn(missing_docs)]
12#![warn(clippy::all)]
13#![deny(unsafe_code)]
14
15use anyhow::{Context, Result};
16use decy_analyzer::patterns::PatternDetector;
17use decy_codegen::CodeGenerator;
18use decy_hir::HirFunction;
19use decy_ownership::{
20 borrow_gen::BorrowGenerator, dataflow::DataflowAnalyzer, inference::OwnershipInferencer,
21 lifetime::LifetimeAnalyzer, lifetime_gen::LifetimeAnnotator,
22};
23use decy_parser::parser::CParser;
24use petgraph::graph::{DiGraph, NodeIndex};
25use petgraph::visit::Topo;
26use std::collections::HashMap;
27use std::path::{Path, PathBuf};
28
/// Result of transpiling a single C source file to Rust.
///
/// Serializable so it can be persisted in the on-disk transpilation cache.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct TranspiledFile {
    /// Path of the original C source file.
    pub source_path: PathBuf,

    /// The generated Rust source code.
    pub rust_code: String,

    /// Paths of local headers this file `#include`s (used for cache invalidation).
    pub dependencies: Vec<PathBuf>,

    /// Names of functions defined in the generated Rust code.
    pub functions_exported: Vec<String>,

    /// FFI declaration stub text generated for C interoperability.
    pub ffi_declarations: String,
}
50
51impl TranspiledFile {
52 pub fn new(
54 source_path: PathBuf,
55 rust_code: String,
56 dependencies: Vec<PathBuf>,
57 functions_exported: Vec<String>,
58 ffi_declarations: String,
59 ) -> Self {
60 Self {
61 source_path,
62 rust_code,
63 dependencies,
64 functions_exported,
65 ffi_declarations,
66 }
67 }
68}
69
/// Cross-file context accumulated while transpiling a multi-file project.
#[derive(Debug, Clone, Default)]
pub struct ProjectContext {
    /// Type name -> the generated source line that defined it.
    types: HashMap<String, String>,

    /// Function name -> path (lossy string) of the file that exports it.
    functions: HashMap<String, String>,

    /// All files transpiled so far, keyed by their C source path.
    transpiled_files: HashMap<PathBuf, TranspiledFile>,
}
85
86impl ProjectContext {
87 pub fn new() -> Self {
89 Self::default()
90 }
91
92 pub fn add_transpiled_file(&mut self, file: &TranspiledFile) {
97 self.transpiled_files
99 .insert(file.source_path.clone(), file.clone());
100
101 if file.rust_code.contains("struct") {
104 for line in file.rust_code.lines() {
106 if line.contains("struct") {
107 if let Some(name) = self.extract_type_name(line) {
108 self.types.insert(name.clone(), line.to_string());
109 }
110 }
111 }
112 }
113
114 for func_name in &file.functions_exported {
116 self.functions.insert(
117 func_name.clone(),
118 file.source_path.to_string_lossy().to_string(),
119 );
120 }
121 }
122
123 pub fn has_type(&self, type_name: &str) -> bool {
125 self.types.contains_key(type_name)
126 }
127
128 pub fn has_function(&self, func_name: &str) -> bool {
130 self.functions.contains_key(func_name)
131 }
132
133 pub fn get_function_source(&self, func_name: &str) -> Option<&str> {
135 self.functions.get(func_name).map(|s| s.as_str())
136 }
137
138 fn extract_type_name(&self, line: &str) -> Option<String> {
140 let words: Vec<&str> = line.split_whitespace().collect();
142 if let Some(idx) = words.iter().position(|&w| w == "struct" || w == "enum") {
143 if idx + 1 < words.len() {
144 let name = words[idx + 1].trim_end_matches('{').trim_end_matches('<');
145 return Some(name.to_string());
146 }
147 }
148 None
149 }
150}
151
/// Directed graph of `#include` dependencies between C source files.
#[derive(Debug, Clone)]
pub struct DependencyGraph {
    /// Nodes are file paths; an edge A -> B means A includes (depends on) B.
    graph: DiGraph<PathBuf, ()>,

    /// Fast lookup from a path to its node index in `graph`.
    path_to_node: HashMap<PathBuf, NodeIndex>,
}
164
165impl DependencyGraph {
166 pub fn new() -> Self {
168 Self {
169 graph: DiGraph::new(),
170 path_to_node: HashMap::new(),
171 }
172 }
173
174 pub fn is_empty(&self) -> bool {
176 self.graph.node_count() == 0
177 }
178
179 pub fn file_count(&self) -> usize {
181 self.graph.node_count()
182 }
183
184 pub fn contains_file(&self, path: &Path) -> bool {
186 self.path_to_node.contains_key(path)
187 }
188
189 pub fn add_file(&mut self, path: &Path) {
193 if !self.contains_file(path) {
194 let node = self.graph.add_node(path.to_path_buf());
195 self.path_to_node.insert(path.to_path_buf(), node);
196 }
197 }
198
199 pub fn add_dependency(&mut self, from: &Path, to: &Path) {
203 let from_node = *self
204 .path_to_node
205 .get(from)
206 .expect("from file must be added to graph first");
207 let to_node = *self
208 .path_to_node
209 .get(to)
210 .expect("to file must be added to graph first");
211
212 self.graph.add_edge(from_node, to_node, ());
213 }
214
215 pub fn has_dependency(&self, from: &Path, to: &Path) -> bool {
217 if let (Some(&from_node), Some(&to_node)) =
218 (self.path_to_node.get(from), self.path_to_node.get(to))
219 {
220 self.graph.contains_edge(from_node, to_node)
221 } else {
222 false
223 }
224 }
225
226 pub fn topological_sort(&self) -> Result<Vec<PathBuf>> {
231 if petgraph::algo::is_cyclic_directed(&self.graph) {
233 return Err(anyhow::anyhow!(
234 "Circular dependency detected in file dependencies"
235 ));
236 }
237
238 let mut topo = Topo::new(&self.graph);
239 let mut build_order = Vec::new();
240
241 while let Some(node) = topo.next(&self.graph) {
242 if let Some(path) = self.graph.node_weight(node) {
243 build_order.push(path.clone());
244 }
245 }
246
247 build_order.reverse();
249
250 Ok(build_order)
251 }
252
253 pub fn from_files(files: &[PathBuf]) -> Result<Self> {
257 let mut graph = Self::new();
258
259 for file in files {
261 graph.add_file(file);
262 }
263
264 for file in files {
266 let content = std::fs::read_to_string(file)
267 .with_context(|| format!("Failed to read file: {}", file.display()))?;
268
269 let includes = Self::parse_include_directives(&content);
270
271 let file_dir = file.parent().unwrap_or_else(|| Path::new("."));
273
274 for include in includes {
275 let include_path = file_dir.join(&include);
276
277 if graph.contains_file(&include_path) {
279 graph.add_dependency(file, &include_path);
280 }
281 }
282 }
283
284 Ok(graph)
285 }
286
287 pub fn parse_include_directives(code: &str) -> Vec<String> {
291 let mut includes = Vec::new();
292
293 for line in code.lines() {
294 let trimmed = line.trim();
295 if trimmed.starts_with("#include") {
296 if let Some(start) = trimmed.find('"').or_else(|| trimmed.find('<')) {
298 let end_char = if trimmed.chars().nth(start) == Some('"') {
299 '"'
300 } else {
301 '>'
302 };
303 if let Some(end) = trimmed[start + 1..].find(end_char) {
304 let filename = &trimmed[start + 1..start + 1 + end];
305 includes.push(filename.to_string());
306 }
307 }
308 }
309 }
310
311 includes
312 }
313
314 pub fn has_header_guard(path: &Path) -> Result<bool> {
316 let content = std::fs::read_to_string(path)
317 .with_context(|| format!("Failed to read file: {}", path.display()))?;
318
319 let has_ifndef = content.lines().any(|line| {
320 let trimmed = line.trim();
321 trimmed.starts_with("#ifndef") || trimmed.starts_with("#if !defined")
322 });
323
324 let has_define = content
325 .lines()
326 .any(|line| line.trim().starts_with("#define"));
327 let has_endif = content
328 .lines()
329 .any(|line| line.trim().starts_with("#endif"));
330
331 Ok(has_ifndef && has_define && has_endif)
332 }
333}
334
335impl Default for DependencyGraph {
336 fn default() -> Self {
337 Self::new()
338 }
339}
340
/// Hit/miss counters reported by `TranspilationCache::statistics`.
#[derive(Debug, Clone)]
pub struct CacheStatistics {
    /// Number of lookups satisfied from the cache.
    pub hits: usize,
    /// Number of lookups that found a stale entry.
    pub misses: usize,
    /// Number of entries currently stored in the cache.
    pub total_files: usize,
}
351
/// One cached transpilation result plus the content hashes used to detect
/// staleness on later lookups.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
struct CacheEntry {
    /// SHA-256 hex digest of the source file at transpile time.
    hash: String,
    /// The transpiled output that was cached.
    transpiled: TranspiledFile,
    /// SHA-256 hex digest of each dependency at transpile time.
    dependency_hashes: HashMap<PathBuf, String>,
}
362
/// Content-hash-based cache of transpilation results, optionally persisted
/// as JSON under a cache directory.
#[derive(Debug, Clone)]
pub struct TranspilationCache {
    /// Cached results keyed by source file path.
    entries: HashMap<PathBuf, CacheEntry>,
    /// Directory used by `save`/`load`; `None` disables persistence.
    cache_dir: Option<PathBuf>,
    /// Lookups satisfied from the cache (reset on `load`/`clear`).
    hits: usize,
    /// Lookups that failed validation (reset on `load`/`clear`).
    misses: usize,
}
398
399impl TranspilationCache {
400 pub fn new() -> Self {
402 Self {
403 entries: HashMap::new(),
404 cache_dir: None,
405 hits: 0,
406 misses: 0,
407 }
408 }
409
410 pub fn with_directory(cache_dir: &Path) -> Self {
412 Self {
413 entries: HashMap::new(),
414 cache_dir: Some(cache_dir.to_path_buf()),
415 hits: 0,
416 misses: 0,
417 }
418 }
419
420 pub fn compute_hash(&self, path: &Path) -> Result<String> {
424 use sha2::{Digest, Sha256};
425
426 let content = std::fs::read(path)
427 .with_context(|| format!("Failed to read file for hashing: {}", path.display()))?;
428
429 let mut hasher = Sha256::new();
430 hasher.update(&content);
431 let result = hasher.finalize();
432
433 Ok(format!("{:x}", result))
434 }
435
436 pub fn insert(&mut self, path: &Path, transpiled: &TranspiledFile) {
438 let hash = match self.compute_hash(path) {
439 Ok(h) => h,
440 Err(_) => return, };
442
443 let mut dependency_hashes = HashMap::new();
445 for dep_path in &transpiled.dependencies {
446 if let Ok(dep_hash) = self.compute_hash(dep_path) {
447 dependency_hashes.insert(dep_path.clone(), dep_hash);
448 }
449 }
450
451 let entry = CacheEntry {
452 hash,
453 transpiled: transpiled.clone(),
454 dependency_hashes,
455 };
456
457 self.entries.insert(path.to_path_buf(), entry);
458 }
459
460 pub fn get(&mut self, path: &Path) -> Option<&TranspiledFile> {
467 let entry = self.entries.get(&path.to_path_buf())?;
468
469 let current_hash = self.compute_hash(path).ok()?;
471 if current_hash != entry.hash {
472 self.misses += 1;
473 return None;
474 }
475
476 for (dep_path, cached_hash) in &entry.dependency_hashes {
478 if let Ok(current_dep_hash) = self.compute_hash(dep_path) {
479 if ¤t_dep_hash != cached_hash {
480 self.misses += 1;
481 return None;
482 }
483 }
484 }
485
486 self.hits += 1;
487 Some(&entry.transpiled)
488 }
489
490 pub fn save(&self) -> Result<()> {
492 let cache_dir = self
493 .cache_dir
494 .as_ref()
495 .ok_or_else(|| anyhow::anyhow!("Cache directory not set"))?;
496
497 std::fs::create_dir_all(cache_dir).with_context(|| {
498 format!("Failed to create cache directory: {}", cache_dir.display())
499 })?;
500
501 let cache_file = cache_dir.join("cache.json");
502 let json =
503 serde_json::to_string_pretty(&self.entries).context("Failed to serialize cache")?;
504
505 std::fs::write(&cache_file, json)
506 .with_context(|| format!("Failed to write cache file: {}", cache_file.display()))?;
507
508 Ok(())
509 }
510
511 pub fn load(cache_dir: &Path) -> Result<Self> {
513 let cache_file = cache_dir.join("cache.json");
514
515 if !cache_file.exists() {
516 return Ok(Self::with_directory(cache_dir));
518 }
519
520 let json = std::fs::read_to_string(&cache_file)
521 .with_context(|| format!("Failed to read cache file: {}", cache_file.display()))?;
522
523 let entries: HashMap<PathBuf, CacheEntry> =
524 serde_json::from_str(&json).context("Failed to deserialize cache")?;
525
526 Ok(Self {
527 entries,
528 cache_dir: Some(cache_dir.to_path_buf()),
529 hits: 0,
530 misses: 0,
531 })
532 }
533
534 pub fn clear(&mut self) {
536 self.entries.clear();
537 self.hits = 0;
538 self.misses = 0;
539 }
540
541 pub fn statistics(&self) -> CacheStatistics {
543 CacheStatistics {
544 hits: self.hits,
545 misses: self.misses,
546 total_files: self.entries.len(),
547 }
548 }
549}
550
551impl Default for TranspilationCache {
552 fn default() -> Self {
553 Self::new()
554 }
555}
556
/// Recursively inline quoted `#include "file"` directives into `source`.
///
/// * System includes (`#include <...>`) are not resolved; the directive is
///   emitted as a `//` comment instead.
/// * `processed` tracks already-inlined paths so a header included twice
///   (or a cycle of headers) is expanded only once.
/// * Quoted includes are resolved relative to `base_dir` (defaulting to
///   `"."` when `None`); nested includes resolve relative to the directory
///   of the file that contains them.
///
/// # Errors
/// Fails if a quoted include file cannot be read.
fn preprocess_includes(
    source: &str,
    base_dir: Option<&Path>,
    processed: &mut std::collections::HashSet<PathBuf>,
) -> Result<String> {
    let mut result = String::new();
    let base_dir = base_dir.unwrap_or_else(|| Path::new("."));

    for line in source.lines() {
        let trimmed = line.trim();

        if trimmed.starts_with("#include") {
            // Classify the directive: quoted (local) vs angle-bracket
            // (system). Malformed directives fall through unchanged.
            let (filename, is_system) = if let Some(start) = trimmed.find('"') {
                if let Some(end) = trimmed[start + 1..].find('"') {
                    let filename = &trimmed[start + 1..start + 1 + end];
                    (filename, false)
                } else {
                    // Unterminated quote: pass the line through verbatim.
                    result.push_str(line);
                    result.push('\n');
                    continue;
                }
            } else if let Some(start) = trimmed.find('<') {
                if let Some(end) = trimmed[start + 1..].find('>') {
                    let filename = &trimmed[start + 1..start + 1 + end];
                    (filename, true)
                } else {
                    // Unterminated bracket: pass the line through verbatim.
                    result.push_str(line);
                    result.push('\n');
                    continue;
                }
            } else {
                // `#include` with no recognizable target.
                result.push_str(line);
                result.push('\n');
                continue;
            };

            if is_system {
                // System headers are unavailable; keep a trace as a comment.
                result.push_str(&format!("// {}\n", line));
                continue;
            }

            let include_path = base_dir.join(filename);

            if processed.contains(&include_path) {
                // Guard against duplicate/circular inclusion.
                result.push_str(&format!("// Already included: {}\n", filename));
                continue;
            }

            if let Ok(included_content) = std::fs::read_to_string(&include_path) {
                // Mark before recursing so self-inclusion terminates.
                processed.insert(include_path.clone());

                let included_dir = include_path.parent().unwrap_or(base_dir);

                let preprocessed =
                    preprocess_includes(&included_content, Some(included_dir), processed)?;

                result.push_str(&format!("// BEGIN INCLUDE: {}\n", filename));
                result.push_str(&preprocessed);
                result.push_str(&format!("// END INCLUDE: {}\n", filename));
            } else {
                anyhow::bail!("Failed to find include file: {}", include_path.display());
            }
        } else {
            result.push_str(line);
            result.push('\n');
        }
    }

    Ok(result)
}
658
/// Transpile a string of C source code to Rust.
///
/// Convenience wrapper around [`transpile_with_includes`] with no base
/// directory, so quoted `#include` directives resolve relative to the
/// current working directory.
///
/// # Errors
/// Fails if include preprocessing or C parsing fails.
pub fn transpile(c_code: &str) -> Result<String> {
    transpile_with_includes(c_code, None)
}
687
/// Transpile C source to Rust, resolving quoted `#include`s against `base_dir`.
///
/// Pipeline: preprocess includes -> parse C -> lower AST to HIR (functions,
/// structs, globals, typedefs) -> per-function ownership/borrow/lifetime
/// analysis -> Rust code generation.
///
/// # Errors
/// Fails if an include file cannot be read or the C code cannot be parsed.
pub fn transpile_with_includes(c_code: &str, base_dir: Option<&Path>) -> Result<String> {
    // Stage 1: inline local includes into a single translation unit.
    let mut processed_files = std::collections::HashSet::new();
    let preprocessed = preprocess_includes(c_code, base_dir, &mut processed_files)?;

    // Stage 2: parse the preprocessed C source.
    let parser = CParser::new().context("Failed to create C parser")?;
    let ast = parser
        .parse(&preprocessed)
        .context("Failed to parse C code")?;

    // Stage 3: lower AST items into HIR.
    let hir_functions: Vec<HirFunction> = ast
        .functions()
        .iter()
        .map(HirFunction::from_ast_function)
        .collect();

    let hir_structs: Vec<decy_hir::HirStruct> = ast
        .structs()
        .iter()
        .map(|s| {
            let fields = s
                .fields
                .iter()
                .map(|f| {
                    decy_hir::HirStructField::new(
                        f.name.clone(),
                        decy_hir::HirType::from_ast_type(&f.field_type),
                    )
                })
                .collect();
            decy_hir::HirStruct::new(s.name.clone(), fields)
        })
        .collect();

    // File-scope variables become HIR variable declarations (emitted below
    // as `static mut` items).
    let hir_variables: Vec<decy_hir::HirStatement> = ast
        .variables()
        .iter()
        .map(|v| decy_hir::HirStatement::VariableDeclaration {
            name: v.name().to_string(),
            var_type: decy_hir::HirType::from_ast_type(v.var_type()),
            initializer: v
                .initializer()
                .map(decy_hir::HirExpression::from_ast_expression),
        })
        .collect();

    let hir_typedefs: Vec<decy_hir::HirTypedef> = ast
        .typedefs()
        .iter()
        .map(|t| {
            decy_hir::HirTypedef::new(
                t.name().to_string(),
                decy_hir::HirType::from_ast_type(&t.underlying_type),
            )
        })
        .collect();

    // Stage 4: per-function ownership/borrow/lifetime analysis.
    let mut transformed_functions = Vec::new();

    for func in hir_functions {
        // Dataflow feeds ownership inference.
        let dataflow_analyzer = DataflowAnalyzer::new();
        let dataflow_graph = dataflow_analyzer.analyze(&func);

        let ownership_inferencer = OwnershipInferencer::new();
        let ownership_inferences = ownership_inferencer.infer(&dataflow_graph);

        // Rewrite raw pointers into borrows based on the inferences.
        let borrow_generator = BorrowGenerator::new();
        let func_with_borrows = borrow_generator.transform_function(&func, &ownership_inferences);

        // Lifetime tracking result is currently unused (`_lifetimes`), but
        // building the scope tree is required by the annotator's contract —
        // presumably; verify before removing.
        let lifetime_analyzer = LifetimeAnalyzer::new();
        let scope_tree = lifetime_analyzer.build_scope_tree(&func_with_borrows);
        let _lifetimes = lifetime_analyzer.track_lifetimes(&func_with_borrows, &scope_tree);

        let lifetime_annotator = LifetimeAnnotator::new();
        let annotated_signature = lifetime_annotator.annotate_function(&func_with_borrows);

        transformed_functions.push((func_with_borrows, annotated_signature));
    }

    // Stage 5: emit Rust code — structs, typedefs, globals, then functions.
    let code_generator = CodeGenerator::new();
    let mut rust_code = String::new();

    for hir_struct in &hir_structs {
        let struct_code = code_generator.generate_struct(hir_struct);
        rust_code.push_str(&struct_code);
        rust_code.push('\n');
    }

    // Typedefs that fail to generate are skipped silently (best-effort).
    for typedef in &hir_typedefs {
        if let Ok(typedef_code) = code_generator.generate_typedef(typedef) {
            rust_code.push_str(&typedef_code);
            rust_code.push('\n');
        }
    }

    // C globals map to `static mut`; uninitialized globals become
    // `Option<T> = None` rather than guessing a default value.
    for var_stmt in &hir_variables {
        if let decy_hir::HirStatement::VariableDeclaration {
            name,
            var_type,
            initializer,
        } = var_stmt
        {
            let type_str = CodeGenerator::map_type(var_type);

            if let Some(init_expr) = initializer {
                let init_code = code_generator.generate_expression(init_expr);
                rust_code.push_str(&format!(
                    "static mut {}: {} = {};\n",
                    name, type_str, init_code
                ));
            } else {
                rust_code.push_str(&format!(
                    "static mut {}: Option<{}> = None;\n",
                    name, type_str
                ));
            }
        }
    }
    if !hir_variables.is_empty() {
        rust_code.push('\n');
    }

    for (func, annotated_sig) in &transformed_functions {
        let generated = code_generator.generate_function_with_lifetimes_and_structs(
            func,
            annotated_sig,
            &hir_structs,
        );
        rust_code.push_str(&generated);
        rust_code.push('\n');
    }

    Ok(rust_code)
}
858
859pub fn transpile_with_box_transform(c_code: &str) -> Result<String> {
881 let parser = CParser::new().context("Failed to create C parser")?;
883 let ast = parser.parse(c_code).context("Failed to parse C code")?;
884
885 let hir_functions: Vec<HirFunction> = ast
887 .functions()
888 .iter()
889 .map(HirFunction::from_ast_function)
890 .collect();
891
892 let code_generator = CodeGenerator::new();
894 let pattern_detector = PatternDetector::new();
895 let mut rust_code = String::new();
896
897 for func in &hir_functions {
898 let candidates = pattern_detector.find_box_candidates(func);
900
901 let generated = code_generator.generate_function_with_box_transform(func, &candidates);
902 rust_code.push_str(&generated);
903 rust_code.push('\n');
904 }
905
906 Ok(rust_code)
907}
908
909pub fn transpile_file(path: &Path, _context: &ProjectContext) -> Result<TranspiledFile> {
936 let c_code = std::fs::read_to_string(path)
938 .with_context(|| format!("Failed to read file: {}", path.display()))?;
939
940 let dependencies = extract_dependencies(path, &c_code)?;
942
943 let rust_code = transpile(&c_code)?;
945
946 let functions_exported = extract_function_names(&rust_code);
948
949 let ffi_declarations = generate_ffi_declarations(&functions_exported);
951
952 Ok(TranspiledFile::new(
953 path.to_path_buf(),
954 rust_code,
955 dependencies,
956 functions_exported,
957 ffi_declarations,
958 ))
959}
960
961fn extract_dependencies(source_path: &Path, c_code: &str) -> Result<Vec<PathBuf>> {
966 let mut dependencies = Vec::new();
967 let source_dir = source_path
968 .parent()
969 .ok_or_else(|| anyhow::anyhow!("Source file has no parent directory"))?;
970
971 for line in c_code.lines() {
972 let trimmed = line.trim();
973 if trimmed.starts_with("#include") {
974 if let Some(start) = trimmed.find('"') {
976 if let Some(end) = trimmed[start + 1..].find('"') {
977 let header_name = &trimmed[start + 1..start + 1 + end];
978 let header_path = source_dir.join(header_name);
979 if header_path.exists() {
980 dependencies.push(header_path);
981 }
982 }
983 }
984 }
985 }
986
987 Ok(dependencies)
988}
989
/// Scan generated Rust source for `fn` / `pub fn` definitions and return
/// their names, with any generic parameter list (`<...>`) stripped.
/// Line-based heuristic: only matches definitions that start a line.
fn extract_function_names(rust_code: &str) -> Vec<String> {
    rust_code
        .lines()
        .filter_map(|raw| {
            let line = raw.trim();
            // Identify the definition prefix, if any.
            let prefix_len = if line.starts_with("pub fn ") {
                "pub fn ".len()
            } else if line.starts_with("fn ") {
                "fn ".len()
            } else {
                return None;
            };
            if !line.contains('(') {
                return None;
            }
            let rest = &line[prefix_len..];
            let paren = rest.find('(')?;
            let name = &rest[..paren];
            // Drop a trailing generic parameter list, e.g. `foo<T>` -> `foo`.
            let name = match name.find('<') {
                Some(angle) => &name[..angle],
                None => name,
            };
            Some(name.trim().to_string())
        })
        .collect()
}
1021
/// Build a comment-only FFI declaration stub listing the exported function
/// names inside an `extern "C"` block. Returns an empty string when there
/// are no functions to declare.
fn generate_ffi_declarations(functions: &[String]) -> String {
    if functions.is_empty() {
        return String::new();
    }

    // One indented comment line per exported function.
    let listing: String = functions
        .iter()
        .map(|name| format!("    // {}\n", name))
        .collect();

    format!(
        "// FFI declarations for C interoperability\n#[no_mangle]\nextern \"C\" {{\n{}}}\n",
        listing
    )
}
1041
#[cfg(test)]
mod tests {
    use super::*;

    // Smoke test: a simple int function transpiles and the output contains
    // the expected Rust signature elements.
    #[test]
    fn test_transpile_simple_function() {
        let c_code = "int add(int a, int b) { return a + b; }";
        let result = transpile(c_code);
        assert!(result.is_ok(), "Transpilation should succeed");

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn add"), "Should contain function name");
        assert!(rust_code.contains("i32"), "Should contain Rust int type");
    }

    // Parameter names survive into the generated Rust code.
    #[test]
    fn test_transpile_with_parameters() {
        let c_code = "int multiply(int x, int y) { return x * y; }";
        let result = transpile(c_code);
        assert!(result.is_ok());

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn multiply"));
        assert!(rust_code.contains("x"));
        assert!(rust_code.contains("y"));
    }

    // `void` functions with empty bodies transpile without error.
    #[test]
    fn test_transpile_void_function() {
        let c_code = "void do_nothing() { }";
        let result = transpile(c_code);
        assert!(result.is_ok());

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn do_nothing"));
    }

    // The Box-transform path works even when no Box candidates exist.
    #[test]
    fn test_transpile_with_box_transform_simple() {
        let c_code = "int get_value() { return 42; }";
        let result = transpile_with_box_transform(c_code);
        assert!(result.is_ok());

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn get_value"));
    }

    // Empty input is valid and produces (empty) output rather than an error.
    #[test]
    fn test_transpile_empty_input() {
        let c_code = "";
        let result = transpile(c_code);
        assert!(result.is_ok());
    }

    // End-to-end: local variable declaration + assignment + return exercises
    // the full dataflow/ownership/codegen pipeline.
    #[test]
    fn test_transpile_integration_pipeline() {
        let c_code = r#"
            int calculate(int a, int b) {
                int result;
                result = a + b;
                return result;
            }
        "#;
        let result = transpile(c_code);
        assert!(result.is_ok(), "Full pipeline should execute");

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn calculate"));
        assert!(rust_code.contains("let mut result"));
    }

    // Lifetime analysis runs on every function; for a by-value signature it
    // should succeed without altering the function name.
    #[test]
    fn test_transpile_with_lifetime_annotations() {
        let c_code = "int add(int a, int b) { return a + b; }";
        let result = transpile(c_code);
        assert!(
            result.is_ok(),
            "Transpilation with lifetime analysis should succeed"
        );

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn add"));

    }
}