1#![warn(missing_docs)]
12#![warn(clippy::all)]
13#![deny(unsafe_code)]
14
15use anyhow::{Context, Result};
16use decy_analyzer::patterns::PatternDetector;
17use decy_codegen::CodeGenerator;
18use decy_hir::HirFunction;
19use decy_ownership::{
20 array_slice::ArrayParameterTransformer, borrow_gen::BorrowGenerator,
21 dataflow::DataflowAnalyzer, inference::OwnershipInferencer, lifetime::LifetimeAnalyzer,
22 lifetime_gen::LifetimeAnnotator,
23};
24use decy_parser::parser::CParser;
25use petgraph::graph::{DiGraph, NodeIndex};
26use petgraph::visit::Topo;
27use std::collections::HashMap;
28use std::path::{Path, PathBuf};
29
/// Result of transpiling a single C source file to Rust.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct TranspiledFile {
    /// Path of the original C source file.
    pub source_path: PathBuf,

    /// Generated Rust source code for this file.
    pub rust_code: String,

    /// Local headers this file `#include`s that exist on disk
    /// (see `extract_dependencies`); system headers are never listed.
    pub dependencies: Vec<PathBuf>,

    /// Names of the `fn`/`pub fn` items found in the generated Rust code.
    pub functions_exported: Vec<String>,

    /// Comment-only FFI declaration text generated for the exported functions.
    pub ffi_declarations: String,
}
51
52impl TranspiledFile {
53 pub fn new(
55 source_path: PathBuf,
56 rust_code: String,
57 dependencies: Vec<PathBuf>,
58 functions_exported: Vec<String>,
59 ffi_declarations: String,
60 ) -> Self {
61 Self {
62 source_path,
63 rust_code,
64 dependencies,
65 functions_exported,
66 ffi_declarations,
67 }
68 }
69}
70
/// Cross-file state accumulated while transpiling a multi-file project.
#[derive(Debug, Clone, Default)]
pub struct ProjectContext {
    /// Type name -> the generated definition line it was extracted from.
    types: HashMap<String, String>,

    /// Function name -> path (lossy string form) of the file exporting it.
    functions: HashMap<String, String>,

    /// Transpiled output keyed by source file path.
    transpiled_files: HashMap<PathBuf, TranspiledFile>,
}
86
87impl ProjectContext {
88 pub fn new() -> Self {
90 Self::default()
91 }
92
93 pub fn add_transpiled_file(&mut self, file: &TranspiledFile) {
98 self.transpiled_files
100 .insert(file.source_path.clone(), file.clone());
101
102 if file.rust_code.contains("struct") {
105 for line in file.rust_code.lines() {
107 if line.contains("struct") {
108 if let Some(name) = self.extract_type_name(line) {
109 self.types.insert(name.clone(), line.to_string());
110 }
111 }
112 }
113 }
114
115 for func_name in &file.functions_exported {
117 self.functions.insert(
118 func_name.clone(),
119 file.source_path.to_string_lossy().to_string(),
120 );
121 }
122 }
123
124 pub fn has_type(&self, type_name: &str) -> bool {
126 self.types.contains_key(type_name)
127 }
128
129 pub fn has_function(&self, func_name: &str) -> bool {
131 self.functions.contains_key(func_name)
132 }
133
134 pub fn get_function_source(&self, func_name: &str) -> Option<&str> {
136 self.functions.get(func_name).map(|s| s.as_str())
137 }
138
139 fn extract_type_name(&self, line: &str) -> Option<String> {
141 let words: Vec<&str> = line.split_whitespace().collect();
143 if let Some(idx) = words.iter().position(|&w| w == "struct" || w == "enum") {
144 if idx + 1 < words.len() {
145 let name = words[idx + 1].trim_end_matches('{').trim_end_matches('<');
146 return Some(name.to_string());
147 }
148 }
149 None
150 }
151}
152
/// Directed graph of `#include` relationships between project files.
#[derive(Debug, Clone)]
pub struct DependencyGraph {
    /// Edge `a -> b` means file `a` includes (depends on) file `b`.
    graph: DiGraph<PathBuf, ()>,

    /// Path -> node-index lookup for files already in the graph.
    path_to_node: HashMap<PathBuf, NodeIndex>,
}
165
166impl DependencyGraph {
167 pub fn new() -> Self {
169 Self {
170 graph: DiGraph::new(),
171 path_to_node: HashMap::new(),
172 }
173 }
174
175 pub fn is_empty(&self) -> bool {
177 self.graph.node_count() == 0
178 }
179
180 pub fn file_count(&self) -> usize {
182 self.graph.node_count()
183 }
184
185 pub fn contains_file(&self, path: &Path) -> bool {
187 self.path_to_node.contains_key(path)
188 }
189
190 pub fn add_file(&mut self, path: &Path) {
194 if !self.contains_file(path) {
195 let node = self.graph.add_node(path.to_path_buf());
196 self.path_to_node.insert(path.to_path_buf(), node);
197 }
198 }
199
200 pub fn add_dependency(&mut self, from: &Path, to: &Path) {
204 let from_node = *self
205 .path_to_node
206 .get(from)
207 .expect("from file must be added to graph first");
208 let to_node = *self
209 .path_to_node
210 .get(to)
211 .expect("to file must be added to graph first");
212
213 self.graph.add_edge(from_node, to_node, ());
214 }
215
216 pub fn has_dependency(&self, from: &Path, to: &Path) -> bool {
218 if let (Some(&from_node), Some(&to_node)) =
219 (self.path_to_node.get(from), self.path_to_node.get(to))
220 {
221 self.graph.contains_edge(from_node, to_node)
222 } else {
223 false
224 }
225 }
226
227 pub fn topological_sort(&self) -> Result<Vec<PathBuf>> {
232 if petgraph::algo::is_cyclic_directed(&self.graph) {
234 return Err(anyhow::anyhow!(
235 "Circular dependency detected in file dependencies"
236 ));
237 }
238
239 let mut topo = Topo::new(&self.graph);
240 let mut build_order = Vec::new();
241
242 while let Some(node) = topo.next(&self.graph) {
243 if let Some(path) = self.graph.node_weight(node) {
244 build_order.push(path.clone());
245 }
246 }
247
248 build_order.reverse();
250
251 Ok(build_order)
252 }
253
254 pub fn from_files(files: &[PathBuf]) -> Result<Self> {
258 let mut graph = Self::new();
259
260 for file in files {
262 graph.add_file(file);
263 }
264
265 for file in files {
267 let content = std::fs::read_to_string(file)
268 .with_context(|| format!("Failed to read file: {}", file.display()))?;
269
270 let includes = Self::parse_include_directives(&content);
271
272 let file_dir = file.parent().unwrap_or_else(|| Path::new("."));
274
275 for include in includes {
276 let include_path = file_dir.join(&include);
277
278 if graph.contains_file(&include_path) {
280 graph.add_dependency(file, &include_path);
281 }
282 }
283 }
284
285 Ok(graph)
286 }
287
288 pub fn parse_include_directives(code: &str) -> Vec<String> {
292 let mut includes = Vec::new();
293
294 for line in code.lines() {
295 let trimmed = line.trim();
296 if trimmed.starts_with("#include") {
297 if let Some(start) = trimmed.find('"').or_else(|| trimmed.find('<')) {
299 let end_char = if trimmed.chars().nth(start) == Some('"') {
300 '"'
301 } else {
302 '>'
303 };
304 if let Some(end) = trimmed[start + 1..].find(end_char) {
305 let filename = &trimmed[start + 1..start + 1 + end];
306 includes.push(filename.to_string());
307 }
308 }
309 }
310 }
311
312 includes
313 }
314
315 pub fn has_header_guard(path: &Path) -> Result<bool> {
317 let content = std::fs::read_to_string(path)
318 .with_context(|| format!("Failed to read file: {}", path.display()))?;
319
320 let has_ifndef = content.lines().any(|line| {
321 let trimmed = line.trim();
322 trimmed.starts_with("#ifndef") || trimmed.starts_with("#if !defined")
323 });
324
325 let has_define = content
326 .lines()
327 .any(|line| line.trim().starts_with("#define"));
328 let has_endif = content
329 .lines()
330 .any(|line| line.trim().starts_with("#endif"));
331
332 Ok(has_ifndef && has_define && has_endif)
333 }
334}
335
336impl Default for DependencyGraph {
337 fn default() -> Self {
338 Self::new()
339 }
340}
341
/// Hit/miss counters reported by `TranspilationCache::statistics`.
#[derive(Debug, Clone)]
pub struct CacheStatistics {
    /// Lookups served from the cache.
    pub hits: usize,
    /// Lookups that found no valid (fresh) entry.
    pub misses: usize,
    /// Number of entries currently stored in the cache.
    pub total_files: usize,
}
352
/// One cached transpilation result plus the content hashes used to detect
/// staleness on later lookups.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
struct CacheEntry {
    /// SHA-256 hex digest of the source file at insertion time.
    hash: String,
    /// The transpiled output being cached.
    transpiled: TranspiledFile,
    /// SHA-256 hex digest of each dependency at insertion time.
    dependency_hashes: HashMap<PathBuf, String>,
}
363
/// Content-hash-based cache of per-file transpilation results, optionally
/// persisted as JSON (`cache.json`) in a cache directory.
#[derive(Debug, Clone)]
pub struct TranspilationCache {
    /// Cached results keyed by source file path.
    entries: HashMap<PathBuf, CacheEntry>,
    /// Directory used by `save`/`load`; `None` disables persistence.
    cache_dir: Option<PathBuf>,
    /// Count of successful cache lookups.
    hits: usize,
    /// Count of failed or stale cache lookups.
    misses: usize,
}
399
400impl TranspilationCache {
401 pub fn new() -> Self {
403 Self {
404 entries: HashMap::new(),
405 cache_dir: None,
406 hits: 0,
407 misses: 0,
408 }
409 }
410
411 pub fn with_directory(cache_dir: &Path) -> Self {
413 Self {
414 entries: HashMap::new(),
415 cache_dir: Some(cache_dir.to_path_buf()),
416 hits: 0,
417 misses: 0,
418 }
419 }
420
421 pub fn compute_hash(&self, path: &Path) -> Result<String> {
425 use sha2::{Digest, Sha256};
426
427 let content = std::fs::read(path)
428 .with_context(|| format!("Failed to read file for hashing: {}", path.display()))?;
429
430 let mut hasher = Sha256::new();
431 hasher.update(&content);
432 let result = hasher.finalize();
433
434 Ok(format!("{:x}", result))
435 }
436
437 pub fn insert(&mut self, path: &Path, transpiled: &TranspiledFile) {
439 let hash = match self.compute_hash(path) {
440 Ok(h) => h,
441 Err(_) => return, };
443
444 let mut dependency_hashes = HashMap::new();
446 for dep_path in &transpiled.dependencies {
447 if let Ok(dep_hash) = self.compute_hash(dep_path) {
448 dependency_hashes.insert(dep_path.clone(), dep_hash);
449 }
450 }
451
452 let entry = CacheEntry {
453 hash,
454 transpiled: transpiled.clone(),
455 dependency_hashes,
456 };
457
458 self.entries.insert(path.to_path_buf(), entry);
459 }
460
461 pub fn get(&mut self, path: &Path) -> Option<&TranspiledFile> {
468 let entry = self.entries.get(&path.to_path_buf())?;
469
470 let current_hash = self.compute_hash(path).ok()?;
472 if current_hash != entry.hash {
473 self.misses += 1;
474 return None;
475 }
476
477 for (dep_path, cached_hash) in &entry.dependency_hashes {
479 if let Ok(current_dep_hash) = self.compute_hash(dep_path) {
480 if ¤t_dep_hash != cached_hash {
481 self.misses += 1;
482 return None;
483 }
484 }
485 }
486
487 self.hits += 1;
488 Some(&entry.transpiled)
489 }
490
491 pub fn save(&self) -> Result<()> {
493 let cache_dir = self
494 .cache_dir
495 .as_ref()
496 .ok_or_else(|| anyhow::anyhow!("Cache directory not set"))?;
497
498 std::fs::create_dir_all(cache_dir).with_context(|| {
499 format!("Failed to create cache directory: {}", cache_dir.display())
500 })?;
501
502 let cache_file = cache_dir.join("cache.json");
503 let json =
504 serde_json::to_string_pretty(&self.entries).context("Failed to serialize cache")?;
505
506 std::fs::write(&cache_file, json)
507 .with_context(|| format!("Failed to write cache file: {}", cache_file.display()))?;
508
509 Ok(())
510 }
511
512 pub fn load(cache_dir: &Path) -> Result<Self> {
514 let cache_file = cache_dir.join("cache.json");
515
516 if !cache_file.exists() {
517 return Ok(Self::with_directory(cache_dir));
519 }
520
521 let json = std::fs::read_to_string(&cache_file)
522 .with_context(|| format!("Failed to read cache file: {}", cache_file.display()))?;
523
524 let entries: HashMap<PathBuf, CacheEntry> =
525 serde_json::from_str(&json).context("Failed to deserialize cache")?;
526
527 Ok(Self {
528 entries,
529 cache_dir: Some(cache_dir.to_path_buf()),
530 hits: 0,
531 misses: 0,
532 })
533 }
534
535 pub fn clear(&mut self) {
537 self.entries.clear();
538 self.hits = 0;
539 self.misses = 0;
540 }
541
542 pub fn statistics(&self) -> CacheStatistics {
544 CacheStatistics {
545 hits: self.hits,
546 misses: self.misses,
547 total_files: self.entries.len(),
548 }
549 }
550}
551
552impl Default for TranspilationCache {
553 fn default() -> Self {
554 Self::new()
555 }
556}
557
/// Recursively inline local `#include "…"` files into `source`.
///
/// * Quoted includes are resolved relative to `base_dir` (default `.`),
///   read from disk, recursively preprocessed, and spliced in between
///   `// BEGIN INCLUDE` / `// END INCLUDE` marker comments.
/// * System includes (`<…>`) are turned into comments rather than inlined.
/// * Files already present in `processed` are replaced by an
///   `// Already included:` marker, so each file is inlined at most once
///   (this also prevents infinite recursion on include cycles).
/// * Malformed `#include` lines are passed through unchanged.
///
/// # Errors
///
/// Fails when a quoted include names a file that cannot be read.
fn preprocess_includes(
    source: &str,
    base_dir: Option<&Path>,
    processed: &mut std::collections::HashSet<PathBuf>,
) -> Result<String> {
    let mut result = String::new();
    let base_dir = base_dir.unwrap_or_else(|| Path::new("."));

    for line in source.lines() {
        let trimmed = line.trim();

        if trimmed.starts_with("#include") {
            // Extract the include target and whether it used the system form.
            let (filename, is_system) = if let Some(start) = trimmed.find('"') {
                if let Some(end) = trimmed[start + 1..].find('"') {
                    let filename = &trimmed[start + 1..start + 1 + end];
                    (filename, false)
                } else {
                    // No closing quote: keep the line as-is.
                    result.push_str(line);
                    result.push('\n');
                    continue;
                }
            } else if let Some(start) = trimmed.find('<') {
                if let Some(end) = trimmed[start + 1..].find('>') {
                    let filename = &trimmed[start + 1..start + 1 + end];
                    (filename, true)
                } else {
                    // No closing angle bracket: keep the line as-is.
                    result.push_str(line);
                    result.push('\n');
                    continue;
                }
            } else {
                // `#include` with no recognizable target: keep the line as-is.
                result.push_str(line);
                result.push('\n');
                continue;
            };

            if is_system {
                // System headers are not inlined; leave a breadcrumb comment.
                result.push_str(&format!("// {}\n", line));
                continue;
            }

            let include_path = base_dir.join(filename);

            if processed.contains(&include_path) {
                result.push_str(&format!("// Already included: {}\n", filename));
                continue;
            }

            if let Ok(included_content) = std::fs::read_to_string(&include_path) {
                // Mark before recursing so self-includes terminate.
                processed.insert(include_path.clone());

                // Nested includes resolve relative to the included file's dir.
                let included_dir = include_path.parent().unwrap_or(base_dir);

                let preprocessed =
                    preprocess_includes(&included_content, Some(included_dir), processed)?;

                result.push_str(&format!("// BEGIN INCLUDE: {}\n", filename));
                result.push_str(&preprocessed);
                result.push_str(&format!("// END INCLUDE: {}\n", filename));
            } else {
                anyhow::bail!("Failed to find include file: {}", include_path.display());
            }
        } else {
            result.push_str(line);
            result.push('\n');
        }
    }

    Ok(result)
}
659
660pub fn transpile(c_code: &str) -> Result<String> {
686 transpile_with_includes(c_code, None)
687}
688
/// Transpile C source to Rust, first inlining local `#include "…"` files
/// resolved relative to `base_dir` (system `<…>` includes are commented out).
///
/// Pipeline: preprocess includes -> parse C -> lower to HIR (functions,
/// structs, file-scope variables, typedefs) -> per-function ownership,
/// borrow, array-to-slice and lifetime passes -> Rust code generation.
///
/// # Errors
///
/// Fails if an included file cannot be found or the C source cannot be
/// parsed.
pub fn transpile_with_includes(c_code: &str, base_dir: Option<&Path>) -> Result<String> {
    // Inline local includes so the parser sees a single translation unit.
    let mut processed_files = std::collections::HashSet::new();
    let preprocessed = preprocess_includes(c_code, base_dir, &mut processed_files)?;

    let parser = CParser::new().context("Failed to create C parser")?;
    let ast = parser
        .parse(&preprocessed)
        .context("Failed to parse C code")?;

    // Lower each category of AST item into its HIR form.
    let hir_functions: Vec<HirFunction> = ast
        .functions()
        .iter()
        .map(HirFunction::from_ast_function)
        .collect();

    let hir_structs: Vec<decy_hir::HirStruct> = ast
        .structs()
        .iter()
        .map(|s| {
            let fields = s
                .fields
                .iter()
                .map(|f| {
                    decy_hir::HirStructField::new(
                        f.name.clone(),
                        decy_hir::HirType::from_ast_type(&f.field_type),
                    )
                })
                .collect();
            decy_hir::HirStruct::new(s.name.clone(), fields)
        })
        .collect();

    // File-scope C variables become variable-declaration statements that are
    // emitted as `static mut` items further below.
    let hir_variables: Vec<decy_hir::HirStatement> = ast
        .variables()
        .iter()
        .map(|v| decy_hir::HirStatement::VariableDeclaration {
            name: v.name().to_string(),
            var_type: decy_hir::HirType::from_ast_type(v.var_type()),
            initializer: v
                .initializer()
                .map(decy_hir::HirExpression::from_ast_expression),
        })
        .collect();

    let hir_typedefs: Vec<decy_hir::HirTypedef> = ast
        .typedefs()
        .iter()
        .map(|t| {
            decy_hir::HirTypedef::new(
                t.name().to_string(),
                decy_hir::HirType::from_ast_type(&t.underlying_type),
            )
        })
        .collect();

    let mut transformed_functions = Vec::new();

    // Per-function ownership pipeline; each pass feeds the next.
    for func in hir_functions {
        // Dataflow analysis over the function body.
        let dataflow_analyzer = DataflowAnalyzer::new();
        let dataflow_graph = dataflow_analyzer.analyze(&func);

        // Infer ownership from the dataflow graph.
        let ownership_inferencer = OwnershipInferencer::new();
        let ownership_inferences = ownership_inferencer.infer(&dataflow_graph);

        // Rewrite the function according to the inferred borrows.
        let borrow_generator = BorrowGenerator::new();
        let func_with_borrows = borrow_generator.transform_function(&func, &ownership_inferences);

        // Rewrite array-style parameters using the dataflow results.
        let array_transformer = ArrayParameterTransformer::new();
        let func_with_slices = array_transformer.transform(&func_with_borrows, &dataflow_graph);

        // Lifetime tracking; the result is currently unused here.
        // NOTE(review): confirm whether this call has side effects that the
        // annotator below relies on — otherwise it may be removable.
        let lifetime_analyzer = LifetimeAnalyzer::new();
        let scope_tree = lifetime_analyzer.build_scope_tree(&func_with_slices);
        let _lifetimes = lifetime_analyzer.track_lifetimes(&func_with_slices, &scope_tree);

        // Annotate the signature with explicit lifetimes.
        let lifetime_annotator = LifetimeAnnotator::new();
        let annotated_signature = lifetime_annotator.annotate_function(&func_with_slices);

        transformed_functions.push((func_with_slices, annotated_signature));
    }

    let code_generator = CodeGenerator::new();
    let mut rust_code = String::new();

    // Struct definitions are emitted before typedefs, globals and functions.
    for hir_struct in &hir_structs {
        let struct_code = code_generator.generate_struct(hir_struct);
        rust_code.push_str(&struct_code);
        rust_code.push('\n');
    }

    // Typedefs that fail to generate are skipped silently.
    for typedef in &hir_typedefs {
        if let Ok(typedef_code) = code_generator.generate_typedef(typedef) {
            rust_code.push_str(&typedef_code);
            rust_code.push('\n');
        }
    }

    // C globals are emitted as `static mut`; globals without an initializer
    // become `Option<T> = None`.
    for var_stmt in &hir_variables {
        if let decy_hir::HirStatement::VariableDeclaration {
            name,
            var_type,
            initializer,
        } = var_stmt
        {
            let type_str = CodeGenerator::map_type(var_type);

            if let Some(init_expr) = initializer {
                let init_code = code_generator.generate_expression(init_expr);
                rust_code.push_str(&format!(
                    "static mut {}: {} = {};\n",
                    name, type_str, init_code
                ));
            } else {
                rust_code.push_str(&format!(
                    "static mut {}: Option<{}> = None;\n",
                    name, type_str
                ));
            }
        }
    }
    if !hir_variables.is_empty() {
        rust_code.push('\n');
    }

    // Finally, emit the transformed functions with their annotated signatures.
    for (func, annotated_sig) in &transformed_functions {
        let generated = code_generator.generate_function_with_lifetimes_and_structs(
            func,
            annotated_sig,
            &hir_structs,
        );
        rust_code.push_str(&generated);
        rust_code.push('\n');
    }

    Ok(rust_code)
}
863
864pub fn transpile_with_box_transform(c_code: &str) -> Result<String> {
886 let parser = CParser::new().context("Failed to create C parser")?;
888 let ast = parser.parse(c_code).context("Failed to parse C code")?;
889
890 let hir_functions: Vec<HirFunction> = ast
892 .functions()
893 .iter()
894 .map(HirFunction::from_ast_function)
895 .collect();
896
897 let code_generator = CodeGenerator::new();
899 let pattern_detector = PatternDetector::new();
900 let mut rust_code = String::new();
901
902 for func in &hir_functions {
903 let candidates = pattern_detector.find_box_candidates(func);
905
906 let generated = code_generator.generate_function_with_box_transform(func, &candidates);
907 rust_code.push_str(&generated);
908 rust_code.push('\n');
909 }
910
911 Ok(rust_code)
912}
913
914pub fn transpile_file(path: &Path, _context: &ProjectContext) -> Result<TranspiledFile> {
941 let c_code = std::fs::read_to_string(path)
943 .with_context(|| format!("Failed to read file: {}", path.display()))?;
944
945 let dependencies = extract_dependencies(path, &c_code)?;
947
948 let rust_code = transpile(&c_code)?;
950
951 let functions_exported = extract_function_names(&rust_code);
953
954 let ffi_declarations = generate_ffi_declarations(&functions_exported);
956
957 Ok(TranspiledFile::new(
958 path.to_path_buf(),
959 rust_code,
960 dependencies,
961 functions_exported,
962 ffi_declarations,
963 ))
964}
965
966fn extract_dependencies(source_path: &Path, c_code: &str) -> Result<Vec<PathBuf>> {
971 let mut dependencies = Vec::new();
972 let source_dir = source_path
973 .parent()
974 .ok_or_else(|| anyhow::anyhow!("Source file has no parent directory"))?;
975
976 for line in c_code.lines() {
977 let trimmed = line.trim();
978 if trimmed.starts_with("#include") {
979 if let Some(start) = trimmed.find('"') {
981 if let Some(end) = trimmed[start + 1..].find('"') {
982 let header_name = &trimmed[start + 1..start + 1 + end];
983 let header_path = source_dir.join(header_name);
984 if header_path.exists() {
985 dependencies.push(header_path);
986 }
987 }
988 }
989 }
990 }
991
992 Ok(dependencies)
993}
994
/// Collect the names of `fn`/`pub fn` items appearing in `rust_code`,
/// stripping any generic parameter list from the name.
///
/// This is a line-based textual scan, not a parse: it only recognizes
/// definitions where the (trimmed) line starts with `fn ` or `pub fn `
/// and contains an opening parenthesis.
fn extract_function_names(rust_code: &str) -> Vec<String> {
    let mut names = Vec::new();

    for raw_line in rust_code.lines() {
        let line = raw_line.trim();

        // Strip the definition keyword; skip lines that aren't definitions.
        let rest = match line
            .strip_prefix("pub fn ")
            .or_else(|| line.strip_prefix("fn "))
        {
            Some(rest) => rest,
            None => continue,
        };

        if let Some(paren) = rest.find('(') {
            let ident = &rest[..paren];
            // Drop a generic parameter list: `name<T>` -> `name`.
            let ident = ident.split('<').next().unwrap_or(ident);
            names.push(ident.trim().to_string());
        }
    }

    names
}
1026
/// Build a comment-only FFI declaration stub listing the exported function
/// names inside an `extern "C"` block; returns an empty string when there
/// are no functions.
fn generate_ffi_declarations(functions: &[String]) -> String {
    if functions.is_empty() {
        return String::new();
    }

    let header = "// FFI declarations for C interoperability\n#[no_mangle]\nextern \"C\" {\n";

    // One comment line per exported function.
    let body: String = functions
        .iter()
        .map(|name| format!(" // {}\n", name))
        .collect();

    format!("{}{}}}\n", header, body)
}
1046
#[cfg(test)]
mod tests {
    use super::*;

    // These tests drive the full parse -> HIR -> ownership -> codegen
    // pipeline end to end, so they double as smoke tests for the
    // downstream decy_* crates. Assertions are deliberately loose
    // (`contains`) to avoid coupling to exact generated formatting.

    #[test]
    fn test_transpile_simple_function() {
        let c_code = "int add(int a, int b) { return a + b; }";
        let result = transpile(c_code);
        assert!(result.is_ok(), "Transpilation should succeed");

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn add"), "Should contain function name");
        assert!(rust_code.contains("i32"), "Should contain Rust int type");
    }

    #[test]
    fn test_transpile_with_parameters() {
        let c_code = "int multiply(int x, int y) { return x * y; }";
        let result = transpile(c_code);
        assert!(result.is_ok());

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn multiply"));
        assert!(rust_code.contains("x"));
        assert!(rust_code.contains("y"));
    }

    #[test]
    fn test_transpile_void_function() {
        let c_code = "void do_nothing() { }";
        let result = transpile(c_code);
        assert!(result.is_ok());

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn do_nothing"));
    }

    #[test]
    fn test_transpile_with_box_transform_simple() {
        // Exercises the alternate Box-transform code path.
        let c_code = "int get_value() { return 42; }";
        let result = transpile_with_box_transform(c_code);
        assert!(result.is_ok());

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn get_value"));
    }

    #[test]
    fn test_transpile_empty_input() {
        // Empty source should transpile cleanly, not error.
        let c_code = "";
        let result = transpile(c_code);
        assert!(result.is_ok());
    }

    #[test]
    fn test_transpile_integration_pipeline() {
        let c_code = r#"
        int calculate(int a, int b) {
            int result;
            result = a + b;
            return result;
        }
        "#;
        let result = transpile(c_code);
        assert!(result.is_ok(), "Full pipeline should execute");

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn calculate"));
        // Local C declarations are expected to come out as `let mut`.
        assert!(rust_code.contains("let mut result"));
    }

    #[test]
    fn test_transpile_with_lifetime_annotations() {
        let c_code = "int add(int a, int b) { return a + b; }";
        let result = transpile(c_code);
        assert!(
            result.is_ok(),
            "Transpilation with lifetime analysis should succeed"
        );

        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn add"));

    }
}