#![warn(missing_docs)]
#![warn(clippy::all)]
#![deny(unsafe_code)]
pub mod metrics;
pub use metrics::{CompileMetrics, TranspilationResult};
use anyhow::{Context, Result};
use decy_analyzer::patterns::PatternDetector;
use decy_codegen::CodeGenerator;
use decy_hir::{HirExpression, HirFunction, HirStatement};
use decy_ownership::{
array_slice::ArrayParameterTransformer, borrow_gen::BorrowGenerator,
classifier_integration::classify_with_rules, dataflow::DataflowAnalyzer,
lifetime::LifetimeAnalyzer, lifetime_gen::LifetimeAnnotator,
};
use decy_parser::parser::CParser;
use decy_stdlib::StdlibPrototypes;
use petgraph::graph::{DiGraph, NodeIndex};
use petgraph::visit::Topo;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
/// Result of transpiling a single C source file to Rust.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct TranspiledFile {
    /// Path of the original C source file.
    pub source_path: PathBuf,
    /// The generated Rust code.
    pub rust_code: String,
    /// Resolved paths of local headers this file `#include`s.
    pub dependencies: Vec<PathBuf>,
    /// Names of the functions defined in `rust_code`.
    pub functions_exported: Vec<String>,
    /// Generated FFI declaration stub for C interoperability.
    pub ffi_declarations: String,
}
impl TranspiledFile {
    /// Bundles the outputs of a single-file transpilation.
    pub fn new(
        source_path: PathBuf,
        rust_code: String,
        dependencies: Vec<PathBuf>,
        functions_exported: Vec<String>,
        ffi_declarations: String,
    ) -> Self {
        Self {
            source_path,
            rust_code,
            dependencies,
            functions_exported,
            ffi_declarations,
        }
    }
}
/// Cross-file context accumulated while transpiling a multi-file project.
#[derive(Debug, Clone, Default)]
pub struct ProjectContext {
    /// Type name -> the `struct`/`enum` declaration line it was found on.
    types: HashMap<String, String>,
    /// Function name -> source path (lossy string) of the defining file.
    functions: HashMap<String, String>,
    /// All transpiled files seen so far, keyed by source path.
    transpiled_files: HashMap<PathBuf, TranspiledFile>,
}
impl ProjectContext {
    /// Creates an empty project context.
    pub fn new() -> Self {
        Self::default()
    }
    /// Records a transpiled file and indexes its type and function names.
    pub fn add_transpiled_file(&mut self, file: &TranspiledFile) {
        self.transpiled_files
            .insert(file.source_path.clone(), file.clone());
        // Cheap textual scan of the generated Rust for type declarations.
        if file.rust_code.contains("struct") {
            for line in file.rust_code.lines() {
                if line.contains("struct") {
                    if let Some(name) = self.extract_type_name(line) {
                        // `name` is already owned here; the previous
                        // `name.clone()` was a redundant allocation.
                        self.types.insert(name, line.to_string());
                    }
                }
            }
        }
        for func_name in &file.functions_exported {
            self.functions.insert(
                func_name.clone(),
                file.source_path.to_string_lossy().to_string(),
            );
        }
    }
    /// Returns `true` if a type with this name has been indexed.
    pub fn has_type(&self, type_name: &str) -> bool {
        self.types.contains_key(type_name)
    }
    /// Returns `true` if a function with this name has been indexed.
    pub fn has_function(&self, func_name: &str) -> bool {
        self.functions.contains_key(func_name)
    }
    /// Returns the source path (as recorded) of the file defining `func_name`.
    pub fn get_function_source(&self, func_name: &str) -> Option<&str> {
        self.functions.get(func_name).map(|s| s.as_str())
    }
    /// Extracts the identifier following `struct`/`enum` on a declaration
    /// line, stripping a trailing `{` or `<` (generics/open brace).
    fn extract_type_name(&self, line: &str) -> Option<String> {
        let words: Vec<&str> = line.split_whitespace().collect();
        if let Some(idx) = words.iter().position(|&w| w == "struct" || w == "enum") {
            if idx + 1 < words.len() {
                let name = words[idx + 1].trim_end_matches('{').trim_end_matches('<');
                return Some(name.to_string());
            }
        }
        None
    }
}
/// Directed graph of `#include` dependencies between C source files.
///
/// Nodes are file paths; an edge `A -> B` means `A` includes (depends on) `B`.
#[derive(Debug, Clone)]
pub struct DependencyGraph {
    graph: DiGraph<PathBuf, ()>,
    path_to_node: HashMap<PathBuf, NodeIndex>,
}
impl DependencyGraph {
    /// Creates an empty dependency graph.
    pub fn new() -> Self {
        Self {
            graph: DiGraph::new(),
            path_to_node: HashMap::new(),
        }
    }
    /// Returns `true` if no files have been added.
    pub fn is_empty(&self) -> bool {
        self.graph.node_count() == 0
    }
    /// Number of files (nodes) in the graph.
    pub fn file_count(&self) -> usize {
        self.graph.node_count()
    }
    /// Returns `true` if `path` has been added to the graph.
    pub fn contains_file(&self, path: &Path) -> bool {
        self.path_to_node.contains_key(path)
    }
    /// Adds a file node; a no-op if `path` is already present.
    pub fn add_file(&mut self, path: &Path) {
        if !self.contains_file(path) {
            let node = self.graph.add_node(path.to_path_buf());
            self.path_to_node.insert(path.to_path_buf(), node);
        }
    }
    /// Records that `from` depends on (includes) `to`.
    ///
    /// # Panics
    /// Panics if either path was not previously registered via
    /// [`Self::add_file`].
    pub fn add_dependency(&mut self, from: &Path, to: &Path) {
        let from_node = *self
            .path_to_node
            .get(from)
            .expect("from file must be added to graph first");
        let to_node = *self
            .path_to_node
            .get(to)
            .expect("to file must be added to graph first");
        self.graph.add_edge(from_node, to_node, ());
    }
    /// Returns `true` if an edge `from -> to` exists (both files known).
    pub fn has_dependency(&self, from: &Path, to: &Path) -> bool {
        if let (Some(&from_node), Some(&to_node)) =
            (self.path_to_node.get(from), self.path_to_node.get(to))
        {
            self.graph.contains_edge(from_node, to_node)
        } else {
            false
        }
    }
    /// Computes a build order in which every file appears after the files it
    /// depends on (dependencies first).
    ///
    /// # Errors
    /// Returns an error if the dependency graph contains a cycle.
    pub fn topological_sort(&self) -> Result<Vec<PathBuf>> {
        if petgraph::algo::is_cyclic_directed(&self.graph) {
            return Err(anyhow::anyhow!(
                "Circular dependency detected in file dependencies"
            ));
        }
        let mut topo = Topo::new(&self.graph);
        let mut build_order = Vec::new();
        while let Some(node) = topo.next(&self.graph) {
            if let Some(path) = self.graph.node_weight(node) {
                build_order.push(path.clone());
            }
        }
        // Topo yields dependents before dependencies (edges point at
        // includes), so reverse to get a dependencies-first build order.
        build_order.reverse();
        Ok(build_order)
    }
    /// Builds a dependency graph for `files` by scanning their `#include`
    /// directives. Only includes that resolve (relative to the including
    /// file's directory) to another file in `files` become edges.
    ///
    /// # Errors
    /// Returns an error if any file cannot be read.
    pub fn from_files(files: &[PathBuf]) -> Result<Self> {
        let mut graph = Self::new();
        for file in files {
            graph.add_file(file);
        }
        for file in files {
            let content = std::fs::read_to_string(file)
                .with_context(|| format!("Failed to read file: {}", file.display()))?;
            let includes = Self::parse_include_directives(&content);
            let file_dir = file.parent().unwrap_or_else(|| Path::new("."));
            for include in includes {
                let include_path = file_dir.join(&include);
                if graph.contains_file(&include_path) {
                    graph.add_dependency(file, &include_path);
                }
            }
        }
        Ok(graph)
    }
    /// Extracts the target of every `#include` directive in `code`, handling
    /// both `"local"` and `<system>` forms.
    pub fn parse_include_directives(code: &str) -> Vec<String> {
        let mut includes = Vec::new();
        for line in code.lines() {
            let trimmed = line.trim();
            if trimmed.starts_with("#include") {
                if let Some(start) = trimmed.find('"').or_else(|| trimmed.find('<')) {
                    // `find` returns a *byte* offset, so inspect the byte at
                    // `start` rather than `chars().nth(start)` (a char index),
                    // which was O(n) and wrong for non-ASCII prefixes.
                    let end_char = if trimmed.as_bytes()[start] == b'"' {
                        '"'
                    } else {
                        '>'
                    };
                    if let Some(end) = trimmed[start + 1..].find(end_char) {
                        let filename = &trimmed[start + 1..start + 1 + end];
                        includes.push(filename.to_string());
                    }
                }
            }
        }
        includes
    }
    /// Heuristically checks whether the file at `path` uses a classic
    /// `#ifndef`/`#define`/`#endif` header guard (or `#if !defined`).
    /// The three directives are checked independently, not for correct order.
    ///
    /// # Errors
    /// Returns an error if the file cannot be read.
    pub fn has_header_guard(path: &Path) -> Result<bool> {
        let content = std::fs::read_to_string(path)
            .with_context(|| format!("Failed to read file: {}", path.display()))?;
        let has_ifndef = content.lines().any(|line| {
            let trimmed = line.trim();
            trimmed.starts_with("#ifndef") || trimmed.starts_with("#if !defined")
        });
        let has_define = content
            .lines()
            .any(|line| line.trim().starts_with("#define"));
        let has_endif = content
            .lines()
            .any(|line| line.trim().starts_with("#endif"));
        Ok(has_ifndef && has_define && has_endif)
    }
}
impl Default for DependencyGraph {
    fn default() -> Self {
        Self::new()
    }
}
/// Aggregate hit/miss counters reported by `TranspilationCache::statistics`.
#[derive(Debug, Clone)]
pub struct CacheStatistics {
    /// Lookups served from the cache with all hashes still valid.
    pub hits: usize,
    /// Lookups that found a stale entry (source or dependency changed).
    pub misses: usize,
    /// Total number of entries currently stored.
    pub total_files: usize,
}
/// A single cached transpilation, validated by content hashes.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
struct CacheEntry {
    /// SHA-256 (lowercase hex) of the source file when the entry was created.
    hash: String,
    /// The cached transpilation output.
    transpiled: TranspiledFile,
    /// SHA-256 (lowercase hex) of each dependency at creation time.
    dependency_hashes: HashMap<PathBuf, String>,
}
/// Content-addressed cache of transpiled files.
///
/// Entries are keyed by source path and validated by SHA-256 of the source
/// file and of every recorded dependency, so editing either invalidates the
/// entry on the next lookup.
#[derive(Debug, Clone)]
pub struct TranspilationCache {
    entries: HashMap<PathBuf, CacheEntry>,
    /// Directory used by [`Self::save`]/[`Self::load`]; `None` = in-memory.
    cache_dir: Option<PathBuf>,
    hits: usize,
    misses: usize,
}
impl TranspilationCache {
    /// Creates an empty, in-memory cache with no backing directory.
    pub fn new() -> Self {
        Self {
            entries: HashMap::new(),
            cache_dir: None,
            hits: 0,
            misses: 0,
        }
    }
    /// Creates an empty cache that persists to `cache_dir` on [`Self::save`].
    pub fn with_directory(cache_dir: &Path) -> Self {
        Self {
            entries: HashMap::new(),
            cache_dir: Some(cache_dir.to_path_buf()),
            hits: 0,
            misses: 0,
        }
    }
    /// SHA-256 hash (lowercase hex) of the file contents at `path`.
    ///
    /// # Errors
    /// Returns an error if the file cannot be read.
    pub fn compute_hash(&self, path: &Path) -> Result<String> {
        use sha2::{Digest, Sha256};
        let content = std::fs::read(path)
            .with_context(|| format!("Failed to read file for hashing: {}", path.display()))?;
        let mut hasher = Sha256::new();
        hasher.update(&content);
        let result = hasher.finalize();
        Ok(format!("{:x}", result))
    }
    /// Inserts an entry for `path`, recording the current hashes of the
    /// source and every dependency. Silently does nothing if the source
    /// cannot be hashed; unreadable dependencies are simply not recorded.
    pub fn insert(&mut self, path: &Path, transpiled: &TranspiledFile) {
        let hash = match self.compute_hash(path) {
            Ok(h) => h,
            Err(_) => return,
        };
        let mut dependency_hashes = HashMap::new();
        for dep_path in &transpiled.dependencies {
            if let Ok(dep_hash) = self.compute_hash(dep_path) {
                dependency_hashes.insert(dep_path.clone(), dep_hash);
            }
        }
        let entry = CacheEntry {
            hash,
            transpiled: transpiled.clone(),
            dependency_hashes,
        };
        self.entries.insert(path.to_path_buf(), entry);
    }
    /// Returns the cached result for `path` if the source and all recorded
    /// dependencies still hash to their cached values.
    ///
    /// A miss is counted only when an entry exists but is stale; a lookup
    /// for an unknown path returns `None` without touching the counters.
    pub fn get(&mut self, path: &Path) -> Option<&TranspiledFile> {
        // `HashMap<PathBuf, _>` can be queried by `&Path` directly via
        // `Borrow<Path>`; the previous `&path.to_path_buf()` allocated a
        // fresh PathBuf on every lookup.
        let entry = self.entries.get(path)?;
        let current_hash = self.compute_hash(path).ok()?;
        if current_hash != entry.hash {
            self.misses += 1;
            return None;
        }
        for (dep_path, cached_hash) in &entry.dependency_hashes {
            if let Ok(current_dep_hash) = self.compute_hash(dep_path) {
                // Restored from the corrupted `¤t_dep_hash` token,
                // which did not compile.
                if &current_dep_hash != cached_hash {
                    self.misses += 1;
                    return None;
                }
            }
            // NOTE(review): a dependency that can no longer be read is
            // skipped (treated as unchanged) — confirm this is intended.
        }
        self.hits += 1;
        Some(&entry.transpiled)
    }
    /// Serializes all entries to `<cache_dir>/cache.json`.
    ///
    /// # Errors
    /// Returns an error if no cache directory is configured, the directory
    /// cannot be created, or the file cannot be written.
    pub fn save(&self) -> Result<()> {
        let cache_dir = self
            .cache_dir
            .as_ref()
            .ok_or_else(|| anyhow::anyhow!("Cache directory not set"))?;
        std::fs::create_dir_all(cache_dir).with_context(|| {
            format!("Failed to create cache directory: {}", cache_dir.display())
        })?;
        let cache_file = cache_dir.join("cache.json");
        let json =
            serde_json::to_string_pretty(&self.entries).context("Failed to serialize cache")?;
        std::fs::write(&cache_file, json)
            .with_context(|| format!("Failed to write cache file: {}", cache_file.display()))?;
        Ok(())
    }
    /// Loads a cache from `<cache_dir>/cache.json`, or returns an empty cache
    /// bound to `cache_dir` if the file does not exist. Counters start at 0.
    ///
    /// # Errors
    /// Returns an error if the cache file exists but cannot be read or parsed.
    pub fn load(cache_dir: &Path) -> Result<Self> {
        let cache_file = cache_dir.join("cache.json");
        if !cache_file.exists() {
            return Ok(Self::with_directory(cache_dir));
        }
        let json = std::fs::read_to_string(&cache_file)
            .with_context(|| format!("Failed to read cache file: {}", cache_file.display()))?;
        let entries: HashMap<PathBuf, CacheEntry> =
            serde_json::from_str(&json).context("Failed to deserialize cache")?;
        Ok(Self {
            entries,
            cache_dir: Some(cache_dir.to_path_buf()),
            hits: 0,
            misses: 0,
        })
    }
    /// Removes all entries and resets the hit/miss counters.
    pub fn clear(&mut self) {
        self.entries.clear();
        self.hits = 0;
        self.misses = 0;
    }
    /// Snapshot of the current hit/miss counters and entry count.
    pub fn statistics(&self) -> CacheStatistics {
        CacheStatistics {
            hits: self.hits,
            misses: self.misses,
            total_files: self.entries.len(),
        }
    }
}
impl Default for TranspilationCache {
    fn default() -> Self {
        Self::new()
    }
}
/// Recursively inlines `#include` directives in `source`.
///
/// - `"local"` includes are read from disk relative to `base_dir`; each file
///   is inlined at most once (`processed` tracks visited paths) and a missing
///   file is a hard error.
/// - `<system>` headers are commented out; known headers get built-in
///   prototypes injected from `stdlib_prototypes`, at most once per header
///   (`injected_headers`).
/// - All other lines are copied through unchanged.
fn preprocess_includes(
    source: &str,
    base_dir: Option<&Path>,
    processed: &mut std::collections::HashSet<PathBuf>,
    stdlib_prototypes: &StdlibPrototypes,
    injected_headers: &mut std::collections::HashSet<String>,
) -> Result<String> {
    let mut result = String::new();
    let base_dir = base_dir.unwrap_or_else(|| Path::new("."));
    for line in source.lines() {
        let trimmed = line.trim();
        if trimmed.starts_with("#include") {
            // Parse the include target; a malformed directive (unterminated
            // quote/bracket, or neither delimiter) is passed through verbatim.
            let (filename, is_system) = if let Some(start) = trimmed.find('"') {
                if let Some(end) = trimmed[start + 1..].find('"') {
                    let filename = &trimmed[start + 1..start + 1 + end];
                    (filename, false)
                } else {
                    result.push_str(line);
                    result.push('\n');
                    continue;
                }
            } else if let Some(start) = trimmed.find('<') {
                if let Some(end) = trimmed[start + 1..].find('>') {
                    let filename = &trimmed[start + 1..start + 1 + end];
                    (filename, true)
                } else {
                    result.push_str(line);
                    result.push('\n');
                    continue;
                }
            } else {
                result.push_str(line);
                result.push('\n');
                continue;
            };
            if is_system {
                // Keep the original directive as a comment for traceability.
                result.push_str(&format!("// {}\n", line));
                if !injected_headers.contains(filename) {
                    injected_headers.insert(filename.to_string());
                    if let Some(header) = decy_stdlib::StdHeader::from_filename(filename) {
                        result
                            .push_str(&format!("// BEGIN: Built-in prototypes for {}\n", filename));
                        result.push_str(&stdlib_prototypes.inject_prototypes_for_header(header));
                        result.push_str(&format!("// END: Built-in prototypes for {}\n", filename));
                    } else {
                        result.push_str(&format!("// Unknown system header: {}\n", filename));
                    }
                }
                continue;
            }
            let include_path = base_dir.join(filename);
            if processed.contains(&include_path) {
                // Acts as an implicit include guard: inline each file once.
                result.push_str(&format!("// Already included: {}\n", filename));
                continue;
            }
            if let Ok(included_content) = std::fs::read_to_string(&include_path) {
                processed.insert(include_path.clone());
                // Nested includes resolve relative to the included file's dir.
                let included_dir = include_path.parent().unwrap_or(base_dir);
                let preprocessed = preprocess_includes(
                    &included_content,
                    Some(included_dir),
                    processed,
                    stdlib_prototypes,
                    injected_headers,
                )?;
                result.push_str(&format!("// BEGIN INCLUDE: {}\n", filename));
                result.push_str(&preprocessed);
                result.push_str(&format!("// END INCLUDE: {}\n", filename));
            } else {
                anyhow::bail!("Failed to find include file: {}", include_path.display());
            }
        } else {
            result.push_str(line);
            result.push('\n');
        }
    }
    Ok(result)
}
/// Transpiles a C source string to Rust without a base directory, so
/// `#include "..."` directives resolve relative to the current directory.
///
/// Convenience wrapper around [`transpile_with_includes`].
///
/// # Errors
/// Returns an error if an include cannot be found or parsing fails.
pub fn transpile(c_code: &str) -> Result<String> {
    transpile_with_includes(c_code, None)
}
/// Transpiles `c_code`, packaging success or failure into a
/// [`TranspilationResult`] instead of propagating the error.
///
/// # Errors
/// Currently always returns `Ok`; failures are captured in the result value.
pub fn transpile_with_verification(c_code: &str) -> Result<TranspilationResult> {
    let outcome = transpile(c_code).map_or_else(
        |err| TranspilationResult::failure(String::new(), vec![err.to_string()]),
        TranspilationResult::success,
    );
    Ok(outcome)
}
pub fn transpile_with_includes(c_code: &str, base_dir: Option<&Path>) -> Result<String> {
let stdlib_prototypes = StdlibPrototypes::new();
let mut processed_files = std::collections::HashSet::new();
let mut injected_headers = std::collections::HashSet::new();
let preprocessed = preprocess_includes(
c_code,
base_dir,
&mut processed_files,
&stdlib_prototypes,
&mut injected_headers,
)?;
let parser = CParser::new().context("Failed to create C parser")?;
let ast = parser
.parse(&preprocessed)
.context("Failed to parse C code")?;
let all_hir_functions: Vec<HirFunction> = ast
.functions()
.iter()
.map(HirFunction::from_ast_function)
.collect();
let hir_functions: Vec<HirFunction> = {
use std::collections::HashMap;
let mut func_map: HashMap<String, HirFunction> = HashMap::new();
for func in all_hir_functions {
let name = func.name().to_string();
if let Some(existing) = func_map.get(&name) {
if func.has_body() && !existing.has_body() {
func_map.insert(name, func);
}
} else {
func_map.insert(name, func);
}
}
func_map.into_values().collect()
};
let hir_structs: Vec<decy_hir::HirStruct> = ast
.structs()
.iter()
.map(|s| {
let fields = s
.fields
.iter()
.map(|f| {
decy_hir::HirStructField::new(
f.name.clone(),
decy_hir::HirType::from_ast_type(&f.field_type),
)
})
.collect();
decy_hir::HirStruct::new(s.name.clone(), fields)
})
.collect();
let hir_variables: Vec<decy_hir::HirStatement> = ast
.variables()
.iter()
.map(|v| decy_hir::HirStatement::VariableDeclaration {
name: v.name().to_string(),
var_type: decy_hir::HirType::from_ast_type(v.var_type()),
initializer: v
.initializer()
.map(decy_hir::HirExpression::from_ast_expression),
})
.collect();
let hir_typedefs: Vec<decy_hir::HirTypedef> = ast
.typedefs()
.iter()
.map(|t| {
decy_hir::HirTypedef::new(
t.name().to_string(),
decy_hir::HirType::from_ast_type(&t.underlying_type),
)
})
.collect();
let slice_func_args: Vec<(String, Vec<(usize, usize)>)> = hir_functions
.iter()
.filter_map(|func| {
let mut mappings = Vec::new();
let params = func.parameters();
for (i, param) in params.iter().enumerate() {
if matches!(param.param_type(), decy_hir::HirType::Pointer(_)) {
if i + 1 < params.len() {
let next_param = ¶ms[i + 1];
if matches!(next_param.param_type(), decy_hir::HirType::Int) {
let param_name = next_param.name().to_lowercase();
if param_name.contains("len")
|| param_name.contains("size")
|| param_name.contains("count")
|| param_name == "n"
|| param_name == "num"
{
mappings.push((i, i + 1));
}
}
}
}
}
if mappings.is_empty() {
None
} else {
Some((func.name().to_string(), mappings))
}
})
.collect();
let mut transformed_functions = Vec::new();
for func in hir_functions {
let dataflow_analyzer = DataflowAnalyzer::new();
let dataflow_graph = dataflow_analyzer.analyze(&func);
let ownership_inferences = classify_with_rules(&dataflow_graph, &func);
let borrow_generator = BorrowGenerator::new();
let func_with_borrows = borrow_generator.transform_function(&func, &ownership_inferences);
let array_transformer = ArrayParameterTransformer::new();
let func_with_slices = array_transformer.transform(&func_with_borrows, &dataflow_graph);
let lifetime_analyzer = LifetimeAnalyzer::new();
let scope_tree = lifetime_analyzer.build_scope_tree(&func_with_slices);
let _lifetimes = lifetime_analyzer.track_lifetimes(&func_with_slices, &scope_tree);
let lifetime_annotator = LifetimeAnnotator::new();
let annotated_signature = lifetime_annotator.annotate_function(&func_with_slices);
transformed_functions.push((func_with_slices, annotated_signature));
}
let code_generator = CodeGenerator::new();
let mut rust_code = String::new();
let mut emitted_structs = std::collections::HashSet::new();
let mut emitted_typedefs = std::collections::HashSet::new();
for hir_struct in &hir_structs {
let struct_name = hir_struct.name();
if emitted_structs.contains(struct_name) {
continue; }
emitted_structs.insert(struct_name.to_string());
let struct_code = code_generator.generate_struct(hir_struct);
rust_code.push_str(&struct_code);
rust_code.push('\n');
}
for typedef in &hir_typedefs {
let typedef_name = typedef.name();
if emitted_typedefs.contains(typedef_name) {
continue; }
emitted_typedefs.insert(typedef_name.to_string());
if let Ok(typedef_code) = code_generator.generate_typedef(typedef) {
rust_code.push_str(&typedef_code);
rust_code.push('\n');
}
}
for var_stmt in &hir_variables {
if let decy_hir::HirStatement::VariableDeclaration {
name,
var_type,
initializer,
} = var_stmt
{
let type_str = CodeGenerator::map_type(var_type);
if let Some(init_expr) = initializer {
let init_code = if let decy_hir::HirType::Array {
element_type,
size: Some(size_val),
} = var_type
{
if let decy_hir::HirExpression::IntLiteral(n) = init_expr {
if *n as usize == *size_val {
let element_init = match element_type.as_ref() {
decy_hir::HirType::Char => "0u8".to_string(),
decy_hir::HirType::Int => "0i32".to_string(),
decy_hir::HirType::Float => "0.0f32".to_string(),
decy_hir::HirType::Double => "0.0f64".to_string(),
_ => "0".to_string(),
};
format!("[{}; {}]", element_init, size_val)
} else {
code_generator.generate_expression(init_expr)
}
} else {
code_generator.generate_expression(init_expr)
}
} else {
code_generator.generate_expression(init_expr)
};
rust_code.push_str(&format!(
"static mut {}: {} = {};\n",
name, type_str, init_code
));
} else {
let default_value = match var_type {
decy_hir::HirType::Int => "0".to_string(),
decy_hir::HirType::UnsignedInt => "0".to_string(),
decy_hir::HirType::Char => "0".to_string(),
decy_hir::HirType::Float => "0.0".to_string(),
decy_hir::HirType::Double => "0.0".to_string(),
decy_hir::HirType::Pointer(_) => "std::ptr::null_mut()".to_string(),
decy_hir::HirType::Array { element_type, size } => {
let elem_default = match element_type.as_ref() {
decy_hir::HirType::Char => "0u8",
decy_hir::HirType::Int => "0i32",
decy_hir::HirType::UnsignedInt => "0u32",
decy_hir::HirType::Float => "0.0f32",
decy_hir::HirType::Double => "0.0f64",
_ => "0",
};
if let Some(n) = size {
format!("[{}; {}]", elem_default, n)
} else {
format!("[{}; 0]", elem_default)
}
}
decy_hir::HirType::FunctionPointer { .. } => {
rust_code.push_str(&format!(
"static mut {}: Option<{}> = None;\n",
name, type_str
));
continue;
}
_ => "Default::default()".to_string(),
};
rust_code.push_str(&format!(
"static mut {}: {} = {};\n",
name, type_str, default_value
));
}
}
}
if !hir_variables.is_empty() {
rust_code.push('\n');
}
let all_function_sigs: Vec<(String, Vec<decy_hir::HirType>)> = transformed_functions
.iter()
.map(|(func, _sig)| {
let param_types: Vec<decy_hir::HirType> = func
.parameters()
.iter()
.map(|p| {
if let decy_hir::HirType::Pointer(inner) = p.param_type() {
if uses_pointer_arithmetic(func, p.name())
|| pointer_compared_to_null(func, p.name())
{
p.param_type().clone()
} else {
decy_hir::HirType::Reference {
inner: inner.clone(),
mutable: true,
}
}
} else {
p.param_type().clone()
}
})
.collect();
(func.name().to_string(), param_types)
})
.collect();
let string_iter_funcs: Vec<(String, Vec<(usize, bool)>)> = transformed_functions
.iter()
.filter_map(|(func, _)| {
let params = code_generator.get_string_iteration_params(func);
if params.is_empty() {
None
} else {
Some((func.name().to_string(), params))
}
})
.collect();
for (func, annotated_sig) in &transformed_functions {
let generated = code_generator.generate_function_with_lifetimes_and_structs(
func,
annotated_sig,
&hir_structs,
&all_function_sigs,
&slice_func_args,
&string_iter_funcs,
);
rust_code.push_str(&generated);
rust_code.push('\n');
}
Ok(rust_code)
}
/// Transpiles `c_code` (no include resolution), applying the Box
/// transformation to heap-allocation candidates detected in each function.
///
/// # Errors
/// Returns an error if the parser cannot be created or the source fails
/// to parse.
pub fn transpile_with_box_transform(c_code: &str) -> Result<String> {
    let parser = CParser::new().context("Failed to create C parser")?;
    let ast = parser.parse(c_code).context("Failed to parse C code")?;
    let functions: Vec<HirFunction> = ast
        .functions()
        .iter()
        .map(HirFunction::from_ast_function)
        .collect();
    let generator = CodeGenerator::new();
    let detector = PatternDetector::new();
    let mut output = String::new();
    for function in &functions {
        // Each function is emitted with its detected Box candidates applied.
        let candidates = detector.find_box_candidates(function);
        output.push_str(&generator.generate_function_with_box_transform(function, &candidates));
        output.push('\n');
    }
    Ok(output)
}
/// Transpiles the C file at `path` into a [`TranspiledFile`] bundle: Rust
/// code, local-header dependencies, exported function names, and FFI stub.
/// The project context parameter is currently unused.
///
/// # Errors
/// Returns an error if the file cannot be read or transpilation fails.
pub fn transpile_file(path: &Path, _context: &ProjectContext) -> Result<TranspiledFile> {
    let source = std::fs::read_to_string(path)
        .with_context(|| format!("Failed to read file: {}", path.display()))?;
    let dependencies = extract_dependencies(path, &source)?;
    let rust_code = transpile(&source)?;
    let exported = extract_function_names(&rust_code);
    let ffi = generate_ffi_declarations(&exported);
    Ok(TranspiledFile::new(
        path.to_path_buf(),
        rust_code,
        dependencies,
        exported,
        ffi,
    ))
}
/// Collects the paths of `"local"` headers included by `c_code` that exist
/// on disk next to `source_path`. `<system>` includes are ignored.
///
/// # Errors
/// Returns an error if `source_path` has no parent directory.
fn extract_dependencies(source_path: &Path, c_code: &str) -> Result<Vec<PathBuf>> {
    let source_dir = source_path
        .parent()
        .ok_or_else(|| anyhow::anyhow!("Source file has no parent directory"))?;
    let mut dependencies = Vec::new();
    for raw in c_code.lines() {
        let line = raw.trim();
        if !line.starts_with("#include") {
            continue;
        }
        // Only quoted includes count as local dependencies.
        let Some(open) = line.find('"') else { continue };
        let rest = &line[open + 1..];
        let Some(close) = rest.find('"') else { continue };
        let candidate = source_dir.join(&rest[..close]);
        if candidate.exists() {
            dependencies.push(candidate);
        }
    }
    Ok(dependencies)
}
/// Scans generated Rust code for `fn`/`pub fn` definitions and returns the
/// function names, with any generic parameter list (`<...>`) stripped.
fn extract_function_names(rust_code: &str) -> Vec<String> {
    rust_code
        .lines()
        .filter_map(|raw| {
            let line = raw.trim();
            // Only plain `fn` and `pub fn` headers are recognized.
            let offset = if line.starts_with("pub fn ") {
                7
            } else if line.starts_with("fn ") {
                3
            } else {
                return None;
            };
            let rest = &line[offset..];
            let paren = rest.find('(')?;
            let name = &rest[..paren];
            let name = name.find('<').map_or(name, |generic| &name[..generic]);
            Some(name.trim().to_string())
        })
        .collect()
}
/// Generates a commented FFI declaration stub listing `functions`.
///
/// Returns an empty string when there are no functions. The emitted
/// `extern "C"` block contains only comment lines, not real declarations.
fn generate_ffi_declarations(functions: &[String]) -> String {
    if functions.is_empty() {
        return String::new();
    }
    let mut ffi = String::from("// FFI declarations for C interoperability\n");
    // The previous `#[no_mangle]` attribute on the extern block was invalid
    // placement — it applies to functions and statics, and made the
    // generated code trigger `unused_attributes` — so it is no longer emitted.
    ffi.push_str("extern \"C\" {\n");
    for func_name in functions {
        ffi.push_str(&format!(" // {}\n", func_name));
    }
    ffi.push_str("}\n");
    ffi
}
/// Returns `true` if any statement in `func`'s body (recursively) reassigns
/// `param_name` via pointer arithmetic (`p = p + k` / `p = p - k`).
fn uses_pointer_arithmetic(func: &HirFunction, param_name: &str) -> bool {
    func.body()
        .iter()
        .any(|stmt| statement_uses_pointer_arithmetic(stmt, param_name))
}
/// Returns `true` if any statement in `func`'s body (recursively) compares
/// `param_name` against NULL (or the literal 0).
fn pointer_compared_to_null(func: &HirFunction, param_name: &str) -> bool {
    func.body()
        .iter()
        .any(|stmt| statement_compares_to_null(stmt, param_name))
}
/// Recursively checks whether `stmt` — including nested blocks of `if`,
/// `while`, `for`, and `switch` — contains a comparison of `var_name`
/// against NULL/0.
fn statement_compares_to_null(stmt: &HirStatement, var_name: &str) -> bool {
    // Shared recursion over a statement block.
    let block_has =
        |block: &[HirStatement]| block.iter().any(|s| statement_compares_to_null(s, var_name));
    match stmt {
        HirStatement::If {
            condition,
            then_block,
            else_block,
        } => {
            expression_compares_to_null(condition, var_name)
                || block_has(then_block)
                || else_block.as_ref().is_some_and(|blk| block_has(blk))
        }
        HirStatement::While { condition, body }
        | HirStatement::For {
            condition, body, ..
        } => expression_compares_to_null(condition, var_name) || block_has(body),
        HirStatement::Switch {
            condition, cases, ..
        } => {
            expression_compares_to_null(condition, var_name)
                || cases.iter().any(|c| block_has(&c.body))
        }
        _ => false,
    }
}
/// Recursively checks whether `expr` contains `var_name == NULL`,
/// `var_name != NULL`, or the same with the literal 0, in either operand
/// order.
fn expression_compares_to_null(expr: &HirExpression, var_name: &str) -> bool {
    use decy_hir::BinaryOperator;
    match expr {
        HirExpression::BinaryOp { op, left, right } => {
            let is_var =
                |side: &HirExpression| matches!(side, HirExpression::Variable(name) if name == var_name);
            let is_null = |side: &HirExpression| {
                matches!(
                    side,
                    HirExpression::NullLiteral | HirExpression::IntLiteral(0)
                )
            };
            let is_eq_op = matches!(op, BinaryOperator::Equal | BinaryOperator::NotEqual);
            if is_eq_op && ((is_var(left) && is_null(right)) || (is_var(right) && is_null(left))) {
                return true;
            }
            // Not a direct null comparison; search both operand subtrees.
            expression_compares_to_null(left, var_name)
                || expression_compares_to_null(right, var_name)
        }
        HirExpression::UnaryOp { operand, .. } => expression_compares_to_null(operand, var_name),
        _ => false,
    }
}
/// Recursively checks whether `stmt` — including nested `if`/`while`/`for`
/// blocks — reassigns `var_name` via `var_name = var_name + k` or
/// `var_name = var_name - k`.
fn statement_uses_pointer_arithmetic(stmt: &HirStatement, var_name: &str) -> bool {
    use decy_hir::BinaryOperator;
    // Shared recursion over a statement block.
    let block_has = |block: &[HirStatement]| {
        block
            .iter()
            .any(|s| statement_uses_pointer_arithmetic(s, var_name))
    };
    match stmt {
        HirStatement::Assignment { target, value } => {
            // Match `var = var + ...` / `var = var - ...` exactly.
            matches!(
                value,
                HirExpression::BinaryOp { op, left, .. }
                    if target == var_name
                        && matches!(op, BinaryOperator::Add | BinaryOperator::Subtract)
                        && matches!(&**left, HirExpression::Variable(name) if name == var_name)
            )
        }
        HirStatement::If {
            then_block,
            else_block,
            ..
        } => {
            block_has(then_block) || else_block.as_ref().is_some_and(|blk| block_has(blk))
        }
        HirStatement::While { body, .. } | HirStatement::For { body, .. } => block_has(body),
        _ => false,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // These are integration-style smoke tests: they assert on substrings of
    // the generated Rust rather than exact output, so codegen formatting
    // changes do not break them.
    #[test]
    fn test_transpile_simple_function() {
        let c_code = "int add(int a, int b) { return a + b; }";
        let result = transpile(c_code);
        assert!(result.is_ok(), "Transpilation should succeed");
        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn add"), "Should contain function name");
        assert!(rust_code.contains("i32"), "Should contain Rust int type");
    }
    #[test]
    fn test_transpile_with_parameters() {
        let c_code = "int multiply(int x, int y) { return x * y; }";
        let result = transpile(c_code);
        assert!(result.is_ok());
        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn multiply"));
        // Parameter names should survive transpilation.
        assert!(rust_code.contains("x"));
        assert!(rust_code.contains("y"));
    }
    #[test]
    fn test_transpile_void_function() {
        let c_code = "void do_nothing() { }";
        let result = transpile(c_code);
        assert!(result.is_ok());
        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn do_nothing"));
    }
    #[test]
    fn test_transpile_with_box_transform_simple() {
        // No Box candidates here; the transform should be a pass-through.
        let c_code = "int get_value() { return 42; }";
        let result = transpile_with_box_transform(c_code);
        assert!(result.is_ok());
        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn get_value"));
    }
    #[test]
    fn test_transpile_empty_input() {
        // Empty source must be accepted (yields empty/trivial output).
        let c_code = "";
        let result = transpile(c_code);
        assert!(result.is_ok());
    }
    #[test]
    fn test_transpile_integration_pipeline() {
        // Exercises declaration + assignment + return through the full
        // ownership/lifetime/codegen pipeline.
        let c_code = r#"
int calculate(int a, int b) {
int result;
result = a + b;
return result;
}
"#;
        let result = transpile(c_code);
        assert!(result.is_ok(), "Full pipeline should execute");
        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn calculate"));
        // The reassigned local should be emitted as mutable.
        assert!(rust_code.contains("let mut result"));
    }
    #[test]
    fn test_transpile_with_lifetime_annotations() {
        let c_code = "int add(int a, int b) { return a + b; }";
        let result = transpile(c_code);
        assert!(
            result.is_ok(),
            "Transpilation with lifetime analysis should succeed"
        );
        let rust_code = result.unwrap();
        assert!(rust_code.contains("fn add"));
    }
}