// batuta/pipeline/stages/transpilation.rs

use anyhow::Result;

#[cfg(feature = "native")]
use tracing::{info, warn};
// When the "native" feature (and therefore `tracing`) is disabled, provide a
// no-op stand-in for `tracing::info!`. The matcher swallows the tokens
// outright, so the arguments are never evaluated in non-native builds.
#[cfg(not(feature = "native"))]
macro_rules! info {
    ($($arg:tt)*) => {{}};
}
// No-op stand-in for `tracing::warn!` in non-native builds; like `info!`
// above, it discards its arguments without evaluating them.
#[cfg(not(feature = "native"))]
macro_rules! warn {
    ($($arg:tt)*) => {{}};
}

use crate::pipeline::types::{PipelineContext, PipelineStage, ValidationResult};
use crate::pipeline_analysis::LibraryAnalyzer;

/// Pipeline stage that converts the project's primary source language to
/// Rust by shelling out to an external transpiler (Depyler, Bashrs, or Decy).
pub struct TranspilationStage {
    /// Incremental-transpilation flag (set via `new`; not read in this file —
    /// presumably consumed elsewhere in the crate, TODO confirm).
    pub(crate) incremental: bool,
    /// Result-caching flag (set via `new`; not read in this file).
    pub(crate) cache: bool,
    /// Detects NumPy/sklearn/PyTorch usage in Python projects to emit
    /// conversion guidance into the pipeline metadata.
    pub(crate) library_analyzer: LibraryAnalyzer,
}
29impl TranspilationStage {
30 pub fn new(incremental: bool, cache: bool) -> Self {
31 Self { incremental, cache, library_analyzer: LibraryAnalyzer::new() }
32 }
33}
35#[async_trait::async_trait]
36impl PipelineStage for TranspilationStage {
37 fn name(&self) -> &'static str {
38 "Transpilation"
39 }
40
41 async fn execute(&self, mut ctx: PipelineContext) -> Result<PipelineContext> {
42 info!(
43 "Transpiling {} to Rust",
44 ctx.primary_language
45 .as_ref()
46 .map(|l| format!("{}", l))
47 .unwrap_or_else(|| "unknown".to_string())
48 );
49
50 std::fs::create_dir_all(&ctx.output_path)?;
52 std::fs::create_dir_all(ctx.output_path.join("src"))?;
53
54 #[cfg(feature = "native")]
56 if let Some(crate::types::Language::Python) = ctx.primary_language {
57 self.analyze_python_libraries(&mut ctx)?;
58 }
59
60 let tools = crate::tools::ToolRegistry::detect();
62
63 if let Some(lang) = &ctx.primary_language {
65 use crate::types::Language;
66
67 info!("Starting transpilation for language: {}", lang);
68
69 let result = match lang {
70 Language::Python => {
71 if tools.depyler.is_some() {
72 info!("Using Depyler for Python transpilation");
73 crate::tools::transpile_python(&ctx.input_path, &ctx.output_path)
74 } else {
75 anyhow::bail!("Depyler not available. Install with: cargo install depyler");
76 }
77 }
78 Language::Shell => {
79 if tools.bashrs.is_some() {
80 info!("Using Bashrs for Shell transpilation");
81 crate::tools::transpile_shell(&ctx.input_path, &ctx.output_path)
82 } else {
83 anyhow::bail!("Bashrs not available. Install with: cargo install bashrs");
84 }
85 }
86 Language::C | Language::Cpp => {
87 if tools.decy.is_some() {
88 info!("Using Decy for C/C++ transpilation");
89 crate::tools::transpile_c_cpp(&ctx.input_path, &ctx.output_path)
90 } else {
91 anyhow::bail!("Decy not available. Install with: cargo install decy");
92 }
93 }
94 _ => {
95 anyhow::bail!("No transpiler available for language: {}", lang);
96 }
97 };
98
99 match result {
100 Ok(output) => {
101 info!("Transpilation completed successfully");
102 info!("Output: {}", output);
103
104 ctx.metadata
105 .insert("transpiler".to_string(), serde_json::json!(format!("{}", lang)));
106 ctx.metadata
107 .insert("transpilation_output".to_string(), serde_json::json!(output));
108 }
109 Err(e) => {
110 warn!("Transpilation failed: {}", e);
111 anyhow::bail!("Transpilation failed: {}", e);
112 }
113 }
114 } else {
115 anyhow::bail!("No primary language detected");
116 }
117
118 Ok(ctx)
119 }
120
121 fn validate(&self, ctx: &PipelineContext) -> Result<ValidationResult> {
122 let src_dir = ctx.output_path.join("src");
124 let passed = src_dir.exists() && src_dir.read_dir()?.next().is_some();
125
126 Ok(ValidationResult {
127 stage: self.name().to_string(),
128 passed,
129 message: if passed {
130 "Transpilation output validated".to_string()
131 } else {
132 "No transpiled files found".to_string()
133 },
134 details: None,
135 })
136 }
137}
139impl TranspilationStage {
140 #[cfg(feature = "native")]
141 #[allow(clippy::cognitive_complexity)]
142 fn analyze_python_libraries(&self, ctx: &mut PipelineContext) -> Result<()> {
143 info!("Analyzing NumPy usage for conversion guidance");
145 match self.library_analyzer.analyze_numpy_usage(&ctx.input_path) {
146 Ok(recommendations) => {
147 if !recommendations.is_empty() {
148 info!("Found {} NumPy operations to convert:", recommendations.len());
149 for rec in &recommendations {
150 info!(" - {}", rec);
151 }
152
153 ctx.metadata.insert(
154 "numpy_conversions".to_string(),
155 serde_json::json!(recommendations),
156 );
157
158 ctx.metadata.insert("numpy_detected".to_string(), serde_json::json!(true));
159 } else {
160 info!("No NumPy usage detected");
161 ctx.metadata.insert("numpy_detected".to_string(), serde_json::json!(false));
162 }
163 }
164 Err(e) => {
165 warn!("NumPy analysis failed: {}", e);
166 }
167 }
168
169 info!("Analyzing sklearn usage for conversion guidance");
171 match self.library_analyzer.analyze_sklearn_usage(&ctx.input_path) {
172 Ok(recommendations) => {
173 if !recommendations.is_empty() {
174 info!("Found {} sklearn algorithms to convert:", recommendations.len());
175 for rec in &recommendations {
176 info!(" - {}", rec);
177 }
178
179 ctx.metadata.insert(
180 "sklearn_conversions".to_string(),
181 serde_json::json!(recommendations),
182 );
183
184 ctx.metadata.insert("sklearn_detected".to_string(), serde_json::json!(true));
185 } else {
186 info!("No sklearn usage detected");
187 ctx.metadata.insert("sklearn_detected".to_string(), serde_json::json!(false));
188 }
189 }
190 Err(e) => {
191 warn!("sklearn analysis failed: {}", e);
192 }
193 }
194
195 info!("Analyzing PyTorch usage for conversion guidance");
197 match self.library_analyzer.analyze_pytorch_usage(&ctx.input_path) {
198 Ok(recommendations) => {
199 if !recommendations.is_empty() {
200 info!("Found {} PyTorch operations to convert:", recommendations.len());
201 for rec in &recommendations {
202 info!(" - {}", rec);
203 }
204
205 ctx.metadata.insert(
206 "pytorch_conversions".to_string(),
207 serde_json::json!(recommendations),
208 );
209
210 ctx.metadata.insert("pytorch_detected".to_string(), serde_json::json!(true));
211 } else {
212 info!("No PyTorch usage detected");
213 ctx.metadata.insert("pytorch_detected".to_string(), serde_json::json!(false));
214 }
215 }
216 Err(e) => {
217 warn!("PyTorch analysis failed: {}", e);
218 }
219 }
220
221 Ok(())
222 }
223}