use anyhow::Result;
use axum::{
    extract::{Multipart, State},
    http::StatusCode,
    response::Json,
};
use serde::Serialize;
use serde_json::{Value, json};
use uuid::Uuid;

use crate::{
    AppState,
    binary::{BinaryAnalysis, ScanResult, analyze_binary, scan_binary},
};

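/// Validates a relative file path from untrusted input: rejects traversal
/// sequences and absolute paths, canonicalizes against the current working
/// directory, and requires the result to be an existing regular file inside it.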
#[allow(dead_code)]
pub fn validate_file_path(
    file_path: &str,
) -> Result<std::path::PathBuf, (StatusCode, Json<ErrorResponse>)> {
    if file_path.contains("..") {
        return Err((
            StatusCode::BAD_REQUEST,
            Json(ErrorResponse {
                error: "invalid_input".to_string(),
                message: "Path traversal not allowed".to_string(),
            }),
        ));
    }

    if file_path.starts_with('/') || (cfg!(windows) && file_path.contains(':')) {
        return Err((
            StatusCode::BAD_REQUEST,
            Json(ErrorResponse {
                error: "invalid_input".to_string(),
                message: "Absolute paths not allowed".to_string(),
            }),
        ));
    }

    let path = std::path::Path::new(file_path);

    let base_dir = std::env::current_dir().map_err(|_e| {
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(ErrorResponse {
                error: "server_error".to_string(),
                message: "Failed to get current directory".to_string(),
            }),
        )
    })?;

    // Resolve relative to the current working directory and canonicalize.
    let full_path = base_dir.join(path);
    let canonical_path = full_path.canonicalize().map_err(|_e| {
        (
            StatusCode::BAD_REQUEST,
            Json(ErrorResponse {
                error: "invalid_input".to_string(),
                message: "Invalid file path".to_string(),
            }),
        )
    })?;

    // Reject anything that escapes the base directory after canonicalization.
    if !canonical_path.starts_with(&base_dir) {
        return Err((
            StatusCode::BAD_REQUEST,
            Json(ErrorResponse {
                error: "invalid_input".to_string(),
                message: "Access denied: Path outside allowed directory".to_string(),
            }),
        ));
    }

    if !canonical_path.exists() {
        return Err((
            StatusCode::BAD_REQUEST,
            Json(ErrorResponse {
                error: "file_not_found".to_string(),
                message: "File not found".to_string(),
            }),
        ));
    }

    let metadata = std::fs::metadata(&canonical_path).map_err(|_e| {
        (
            StatusCode::BAD_REQUEST,
            Json(ErrorResponse {
                error: "file_error".to_string(),
                message: "Cannot access file".to_string(),
            }),
        )
    })?;

    if !metadata.is_file() {
        return Err((
            StatusCode::BAD_REQUEST,
            Json(ErrorResponse {
                error: "invalid_input".to_string(),
                message: "Path is not a regular file".to_string(),
            }),
        ));
    }

    Ok(canonical_path)
}

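/// Response body for a successful upload: the analysis id, the SHA-256 of the
/// uploaded binary, and the full analysis report.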
#[derive(Debug, Serialize)]
pub struct BinaryUploadResponse {
    pub id: Uuid,
    pub hash: String,
    pub analysis: BinaryAnalysis,
}

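/// JSON error body returned alongside a non-2xx status code.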
#[derive(Debug, Serialize)]
pub struct ErrorResponse {
    pub error: String,
    pub message: String,
}

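/// Wrapper carrying a single `ScanResult` for CVE scan responses.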
#[derive(Debug, Serialize)]
pub struct CveScanResponse {
    pub scan_result: ScanResult,
}

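/// Liveness probe: reports service name and the crate version from Cargo metadata.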
pub async fn health_check() -> Json<serde_json::Value> {
    Json(json!({
        "status": "healthy",
        "service": "Nabla",
        "version": env!("CARGO_PKG_VERSION"),
    }))
}

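/// Accepts a multipart upload, heuristically picks the field that carries the
/// binary, runs `analyze_binary` on it, and returns the id, hash, and analysis.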
pub async fn upload_and_analyze_binary(
    State(_state): State<AppState>,
    mut multipart: Multipart,
) -> Result<Json<BinaryUploadResponse>, (StatusCode, Json<ErrorResponse>)> {
    let mut file_name = "unknown".to_string();
    let mut contents = vec![];
    let mut found_file = false;

    while let Some(field) = multipart.next_field().await.map_err(|e| {
        (
            StatusCode::BAD_REQUEST,
            Json(ErrorResponse {
                error: "multipart_error".to_string(),
                message: format!("Failed to parse multipart form: {}", e),
            }),
        )
    })? {
        let field_name = field.name().unwrap_or("unknown_field").to_string();
        tracing::debug!("Processing multipart field: '{}'", field_name);

        let field_filename = field.file_name().map(|s| s.to_string());
        if let Some(name) = &field_filename {
            file_name = name.clone();
            tracing::info!("Found filename in multipart: '{}'", file_name);
        }

        let field_contents = field
            .bytes()
            .await
            .map_err(|e| {
                (
                    StatusCode::BAD_REQUEST,
                    Json(ErrorResponse {
                        error: "read_error".to_string(),
                        message: format!("Failed to read field '{}' contents: {}", field_name, e),
                    }),
                )
            })?
            .to_vec();

        tracing::debug!(
            "Field '{}': {} bytes, filename: {:?}",
            field_name,
            field_contents.len(),
            field_filename
        );

        // Treat the field as the uploaded file if it is named like one, carries a
        // filename, or simply contains a non-trivial amount of data.
        if !field_contents.is_empty()
            && (field_name == "file"
                || field_name == "binary"
                || field_filename.is_some()
                || field_contents.len() > 10)
        {
            contents = field_contents;
            found_file = true;
            tracing::info!(
                "Using {} bytes from field '{}' as file content",
                contents.len(),
                field_name
            );
        }
    }

    if !found_file {
        tracing::warn!("No file field found in multipart form");
    }

    if contents.is_empty() {
        return Err((
            StatusCode::BAD_REQUEST,
            Json(ErrorResponse {
                error: "empty_file".to_string(),
                message: "No file content provided".to_string(),
            }),
        ));
    }

    tracing::info!("Analyzing file: '{}' ({} bytes)", file_name, contents.len());

    let analysis = analyze_binary(&file_name, &contents).await.map_err(|e| {
        tracing::error!("Binary analysis failed: {}", e);
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(ErrorResponse {
                error: "analysis_error".to_string(),
                message: format!("Failed to analyze binary: {}", e),
            }),
        )
    })?;

    tracing::info!(
        "Analysis completed for {}: format={}, arch={}, {} strings",
        file_name,
        analysis.format,
        analysis.architecture,
        analysis.embedded_strings.len()
    );

    Ok(Json(BinaryUploadResponse {
        id: analysis.id,
        hash: analysis.hash_sha256.clone(),
        analysis,
    }))
}

use crate::binary::enterprise_scan_binary;

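/// Accepts an uploaded binary, analyzes it, and runs either the enterprise or
/// the OSS vulnerability scanner depending on `config.enterprise_features`,
/// returning the scan result as JSON.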
pub async fn check_cve(
    State(state): State<AppState>,
    mut multipart: Multipart,
) -> Result<Json<Value>, (StatusCode, Json<ErrorResponse>)> {
    tracing::info!("check_cve handler called");

    let mut contents = vec![];
    let mut file_name = "uploaded.bin".to_string();

    while let Some(field) = multipart.next_field().await.map_err(|e| {
        tracing::error!("Error parsing multipart: {}", e);
        (
            StatusCode::BAD_REQUEST,
            Json(ErrorResponse {
                error: "multipart_error".to_string(),
                message: format!("Failed to parse multipart form: {}", e),
            }),
        )
    })? {
        tracing::info!("Found field in multipart: {:?}", field.name());

        if let Some(name) = field.file_name() {
            file_name = name.to_string();
            tracing::info!("Uploaded file: {}", file_name);
        }

        contents = field
            .bytes()
            .await
            .map_err(|e| {
                tracing::error!("Error reading file: {}", e);
                (
                    StatusCode::BAD_REQUEST,
                    Json(ErrorResponse {
                        error: "read_error".to_string(),
                        message: format!("Failed to read file contents: {}", e),
                    }),
                )
            })?
            .to_vec();
    }

    if contents.is_empty() {
        tracing::warn!("No file content provided");
        return Err((
            StatusCode::BAD_REQUEST,
            Json(ErrorResponse {
                error: "empty_file".to_string(),
                message: "No file content provided".to_string(),
            }),
        ));
    }

    let analysis = analyze_binary(&file_name, &contents).await.map_err(|e| {
        tracing::error!("Binary analysis failed: {}", e);
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(ErrorResponse {
                error: "analysis_error".to_string(),
                message: format!("Failed to analyze binary: {}", e),
            }),
        )
    })?;

    tracing::info!("Binary analysis complete: {:?}", analysis);

    // Run the enterprise scanner when enterprise features are enabled;
    // otherwise fall back to the OSS scanner.
    let response_json = if state.config.enterprise_features {
        let scan_result = enterprise_scan_binary(&analysis);
        tracing::info!(
            "Enterprise vuln scan complete. {} vulnerability findings, {} security findings",
            scan_result.vulnerability_findings.len(),
            scan_result.security_findings.len()
        );
        serde_json::to_value(scan_result).unwrap_or_default()
    } else {
        let scan_result = scan_binary(&analysis);
        tracing::info!(
            "OSS vuln scan complete. {} vulnerability findings, {} security findings",
            scan_result.vulnerability_findings.len(),
            scan_result.security_findings.len()
        );
        serde_json::to_value(scan_result).unwrap_or_default()
    };

    Ok(Json(response_json))
}

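/// Accepts exactly two binaries via multipart, analyzes both, and returns a
/// JSON diff of sizes, hashes, imports, exports, and detected symbols.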
pub async fn diff_binaries(
    State(_state): State<AppState>,
    mut multipart: Multipart,
) -> Result<Json<Value>, (StatusCode, Json<ErrorResponse>)> {
    let mut files: Vec<(String, Vec<u8>)> = Vec::new();
    while let Some(field) = multipart.next_field().await.map_err(|e| {
        (
            StatusCode::BAD_REQUEST,
            Json(ErrorResponse {
                error: "multipart_error".to_string(),
                message: format!("Failed parsing multipart: {}", e),
            }),
        )
    })? {
        let name = field
            .file_name()
            .map(|s| s.to_string())
            .unwrap_or_else(|| "file".to_string());
        let bytes = field
            .bytes()
            .await
            .map_err(|e| {
                (
                    StatusCode::BAD_REQUEST,
                    Json(ErrorResponse {
                        error: "read_error".to_string(),
                        message: format!("Failed to read file: {}", e),
                    }),
                )
            })?
            .to_vec();
        files.push((name, bytes));
    }

    if files.len() != 2 {
        return Err((
            StatusCode::BAD_REQUEST,
            Json(ErrorResponse {
                error: "invalid_input".to_string(),
                message: "Exactly two files must be provided".to_string(),
            }),
        ));
    }

    let analysis1 = analyze_binary(&files[0].0, &files[0].1)
        .await
        .map_err(|e| {
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(ErrorResponse {
                    error: "analysis_error".to_string(),
                    message: format!("Failed to analyze first binary: {}", e),
                }),
            )
        })?;

    let analysis2 = analyze_binary(&files[1].0, &files[1].1)
        .await
        .map_err(|e| {
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(ErrorResponse {
                    error: "analysis_error".to_string(),
                    message: format!("Failed to analyze second binary: {}", e),
                }),
            )
        })?;

    use sha2::Digest;
    use std::collections::HashSet;

    // Per-file metadata: name, size, and SHA-256 of each upload, plus the
    // size delta between the first and second file.
    let mut meta = serde_json::Map::new();
    for (idx, (name, data)) in files.iter().enumerate() {
        meta.insert(format!("file{}_name", idx + 1), serde_json::json!(name));
        meta.insert(
            format!("file{}_size", idx + 1),
            serde_json::json!(data.len()),
        );
        meta.insert(
            format!("file{}_sha256", idx + 1),
            serde_json::json!(format!("{:x}", sha2::Sha256::digest(data))),
        );
    }
    meta.insert(
        "size_diff_bytes".to_string(),
        serde_json::json!((files[0].1.len() as i64) - (files[1].1.len() as i64)),
    );

    // Set differences between the two analyses: what the second binary adds or
    // drops relative to the first in imports, exports, and detected symbols.
    let imports1: HashSet<String> = analysis1.imports.iter().cloned().collect();
    let imports2: HashSet<String> = analysis2.imports.iter().cloned().collect();
    let exports1: HashSet<String> = analysis1.exports.iter().cloned().collect();
    let exports2: HashSet<String> = analysis2.exports.iter().cloned().collect();
    let symbols1: HashSet<String> = analysis1.detected_symbols.iter().cloned().collect();
    let symbols2: HashSet<String> = analysis2.detected_symbols.iter().cloned().collect();

    let imports_added: Vec<String> = imports2.difference(&imports1).cloned().collect();
    let imports_removed: Vec<String> = imports1.difference(&imports2).cloned().collect();
    let exports_added: Vec<String> = exports2.difference(&exports1).cloned().collect();
    let exports_removed: Vec<String> = exports1.difference(&exports2).cloned().collect();
    let symbols_added: Vec<String> = symbols2.difference(&symbols1).cloned().collect();
    let symbols_removed: Vec<String> = symbols1.difference(&symbols2).cloned().collect();

    meta.insert(
        "imports_added".to_string(),
        serde_json::json!(imports_added),
    );
    meta.insert(
        "imports_removed".to_string(),
        serde_json::json!(imports_removed),
    );
    meta.insert(
        "exports_added".to_string(),
        serde_json::json!(exports_added),
    );
    meta.insert(
        "exports_removed".to_string(),
        serde_json::json!(exports_removed),
    );
    meta.insert(
        "symbols_added".to_string(),
        serde_json::json!(symbols_added),
    );
    meta.insert(
        "symbols_removed".to_string(),
        serde_json::json!(symbols_removed),
    );

    Ok(Json(meta.into()))
}