use crate::context::ProcessContext;
use crate::errors::BntoError;
use crate::metadata::{NodeCategory, NodeMetadata};
use crate::progress::ProgressReporter;
/// Input for a single-file processing run ([`NodeProcessor::process`]).
#[derive(Debug, Clone, PartialEq)]
pub struct NodeInput {
    /// Raw bytes of the file to process.
    pub data: Vec<u8>,
    /// Name of the input file.
    pub filename: String,
    /// MIME type of the data, if known; `None` when the caller could not determine it.
    pub mime_type: Option<String>,
    /// Node-specific parameters as a JSON object.
    pub params: serde_json::Map<String, serde_json::Value>,
}
/// Result of a processing run: zero or more output files plus metadata.
#[derive(Debug, Clone, PartialEq)]
pub struct NodeOutput {
    /// Files produced by the processor.
    pub files: Vec<OutputFile>,
    /// Arbitrary metadata about the run as a JSON object.
    pub metadata: serde_json::Map<String, serde_json::Value>,
}
/// A single file produced by a processor.
#[derive(Debug, Clone, PartialEq)]
pub struct OutputFile {
    /// Raw bytes of the produced file.
    pub data: Vec<u8>,
    /// Name of the produced file.
    pub filename: String,
    /// MIME type of the produced data (always known for outputs,
    /// unlike [`NodeInput::mime_type`]).
    pub mime_type: String,
}
/// Input for a batch processing run ([`NodeProcessor::process_batch`]):
/// several files sharing one parameter set.
#[derive(Debug, Clone, PartialEq)]
pub struct BatchInput {
    /// The files to process, in order.
    pub files: Vec<BatchFile>,
    /// Node-specific parameters applied to every file in the batch.
    pub params: serde_json::Map<String, serde_json::Value>,
}
/// One file within a [`BatchInput`].
#[derive(Debug, Clone, PartialEq)]
pub struct BatchFile {
    /// Raw bytes of the file.
    pub data: Vec<u8>,
    /// Name of the file.
    pub filename: String,
    /// MIME type, if known; `None` when the caller could not determine it.
    pub mime_type: Option<String>,
}
/// A processing node: consumes an input file (or batch of files) and produces
/// output files plus metadata.
///
/// Implementors must provide [`name`](NodeProcessor::name) and
/// [`process`](NodeProcessor::process); the remaining methods have sensible
/// defaults that may be overridden.
pub trait NodeProcessor {
    /// Identifier of this processor (e.g. shown in logs/UI).
    fn name(&self) -> &str;

    /// Process a single input file, reporting progress through `progress`.
    ///
    /// # Errors
    /// Returns a [`BntoError`] when processing fails.
    fn process(
        &self,
        input: NodeInput,
        progress: &ProgressReporter,
        ctx: &dyn ProcessContext,
    ) -> Result<NodeOutput, BntoError>;

    /// Validate parameters before processing. Returns a list of human-readable
    /// problems; an empty vector means the parameters are acceptable.
    /// The default accepts everything.
    fn validate(&self, _params: &serde_json::Map<String, serde_json::Value>) -> Vec<String> {
        Vec::new()
    }

    /// Process a batch of files.
    ///
    /// The default implementation falls back to calling
    /// [`process`](NodeProcessor::process) once per file, concatenating all
    /// output files and merging the per-file metadata maps (on duplicate keys,
    /// later files win). Override for processors that can handle a whole
    /// batch more efficiently.
    ///
    /// # Errors
    /// Stops at and returns the first per-file error; outputs from earlier
    /// files are discarded in that case.
    fn process_batch(
        &self,
        input: BatchInput,
        progress: &ProgressReporter,
        ctx: &dyn ProcessContext,
    ) -> Result<NodeOutput, BntoError> {
        let total = input.files.len();
        let mut all_files = Vec::new();
        let mut combined_metadata = serde_json::Map::new();
        for (i, file) in input.files.into_iter().enumerate() {
            // Compute the percentage in usize to avoid the u32 overflow that
            // `(i as u32) * 100` could hit on very large batches; `max(1)`
            // guards division by zero (unreachable here since the loop body
            // only runs when total > 0, but cheap insurance).
            let pct = (i * 100 / total.max(1)) as u32;
            progress.report(pct, &format!("Processing file {} of {total}...", i + 1));
            let single_input = NodeInput {
                data: file.data,
                filename: file.filename,
                mime_type: file.mime_type,
                params: input.params.clone(),
            };
            let output = self.process(single_input, progress, ctx)?;
            all_files.extend(output.files);
            // Merge metadata from every file. The previous code assigned
            // (`combined_metadata = output.metadata`), which silently dropped
            // the metadata of all files except the last.
            combined_metadata.extend(output.metadata);
        }
        Ok(NodeOutput {
            files: all_files,
            metadata: combined_metadata,
        })
    }

    /// Descriptive metadata for this node (type, category, parameters, ...).
    /// The default is a placeholder with `node_type: "unknown"`; implementors
    /// are expected to override it.
    fn metadata(&self) -> NodeMetadata {
        NodeMetadata {
            node_type: "unknown".to_string(),
            name: self.name().to_string(),
            description: String::new(),
            category: NodeCategory::Data,
            accepts: vec![],
            platforms: vec![],
            parameters: vec![],
            input_cardinality: Default::default(),
            requires: vec![],
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::context::NoopContext;

    /// Processor that returns its input unchanged as a single output file.
    struct EchoProcessor;
    impl NodeProcessor for EchoProcessor {
        fn name(&self) -> &str {
            "echo"
        }
        fn process(
            &self,
            input: NodeInput,
            _progress: &ProgressReporter,
            _ctx: &dyn ProcessContext,
        ) -> Result<NodeOutput, BntoError> {
            Ok(NodeOutput {
                files: vec![OutputFile {
                    data: input.data,
                    filename: input.filename,
                    mime_type: input
                        .mime_type
                        .unwrap_or_else(|| "application/octet-stream".to_string()),
                }],
                metadata: serde_json::Map::new(),
            })
        }
    }

    /// Processor that always fails, for error-propagation tests.
    struct FailProcessor;
    impl NodeProcessor for FailProcessor {
        fn name(&self) -> &str {
            "fail"
        }
        fn process(
            &self,
            _input: NodeInput,
            _progress: &ProgressReporter,
            _ctx: &dyn ProcessContext,
        ) -> Result<NodeOutput, BntoError> {
            Err(BntoError::ProcessingFailed(
                "intentional test failure".to_string(),
            ))
        }
    }

    /// Builds a `NodeInput` with no MIME type and empty parameters.
    fn make_test_input(data: &[u8], filename: &str) -> NodeInput {
        NodeInput {
            data: data.to_vec(),
            filename: filename.to_string(),
            mime_type: None,
            params: serde_json::Map::new(),
        }
    }

    #[test]
    fn test_echo_processor_name() {
        let processor = EchoProcessor;
        assert_eq!(processor.name(), "echo");
    }

    #[test]
    fn test_echo_processor_echoes_data() {
        let processor = EchoProcessor;
        let progress = ProgressReporter::new_noop();
        let input = make_test_input(b"hello world", "test.txt");
        let output = processor.process(input, &progress, &NoopContext).unwrap();
        assert_eq!(output.files.len(), 1);
        assert_eq!(output.files[0].data, b"hello world");
        assert_eq!(output.files[0].filename, "test.txt");
    }

    #[test]
    fn test_fail_processor_returns_error() {
        let processor = FailProcessor;
        let progress = ProgressReporter::new_noop();
        let input = make_test_input(b"data", "test.txt");
        let result = processor.process(input, &progress, &NoopContext);
        assert!(result.is_err());
        if let Err(e) = result {
            assert!(e.to_string().contains("intentional test failure"));
        }
    }

    #[test]
    fn test_default_validate_returns_empty() {
        let processor = EchoProcessor;
        let params = serde_json::Map::new();
        // Fixed mojibake: the argument was garbled as `¶ms` (a mangled
        // `&params`), which did not compile.
        let errors = processor.validate(&params);
        assert!(errors.is_empty());
    }

    #[test]
    fn test_default_process_batch_falls_back_to_per_file() {
        let processor = EchoProcessor;
        let progress = ProgressReporter::new_noop();
        let input = BatchInput {
            files: vec![
                BatchFile {
                    data: b"file1".to_vec(),
                    filename: "a.txt".to_string(),
                    mime_type: None,
                },
                BatchFile {
                    data: b"file2".to_vec(),
                    filename: "b.txt".to_string(),
                    mime_type: None,
                },
            ],
            params: serde_json::Map::new(),
        };
        let output = processor
            .process_batch(input, &progress, &NoopContext)
            .unwrap();
        assert_eq!(output.files.len(), 2);
        assert_eq!(output.files[0].filename, "a.txt");
        assert_eq!(output.files[0].data, b"file1");
        assert_eq!(output.files[1].filename, "b.txt");
        assert_eq!(output.files[1].data, b"file2");
    }

    #[test]
    fn test_default_process_batch_empty_input() {
        let processor = EchoProcessor;
        let progress = ProgressReporter::new_noop();
        let input = BatchInput {
            files: vec![],
            params: serde_json::Map::new(),
        };
        let output = processor
            .process_batch(input, &progress, &NoopContext)
            .unwrap();
        assert_eq!(output.files.len(), 0);
    }

    #[test]
    fn test_default_process_batch_propagates_errors() {
        let processor = FailProcessor;
        let progress = ProgressReporter::new_noop();
        let input = BatchInput {
            files: vec![BatchFile {
                data: b"data".to_vec(),
                filename: "test.txt".to_string(),
                mime_type: None,
            }],
            params: serde_json::Map::new(),
        };
        let result = processor.process_batch(input, &progress, &NoopContext);
        assert!(result.is_err());
    }
}