use super::recipes::*;
use super::*;
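
// Events emitted while running the compress-images recipe should trace the
// full nested structure: pipeline bookends, the sub-recipe group, its loop,
// and one start/complete pair per input file for the child processor.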
#[test]
fn test_recipe_compress_images_event_sequence() {
let def = parse_def(compress_images_json());
let registry = recipe_registry();
let recorder = RecordingReporter::new();
let reporter = recorder.reporter();
let files = vec![make_file("a.jpg", b"aaa"), make_file("b.jpg", b"bbb")];
    execute_pipeline(&def, files, &registry, &reporter, &NoopContext, fake_now).unwrap();
let events = recorder.events();
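    // The event stream must be bracketed by PipelineStarted / PipelineCompleted.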
assert!(matches!(events[0], PipelineEvent::PipelineStarted { .. }));
assert!(matches!(
events.last().unwrap(),
PipelineEvent::PipelineCompleted { .. }
));
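    // Each nested sub-recipe node must surface its own NodeStarted event.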
let group_started = events.iter().any(
|e| matches!(e, PipelineEvent::NodeStarted { node_id, .. } if node_id == "batch-compress"),
);
assert!(
group_started,
"Should emit NodeStarted for sub-recipe group node"
);
let loop_started = events.iter().any(
|e| matches!(e, PipelineEvent::NodeStarted { node_id, .. } if node_id == "compress-loop"),
);
assert!(
loop_started,
"Should emit NodeStarted for loop node inside sub-recipe"
);
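    // With two input files, the per-file child processor runs exactly twice.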
let child_started_count = events
.iter()
.filter(|e| {
matches!(e, PipelineEvent::NodeStarted { node_id, .. } if node_id == "compress-image")
})
.count();
assert_eq!(
child_started_count, 2,
"Child processor should start once per file"
);
let child_completed_count = events
.iter()
.filter(|e| {
matches!(e, PipelineEvent::NodeCompleted { node_id, .. } if node_id == "compress-image")
})
.count();
assert_eq!(
child_completed_count, 2,
"Child processor should complete once per file"
);
}
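
// The clean-csv recipe should report top-level counts that exclude I/O nodes
// while still emitting NodeStarted for both the group and its inner processor.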
#[test]
fn test_recipe_clean_csv_event_sequence() {
let def = parse_def(clean_csv_json());
let registry = recipe_registry();
let recorder = RecordingReporter::new();
let reporter = recorder.reporter();
let files = vec![make_file("data.csv", b"csv-content")];
    execute_pipeline(&def, files, &registry, &reporter, &NoopContext, fake_now).unwrap();
let events = recorder.events();
if let PipelineEvent::PipelineStarted {
total_nodes,
total_files,
} = &events[0]
{
        assert_eq!(
            *total_nodes, 1,
            "one top-level processing node (the csv-cleaner group); I/O nodes are excluded"
        );
assert_eq!(*total_files, 1);
} else {
panic!("First event should be PipelineStarted");
}
let node_started_count = events
.iter()
.filter(|e| matches!(e, PipelineEvent::NodeStarted { .. }))
.count();
assert_eq!(
node_started_count, 2,
"Group + processor = 2 NodeStarted events"
);
}
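
// All six recipe definitions (three shared fixtures plus three inline JSON
// structures) must deserialize into PipelineDefinition without error.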
#[test]
fn test_all_six_recipe_structures_deserialize() {
let recipes = [
compress_images_json(),
clean_csv_json(),
rename_files_json(),
r#"{
"nodes": [
{ "id": "in", "type": "input", "parameters": {} },
{ "id": "batch-resize", "type": "group", "parameters": {}, "nodes": [
{ "id": "loop", "type": "loop", "parameters": { "mode": "forEach" }, "nodes": [
{ "id": "proc", "type": "image-resize", "parameters": { "width": 200 } }
]}
]},
{ "id": "out", "type": "output", "parameters": {} }
]
}"#,
r#"{
"nodes": [
{ "id": "in", "type": "input", "parameters": {} },
{ "id": "batch-convert", "type": "group", "parameters": {}, "nodes": [
{ "id": "loop", "type": "loop", "parameters": { "mode": "forEach" }, "nodes": [
{ "id": "proc", "type": "image-convert", "parameters": { "format": "webp" } }
]}
]},
{ "id": "out", "type": "output", "parameters": {} }
]
}"#,
r#"{
"nodes": [
{ "id": "in", "type": "input", "parameters": {} },
{ "id": "col-renamer", "type": "group", "parameters": {}, "nodes": [
{ "id": "proc", "type": "spreadsheet-rename", "parameters": { "columns": {} } }
]},
{ "id": "out", "type": "output", "parameters": {} }
]
}"#,
];
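    // Deserialize each definition, surfacing the serde error on failure.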
for (i, json) in recipes.iter().enumerate() {
let result: Result<PipelineDefinition, _> = serde_json::from_str(json);
assert!(
result.is_ok(),
"Recipe {} failed to deserialize: {:?}",
i,
result.err()
);
}
}
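
// Each recipe must also execute end-to-end against the mock registry and
// produce at least one output file. The inline definitions below omit the
// optional `parameters` fields, which are assumed to fall back to serde
// defaults (the deserialize test above spells them out explicitly).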
#[test]
fn test_all_six_recipes_execute_with_mocks() {
let recipes = [
compress_images_json(),
clean_csv_json(),
rename_files_json(),
r#"{
"nodes": [
{ "id": "in", "type": "input", "parameters": {} },
{ "id": "batch-resize", "type": "group", "parameters": {}, "nodes": [
{ "id": "loop", "type": "loop", "parameters": {}, "nodes": [
{ "id": "proc", "type": "image-resize" }
]}
]},
{ "id": "out", "type": "output", "parameters": {} }
]
}"#,
r#"{
"nodes": [
{ "id": "in", "type": "input", "parameters": {} },
{ "id": "batch-convert", "type": "group", "parameters": {}, "nodes": [
{ "id": "loop", "type": "loop", "parameters": {}, "nodes": [
{ "id": "proc", "type": "image-convert" }
]}
]},
{ "id": "out", "type": "output", "parameters": {} }
]
}"#,
r#"{
"nodes": [
{ "id": "in", "type": "input", "parameters": {} },
{ "id": "col-renamer", "type": "group", "parameters": {}, "nodes": [
{ "id": "proc", "type": "spreadsheet-rename" }
]},
{ "id": "out", "type": "output", "parameters": {} }
]
}"#,
];
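    // One shared registry and input set; each recipe gets a fresh no-op reporter.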
let registry = recipe_registry();
let files = vec![make_file("test-file.dat", b"test-data")];
for (i, json) in recipes.iter().enumerate() {
let def = parse_def(json);
let reporter = PipelineReporter::new_noop();
let result = execute_pipeline(
&def,
files.clone(),
        &registry,
&reporter,
&NoopContext,
fake_now,
);
assert!(
result.is_ok(),
"Recipe {} failed to execute: {:?}",
i,
result.err()
);
assert!(
!result.unwrap().files.is_empty(),
"Recipe {} produced no output files",
i
);
}
}