1pub mod london_tdd_tests;
8
9use crate::error::{CleanroomError, Result};
11use std::collections::HashMap;
12use std::sync::OnceLock;
13
/// Aggregated outcome of a full framework self-test run across all suites.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct FrameworkTestResults {
    /// Total number of tests attempted, including synthetic "(suite error)" entries.
    pub total_tests: u32,
    /// Number of tests that passed.
    pub passed_tests: u32,
    /// Number of tests that failed.
    pub failed_tests: u32,
    /// Wall-clock duration of the whole run, in milliseconds.
    pub total_duration_ms: u64,
    /// Per-test records, in execution order.
    pub test_results: Vec<TestResult>,
}
28
/// Outcome of a single test case.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct TestResult {
    /// Human-readable test name as shown in reports.
    pub name: String,
    /// Whether the test succeeded.
    pub passed: bool,
    /// Wall-clock duration of this test, in milliseconds.
    pub duration_ms: u64,
    /// Error description when `passed` is false; `None` on success.
    pub error: Option<String>,
}
41
/// Outcome of one test suite (a named group of related tests).
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct SuiteResult {
    /// Suite name (e.g. "framework", "container", "cli").
    pub name: String,
    /// Number of tests the suite ran.
    pub test_count: u32,
    /// True only when every test in the suite passed.
    pub passed: bool,
    /// Wall-clock duration of the whole suite, in milliseconds.
    pub duration_ms: u64,
    /// Individual results for each test in the suite.
    pub tests: Vec<TestResult>,
}
56
57static TEST_CONFIG_CACHE: OnceLock<HashMap<String, crate::config::TestConfig>> = OnceLock::new();
60
61pub fn get_cached_test_config(name: &str) -> Option<&'static crate::config::TestConfig> {
64 let cache = TEST_CONFIG_CACHE.get_or_init(|| {
65 let mut configs = HashMap::new();
66
67 if let Ok(config) = crate::config::loader::load_config_from_file(std::path::Path::new(
69 "tests/basic.clnrm.toml",
70 )) {
71 configs.insert("basic".to_string(), config);
72 }
73
74 if let Ok(config) = crate::config::loader::load_config_from_file(std::path::Path::new(
75 "tests/integration/end_to_end.toml",
76 )) {
77 configs.insert("end_to_end".to_string(), config);
78 }
79
80 configs
82 });
83
84 cache.get(name)
85}
86
/// Runs every test suite; convenience wrapper over
/// [`run_framework_tests_by_suite`] with no filter applied.
pub async fn run_framework_tests() -> Result<FrameworkTestResults> {
    run_framework_tests_by_suite(None).await
}
91
92pub async fn run_framework_tests_by_suite(
94 suite_filter: Option<&str>,
95) -> Result<FrameworkTestResults> {
96 let start_time = std::time::Instant::now();
97 let mut all_results = FrameworkTestResults {
98 total_tests: 0,
99 passed_tests: 0,
100 failed_tests: 0,
101 total_duration_ms: 0,
102 test_results: Vec::new(),
103 };
104
105 let suites = vec![
107 (
108 "framework",
109 run_framework_suite
110 as fn() -> std::pin::Pin<
111 Box<dyn std::future::Future<Output = Result<SuiteResult>> + Send>,
112 >,
113 ),
114 (
115 "container",
116 run_container_suite
117 as fn() -> std::pin::Pin<
118 Box<dyn std::future::Future<Output = Result<SuiteResult>> + Send>,
119 >,
120 ),
121 (
122 "plugin",
123 run_plugin_suite
124 as fn() -> std::pin::Pin<
125 Box<dyn std::future::Future<Output = Result<SuiteResult>> + Send>,
126 >,
127 ),
128 (
129 "cli",
130 run_cli_suite
131 as fn() -> std::pin::Pin<
132 Box<dyn std::future::Future<Output = Result<SuiteResult>> + Send>,
133 >,
134 ),
135 (
136 "otel",
137 run_otel_suite
138 as fn() -> std::pin::Pin<
139 Box<dyn std::future::Future<Output = Result<SuiteResult>> + Send>,
140 >,
141 ),
142 ];
143
144 for (suite_name, suite_fn) in suites {
145 if let Some(filter) = suite_filter {
147 if suite_name != filter {
148 continue;
149 }
150 }
151
152 match suite_fn().await {
153 Ok(suite_result) => {
154 all_results.total_tests += suite_result.test_count;
155 if suite_result.passed {
156 all_results.passed_tests += suite_result.test_count;
157 } else {
158 all_results.failed_tests +=
159 suite_result.tests.iter().filter(|t| !t.passed).count() as u32;
160 all_results.passed_tests +=
161 suite_result.tests.iter().filter(|t| t.passed).count() as u32;
162 }
163 all_results.test_results.extend(suite_result.tests);
164 }
165 Err(e) => {
166 all_results.total_tests += 1;
168 all_results.failed_tests += 1;
169 all_results.test_results.push(TestResult {
170 name: format!("{} (suite error)", suite_name),
171 passed: false,
172 duration_ms: 0,
173 error: Some(e.to_string()),
174 });
175 }
176 }
177 }
178
179 all_results.total_duration_ms = start_time.elapsed().as_millis() as u64;
180 Ok(all_results)
181}
182
183fn run_framework_suite(
189) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<SuiteResult>> + Send>> {
190 Box::pin(async {
191 let start = std::time::Instant::now();
192 let mut tests = Vec::new();
193
194 tests.push(run_test("TOML Config Parsing", test_toml_parsing).await);
196
197 tests.push(run_test("Config Validation", test_config_validation).await);
199
200 tests.push(run_test("Template Rendering", test_template_rendering).await);
202
203 tests.push(run_test("Service Config", test_service_configuration).await);
205
206 tests.push(run_test("Error Handling", test_error_handling).await);
208
209 let passed = tests.iter().all(|t| t.passed);
210 Ok(SuiteResult {
211 name: "framework".to_string(),
212 test_count: tests.len() as u32,
213 passed,
214 duration_ms: start.elapsed().as_millis() as u64,
215 tests,
216 })
217 })
218}
219
220fn run_container_suite(
222) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<SuiteResult>> + Send>> {
223 Box::pin(async {
224 let start = std::time::Instant::now();
225 let mut tests = Vec::new();
226
227 tests.push(run_test("Container Creation", test_container_creation).await);
229
230 tests.push(run_test("Command Execution", test_container_execution).await);
232
233 tests.push(run_test("Container Cleanup", test_container_cleanup).await);
235
236 let passed = tests.iter().all(|t| t.passed);
237 Ok(SuiteResult {
238 name: "container".to_string(),
239 test_count: tests.len() as u32,
240 passed,
241 duration_ms: start.elapsed().as_millis() as u64,
242 tests,
243 })
244 })
245}
246
247fn run_plugin_suite(
249) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<SuiteResult>> + Send>> {
250 Box::pin(async {
251 let start = std::time::Instant::now();
252 let mut tests = Vec::new();
253
254 tests.push(run_test("Plugin Registration", test_plugin_registration).await);
256
257 tests.push(run_test("Plugin Lifecycle", test_plugin_system).await);
259
260 tests.push(run_test("Plugin Coordination", test_plugin_coordination).await);
262
263 tests.push(run_test("GenericContainer Plugin", test_generic_container_plugin).await);
265
266 tests.push(run_test("SurrealDB Plugin", test_surrealdb_plugin).await);
268
269 tests.push(run_test("Plugin Health Checks", test_plugin_health_checks).await);
271
272 tests.push(run_test("Plugin Error Handling", test_plugin_error_handling).await);
274
275 tests.push(run_test("Multi-Plugin Coordination", test_multi_plugin_coordination).await);
277
278 let passed = tests.iter().all(|t| t.passed);
279 Ok(SuiteResult {
280 name: "plugin".to_string(),
281 test_count: tests.len() as u32,
282 passed,
283 duration_ms: start.elapsed().as_millis() as u64,
284 tests,
285 })
286 })
287}
288
289fn run_cli_suite(
291) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<SuiteResult>> + Send>> {
292 Box::pin(async {
293 let start = std::time::Instant::now();
294 let mut tests = Vec::new();
295
296 tests.push(run_test("CLI Argument Parsing", test_cli_parsing).await);
298
299 tests.push(run_test("Config Validation Command", test_cli_validation).await);
301
302 tests.push(run_test("Report Generation", test_cli_report_generation).await);
304
305 tests.push(run_test("Format Command", test_cli_format).await);
307
308 tests.push(run_test("Init Command", test_cli_init).await);
310
311 tests.push(run_test("Run Command", test_cli_run).await);
313
314 tests.push(run_test("Dry-Run Command", test_cli_dry_run).await);
316
317 tests.push(run_test("Error Message Quality", test_cli_error_messages).await);
319
320 tests.push(run_test("Help Text", test_cli_help).await);
322
323 tests.push(run_test("Version Command", test_cli_version).await);
325
326 tests.push(run_test("Multiple Config Files", test_cli_multiple_configs).await);
328
329 tests.push(run_test("Output Formats", test_cli_output_formats).await);
331
332 let passed = tests.iter().all(|t| t.passed);
333 Ok(SuiteResult {
334 name: "cli".to_string(),
335 test_count: tests.len() as u32,
336 passed,
337 duration_ms: start.elapsed().as_millis() as u64,
338 tests,
339 })
340 })
341}
342
343fn run_otel_suite(
345) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<SuiteResult>> + Send>> {
346 Box::pin(async {
347 let start = std::time::Instant::now();
348 let mut tests = Vec::new();
349
350 tests.push(run_test("OTEL Initialization", test_otel_init).await);
352
353 tests.push(run_test("Span Creation", test_otel_span_creation).await);
355
356 tests.push(run_test("Trace Context", test_otel_trace_context).await);
358
359 tests.push(run_test("OTEL Exporters", test_otel_exporters).await);
361
362 let passed = tests.iter().all(|t| t.passed);
363 Ok(SuiteResult {
364 name: "otel".to_string(),
365 test_count: tests.len() as u32,
366 passed,
367 duration_ms: start.elapsed().as_millis() as u64,
368 tests,
369 })
370 })
371}
372
373async fn run_test<F, Fut>(name: &str, test_fn: F) -> TestResult
379where
380 F: FnOnce() -> Fut,
381 Fut: std::future::Future<Output = Result<()>>,
382{
383 let start = std::time::Instant::now();
384 match test_fn().await {
385 Ok(_) => TestResult {
386 name: name.to_string(),
387 passed: true,
388 duration_ms: start.elapsed().as_millis() as u64,
389 error: None,
390 },
391 Err(e) => TestResult {
392 name: name.to_string(),
393 passed: false,
394 duration_ms: start.elapsed().as_millis() as u64,
395 error: Some(e.to_string()),
396 },
397 }
398}
399
400async fn test_toml_parsing() -> Result<()> {
405 use crate::config::parse_toml_config;
406
407 let toml = r#"
408[meta]
409name = "test"
410version = "1.0.0"
411
412[[scenario]]
413name = "test_scenario"
414
415[[scenario.steps]]
416name = "test_step"
417command = ["echo", "hello"]
418"#;
419
420 let config = parse_toml_config(toml).map_err(|e| {
421 CleanroomError::internal_error("TOML parsing failed")
422 .with_context("Failed to parse valid TOML configuration")
423 .with_source(e.to_string())
424 })?;
425
426 if let Some(meta) = &config.meta {
427 if meta.name != "test" {
428 return Err(CleanroomError::validation_error("Config name mismatch"));
429 }
430 } else {
431 return Err(CleanroomError::validation_error("Meta section not parsed"));
432 }
433
434 Ok(())
435}
436
437async fn test_config_validation() -> Result<()> {
438 use crate::validation::shape::ShapeValidator;
439 use std::fs;
440 use tempfile::TempDir;
441
442 let temp_dir = TempDir::new().map_err(|e| {
443 CleanroomError::internal_error("Failed to create temp dir").with_source(e.to_string())
444 })?;
445
446 let config_path = temp_dir.path().join("test.toml");
447 let config = r#"
448[meta]
449name = "validation_test"
450version = "1.0.0"
451
452[[scenario]]
453name = "s1"
454
455[[scenario.steps]]
456name = "step1"
457command = ["echo"]
458"#;
459
460 fs::write(&config_path, config).map_err(|e| {
461 CleanroomError::internal_error("Failed to write config file").with_source(e.to_string())
462 })?;
463
464 let mut validator = ShapeValidator::new();
465 let result = validator.validate_file(&config_path)?;
466
467 if !result.passed {
468 return Err(CleanroomError::validation_error("Config validation failed")
469 .with_source(format!("{:?}", result.errors)));
470 }
471
472 Ok(())
473}
474
475async fn test_template_rendering() -> Result<()> {
476 use crate::{TemplateContext, TemplateRenderer};
477
478 let mut renderer = TemplateRenderer::new()?;
479 let mut context = TemplateContext::new();
480 context.vars.insert(
481 "name".to_string(),
482 serde_json::Value::String("test".to_string()),
483 );
484
485 let template = "Hello {{ name }}!";
486 let rendered = renderer.render_str(template, "test").map_err(|e| {
487 CleanroomError::internal_error("Template rendering failed").with_source(e.to_string())
488 })?;
489
490 if rendered != "Hello test!" {
491 return Err(CleanroomError::validation_error("Template output mismatch"));
492 }
493
494 Ok(())
495}
496
497async fn test_service_configuration() -> Result<()> {
498 use crate::config::parse_toml_config;
499
500 let toml = r#"
501[meta]
502name = "service_test"
503version = "1.0.0"
504
505[services.db]
506type = "generic_container"
507image = "postgres:14"
508
509[[scenario]]
510name = "test"
511
512[[scenario.steps]]
513name = "step1"
514command = ["echo"]
515service = "db"
516"#;
517
518 let config = parse_toml_config(toml)?;
519
520 let services = config
521 .services
522 .ok_or_else(|| CleanroomError::validation_error("Services not parsed"))?;
523
524 if !services.contains_key("db") {
525 return Err(CleanroomError::validation_error("Service 'db' not found"));
526 }
527
528 Ok(())
529}
530
531async fn test_error_handling() -> Result<()> {
532 use crate::error::CleanroomError;
533
534 let error = CleanroomError::validation_error("Test error")
536 .with_context("Test context")
537 .with_source("Test source");
538
539 if !error.message.contains("Test error") {
540 return Err(CleanroomError::validation_error(
541 "Error message not preserved",
542 ));
543 }
544
545 if !error.context.iter().any(|c| c.contains("Test context")) {
546 return Err(CleanroomError::validation_error(
547 "Error context not preserved",
548 ));
549 }
550
551 Ok(())
552}
553
554async fn test_container_creation() -> Result<()> {
555 let environment = crate::cleanroom::CleanroomEnvironment::new().await?;
556 let plugin = crate::services::generic::GenericContainerPlugin::new("test", "alpine:latest");
557 environment.register_service(Box::new(plugin)).await?;
558 Ok(())
559}
560
/// End-to-end container test: start a generic Alpine container, run
/// `echo test` inside it, verify exit status and stdout, then stop it.
async fn test_container_execution() -> Result<()> {
    let environment = crate::cleanroom::CleanroomEnvironment::new()
        .await
        .map_err(|e| {
            CleanroomError::internal_error("Failed to create CleanroomEnvironment")
                .with_context("Container execution test setup failed")
                .with_source(e.to_string())
        })?;

    let plugin =
        crate::services::generic::GenericContainerPlugin::new("test_container", "alpine:latest");
    environment
        .register_service(Box::new(plugin))
        .await
        .map_err(|e| {
            CleanroomError::internal_error("Failed to register test container plugin")
                .with_context("Plugin registration failed during container execution test")
                .with_source(e.to_string())
        })?;

    let handle = environment
        .start_service("test_container")
        .await
        .map_err(|e| {
            CleanroomError::internal_error("Failed to start test container service")
                .with_context("Service startup failed during container execution test")
                .with_source(e.to_string())
        })?;

    // Run a trivial command whose output we can assert on.
    let command = vec!["echo".to_string(), "test".to_string()];
    let execution_result = environment
        .execute_in_container("test_container", &command, None, None)
        .await
        .map_err(|e| {
            CleanroomError::internal_error("Failed to execute command in test container")
                .with_context("Command execution failed during container execution test")
                .with_source(e.to_string())
        })?;

    // A non-zero exit code means the command itself failed inside the container.
    if !execution_result.succeeded() {
        return Err(CleanroomError::validation_error("Test command failed")
            .with_context(format!(
                "Command '{}' exited with code {}",
                command.join(" "),
                execution_result.exit_code
            ))
            .with_source(format!("stderr: {}", execution_result.stderr)));
    }

    // The echoed text must appear on stdout.
    if !execution_result.stdout.trim().contains("test") {
        return Err(
            CleanroomError::validation_error("Test command output validation failed")
                .with_context(format!(
                    "Expected output to contain 'test', got: '{}'",
                    execution_result.stdout.trim()
                ))
                .with_source("Command output did not match expected pattern"),
        );
    }

    // Stop the service so later tests start from an empty environment.
    environment.stop_service(&handle.id).await.map_err(|e| {
        CleanroomError::internal_error("Failed to stop test container service")
            .with_context("Service cleanup failed during container execution test")
            .with_source(e.to_string())
    })?;

    Ok(())
}
635
/// Full plugin-system lifecycle test: registers a container plugin and a
/// mock database plugin, starts both, verifies health reporting, executes a
/// command while both run, then stops both and confirms the environment is
/// empty. The ordering of these steps is the contract under test.
async fn test_plugin_system() -> Result<()> {
    let environment = crate::cleanroom::CleanroomEnvironment::new()
        .await
        .map_err(|e| {
            CleanroomError::internal_error("Failed to create CleanroomEnvironment")
                .with_context("Plugin system test setup failed")
                .with_source(e.to_string())
        })?;

    let container_plugin =
        crate::services::generic::GenericContainerPlugin::new("test_container", "alpine:latest");
    environment
        .register_service(Box::new(container_plugin))
        .await
        .map_err(|e| {
            CleanroomError::internal_error("Failed to register container plugin")
                .with_context("Container plugin registration failed during plugin system test")
                .with_source(e.to_string())
        })?;

    let mock_plugin = crate::cleanroom::MockDatabasePlugin::new();
    environment
        .register_service(Box::new(mock_plugin))
        .await
        .map_err(|e| {
            CleanroomError::internal_error("Failed to register mock plugin")
                .with_context("Mock plugin registration failed during plugin system test")
                .with_source(e.to_string())
        })?;

    // Precondition: registration alone must not start any service.
    let services = environment.services().await;
    if !services.active_services().is_empty() {
        return Err(
            CleanroomError::validation_error("Services should be empty before starting")
                .with_context("Plugin system test precondition failed")
                .with_source("Services were already active before test started"),
        );
    }

    let container_handle = environment
        .start_service("test_container")
        .await
        .map_err(|e| {
            CleanroomError::internal_error("Failed to start container service")
                .with_context("Container service startup failed during plugin system test")
                .with_source(e.to_string())
        })?;

    // "mock_database" is presumably the name MockDatabasePlugin registers
    // under — TODO confirm against the plugin implementation.
    let mock_handle = environment
        .start_service("mock_database")
        .await
        .map_err(|e| {
            CleanroomError::internal_error("Failed to start mock service")
                .with_context("Mock service startup failed during plugin system test")
                .with_source(e.to_string())
        })?;

    // Both started services must be visible to the health check.
    let health_status = environment.check_health().await;
    if health_status.len() != 2 {
        return Err(
            CleanroomError::validation_error("Expected 2 active services")
                .with_context("Plugin system test health check failed")
                .with_source(format!(
                    "Expected 2 services, found {}",
                    health_status.len()
                )),
        );
    }

    // Exercise coordination by executing a command while both services run.
    let command = vec!["echo".to_string(), "plugin_coordination_test".to_string()];
    let execution_result = environment
        .execute_in_container("test_container", &command, None, None)
        .await
        .map_err(|e| {
            CleanroomError::internal_error("Failed to execute coordination test command")
                .with_context("Plugin coordination test failed")
                .with_source(e.to_string())
        })?;

    if !execution_result.succeeded() {
        return Err(
            CleanroomError::validation_error("Plugin coordination test command failed")
                .with_context("Command execution failed during plugin coordination test")
                .with_source(format!(
                    "Exit code: {}, stderr: {}",
                    execution_result.exit_code, execution_result.stderr
                )),
        );
    }

    environment
        .stop_service(&container_handle.id)
        .await
        .map_err(|e| {
            CleanroomError::internal_error("Failed to stop container service")
                .with_context("Container service cleanup failed during plugin system test")
                .with_source(e.to_string())
        })?;

    environment
        .stop_service(&mock_handle.id)
        .await
        .map_err(|e| {
            CleanroomError::internal_error("Failed to stop mock service")
                .with_context("Mock service cleanup failed during plugin system test")
                .with_source(e.to_string())
        })?;

    // Postcondition: stopping both services leaves the environment empty.
    let final_health_status = environment.check_health().await;
    if !final_health_status.is_empty() {
        return Err(
            CleanroomError::validation_error("Services should be stopped after cleanup")
                .with_context("Plugin system test cleanup verification failed")
                .with_source(format!(
                    "Expected 0 active services, found {}",
                    final_health_status.len()
                )),
        );
    }

    Ok(())
}
766
767async fn test_container_cleanup() -> Result<()> {
768 let environment = crate::cleanroom::CleanroomEnvironment::new().await?;
769 let plugin =
770 crate::services::generic::GenericContainerPlugin::new("cleanup_test", "alpine:latest");
771 environment.register_service(Box::new(plugin)).await?;
772 let handle = environment.start_service("cleanup_test").await?;
773 environment.stop_service(&handle.id).await?;
774
775 let health = environment.check_health().await;
777 if !health.is_empty() {
778 return Err(CleanroomError::validation_error("Container not cleaned up"));
779 }
780 Ok(())
781}
782
783async fn test_plugin_registration() -> Result<()> {
784 let environment = crate::cleanroom::CleanroomEnvironment::new().await?;
785 let plugin = crate::services::generic::GenericContainerPlugin::new("reg_test", "alpine:latest");
786 environment.register_service(Box::new(plugin)).await?;
787 Ok(())
788}
789
790async fn test_plugin_coordination() -> Result<()> {
791 let environment = crate::cleanroom::CleanroomEnvironment::new().await?;
792 let plugin1 = crate::services::generic::GenericContainerPlugin::new("svc1", "alpine:latest");
793 let plugin2 = crate::services::generic::GenericContainerPlugin::new("svc2", "alpine:latest");
794 environment.register_service(Box::new(plugin1)).await?;
795 environment.register_service(Box::new(plugin2)).await?;
796 Ok(())
797}
798
799async fn test_generic_container_plugin() -> Result<()> {
800 use crate::cleanroom::ServicePlugin;
801 let plugin = crate::services::generic::GenericContainerPlugin::new("test", "alpine:latest");
802 if plugin.name() != "test" {
803 return Err(CleanroomError::validation_error("Plugin name mismatch"));
804 }
805 Ok(())
806}
807
808async fn test_surrealdb_plugin() -> Result<()> {
809 use crate::cleanroom::ServicePlugin;
810 let plugin = crate::services::surrealdb::SurrealDbPlugin::new();
811 if plugin.name() != "db" {
812 return Err(CleanroomError::validation_error(
813 "SurrealDB plugin name mismatch",
814 ));
815 }
816 Ok(())
817}
818
819async fn test_plugin_health_checks() -> Result<()> {
820 let environment = crate::cleanroom::CleanroomEnvironment::new().await?;
821 let health = environment.check_health().await;
822 if !health.is_empty() {
823 return Err(CleanroomError::validation_error(
824 "Unexpected active services",
825 ));
826 }
827 Ok(())
828}
829
830async fn test_plugin_error_handling() -> Result<()> {
831 let environment = crate::cleanroom::CleanroomEnvironment::new().await?;
832 let result = environment.start_service("nonexistent").await;
834 if result.is_ok() {
835 return Err(CleanroomError::validation_error(
836 "Should fail for nonexistent service",
837 ));
838 }
839 Ok(())
840}
841
842async fn test_multi_plugin_coordination() -> Result<()> {
843 let environment = crate::cleanroom::CleanroomEnvironment::new().await?;
844 let plugin1 = crate::services::generic::GenericContainerPlugin::new("multi1", "alpine:latest");
845 let plugin2 = crate::services::generic::GenericContainerPlugin::new("multi2", "alpine:latest");
846 environment.register_service(Box::new(plugin1)).await?;
847 environment.register_service(Box::new(plugin2)).await?;
848 let _h1 = environment.start_service("multi1").await?;
849 let _h2 = environment.start_service("multi2").await?;
850 Ok(())
851}
852
853async fn test_cli_parsing() -> Result<()> {
855 use crate::config::parse_toml_config;
856
857 let toml = r#"
859[meta]
860name = "cli_test"
861version = "1.0.0"
862
863[[scenario]]
864name = "test"
865
866[[scenario.steps]]
867name = "step1"
868command = ["echo", "test"]
869"#;
870
871 let config = parse_toml_config(toml).map_err(|e| {
872 CleanroomError::internal_error("CLI parsing failed")
873 .with_context("Failed to parse TOML configuration in CLI test")
874 .with_source(e.to_string())
875 })?;
876
877 if let Some(meta) = &config.meta {
878 if meta.name != "cli_test" {
879 return Err(CleanroomError::validation_error(
880 "CLI parsing: name mismatch",
881 ));
882 }
883 } else {
884 return Err(CleanroomError::validation_error(
885 "CLI parsing: meta not found",
886 ));
887 }
888
889 Ok(())
890}
891
892async fn test_cli_validation() -> Result<()> {
893 use crate::validation::shape::ShapeValidator;
894 use std::fs;
895 use tempfile::TempDir;
896
897 let temp_dir = TempDir::new().map_err(|e| {
899 CleanroomError::internal_error("Failed to create temp dir for CLI validation test")
900 .with_source(e.to_string())
901 })?;
902
903 let test_file = temp_dir.path().join("test.toml");
904 let valid_toml = r#"
905[meta]
906name = "validation_test"
907version = "1.0.0"
908
909[[scenario]]
910name = "test_scenario"
911
912[[scenario.steps]]
913name = "test_step"
914command = ["echo", "test"]
915"#;
916
917 fs::write(&test_file, valid_toml).map_err(|e| {
918 CleanroomError::internal_error("Failed to write test file for CLI validation")
919 .with_source(e.to_string())
920 })?;
921
922 let mut validator = ShapeValidator::new();
924 let result = validator.validate_file(&test_file)?;
925
926 if !result.passed {
927 return Err(CleanroomError::validation_error("CLI validation failed")
928 .with_source(format!("Errors: {:?}", result.errors)));
929 }
930
931 Ok(())
932}
933
934async fn test_cli_report_generation() -> Result<()> {
935 use crate::reporting::{generate_reports, ReportConfig};
936 use tempfile::TempDir;
937
938 let temp_dir = TempDir::new().map_err(|e| {
940 CleanroomError::internal_error("Failed to create temp dir for report test")
941 .with_source(e.to_string())
942 })?;
943
944 let test_results = FrameworkTestResults {
946 total_tests: 1,
947 passed_tests: 1,
948 failed_tests: 0,
949 total_duration_ms: 100,
950 test_results: vec![TestResult {
951 name: "test".to_string(),
952 passed: true,
953 duration_ms: 100,
954 error: None,
955 }],
956 };
957
958 use crate::validation::ValidationReport;
960
961 let report_dir = temp_dir.path().join("reports");
962 std::fs::create_dir_all(&report_dir).map_err(|e| {
963 CleanroomError::internal_error("Failed to create report directory")
964 .with_source(e.to_string())
965 })?;
966
967 let mut validation_report = ValidationReport::new();
969 if test_results.failed_tests == 0 {
970 for _ in 0..test_results.total_tests {
971 validation_report.add_pass("test_passed");
972 }
973 } else {
974 for _ in 0..test_results.failed_tests {
975 validation_report.add_fail("test_failed", "Test failed".to_string());
976 }
977 }
978
979 let config = ReportConfig {
980 json_path: Some(
981 report_dir
982 .join("results.json")
983 .to_string_lossy()
984 .to_string(),
985 ),
986 junit_path: None,
987 digest_path: None,
988 };
989
990 let spans_json = "[]";
992
993 generate_reports(&config, &validation_report, spans_json).map_err(|e| {
994 CleanroomError::internal_error("Report generation failed")
995 .with_context("Failed to generate test reports in CLI test")
996 .with_source(e.to_string())
997 })?;
998
999 if !report_dir.exists() {
1001 return Err(CleanroomError::validation_error(
1002 "Report directory not created",
1003 ));
1004 }
1005
1006 Ok(())
1007}
1008
1009async fn test_cli_format() -> Result<()> {
1010 use crate::formatting::format_toml_content;
1011
1012 let unformatted = "[meta]\nname=\"test\"\nversion=\"1.0.0\"";
1014 let formatted = format_toml_content(unformatted).map_err(|e| {
1015 CleanroomError::internal_error("TOML formatting failed")
1016 .with_context("Failed to format TOML content in CLI test")
1017 .with_source(e.to_string())
1018 })?;
1019
1020 if formatted.is_empty() {
1022 return Err(CleanroomError::validation_error("Formatted TOML is empty"));
1023 }
1024
1025 if !formatted.contains("[meta]") || !formatted.contains("name") {
1027 return Err(CleanroomError::validation_error(
1028 "Formatted TOML missing key elements",
1029 ));
1030 }
1031
1032 Ok(())
1033}
1034
/// Placeholder for `init` command coverage; currently passes vacuously.
// TODO(review): exercise the real init flow instead of returning Ok.
async fn test_cli_init() -> Result<()> {
    Ok(())
}
1039
/// Placeholder for `run` command coverage; currently passes vacuously.
// TODO(review): exercise the real run flow instead of returning Ok.
async fn test_cli_run() -> Result<()> {
    Ok(())
}
1044
/// Placeholder for dry-run command coverage; currently passes vacuously.
// TODO(review): exercise the real dry-run flow instead of returning Ok.
async fn test_cli_dry_run() -> Result<()> {
    Ok(())
}
1049
1050async fn test_cli_error_messages() -> Result<()> {
1051 let error = CleanroomError::validation_error("Test");
1052 if error.message.is_empty() {
1053 return Err(CleanroomError::validation_error("Error message empty"));
1054 }
1055 Ok(())
1056}
1057
/// Placeholder for help-text coverage; currently passes vacuously.
// TODO(review): assert on actual --help output.
async fn test_cli_help() -> Result<()> {
    Ok(())
}
1062
/// Placeholder for version command coverage; currently passes vacuously.
// TODO(review): assert on actual --version output.
async fn test_cli_version() -> Result<()> {
    Ok(())
}
1067
/// Placeholder for multi-config handling; currently passes vacuously.
// TODO(review): run against two config files and assert merged behavior.
async fn test_cli_multiple_configs() -> Result<()> {
    Ok(())
}
1072
/// Placeholder for output-format coverage; currently passes vacuously.
// TODO(review): assert each supported output format renders.
async fn test_cli_output_formats() -> Result<()> {
    Ok(())
}
1077
1078async fn test_otel_init() -> Result<()> {
1080 use crate::telemetry::{init_otel, Export, OtelConfig};
1081
1082 let config = OtelConfig {
1084 service_name: "test-service",
1085 deployment_env: "test",
1086 sample_ratio: 1.0,
1087 export: Export::Stdout,
1088 enable_fmt_layer: false,
1089 headers: None,
1090 };
1091
1092 let guard = init_otel(config).map_err(|e| {
1093 CleanroomError::internal_error("OTEL initialization failed")
1094 .with_context("Failed to initialize OTEL with stdout exporter")
1095 .with_source(e.to_string())
1096 })?;
1097
1098 drop(guard);
1100
1101 Ok(())
1102}
1103
1104async fn test_otel_span_creation() -> Result<()> {
1105 use opentelemetry::global;
1106 use opentelemetry::trace::{Tracer, TracerProvider};
1107
1108 let tracer_provider = global::tracer_provider();
1110 let span = tracer_provider.tracer("test-tracer").start("test-span");
1111
1112 drop(span); Ok(())
1116}
1117
1118async fn test_otel_trace_context() -> Result<()> {
1119 use opentelemetry::global;
1120 use opentelemetry::trace::{Span, Tracer, TracerProvider};
1121 use opentelemetry::KeyValue;
1122
1123 let tracer_provider = global::tracer_provider();
1125 let mut span = tracer_provider.tracer("test-tracer").start("context-test");
1126
1127 span.set_attributes(vec![
1129 KeyValue::new("test.key", "test.value"),
1130 KeyValue::new("test.number", 42),
1131 ]);
1132
1133 span.end();
1135
1136 Ok(())
1137}
1138
1139async fn test_otel_exporters() -> Result<()> {
1140 use crate::telemetry::Export;
1141
1142 let _stdout = Export::Stdout;
1144 let _otlp_http = Export::OtlpHttp {
1145 endpoint: "http://localhost:4318",
1146 };
1147 let _otlp_grpc = Export::OtlpGrpc {
1148 endpoint: "http://localhost:4317",
1149 };
1150
1151 match _stdout {
1153 Export::Stdout => Ok(()),
1154 _ => Err(CleanroomError::validation_error("Export type mismatch")),
1155 }
1156}