1use std::path::Path;
2use std::sync::Mutex;
3
4use anyhow::{Context, Result, bail};
5use cairo_lang_compiler::db::RootDatabase;
6use cairo_lang_compiler::diagnostics::DiagnosticsReporter;
7use cairo_lang_compiler::project::setup_project;
8use cairo_lang_filesystem::cfg::{Cfg, CfgSet};
9use cairo_lang_filesystem::ids::CrateId;
10use cairo_lang_runner::casm_run::format_for_panic;
11use cairo_lang_runner::profiling::{
12 ProfilingInfo, ProfilingInfoProcessor, ProfilingInfoProcessorParams,
13};
14use cairo_lang_runner::{
15 ProfilingInfoCollectionConfig, RunResultValue, SierraCasmRunner, StarknetExecutionResources,
16};
17use cairo_lang_sierra::extensions::gas::CostTokenType;
18use cairo_lang_sierra::ids::FunctionId;
19use cairo_lang_sierra::program::{Program, StatementIdx};
20use cairo_lang_sierra_generator::db::SierraGenGroup;
21use cairo_lang_sierra_to_casm::metadata::MetadataComputationConfig;
22use cairo_lang_starknet::contract::ContractInfo;
23use cairo_lang_starknet::starknet_plugin_suite;
24use cairo_lang_test_plugin::test_config::{PanicExpectation, TestExpectation};
25use cairo_lang_test_plugin::{
26 TestCompilation, TestCompilationMetadata, TestConfig, TestsCompilationConfig,
27 compile_test_prepared_db, test_plugin_suite,
28};
29use cairo_lang_utils::casts::IntoOrPanic;
30use cairo_lang_utils::ordered_hash_map::OrderedHashMap;
31use cairo_lang_utils::unordered_hash_map::UnorderedHashMap;
32use colored::Colorize;
33use itertools::Itertools;
34use num_traits::ToPrimitive;
35use rayon::prelude::{IntoParallelIterator, ParallelIterator};
36use starknet_types_core::felt::Felt as Felt252;
37
38#[cfg(test)]
39mod test;
40
/// Compiles and runs the tests of a Cairo project: owns both the compiler
/// state and the run configuration.
pub struct TestRunner {
    // The prepared compiler (database + crate ids) used to build the tests.
    compiler: TestCompiler,
    // Configuration controlling filtering, profiling, gas and output.
    config: TestRunConfig,
}
46
impl TestRunner {
    /// Configures a test runner for the project at `path`.
    ///
    /// * `starknet` - also load the starknet plugin suite when compiling.
    /// * `allow_warnings` - do not treat compilation warnings as errors.
    /// * `config` - the run configuration; note that `gas_enabled` and
    ///   `run_profiler` also affect how the project is compiled.
    pub fn new(
        path: &Path,
        starknet: bool,
        allow_warnings: bool,
        config: TestRunConfig,
    ) -> Result<Self> {
        let compiler = TestCompiler::try_new(
            path,
            allow_warnings,
            config.gas_enabled,
            TestsCompilationConfig {
                starknet,
                // The statement->function mapping is only needed for
                // Cairo-level profiling (see `CompiledTestRunner::run`).
                add_statements_functions: config.run_profiler == RunProfilerConfig::Cairo,
                add_statements_code_locations: false,
                contract_declarations: None,
                contract_crate_ids: None,
                executable_crate_ids: None,
            },
        )?;
        Ok(Self { compiler, config })
    }

    /// Compiles the tests and runs them, printing results to stdout.
    ///
    /// Returns `Ok(None)` when no test failed, and an error describing the
    /// failures otherwise (see `CompiledTestRunner::run`).
    pub fn run(&self) -> Result<Option<TestsSummary>> {
        let runner = CompiledTestRunner::new(self.compiler.build()?, self.config.clone());
        runner.run(Some(&self.compiler.db))
    }
}
85
/// Runs an already-compiled tests artifact according to a run configuration.
pub struct CompiledTestRunner {
    /// The compiled tests: Sierra program plus per-test metadata.
    pub compiled: TestCompilation,
    /// Configuration controlling filtering, profiling, gas and output.
    pub config: TestRunConfig,
}
90
impl CompiledTestRunner {
    /// Creates a runner over already-compiled tests with the given configuration.
    pub fn new(compiled: TestCompilation, config: TestRunConfig) -> Self {
        Self { compiled, config }
    }

    /// Filters, runs and reports the tests.
    ///
    /// `db` must be `Some` when `config.run_profiler` is `RunProfilerConfig::Cairo`
    /// (the function panics otherwise); it is used to build the statement->function
    /// map consumed by the profiling post-processor.
    ///
    /// Prints the per-test results and the final summary to stdout. Returns
    /// `Ok(None)` when no test failed, and an error listing the failures otherwise.
    pub fn run(self, db: Option<&RootDatabase>) -> Result<Option<TestsSummary>> {
        // Restrict to the requested tests; remember how many were dropped for the summary line.
        let (compiled, filtered_out) = filter_test_cases(
            self.compiled,
            self.config.include_ignored,
            self.config.ignored,
            &self.config.filter,
        );

        let TestsSummary { passed, failed, ignored, failed_run_results } = run_tests(
            if self.config.run_profiler == RunProfilerConfig::Cairo {
                let db = db.expect("db must be passed when profiling.");
                // Cairo-level profiling requires the statement locations emitted
                // at compilation time (see `add_statements_functions`).
                let statements_locations = compiled
                    .metadata
                    .statements_locations
                    .expect("statements locations must be present when profiling.");
                Some(PorfilingAuxData {
                    db,
                    statements_functions: statements_locations
                        .get_statements_functions_map_for_tests(db),
                })
            } else {
                None
            },
            compiled.metadata.named_tests,
            compiled.sierra_program.program,
            compiled.metadata.function_set_costs,
            compiled.metadata.contracts_info,
            &self.config,
        )?;

        if failed.is_empty() {
            println!(
                "test result: {}. {} passed; {} failed; {} ignored; {filtered_out} filtered out;",
                "ok".bright_green(),
                passed.len(),
                failed.len(),
                ignored.len()
            );
            Ok(None)
        } else {
            println!("failures:");
            // `failed` and `failed_run_results` are kept in lockstep by
            // `update_summary`, so `zip_eq` is safe here.
            for (failure, run_result) in failed.iter().zip_eq(failed_run_results) {
                print!("   {failure} - ");
                match run_result {
                    RunResultValue::Success(_) => {
                        println!("expected panic but finished successfully.");
                    }
                    RunResultValue::Panic(values) => {
                        println!("{}", format_for_panic(values.into_iter()));
                    }
                }
            }
            println!();
            bail!(
                "test result: {}. {} passed; {} failed; {} ignored",
                "FAILED".bright_red(),
                passed.len(),
                failed.len(),
                ignored.len()
            );
        }
    }
}
166
/// Whether to run the profiler, and at what granularity.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RunProfilerConfig {
    /// Profiling disabled.
    None,
    /// Profile and map statements back to test functions; requires a database
    /// at run time and statement-location info at compile time.
    Cairo,
    /// Profile at the Sierra level (no database required).
    Sierra,
}
179
/// Configuration for a test run.
#[derive(Clone, Debug)]
pub struct TestRunConfig {
    /// Substring filter on test names; only matching tests are run.
    pub filter: String,
    /// Run ignored tests in addition to the regular ones.
    pub include_ignored: bool,
    /// Run only the tests marked as ignored.
    pub ignored: bool,
    /// Whether (and how) to run the profiler.
    pub run_profiler: RunProfilerConfig,
    /// Whether gas metering is enabled; when false the `gas: disabled` cfg is
    /// set and gas metadata computation is skipped.
    pub gas_enabled: bool,
    /// Whether to print the used resources after each test.
    pub print_resource_usage: bool,
}
193
/// The test-compilation state: a prepared database plus the crates to compile.
pub struct TestCompiler {
    /// Snapshot of the prepared database.
    pub db: RootDatabase,
    /// Crates whose diagnostics are reported when building.
    pub main_crate_ids: Vec<CrateId>,
    /// Crates whose tests are compiled (currently initialized to the same set
    /// as `main_crate_ids` in `try_new`).
    pub test_crate_ids: Vec<CrateId>,
    /// Whether compilation warnings are allowed (not treated as errors).
    pub allow_warnings: bool,
    /// The tests-compilation configuration (plugins, extra artifacts).
    pub config: TestsCompilationConfig,
}
202
203impl TestCompiler {
204 pub fn try_new(
211 path: &Path,
212 allow_warnings: bool,
213 gas_enabled: bool,
214 config: TestsCompilationConfig,
215 ) -> Result<Self> {
216 let db = &mut {
217 let mut b = RootDatabase::builder();
218 let mut cfg = CfgSet::from_iter([Cfg::name("test"), Cfg::kv("target", "test")]);
219 if !gas_enabled {
220 cfg.insert(Cfg::kv("gas", "disabled"));
221 b.skip_auto_withdraw_gas();
222 }
223 b.detect_corelib();
224 b.with_cfg(cfg);
225 b.with_plugin_suite(test_plugin_suite());
226 if config.starknet {
227 b.with_plugin_suite(starknet_plugin_suite());
228 }
229 b.build()?
230 };
231
232 let main_crate_ids = setup_project(db, Path::new(&path))?;
233
234 Ok(Self {
235 db: db.snapshot(),
236 test_crate_ids: main_crate_ids.clone(),
237 main_crate_ids,
238 allow_warnings,
239 config,
240 })
241 }
242
243 pub fn build(&self) -> Result<TestCompilation> {
245 let mut diag_reporter =
246 DiagnosticsReporter::stderr().with_crates(&self.main_crate_ids.clone());
247 if self.allow_warnings {
248 diag_reporter = diag_reporter.allow_warnings();
249 }
250
251 compile_test_prepared_db(
252 &self.db,
253 self.config.clone(),
254 self.test_crate_ids.clone(),
255 diag_reporter,
256 )
257 }
258}
259
260pub fn filter_test_cases(
270 compiled: TestCompilation,
271 include_ignored: bool,
272 ignored: bool,
273 filter: &str,
274) -> (TestCompilation, usize) {
275 let total_tests_count = compiled.metadata.named_tests.len();
276 let named_tests = compiled
277 .metadata
278 .named_tests
279 .into_iter()
280 .filter(|(_, test)| !ignored || test.ignored || include_ignored)
282 .map(|(func, mut test)| {
283 if include_ignored || ignored {
285 test.ignored = false;
286 }
287 (func, test)
288 })
289 .filter(|(name, _)| name.contains(filter))
290 .collect_vec();
291 let filtered_out = total_tests_count - named_tests.len();
292 let tests = TestCompilation {
293 sierra_program: compiled.sierra_program,
294 metadata: TestCompilationMetadata { named_tests, ..(compiled.metadata) },
295 };
296 (tests, filtered_out)
297}
298
/// The status of a single test run versus its expectation.
enum TestStatus {
    /// The test met its expectation.
    Success,
    /// The test failed; carries the run value (success or panic data) for reporting.
    Fail(RunResultValue),
}
304
/// The full result of a single test run.
struct TestResult {
    // Pass/fail status of the run.
    status: TestStatus,
    // Estimated gas usage of the run, when computable.
    gas_usage: Option<i64>,
    // Execution resources used by the run (steps, memory holes, builtins, syscalls).
    used_resources: StarknetExecutionResources,
    // Raw profiling info collected during the run, if profiling was enabled.
    profiling_info: Option<ProfilingInfo>,
}
316
/// Summary data of an entire test run.
pub struct TestsSummary {
    // Names of the tests that passed.
    passed: Vec<String>,
    // Names of the tests that failed.
    failed: Vec<String>,
    // Names of the tests that were ignored.
    ignored: Vec<String>,
    // Run values of the failed tests, kept parallel to `failed`
    // (the reporter zips the two with `zip_eq`).
    failed_run_results: Vec<RunResultValue>,
}
324
/// Auxiliary data needed to post-process profiling results.
///
/// NOTE(review): the name is a typo of `ProfilingAuxData`; it is public API,
/// so it is kept as-is here — renaming would break callers.
pub struct PorfilingAuxData<'a> {
    /// Database handle passed to the profiling-info processor.
    pub db: &'a dyn SierraGenGroup,
    /// Map from Sierra statement index to the name of the function it belongs to.
    pub statements_functions: UnorderedHashMap<StatementIdx, String>,
}
330
/// Runs the given tests and collects a summary.
///
/// * `profiler_data` - when `Some`, tests run sequentially and each test's
///   profiling info is processed; when `None`, tests run in parallel via rayon.
/// * `named_tests` - the (name, config) pairs to run.
/// * `sierra_program` - the Sierra program containing the test functions.
/// * `function_set_costs` - precomputed per-function costs fed into the gas
///   metadata computation (only used when `config.gas_enabled`).
/// * `contracts_info` - contract info handed to the runner.
/// * `config` - run configuration (gas, profiler, resource printing).
///
/// Returns the summary, or the first error any test produced.
pub fn run_tests(
    profiler_data: Option<PorfilingAuxData<'_>>,
    named_tests: Vec<(String, TestConfig)>,
    sierra_program: Program,
    function_set_costs: OrderedHashMap<FunctionId, OrderedHashMap<CostTokenType, i32>>,
    contracts_info: OrderedHashMap<Felt252, ContractInfo>,
    config: &TestRunConfig,
) -> Result<TestsSummary> {
    // One shared runner executes all tests; profiling collection is enabled for
    // both Cairo and Sierra profiler modes.
    let runner = SierraCasmRunner::new(
        sierra_program.clone(),
        if config.gas_enabled {
            Some(MetadataComputationConfig {
                function_set_costs,
                linear_gas_solver: true,
                linear_ap_change_solver: true,
                skip_non_linear_solver_comparisons: false,
                compute_runtime_costs: false,
            })
        } else {
            None
        },
        contracts_info,
        match config.run_profiler {
            RunProfilerConfig::None => None,
            RunProfilerConfig::Cairo | RunProfilerConfig::Sierra => {
                Some(ProfilingInfoCollectionConfig::default())
            }
        },
    )
    .with_context(|| "Failed setting up runner.")?;
    let suffix = if named_tests.len() != 1 { "s" } else { "" };
    println!("running {} test{}", named_tests.len(), suffix);
    // The summary (or the first error) is accumulated behind a mutex so the
    // parallel branch can update it from worker threads.
    let wrapped_summary = Mutex::new(Ok(TestsSummary {
        passed: vec![],
        failed: vec![],
        ignored: vec![],
        failed_run_results: vec![],
    }));

    if profiler_data.is_none() {
        // No profiling: run the tests in parallel.
        named_tests
            .into_par_iter()
            .map(|(name, test)| run_single_test(test, name, &runner))
            .for_each(|res| {
                update_summary(
                    &wrapped_summary,
                    res,
                    &None,
                    &sierra_program,
                    // Without a db there is nothing to map back to user/Cairo
                    // functions, so those processing passes are disabled.
                    &ProfilingInfoProcessorParams {
                        process_by_original_user_function: false,
                        process_by_cairo_function: false,
                        ..ProfilingInfoProcessorParams::default()
                    },
                    config.print_resource_usage,
                );
            });
    } else {
        // Profiling: run sequentially so per-test profiles stay attributable.
        eprintln!("Note: Tests don't run in parallel when running with profiling.");
        named_tests
            .into_iter()
            .map(move |(name, test)| run_single_test(test, name, &runner))
            .for_each(|test_result| {
                update_summary(
                    &wrapped_summary,
                    test_result,
                    &profiler_data,
                    &sierra_program,
                    &ProfilingInfoProcessorParams::default(),
                    config.print_resource_usage,
                );
            });
    }

    // All workers are done; unwrap the mutex (lock cannot be poisoned here
    // unless a worker panicked, in which case unwrapping propagates the panic).
    wrapped_summary.into_inner().unwrap()
}
409
/// Runs a single test and returns its name together with the result.
///
/// An ignored test returns `(name, None)` without running. Errors from looking
/// up or executing the function are propagated with context.
fn run_single_test(
    test: TestConfig,
    name: String,
    runner: &SierraCasmRunner,
) -> anyhow::Result<(String, Option<TestResult>)> {
    if test.ignored {
        return Ok((name, None));
    }
    let func = runner.find_function(name.as_str())?;
    let result = runner
        .run_function_with_starknet_context(func, vec![], test.available_gas, Default::default())
        .with_context(|| format!("Failed to run the function `{}`.", name.as_str()))?;
    Ok((
        name,
        Some(TestResult {
            // Compare the run outcome against the test's declared expectation.
            status: match &result.value {
                RunResultValue::Success(_) => match test.expectation {
                    TestExpectation::Success => TestStatus::Success,
                    // Expected a panic but the run succeeded.
                    TestExpectation::Panics(_) => TestStatus::Fail(result.value),
                },
                RunResultValue::Panic(value) => match test.expectation {
                    TestExpectation::Success => TestStatus::Fail(result.value),
                    TestExpectation::Panics(panic_expectation) => match panic_expectation {
                        // An exact expectation fails on any mismatch in panic data;
                        // any other panic expectation accepts the panic.
                        PanicExpectation::Exact(expected) if value != &expected => {
                            TestStatus::Fail(result.value)
                        }
                        _ => TestStatus::Success,
                    },
                },
            },
            // Gas used = available before - gas counter after. If either side is
            // missing, fall back to the function's initial required gas estimate.
            gas_usage: test
                .available_gas
                .zip(result.gas_counter)
                .map(|(before, after)| {
                    before.into_or_panic::<i64>() - after.to_bigint().to_i64().unwrap()
                })
                .or_else(|| {
                    runner.initial_required_gas(func).map(|gas| gas.into_or_panic::<i64>())
                }),
            used_resources: result.used_resources,
            profiling_info: result.profiling_info,
        }),
    ))
}
455
/// Folds a single test result into the shared summary and prints its status line.
///
/// The summary holds `Result<TestsSummary, _>`: the first test error replaces it
/// with `Err` and all later results are discarded (first-error-wins). Also prints
/// resource usage and processed profiling info when requested.
///
/// # Panics
/// Panics if `profiling_info` is present but `profiler_data` is `None`, or if
/// the summary mutex is poisoned.
fn update_summary(
    wrapped_summary: &Mutex<std::prelude::v1::Result<TestsSummary, anyhow::Error>>,
    test_result: std::prelude::v1::Result<(String, Option<TestResult>), anyhow::Error>,
    profiler_data: &Option<PorfilingAuxData<'_>>,
    sierra_program: &Program,
    profiling_params: &ProfilingInfoProcessorParams,
    print_resource_usage: bool,
) {
    let mut wrapped_summary = wrapped_summary.lock().unwrap();
    // A previous test already failed with an error — keep that error, drop this result.
    if wrapped_summary.is_err() {
        return;
    }
    let (name, opt_result) = match test_result {
        Ok((name, opt_result)) => (name, opt_result),
        Err(err) => {
            // Record the first error; later calls bail out early above.
            *wrapped_summary = Err(err);
            return;
        }
    };
    let summary = wrapped_summary.as_mut().unwrap();
    // Pick the bucket the test name goes into, plus everything to print.
    let (res_type, status_str, gas_usage, used_resources, profiling_info) =
        if let Some(result) = opt_result {
            let (res_type, status_str) = match result.status {
                TestStatus::Success => (&mut summary.passed, "ok".bright_green()),
                TestStatus::Fail(run_result) => {
                    // Keep `failed_run_results` parallel to `failed` for the reporter.
                    summary.failed_run_results.push(run_result);
                    (&mut summary.failed, "fail".bright_red())
                }
            };
            (
                res_type,
                status_str,
                result.gas_usage,
                // Resources are only printed when requested by the config.
                print_resource_usage.then_some(result.used_resources),
                result.profiling_info,
            )
        } else {
            // `None` result means the test was ignored.
            (&mut summary.ignored, "ignored".bright_yellow(), None, None, None)
        };
    if let Some(gas_usage) = gas_usage {
        println!("test {name} ... {status_str} (gas usage est.: {gas_usage})");
    } else {
        println!("test {name} ... {status_str}");
    }
    if let Some(used_resources) = used_resources {
        // Hide builtins with a zero count to keep the output compact.
        let filtered = used_resources.basic_resources.filter_unused_builtins();
        println!("    steps: {}", filtered.n_steps);
        println!("    memory holes: {}", filtered.n_memory_holes);

        print_resource_map(
            filtered.builtin_instance_counter.into_iter().map(|(k, v)| (k.to_string(), v)),
            "builtins",
        );
        print_resource_map(used_resources.syscalls.into_iter(), "syscalls");
    }
    if let Some(profiling_info) = profiling_info {
        let Some(PorfilingAuxData { db, statements_functions }) = profiler_data else {
            panic!("profiler_data is None");
        };
        let profiling_processor = ProfilingInfoProcessor::new(
            Some(*db),
            sierra_program.clone(),
            statements_functions.clone(),
            Default::default(),
        );
        let processed_profiling_info =
            profiling_processor.process_ex(&profiling_info, profiling_params);
        println!("Profiling info:\n{processed_profiling_info}");
    }
    res_type.push(name);
}
541
542fn print_resource_map(m: impl ExactSizeIterator<Item = (String, usize)>, resource_type: &str) {
546 if m.len() != 0 {
547 println!(
548 " {resource_type}: ({})",
549 m.into_iter().sorted().map(|(k, v)| format!(r#""{k}": {v}"#)).join(", ")
550 );
551 }
552}