1use std::path::Path;
2use std::sync::Mutex;
3
4use anyhow::{Context, Result, bail};
5use cairo_lang_compiler::db::RootDatabase;
6use cairo_lang_compiler::diagnostics::DiagnosticsReporter;
7use cairo_lang_compiler::project::setup_project;
8use cairo_lang_filesystem::cfg::{Cfg, CfgSet};
9use cairo_lang_filesystem::ids::CrateId;
10use cairo_lang_runner::casm_run::format_for_panic;
11use cairo_lang_runner::profiling::{
12 ProfilingInfo, ProfilingInfoProcessor, ProfilingInfoProcessorParams,
13};
14use cairo_lang_runner::{
15 ProfilingInfoCollectionConfig, RunResultValue, SierraCasmRunner, StarknetExecutionResources,
16};
17use cairo_lang_sierra::extensions::gas::CostTokenType;
18use cairo_lang_sierra::ids::FunctionId;
19use cairo_lang_sierra::program::{Program, StatementIdx};
20use cairo_lang_sierra_generator::db::SierraGenGroup;
21use cairo_lang_sierra_to_casm::metadata::MetadataComputationConfig;
22use cairo_lang_starknet::contract::ContractInfo;
23use cairo_lang_starknet::starknet_plugin_suite;
24use cairo_lang_test_plugin::test_config::{PanicExpectation, TestExpectation};
25use cairo_lang_test_plugin::{
26 TestCompilation, TestCompilationMetadata, TestConfig, TestsCompilationConfig,
27 compile_test_prepared_db, test_plugin_suite,
28};
29use cairo_lang_utils::casts::IntoOrPanic;
30use cairo_lang_utils::ordered_hash_map::OrderedHashMap;
31use cairo_lang_utils::unordered_hash_map::UnorderedHashMap;
32use colored::Colorize;
33use itertools::Itertools;
34use num_traits::ToPrimitive;
35use rayon::prelude::{IntoParallelIterator, ParallelIterator};
36use starknet_types_core::felt::Felt as Felt252;
37
// Unit tests for this module (compiled only in test builds).
#[cfg(test)]
mod test;
40
/// Runner that compiles the tests of a project at a given path and executes them.
pub struct TestRunner {
    // Prepared compiler for the project under test; also owns the salsa db
    // needed when profiling.
    compiler: TestCompiler,
    // Runtime configuration (filtering, ignored handling, profiler, gas,
    // resource printing).
    config: TestRunConfig,
}
46
47impl TestRunner {
48 pub fn new(
58 path: &Path,
59 starknet: bool,
60 allow_warnings: bool,
61 config: TestRunConfig,
62 ) -> Result<Self> {
63 let compiler = TestCompiler::try_new(
64 path,
65 allow_warnings,
66 config.gas_enabled,
67 TestsCompilationConfig {
68 starknet,
69 add_statements_functions: config.run_profiler == RunProfilerConfig::Cairo,
70 add_statements_code_locations: false,
71 contract_declarations: None,
72 contract_crate_ids: None,
73 executable_crate_ids: None,
74 },
75 )?;
76 Ok(Self { compiler, config })
77 }
78
79 pub fn run(&self) -> Result<Option<TestsSummary>> {
81 let runner = CompiledTestRunner::new(self.compiler.build()?, self.config.clone());
82 runner.run(Some(&self.compiler.db))
83 }
84}
85
/// Runner for tests that were already compiled into a [`TestCompilation`].
pub struct CompiledTestRunner {
    /// The compiled tests (sierra program + tests metadata).
    pub compiled: TestCompilation,
    /// Run configuration (filtering, profiler, gas, resource printing).
    pub config: TestRunConfig,
}
90
impl CompiledTestRunner {
    /// Creates a runner over already-compiled tests with the given config.
    pub fn new(compiled: TestCompilation, config: TestRunConfig) -> Self {
        Self { compiled, config }
    }

    /// Filters and runs the compiled tests, printing a report to stdout.
    ///
    /// `db` is required (and read) only when profiling with
    /// [`RunProfilerConfig::Cairo`]. Returns `Ok(None)` when all tests passed
    /// and bails with a "FAILED" summary message when any test failed.
    pub fn run(self, db: Option<&RootDatabase>) -> Result<Option<TestsSummary>> {
        // Drop tests excluded by the name filter / ignored flags, keeping the
        // count of excluded tests for the final report line.
        let (compiled, filtered_out) = filter_test_cases(
            self.compiled,
            self.config.include_ignored,
            self.config.ignored,
            &self.config.filter,
        );

        let TestsSummary { passed, failed, ignored, failed_run_results } = run_tests(
            // Cairo-level profiling needs the salsa db plus the statement
            // locations collected at compile time to map statements to
            // user functions.
            if self.config.run_profiler == RunProfilerConfig::Cairo {
                let db = db.expect("db must be passed when profiling.");
                let statements_locations = compiled
                    .metadata
                    .statements_locations
                    .expect("statements locations must be present when profiling.");
                Some(PorfilingAuxData {
                    db,
                    statements_functions: statements_locations
                        .get_statements_functions_map_for_tests(db),
                })
            } else {
                None
            },
            compiled.metadata.named_tests,
            compiled.sierra_program.program,
            compiled.metadata.function_set_costs,
            compiled.metadata.contracts_info,
            &self.config,
        )?;

        if failed.is_empty() {
            println!(
                "test result: {}. {} passed; {} failed; {} ignored; {filtered_out} filtered out;",
                "ok".bright_green(),
                passed.len(),
                failed.len(),
                ignored.len()
            );
            Ok(None)
        } else {
            println!("failures:");
            // `failed` and `failed_run_results` are filled in lockstep by
            // `update_summary`, so `zip_eq` cannot mismatch here.
            for (failure, run_result) in failed.iter().zip_eq(failed_run_results) {
                print!("   {failure} - ");
                match run_result {
                    RunResultValue::Success(_) => {
                        println!("expected panic but finished successfully.");
                    }
                    RunResultValue::Panic(values) => {
                        println!("{}", format_for_panic(values.into_iter()));
                    }
                }
            }
            println!();
            bail!(
                "test result: {}. {} passed; {} failed; {} ignored",
                "FAILED".bright_red(),
                passed.len(),
                failed.len(),
                ignored.len()
            );
        }
    }
}
166
/// Which profiler, if any, to run the tests with.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RunProfilerConfig {
    /// No profiling.
    None,
    /// Profile and map results back to Cairo functions (requires the salsa db
    /// and statements-locations metadata at run time).
    Cairo,
    /// Collect profiling info at the sierra level only.
    Sierra,
}
179
/// Configuration for a test run.
#[derive(Clone, Debug)]
pub struct TestRunConfig {
    /// Substring a test's fully-qualified name must contain to be run.
    pub filter: String,
    /// Whether to also run tests marked as ignored.
    pub include_ignored: bool,
    /// Whether to run *only* tests marked as ignored.
    pub ignored: bool,
    /// Which profiler, if any, to run the tests under.
    pub run_profiler: RunProfilerConfig,
    /// Whether gas metering is enabled for the run.
    pub gas_enabled: bool,
    /// Whether to print per-test execution resources (steps, builtins, syscalls).
    pub print_resource_usage: bool,
}
193
/// Compiler for tests, holding the prepared salsa database and the crate ids.
pub struct TestCompiler {
    /// Snapshot of the database taken after project setup.
    pub db: RootDatabase,
    /// Crates of the project under test (as returned by `setup_project`).
    pub main_crate_ids: Vec<CrateId>,
    /// Crates to collect tests from (currently the same as `main_crate_ids`).
    pub test_crate_ids: Vec<CrateId>,
    /// Whether compilation warnings should not fail the build.
    pub allow_warnings: bool,
    /// Tests compilation configuration (plugins, extra metadata to emit).
    pub config: TestsCompilationConfig,
}
202
203impl TestCompiler {
204 pub fn try_new(
211 path: &Path,
212 allow_warnings: bool,
213 gas_enabled: bool,
214 config: TestsCompilationConfig,
215 ) -> Result<Self> {
216 let db = &mut {
217 let mut b = RootDatabase::builder();
218 let mut cfg = CfgSet::from_iter([Cfg::name("test"), Cfg::kv("target", "test")]);
219 if !gas_enabled {
220 cfg.insert(Cfg::kv("gas", "disabled"));
221 b.skip_auto_withdraw_gas();
222 }
223 b.detect_corelib();
224 b.with_cfg(cfg);
225 b.with_default_plugin_suite(test_plugin_suite());
226 if config.starknet {
227 b.with_default_plugin_suite(starknet_plugin_suite());
228 }
229 b.build()?
230 };
231
232 let main_crate_ids = setup_project(db, Path::new(&path))?;
233
234 Ok(Self {
235 db: db.snapshot(),
236 test_crate_ids: main_crate_ids.clone(),
237 main_crate_ids,
238 allow_warnings,
239 config,
240 })
241 }
242
243 pub fn build(&self) -> Result<TestCompilation> {
245 let mut diag_reporter = DiagnosticsReporter::stderr().with_crates(&self.main_crate_ids);
246 if self.allow_warnings {
247 diag_reporter = diag_reporter.allow_warnings();
248 }
249
250 compile_test_prepared_db(
251 &self.db,
252 self.config.clone(),
253 self.test_crate_ids.clone(),
254 diag_reporter,
255 )
256 }
257}
258
259pub fn filter_test_cases(
269 compiled: TestCompilation,
270 include_ignored: bool,
271 ignored: bool,
272 filter: &str,
273) -> (TestCompilation, usize) {
274 let total_tests_count = compiled.metadata.named_tests.len();
275 let named_tests = compiled
276 .metadata
277 .named_tests
278 .into_iter()
279 .filter(|(_, test)| !ignored || test.ignored || include_ignored)
281 .map(|(func, mut test)| {
282 if include_ignored || ignored {
284 test.ignored = false;
285 }
286 (func, test)
287 })
288 .filter(|(name, _)| name.contains(filter))
289 .collect_vec();
290 let filtered_out = total_tests_count - named_tests.len();
291 let tests = TestCompilation {
292 sierra_program: compiled.sierra_program,
293 metadata: TestCompilationMetadata { named_tests, ..(compiled.metadata) },
294 };
295 (tests, filtered_out)
296}
297
/// Whether a single test matched its declared expectation.
enum TestStatus {
    Success,
    /// The test failed; carries the run result for error reporting.
    Fail(RunResultValue),
}
303
/// The result of a single executed test.
struct TestResult {
    /// The pass/fail status of the run.
    status: TestStatus,
    /// Estimated gas usage of the run, when it could be computed.
    gas_usage: Option<i64>,
    /// Execution resources consumed by the run.
    used_resources: StarknetExecutionResources,
    /// Profiling info of the run, if profiling was requested.
    profiling_info: Option<ProfilingInfo>,
}
315
/// Summary data of an entire test-suite run.
pub struct TestsSummary {
    passed: Vec<String>,
    failed: Vec<String>,
    ignored: Vec<String>,
    /// Run results of the failed tests, kept in lockstep with `failed`.
    failed_run_results: Vec<RunResultValue>,
}
323
324pub struct PorfilingAuxData<'a> {
326 pub db: &'a dyn SierraGenGroup,
327 pub statements_functions: UnorderedHashMap<StatementIdx, String>,
328}
329
/// Runs the given named tests and accumulates the results into a summary,
/// printing a status line per test as it completes.
///
/// Tests run in parallel (rayon) unless Cairo-profiling aux data is supplied,
/// in which case they run sequentially.
pub fn run_tests(
    profiler_data: Option<PorfilingAuxData<'_>>,
    named_tests: Vec<(String, TestConfig)>,
    sierra_program: Program,
    function_set_costs: OrderedHashMap<FunctionId, OrderedHashMap<CostTokenType, i32>>,
    contracts_info: OrderedHashMap<Felt252, ContractInfo>,
    config: &TestRunConfig,
) -> Result<TestsSummary> {
    // Gas metadata is only computed when gas is enabled; profiling info is
    // collected for any profiler mode.
    let runner = SierraCasmRunner::new(
        sierra_program.clone(),
        if config.gas_enabled {
            Some(MetadataComputationConfig {
                function_set_costs,
                linear_gas_solver: true,
                linear_ap_change_solver: true,
                skip_non_linear_solver_comparisons: false,
                compute_runtime_costs: false,
            })
        } else {
            None
        },
        contracts_info,
        match config.run_profiler {
            RunProfilerConfig::None => None,
            RunProfilerConfig::Cairo | RunProfilerConfig::Sierra => {
                Some(ProfilingInfoCollectionConfig::default())
            }
        },
    )
    .with_context(|| "Failed setting up runner.")?;
    let suffix = if named_tests.len() != 1 { "s" } else { "" };
    println!("running {} test{}", named_tests.len(), suffix);
    // Shared accumulator; once it holds an `Err`, later results are discarded
    // by `update_summary`.
    let wrapped_summary = Mutex::new(Ok(TestsSummary {
        passed: vec![],
        failed: vec![],
        ignored: vec![],
        failed_run_results: vec![],
    }));

    // NOTE(review): with `RunProfilerConfig::Sierra` the runner collects
    // profiling info while `profiler_data` is `None` here, which would hit the
    // `panic!("profiler_data is None")` in `update_summary` — confirm that
    // Sierra-level profiling is only driven through a different code path.
    if profiler_data.is_none() {
        // No profiling aux data: run the tests in parallel via rayon.
        named_tests
            .into_par_iter()
            .map(|(name, test)| run_single_test(test, name, &runner))
            .for_each(|res| {
                update_summary(
                    &wrapped_summary,
                    res,
                    &None,
                    &sierra_program,
                    &ProfilingInfoProcessorParams {
                        process_by_original_user_function: false,
                        process_by_cairo_function: false,
                        ..ProfilingInfoProcessorParams::default()
                    },
                    config.print_resource_usage,
                );
            });
    } else {
        // With profiling, run sequentially (as announced to the user below).
        eprintln!("Note: Tests don't run in parallel when running with profiling.");
        named_tests
            .into_iter()
            .map(move |(name, test)| run_single_test(test, name, &runner))
            .for_each(|test_result| {
                update_summary(
                    &wrapped_summary,
                    test_result,
                    &profiler_data,
                    &sierra_program,
                    &ProfilingInfoProcessorParams::default(),
                    config.print_resource_usage,
                );
            });
    }

    // All workers are done; `unwrap` only fails if a worker panicked while
    // holding the lock (poisoning).
    wrapped_summary.into_inner().unwrap()
}
408
409fn run_single_test(
411 test: TestConfig,
412 name: String,
413 runner: &SierraCasmRunner,
414) -> anyhow::Result<(String, Option<TestResult>)> {
415 if test.ignored {
416 return Ok((name, None));
417 }
418 let func = runner.find_function(name.as_str())?;
419 let result = runner
420 .run_function_with_starknet_context(func, vec![], test.available_gas, Default::default())
421 .with_context(|| format!("Failed to run the function `{}`.", name.as_str()))?;
422 Ok((
423 name,
424 Some(TestResult {
425 status: match &result.value {
426 RunResultValue::Success(_) => match test.expectation {
427 TestExpectation::Success => TestStatus::Success,
428 TestExpectation::Panics(_) => TestStatus::Fail(result.value),
429 },
430 RunResultValue::Panic(value) => match test.expectation {
431 TestExpectation::Success => TestStatus::Fail(result.value),
432 TestExpectation::Panics(panic_expectation) => match panic_expectation {
433 PanicExpectation::Exact(expected) if value != &expected => {
434 TestStatus::Fail(result.value)
435 }
436 _ => TestStatus::Success,
437 },
438 },
439 },
440 gas_usage: test
441 .available_gas
442 .zip(result.gas_counter)
443 .map(|(before, after)| {
444 before.into_or_panic::<i64>() - after.to_bigint().to_i64().unwrap()
445 })
446 .or_else(|| {
447 runner.initial_required_gas(func).map(|gas| gas.into_or_panic::<i64>())
448 }),
449 used_resources: result.used_resources,
450 profiling_info: result.profiling_info,
451 }),
452 ))
453}
454
/// Updates the shared summary with the result of a single test, printing the
/// per-test status line and, when requested, its resource usage and profiling
/// report.
fn update_summary(
    wrapped_summary: &Mutex<std::prelude::v1::Result<TestsSummary, anyhow::Error>>,
    test_result: std::prelude::v1::Result<(String, Option<TestResult>), anyhow::Error>,
    profiler_data: &Option<PorfilingAuxData<'_>>,
    sierra_program: &Program,
    profiling_params: &ProfilingInfoProcessorParams,
    print_resource_usage: bool,
) {
    let mut wrapped_summary = wrapped_summary.lock().unwrap();
    // A previously-recorded hard error aborts further accumulation.
    if wrapped_summary.is_err() {
        return;
    }
    let (name, opt_result) = match test_result {
        Ok((name, opt_result)) => (name, opt_result),
        Err(err) => {
            // Record the first hard error; later results are then ignored.
            *wrapped_summary = Err(err);
            return;
        }
    };
    let summary = wrapped_summary.as_mut().unwrap();
    // `res_type` borrows the summary bucket (passed/failed/ignored) that the
    // test name is pushed into at the end of this function.
    let (res_type, status_str, gas_usage, used_resources, profiling_info) =
        if let Some(result) = opt_result {
            let (res_type, status_str) = match result.status {
                TestStatus::Success => (&mut summary.passed, "ok".bright_green()),
                TestStatus::Fail(run_result) => {
                    // Kept in lockstep with `summary.failed` (pushed below).
                    summary.failed_run_results.push(run_result);
                    (&mut summary.failed, "fail".bright_red())
                }
            };
            (
                res_type,
                status_str,
                result.gas_usage,
                // Resources are only reported when explicitly requested.
                print_resource_usage.then_some(result.used_resources),
                result.profiling_info,
            )
        } else {
            // A `None` result marks an ignored (skipped) test.
            (&mut summary.ignored, "ignored".bright_yellow(), None, None, None)
        };
    if let Some(gas_usage) = gas_usage {
        println!("test {name} ... {status_str} (gas usage est.: {gas_usage})");
    } else {
        println!("test {name} ... {status_str}");
    }
    if let Some(used_resources) = used_resources {
        // Drop builtins with a zero counter before printing.
        let filtered = used_resources.basic_resources.filter_unused_builtins();
        println!("    steps: {}", filtered.n_steps);
        println!("    memory holes: {}", filtered.n_memory_holes);

        print_resource_map(
            filtered.builtin_instance_counter.into_iter().map(|(k, v)| (k.to_string(), v)),
            "builtins",
        );
        print_resource_map(used_resources.syscalls.into_iter(), "syscalls");
    }
    if let Some(profiling_info) = profiling_info {
        // Profiling output requires the aux data prepared by the caller.
        let Some(PorfilingAuxData { db, statements_functions }) = profiler_data else {
            panic!("profiler_data is None");
        };
        let profiling_processor = ProfilingInfoProcessor::new(
            Some(*db),
            sierra_program.clone(),
            statements_functions.clone(),
            Default::default(),
        );
        let processed_profiling_info =
            profiling_processor.process_ex(&profiling_info, profiling_params);
        println!("Profiling info:\n{processed_profiling_info}");
    }
    res_type.push(name);
}
540
/// Prints a resource map to stdout as `    <resource_type>: ("k": v, ...)`,
/// sorted by key for deterministic output; prints nothing for an empty map.
fn print_resource_map(m: impl ExactSizeIterator<Item = (String, usize)>, resource_type: &str) {
    if m.len() != 0 {
        // `m` is already an iterator, so the previous `m.into_iter()` was a
        // no-op; itertools' `sorted`/`join` are replaced with std equivalents
        // (tuple `sort` orders by key first, matching the old output).
        let mut entries: Vec<(String, usize)> = m.collect();
        entries.sort();
        let formatted: Vec<String> =
            entries.into_iter().map(|(k, v)| format!(r#""{k}": {v}"#)).collect();
        println!("    {resource_type}: ({})", formatted.join(", "));
    }
}