// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use bencher::Bencher;
use deno_core::v8;
use deno_core::Extension;
use deno_core::JsRuntime;
use deno_core::PollEventLoopOptions;
use deno_core::RuntimeOptions;

use crate::profiling::is_profiling;

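/// Builds a `JsRuntime` with the extensions produced by `setup` and no module
/// loader; shared by the sync and async benchmark helpers below.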
pub fn create_js_runtime(setup: impl FnOnce() -> Vec<Extension>) -> JsRuntime {
  JsRuntime::new(RuntimeOptions {
    extensions: setup(),
    module_loader: None,
    ..Default::default()
  })
}

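/// Wraps `src` in a JS `for` loop that executes it `iters` times, amortizing
/// the per-call overhead of entering V8 across many iterations.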
fn loop_code(iters: u64, src: &str) -> String {
  format!(r#"for(let i=0; i < {iters}; i++) {{ {src} }}"#)
}

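/// Iteration counts for the benchmark helpers. `benching_inner` controls how
/// many times `src` runs per bencher iteration. When profiling, the helpers
/// run `profiling_inner * profiling_outer` iterations instead: as one large
/// JS loop for sync benchmarks, or as `profiling_outer` event-loop runs of
/// `profiling_inner` JS iterations each for async benchmarks.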
#[derive(Copy, Clone)]
pub struct BenchOptions {
  pub benching_inner: u64,
  pub profiling_inner: u64,
  pub profiling_outer: u64,
}

impl Default for BenchOptions {
  fn default() -> Self {
    Self {
      benching_inner: 1_000,
      profiling_inner: 1_000,
      profiling_outer: 10_000,
    }
  }
}

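/// Benchmarks synchronous JS (`src`) with the default `BenchOptions`.
///
/// A call site looks roughly like the sketch below; `op_nop` and `setup` are
/// illustrative names, not items provided by this crate:
///
/// ```ignore
/// fn bench_op_nop(b: &mut Bencher) {
///   // `setup` returns the Vec<Extension> that registers the benched op.
///   bench_js_sync(b, "op_nop();", setup);
/// }
/// ```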
pub fn bench_js_sync(
  b: &mut Bencher,
  src: &str,
  setup: impl FnOnce() -> Vec<Extension>,
) {
  bench_js_sync_with(b, src, setup, Default::default())
}

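/// Compiles `src`, wrapped in a loop of `opts` iterations, into a single V8
/// script; when profiling it runs once, otherwise the compiled script is
/// re-run inside the bencher loop.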
pub fn bench_js_sync_with(
  b: &mut Bencher,
  src: &str,
  setup: impl FnOnce() -> Vec<Extension>,
  opts: BenchOptions,
) {
  let mut runtime = create_js_runtime(setup);
  let scope = &mut runtime.handle_scope();

  // When profiling, increase the JS iteration count to produce nicer flamegraphs
  let inner_iters = if is_profiling() {
    opts.profiling_inner * opts.profiling_outer
  } else {
    opts.benching_inner
  };
  // Looped code
  let looped_src = loop_code(inner_iters, src);

  let code = v8::String::new(scope, looped_src.as_ref()).unwrap();
  let script = v8::Script::compile(scope, code, None).unwrap();
  // Run once if profiling, otherwise regular bench loop
  if is_profiling() {
    script.run(scope).unwrap();
  } else {
    b.iter(|| {
      script.run(scope).unwrap();
    });
  }
}

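/// Benchmarks asynchronous JS (`src`) with the default `BenchOptions`.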
pub fn bench_js_async(
  b: &mut Bencher,
  src: &str,
  setup: impl FnOnce() -> Vec<Extension>,
) {
  bench_js_async_with(b, src, setup, Default::default())
}

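/// Executes the looped `src` on a current-thread Tokio runtime, driving the
/// JsRuntime event loop to completion on every iteration so pending async ops
/// resolve inside the measured time.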
pub fn bench_js_async_with(
  b: &mut Bencher,
  src: &str,
  setup: impl FnOnce() -> Vec<Extension>,
  opts: BenchOptions,
) {
  let mut runtime = create_js_runtime(setup);
  let tokio_runtime = tokio::runtime::Builder::new_current_thread()
    .enable_all()
    .build()
    .unwrap();

  // Looped code
  let inner_iters = if is_profiling() {
    opts.profiling_inner
  } else {
    opts.benching_inner
  };
  let looped = loop_code(inner_iters, src);
  // Leak the String to get a `&'static str`; this is fine because it's benchmarking code.
  let src = Box::leak(looped.into_boxed_str());
  if is_profiling() {
    for _ in 0..opts.profiling_outer {
      tokio_runtime.block_on(inner_async(src, &mut runtime));
    }
  } else {
    b.iter(|| {
      tokio_runtime.block_on(inner_async(src, &mut runtime));
    });
  }
}

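/// Runs one batch of the looped source and polls the event loop until every
/// async op it spawned has completed.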
async fn inner_async(src: &'static str, runtime: &mut JsRuntime) {
  runtime.execute_script("inner_loop", src).unwrap();
  runtime
    .run_event_loop(PollEventLoopOptions::default())
    .await
    .unwrap();
}