// deno_core/ops.rs

1// Copyright 2018-2026 the Deno authors. MIT license.
2
3use std::cell::RefCell;
4use std::collections::HashSet;
5use std::ops::Deref;
6use std::ops::DerefMut;
7use std::rc::Rc;
8use std::sync::Arc;
9use std::sync::atomic::AtomicUsize;
10use std::sync::atomic::Ordering;
11
12use futures::task::AtomicWaker;
13use v8::fast_api::CFunction;
14
15use crate::OpDecl;
16use crate::ResourceId;
17use crate::error::JsStackFrame;
18use crate::gotham_state::GothamState;
19use crate::io::ResourceTable;
20use crate::ops_metrics::OpMetricsFn;
21use crate::runtime::JsRuntimeState;
22use crate::runtime::OpDriverImpl;
23use crate::runtime::UnrefedOps;
24
/// Identifier correlating an async op dispatch with the JS promise that
/// resolves it.
pub type PromiseId = i32;
/// Index of an op within the runtime's op table; identical across realms.
pub type OpId = u16;
27
#[cfg(debug_assertions)]
thread_local! {
  // The non-reentrant op currently executing on this thread, if any.
  // Set by `reentrancy_check` and cleared when the returned guard drops.
  static CURRENT_OP: std::cell::Cell<Option<&'static OpDecl>> = None.into();
}
32
/// Guard returned by [`reentrancy_check`]. While alive, the op that created
/// it is recorded as the thread's "current op"; dropping it ends that window.
#[cfg(debug_assertions)]
pub struct ReentrancyGuard {}
35
#[cfg(debug_assertions)]
impl Drop for ReentrancyGuard {
  fn drop(&mut self) {
    // The op has finished: no op is "current" on this thread any more.
    CURRENT_OP.with(|f| f.set(None));
  }
}
42
43/// Creates an op re-entrancy check for the given [`OpDecl`].
44#[cfg(debug_assertions)]
45#[doc(hidden)]
46pub fn reentrancy_check(decl: &'static OpDecl) -> Option<ReentrancyGuard> {
47  if decl.is_reentrant {
48    return None;
49  }
50
51  let current = CURRENT_OP.with(|f| f.get());
52  if let Some(current) = current {
53    panic!(
54      "op {} was not marked as #[op2(reentrant)], but re-entrantly invoked op {}",
55      current.name, decl.name
56    );
57  }
58  CURRENT_OP.with(|f| f.set(Some(decl)));
59  Some(ReentrancyGuard {})
60}
61
/// Per-op metadata consumed by tooling (currently the op sanitizer).
#[derive(Clone, Copy, Debug)]
pub struct OpMetadata {
  /// A description of the op for use in sanitizer output.
  pub sanitizer_details: Option<&'static str>,
  /// The fix for the issue described in `sanitizer_details`.
  pub sanitizer_fix: Option<&'static str>,
}

impl OpMetadata {
  /// Const-friendly equivalent of [`Default::default`], usable where the
  /// trait method cannot be called (`const`/`static` initializers).
  pub const fn default() -> Self {
    Self {
      sanitizer_details: None,
      sanitizer_fix: None,
    }
  }
}

impl Default for OpMetadata {
  fn default() -> Self {
    // Inherent methods take precedence over trait methods, so this calls the
    // const constructor above — no recursion.
    Self::default()
  }
}
78
/// Per-op context.
///
// Note: We don't worry too much about the size of this struct because it's allocated once per realm, and is
// stored in a contiguous array.
pub struct OpCtx {
  /// The id for this op. Will be identical across realms.
  pub id: OpId,

  /// A stashed Isolate that ops can make use of. This is a raw isolate pointer, and as such, is
  /// extremely dangerous to use.
  pub isolate: v8::UnsafeRawIsolatePtr,

  // Shared runtime-wide op state (resource table, embedder state, waker).
  #[doc(hidden)]
  pub state: Rc<RefCell<OpState>>,
  // Whether JS stack traces should be captured for calls to this op.
  #[doc(hidden)]
  pub enable_stack_trace: bool,

  // Static declaration (name, slow/fast function pointers, flags) for this op.
  pub(crate) decl: OpDecl,
  // The fastcall `CFunction` registered for this op: the metrics variant when
  // `metrics_fn` is set, the plain one otherwise (chosen in `OpCtx::new`).
  pub(crate) fast_fn_info: Option<CFunction>,
  // Present iff metrics collection is enabled for this op.
  pub(crate) metrics_fn: Option<OpMetricsFn>,

  op_driver: Rc<OpDriverImpl>,
  // Raw pointer back to the owning runtime's state; `runtime_state()` relies
  // on `JsRuntimeState` outliving this `OpCtx` to dereference it.
  runtime_state: *const JsRuntimeState,
}
103
104impl OpCtx {
105  #[allow(clippy::too_many_arguments, reason = "all arguments are needed")]
106  pub(crate) fn new(
107    id: OpId,
108    isolate: v8::UnsafeRawIsolatePtr,
109    op_driver: Rc<OpDriverImpl>,
110    decl: OpDecl,
111    state: Rc<RefCell<OpState>>,
112    runtime_state: *const JsRuntimeState,
113    metrics_fn: Option<OpMetricsFn>,
114    enable_stack_trace: bool,
115  ) -> Self {
116    // If we want metrics for this function, create the fastcall `CFunctionInfo` from the metrics
117    // `CFunction`. For some extremely fast ops, the parameter list may change for the metrics
118    // version and require a slightly different set of arguments (for example, it may need the fastcall
119    // callback information to get the `OpCtx`).
120    let fast_fn_info = if metrics_fn.is_some() {
121      decl.fast_fn_with_metrics
122    } else {
123      decl.fast_fn
124    };
125
126    Self {
127      id,
128      state,
129      runtime_state,
130      decl,
131      op_driver,
132      fast_fn_info,
133      isolate,
134      metrics_fn,
135      enable_stack_trace,
136    }
137  }
138
139  #[inline(always)]
140  pub const fn decl(&self) -> &OpDecl {
141    &self.decl
142  }
143
144  #[inline(always)]
145  pub const fn metrics_enabled(&self) -> bool {
146    self.metrics_fn.is_some()
147  }
148
149  /// Generates four external references for each op. If an op does not have a fastcall, it generates
150  /// "null" slots to avoid changing the size of the external references array.
151  pub const fn external_references(&self) -> [v8::ExternalReference; 4] {
152    extern "C" fn placeholder() {}
153
154    let ctx_ptr = v8::ExternalReference {
155      pointer: self as *const OpCtx as _,
156    };
157    let null = v8::ExternalReference {
158      pointer: placeholder as _,
159    };
160
161    if self.metrics_enabled() {
162      let slow_fn = v8::ExternalReference {
163        function: self.decl.slow_fn_with_metrics,
164      };
165      if let (Some(fast_fn), Some(fast_fn_info)) =
166        (self.decl.fast_fn_with_metrics, self.fast_fn_info)
167      {
168        let fast_fn = v8::ExternalReference {
169          pointer: fast_fn.address() as _,
170        };
171        let fast_info = v8::ExternalReference {
172          type_info: fast_fn_info.type_info(),
173        };
174        [ctx_ptr, slow_fn, fast_fn, fast_info]
175      } else {
176        [ctx_ptr, slow_fn, null, null]
177      }
178    } else {
179      let slow_fn = v8::ExternalReference {
180        function: self.decl.slow_fn,
181      };
182      if let (Some(fast_fn), Some(fast_fn_info)) =
183        (self.decl.fast_fn, self.fast_fn_info)
184      {
185        let fast_fn = v8::ExternalReference {
186          pointer: fast_fn.address() as _,
187        };
188        let fast_info = v8::ExternalReference {
189          type_info: fast_fn_info.type_info(),
190        };
191        [ctx_ptr, slow_fn, fast_fn, fast_info]
192      } else {
193        [ctx_ptr, slow_fn, null, null]
194      }
195    }
196  }
197
198  pub(crate) fn op_driver(&self) -> &OpDriverImpl {
199    &self.op_driver
200  }
201
202  /// Get the [`JsRuntimeState`] for this op.
203  pub(crate) fn runtime_state(&self) -> &JsRuntimeState {
204    // SAFETY: JsRuntimeState outlives OpCtx
205    unsafe { &*self.runtime_state }
206  }
207}
208
/// Allows an embedder to track operations which should
/// keep the event loop alive.
#[derive(Debug, Clone)]
pub struct ExternalOpsTracker {
  // Shared count of outstanding external operations; clones share the count.
  counter: Arc<AtomicUsize>,
}

impl ExternalOpsTracker {
  /// Records one external operation that should keep the event loop alive.
  pub fn ref_op(&self) {
    self.counter.fetch_add(1, Ordering::Relaxed);
  }

  /// Removes one previously recorded external operation. Saturates at zero,
  /// so unbalanced calls can never underflow the counter.
  pub fn unref_op(&self) {
    let _ = self.counter.fetch_update(
      Ordering::Relaxed,
      Ordering::Relaxed,
      // `checked_sub` yields `None` at zero, leaving the counter untouched.
      |count| count.checked_sub(1),
    );
  }

  /// Whether any external operations are still keeping the event loop alive.
  pub(crate) fn has_pending_ops(&self) -> bool {
    self.counter.load(Ordering::Relaxed) != 0
  }
}
234
/// Callback that receives captured JS stack frames for an op call; stored in
/// `OpState::op_stack_trace_callback` (see also `OpCtx::enable_stack_trace`).
pub type OpStackTraceCallback = Box<dyn Fn(Vec<JsStackFrame>)>;
236
/// Maintains the resources and ops inside a JS runtime.
pub struct OpState {
  /// Table of live resources, addressed by [`ResourceId`].
  pub resource_table: ResourceTable,
  // Type-keyed storage for embedder-provided state; also exposed via the
  // `Deref`/`DerefMut` impls below.
  pub(crate) gotham_state: GothamState,
  /// Shared waker ops can use to wake the runtime.
  pub waker: Arc<AtomicWaker>,
  /// Counter of embedder-tracked operations that should keep the event loop
  /// alive (see [`ExternalOpsTracker`]).
  pub external_ops_tracker: ExternalOpsTracker,
  /// Optional callback invoked with captured JS stack frames for op calls.
  pub op_stack_trace_callback: Option<OpStackTraceCallback>,
  /// Reference to the unrefered ops state in `ContextState`.
  pub(crate) unrefed_ops: UnrefedOps,
  /// Resources that are not referenced by the event loop. All async
  /// resource ops on these resources will not keep the event loop alive.
  ///
  /// Used to implement `uv_ref` and `uv_unref` methods for Node compat.
  pub(crate) unrefed_resources: HashSet<ResourceId>,
}
252
253impl OpState {
254  pub fn new(op_stack_trace_callback: Option<OpStackTraceCallback>) -> OpState {
255    OpState {
256      resource_table: Default::default(),
257      gotham_state: Default::default(),
258      waker: Arc::new(AtomicWaker::new()),
259      external_ops_tracker: ExternalOpsTracker {
260        counter: Arc::new(AtomicUsize::new(0)),
261      },
262      op_stack_trace_callback,
263      unrefed_ops: Default::default(),
264      unrefed_resources: Default::default(),
265    }
266  }
267
268  /// Clear all user-provided resources and state.
269  pub(crate) fn clear(&mut self) {
270    std::mem::take(&mut self.gotham_state);
271    std::mem::take(&mut self.resource_table);
272  }
273
274  // Silly but improves readability.
275  pub fn uv_unref(&mut self, resource_id: ResourceId) {
276    self.unrefed_resources.insert(resource_id);
277  }
278
279  pub fn uv_ref(&mut self, resource_id: ResourceId) {
280    self.unrefed_resources.remove(&resource_id);
281  }
282
283  pub fn has_ref(&self, resource_id: ResourceId) -> bool {
284    !self.unrefed_resources.contains(&resource_id)
285  }
286}
287
// Delegate `Deref` to the inner `GothamState` so its methods are callable
// directly on `OpState`.
impl Deref for OpState {
  type Target = GothamState;

  fn deref(&self) -> &Self::Target {
    &self.gotham_state
  }
}
295
// Mutable counterpart of the `Deref` impl above.
impl DerefMut for OpState {
  fn deref_mut(&mut self) -> &mut Self::Target {
    &mut self.gotham_state
  }
}