// dinja_core/renderer/pool.rs

1//! Thread-local renderer pooling to avoid reloading static JS libraries on every request.
2//!
3//! ## Architecture
4//!
5//! This module implements a thread-local cache of JavaScript renderers using LRU (Least Recently Used)
6//! eviction. Each thread maintains its own cache, preventing the need to reload engine and other
7//! static libraries on every request.
8//!
9//! ## Thread Safety
10//!
11//! **Critical**: This module uses `thread_local!` because `JsRuntime` is not `Send` or `Sync`.
12//! This means:
13//!
14//! - Each thread has its own independent cache
15//! - Renderers cannot be shared across threads
16//! - The pool automatically manages renderer lifecycle per thread
17//!
18//! ## Why Thread-Local Storage?
19//!
20//! Deno Core's `JsRuntime` is not thread-safe and cannot be shared across threads. Using
21//! thread-local storage allows each thread to maintain its own cache of renderers without
22//! requiring synchronization primitives like `Mutex`, which would serialize access and hurt
23//! performance in a multi-threaded web server.
24//!
25//! ## Performance Considerations
26//!
27//! - Renderers are cached per profile (Engine)
28//! - LRU eviction prevents unbounded memory growth
29//! - Pool warming reduces first-request latency
30//! - Maximum cache size per profile prevents excessive memory usage
31//!
32//! ## Example
33//!
34//! ```no_run
35//! use dinja_core::renderer::pool::{RendererPool, RendererProfile};
36//!
37//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
38//! let pool = RendererPool::new("static", 4);
39//! pool.warm(1); // Pre-create renderers for common profiles
40//!
41//! let lease = pool.checkout(RendererProfile::Engine)?;
42//! let renderer = lease.renderer()?;
43//! // Use renderer...
44//! // Renderer is automatically returned to pool when lease is dropped
45//! # Ok(())
46//! # }
47//! ```
48use super::JsRenderer;
49use crate::error::MdxError;
50use anyhow::Result as AnyhowResult;
51use std::cell::RefCell;
52use std::collections::{HashMap, VecDeque};
53use std::hash::Hash;
54use std::path::PathBuf;
55use std::thread_local;
56
57/// Cache entry tracking renderers and their access order for LRU eviction
/// Cache entry tracking renderers and their access order for LRU eviction.
///
/// The deque doubles as free list and recency order: the least recently
/// used renderer sits at the front (evicted first), the most recently
/// used at the back (checked out first).
struct CacheEntry {
    /// Stack of available renderers (most recently used at the end)
    renderers: VecDeque<JsRenderer>,
}
62
63impl CacheEntry {
64    fn new() -> Self {
65        Self {
66            renderers: VecDeque::new(),
67        }
68    }
69
70    /// Pops the most recently used renderer (LRU: remove from front)
71    fn pop(&mut self) -> Option<JsRenderer> {
72        self.renderers.pop_back()
73    }
74
75    /// Pushes a renderer, evicting the least recently used if at capacity
76    fn push_with_limit(&mut self, renderer: JsRenderer, max_size: usize) {
77        // If at capacity, remove least recently used (front of deque)
78        if self.renderers.len() >= max_size {
79            let _ = self.renderers.pop_front();
80        }
81        // Add most recently used to the back
82        self.renderers.push_back(renderer);
83    }
84
85    fn len(&self) -> usize {
86        self.renderers.len()
87    }
88}
89
90impl Drop for CacheEntry {
91    fn drop(&mut self) {
92        // V8 isolates must be dropped LIFO (reverse creation order) or the runtime panics.
93        while self.renderers.pop_back().is_some() {}
94    }
95}
96
thread_local! {
    // Per-thread renderer cache keyed by profile. `thread_local!` is
    // required because `JsRuntime` is neither `Send` nor `Sync` (see module
    // docs); `RefCell` gives single-threaded interior mutability with no
    // locking.
    static RENDERER_CACHE: RefCell<HashMap<RendererKey, CacheEntry>> =
        RefCell::new(HashMap::new());
}
101
/// Internal cache key identifying a renderer flavor.
///
/// Kept separate from the public `RendererProfile` so the cache's key type
/// can evolve independently of the public API.
#[derive(Clone, Copy, Hash, Eq, PartialEq)]
enum RendererKey {
    /// Key for the standard engine renderer.
    Engine,
}
106
/// Profiles describe the runtime flavor required for a given render request.
///
/// `Debug` and the comparison derives make the profile loggable and usable
/// in assertions by callers; all additions are backward-compatible.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum RendererProfile {
    /// Standard engine renderer used for HTML and JavaScript outputs.
    Engine,
}
113
114impl RendererProfile {
115    fn key(self) -> RendererKey {
116        match self {
117            RendererProfile::Engine => RendererKey::Engine,
118        }
119    }
120}
121
/// Lease that returns the renderer to the cache when dropped.
///
/// Obtained from `RendererPool::checkout`; while held, the lease grants
/// exclusive use of one renderer on the current thread.
pub struct RendererLease<'pool> {
    /// The leased renderer; `None` once it has been handed back to the pool.
    renderer: Option<JsRenderer>,
    /// Cache key identifying which profile bucket the renderer belongs to.
    key: RendererKey,
    /// Pool the renderer is returned to when the lease drops.
    pool: &'pool RendererPool,
}
128
129impl<'pool> RendererLease<'pool> {
130    /// Returns a reference to the leased renderer.
131    ///
132    /// # Errors
133    /// Returns an error if the renderer has already been returned to the pool.
134    pub fn renderer(&self) -> Result<&JsRenderer, MdxError> {
135        self.renderer
136            .as_ref()
137            .ok_or_else(|| MdxError::TsxTransform("Renderer already returned to pool".to_string()))
138    }
139}
140
141impl<'pool> Drop for RendererLease<'pool> {
142    fn drop(&mut self) {
143        if let Some(renderer) = self.renderer.take() {
144            self.pool.return_renderer(self.key, renderer);
145        }
146    }
147}
148
/// Thread-local cache of initialized JavaScript runtimes.
///
/// This pool uses LRU (Least Recently Used) eviction to manage cached renderers.
/// Each thread maintains its own cache, so renderers are not shared across threads.
/// This is necessary because `JsRuntime` is not `Send` or `Sync`.
///
/// The cache automatically evicts the least recently used renderer when the
/// maximum cache size is reached for a given profile.
///
/// Cloning the pool is cheap: the cache itself lives in thread-local
/// storage, so clones share the same per-thread caches.
#[derive(Clone)]
pub struct RendererPool {
    /// Directory containing the static JavaScript files loaded into new renderers.
    static_dir: PathBuf,
    /// Upper bound on idle renderers cached per profile on each thread.
    max_cached_per_key: usize,
}
162
163impl RendererPool {
164    /// Creates a new renderer pool.
165    ///
166    /// # Arguments
167    /// * `static_dir` - Directory containing static JavaScript files
168    /// * `max_cached_per_key` - Maximum number of cached renderers per profile
169    ///
170    /// # Returns
171    /// A new `RendererPool` instance
172    pub fn new(static_dir: impl Into<PathBuf>, max_cached_per_key: usize) -> Self {
173        Self {
174            static_dir: static_dir.into(),
175            max_cached_per_key,
176        }
177    }
178
179    /// Warms up the pool by pre-creating renderers for common profiles.
180    ///
181    /// This reduces first-request latency by initializing renderers ahead of time.
182    /// Errors during warming are logged but don't prevent pool creation.
183    ///
184    /// # Arguments
185    /// * `warm_count` - Number of renderers to pre-create per profile (defaults to 1)
186    pub fn warm(&self, warm_count: usize) {
187        if warm_count == 0 {
188            return;
189        }
190
191        // Warm up common profiles
192        let profiles = [RendererProfile::Engine];
193
194        for profile in profiles.iter() {
195            for _ in 0..warm_count.min(self.max_cached_per_key) {
196                if let Ok(renderer) = self.create_renderer(*profile) {
197                    let key = profile.key();
198                    self.return_renderer(key, renderer);
199                } else {
200                    // Log but continue - warming is best-effort
201                    eprintln!("Warning: Failed to warm renderer for profile Engine");
202                }
203            }
204        }
205    }
206
207    /// Checks out a renderer from the pool for the given profile.
208    ///
209    /// The renderer is returned to the pool when the `RendererLease` is dropped.
210    ///
211    /// # Arguments
212    /// * `profile` - The renderer profile (Engine)
213    ///
214    /// # Returns
215    /// A `RendererLease` containing the renderer, or an error if creation fails
216    pub fn checkout<'pool>(
217        &'pool self,
218        profile: RendererProfile,
219    ) -> AnyhowResult<RendererLease<'pool>> {
220        let key = profile.key();
221        let renderer =
222            Self::take_cached_renderer(key).map_or_else(|| self.create_renderer(profile), Ok)?;
223
224        Ok(RendererLease {
225            renderer: Some(renderer),
226            key,
227            pool: self,
228        })
229    }
230
231    fn create_renderer(&self, profile: RendererProfile) -> AnyhowResult<JsRenderer> {
232        match profile {
233            RendererProfile::Engine => JsRenderer::new(&self.static_dir),
234        }
235    }
236
237    fn take_cached_renderer(key: RendererKey) -> Option<JsRenderer> {
238        RENDERER_CACHE.with(|cache| {
239            let mut cache = cache.borrow_mut();
240            let entry = cache.get_mut(&key)?;
241            let renderer = entry.pop()?;
242            // Remove entry if empty to prevent cache bloat
243            if entry.len() == 0 {
244                cache.remove(&key);
245            }
246            Some(renderer)
247        })
248    }
249
250    fn return_renderer(&self, key: RendererKey, renderer: JsRenderer) {
251        RENDERER_CACHE.with(|cache| {
252            let mut cache = cache.borrow_mut();
253            let entry = cache.entry(key).or_insert_with(CacheEntry::new);
254            // Use LRU eviction: remove oldest if at capacity
255            entry.push_with_limit(renderer, self.max_cached_per_key);
256        });
257    }
258}