1use alloc::collections::BTreeMap;
7use alloc::vec::Vec;
8use core::any::Any;
9use core::cell::{Ref, RefCell, RefMut};
10use std::boxed::Box;
11
12use crate::encode::{BatchableResult, BinaryDecode};
13use crate::ipc::DecodedData;
14use crate::ipc::{EncodedData, IPCMessage, MessageType};
15use crate::lazy::ThreadLocalKey;
16use crate::runtime::WryIPC;
17use crate::value::{JSIDX_OFFSET, JSIDX_RESERVED};
18
/// Per-webview runtime state: the pending operation batch, JS heap-id
/// bookkeeping, cached type/object registries, and the IPC channel back to
/// the webview.
pub struct Runtime {
    /// Pending batch of encoded operations, pre-tagged with
    /// `MessageType::Evaluate` (see `new_encoder_for_evaluate`).
    encoder: EncodedData,
    /// Heap IDs that have been released and reported (or are about to be
    /// reported) to the JS side. Used for double-free detection.
    free_ids: Vec<u64>,
    /// Next heap ID to hand out; counts up from `JSIDX_RESERVED`.
    max_id: u64,
    /// Stack of deferred-free frames: while a frame is open, released IDs are
    /// collected here instead of being freed immediately.
    ids_to_free: Vec<Vec<u64>>,
    /// True while inside a `batch(..)` call; operations are queued instead of
    /// flushed eagerly.
    is_batching: bool,
    /// Borrow IDs count *down* from `JSIDX_OFFSET`; this is the next-1 value.
    borrow_stack_pointer: u64,
    /// Saved `borrow_stack_pointer` values, one per open borrow frame.
    borrow_frame_stack: Vec<u64>,
    /// Number of placeholder IDs reserved while batching; prepended to each
    /// outgoing message in `take_message`.
    reserved_placeholder_count: u32,
    /// Maps an encoded type descriptor to its interned numeric ID.
    type_cache: BTreeMap<Vec<u8>, u32>,
    /// Next type ID to assign in `get_or_create_type_id`.
    next_type_id: u32,
    /// Rust objects exposed to JS by handle; each value is a
    /// `Box<RefCell<T>>` stored as `dyn Any` (see `insert_object`).
    objects: BTreeMap<u32, Box<dyn Any>>,
    /// Next object handle; wraps on overflow (`wrapping_add` in
    /// `insert_object`).
    next_object_handle: u32,
    /// Channel used to deliver encoded batches to the host event loop.
    ipc: WryIPC,
    /// Identifier of the webview this runtime drives.
    webview_id: u64,
    /// Values parked across runtime re-entry, keyed by `ThreadLocalKey`.
    thread_locals: BTreeMap<ThreadLocalKey<'static>, Box<dyn Any>>,
}
59
impl Runtime {
    /// Creates a fresh runtime bound to `ipc`/`webview_id`.
    ///
    /// Heap IDs start at `JSIDX_RESERVED` (everything below is reserved) and
    /// borrow IDs count down from `JSIDX_OFFSET`.
    pub(crate) fn new(ipc: WryIPC, webview_id: u64) -> Self {
        Self {
            encoder: Self::new_encoder_for_evaluate(),
            free_ids: Vec::new(),
            max_id: JSIDX_RESERVED,
            ids_to_free: Vec::new(),
            is_batching: false,
            borrow_stack_pointer: JSIDX_OFFSET,
            borrow_frame_stack: Vec::new(),
            reserved_placeholder_count: 0,
            type_cache: BTreeMap::new(),
            next_type_id: 0,
            objects: BTreeMap::new(),
            next_object_handle: 0,
            ipc,
            webview_id,
            thread_locals: BTreeMap::new(),
        }
    }

    /// Returns a new encoder whose first byte is the `Evaluate` message tag.
    /// `extend_encoder` relies on this tag occupying exactly one leading byte.
    fn new_encoder_for_evaluate() -> EncodedData {
        let mut encoder = EncodedData::new();
        encoder.push_u8(MessageType::Evaluate as u8);
        encoder
    }

    /// Allocates the next JS heap ID (monotonically increasing).
    ///
    /// NOTE(review): `free_ids` is never recycled here — presumably freed IDs
    /// are reclaimed on the JS side rather than reused; confirm before
    /// changing.
    pub fn get_next_heap_id(&mut self) -> u64 {
        let id = self.max_id;
        self.max_id += 1;
        id
    }

    /// Allocates a heap ID for a placeholder result. While batching, the
    /// placeholder is also counted so `take_message` can prepend the total.
    pub fn get_next_placeholder_id(&mut self) -> u64 {
        let id = self.get_next_heap_id();
        if self.is_batching {
            self.reserved_placeholder_count += 1;
        }
        id
    }

    /// Allocates the next borrow ID by counting *down* from `JSIDX_OFFSET`.
    ///
    /// # Panics
    /// Panics when the counter would reach 0 (borrow stack exhausted).
    pub fn get_next_borrow_id(&mut self) -> u64 {
        if self.borrow_stack_pointer <= 1 {
            panic!("Borrow stack overflow: too many borrowed references in a single operation");
        }
        self.borrow_stack_pointer -= 1;
        self.borrow_stack_pointer
    }

    /// Opens a borrow frame by saving the current borrow-stack pointer.
    pub fn push_borrow_frame(&mut self) {
        self.borrow_frame_stack.push(self.borrow_stack_pointer);
    }

    /// Closes the most recent borrow frame, restoring the saved pointer and
    /// thereby releasing every borrow ID handed out since the matching push.
    ///
    /// # Panics
    /// Panics if no frame is open (unbalanced push/pop).
    pub fn pop_borrow_frame(&mut self) {
        if let Some(saved_pointer) = self.borrow_frame_stack.pop() {
            self.borrow_stack_pointer = saved_pointer;
        } else {
            panic!("pop_borrow_frame called with empty frame stack");
        }
    }

    /// Releases a heap ID. If a deferred-free frame is open (see
    /// `push_ids_to_free`), the ID is parked there and `None` is returned;
    /// otherwise the ID is recorded in `free_ids` and returned so the caller
    /// can notify JS immediately.
    ///
    /// Debug builds assert against double-frees.
    pub fn release_heap_id(&mut self, id: u64) -> Option<u64> {
        if id < JSIDX_RESERVED {
            unreachable!("Attempted to release reserved JS heap ID {}", id);
        }

        debug_assert!(
            !self.free_ids.contains(&id) && !self.ids_to_free.iter().any(|ids| ids.contains(&id)),
            "Double-free detected for heap ID {id}"
        );
        match self.ids_to_free.last_mut() {
            Some(ids) => {
                ids.push(id);
                None
            }
            None => {
                self.free_ids.push(id);
                Some(id)
            }
        }
    }

    /// Drains the pending batch into an `IPCMessage`, prepending the number
    /// of placeholders reserved since the last flush.
    pub(crate) fn take_message(&mut self) -> IPCMessage {
        let reserved_count = self.take_reserved_placeholder_count();
        let mut encoder = self.take_encoder();
        encoder.prepend_u32(reserved_count);
        IPCMessage::new(encoder.to_bytes())
    }

    /// True when no operations have been queued since the last flush.
    /// 13 bytes is presumably the fixed size of an empty `Evaluate` batch
    /// (tag byte plus framing) — TODO confirm against `EncodedData::byte_len`.
    pub(crate) fn is_empty(&self) -> bool {
        self.encoder.byte_len() <= 13
    }

    /// Opens a deferred-free frame: subsequent `release_heap_id` calls collect
    /// IDs here until `pop_and_release_ids` closes the frame.
    pub(crate) fn push_ids_to_free(&mut self) {
        self.ids_to_free.push(Vec::new());
    }

    /// Closes the newest deferred-free frame and re-releases its IDs,
    /// returning those that must now be freed on the JS side (the rest were
    /// re-parked in an outer frame, if any).
    pub(crate) fn pop_and_release_ids(&mut self) -> Vec<u64> {
        let mut to_free = Vec::new();
        if let Some(ids) = self.ids_to_free.pop() {
            for id in ids {
                if let Some(freed_id) = self.release_heap_id(id) {
                    to_free.push(freed_id);
                }
            }
        }
        to_free
    }

    /// Sets the batching flag (managed by the public `batch` wrapper).
    pub(crate) fn set_batching(&mut self, batching: bool) {
        self.is_batching = batching;
    }

    /// Returns whether operations are currently being batched.
    pub(crate) fn is_batching(&self) -> bool {
        self.is_batching
    }

    /// Returns the reserved-placeholder count and resets it to 0.
    pub(crate) fn take_reserved_placeholder_count(&mut self) -> u32 {
        core::mem::take(&mut self.reserved_placeholder_count)
    }

    /// Swaps the pending encoder out, replacing it with a fresh
    /// `Evaluate`-tagged one.
    pub(crate) fn take_encoder(&mut self) -> EncodedData {
        core::mem::replace(&mut self.encoder, Self::new_encoder_for_evaluate())
    }

    /// Appends `other`'s buffers onto the pending encoder, skipping `other`'s
    /// leading message-type tag byte so the result stays a single message.
    pub(crate) fn extend_encoder(&mut self, other: &EncodedData) {
        self.encoder.u8_buf.extend_from_slice(&other.u8_buf[1..]);
        self.encoder.u16_buf.extend_from_slice(&other.u16_buf);
        self.encoder.u32_buf.extend_from_slice(&other.u32_buf);
        self.encoder.str_buf.extend_from_slice(&other.str_buf);
    }

    /// Interns a type descriptor. Returns `(id, true)` on a cache hit and
    /// `(fresh_id, false)` when the descriptor was seen for the first time.
    pub(crate) fn get_or_create_type_id(&mut self, type_bytes: Vec<u8>) -> (u32, bool) {
        if let Some(&id) = self.type_cache.get(&type_bytes) {
            (id, true)
        } else {
            let id = self.next_type_id;
            self.next_type_id += 1;
            self.type_cache.insert(type_bytes, id);
            (id, false)
        }
    }

    /// Stores `obj` (wrapped in a `RefCell` for later shared/mutable access)
    /// and returns its handle. Handles wrap on `u32` overflow.
    pub(crate) fn insert_object<T: 'static>(&mut self, obj: T) -> u32 {
        let handle = self.next_object_handle;
        self.next_object_handle = self.next_object_handle.wrapping_add(1);
        self.objects.insert(handle, Box::new(RefCell::new(obj)));
        handle
    }

    /// Removes and returns the value parked under `key`.
    ///
    /// # Panics
    /// Panics if the key is absent or the stored value is not a `T`.
    pub(crate) fn take_thread_local<T: 'static>(&mut self, key: ThreadLocalKey<'static>) -> T {
        *self
            .thread_locals
            .remove(&key)
            .expect("thread local not found")
            .downcast::<T>()
            .expect("type mismatch")
    }

    /// Parks `value` under `key`, replacing any previous value.
    pub(crate) fn insert_thread_local<T: 'static>(
        &mut self,
        key: ThreadLocalKey<'static>,
        value: T,
    ) {
        self.thread_locals.insert(key, Box::new(value));
    }

    /// Returns whether a value is parked under `key`.
    pub(crate) fn has_thread_local(&self, key: ThreadLocalKey<'static>) -> bool {
        self.thread_locals.contains_key(&key)
    }

    /// Immutably borrows the object behind `handle`.
    ///
    /// # Panics
    /// Panics on an unknown handle, a type mismatch, or an active mutable
    /// borrow of the same object.
    pub(crate) fn get_object<T: 'static>(&self, handle: u32) -> Ref<'_, T> {
        let boxed = self.objects.get(&handle).expect("invalid handle");
        let cell = boxed.downcast_ref::<RefCell<T>>().expect("type mismatch");
        cell.borrow()
    }

    /// Mutably borrows the object behind `handle`.
    ///
    /// # Panics
    /// Panics on an unknown handle, a type mismatch, or any other active
    /// borrow of the same object.
    pub(crate) fn get_object_mut<T: 'static>(&self, handle: u32) -> RefMut<'_, T> {
        let boxed = self.objects.get(&handle).expect("invalid handle");
        let cell = boxed.downcast_ref::<RefCell<T>>().expect("type mismatch");
        cell.borrow_mut()
    }

    /// Removes the object behind `handle` and returns it by value.
    ///
    /// # Panics
    /// Panics on an unknown handle or a type mismatch.
    pub(crate) fn remove_object<T: 'static>(&mut self, handle: u32) -> T {
        let boxed = self.objects.remove(&handle).expect("invalid handle");
        let cell = boxed.downcast::<RefCell<T>>().expect("type mismatch");
        cell.into_inner()
    }

    /// Removes the object behind `handle` without downcasting, if present.
    pub(crate) fn remove_object_untyped(&mut self, handle: u32) -> Option<Box<dyn Any>> {
        self.objects.remove(&handle)
    }

    /// The IPC channel to the host event loop.
    pub(crate) fn ipc(&self) -> &WryIPC {
        &self.ipc
    }

    /// The webview this runtime is bound to.
    pub(crate) fn webview_id(&self) -> u64 {
        self.webview_id
    }
}
302
thread_local! {
    // Stack of active runtimes for the current thread. `in_runtime` pushes a
    // runtime before running user code and pops it afterwards; `with_runtime`
    // always operates on the top entry.
    pub(crate) static RUNTIME: RefCell<Vec<Runtime>> = const { RefCell::new(Vec::new()) };
}
307
308fn push_runtime(runtime: Runtime) {
309 RUNTIME.with(|state| {
310 state.borrow_mut().push(runtime);
311 });
312}
313
314fn pop_runtime() -> Runtime {
315 RUNTIME.with(|state| {
316 state
317 .borrow_mut()
318 .pop()
319 .expect("No runtime available to pop")
320 })
321}
322
323pub(crate) fn in_runtime<O>(runtime: Runtime, run: impl FnOnce() -> O) -> (Runtime, O) {
324 push_runtime(runtime);
325 let out = run();
326 let runtime = pop_runtime();
327 (runtime, out)
328}
329
330pub(crate) fn with_runtime<R>(f: impl FnOnce(&mut Runtime) -> R) -> R {
331 RUNTIME.with(|state| {
332 let mut state = state.borrow_mut();
333 f(state.last_mut().expect("No runtime available"))
334 })
335}
336
337pub fn is_batching() -> bool {
339 with_runtime(|state| state.is_batching())
340}
341
342pub(crate) fn queue_js_drop(id: u64) {
345 debug_assert!(
346 id >= JSIDX_RESERVED,
347 "Attempted to drop reserved JS heap ID {id}"
348 );
349
350 let runtime_already_dropped = RUNTIME.with(|state| state.borrow().is_empty());
351 if runtime_already_dropped {
353 return;
354 }
355
356 let id = with_runtime(|state| state.release_heap_id(id));
357 if let Some(id) = id {
358 crate::js_helpers::js_drop_heap_ref(id);
359 }
360}
361
362pub(crate) fn add_operation(
364 encoder: &mut EncodedData,
365 fn_id: u32,
366 add_args: impl FnOnce(&mut EncodedData),
367) {
368 encoder.push_u32(fn_id);
369 add_args(encoder);
370}
371
/// Queues one operation and produces its result, either as a batching
/// placeholder or by flushing the batch and decoding the real response.
pub(crate) fn run_js_sync<R: BatchableResult>(
    fn_id: u32,
    add_args: impl FnOnce(&mut EncodedData),
) -> R {
    // Open a deferred-free frame (so drops triggered while encoding are
    // collected, not sent mid-message) and take the pending batch so the
    // new operation can be appended to it.
    let mut batch = with_runtime(|state| {
        state.push_ids_to_free();
        state.take_encoder()
    });
    add_operation(&mut batch, fn_id, add_args);

    let needs_flush = batch.needs_flush;

    // Encoding the arguments may itself have queued operations into the
    // runtime's (fresh) encoder. Put `batch` back as the main encoder and
    // append whatever accumulated in the interim, preserving order.
    with_runtime(|state| {
        let encoded_during_op = core::mem::replace(&mut state.encoder, batch);
        state.extend_encoder(&encoded_during_op);
    });

    let get_placeholder = || with_runtime(|state| R::try_placeholder(state));

    let result = if !is_batching() || needs_flush {
        // Not batching (or the operation demands a flush): send the batch now
        // and decode the response — unless the result type still yields a
        // placeholder, in which case the response bytes must be empty.
        flush_and_then(|mut data| {
            let response = get_placeholder()
                .unwrap_or_else(|| R::decode(&mut data).expect("Failed to decode return value"));
            assert!(
                data.is_empty(),
                "Extra data remaining after decoding response"
            );
            response
        })
    } else {
        // Batching: prefer a placeholder; only flush if this result type
        // cannot be represented by one.
        get_placeholder().unwrap_or_else(|| flush_and_return::<R>())
    };

    // Close the deferred-free frame and notify JS of IDs that became free.
    let ids = with_runtime(|state| state.pop_and_release_ids());
    for id in ids {
        crate::js_helpers::js_drop_heap_ref(id);
    }

    result
}
429
430pub(crate) fn flush_and_return<R: BinaryDecode>() -> R {
432 flush_and_then(|mut data| {
433 let response = R::decode(&mut data).expect("Failed to decode return value");
434 assert!(
435 data.is_empty(),
436 "Extra data remaining after decoding response"
437 );
438 response
439 })
440}
441
/// Sends the pending batch to the webview over IPC, then drives the JS side
/// until a response arrives, applying `then` to the decoded response data.
pub(crate) fn flush_and_then<R>(then: impl for<'a> Fn(DecodedData<'a>) -> R) -> R {
    use crate::runtime::WryBindgenEvent;

    // Drain the batch (this also prepends the reserved-placeholder count).
    let batch_msg = with_runtime(|state| state.take_message());

    // Hand the message to the host event-loop proxy for delivery.
    with_runtime(|runtime| {
        (runtime.ipc().proxy)(WryBindgenEvent::ipc(runtime.webview_id(), batch_msg))
    });
    // Pump the JS side until it yields a result for `then`.
    // NOTE(review): presumably `progress_js_with` blocks or yields internally;
    // otherwise this loop would spin — confirm in `crate::runtime`.
    loop {
        if let Some(result) = crate::runtime::progress_js_with(&then) {
            return result;
        }
    }
}
457
458pub fn batch<R, F: FnOnce() -> R>(f: F) -> R {
462 let currently_batching = is_batching();
463 with_runtime(|state| state.set_batching(true));
465
466 let result = f();
468
469 if !currently_batching {
470 force_flush();
472 }
473
474 with_runtime(|state| state.set_batching(currently_batching));
476
477 result
478}
479
480pub fn batch_async<'a, R, F: core::future::Future<Output = R> + 'a>(
482 f: F,
483) -> impl core::future::Future<Output = R> + 'a {
484 let mut f = Box::pin(f);
485 std::future::poll_fn(move |ctx| batch(|| f.as_mut().poll(ctx)))
486}
487
488pub fn force_flush() {
489 let has_pending = with_runtime(|state| !state.is_empty());
490 if has_pending {
491 flush_and_return::<()>();
492 }
493}