// dusk_wasmtime/runtime/store/data.rs

use crate::store::StoreOpaque;
use crate::{StoreContext, StoreContextMut};
use std::fmt;
use std::marker;
use std::num::NonZeroU64;
use std::ops::{Index, IndexMut};
use std::sync::atomic::{AtomicU64, Ordering::Relaxed};
8
9#[derive(Copy, Clone)]
14pub struct InstanceId(pub(super) usize);
15
16impl InstanceId {
17 pub fn from_index(idx: usize) -> InstanceId {
18 InstanceId(idx)
19 }
20}
21
/// Storage for all host-visible wasm objects (functions, tables, globals,
/// instances, memories) created within a single store, addressed via
/// `Stored<T>` handles.
pub struct StoreData {
    // Unique id of the owning store; stamped into every handle handed out so
    // cross-store use can be detected.
    id: StoreId,
    funcs: Vec<crate::func::FuncData>,
    tables: Vec<wasmtime_runtime::ExportTable>,
    globals: Vec<wasmtime_runtime::ExportGlobal>,
    instances: Vec<crate::instance::InstanceData>,
    memories: Vec<wasmtime_runtime::ExportMemory>,
    // Component-model objects live in their own sub-storage.
    #[cfg(feature = "component-model")]
    pub(crate) components: crate::component::ComponentStoreData,
}
32
/// Maps a storable type to the `Vec` field of `StoreData` that holds it,
/// letting `insert`/`Index`/`IndexMut` be written generically over all
/// object kinds.
pub trait StoredData: Sized {
    // Shared borrow of the list holding every `Self` in `data`.
    fn list(data: &StoreData) -> &Vec<Self>;
    // Mutable borrow of the same list.
    fn list_mut(data: &mut StoreData) -> &mut Vec<Self>;
}
37
// Generates a `StoredData` impl per `field => Type` pair, wiring each element
// type to its backing `Vec` field on `StoreData`.
macro_rules! impl_store_data {
    ($($field:ident => $t:ty,)*) => ($(
        impl StoredData for $t {
            #[inline]
            fn list(data: &StoreData) -> &Vec<Self> { &data.$field }
            #[inline]
            fn list_mut(data: &mut StoreData) -> &mut Vec<Self> { &mut data.$field }
        }
    )*)
}
48
// One `StoredData` impl per storage field; keep this list in sync with the
// `Vec` fields of `StoreData`.
impl_store_data! {
    funcs => crate::func::FuncData,
    tables => wasmtime_runtime::ExportTable,
    globals => wasmtime_runtime::ExportGlobal,
    instances => crate::instance::InstanceData,
    memories => wasmtime_runtime::ExportMemory,
}
56
57impl StoreData {
58 pub fn new() -> StoreData {
59 StoreData {
60 id: StoreId::allocate(),
61 funcs: Vec::new(),
62 tables: Vec::new(),
63 globals: Vec::new(),
64 instances: Vec::new(),
65 memories: Vec::new(),
66 #[cfg(feature = "component-model")]
67 components: Default::default(),
68 }
69 }
70
71 pub fn id(&self) -> StoreId {
72 self.id
73 }
74
75 pub fn insert<T>(&mut self, data: T) -> Stored<T>
76 where
77 T: StoredData,
78 {
79 let list = T::list_mut(self);
80 let index = list.len();
81 list.push(data);
82 Stored::new(self.id, index)
83 }
84
85 pub fn next_id<T>(&self) -> Stored<T>
86 where
87 T: StoredData,
88 {
89 Stored::new(self.id, T::list(self).len())
90 }
91
92 pub fn contains<T>(&self, id: Stored<T>) -> bool
93 where
94 T: StoredData,
95 {
96 if id.store_id != self.id {
97 return false;
98 }
99 debug_assert!(id.index() < T::list(self).len());
102 true
103 }
104
105 pub fn iter<T>(&self) -> impl ExactSizeIterator<Item = Stored<T>>
106 where
107 T: StoredData,
108 {
109 let id = self.id;
110 (0..T::list(self).len()).map(move |i| Stored::new(id, i))
111 }
112
113 pub(crate) fn reserve_funcs(&mut self, count: usize) {
114 self.funcs.reserve(count);
115 }
116}
117
118impl<T> Index<Stored<T>> for StoreData
119where
120 T: StoredData,
121{
122 type Output = T;
123
124 #[inline]
125 fn index(&self, index: Stored<T>) -> &Self::Output {
126 index.assert_belongs_to(self.id);
127 &T::list(self)[index.index()]
133 }
134}
135
136impl<T> IndexMut<Stored<T>> for StoreData
137where
138 T: StoredData,
139{
140 #[inline]
141 fn index_mut(&mut self, index: Stored<T>) -> &mut Self::Output {
142 index.assert_belongs_to(self.id);
143 &mut T::list_mut(self)[index.index()]
146 }
147}
148
149impl<I, T> Index<I> for StoreContext<'_, T>
151where
152 StoreData: Index<I>,
153{
154 type Output = <StoreData as Index<I>>::Output;
155
156 #[inline]
157 fn index(&self, index: I) -> &Self::Output {
158 self.0.store_data.index(index)
159 }
160}
161
162impl<I, T> Index<I> for StoreContextMut<'_, T>
164where
165 StoreData: Index<I>,
166{
167 type Output = <StoreData as Index<I>>::Output;
168
169 #[inline]
170 fn index(&self, index: I) -> &Self::Output {
171 self.0.store_data.index(index)
172 }
173}
174
175impl<I> Index<I> for StoreOpaque
177where
178 StoreData: Index<I>,
179{
180 type Output = <StoreData as Index<I>>::Output;
181
182 #[inline]
183 fn index(&self, index: I) -> &Self::Output {
184 self.store_data().index(index)
185 }
186}
187impl<I> IndexMut<I> for StoreOpaque
188where
189 StoreData: IndexMut<I>,
190{
191 #[inline]
192 fn index_mut(&mut self, index: I) -> &mut Self::Output {
193 self.store_data_mut().index_mut(index)
194 }
195}
196
/// Process-wide unique identifier for a store, allocated monotonically.
///
/// Backed by `NonZeroU64` so the zero value is a niche: `Option<StoreId>`
/// (and enums embedding one) cost no extra space.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct StoreId(NonZeroU64);
206
impl StoreId {
    /// Allocates a fresh, process-wide unique `StoreId`.
    ///
    /// # Panics
    ///
    /// Panics once the global counter reaches 2^63 allocations; after that,
    /// every subsequent call panics as well.
    fn allocate() -> StoreId {
        // Process-global counter; ids are handed out sequentially.
        static NEXT_ID: AtomicU64 = AtomicU64::new(0);

        let id = NEXT_ID.fetch_add(1, Relaxed);
        // Once the top bit is set, pin the counter at that value so racing
        // `fetch_add`s cannot eventually wrap the counter back around to
        // already-issued ids; every caller from this point on panics.
        if id & (1 << 63) != 0 {
            NEXT_ID.store(1 << 63, Relaxed);
            panic!("store id allocator overflow");
        }

        // `id` has its top bit clear here, so `id + 1` cannot overflow and is
        // always nonzero — the `unwrap` never fires.
        StoreId(NonZeroU64::new(id + 1).unwrap())
    }

    /// Asserts `self` and `store` denote the same store, diverging through
    /// the out-of-line cold panic path otherwise.
    #[inline]
    pub fn assert_belongs_to(&self, store: StoreId) {
        if *self == store {
            return;
        }
        store_id_mismatch();
    }
}
239
240#[repr(C)] pub struct Stored<T> {
242 store_id: StoreId,
243 index: usize,
244 _marker: marker::PhantomData<fn() -> T>,
245}
246
247impl<T> Stored<T> {
248 fn new(store_id: StoreId, index: usize) -> Stored<T> {
249 Stored {
250 store_id,
251 index,
252 _marker: marker::PhantomData,
253 }
254 }
255
256 #[inline]
257 pub fn assert_belongs_to(&self, store: StoreId) {
258 self.store_id.assert_belongs_to(store)
259 }
260
261 fn index(&self) -> usize {
262 self.index
263 }
264}
265
// Out-of-line panic path, marked `#[cold]` so the hot `assert_belongs_to`
// comparison compiles to a small, well-predicted branch.
#[cold]
fn store_id_mismatch() {
    panic!("object used with the wrong store");
}
270
271impl<T> PartialEq for Stored<T> {
272 fn eq(&self, other: &Stored<T>) -> bool {
273 self.store_id == other.store_id && self.index == other.index
274 }
275}
276
// Manual `Copy`: `#[derive(Copy)]` would add an unwanted `T: Copy` bound,
// but the handle only holds an id, an index, and a `PhantomData`.
impl<T> Copy for Stored<T> {}
278
// Manual `Clone` matching the manual `Copy`, so cloning works for any `T`.
impl<T> Clone for Stored<T> {
    fn clone(&self) -> Self {
        *self
    }
}
284
285impl<T> fmt::Debug for Stored<T> {
286 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
287 write!(f, "store={}, index={}", self.store_id.0, self.index())
288 }
289}