use alloc::{vec, vec::Vec};
use core::{
    cmp::{max, min},
    mem,
    ops::{BitAnd, Not, Range},
};

use crate::error::{ExitException, ExitFatal};
#[allow(unused_imports)]
use crate::uint::{U256, U256Ext};

/// A sequentially addressed byte memory with a hard size limit.
#[derive(Clone, Debug)]
pub struct Memory {
    data: Vec<u8>,
    effective_len: U256,
    limit: usize,
}

impl Memory {
    /// Create a new memory with the given size limit, in bytes.
    #[must_use]
    pub fn new(limit: usize) -> Self {
        Self {
            data: Vec::new(),
            effective_len: U256::ZERO,
            limit,
        }
    }

    /// Memory limit, in bytes.
    #[must_use]
    pub const fn limit(&self) -> usize {
        self.limit
    }

    /// Length of the underlying buffer, in bytes.
    #[must_use]
    pub fn len(&self) -> usize {
        self.data.len()
    }

    /// Effective length, as tracked by the resize operations.
    #[must_use]
    pub const fn effective_len(&self) -> U256 {
        self.effective_len
    }

    /// Whether the underlying buffer is empty.
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Reference to the underlying buffer.
    #[must_use]
    pub const fn data(&self) -> &Vec<u8> {
        &self.data
    }

    /// Move the underlying buffer into `other`, leaving this memory empty.
    pub(crate) fn swap_and_clear(&mut self, other: &mut Vec<u8>) {
        mem::swap(&mut self.data, other);
        self.data = Vec::new();
        self.effective_len = U256::ZERO;
    }

    /// Resize the memory so that `offset..offset + len` is covered, rounding
    /// the effective length up to the next multiple of 32. A zero `len` is a
    /// no-op; an overflowing range is an error.
    pub fn resize_offset(&mut self, offset: U256, len: U256) -> Result<(), ExitException> {
        if len == U256::ZERO {
            return Ok(());
        }

        offset
            .checked_add(len)
            .map_or(Err(ExitException::InvalidRange), |end| self.resize_end(end))
    }

    /// Resize the memory so that `end` is covered, rounding the effective
    /// length up to the next multiple of 32.
    pub fn resize_end(&mut self, end: U256) -> Result<(), ExitException> {
        if end > self.effective_len {
            let new_end = next_multiple_of_32(end).ok_or(ExitException::InvalidRange)?;
            self.effective_len = new_end;
        }

        Ok(())
    }

    /// Replace the buffer with the contents of `return_range`, zero-filling
    /// any part of the range that lies beyond the current data.
    pub fn resize_to_range(&mut self, return_range: Range<U256>) {
        let ret = if return_range.start > U256::USIZE_MAX {
            vec![0; (return_range.end - return_range.start).low_usize()]
        } else if return_range.end > U256::USIZE_MAX {
            let mut ret = self.get(
                return_range.start.low_usize(),
                usize::MAX - return_range.start.low_usize(),
            );
            while ret.len() < (return_range.end - return_range.start).low_usize() {
                ret.push(0);
            }
            ret
        } else {
            self.get(
                return_range.start.low_usize(),
                (return_range.end - return_range.start).low_usize(),
            )
        };
        self.data = ret;
        self.effective_len = return_range.end - return_range.start;
    }

    /// Read `size` bytes starting at `offset`, zero-padding any part of the
    /// read that falls outside the current data.
    #[must_use]
    pub fn get(&self, offset: usize, size: usize) -> Vec<u8> {
        let mut ret = vec![0; size];

        #[allow(clippy::needless_range_loop)]
        for index in 0..size {
            let position = if let Some(position) = offset.checked_add(index) {
                position
            } else {
                break;
            };

            if position >= self.data.len() {
                break;
            }

            ret[index] = self.data[position];
        }

        ret
    }

    /// Write `value` at `offset`, zero-filling up to `target_size` bytes
    /// (defaulting to `value.len()`). Fails if the write would exceed the
    /// memory limit.
    pub fn set(
        &mut self,
        offset: usize,
        value: &[u8],
        target_size: Option<usize>,
    ) -> Result<(), ExitFatal> {
        let target_size = target_size.unwrap_or(value.len());
        if target_size == 0 {
            return Ok(());
        }

        if offset
            .checked_add(target_size)
            .map(|pos| pos > self.limit)
            .unwrap_or(true)
        {
            return Err(ExitFatal::NotSupported);
        }

        if self.data.len() < offset + target_size {
            self.data.resize(offset + target_size, 0);
        }

        if target_size > value.len() {
            self.data[offset..(value.len() + offset)].clone_from_slice(value);
            for index in value.len()..target_size {
                self.data[offset + index] = 0;
            }
        } else {
            self.data[offset..(target_size + offset)].clone_from_slice(&value[..target_size]);
        }

        Ok(())
    }

    /// Copy `len` bytes of `data`, starting at `data_offset`, into memory at
    /// `memory_offset`. Offsets and length are `U256`; values outside the
    /// addressable range fail with `ExitFatal::NotSupported`, and any part of
    /// the read that runs past `data` is zero-filled.
    pub fn copy_large(
        &mut self,
        memory_offset: U256,
        data_offset: U256,
        len: U256,
        data: &[u8],
    ) -> Result<(), ExitFatal> {
        if len.is_zero() {
            return Ok(());
        }

        let memory_offset = if memory_offset > U256::USIZE_MAX {
            return Err(ExitFatal::NotSupported);
        } else {
            memory_offset.low_usize()
        };

        let ulen = if len > U256::USIZE_MAX {
            return Err(ExitFatal::NotSupported);
        } else {
            len.low_usize()
        };

        let data: &[u8] = data_offset.checked_add(len).map_or(&[], |end| {
            if end > U256::USIZE_MAX {
                &[]
            } else {
                let data_offset = data_offset.low_usize();
                let end = end.low_usize();

                if data_offset > data.len() {
                    &[]
                } else {
                    &data[data_offset..min(end, data.len())]
                }
            }
        });

        self.set(memory_offset, data, Some(ulen))
    }

    /// Copy `len` bytes within memory from `src` to `dst`, growing the buffer
    /// first if either range extends past the current length.
    pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
        let resize_offset = max(dst, src);
        if self.data.len() < resize_offset + len {
            self.data.resize(resize_offset + len, 0);
        }
        self.data.copy_within(src..src + len, dst);
    }
}

/// Round `x` up to the next multiple of 32, or return `None` on overflow.
#[inline]
fn next_multiple_of_32(x: U256) -> Option<U256> {
    let r = x.low_u32().bitand(31).not().wrapping_add(1).bitand(31);
    x.checked_add(U256::from_u32(r))
}

#[cfg(test)]
mod tests {
    use super::{Memory, U256, U256Ext, next_multiple_of_32};

    #[test]
    fn test_next_multiple_of_32() {
        for i in 0..32 {
            let x = U256::from_usize(i * 32);
            assert_eq!(Some(x), next_multiple_of_32(x));
        }

        for x in 0..1024 {
            if x % 32 == 0 {
                continue;
            }
            let next_multiple = x + 32 - (x % 32);
            assert_eq!(
                Some(U256::from_usize(next_multiple)),
                next_multiple_of_32(U256::from_usize(x))
            );
        }

        let last_multiple_of_32 = U256::MAX & !U256::from_usize(31);
        for i in 0..63 {
            let x = U256::MAX - U256::from_usize(i);
            if x > last_multiple_of_32 {
                assert_eq!(None, next_multiple_of_32(x));
            } else {
                assert_eq!(Some(last_multiple_of_32), next_multiple_of_32(x));
            }
        }
    }
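
    // A minimal illustrative sketch (not from the original suite): shows how
    // `resize_offset` rounds the effective length up to the next multiple of
    // 32 and treats a zero length as a no-op. Assumes the `U256Ext` helpers
    // (`from_usize`, `ZERO`, `MAX`) behave as used by the tests above.
    #[test]
    fn test_resize_offset_rounds_up() {
        let mut memory = Memory::new(1024usize);
        assert_eq!(memory.effective_len(), U256::ZERO);

        // Covering bytes 10..15 requires one full 32-byte word.
        memory
            .resize_offset(U256::from_usize(10), U256::from_usize(5))
            .unwrap();
        assert_eq!(memory.effective_len(), U256::from_usize(32));

        // A zero-length resize returns Ok without touching the effective length.
        memory.resize_offset(U256::MAX, U256::ZERO).unwrap();
        assert_eq!(memory.effective_len(), U256::from_usize(32));
    }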

    #[test]
    fn test_memory_copy_works() {
        let mut memory = Memory::new(100usize);

        memory.set(3usize, &[1u8, 2u8, 3u8, 4u8], None).unwrap();
        assert_eq!(memory.data(), &[0u8, 0u8, 0u8, 1u8, 2u8, 3u8, 4u8].to_vec());

        memory.copy(0usize, 3usize, 1usize);

        assert_eq!(memory.data(), &[1u8, 0u8, 0u8, 1u8, 2u8, 3u8, 4u8].to_vec());
    }
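
    // An illustrative sketch (not part of the original suite) of `set` with an
    // explicit `target_size` and of `get`'s zero-padding past the end of the
    // buffer.
    #[test]
    fn test_memory_set_get_padding() {
        let mut memory = Memory::new(100usize);

        // Write 3 bytes but reserve 5: the 2 remaining bytes are zeroed.
        memory.set(0usize, &[1u8, 2u8, 3u8], Some(5usize)).unwrap();
        assert_eq!(memory.data(), &[1u8, 2u8, 3u8, 0u8, 0u8].to_vec());

        // Reads that run past the buffer are padded with zeros up to `size`.
        assert_eq!(memory.get(3usize, 4usize), [0u8, 0u8, 0u8, 0u8].to_vec());
    }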

    #[test]
    fn test_memory_copy_resize() {
        let mut memory = Memory::new(100usize);

        memory.set(3usize, &[1u8, 2u8, 3u8, 4u8], None).unwrap();
        assert_eq!(memory.data(), &[0u8, 0u8, 0u8, 1u8, 2u8, 3u8, 4u8].to_vec());

        memory.copy(3usize, 6usize, 2usize);

        assert_eq!(
            memory.data(),
            &[0u8, 0u8, 0u8, 4u8, 0u8, 3u8, 4u8, 0u8].to_vec()
        );
    }
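
    // An illustrative sketch (not from the original suite) of `copy_large`,
    // which takes `U256` offsets: only the slice of `data` that actually lies
    // inside `data_offset..data_offset + len` is copied, and the remainder of
    // the target range is zero-filled.
    #[test]
    fn test_memory_copy_large_truncates_and_pads() {
        let mut memory = Memory::new(100usize);

        memory
            .copy_large(
                U256::from_usize(1),
                U256::from_usize(2),
                U256::from_usize(3),
                &[10u8, 20u8, 30u8, 40u8],
            )
            .unwrap();

        // Only bytes 2..4 of `data` exist, so the third target byte is zeroed.
        assert_eq!(memory.data(), &[0u8, 30u8, 40u8, 0u8].to_vec());
    }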
}