1use std::fmt;
2use std::mem::size_of;
3use std::sync::Arc;
4
5use fugue::ir::{Address, AddressValue, AddressSpace};
6
7use crate::traits::{State, StateOps, StateValue};
8
9use thiserror::Error;
10use serde::{Serialize, Deserialize};
11
/// Errors raised by flat-state memory operations.
#[derive(Debug, Error, Clone)]
pub enum Error {
    /// An access hit bytes whose permission bits forbid it; `address`
    /// carries the space the violation occurred in.
    #[error("{access} access violation at {address} of {size} bytes in space `{}`", address.space().index())]
    AccessViolation { address: AddressValue, size: usize, access: Access },
    /// A read range overflowed or ran past the end of the backing buffer.
    #[error("out-of-bounds read of `{size}` bytes at {address}")]
    OOBRead { address: Address, size: usize },
    /// A write range overflowed or ran past the end of the backing buffer.
    #[error("out-of-bounds write of `{size}` bytes at {address}")]
    OOBWrite { address: Address, size: usize },
}
21
/// A flat (contiguous) memory model over a single address space: one value
/// cell per byte offset, with per-byte permissions and per-block dirty
/// tracking to support cheap fork/restore.
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct FlatState<T: StateValue> {
    backing: Vec<T>,          // one cell per address offset
    dirty: DirtyBacking,      // blocks written since last fork/restore
    permissions: Permissions, // per-byte read/write bits
    space: Arc<AddressSpace>, // the space addresses are interpreted in
}
29
// Identity `AsRef`, so generic code can accept `impl AsRef<FlatState<T>>`.
impl<T: StateValue> AsRef<Self> for FlatState<T> {
    #[inline(always)]
    fn as_ref(&self) -> &Self {
        self
    }
}
36
// Identity `AsMut`, mirroring the `AsRef` impl above.
impl<T: StateValue> AsMut<Self> for FlatState<T> {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut Self {
        self
    }
}
43
44impl<T: StateValue> FlatState<T> {
45 pub fn new(space: Arc<AddressSpace>, size: usize) -> Self {
46 Self {
47 backing: vec![T::default(); size],
48 dirty: DirtyBacking::new(size),
49 permissions: Permissions::new(space.clone(), size),
50 space,
51 }
52 }
53
54 pub fn read_only(space: Arc<AddressSpace>, size: usize) -> Self {
55 Self {
56 backing: vec![T::default(); size],
57 dirty: DirtyBacking::new(size),
58 permissions: Permissions::new_with(space.clone(), size, PERM_READ_MASK),
59 space,
60 }
61 }
62
63 pub fn from_vec(space: Arc<AddressSpace>, values: Vec<T>) -> Self {
64 let size = values.len();
65 Self {
66 backing: values,
67 dirty: DirtyBacking::new(size),
68 permissions: Permissions::new(space.clone(), size),
69 space,
70 }
71 }
72
73 pub fn permissions(&self) -> &Permissions {
74 &self.permissions
75 }
76
77 pub fn permissions_mut(&mut self) -> &mut Permissions {
78 &mut self.permissions
79 }
80
81 pub fn address_space(&self) -> Arc<AddressSpace> {
82 self.space.clone()
83 }
84
85 pub fn address_space_ref(&self) -> &AddressSpace {
86 self.space.as_ref()
87 }
88}
89
impl<V: StateValue> State for FlatState<V> {
    type Error = Error;

    /// Produces an independent copy; only the address space handle is shared.
    fn fork(&self) -> Self {
        Self {
            backing: self.backing.clone(),
            dirty: self.dirty.fork(),
            permissions: self.permissions.clone(),
            space: self.space.clone(),
        }
    }

    /// Rolls this state back to `other` by re-copying only the blocks this
    /// state has dirtied since forking, then adopting `other`'s permissions
    /// and dirty-set.
    ///
    /// NOTE(review): assumes `other.backing` covers every dirtied block's
    /// clamped range — a shorter `other` panics on the slice copy. Confirm
    /// callers only restore against same-sized forks.
    fn restore(&mut self, other: &Self) {
        for block in self.dirty.indices.drain(..) {
            let start = usize::from(block.start_address());
            let end = usize::from(block.end_address());

            // The final block may extend past the backing; clamp to its length.
            let real_end = self.backing.len().min(end);

            // Zeroing the whole word clears more than this block's bit, but
            // the entire dirty-set is replaced by `clone_from` below anyway.
            self.dirty.bitsmap[block.index()] = 0;
            self.backing[start..real_end].clone_from_slice(&other.backing[start..real_end]);
        }
        self.permissions.restore(&other.permissions);
        self.dirty.clone_from(&other.dirty);
    }
}
116
117impl<V: StateValue> StateOps for FlatState<V> {
118 type Value = V;
119
120 fn len(&self) -> usize {
121 self.backing.len()
122 }
123
124 fn copy_values<F, T>(&mut self, from: F, to: T, size: usize) -> Result<(), Error>
125 where F: Into<Address>,
126 T: Into<Address> {
127 let from = from.into();
128 let to = to.into();
129
130 let soff = usize::from(from);
131 let doff = usize::from(to);
132
133 if soff > self.len() || soff.checked_add(size).is_none() || soff + size > self.len() {
134 return Err(Error::OOBRead {
135 address: from.clone(),
136 size, });
138 }
139
140 if !self.permissions.all_readable(&from, size) {
141 return Err(Error::AccessViolation {
142 address: AddressValue::new(self.space.clone(), from.into()),
143 size,
144 access: Access::Read,
145 })
146 }
147
148 if doff > self.len() || doff.checked_add(size).is_none() || doff + size > self.len() {
149 return Err(Error::OOBWrite {
150 address: to.clone(),
151 size, });
153 }
154
155 if !self.permissions.all_writable(&to, size) {
156 return Err(Error::AccessViolation {
157 address: AddressValue::new(self.space.clone(), to.into()),
158 size,
159 access: Access::Write,
160 })
161 }
162
163 if doff == soff {
164 return Ok(())
165 }
166
167 if doff >= soff + size {
168 let (shalf, dhalf) = self.backing.split_at_mut(doff);
169 dhalf.clone_from_slice(&shalf[soff..(soff + size)]);
170 } else if doff + size <= soff {
171 let (dhalf, shalf) = self.backing.split_at_mut(soff);
172 dhalf[doff..(doff+size)].clone_from_slice(&shalf);
173 } else { if doff < soff {
175 for i in 0..size {
176 unsafe {
177 let dptr = self.backing.as_mut_ptr().add(doff + i);
178 let sptr = self.backing.as_ptr().add(soff + i);
179 (&mut *dptr).clone_from(&*sptr);
180 }
181 }
182 } else {
183 for i in (0..size).rev() {
184 unsafe {
185 let dptr = self.backing.as_mut_ptr().add(doff + i);
186 let sptr = self.backing.as_ptr().add(soff + i);
187 (&mut *dptr).clone_from(&*sptr);
188 }
189 }
190 }
191 }
192
193 self.dirty.dirty_region(&to, size);
194
195 Ok(())
196 }
197
198 fn get_values<A>(&self, address: A, values: &mut [Self::Value]) -> Result<(), Error>
199 where A: Into<Address> {
200 let address = address.into();
201 let size = values.len();
202 let start = usize::from(address);
203 let end = start.checked_add(size);
204
205 if start > self.len() || end.is_none() || end.unwrap() > self.len() {
206 return Err(Error::OOBRead {
207 address: address.clone(),
208 size: values.len(),
209 });
210 }
211
212 if !self.permissions.all_readable(&address, size) {
213 return Err(Error::AccessViolation {
214 address: AddressValue::new(self.space.clone(), address.into()),
215 size,
216 access: Access::Read,
217 })
218 }
219
220 let end = end.unwrap();
221
222 values[..].clone_from_slice(&self.backing[start..end]);
223
224 Ok(())
225 }
226
227 fn view_values<A>(&self, address: A, size: usize) -> Result<&[Self::Value], Error>
228 where A: Into<Address> {
229 let address = address.into();
230 let start = usize::from(address);
231 let end = start.checked_add(size);
232
233 if start > self.len() || end.is_none() || end.unwrap() > self.len() {
234 return Err(Error::OOBRead {
235 address: address.clone(),
236 size,
237 });
238 }
239
240 if !self.permissions.all_readable(&address, size) {
241 return Err(Error::AccessViolation {
242 address: AddressValue::new(self.space.clone(), address.into()),
243 size,
244 access: Access::Read,
245 })
246 }
247
248 let end = end.unwrap();
249
250 Ok(&self.backing[start..end])
251 }
252
253 fn view_values_mut<A>(&mut self, address: A, size: usize) -> Result<&mut [Self::Value], Error>
254 where A: Into<Address> {
255 let address = address.into();
256 let start = usize::from(address);
257 let end = start.checked_add(size);
258
259 if start > self.len() || end.is_none() || end.unwrap() > self.len() {
260 return Err(Error::OOBRead {
261 address: address.clone(),
262 size,
263 });
264 }
265
266 if !self.permissions.all_readable_and_writable(&address, size) {
267 return Err(Error::AccessViolation {
268 address: AddressValue::new(self.space.clone(), address.into()),
269 size,
270 access: Access::ReadWrite,
271 })
272 }
273
274 let end = end.unwrap();
275
276 self.dirty.dirty_region(&address, size);
277
278 Ok(&mut self.backing[start..end])
279 }
280
281 fn set_values<A>(&mut self, address: A, values: &[Self::Value]) -> Result<(), Error>
282 where A: Into<Address> {
283 let address = address.into();
284 let size = values.len();
285 let start = usize::from(address);
286 let end = start.checked_add(size);
287
288 if start > self.len() || end.is_none() || end.unwrap() > self.len() {
289 return Err(Error::OOBWrite {
290 address: address.clone(),
291 size,
292 });
293 }
294
295 if !self.permissions.all_writable(&address, size) {
296 return Err(Error::AccessViolation {
297 address: AddressValue::new(self.space.clone(), address.into()),
298 size,
299 access: Access::Write,
300 })
301 }
302
303 let end = end.unwrap();
304
305 self.backing[start..end].clone_from_slice(values);
306 self.dirty.dirty_region(&address, size);
307
308 Ok(())
309 }
310}
311
/// Identifier of a `BLOCK_SIZE`-byte chunk of the backing, used for dirty
/// tracking; the wrapped value is `address / BLOCK_SIZE`.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
#[repr(transparent)]
pub struct Block(u64);

/// Number of bytes covered by one dirty-tracking block.
pub const BLOCK_SIZE: u64 = 64;
317
// Maps a borrowed address to the block containing it.
impl From<&'_ Address> for Block {
    fn from(t: &Address) -> Block {
        Self(u64::from(*t) / BLOCK_SIZE)
    }
}
323
// Maps an owned address to the block containing it.
impl From<Address> for Block {
    fn from(t: Address) -> Block {
        Self(u64::from(t) / BLOCK_SIZE)
    }
}
329
// Wraps a raw block number (NOT an address — no division by BLOCK_SIZE here).
impl From<u64> for Block {
    fn from(t: u64) -> Block {
        Self(t)
    }
}
335
impl Block {
    /// Bit position of this block within its `bitsmap` word.
    ///
    /// NOTE(review): `size_of::<Self>()` is 8 (bytes), so only the low 8 bits
    /// of each 64-bit word are ever used. This is internally consistent with
    /// `index()` and with `DirtyBacking::new`'s sizing, but wastes 56 bits
    /// per word — presumably a bit count was intended; confirm before
    /// changing, since serialized `DirtyBacking` layouts depend on it.
    #[inline]
    fn bit(&self) -> usize {
        self.0 as usize % size_of::<Self>()
    }

    /// Index of the `bitsmap` word that holds this block's dirty bit.
    #[inline]
    fn index(&self) -> usize {
        self.0 as usize / size_of::<Self>()
    }

    /// First address covered by this block (inclusive).
    #[inline]
    fn start_address(&self) -> Address {
        (self.0 * BLOCK_SIZE).into()
    }

    /// One past the last address covered by this block (exclusive).
    #[inline]
    fn end_address(&self) -> Address {
        ((self.0 + 1) * BLOCK_SIZE).into()
    }
}
357
/// Records which blocks of a `FlatState` have been written since the last
/// fork/restore: `indices` lists each dirtied block once, in the order it
/// was first dirtied; `bitsmap` de-duplicates membership checks.
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct DirtyBacking {
    indices: Vec<Block>,
    bitsmap: Vec<u64>,
}
363
364impl DirtyBacking {
365 pub fn new(size: usize) -> Self {
366 let backing_size = 1 + (size as u64 / BLOCK_SIZE) as usize;
367 Self {
368 indices: Vec::with_capacity(backing_size),
369 bitsmap: vec![0 as u64; 1 + backing_size / size_of::<u64>()],
370 }
371 }
372
373 #[inline]
374 pub fn fork(&self) -> Self {
375 self.clone()
382 }
383
384 #[inline]
385 pub fn dirty<B: Into<Block>>(&mut self, block: B) {
386 let block = block.into();
387 let index = block.index();
388 let check = 1 << block.bit();
389
390 if self.bitsmap[index] & check == 0 {
391 self.bitsmap[index] |= check;
392 self.indices.push(block);
393 }
394 }
395
396 #[inline]
397 pub fn dirty_region(&mut self, start: &Address, size: usize) {
398 let sblock = Block::from(start).0;
399 let eblock = Block::from(*start + size as u64).0;
400
401 for block in sblock..=eblock {
402 self.dirty(block);
403 }
404 }
405}
406
/// Kind of memory access, used for permission checks and error reporting.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum Access {
    Read,
    Write,
    ReadWrite,
}
413
414impl fmt::Display for Access {
415 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
416 match self {
417 Access::Read => write!(f, "read"),
418 Access::Write => write!(f, "write"),
419 Access::ReadWrite => write!(f, "read/write")
420 }
421 }
422}
423
424impl Access {
425 #[inline]
426 pub fn is_read(&self) -> bool {
427 matches!(self, Access::Read)
428 }
429
430 #[inline]
431 pub fn is_write(&self) -> bool {
432 matches!(self, Access::Write)
433 }
434
435 #[inline]
436 pub fn is_read_write(&self) -> bool {
437 matches!(self, Access::ReadWrite)
438 }
439}
440
/// Per-byte read/write permission bitmap over an address space; each byte
/// owns two adjacent bits in `bitsmap` (see the `PERM_*` constants).
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct Permissions {
    bitsmap: Vec<u64>,
    space: Arc<AddressSpace>,
}
446
// Within each byte's 2-bit field: the read bit sits one above the write bit;
// a read/write query starts at the field base and covers both bits (0b11).
const PERM_READ_OFF: usize = 1;
const PERM_WRITE_OFF: usize = 0;
const PERM_READ_WRITE_OFF: usize = 0;

// Word-wide fill patterns: odd bits (reads) / even bits (writes) all set.
const PERM_READ_MASK: u64 = 0xAAAAAAAAAAAAAAAA;
const PERM_WRITE_MASK: u64 = 0x5555555555555555;

// Bytes covered per 64-bit bitmap word: 64 bits / 2 bits-per-byte = 32.
const PERM_SCALE: usize = (size_of::<u64>() << 3) >> 1;
// Left-shift turning a byte offset into its bit offset (i.e. x2).
const PERM_SELECT: usize = 1;
456
457impl Permissions {
458 pub fn new(space: Arc<AddressSpace>, size: usize) -> Self {
459 Self::new_with(space, size, PERM_READ_MASK | PERM_WRITE_MASK)
460 }
461
462 #[inline]
463 pub fn new_with(space: Arc<AddressSpace>, size: usize, mask: u64) -> Self {
464 Self {
465 bitsmap: vec![mask; 1 + size / PERM_SCALE],
468 space,
469 }
470 }
471
472 pub fn restore(&mut self, other: &Permissions) {
473 for (t, s) in self.bitsmap.iter_mut().zip(other.bitsmap.iter()) {
474 *t = *s;
475 }
476 }
477
478 #[inline]
479 pub fn is_marked(&self, address: &Address, access: Access) -> bool {
480 let address = u64::from(address);
481 let index = (address / PERM_SCALE as u64) as usize;
482 let bit = ((address % PERM_SCALE as u64) as usize) << PERM_SELECT;
483 let check = if access.is_read_write() {
484 0b11 << (bit + PERM_READ_WRITE_OFF)
485 } else {
486 1 << if access.is_read() {
487 bit + PERM_READ_OFF
488 } else {
489 bit + PERM_WRITE_OFF
490 }
491 };
492
493 self.bitsmap[index] & check == check
494 }
495
496 #[inline]
497 pub fn is_readable(&self, address: &Address) -> bool {
498 self.is_marked(address, Access::Read)
499 }
500
501 #[inline]
502 pub fn is_writable(&self, address: &Address) -> bool {
503 self.is_marked(address, Access::Write)
504 }
505
506 #[inline]
507 pub fn is_readable_and_writable(&self, address: &Address) -> bool {
508 self.is_marked(address, Access::ReadWrite)
509 }
510
511 #[inline]
512 pub fn all_marked(&self, address: &Address, size: usize, access: Access) -> bool {
513 let start = u64::from(address);
514 for addr in start..(start + size as u64) {
515 if !self.is_marked(&Address::new(self.space.as_ref(), addr), access) {
516 return false
517 }
518 }
519 true
520 }
521
522 #[inline]
523 pub fn all_readable(&self, address: &Address, size: usize) -> bool {
524 self.all_marked(address, size, Access::Read)
525 }
526
527 #[inline]
528 pub fn all_writable(&self, address: &Address, size: usize) -> bool {
529 self.all_marked(address, size, Access::Write)
530 }
531
532 #[inline]
533 pub fn all_readable_and_writable(&self, address: &Address, size: usize) -> bool {
534 self.all_marked(address, size, Access::ReadWrite)
535 }
536
537 #[inline]
538 pub fn clear_byte(&mut self, address: &Address, access: Access) {
539 let address = u64::from(address);
540 let index = (address / PERM_SCALE as u64) as usize;
541 let bit = ((address % PERM_SCALE as u64) as usize) << PERM_SELECT;
542 let check = if access.is_read_write() {
543 0b11 << (bit + PERM_READ_WRITE_OFF)
544 } else {
545 1 << if access.is_read() {
546 bit + PERM_READ_OFF
547 } else {
548 bit + PERM_WRITE_OFF
549 }
550 };
551 self.bitsmap[index] &= !check;
552 }
553
554 #[inline]
555 pub fn set_byte(&mut self, address: &Address, access: Access) {
556 let address = u64::from(address);
557 let index = (address / PERM_SCALE as u64) as usize;
558 let bit = ((address % PERM_SCALE as u64) as usize) << PERM_SELECT;
559 let check = if access.is_read_write() {
560 0b11 << (bit + PERM_READ_WRITE_OFF)
561 } else {
562 1 << if access.is_read() {
563 bit + PERM_READ_OFF
564 } else {
565 bit + PERM_WRITE_OFF
566 }
567 };
568
569 self.bitsmap[index] |= check;
570 }
571
572 #[inline]
573 pub fn clear_region(&mut self, address: &Address, size: usize, access: Access) {
574 let start = u64::from(address);
575 for addr in start..(start + size as u64) {
576 self.clear_byte(&Address::new(self.space.as_ref(), addr), access);
577 }
578 }
579
580 #[inline]
581 pub fn set_region(&mut self, address: &Address, size: usize, access: Access) {
582 let start = u64::from(address);
583 for addr in start..(start + size as u64) {
584 self.set_byte(&Address::new(self.space.as_ref(), addr), access);
585 }
586 }
587}