1use std::sync::atomic::{AtomicU64, Ordering};
18
19use hyperlight_common::layout::{scratch_base_gpa, scratch_base_gva};
20use hyperlight_common::vmem::{self, BasicMapping, CowMapping, Mapping, MappingKind, PAGE_SIZE};
21use tracing::{Span, instrument};
22
23use crate::HyperlightError::MemoryRegionSizeMismatch;
24use crate::Result;
25use crate::hypervisor::regs::CommonSpecialRegisters;
26use crate::mem::exe::LoadInfo;
27use crate::mem::layout::SandboxMemoryLayout;
28use crate::mem::memory_region::MemoryRegion;
29use crate::mem::mgr::{GuestPageTableBuffer, SnapshotSharedMemory};
30use crate::mem::shared_mem::{ReadonlySharedMemory, SharedMemory};
31use crate::sandbox::SandboxConfiguration;
32use crate::sandbox::uninitialized::{GuestBinary, GuestEnvironment};
33
/// Monotonic counter used to hand out a unique `sandbox_id` to each snapshot
/// built by `Snapshot::from_env`. `Relaxed` ordering is sufficient because
/// only uniqueness matters, not cross-thread ordering of the ids.
pub(super) static SANDBOX_CONFIGURATION_COUNTER: AtomicU64 = AtomicU64::new(0);
35
/// What the sandbox should do next when execution (re)enters it from this
/// snapshot.
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum NextAction {
    /// Run guest initialisation starting at the given guest address
    /// (entrypoint of the loaded binary; see `Snapshot::from_env`).
    Initialise(u64),
    /// Dispatch at the given guest address.
    // NOTE(review): presumably the target of a guest function call — confirm
    // against the code that consumes `NextAction`.
    Call(u64),
    /// No action; test-only placeholder.
    #[cfg(test)]
    None,
}
58
/// An immutable, content-hashed snapshot of a sandbox's memory and resume
/// state. Equality between snapshots is defined by `hash` alone.
pub struct Snapshot {
    // Id of the sandbox this snapshot belongs to.
    sandbox_id: u64,
    // Layout describing where each region (code, PEB, page tables, …) lives
    // inside `memory`.
    layout: crate::mem::layout::SandboxMemoryLayout,
    // Read-only copy of the snapshotted guest memory image.
    memory: ReadonlySharedMemory,
    // Extra host-mapped regions folded into `hash`. Both constructors in this
    // file currently build this from `Vec::new()`, so it is always empty here.
    regions: Vec<MemoryRegion>,
    // Load metadata of the guest executable, handed back via `load_info()`.
    load_info: LoadInfo,
    // blake3 digest of `memory` plus the region descriptors; backs `PartialEq`.
    hash: [u8; 32],
    // Guest-virtual address of the stack top to install when resuming.
    stack_top_gva: u64,

    // Saved special registers; `None` for snapshots built straight from a
    // binary (which have never executed).
    sregs: Option<CommonSpecialRegisters>,

    // Action to take when the snapshot is entered.
    entrypoint: NextAction,
}
// Lets APIs generic over `AsRef<Snapshot>` accept a `Snapshot` directly.
impl core::convert::AsRef<Snapshot> for Snapshot {
    fn as_ref(&self) -> &Self {
        self
    }
}
111impl hyperlight_common::vmem::TableReadOps for Snapshot {
112 type TableAddr = u64;
113 fn entry_addr(addr: u64, offset: u64) -> u64 {
114 addr + offset
115 }
116 unsafe fn read_entry(&self, addr: u64) -> u64 {
117 let addr = addr as usize;
118 let Some(pte_bytes) = self.memory.as_slice().get(addr..addr + 8) else {
119 return 0;
124 };
125 #[allow(clippy::unwrap_used)]
128 let n: [u8; 8] = pte_bytes.try_into().unwrap();
129 u64::from_ne_bytes(n)
130 }
131 fn to_phys(addr: u64) -> u64 {
132 addr
133 }
134 fn from_phys(addr: u64) -> u64 {
135 addr
136 }
137 fn root_table(&self) -> u64 {
138 self.root_pt_gpa()
139 }
140}
141
142fn hash(memory: &[u8], regions: &[MemoryRegion]) -> Result<[u8; 32]> {
147 let mut hasher = blake3::Hasher::new();
148 hasher.update(memory);
149 for rgn in regions {
150 hasher.update(&usize::to_le_bytes(rgn.guest_region.start));
151 let guest_len = rgn.guest_region.end - rgn.guest_region.start;
152 #[allow(clippy::useless_conversion)]
153 let host_start_addr: usize = rgn.host_region.start.into();
154 #[allow(clippy::useless_conversion)]
155 let host_end_addr: usize = rgn.host_region.end.into();
156 hasher.update(&usize::to_le_bytes(host_start_addr));
157 let host_len = host_end_addr - host_start_addr;
158 if guest_len != host_len {
159 return Err(MemoryRegionSizeMismatch(
160 host_len,
161 guest_len,
162 format!("{:?}", rgn),
163 ));
164 }
165 hasher.update(&usize::to_le_bytes(guest_len));
169 hasher.update(&u32::to_le_bytes(rgn.flags.bits()));
170 }
171 Ok(hasher.finalize().into())
175}
176
177pub(crate) fn access_gpa<'a>(
178 snap: &'a [u8],
179 scratch: &'a [u8],
180 layout: SandboxMemoryLayout,
181 gpa: u64,
182) -> Option<(&'a [u8], usize)> {
183 let resolved = layout.resolve_gpa(gpa, &[])?.with_memories(snap, scratch);
184 Some((resolved.base.as_ref(), resolved.offset))
185}
186
/// Read-only page-table walker view over a snapshot image (`snap`) combined
/// with the live scratch memory (`scratch`); guest-physical addresses are
/// resolved through `layout`, and the walk starts at the table at `root`.
pub(crate) struct SharedMemoryPageTableBuffer<'a> {
    // Snapshot memory contents.
    snap: &'a [u8],
    // Scratch memory contents.
    scratch: &'a [u8],
    // Layout used to resolve GPAs into one of the two slices above.
    layout: SandboxMemoryLayout,
    // GPA of the root page table.
    root: u64,
}
193impl<'a> SharedMemoryPageTableBuffer<'a> {
194 pub(crate) fn new(
195 snap: &'a [u8],
196 scratch: &'a [u8],
197 layout: SandboxMemoryLayout,
198 root: u64,
199 ) -> Self {
200 Self {
201 snap,
202 scratch,
203 layout,
204 root,
205 }
206 }
207}
208impl<'a> hyperlight_common::vmem::TableReadOps for SharedMemoryPageTableBuffer<'a> {
209 type TableAddr = u64;
210 fn entry_addr(addr: u64, offset: u64) -> u64 {
211 addr + offset
212 }
213 unsafe fn read_entry(&self, addr: u64) -> u64 {
214 let memoff = access_gpa(self.snap, self.scratch, self.layout, addr);
215 let Some(pte_bytes) = memoff.and_then(|(mem, off)| mem.get(off..off + 8)) else {
216 return 0;
221 };
222 #[allow(clippy::unwrap_used)]
225 let n: [u8; 8] = pte_bytes.try_into().unwrap();
226 u64::from_ne_bytes(n)
227 }
228 fn to_phys(addr: u64) -> u64 {
229 addr
230 }
231 fn from_phys(addr: u64) -> u64 {
232 addr
233 }
234 fn root_table(&self) -> u64 {
235 self.root
236 }
237}
// Lets APIs generic over `AsRef<SharedMemoryPageTableBuffer>` accept the
// buffer directly.
impl<'a> core::convert::AsRef<SharedMemoryPageTableBuffer<'a>> for SharedMemoryPageTableBuffer<'a> {
    fn as_ref(&self) -> &Self {
        self
    }
}
/// Walks the guest page tables rooted at `root_pt` and collects every live
/// mapping together with the first page of its backing bytes, excluding the
/// scratch window and (when page tables are host-built) the snapshot
/// page-table GVA window.
fn filtered_mappings<'a>(
    snap: &'a [u8],
    scratch: &'a [u8],
    regions: &[MemoryRegion],
    layout: SandboxMemoryLayout,
    root_pt: u64,
) -> Vec<(Mapping, &'a [u8])> {
    let op = SharedMemoryPageTableBuffer::new(snap, scratch, layout, root_pt);
    // SAFETY: NOTE(review) — the walker only reads entries via `op`'s
    // bounds-checked `read_entry`; confirm this satisfies `virt_to_phys`'s
    // documented contract.
    unsafe {
        hyperlight_common::vmem::virt_to_phys(&op, 0, hyperlight_common::layout::MAX_GVA as u64)
    }
    .filter_map(move |mapping| {
        // Pages at or above the scratch base are not part of the snapshot.
        if mapping.virt_base >= scratch_base_gva(layout.get_scratch_size()) {
            return None;
        }
        // Skip the snapshot page-table window; it is regenerated rather
        // than copied (only relevant when the host builds page tables).
        #[cfg(not(feature = "nanvix-unstable"))]
        if mapping.virt_base >= hyperlight_common::layout::SNAPSHOT_PT_GVA_MIN as u64
            && mapping.virt_base <= hyperlight_common::layout::SNAPSHOT_PT_GVA_MAX as u64
        {
            return None;
        }
        // Drop mappings whose physical page cannot be resolved to a full
        // page of backing bytes.
        let contents = unsafe { guest_page(snap, scratch, regions, layout, mapping.phys_base) }?;
        Some((mapping, contents))
    })
    .collect()
}
272
273unsafe fn guest_page<'a>(
281 snap: &'a [u8],
282 scratch: &'a [u8],
283 regions: &[MemoryRegion],
284 layout: SandboxMemoryLayout,
285 gpa: u64,
286) -> Option<&'a [u8]> {
287 let resolved = layout
288 .resolve_gpa(gpa, regions)?
289 .with_memories(snap, scratch);
290 if resolved.as_ref().len() < PAGE_SIZE {
291 return None;
292 }
293 Some(&resolved.as_ref()[..PAGE_SIZE])
294}
295
296fn map_specials(pt_buf: &GuestPageTableBuffer, scratch_size: usize) {
297 let mapping = Mapping {
299 phys_base: scratch_base_gpa(scratch_size),
300 virt_base: scratch_base_gva(scratch_size),
301 len: scratch_size as u64,
302 kind: MappingKind::Basic(BasicMapping {
303 readable: true,
304 writable: true,
305 executable: false,
308 }),
309 };
310 unsafe { vmem::map(pt_buf, mapping) };
311}
312
impl Snapshot {
    /// Builds the initial snapshot for a sandbox directly from a guest
    /// environment (binary plus optional init-data blob), without running
    /// the guest: loads the executable into a fresh memory image, writes the
    /// PEB and init data, builds guest page tables (unless the
    /// `nanvix-unstable` feature is enabled) and hashes the result.
    ///
    /// # Errors
    /// Fails when the binary cannot be read or loaded, when its embedded
    /// version does not match this crate's version, or when a layout/memory
    /// operation fails.
    pub(crate) fn from_env<'a, 'b>(
        env: impl Into<GuestEnvironment<'a, 'b>>,
        cfg: SandboxConfiguration,
    ) -> Result<Self> {
        let env = env.into();
        let mut bin = env.guest_binary;
        // Normalise the binary reference before loading it.
        bin.canonicalize()?;
        let blob = env.init_data;

        use crate::mem::exe::ExeInfo;
        let exe_info = match bin {
            GuestBinary::FilePath(bin_path_str) => ExeInfo::from_file(&bin_path_str)?,
            GuestBinary::Buffer(buffer) => ExeInfo::from_buf(buffer)?,
        };

        // Reject guests built against a different hyperlight version; a
        // guest that embeds no version string is accepted as-is.
        let host_version = env!("CARGO_PKG_VERSION");
        if let Some(v) = exe_info.guest_bin_version()
            && v != host_version
        {
            return Err(crate::HyperlightError::GuestBinVersionMismatch {
                guest_bin_version: v.to_string(),
                host_version: host_version.to_string(),
            });
        }

        let guest_blob_size = blob.as_ref().map(|b| b.data.len()).unwrap_or(0);
        let guest_blob_mem_flags = blob.as_ref().map(|b| b.permissions);

        #[cfg_attr(feature = "nanvix-unstable", allow(unused_mut))]
        let mut layout = crate::mem::layout::SandboxMemoryLayout::new(
            cfg,
            exe_info.loaded_size(),
            guest_blob_size,
            guest_blob_mem_flags,
        )?;

        let load_addr = layout.get_guest_code_address() as u64;
        let entrypoint_offset: u64 = exe_info.entrypoint().into();

        // Zero-filled image covering the whole sandbox layout.
        let mut memory = vec![0; layout.get_memory_size()?];

        let load_info = exe_info.load(
            load_addr.try_into()?,
            &mut memory[layout.get_guest_code_offset()..],
        )?;

        layout.write_peb(&mut memory)?;

        // Write the optional init-data blob; `transpose` surfaces a write
        // error while tolerating an absent blob.
        blob.map(|x| layout.write_init_data(&mut memory, x.data))
            .transpose()?;

        #[cfg(not(feature = "nanvix-unstable"))]
        {
            // Build identity-mapped guest page tables covering every layout
            // region; writable regions are mapped copy-on-write.
            let pt_buf = GuestPageTableBuffer::new(layout.get_pt_base_gpa() as usize);

            use crate::mem::memory_region::{GuestMemoryRegion, MemoryRegionFlags};

            for rgn in layout.get_memory_regions_::<GuestMemoryRegion>(())?.iter() {
                let readable = rgn.flags.contains(MemoryRegionFlags::READ);
                let executable = rgn.flags.contains(MemoryRegionFlags::EXECUTE);
                let writable = rgn.flags.contains(MemoryRegionFlags::WRITE);
                let kind = if writable {
                    MappingKind::Cow(CowMapping {
                        readable,
                        executable,
                    })
                } else {
                    MappingKind::Basic(BasicMapping {
                        readable,
                        writable: false,
                        executable,
                    })
                };
                // Identity mapping: GVA == GPA == region start.
                let mapping = Mapping {
                    phys_base: rgn.guest_region.start as u64,
                    virt_base: rgn.guest_region.start as u64,
                    len: rgn.guest_region.len() as u64,
                    kind,
                };
                unsafe { vmem::map(&pt_buf, mapping) };
            }

            map_specials(&pt_buf, layout.get_scratch_size());

            // Record the page-table size in the layout before appending the
            // serialized tables to the image.
            let pt_bytes = pt_buf.into_bytes();
            layout.set_pt_size(pt_bytes.len())?;
            memory.extend(&pt_bytes);
        };

        // Top of the exception stack near the top of the address space.
        // NOTE(review): the +1 suggests MAX_GVA is an inclusive maximum —
        // confirm against hyperlight_common::layout.
        let exn_stack_top_gva = hyperlight_common::layout::MAX_GVA as u64
            - hyperlight_common::layout::SCRATCH_TOP_EXN_STACK_OFFSET
            + 1;

        // Fresh snapshots carry no extra host-mapped regions.
        let extra_regions = Vec::new();
        let hash = hash(&memory, &extra_regions)?;

        Ok(Self {
            sandbox_id: SANDBOX_CONFIGURATION_COUNTER.fetch_add(1, Ordering::Relaxed),
            memory: ReadonlySharedMemory::from_bytes(&memory)?,
            layout,
            regions: extra_regions,
            load_info,
            hash,
            stack_top_gva: exn_stack_top_gva,
            sregs: None,
            entrypoint: NextAction::Initialise(load_addr + entrypoint_offset),
        })
    }

    /// Captures a live sandbox's state into a new compact snapshot: walks
    /// the guest page tables rooted at `root_pt_gpa`, copies every live page
    /// into a dense buffer (deduplicating physical pages mapped at several
    /// virtual addresses), rebuilds page tables pointing at the compacted
    /// layout, and hashes the result. Writable mappings are converted to
    /// copy-on-write in the rebuilt tables.
    #[allow(clippy::too_many_arguments)]
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn new<S: SharedMemory>(
        shared_mem: &mut SnapshotSharedMemory<S>,
        scratch_mem: &mut S,
        sandbox_id: u64,
        mut layout: SandboxMemoryLayout,
        load_info: LoadInfo,
        regions: Vec<MemoryRegion>,
        root_pt_gpa: u64,
        stack_top_gva: u64,
        sregs: CommonSpecialRegisters,
        entrypoint: NextAction,
    ) -> Result<Self> {
        use std::collections::HashMap;
        // Maps an old physical page base to its new GPA in the compacted
        // image, so aliased pages are copied only once.
        let mut phys_seen = HashMap::<u64, usize>::new();
        let memory = shared_mem.with_contents(|snap_c| {
            scratch_mem.with_contents(|scratch_c| {
                let live_pages =
                    filtered_mappings(snap_c, scratch_c, &regions, layout, root_pt_gpa);

                let pt_buf = GuestPageTableBuffer::new(layout.get_pt_base_gpa() as usize);
                let mut snapshot_memory: Vec<u8> = Vec::new();
                for (mapping, contents) in live_pages {
                    // Demote writable basic mappings to COW; everything else
                    // keeps its permissions with `writable` forced off.
                    let kind = match mapping.kind {
                        MappingKind::Cow(cm) => MappingKind::Cow(cm),
                        MappingKind::Basic(bm) if bm.writable => MappingKind::Cow(CowMapping {
                            readable: bm.readable,
                            executable: bm.executable,
                        }),
                        MappingKind::Basic(bm) => MappingKind::Basic(BasicMapping {
                            readable: bm.readable,
                            writable: false,
                            executable: bm.executable,
                        }),
                        MappingKind::Unmapped => continue,
                    };
                    // Copy the page only the first time this physical base
                    // is seen; later aliases reuse the same new GPA.
                    let new_gpa = phys_seen.entry(mapping.phys_base).or_insert_with(|| {
                        let new_offset = snapshot_memory.len();
                        snapshot_memory.extend(contents);
                        new_offset + SandboxMemoryLayout::BASE_ADDRESS
                    });
                    let mapping = Mapping {
                        phys_base: *new_gpa as u64,
                        virt_base: mapping.virt_base,
                        len: PAGE_SIZE as u64,
                        kind,
                    };
                    unsafe { vmem::map(&pt_buf, mapping) };
                }
                map_specials(&pt_buf, layout.get_scratch_size());
                let pt_bytes = pt_buf.into_bytes();
                layout.set_pt_size(pt_bytes.len())?;
                snapshot_memory.extend(&pt_bytes);
                Ok::<Vec<u8>, crate::HyperlightError>(snapshot_memory)
            })
        })???; // unwraps the two `with_contents` layers plus the inner Result
        layout.set_snapshot_size(memory.len());

        // Page contents were folded into `memory` above, so the stored
        // region list is rebuilt empty (shadowing the consumed parameter).
        let regions = Vec::new();

        let hash = hash(&memory, &regions)?;
        Ok(Self {
            sandbox_id,
            layout,
            memory: ReadonlySharedMemory::from_bytes(&memory)?,
            regions,
            load_info,
            hash,
            stack_top_gva,
            sregs: Some(sregs),
            entrypoint,
        })
    }

    /// Id of the sandbox this snapshot belongs to.
    pub(crate) fn sandbox_id(&self) -> u64 {
        self.sandbox_id
    }

    /// Extra host-mapped regions folded into the hash (currently always
    /// empty; see both constructors).
    pub(crate) fn regions(&self) -> &[MemoryRegion] {
        &self.regions
    }

    /// Read-only backing memory of the snapshot.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn memory(&self) -> &ReadonlySharedMemory {
        &self.memory
    }

    /// Clone of the guest executable's load metadata.
    pub(crate) fn load_info(&self) -> LoadInfo {
        self.load_info.clone()
    }

    /// Memory layout describing this snapshot's contents.
    pub(crate) fn layout(&self) -> &crate::mem::layout::SandboxMemoryLayout {
        &self.layout
    }

    /// GPA of the root page table inside the snapshot.
    pub(crate) fn root_pt_gpa(&self) -> u64 {
        self.layout.get_pt_base_gpa()
    }

    /// GVA of the stack top to install when resuming from this snapshot.
    pub(crate) fn stack_top_gva(&self) -> u64 {
        self.stack_top_gva
    }

    /// Saved special registers, or `None` for snapshots built straight from
    /// a binary (which have never executed).
    pub(crate) fn sregs(&self) -> Option<&CommonSpecialRegisters> {
        self.sregs.as_ref()
    }

    /// What to do next when this snapshot is entered.
    pub(crate) fn entrypoint(&self) -> NextAction {
        self.entrypoint
    }
}
564
// Snapshot equality is content equality: two snapshots are equal exactly
// when their blake3 digests (over memory plus region descriptors) match.
impl PartialEq for Snapshot {
    fn eq(&self, other: &Snapshot) -> bool {
        self.hash == other.hash
    }
}
570
#[cfg(test)]
mod tests {
    use hyperlight_common::vmem::{self, BasicMapping, Mapping, MappingKind, PAGE_SIZE};

    use crate::hypervisor::regs::CommonSpecialRegisters;
    use crate::mem::exe::LoadInfo;
    use crate::mem::layout::SandboxMemoryLayout;
    use crate::mem::mgr::{GuestPageTableBuffer, SandboxMemoryManager, SnapshotSharedMemory};
    use crate::mem::shared_mem::{
        ExclusiveSharedMemory, HostSharedMemory, ReadonlySharedMemory, SharedMemory,
    };

    // Default special registers used by every snapshot in these tests.
    fn default_sregs() -> CommonSpecialRegisters {
        CommonSpecialRegisters::default()
    }

    // The minimal test image is one data page followed by its page tables.
    const SIMPLE_PT_BASE: usize = PAGE_SIZE + SandboxMemoryLayout::BASE_ADDRESS;

    // Builds a one-page snapshot memory: `contents` as the single mapped
    // page (identity-mapped RWX at BASE_ADDRESS), with page tables at
    // SIMPLE_PT_BASE and the scratch region mapped via map_specials.
    fn make_simple_pt_mem(contents: &[u8]) -> SnapshotSharedMemory<ExclusiveSharedMemory> {
        let pt_buf = GuestPageTableBuffer::new(SIMPLE_PT_BASE);
        let mapping = Mapping {
            phys_base: SandboxMemoryLayout::BASE_ADDRESS as u64,
            virt_base: SandboxMemoryLayout::BASE_ADDRESS as u64,
            len: PAGE_SIZE as u64,
            kind: MappingKind::Basic(BasicMapping {
                readable: true,
                writable: true,
                executable: true,
            }),
        };
        unsafe { vmem::map(&pt_buf, mapping) };
        super::map_specials(&pt_buf, PAGE_SIZE);
        let pt_bytes = pt_buf.into_bytes();

        // Image layout: [contents page][serialized page tables].
        let mut snapshot_mem = vec![0u8; PAGE_SIZE + pt_bytes.len()];
        snapshot_mem[0..PAGE_SIZE].copy_from_slice(contents);
        snapshot_mem[PAGE_SIZE..].copy_from_slice(&pt_bytes);
        ReadonlySharedMemory::from_bytes(&snapshot_mem)
            .unwrap()
            .to_mgr_snapshot_mem()
            .unwrap()
    }

    // Builds a memory manager backed by a zeroed one-page image and returns
    // it together with the GPA of the test page tables.
    fn make_simple_pt_mgr() -> (SandboxMemoryManager<HostSharedMemory>, u64) {
        let cfg = crate::sandbox::SandboxConfiguration::default();
        let scratch_mem = ExclusiveSharedMemory::new(cfg.get_scratch_size()).unwrap();
        let mgr = SandboxMemoryManager::new(
            SandboxMemoryLayout::new(cfg, 4096, 0x3000, None).unwrap(),
            make_simple_pt_mem(&[0u8; PAGE_SIZE]),
            scratch_mem,
            super::NextAction::None,
        );
        let (mgr, _) = mgr.build().unwrap();
        (mgr, SIMPLE_PT_BASE as u64)
    }

    // Two snapshots taken from different memory contents must restore
    // independently: restoring A then B yields A's then B's page contents.
    #[test]
    fn multiple_snapshots_independent() {
        let (mut mgr, pt_base) = make_simple_pt_mgr();

        // Snapshot A: page filled with 0xAA.
        let pattern_a = vec![0xAA; PAGE_SIZE];
        let snapshot_a = super::Snapshot::new(
            &mut make_simple_pt_mem(&pattern_a).build().0,
            &mut mgr.scratch_mem,
            1,
            mgr.layout,
            LoadInfo::dummy(),
            Vec::new(),
            pt_base,
            0,
            default_sregs(),
            super::NextAction::None,
        )
        .unwrap();

        // Snapshot B: page filled with 0xBB.
        let pattern_b = vec![0xBB; PAGE_SIZE];
        let snapshot_b = super::Snapshot::new(
            &mut make_simple_pt_mem(&pattern_b).build().0,
            &mut mgr.scratch_mem,
            2,
            mgr.layout,
            LoadInfo::dummy(),
            Vec::new(),
            pt_base,
            0,
            default_sregs(),
            super::NextAction::None,
        )
        .unwrap();

        // Restoring A must reproduce A's page contents...
        mgr.restore_snapshot(&snapshot_a).unwrap();
        mgr.shared_mem
            .with_contents(|contents| assert_eq!(&contents[0..pattern_a.len()], &pattern_a[..]))
            .unwrap();

        // ...and restoring B afterwards must reproduce B's, unaffected by A.
        mgr.restore_snapshot(&snapshot_b).unwrap();
        mgr.shared_mem
            .with_contents(|contents| assert_eq!(&contents[0..pattern_b.len()], &pattern_b[..]))
            .unwrap();
    }
}
675}