1use std::collections::HashSet;
18use std::path::Path;
19use std::sync::atomic::Ordering;
20use std::sync::{Arc, Mutex};
21
22use flatbuffers::FlatBufferBuilder;
23use hyperlight_common::flatbuffer_wrappers::function_call::{FunctionCall, FunctionCallType};
24use hyperlight_common::flatbuffer_wrappers::function_types::{
25 ParameterValue, ReturnType, ReturnValue,
26};
27use hyperlight_common::flatbuffer_wrappers::util::estimate_flatbuffer_capacity;
28use tracing::{Span, instrument};
29
30use super::Callable;
31use super::file_mapping::prepare_file_cow;
32use super::host_funcs::FunctionRegistry;
33use super::snapshot::Snapshot;
34use crate::HyperlightError::{self, SnapshotSandboxMismatch};
35use crate::func::{ParameterTuple, SupportedReturnType};
36use crate::hypervisor::InterruptHandle;
37use crate::hypervisor::hyperlight_vm::{HyperlightVm, HyperlightVmError};
38use crate::mem::memory_region::{MemoryRegion, MemoryRegionFlags};
39use crate::mem::mgr::SandboxMemoryManager;
40use crate::mem::shared_mem::{HostSharedMemory, SharedMemory as _};
41use crate::metrics::{
42 METRIC_GUEST_ERROR, METRIC_GUEST_ERROR_LABEL_CODE, maybe_time_and_emit_guest_call,
43};
44use crate::{Result, log_then_return};
45
/// A sandbox that can run guest function calls repeatedly, with
/// snapshot/restore support for rolling guest state back.
pub struct MultiUseSandbox {
    // Process-wide unique id; snapshots record it so a snapshot can only
    // be restored into the sandbox that produced it.
    id: u64,
    // Set when a failed guest call or vCPU reset leaves the sandbox in an
    // unknown state; most operations refuse to run until a successful restore.
    poisoned: bool,
    // Host functions registered with this sandbox, shared with the VM dispatcher.
    pub(crate) host_funcs: Arc<Mutex<FunctionRegistry>>,
    // Manages the sandbox's shared/scratch memory and guest call buffers.
    pub(crate) mem_mgr: SandboxMemoryManager<HostSharedMemory>,
    vm: HyperlightVm,
    #[cfg(gdb)]
    dbg_mem_access_fn: Arc<Mutex<SandboxMemoryManager<HostSharedMemory>>>,
    // Cached snapshot of the current state; cleared whenever guest-visible
    // state may change (guest calls, new mappings).
    snapshot: Option<Arc<Snapshot>>,
    // Optional callback used at snapshot time to locate extra page-table
    // root GPAs in guest memory.
    pt_root_finder: Option<PtRootFinder>,
}
102
/// Callback invoked during [`MultiUseSandbox::snapshot`] to discover
/// page-table root GPAs. It receives the snapshot (shared) memory
/// contents, the scratch memory contents, and the current CR3 value, and
/// returns the root GPAs it found; an empty result makes the caller fall
/// back to CR3 alone.
pub type PtRootFinder = Box<dyn Fn(&[u8], &[u8], u64) -> Vec<u64> + Send>;
114
115impl MultiUseSandbox {
116 #[instrument(skip_all, parent = Span::current(), level = "Trace")]
122 pub(super) fn from_uninit(
123 host_funcs: Arc<Mutex<FunctionRegistry>>,
124 mgr: SandboxMemoryManager<HostSharedMemory>,
125 vm: HyperlightVm,
126 #[cfg(gdb)] dbg_mem_access_fn: Arc<Mutex<SandboxMemoryManager<HostSharedMemory>>>,
127 ) -> MultiUseSandbox {
128 Self {
129 id: super::snapshot::SANDBOX_CONFIGURATION_COUNTER.fetch_add(1, Ordering::Relaxed),
130 poisoned: false,
131 host_funcs,
132 mem_mgr: mgr,
133 vm,
134 #[cfg(gdb)]
135 dbg_mem_access_fn,
136 snapshot: None,
137 pt_root_finder: None,
138 }
139 }
140
    /// Installs a callback used by [`Self::snapshot`] to locate page-table
    /// root GPAs in guest memory. Replaces any previously installed finder.
    pub fn set_pt_root_finder(&mut self, finder: PtRootFinder) {
        self.pt_root_finder = Some(finder);
    }
147
    /// Captures a snapshot of the sandbox's current state, or returns the
    /// cached one if the state has not changed since the last snapshot.
    ///
    /// The snapshot records the mapped regions, page-table root GPAs,
    /// stack top, special registers and entrypoint so [`Self::restore`]
    /// can later roll the sandbox back to this point.
    ///
    /// # Errors
    /// Returns `PoisonedSandbox` if the sandbox is poisoned, or a VM error
    /// if reading vCPU/page-table state fails.
    #[instrument(err(Debug), skip_all, parent = Span::current())]
    pub fn snapshot(&mut self) -> Result<Arc<Snapshot>> {
        if self.poisoned {
            return Err(crate::HyperlightError::PoisonedSandbox);
        }

        // Fast path: reuse the cached snapshot. The cache is cleared by
        // every operation that can change guest-visible state.
        if let Some(snapshot) = &self.snapshot {
            return Ok(snapshot.clone());
        }
        let mapped_regions_iter = self.vm.get_mapped_regions();
        let mapped_regions_vec: Vec<MemoryRegion> = mapped_regions_iter.cloned().collect();
        let cr3 = self
            .vm
            .get_root_pt()
            .map_err(|e| HyperlightError::HyperlightVmError(e.into()))?;
        // Ask the optional finder for additional page-table roots; fall
        // back to CR3 alone when no finder is set or it finds nothing.
        let root_pt_gpas = if let Some(finder) = &self.pt_root_finder {
            let roots = self.mem_mgr.shared_mem.with_contents(|snap| {
                self.mem_mgr
                    .scratch_mem
                    .with_contents(|scratch| finder(snap, scratch, cr3))
            })??;
            if roots.is_empty() { vec![cr3] } else { roots }
        } else {
            vec![cr3]
        };

        let stack_top_gpa = self.vm.get_stack_top();
        let sregs = self
            .vm
            .get_snapshot_sregs()
            .map_err(|e| HyperlightError::HyperlightVmError(e.into()))?;
        let entrypoint = self.vm.get_entrypoint();
        let memory_snapshot = self.mem_mgr.snapshot(
            self.id,
            mapped_regions_vec,
            &root_pt_gpas,
            stack_top_gpa,
            sregs,
            entrypoint,
        )?;
        let snapshot = Arc::new(memory_snapshot);
        // Cache the snapshot so repeated calls are cheap until state changes.
        self.snapshot = Some(snapshot.clone());
        Ok(snapshot)
    }
222
223 #[instrument(err(Debug), skip_all, parent = Span::current())]
299 pub fn restore(&mut self, snapshot: Arc<Snapshot>) -> Result<()> {
300 if self.id != snapshot.sandbox_id() {
324 return Err(SnapshotSandboxMismatch);
325 }
326
327 let (gsnapshot, gscratch) = self.mem_mgr.restore_snapshot(&snapshot)?;
328 if let Some(gsnapshot) = gsnapshot {
329 self.vm
330 .update_snapshot_mapping(gsnapshot)
331 .map_err(|e| HyperlightError::HyperlightVmError(e.into()))?;
332 }
333 if let Some(gscratch) = gscratch {
334 self.vm
335 .update_scratch_mapping(gscratch)
336 .map_err(|e| HyperlightError::HyperlightVmError(e.into()))?;
337 }
338
339 let sregs = snapshot.sregs().ok_or_else(|| {
340 HyperlightError::Error("snapshot from running sandbox should have sregs".to_string())
341 })?;
342 self.vm
345 .reset_vcpu(snapshot.root_pt_gpa(), sregs)
346 .map_err(|e| {
347 self.poisoned = true;
348 HyperlightVmError::Restore(e)
349 })?;
350
351 self.vm.set_stack_top(snapshot.stack_top_gva());
352 self.vm.set_entrypoint(snapshot.entrypoint());
353
354 let current_regions: HashSet<_> = self.vm.get_mapped_regions().cloned().collect();
355 let snapshot_regions: HashSet<_> = snapshot.regions().iter().cloned().collect();
356
357 let regions_to_unmap = current_regions.difference(&snapshot_regions);
358 let regions_to_map = snapshot_regions.difference(¤t_regions);
359
360 for region in regions_to_unmap {
361 self.vm
362 .unmap_region(region)
363 .map_err(HyperlightVmError::UnmapRegion)?;
364 }
365
366 for region in regions_to_map {
367 unsafe { self.vm.map_region(region) }.map_err(HyperlightVmError::MapRegion)?;
370 }
371
372 self.snapshot = Some(snapshot.clone());
374
375 self.poisoned = false;
385
386 Ok(())
387 }
388
    /// Calls a guest function and then restores the sandbox to the state
    /// it had before the call (snapshot → call → restore), so guest side
    /// effects do not persist across calls.
    ///
    /// Deprecated: use [`Self::call`] with explicit [`Self::snapshot`] /
    /// [`Self::restore`] instead.
    #[doc(hidden)]
    #[deprecated(
        since = "0.8.0",
        note = "Deprecated in favour of call and snapshot/restore."
    )]
    #[instrument(err(Debug), skip(self, args), parent = Span::current())]
    pub fn call_guest_function_by_name<Output: SupportedReturnType>(
        &mut self,
        func_name: &str,
        args: impl ParameterTuple,
    ) -> Result<Output> {
        if self.poisoned {
            return Err(crate::HyperlightError::PoisonedSandbox);
        }
        let snapshot = self.snapshot()?;
        // Restore even when the call failed, but return the call's result.
        let res = self.call(func_name, args);
        self.restore(snapshot)?;
        res
    }
444
445 #[instrument(err(Debug), skip(self, args), parent = Span::current())]
522 pub fn call<Output: SupportedReturnType>(
523 &mut self,
524 func_name: &str,
525 args: impl ParameterTuple,
526 ) -> Result<Output> {
527 if self.poisoned {
528 return Err(crate::HyperlightError::PoisonedSandbox);
529 }
530 self.snapshot = None;
532 maybe_time_and_emit_guest_call(func_name, || {
533 let ret = self.call_guest_function_by_name_no_reset(
534 func_name,
535 Output::TYPE,
536 args.into_value(),
537 );
538 let ret = Output::from_value(ret?)?;
541 Ok(ret)
542 })
543 }
544
    /// Maps a host memory region into the guest's address space.
    ///
    /// # Safety
    /// The caller must keep the host memory backing `rgn` valid for as
    /// long as it is mapped into the guest.
    ///
    /// # Errors
    /// Fails if the sandbox is poisoned, if the region requests WRITE
    /// access (not yet supported), or if the VM rejects the mapping.
    #[instrument(err(Debug), skip(self, rgn), parent = Span::current())]
    pub unsafe fn map_region(&mut self, rgn: &MemoryRegion) -> Result<()> {
        if self.poisoned {
            return Err(crate::HyperlightError::PoisonedSandbox);
        }
        if rgn.flags.contains(MemoryRegionFlags::WRITE) {
            log_then_return!("TODO: Writable mappings not yet supported");
        }
        // The mapping changes guest-visible state: invalidate the cache.
        self.snapshot = None;
        unsafe { self.vm.map_region(rgn) }.map_err(HyperlightVmError::MapRegion)?;
        self.mem_mgr.mapped_rgns += 1;
        Ok(())
    }
577
    /// Maps a file into the guest copy-on-write at `guest_base`, returning
    /// the mapped size in bytes.
    ///
    /// Rejects mappings that would overlap the sandbox's shared memory or
    /// any existing mapping. With the `nanvix-unstable` feature, also
    /// records the mapping in the guest-visible file-mapping table, up to
    /// `MAX_FILE_MAPPINGS` entries.
    ///
    /// # Errors
    /// Fails if the sandbox is poisoned, the mapping limit is reached, the
    /// address range overflows or overlaps, or the VM rejects the mapping.
    #[instrument(err(Debug), skip(self, file_path, guest_base, label), parent = Span::current())]
    pub fn map_file_cow(
        &mut self,
        file_path: &Path,
        guest_base: u64,
        label: Option<&str>,
    ) -> Result<u64> {
        if self.poisoned {
            return Err(crate::HyperlightError::PoisonedSandbox);
        }

        // Enforce the file-mapping table capacity before doing any work.
        #[cfg(feature = "nanvix-unstable")]
        let current_count = self
            .mem_mgr
            .shared_mem
            .read::<u64>(self.mem_mgr.layout.get_file_mappings_size_offset())?
            as usize;
        #[cfg(feature = "nanvix-unstable")]
        if current_count >= hyperlight_common::mem::MAX_FILE_MAPPINGS {
            return Err(crate::HyperlightError::Error(format!(
                "map_file_cow: file mapping limit reached ({} of {})",
                current_count,
                hyperlight_common::mem::MAX_FILE_MAPPINGS,
            )));
        }

        let mut prepared = prepare_file_cow(file_path, guest_base, label)?;

        // Reject any overlap with the sandbox's shared-memory window
        // [base_addr, shared_end); use checked adds so a huge guest_base
        // or size cannot wrap around and dodge the interval test.
        let shared_size = self.mem_mgr.shared_mem.mem_size() as u64;
        let base_addr = crate::mem::layout::SandboxMemoryLayout::BASE_ADDRESS as u64;
        let shared_end = base_addr.checked_add(shared_size).ok_or_else(|| {
            crate::HyperlightError::Error("shared memory end overflow".to_string())
        })?;
        let mapping_end = guest_base
            .checked_add(prepared.size as u64)
            .ok_or_else(|| {
                crate::HyperlightError::Error(format!(
                    "map_file_cow: guest address overflow: {:#x} + {:#x}",
                    guest_base, prepared.size
                ))
            })?;
        if guest_base < shared_end && mapping_end > base_addr {
            return Err(crate::HyperlightError::Error(format!(
                "map_file_cow: mapping [{:#x}..{:#x}) overlaps sandbox shared memory [{:#x}..{:#x})",
                guest_base, mapping_end, base_addr, shared_end,
            )));
        }

        let region = prepared.to_memory_region()?;

        // Also reject overlap with every region already mapped in the VM.
        for existing_region in self.vm.get_mapped_regions() {
            let ex_start = existing_region.guest_region.start as u64;
            let ex_end = existing_region.guest_region.end as u64;
            if guest_base < ex_end && mapping_end > ex_start {
                return Err(crate::HyperlightError::Error(format!(
                    "map_file_cow: mapping [{:#x}..{:#x}) overlaps existing mapping [{:#x}..{:#x})",
                    guest_base, mapping_end, ex_start, ex_end,
                )));
            }
        }

        // The mapping changes guest-visible state: invalidate the cache.
        self.snapshot = None;

        unsafe { self.vm.map_region(&region) }
            .map_err(HyperlightVmError::MapRegion)
            .map_err(crate::HyperlightError::HyperlightVmError)?;

        let size = prepared.size as u64;

        // The VM now owns the mapping; stop the guard from unmapping it.
        prepared.mark_consumed();
        self.mem_mgr.mapped_rgns += 1;

        // Record the mapping in the guest-visible file-mapping table.
        #[cfg(feature = "nanvix-unstable")]
        self.mem_mgr
            .write_file_mapping_entry(prepared.guest_base, size, &prepared.label)?;

        Ok(size)
    }
686
    /// Type-erased variant of [`Self::call`] for fuzzing: takes the return
    /// type and arguments as runtime values instead of generics, and
    /// returns the raw `ReturnValue`. Guest state changes persist and the
    /// cached snapshot is invalidated, exactly as in `call`.
    #[cfg(feature = "fuzzing")]
    #[instrument(err(Debug), skip(self, args), parent = Span::current())]
    pub fn call_type_erased_guest_function_by_name(
        &mut self,
        func_name: &str,
        ret_type: ReturnType,
        args: Vec<ParameterValue>,
    ) -> Result<ReturnValue> {
        if self.poisoned {
            return Err(crate::HyperlightError::PoisonedSandbox);
        }
        self.snapshot = None;
        maybe_time_and_emit_guest_call(func_name, || {
            self.call_guest_function_by_name_no_reset(func_name, ret_type, args)
        })
    }
712
    /// Core guest-call path: encodes the call as a flatbuffer, writes it
    /// into guest memory, dispatches it to the VM, and decodes the result.
    ///
    /// Does not snapshot or restore; callers are responsible for state
    /// management. Poisons the sandbox when the dispatch error or the
    /// resulting error demands it.
    fn call_guest_function_by_name_no_reset(
        &mut self,
        function_name: &str,
        return_type: ReturnType,
        args: Vec<ParameterValue>,
    ) -> Result<ReturnValue> {
        if self.poisoned {
            return Err(crate::HyperlightError::PoisonedSandbox);
        }
        // Clear any stale cancellation request from a prior call.
        self.vm.clear_cancel();

        // Run the call in a closure so cleanup below happens on every path.
        let res = (|| {
            // Pre-size the builder to avoid reallocation while encoding.
            let estimated_capacity = estimate_flatbuffer_capacity(function_name, &args);

            let fc = FunctionCall::new(
                function_name.to_string(),
                Some(args),
                FunctionCallType::Guest,
                return_type,
            );

            let mut builder = FlatBufferBuilder::with_capacity(estimated_capacity);
            let buffer = fc.encode(&mut builder);

            self.mem_mgr.write_guest_function_call(buffer)?;

            let dispatch_res = self.vm.dispatch_call_from_host(
                &mut self.mem_mgr,
                &self.host_funcs,
                #[cfg(gdb)]
                self.dbg_mem_access_fn.clone(),
            );

            if let Err(e) = dispatch_res {
                // The dispatch error decides whether the sandbox is poisoned.
                let (error, should_poison) = e.promote();
                self.poisoned |= should_poison;
                return Err(error);
            }

            let guest_result = self.mem_mgr.get_guest_function_call_result()?.into_inner();

            match guest_result {
                Ok(val) => Ok(val),
                Err(guest_error) => {
                    // Count guest-reported errors by error code.
                    metrics::counter!(
                        METRIC_GUEST_ERROR,
                        METRIC_GUEST_ERROR_LABEL_CODE => (guest_error.code as u64).to_string()
                    )
                    .increment(1);

                    Err(HyperlightError::GuestError(
                        guest_error.code,
                        guest_error.message,
                    ))
                }
            }
        })();

        // Drop any partial abort output so it cannot leak into a later call.
        self.mem_mgr.abort_buffer.clear();

        if let Err(e) = &res {
            // A failed call may leave partial data in the I/O buffers.
            self.mem_mgr.clear_io_buffers();

            self.poisoned |= e.is_poison_error();
        }

        res
    }
795
    /// Returns a handle that can interrupt a guest call in progress from
    /// another thread.
    pub fn interrupt_handle(&self) -> Arc<dyn InterruptHandle> {
        self.vm.interrupt_handle()
    }
828
    /// Writes a crashdump of the sandbox to the default dump location.
    #[cfg(crashdump)]
    #[instrument(err(Debug), skip_all, parent = Span::current())]
    pub fn generate_crashdump(&mut self) -> Result<()> {
        crate::hypervisor::crashdump::generate_crashdump(&self.vm, &mut self.mem_mgr, None)
    }
869
    /// Writes a crashdump of the sandbox into the given directory.
    #[cfg(crashdump)]
    #[instrument(err(Debug), skip_all, parent = Span::current())]
    pub fn generate_crashdump_to_dir(&mut self, dir: impl Into<String>) -> Result<()> {
        crate::hypervisor::crashdump::generate_crashdump(
            &self.vm,
            &mut self.mem_mgr,
            Some(dir.into()),
        )
    }
885
    /// Whether the sandbox is poisoned (in an unknown state after a failed
    /// guest call or restore). A successful [`Self::restore`] clears it.
    pub fn poisoned(&self) -> bool {
        self.poisoned
    }
925}
926
927impl Callable for MultiUseSandbox {
928 fn call<Output: SupportedReturnType>(
929 &mut self,
930 func_name: &str,
931 args: impl ParameterTuple,
932 ) -> Result<Output> {
933 if self.poisoned {
934 return Err(crate::HyperlightError::PoisonedSandbox);
935 }
936 self.call(func_name, args)
937 }
938}
939
940impl std::fmt::Debug for MultiUseSandbox {
941 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
942 f.debug_struct("MultiUseSandbox").finish()
943 }
944}
945
946#[cfg(test)]
947mod tests {
948 use std::sync::{Arc, Barrier};
949 use std::thread;
950
951 use hyperlight_common::flatbuffer_wrappers::guest_error::ErrorCode;
952 use hyperlight_testing::sandbox_sizes::{LARGE_HEAP_SIZE, MEDIUM_HEAP_SIZE, SMALL_HEAP_SIZE};
953 use hyperlight_testing::simple_guest_as_string;
954
955 use crate::mem::memory_region::{MemoryRegion, MemoryRegionFlags, MemoryRegionType};
956 use crate::mem::shared_mem::{ExclusiveSharedMemory, GuestSharedMemory, SharedMemory as _};
957 use crate::sandbox::SandboxConfiguration;
958 use crate::{GuestBinary, HyperlightError, MultiUseSandbox, Result, UninitializedSandbox};
959
960 #[test]
961 fn poison() {
962 let mut sbox: MultiUseSandbox = {
963 let path = simple_guest_as_string().unwrap();
964 let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
965 u_sbox.evolve()
966 }
967 .unwrap();
968 let snapshot = sbox.snapshot().unwrap();
969
970 let res = sbox
972 .call::<()>("guest_panic", "hello".to_string())
973 .unwrap_err();
974 assert!(
975 matches!(res, HyperlightError::GuestAborted(code, context) if code == ErrorCode::UnknownError as u8 && context.contains("hello"))
976 );
977 assert!(sbox.poisoned());
978
979 let res = sbox
981 .call::<()>("guest_panic", "hello2".to_string())
982 .unwrap_err();
983 assert!(matches!(res, HyperlightError::PoisonedSandbox));
984
985 if let Err(e) = sbox.snapshot() {
987 assert!(sbox.poisoned());
988 assert!(matches!(e, HyperlightError::PoisonedSandbox));
989 } else {
990 panic!("Snapshot should fail");
991 }
992
993 {
995 let map_mem = allocate_guest_memory();
996 let guest_base = 0x0;
997 let region = region_for_memory(&map_mem, guest_base, MemoryRegionFlags::READ);
998 let res = unsafe { sbox.map_region(®ion) }.unwrap_err();
999 assert!(matches!(res, HyperlightError::PoisonedSandbox));
1000 }
1001
1002 {
1004 let temp_file = std::env::temp_dir().join("test_poison_map_file.bin");
1005 let res = sbox.map_file_cow(&temp_file, 0x0, None).unwrap_err();
1006 assert!(matches!(res, HyperlightError::PoisonedSandbox));
1007 std::fs::remove_file(&temp_file).ok(); }
1009
1010 #[allow(deprecated)]
1012 let res = sbox
1013 .call_guest_function_by_name::<String>("Echo", "test".to_string())
1014 .unwrap_err();
1015 assert!(matches!(res, HyperlightError::PoisonedSandbox));
1016
1017 sbox.restore(snapshot.clone()).unwrap();
1019 assert!(!sbox.poisoned());
1020
1021 let res = sbox.call::<String>("Echo", "hello2".to_string()).unwrap();
1023 assert_eq!(res, "hello2".to_string());
1024 assert!(!sbox.poisoned());
1025
1026 let res = sbox
1028 .call::<()>("guest_panic", "hello".to_string())
1029 .unwrap_err();
1030 assert!(
1031 matches!(res, HyperlightError::GuestAborted(code, context) if code == ErrorCode::UnknownError as u8 && context.contains("hello"))
1032 );
1033 assert!(sbox.poisoned());
1034
1035 sbox.restore(snapshot.clone()).unwrap();
1037 assert!(!sbox.poisoned());
1038
1039 let res = sbox.call::<String>("Echo", "hello3".to_string()).unwrap();
1041 assert_eq!(res, "hello3".to_string());
1042 assert!(!sbox.poisoned());
1043
1044 let _ = sbox.snapshot().unwrap();
1046 }
1047
1048 #[test]
1050 fn host_func_error() {
1051 let path = simple_guest_as_string().unwrap();
1052 let mut sandbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
1053 sandbox
1054 .register("HostError", || -> Result<()> {
1055 Err(HyperlightError::Error("hi".to_string()))
1056 })
1057 .unwrap();
1058 let mut sandbox = sandbox.evolve().unwrap();
1059
1060 for _ in 0..1000 {
1062 let result = sandbox
1063 .call::<i64>(
1064 "CallGivenParamlessHostFuncThatReturnsI64",
1065 "HostError".to_string(),
1066 )
1067 .unwrap_err();
1068
1069 assert!(
1070 matches!(result, HyperlightError::GuestError(code, msg) if code == ErrorCode::HostFunctionError && msg == "hi"),
1071 );
1072 }
1073 }
1074
1075 #[test]
1076 fn call_host_func_expect_error() {
1077 let path = simple_guest_as_string().unwrap();
1078 let sandbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
1079 let mut sandbox = sandbox.evolve().unwrap();
1080 sandbox
1081 .call::<()>("CallHostExpectError", "SomeUnknownHostFunc".to_string())
1082 .unwrap();
1083 }
1084
1085 #[test]
1087 fn io_buffer_reset() {
1088 let mut cfg = SandboxConfiguration::default();
1089 cfg.set_input_data_size(4096);
1090 cfg.set_output_data_size(4096);
1091 let path = simple_guest_as_string().unwrap();
1092 let mut sandbox =
1093 UninitializedSandbox::new(GuestBinary::FilePath(path), Some(cfg)).unwrap();
1094 sandbox.register("HostAdd", |a: i32, b: i32| a + b).unwrap();
1095 let mut sandbox = sandbox.evolve().unwrap();
1096
1097 for _ in 0..1000 {
1099 let result = sandbox.call::<i32>("Add", (5i32, 10i32)).unwrap();
1100 assert_eq!(result, 15);
1101 let result = sandbox.call::<i32>("AddToStaticAndFail", ()).unwrap_err();
1102 assert!(
1103 matches!(result, HyperlightError::GuestError (code, msg ) if code == ErrorCode::GuestError && msg == "Crash on purpose")
1104 );
1105 }
1106 }
1107
1108 #[test]
1110 fn test_call_guest_function_by_name() {
1111 let mut sbox: MultiUseSandbox = {
1112 let path = simple_guest_as_string().unwrap();
1113 let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
1114 u_sbox.evolve()
1115 }
1116 .unwrap();
1117
1118 let snapshot = sbox.snapshot().unwrap();
1119
1120 let _ = sbox.call::<i32>("AddToStatic", 5i32).unwrap();
1121 let res: i32 = sbox.call("GetStatic", ()).unwrap();
1122 assert_eq!(res, 5);
1123
1124 sbox.restore(snapshot).unwrap();
1125 #[allow(deprecated)]
1126 let _ = sbox
1127 .call_guest_function_by_name::<i32>("AddToStatic", 5i32)
1128 .unwrap();
1129 #[allow(deprecated)]
1130 let res: i32 = sbox.call_guest_function_by_name("GetStatic", ()).unwrap();
1131 assert_eq!(res, 0);
1132 }
1133
1134 #[test]
1137 fn test_with_small_stack_and_heap() {
1138 let mut cfg = SandboxConfiguration::default();
1139 cfg.set_heap_size(20 * 1024);
1140 let min_scratch = hyperlight_common::layout::min_scratch_size(
1145 cfg.get_input_data_size(),
1146 cfg.get_output_data_size(),
1147 );
1148 cfg.set_scratch_size(min_scratch + 0x10000 + 0x10000);
1149
1150 let mut sbox1: MultiUseSandbox = {
1151 let path = simple_guest_as_string().unwrap();
1152 let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), Some(cfg)).unwrap();
1153 u_sbox.evolve()
1154 }
1155 .unwrap();
1156
1157 for _ in 0..1000 {
1158 sbox1.call::<String>("Echo", "hello".to_string()).unwrap();
1159 }
1160
1161 let mut sbox2: MultiUseSandbox = {
1162 let path = simple_guest_as_string().unwrap();
1163 let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), Some(cfg)).unwrap();
1164 u_sbox.evolve()
1165 }
1166 .unwrap();
1167
1168 for i in 0..1000 {
1169 sbox2
1170 .call::<i32>(
1171 "PrintUsingPrintf",
1172 format!("Hello World {}\n", i).to_string(),
1173 )
1174 .unwrap();
1175 }
1176 }
1177
1178 #[test]
1181 fn snapshot_evolve_restore_handles_state_correctly() {
1182 let mut sbox: MultiUseSandbox = {
1183 let path = simple_guest_as_string().unwrap();
1184 let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
1185 u_sbox.evolve()
1186 }
1187 .unwrap();
1188
1189 let snapshot = sbox.snapshot().unwrap();
1190
1191 let _ = sbox.call::<i32>("AddToStatic", 5i32).unwrap();
1192
1193 let res: i32 = sbox.call("GetStatic", ()).unwrap();
1194 assert_eq!(res, 5);
1195
1196 sbox.restore(snapshot).unwrap();
1197 let res: i32 = sbox.call("GetStatic", ()).unwrap();
1198 assert_eq!(res, 0);
1199 }
1200
1201 #[test]
1202 fn test_trigger_exception_on_guest() {
1203 let usbox = UninitializedSandbox::new(
1204 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1205 None,
1206 )
1207 .unwrap();
1208
1209 let mut multi_use_sandbox: MultiUseSandbox = usbox.evolve().unwrap();
1210
1211 let res: Result<()> = multi_use_sandbox.call("TriggerException", ());
1212
1213 assert!(res.is_err());
1214
1215 match res.unwrap_err() {
1216 HyperlightError::GuestAborted(_, msg) => {
1217 assert!(msg.contains("InvalidOpcode"));
1219 }
1220 e => panic!(
1221 "Expected HyperlightError::GuestExecutionError but got {:?}",
1222 e
1223 ),
1224 }
1225 }
1226
1227 #[test]
1228 fn create_200_sandboxes() {
1229 const NUM_THREADS: usize = 10;
1230 const SANDBOXES_PER_THREAD: usize = 20;
1231
1232 let start_barrier = Arc::new(Barrier::new(NUM_THREADS + 1));
1234 let mut thread_handles = vec![];
1235
1236 for _ in 0..NUM_THREADS {
1237 let barrier = start_barrier.clone();
1238
1239 let handle = thread::spawn(move || {
1240 barrier.wait();
1241
1242 for _ in 0..SANDBOXES_PER_THREAD {
1243 let guest_path = simple_guest_as_string().expect("Guest Binary Missing");
1244 let uninit =
1245 UninitializedSandbox::new(GuestBinary::FilePath(guest_path), None).unwrap();
1246
1247 let mut sandbox: MultiUseSandbox = uninit.evolve().unwrap();
1248
1249 let result: i32 = sandbox.call("GetStatic", ()).unwrap();
1250 assert_eq!(result, 0);
1251 }
1252 });
1253
1254 thread_handles.push(handle);
1255 }
1256
1257 start_barrier.wait();
1258
1259 for handle in thread_handles {
1260 handle.join().unwrap();
1261 }
1262 }
1263
1264 #[test]
1265 fn test_mmap() {
1266 let mut sbox = UninitializedSandbox::new(
1267 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1268 None,
1269 )
1270 .unwrap()
1271 .evolve()
1272 .unwrap();
1273
1274 let expected = b"hello world";
1275 let map_mem = page_aligned_memory(expected);
1276 let guest_base = 0x1_0000_0000; unsafe {
1279 sbox.map_region(®ion_for_memory(
1280 &map_mem,
1281 guest_base,
1282 MemoryRegionFlags::READ,
1283 ))
1284 .unwrap();
1285 }
1286
1287 let _guard = map_mem.lock.try_read().unwrap();
1288 let actual: Vec<u8> = sbox
1289 .call(
1290 "ReadMappedBuffer",
1291 (guest_base as u64, expected.len() as u64, true),
1292 )
1293 .unwrap();
1294
1295 assert_eq!(actual, expected);
1296 }
1297
1298 #[test]
1300 fn test_mmap_write_exec() {
1301 let mut sbox = UninitializedSandbox::new(
1302 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1303 None,
1304 )
1305 .unwrap()
1306 .evolve()
1307 .unwrap();
1308
1309 let expected = &[0x90, 0x90, 0x90, 0xC3]; let map_mem = page_aligned_memory(expected);
1311 let guest_base = 0x1_0000_0000; unsafe {
1314 sbox.map_region(®ion_for_memory(
1315 &map_mem,
1316 guest_base,
1317 MemoryRegionFlags::READ | MemoryRegionFlags::EXECUTE,
1318 ))
1319 .unwrap();
1320 }
1321
1322 let _guard = map_mem.lock.try_read().unwrap();
1323
1324 let succeed = sbox
1326 .call::<bool>(
1327 "ExecMappedBuffer",
1328 (guest_base as u64, expected.len() as u64),
1329 )
1330 .unwrap();
1331 assert!(succeed, "Expected execution of mapped buffer to succeed");
1332
1333 let err = sbox
1335 .call::<bool>(
1336 "WriteMappedBuffer",
1337 (guest_base as u64, expected.len() as u64),
1338 )
1339 .unwrap_err();
1340
1341 match err {
1342 HyperlightError::MemoryAccessViolation(addr, ..) if addr == guest_base as u64 => {}
1343 _ => panic!("Expected MemoryAccessViolation error"),
1344 };
1345 }
1346
1347 fn page_aligned_memory(src: &[u8]) -> GuestSharedMemory {
1348 use hyperlight_common::mem::PAGE_SIZE_USIZE;
1349
1350 let len = src.len().div_ceil(PAGE_SIZE_USIZE) * PAGE_SIZE_USIZE;
1351
1352 let mut mem = ExclusiveSharedMemory::new(len).unwrap();
1353 mem.copy_from_slice(src, 0).unwrap();
1354
1355 let (_, guest_mem) = mem.build();
1356
1357 guest_mem
1358 }
1359
1360 fn region_for_memory(
1361 mem: &GuestSharedMemory,
1362 guest_base: usize,
1363 flags: MemoryRegionFlags,
1364 ) -> MemoryRegion {
1365 let len = mem.mem_size();
1366 MemoryRegion {
1367 host_region: mem.host_region_base()..mem.host_region_end(),
1368 guest_region: guest_base..(guest_base + len),
1369 flags,
1370 region_type: MemoryRegionType::Heap,
1371 }
1372 }
1373
1374 fn allocate_guest_memory() -> GuestSharedMemory {
1375 page_aligned_memory(b"test data for snapshot")
1376 }
1377
1378 #[test]
1379 fn snapshot_restore_handles_remapping_correctly() {
1380 let mut sbox: MultiUseSandbox = {
1381 let path = simple_guest_as_string().unwrap();
1382 let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
1383 u_sbox.evolve().unwrap()
1384 };
1385
1386 let snapshot1 = sbox.snapshot().unwrap();
1388 assert_eq!(sbox.vm.get_mapped_regions().count(), 0);
1389
1390 let map_mem = allocate_guest_memory();
1392 let guest_base = 0x200000000_usize;
1393 let region = region_for_memory(&map_mem, guest_base, MemoryRegionFlags::READ);
1394
1395 unsafe { sbox.map_region(®ion).unwrap() };
1396 assert_eq!(sbox.vm.get_mapped_regions().count(), 1);
1397 let orig_read = sbox
1398 .call::<Vec<u8>>(
1399 "ReadMappedBuffer",
1400 (
1401 guest_base as u64,
1402 hyperlight_common::vmem::PAGE_SIZE as u64,
1403 true,
1404 ),
1405 )
1406 .unwrap();
1407
1408 let snapshot2 = sbox.snapshot().unwrap();
1410 assert_eq!(sbox.vm.get_mapped_regions().count(), 1);
1411
1412 sbox.restore(snapshot1.clone()).unwrap();
1414 assert_eq!(sbox.vm.get_mapped_regions().count(), 0);
1415 let is_mapped = sbox
1416 .call::<bool>("CheckMapped", (guest_base as u64,))
1417 .unwrap();
1418 assert!(!is_mapped);
1419
1420 sbox.restore(snapshot2.clone()).unwrap();
1423 assert_eq!(sbox.vm.get_mapped_regions().count(), 0);
1424 let is_mapped = sbox
1425 .call::<bool>("CheckMapped", (guest_base as u64,))
1426 .unwrap();
1427 assert!(is_mapped);
1428
1429 let new_read = sbox
1431 .call::<Vec<u8>>(
1432 "ReadMappedBuffer",
1433 (
1434 guest_base as u64,
1435 hyperlight_common::vmem::PAGE_SIZE as u64,
1436 false,
1437 ),
1438 )
1439 .unwrap();
1440 assert_eq!(new_read, orig_read);
1441 }
1442
1443 #[test]
1444 fn snapshot_different_sandbox() {
1445 let mut sandbox = {
1446 let path = simple_guest_as_string().unwrap();
1447 let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
1448 u_sbox.evolve().unwrap()
1449 };
1450
1451 let mut sandbox2 = {
1452 let path = simple_guest_as_string().unwrap();
1453 let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
1454 u_sbox.evolve().unwrap()
1455 };
1456 assert_ne!(sandbox.id, sandbox2.id);
1457
1458 let snapshot = sandbox.snapshot().unwrap();
1459 let err = sandbox2.restore(snapshot.clone());
1460 assert!(matches!(err, Err(HyperlightError::SnapshotSandboxMismatch)));
1461
1462 let sandbox_id = sandbox.id;
1463 drop(sandbox);
1464 drop(sandbox2);
1465 drop(snapshot);
1466
1467 let sandbox3 = {
1468 let path = simple_guest_as_string().unwrap();
1469 let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
1470 u_sbox.evolve().unwrap()
1471 };
1472 assert_ne!(sandbox3.id, sandbox_id);
1473 }
1474
1475 #[test]
1478 fn snapshot_restore_resets_debug_registers() {
1479 let mut sandbox: MultiUseSandbox = {
1480 let path = simple_guest_as_string().unwrap();
1481 let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
1482 u_sbox.evolve().unwrap()
1483 };
1484
1485 let snapshot = sandbox.snapshot().unwrap();
1486
1487 let dr0_initial: u64 = sandbox.call("GetDr0", ()).unwrap();
1489 assert_eq!(dr0_initial, 0, "DR0 should initially be 0");
1490
1491 const DIRTY_VALUE: u64 = 0xDEAD_BEEF_CAFE_BABE;
1493 sandbox.call::<()>("SetDr0", DIRTY_VALUE).unwrap();
1494 let dr0_dirty: u64 = sandbox.call("GetDr0", ()).unwrap();
1495 assert_eq!(
1496 dr0_dirty, DIRTY_VALUE,
1497 "DR0 should be dirty after SetDr0 call"
1498 );
1499
1500 sandbox.restore(snapshot).unwrap();
1502
1503 let dr0_after_restore: u64 = sandbox.call("GetDr0", ()).unwrap();
1504 assert_eq!(
1505 dr0_after_restore, 0,
1506 "DR0 should be 0 after restore (reset_vcpu should have been called)"
1507 );
1508 }
1509
1510 #[test]
1513 fn stale_abort_buffer_does_not_leak_across_calls() {
1514 let mut sbox: MultiUseSandbox = {
1515 let path = simple_guest_as_string().unwrap();
1516 let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
1517 u_sbox.evolve().unwrap()
1518 };
1519
1520 sbox.mem_mgr.abort_buffer.extend_from_slice(&[0xAA; 1020]);
1522
1523 let res = sbox.call::<String>("Echo", "hello".to_string());
1524 assert!(
1525 res.is_ok(),
1526 "Expected Ok after stale abort buffer, got: {:?}",
1527 res.unwrap_err()
1528 );
1529
1530 assert!(
1532 sbox.mem_mgr.abort_buffer.is_empty(),
1533 "abort_buffer should be empty after a guest call"
1534 );
1535 }
1536
1537 #[test]
1539 fn test_sandbox_creation_various_sizes() {
1540 let test_cases: [(&str, u64); 3] = [
1541 ("small (8MB heap)", SMALL_HEAP_SIZE),
1542 ("medium (64MB heap)", MEDIUM_HEAP_SIZE),
1543 ("large (256MB heap)", LARGE_HEAP_SIZE),
1544 ];
1545
1546 for (name, heap_size) in test_cases {
1547 let mut cfg = SandboxConfiguration::default();
1548 cfg.set_heap_size(heap_size);
1549 cfg.set_scratch_size(0x100000);
1550
1551 let path = simple_guest_as_string().unwrap();
1552 let sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), Some(cfg))
1553 .unwrap_or_else(|e| panic!("Failed to create {} sandbox: {}", name, e))
1554 .evolve()
1555 .unwrap_or_else(|e| panic!("Failed to evolve {} sandbox: {}", name, e));
1556
1557 drop(sbox);
1558 }
1559 }
1560
1561 #[cfg(feature = "trace_guest")]
1563 fn sandbox_for_gva_tests() -> MultiUseSandbox {
1564 let path = simple_guest_as_string().unwrap();
1565 UninitializedSandbox::new(GuestBinary::FilePath(path), None)
1566 .unwrap()
1567 .evolve()
1568 .unwrap()
1569 }
1570
1571 #[cfg(feature = "trace_guest")]
1575 fn assert_gva_read_matches(sbox: &mut MultiUseSandbox, gva: u64, len: usize) {
1576 let expected: Vec<u8> = sbox
1578 .call("ReadMappedBuffer", (gva, len as u64, true))
1579 .unwrap();
1580 assert_eq!(expected.len(), len);
1581
1582 let root_pt = sbox.vm.get_root_pt().unwrap();
1584 let actual = sbox
1585 .mem_mgr
1586 .read_guest_memory_by_gva(gva, len, root_pt)
1587 .unwrap();
1588
1589 assert_eq!(
1590 actual, expected,
1591 "read_guest_memory_by_gva at GVA {:#x} (len {}) differs from guest ReadMappedBuffer",
1592 gva, len,
1593 );
1594 }
1595
1596 #[test]
1599 #[cfg(feature = "trace_guest")]
1600 fn read_guest_memory_by_gva_single_page() {
1601 let mut sbox = sandbox_for_gva_tests();
1602 let code_gva = sbox.mem_mgr.layout.get_guest_code_address() as u64;
1603 assert_gva_read_matches(&mut sbox, code_gva, 128);
1604 }
1605
1606 #[test]
1609 #[cfg(feature = "trace_guest")]
1610 fn read_guest_memory_by_gva_full_page() {
1611 let mut sbox = sandbox_for_gva_tests();
1612 let code_gva = sbox.mem_mgr.layout.get_guest_code_address() as u64;
1613 assert_gva_read_matches(&mut sbox, code_gva, 4096);
1614 }
1615
1616 #[test]
1619 #[cfg(feature = "trace_guest")]
1620 fn read_guest_memory_by_gva_unaligned_cross_page() {
1621 let mut sbox = sandbox_for_gva_tests();
1622 let code_gva = sbox.mem_mgr.layout.get_guest_code_address() as u64;
1623 let start = code_gva + 4096 - 1;
1626 println!(
1627 "Testing unaligned cross-page read starting at {:#x} spanning 4097 bytes",
1628 start
1629 );
1630 assert_gva_read_matches(&mut sbox, start, 4097);
1631 }
1632
1633 #[test]
1635 #[cfg(feature = "trace_guest")]
1636 fn read_guest_memory_by_gva_two_full_pages() {
1637 let mut sbox = sandbox_for_gva_tests();
1638 let code_gva = sbox.mem_mgr.layout.get_guest_code_address() as u64;
1639 assert_gva_read_matches(&mut sbox, code_gva, 4096 * 2);
1640 }
1641
1642 #[test]
1646 #[cfg(feature = "trace_guest")]
1647 fn read_guest_memory_by_gva_cross_page_boundary() {
1648 let mut sbox = sandbox_for_gva_tests();
1649 let code_gva = sbox.mem_mgr.layout.get_guest_code_address() as u64;
1650 let start = code_gva + 4096 - 100;
1652 assert_gva_read_matches(&mut sbox, start, 200);
1653 }
1654
1655 fn create_test_file(name: &str, content: &[u8]) -> (std::path::PathBuf, Vec<u8>) {
1659 use std::io::Write;
1660
1661 let page_size = page_size::get();
1662 let padded_len = content.len().max(page_size).div_ceil(page_size) * page_size;
1663 let mut padded = vec![0u8; padded_len];
1664 padded[..content.len()].copy_from_slice(content);
1665
1666 let temp_dir = std::env::temp_dir();
1667 let path = temp_dir.join(name);
1668 let _ = std::fs::remove_file(&path); let mut f = std::fs::File::create(&path).unwrap();
1670 f.write_all(&padded).unwrap();
1671 (path, content.to_vec())
1672 }
1673
1674 #[test]
1677 fn test_map_file_cow_basic() {
1678 let expected = b"hello world from map_file_cow";
1679 let (path, expected_bytes) =
1680 create_test_file("hyperlight_test_map_file_cow_basic.bin", expected);
1681
1682 let mut sbox = UninitializedSandbox::new(
1683 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1684 None,
1685 )
1686 .unwrap()
1687 .evolve()
1688 .unwrap();
1689
1690 let guest_base: u64 = 0x1_0000_0000;
1691 let mapped_size = sbox.map_file_cow(&path, guest_base, None).unwrap();
1692 assert!(mapped_size > 0, "mapped_size should be positive");
1693 assert!(
1694 mapped_size >= expected.len() as u64,
1695 "mapped_size should be >= file content length"
1696 );
1697
1698 let actual: Vec<u8> = sbox
1700 .call(
1701 "ReadMappedBuffer",
1702 (guest_base, expected_bytes.len() as u64, true),
1703 )
1704 .unwrap();
1705
1706 assert_eq!(
1707 actual, expected_bytes,
1708 "Guest should read back the exact file content"
1709 );
1710
1711 let _ = std::fs::remove_file(&path);
1713 }
1714
1715 #[test]
1718 fn test_map_file_cow_read_only_enforcement() {
1719 let content = &[0xBB; 4096];
1720 let (path, _) = create_test_file("hyperlight_test_map_file_cow_readonly.bin", content);
1721
1722 let mut sbox = UninitializedSandbox::new(
1723 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1724 None,
1725 )
1726 .unwrap()
1727 .evolve()
1728 .unwrap();
1729
1730 let guest_base: u64 = 0x1_0000_0000;
1731 sbox.map_file_cow(&path, guest_base, None).unwrap();
1732
1733 let err = sbox
1735 .call::<bool>("WriteMappedBuffer", (guest_base, content.len() as u64))
1736 .unwrap_err();
1737
1738 match err {
1739 HyperlightError::MemoryAccessViolation(addr, ..) if addr == guest_base => {}
1740 _ => panic!(
1741 "Expected MemoryAccessViolation at guest_base, got: {:?}",
1742 err
1743 ),
1744 };
1745
1746 let _ = std::fs::remove_file(&path);
1748 }
1749
    /// A poisoned sandbox must refuse `map_file_cow`, and restoring a snapshot
    /// taken before the poisoning must clear the poison and allow mapping again.
    #[test]
    fn test_map_file_cow_poisoned() {
        let (path, _) = create_test_file("hyperlight_test_map_file_cow_poison.bin", &[0xCC; 4096]);

        let mut sbox: MultiUseSandbox = {
            let path = simple_guest_as_string().unwrap();
            let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
            u_sbox.evolve()
        }
        .unwrap();
        // Snapshot taken while the sandbox is still healthy.
        let snapshot = sbox.snapshot().unwrap();

        // Deliberately panic the guest to poison the sandbox.
        let _ = sbox
            .call::<()>("guest_panic", "hello".to_string())
            .unwrap_err();
        assert!(sbox.poisoned());

        // While poisoned, mapping must be rejected with PoisonedSandbox.
        let err = sbox.map_file_cow(&path, 0x1_0000_0000, None).unwrap_err();
        assert!(matches!(err, HyperlightError::PoisonedSandbox));

        // Restoring the pre-poison snapshot clears the poison flag, after
        // which mapping should succeed.
        sbox.restore(snapshot).unwrap();
        assert!(!sbox.poisoned());
        let result = sbox.map_file_cow(&path, 0x1_0000_0000, None);
        assert!(result.is_ok());

        let _ = std::fs::remove_file(&path);
    }
1782
1783 #[test]
1786 fn test_map_file_cow_multi_vm_same_file() {
1787 let expected = b"shared file content across VMs";
1788 let (path, expected_bytes) =
1789 create_test_file("hyperlight_test_map_file_cow_multi_vm.bin", expected);
1790
1791 let guest_base: u64 = 0x1_0000_0000;
1792
1793 let mut sbox1 = UninitializedSandbox::new(
1794 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1795 None,
1796 )
1797 .unwrap()
1798 .evolve()
1799 .unwrap();
1800
1801 let mut sbox2 = UninitializedSandbox::new(
1802 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1803 None,
1804 )
1805 .unwrap()
1806 .evolve()
1807 .unwrap();
1808
1809 sbox1.map_file_cow(&path, guest_base, None).unwrap();
1811 sbox2.map_file_cow(&path, guest_base, None).unwrap();
1812
1813 let actual1: Vec<u8> = sbox1
1815 .call(
1816 "ReadMappedBuffer",
1817 (guest_base, expected_bytes.len() as u64, true),
1818 )
1819 .unwrap();
1820 let actual2: Vec<u8> = sbox2
1821 .call(
1822 "ReadMappedBuffer",
1823 (guest_base, expected_bytes.len() as u64, true),
1824 )
1825 .unwrap();
1826
1827 assert_eq!(
1828 actual1, expected_bytes,
1829 "Sandbox 1 should read correct content"
1830 );
1831 assert_eq!(
1832 actual2, expected_bytes,
1833 "Sandbox 2 should read correct content"
1834 );
1835
1836 let _ = std::fs::remove_file(&path);
1837 }
1838
    /// Stress: several threads each build a sandbox, map the same backing
    /// file, and read it back concurrently. A barrier releases all threads at
    /// once to maximize overlap in the mapping path.
    #[test]
    fn test_map_file_cow_multi_vm_threaded() {
        let expected = b"threaded file mapping test data";
        let (path, expected_bytes) =
            create_test_file("hyperlight_test_map_file_cow_threaded.bin", expected);

        const NUM_THREADS: usize = 5;
        // Share the path and expected bytes across threads.
        let path = Arc::new(path);
        let expected_bytes = Arc::new(expected_bytes);
        let barrier = Arc::new(Barrier::new(NUM_THREADS));
        let mut handles = vec![];

        for _ in 0..NUM_THREADS {
            let path = path.clone();
            let expected_bytes = expected_bytes.clone();
            let barrier = barrier.clone();

            handles.push(thread::spawn(move || {
                // Wait until every thread is spawned so sandbox creation and
                // file mapping happen concurrently.
                barrier.wait();

                let mut sbox = UninitializedSandbox::new(
                    GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
                    None,
                )
                .unwrap()
                .evolve()
                .unwrap();

                let guest_base: u64 = 0x1_0000_0000;
                sbox.map_file_cow(&path, guest_base, None).unwrap();

                let actual: Vec<u8> = sbox
                    .call(
                        "ReadMappedBuffer",
                        (guest_base, expected_bytes.len() as u64, true),
                    )
                    .unwrap();

                assert_eq!(actual, *expected_bytes);
            }));
        }

        // Propagate any panic (failed assertion) from the worker threads.
        for h in handles {
            h.join().unwrap();
        }

        let _ = std::fs::remove_file(&*path);
    }
1889
1890 #[test]
1893 #[cfg(target_os = "windows")]
1894 fn test_map_file_cow_cleanup_no_handle_leak() {
1895 let (path, _) = create_test_file("hyperlight_test_map_file_cow_cleanup.bin", &[0xDD; 4096]);
1896
1897 {
1898 let mut sbox = UninitializedSandbox::new(
1899 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1900 None,
1901 )
1902 .unwrap()
1903 .evolve()
1904 .unwrap();
1905
1906 sbox.map_file_cow(&path, 0x1_0000_0000, None).unwrap();
1907 }
1909
1910 std::fs::remove_file(&path)
1911 .expect("File should be deletable after sandbox with map_file_cow is dropped");
1912 }
1913
    /// Snapshot/restore must track mapping state: restoring a snapshot taken
    /// before `map_file_cow` unmaps the region, and restoring one taken after
    /// remaps it with its data intact.
    #[test]
    fn test_map_file_cow_snapshot_remapping_cycle() {
        let expected = b"snapshot remapping cycle test!";
        let (path, expected_bytes) =
            create_test_file("hyperlight_test_map_file_cow_snapshot_remap.bin", expected);

        let mut sbox = UninitializedSandbox::new(
            GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
            None,
        )
        .unwrap()
        .evolve()
        .unwrap();

        let guest_base: u64 = 0x1_0000_0000;

        // snapshot1: taken BEFORE the file is mapped.
        let snapshot1 = sbox.snapshot().unwrap();

        sbox.map_file_cow(&path, guest_base, None).unwrap();

        let actual: Vec<u8> = sbox
            .call(
                "ReadMappedBuffer",
                (guest_base, expected_bytes.len() as u64, true),
            )
            .unwrap();
        assert_eq!(actual, expected_bytes);

        // snapshot2: taken AFTER the file is mapped.
        let snapshot2 = sbox.snapshot().unwrap();

        // Rolling back to snapshot1 must remove the mapping.
        sbox.restore(snapshot1.clone()).unwrap();
        let is_mapped: bool = sbox.call("CheckMapped", (guest_base,)).unwrap();
        assert!(
            !is_mapped,
            "Region should be unmapped after restoring to snapshot₁"
        );

        // Rolling forward to snapshot2 must re-establish the mapping.
        sbox.restore(snapshot2).unwrap();
        let is_mapped: bool = sbox.call("CheckMapped", (guest_base,)).unwrap();
        assert!(
            is_mapped,
            "Region should be mapped after restoring to snapshot₂"
        );
        let actual2: Vec<u8> = sbox
            .call(
                "ReadMappedBuffer",
                (guest_base, expected_bytes.len() as u64, false),
            )
            .unwrap();
        assert_eq!(
            actual2, expected_bytes,
            "Data should be intact after snapshot₂ restore"
        );

        let _ = std::fs::remove_file(&path);
    }
1980
1981 #[test]
1984 fn test_map_file_cow_snapshot_restore() {
1985 let expected = b"snapshot restore basic test!!";
1986 let (path, expected_bytes) =
1987 create_test_file("hyperlight_test_map_file_cow_snap_restore.bin", expected);
1988
1989 let mut sbox = UninitializedSandbox::new(
1990 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1991 None,
1992 )
1993 .unwrap()
1994 .evolve()
1995 .unwrap();
1996
1997 let guest_base: u64 = 0x1_0000_0000;
1998 sbox.map_file_cow(&path, guest_base, None).unwrap();
1999
2000 let actual: Vec<u8> = sbox
2002 .call(
2003 "ReadMappedBuffer",
2004 (guest_base, expected_bytes.len() as u64, true),
2005 )
2006 .unwrap();
2007 assert_eq!(actual, expected_bytes);
2008
2009 let snapshot = sbox.snapshot().unwrap();
2011
2012 sbox.restore(snapshot).unwrap();
2014
2015 let actual2: Vec<u8> = sbox
2017 .call(
2018 "ReadMappedBuffer",
2019 (guest_base, expected_bytes.len() as u64, false),
2020 )
2021 .unwrap();
2022 assert_eq!(
2023 actual2, expected_bytes,
2024 "Data should be readable after restore from snapshot"
2025 );
2026
2027 let _ = std::fs::remove_file(&path);
2028 }
2029
2030 #[test]
2034 fn test_map_file_cow_deferred_basic() {
2035 let expected = b"deferred map_file_cow test data";
2036 let (path, expected_bytes) =
2037 create_test_file("hyperlight_test_map_file_cow_deferred.bin", expected);
2038
2039 let guest_base: u64 = 0x1_0000_0000;
2040
2041 let mut u_sbox = UninitializedSandbox::new(
2042 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2043 None,
2044 )
2045 .unwrap();
2046
2047 let mapped_size = u_sbox.map_file_cow(&path, guest_base, None).unwrap();
2049 assert!(mapped_size > 0, "mapped_size should be positive");
2050 assert!(
2051 mapped_size >= expected.len() as u64,
2052 "mapped_size should be >= file content length"
2053 );
2054
2055 let mut sbox: MultiUseSandbox = u_sbox.evolve().unwrap();
2057
2058 let actual: Vec<u8> = sbox
2060 .call(
2061 "ReadMappedBuffer",
2062 (guest_base, expected_bytes.len() as u64, true),
2063 )
2064 .unwrap();
2065
2066 assert_eq!(
2067 actual, expected_bytes,
2068 "Guest should read back the exact file content after deferred mapping"
2069 );
2070
2071 let _ = std::fs::remove_file(&path);
2072 }
2073
2074 #[test]
2078 fn test_map_file_cow_deferred_drop_without_evolve() {
2079 let (path, _) = create_test_file(
2080 "hyperlight_test_map_file_cow_deferred_drop.bin",
2081 &[0xAA; 4096],
2082 );
2083
2084 let guest_base: u64 = 0x1_0000_0000;
2085
2086 {
2087 let mut u_sbox = UninitializedSandbox::new(
2088 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2089 None,
2090 )
2091 .unwrap();
2092
2093 u_sbox.map_file_cow(&path, guest_base, None).unwrap();
2094 }
2097
2098 #[cfg(target_os = "windows")]
2101 std::fs::remove_file(&path)
2102 .expect("File should be deletable after dropping UninitializedSandbox");
2103 #[cfg(not(target_os = "windows"))]
2104 let _ = std::fs::remove_file(&path);
2105 }
2106
2107 #[test]
2110 fn test_map_file_cow_unaligned_guest_base() {
2111 let (path, _) =
2112 create_test_file("hyperlight_test_map_file_cow_unaligned.bin", &[0xBB; 4096]);
2113
2114 let mut u_sbox = UninitializedSandbox::new(
2115 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2116 None,
2117 )
2118 .unwrap();
2119
2120 let unaligned_base: u64 = (page_size::get() + 1) as u64;
2122 let result = u_sbox.map_file_cow(&path, unaligned_base, None);
2123 assert!(
2124 result.is_err(),
2125 "map_file_cow should reject unaligned guest_base"
2126 );
2127
2128 let _ = std::fs::remove_file(&path);
2129 }
2130
2131 #[test]
2133 fn test_map_file_cow_empty_file() {
2134 let temp_dir = std::env::temp_dir();
2135 let path = temp_dir.join("hyperlight_test_map_file_cow_empty.bin");
2136 let _ = std::fs::remove_file(&path);
2137 std::fs::File::create(&path).unwrap(); let mut u_sbox = UninitializedSandbox::new(
2140 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2141 None,
2142 )
2143 .unwrap();
2144
2145 let guest_base: u64 = 0x1_0000_0000;
2146 let result = u_sbox.map_file_cow(&path, guest_base, None);
2147 assert!(result.is_err(), "map_file_cow should reject empty files");
2148
2149 let _ = std::fs::remove_file(&path);
2150 }
2151
2152 #[test]
2154 fn test_map_file_cow_custom_label() {
2155 let (path, _) = create_test_file("hyperlight_test_map_file_cow_label.bin", &[0xDD; 4096]);
2156
2157 let mut sbox = UninitializedSandbox::new(
2158 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2159 None,
2160 )
2161 .unwrap()
2162 .evolve()
2163 .unwrap();
2164
2165 let result = sbox.map_file_cow(&path, 0x1_0000_0000, Some("my_ramfs"));
2166 assert!(
2167 result.is_ok(),
2168 "map_file_cow with custom label should succeed"
2169 );
2170
2171 let _ = std::fs::remove_file(&path);
2172 }
2173
    /// Mapping a file into an evolved sandbox must record a `FileMappingInfo`
    /// entry in the PEB (guest_addr, size, and NUL-terminated label).
    #[test]
    #[cfg(feature = "nanvix-unstable")]
    fn test_map_file_cow_peb_entry_multiuse() {
        use std::mem::offset_of;

        use hyperlight_common::mem::{FILE_MAPPING_LABEL_MAX_LEN, FileMappingInfo};

        let (path, _) = create_test_file("hyperlight_test_peb_entry_multiuse.bin", &[0xDD; 4096]);

        let guest_base: u64 = 0x1_0000_0000;
        let label = "my_ramfs";

        let mut sbox = UninitializedSandbox::new(
            GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
            None,
        )
        .unwrap()
        .evolve()
        .unwrap();

        let mapped_size = sbox.map_file_cow(&path, guest_base, Some(label)).unwrap();

        // The PEB's file_mappings count must reflect the single mapping.
        let count = sbox
            .mem_mgr
            .shared_mem
            .read::<u64>(sbox.mem_mgr.layout.get_file_mappings_size_offset())
            .unwrap();
        assert_eq!(
            count, 1,
            "PEB file_mappings count should be 1 after one mapping"
        );

        // First (and only) entry lives at the start of the mappings array.
        let entry_offset = sbox.mem_mgr.layout.get_file_mappings_array_offset();

        let stored_addr = sbox
            .mem_mgr
            .shared_mem
            .read::<u64>(entry_offset + offset_of!(FileMappingInfo, guest_addr))
            .unwrap();
        assert_eq!(stored_addr, guest_base, "PEB entry guest_addr should match");

        let stored_size = sbox
            .mem_mgr
            .shared_mem
            .read::<u64>(entry_offset + offset_of!(FileMappingInfo, size))
            .unwrap();
        assert_eq!(
            stored_size, mapped_size,
            "PEB entry size should match mapped_size"
        );

        // Read the label field byte-by-byte, then trim at the first NUL.
        let label_offset = entry_offset + offset_of!(FileMappingInfo, label);
        let mut label_buf = [0u8; FILE_MAPPING_LABEL_MAX_LEN + 1];
        for (i, byte) in label_buf.iter_mut().enumerate() {
            *byte = sbox
                .mem_mgr
                .shared_mem
                .read::<u8>(label_offset + i)
                .unwrap();
        }
        let label_len = label_buf
            .iter()
            .position(|&b| b == 0)
            .unwrap_or(label_buf.len());
        let stored_label = std::str::from_utf8(&label_buf[..label_len]).unwrap();
        assert_eq!(stored_label, label, "PEB entry label should match");

        let _ = std::fs::remove_file(&path);
    }
2250
    /// A mapping registered on an UninitializedSandbox (deferred) must also be
    /// recorded in the PEB once the sandbox is evolved.
    #[test]
    #[cfg(feature = "nanvix-unstable")]
    fn test_map_file_cow_peb_entry_deferred() {
        use std::mem::offset_of;

        use hyperlight_common::mem::{FILE_MAPPING_LABEL_MAX_LEN, FileMappingInfo};

        let (path, _) = create_test_file("hyperlight_test_peb_entry_deferred.bin", &[0xEE; 4096]);

        let guest_base: u64 = 0x1_0000_0000;
        let label = "deferred_fs";

        let mut u_sbox = UninitializedSandbox::new(
            GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
            None,
        )
        .unwrap();

        // Register the mapping pre-evolve; it is applied during evolve().
        let mapped_size = u_sbox.map_file_cow(&path, guest_base, Some(label)).unwrap();

        let sbox: MultiUseSandbox = u_sbox.evolve().unwrap();

        // After evolve, the PEB must contain exactly one mapping entry.
        let count = sbox
            .mem_mgr
            .shared_mem
            .read::<u64>(sbox.mem_mgr.layout.get_file_mappings_size_offset())
            .unwrap();
        assert_eq!(count, 1, "PEB file_mappings count should be 1 after evolve");

        let entry_offset = sbox.mem_mgr.layout.get_file_mappings_array_offset();

        let stored_addr = sbox
            .mem_mgr
            .shared_mem
            .read::<u64>(entry_offset + offset_of!(FileMappingInfo, guest_addr))
            .unwrap();
        assert_eq!(stored_addr, guest_base);

        let stored_size = sbox
            .mem_mgr
            .shared_mem
            .read::<u64>(entry_offset + offset_of!(FileMappingInfo, size))
            .unwrap();
        assert_eq!(stored_size, mapped_size);

        // Read the label field byte-by-byte, then trim at the first NUL.
        let label_offset = entry_offset + offset_of!(FileMappingInfo, label);
        let mut label_buf = [0u8; FILE_MAPPING_LABEL_MAX_LEN + 1];
        for (i, byte) in label_buf.iter_mut().enumerate() {
            *byte = sbox
                .mem_mgr
                .shared_mem
                .read::<u8>(label_offset + i)
                .unwrap();
        }
        let label_len = label_buf
            .iter()
            .position(|&b| b == 0)
            .unwrap_or(label_buf.len());
        let stored_label = std::str::from_utf8(&label_buf[..label_len]).unwrap();
        assert_eq!(
            stored_label, label,
            "PEB entry label should match after evolve"
        );

        let _ = std::fs::remove_file(&path);
    }
2323
    /// Mixes deferred (pre-evolve) and live (post-evolve) mappings and checks
    /// that all of them are recorded in the PEB array in order, with correct
    /// guest_addr, size, and label per entry.
    #[test]
    #[cfg(feature = "nanvix-unstable")]
    fn test_map_file_cow_peb_multiple_entries() {
        use std::mem::{offset_of, size_of};

        use hyperlight_common::mem::{FILE_MAPPING_LABEL_MAX_LEN, FileMappingInfo};

        const NUM_FILES: usize = 5;
        // How many of the files are mapped before evolve() (deferred).
        const DEFERRED_COUNT: usize = 3;

        // One page-sized file and one label per mapping.
        let mut paths = Vec::new();
        let mut labels: Vec<String> = Vec::new();
        for i in 0..NUM_FILES {
            let name = format!("hyperlight_test_peb_multi_{}.bin", i);
            let content = vec![i as u8 + 0xA0; 4096];
            let (path, _) = create_test_file(&name, &content);
            paths.push(path);
            labels.push(format!("file_{}", i));
        }

        // Non-overlapping guest bases, one page apart.
        let page_size = page_size::get() as u64;
        let base: u64 = 0x1_0000_0000;
        let guest_bases: Vec<u64> = (0..NUM_FILES as u64)
            .map(|i| base + i * page_size)
            .collect();

        let mut u_sbox = UninitializedSandbox::new(
            GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
            None,
        )
        .unwrap();

        // First DEFERRED_COUNT mappings are registered pre-evolve.
        let mut mapped_sizes = Vec::new();
        for i in 0..DEFERRED_COUNT {
            let size = u_sbox
                .map_file_cow(&paths[i], guest_bases[i], Some(&labels[i]))
                .unwrap();
            mapped_sizes.push(size);
        }

        let mut sbox: MultiUseSandbox = u_sbox.evolve().unwrap();

        // Remaining mappings are applied on the live sandbox.
        for i in DEFERRED_COUNT..NUM_FILES {
            let size = sbox
                .map_file_cow(&paths[i], guest_bases[i], Some(&labels[i]))
                .unwrap();
            mapped_sizes.push(size);
        }

        // The PEB count must include both deferred and live mappings.
        let count = sbox
            .mem_mgr
            .shared_mem
            .read::<u64>(sbox.mem_mgr.layout.get_file_mappings_size_offset())
            .unwrap();
        assert_eq!(
            count, NUM_FILES as u64,
            "PEB should have {NUM_FILES} entries"
        );

        // Walk the fixed-stride entry array and verify each record.
        let array_base = sbox.mem_mgr.layout.get_file_mappings_array_offset();
        for i in 0..NUM_FILES {
            let entry_offset = array_base + i * size_of::<FileMappingInfo>();

            let stored_addr = sbox
                .mem_mgr
                .shared_mem
                .read::<u64>(entry_offset + offset_of!(FileMappingInfo, guest_addr))
                .unwrap();
            assert_eq!(
                stored_addr, guest_bases[i],
                "Entry {i}: guest_addr mismatch"
            );

            let stored_size = sbox
                .mem_mgr
                .shared_mem
                .read::<u64>(entry_offset + offset_of!(FileMappingInfo, size))
                .unwrap();
            assert_eq!(stored_size, mapped_sizes[i], "Entry {i}: size mismatch");

            // Read the label field byte-by-byte, then trim at the first NUL.
            let label_base = entry_offset + offset_of!(FileMappingInfo, label);
            let mut label_buf = [0u8; FILE_MAPPING_LABEL_MAX_LEN + 1];
            for (j, byte) in label_buf.iter_mut().enumerate() {
                *byte = sbox.mem_mgr.shared_mem.read::<u8>(label_base + j).unwrap();
            }
            let label_len = label_buf
                .iter()
                .position(|&b| b == 0)
                .unwrap_or(label_buf.len());
            let stored_label = std::str::from_utf8(&label_buf[..label_len]).unwrap();
            assert_eq!(stored_label, labels[i], "Entry {i}: label mismatch");
        }

        for path in &paths {
            let _ = std::fs::remove_file(path);
        }
    }
2434
2435 #[test]
2437 fn test_map_file_cow_label_too_long() {
2438 let (path, _) =
2439 create_test_file("hyperlight_test_map_file_cow_long_label.bin", &[0xEE; 4096]);
2440
2441 let guest_base: u64 = 0x1_0000_0000;
2442
2443 let mut u_sbox = UninitializedSandbox::new(
2444 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2445 None,
2446 )
2447 .unwrap();
2448
2449 let long_label = "A".repeat(64);
2451 let result = u_sbox.map_file_cow(&path, guest_base, Some(&long_label));
2452 assert!(
2453 result.is_err(),
2454 "map_file_cow should reject labels longer than 63 bytes"
2455 );
2456
2457 let ok_label = "B".repeat(63);
2459 let result = u_sbox.map_file_cow(&path, guest_base, Some(&ok_label));
2460 assert!(
2461 result.is_ok(),
2462 "map_file_cow should accept labels of exactly 63 bytes"
2463 );
2464
2465 let _ = std::fs::remove_file(&path);
2466 }
2467
2468 #[test]
2470 fn test_map_file_cow_label_null_byte() {
2471 let (path, _) =
2472 create_test_file("hyperlight_test_map_file_cow_null_label.bin", &[0xFF; 4096]);
2473
2474 let guest_base: u64 = 0x1_0000_0000;
2475
2476 let mut u_sbox = UninitializedSandbox::new(
2477 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2478 None,
2479 )
2480 .unwrap();
2481
2482 let result = u_sbox.map_file_cow(&path, guest_base, Some("has\0null"));
2483 assert!(
2484 result.is_err(),
2485 "map_file_cow should reject labels containing null bytes"
2486 );
2487
2488 let _ = std::fs::remove_file(&path);
2489 }
2490
2491 #[test]
2493 fn test_map_file_cow_overlapping_mappings() {
2494 let (path1, _) =
2495 create_test_file("hyperlight_test_map_file_cow_overlap1.bin", &[0xAA; 4096]);
2496 let (path2, _) =
2497 create_test_file("hyperlight_test_map_file_cow_overlap2.bin", &[0xBB; 4096]);
2498
2499 let guest_base: u64 = 0x1_0000_0000;
2500
2501 let mut u_sbox = UninitializedSandbox::new(
2502 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2503 None,
2504 )
2505 .unwrap();
2506
2507 u_sbox.map_file_cow(&path1, guest_base, None).unwrap();
2509
2510 let result = u_sbox.map_file_cow(&path2, guest_base, None);
2512 assert!(
2513 result.is_err(),
2514 "map_file_cow should reject overlapping guest address ranges"
2515 );
2516
2517 let _ = std::fs::remove_file(&path1);
2518 let _ = std::fs::remove_file(&path2);
2519 }
2520
2521 #[test]
2524 fn test_map_file_cow_shared_mem_overlap() {
2525 let (path, _) = create_test_file(
2526 "hyperlight_test_map_file_cow_overlap_shm.bin",
2527 &[0xCC; 4096],
2528 );
2529
2530 let mut u_sbox = UninitializedSandbox::new(
2531 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2532 None,
2533 )
2534 .unwrap();
2535
2536 let base_addr = crate::mem::layout::SandboxMemoryLayout::BASE_ADDRESS as u64;
2538 let result = u_sbox.map_file_cow(&path, base_addr, None);
2540 assert!(
2541 result.is_err(),
2542 "map_file_cow should reject guest_base inside shared memory"
2543 );
2544
2545 let _ = std::fs::remove_file(&path);
2546 }
2547
2548 #[test]
2551 fn test_map_file_cow_max_limit() {
2552 use hyperlight_common::mem::MAX_FILE_MAPPINGS;
2553
2554 let mut u_sbox = UninitializedSandbox::new(
2555 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2556 None,
2557 )
2558 .unwrap();
2559
2560 let page_size = page_size::get() as u64;
2561 let base: u64 = 0x1_0000_0000;
2563
2564 let mut paths = Vec::new();
2567 for i in 0..MAX_FILE_MAPPINGS {
2568 let name = format!("hyperlight_test_max_limit_{}.bin", i);
2569 let (path, _) = create_test_file(&name, &[0xAA; 4096]);
2570 let guest_base = base + (i as u64) * page_size;
2571 u_sbox.map_file_cow(&path, guest_base, None).unwrap();
2572 paths.push(path);
2573 }
2574
2575 let name = format!("hyperlight_test_max_limit_{}.bin", MAX_FILE_MAPPINGS);
2577 let (path, _) = create_test_file(&name, &[0xBB; 4096]);
2578 let guest_base = base + (MAX_FILE_MAPPINGS as u64) * page_size;
2579 let result = u_sbox.map_file_cow(&path, guest_base, None);
2580 assert!(
2581 result.is_err(),
2582 "map_file_cow should reject after MAX_FILE_MAPPINGS registrations"
2583 );
2584
2585 for p in &paths {
2587 let _ = std::fs::remove_file(p);
2588 }
2589 let _ = std::fs::remove_file(&path);
2590 }
2591}