1use std::collections::HashSet;
18use std::path::Path;
19use std::sync::atomic::Ordering;
20use std::sync::{Arc, Mutex};
21
22use flatbuffers::FlatBufferBuilder;
23use hyperlight_common::flatbuffer_wrappers::function_call::{FunctionCall, FunctionCallType};
24use hyperlight_common::flatbuffer_wrappers::function_types::{
25 ParameterValue, ReturnType, ReturnValue,
26};
27use hyperlight_common::flatbuffer_wrappers::util::estimate_flatbuffer_capacity;
28use tracing::{Span, instrument};
29
30use super::Callable;
31use super::file_mapping::prepare_file_cow;
32use super::host_funcs::FunctionRegistry;
33use super::snapshot::Snapshot;
34use crate::HyperlightError::{self, SnapshotSandboxMismatch};
35use crate::func::{ParameterTuple, SupportedReturnType};
36use crate::hypervisor::InterruptHandle;
37use crate::hypervisor::hyperlight_vm::{HyperlightVm, HyperlightVmError};
38use crate::mem::memory_region::{MemoryRegion, MemoryRegionFlags};
39use crate::mem::mgr::SandboxMemoryManager;
40use crate::mem::shared_mem::{HostSharedMemory, SharedMemory as _};
41use crate::metrics::{
42 METRIC_GUEST_ERROR, METRIC_GUEST_ERROR_LABEL_CODE, maybe_time_and_emit_guest_call,
43};
44use crate::{Result, log_then_return};
45
/// A sandbox that can make multiple guest function calls, with snapshot /
/// restore support between calls.
pub struct MultiUseSandbox {
    // Unique id assigned from a global counter at construction; snapshots
    // record it so they can only be restored into their origin sandbox.
    id: u64,
    // Set when a failed guest call or restore leaves the guest in an
    // unknown state; most operations refuse to run while poisoned.
    poisoned: bool,
    pub(super) host_funcs: Arc<Mutex<FunctionRegistry>>,
    pub(crate) mem_mgr: SandboxMemoryManager<HostSharedMemory>,
    vm: HyperlightVm,
    #[cfg(gdb)]
    dbg_mem_access_fn: Arc<Mutex<SandboxMemoryManager<HostSharedMemory>>>,
    // Most recent snapshot, cached; invalidated (set to None) by any
    // state-mutating operation such as `call` or `map_region`.
    snapshot: Option<Arc<Snapshot>>,
}
98
99impl MultiUseSandbox {
100 #[instrument(skip_all, parent = Span::current(), level = "Trace")]
106 pub(super) fn from_uninit(
107 host_funcs: Arc<Mutex<FunctionRegistry>>,
108 mgr: SandboxMemoryManager<HostSharedMemory>,
109 vm: HyperlightVm,
110 #[cfg(gdb)] dbg_mem_access_fn: Arc<Mutex<SandboxMemoryManager<HostSharedMemory>>>,
111 ) -> MultiUseSandbox {
112 Self {
113 id: super::snapshot::SANDBOX_CONFIGURATION_COUNTER.fetch_add(1, Ordering::Relaxed),
114 poisoned: false,
115 host_funcs,
116 mem_mgr: mgr,
117 vm,
118 #[cfg(gdb)]
119 dbg_mem_access_fn,
120 snapshot: None,
121 }
122 }
123
124 #[instrument(err(Debug), skip_all, parent = Span::current())]
153 pub fn snapshot(&mut self) -> Result<Arc<Snapshot>> {
154 if self.poisoned {
155 return Err(crate::HyperlightError::PoisonedSandbox);
156 }
157
158 if let Some(snapshot) = &self.snapshot {
159 return Ok(snapshot.clone());
160 }
161 let mapped_regions_iter = self.vm.get_mapped_regions();
162 let mapped_regions_vec: Vec<MemoryRegion> = mapped_regions_iter.cloned().collect();
163 let root_pt_gpa = self
164 .vm
165 .get_root_pt()
166 .map_err(|e| HyperlightError::HyperlightVmError(e.into()))?;
167 let stack_top_gpa = self.vm.get_stack_top();
168 let sregs = self
169 .vm
170 .get_snapshot_sregs()
171 .map_err(|e| HyperlightError::HyperlightVmError(e.into()))?;
172 let entrypoint = self.vm.get_entrypoint();
173 let memory_snapshot = self.mem_mgr.snapshot(
174 self.id,
175 mapped_regions_vec,
176 root_pt_gpa,
177 stack_top_gpa,
178 sregs,
179 entrypoint,
180 )?;
181 let snapshot = Arc::new(memory_snapshot);
182 self.snapshot = Some(snapshot.clone());
183 Ok(snapshot)
184 }
185
186 #[instrument(err(Debug), skip_all, parent = Span::current())]
262 pub fn restore(&mut self, snapshot: Arc<Snapshot>) -> Result<()> {
263 if self.id != snapshot.sandbox_id() {
287 return Err(SnapshotSandboxMismatch);
288 }
289
290 let (gsnapshot, gscratch) = self.mem_mgr.restore_snapshot(&snapshot)?;
291 if let Some(gsnapshot) = gsnapshot {
292 self.vm
293 .update_snapshot_mapping(gsnapshot)
294 .map_err(|e| HyperlightError::HyperlightVmError(e.into()))?;
295 }
296 if let Some(gscratch) = gscratch {
297 self.vm
298 .update_scratch_mapping(gscratch)
299 .map_err(|e| HyperlightError::HyperlightVmError(e.into()))?;
300 }
301
302 let sregs = snapshot.sregs().ok_or_else(|| {
303 HyperlightError::Error("snapshot from running sandbox should have sregs".to_string())
304 })?;
305 self.vm
308 .reset_vcpu(snapshot.root_pt_gpa(), sregs)
309 .map_err(|e| {
310 self.poisoned = true;
311 HyperlightVmError::Restore(e)
312 })?;
313
314 self.vm.set_stack_top(snapshot.stack_top_gva());
315 self.vm.set_entrypoint(snapshot.entrypoint());
316
317 let current_regions: HashSet<_> = self.vm.get_mapped_regions().cloned().collect();
318 let snapshot_regions: HashSet<_> = snapshot.regions().iter().cloned().collect();
319
320 let regions_to_unmap = current_regions.difference(&snapshot_regions);
321 let regions_to_map = snapshot_regions.difference(¤t_regions);
322
323 for region in regions_to_unmap {
324 self.vm
325 .unmap_region(region)
326 .map_err(HyperlightVmError::UnmapRegion)?;
327 }
328
329 for region in regions_to_map {
330 unsafe { self.vm.map_region(region) }.map_err(HyperlightVmError::MapRegion)?;
333 }
334
335 self.snapshot = Some(snapshot.clone());
337
338 self.poisoned = false;
348
349 Ok(())
350 }
351
352 #[doc(hidden)]
389 #[deprecated(
390 since = "0.8.0",
391 note = "Deprecated in favour of call and snapshot/restore."
392 )]
393 #[instrument(err(Debug), skip(self, args), parent = Span::current())]
394 pub fn call_guest_function_by_name<Output: SupportedReturnType>(
395 &mut self,
396 func_name: &str,
397 args: impl ParameterTuple,
398 ) -> Result<Output> {
399 if self.poisoned {
400 return Err(crate::HyperlightError::PoisonedSandbox);
401 }
402 let snapshot = self.snapshot()?;
403 let res = self.call(func_name, args);
404 self.restore(snapshot)?;
405 res
406 }
407
408 #[instrument(err(Debug), skip(self, args), parent = Span::current())]
485 pub fn call<Output: SupportedReturnType>(
486 &mut self,
487 func_name: &str,
488 args: impl ParameterTuple,
489 ) -> Result<Output> {
490 if self.poisoned {
491 return Err(crate::HyperlightError::PoisonedSandbox);
492 }
493 self.snapshot = None;
495 maybe_time_and_emit_guest_call(func_name, || {
496 let ret = self.call_guest_function_by_name_no_reset(
497 func_name,
498 Output::TYPE,
499 args.into_value(),
500 );
501 let ret = Output::from_value(ret?)?;
504 Ok(ret)
505 })
506 }
507
    /// Map a host memory region into the guest's address space.
    ///
    /// # Safety
    /// The caller must ensure `rgn` describes valid host memory that
    /// remains alive for the lifetime of the mapping.
    ///
    /// # Errors
    /// Fails if the sandbox is poisoned, if the region is writable
    /// (writable mappings are not yet supported), or if the VM rejects
    /// the mapping.
    #[instrument(err(Debug), skip(self, rgn), parent = Span::current())]
    pub unsafe fn map_region(&mut self, rgn: &MemoryRegion) -> Result<()> {
        if self.poisoned {
            return Err(crate::HyperlightError::PoisonedSandbox);
        }
        if rgn.flags.contains(MemoryRegionFlags::WRITE) {
            log_then_return!("TODO: Writable mappings not yet supported");
        }
        // Mapping changes guest-visible state; the cached snapshot is stale.
        self.snapshot = None;
        unsafe { self.vm.map_region(rgn) }.map_err(HyperlightVmError::MapRegion)?;
        self.mem_mgr.mapped_rgns += 1;
        Ok(())
    }
540
541 #[instrument(err(Debug), skip(self, file_path, guest_base, label), parent = Span::current())]
553 pub fn map_file_cow(
554 &mut self,
555 file_path: &Path,
556 guest_base: u64,
557 label: Option<&str>,
558 ) -> Result<u64> {
559 if self.poisoned {
560 return Err(crate::HyperlightError::PoisonedSandbox);
561 }
562
563 #[cfg(feature = "nanvix-unstable")]
566 let current_count = self
567 .mem_mgr
568 .shared_mem
569 .read::<u64>(self.mem_mgr.layout.get_file_mappings_size_offset())?
570 as usize;
571 #[cfg(feature = "nanvix-unstable")]
572 if current_count >= hyperlight_common::mem::MAX_FILE_MAPPINGS {
573 return Err(crate::HyperlightError::Error(format!(
574 "map_file_cow: file mapping limit reached ({} of {})",
575 current_count,
576 hyperlight_common::mem::MAX_FILE_MAPPINGS,
577 )));
578 }
579
580 let mut prepared = prepare_file_cow(file_path, guest_base, label)?;
582
583 let shared_size = self.mem_mgr.shared_mem.mem_size() as u64;
586 let base_addr = crate::mem::layout::SandboxMemoryLayout::BASE_ADDRESS as u64;
587 let shared_end = base_addr.checked_add(shared_size).ok_or_else(|| {
588 crate::HyperlightError::Error("shared memory end overflow".to_string())
589 })?;
590 let mapping_end = guest_base
591 .checked_add(prepared.size as u64)
592 .ok_or_else(|| {
593 crate::HyperlightError::Error(format!(
594 "map_file_cow: guest address overflow: {:#x} + {:#x}",
595 guest_base, prepared.size
596 ))
597 })?;
598 if guest_base < shared_end && mapping_end > base_addr {
599 return Err(crate::HyperlightError::Error(format!(
600 "map_file_cow: mapping [{:#x}..{:#x}) overlaps sandbox shared memory [{:#x}..{:#x})",
601 guest_base, mapping_end, base_addr, shared_end,
602 )));
603 }
604
605 let region = prepared.to_memory_region()?;
607
608 for existing_region in self.vm.get_mapped_regions() {
610 let ex_start = existing_region.guest_region.start as u64;
611 let ex_end = existing_region.guest_region.end as u64;
612 if guest_base < ex_end && mapping_end > ex_start {
613 return Err(crate::HyperlightError::Error(format!(
614 "map_file_cow: mapping [{:#x}..{:#x}) overlaps existing mapping [{:#x}..{:#x})",
615 guest_base, mapping_end, ex_start, ex_end,
616 )));
617 }
618 }
619
620 self.snapshot = None;
622
623 unsafe { self.vm.map_region(®ion) }
624 .map_err(HyperlightVmError::MapRegion)
625 .map_err(crate::HyperlightError::HyperlightVmError)?;
626
627 let size = prepared.size as u64;
628
629 prepared.mark_consumed();
637 self.mem_mgr.mapped_rgns += 1;
638
639 #[cfg(feature = "nanvix-unstable")]
644 self.mem_mgr
645 .write_file_mapping_entry(prepared.guest_base, size, &prepared.label)?;
646
647 Ok(size)
648 }
649
    /// Fuzzing-only entry point: call a guest function with a type-erased
    /// return type and argument list (no compile-time typing of the call).
    ///
    /// # Errors
    /// Fails if the sandbox is poisoned or the guest call fails.
    #[cfg(feature = "fuzzing")]
    #[instrument(err(Debug), skip(self, args), parent = Span::current())]
    pub fn call_type_erased_guest_function_by_name(
        &mut self,
        func_name: &str,
        ret_type: ReturnType,
        args: Vec<ParameterValue>,
    ) -> Result<ReturnValue> {
        if self.poisoned {
            return Err(crate::HyperlightError::PoisonedSandbox);
        }
        // The guest is about to run, so the cached snapshot becomes stale.
        self.snapshot = None;
        maybe_time_and_emit_guest_call(func_name, || {
            self.call_guest_function_by_name_no_reset(func_name, ret_type, args)
        })
    }
675
    /// Core guest-call path: encodes the call as a flatbuffer, writes it
    /// into guest memory, dispatches into the VM, and decodes the result.
    /// Does NOT snapshot or restore around the call (hence "no_reset");
    /// callers decide whether state changes persist.
    fn call_guest_function_by_name_no_reset(
        &mut self,
        function_name: &str,
        return_type: ReturnType,
        args: Vec<ParameterValue>,
    ) -> Result<ReturnValue> {
        if self.poisoned {
            return Err(crate::HyperlightError::PoisonedSandbox);
        }
        // Drop any cancellation request left over from a previous call.
        self.vm.clear_cancel();

        let res = (|| {
            // Pre-size the flatbuffer builder to avoid regrowth while encoding.
            let estimated_capacity = estimate_flatbuffer_capacity(function_name, &args);

            let fc = FunctionCall::new(
                function_name.to_string(),
                Some(args),
                FunctionCallType::Guest,
                return_type,
            );

            let mut builder = FlatBufferBuilder::with_capacity(estimated_capacity);
            let buffer = fc.encode(&mut builder);

            self.mem_mgr.write_guest_function_call(buffer)?;

            let dispatch_res = self.vm.dispatch_call_from_host(
                &mut self.mem_mgr,
                &self.host_funcs,
                #[cfg(gdb)]
                self.dbg_mem_access_fn.clone(),
            );

            if let Err(e) = dispatch_res {
                // `promote` converts the dispatch error and reports whether
                // it leaves the guest in an unknown state (→ poison).
                let (error, should_poison) = e.promote();
                self.poisoned |= should_poison;
                return Err(error);
            }

            let guest_result = self.mem_mgr.get_guest_function_call_result()?.into_inner();

            match guest_result {
                Ok(val) => Ok(val),
                Err(guest_error) => {
                    // Count guest-reported errors, labelled by error code.
                    metrics::counter!(
                        METRIC_GUEST_ERROR,
                        METRIC_GUEST_ERROR_LABEL_CODE => (guest_error.code as u64).to_string()
                    )
                    .increment(1);

                    Err(HyperlightError::GuestError(
                        guest_error.code,
                        guest_error.message,
                    ))
                }
            }
        })();

        // Always discard any partial abort message so it cannot leak into
        // a later call's abort report.
        self.mem_mgr.abort_buffer.clear();

        if let Err(e) = &res {
            // A failed call may leave partial data in the I/O buffers.
            self.mem_mgr.clear_io_buffers();

            self.poisoned |= e.is_poison_error();
        }

        res
    }
758
    /// Returns a handle that can be used from another thread to interrupt
    /// an in-progress guest call in this sandbox's VM.
    pub fn interrupt_handle(&self) -> Arc<dyn InterruptHandle> {
        self.vm.interrupt_handle()
    }
791
    /// Write a crashdump of the current sandbox state to the default
    /// crashdump location.
    #[cfg(crashdump)]
    #[instrument(err(Debug), skip_all, parent = Span::current())]
    pub fn generate_crashdump(&mut self) -> Result<()> {
        crate::hypervisor::crashdump::generate_crashdump(&self.vm, &mut self.mem_mgr, None)
    }
832
    /// Write a crashdump of the current sandbox state into directory `dir`.
    #[cfg(crashdump)]
    #[instrument(err(Debug), skip_all, parent = Span::current())]
    pub fn generate_crashdump_to_dir(&mut self, dir: impl Into<String>) -> Result<()> {
        crate::hypervisor::crashdump::generate_crashdump(
            &self.vm,
            &mut self.mem_mgr,
            Some(dir.into()),
        )
    }
848
    /// Whether this sandbox is poisoned, i.e. a failed guest call or
    /// restore left the guest in an unknown state. A poisoned sandbox
    /// rejects most operations until a successful `restore`.
    pub fn poisoned(&self) -> bool {
        self.poisoned
    }
888}
889
890impl Callable for MultiUseSandbox {
891 fn call<Output: SupportedReturnType>(
892 &mut self,
893 func_name: &str,
894 args: impl ParameterTuple,
895 ) -> Result<Output> {
896 if self.poisoned {
897 return Err(crate::HyperlightError::PoisonedSandbox);
898 }
899 self.call(func_name, args)
900 }
901}
902
903impl std::fmt::Debug for MultiUseSandbox {
904 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
905 f.debug_struct("MultiUseSandbox").finish()
906 }
907}
908
909#[cfg(test)]
910mod tests {
911 use std::sync::{Arc, Barrier};
912 use std::thread;
913
914 use hyperlight_common::flatbuffer_wrappers::guest_error::ErrorCode;
915 use hyperlight_testing::sandbox_sizes::{LARGE_HEAP_SIZE, MEDIUM_HEAP_SIZE, SMALL_HEAP_SIZE};
916 use hyperlight_testing::simple_guest_as_string;
917
918 use crate::mem::memory_region::{MemoryRegion, MemoryRegionFlags, MemoryRegionType};
919 use crate::mem::shared_mem::{ExclusiveSharedMemory, GuestSharedMemory, SharedMemory as _};
920 use crate::sandbox::SandboxConfiguration;
921 use crate::{GuestBinary, HyperlightError, MultiUseSandbox, Result, UninitializedSandbox};
922
923 #[test]
924 fn poison() {
925 let mut sbox: MultiUseSandbox = {
926 let path = simple_guest_as_string().unwrap();
927 let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
928 u_sbox.evolve()
929 }
930 .unwrap();
931 let snapshot = sbox.snapshot().unwrap();
932
933 let res = sbox
935 .call::<()>("guest_panic", "hello".to_string())
936 .unwrap_err();
937 assert!(
938 matches!(res, HyperlightError::GuestAborted(code, context) if code == ErrorCode::UnknownError as u8 && context.contains("hello"))
939 );
940 assert!(sbox.poisoned());
941
942 let res = sbox
944 .call::<()>("guest_panic", "hello2".to_string())
945 .unwrap_err();
946 assert!(matches!(res, HyperlightError::PoisonedSandbox));
947
948 if let Err(e) = sbox.snapshot() {
950 assert!(sbox.poisoned());
951 assert!(matches!(e, HyperlightError::PoisonedSandbox));
952 } else {
953 panic!("Snapshot should fail");
954 }
955
956 {
958 let map_mem = allocate_guest_memory();
959 let guest_base = 0x0;
960 let region = region_for_memory(&map_mem, guest_base, MemoryRegionFlags::READ);
961 let res = unsafe { sbox.map_region(®ion) }.unwrap_err();
962 assert!(matches!(res, HyperlightError::PoisonedSandbox));
963 }
964
965 {
967 let temp_file = std::env::temp_dir().join("test_poison_map_file.bin");
968 let res = sbox.map_file_cow(&temp_file, 0x0, None).unwrap_err();
969 assert!(matches!(res, HyperlightError::PoisonedSandbox));
970 std::fs::remove_file(&temp_file).ok(); }
972
973 #[allow(deprecated)]
975 let res = sbox
976 .call_guest_function_by_name::<String>("Echo", "test".to_string())
977 .unwrap_err();
978 assert!(matches!(res, HyperlightError::PoisonedSandbox));
979
980 sbox.restore(snapshot.clone()).unwrap();
982 assert!(!sbox.poisoned());
983
984 let res = sbox.call::<String>("Echo", "hello2".to_string()).unwrap();
986 assert_eq!(res, "hello2".to_string());
987 assert!(!sbox.poisoned());
988
989 let res = sbox
991 .call::<()>("guest_panic", "hello".to_string())
992 .unwrap_err();
993 assert!(
994 matches!(res, HyperlightError::GuestAborted(code, context) if code == ErrorCode::UnknownError as u8 && context.contains("hello"))
995 );
996 assert!(sbox.poisoned());
997
998 sbox.restore(snapshot.clone()).unwrap();
1000 assert!(!sbox.poisoned());
1001
1002 let res = sbox.call::<String>("Echo", "hello3".to_string()).unwrap();
1004 assert_eq!(res, "hello3".to_string());
1005 assert!(!sbox.poisoned());
1006
1007 let _ = sbox.snapshot().unwrap();
1009 }
1010
    /// An error returned by a host function surfaces to the caller as
    /// `GuestError(HostFunctionError, ..)` and leaves the sandbox reusable.
    #[test]
    fn host_func_error() {
        let path = simple_guest_as_string().unwrap();
        let mut sandbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
        sandbox
            .register("HostError", || -> Result<()> {
                Err(HyperlightError::Error("hi".to_string()))
            })
            .unwrap();
        let mut sandbox = sandbox.evolve().unwrap();

        // Repeat many times to confirm the error path does not degrade state.
        for _ in 0..1000 {
            let result = sandbox
                .call::<i64>(
                    "CallGivenParamlessHostFuncThatReturnsI64",
                    "HostError".to_string(),
                )
                .unwrap_err();

            assert!(
                matches!(result, HyperlightError::GuestError(code, msg) if code == ErrorCode::HostFunctionError && msg == "hi"),
            );
        }
    }
1037
    /// Calling the guest's `CallHostExpectError` with an unknown host
    /// function name should return Ok — the guest is expected to absorb
    /// the host-function error itself.
    #[test]
    fn call_host_func_expect_error() {
        let path = simple_guest_as_string().unwrap();
        let sandbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
        let mut sandbox = sandbox.evolve().unwrap();
        sandbox
            .call::<()>("CallHostExpectError", "SomeUnknownHostFunc".to_string())
            .unwrap();
    }
1047
    /// Alternates successful and failing guest calls with small (4 KiB)
    /// I/O buffers to verify buffers are reset between calls and failures
    /// leave no residue.
    #[test]
    fn io_buffer_reset() {
        let mut cfg = SandboxConfiguration::default();
        cfg.set_input_data_size(4096);
        cfg.set_output_data_size(4096);
        let path = simple_guest_as_string().unwrap();
        let mut sandbox =
            UninitializedSandbox::new(GuestBinary::FilePath(path), Some(cfg)).unwrap();
        sandbox.register("HostAdd", |a: i32, b: i32| a + b).unwrap();
        let mut sandbox = sandbox.evolve().unwrap();

        for _ in 0..1000 {
            let result = sandbox.call::<i32>("Add", (5i32, 10i32)).unwrap();
            assert_eq!(result, 15);
            let result = sandbox.call::<i32>("AddToStaticAndFail", ()).unwrap_err();
            assert!(
                matches!(result, HyperlightError::GuestError (code, msg ) if code == ErrorCode::GuestError && msg == "Crash on purpose")
            );
        }
    }
1070
    /// `call` lets guest state changes persist, while the deprecated
    /// `call_guest_function_by_name` restores the pre-call snapshot so its
    /// changes do not stick.
    #[test]
    fn test_call_guest_function_by_name() {
        let mut sbox: MultiUseSandbox = {
            let path = simple_guest_as_string().unwrap();
            let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
            u_sbox.evolve()
        }
        .unwrap();

        let snapshot = sbox.snapshot().unwrap();

        // Plain `call` mutates persistent guest state.
        let _ = sbox.call::<i32>("AddToStatic", 5i32).unwrap();
        let res: i32 = sbox.call("GetStatic", ()).unwrap();
        assert_eq!(res, 5);

        sbox.restore(snapshot).unwrap();
        // The deprecated API restores the snapshot after the call, so the
        // static counter reads 0 again.
        #[allow(deprecated)]
        let _ = sbox
            .call_guest_function_by_name::<i32>("AddToStatic", 5i32)
            .unwrap();
        #[allow(deprecated)]
        let res: i32 = sbox.call_guest_function_by_name("GetStatic", ()).unwrap();
        assert_eq!(res, 0);
    }
1096
    /// Sandboxes configured with a small heap and near-minimal scratch
    /// space should still survive many repeated guest calls.
    #[test]
    fn test_with_small_stack_and_heap() {
        let mut cfg = SandboxConfiguration::default();
        cfg.set_heap_size(20 * 1024);
        // Minimum scratch for the configured I/O sizes, plus some headroom.
        let min_scratch = hyperlight_common::layout::min_scratch_size(
            cfg.get_input_data_size(),
            cfg.get_output_data_size(),
        );
        cfg.set_scratch_size(min_scratch + 0x10000 + 0x10000);

        let mut sbox1: MultiUseSandbox = {
            let path = simple_guest_as_string().unwrap();
            let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), Some(cfg)).unwrap();
            u_sbox.evolve()
        }
        .unwrap();

        for _ in 0..1000 {
            sbox1.call::<String>("Echo", "hello".to_string()).unwrap();
        }

        let mut sbox2: MultiUseSandbox = {
            let path = simple_guest_as_string().unwrap();
            let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), Some(cfg)).unwrap();
            u_sbox.evolve()
        }
        .unwrap();

        for i in 0..1000 {
            sbox2
                .call::<i32>(
                    "PrintUsingPrintf",
                    format!("Hello World {}\n", i).to_string(),
                )
                .unwrap();
        }
    }
1140
    /// Restoring a snapshot rolls guest-visible state (a static counter)
    /// back to its value at snapshot time.
    #[test]
    fn snapshot_evolve_restore_handles_state_correctly() {
        let mut sbox: MultiUseSandbox = {
            let path = simple_guest_as_string().unwrap();
            let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
            u_sbox.evolve()
        }
        .unwrap();

        let snapshot = sbox.snapshot().unwrap();

        let _ = sbox.call::<i32>("AddToStatic", 5i32).unwrap();

        let res: i32 = sbox.call("GetStatic", ()).unwrap();
        assert_eq!(res, 5);

        // After restore, the static must read 0 again.
        sbox.restore(snapshot).unwrap();
        let res: i32 = sbox.call("GetStatic", ()).unwrap();
        assert_eq!(res, 0);
    }
1163
1164 #[test]
1165 fn test_trigger_exception_on_guest() {
1166 let usbox = UninitializedSandbox::new(
1167 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1168 None,
1169 )
1170 .unwrap();
1171
1172 let mut multi_use_sandbox: MultiUseSandbox = usbox.evolve().unwrap();
1173
1174 let res: Result<()> = multi_use_sandbox.call("TriggerException", ());
1175
1176 assert!(res.is_err());
1177
1178 match res.unwrap_err() {
1179 HyperlightError::GuestAborted(_, msg) => {
1180 assert!(msg.contains("InvalidOpcode"));
1182 }
1183 e => panic!(
1184 "Expected HyperlightError::GuestExecutionError but got {:?}",
1185 e
1186 ),
1187 }
1188 }
1189
    /// Stress test: 10 threads each create and exercise 20 sandboxes,
    /// all released simultaneously via a barrier.
    #[test]
    fn create_200_sandboxes() {
        const NUM_THREADS: usize = 10;
        const SANDBOXES_PER_THREAD: usize = 20;

        // +1 party so the main thread can release all workers at once.
        let start_barrier = Arc::new(Barrier::new(NUM_THREADS + 1));
        let mut thread_handles = vec![];

        for _ in 0..NUM_THREADS {
            let barrier = start_barrier.clone();

            let handle = thread::spawn(move || {
                barrier.wait();

                for _ in 0..SANDBOXES_PER_THREAD {
                    let guest_path = simple_guest_as_string().expect("Guest Binary Missing");
                    let uninit =
                        UninitializedSandbox::new(GuestBinary::FilePath(guest_path), None).unwrap();

                    let mut sandbox: MultiUseSandbox = uninit.evolve().unwrap();

                    // Sanity call to prove the sandbox actually works.
                    let result: i32 = sandbox.call("GetStatic", ()).unwrap();
                    assert_eq!(result, 0);
                }
            });

            thread_handles.push(handle);
        }

        start_barrier.wait();

        for handle in thread_handles {
            handle.join().unwrap();
        }
    }
1226
1227 #[test]
1228 fn test_mmap() {
1229 let mut sbox = UninitializedSandbox::new(
1230 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1231 None,
1232 )
1233 .unwrap()
1234 .evolve()
1235 .unwrap();
1236
1237 let expected = b"hello world";
1238 let map_mem = page_aligned_memory(expected);
1239 let guest_base = 0x1_0000_0000; unsafe {
1242 sbox.map_region(®ion_for_memory(
1243 &map_mem,
1244 guest_base,
1245 MemoryRegionFlags::READ,
1246 ))
1247 .unwrap();
1248 }
1249
1250 let _guard = map_mem.lock.try_read().unwrap();
1251 let actual: Vec<u8> = sbox
1252 .call(
1253 "ReadMappedBuffer",
1254 (guest_base as u64, expected.len() as u64, true),
1255 )
1256 .unwrap();
1257
1258 assert_eq!(actual, expected);
1259 }
1260
1261 #[test]
1263 fn test_mmap_write_exec() {
1264 let mut sbox = UninitializedSandbox::new(
1265 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1266 None,
1267 )
1268 .unwrap()
1269 .evolve()
1270 .unwrap();
1271
1272 let expected = &[0x90, 0x90, 0x90, 0xC3]; let map_mem = page_aligned_memory(expected);
1274 let guest_base = 0x1_0000_0000; unsafe {
1277 sbox.map_region(®ion_for_memory(
1278 &map_mem,
1279 guest_base,
1280 MemoryRegionFlags::READ | MemoryRegionFlags::EXECUTE,
1281 ))
1282 .unwrap();
1283 }
1284
1285 let _guard = map_mem.lock.try_read().unwrap();
1286
1287 let succeed = sbox
1289 .call::<bool>(
1290 "ExecMappedBuffer",
1291 (guest_base as u64, expected.len() as u64),
1292 )
1293 .unwrap();
1294 assert!(succeed, "Expected execution of mapped buffer to succeed");
1295
1296 let err = sbox
1298 .call::<bool>(
1299 "WriteMappedBuffer",
1300 (guest_base as u64, expected.len() as u64),
1301 )
1302 .unwrap_err();
1303
1304 match err {
1305 HyperlightError::MemoryAccessViolation(addr, ..) if addr == guest_base as u64 => {}
1306 _ => panic!("Expected MemoryAccessViolation error"),
1307 };
1308 }
1309
    /// Copies `src` into freshly allocated guest-shared memory whose length
    /// is rounded up to a whole number of pages.
    fn page_aligned_memory(src: &[u8]) -> GuestSharedMemory {
        use hyperlight_common::mem::PAGE_SIZE_USIZE;

        // Round the length up to the next page boundary.
        let len = src.len().div_ceil(PAGE_SIZE_USIZE) * PAGE_SIZE_USIZE;

        let mut mem = ExclusiveSharedMemory::new(len).unwrap();
        mem.copy_from_slice(src, 0).unwrap();

        // Convert exclusive memory into its guest-shared counterpart.
        let (_, guest_mem) = mem.build();

        guest_mem
    }
1322
    /// Builds a `MemoryRegion` mapping the whole of `mem` at `guest_base`
    /// with the given access flags. The region type (Heap) is arbitrary
    /// for these tests.
    fn region_for_memory(
        mem: &GuestSharedMemory,
        guest_base: usize,
        flags: MemoryRegionFlags,
    ) -> MemoryRegion {
        let len = mem.mem_size();
        MemoryRegion {
            host_region: mem.host_region_base()..mem.host_region_end(),
            guest_region: guest_base..(guest_base + len),
            flags,
            region_type: MemoryRegionType::Heap,
        }
    }
1336
    /// Convenience: a small page-aligned guest buffer with fixed test data.
    fn allocate_guest_memory() -> GuestSharedMemory {
        page_aligned_memory(b"test data for snapshot")
    }
1340
1341 #[test]
1342 fn snapshot_restore_handles_remapping_correctly() {
1343 let mut sbox: MultiUseSandbox = {
1344 let path = simple_guest_as_string().unwrap();
1345 let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
1346 u_sbox.evolve().unwrap()
1347 };
1348
1349 let snapshot1 = sbox.snapshot().unwrap();
1351 assert_eq!(sbox.vm.get_mapped_regions().count(), 0);
1352
1353 let map_mem = allocate_guest_memory();
1355 let guest_base = 0x200000000_usize;
1356 let region = region_for_memory(&map_mem, guest_base, MemoryRegionFlags::READ);
1357
1358 unsafe { sbox.map_region(®ion).unwrap() };
1359 assert_eq!(sbox.vm.get_mapped_regions().count(), 1);
1360 let orig_read = sbox
1361 .call::<Vec<u8>>(
1362 "ReadMappedBuffer",
1363 (
1364 guest_base as u64,
1365 hyperlight_common::vmem::PAGE_SIZE as u64,
1366 true,
1367 ),
1368 )
1369 .unwrap();
1370
1371 let snapshot2 = sbox.snapshot().unwrap();
1373 assert_eq!(sbox.vm.get_mapped_regions().count(), 1);
1374
1375 sbox.restore(snapshot1.clone()).unwrap();
1377 assert_eq!(sbox.vm.get_mapped_regions().count(), 0);
1378 let is_mapped = sbox
1379 .call::<bool>("CheckMapped", (guest_base as u64,))
1380 .unwrap();
1381 assert!(!is_mapped);
1382
1383 sbox.restore(snapshot2.clone()).unwrap();
1386 assert_eq!(sbox.vm.get_mapped_regions().count(), 0);
1387 let is_mapped = sbox
1388 .call::<bool>("CheckMapped", (guest_base as u64,))
1389 .unwrap();
1390 assert!(is_mapped);
1391
1392 let new_read = sbox
1394 .call::<Vec<u8>>(
1395 "ReadMappedBuffer",
1396 (
1397 guest_base as u64,
1398 hyperlight_common::vmem::PAGE_SIZE as u64,
1399 false,
1400 ),
1401 )
1402 .unwrap();
1403 assert_eq!(new_read, orig_read);
1404 }
1405
    /// A snapshot can only be restored into the sandbox it was taken from,
    /// and sandbox ids are never reused — even after sandboxes are dropped.
    #[test]
    fn snapshot_different_sandbox() {
        let mut sandbox = {
            let path = simple_guest_as_string().unwrap();
            let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
            u_sbox.evolve().unwrap()
        };

        let mut sandbox2 = {
            let path = simple_guest_as_string().unwrap();
            let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
            u_sbox.evolve().unwrap()
        };
        assert_ne!(sandbox.id, sandbox2.id);

        // Restoring into the wrong sandbox must fail with a mismatch error.
        let snapshot = sandbox.snapshot().unwrap();
        let err = sandbox2.restore(snapshot.clone());
        assert!(matches!(err, Err(HyperlightError::SnapshotSandboxMismatch)));

        let sandbox_id = sandbox.id;
        drop(sandbox);
        drop(sandbox2);
        drop(snapshot);

        // Ids come from a monotonically increasing global counter, so a new
        // sandbox never reuses a dropped sandbox's id.
        let sandbox3 = {
            let path = simple_guest_as_string().unwrap();
            let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
            u_sbox.evolve().unwrap()
        };
        assert_ne!(sandbox3.id, sandbox_id);
    }
1437
    /// Restoring a snapshot resets vCPU debug registers: a DR0 value set by
    /// the guest must be cleared by `restore` (via `reset_vcpu`).
    #[test]
    fn snapshot_restore_resets_debug_registers() {
        let mut sandbox: MultiUseSandbox = {
            let path = simple_guest_as_string().unwrap();
            let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
            u_sbox.evolve().unwrap()
        };

        let snapshot = sandbox.snapshot().unwrap();

        let dr0_initial: u64 = sandbox.call("GetDr0", ()).unwrap();
        assert_eq!(dr0_initial, 0, "DR0 should initially be 0");

        // Dirty DR0 with a recognizable value.
        const DIRTY_VALUE: u64 = 0xDEAD_BEEF_CAFE_BABE;
        sandbox.call::<()>("SetDr0", DIRTY_VALUE).unwrap();
        let dr0_dirty: u64 = sandbox.call("GetDr0", ()).unwrap();
        assert_eq!(
            dr0_dirty, DIRTY_VALUE,
            "DR0 should be dirty after SetDr0 call"
        );

        sandbox.restore(snapshot).unwrap();

        let dr0_after_restore: u64 = sandbox.call("GetDr0", ()).unwrap();
        assert_eq!(
            dr0_after_restore, 0,
            "DR0 should be 0 after restore (reset_vcpu should have been called)"
        );
    }
1472
    /// A stale abort buffer left over from a previous call must not affect
    /// the next guest call, and must be cleared by it.
    #[test]
    fn stale_abort_buffer_does_not_leak_across_calls() {
        let mut sbox: MultiUseSandbox = {
            let path = simple_guest_as_string().unwrap();
            let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
            u_sbox.evolve().unwrap()
        };

        // Simulate a leftover partial abort message from an earlier call.
        sbox.mem_mgr.abort_buffer.extend_from_slice(&[0xAA; 1020]);

        let res = sbox.call::<String>("Echo", "hello".to_string());
        assert!(
            res.is_ok(),
            "Expected Ok after stale abort buffer, got: {:?}",
            res.unwrap_err()
        );

        assert!(
            sbox.mem_mgr.abort_buffer.is_empty(),
            "abort_buffer should be empty after a guest call"
        );
    }
1499
    /// Sandboxes should be creatable (and droppable) across a range of
    /// heap sizes, from small to large.
    #[test]
    fn test_sandbox_creation_various_sizes() {
        let test_cases: [(&str, u64); 3] = [
            ("small (8MB heap)", SMALL_HEAP_SIZE),
            ("medium (64MB heap)", MEDIUM_HEAP_SIZE),
            ("large (256MB heap)", LARGE_HEAP_SIZE),
        ];

        for (name, heap_size) in test_cases {
            let mut cfg = SandboxConfiguration::default();
            cfg.set_heap_size(heap_size);
            cfg.set_scratch_size(0x100000);

            let path = simple_guest_as_string().unwrap();
            let sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), Some(cfg))
                .unwrap_or_else(|e| panic!("Failed to create {} sandbox: {}", name, e))
                .evolve()
                .unwrap_or_else(|e| panic!("Failed to evolve {} sandbox: {}", name, e));

            drop(sbox);
        }
    }
1523
    /// Helper: a default sandbox used by the GVA-read tests below.
    #[cfg(feature = "trace_guest")]
    fn sandbox_for_gva_tests() -> MultiUseSandbox {
        let path = simple_guest_as_string().unwrap();
        UninitializedSandbox::new(GuestBinary::FilePath(path), None)
            .unwrap()
            .evolve()
            .unwrap()
    }
1533
    /// Reads `len` bytes at guest virtual address `gva` two ways — via the
    /// guest's "ReadMappedBuffer" function and via the host-side
    /// `read_guest_memory_by_gva` page-table walk — and asserts both agree.
    #[cfg(feature = "trace_guest")]
    fn assert_gva_read_matches(sbox: &mut MultiUseSandbox, gva: u64, len: usize) {
        let expected: Vec<u8> = sbox
            .call("ReadMappedBuffer", (gva, len as u64, true))
            .unwrap();
        assert_eq!(expected.len(), len);

        let root_pt = sbox.vm.get_root_pt().unwrap();
        let actual = sbox
            .mem_mgr
            .read_guest_memory_by_gva(gva, len, root_pt)
            .unwrap();

        assert_eq!(
            actual, expected,
            "read_guest_memory_by_gva at GVA {:#x} (len {}) differs from guest ReadMappedBuffer",
            gva, len,
        );
    }
1558
    /// Small read well within one page of guest code.
    #[test]
    #[cfg(feature = "trace_guest")]
    fn read_guest_memory_by_gva_single_page() {
        let mut sbox = sandbox_for_gva_tests();
        let code_gva = sbox.mem_mgr.layout.get_guest_code_address() as u64;
        assert_gva_read_matches(&mut sbox, code_gva, 128);
    }
1568
    /// Read of exactly one full page of guest code.
    #[test]
    #[cfg(feature = "trace_guest")]
    fn read_guest_memory_by_gva_full_page() {
        let mut sbox = sandbox_for_gva_tests();
        let code_gva = sbox.mem_mgr.layout.get_guest_code_address() as u64;
        assert_gva_read_matches(&mut sbox, code_gva, 4096);
    }
1578
    /// Unaligned read starting one byte before a page boundary and spanning
    /// 4097 bytes, i.e. crossing two page boundaries.
    #[test]
    #[cfg(feature = "trace_guest")]
    fn read_guest_memory_by_gva_unaligned_cross_page() {
        let mut sbox = sandbox_for_gva_tests();
        let code_gva = sbox.mem_mgr.layout.get_guest_code_address() as u64;
        let start = code_gva + 4096 - 1;
        println!(
            "Testing unaligned cross-page read starting at {:#x} spanning 4097 bytes",
            start
        );
        assert_gva_read_matches(&mut sbox, start, 4097);
    }
1595
    /// Read of exactly two full, page-aligned pages of guest code.
    #[test]
    #[cfg(feature = "trace_guest")]
    fn read_guest_memory_by_gva_two_full_pages() {
        let mut sbox = sandbox_for_gva_tests();
        let code_gva = sbox.mem_mgr.layout.get_guest_code_address() as u64;
        assert_gva_read_matches(&mut sbox, code_gva, 4096 * 2);
    }
1604
    /// Read straddling a single page boundary (100 bytes before, 100 after).
    #[test]
    #[cfg(feature = "trace_guest")]
    fn read_guest_memory_by_gva_cross_page_boundary() {
        let mut sbox = sandbox_for_gva_tests();
        let code_gva = sbox.mem_mgr.layout.get_guest_code_address() as u64;
        let start = code_gva + 4096 - 100;
        assert_gva_read_matches(&mut sbox, start, 200);
    }
1617
1618 fn create_test_file(name: &str, content: &[u8]) -> (std::path::PathBuf, Vec<u8>) {
1622 use std::io::Write;
1623
1624 let page_size = page_size::get();
1625 let padded_len = content.len().max(page_size).div_ceil(page_size) * page_size;
1626 let mut padded = vec![0u8; padded_len];
1627 padded[..content.len()].copy_from_slice(content);
1628
1629 let temp_dir = std::env::temp_dir();
1630 let path = temp_dir.join(name);
1631 let _ = std::fs::remove_file(&path); let mut f = std::fs::File::create(&path).unwrap();
1633 f.write_all(&padded).unwrap();
1634 (path, content.to_vec())
1635 }
1636
1637 #[test]
1640 fn test_map_file_cow_basic() {
1641 let expected = b"hello world from map_file_cow";
1642 let (path, expected_bytes) =
1643 create_test_file("hyperlight_test_map_file_cow_basic.bin", expected);
1644
1645 let mut sbox = UninitializedSandbox::new(
1646 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1647 None,
1648 )
1649 .unwrap()
1650 .evolve()
1651 .unwrap();
1652
1653 let guest_base: u64 = 0x1_0000_0000;
1654 let mapped_size = sbox.map_file_cow(&path, guest_base, None).unwrap();
1655 assert!(mapped_size > 0, "mapped_size should be positive");
1656 assert!(
1657 mapped_size >= expected.len() as u64,
1658 "mapped_size should be >= file content length"
1659 );
1660
1661 let actual: Vec<u8> = sbox
1663 .call(
1664 "ReadMappedBuffer",
1665 (guest_base, expected_bytes.len() as u64, true),
1666 )
1667 .unwrap();
1668
1669 assert_eq!(
1670 actual, expected_bytes,
1671 "Guest should read back the exact file content"
1672 );
1673
1674 let _ = std::fs::remove_file(&path);
1676 }
1677
1678 #[test]
1681 fn test_map_file_cow_read_only_enforcement() {
1682 let content = &[0xBB; 4096];
1683 let (path, _) = create_test_file("hyperlight_test_map_file_cow_readonly.bin", content);
1684
1685 let mut sbox = UninitializedSandbox::new(
1686 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1687 None,
1688 )
1689 .unwrap()
1690 .evolve()
1691 .unwrap();
1692
1693 let guest_base: u64 = 0x1_0000_0000;
1694 sbox.map_file_cow(&path, guest_base, None).unwrap();
1695
1696 let err = sbox
1698 .call::<bool>("WriteMappedBuffer", (guest_base, content.len() as u64))
1699 .unwrap_err();
1700
1701 match err {
1702 HyperlightError::MemoryAccessViolation(addr, ..) if addr == guest_base => {}
1703 _ => panic!(
1704 "Expected MemoryAccessViolation at guest_base, got: {:?}",
1705 err
1706 ),
1707 };
1708
1709 let _ = std::fs::remove_file(&path);
1711 }
1712
1713 #[test]
1716 fn test_map_file_cow_poisoned() {
1717 let (path, _) = create_test_file("hyperlight_test_map_file_cow_poison.bin", &[0xCC; 4096]);
1718
1719 let mut sbox: MultiUseSandbox = {
1720 let path = simple_guest_as_string().unwrap();
1721 let u_sbox = UninitializedSandbox::new(GuestBinary::FilePath(path), None).unwrap();
1722 u_sbox.evolve()
1723 }
1724 .unwrap();
1725 let snapshot = sbox.snapshot().unwrap();
1726
1727 let _ = sbox
1729 .call::<()>("guest_panic", "hello".to_string())
1730 .unwrap_err();
1731 assert!(sbox.poisoned());
1732
1733 let err = sbox.map_file_cow(&path, 0x1_0000_0000, None).unwrap_err();
1735 assert!(matches!(err, HyperlightError::PoisonedSandbox));
1736
1737 sbox.restore(snapshot).unwrap();
1739 assert!(!sbox.poisoned());
1740 let result = sbox.map_file_cow(&path, 0x1_0000_0000, None);
1741 assert!(result.is_ok());
1742
1743 let _ = std::fs::remove_file(&path);
1744 }
1745
1746 #[test]
1749 fn test_map_file_cow_multi_vm_same_file() {
1750 let expected = b"shared file content across VMs";
1751 let (path, expected_bytes) =
1752 create_test_file("hyperlight_test_map_file_cow_multi_vm.bin", expected);
1753
1754 let guest_base: u64 = 0x1_0000_0000;
1755
1756 let mut sbox1 = UninitializedSandbox::new(
1757 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1758 None,
1759 )
1760 .unwrap()
1761 .evolve()
1762 .unwrap();
1763
1764 let mut sbox2 = UninitializedSandbox::new(
1765 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1766 None,
1767 )
1768 .unwrap()
1769 .evolve()
1770 .unwrap();
1771
1772 sbox1.map_file_cow(&path, guest_base, None).unwrap();
1774 sbox2.map_file_cow(&path, guest_base, None).unwrap();
1775
1776 let actual1: Vec<u8> = sbox1
1778 .call(
1779 "ReadMappedBuffer",
1780 (guest_base, expected_bytes.len() as u64, true),
1781 )
1782 .unwrap();
1783 let actual2: Vec<u8> = sbox2
1784 .call(
1785 "ReadMappedBuffer",
1786 (guest_base, expected_bytes.len() as u64, true),
1787 )
1788 .unwrap();
1789
1790 assert_eq!(
1791 actual1, expected_bytes,
1792 "Sandbox 1 should read correct content"
1793 );
1794 assert_eq!(
1795 actual2, expected_bytes,
1796 "Sandbox 2 should read correct content"
1797 );
1798
1799 let _ = std::fs::remove_file(&path);
1800 }
1801
1802 #[test]
1805 fn test_map_file_cow_multi_vm_threaded() {
1806 let expected = b"threaded file mapping test data";
1807 let (path, expected_bytes) =
1808 create_test_file("hyperlight_test_map_file_cow_threaded.bin", expected);
1809
1810 const NUM_THREADS: usize = 5;
1811 let path = Arc::new(path);
1812 let expected_bytes = Arc::new(expected_bytes);
1813 let barrier = Arc::new(Barrier::new(NUM_THREADS));
1814 let mut handles = vec![];
1815
1816 for _ in 0..NUM_THREADS {
1817 let path = path.clone();
1818 let expected_bytes = expected_bytes.clone();
1819 let barrier = barrier.clone();
1820
1821 handles.push(thread::spawn(move || {
1822 barrier.wait();
1823
1824 let mut sbox = UninitializedSandbox::new(
1825 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1826 None,
1827 )
1828 .unwrap()
1829 .evolve()
1830 .unwrap();
1831
1832 let guest_base: u64 = 0x1_0000_0000;
1833 sbox.map_file_cow(&path, guest_base, None).unwrap();
1834
1835 let actual: Vec<u8> = sbox
1836 .call(
1837 "ReadMappedBuffer",
1838 (guest_base, expected_bytes.len() as u64, true),
1839 )
1840 .unwrap();
1841
1842 assert_eq!(actual, *expected_bytes);
1843 }));
1844 }
1845
1846 for h in handles {
1847 h.join().unwrap();
1848 }
1849
1850 let _ = std::fs::remove_file(&*path);
1851 }
1852
1853 #[test]
1856 #[cfg(target_os = "windows")]
1857 fn test_map_file_cow_cleanup_no_handle_leak() {
1858 let (path, _) = create_test_file("hyperlight_test_map_file_cow_cleanup.bin", &[0xDD; 4096]);
1859
1860 {
1861 let mut sbox = UninitializedSandbox::new(
1862 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1863 None,
1864 )
1865 .unwrap()
1866 .evolve()
1867 .unwrap();
1868
1869 sbox.map_file_cow(&path, 0x1_0000_0000, None).unwrap();
1870 }
1872
1873 std::fs::remove_file(&path)
1874 .expect("File should be deletable after sandbox with map_file_cow is dropped");
1875 }
1876
1877 #[test]
1881 fn test_map_file_cow_snapshot_remapping_cycle() {
1882 let expected = b"snapshot remapping cycle test!";
1883 let (path, expected_bytes) =
1884 create_test_file("hyperlight_test_map_file_cow_snapshot_remap.bin", expected);
1885
1886 let mut sbox = UninitializedSandbox::new(
1887 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1888 None,
1889 )
1890 .unwrap()
1891 .evolve()
1892 .unwrap();
1893
1894 let guest_base: u64 = 0x1_0000_0000;
1895
1896 let snapshot1 = sbox.snapshot().unwrap();
1898
1899 sbox.map_file_cow(&path, guest_base, None).unwrap();
1901
1902 let actual: Vec<u8> = sbox
1904 .call(
1905 "ReadMappedBuffer",
1906 (guest_base, expected_bytes.len() as u64, true),
1907 )
1908 .unwrap();
1909 assert_eq!(actual, expected_bytes);
1910
1911 let snapshot2 = sbox.snapshot().unwrap();
1913
1914 sbox.restore(snapshot1.clone()).unwrap();
1916 let is_mapped: bool = sbox.call("CheckMapped", (guest_base,)).unwrap();
1917 assert!(
1918 !is_mapped,
1919 "Region should be unmapped after restoring to snapshot₁"
1920 );
1921
1922 sbox.restore(snapshot2).unwrap();
1925 let is_mapped: bool = sbox.call("CheckMapped", (guest_base,)).unwrap();
1926 assert!(
1927 is_mapped,
1928 "Region should be mapped after restoring to snapshot₂"
1929 );
1930 let actual2: Vec<u8> = sbox
1931 .call(
1932 "ReadMappedBuffer",
1933 (guest_base, expected_bytes.len() as u64, false),
1934 )
1935 .unwrap();
1936 assert_eq!(
1937 actual2, expected_bytes,
1938 "Data should be intact after snapshot₂ restore"
1939 );
1940
1941 let _ = std::fs::remove_file(&path);
1942 }
1943
1944 #[test]
1947 fn test_map_file_cow_snapshot_restore() {
1948 let expected = b"snapshot restore basic test!!";
1949 let (path, expected_bytes) =
1950 create_test_file("hyperlight_test_map_file_cow_snap_restore.bin", expected);
1951
1952 let mut sbox = UninitializedSandbox::new(
1953 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
1954 None,
1955 )
1956 .unwrap()
1957 .evolve()
1958 .unwrap();
1959
1960 let guest_base: u64 = 0x1_0000_0000;
1961 sbox.map_file_cow(&path, guest_base, None).unwrap();
1962
1963 let actual: Vec<u8> = sbox
1965 .call(
1966 "ReadMappedBuffer",
1967 (guest_base, expected_bytes.len() as u64, true),
1968 )
1969 .unwrap();
1970 assert_eq!(actual, expected_bytes);
1971
1972 let snapshot = sbox.snapshot().unwrap();
1974
1975 sbox.restore(snapshot).unwrap();
1977
1978 let actual2: Vec<u8> = sbox
1980 .call(
1981 "ReadMappedBuffer",
1982 (guest_base, expected_bytes.len() as u64, false),
1983 )
1984 .unwrap();
1985 assert_eq!(
1986 actual2, expected_bytes,
1987 "Data should be readable after restore from snapshot"
1988 );
1989
1990 let _ = std::fs::remove_file(&path);
1991 }
1992
1993 #[test]
1997 fn test_map_file_cow_deferred_basic() {
1998 let expected = b"deferred map_file_cow test data";
1999 let (path, expected_bytes) =
2000 create_test_file("hyperlight_test_map_file_cow_deferred.bin", expected);
2001
2002 let guest_base: u64 = 0x1_0000_0000;
2003
2004 let mut u_sbox = UninitializedSandbox::new(
2005 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2006 None,
2007 )
2008 .unwrap();
2009
2010 let mapped_size = u_sbox.map_file_cow(&path, guest_base, None).unwrap();
2012 assert!(mapped_size > 0, "mapped_size should be positive");
2013 assert!(
2014 mapped_size >= expected.len() as u64,
2015 "mapped_size should be >= file content length"
2016 );
2017
2018 let mut sbox: MultiUseSandbox = u_sbox.evolve().unwrap();
2020
2021 let actual: Vec<u8> = sbox
2023 .call(
2024 "ReadMappedBuffer",
2025 (guest_base, expected_bytes.len() as u64, true),
2026 )
2027 .unwrap();
2028
2029 assert_eq!(
2030 actual, expected_bytes,
2031 "Guest should read back the exact file content after deferred mapping"
2032 );
2033
2034 let _ = std::fs::remove_file(&path);
2035 }
2036
2037 #[test]
2041 fn test_map_file_cow_deferred_drop_without_evolve() {
2042 let (path, _) = create_test_file(
2043 "hyperlight_test_map_file_cow_deferred_drop.bin",
2044 &[0xAA; 4096],
2045 );
2046
2047 let guest_base: u64 = 0x1_0000_0000;
2048
2049 {
2050 let mut u_sbox = UninitializedSandbox::new(
2051 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2052 None,
2053 )
2054 .unwrap();
2055
2056 u_sbox.map_file_cow(&path, guest_base, None).unwrap();
2057 }
2060
2061 #[cfg(target_os = "windows")]
2064 std::fs::remove_file(&path)
2065 .expect("File should be deletable after dropping UninitializedSandbox");
2066 #[cfg(not(target_os = "windows"))]
2067 let _ = std::fs::remove_file(&path);
2068 }
2069
2070 #[test]
2073 fn test_map_file_cow_unaligned_guest_base() {
2074 let (path, _) =
2075 create_test_file("hyperlight_test_map_file_cow_unaligned.bin", &[0xBB; 4096]);
2076
2077 let mut u_sbox = UninitializedSandbox::new(
2078 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2079 None,
2080 )
2081 .unwrap();
2082
2083 let unaligned_base: u64 = (page_size::get() + 1) as u64;
2085 let result = u_sbox.map_file_cow(&path, unaligned_base, None);
2086 assert!(
2087 result.is_err(),
2088 "map_file_cow should reject unaligned guest_base"
2089 );
2090
2091 let _ = std::fs::remove_file(&path);
2092 }
2093
2094 #[test]
2096 fn test_map_file_cow_empty_file() {
2097 let temp_dir = std::env::temp_dir();
2098 let path = temp_dir.join("hyperlight_test_map_file_cow_empty.bin");
2099 let _ = std::fs::remove_file(&path);
2100 std::fs::File::create(&path).unwrap(); let mut u_sbox = UninitializedSandbox::new(
2103 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2104 None,
2105 )
2106 .unwrap();
2107
2108 let guest_base: u64 = 0x1_0000_0000;
2109 let result = u_sbox.map_file_cow(&path, guest_base, None);
2110 assert!(result.is_err(), "map_file_cow should reject empty files");
2111
2112 let _ = std::fs::remove_file(&path);
2113 }
2114
2115 #[test]
2117 fn test_map_file_cow_custom_label() {
2118 let (path, _) = create_test_file("hyperlight_test_map_file_cow_label.bin", &[0xDD; 4096]);
2119
2120 let mut sbox = UninitializedSandbox::new(
2121 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2122 None,
2123 )
2124 .unwrap()
2125 .evolve()
2126 .unwrap();
2127
2128 let result = sbox.map_file_cow(&path, 0x1_0000_0000, Some("my_ramfs"));
2129 assert!(
2130 result.is_ok(),
2131 "map_file_cow with custom label should succeed"
2132 );
2133
2134 let _ = std::fs::remove_file(&path);
2135 }
2136
2137 #[test]
2141 #[cfg(feature = "nanvix-unstable")]
2142 fn test_map_file_cow_peb_entry_multiuse() {
2143 use std::mem::offset_of;
2144
2145 use hyperlight_common::mem::{FILE_MAPPING_LABEL_MAX_LEN, FileMappingInfo};
2146
2147 let (path, _) = create_test_file("hyperlight_test_peb_entry_multiuse.bin", &[0xDD; 4096]);
2148
2149 let guest_base: u64 = 0x1_0000_0000;
2150 let label = "my_ramfs";
2151
2152 let mut sbox = UninitializedSandbox::new(
2153 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2154 None,
2155 )
2156 .unwrap()
2157 .evolve()
2158 .unwrap();
2159
2160 let mapped_size = sbox.map_file_cow(&path, guest_base, Some(label)).unwrap();
2162
2163 let count = sbox
2165 .mem_mgr
2166 .shared_mem
2167 .read::<u64>(sbox.mem_mgr.layout.get_file_mappings_size_offset())
2168 .unwrap();
2169 assert_eq!(
2170 count, 1,
2171 "PEB file_mappings count should be 1 after one mapping"
2172 );
2173
2174 let entry_offset = sbox.mem_mgr.layout.get_file_mappings_array_offset();
2176
2177 let stored_addr = sbox
2178 .mem_mgr
2179 .shared_mem
2180 .read::<u64>(entry_offset + offset_of!(FileMappingInfo, guest_addr))
2181 .unwrap();
2182 assert_eq!(stored_addr, guest_base, "PEB entry guest_addr should match");
2183
2184 let stored_size = sbox
2185 .mem_mgr
2186 .shared_mem
2187 .read::<u64>(entry_offset + offset_of!(FileMappingInfo, size))
2188 .unwrap();
2189 assert_eq!(
2190 stored_size, mapped_size,
2191 "PEB entry size should match mapped_size"
2192 );
2193
2194 let label_offset = entry_offset + offset_of!(FileMappingInfo, label);
2196 let mut label_buf = [0u8; FILE_MAPPING_LABEL_MAX_LEN + 1];
2197 for (i, byte) in label_buf.iter_mut().enumerate() {
2198 *byte = sbox
2199 .mem_mgr
2200 .shared_mem
2201 .read::<u8>(label_offset + i)
2202 .unwrap();
2203 }
2204 let label_len = label_buf
2205 .iter()
2206 .position(|&b| b == 0)
2207 .unwrap_or(label_buf.len());
2208 let stored_label = std::str::from_utf8(&label_buf[..label_len]).unwrap();
2209 assert_eq!(stored_label, label, "PEB entry label should match");
2210
2211 let _ = std::fs::remove_file(&path);
2212 }
2213
2214 #[test]
2217 #[cfg(feature = "nanvix-unstable")]
2218 fn test_map_file_cow_peb_entry_deferred() {
2219 use std::mem::offset_of;
2220
2221 use hyperlight_common::mem::{FILE_MAPPING_LABEL_MAX_LEN, FileMappingInfo};
2222
2223 let (path, _) = create_test_file("hyperlight_test_peb_entry_deferred.bin", &[0xEE; 4096]);
2224
2225 let guest_base: u64 = 0x1_0000_0000;
2226 let label = "deferred_fs";
2227
2228 let mut u_sbox = UninitializedSandbox::new(
2229 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2230 None,
2231 )
2232 .unwrap();
2233
2234 let mapped_size = u_sbox.map_file_cow(&path, guest_base, Some(label)).unwrap();
2235
2236 let sbox: MultiUseSandbox = u_sbox.evolve().unwrap();
2238
2239 let count = sbox
2241 .mem_mgr
2242 .shared_mem
2243 .read::<u64>(sbox.mem_mgr.layout.get_file_mappings_size_offset())
2244 .unwrap();
2245 assert_eq!(count, 1, "PEB file_mappings count should be 1 after evolve");
2246
2247 let entry_offset = sbox.mem_mgr.layout.get_file_mappings_array_offset();
2249
2250 let stored_addr = sbox
2251 .mem_mgr
2252 .shared_mem
2253 .read::<u64>(entry_offset + offset_of!(FileMappingInfo, guest_addr))
2254 .unwrap();
2255 assert_eq!(stored_addr, guest_base);
2256
2257 let stored_size = sbox
2258 .mem_mgr
2259 .shared_mem
2260 .read::<u64>(entry_offset + offset_of!(FileMappingInfo, size))
2261 .unwrap();
2262 assert_eq!(stored_size, mapped_size);
2263
2264 let label_offset = entry_offset + offset_of!(FileMappingInfo, label);
2266 let mut label_buf = [0u8; FILE_MAPPING_LABEL_MAX_LEN + 1];
2267 for (i, byte) in label_buf.iter_mut().enumerate() {
2268 *byte = sbox
2269 .mem_mgr
2270 .shared_mem
2271 .read::<u8>(label_offset + i)
2272 .unwrap();
2273 }
2274 let label_len = label_buf
2275 .iter()
2276 .position(|&b| b == 0)
2277 .unwrap_or(label_buf.len());
2278 let stored_label = std::str::from_utf8(&label_buf[..label_len]).unwrap();
2279 assert_eq!(
2280 stored_label, label,
2281 "PEB entry label should match after evolve"
2282 );
2283
2284 let _ = std::fs::remove_file(&path);
2285 }
2286
2287 #[test]
2291 #[cfg(feature = "nanvix-unstable")]
2292 fn test_map_file_cow_peb_multiple_entries() {
2293 use std::mem::{offset_of, size_of};
2294
2295 use hyperlight_common::mem::{FILE_MAPPING_LABEL_MAX_LEN, FileMappingInfo};
2296
2297 const NUM_FILES: usize = 5;
2298 const DEFERRED_COUNT: usize = 3;
2299
2300 let mut paths = Vec::new();
2302 let mut labels: Vec<String> = Vec::new();
2303 for i in 0..NUM_FILES {
2304 let name = format!("hyperlight_test_peb_multi_{}.bin", i);
2305 let content = vec![i as u8 + 0xA0; 4096];
2306 let (path, _) = create_test_file(&name, &content);
2307 paths.push(path);
2308 labels.push(format!("file_{}", i));
2309 }
2310
2311 let page_size = page_size::get() as u64;
2314 let base: u64 = 0x1_0000_0000;
2315 let guest_bases: Vec<u64> = (0..NUM_FILES as u64)
2316 .map(|i| base + i * page_size)
2317 .collect();
2318
2319 let mut u_sbox = UninitializedSandbox::new(
2320 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2321 None,
2322 )
2323 .unwrap();
2324
2325 let mut mapped_sizes = Vec::new();
2327 for i in 0..DEFERRED_COUNT {
2328 let size = u_sbox
2329 .map_file_cow(&paths[i], guest_bases[i], Some(&labels[i]))
2330 .unwrap();
2331 mapped_sizes.push(size);
2332 }
2333
2334 let mut sbox: MultiUseSandbox = u_sbox.evolve().unwrap();
2336
2337 for i in DEFERRED_COUNT..NUM_FILES {
2339 let size = sbox
2340 .map_file_cow(&paths[i], guest_bases[i], Some(&labels[i]))
2341 .unwrap();
2342 mapped_sizes.push(size);
2343 }
2344
2345 let count = sbox
2347 .mem_mgr
2348 .shared_mem
2349 .read::<u64>(sbox.mem_mgr.layout.get_file_mappings_size_offset())
2350 .unwrap();
2351 assert_eq!(
2352 count, NUM_FILES as u64,
2353 "PEB should have {NUM_FILES} entries"
2354 );
2355
2356 let array_base = sbox.mem_mgr.layout.get_file_mappings_array_offset();
2358 for i in 0..NUM_FILES {
2359 let entry_offset = array_base + i * size_of::<FileMappingInfo>();
2360
2361 let stored_addr = sbox
2362 .mem_mgr
2363 .shared_mem
2364 .read::<u64>(entry_offset + offset_of!(FileMappingInfo, guest_addr))
2365 .unwrap();
2366 assert_eq!(
2367 stored_addr, guest_bases[i],
2368 "Entry {i}: guest_addr mismatch"
2369 );
2370
2371 let stored_size = sbox
2372 .mem_mgr
2373 .shared_mem
2374 .read::<u64>(entry_offset + offset_of!(FileMappingInfo, size))
2375 .unwrap();
2376 assert_eq!(stored_size, mapped_sizes[i], "Entry {i}: size mismatch");
2377
2378 let label_base = entry_offset + offset_of!(FileMappingInfo, label);
2380 let mut label_buf = [0u8; FILE_MAPPING_LABEL_MAX_LEN + 1];
2381 for (j, byte) in label_buf.iter_mut().enumerate() {
2382 *byte = sbox.mem_mgr.shared_mem.read::<u8>(label_base + j).unwrap();
2383 }
2384 let label_len = label_buf
2385 .iter()
2386 .position(|&b| b == 0)
2387 .unwrap_or(label_buf.len());
2388 let stored_label = std::str::from_utf8(&label_buf[..label_len]).unwrap();
2389 assert_eq!(stored_label, labels[i], "Entry {i}: label mismatch");
2390 }
2391
2392 for path in &paths {
2394 let _ = std::fs::remove_file(path);
2395 }
2396 }
2397
2398 #[test]
2400 fn test_map_file_cow_label_too_long() {
2401 let (path, _) =
2402 create_test_file("hyperlight_test_map_file_cow_long_label.bin", &[0xEE; 4096]);
2403
2404 let guest_base: u64 = 0x1_0000_0000;
2405
2406 let mut u_sbox = UninitializedSandbox::new(
2407 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2408 None,
2409 )
2410 .unwrap();
2411
2412 let long_label = "A".repeat(64);
2414 let result = u_sbox.map_file_cow(&path, guest_base, Some(&long_label));
2415 assert!(
2416 result.is_err(),
2417 "map_file_cow should reject labels longer than 63 bytes"
2418 );
2419
2420 let ok_label = "B".repeat(63);
2422 let result = u_sbox.map_file_cow(&path, guest_base, Some(&ok_label));
2423 assert!(
2424 result.is_ok(),
2425 "map_file_cow should accept labels of exactly 63 bytes"
2426 );
2427
2428 let _ = std::fs::remove_file(&path);
2429 }
2430
2431 #[test]
2433 fn test_map_file_cow_label_null_byte() {
2434 let (path, _) =
2435 create_test_file("hyperlight_test_map_file_cow_null_label.bin", &[0xFF; 4096]);
2436
2437 let guest_base: u64 = 0x1_0000_0000;
2438
2439 let mut u_sbox = UninitializedSandbox::new(
2440 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2441 None,
2442 )
2443 .unwrap();
2444
2445 let result = u_sbox.map_file_cow(&path, guest_base, Some("has\0null"));
2446 assert!(
2447 result.is_err(),
2448 "map_file_cow should reject labels containing null bytes"
2449 );
2450
2451 let _ = std::fs::remove_file(&path);
2452 }
2453
2454 #[test]
2456 fn test_map_file_cow_overlapping_mappings() {
2457 let (path1, _) =
2458 create_test_file("hyperlight_test_map_file_cow_overlap1.bin", &[0xAA; 4096]);
2459 let (path2, _) =
2460 create_test_file("hyperlight_test_map_file_cow_overlap2.bin", &[0xBB; 4096]);
2461
2462 let guest_base: u64 = 0x1_0000_0000;
2463
2464 let mut u_sbox = UninitializedSandbox::new(
2465 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2466 None,
2467 )
2468 .unwrap();
2469
2470 u_sbox.map_file_cow(&path1, guest_base, None).unwrap();
2472
2473 let result = u_sbox.map_file_cow(&path2, guest_base, None);
2475 assert!(
2476 result.is_err(),
2477 "map_file_cow should reject overlapping guest address ranges"
2478 );
2479
2480 let _ = std::fs::remove_file(&path1);
2481 let _ = std::fs::remove_file(&path2);
2482 }
2483
2484 #[test]
2487 fn test_map_file_cow_shared_mem_overlap() {
2488 let (path, _) = create_test_file(
2489 "hyperlight_test_map_file_cow_overlap_shm.bin",
2490 &[0xCC; 4096],
2491 );
2492
2493 let mut u_sbox = UninitializedSandbox::new(
2494 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2495 None,
2496 )
2497 .unwrap();
2498
2499 let base_addr = crate::mem::layout::SandboxMemoryLayout::BASE_ADDRESS as u64;
2501 let result = u_sbox.map_file_cow(&path, base_addr, None);
2503 assert!(
2504 result.is_err(),
2505 "map_file_cow should reject guest_base inside shared memory"
2506 );
2507
2508 let _ = std::fs::remove_file(&path);
2509 }
2510
2511 #[test]
2514 fn test_map_file_cow_max_limit() {
2515 use hyperlight_common::mem::MAX_FILE_MAPPINGS;
2516
2517 let mut u_sbox = UninitializedSandbox::new(
2518 GuestBinary::FilePath(simple_guest_as_string().expect("Guest Binary Missing")),
2519 None,
2520 )
2521 .unwrap();
2522
2523 let page_size = page_size::get() as u64;
2524 let base: u64 = 0x1_0000_0000;
2526
2527 let mut paths = Vec::new();
2530 for i in 0..MAX_FILE_MAPPINGS {
2531 let name = format!("hyperlight_test_max_limit_{}.bin", i);
2532 let (path, _) = create_test_file(&name, &[0xAA; 4096]);
2533 let guest_base = base + (i as u64) * page_size;
2534 u_sbox.map_file_cow(&path, guest_base, None).unwrap();
2535 paths.push(path);
2536 }
2537
2538 let name = format!("hyperlight_test_max_limit_{}.bin", MAX_FILE_MAPPINGS);
2540 let (path, _) = create_test_file(&name, &[0xBB; 4096]);
2541 let guest_base = base + (MAX_FILE_MAPPINGS as u64) * page_size;
2542 let result = u_sbox.map_file_cow(&path, guest_base, None);
2543 assert!(
2544 result.is_err(),
2545 "map_file_cow should reject after MAX_FILE_MAPPINGS registrations"
2546 );
2547
2548 for p in &paths {
2550 let _ = std::fs::remove_file(p);
2551 }
2552 let _ = std::fs::remove_file(&path);
2553 }
2554}