1use std::fmt::Debug;
64use std::mem::{offset_of, size_of};
65
66use hyperlight_common::mem::{HyperlightPEB, PAGE_SIZE_USIZE};
67use tracing::{Span, instrument};
68
69use super::memory_region::MemoryRegionType::{Code, Heap, InitData, Peb};
70use super::memory_region::{
71 DEFAULT_GUEST_BLOB_MEM_FLAGS, MemoryRegion, MemoryRegion_, MemoryRegionFlags, MemoryRegionKind,
72 MemoryRegionVecBuilder,
73};
74#[cfg(any(gdb, feature = "mem_profile"))]
75use super::shared_mem::HostSharedMemory;
76use super::shared_mem::{ExclusiveSharedMemory, ReadonlySharedMemory};
77use crate::error::HyperlightError::{MemoryRequestTooBig, MemoryRequestTooSmall};
78use crate::sandbox::SandboxConfiguration;
79use crate::{Result, new_error};
80
/// The backing region that a resolved guest physical address falls into.
///
/// `Sn`/`Sc` are placeholders for whatever handle the caller uses to access
/// the snapshot / scratch backing memory (e.g. `()`, `&[u8]`, or a shared
/// memory reference); see `ResolvedGpa::with_memories`.
pub(crate) enum BaseGpaRegion<Sn, Sc> {
    /// Address falls in the snapshot region (which starts at
    /// `SandboxMemoryLayout::BASE_ADDRESS`; see `resolve_gpa`).
    Snapshot(Sn),
    /// Address falls in the scratch region.
    Scratch(Sc),
    /// Address falls in a memory-mapped region; the matching region
    /// descriptor is carried along so the host mapping can be located.
    Mmap(MemoryRegion),
}
86
/// The result of resolving a guest physical address against the sandbox
/// memory layout: the base region the address falls in, plus the byte offset
/// of the address from the start of that region.
pub(crate) struct ResolvedGpa<Sn, Sc> {
    /// Byte offset of the resolved address from the start of `base`.
    pub(crate) offset: usize,
    /// The region containing the address.
    pub(crate) base: BaseGpaRegion<Sn, Sc>,
}
93
// Expose the raw byte contents of the shared-memory wrappers as `&[u8]`, so
// both can be used wherever a plain byte view is needed.
impl AsRef<[u8]> for ExclusiveSharedMemory {
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
impl AsRef<[u8]> for ReadonlySharedMemory {
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
104
105impl<Sn, Sc> ResolvedGpa<Sn, Sc> {
106 pub(crate) fn with_memories<Sn2, Sc2>(self, sn: Sn2, sc: Sc2) -> ResolvedGpa<Sn2, Sc2> {
107 ResolvedGpa {
108 offset: self.offset,
109 base: match self.base {
110 BaseGpaRegion::Snapshot(_) => BaseGpaRegion::Snapshot(sn),
111 BaseGpaRegion::Scratch(_) => BaseGpaRegion::Scratch(sc),
112 BaseGpaRegion::Mmap(r) => BaseGpaRegion::Mmap(r),
113 },
114 }
115 }
116}
impl<'a> BaseGpaRegion<&'a [u8], &'a [u8]> {
    /// Returns the full backing byte slice for this region.
    ///
    /// For `Snapshot`/`Scratch` the stored slice is returned directly; for
    /// `Mmap` a slice is reconstructed from the region's host address range.
    /// Note the returned lifetime is `'a` (the stored slices' lifetime), not
    /// tied to `&'b self`.
    pub(crate) fn as_ref<'b>(&'b self) -> &'a [u8] {
        match self {
            BaseGpaRegion::Snapshot(sn) => sn,
            BaseGpaRegion::Scratch(sc) => sc,
            // SAFETY(review): assumes `host_region` describes a live, mapped
            // host address range that stays valid for `'a` — TODO confirm this
            // invariant is upheld wherever `Mmap` regions are constructed.
            BaseGpaRegion::Mmap(r) => unsafe {
                #[allow(clippy::useless_conversion)]
                let host_region_base: usize = r.host_region.start.into();
                #[allow(clippy::useless_conversion)]
                let host_region_end: usize = r.host_region.end.into();
                let len = host_region_end - host_region_base;
                std::slice::from_raw_parts(host_region_base as *const u8, len)
            },
        }
    }
}
133impl<'a> ResolvedGpa<&'a [u8], &'a [u8]> {
134 pub(crate) fn as_ref<'b>(&'b self) -> &'a [u8] {
135 let base = self.base.as_ref();
136 if self.offset > base.len() {
137 return &[];
138 }
139 &self.base.as_ref()[self.offset..]
140 }
141}
#[cfg(any(gdb, feature = "mem_profile"))]
/// Abstraction over memory that can be copied out into a caller-provided
/// slice; used by the debugging/profiling paths to read guest memory through
/// a single interface.
#[allow(unused)]
pub(crate) trait ReadableSharedMemory {
    /// Copies `slice.len()` bytes starting at `offset` into `slice`.
    fn copy_to_slice(&self, slice: &mut [u8], offset: usize) -> Result<()>;
}
#[cfg(any(gdb, feature = "mem_profile"))]
// Delegate to `HostSharedMemory`'s own inherent `copy_to_slice`.
impl ReadableSharedMemory for &HostSharedMemory {
    fn copy_to_slice(&self, slice: &mut [u8], offset: usize) -> Result<()> {
        HostSharedMemory::copy_to_slice(self, slice, offset)
    }
}
#[cfg(any(gdb, feature = "mem_profile"))]
/// Trait-coherence workaround: the marker trait below lets us write one
/// blanket `ReadableSharedMemory` impl for the `AsRef<[u8]>`-backed memory
/// types without colliding with the dedicated `&HostSharedMemory` impl above.
mod coherence_hack {
    use super::{ExclusiveSharedMemory, ReadonlySharedMemory};
    /// Marker for shared-memory types whose contents are readable via
    /// `AsRef<[u8]>`.
    #[allow(unused)]
    pub(super) trait SharedMemoryAsRefMarker: AsRef<[u8]> {}
    impl SharedMemoryAsRefMarker for ExclusiveSharedMemory {}
    impl SharedMemoryAsRefMarker for &ExclusiveSharedMemory {}
    impl SharedMemoryAsRefMarker for ReadonlySharedMemory {}
    impl SharedMemoryAsRefMarker for &ReadonlySharedMemory {}
}
#[cfg(any(gdb, feature = "mem_profile"))]
impl<T: coherence_hack::SharedMemoryAsRefMarker> ReadableSharedMemory for T {
    /// Copies `slice.len()` bytes starting at `offset` out of the underlying
    /// byte view into `slice`.
    ///
    /// Returns an error — rather than panicking — when the requested range
    /// does not fit in the backing memory, including when
    /// `offset + slice.len()` would overflow `usize`.
    fn copy_to_slice(&self, slice: &mut [u8], offset: usize) -> Result<()> {
        let ss: &[u8] = self.as_ref();
        // checked_add: a pathological offset must surface as an error, not an
        // arithmetic panic (debug) or a wrapped-around bound that then panics
        // in the slice index below (release).
        let end = offset.checked_add(slice.len()).ok_or_else(|| {
            new_error!(
                "Attempt to read up to {} + {} in memory of size {}",
                offset,
                slice.len(),
                ss.len()
            )
        })?;
        if end > ss.len() {
            return Err(new_error!(
                "Attempt to read up to {} in memory of size {}",
                end,
                ss.len()
            ));
        }
        slice.copy_from_slice(&ss[offset..end]);
        Ok(())
    }
}
#[cfg(any(gdb, feature = "mem_profile"))]
impl<Sn: ReadableSharedMemory, Sc: ReadableSharedMemory> ResolvedGpa<Sn, Sc> {
    /// Copies `slice.len()` bytes out of the resolved region, starting at the
    /// resolved offset. Snapshot/scratch reads delegate to the respective
    /// memory handle; mmap reads go through a raw slice built over the host
    /// mapping's address range.
    #[allow(unused)]
    pub(crate) fn copy_to_slice(&self, slice: &mut [u8]) -> Result<()> {
        match &self.base {
            BaseGpaRegion::Snapshot(sn) => sn.copy_to_slice(slice, self.offset),
            BaseGpaRegion::Scratch(sc) => sc.copy_to_slice(slice, self.offset),
            // SAFETY(review): assumes `host_region` is a live host mapping for
            // the duration of this call — TODO confirm at the `Mmap`
            // construction sites.
            BaseGpaRegion::Mmap(r) => unsafe {
                #[allow(clippy::useless_conversion)]
                let host_region_base: usize = r.host_region.start.into();
                #[allow(clippy::useless_conversion)]
                let host_region_end: usize = r.host_region.end.into();
                let len = host_region_end - host_region_base;
                let ss = std::slice::from_raw_parts(host_region_base as *const u8, len);
                // Bounds-check up front so an oversized request yields an
                // error rather than a panic in the slice index below.
                let end = self.offset + slice.len();
                if end > ss.len() {
                    return Err(new_error!(
                        "Attempt to read up to {} in memory of size {}",
                        self.offset + slice.len(),
                        ss.len()
                    ));
                }
                slice.copy_from_slice(&ss[self.offset..end]);
                Ok(())
            },
        }
    }
}
215
/// Computed offsets and sizes describing where everything lives inside a
/// sandbox's guest memory. Built by [`SandboxMemoryLayout::new`]; the offsets
/// are cross-checked against the region builder in `get_memory_regions_`.
#[derive(Copy, Clone)]
pub(crate) struct SandboxMemoryLayout {
    // The configuration this layout was derived from.
    pub(super) sandbox_memory_config: SandboxConfiguration,
    // Size of the guest heap in bytes.
    pub(super) heap_size: usize,
    // Size of the initial-data blob appended after the heap.
    init_data_size: usize,

    // Offset of the PEB (page-aligned end of the guest code).
    peb_offset: usize,
    // Offsets of individual PEB fields, derived via `offset_of!`.
    peb_input_data_offset: usize,
    peb_output_data_offset: usize,
    peb_init_data_offset: usize,
    peb_heap_data_offset: usize,
    #[cfg(feature = "nanvix-unstable")]
    peb_file_mappings_offset: usize,

    // Offset of the guest heap buffer (page-aligned end of the PEB, plus the
    // file-mappings array on nanvix builds).
    guest_heap_buffer_offset: usize,
    // Offset of the init-data blob (page-aligned end of the heap).
    init_data_offset: usize,
    // Page-table size, if one has been set via `set_pt_size`.
    pt_size: Option<usize>,

    // Guest address of the PEB (`BASE_ADDRESS + peb_offset`).
    pub(crate) peb_address: usize,
    code_size: usize,
    // Offset of the guest code (always 0 — code starts the layout).
    guest_code_offset: usize,
    #[cfg_attr(feature = "i686-guest", allow(unused))]
    pub(crate) init_data_permissions: Option<MemoryRegionFlags>,

    // Size of the scratch region (from the configuration).
    scratch_size: usize,
    // Initialized to the total memory size in `new`; adjusted by
    // `set_pt_size`/`set_snapshot_size`.
    snapshot_size: usize,
}
254
255impl Debug for SandboxMemoryLayout {
256 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
257 let mut ff = f.debug_struct("SandboxMemoryLayout");
258 ff.field(
259 "Total Memory Size",
260 &format_args!("{:#x}", self.get_memory_size().unwrap_or(0)),
261 )
262 .field("Heap Size", &format_args!("{:#x}", self.heap_size))
263 .field(
264 "Init Data Size",
265 &format_args!("{:#x}", self.init_data_size),
266 )
267 .field("PEB Address", &format_args!("{:#x}", self.peb_address))
268 .field("PEB Offset", &format_args!("{:#x}", self.peb_offset))
269 .field("Code Size", &format_args!("{:#x}", self.code_size))
270 .field(
271 "Input Data Offset",
272 &format_args!("{:#x}", self.peb_input_data_offset),
273 )
274 .field(
275 "Output Data Offset",
276 &format_args!("{:#x}", self.peb_output_data_offset),
277 )
278 .field(
279 "Init Data Offset",
280 &format_args!("{:#x}", self.peb_init_data_offset),
281 )
282 .field(
283 "Guest Heap Offset",
284 &format_args!("{:#x}", self.peb_heap_data_offset),
285 );
286 #[cfg(feature = "nanvix-unstable")]
287 ff.field(
288 "File Mappings Offset",
289 &format_args!("{:#x}", self.peb_file_mappings_offset),
290 );
291 ff.field(
292 "Guest Heap Buffer Offset",
293 &format_args!("{:#x}", self.guest_heap_buffer_offset),
294 )
295 .field(
296 "Init Data Offset",
297 &format_args!("{:#x}", self.init_data_offset),
298 )
299 .field("PT Size", &format_args!("{:#x}", self.pt_size.unwrap_or(0)))
300 .field(
301 "Guest Code Offset",
302 &format_args!("{:#x}", self.guest_code_offset),
303 )
304 .field(
305 "Scratch region size",
306 &format_args!("{:#x}", self.scratch_size),
307 )
308 .finish()
309 }
310}
311
impl SandboxMemoryLayout {
    /// Upper bound on total sandbox memory: 16 GiB minus the base offset.
    const MAX_MEMORY_SIZE: usize = (16 * 1024 * 1024 * 1024) - Self::BASE_ADDRESS;
    /// Guest physical address at which the sandbox's memory begins.
    pub(crate) const BASE_ADDRESS: usize = 0x1000;

    /// Size in bytes of a guest stack pointer.
    pub(crate) const STACK_POINTER_SIZE_BYTES: u64 = 8;

    /// Builds a layout for the given configuration, guest code size, and
    /// init-data blob size/permissions.
    ///
    /// Validates that the configured scratch region is neither larger than
    /// [`Self::MAX_MEMORY_SIZE`] nor smaller than the minimum needed for the
    /// configured input/output stacks; returns `MemoryRequestTooBig` /
    /// `MemoryRequestTooSmall` respectively on violation.
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn new(
        cfg: SandboxConfiguration,
        code_size: usize,
        init_data_size: usize,
        init_data_permissions: Option<MemoryRegionFlags>,
    ) -> Result<Self> {
        let heap_size = usize::try_from(cfg.get_heap_size())?;
        let scratch_size = cfg.get_scratch_size();
        if scratch_size > Self::MAX_MEMORY_SIZE {
            return Err(MemoryRequestTooBig(scratch_size, Self::MAX_MEMORY_SIZE));
        }
        let min_scratch_size = hyperlight_common::layout::min_scratch_size(
            cfg.get_input_data_size(),
            cfg.get_output_data_size(),
        );
        if scratch_size < min_scratch_size {
            return Err(MemoryRequestTooSmall(scratch_size, min_scratch_size));
        }

        // Guest code starts the layout; the PEB follows at the next page
        // boundary after it.
        let guest_code_offset = 0;
        let peb_offset = code_size.next_multiple_of(PAGE_SIZE_USIZE);
        // Offsets of the individual PEB fields, derived from the struct layout.
        let peb_input_data_offset = peb_offset + offset_of!(HyperlightPEB, input_stack);
        let peb_output_data_offset = peb_offset + offset_of!(HyperlightPEB, output_stack);
        let peb_init_data_offset = peb_offset + offset_of!(HyperlightPEB, init_data);
        let peb_heap_data_offset = peb_offset + offset_of!(HyperlightPEB, guest_heap);
        #[cfg(feature = "nanvix-unstable")]
        let peb_file_mappings_offset = peb_offset + offset_of!(HyperlightPEB, file_mappings);

        let peb_address = Self::BASE_ADDRESS + peb_offset;
        // The heap buffer starts on the page boundary after the PEB — on
        // nanvix builds, after the PEB plus the file-mappings array that
        // immediately follows it.
        #[cfg(feature = "nanvix-unstable")]
        let file_mappings_array_end = peb_offset
            + size_of::<HyperlightPEB>()
            + hyperlight_common::mem::MAX_FILE_MAPPINGS
                * size_of::<hyperlight_common::mem::FileMappingInfo>();
        #[cfg(feature = "nanvix-unstable")]
        let guest_heap_buffer_offset = file_mappings_array_end.next_multiple_of(PAGE_SIZE_USIZE);
        #[cfg(not(feature = "nanvix-unstable"))]
        let guest_heap_buffer_offset =
            (peb_offset + size_of::<HyperlightPEB>()).next_multiple_of(PAGE_SIZE_USIZE);

        // Init data goes on the page boundary after the heap.
        let init_data_offset =
            (guest_heap_buffer_offset + heap_size).next_multiple_of(PAGE_SIZE_USIZE);
        let mut ret = Self {
            peb_offset,
            heap_size,
            peb_input_data_offset,
            peb_output_data_offset,
            peb_init_data_offset,
            peb_heap_data_offset,
            #[cfg(feature = "nanvix-unstable")]
            peb_file_mappings_offset,
            sandbox_memory_config: cfg,
            code_size,
            guest_heap_buffer_offset,
            peb_address,
            guest_code_offset,
            init_data_offset,
            init_data_size,
            init_data_permissions,
            pt_size: None,
            scratch_size,
            snapshot_size: 0,
        };
        // The snapshot initially covers the entire (page-rounded) memory.
        ret.set_snapshot_size(ret.get_memory_size()?);
        Ok(ret)
    }

    /// Offset (from the sandbox base) of the PEB field holding the
    /// output-data size.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_output_data_size_offset(&self) -> usize {
        self.peb_output_data_offset
    }

    /// Offset (from the sandbox base) of the PEB field holding the init-data
    /// size.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_init_data_size_offset(&self) -> usize {
        self.peb_init_data_offset
    }

    /// Size of the scratch region in bytes.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_scratch_size(&self) -> usize {
        self.scratch_size
    }

    /// Offset of the PEB's output-data pointer: it immediately follows the
    /// `u64` size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_output_data_pointer_offset(&self) -> usize {
        self.get_output_data_size_offset() + size_of::<u64>()
    }

    /// Offset of the PEB's init-data pointer: it immediately follows the
    /// `u64` size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_init_data_pointer_offset(&self) -> usize {
        self.get_init_data_size_offset() + size_of::<u64>()
    }

    /// Guest virtual address of the output-data buffer: it sits directly
    /// after the input-data buffer at the start of the scratch region.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_output_data_buffer_gva(&self) -> u64 {
        hyperlight_common::layout::scratch_base_gva(self.scratch_size)
            + self.sandbox_memory_config.get_input_data_size() as u64
    }

    /// Host-side offset of the output-data buffer within the scratch region
    /// (i.e. just past the input-data buffer).
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_output_data_buffer_scratch_host_offset(&self) -> usize {
        self.sandbox_memory_config.get_input_data_size()
    }

    /// Offset (from the sandbox base) of the PEB field holding the
    /// input-data size.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_input_data_size_offset(&self) -> usize {
        self.peb_input_data_offset
    }

    /// Offset of the PEB's input-data pointer: it immediately follows the
    /// `u64` size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_input_data_pointer_offset(&self) -> usize {
        self.get_input_data_size_offset() + size_of::<u64>()
    }

    /// Guest virtual address of the input-data buffer: the very start of the
    /// scratch region.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_input_data_buffer_gva(&self) -> u64 {
        hyperlight_common::layout::scratch_base_gva(self.scratch_size)
    }

    /// Host-side offset of the input-data buffer within the scratch region
    /// (always 0 — it starts the scratch region).
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_input_data_buffer_scratch_host_offset(&self) -> usize {
        0
    }

    /// Offset within the scratch region where the page tables begin: the
    /// page boundary after the input and output buffers.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_pt_base_scratch_offset(&self) -> usize {
        (self.sandbox_memory_config.get_input_data_size()
            + self.sandbox_memory_config.get_output_data_size())
        .next_multiple_of(hyperlight_common::vmem::PAGE_SIZE)
    }

    /// Guest physical address where the page tables begin.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_pt_base_gpa(&self) -> u64 {
        hyperlight_common::layout::scratch_base_gpa(self.scratch_size)
            + self.get_pt_base_scratch_offset() as u64
    }

    /// First guest physical address in the scratch region past the page
    /// tables (treats an unset page-table size as 0).
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_first_free_scratch_gpa(&self) -> u64 {
        self.get_pt_base_gpa() + self.pt_size.unwrap_or(0) as u64
    }

    /// Offset (from the sandbox base) of the PEB field holding the heap size.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_heap_size_offset(&self) -> usize {
        self.peb_heap_data_offset
    }

    /// Offset (from the sandbox base) of the PEB field holding the
    /// file-mappings size.
    #[cfg(feature = "nanvix-unstable")]
    pub(crate) fn get_file_mappings_size_offset(&self) -> usize {
        self.peb_file_mappings_offset
    }

    /// Offset of the PEB's file-mappings pointer: it immediately follows the
    /// `u64` size field.
    #[cfg(feature = "nanvix-unstable")]
    fn get_file_mappings_pointer_offset(&self) -> usize {
        self.get_file_mappings_size_offset() + size_of::<u64>()
    }

    /// Offset of the file-mappings array, which directly follows the PEB
    /// struct.
    #[cfg(feature = "nanvix-unstable")]
    pub(crate) fn get_file_mappings_array_offset(&self) -> usize {
        self.peb_offset + size_of::<HyperlightPEB>()
    }

    /// Guest virtual address of the file-mappings array.
    #[cfg(feature = "nanvix-unstable")]
    fn get_file_mappings_array_gva(&self) -> u64 {
        (Self::BASE_ADDRESS + self.get_file_mappings_array_offset()) as u64
    }

    /// Offset of the PEB's heap pointer: it immediately follows the `u64`
    /// size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_heap_pointer_offset(&self) -> usize {
        self.get_heap_size_offset() + size_of::<u64>()
    }

    /// Total memory size before rounding up to a page boundary: the end of
    /// the init-data blob.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_unaligned_memory_size(&self) -> usize {
        self.init_data_offset + self.init_data_size
    }

    /// Offset of the guest code (always 0; see `new`).
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_guest_code_offset(&self) -> usize {
        self.guest_code_offset
    }

    /// Guest address of the guest code.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_guest_code_address(&self) -> usize {
        Self::BASE_ADDRESS + self.guest_code_offset
    }

    /// Total memory size, rounded up to a page boundary; errors with
    /// `MemoryRequestTooBig` if it exceeds [`Self::MAX_MEMORY_SIZE`].
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_memory_size(&self) -> Result<usize> {
        let total_memory = self.get_unaligned_memory_size();

        // Round up to the next page boundary.
        let remainder = total_memory % PAGE_SIZE_USIZE;
        let multiples = total_memory / PAGE_SIZE_USIZE;
        let size = match remainder {
            0 => total_memory,
            _ => (multiples + 1) * PAGE_SIZE_USIZE,
        };

        if size > Self::MAX_MEMORY_SIZE {
            Err(MemoryRequestTooBig(size, Self::MAX_MEMORY_SIZE))
        } else {
            Ok(size)
        }
    }

    /// Records the page-table size, first checking the scratch region can
    /// hold the fixed input/output buffers plus the page tables; adjusts
    /// `snapshot_size` by the change from the previously recorded size.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn set_pt_size(&mut self, size: usize) -> Result<()> {
        let min_fixed_scratch = hyperlight_common::layout::min_scratch_size(
            self.sandbox_memory_config.get_input_data_size(),
            self.sandbox_memory_config.get_output_data_size(),
        );
        let min_scratch = min_fixed_scratch + size;
        if self.scratch_size < min_scratch {
            return Err(MemoryRequestTooSmall(self.scratch_size, min_scratch));
        }
        let old_pt_size = self.pt_size.unwrap_or(0);
        self.snapshot_size = self.snapshot_size - old_pt_size + size;
        self.pt_size = Some(size);
        Ok(())
    }

    /// Sets the snapshot region size.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn set_snapshot_size(&mut self, new_size: usize) {
        self.snapshot_size = new_size;
    }

    /// Page-table size, or 0 when none has been set.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_pt_size(&self) -> usize {
        self.pt_size.unwrap_or(0)
    }

    /// Builds the ordered list of memory regions (code, PEB, heap, init
    /// data), cross-checking each region's computed offset against the
    /// offsets this layout precomputed in `new` and erroring on any mismatch.
    #[cfg_attr(feature = "i686-guest", allow(unused))]
    pub(crate) fn get_memory_regions_<K: MemoryRegionKind>(
        &self,
        host_base: K::HostBaseType,
    ) -> Result<Vec<MemoryRegion_<K>>> {
        let mut builder = MemoryRegionVecBuilder::new(Self::BASE_ADDRESS, host_base);

        // Guest code region (RWX).
        let peb_offset = builder.push_page_aligned(
            self.code_size,
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE | MemoryRegionFlags::EXECUTE,
            Code,
        );

        let expected_peb_offset = TryInto::<usize>::try_into(self.peb_offset)?;

        if peb_offset != expected_peb_offset {
            return Err(new_error!(
                "PEB offset does not match expected PEB offset expected: {}, actual: {}",
                expected_peb_offset,
                peb_offset
            ));
        }

        // PEB region — on nanvix it also includes the file-mappings array and
        // is writable; otherwise it is read-only.
        #[cfg(feature = "nanvix-unstable")]
        let heap_offset = {
            let peb_and_array_size = size_of::<HyperlightPEB>()
                + hyperlight_common::mem::MAX_FILE_MAPPINGS
                    * size_of::<hyperlight_common::mem::FileMappingInfo>();
            builder.push_page_aligned(
                peb_and_array_size,
                MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
                Peb,
            )
        };
        #[cfg(not(feature = "nanvix-unstable"))]
        let heap_offset =
            builder.push_page_aligned(size_of::<HyperlightPEB>(), MemoryRegionFlags::READ, Peb);

        let expected_heap_offset = TryInto::<usize>::try_into(self.guest_heap_buffer_offset)?;

        if heap_offset != expected_heap_offset {
            return Err(new_error!(
                "Guest Heap offset does not match expected Guest Heap offset expected: {}, actual: {}",
                expected_heap_offset,
                heap_offset
            ));
        }

        // Heap region — executable only when the `executable_heap` feature is
        // enabled.
        #[cfg(feature = "executable_heap")]
        let init_data_offset = builder.push_page_aligned(
            self.heap_size,
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE | MemoryRegionFlags::EXECUTE,
            Heap,
        );
        #[cfg(not(feature = "executable_heap"))]
        let init_data_offset = builder.push_page_aligned(
            self.heap_size,
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
            Heap,
        );

        let expected_init_data_offset = TryInto::<usize>::try_into(self.init_data_offset)?;

        if init_data_offset != expected_init_data_offset {
            return Err(new_error!(
                "Init Data offset does not match expected Init Data offset expected: {}, actual: {}",
                expected_init_data_offset,
                init_data_offset
            ));
        }

        // Init-data region, with caller-supplied permissions when present;
        // skipped entirely when there is no init data.
        let after_init_offset = if self.init_data_size > 0 {
            let mem_flags = self
                .init_data_permissions
                .unwrap_or(DEFAULT_GUEST_BLOB_MEM_FLAGS);
            builder.push_page_aligned(self.init_data_size, mem_flags, InitData)
        } else {
            init_data_offset
        };

        let final_offset = after_init_offset;

        let expected_final_offset = TryInto::<usize>::try_into(self.get_memory_size()?)?;

        if final_offset != expected_final_offset {
            return Err(new_error!(
                "Final offset does not match expected Final offset expected: {}, actual: {}",
                expected_final_offset,
                final_offset
            ));
        }

        Ok(builder.build())
    }

    /// Copies the init-data blob into `out` at the layout's init-data offset.
    /// Expects `bytes.len() == init_data_size` (the slice copy panics on a
    /// length mismatch).
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn write_init_data(&self, out: &mut [u8], bytes: &[u8]) -> Result<()> {
        out[self.init_data_offset..self.init_data_offset + self.init_data_size]
            .copy_from_slice(bytes);
        Ok(())
    }

    /// Fills in the PEB fields in `mem` (a view of guest memory starting at
    /// the sandbox base): the size and pointer for the input/output stacks,
    /// init data, heap, and — on nanvix — the file mappings.
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn write_peb(&self, mem: &mut [u8]) -> Result<()> {
        let guest_offset = SandboxMemoryLayout::BASE_ADDRESS;

        // Bounds-checked little helper: writes `value` (native endianness) at
        // `offset` in `mem`.
        fn write_u64(mem: &mut [u8], offset: usize, value: u64) -> Result<()> {
            if offset + 8 > mem.len() {
                return Err(new_error!(
                    "Cannot write to offset {} in slice of len {}",
                    offset,
                    mem.len()
                ));
            }
            mem[offset..offset + 8].copy_from_slice(&u64::to_ne_bytes(value));
            Ok(())
        }

        // Converts a layout offset field into a guest address.
        macro_rules! get_address {
            ($something:ident) => {
                u64::try_from(guest_offset + self.$something)?
            };
        }

        // Input stack: size and buffer GVA.
        write_u64(
            mem,
            self.get_input_data_size_offset(),
            self.sandbox_memory_config
                .get_input_data_size()
                .try_into()?,
        )?;
        write_u64(
            mem,
            self.get_input_data_pointer_offset(),
            self.get_input_data_buffer_gva(),
        )?;

        // Output stack: size and buffer GVA.
        write_u64(
            mem,
            self.get_output_data_size_offset(),
            self.sandbox_memory_config
                .get_output_data_size()
                .try_into()?,
        )?;
        write_u64(
            mem,
            self.get_output_data_pointer_offset(),
            self.get_output_data_buffer_gva(),
        )?;

        // Init data: actual blob size (unaligned end minus blob start) and
        // guest address.
        write_u64(
            mem,
            self.get_init_data_size_offset(),
            (self.get_unaligned_memory_size() - self.init_data_offset).try_into()?,
        )?;
        let addr = get_address!(init_data_offset);
        write_u64(mem, self.get_init_data_pointer_offset(), addr)?;

        // Heap: size and guest address of the heap buffer.
        let addr = get_address!(guest_heap_buffer_offset);
        write_u64(mem, self.get_heap_size_offset(), self.heap_size.try_into()?)?;
        write_u64(mem, self.get_heap_pointer_offset(), addr)?;

        // File mappings (nanvix only): initial count 0, plus the array GVA.
        #[cfg(feature = "nanvix-unstable")]
        write_u64(mem, self.get_file_mappings_size_offset(), 0)?;
        #[cfg(feature = "nanvix-unstable")]
        write_u64(
            mem,
            self.get_file_mappings_pointer_offset(),
            self.get_file_mappings_array_gva(),
        )?;

        Ok(())
    }

    /// Classifies a guest physical address: scratch region first, then the
    /// snapshot region (starting at `BASE_ADDRESS`, `snapshot_size` bytes),
    /// then any of the supplied mmap regions; `None` if nothing matches.
    pub(crate) fn resolve_gpa(
        &self,
        gpa: u64,
        mmap_regions: &[MemoryRegion],
    ) -> Option<ResolvedGpa<(), ()>> {
        let scratch_base = hyperlight_common::layout::scratch_base_gpa(self.scratch_size);
        if gpa >= scratch_base && gpa < scratch_base + self.scratch_size as u64 {
            return Some(ResolvedGpa {
                offset: (gpa - scratch_base) as usize,
                base: BaseGpaRegion::Scratch(()),
            });
        } else if gpa >= SandboxMemoryLayout::BASE_ADDRESS as u64
            && gpa < SandboxMemoryLayout::BASE_ADDRESS as u64 + self.snapshot_size as u64
        {
            return Some(ResolvedGpa {
                offset: gpa as usize - SandboxMemoryLayout::BASE_ADDRESS,
                base: BaseGpaRegion::Snapshot(()),
            });
        }
        for rgn in mmap_regions {
            if gpa >= rgn.guest_region.start as u64 && gpa < rgn.guest_region.end as u64 {
                return Some(ResolvedGpa {
                    offset: gpa as usize - rgn.guest_region.start,
                    base: BaseGpaRegion::Mmap(rgn.clone()),
                });
            }
        }
        None
    }
}
854
#[cfg(test)]
mod tests {
    use hyperlight_common::mem::PAGE_SIZE_USIZE;

    use super::*;

    // Independently recomputes the size `get_memory_size` should report for a
    // layout with no init data: code + page-aligned PEB (plus the
    // file-mappings array on nanvix builds) + page-aligned heap.
    fn get_expected_memory_size(layout: &SandboxMemoryLayout) -> usize {
        let mut expected_size = 0;
        expected_size += layout.code_size;

        #[cfg(feature = "nanvix-unstable")]
        let peb_and_array = size_of::<HyperlightPEB>()
            + hyperlight_common::mem::MAX_FILE_MAPPINGS
                * size_of::<hyperlight_common::mem::FileMappingInfo>();
        #[cfg(not(feature = "nanvix-unstable"))]
        let peb_and_array = size_of::<HyperlightPEB>();
        expected_size += peb_and_array.next_multiple_of(PAGE_SIZE_USIZE);

        expected_size += layout.heap_size.next_multiple_of(PAGE_SIZE_USIZE);

        expected_size
    }

    // The layout's reported memory size matches the independent computation.
    #[test]
    fn test_get_memory_size() {
        let sbox_cfg = SandboxConfiguration::default();
        let sbox_mem_layout = SandboxMemoryLayout::new(sbox_cfg, 4096, 0, None).unwrap();
        assert_eq!(
            sbox_mem_layout.get_memory_size().unwrap(),
            get_expected_memory_size(&sbox_mem_layout)
        );
    }

    // A scratch region over the 16 GiB cap is rejected by `new`.
    #[test]
    fn test_max_memory_sandbox() {
        let mut cfg = SandboxConfiguration::default();
        cfg.set_scratch_size(17 * 1024 * 1024 * 1024);
        cfg.set_input_data_size(16 * 1024 * 1024 * 1024);
        let layout = SandboxMemoryLayout::new(cfg, 4096, 4096, None);
        assert!(matches!(layout.unwrap_err(), MemoryRequestTooBig(..)));
    }
}