1use std::fmt::Debug;
64use std::mem::{offset_of, size_of};
65
66use hyperlight_common::mem::{HyperlightPEB, PAGE_SIZE_USIZE};
67use tracing::{Span, instrument};
68
69use super::memory_region::MemoryRegionType::{Code, Heap, InitData, Peb};
70use super::memory_region::{
71 DEFAULT_GUEST_BLOB_MEM_FLAGS, MemoryRegion, MemoryRegion_, MemoryRegionFlags, MemoryRegionKind,
72 MemoryRegionVecBuilder,
73};
74#[cfg(any(gdb, feature = "mem_profile"))]
75use super::shared_mem::HostSharedMemory;
76use super::shared_mem::{ExclusiveSharedMemory, ReadonlySharedMemory};
77use crate::error::HyperlightError::{MemoryRequestTooBig, MemoryRequestTooSmall};
78use crate::sandbox::SandboxConfiguration;
79use crate::{Result, new_error};
80
/// Identifies which backing store a resolved guest physical address falls in.
///
/// The type parameters let callers carry whatever handle to the snapshot
/// (`Sn`) and scratch (`Sc`) memories is convenient for their context
/// (e.g. `()`, `&[u8]`, or a shared-memory wrapper).
pub(crate) enum BaseGpaRegion<Sn, Sc> {
    /// Address lies in the snapshot region.
    Snapshot(Sn),
    /// Address lies in the scratch region.
    Scratch(Sc),
    /// Address lies in a memory-mapped region, described by its `MemoryRegion`.
    Mmap(MemoryRegion),
}
86
/// The result of resolving a guest physical address: the backing region the
/// address falls in, plus the byte offset of the address within that region.
pub(crate) struct ResolvedGpa<Sn, Sc> {
    // Byte offset of the resolved address from the start of `base`.
    pub(crate) offset: usize,
    // The backing region containing the address.
    pub(crate) base: BaseGpaRegion<Sn, Sc>,
}
93
// Allow the shared-memory wrappers to be viewed as plain byte slices so that
// generic code can read them uniformly regardless of the concrete wrapper.
impl AsRef<[u8]> for ExclusiveSharedMemory {
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
impl AsRef<[u8]> for ReadonlySharedMemory {
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
104
105impl<Sn, Sc> ResolvedGpa<Sn, Sc> {
106 pub(crate) fn with_memories<Sn2, Sc2>(self, sn: Sn2, sc: Sc2) -> ResolvedGpa<Sn2, Sc2> {
107 ResolvedGpa {
108 offset: self.offset,
109 base: match self.base {
110 BaseGpaRegion::Snapshot(_) => BaseGpaRegion::Snapshot(sn),
111 BaseGpaRegion::Scratch(_) => BaseGpaRegion::Scratch(sc),
112 BaseGpaRegion::Mmap(r) => BaseGpaRegion::Mmap(r),
113 },
114 }
115 }
116}
impl<'a> BaseGpaRegion<&'a [u8], &'a [u8]> {
    /// Returns the full backing memory of this region as a byte slice.
    ///
    /// Note the returned slice borrows for `'a` (the lifetime of the backing
    /// slices), not merely for `'b` (the borrow of `self`).
    pub(crate) fn as_ref<'b>(&'b self) -> &'a [u8] {
        match self {
            BaseGpaRegion::Snapshot(sn) => sn,
            BaseGpaRegion::Scratch(sc) => sc,
            // SAFETY(review): this assumes `host_region` describes a host
            // mapping that is valid, initialized, and live for at least 'a —
            // TODO confirm against the mmap region's lifetime guarantees.
            BaseGpaRegion::Mmap(r) => unsafe {
                #[allow(clippy::useless_conversion)]
                let host_region_base: usize = r.host_region.start.into();
                #[allow(clippy::useless_conversion)]
                let host_region_end: usize = r.host_region.end.into();
                let len = host_region_end - host_region_base;
                std::slice::from_raw_parts(host_region_base as *const u8, len)
            },
        }
    }
}
133impl<'a> ResolvedGpa<&'a [u8], &'a [u8]> {
134 pub(crate) fn as_ref<'b>(&'b self) -> &'a [u8] {
135 let base = self.base.as_ref();
136 if self.offset > base.len() {
137 return &[];
138 }
139 &self.base.as_ref()[self.offset..]
140 }
141}
#[cfg(any(gdb, feature = "mem_profile"))]
// Abstraction over memories that can be read at an arbitrary offset; used by
// the debugging/profiling paths to read guest memory uniformly.
#[allow(unused)] pub(crate) trait ReadableSharedMemory {
    /// Copies `slice.len()` bytes starting at `offset` from this memory into
    /// `slice`, returning an error if the read would run past the end.
    fn copy_to_slice(&self, slice: &mut [u8], offset: usize) -> Result<()>;
}
#[cfg(any(gdb, feature = "mem_profile"))]
// Delegates to the inherent `HostSharedMemory::copy_to_slice`, which performs
// its own bounds handling.
impl ReadableSharedMemory for &HostSharedMemory {
    fn copy_to_slice(&self, slice: &mut [u8], offset: usize) -> Result<()> {
        HostSharedMemory::copy_to_slice(self, slice, offset)
    }
}
#[cfg(any(gdb, feature = "mem_profile"))]
// Works around trait coherence: a blanket `ReadableSharedMemory` impl over
// all `AsRef<[u8]>` types would conflict with the dedicated
// `&HostSharedMemory` impl above. Instead the blanket impl is keyed off this
// private marker trait, implemented only for the exact types we want covered.
mod coherence_hack {
    use super::{ExclusiveSharedMemory, ReadonlySharedMemory};
    #[allow(unused)] pub(super) trait SharedMemoryAsRefMarker: AsRef<[u8]> {}
    impl SharedMemoryAsRefMarker for ExclusiveSharedMemory {}
    impl SharedMemoryAsRefMarker for &ExclusiveSharedMemory {}
    impl SharedMemoryAsRefMarker for ReadonlySharedMemory {}
    impl SharedMemoryAsRefMarker for &ReadonlySharedMemory {}
}
#[cfg(any(gdb, feature = "mem_profile"))]
// Blanket implementation for anything viewable as a byte slice; see
// `coherence_hack` for why a marker trait is used instead of `AsRef<[u8]>`.
impl<T: coherence_hack::SharedMemoryAsRefMarker> ReadableSharedMemory for T {
    fn copy_to_slice(&self, slice: &mut [u8], offset: usize) -> Result<()> {
        let ss: &[u8] = self.as_ref();
        // `checked_add` guards against `offset + slice.len()` wrapping in
        // release builds, where a wrapped `end` could slip past the bounds
        // check below. Reuse `ss` rather than calling `as_ref()` again.
        let end = offset
            .checked_add(slice.len())
            .filter(|&end| end <= ss.len())
            .ok_or_else(|| {
                new_error!(
                    "Attempt to read up to {} in memory of size {}",
                    offset.saturating_add(slice.len()),
                    ss.len()
                )
            })?;
        slice.copy_from_slice(&ss[offset..end]);
        Ok(())
    }
}
#[cfg(any(gdb, feature = "mem_profile"))]
impl<Sn: ReadableSharedMemory, Sc: ReadableSharedMemory> ResolvedGpa<Sn, Sc> {
    /// Copies `slice.len()` bytes from the resolved address into `slice`.
    ///
    /// Snapshot and scratch bases delegate to their `ReadableSharedMemory`
    /// implementations; mmap regions are read directly through the host
    /// mapping, with an equivalent bounds check applied inline.
    #[allow(unused)] pub(crate) fn copy_to_slice(&self, slice: &mut [u8]) -> Result<()> {
        match &self.base {
            BaseGpaRegion::Snapshot(sn) => sn.copy_to_slice(slice, self.offset),
            BaseGpaRegion::Scratch(sc) => sc.copy_to_slice(slice, self.offset),
            // SAFETY(review): as in `BaseGpaRegion::as_ref`, this assumes
            // `host_region` is a valid, live host mapping — TODO confirm.
            BaseGpaRegion::Mmap(r) => unsafe {
                #[allow(clippy::useless_conversion)]
                let host_region_base: usize = r.host_region.start.into();
                #[allow(clippy::useless_conversion)]
                let host_region_end: usize = r.host_region.end.into();
                let len = host_region_end - host_region_base;
                let ss = std::slice::from_raw_parts(host_region_base as *const u8, len);
                let end = self.offset + slice.len();
                if end > ss.len() {
                    return Err(new_error!(
                        "Attempt to read up to {} in memory of size {}",
                        self.offset + slice.len(),
                        ss.len()
                    ));
                }
                slice.copy_from_slice(&ss[self.offset..end]);
                Ok(())
            },
        }
    }
}
215
/// Describes the fixed layout of a sandbox's memory: the guest code, the
/// `HyperlightPEB` control block, the guest heap, and any init-data blob,
/// plus the sizes of the scratch and snapshot regions.
///
/// All `*_offset` fields are byte offsets from the start of sandbox memory
/// (i.e. from `BASE_ADDRESS` in guest-physical terms).
#[derive(Copy, Clone)]
pub(crate) struct SandboxMemoryLayout {
    pub(super) sandbox_memory_config: SandboxConfiguration,
    // Size in bytes of the guest heap buffer.
    pub(super) heap_size: usize,
    // Size in bytes of the init-data blob placed after the heap.
    init_data_size: usize,

    // Offset of the HyperlightPEB structure (first page boundary after code).
    peb_offset: usize,
    // Offsets of individual fields within the PEB (peb_offset + field offset).
    peb_input_data_offset: usize,
    peb_output_data_offset: usize,
    peb_init_data_offset: usize,
    peb_heap_data_offset: usize,
    #[cfg(feature = "nanvix-unstable")]
    peb_file_mappings_offset: usize,

    // Offset of the guest heap buffer (page-aligned, after the PEB).
    guest_heap_buffer_offset: usize,
    // Offset of the init-data blob (page-aligned, after the heap).
    init_data_offset: usize,
    // Size of the guest page tables in the scratch region, once known.
    pt_size: Option<usize>,

    // Guest address of the PEB: BASE_ADDRESS + peb_offset.
    pub(crate) peb_address: usize,
    code_size: usize,
    guest_code_offset: usize,
    #[cfg_attr(feature = "nanvix-unstable", allow(unused))]
    pub(crate) init_data_permissions: Option<MemoryRegionFlags>,

    // Size in bytes of the scratch region, which holds the input/output
    // stacks followed by the page tables.
    scratch_size: usize,
    // Size in bytes of the snapshot region; initialized to the total memory
    // size in `new` and adjusted when the page-table size changes.
    snapshot_size: usize,
}
252
253impl Debug for SandboxMemoryLayout {
254 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
255 let mut ff = f.debug_struct("SandboxMemoryLayout");
256 ff.field(
257 "Total Memory Size",
258 &format_args!("{:#x}", self.get_memory_size().unwrap_or(0)),
259 )
260 .field("Heap Size", &format_args!("{:#x}", self.heap_size))
261 .field(
262 "Init Data Size",
263 &format_args!("{:#x}", self.init_data_size),
264 )
265 .field("PEB Address", &format_args!("{:#x}", self.peb_address))
266 .field("PEB Offset", &format_args!("{:#x}", self.peb_offset))
267 .field("Code Size", &format_args!("{:#x}", self.code_size))
268 .field(
269 "Input Data Offset",
270 &format_args!("{:#x}", self.peb_input_data_offset),
271 )
272 .field(
273 "Output Data Offset",
274 &format_args!("{:#x}", self.peb_output_data_offset),
275 )
276 .field(
277 "Init Data Offset",
278 &format_args!("{:#x}", self.peb_init_data_offset),
279 )
280 .field(
281 "Guest Heap Offset",
282 &format_args!("{:#x}", self.peb_heap_data_offset),
283 );
284 #[cfg(feature = "nanvix-unstable")]
285 ff.field(
286 "File Mappings Offset",
287 &format_args!("{:#x}", self.peb_file_mappings_offset),
288 );
289 ff.field(
290 "Guest Heap Buffer Offset",
291 &format_args!("{:#x}", self.guest_heap_buffer_offset),
292 )
293 .field(
294 "Init Data Offset",
295 &format_args!("{:#x}", self.init_data_offset),
296 )
297 .field("PT Size", &format_args!("{:#x}", self.pt_size.unwrap_or(0)))
298 .field(
299 "Guest Code Offset",
300 &format_args!("{:#x}", self.guest_code_offset),
301 )
302 .field(
303 "Scratch region size",
304 &format_args!("{:#x}", self.scratch_size),
305 )
306 .finish()
307 }
308}
309
310impl SandboxMemoryLayout {
311 const MAX_MEMORY_SIZE: usize = (16 * 1024 * 1024 * 1024) - Self::BASE_ADDRESS; #[cfg(not(feature = "nanvix-unstable"))]
320 pub(crate) const BASE_ADDRESS: usize = 0x1000;
321 #[cfg(feature = "nanvix-unstable")]
322 pub(crate) const BASE_ADDRESS: usize = 0x0;
323
324 pub(crate) const STACK_POINTER_SIZE_BYTES: u64 = 8;
326
    /// Builds a new `SandboxMemoryLayout` for the given configuration.
    ///
    /// Validates the configured scratch size (it must fit under
    /// `MAX_MEMORY_SIZE` and be large enough for the input/output stacks),
    /// then computes the page-aligned offsets of the code, PEB, guest heap
    /// and init-data regions. The snapshot size is initialized to the total
    /// memory size of the layout.
    ///
    /// # Errors
    ///
    /// Returns `MemoryRequestTooBig` / `MemoryRequestTooSmall` for invalid
    /// scratch sizes, or an error if the configured heap size does not fit
    /// in a `usize`.
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn new(
        cfg: SandboxConfiguration,
        code_size: usize,
        init_data_size: usize,
        init_data_permissions: Option<MemoryRegionFlags>,
    ) -> Result<Self> {
        let heap_size = usize::try_from(cfg.get_heap_size())?;
        let scratch_size = cfg.get_scratch_size();
        if scratch_size > Self::MAX_MEMORY_SIZE {
            return Err(MemoryRequestTooBig(scratch_size, Self::MAX_MEMORY_SIZE));
        }
        // The scratch region must at least hold the input and output stacks.
        let min_scratch_size = hyperlight_common::layout::min_scratch_size(
            cfg.get_input_data_size(),
            cfg.get_output_data_size(),
        );
        if scratch_size < min_scratch_size {
            return Err(MemoryRequestTooSmall(scratch_size, min_scratch_size));
        }

        // Guest code sits at the start; the PEB follows on the next page
        // boundary after it.
        let guest_code_offset = 0;
        let peb_offset = code_size.next_multiple_of(PAGE_SIZE_USIZE);
        // Offsets of the individual PEB fields, relative to sandbox memory.
        let peb_input_data_offset = peb_offset + offset_of!(HyperlightPEB, input_stack);
        let peb_output_data_offset = peb_offset + offset_of!(HyperlightPEB, output_stack);
        let peb_init_data_offset = peb_offset + offset_of!(HyperlightPEB, init_data);
        let peb_heap_data_offset = peb_offset + offset_of!(HyperlightPEB, guest_heap);
        #[cfg(feature = "nanvix-unstable")]
        let peb_file_mappings_offset = peb_offset + offset_of!(HyperlightPEB, file_mappings);

        let peb_address = Self::BASE_ADDRESS + peb_offset;
        // Under nanvix-unstable, a fixed-capacity array of file-mapping
        // descriptors immediately follows the PEB and the heap starts on the
        // next page boundary after it; otherwise the heap follows the PEB
        // directly.
        #[cfg(feature = "nanvix-unstable")]
        let file_mappings_array_end = peb_offset
            + size_of::<HyperlightPEB>()
            + hyperlight_common::mem::MAX_FILE_MAPPINGS
                * size_of::<hyperlight_common::mem::FileMappingInfo>();
        #[cfg(feature = "nanvix-unstable")]
        let guest_heap_buffer_offset = file_mappings_array_end.next_multiple_of(PAGE_SIZE_USIZE);
        #[cfg(not(feature = "nanvix-unstable"))]
        let guest_heap_buffer_offset =
            (peb_offset + size_of::<HyperlightPEB>()).next_multiple_of(PAGE_SIZE_USIZE);

        // The init-data blob starts on the page boundary after the heap.
        let init_data_offset =
            (guest_heap_buffer_offset + heap_size).next_multiple_of(PAGE_SIZE_USIZE);
        let mut ret = Self {
            peb_offset,
            heap_size,
            peb_input_data_offset,
            peb_output_data_offset,
            peb_init_data_offset,
            peb_heap_data_offset,
            #[cfg(feature = "nanvix-unstable")]
            peb_file_mappings_offset,
            sandbox_memory_config: cfg,
            code_size,
            guest_heap_buffer_offset,
            peb_address,
            guest_code_offset,
            init_data_offset,
            init_data_size,
            init_data_permissions,
            pt_size: None,
            scratch_size,
            snapshot_size: 0,
        };
        // The snapshot initially covers the whole (page-aligned) layout.
        ret.set_snapshot_size(ret.get_memory_size()?);
        Ok(ret)
    }
409
    /// Offset of the PEB's output-stack descriptor (size field).
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_output_data_size_offset(&self) -> usize {
        self.peb_output_data_offset
    }

    /// Offset of the PEB's init-data descriptor (size field).
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_init_data_size_offset(&self) -> usize {
        self.peb_init_data_offset
    }

    /// Size in bytes of the scratch region.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_scratch_size(&self) -> usize {
        self.scratch_size
    }

    /// Offset of the PEB's output-stack pointer, which sits immediately
    /// after the `u64` size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_output_data_pointer_offset(&self) -> usize {
        self.get_output_data_size_offset() + size_of::<u64>()
    }

    /// Offset of the PEB's init-data pointer, which sits immediately after
    /// the `u64` size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_init_data_pointer_offset(&self) -> usize {
        self.get_init_data_size_offset() + size_of::<u64>()
    }

    /// Guest virtual address of the output-data buffer, which follows the
    /// input-data buffer at the base of the scratch region.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_output_data_buffer_gva(&self) -> u64 {
        hyperlight_common::layout::scratch_base_gva(self.scratch_size)
            + self.sandbox_memory_config.get_input_data_size() as u64
    }

    /// Offset of the output-data buffer within the scratch region (it
    /// follows the input-data buffer).
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_output_data_buffer_scratch_host_offset(&self) -> usize {
        self.sandbox_memory_config.get_input_data_size()
    }

    /// Offset of the PEB's input-stack descriptor (size field).
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_input_data_size_offset(&self) -> usize {
        self.peb_input_data_offset
    }

    /// Offset of the PEB's input-stack pointer, which sits immediately after
    /// the `u64` size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_input_data_pointer_offset(&self) -> usize {
        self.get_input_data_size_offset() + size_of::<u64>()
    }

    /// Guest virtual address of the input-data buffer, at the base of the
    /// scratch region.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_input_data_buffer_gva(&self) -> u64 {
        hyperlight_common::layout::scratch_base_gva(self.scratch_size)
    }

    /// Offset of the input-data buffer within the scratch region (it is the
    /// first thing in the region).
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_input_data_buffer_scratch_host_offset(&self) -> usize {
        0
    }
486
    /// Offset of the page-table base within the scratch region: the first
    /// page boundary after the input and output data buffers.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_pt_base_scratch_offset(&self) -> usize {
        (self.sandbox_memory_config.get_input_data_size()
            + self.sandbox_memory_config.get_output_data_size())
        .next_multiple_of(hyperlight_common::vmem::PAGE_SIZE)
    }

    /// Guest physical address of the page-table base.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_pt_base_gpa(&self) -> u64 {
        hyperlight_common::layout::scratch_base_gpa(self.scratch_size)
            + self.get_pt_base_scratch_offset() as u64
    }

    /// First guest physical address in the scratch region past the page
    /// tables (or past the page-table base if the PT size is not yet known).
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_first_free_scratch_gpa(&self) -> u64 {
        self.get_pt_base_gpa() + self.pt_size.unwrap_or(0) as u64
    }

    /// Offset of the PEB's guest-heap descriptor (size field).
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_heap_size_offset(&self) -> usize {
        self.peb_heap_data_offset
    }

    /// Offset of the PEB's file-mappings descriptor (size field).
    #[cfg(feature = "nanvix-unstable")]
    pub(crate) fn get_file_mappings_size_offset(&self) -> usize {
        self.peb_file_mappings_offset
    }

    /// Offset of the PEB's file-mappings pointer, immediately after the
    /// `u64` size field.
    #[cfg(feature = "nanvix-unstable")]
    fn get_file_mappings_pointer_offset(&self) -> usize {
        self.get_file_mappings_size_offset() + size_of::<u64>()
    }

    /// Offset of the file-mappings array, which directly follows the PEB.
    #[cfg(feature = "nanvix-unstable")]
    pub(crate) fn get_file_mappings_array_offset(&self) -> usize {
        self.peb_offset + size_of::<HyperlightPEB>()
    }

    /// Guest virtual address of the file-mappings array.
    #[cfg(feature = "nanvix-unstable")]
    fn get_file_mappings_array_gva(&self) -> u64 {
        (Self::BASE_ADDRESS + self.get_file_mappings_array_offset()) as u64
    }

    /// Offset of the PEB's guest-heap pointer, immediately after the `u64`
    /// size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_heap_pointer_offset(&self) -> usize {
        self.get_heap_size_offset() + size_of::<u64>()
    }

    /// Total size of the layout before rounding up to a page boundary: the
    /// end of the init-data blob.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_unaligned_memory_size(&self) -> usize {
        self.init_data_offset + self.init_data_size
    }

    /// Offset of the guest code (always 0 — code is first in the layout).
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_guest_code_offset(&self) -> usize {
        self.guest_code_offset
    }

    /// Guest address of the guest code.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_guest_code_address(&self) -> usize {
        Self::BASE_ADDRESS + self.guest_code_offset
    }
570
571 #[instrument(skip_all, parent = Span::current(), level= "Trace")]
574 pub(crate) fn get_memory_size(&self) -> Result<usize> {
575 let total_memory = self.get_unaligned_memory_size();
576
577 let remainder = total_memory % PAGE_SIZE_USIZE;
579 let multiples = total_memory / PAGE_SIZE_USIZE;
580 let size = match remainder {
581 0 => total_memory,
582 _ => (multiples + 1) * PAGE_SIZE_USIZE,
583 };
584
585 if size > Self::MAX_MEMORY_SIZE {
586 Err(MemoryRequestTooBig(size, Self::MAX_MEMORY_SIZE))
587 } else {
588 Ok(size)
589 }
590 }
591
    /// Records the size of the guest page tables and adjusts the snapshot
    /// size accordingly.
    ///
    /// # Errors
    ///
    /// Returns `MemoryRequestTooSmall` if the scratch region cannot hold the
    /// input/output stacks plus page tables of this size.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn set_pt_size(&mut self, size: usize) -> Result<()> {
        let min_fixed_scratch = hyperlight_common::layout::min_scratch_size(
            self.sandbox_memory_config.get_input_data_size(),
            self.sandbox_memory_config.get_output_data_size(),
        );
        let min_scratch = min_fixed_scratch + size;
        if self.scratch_size < min_scratch {
            return Err(MemoryRequestTooSmall(self.scratch_size, min_scratch));
        }
        // Replace any previously-recorded page-table contribution to the
        // snapshot size with the new one.
        let old_pt_size = self.pt_size.unwrap_or(0);
        self.snapshot_size = self.snapshot_size - old_pt_size + size;
        self.pt_size = Some(size);
        Ok(())
    }
608
    /// Overwrites the recorded snapshot-region size.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn set_snapshot_size(&mut self, new_size: usize) {
        self.snapshot_size = new_size;
    }

    /// Size of the guest page tables, or 0 if not yet set.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_pt_size(&self) -> usize {
        self.pt_size.unwrap_or(0)
    }
619
    /// Builds the ordered list of memory regions (code, PEB, heap, init
    /// data) that make up the sandbox layout.
    ///
    /// Each region's computed offset is cross-checked against the offsets
    /// calculated in `new`; any mismatch indicates the builder and the
    /// layout computation have diverged and is reported as an error.
    #[cfg_attr(feature = "nanvix-unstable", allow(unused))]
    pub(crate) fn get_memory_regions_<K: MemoryRegionKind>(
        &self,
        host_base: K::HostBaseType,
    ) -> Result<Vec<MemoryRegion_<K>>> {
        let mut builder = MemoryRegionVecBuilder::new(Self::BASE_ADDRESS, host_base);

        // Guest code region (RWX).
        let peb_offset = builder.push_page_aligned(
            self.code_size,
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE | MemoryRegionFlags::EXECUTE,
            Code,
        );

        let expected_peb_offset = TryInto::<usize>::try_into(self.peb_offset)?;

        if peb_offset != expected_peb_offset {
            return Err(new_error!(
                "PEB offset does not match expected PEB offset expected: {}, actual: {}",
                expected_peb_offset,
                peb_offset
            ));
        }

        // PEB region. Under nanvix-unstable it also covers the file-mappings
        // array that follows the PEB, and is writable.
        #[cfg(feature = "nanvix-unstable")]
        let heap_offset = {
            let peb_and_array_size = size_of::<HyperlightPEB>()
                + hyperlight_common::mem::MAX_FILE_MAPPINGS
                    * size_of::<hyperlight_common::mem::FileMappingInfo>();
            builder.push_page_aligned(
                peb_and_array_size,
                MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
                Peb,
            )
        };
        #[cfg(not(feature = "nanvix-unstable"))]
        let heap_offset =
            builder.push_page_aligned(size_of::<HyperlightPEB>(), MemoryRegionFlags::READ, Peb);

        let expected_heap_offset = TryInto::<usize>::try_into(self.guest_heap_buffer_offset)?;

        if heap_offset != expected_heap_offset {
            return Err(new_error!(
                "Guest Heap offset does not match expected Guest Heap offset expected: {}, actual: {}",
                expected_heap_offset,
                heap_offset
            ));
        }

        // Guest heap region; executable only with the executable_heap feature.
        #[cfg(feature = "executable_heap")]
        let init_data_offset = builder.push_page_aligned(
            self.heap_size,
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE | MemoryRegionFlags::EXECUTE,
            Heap,
        );
        #[cfg(not(feature = "executable_heap"))]
        let init_data_offset = builder.push_page_aligned(
            self.heap_size,
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
            Heap,
        );

        let expected_init_data_offset = TryInto::<usize>::try_into(self.init_data_offset)?;

        if init_data_offset != expected_init_data_offset {
            return Err(new_error!(
                "Init Data offset does not match expected Init Data offset expected: {}, actual: {}",
                expected_init_data_offset,
                init_data_offset
            ));
        }

        // Optional init-data region, with caller-supplied or default flags.
        let after_init_offset = if self.init_data_size > 0 {
            let mem_flags = self
                .init_data_permissions
                .unwrap_or(DEFAULT_GUEST_BLOB_MEM_FLAGS);
            builder.push_page_aligned(self.init_data_size, mem_flags, InitData)
        } else {
            init_data_offset
        };

        let final_offset = after_init_offset;

        let expected_final_offset = TryInto::<usize>::try_into(self.get_memory_size()?)?;

        if final_offset != expected_final_offset {
            return Err(new_error!(
                "Final offset does not match expected Final offset expected: {}, actual: {}",
                expected_final_offset,
                final_offset
            ));
        }

        Ok(builder.build())
    }
720
721 #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
722 pub(crate) fn write_init_data(&self, out: &mut [u8], bytes: &[u8]) -> Result<()> {
723 out[self.init_data_offset..self.init_data_offset + self.init_data_size]
724 .copy_from_slice(bytes);
725 Ok(())
726 }
727
    /// Populates the PEB fields in `mem` (a view of sandbox memory starting
    /// at offset 0): the sizes and guest addresses of the input stack,
    /// output stack, init data, guest heap, and (under nanvix-unstable) the
    /// file-mappings array.
    ///
    /// # Errors
    ///
    /// Returns an error if any write would fall outside `mem`, or if a size
    /// does not fit in a `u64`.
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn write_peb(&self, mem: &mut [u8]) -> Result<()> {
        let guest_offset = SandboxMemoryLayout::BASE_ADDRESS;

        // Bounds-checked little helper: writes `value` in native byte order
        // at `offset`, erroring rather than panicking on short slices.
        fn write_u64(mem: &mut [u8], offset: usize, value: u64) -> Result<()> {
            if offset + 8 > mem.len() {
                return Err(new_error!(
                    "Cannot write to offset {} in slice of len {}",
                    offset,
                    mem.len()
                ));
            }
            mem[offset..offset + 8].copy_from_slice(&u64::to_ne_bytes(value));
            Ok(())
        }

        // Converts a layout offset field into a guest address
        // (BASE_ADDRESS + offset) as u64.
        macro_rules! get_address {
            ($something:ident) => {
                u64::try_from(guest_offset + self.$something)?
            };
        }

        // Input stack: size and GVA (the input buffer sits at the base of
        // the scratch region).
        write_u64(
            mem,
            self.get_input_data_size_offset(),
            self.sandbox_memory_config
                .get_input_data_size()
                .try_into()?,
        )?;
        write_u64(
            mem,
            self.get_input_data_pointer_offset(),
            self.get_input_data_buffer_gva(),
        )?;

        // Output stack: size and GVA (follows the input buffer in scratch).
        write_u64(
            mem,
            self.get_output_data_size_offset(),
            self.sandbox_memory_config
                .get_output_data_size()
                .try_into()?,
        )?;
        write_u64(
            mem,
            self.get_output_data_pointer_offset(),
            self.get_output_data_buffer_gva(),
        )?;

        // Init data: actual (unpadded) blob size and its guest address.
        write_u64(
            mem,
            self.get_init_data_size_offset(),
            (self.get_unaligned_memory_size() - self.init_data_offset).try_into()?,
        )?;
        let addr = get_address!(init_data_offset);
        write_u64(mem, self.get_init_data_pointer_offset(), addr)?;

        // Guest heap: size and guest address of the heap buffer.
        let addr = get_address!(guest_heap_buffer_offset);
        write_u64(mem, self.get_heap_size_offset(), self.heap_size.try_into()?)?;
        write_u64(mem, self.get_heap_pointer_offset(), addr)?;

        // File mappings (nanvix-unstable): initially empty, pointing at the
        // array that follows the PEB.
        #[cfg(feature = "nanvix-unstable")]
        write_u64(mem, self.get_file_mappings_size_offset(), 0)?;
        #[cfg(feature = "nanvix-unstable")]
        write_u64(
            mem,
            self.get_file_mappings_pointer_offset(),
            self.get_file_mappings_array_gva(),
        )?;

        Ok(())
    }
823
824 pub(crate) fn resolve_gpa(
826 &self,
827 gpa: u64,
828 mmap_regions: &[MemoryRegion],
829 ) -> Option<ResolvedGpa<(), ()>> {
830 let scratch_base = hyperlight_common::layout::scratch_base_gpa(self.scratch_size);
831 if gpa >= scratch_base && gpa < scratch_base + self.scratch_size as u64 {
832 return Some(ResolvedGpa {
833 offset: (gpa - scratch_base) as usize,
834 base: BaseGpaRegion::Scratch(()),
835 });
836 } else if gpa >= SandboxMemoryLayout::BASE_ADDRESS as u64
837 && gpa < SandboxMemoryLayout::BASE_ADDRESS as u64 + self.snapshot_size as u64
838 {
839 return Some(ResolvedGpa {
840 offset: gpa as usize - SandboxMemoryLayout::BASE_ADDRESS,
841 base: BaseGpaRegion::Snapshot(()),
842 });
843 }
844 for rgn in mmap_regions {
845 if gpa >= rgn.guest_region.start as u64 && gpa < rgn.guest_region.end as u64 {
846 return Some(ResolvedGpa {
847 offset: gpa as usize - rgn.guest_region.start,
848 base: BaseGpaRegion::Mmap(rgn.clone()),
849 });
850 }
851 }
852 None
853 }
854}
855
#[cfg(test)]
mod tests {
    use hyperlight_common::mem::PAGE_SIZE_USIZE;

    use super::*;

    /// Recomputes the expected total memory size from first principles:
    /// code, then the page-aligned PEB (plus the file-mappings array under
    /// nanvix-unstable), then the page-aligned heap.
    ///
    /// Assumes `code_size` is already page-aligned and `init_data_size` is
    /// zero, as in the test below.
    fn get_expected_memory_size(layout: &SandboxMemoryLayout) -> usize {
        let mut expected_size = 0;
        expected_size += layout.code_size;

        #[cfg(feature = "nanvix-unstable")]
        let peb_and_array = size_of::<HyperlightPEB>()
            + hyperlight_common::mem::MAX_FILE_MAPPINGS
                * size_of::<hyperlight_common::mem::FileMappingInfo>();
        #[cfg(not(feature = "nanvix-unstable"))]
        let peb_and_array = size_of::<HyperlightPEB>();
        expected_size += peb_and_array.next_multiple_of(PAGE_SIZE_USIZE);

        expected_size += layout.heap_size.next_multiple_of(PAGE_SIZE_USIZE);

        expected_size
    }

    // The layout's reported memory size should match the independently
    // recomputed expectation.
    #[test]
    fn test_get_memory_size() {
        let sbox_cfg = SandboxConfiguration::default();
        let sbox_mem_layout = SandboxMemoryLayout::new(sbox_cfg, 4096, 0, None).unwrap();
        assert_eq!(
            sbox_mem_layout.get_memory_size().unwrap(),
            get_expected_memory_size(&sbox_mem_layout)
        );
    }

    // A scratch size above MAX_MEMORY_SIZE must be rejected at construction.
    #[test]
    fn test_max_memory_sandbox() {
        let mut cfg = SandboxConfiguration::default();
        cfg.set_scratch_size(17 * 1024 * 1024 * 1024);
        cfg.set_input_data_size(16 * 1024 * 1024 * 1024);
        let layout = SandboxMemoryLayout::new(cfg, 4096, 4096, None);
        assert!(matches!(layout.unwrap_err(), MemoryRequestTooBig(..)));
    }
}