use std::fmt::Debug;
use std::mem::{offset_of, size_of};

use hyperlight_common::mem::{GuestMemoryRegion, HyperlightPEB, PAGE_SIZE_USIZE};
use rand::{RngCore, rng};
use tracing::{Span, instrument};

#[cfg(feature = "init-paging")]
use super::memory_region::MemoryRegionType::PageTables;
use super::memory_region::MemoryRegionType::{
    Code, GuardPage, Heap, HostFunctionDefinitions, InitData, InputData, OutputData, Peb, Stack,
};
use super::memory_region::{
    DEFAULT_GUEST_BLOB_MEM_FLAGS, MemoryRegion, MemoryRegionFlags, MemoryRegionVecBuilder,
};
#[cfg(feature = "init-paging")]
use super::mgr::AMOUNT_OF_MEMORY_PER_PT;
use super::shared_mem::{ExclusiveSharedMemory, GuestSharedMemory, SharedMemory};
use crate::error::HyperlightError::{GuestOffsetIsInvalid, MemoryRequestTooBig};
use crate::sandbox::SandboxConfiguration;
use crate::{Result, new_error};

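// Overall layout of sandbox memory, as implied by the offset computations in
// `SandboxMemoryLayout::new()` below (lowest addresses first; the page-table
// region is only present when the `init-paging` feature is enabled):
//
//   +-------------------------------------+
//   | Page tables (PML4, PDPT, PD, PTs)   |  init-paging only
//   | Guest code                          |
//   | PEB (HyperlightPEB)                 |
//   | Host function definitions buffer    |
//   | Input data buffer                   |
//   | Output data buffer                  |
//   | Guest heap                          |
//   | Guard page (1 page)                 |
//   | Guest stack                         |
//   | Init data blob (optional)           |
//   +-------------------------------------+
//
// Each region starts on a page boundary.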
#[derive(Copy, Clone)]
pub(crate) struct SandboxMemoryLayout {
    pub(super) sandbox_memory_config: SandboxConfiguration,
    pub(super) stack_size: usize,
    pub(super) heap_size: usize,
    init_data_size: usize,

    peb_offset: usize,
    peb_security_cookie_seed_offset: usize,
    peb_guest_dispatch_function_ptr_offset: usize,
    peb_code_pointer_offset: usize,
    pub(super) peb_host_function_definitions_offset: usize,
    peb_input_data_offset: usize,
    peb_output_data_offset: usize,
    peb_init_data_offset: usize,
    peb_heap_data_offset: usize,
    peb_guest_stack_data_offset: usize,

    pub(crate) host_function_definitions_buffer_offset: usize,
    pub(super) input_data_buffer_offset: usize,
    pub(super) output_data_buffer_offset: usize,
    guest_heap_buffer_offset: usize,
    guard_page_offset: usize,
    guest_user_stack_buffer_offset: usize,
    init_data_offset: usize,

    pub(crate) peb_address: usize,
    code_size: usize,
    total_page_table_size: usize,
    guest_code_offset: usize,
    pub(crate) init_data_permissions: Option<MemoryRegionFlags>,
}

impl Debug for SandboxMemoryLayout {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SandboxMemoryLayout")
            .field(
                "Total Memory Size",
                &format_args!("{:#x}", self.get_memory_size().unwrap_or(0)),
            )
            .field("Stack Size", &format_args!("{:#x}", self.stack_size))
            .field("Heap Size", &format_args!("{:#x}", self.heap_size))
            .field(
                "Init Data Size",
                &format_args!("{:#x}", self.init_data_size),
            )
            .field("PEB Address", &format_args!("{:#x}", self.peb_address))
            .field("PEB Offset", &format_args!("{:#x}", self.peb_offset))
            .field("Code Size", &format_args!("{:#x}", self.code_size))
            .field(
                "Security Cookie Seed Offset",
                &format_args!("{:#x}", self.peb_security_cookie_seed_offset),
            )
            .field(
                "Guest Dispatch Function Pointer Offset",
                &format_args!("{:#x}", self.peb_guest_dispatch_function_ptr_offset),
            )
            .field(
                "Host Function Definitions Offset",
                &format_args!("{:#x}", self.peb_host_function_definitions_offset),
            )
            .field(
                "Code Pointer Offset",
                &format_args!("{:#x}", self.peb_code_pointer_offset),
            )
            .field(
                "Input Data Offset",
                &format_args!("{:#x}", self.peb_input_data_offset),
            )
            .field(
                "Output Data Offset",
                &format_args!("{:#x}", self.peb_output_data_offset),
            )
            .field(
                "Init Data Offset",
                &format_args!("{:#x}", self.peb_init_data_offset),
            )
            .field(
                "Guest Heap Offset",
                &format_args!("{:#x}", self.peb_heap_data_offset),
            )
            .field(
                "Guest Stack Offset",
                &format_args!("{:#x}", self.peb_guest_stack_data_offset),
            )
            .field(
                "Host Function Definitions Buffer Offset",
                &format_args!("{:#x}", self.host_function_definitions_buffer_offset),
            )
            .field(
                "Input Data Buffer Offset",
                &format_args!("{:#x}", self.input_data_buffer_offset),
            )
            .field(
                "Output Data Buffer Offset",
                &format_args!("{:#x}", self.output_data_buffer_offset),
            )
            .field(
                "Guest Heap Buffer Offset",
                &format_args!("{:#x}", self.guest_heap_buffer_offset),
            )
            .field(
                "Guard Page Offset",
                &format_args!("{:#x}", self.guard_page_offset),
            )
            .field(
                "Guest User Stack Buffer Offset",
                &format_args!("{:#x}", self.guest_user_stack_buffer_offset),
            )
            .field(
                "Init Data Buffer Offset",
                &format_args!("{:#x}", self.init_data_offset),
            )
            .field(
                "Page Table Size",
                &format_args!("{:#x}", self.total_page_table_size),
            )
            .field(
                "Guest Code Offset",
                &format_args!("{:#x}", self.guest_code_offset),
            )
            .finish()
    }
}

impl SandboxMemoryLayout {
    /// Offset of the PML4 table from the start of sandbox memory.
    pub(crate) const PML4_OFFSET: usize = 0x0000;
    /// Offset of the PDPT from the start of sandbox memory.
    #[cfg(feature = "init-paging")]
    pub(super) const PDPT_OFFSET: usize = 0x1000;
    /// Offset of the page directory from the start of sandbox memory.
    #[cfg(feature = "init-paging")]
    pub(super) const PD_OFFSET: usize = 0x2000;
    /// Offset of the first page table from the start of sandbox memory.
    #[cfg(feature = "init-paging")]
    pub(super) const PT_OFFSET: usize = 0x3000;
    /// Guest address of the page directory.
    #[cfg(feature = "init-paging")]
    pub(super) const PD_GUEST_ADDRESS: usize = Self::BASE_ADDRESS + Self::PD_OFFSET;
    /// Guest address of the PDPT.
    #[cfg(feature = "init-paging")]
    pub(super) const PDPT_GUEST_ADDRESS: usize = Self::BASE_ADDRESS + Self::PDPT_OFFSET;
    /// Guest address of the first page table.
    #[cfg(feature = "init-paging")]
    pub(super) const PT_GUEST_ADDRESS: usize = Self::BASE_ADDRESS + Self::PT_OFFSET;
    /// Maximum total sandbox memory size (1 GiB minus the base address).
    const MAX_MEMORY_SIZE: usize = 0x40000000 - Self::BASE_ADDRESS;

    /// Base guest address at which sandbox memory is mapped.
    pub(crate) const BASE_ADDRESS: usize = 0x0;

    /// Size in bytes of a stack-pointer slot.
    const STACK_POINTER_SIZE_BYTES: u64 = 8;

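    /// Creates a new `SandboxMemoryLayout` for the given configuration, code
    /// size, stack size, heap size, and optional init-data blob, computing
    /// the offset of every memory region and PEB field. Region offsets are
    /// page-aligned with `round_up_to`, and PEB field offsets are derived
    /// with `offset_of!` on `HyperlightPEB`.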
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn new(
        cfg: SandboxConfiguration,
        code_size: usize,
        stack_size: usize,
        heap_size: usize,
        init_data_size: usize,
        init_data_permissions: Option<MemoryRegionFlags>,
    ) -> Result<Self> {
        #[cfg(feature = "init-paging")]
        let base = Self::get_total_page_table_size(cfg, code_size, stack_size, heap_size);
        #[cfg(not(feature = "init-paging"))]
        let base = Self::BASE_ADDRESS;
        let guest_code_offset = base;
        let peb_offset = base + round_up_to(code_size, PAGE_SIZE_USIZE);
        let peb_security_cookie_seed_offset =
            peb_offset + offset_of!(HyperlightPEB, security_cookie_seed);
        let peb_guest_dispatch_function_ptr_offset =
            peb_offset + offset_of!(HyperlightPEB, guest_function_dispatch_ptr);
        let peb_code_pointer_offset = peb_offset + offset_of!(HyperlightPEB, code_ptr);
        let peb_input_data_offset = peb_offset + offset_of!(HyperlightPEB, input_stack);
        let peb_output_data_offset = peb_offset + offset_of!(HyperlightPEB, output_stack);
        let peb_init_data_offset = peb_offset + offset_of!(HyperlightPEB, init_data);
        let peb_heap_data_offset = peb_offset + offset_of!(HyperlightPEB, guest_heap);
        let peb_guest_stack_data_offset = peb_offset + offset_of!(HyperlightPEB, guest_stack);
        let peb_host_function_definitions_offset =
            peb_offset + offset_of!(HyperlightPEB, host_function_definitions);

        let peb_address = Self::BASE_ADDRESS + peb_offset;
        let host_function_definitions_buffer_offset = round_up_to(
            peb_host_function_definitions_offset + size_of::<GuestMemoryRegion>(),
            PAGE_SIZE_USIZE,
        );
        let input_data_buffer_offset = round_up_to(
            host_function_definitions_buffer_offset + cfg.get_host_function_definition_size(),
            PAGE_SIZE_USIZE,
        );
        let output_data_buffer_offset = round_up_to(
            input_data_buffer_offset + cfg.get_input_data_size(),
            PAGE_SIZE_USIZE,
        );
        let guest_heap_buffer_offset = round_up_to(
            output_data_buffer_offset + cfg.get_output_data_size(),
            PAGE_SIZE_USIZE,
        );
        let guard_page_offset = round_up_to(guest_heap_buffer_offset + heap_size, PAGE_SIZE_USIZE);
        let guest_user_stack_buffer_offset = guard_page_offset + PAGE_SIZE_USIZE;
        let stack_size_rounded = round_up_to(stack_size, PAGE_SIZE_USIZE);
        let init_data_offset = guest_user_stack_buffer_offset + stack_size_rounded;

        Ok(Self {
            peb_offset,
            stack_size: stack_size_rounded,
            heap_size,
            peb_security_cookie_seed_offset,
            peb_guest_dispatch_function_ptr_offset,
            peb_code_pointer_offset,
            peb_host_function_definitions_offset,
            peb_input_data_offset,
            peb_output_data_offset,
            peb_init_data_offset,
            peb_heap_data_offset,
            peb_guest_stack_data_offset,
            sandbox_memory_config: cfg,
            code_size,
            host_function_definitions_buffer_offset,
            input_data_buffer_offset,
            output_data_buffer_offset,
            guest_heap_buffer_offset,
            guest_user_stack_buffer_offset,
            peb_address,
            guard_page_offset,
            total_page_table_size: base,
            guest_code_offset,
            init_data_offset,
            init_data_size,
            init_data_permissions,
        })
    }

    /// Offset of the output data size field in the PEB.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_output_data_size_offset(&self) -> usize {
        self.peb_output_data_offset
    }

    /// Offset of the host function definitions size field in the PEB.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_host_function_definitions_size_offset(&self) -> usize {
        self.peb_host_function_definitions_offset
    }

    /// Offset of the host function definitions pointer field in the PEB,
    /// one u64 past the corresponding size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_host_function_definitions_pointer_offset(&self) -> usize {
        self.peb_host_function_definitions_offset + size_of::<u64>()
    }

    /// Offset of the init data size field in the PEB.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_init_data_size_offset(&self) -> usize {
        self.peb_init_data_offset
    }

    /// Offset of the minimum guest stack address field in the PEB.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_min_guest_stack_address_offset(&self) -> usize {
        self.peb_guest_stack_data_offset
    }

    /// Page-rounded size of the guest stack.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_guest_stack_size(&self) -> usize {
        self.stack_size
    }

    /// Offset of the output data pointer field in the PEB, one u64 past the
    /// output data size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_output_data_pointer_offset(&self) -> usize {
        self.get_output_data_size_offset() + size_of::<u64>()
    }

    /// Offset of the init data pointer field in the PEB, one u64 past the
    /// init data size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_init_data_pointer_offset(&self) -> usize {
        self.get_init_data_size_offset() + size_of::<u64>()
    }

    /// Offset of the output data buffer (used by tests).
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    #[cfg(test)]
    pub(crate) fn get_output_data_offset(&self) -> usize {
        self.output_data_buffer_offset
    }

    /// Offset of the input data size field in the PEB.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_input_data_size_offset(&self) -> usize {
        self.peb_input_data_offset
    }

    /// Offset of the input data pointer field in the PEB, one u64 past the
    /// input data size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_input_data_pointer_offset(&self) -> usize {
        self.get_input_data_size_offset() + size_of::<u64>()
    }

    /// Offset of the code pointer field in the PEB.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_code_pointer_offset(&self) -> usize {
        self.peb_code_pointer_offset
    }

    /// Offset of the guest dispatch function pointer field in the PEB.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_dispatch_function_pointer_offset(&self) -> usize {
        self.peb_guest_dispatch_function_ptr_offset
    }

    /// Offset of the heap size field in the PEB.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_heap_size_offset(&self) -> usize {
        self.peb_heap_data_offset
    }

    /// Offset of the heap pointer field in the PEB, one u64 past the heap
    /// size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_heap_pointer_offset(&self) -> usize {
        self.get_heap_size_offset() + size_of::<u64>()
    }

    /// Offset of the start of the guest user stack buffer, immediately after
    /// the guard page.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_top_of_user_stack_offset(&self) -> usize {
        self.guest_user_stack_buffer_offset
    }

    /// Offset of the user stack pointer field in the PEB, one u64 past the
    /// minimum guest stack address field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_user_stack_pointer_offset(&self) -> usize {
        self.get_min_guest_stack_address_offset() + size_of::<u64>()
    }

    /// Total memory size before rounding up to a whole number of pages.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_unaligned_memory_size(&self) -> usize {
        self.init_data_offset + self.init_data_size
    }

    /// Offset of the guest code region.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_guest_code_offset(&self) -> usize {
        self.guest_code_offset
    }

    /// Guest address of the start of the guest code region.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_guest_code_address(&self) -> usize {
        Self::BASE_ADDRESS + self.guest_code_offset
    }

    /// Total size reserved for page tables (used by tests).
    #[cfg(test)]
    #[cfg(feature = "init-paging")]
    fn get_page_table_size(&self) -> usize {
        self.total_page_table_size
    }

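    /// Computes the number of bytes to reserve at the start of sandbox
    /// memory for page tables. The total amount of memory to be mapped
    /// (code, stack, heap, host function definitions, input/output buffers,
    /// the PEB, and the paging structures themselves) is divided by
    /// `AMOUNT_OF_MEMORY_PER_PT` (rounding up) to get the number of page
    /// tables; three further pages are added, presumably for the PML4,
    /// PDPT, and PD.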
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    #[cfg(feature = "init-paging")]
    fn get_total_page_table_size(
        cfg: SandboxConfiguration,
        code_size: usize,
        stack_size: usize,
        heap_size: usize,
    ) -> usize {
        // Sum every region that will be mapped, rounded to whole pages.
        let mut total_mapped_memory_size: usize = round_up_to(code_size, PAGE_SIZE_USIZE);
        total_mapped_memory_size += round_up_to(stack_size, PAGE_SIZE_USIZE);
        total_mapped_memory_size += round_up_to(heap_size, PAGE_SIZE_USIZE);
        total_mapped_memory_size +=
            round_up_to(cfg.get_host_function_definition_size(), PAGE_SIZE_USIZE);
        total_mapped_memory_size += round_up_to(cfg.get_input_data_size(), PAGE_SIZE_USIZE);
        total_mapped_memory_size += round_up_to(cfg.get_output_data_size(), PAGE_SIZE_USIZE);
        total_mapped_memory_size += round_up_to(size_of::<HyperlightPEB>(), PAGE_SIZE_USIZE);

        total_mapped_memory_size += Self::BASE_ADDRESS;

        // Account for three paging-structure pages (PML4, PDPT, PD) that are
        // mapped as well.
        total_mapped_memory_size += 3 * PAGE_SIZE_USIZE;

        total_mapped_memory_size += 512 * PAGE_SIZE_USIZE;

        // One page table per `AMOUNT_OF_MEMORY_PER_PT` of mapped memory,
        // plus three pages for the PML4, PDPT, and PD.
        let num_pages: usize =
            total_mapped_memory_size.div_ceil(AMOUNT_OF_MEMORY_PER_PT) + 3;
        num_pages * PAGE_SIZE_USIZE
    }

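    /// Total sandbox memory size, rounded up to a whole number of pages.
    /// Returns `MemoryRequestTooBig` if the result exceeds
    /// `MAX_MEMORY_SIZE`.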
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_memory_size(&self) -> Result<usize> {
        let total_memory = self.get_unaligned_memory_size();

        let remainder = total_memory % PAGE_SIZE_USIZE;
        let multiples = total_memory / PAGE_SIZE_USIZE;
        let size = match remainder {
            0 => total_memory,
            _ => (multiples + 1) * PAGE_SIZE_USIZE,
        };

        if size > Self::MAX_MEMORY_SIZE {
            Err(MemoryRequestTooBig(size, Self::MAX_MEMORY_SIZE))
        } else {
            Ok(size)
        }
    }

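    /// Builds the list of `MemoryRegion`s backing this layout, in the order
    /// they appear in guest memory (page tables when `init-paging` is
    /// enabled, then code, PEB, host function definitions, input data,
    /// output data, heap, guard page, stack, and finally the init data
    /// blob), cross-checking each computed offset against the offsets
    /// recorded in `self`.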
    pub fn get_memory_regions(&self, shared_mem: &GuestSharedMemory) -> Result<Vec<MemoryRegion>> {
        let mut builder = MemoryRegionVecBuilder::new(Self::BASE_ADDRESS, shared_mem.base_addr());

        // Note: each `push_page_aligned` call below returns the offset at
        // which the *next* region will start, which is then checked against
        // the offset this layout computed for that region.
        cfg_if::cfg_if! {
            if #[cfg(feature = "init-paging")] {
                let code_offset = builder.push_page_aligned(
                    self.total_page_table_size,
                    MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
                    PageTables,
                );

                if code_offset != self.guest_code_offset {
                    return Err(new_error!(
                        "Code offset does not match expected code offset. Expected: {}, actual: {}",
                        self.guest_code_offset,
                        code_offset
                    ));
                }
            }
        }

        let peb_offset = builder.push_page_aligned(
            self.code_size,
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE | MemoryRegionFlags::EXECUTE,
            Code,
        );

        let expected_peb_offset = TryInto::<usize>::try_into(self.peb_offset)?;

        if peb_offset != expected_peb_offset {
            return Err(new_error!(
                "PEB offset does not match expected PEB offset. Expected: {}, actual: {}",
                expected_peb_offset,
                peb_offset
            ));
        }

        let host_functions_definitions_offset = builder.push_page_aligned(
            size_of::<HyperlightPEB>(),
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
            Peb,
        );

        let expected_host_functions_definitions_offset =
            TryInto::<usize>::try_into(self.host_function_definitions_buffer_offset)?;

        if host_functions_definitions_offset != expected_host_functions_definitions_offset {
            return Err(new_error!(
                "Host Function Definitions offset does not match expected Host Function Definitions offset. Expected: {}, actual: {}",
                expected_host_functions_definitions_offset,
                host_functions_definitions_offset
            ));
        }

        let input_data_offset = builder.push_page_aligned(
            self.sandbox_memory_config
                .get_host_function_definition_size(),
            MemoryRegionFlags::READ,
            HostFunctionDefinitions,
        );

        let expected_input_data_offset = TryInto::<usize>::try_into(self.input_data_buffer_offset)?;

        if input_data_offset != expected_input_data_offset {
            return Err(new_error!(
                "Input Data offset does not match expected Input Data offset. Expected: {}, actual: {}",
                expected_input_data_offset,
                input_data_offset
            ));
        }

        let output_data_offset = builder.push_page_aligned(
            self.sandbox_memory_config.get_input_data_size(),
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
            InputData,
        );

        let expected_output_data_offset =
            TryInto::<usize>::try_into(self.output_data_buffer_offset)?;

        if output_data_offset != expected_output_data_offset {
            return Err(new_error!(
                "Output Data offset does not match expected Output Data offset. Expected: {}, actual: {}",
                expected_output_data_offset,
                output_data_offset
            ));
        }

        let heap_offset = builder.push_page_aligned(
            self.sandbox_memory_config.get_output_data_size(),
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
            OutputData,
        );

        let expected_heap_offset = TryInto::<usize>::try_into(self.guest_heap_buffer_offset)?;

        if heap_offset != expected_heap_offset {
            return Err(new_error!(
                "Guest Heap offset does not match expected Guest Heap offset. Expected: {}, actual: {}",
                expected_heap_offset,
                heap_offset
            ));
        }

        #[cfg(feature = "executable_heap")]
        let guard_page_offset = builder.push_page_aligned(
            self.heap_size,
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE | MemoryRegionFlags::EXECUTE,
            Heap,
        );
        #[cfg(not(feature = "executable_heap"))]
        let guard_page_offset = builder.push_page_aligned(
            self.heap_size,
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
            Heap,
        );

        let expected_guard_page_offset = TryInto::<usize>::try_into(self.guard_page_offset)?;

        if guard_page_offset != expected_guard_page_offset {
            return Err(new_error!(
                "Guard Page offset does not match expected Guard Page offset. Expected: {}, actual: {}",
                expected_guard_page_offset,
                guard_page_offset
            ));
        }

        let stack_offset = builder.push_page_aligned(
            PAGE_SIZE_USIZE,
            MemoryRegionFlags::READ | MemoryRegionFlags::STACK_GUARD,
            GuardPage,
        );

        let expected_stack_offset =
            TryInto::<usize>::try_into(self.guest_user_stack_buffer_offset)?;

        if stack_offset != expected_stack_offset {
            return Err(new_error!(
                "Stack offset does not match expected Stack offset. Expected: {}, actual: {}",
                expected_stack_offset,
                stack_offset
            ));
        }

        let init_data_offset = builder.push_page_aligned(
            self.get_guest_stack_size(),
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
            Stack,
        );

        let expected_init_data_offset = TryInto::<usize>::try_into(self.init_data_offset)?;

        if init_data_offset != expected_init_data_offset {
            return Err(new_error!(
                "Init Data offset does not match expected Init Data offset. Expected: {}, actual: {}",
                expected_init_data_offset,
                init_data_offset
            ));
        }

        let final_offset = if self.init_data_size > 0 {
            let mem_flags = self
                .init_data_permissions
                .unwrap_or(DEFAULT_GUEST_BLOB_MEM_FLAGS);
            builder.push_page_aligned(self.init_data_size, mem_flags, InitData)
        } else {
            init_data_offset
        };

        let expected_final_offset = TryInto::<usize>::try_into(self.get_memory_size()?)?;

        if final_offset != expected_final_offset {
            return Err(new_error!(
                "Final offset does not match expected Final offset. Expected: {}, actual: {}",
                expected_final_offset,
                final_offset
            ));
        }

        Ok(builder.build())
    }

    /// Copies the init-data blob into shared memory at `init_data_offset`.
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn write_init_data(
        &self,
        shared_mem: &mut ExclusiveSharedMemory,
        bytes: &[u8],
    ) -> Result<()> {
        shared_mem.copy_from_slice(bytes, self.init_data_offset)?;
        Ok(())
    }

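    /// Writes the initial contents of the PEB into `shared_mem`: the security
    /// cookie seed, the sizes and guest addresses of the host function
    /// definitions, input, output, and init data regions, the heap size and
    /// pointer, the minimum stack address and initial stack pointer, and the
    /// first u64 of the input and output buffers (seeded with
    /// `STACK_POINTER_SIZE_BYTES`). `guest_offset` must be either
    /// `BASE_ADDRESS` or the host base address of `shared_mem`; `size` is
    /// the total size of sandbox memory.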
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn write(
        &self,
        shared_mem: &mut ExclusiveSharedMemory,
        guest_offset: usize,
        size: usize,
    ) -> Result<()> {
        macro_rules! get_address {
            ($something:ident) => {
                u64::try_from(guest_offset + self.$something)?
            };
        }

        if guest_offset != SandboxMemoryLayout::BASE_ADDRESS
            && guest_offset != shared_mem.base_addr()
        {
            return Err(GuestOffsetIsInvalid(guest_offset));
        }

        // Seed the security cookie with random bytes.
        let mut security_cookie_seed = [0u8; 8];
        rng().fill_bytes(&mut security_cookie_seed);
        shared_mem.copy_from_slice(&security_cookie_seed, self.peb_security_cookie_seed_offset)?;

        // Write the host function definitions size and pointer.
        shared_mem.write_u64(
            self.get_host_function_definitions_size_offset(),
            self.sandbox_memory_config
                .get_host_function_definition_size()
                .try_into()?,
        )?;
        let addr = get_address!(host_function_definitions_buffer_offset);
        shared_mem.write_u64(self.get_host_function_definitions_pointer_offset(), addr)?;

        // Write the input data size and pointer.
        shared_mem.write_u64(
            self.get_input_data_size_offset(),
            self.sandbox_memory_config
                .get_input_data_size()
                .try_into()?,
        )?;
        let addr = get_address!(input_data_buffer_offset);
        shared_mem.write_u64(self.get_input_data_pointer_offset(), addr)?;

        // Write the output data size and pointer.
        shared_mem.write_u64(
            self.get_output_data_size_offset(),
            self.sandbox_memory_config
                .get_output_data_size()
                .try_into()?,
        )?;
        let addr = get_address!(output_data_buffer_offset);
        shared_mem.write_u64(self.get_output_data_pointer_offset(), addr)?;

        // Write the init data size and pointer.
        shared_mem.write_u64(
            self.get_init_data_size_offset(),
            (self.get_unaligned_memory_size() - self.init_data_offset).try_into()?,
        )?;
        let addr = get_address!(init_data_offset);
        shared_mem.write_u64(self.get_init_data_pointer_offset(), addr)?;

        // Write the heap size and pointer.
        let addr = get_address!(guest_heap_buffer_offset);
        shared_mem.write_u64(self.get_heap_size_offset(), self.heap_size.try_into()?)?;
        shared_mem.write_u64(self.get_heap_pointer_offset(), addr)?;

        // Write the minimum guest stack address and the initial stack pointer.
        let bottom = guest_offset + size;
        let min_user_stack_address = bottom - self.stack_size;

        shared_mem.write_u64(
            self.get_min_guest_stack_address_offset(),
            min_user_stack_address.try_into()?,
        )?;

        let start_of_user_stack: u64 = (min_user_stack_address + self.stack_size).try_into()?;

        shared_mem.write_u64(self.get_user_stack_pointer_offset(), start_of_user_stack)?;

        // Seed the first u64 of the input and output buffers with the
        // stack-pointer slot size.
        shared_mem.write_u64(
            self.input_data_buffer_offset,
            Self::STACK_POINTER_SIZE_BYTES,
        )?;
        shared_mem.write_u64(
            self.output_data_buffer_offset,
            Self::STACK_POINTER_SIZE_BYTES,
        )?;

        Ok(())
    }
}

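/// Rounds `value` up to the nearest multiple of `multiple`. Note that the
/// bit-mask trick used here assumes `multiple` is a power of two (as
/// `PAGE_SIZE_USIZE` is).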
fn round_up_to(value: usize, multiple: usize) -> usize {
    (value + multiple - 1) & !(multiple - 1)
}

#[cfg(test)]
mod tests {
    use hyperlight_common::mem::PAGE_SIZE_USIZE;

    use super::*;

    #[test]
    fn test_round_up() {
        assert_eq!(0, round_up_to(0, 4));
        assert_eq!(4, round_up_to(1, 4));
        assert_eq!(4, round_up_to(2, 4));
        assert_eq!(4, round_up_to(3, 4));
        assert_eq!(4, round_up_to(4, 4));
        assert_eq!(8, round_up_to(5, 4));
        assert_eq!(8, round_up_to(6, 4));
        assert_eq!(8, round_up_to(7, 4));
        assert_eq!(8, round_up_to(8, 4));
        assert_eq!(PAGE_SIZE_USIZE, round_up_to(44, PAGE_SIZE_USIZE));
        assert_eq!(PAGE_SIZE_USIZE, round_up_to(4095, PAGE_SIZE_USIZE));
        assert_eq!(PAGE_SIZE_USIZE, round_up_to(4096, PAGE_SIZE_USIZE));
        assert_eq!(PAGE_SIZE_USIZE * 2, round_up_to(4097, PAGE_SIZE_USIZE));
        assert_eq!(PAGE_SIZE_USIZE * 2, round_up_to(8191, PAGE_SIZE_USIZE));
    }

    // Helper: recomputes the expected total memory size from the layout's
    // individual components.
    fn get_expected_memory_size(layout: &SandboxMemoryLayout) -> usize {
        let cfg = layout.sandbox_memory_config;
        let mut expected_size = 0;
        #[cfg(feature = "init-paging")]
        {
            expected_size += layout.get_page_table_size();
        }
        expected_size += layout.code_size;

        expected_size += round_up_to(size_of::<HyperlightPEB>(), PAGE_SIZE_USIZE);

        expected_size += round_up_to(cfg.get_host_function_definition_size(), PAGE_SIZE_USIZE);

        expected_size += round_up_to(cfg.get_input_data_size(), PAGE_SIZE_USIZE);

        expected_size += round_up_to(cfg.get_output_data_size(), PAGE_SIZE_USIZE);

        expected_size += round_up_to(layout.heap_size, PAGE_SIZE_USIZE);

        // One guard page, then the stack.
        expected_size += PAGE_SIZE_USIZE;
        expected_size += round_up_to(layout.stack_size, PAGE_SIZE_USIZE);

        expected_size
    }

    #[test]
    fn test_get_memory_size() {
        let sbox_cfg = SandboxConfiguration::default();
        let sbox_mem_layout =
            SandboxMemoryLayout::new(sbox_cfg, 4096, 2048, 4096, 0, None).unwrap();
        assert_eq!(
            sbox_mem_layout.get_memory_size().unwrap(),
            get_expected_memory_size(&sbox_mem_layout)
        );
    }
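
    // A minimal additional sketch (not part of the original test suite):
    // checks that the offsets computed in `new()` appear in the expected
    // order. Equality is allowed where a region's size comes from the
    // sandbox configuration and could in principle be zero.
    #[test]
    fn test_layout_offset_ordering() {
        let cfg = SandboxConfiguration::default();
        let layout = SandboxMemoryLayout::new(cfg, 4096, 2048, 4096, 0, None).unwrap();
        assert!(layout.guest_code_offset < layout.peb_offset);
        assert!(layout.peb_offset < layout.host_function_definitions_buffer_offset);
        assert!(
            layout.host_function_definitions_buffer_offset <= layout.input_data_buffer_offset
        );
        assert!(layout.input_data_buffer_offset <= layout.output_data_buffer_offset);
        assert!(layout.output_data_buffer_offset <= layout.guest_heap_buffer_offset);
        assert!(layout.guest_heap_buffer_offset < layout.guard_page_offset);
        assert!(layout.guard_page_offset < layout.guest_user_stack_buffer_offset);
        assert!(layout.guest_user_stack_buffer_offset < layout.init_data_offset);
    }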
}