use std::fmt::Debug;
use std::mem::{offset_of, size_of};

use hyperlight_common::mem::{GuestMemoryRegion, HyperlightPEB, PAGE_SIZE_USIZE};
use rand::{RngCore, rng};
use tracing::{Span, instrument};

#[cfg(feature = "init-paging")]
use super::memory_region::MemoryRegionType::PageTables;
use super::memory_region::MemoryRegionType::{
    Code, GuardPage, Heap, HostFunctionDefinitions, InitData, InputData, OutputData, Peb, Stack,
};
use super::memory_region::{
    DEFAULT_GUEST_BLOB_MEM_FLAGS, MemoryRegion, MemoryRegionFlags, MemoryRegionVecBuilder,
};
#[cfg(feature = "init-paging")]
use super::mgr::AMOUNT_OF_MEMORY_PER_PT;
use super::shared_mem::{ExclusiveSharedMemory, GuestSharedMemory, SharedMemory};
use crate::error::HyperlightError::{GuestOffsetIsInvalid, MemoryRequestTooBig};
use crate::sandbox::SandboxConfiguration;
use crate::{Result, new_error};

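/// The layout of a sandbox's memory. From lowest to highest guest address the
/// regions are: the page tables (when the `init-paging` feature is enabled),
/// the guest code, the `HyperlightPEB`, the host function definitions buffer,
/// the input and output data buffers, the guest heap, a guard page, the guest
/// stack, and finally the optional init-data blob. All offsets stored here are
/// relative to the start of the sandbox's memory.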
#[derive(Copy, Clone)]
pub(crate) struct SandboxMemoryLayout {
    pub(super) sandbox_memory_config: SandboxConfiguration,
    pub(super) stack_size: usize,
    pub(super) heap_size: usize,
    init_data_size: usize,

    // Offset of the PEB itself, followed by the offsets of the fields within it.
    peb_offset: usize,
    peb_security_cookie_seed_offset: usize,
    peb_guest_dispatch_function_ptr_offset: usize,
    peb_code_pointer_offset: usize,
    pub(super) peb_host_function_definitions_offset: usize,
    peb_input_data_offset: usize,
    peb_output_data_offset: usize,
    peb_init_data_offset: usize,
    peb_heap_data_offset: usize,
    peb_guest_stack_data_offset: usize,

    // Offsets of the data buffers that follow the PEB.
    pub(crate) host_function_definitions_buffer_offset: usize,
    pub(super) input_data_buffer_offset: usize,
    pub(super) output_data_buffer_offset: usize,
    guest_heap_buffer_offset: usize,
    guard_page_offset: usize,
    guest_user_stack_buffer_offset: usize,
    init_data_offset: usize,

    pub(crate) peb_address: usize,
    code_size: usize,
    total_page_table_size: usize,
    guest_code_offset: usize,
    pub(crate) init_data_permissions: Option<MemoryRegionFlags>,
}

impl Debug for SandboxMemoryLayout {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SandboxMemoryLayout")
            .field(
                "Total Memory Size",
                &format_args!("{:#x}", self.get_memory_size().unwrap_or(0)),
            )
            .field("Stack Size", &format_args!("{:#x}", self.stack_size))
            .field("Heap Size", &format_args!("{:#x}", self.heap_size))
            .field(
                "Init Data Size",
                &format_args!("{:#x}", self.init_data_size),
            )
            .field("PEB Address", &format_args!("{:#x}", self.peb_address))
            .field("PEB Offset", &format_args!("{:#x}", self.peb_offset))
            .field("Code Size", &format_args!("{:#x}", self.code_size))
            .field(
                "Security Cookie Seed Offset",
                &format_args!("{:#x}", self.peb_security_cookie_seed_offset),
            )
            .field(
                "Guest Dispatch Function Pointer Offset",
                &format_args!("{:#x}", self.peb_guest_dispatch_function_ptr_offset),
            )
            .field(
                "Host Function Definitions Offset",
                &format_args!("{:#x}", self.peb_host_function_definitions_offset),
            )
            .field(
                "Code Pointer Offset",
                &format_args!("{:#x}", self.peb_code_pointer_offset),
            )
            .field(
                "Input Data Offset",
                &format_args!("{:#x}", self.peb_input_data_offset),
            )
            .field(
                "Output Data Offset",
                &format_args!("{:#x}", self.peb_output_data_offset),
            )
            .field(
                "Init Data Offset",
                &format_args!("{:#x}", self.peb_init_data_offset),
            )
            .field(
                "Guest Heap Offset",
                &format_args!("{:#x}", self.peb_heap_data_offset),
            )
            .field(
                "Guest Stack Offset",
                &format_args!("{:#x}", self.peb_guest_stack_data_offset),
            )
            .field(
                "Host Function Definitions Buffer Offset",
                &format_args!("{:#x}", self.host_function_definitions_buffer_offset),
            )
            .field(
                "Input Data Buffer Offset",
                &format_args!("{:#x}", self.input_data_buffer_offset),
            )
            .field(
                "Output Data Buffer Offset",
                &format_args!("{:#x}", self.output_data_buffer_offset),
            )
            .field(
                "Guest Heap Buffer Offset",
                &format_args!("{:#x}", self.guest_heap_buffer_offset),
            )
            .field(
                "Guard Page Offset",
                &format_args!("{:#x}", self.guard_page_offset),
            )
            .field(
                "Guest User Stack Buffer Offset",
                &format_args!("{:#x}", self.guest_user_stack_buffer_offset),
            )
            .field(
                "Init Data Buffer Offset",
                &format_args!("{:#x}", self.init_data_offset),
            )
            .field(
                "Page Table Size",
                &format_args!("{:#x}", self.total_page_table_size),
            )
            .field(
                "Guest Code Offset",
                &format_args!("{:#x}", self.guest_code_offset),
            )
            .finish()
    }
}

impl SandboxMemoryLayout {
    /// The offset into the sandbox's memory where the PML4 table starts.
    pub(crate) const PML4_OFFSET: usize = 0x0000;
    /// The offset into the sandbox's memory where the PDPT starts.
    #[cfg(feature = "init-paging")]
    pub(super) const PDPT_OFFSET: usize = 0x1000;
    /// The offset into the sandbox's memory where the PD starts.
    #[cfg(feature = "init-paging")]
    pub(super) const PD_OFFSET: usize = 0x2000;
    /// The offset into the sandbox's memory where the page tables start.
    #[cfg(feature = "init-paging")]
    pub(super) const PT_OFFSET: usize = 0x3000;
    /// The guest address of the PD.
    #[cfg(feature = "init-paging")]
    pub(super) const PD_GUEST_ADDRESS: usize = Self::BASE_ADDRESS + Self::PD_OFFSET;
    /// The guest address of the PDPT.
    #[cfg(feature = "init-paging")]
    pub(super) const PDPT_GUEST_ADDRESS: usize = Self::BASE_ADDRESS + Self::PDPT_OFFSET;
    /// The guest address of the page tables.
    #[cfg(feature = "init-paging")]
    pub(super) const PT_GUEST_ADDRESS: usize = Self::BASE_ADDRESS + Self::PT_OFFSET;
    /// The maximum amount of memory a sandbox may have (1 GiB minus the base address).
    const MAX_MEMORY_SIZE: usize = 0x40000000 - Self::BASE_ADDRESS;

    /// The base address of the sandbox's memory.
    pub(crate) const BASE_ADDRESS: usize = 0x0;

    /// The size of a stack pointer in the guest, in bytes.
    const STACK_POINTER_SIZE_BYTES: u64 = 8;

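    /// Create a new `SandboxMemoryLayout` for the given configuration and
    /// code, stack, heap and init-data sizes, computing the offset of each
    /// region and PEB field within the sandbox's memory.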
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn new(
        cfg: SandboxConfiguration,
        code_size: usize,
        stack_size: usize,
        heap_size: usize,
        init_data_size: usize,
        init_data_permissions: Option<MemoryRegionFlags>,
    ) -> Result<Self> {
        // When paging is enabled the page tables occupy the start of memory,
        // so the guest code begins immediately after them.
        #[cfg(feature = "init-paging")]
        let base = Self::get_total_page_table_size(cfg, code_size, stack_size, heap_size);
        #[cfg(not(feature = "init-paging"))]
        let base = Self::BASE_ADDRESS;
        let guest_code_offset = base;
        // The PEB sits on the first page boundary after the guest code.
        let peb_offset = base + round_up_to(code_size, PAGE_SIZE_USIZE);
        let peb_security_cookie_seed_offset =
            peb_offset + offset_of!(HyperlightPEB, security_cookie_seed);
        let peb_guest_dispatch_function_ptr_offset =
            peb_offset + offset_of!(HyperlightPEB, guest_function_dispatch_ptr);
        let peb_code_pointer_offset = peb_offset + offset_of!(HyperlightPEB, code_ptr);
        let peb_input_data_offset = peb_offset + offset_of!(HyperlightPEB, input_stack);
        let peb_output_data_offset = peb_offset + offset_of!(HyperlightPEB, output_stack);
        let peb_init_data_offset = peb_offset + offset_of!(HyperlightPEB, init_data);
        let peb_heap_data_offset = peb_offset + offset_of!(HyperlightPEB, guest_heap);
        let peb_guest_stack_data_offset = peb_offset + offset_of!(HyperlightPEB, guest_stack);
        let peb_host_function_definitions_offset =
            peb_offset + offset_of!(HyperlightPEB, host_function_definitions);

        let peb_address = Self::BASE_ADDRESS + peb_offset;
        // The data buffers follow the PEB, each starting on a page boundary.
        let host_function_definitions_buffer_offset = round_up_to(
            peb_host_function_definitions_offset + size_of::<GuestMemoryRegion>(),
            PAGE_SIZE_USIZE,
        );
        let input_data_buffer_offset = round_up_to(
            host_function_definitions_buffer_offset + cfg.get_host_function_definition_size(),
            PAGE_SIZE_USIZE,
        );
        let output_data_buffer_offset = round_up_to(
            input_data_buffer_offset + cfg.get_input_data_size(),
            PAGE_SIZE_USIZE,
        );
        let guest_heap_buffer_offset = round_up_to(
            output_data_buffer_offset + cfg.get_output_data_size(),
            PAGE_SIZE_USIZE,
        );
        // A single guard page separates the heap from the stack.
        let guard_page_offset = round_up_to(guest_heap_buffer_offset + heap_size, PAGE_SIZE_USIZE);
        let guest_user_stack_buffer_offset = guard_page_offset + PAGE_SIZE_USIZE;
        let stack_size_rounded = round_up_to(stack_size, PAGE_SIZE_USIZE);
        let init_data_offset = guest_user_stack_buffer_offset + stack_size_rounded;

        Ok(Self {
            peb_offset,
            stack_size: stack_size_rounded,
            heap_size,
            peb_security_cookie_seed_offset,
            peb_guest_dispatch_function_ptr_offset,
            peb_code_pointer_offset,
            peb_host_function_definitions_offset,
            peb_input_data_offset,
            peb_output_data_offset,
            peb_init_data_offset,
            peb_heap_data_offset,
            peb_guest_stack_data_offset,
            sandbox_memory_config: cfg,
            code_size,
            host_function_definitions_buffer_offset,
            input_data_buffer_offset,
            output_data_buffer_offset,
            guest_heap_buffer_offset,
            guest_user_stack_buffer_offset,
            peb_address,
            guard_page_offset,
            total_page_table_size: base,
            guest_code_offset,
            init_data_offset,
            init_data_permissions,
        })
    }

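    /// Get the offset in guest memory of the output data size field in the PEB.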
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_output_data_size_offset(&self) -> usize {
        self.peb_output_data_offset
    }

    /// Get the offset in guest memory of the host function definitions size
    /// field in the PEB.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_host_function_definitions_size_offset(&self) -> usize {
        self.peb_host_function_definitions_offset
    }

    /// Get the offset in guest memory of the host function definitions
    /// pointer, which immediately follows the 8-byte size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_host_function_definitions_pointer_offset(&self) -> usize {
        self.peb_host_function_definitions_offset + size_of::<u64>()
    }

    /// Get the offset in guest memory of the init-data size field in the PEB.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_init_data_size_offset(&self) -> usize {
        self.peb_init_data_offset
    }

    /// Get the offset in guest memory of the minimum guest stack address field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_min_guest_stack_address_offset(&self) -> usize {
        self.peb_guest_stack_data_offset
    }

    /// Get the (page-rounded) size of the guest stack.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_guest_stack_size(&self) -> usize {
        self.stack_size
    }

    /// Get the offset in guest memory of the outb pointer, which the layout
    /// places immediately after the code pointer in the PEB.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    #[allow(dead_code)]
    pub(super) fn get_outb_pointer_offset(&self) -> usize {
        self.peb_code_pointer_offset + size_of::<u64>()
    }

    /// Get the offset in guest memory of the outb context, which immediately
    /// follows the outb pointer.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    #[allow(dead_code)]
    pub(super) fn get_outb_context_offset(&self) -> usize {
        self.get_outb_pointer_offset() + size_of::<u64>()
    }

    /// Get the offset in guest memory of the output data pointer, which
    /// immediately follows the 8-byte output data size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_output_data_pointer_offset(&self) -> usize {
        self.get_output_data_size_offset() + size_of::<u64>()
    }

    /// Get the offset in guest memory of the init-data pointer, which
    /// immediately follows the 8-byte init-data size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_init_data_pointer_offset(&self) -> usize {
        self.get_init_data_size_offset() + size_of::<u64>()
    }

    /// Get the offset of the output data buffer within the sandbox's memory.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    #[allow(dead_code)]
    pub(crate) fn get_output_data_offset(&self) -> usize {
        self.output_data_buffer_offset
    }

    /// Get the offset in guest memory of the input data size field in the PEB.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_input_data_size_offset(&self) -> usize {
        self.peb_input_data_offset
    }

    /// Get the offset in guest memory of the input data pointer, which
    /// immediately follows the 8-byte input data size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_input_data_pointer_offset(&self) -> usize {
        self.get_input_data_size_offset() + size_of::<u64>()
    }

    /// Get the offset in guest memory of the code pointer field in the PEB.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_code_pointer_offset(&self) -> usize {
        self.peb_code_pointer_offset
    }

    /// Get the offset in guest memory of the guest dispatch function pointer.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_dispatch_function_pointer_offset(&self) -> usize {
        self.peb_guest_dispatch_function_ptr_offset
    }

    /// Get the offset of the PEB within the sandbox's memory.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    #[allow(dead_code)]
    pub(super) fn get_in_process_peb_offset(&self) -> usize {
        self.peb_offset
    }

    /// Get the offset in guest memory of the heap size field in the PEB.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_heap_size_offset(&self) -> usize {
        self.peb_heap_data_offset
    }

    /// Get the offset in guest memory of the heap pointer, which immediately
    /// follows the 8-byte heap size field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_heap_pointer_offset(&self) -> usize {
        self.get_heap_size_offset() + size_of::<u64>()
    }

    /// Get the offset of the guest user stack buffer.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_top_of_user_stack_offset(&self) -> usize {
        self.guest_user_stack_buffer_offset
    }

    /// Get the offset in guest memory of the user stack pointer, which
    /// immediately follows the minimum guest stack address field.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_user_stack_pointer_offset(&self) -> usize {
        self.get_min_guest_stack_address_offset() + size_of::<u64>()
    }

    /// Get the offset of the guard page within the sandbox's memory.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    #[allow(dead_code)]
    pub fn get_guard_page_offset(&self) -> usize {
        self.guard_page_offset
    }

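    /// Get the total size of the sandbox's memory before it is rounded up to
    /// the nearest page boundary.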
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn get_unaligned_memory_size(&self) -> usize {
        self.init_data_offset + self.init_data_size
    }

    /// Get the offset of the guest code within the sandbox's memory.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_guest_code_offset(&self) -> usize {
        self.guest_code_offset
    }

    /// Get the guest address at which the guest code begins.
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_guest_code_address(&self) -> usize {
        Self::BASE_ADDRESS + self.guest_code_offset
    }

    #[cfg(test)]
    #[cfg(feature = "init-paging")]
    fn get_page_table_size(&self) -> usize {
        self.total_page_table_size
    }

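    /// Calculate the amount of memory to reserve for the sandbox's page
    /// tables, based on the configuration and the code, stack and heap sizes,
    /// rounded up to whole pages.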
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    #[cfg(feature = "init-paging")]
    fn get_total_page_table_size(
        cfg: SandboxConfiguration,
        code_size: usize,
        stack_size: usize,
        heap_size: usize,
    ) -> usize {
        // Add up every region that will be mapped, rounding each to a page.
        let mut total_mapped_memory_size: usize = round_up_to(code_size, PAGE_SIZE_USIZE);
        total_mapped_memory_size += round_up_to(stack_size, PAGE_SIZE_USIZE);
        total_mapped_memory_size += round_up_to(heap_size, PAGE_SIZE_USIZE);
        total_mapped_memory_size +=
            round_up_to(cfg.get_host_function_definition_size(), PAGE_SIZE_USIZE);
        total_mapped_memory_size += round_up_to(cfg.get_input_data_size(), PAGE_SIZE_USIZE);
        total_mapped_memory_size += round_up_to(cfg.get_output_data_size(), PAGE_SIZE_USIZE);
        total_mapped_memory_size += round_up_to(size_of::<HyperlightPEB>(), PAGE_SIZE_USIZE);

        // Account for the sandbox's base address.
        total_mapped_memory_size += Self::BASE_ADDRESS;

        // One page each for the PML4, PDPT and PD.
        total_mapped_memory_size += 3 * PAGE_SIZE_USIZE;

        // Reserve room for the page tables themselves (up to 512, one per PD entry).
        total_mapped_memory_size += 512 * PAGE_SIZE_USIZE;

        // One page table maps AMOUNT_OF_MEMORY_PER_PT bytes; add three more
        // pages for the PML4, PDPT and PD themselves.
        let num_pages: usize = total_mapped_memory_size.div_ceil(AMOUNT_OF_MEMORY_PER_PT) + 3;
        num_pages * PAGE_SIZE_USIZE
    }

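    /// Get the total size of the sandbox's memory, rounded up to the nearest
    /// page. Returns `MemoryRequestTooBig` if the result exceeds
    /// `MAX_MEMORY_SIZE`.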
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    pub(super) fn get_memory_size(&self) -> Result<usize> {
        let total_memory = self.get_unaligned_memory_size();

        let remainder = total_memory % PAGE_SIZE_USIZE;
        let multiples = total_memory / PAGE_SIZE_USIZE;
        let size = match remainder {
            0 => total_memory,
            _ => (multiples + 1) * PAGE_SIZE_USIZE,
        };

        if size > Self::MAX_MEMORY_SIZE {
            Err(MemoryRequestTooBig(size, Self::MAX_MEMORY_SIZE))
        } else {
            Ok(size)
        }
    }

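    /// Build the list of `MemoryRegion`s that make up the sandbox, in guest
    /// address order, validating that each region begins at the offset
    /// computed by `new`.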
    pub fn get_memory_regions(&self, shared_mem: &GuestSharedMemory) -> Result<Vec<MemoryRegion>> {
        let mut builder = MemoryRegionVecBuilder::new(Self::BASE_ADDRESS, shared_mem.base_addr());

        // `push_page_aligned` returns the end offset of the region just
        // pushed, i.e. the expected start of the next region. Each check
        // below validates that offset against the one computed in `new`.
        cfg_if::cfg_if! {
            if #[cfg(feature = "init-paging")] {
                let code_offset = builder.push_page_aligned(
                    self.total_page_table_size,
                    MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
                    PageTables,
                );

                if code_offset != self.guest_code_offset {
                    return Err(new_error!(
                        "Code offset does not match expected code offset: expected {}, actual {}",
                        self.guest_code_offset,
                        code_offset
                    ));
                }
            }
        }

        let peb_offset = builder.push_page_aligned(
            self.code_size,
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE | MemoryRegionFlags::EXECUTE,
            Code,
        );

        let expected_peb_offset = TryInto::<usize>::try_into(self.peb_offset)?;

        if peb_offset != expected_peb_offset {
            return Err(new_error!(
                "PEB offset does not match expected PEB offset: expected {}, actual {}",
                expected_peb_offset,
                peb_offset
            ));
        }

        let host_functions_definitions_offset = builder.push_page_aligned(
            size_of::<HyperlightPEB>(),
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
            Peb,
        );

        let expected_host_functions_definitions_offset =
            TryInto::<usize>::try_into(self.host_function_definitions_buffer_offset)?;

        if host_functions_definitions_offset != expected_host_functions_definitions_offset {
            return Err(new_error!(
                "Host Function Definitions offset does not match expected Host Function Definitions offset: expected {}, actual {}",
                expected_host_functions_definitions_offset,
                host_functions_definitions_offset
            ));
        }

        let input_data_offset = builder.push_page_aligned(
            self.sandbox_memory_config
                .get_host_function_definition_size(),
            MemoryRegionFlags::READ,
            HostFunctionDefinitions,
        );

        let expected_input_data_offset = TryInto::<usize>::try_into(self.input_data_buffer_offset)?;

        if input_data_offset != expected_input_data_offset {
            return Err(new_error!(
                "Input Data offset does not match expected Input Data offset: expected {}, actual {}",
                expected_input_data_offset,
                input_data_offset
            ));
        }

        let output_data_offset = builder.push_page_aligned(
            self.sandbox_memory_config.get_input_data_size(),
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
            InputData,
        );

        let expected_output_data_offset =
            TryInto::<usize>::try_into(self.output_data_buffer_offset)?;

        if output_data_offset != expected_output_data_offset {
            return Err(new_error!(
                "Output Data offset does not match expected Output Data offset: expected {}, actual {}",
                expected_output_data_offset,
                output_data_offset
            ));
        }

        let heap_offset = builder.push_page_aligned(
            self.sandbox_memory_config.get_output_data_size(),
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
            OutputData,
        );

        let expected_heap_offset = TryInto::<usize>::try_into(self.guest_heap_buffer_offset)?;

        if heap_offset != expected_heap_offset {
            return Err(new_error!(
                "Guest Heap offset does not match expected Guest Heap offset: expected {}, actual {}",
                expected_heap_offset,
                heap_offset
            ));
        }

        #[cfg(feature = "executable_heap")]
        let guard_page_offset = builder.push_page_aligned(
            self.heap_size,
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE | MemoryRegionFlags::EXECUTE,
            Heap,
        );
        #[cfg(not(feature = "executable_heap"))]
        let guard_page_offset = builder.push_page_aligned(
            self.heap_size,
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
            Heap,
        );

        let expected_guard_page_offset = TryInto::<usize>::try_into(self.guard_page_offset)?;

        if guard_page_offset != expected_guard_page_offset {
            return Err(new_error!(
                "Guard Page offset does not match expected Guard Page offset: expected {}, actual {}",
                expected_guard_page_offset,
                guard_page_offset
            ));
        }

        let stack_offset = builder.push_page_aligned(
            PAGE_SIZE_USIZE,
            MemoryRegionFlags::READ | MemoryRegionFlags::STACK_GUARD,
            GuardPage,
        );

        let expected_stack_offset =
            TryInto::<usize>::try_into(self.guest_user_stack_buffer_offset)?;

        if stack_offset != expected_stack_offset {
            return Err(new_error!(
                "Stack offset does not match expected Stack offset: expected {}, actual {}",
                expected_stack_offset,
                stack_offset
            ));
        }

        let init_data_offset = builder.push_page_aligned(
            self.get_guest_stack_size(),
            MemoryRegionFlags::READ | MemoryRegionFlags::WRITE,
            Stack,
        );

        let expected_init_data_offset = TryInto::<usize>::try_into(self.init_data_offset)?;

        if init_data_offset != expected_init_data_offset {
            return Err(new_error!(
                "Init Data offset does not match expected Init Data offset: expected {}, actual {}",
                expected_init_data_offset,
                init_data_offset
            ));
        }

        let final_offset = if self.init_data_size > 0 {
            let mem_flags = self
                .init_data_permissions
                .unwrap_or(DEFAULT_GUEST_BLOB_MEM_FLAGS);
            builder.push_page_aligned(self.init_data_size, mem_flags, InitData)
        } else {
            init_data_offset
        };

        let expected_final_offset = TryInto::<usize>::try_into(self.get_memory_size()?)?;

        if final_offset != expected_final_offset {
            return Err(new_error!(
                "Final offset does not match expected Final offset: expected {}, actual {}",
                expected_final_offset,
                final_offset
            ));
        }

        Ok(builder.build())
    }

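    /// Copy the init-data blob into shared memory at the layout's init-data
    /// offset.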
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn write_init_data(
        &self,
        shared_mem: &mut ExclusiveSharedMemory,
        bytes: &[u8],
    ) -> Result<()> {
        shared_mem.copy_from_slice(bytes, self.init_data_offset)?;
        Ok(())
    }

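    /// Write the layout into the given shared memory: seed the security
    /// cookie and fill in the PEB's buffer sizes, guest pointers, heap and
    /// stack information.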
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn write(
        &self,
        shared_mem: &mut ExclusiveSharedMemory,
        guest_offset: usize,
        size: usize,
    ) -> Result<()> {
        // Convert a layout offset into a guest address relative to `guest_offset`.
        macro_rules! get_address {
            ($something:ident) => {
                u64::try_from(guest_offset + self.$something)?
            };
        }

        if guest_offset != SandboxMemoryLayout::BASE_ADDRESS
            && guest_offset != shared_mem.base_addr()
        {
            return Err(GuestOffsetIsInvalid(guest_offset));
        }

        // Seed the PEB's security cookie with random bytes.
        let mut security_cookie_seed = [0u8; 8];
        rng().fill_bytes(&mut security_cookie_seed);
        shared_mem.copy_from_slice(&security_cookie_seed, self.peb_security_cookie_seed_offset)?;

        // Write the size and guest address of the host function definitions buffer.
        shared_mem.write_u64(
            self.get_host_function_definitions_size_offset(),
            self.sandbox_memory_config
                .get_host_function_definition_size()
                .try_into()?,
        )?;
        let addr = get_address!(host_function_definitions_buffer_offset);
        shared_mem.write_u64(self.get_host_function_definitions_pointer_offset(), addr)?;

        // Write the size and guest address of the input data buffer.
        shared_mem.write_u64(
            self.get_input_data_size_offset(),
            self.sandbox_memory_config
                .get_input_data_size()
                .try_into()?,
        )?;
        let addr = get_address!(input_data_buffer_offset);
        shared_mem.write_u64(self.get_input_data_pointer_offset(), addr)?;

        // Write the size and guest address of the output data buffer.
        shared_mem.write_u64(
            self.get_output_data_size_offset(),
            self.sandbox_memory_config
                .get_output_data_size()
                .try_into()?,
        )?;
        let addr = get_address!(output_data_buffer_offset);
        shared_mem.write_u64(self.get_output_data_pointer_offset(), addr)?;

        // Write the size and guest address of the init-data region.
        shared_mem.write_u64(
            self.get_init_data_size_offset(),
            (self.get_unaligned_memory_size() - self.init_data_offset).try_into()?,
        )?;
        let addr = get_address!(init_data_offset);
        shared_mem.write_u64(self.get_init_data_pointer_offset(), addr)?;

        // Write the size and guest address of the guest heap.
        let addr = get_address!(guest_heap_buffer_offset);
        shared_mem.write_u64(self.get_heap_size_offset(), self.heap_size.try_into()?)?;
        shared_mem.write_u64(self.get_heap_pointer_offset(), addr)?;

        // Record the lowest guest stack address and the initial stack pointer,
        // both computed from the end of the sandbox's memory.
        let bottom = guest_offset + size;
        let min_user_stack_address = bottom - self.stack_size;

        shared_mem.write_u64(
            self.get_min_guest_stack_address_offset(),
            min_user_stack_address.try_into()?,
        )?;

        let start_of_user_stack: u64 = (min_user_stack_address + self.stack_size).try_into()?;

        shared_mem.write_u64(self.get_user_stack_pointer_offset(), start_of_user_stack)?;

        // Initialize the stack pointer stored at the start of the input and
        // output data buffers.
        shared_mem.write_u64(
            self.input_data_buffer_offset,
            Self::STACK_POINTER_SIZE_BYTES,
        )?;
        shared_mem.write_u64(
            self.output_data_buffer_offset,
            Self::STACK_POINTER_SIZE_BYTES,
        )?;

        Ok(())
    }
}

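/// Round `value` up to the nearest multiple of `multiple`. `multiple` must be
/// a power of two for the bit mask below to be correct.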
fn round_up_to(value: usize, multiple: usize) -> usize {
    (value + multiple - 1) & !(multiple - 1)
}

#[cfg(test)]
mod tests {
    use hyperlight_common::mem::PAGE_SIZE_USIZE;

    use super::*;

    #[test]
    fn test_round_up() {
        assert_eq!(0, round_up_to(0, 4));
        assert_eq!(4, round_up_to(1, 4));
        assert_eq!(4, round_up_to(2, 4));
        assert_eq!(4, round_up_to(3, 4));
        assert_eq!(4, round_up_to(4, 4));
        assert_eq!(8, round_up_to(5, 4));
        assert_eq!(8, round_up_to(6, 4));
        assert_eq!(8, round_up_to(7, 4));
        assert_eq!(8, round_up_to(8, 4));
        assert_eq!(PAGE_SIZE_USIZE, round_up_to(44, PAGE_SIZE_USIZE));
        assert_eq!(PAGE_SIZE_USIZE, round_up_to(4095, PAGE_SIZE_USIZE));
        assert_eq!(PAGE_SIZE_USIZE, round_up_to(4096, PAGE_SIZE_USIZE));
        assert_eq!(PAGE_SIZE_USIZE * 2, round_up_to(4097, PAGE_SIZE_USIZE));
        assert_eq!(PAGE_SIZE_USIZE * 2, round_up_to(8191, PAGE_SIZE_USIZE));
    }

    // Recompute the expected total memory size region by region.
    fn get_expected_memory_size(layout: &SandboxMemoryLayout) -> usize {
        let cfg = layout.sandbox_memory_config;
        let mut expected_size = 0;
        #[cfg(feature = "init-paging")]
        {
            expected_size += layout.get_page_table_size();
        }
        expected_size += layout.code_size;

        expected_size += round_up_to(size_of::<HyperlightPEB>(), PAGE_SIZE_USIZE);

        expected_size += round_up_to(cfg.get_host_function_definition_size(), PAGE_SIZE_USIZE);

        expected_size += round_up_to(cfg.get_input_data_size(), PAGE_SIZE_USIZE);

        expected_size += round_up_to(cfg.get_output_data_size(), PAGE_SIZE_USIZE);

        expected_size += round_up_to(layout.heap_size, PAGE_SIZE_USIZE);

        // One guard page, then the (page-rounded) stack.
        expected_size += PAGE_SIZE_USIZE;
        expected_size += round_up_to(layout.stack_size, PAGE_SIZE_USIZE);

        expected_size
    }

    #[test]
    fn test_get_memory_size() {
        let sbox_cfg = SandboxConfiguration::default();
        let sbox_mem_layout =
            SandboxMemoryLayout::new(sbox_cfg, 4096, 2048, 4096, 0, None).unwrap();
        assert_eq!(
            sbox_mem_layout.get_memory_size().unwrap(),
            get_expected_memory_size(&sbox_mem_layout)
        );
    }
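
    // Extra sanity-check sketch: the offsets computed by `new` should be
    // page-aligned, since each region in the layout is rounded up to
    // `PAGE_SIZE_USIZE`.
    #[test]
    fn test_offsets_are_page_aligned() {
        let cfg = SandboxConfiguration::default();
        let layout = SandboxMemoryLayout::new(cfg, 4096, 2048, 4096, 0, None).unwrap();
        assert_eq!(layout.get_guest_code_offset() % PAGE_SIZE_USIZE, 0);
        assert_eq!(layout.get_guard_page_offset() % PAGE_SIZE_USIZE, 0);
        assert_eq!(layout.get_top_of_user_stack_offset() % PAGE_SIZE_USIZE, 0);
        assert_eq!(layout.get_memory_size().unwrap() % PAGE_SIZE_USIZE, 0);
    }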
}