use std::cmp::Ordering;

use hyperlight_common::flatbuffer_wrappers::function_call::{
    FunctionCall, validate_guest_function_call_buffer,
};
use hyperlight_common::flatbuffer_wrappers::function_types::ReturnValue;
use hyperlight_common::flatbuffer_wrappers::guest_error::GuestError;
use hyperlight_common::flatbuffer_wrappers::guest_log_data::GuestLogData;
use hyperlight_common::flatbuffer_wrappers::host_function_details::HostFunctionDetails;
use tracing::{Span, instrument};

use super::exe::ExeInfo;
use super::layout::SandboxMemoryLayout;
use super::memory_region::MemoryRegion;
#[cfg(feature = "init-paging")]
use super::memory_region::{DEFAULT_GUEST_BLOB_MEM_FLAGS, MemoryRegionType};
use super::ptr::{GuestPtr, RawPtr};
use super::ptr_offset::Offset;
use super::shared_mem::{ExclusiveSharedMemory, GuestSharedMemory, HostSharedMemory, SharedMemory};
use super::shared_mem_snapshot::SharedMemorySnapshot;
use crate::sandbox::SandboxConfiguration;
use crate::sandbox::uninitialized::GuestBlob;
use crate::{Result, log_then_return, new_error};

cfg_if::cfg_if! {
    if #[cfg(feature = "init-paging")] {
        /// Page-table entry flag: the page is present.
        pub(crate) const PAGE_PRESENT: u64 = 1;
        /// Page-table entry flag: the page is writable.
        pub(crate) const PAGE_RW: u64 = 1 << 1;
        /// Page-table entry flag: the page is accessible from user (ring 3) code.
        pub(crate) const PAGE_USER: u64 = 1 << 2;
        /// Page-table entry flag: instruction fetches from the page are disallowed (NX).
        pub(crate) const PAGE_NX: u64 = 1 << 63;
        /// The amount of memory mapped by a single page table:
        /// 512 entries of 4KiB each, i.e. 2MiB.
        pub(super) const AMOUNT_OF_MEMORY_PER_PT: usize = 0x200_000;
    }
}

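/// The length, in bytes, of the stack-guard cookie written at the top of the
/// guest stack.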
pub(crate) const STACK_COOKIE_LEN: usize = 16;

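/// Tracks the memory of a sandbox: its shared memory, layout, load address,
/// entrypoint, and associated bookkeeping such as the stack cookie.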
#[derive(Clone)]
pub(crate) struct SandboxMemoryManager<S> {
    /// Shared memory for the sandbox.
    pub(crate) shared_mem: S,
    /// The memory layout of the underlying shared memory.
    pub(crate) layout: SandboxMemoryLayout,
    /// The guest address at which the binary was loaded.
    pub(crate) load_addr: RawPtr,
    /// The offset of the binary's entrypoint within it.
    pub(crate) entrypoint_offset: Offset,
    /// How many memory regions have been mapped into this sandbox beyond
    /// those in the initial layout.
    pub(crate) mapped_rgns: u64,
    /// The stack cookie used to detect guest stack overflows.
    pub(crate) stack_cookie: [u8; STACK_COOKIE_LEN],
    /// Buffer in which guest abort messages are accumulated.
    pub(crate) abort_buffer: Vec<u8>,
}

impl<S> SandboxMemoryManager<S>
where
    S: SharedMemory,
{
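    /// Create a new `SandboxMemoryManager` with the given parameters.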
    #[instrument(skip_all, parent = Span::current(), level = "Trace")]
    pub(crate) fn new(
        layout: SandboxMemoryLayout,
        shared_mem: S,
        load_addr: RawPtr,
        entrypoint_offset: Offset,
        stack_cookie: [u8; STACK_COOKIE_LEN],
    ) -> Self {
        Self {
            layout,
            shared_mem,
            load_addr,
            entrypoint_offset,
            mapped_rgns: 0,
            stack_cookie,
            abort_buffer: Vec::new(),
        }
    }

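    /// Get a reference to the stack cookie generated for this sandbox.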
    #[instrument(skip_all, parent = Span::current(), level = "Trace")]
    pub(crate) fn get_stack_cookie(&self) -> &[u8; STACK_COOKIE_LEN] {
        &self.stack_cookie
    }

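    /// Get a mutable reference to the buffer used to accumulate guest abort
    /// messages.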
    pub(crate) fn get_abort_buffer_mut(&mut self) -> &mut Vec<u8> {
        &mut self.abort_buffer
    }

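    /// Get a mutable reference to the underlying shared memory (available
    /// for the gdb feature and in tests).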
    #[cfg(any(gdb, test))]
    pub(crate) fn get_shared_mem_mut(&mut self) -> &mut S {
        &mut self.shared_mem
    }

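    /// Set up the page tables in shared memory and return the initial stack
    /// pointer (RSP) value for the guest.
    ///
    /// This builds a 4-level x86-64 paging hierarchy (PML4 -> PDPT -> PD ->
    /// page tables) that identity-maps the sandbox's `mem_size` bytes of
    /// memory, deriving each page's protection flags from the matching entry
    /// in `regions`.
    ///
    /// As a sketch of the entry encoding (not a public API), the entry for
    /// the 4KiB page at guest address 0x203000 (PD slot 1, PT slot 3) in a
    /// writable, non-executable region would be:
    ///
    /// ```ignore
    /// let (p, i) = (1u64, 3u64); // PD slot (2MiB each), PT slot (4KiB each)
    /// let entry = (p << 21) | (i << 12) | PAGE_PRESENT | PAGE_RW | PAGE_NX;
    /// assert_eq!(entry & 0x000F_FFFF_FFFF_F000, 0x203000); // address bits
    /// ```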
    #[instrument(err(Debug), skip_all, parent = Span::current(), level = "Trace")]
    #[cfg(feature = "init-paging")]
    pub(crate) fn set_up_shared_memory(
        &mut self,
        mem_size: u64,
        regions: &mut [MemoryRegion],
    ) -> Result<u64> {
        // Start RSP 0x28 bytes below the very top of the stack so the stack
        // pointer has the alignment the guest entrypoint expects.
        let rsp: u64 = self.layout.get_top_of_user_stack_offset() as u64
            + SandboxMemoryLayout::BASE_ADDRESS as u64
            + self.layout.stack_size as u64
            - 0x28;

        self.shared_mem.with_exclusivity(|shared_mem| {
            // PML4 entry 0 points at the PDPT.
            shared_mem.write_u64(
                SandboxMemoryLayout::PML4_OFFSET,
                SandboxMemoryLayout::PDPT_GUEST_ADDRESS as u64 | PAGE_PRESENT | PAGE_RW,
            )?;

            // PDPT entry 0 points at the page directory.
            shared_mem.write_u64(
                SandboxMemoryLayout::PDPT_OFFSET,
                SandboxMemoryLayout::PD_GUEST_ADDRESS as u64 | PAGE_PRESENT | PAGE_RW,
            )?;

            // Each of the 512 page-directory entries points at a page table;
            // the tables are laid out contiguously, one 4KiB table per entry,
            // starting at PT_GUEST_ADDRESS.
            for i in 0..512 {
                let offset = SandboxMemoryLayout::PD_OFFSET + (i * 8);
                let val_to_write: u64 = (SandboxMemoryLayout::PT_GUEST_ADDRESS as u64
                    + (i * 4096) as u64)
                    | PAGE_PRESENT
                    | PAGE_RW;
                shared_mem.write_u64(offset, val_to_write)?;
            }

            let mem_size = usize::try_from(mem_size)?;

            // One page table covers AMOUNT_OF_MEMORY_PER_PT (2MiB); round up
            // so the tail of memory is also mapped.
            let num_pages: usize = mem_size.div_ceil(AMOUNT_OF_MEMORY_PER_PT);

            let total_ptes = num_pages * 512;
            // Build all page-table entries in a local buffer and write them to
            // shared memory in a single copy, rather than one write per entry.
            let mut pte_buffer = vec![0u64; total_ptes];
            let mut cached_region_idx: Option<usize> = None; // last-matched region index
            let mut pte_index = 0;

            for p in 0..num_pages {
                for i in 0..512 {
                    // Map the page's region type to page-table entry flags.
                    let flags = match Self::get_page_flags(p, i, regions, &mut cached_region_idx) {
                        Ok(region_type) => match region_type {
                            // Code pages must remain executable, so NX is not set.
                            MemoryRegionType::Code => PAGE_PRESENT | PAGE_RW | PAGE_USER,
                            MemoryRegionType::InitData => self
                                .layout
                                .init_data_permissions
                                .map(|perm| perm.translate_flags())
                                .unwrap_or(DEFAULT_GUEST_BLOB_MEM_FLAGS.translate_flags()),
                            MemoryRegionType::Stack => PAGE_PRESENT | PAGE_RW | PAGE_USER | PAGE_NX,
                            #[cfg(feature = "executable_heap")]
                            MemoryRegionType::Heap => PAGE_PRESENT | PAGE_RW | PAGE_USER,
                            #[cfg(not(feature = "executable_heap"))]
                            MemoryRegionType::Heap => PAGE_PRESENT | PAGE_RW | PAGE_USER | PAGE_NX,
                            MemoryRegionType::GuardPage => {
                                PAGE_PRESENT | PAGE_RW | PAGE_USER | PAGE_NX
                            }
                            MemoryRegionType::InputData => PAGE_PRESENT | PAGE_RW | PAGE_NX,
                            MemoryRegionType::OutputData => PAGE_PRESENT | PAGE_RW | PAGE_NX,
                            MemoryRegionType::Peb => PAGE_PRESENT | PAGE_RW | PAGE_NX,
                            MemoryRegionType::HostFunctionDefinitions => PAGE_PRESENT | PAGE_NX,
                            MemoryRegionType::PageTables => PAGE_PRESENT | PAGE_RW | PAGE_NX,
                        },
                        // No region contains this page: leave it unmapped (not present).
                        Err(_) => 0,
                    };
                    // Identity-map the page: p selects the 2MiB PD slot, i the 4KiB page.
                    let val_to_write = ((p << 21) as u64 | (i << 12) as u64) | flags;
                    // Page-table entries are stored little-endian.
                    pte_buffer[pte_index] = val_to_write.to_le();
                    pte_index += 1;
                }
            }

            // SAFETY: pte_buffer is a live Vec<u64> of total_ptes elements, so
            // reinterpreting it as total_ptes * 8 bytes stays in bounds, and
            // any u64 bit pattern is a valid sequence of bytes.
            let pte_bytes = unsafe {
                std::slice::from_raw_parts(pte_buffer.as_ptr() as *const u8, pte_buffer.len() * 8)
            };
            shared_mem.copy_from_slice(pte_bytes, SandboxMemoryLayout::PT_OFFSET)?;
            Ok::<(), crate::HyperlightError>(())
        })??;

        Ok(rsp)
    }

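    /// Look up the [`MemoryRegionType`] containing the page at
    /// page-directory index `p` and page-table index `i`.
    ///
    /// `cached_region_idx` remembers the last matching region: because the
    /// caller scans pages in address order, a page usually falls in the same
    /// region as the previous page (or the next one), so the binary search
    /// below is rarely needed.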
    #[cfg(feature = "init-paging")]
    fn get_page_flags(
        p: usize,
        i: usize,
        regions: &[MemoryRegion],
        cached_region_idx: &mut Option<usize>,
    ) -> Result<MemoryRegionType> {
        // Guest address of the page: p selects the 2MiB PD slot, i the 4KiB page.
        let addr = (p << 21) + (i << 12);

        // Fast path: the address usually falls in the same region as the
        // previous lookup.
        if let Some(cached_idx) = *cached_region_idx
            && cached_idx < regions.len()
            && regions[cached_idx].guest_region.contains(&addr)
        {
            return Ok(regions[cached_idx].region_type);
        }

        // Next-fastest path: the address falls in the region immediately
        // after the cached one.
        if let Some(cached_idx) = *cached_region_idx
            && cached_idx + 1 < regions.len()
            && regions[cached_idx + 1].guest_region.contains(&addr)
        {
            *cached_region_idx = Some(cached_idx + 1);
            return Ok(regions[cached_idx + 1].region_type);
        }

        // Slow path: binary-search the (sorted) region list for the address.
        let idx = regions.binary_search_by(|region| {
            if region.guest_region.contains(&addr) {
                Ordering::Equal
            } else if region.guest_region.start > addr {
                Ordering::Greater
            } else {
                Ordering::Less
            }
        });

        match idx {
            Ok(index) => {
                *cached_region_idx = Some(index);
                Ok(regions[index].region_type)
            }
            Err(_) => Err(new_error!("Could not find region for address: {}", addr)),
        }
    }

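    /// Take a snapshot of the sandbox's current memory contents, recording
    /// `sandbox_id` and the currently mapped regions alongside it.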
    pub(crate) fn snapshot(
        &mut self,
        sandbox_id: u64,
        mapped_regions: Vec<MemoryRegion>,
    ) -> Result<SharedMemorySnapshot> {
        SharedMemorySnapshot::new(&mut self.shared_mem, sandbox_id, mapped_regions)
    }

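    /// Restore the sandbox's memory from `snapshot`, failing if the
    /// snapshot's size does not match the current memory size.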
    pub(crate) fn restore_snapshot(&mut self, snapshot: &SharedMemorySnapshot) -> Result<()> {
        if self.shared_mem.mem_size() != snapshot.mem_size() {
            return Err(new_error!(
                "Snapshot size does not match current memory size: {} != {}",
                self.shared_mem.mem_size(),
                snapshot.mem_size()
            ));
        }
        snapshot.restore_from_snapshot(&mut self.shared_mem)?;
        Ok(())
    }
}

impl SandboxMemoryManager<ExclusiveSharedMemory> {
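    /// Load the binary described by `exe_info` into a newly allocated
    /// `ExclusiveSharedMemory`, laid out according to `cfg` and the optional
    /// `guest_blob`. Returns the resulting `SandboxMemoryManager` together
    /// with the binary's `LoadInfo`.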
    #[instrument(err(Debug), skip_all, parent = Span::current(), level = "Trace")]
    pub(crate) fn load_guest_binary_into_memory(
        cfg: SandboxConfiguration,
        exe_info: ExeInfo,
        guest_blob: Option<&GuestBlob>,
    ) -> Result<(Self, super::exe::LoadInfo)> {
        let guest_blob_size = guest_blob.map(|b| b.data.len()).unwrap_or(0);
        let guest_blob_mem_flags = guest_blob.map(|b| b.permissions);

        let layout = SandboxMemoryLayout::new(
            cfg,
            exe_info.loaded_size(),
            usize::try_from(cfg.get_stack_size(&exe_info))?,
            usize::try_from(cfg.get_heap_size(&exe_info))?,
            guest_blob_size,
            guest_blob_mem_flags,
        )?;
        let mut shared_mem = ExclusiveSharedMemory::new(layout.get_memory_size()?)?;

        let load_addr: RawPtr = RawPtr::try_from(layout.get_guest_code_address())?;

        let entrypoint_offset = exe_info.entrypoint();

        let offset = layout.get_code_pointer_offset();

        {
            // Write the code load address into the layout's code-pointer field.
            let load_addr_u64: u64 = load_addr.clone().into();
            shared_mem.write_u64(offset, load_addr_u64)?;
        }

        // `LoadInfo` may be a unit type depending on the build configuration,
        // hence the allow below.
        #[allow(clippy::let_unit_value)]
        let load_info = exe_info.load(
            load_addr.clone().try_into()?,
            &mut shared_mem.as_mut_slice()[layout.get_guest_code_offset()..],
        )?;

        // Generate a fresh random stack cookie and write it at the top of the
        // user stack; `check_stack_guard` later verifies it is intact.
        let stack_cookie = rand::random::<[u8; STACK_COOKIE_LEN]>();
        let stack_offset = layout.get_top_of_user_stack_offset();
        shared_mem.copy_from_slice(&stack_cookie, stack_offset)?;

        Ok((
            Self::new(
                layout,
                shared_mem,
                load_addr,
                entrypoint_offset,
                stack_cookie,
            ),
            load_info,
        ))
    }

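    /// Deserialize `buffer` into `HostFunctionDetails`, re-serialize it, and
    /// write the result into the host-function-definitions region of shared
    /// memory, failing if it does not fit.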
    #[instrument(err(Debug), skip_all, parent = Span::current(), level = "Trace")]
    pub(crate) fn write_buffer_host_function_details(&mut self, buffer: &[u8]) -> Result<()> {
        let host_function_details = HostFunctionDetails::try_from(buffer).map_err(|e| {
            new_error!(
                "write_buffer_host_function_details: failed to convert buffer to HostFunctionDetails: {}",
                e
            )
        })?;

        let host_function_call_buffer: Vec<u8> = (&host_function_details).try_into().map_err(|_| {
            new_error!(
                "write_buffer_host_function_details: failed to convert HostFunctionDetails to Vec<u8>"
            )
        })?;

        let buffer_size = {
            let size_u64 = self
                .shared_mem
                .read_u64(self.layout.get_host_function_definitions_size_offset())?;
            usize::try_from(size_u64)
        }?;

        // Ensure the serialized details fit within the reserved region before copying.
        if host_function_call_buffer.len() > buffer_size {
            log_then_return!(
                "Host Function Details buffer is too big for the host_function_definitions buffer"
            );
        }

        self.shared_mem.copy_from_slice(
            host_function_call_buffer.as_slice(),
            self.layout.host_function_definitions_buffer_offset,
        )?;
        Ok(())
    }

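    /// Write this manager's memory layout into shared memory.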
    #[instrument(err(Debug), skip_all, parent = Span::current(), level = "Trace")]
    pub(crate) fn write_memory_layout(&mut self) -> Result<()> {
        let mem_size = self.shared_mem.mem_size();
        self.layout.write(
            &mut self.shared_mem,
            SandboxMemoryLayout::BASE_ADDRESS,
            mem_size,
        )
    }

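    /// Write `user_memory` into the init-data region of shared memory.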
    #[instrument(err(Debug), skip_all, parent = Span::current(), level = "Trace")]
    pub(crate) fn write_init_data(&mut self, user_memory: &[u8]) -> Result<()> {
        self.layout
            .write_init_data(&mut self.shared_mem, user_memory)?;
        Ok(())
    }

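    /// Consume this exclusive manager and split it into a host-facing and a
    /// guest-facing `SandboxMemoryManager`, both backed by the same
    /// underlying memory.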
    pub fn build(
        self,
    ) -> (
        SandboxMemoryManager<HostSharedMemory>,
        SandboxMemoryManager<GuestSharedMemory>,
    ) {
        let (hshm, gshm) = self.shared_mem.build();
        (
            SandboxMemoryManager {
                shared_mem: hshm,
                layout: self.layout,
                load_addr: self.load_addr.clone(),
                entrypoint_offset: self.entrypoint_offset,
                mapped_rgns: self.mapped_rgns,
                stack_cookie: self.stack_cookie,
                abort_buffer: self.abort_buffer,
            },
            SandboxMemoryManager {
                shared_mem: gshm,
                layout: self.layout,
                load_addr: self.load_addr.clone(),
                entrypoint_offset: self.entrypoint_offset,
                mapped_rgns: self.mapped_rgns,
                stack_cookie: self.stack_cookie,
                // Abort messages are only accumulated on the host side.
                abort_buffer: Vec::new(),
            },
        )
    }
}

impl SandboxMemoryManager<HostSharedMemory> {
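    /// Check the stack guard against the stack cookie stored in this
    /// `SandboxMemoryManager`. Return `Ok(true)` if the cookie at the top of
    /// the guest stack is intact, `Ok(false)` if it has been overwritten (a
    /// likely stack overflow), and `Err` if the check could not be performed.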
    #[instrument(err(Debug), skip_all, parent = Span::current(), level = "Trace")]
    pub(crate) fn check_stack_guard(&self) -> Result<bool> {
        let expected = self.stack_cookie;
        let offset = self.layout.get_top_of_user_stack_offset();
        let actual: [u8; STACK_COOKIE_LEN] = self.shared_mem.read(offset)?;
        Ok(expected == actual)
    }

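    /// Get the absolute address of the guest's dispatch function, as written
    /// by the guest into shared memory.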
    #[instrument(err(Debug), skip_all, parent = Span::current(), level = "Trace")]
    pub(crate) fn get_pointer_to_dispatch_function(&self) -> Result<u64> {
        let guest_dispatch_function_ptr = self
            .shared_mem
            .read::<u64>(self.layout.get_dispatch_function_pointer_offset())?;

        // The guest writes this pointer, so validate it by round-tripping
        // through GuestPtr before resolving it to an absolute address.
        let guest_ptr = GuestPtr::try_from(RawPtr::from(guest_dispatch_function_ptr))?;
        guest_ptr.absolute()
    }

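    /// Pop the next host function call made by the guest from the output
    /// data buffer.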
    #[instrument(err(Debug), skip_all, parent = Span::current(), level = "Trace")]
    pub(crate) fn get_host_function_call(&mut self) -> Result<FunctionCall> {
        self.shared_mem.try_pop_buffer_into::<FunctionCall>(
            self.layout.output_data_buffer_offset,
            self.layout.sandbox_memory_config.get_output_data_size(),
        )
    }

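    /// Serialize `res` and push it onto the input data buffer as the result
    /// of the guest's host function call.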
    #[instrument(err(Debug), skip_all, parent = Span::current(), level = "Trace")]
    pub(crate) fn write_response_from_host_method_call(&mut self, res: &ReturnValue) -> Result<()> {
        let function_call_ret_val_buffer = Vec::<u8>::try_from(res).map_err(|_| {
            new_error!(
                "write_response_from_host_method_call: failed to convert ReturnValue to Vec<u8>"
            )
        })?;
        self.shared_mem.push_buffer(
            self.layout.input_data_buffer_offset,
            self.layout.sandbox_memory_config.get_input_data_size(),
            function_call_ret_val_buffer.as_slice(),
        )
    }

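    /// Validate `buffer` as a serialized guest function call and push it
    /// onto the input data buffer.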
    #[instrument(err(Debug), skip_all, parent = Span::current(), level = "Trace")]
    pub(crate) fn write_guest_function_call(&mut self, buffer: &[u8]) -> Result<()> {
        validate_guest_function_call_buffer(buffer).map_err(|e| {
            new_error!(
                "Guest function call buffer validation failed: {}",
                e.to_string()
            )
        })?;

        self.shared_mem.push_buffer(
            self.layout.input_data_buffer_offset,
            self.layout.sandbox_memory_config.get_input_data_size(),
            buffer,
        )
    }

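    /// Pop the return value of a guest function call from the output data
    /// buffer.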
    #[instrument(err(Debug), skip_all, parent = Span::current(), level = "Trace")]
    pub(crate) fn get_guest_function_call_result(&mut self) -> Result<ReturnValue> {
        self.shared_mem.try_pop_buffer_into::<ReturnValue>(
            self.layout.output_data_buffer_offset,
            self.layout.sandbox_memory_config.get_output_data_size(),
        )
    }

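    /// Pop a `GuestLogData` record written by the guest from the output data
    /// buffer.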
    #[instrument(err(Debug), skip_all, parent = Span::current(), level = "Trace")]
    pub(crate) fn read_guest_log_data(&mut self) -> Result<GuestLogData> {
        self.shared_mem.try_pop_buffer_into::<GuestLogData>(
            self.layout.output_data_buffer_offset,
            self.layout.sandbox_memory_config.get_output_data_size(),
        )
    }

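    /// Pop a `GuestError` written by the guest from the output data buffer.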
    pub(crate) fn get_guest_error(&mut self) -> Result<GuestError> {
        self.shared_mem.try_pop_buffer_into::<GuestError>(
            self.layout.output_data_buffer_offset,
            self.layout.sandbox_memory_config.get_output_data_size(),
        )
    }

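    /// Drain any leftover entries from both the output and input data
    /// buffers, discarding their contents.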
    pub(crate) fn clear_io_buffers(&mut self) {
        // Drain the output buffer, discarding anything the guest left behind.
        loop {
            let Ok(_) = self.shared_mem.try_pop_buffer_into::<Vec<u8>>(
                self.layout.output_data_buffer_offset,
                self.layout.sandbox_memory_config.get_output_data_size(),
            ) else {
                break;
            };
        }
        // Drain the input buffer likewise.
        loop {
            let Ok(_) = self.shared_mem.try_pop_buffer_into::<Vec<u8>>(
                self.layout.input_data_buffer_offset,
                self.layout.sandbox_memory_config.get_input_data_size(),
            ) else {
                break;
            };
        }
    }
}