use std::cmp::Ordering;

use hyperlight_common::flatbuffer_wrappers::function_call::{
    FunctionCall, validate_guest_function_call_buffer,
};
use hyperlight_common::flatbuffer_wrappers::function_types::ReturnValue;
use hyperlight_common::flatbuffer_wrappers::guest_error::GuestError;
use hyperlight_common::flatbuffer_wrappers::guest_log_data::GuestLogData;
use hyperlight_common::flatbuffer_wrappers::host_function_details::HostFunctionDetails;
use tracing::{Span, instrument};

use super::exe::ExeInfo;
use super::layout::SandboxMemoryLayout;
use super::memory_region::MemoryRegion;
#[cfg(feature = "init-paging")]
use super::memory_region::{DEFAULT_GUEST_BLOB_MEM_FLAGS, MemoryRegionType};
use super::ptr::{GuestPtr, RawPtr};
use super::ptr_offset::Offset;
use super::shared_mem::{ExclusiveSharedMemory, GuestSharedMemory, HostSharedMemory, SharedMemory};
use super::shared_mem_snapshot::SharedMemorySnapshot;
use crate::sandbox::SandboxConfiguration;
use crate::sandbox::uninitialized::GuestBlob;
use crate::{Result, log_then_return, new_error};

cfg_if::cfg_if! {
    if #[cfg(feature = "init-paging")] {
        // x86-64 page table entry flags used when building the guest's
        // initial page tables.
        pub(crate) const PAGE_PRESENT: u64 = 1; // Page is present
        pub(crate) const PAGE_RW: u64 = 1 << 1; // Page is writable
        pub(crate) const PAGE_USER: u64 = 1 << 2; // User-mode accessible
        pub(crate) const PAGE_NX: u64 = 1 << 63; // Execute disable
        // The amount of memory mapped by a single page table: 512
        // entries of 4KiB each, i.e. 2MiB.
        pub(super) const AMOUNT_OF_MEMORY_PER_PT: usize = 0x200_000;
    }
}

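// A fully-populated page table maps 512 * 0x1000 bytes = 0x200_000
// bytes (2MiB) of guest memory, hence `AMOUNT_OF_MEMORY_PER_PT` above.
// For example, a 64MiB sandbox needs 64MiB / 2MiB = 32 page tables
// (32 * 512 = 16384 page table entries).
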
/// The size, in bytes, of the stack guard cookie
pub(crate) const STACK_COOKIE_LEN: usize = 16;

/// Manages the memory of a sandbox: its layout, the shared memory
/// backing it, and where the guest binary was loaded.
#[derive(Clone)]
pub(crate) struct SandboxMemoryManager<S> {
    /// Shared memory for the sandbox
    pub(crate) shared_mem: S,
    /// The memory layout of the underlying shared memory
    pub(crate) layout: SandboxMemoryLayout,
    /// The load address of the guest binary
    pub(crate) load_addr: RawPtr,
    /// The offset to the entrypoint of the guest binary
    pub(crate) entrypoint_offset: Offset,
    /// How many memory regions were mapped after the sandbox was created
    pub(crate) mapped_rgns: u64,
}

impl<S> SandboxMemoryManager<S>
where
    S: SharedMemory,
{
    /// Create a new `SandboxMemoryManager` with the given parameters
    #[instrument(skip_all, parent = Span::current(), level= "Trace")]
    fn new(
        layout: SandboxMemoryLayout,
        shared_mem: S,
        load_addr: RawPtr,
        entrypoint_offset: Offset,
    ) -> Self {
        Self {
            layout,
            shared_mem,
            load_addr,
            entrypoint_offset,
            mapped_rgns: 0,
        }
    }

    /// Get `shared_mem` in `self` as a mutable reference
    pub(crate) fn get_shared_mem_mut(&mut self) -> &mut S {
        &mut self.shared_mem
    }

    /// Set up the page tables in the sandbox's shared memory so the
    /// guest can run with paging enabled, and return the guest's
    /// initial RSP value.
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    #[cfg(feature = "init-paging")]
    pub(crate) fn set_up_shared_memory(
        &mut self,
        mem_size: u64,
        regions: &mut [MemoryRegion],
    ) -> Result<u64> {
        // Start the stack pointer at the top of the user stack, leaving
        // a 0x28-byte gap below the very top.
        let rsp: u64 = self.layout.get_top_of_user_stack_offset() as u64
            + SandboxMemoryLayout::BASE_ADDRESS as u64
            + self.layout.stack_size as u64
            - 0x28;

        self.shared_mem.with_exclusivity(|shared_mem| {
            // PML4: a single entry pointing at the PDPT
            shared_mem.write_u64(
                SandboxMemoryLayout::PML4_OFFSET,
                SandboxMemoryLayout::PDPT_GUEST_ADDRESS as u64 | PAGE_PRESENT | PAGE_RW,
            )?;

            // PDPT: a single entry pointing at the PD
            shared_mem.write_u64(
                SandboxMemoryLayout::PDPT_OFFSET,
                SandboxMemoryLayout::PD_GUEST_ADDRESS as u64 | PAGE_PRESENT | PAGE_RW,
            )?;

            // PD: 512 entries, each pointing at a 4KiB page table
            for i in 0..512 {
                let offset = SandboxMemoryLayout::PD_OFFSET + (i * 8);
                let val_to_write: u64 = (SandboxMemoryLayout::PT_GUEST_ADDRESS as u64
                    + (i * 4096) as u64)
                    | PAGE_PRESENT
                    | PAGE_RW;
                shared_mem.write_u64(offset, val_to_write)?;
            }

            let mem_size = usize::try_from(mem_size)?;

            // Each page table maps AMOUNT_OF_MEMORY_PER_PT (2MiB), so
            // round up to cover the whole sandbox.
            let num_pages: usize = mem_size.div_ceil(AMOUNT_OF_MEMORY_PER_PT);

            // Build every page table entry in one buffer so it can be
            // written to shared memory in a single copy.
            let total_ptes = num_pages * 512;
            let mut pte_buffer = vec![0u64; total_ptes];
            let mut cached_region_idx: Option<usize> = None;
            let mut pte_index = 0;

            for p in 0..num_pages {
                for i in 0..512 {
                    let flags = match Self::get_page_flags(p, i, regions, &mut cached_region_idx) {
                        Ok(region_type) => match region_type {
                            // Code pages are executable (no PAGE_NX)
                            MemoryRegionType::Code => PAGE_PRESENT | PAGE_RW | PAGE_USER,
                            // Init data uses the permissions attached to
                            // the guest blob, if any were supplied
                            MemoryRegionType::InitData => self
                                .layout
                                .init_data_permissions
                                .map(|perm| perm.translate_flags())
                                .unwrap_or(DEFAULT_GUEST_BLOB_MEM_FLAGS.translate_flags()),
                            MemoryRegionType::Stack => PAGE_PRESENT | PAGE_RW | PAGE_USER | PAGE_NX,
                            #[cfg(feature = "executable_heap")]
                            MemoryRegionType::Heap => PAGE_PRESENT | PAGE_RW | PAGE_USER,
                            #[cfg(not(feature = "executable_heap"))]
                            MemoryRegionType::Heap => PAGE_PRESENT | PAGE_RW | PAGE_USER | PAGE_NX,
                            MemoryRegionType::GuardPage => {
                                PAGE_PRESENT | PAGE_RW | PAGE_USER | PAGE_NX
                            }
                            MemoryRegionType::InputData => PAGE_PRESENT | PAGE_RW | PAGE_NX,
                            MemoryRegionType::OutputData => PAGE_PRESENT | PAGE_RW | PAGE_NX,
                            MemoryRegionType::Peb => PAGE_PRESENT | PAGE_RW | PAGE_NX,
                            // Host function definitions are read-only
                            MemoryRegionType::HostFunctionDefinitions => PAGE_PRESENT | PAGE_NX,
                            MemoryRegionType::PageTables => PAGE_PRESENT | PAGE_RW | PAGE_NX,
                        },
                        // An address outside every region gets an empty
                        // (not-present) PTE
                        Err(_) => 0,
                    };
                    // PTE value: the guest address of the 4KiB page,
                    // OR'd with its flags, stored little-endian
                    let val_to_write = ((p << 21) as u64 | (i << 12) as u64) | flags;
                    pte_buffer[pte_index] = val_to_write.to_le();
                    pte_index += 1;
                }
            }


            // SAFETY: pte_buffer is a live Vec<u64>, so reinterpreting
            // its contents as `len * 8` bytes is sound.
            let pte_bytes = unsafe {
                std::slice::from_raw_parts(pte_buffer.as_ptr() as *const u8, pte_buffer.len() * 8)
            };
            shared_mem.copy_from_slice(pte_bytes, SandboxMemoryLayout::PT_OFFSET)?;
            Ok::<(), crate::HyperlightError>(())
        })??;

        Ok(rsp)
    }

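    /// Find the `MemoryRegionType` of the guest page at page table `p`,
    /// entry `i`, i.e. guest address `(p << 21) + (i << 12)`. For
    /// example, guest address 0x203000 falls in page table p = 1
    /// (0x203000 >> 21) at entry i = 3 ((0x203000 >> 12) & 0x1ff).
    ///
    /// PTEs are generated in ascending address order, so the matching
    /// region is usually the same as the previous lookup's, or the one
    /// right after it; both are checked before falling back to a binary
    /// search over `regions`.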
    #[cfg(feature = "init-paging")]
    fn get_page_flags(
        p: usize,
        i: usize,
        regions: &[MemoryRegion],
        cached_region_idx: &mut Option<usize>,
    ) -> Result<MemoryRegionType> {
        let addr = (p << 21) + (i << 12);

        // Check if the address is in the most recently matched region
        if let Some(cached_idx) = *cached_region_idx {
            if cached_idx < regions.len() && regions[cached_idx].guest_region.contains(&addr) {
                return Ok(regions[cached_idx].region_type);
            }
        }

        // Check if the address is in the region immediately after it
        if let Some(cached_idx) = *cached_region_idx {
            if cached_idx + 1 < regions.len()
                && regions[cached_idx + 1].guest_region.contains(&addr)
            {
                *cached_region_idx = Some(cached_idx + 1);
                return Ok(regions[cached_idx + 1].region_type);
            }
        }

        // Fall back to a binary search over the sorted region list
        let idx = regions.binary_search_by(|region| {
            if region.guest_region.contains(&addr) {
                Ordering::Equal
            } else if region.guest_region.start > addr {
                Ordering::Greater
            } else {
                Ordering::Less
            }
        });

        match idx {
            Ok(index) => {
                *cached_region_idx = Some(index);
                Ok(regions[index].region_type)
            }
            Err(_) => Err(new_error!("Could not find region for address: {}", addr)),
        }
    }

    /// Create a snapshot of the current state of the sandbox's memory,
    /// tagged with the given sandbox id and set of mapped regions
    pub(crate) fn snapshot(
        &mut self,
        sandbox_id: u64,
        mapped_regions: Vec<MemoryRegion>,
    ) -> Result<SharedMemorySnapshot> {
        SharedMemorySnapshot::new(&mut self.shared_mem, sandbox_id, mapped_regions)
    }

    /// Restore the sandbox's memory from the given snapshot. Fails if
    /// the snapshot's memory size does not match the current size.
    pub(crate) fn restore_snapshot(&mut self, snapshot: &SharedMemorySnapshot) -> Result<()> {
        if self.shared_mem.mem_size() != snapshot.mem_size() {
            return Err(new_error!(
                "Snapshot size does not match current memory size: {} != {}",
                self.shared_mem.mem_size(),
                snapshot.mem_size()
            ));
        }
        snapshot.restore_from_snapshot(&mut self.shared_mem)?;
        Ok(())
    }
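
    // Sketch of the intended snapshot/restore flow (hypothetical caller
    // code; `sandbox_id` and `regions` are assumed values):
    //
    //     let snap = mgr.snapshot(sandbox_id, regions)?;
    //     /* ... run the guest, mutating memory ... */
    //     mgr.restore_snapshot(&snap)?;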

    /// Write the address and context of the `outb` handler function
    /// into shared memory
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    #[allow(dead_code)]
    pub(crate) fn set_outb_address_and_context(&mut self, addr: u64, context: u64) -> Result<()> {
        let pointer_offset = self.layout.get_outb_pointer_offset();
        let context_offset = self.layout.get_outb_context_offset();
        self.shared_mem.with_exclusivity(|excl| -> Result<()> {
            excl.write_u64(pointer_offset, addr)?;
            excl.write_u64(context_offset, context)?;
            Ok(())
        })?
    }
}

impl SandboxMemoryManager<ExclusiveSharedMemory> {
    /// Build a memory layout from `cfg`, the sizes in `exe_info`, and
    /// the optional guest blob; allocate shared memory to match it; and
    /// load the guest binary into that memory at the guest code address.
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn load_guest_binary_into_memory(
        cfg: SandboxConfiguration,
        exe_info: ExeInfo,
        guest_blob: Option<&GuestBlob>,
    ) -> Result<(Self, super::exe::LoadInfo)> {
        let guest_blob_size = guest_blob.map(|b| b.data.len()).unwrap_or(0);
        let guest_blob_mem_flags = guest_blob.map(|b| b.permissions);

        let layout = SandboxMemoryLayout::new(
            cfg,
            exe_info.loaded_size(),
            usize::try_from(cfg.get_stack_size(&exe_info))?,
            usize::try_from(cfg.get_heap_size(&exe_info))?,
            guest_blob_size,
            guest_blob_mem_flags,
        )?;
        let mut shared_mem = ExclusiveSharedMemory::new(layout.get_memory_size()?)?;

        let load_addr: RawPtr = RawPtr::try_from(layout.get_guest_code_address())?;

        let entrypoint_offset = exe_info.entrypoint();

        let offset = layout.get_code_pointer_offset();

        {
            // Write the guest code's load address into shared memory so
            // the guest can locate its own code
            let load_addr_u64: u64 = load_addr.clone().into();
            shared_mem.write_u64(offset, load_addr_u64)?;
        }

        // Copy the guest binary into shared memory at the code offset
        #[allow(clippy::let_unit_value)]
        let load_info = exe_info.load(
            load_addr.clone().try_into()?,
            &mut shared_mem.as_mut_slice()[layout.get_guest_code_offset()..],
        )?;

        Ok((
            Self::new(layout, shared_mem, load_addr, entrypoint_offset),
            load_info,
        ))
    }
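
    // Sketch of the typical construction flow (hypothetical caller
    // code; `cfg`, `exe_info`, and `blob` are assumed values):
    //
    //     let (mgr, _load_info) =
    //         SandboxMemoryManager::load_guest_binary_into_memory(cfg, exe_info, blob)?;
    //     let (host_mgr, guest_mgr) = mgr.build();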

    /// Write host function details into the host function definitions
    /// buffer in shared memory, failing if they do not fit
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn write_buffer_host_function_details(&mut self, buffer: &[u8]) -> Result<()> {
        let host_function_details = HostFunctionDetails::try_from(buffer).map_err(|e| {
            new_error!(
                "write_buffer_host_function_details: failed to convert buffer to HostFunctionDetails: {}",
                e
            )
        })?;

        let host_function_call_buffer: Vec<u8> = (&host_function_details).try_into().map_err(|_| {
            new_error!(
                "write_buffer_host_function_details: failed to convert HostFunctionDetails to Vec<u8>"
            )
        })?;

        let buffer_size = {
            let size_u64 = self
                .shared_mem
                .read_u64(self.layout.get_host_function_definitions_size_offset())?;
            usize::try_from(size_u64)
        }?;

        if host_function_call_buffer.len() > buffer_size {
            log_then_return!(
                "Host Function Details buffer is too big for the host_function_definitions buffer"
            );
        }

        self.shared_mem.copy_from_slice(
            host_function_call_buffer.as_slice(),
            self.layout.host_function_definitions_buffer_offset,
        )?;
        Ok(())
    }

    /// Set the stack guard by writing `cookie` at the top of the user
    /// stack
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn set_stack_guard(&mut self, cookie: &[u8; STACK_COOKIE_LEN]) -> Result<()> {
        let stack_offset = self.layout.get_top_of_user_stack_offset();
        self.shared_mem.copy_from_slice(cookie, stack_offset)
    }
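
    // Sketch of the cookie round trip (hypothetical caller code; the
    // cookie value and the `host_mgr` handle are assumed):
    //
    //     let cookie: [u8; STACK_COOKIE_LEN] = [0x2F; STACK_COOKIE_LEN];
    //     mgr.set_stack_guard(&cookie)?;
    //     /* ... later, on the HostSharedMemory manager ... */
    //     assert!(host_mgr.check_stack_guard(cookie)?);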
401
402 pub fn build(
404 self,
405 ) -> (
406 SandboxMemoryManager<HostSharedMemory>,
407 SandboxMemoryManager<GuestSharedMemory>,
408 ) {
409 let (hshm, gshm) = self.shared_mem.build();
410 (
411 SandboxMemoryManager {
412 shared_mem: hshm,
413 layout: self.layout,
414 load_addr: self.load_addr.clone(),
415 entrypoint_offset: self.entrypoint_offset,
416 mapped_rgns: 0,
417 },
418 SandboxMemoryManager {
419 shared_mem: gshm,
420 layout: self.layout,
421 load_addr: self.load_addr.clone(),
422 entrypoint_offset: self.entrypoint_offset,
423 mapped_rgns: 0,
424 },
425 )
426 }
427}

impl SandboxMemoryManager<HostSharedMemory> {
    /// Check the stack guard against the given `cookie`, returning
    /// `Ok(true)` if the guard is intact
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn check_stack_guard(&self, cookie: [u8; STACK_COOKIE_LEN]) -> Result<bool> {
        let offset = self.layout.get_top_of_user_stack_offset();
        let test_cookie: [u8; STACK_COOKIE_LEN] = self.shared_mem.read(offset)?;
        let cmp_res = cookie.iter().cmp(test_cookie.iter());
        Ok(cmp_res == Ordering::Equal)
    }

    /// Get the address of the dispatch function in the guest, as an
    /// absolute guest address
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_pointer_to_dispatch_function(&self) -> Result<u64> {
        let guest_dispatch_function_ptr = self
            .shared_mem
            .read::<u64>(self.layout.get_dispatch_function_pointer_offset())?;

        // Convert to a GuestPtr and resolve it to an absolute address
        let guest_ptr = GuestPtr::try_from(RawPtr::from(guest_dispatch_function_ptr))?;
        guest_ptr.absolute()
    }

    /// Pop the next host function call off the guest's output data
    /// buffer
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_host_function_call(&mut self) -> Result<FunctionCall> {
        self.shared_mem.try_pop_buffer_into::<FunctionCall>(
            self.layout.output_data_buffer_offset,
            self.layout.sandbox_memory_config.get_output_data_size(),
        )
    }
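
    // Sketch of the host-call servicing loop these two methods support
    // (hypothetical caller code; `dispatch_to_host` is an assumed
    // dispatcher, not part of this module):
    //
    //     let call = mgr.get_host_function_call()?;
    //     let ret: ReturnValue = dispatch_to_host(call);
    //     mgr.write_response_from_host_method_call(&ret)?;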

    /// Write the response from a host method call into the guest's
    /// input data buffer
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn write_response_from_host_method_call(&mut self, res: &ReturnValue) -> Result<()> {
        let function_call_ret_val_buffer = Vec::<u8>::try_from(res).map_err(|_| {
            new_error!(
                "write_response_from_host_method_call: failed to convert ReturnValue to Vec<u8>"
            )
        })?;
        self.shared_mem.push_buffer(
            self.layout.input_data_buffer_offset,
            self.layout.sandbox_memory_config.get_input_data_size(),
            function_call_ret_val_buffer.as_slice(),
        )
    }

    /// Validate `buffer` as a serialized guest function call and push
    /// it onto the guest's input data buffer
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn write_guest_function_call(&mut self, buffer: &[u8]) -> Result<()> {
        validate_guest_function_call_buffer(buffer).map_err(|e| {
            new_error!(
                "Guest function call buffer validation failed: {}",
                e.to_string()
            )
        })?;

        self.shared_mem.push_buffer(
            self.layout.input_data_buffer_offset,
            self.layout.sandbox_memory_config.get_input_data_size(),
            buffer,
        )
    }
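
    // Sketch of a guest-call round trip (hypothetical caller code;
    // `serialized_call` is an assumed, already-encoded FunctionCall):
    //
    //     mgr.write_guest_function_call(&serialized_call)?;
    //     /* ... dispatch the guest ... */
    //     let result: ReturnValue = mgr.get_guest_function_call_result()?;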

    /// Pop the result of a guest function call off the guest's output
    /// data buffer
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn get_guest_function_call_result(&mut self) -> Result<ReturnValue> {
        self.shared_mem.try_pop_buffer_into::<ReturnValue>(
            self.layout.output_data_buffer_offset,
            self.layout.sandbox_memory_config.get_output_data_size(),
        )
    }

    /// Pop guest log data off the guest's output data buffer
    #[instrument(err(Debug), skip_all, parent = Span::current(), level= "Trace")]
    pub(crate) fn read_guest_log_data(&mut self) -> Result<GuestLogData> {
        self.shared_mem.try_pop_buffer_into::<GuestLogData>(
            self.layout.output_data_buffer_offset,
            self.layout.sandbox_memory_config.get_output_data_size(),
        )
    }

    /// Pop a guest error off the guest's output data buffer
    pub(crate) fn get_guest_error(&mut self) -> Result<GuestError> {
        self.shared_mem.try_pop_buffer_into::<GuestError>(
            self.layout.output_data_buffer_offset,
            self.layout.sandbox_memory_config.get_output_data_size(),
        )
    }

    /// Drain both the output and input data buffers, discarding any
    /// leftover data
    pub(crate) fn clear_io_buffers(&mut self) {
        // Pop until the output buffer is empty
        loop {
            let Ok(_) = self.shared_mem.try_pop_buffer_into::<Vec<u8>>(
                self.layout.output_data_buffer_offset,
                self.layout.sandbox_memory_config.get_output_data_size(),
            ) else {
                break;
            };
        }
        // Pop until the input buffer is empty
        loop {
            let Ok(_) = self.shared_mem.try_pop_buffer_into::<Vec<u8>>(
                self.layout.input_data_buffer_offset,
                self.layout.sandbox_memory_config.get_input_data_size(),
            ) else {
                break;
            };
        }
    }
}