use crate::{
instruction_set, CompilationError, DataSegmentIdx, ElementSegmentIdx, GlobalIdx,
GlobalVariable, I64ValueSplit, InstructionSet, TableIdx, DEFAULT_MEMORY_INDEX, NULL_FUNC_IDX,
N_BYTES_PER_MEMORY_PAGE,
};
use alloc::{vec, vec::Vec};
use hashbrown::HashMap;
use wasmparser::{TableType, ValType};
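
/// Collects a wasm module's data and element segments into merged sections and
/// builds the entrypoint bytecode that initializes globals, linear memory and
/// tables before the module's own code runs.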
#[derive(Debug)]
pub struct SegmentBuilder {
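    /// Every data segment's bytes, concatenated into one section.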
pub(crate) global_memory_section: Vec<u8>,
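    /// Per data segment: an `(offset, length)` pair (the destination memory
    /// offset for active segments, the offset into `global_memory_section` for
    /// passive ones).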
pub(crate) memory_sections: HashMap<DataSegmentIdx, (u32, u32)>,
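    /// Every element segment's entries (function indices), concatenated into one
    /// section.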
pub(crate) global_element_section: Vec<u32>,
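    /// Per element segment: an `(offset, length)` pair, analogous to
    /// `memory_sections`.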
pub(crate) element_sections: HashMap<ElementSegmentIdx, (u32, u32)>,
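    /// Total number of memory pages requested so far via `add_memory_pages`.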
pub(crate) total_allocated_pages: u32,
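    /// Instructions executed at start-up, before the module's own code runs.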
pub(crate) entrypoint_bytecode: InstructionSet,
}
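
// A freshly created builder starts its entrypoint with only a stack-check
// prologue; every other instruction is appended by the methods on
// `SegmentBuilder`.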
impl Default for SegmentBuilder {
fn default() -> Self {
let entrypoint_bytecode = instruction_set! {
StackCheck(5)
};
Self {
global_memory_section: vec![],
memory_sections: Default::default(),
global_element_section: vec![],
element_sections: Default::default(),
total_allocated_pages: 0,
entrypoint_bytecode,
}
}
}

impl SegmentBuilder {
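    /// Emits entrypoint instructions that initialize global `global_idx` with its
    /// default value. Globals are lowered to 32-bit slots starting at
    /// `global_idx * 2`; 64-bit values are split into two halves stored in slots
    /// `global_idx * 2` and `global_idx * 2 + 1`. Function and extern references
    /// are pushed with `ref.func`; any other value type is rejected.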
pub fn add_global_variable(
&mut self,
global_idx: GlobalIdx,
global_variable: &GlobalVariable,
) -> Result<(), CompilationError> {
let global_type = global_variable.global_type.content_type;
match global_type {
ValType::I32 | ValType::F32 => self
.entrypoint_bytecode
.op_i32_const(global_variable.default_value),
ValType::I64 | ValType::F64 => {
let (lower, upper) = global_variable.default_value.split_into_i32_tuple();
self.entrypoint_bytecode.op_i32_const(lower);
self.entrypoint_bytecode.op_i32_const(upper)
}
ValType::FuncRef | ValType::ExternRef => self
.entrypoint_bytecode
.op_ref_func(global_variable.default_value as u32),
_ => return Err(CompilationError::NotSupportedGlobalType),
};
self.entrypoint_bytecode.op_global_set(global_idx * 2);
if global_type == ValType::I64 || global_type == ValType::F64 {
self.entrypoint_bytecode.op_global_set(global_idx * 2 + 1);
}
Ok(())
}
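
    /// Accounts for `initial_pages` of linear memory against
    /// `max_allowed_memory_pages` and, for a non-zero page count, emits a
    /// `memory.grow` into the entrypoint so the pages are allocated at start-up.
    /// Fails once the running total would reach the configured maximum.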
pub fn add_memory_pages(
&mut self,
initial_pages: u32,
max_allowed_memory_pages: u32,
) -> Result<(), CompilationError> {
        let next_pages = self.total_allocated_pages.saturating_add(initial_pages);
if next_pages >= max_allowed_memory_pages {
return Err(CompilationError::MaxReadonlyDataReached);
}
if initial_pages > 0 {
self.entrypoint_bytecode.op_i32_const(initial_pages);
self.entrypoint_bytecode.op_memory_grow_checked(None, false);
self.entrypoint_bytecode.op_drop();
}
self.total_allocated_pages = next_pages;
Ok(())
}
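
    /// Emits entrypoint instructions that grow table `table_index` to its
    /// declared initial size, filling the new entries with the null function
    /// reference.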
pub fn emit_table_segment(
&mut self,
table_index: TableIdx,
table_type: &TableType,
) -> Result<(), CompilationError> {
self.entrypoint_bytecode.op_ref_func(NULL_FUNC_IDX);
self.entrypoint_bytecode.op_i32_const(table_type.initial);
self.entrypoint_bytecode.op_table_grow(table_index);
self.entrypoint_bytecode.op_drop();
Ok(())
}
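
    /// Registers an active data segment: the bytes are appended to the merged
    /// memory section and the entrypoint gets a `memory.init`/`data.drop` pair
    /// that copies them to linear memory at `offset` during start-up.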
pub fn add_active_memory(&mut self, segment_idx: DataSegmentIdx, offset: u32, bytes: &[u8]) {
let has_memory_overflow = || -> Option<bool> {
let max_affected_page = offset
.checked_add(bytes.len() as u32)?
.checked_add(N_BYTES_PER_MEMORY_PAGE - 1)?
.checked_div(N_BYTES_PER_MEMORY_PAGE)?;
Some(max_affected_page > self.total_allocated_pages)
};
let data_offset = self.global_memory_section.len();
let data_length = bytes.len();
self.global_memory_section.extend(bytes);
self.entrypoint_bytecode.op_i32_const(offset);
self.entrypoint_bytecode.op_i32_const(data_offset);
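        // When the segment does not fit into the allocated pages, push an
        // intentionally out-of-bounds length (`u32::MAX`) so that the
        // memory.init below reliably traps at start-up.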
if has_memory_overflow().unwrap_or_default() {
self.entrypoint_bytecode.op_i32_const(u32::MAX);
} else {
self.entrypoint_bytecode.op_i32_const(data_length);
}
self.entrypoint_bytecode
.op_memory_init(DEFAULT_MEMORY_INDEX);
self.entrypoint_bytecode.op_data_drop(segment_idx + 1);
self.memory_sections
.insert(segment_idx, (offset, bytes.len() as u32));
}
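
    /// Registers a passive data segment: the bytes are appended to the merged
    /// memory section and their `(offset, length)` within that section is
    /// recorded. No entrypoint code is emitted; passive segments are only copied
    /// by an explicit `memory.init`.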
pub fn add_passive_memory(&mut self, segment_idx: DataSegmentIdx, bytes: &[u8]) {
let data_offset = self.global_memory_section.len() as u32;
let data_length = bytes.len() as u32;
self.global_memory_section.extend(bytes);
self.memory_sections
.insert(segment_idx, (data_offset, data_length));
}
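
    /// Registers an active element segment: the function indices are appended to
    /// the merged element section and the entrypoint gets a `table.init`/
    /// `elem.drop` pair that copies them into table `table_idx` at `offset`
    /// during start-up.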
pub fn add_active_elements<T: IntoIterator<Item = u32>>(
&mut self,
segment_idx: ElementSegmentIdx,
offset: u32,
table_idx: TableIdx,
elements: T,
) {
let segment_offset = self.global_element_section.len();
self.global_element_section.extend(elements);
let segment_length = self.global_element_section.len() - segment_offset;
self.entrypoint_bytecode.op_i32_const(offset);
self.entrypoint_bytecode.op_i32_const(segment_offset);
self.entrypoint_bytecode.op_i32_const(segment_length);
self.entrypoint_bytecode.op_table_init(segment_idx + 1);
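        // The table_get below appears to carry the target table index for the
        // preceding table_init (assumed from this builder's encoding).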
self.entrypoint_bytecode.op_table_get(table_idx);
self.entrypoint_bytecode.op_elem_drop(segment_idx + 1);
self.element_sections
.insert(segment_idx, (offset, segment_length as u32));
}
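
    /// Registers a passive element segment: the entries are appended to the
    /// merged element section and their `(offset, length)` within that section
    /// is recorded. No entrypoint code is emitted.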
pub fn add_passive_elements<T: IntoIterator<Item = u32>>(
&mut self,
segment_idx: ElementSegmentIdx,
elements: T,
) {
let segment_offset = self.global_element_section.len() as u32;
self.global_element_section.extend(elements);
let segment_length = self.global_element_section.len() as u32 - segment_offset;
self.element_sections
.insert(segment_idx, (segment_offset, segment_length));
}
}
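
// A minimal test sketch for the page-budget accounting in `add_memory_pages`.
// It only exercises that method, which takes plain `u32` arguments, so no
// assumptions about the crate's index types are needed.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn memory_page_budget_is_enforced() {
        let mut builder = SegmentBuilder::default();
        // Two pages fit comfortably into a budget of ten.
        assert!(builder.add_memory_pages(2, 10).is_ok());
        assert_eq!(builder.total_allocated_pages, 2);
        // Reaching the budget (2 + 8 >= 10) must be rejected, and the running
        // total must stay unchanged.
        assert!(builder.add_memory_pages(8, 10).is_err());
        assert_eq!(builder.total_allocated_pages, 2);
    }
}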