use crate::emit::{Emit, EmitContext};
use crate::ir::Value;
use crate::parse::IndicesToIds;
use crate::tombstone_arena::{Id, Tombstone, TombstoneArena};
use crate::{ConstExpr, MemoryId, Module, Result, ValType};
use anyhow::{bail, Context};
/// A unique identifier for a [`Data`] segment within a module.
pub type DataId = Id<Data>;

/// A data segment in a wasm module: a blob of bytes that is either passive
/// or pinned to a constant offset in a particular memory (see [`DataKind`]).
#[derive(Debug)]
pub struct Data {
    // This segment's own id within the module's data arena; kept private so
    // it can only be read via `Data::id`.
    id: DataId,
    /// Whether this segment is active (placed in a memory) or passive.
    pub kind: DataKind,
    /// The raw bytes of this data segment.
    pub value: Vec<u8>,
    /// An optional name for this segment — presumably sourced from a wasm
    /// `name` custom section; populated elsewhere (not in this file).
    pub name: Option<String>,
}
/// The kind of a data segment.
#[derive(Debug)]
pub enum DataKind {
    /// An active segment, associated with a particular memory and a constant
    /// offset within it.
    Active {
        /// The memory this segment is placed into.
        memory: MemoryId,
        /// The constant expression giving the offset within the memory.
        offset: ConstExpr,
    },
    /// A passive segment, not tied to any memory or offset.
    Passive,
}
impl Tombstone for Data {
    fn on_delete(&mut self) {
        // Drop the (possibly large) payload as soon as the segment is
        // tombstoned, instead of holding it until the arena slot is reused.
        let _ = std::mem::take(&mut self.value);
    }
}
impl Data {
    /// Returns the id of this data segment.
    pub fn id(&self) -> DataId {
        self.id
    }

    /// Returns `true` if this is a passive data segment, i.e. one not
    /// associated with any memory.
    pub fn is_passive(&self) -> bool {
        // Exhaustive match so a new `DataKind` variant forces a revisit here.
        match self.kind {
            DataKind::Passive => true,
            DataKind::Active { .. } => false,
        }
    }
}
/// All the data segments of a module, stored in a tombstone arena so ids
/// remain stable when individual segments are deleted.
#[derive(Debug, Default)]
pub struct ModuleData {
    // Backing storage; deletion tombstones a slot rather than shifting ids.
    arena: TombstoneArena<Data>,
}
impl ModuleData {
    /// Returns a shared reference to the data segment with the given id.
    ///
    /// May panic if `id` is invalid (e.g. the segment was deleted).
    pub fn get(&self, id: DataId) -> &Data {
        &self.arena[id]
    }

    /// Returns an exclusive reference to the data segment with the given id.
    ///
    /// May panic if `id` is invalid (e.g. the segment was deleted).
    pub fn get_mut(&mut self, id: DataId) -> &mut Data {
        &mut self.arena[id]
    }

    /// Deletes the data segment with the given id from this module.
    pub fn delete(&mut self, id: DataId) {
        self.arena.delete(id);
    }

    /// Iterates over all live data segments in this module.
    pub fn iter(&self) -> impl Iterator<Item = &Data> {
        self.arena.iter().map(|(_, segment)| segment)
    }

    /// Allocates a new data segment with the given kind and contents,
    /// returning its freshly assigned id.
    pub fn add(&mut self, kind: DataKind, value: Vec<u8>) -> DataId {
        let expected = self.arena.next_id();
        let actual = self.arena.alloc(Data {
            id: expected,
            kind,
            value,
            name: None,
        });
        // `next_id` must agree with the id `alloc` actually hands out.
        debug_assert_eq!(expected, actual);
        expected
    }

    /// Assigns an emit-time index to every data segment and, when needed,
    /// emits a `DataCount` section: the section is required whenever any
    /// segment is passive or any local function references a data segment.
    pub(crate) fn emit_data_count(&self, cx: &mut EmitContext) {
        #[cfg(feature = "parallel")]
        use rayon::iter::ParallelIterator;
        if self.arena.len() == 0 {
            return;
        }
        let mut count = 0;
        let mut any_passive = false;
        for segment in self.iter() {
            cx.indices.set_data_index(segment.id(), count);
            count += 1;
            any_passive |= segment.is_passive();
        }
        let funcs = &cx.module.funcs;
        // Short-circuit: only scan function bodies when no passive segment
        // already forces the section.
        let needs_section = any_passive
            || maybe_parallel!(funcs.(iter_local | par_iter_local))
                .any(|(_, func)| !func.used_data_segments().is_empty());
        if needs_section {
            cx.wasm_module
                .section(&wasm_encoder::DataCountSection { count });
        }
    }
}
impl Module {
pub(crate) fn reserve_data(&mut self, count: u32, ids: &mut IndicesToIds) {
log::debug!("reserving space for {} data segments", count);
for _ in 0..count {
ids.push_data(self.data.arena.alloc_with_id(|id| Data {
id,
value: Vec::new(),
kind: DataKind::Passive,
name: None,
}));
}
}
pub(crate) fn parse_data(
&mut self,
section: wasmparser::DataSectionReader,
ids: &mut IndicesToIds,
) -> Result<()> {
log::debug!("parse data section");
let preallocated = self.data.arena.len() > 0;
for (i, segment) in section.into_iter().enumerate() {
let segment = segment?;
let id = if preallocated {
ids.get_data(i as u32)?
} else {
let id = self.data.arena.alloc_with_id(|id| Data {
id,
value: Vec::new(),
kind: DataKind::Passive,
name: None,
});
ids.push_data(id);
id
};
let data = self.data.get_mut(id);
match segment.kind {
wasmparser::DataKind::Passive => {
data.value = segment.data.to_vec();
data.kind = DataKind::Passive;
}
wasmparser::DataKind::Active {
memory_index,
offset_expr,
} => {
data.value = segment.data.to_vec();
let memory_id = ids.get_memory(memory_index)?;
let memory = self.memories.get_mut(memory_id);
memory.data_segments.insert(data.id);
let offset = ConstExpr::eval(&offset_expr, ids)
.with_context(|| format!("failed to evaluate the offset of data {}", i))?;
if memory.memory64 {
match offset {
ConstExpr::Value(Value::I64(_)) => {}
ConstExpr::Global(global)
if self.globals.get(global).ty == ValType::I64 => {}
ConstExpr::Extended(_) => {}
_ => bail!(
"data {} is active for 64-bit memory but has non-i64 offset",
i
),
}
} else {
match offset {
ConstExpr::Value(Value::I32(_)) => {}
ConstExpr::Global(global)
if self.globals.get(global).ty == ValType::I32 => {}
ConstExpr::Extended(_) => {}
_ => bail!(
"data {} is active for 32-bit memory but has non-i32 offset",
i
),
}
}
data.kind = DataKind::Active {
memory: memory_id,
offset,
}
}
}
}
Ok(())
}
}
impl Emit for ModuleData {
    /// Emits the data section, containing every live data segment in this
    /// module. No section is emitted when there are no segments at all.
    fn emit(&self, cx: &mut EmitContext) {
        log::debug!("emit data section");
        if self.arena.len() == 0 {
            return;
        }
        let mut section = wasm_encoder::DataSection::new();
        for segment in self.iter() {
            let bytes = segment.value.clone();
            match &segment.kind {
                DataKind::Active { memory, offset } => {
                    let memory_index = cx.indices.get_memory_index(*memory);
                    section.active(memory_index, &offset.to_wasmencoder_type(cx), bytes);
                }
                DataKind::Passive => {
                    section.passive(bytes);
                }
            }
        }
        cx.wasm_module.section(&section);
    }
}