use move_binary_format::{
binary_views::{BinaryIndexedView, FunctionView},
control_flow_graph::{BlockId, ControlFlowGraph},
errors::{PartialVMError, PartialVMResult},
file_format::{Bytecode, CodeUnit, FunctionDefinitionIndex, Signature, StructFieldInformation},
};
use move_core_types::vm_status::StatusCode;
/// Per-function verifier state for checking operand-stack discipline: within
/// every basic block the stack must never dip below its height at block entry,
/// and the net height change over the block must be zero.
pub(crate) struct StackUsageVerifier<'a> {
    /// View over the containing module/script; used to resolve the function
    /// handles, signatures, and struct definitions referenced by instructions.
    resolver: &'a BinaryIndexedView<'a>,
    /// Index of the function being verified. NOTE(review): presumably `None`
    /// when the code unit has no function definition (e.g. script entry) —
    /// `current_function()` falls back to index 0 in that case; confirm.
    current_function: Option<FunctionDefinitionIndex>,
    /// Bytecode of the function under verification.
    code: &'a CodeUnit,
    /// Return signature of the function; its length is how many values `Ret`
    /// pops off the stack.
    return_: &'a Signature,
}
impl<'a> StackUsageVerifier<'a> {
pub(crate) fn verify(
resolver: &'a BinaryIndexedView<'a>,
function_view: &'a FunctionView,
) -> PartialVMResult<()> {
let verifier = Self {
resolver,
current_function: function_view.index(),
code: function_view.code(),
return_: function_view.return_(),
};
for block_id in function_view.cfg().blocks() {
verifier.verify_block(block_id, function_view.cfg())?
}
Ok(())
}
fn verify_block(&self, block_id: BlockId, cfg: &dyn ControlFlowGraph) -> PartialVMResult<()> {
let code = &self.code.code;
let mut stack_size_increment = 0;
let block_start = cfg.block_start(block_id);
for i in block_start..=cfg.block_end(block_id) {
let (num_pops, num_pushes) = self.instruction_effect(&code[i as usize])?;
if stack_size_increment < num_pops {
return Err(
PartialVMError::new(StatusCode::NEGATIVE_STACK_SIZE_WITHIN_BLOCK)
.at_code_offset(self.current_function(), block_start),
);
}
stack_size_increment -= num_pops;
stack_size_increment += num_pushes;
}
if stack_size_increment == 0 {
Ok(())
} else {
Err(
PartialVMError::new(StatusCode::POSITIVE_STACK_SIZE_AT_BLOCK_END)
.at_code_offset(self.current_function(), block_start),
)
}
}
fn instruction_effect(&self, instruction: &Bytecode) -> PartialVMResult<(u64, u64)> {
Ok(match instruction {
Bytecode::Pop
| Bytecode::BrTrue(_)
| Bytecode::BrFalse(_)
| Bytecode::StLoc(_)
| Bytecode::Abort => (1, 0),
Bytecode::LdU8(_)
| Bytecode::LdU64(_)
| Bytecode::LdU128(_)
| Bytecode::LdTrue
| Bytecode::LdFalse
| Bytecode::LdConst(_)
| Bytecode::CopyLoc(_)
| Bytecode::MoveLoc(_)
| Bytecode::MutBorrowLoc(_)
| Bytecode::ImmBorrowLoc(_) => (0, 1),
Bytecode::Not
| Bytecode::FreezeRef
| Bytecode::ReadRef
| Bytecode::Exists(_)
| Bytecode::ExistsGeneric(_)
| Bytecode::MutBorrowGlobal(_)
| Bytecode::MutBorrowGlobalGeneric(_)
| Bytecode::ImmBorrowGlobal(_)
| Bytecode::ImmBorrowGlobalGeneric(_)
| Bytecode::MutBorrowField(_)
| Bytecode::MutBorrowFieldGeneric(_)
| Bytecode::ImmBorrowField(_)
| Bytecode::ImmBorrowFieldGeneric(_)
| Bytecode::MoveFrom(_)
| Bytecode::MoveFromGeneric(_)
| Bytecode::CastU8
| Bytecode::CastU64
| Bytecode::CastU128
| Bytecode::VecLen(_)
| Bytecode::VecPopBack(_) => (1, 1),
Bytecode::Add
| Bytecode::Sub
| Bytecode::Mul
| Bytecode::Mod
| Bytecode::Div
| Bytecode::BitOr
| Bytecode::BitAnd
| Bytecode::Xor
| Bytecode::Shl
| Bytecode::Shr
| Bytecode::Or
| Bytecode::And
| Bytecode::Eq
| Bytecode::Neq
| Bytecode::Lt
| Bytecode::Gt
| Bytecode::Le
| Bytecode::Ge => (2, 1),
Bytecode::VecPack(_, num) => (*num, 1),
Bytecode::VecUnpack(_, num) => (1, *num),
Bytecode::VecImmBorrow(_) | Bytecode::VecMutBorrow(_) => (2, 1),
Bytecode::MoveTo(_)
| Bytecode::MoveToGeneric(_)
| Bytecode::WriteRef
| Bytecode::VecPushBack(_) => (2, 0),
Bytecode::VecSwap(_) => (3, 0),
Bytecode::Branch(_) | Bytecode::Nop => (0, 0),
Bytecode::Ret => {
let return_count = self.return_.len();
(return_count as u64, 0)
}
Bytecode::Call(idx) => {
let function_handle = self.resolver.function_handle_at(*idx);
let arg_count = self.resolver.signature_at(function_handle.parameters).len() as u64;
let return_count = self.resolver.signature_at(function_handle.return_).len() as u64;
(arg_count, return_count)
}
Bytecode::CallGeneric(idx) => {
let func_inst = self.resolver.function_instantiation_at(*idx);
let function_handle = self.resolver.function_handle_at(func_inst.handle);
let arg_count = self.resolver.signature_at(function_handle.parameters).len() as u64;
let return_count = self.resolver.signature_at(function_handle.return_).len() as u64;
(arg_count, return_count)
}
Bytecode::Pack(idx) => {
let struct_definition = self.resolver.struct_def_at(*idx)?;
let field_count = match &struct_definition.field_information {
StructFieldInformation::Native => 0,
StructFieldInformation::Declared(fields) => fields.len(),
};
(field_count as u64, 1)
}
Bytecode::PackGeneric(idx) => {
let struct_inst = self.resolver.struct_instantiation_at(*idx)?;
let struct_definition = self.resolver.struct_def_at(struct_inst.def)?;
let field_count = match &struct_definition.field_information {
StructFieldInformation::Native => 0,
StructFieldInformation::Declared(fields) => fields.len(),
};
(field_count as u64, 1)
}
Bytecode::Unpack(idx) => {
let struct_definition = self.resolver.struct_def_at(*idx)?;
let field_count = match &struct_definition.field_information {
StructFieldInformation::Native => 0,
StructFieldInformation::Declared(fields) => fields.len(),
};
(1, field_count as u64)
}
Bytecode::UnpackGeneric(idx) => {
let struct_inst = self.resolver.struct_instantiation_at(*idx)?;
let struct_definition = self.resolver.struct_def_at(struct_inst.def)?;
let field_count = match &struct_definition.field_information {
StructFieldInformation::Native => 0,
StructFieldInformation::Declared(fields) => fields.len(),
};
(1, field_count as u64)
}
})
}
fn current_function(&self) -> FunctionDefinitionIndex {
self.current_function.unwrap_or(FunctionDefinitionIndex(0))
}
}