revm_interpreter/interpreter/ext_bytecode.rs

use core::ops::Deref;

use bytecode::{eof::CodeInfo, utils::read_u16, Bytecode};
use primitives::{Bytes, B256};

use super::{EofCodeInfo, EofContainer, EofData, Immediates, Jumps, LegacyBytecode};

#[cfg(feature = "serde")]
mod serde;

/// Bytecode paired with its optional cached hash and a raw instruction pointer
/// that serves as the interpreter's program counter cursor.
#[derive(Debug)]
pub struct ExtBytecode {
    base: Bytecode,
    bytecode_hash: Option<B256>,
    instruction_pointer: *const u8,
}

impl Deref for ExtBytecode {
    type Target = Bytecode;

    fn deref(&self) -> &Self::Target {
        &self.base
    }
}

impl ExtBytecode {
    /// Creates a new extended bytecode with the instruction pointer set to the start of the bytecode.
    pub fn new(base: Bytecode) -> Self {
        let instruction_pointer = base.bytecode_ptr();
        Self {
            base,
            instruction_pointer,
            bytecode_hash: None,
        }
    }

    /// Creates a new extended bytecode with a precomputed hash.
    pub fn new_with_hash(base: Bytecode, hash: B256) -> Self {
        let instruction_pointer = base.bytecode_ptr();
        Self {
            base,
            instruction_pointer,
            bytecode_hash: Some(hash),
        }
    }

    /// Recomputes the bytecode hash, caches it, and returns it.
    pub fn regenerate_hash(&mut self) -> B256 {
        let hash = self.base.hash_slow();
        self.bytecode_hash = Some(hash);
        hash
    }

    /// Returns the cached bytecode hash, if any.
    pub fn hash(&mut self) -> Option<B256> {
        self.bytecode_hash
    }
}

impl Jumps for ExtBytecode {
    #[inline]
    fn relative_jump(&mut self, offset: isize) {
        // SAFETY: The caller must ensure the resulting pointer stays within the bytecode.
        self.instruction_pointer = unsafe { self.instruction_pointer.offset(offset) };
    }

    #[inline]
    fn absolute_jump(&mut self, offset: usize) {
        // SAFETY: The caller must ensure `offset` is within the bytecode.
        self.instruction_pointer = unsafe { self.base.bytes_ref().as_ptr().add(offset) };
    }

    #[inline]
    fn is_valid_legacy_jump(&mut self, offset: usize) -> bool {
        self.base
            .legacy_jump_table()
            .expect("Panic if not legacy")
            .is_valid(offset)
    }

    #[inline]
    fn opcode(&self) -> u8 {
        // SAFETY: `instruction_pointer` always points into the bytecode.
        unsafe { *self.instruction_pointer }
    }

    #[inline]
    fn pc(&self) -> usize {
        // SAFETY: `instruction_pointer` always points at or after the start of the bytecode,
        // so the offset from the start is non-negative and fits in `usize`.
        unsafe {
            self.instruction_pointer
                .offset_from(self.base.bytes_ref().as_ptr()) as usize
        }
    }
}

impl Immediates for ExtBytecode {
    #[inline]
    fn read_u16(&self) -> u16 {
        unsafe { read_u16(self.instruction_pointer) }
    }

    #[inline]
    fn read_u8(&self) -> u8 {
        unsafe { *self.instruction_pointer }
    }

    #[inline]
    fn read_slice(&self, len: usize) -> &[u8] {
        unsafe { core::slice::from_raw_parts(self.instruction_pointer, len) }
    }

    #[inline]
    fn read_offset_u16(&self, offset: isize) -> u16 {
        unsafe {
            read_u16(
                self.instruction_pointer
                    // The offset can be negative for instructions that jump backwards.
                    .offset(offset),
            )
        }
    }
}

impl EofCodeInfo for ExtBytecode {
    fn code_info(&self, idx: usize) -> Option<&CodeInfo> {
        self.base.eof().and_then(|eof| eof.body.code_info.get(idx))
    }

    fn code_section_pc(&self, idx: usize) -> Option<usize> {
        self.base
            .eof()
            .and_then(|eof| eof.body.eof_code_section_start(idx))
    }
}

impl EofData for ExtBytecode {
    fn data(&self) -> &[u8] {
        self.base.eof().expect("eof").data()
    }

    fn data_slice(&self, offset: usize, len: usize) -> &[u8] {
        self.base.eof().expect("eof").data_slice(offset, len)
    }

    fn data_size(&self) -> usize {
        self.base.eof().expect("eof").header.data_size as usize
    }
}

impl EofContainer for ExtBytecode {
    fn eof_container(&self, index: usize) -> Option<&Bytes> {
        self.base
            .eof()
            .and_then(|eof| eof.body.container_section.get(index))
    }
}

impl LegacyBytecode for ExtBytecode {
    fn bytecode_len(&self) -> usize {
        // Legacy bytecode is never EOF; `assume!` asserts this invariant.
        assume!(!self.base.is_eof());
        self.base.len()
    }

    fn bytecode_slice(&self) -> &[u8] {
        // Legacy bytecode is never EOF; `assume!` asserts this invariant.
        assume!(!self.base.is_eof());
        self.base.original_byte_slice()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use primitives::Bytes;

    #[test]
    fn test_with_hash_constructor() {
        let bytecode = Bytecode::new_raw(Bytes::from(&[0x60, 0x00][..]));
        let hash = bytecode.hash_slow();
        let ext_bytecode = ExtBytecode::new_with_hash(bytecode.clone(), hash);
        assert_eq!(ext_bytecode.bytecode_hash, Some(hash));
    }
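
    // A minimal extra check, not from the original file: it exercises the
    // pointer cursor exposed through the `Jumps` trait, assuming
    // `Bytecode::bytecode_ptr()` points at the first byte of `bytes_ref()`
    // (the same invariant the `pc` implementation above relies on).
    #[test]
    fn test_jumps_cursor() {
        // 0x60 0x00 is `PUSH1 0x00`.
        let bytecode = Bytecode::new_raw(Bytes::from(&[0x60, 0x00][..]));
        let mut ext_bytecode = ExtBytecode::new(bytecode);

        // The cursor starts at the first opcode.
        assert_eq!(ext_bytecode.pc(), 0);
        assert_eq!(ext_bytecode.opcode(), 0x60);

        // A relative jump of one byte moves the cursor to the PUSH1 immediate.
        ext_bytecode.relative_jump(1);
        assert_eq!(ext_bytecode.pc(), 1);
        assert_eq!(ext_bytecode.opcode(), 0x00);
    }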
}