1use crate::args::EntrypointArgument;
2use crate::call_def::CallDef;
3use crate::casper_types::bytesrepr::{deserialize_from_slice, Bytes, FromBytes, ToBytes};
4use crate::casper_types::crypto::PublicKey;
5use crate::casper_types::{CLTyped, CLValue, BLAKE2B_DIGEST_LENGTH, U512};
6use crate::module::Revertible;
7use crate::validator::ValidatorInfo;
8pub use crate::ContractContext;
9use crate::VmError::{Serialization, TypeMismatch};
10use crate::{consts, prelude::*, utils};
11use casper_event_standard::{EventInstance, Schema, Schemas, EVENTS_SCHEMA};
12use casper_types::CLValueError;
13use rand_chacha::rand_core::{RngCore, SeedableRng};
14use rand_chacha::ChaCha8Rng;
15
// Length in bytes of a storage key: the hex encoding of a 32-byte hash
// (see `ContractEnv::current_key`, which hex-encodes the backend hash).
const KEY_LEN: usize = 64;
pub(crate) type StorageKey = [u8; KEY_LEN];

// Maximum nesting depth of a module path; `ContractEnv::child` reverts
// with `PathIndexOutOfBounds` when this depth would be exceeded.
pub(crate) const MAX_PATH_LEN: usize = 8;
21
/// A typed reference to a deployed contract, bound to a [`ContractEnv`].
pub trait ContractRef {
    /// Creates a reference to the contract living at `address`.
    fn new(env: Rc<ContractEnv>, address: Address) -> Self;
    /// Returns the address of the referenced contract.
    fn address(&self) -> &Address;
    /// Returns a copy of this reference carrying `tokens` — presumably
    /// attached to subsequent calls; confirm against the generated impls.
    fn with_tokens(&self, tokens: U512) -> Self;
}
34
/// The environment a contract module executes against: a storage path that
/// locates the module within the contract's state plus a handle to the host
/// backend implementing storage, calls, events and crypto.
#[derive(Clone)]
pub struct ContractEnv {
    /// Indices of nested modules, root to leaf; only the first `path_len`
    /// entries are meaningful.
    path: [u8; MAX_PATH_LEN],
    /// Number of valid entries in `path`.
    path_len: u8,
    /// Extra key material appended for mapping lookups
    /// (see `add_to_mapping_data`).
    mapping_data: Vec<u8>,
    /// Host-side backend providing storage, calls, events, etc.
    backend: Rc<RefCell<dyn ContractContext>>
}
48
impl Revertible for ContractEnv {
    /// Reverts execution with `e`. Delegates to the inherent
    /// `ContractEnv::revert` — inherent methods take precedence over trait
    /// methods in Rust's resolution, so this is not self-recursive.
    fn revert<E: Into<OdraError>>(&self, e: E) -> ! {
        self.revert(e)
    }
}
54
55impl ContractEnv {
56 pub const fn new(backend: Rc<RefCell<dyn ContractContext>>) -> Self {
58 Self {
59 path: [0u8; MAX_PATH_LEN],
60 path_len: 0,
61 mapping_data: Vec::new(),
62 backend
63 }
64 }
65
66 pub(crate) fn index_bytes(&self) -> Vec<u8> {
96 let path = &self.path[..self.path_len as usize];
97 if path.iter().all(|&idx| idx <= 15) {
100 let index: u32 = path.iter().fold(0u32, |acc, &idx| (acc << 4) + idx as u32);
101 index.to_be_bytes().to_vec()
102 } else {
103 let mut bytes = Vec::with_capacity(2 + path.len());
105 bytes.push(0xFF);
106 bytes.push(self.path_len);
107 bytes.extend_from_slice(path);
108 bytes
109 }
110 }
111
112 pub(crate) fn current_key(&self) -> StorageKey {
114 let mut result = [0u8; KEY_LEN];
115 let index_bytes = self.index_bytes();
116 let mut key = Vec::with_capacity(index_bytes.len() + self.mapping_data.len());
117 key.extend_from_slice(&index_bytes);
118 key.extend_from_slice(&self.mapping_data);
119 let hashed_key = self.backend.borrow().hash(key.as_slice());
120 utils::hex_to_slice(&hashed_key, &mut result);
121 result
122 }
123
124 pub(crate) fn add_to_mapping_data(&mut self, data: &[u8]) {
126 self.mapping_data.extend_from_slice(data);
127 }
128
129 pub(crate) fn child(&self, index: u8) -> Self {
131 let mut new_path = self.path;
132 let Some(slot) = new_path.get_mut(self.path_len as usize) else {
133 self.revert(ExecutionError::PathIndexOutOfBounds)
134 };
135 *slot = index;
136
137 Self {
138 path: new_path,
139 path_len: self.path_len + 1,
140 mapping_data: self.mapping_data.clone(),
141 backend: self.backend.clone()
142 }
143 }
144
145 pub fn get_value<T: FromBytes>(&self, key: &[u8]) -> Option<T> {
151 self.backend
152 .borrow()
153 .get_value(key)
154 .map(|bytes| deserialize_from_slice(bytes).unwrap_or_revert(self))
155 }
156
157 pub fn set_value<T: ToBytes + CLTyped>(&self, key: &[u8], value: T) {
159 let result = value.to_bytes().map_err(ExecutionError::from);
160 let bytes = result.unwrap_or_revert(self);
161 self.backend.borrow().set_value(key, bytes.into());
162 }
163
164 pub fn get_named_value<T: FromBytes + CLTyped, U: AsRef<str>>(&self, name: U) -> Option<T> {
166 let key = name.as_ref();
167 let bytes = self.backend.borrow().get_named_value(key);
168 bytes.map(|b| deserialize_from_slice(b).unwrap_or_revert(self))
169 }
170
171 pub fn set_named_value<T: CLTyped + ToBytes, U: AsRef<str>>(&self, name: U, value: T) {
173 let key = name.as_ref();
174 let cl_value = CLValue::from_t(value)
175 .map_err(|e| match e {
176 CLValueError::Serialization(_) => OdraError::VmError(Serialization),
177 CLValueError::Type(e) => OdraError::VmError(TypeMismatch {
178 found: e.found,
179 expected: e.expected
180 })
181 })
182 .unwrap_or_revert(self);
183 self.backend.borrow().set_named_value(key, cl_value);
184 }
185
186 pub fn get_dictionary_value<T: FromBytes + CLTyped, U: AsRef<str>>(
188 &self,
189 dictionary_name: U,
190 key: &[u8]
191 ) -> Option<T> {
192 let dictionary_name = dictionary_name.as_ref();
193 let bytes = self
194 .backend
195 .borrow()
196 .get_dictionary_value(dictionary_name, key);
197 bytes.map(|b| {
198 deserialize_from_slice(b)
199 .map_err(|_| ExecutionError::Formatting)
200 .unwrap_or_revert(self)
201 })
202 }
203
204 pub fn set_dictionary_value<T: CLTyped + ToBytes, U: AsRef<str>>(
206 &self,
207 dictionary_name: U,
208 key: &[u8],
209 value: T
210 ) {
211 let dictionary_name = dictionary_name.as_ref();
212 let cl_value = CLValue::from_t(value)
213 .map_err(|_| ExecutionError::Formatting)
214 .unwrap_or_revert(self);
215 self.backend
216 .borrow()
217 .set_dictionary_value(dictionary_name, key, cl_value);
218 }
219
220 pub fn remove_dictionary<U: AsRef<str>>(&self, dictionary_name: U) {
222 let dictionary_name = dictionary_name.as_ref();
223 self.backend.borrow().remove_dictionary(dictionary_name);
224 }
225
226 pub fn init_dictionary<U: AsRef<str>>(&self, dictionary_name: U) {
228 let dictionary_name = dictionary_name.as_ref();
229 self.backend.borrow().init_dictionary(dictionary_name);
230 }
231
232 pub fn caller(&self) -> Address {
234 let backend = self.backend.borrow();
235 backend.caller()
236 }
237
238 pub fn call_contract<T: FromBytes>(&self, address: Address, call: CallDef) -> T {
244 let backend = self.backend.borrow();
245 let bytes = backend.call_contract(address, call);
246 deserialize_from_slice(bytes).unwrap_or_revert(self)
247 }
248
249 pub fn self_address(&self) -> Address {
251 let backend = self.backend.borrow();
252 backend.self_address()
253 }
254
255 pub fn transfer_tokens(&self, to: &Address, amount: &U512) {
257 let backend = self.backend.borrow();
258 backend.transfer_tokens(to, amount)
259 }
260
261 pub fn get_block_time(&self) -> u64 {
263 let backend = self.backend.borrow();
264 backend.get_block_time()
265 }
266
267 pub fn get_block_time_millis(&self) -> u64 {
269 let backend = self.backend.borrow();
270 backend.get_block_time()
271 }
272
273 pub fn get_block_time_secs(&self) -> u64 {
275 let backend = self.backend.borrow();
276 backend.get_block_time().checked_div(1000).unwrap()
277 }
278
279 pub fn attached_value(&self) -> U512 {
281 let backend = self.backend.borrow();
282 backend.attached_value()
283 }
284
285 pub fn self_balance(&self) -> U512 {
287 let backend = self.backend.borrow();
288 backend.self_balance()
289 }
290
291 pub fn revert<E: Into<OdraError>>(&self, error: E) -> ! {
293 let backend = self.backend.borrow();
294 backend.revert(error.into())
295 }
296
297 pub fn emit_event<T: ToBytes + EventInstance>(&self, event: T) {
299 let backend = self.backend.borrow();
300 let result = event.to_bytes().map_err(ExecutionError::from);
301 let bytes = result.unwrap_or_revert(self);
302 backend.emit_event(&bytes.into())
303 }
304
305 pub fn emit_native_event<T: ToBytes + EventInstance>(&self, event: T) {
307 let backend = self.backend.borrow();
308 let result = event.to_bytes().map_err(ExecutionError::from);
309 let bytes = result.unwrap_or_revert(self);
310 backend.emit_native_event(&bytes.into())
311 }
312
313 pub fn verify_signature(
325 &self,
326 message: &Bytes,
327 signature: &Bytes,
328 public_key: &PublicKey
329 ) -> bool {
330 let (signature, _) = casper_types::crypto::Signature::from_bytes(signature.as_slice())
331 .unwrap_or_else(|_| self.revert(ExecutionError::CouldNotDeserializeSignature));
332 casper_types::crypto::verify(message.as_slice(), &signature, public_key).is_ok()
333 }
334
335 pub fn hash<T: AsRef<[u8]>>(&self, value: T) -> [u8; BLAKE2B_DIGEST_LENGTH] {
341 self.backend.borrow().hash(value.as_ref())
342 }
343
344 pub fn delegate(&self, validator: PublicKey, amount: U512) {
351 self.backend.borrow().delegate(validator, amount)
352 }
353
354 pub fn undelegate(&self, validator: PublicKey, amount: U512) {
361 self.backend.borrow().undelegate(validator, amount)
362 }
363
364 pub fn delegated_amount(&self, validator: PublicKey) -> U512 {
374 self.backend.borrow().delegated_amount(validator)
375 }
376
377 pub fn get_validator_info(&self, validator: PublicKey) -> Option<ValidatorInfo> {
385 self.backend.borrow().get_validator_info(validator)
386 }
387
388 pub fn pseudorandom_bytes(&self, size: usize) -> Vec<u8> {
391 let seed_bytes = self.backend.borrow().pseudorandom_bytes();
392
393 if size <= seed_bytes.len() {
394 return seed_bytes[..size].to_vec();
395 }
396
397 let mut result = seed_bytes.to_vec();
399 let mut rng = ChaCha8Rng::from_seed(seed_bytes);
400 let additional_bytes = size - result.len();
401 let mut extra = vec![0u8; additional_bytes];
402 rng.fill_bytes(&mut extra);
403 result.extend_from_slice(&extra);
404
405 result
406 }
407
408 pub fn pseudorandom_number(&self, high: U512) -> U512 {
410 let seed_bytes = self.backend.borrow().pseudorandom_bytes();
411 let mut rng = ChaCha8Rng::from_seed(seed_bytes);
412 let bits = high.bits();
413 let bytes_len = bits.div_ceil(8);
414 let max = U512::from(1u64) << bits; let limit = max - (max % high);
416 loop {
417 let mut bytes = vec![0u8; bytes_len];
418 rng.fill_bytes(&mut bytes);
419 let candidate = U512::from_big_endian(&bytes);
420
421 if candidate < limit {
422 return candidate % high;
423 }
424 }
426 }
427}
428
/// Execution-scoped wrapper around a [`ContractEnv`] providing entry-point
/// helpers: the reentrancy guard, named-argument access, attached-value
/// handling and event-schema migration.
pub struct ExecutionEnv {
    /// The contract environment all operations are delegated to.
    env: Rc<ContractEnv>
}
436
impl Revertible for ExecutionEnv {
    /// Reverts execution with `e`, delegating to the wrapped
    /// [`ContractEnv`]'s inherent `revert`.
    fn revert<E: Into<OdraError>>(&self, e: E) -> ! {
        self.env.revert(e)
    }
}
442
443impl ExecutionEnv {
444 pub fn new(env: Rc<ContractEnv>) -> Self {
446 Self { env }
447 }
448
449 pub fn non_reentrant_before(&self) {
451 let status: bool = self
453 .env
454 .get_value(consts::REENTRANCY_GUARD.as_slice())
455 .unwrap_or_default();
456 if status {
457 self.env.revert(ExecutionError::ReentrantCall);
459 }
460 self.env
462 .set_value(consts::REENTRANCY_GUARD.as_slice(), true);
463 }
464
465 pub fn non_reentrant_after(&self) {
467 self.env
469 .set_value(consts::REENTRANCY_GUARD.as_slice(), false);
470 }
471
472 pub fn handle_attached_value(&self) {
474 self.env.backend.borrow().handle_attached_value();
475 }
476
477 pub fn clear_attached_value(&self) {
479 self.env.backend.borrow().clear_attached_value();
480 }
481
482 pub fn get_named_arg<T: FromBytes + EntrypointArgument>(&self, name: &str) -> T {
489 if T::is_required() {
490 let result = self.env.backend.borrow().get_named_arg_bytes(name);
491 match result {
492 Ok(bytes) => deserialize_from_slice(bytes).unwrap_or_revert(self),
493 Err(err) => self.env.revert(err)
494 }
495 } else {
496 let bytes = self.env.backend.borrow().get_opt_named_arg_bytes(name);
497 let result = bytes.map(|bytes| deserialize_from_slice(bytes).unwrap_or_revert(self));
498 T::unwrap(result, &self.env)
499 }
500 }
501
502 pub fn migrate_schemas(&self, new_schemas: BTreeMap<String, Schema>) {
504 let mut old_schemas: Schemas = self.env.get_named_value(EVENTS_SCHEMA).unwrap_or_default();
505
506 for (name, new_schema) in new_schemas.iter() {
507 match old_schemas.0.get(name) {
508 None => {
510 old_schemas.0.insert(name.clone(), new_schema.clone());
511 }
512 Some(old_schema) => {
514 if old_schema != new_schema {
515 self.env.revert(ExecutionError::SchemaMismatch);
516 }
517 }
518 }
519 }
520
521 self.env.set_named_value(EVENTS_SCHEMA, old_schemas);
523 }
524
525 pub fn emit_event<T: ToBytes + EventInstance>(&self, event: T) {
527 self.env.emit_event(event);
528 }
529}
530
#[cfg(test)]
mod tests {
    use super::*;
    use crate::contract_context::MockContractContext;

    /// Builds an env over a mock backend whose `hash` simply copies the
    /// first (up to) 32 input bytes, keeping key material inspectable.
    fn make_env() -> ContractEnv {
        let mut ctx = MockContractContext::new();
        ctx.expect_hash().returning(|input| {
            let mut digest = [0u8; 32];
            for (dst, src) in digest.iter_mut().zip(input.iter()) {
                *dst = *src;
            }
            digest
        });
        ContractEnv::new(Rc::new(RefCell::new(ctx)))
    }

    /// The pre-extended-encoding formula: 4 bits per path element,
    /// big-endian.
    fn legacy_u32_for_path(path: &[u8]) -> u32 {
        let mut acc: u32 = 0;
        for &idx in path {
            acc = (acc << 4) + u32::from(idx);
        }
        acc
    }

    #[test]
    fn encoding_matches_old_u32_formula() {
        let env = make_env();

        let child = env.child(3);
        let expected = legacy_u32_for_path(&[3]).to_be_bytes().to_vec();
        assert_eq!(child.index_bytes(), expected);

        let grandchild = child.child(15);
        let expected = legacy_u32_for_path(&[3, 15]).to_be_bytes().to_vec();
        assert_eq!(grandchild.index_bytes(), expected);

        let deep = env.child(1).child(2).child(3).child(4);
        let expected = legacy_u32_for_path(&[1, 2, 3, 4]).to_be_bytes().to_vec();
        assert_eq!(deep.index_bytes(), expected);
    }

    #[test]
    fn path_encoding_used_for_indices_above_15() {
        let env = make_env();
        let encoded = env.child(3).child(16).index_bytes();
        // 0xFF tag, path length 2, then the raw indices.
        assert_eq!(&encoded[..4], &[0xFF, 2, 3, 16]);
    }

    #[test]
    fn no_collision_between_var_and_mapping() {
        let env = make_env();

        // Variable at path [3, 16] vs mapping at path [3] keyed by [16].
        let var_key = env.child(3).child(16).current_key();
        let mut map_env = env.child(3);
        map_env.add_to_mapping_data(&[16]);
        assert_ne!(var_key, map_env.current_key());

        // Variable at path [3, 1] vs mapping at path [3] whose key bytes
        // mimic the extended path encoding.
        let var_key = env.child(3).child(1).current_key();
        let mut map_env = env.child(3);
        map_env.add_to_mapping_data(&[0xFF, 2, 3, 1]);
        assert_ne!(var_key, map_env.current_key());
    }

    #[test]
    fn no_collision_between_small_and_path_encoding() {
        let env = make_env();
        let nibble_key = env.child(1).child(2).current_key();
        let extended_key = env.child(1).child(20).current_key();
        assert_ne!(nibble_key, extended_key);
    }
}